1 | /* Output Dwarf2 format symbol table information from GCC. |
2 | Copyright (C) 1992-2023 Free Software Foundation, Inc. |
3 | Contributed by Gary Funck (gary@intrepid.com). |
4 | Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com). |
5 | Extensively modified by Jason Merrill (jason@cygnus.com). |
6 | |
7 | This file is part of GCC. |
8 | |
9 | GCC is free software; you can redistribute it and/or modify it under |
10 | the terms of the GNU General Public License as published by the Free |
11 | Software Foundation; either version 3, or (at your option) any later |
12 | version. |
13 | |
14 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
15 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
16 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
17 | for more details. |
18 | |
19 | You should have received a copy of the GNU General Public License |
20 | along with GCC; see the file COPYING3. If not see |
21 | <http://www.gnu.org/licenses/>. */ |
22 | |
23 | /* TODO: Emit .debug_line header even when there are no functions, since |
24 | the file numbers are used by .debug_info. Alternately, leave |
25 | out locations for types and decls. |
26 | Avoid talking about ctors and op= for PODs. |
27 | Factor out common prologue sequences into multiple CIEs. */ |
28 | |
29 | /* The first part of this file deals with the DWARF 2 frame unwind |
30 | information, which is also used by the GCC efficient exception handling |
31 | mechanism. The second part, controlled only by an #ifdef |
32 | DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging |
33 | information. */ |
34 | |
35 | /* DWARF2 Abbreviation Glossary: |
36 | |
37 | CFA = Canonical Frame Address |
38 | a fixed address on the stack which identifies a call frame. |
39 | We define it to be the value of SP just before the call insn. |
40 | The CFA register and offset, which may change during the course |
41 | of the function, are used to calculate its value at runtime. |
42 | |
43 | CFI = Call Frame Instruction |
44 | an instruction for the DWARF2 abstract machine |
45 | |
46 | CIE = Common Information Entry |
47 | information describing information common to one or more FDEs |
48 | |
49 | DIE = Debugging Information Entry |
50 | |
51 | FDE = Frame Description Entry |
52 | information describing the stack call frame, in particular, |
53 | how to restore registers |
54 | |
55 | DW_CFA_... = DWARF2 CFA call frame instruction |
56 | DW_TAG_... = DWARF2 DIE tag */ |
57 | |
58 | #include "config.h" |
59 | #include "system.h" |
60 | #include "coretypes.h" |
61 | #include "target.h" |
62 | #include "function.h" |
63 | #include "rtl.h" |
64 | #include "tree.h" |
65 | #include "memmodel.h" |
66 | #include "tm_p.h" |
67 | #include "stringpool.h" |
68 | #include "insn-config.h" |
69 | #include "ira.h" |
70 | #include "cgraph.h" |
71 | #include "diagnostic.h" |
72 | #include "fold-const.h" |
73 | #include "stor-layout.h" |
74 | #include "varasm.h" |
75 | #include "version.h" |
76 | #include "flags.h" |
77 | #include "rtlhash.h" |
78 | #include "reload.h" |
79 | #include "output.h" |
80 | #include "expr.h" |
81 | #include "dwarf2out.h" |
82 | #include "dwarf2ctf.h" |
83 | #include "dwarf2asm.h" |
84 | #include "toplev.h" |
85 | #include "md5.h" |
86 | #include "tree-pretty-print.h" |
87 | #include "print-rtl.h" |
88 | #include "debug.h" |
89 | #include "common/common-target.h" |
90 | #include "langhooks.h" |
91 | #include "lra.h" |
92 | #include "dumpfile.h" |
93 | #include "opts.h" |
94 | #include "tree-dfa.h" |
95 | #include "gdb/gdb-index.h" |
96 | #include "rtl-iter.h" |
97 | #include "stringpool.h" |
98 | #include "attribs.h" |
99 | #include "file-prefix-map.h" /* remap_debug_filename() */ |
100 | |
/* Hook for emitting a source line note; defined later in this file.  */
static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
				   int, bool);
/* The insn that carried the most recently seen variable-location note.  */
static rtx_insn *last_var_location_insn;
/* Cached result of the last next-real-insn scan, to avoid rescanning.  */
static rtx_insn *cached_next_real_insn;
/* Emit debug information for a decl; defined later in this file.  */
static void dwarf2out_decl (tree);
/* Whether a typedef DIE would be redundant; defined later in this file.  */
static bool is_redundant_typedef (const_tree);
107 | |
/* Nonzero when generating debug info for an XCOFF (AIX) target;
   default to 0 when the target headers do not define it, so the macro
   can be tested unconditionally below.  */
#ifndef XCOFF_DEBUGGING_INFO
#define XCOFF_DEBUGGING_INFO 0
#endif

/* Nonzero when the assembler supports the AIX DWARF extensions.
   The original guard read "#define 0" — the macro name was missing,
   which leaves HAVE_XCOFF_DWARF_EXTRAS undefined (and is not valid
   preprocessor input).  Supply the intended zero default.  */
#ifndef HAVE_XCOFF_DWARF_EXTRAS
#define HAVE_XCOFF_DWARF_EXTRAS 0
#endif
115 | |
#ifdef VMS_DEBUGGING_INFO
/* Query VMS file statistics for a path; provided by VMS-specific code.  */
int vms_file_stats_name (const char *, long long *, long *, char *, int *);

/* Define this macro to be a nonzero value if the directory specifications
   which are output in the debug info should end with a separator.  */
#define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
/* Define this macro to evaluate to a nonzero value if GCC should refrain
   from generating indirect strings in DWARF2 debug information, for instance
   if your target is stuck with an old version of GDB that is unable to
   process them properly or uses VMS Debug.  */
#define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
#else
/* Defaults for non-VMS targets: no trailing separator, indirect strings OK.  */
#define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
#define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
#endif

/* ??? Poison these here until it can be done generically.  They've been
   totally replaced in this file; make sure it stays that way.  */
#undef DWARF2_UNWIND_INFO
#undef DWARF2_FRAME_INFO
#if (GCC_VERSION >= 3000)
#pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
#endif

/* The size of the target's pointer type.  */
#ifndef PTR_SIZE
#define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
#endif
144 | |
/* Array of RTXes referenced by the debugging information, which therefore
   must be kept around forever.  */
static GTY(()) vec<rtx, va_gc> *used_rtx_array;

/* A pointer to the base of a list of incomplete types which might be
   completed at some later time.  incomplete_types_list needs to be a
   vec<tree, va_gc> *because we want to tell the garbage collector about
   it.  */
static GTY(()) vec<tree, va_gc> *incomplete_types;

/* Pointers to various DWARF2 sections.  */
static GTY(()) section *debug_info_section;
static GTY(()) section *debug_skeleton_info_section;
static GTY(()) section *debug_abbrev_section;
static GTY(()) section *debug_skeleton_abbrev_section;
static GTY(()) section *debug_aranges_section;
static GTY(()) section *debug_addr_section;
static GTY(()) section *debug_macinfo_section;
/* Name of the macro-information section actually in use
   (presumably .debug_macinfo or .debug_macro — set elsewhere).  */
static const char *debug_macinfo_section_name;
/* Base number for labels emitted into the macinfo section; starts at 1.  */
static unsigned macinfo_label_base = 1;
static GTY(()) section *debug_line_section;
static GTY(()) section *debug_skeleton_line_section;
static GTY(()) section *debug_loc_section;
static GTY(()) section *debug_pubnames_section;
static GTY(()) section *debug_pubtypes_section;
static GTY(()) section *debug_str_section;
static GTY(()) section *debug_line_str_section;
static GTY(()) section *debug_str_dwo_section;
static GTY(()) section *debug_str_offsets_section;
static GTY(()) section *debug_ranges_section;
static GTY(()) section *debug_ranges_dwo_section;
static GTY(()) section *debug_frame_section;

/* Maximum size (in bytes) of an artificially generated label.  */
#define MAX_ARTIFICIAL_LABEL_BYTES 40
180 | |
/* According to the (draft) DWARF 3 specification, the initial length
   should either be 4 or 12 bytes.  When it's 12 bytes, the first 4
   bytes are 0xffffffff, followed by the length stored in the next 8
   bytes.

   However, the SGI/MIPS ABI uses an initial length which is equal to
   dwarf_offset_size.  It is defined (elsewhere) accordingly.  */

#ifndef DWARF_INITIAL_LENGTH_SIZE
#define DWARF_INITIAL_LENGTH_SIZE (dwarf_offset_size == 4 ? 4 : 12)
#endif

/* String form of the same quantity ("-4"/"-12"); NOTE(review): appears
   intended for assembler-expression arithmetic — confirm at use sites.  */
#ifndef DWARF_INITIAL_LENGTH_SIZE_STR
#define DWARF_INITIAL_LENGTH_SIZE_STR (dwarf_offset_size == 4 ? "-4" : "-12")
#endif

/* Round SIZE up to the nearest BOUNDARY.  */
#define DWARF_ROUND(SIZE,BOUNDARY) \
  ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))

/* CIE identifier: DW_CIE_ID for 32-bit DWARF, DW64_CIE_ID for 64-bit
   (when the host can represent the wider constant).  */
#if HOST_BITS_PER_WIDE_INT >= 64
#define DWARF_CIE_ID \
  (unsigned HOST_WIDE_INT) (dwarf_offset_size == 4 ? DW_CIE_ID : DW64_CIE_ID)
#else
#define DWARF_CIE_ID DW_CIE_ID
#endif


/* A vector for a table that contains frame description
   information for each routine.  */
/* Sentinel values for an FDE index (see uses elsewhere in this file).  */
#define NOT_INDEXED (-1U)
#define NO_INDEX_ASSIGNED (-2U)

static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
216 | |
/* A node in the interned-debug-string hash tables below.  Each distinct
   string is stored once, together with the emission state GCC tracks
   while deciding which DW_FORM to use for it.  */
struct GTY((for_user)) indirect_string_node {
  const char *str;		/* The interned string contents.  */
  unsigned int refcount;	/* How many times the string is referenced.  */
  enum dwarf_form form;		/* DW_FORM_* chosen for emission.  */
  char *label;			/* Label for indirect (strp-style) forms.  */
  unsigned int index;		/* Index for strx-style forms; presumably
				   NOT_INDEXED/NO_INDEX_ASSIGNED apply —
				   confirm at use sites.  */
};
224 | |
/* Hash-table descriptor for indirect_string_node entries, compared
   against plain C strings.  */
struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
{
  typedef const char *compare_type;

  static hashval_t hash (indirect_string_node *);
  static bool equal (indirect_string_node *, const char *);
};
232 | |
/* Interned strings, keyed by contents (see indirect_string_node).  */
static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;

/* A second interned-string table, separate from debug_str_hash.  */
static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;

/* With split_debug_info, both the comp_dir and dwo_name go in the
   main object file, rather than the dwo, similar to the force_direct
   parameter elsewhere but with additional complications:

   1) The string is needed in both the main object file and the dwo.
   That is, the comp_dir and dwo_name will appear in both places.

   2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
   DW_FORM_line_strp or DW_FORM_strx/GNU_str_index.

   3) GCC chooses the form to use late, depending on the size and
   reference count.

   Rather than forcing the all debug string handling functions and
   callers to deal with these complications, simply use a separate,
   special-cased string table for any attribute that should go in the
   main object file.  This limits the complexity to just the places
   that need it.  */

static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;

/* Counter; presumably used to number string labels — confirm at use sites.  */
static GTY(()) int dw2_string_counter;
259 | |
/* True if the compilation unit places functions in more than one section.  */
static GTY(()) bool have_multiple_function_sections = false;

/* The default cold text section.  */
static GTY(()) section *cold_text_section;

/* True if currently in text section.  */
static GTY(()) bool in_text_section_p = false;

/* Last debug-on location in corresponding section (one label per
   section: normal text vs. cold text).  */
static GTY(()) const char *last_text_label;
static GTY(()) const char *last_cold_label;

/* Mark debug-on/off locations per section.
   NULL means the section is not used at all.  */
static GTY(()) vec<const char *, va_gc> *switch_text_ranges;
static GTY(()) vec<const char *, va_gc> *switch_cold_ranges;

/* The DIE for C++14 'auto' in a function return type.  */
static GTY(()) dw_die_ref auto_die;

/* The DIE for C++14 'decltype(auto)' in a function return type.  */
static GTY(()) dw_die_ref decltype_auto_die;

/* Forward declarations for functions defined in this file.  */

static void output_call_frame_info (int);

/* Personality decl of current unit.  Used only when assembler does not support
   personality CFI.  */
static GTY(()) rtx current_unit_personality;

/* Whether an eh_frame section is required.  */
static GTY(()) bool do_eh_frame = false;

/* .debug_rnglists next index.  */
static unsigned int rnglist_idx;
297 | |
/* Data and reference forms for relocatable data.  */
#define DW_FORM_data (dwarf_offset_size == 8 ? DW_FORM_data8 : DW_FORM_data4)
#define DW_FORM_ref (dwarf_offset_size == 8 ? DW_FORM_ref8 : DW_FORM_ref4)

/* Section name for .debug_frame, overridable by the target.  */
#ifndef DEBUG_FRAME_SECTION
#define DEBUG_FRAME_SECTION ".debug_frame"
#endif

/* Overridable label prefixes marking function begin/end, the second
   section of a split function, and prologue/epilogue boundaries.  */
#ifndef FUNC_BEGIN_LABEL
#define FUNC_BEGIN_LABEL "LFB"
#endif

#ifndef FUNC_SECOND_SECT_LABEL
#define FUNC_SECOND_SECT_LABEL "LFSB"
#endif

#ifndef FUNC_END_LABEL
#define FUNC_END_LABEL "LFE"
#endif

#ifndef PROLOGUE_END_LABEL
#define PROLOGUE_END_LABEL "LPE"
#endif

#ifndef EPILOGUE_BEGIN_LABEL
#define EPILOGUE_BEGIN_LABEL "LEB"
#endif

#ifndef FRAME_BEGIN_LABEL
#define FRAME_BEGIN_LABEL "Lframe"
#endif
/* Internal label prefixes for pieces of the frame (CIE/FDE) and
   line-number tables, and for DIE references.  */
#define CIE_AFTER_SIZE_LABEL "LSCIE"
#define CIE_END_LABEL "LECIE"
#define FDE_LABEL "LSFDE"
#define FDE_AFTER_SIZE_LABEL "LASFDE"
#define FDE_END_LABEL "LEFDE"
#define LINE_NUMBER_BEGIN_LABEL "LSLT"
#define LINE_NUMBER_END_LABEL "LELT"
#define LN_PROLOG_AS_LABEL "LASLTP"
#define LN_PROLOG_END_LABEL "LELTP"
#define DIE_LABEL_PREFIX "DW"
339 | |
340 | /* Match the base name of a file to the base name of a compilation unit. */ |
341 | |
342 | static bool |
343 | matches_main_base (const char *path) |
344 | { |
345 | /* Cache the last query. */ |
346 | static const char *last_path = NULL; |
347 | static bool last_match = false; |
348 | if (path != last_path) |
349 | { |
350 | const char *base; |
351 | int length = base_of_path (path, base_out: &base); |
352 | last_path = path; |
353 | last_match = (length == main_input_baselength |
354 | && memcmp (s1: base, main_input_basename, n: length) == 0); |
355 | } |
356 | return last_match; |
357 | } |
358 | |
#ifdef DEBUG_DEBUG_STRUCT

/* Debug helper: print one line to stderr describing a struct-debug
   decision and pass RESULT through unchanged.  TYPE is the type under
   consideration, USAGE how it is used, CRITERION the file-based filter
   applied, GENERIC whether the language considers the type generic,
   MATCHES whether its defining file matched the main input file, and
   RESULT the decision being returned.  */

static bool
dump_struct_debug (tree type, enum debug_info_usage usage,
		   enum debug_struct_file criterion, int generic,
		   bool matches, bool result)
{
  /* Find the type name.  */
  tree type_decl = TYPE_STUB_DECL (type);
  tree t = type_decl;
  const char *name = 0;
  if (TREE_CODE (t) == TYPE_DECL)
    t = DECL_NAME (t);
  if (t)
    name = IDENTIFIER_POINTER (t);

  /* NAME can remain null (e.g. for unnamed types); passing a null
     pointer for %s is undefined behavior, so substitute a marker.  */
  fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
	   criterion,
	   DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
	   matches ? "bas" : "hdr",
	   generic ? "gen" : "ord",
	   usage == DINFO_USAGE_DFN ? ";" :
	   usage == DINFO_USAGE_DIR_USE ? "." : "*",
	   result,
	   (void*) type_decl, name ? name : "<unnamed>");
  return result;
}
#define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
  dump_struct_debug (type, usage, criterion, generic, matches, result)

#else

/* Without DEBUG_DEBUG_STRUCT, the dump is a no-op that yields RESULT.  */
#define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
  (result)

#endif
395 | |
396 | /* Get the number of HOST_WIDE_INTs needed to represent the precision |
397 | of the number. */ |
398 | |
399 | static unsigned int |
400 | get_full_len (const dw_wide_int &op) |
401 | { |
402 | return CEIL (op.get_precision (), HOST_BITS_PER_WIDE_INT); |
403 | } |
404 | |
405 | static bool |
406 | should_emit_struct_debug (tree type, enum debug_info_usage usage) |
407 | { |
408 | if (debug_info_level <= DINFO_LEVEL_TERSE) |
409 | return false; |
410 | |
411 | enum debug_struct_file criterion; |
412 | tree type_decl; |
413 | bool generic = lang_hooks.types.generic_p (type); |
414 | |
415 | if (generic) |
416 | criterion = debug_struct_generic[usage]; |
417 | else |
418 | criterion = debug_struct_ordinary[usage]; |
419 | |
420 | if (criterion == DINFO_STRUCT_FILE_NONE) |
421 | return DUMP_GSTRUCT (type, usage, criterion, generic, false, false); |
422 | if (criterion == DINFO_STRUCT_FILE_ANY) |
423 | return DUMP_GSTRUCT (type, usage, criterion, generic, false, true); |
424 | |
425 | type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type)); |
426 | |
427 | if (type_decl != NULL) |
428 | { |
429 | if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl)) |
430 | return DUMP_GSTRUCT (type, usage, criterion, generic, false, true); |
431 | |
432 | if (matches_main_base (DECL_SOURCE_FILE (type_decl))) |
433 | return DUMP_GSTRUCT (type, usage, criterion, generic, true, true); |
434 | } |
435 | |
436 | return DUMP_GSTRUCT (type, usage, criterion, generic, false, false); |
437 | } |
438 | |
/* Switch [BACK] to eh_frame_section.  If we don't have an eh_frame_section,
   switch to the data section instead, and write out a synthetic start label
   for collect2 the first time around.  */

static void
switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
{
  if (eh_frame_section == 0)
    {
      int flags;

      if (EH_TABLES_CAN_BE_READ_ONLY)
	{
	  int fde_encoding;
	  int per_encoding;
	  int lsda_encoding;

	  /* The section can be read-only only when none of the pointer
	     encodings require absolute addresses, which would need
	     run-time relocation under -fpic.  */
	  fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
						       /*global=*/0);
	  per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
						       /*global=*/1);
	  lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
						       /*global=*/0);
	  flags = ((! flag_pic
		    || ((fde_encoding & 0x70) != DW_EH_PE_absptr
			&& (fde_encoding & 0x70) != DW_EH_PE_aligned
			&& (per_encoding & 0x70) != DW_EH_PE_absptr
			&& (per_encoding & 0x70) != DW_EH_PE_aligned
			&& (lsda_encoding & 0x70) != DW_EH_PE_absptr
			&& (lsda_encoding & 0x70) != DW_EH_PE_aligned))
		   ? 0 : SECTION_WRITE);
	}
      else
	flags = SECTION_WRITE;

#ifdef EH_FRAME_SECTION_NAME
      eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
#else
      /* No dedicated section name: fall back to (readonly) data.  */
      eh_frame_section = ((flags == SECTION_WRITE)
			  ? data_section : readonly_data_section);
#endif /* EH_FRAME_SECTION_NAME */
    }

  switch_to_section (eh_frame_section);

#ifdef EH_FRAME_THROUGH_COLLECT2
  /* We have no special eh_frame section.  Emit special labels to guide
     collect2.  */
  if (!back)
    {
      tree label = get_file_function_name ("F");
      ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
      targetm.asm_out.globalize_label (asm_out_file,
				       IDENTIFIER_POINTER (label));
      ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
    }
#endif
}
497 | |
498 | /* Switch [BACK] to the eh or debug frame table section, depending on |
499 | FOR_EH. */ |
500 | |
501 | static void |
502 | switch_to_frame_table_section (int for_eh, bool back) |
503 | { |
504 | if (for_eh) |
505 | switch_to_eh_frame_section (back); |
506 | else |
507 | { |
508 | if (!debug_frame_section) |
509 | debug_frame_section = get_section (DEBUG_FRAME_SECTION, |
510 | SECTION_DEBUG, NULL); |
511 | switch_to_section (debug_frame_section); |
512 | } |
513 | } |
514 | |
515 | /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */ |
516 | |
517 | enum dw_cfi_oprnd_type |
518 | dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi) |
519 | { |
520 | switch (cfi) |
521 | { |
522 | case DW_CFA_nop: |
523 | case DW_CFA_GNU_window_save: |
524 | case DW_CFA_remember_state: |
525 | case DW_CFA_restore_state: |
526 | return dw_cfi_oprnd_unused; |
527 | |
528 | case DW_CFA_set_loc: |
529 | case DW_CFA_advance_loc1: |
530 | case DW_CFA_advance_loc2: |
531 | case DW_CFA_advance_loc4: |
532 | case DW_CFA_MIPS_advance_loc8: |
533 | return dw_cfi_oprnd_addr; |
534 | |
535 | case DW_CFA_offset: |
536 | case DW_CFA_offset_extended: |
537 | case DW_CFA_def_cfa: |
538 | case DW_CFA_offset_extended_sf: |
539 | case DW_CFA_def_cfa_sf: |
540 | case DW_CFA_restore: |
541 | case DW_CFA_restore_extended: |
542 | case DW_CFA_undefined: |
543 | case DW_CFA_same_value: |
544 | case DW_CFA_def_cfa_register: |
545 | case DW_CFA_register: |
546 | case DW_CFA_expression: |
547 | case DW_CFA_val_expression: |
548 | return dw_cfi_oprnd_reg_num; |
549 | |
550 | case DW_CFA_def_cfa_offset: |
551 | case DW_CFA_GNU_args_size: |
552 | case DW_CFA_def_cfa_offset_sf: |
553 | return dw_cfi_oprnd_offset; |
554 | |
555 | case DW_CFA_def_cfa_expression: |
556 | return dw_cfi_oprnd_loc; |
557 | |
558 | default: |
559 | gcc_unreachable (); |
560 | } |
561 | } |
562 | |
563 | /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */ |
564 | |
565 | enum dw_cfi_oprnd_type |
566 | dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi) |
567 | { |
568 | switch (cfi) |
569 | { |
570 | case DW_CFA_def_cfa: |
571 | case DW_CFA_def_cfa_sf: |
572 | case DW_CFA_offset: |
573 | case DW_CFA_offset_extended_sf: |
574 | case DW_CFA_offset_extended: |
575 | return dw_cfi_oprnd_offset; |
576 | |
577 | case DW_CFA_register: |
578 | return dw_cfi_oprnd_reg_num; |
579 | |
580 | case DW_CFA_expression: |
581 | case DW_CFA_val_expression: |
582 | return dw_cfi_oprnd_loc; |
583 | |
584 | case DW_CFA_def_cfa_expression: |
585 | return dw_cfi_oprnd_cfa_loc; |
586 | |
587 | default: |
588 | return dw_cfi_oprnd_unused; |
589 | } |
590 | } |
591 | |
592 | /* Output one FDE. */ |
593 | |
594 | static void |
595 | output_fde (dw_fde_ref fde, bool for_eh, bool second, |
596 | char *section_start_label, int fde_encoding, char *augmentation, |
597 | bool any_lsda_needed, int lsda_encoding) |
598 | { |
599 | const char *begin, *end; |
600 | static unsigned int j; |
601 | char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES]; |
602 | |
603 | targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh, |
604 | /* empty */ 0); |
605 | targetm.asm_out.internal_label (asm_out_file, FDE_LABEL, |
606 | for_eh + j); |
607 | ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j); |
608 | ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j); |
609 | if (!XCOFF_DEBUGGING_INFO || for_eh) |
610 | { |
611 | if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4 && !for_eh) |
612 | dw2_asm_output_data (4, 0xffffffff, "Initial length escape value" |
613 | " indicating 64-bit DWARF extension" ); |
614 | dw2_asm_output_delta (for_eh ? 4 : dwarf_offset_size, l2, l1, |
615 | "FDE Length" ); |
616 | } |
617 | ASM_OUTPUT_LABEL (asm_out_file, l1); |
618 | |
619 | if (for_eh) |
620 | dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset" ); |
621 | else |
622 | dw2_asm_output_offset (dwarf_offset_size, section_start_label, |
623 | debug_frame_section, "FDE CIE offset" ); |
624 | |
625 | begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin; |
626 | end = second ? fde->dw_fde_second_end : fde->dw_fde_end; |
627 | |
628 | if (for_eh) |
629 | { |
630 | rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin); |
631 | SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL; |
632 | dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false, |
633 | "FDE initial location" ); |
634 | dw2_asm_output_delta (size_of_encoded_value (fde_encoding), |
635 | end, begin, "FDE address range" ); |
636 | } |
637 | else |
638 | { |
639 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location" ); |
640 | dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range" ); |
641 | } |
642 | |
643 | if (augmentation[0]) |
644 | { |
645 | if (any_lsda_needed) |
646 | { |
647 | int size = size_of_encoded_value (lsda_encoding); |
648 | |
649 | if (lsda_encoding == DW_EH_PE_aligned) |
650 | { |
651 | int offset = ( 4 /* Length */ |
652 | + 4 /* CIE offset */ |
653 | + 2 * size_of_encoded_value (fde_encoding) |
654 | + 1 /* Augmentation size */ ); |
655 | int pad = -offset & (PTR_SIZE - 1); |
656 | |
657 | size += pad; |
658 | gcc_assert (size_of_uleb128 (size) == 1); |
659 | } |
660 | |
661 | dw2_asm_output_data_uleb128 (size, "Augmentation size" ); |
662 | |
663 | if (fde->uses_eh_lsda) |
664 | { |
665 | ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA" , |
666 | fde->funcdef_number); |
667 | dw2_asm_output_encoded_addr_rtx (lsda_encoding, |
668 | gen_rtx_SYMBOL_REF (Pmode, l1), |
669 | false, |
670 | "Language Specific Data Area" ); |
671 | } |
672 | else |
673 | { |
674 | if (lsda_encoding == DW_EH_PE_aligned) |
675 | ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE)); |
676 | dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0, |
677 | "Language Specific Data Area (none)" ); |
678 | } |
679 | } |
680 | else |
681 | dw2_asm_output_data_uleb128 (0, "Augmentation size" ); |
682 | } |
683 | |
684 | /* Loop through the Call Frame Instructions associated with this FDE. */ |
685 | fde->dw_fde_current_label = begin; |
686 | { |
687 | size_t from, until, i; |
688 | |
689 | from = 0; |
690 | until = vec_safe_length (v: fde->dw_fde_cfi); |
691 | |
692 | if (fde->dw_fde_second_begin == NULL) |
693 | ; |
694 | else if (!second) |
695 | until = fde->dw_fde_switch_cfi_index; |
696 | else |
697 | from = fde->dw_fde_switch_cfi_index; |
698 | |
699 | for (i = from; i < until; i++) |
700 | output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh); |
701 | } |
702 | |
703 | /* If we are to emit a ref/link from function bodies to their frame tables, |
704 | do it now. This is typically performed to make sure that tables |
705 | associated with functions are dragged with them and not discarded in |
706 | garbage collecting links. We need to do this on a per function basis to |
707 | cope with -ffunction-sections. */ |
708 | |
709 | #ifdef ASM_OUTPUT_DWARF_TABLE_REF |
710 | /* Switch to the function section, emit the ref to the tables, and |
711 | switch *back* into the table section. */ |
712 | switch_to_section (function_section (fde->decl)); |
713 | ASM_OUTPUT_DWARF_TABLE_REF (section_start_label); |
714 | switch_to_frame_table_section (for_eh, true); |
715 | #endif |
716 | |
717 | /* Pad the FDE out to an address sized boundary. */ |
718 | ASM_OUTPUT_ALIGN (asm_out_file, |
719 | floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE))); |
720 | ASM_OUTPUT_LABEL (asm_out_file, l2); |
721 | |
722 | j += 2; |
723 | } |
724 | |
725 | /* Return true if frame description entry FDE is needed for EH. */ |
726 | |
727 | static bool |
728 | fde_needed_for_eh_p (dw_fde_ref fde) |
729 | { |
730 | if (flag_asynchronous_unwind_tables) |
731 | return true; |
732 | |
733 | if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl)) |
734 | return true; |
735 | |
736 | if (fde->uses_eh_lsda) |
737 | return true; |
738 | |
739 | /* If exceptions are enabled, we have collected nothrow info. */ |
740 | if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow)) |
741 | return false; |
742 | |
743 | return true; |
744 | } |
745 | |
746 | /* Output the call frame information used to record information |
747 | that relates to calculating the frame pointer, and records the |
748 | location of saved registers. */ |
749 | |
750 | static void |
751 | output_call_frame_info (int for_eh) |
752 | { |
753 | unsigned int i; |
754 | dw_fde_ref fde; |
755 | dw_cfi_ref cfi; |
756 | char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES]; |
757 | char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES]; |
758 | bool any_lsda_needed = false; |
759 | char augmentation[6]; |
760 | int augmentation_size; |
761 | int fde_encoding = DW_EH_PE_absptr; |
762 | int per_encoding = DW_EH_PE_absptr; |
763 | int lsda_encoding = DW_EH_PE_absptr; |
764 | int return_reg; |
765 | rtx personality = NULL; |
766 | int dw_cie_version; |
767 | |
768 | /* Don't emit a CIE if there won't be any FDEs. */ |
769 | if (!fde_vec) |
770 | return; |
771 | |
772 | /* Nothing to do if the assembler's doing it all. */ |
773 | if (dwarf2out_do_cfi_asm ()) |
774 | return; |
775 | |
776 | /* If we don't have any functions we'll want to unwind out of, don't emit |
777 | any EH unwind information. If we make FDEs linkonce, we may have to |
778 | emit an empty label for an FDE that wouldn't otherwise be emitted. We |
779 | want to avoid having an FDE kept around when the function it refers to |
780 | is discarded. Example where this matters: a primary function template |
781 | in C++ requires EH information, an explicit specialization doesn't. */ |
782 | if (for_eh) |
783 | { |
784 | bool any_eh_needed = false; |
785 | |
786 | FOR_EACH_VEC_ELT (*fde_vec, i, fde) |
787 | { |
788 | if (fde->uses_eh_lsda) |
789 | any_eh_needed = any_lsda_needed = true; |
790 | else if (fde_needed_for_eh_p (fde)) |
791 | any_eh_needed = true; |
792 | else if (TARGET_USES_WEAK_UNWIND_INFO) |
793 | targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1); |
794 | } |
795 | |
796 | if (!any_eh_needed) |
797 | return; |
798 | } |
799 | |
800 | /* We're going to be generating comments, so turn on app. */ |
801 | if (flag_debug_asm) |
802 | app_enable (); |
803 | |
804 | /* Switch to the proper frame section, first time. */ |
805 | switch_to_frame_table_section (for_eh, back: false); |
806 | |
807 | ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh); |
808 | ASM_OUTPUT_LABEL (asm_out_file, section_start_label); |
809 | |
810 | /* Output the CIE. */ |
811 | ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh); |
812 | ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh); |
813 | if (!XCOFF_DEBUGGING_INFO || for_eh) |
814 | { |
815 | if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4 && !for_eh) |
816 | dw2_asm_output_data (4, 0xffffffff, |
817 | "Initial length escape value indicating 64-bit DWARF extension" ); |
818 | dw2_asm_output_delta (for_eh ? 4 : dwarf_offset_size, l2, l1, |
819 | "Length of Common Information Entry" ); |
820 | } |
821 | ASM_OUTPUT_LABEL (asm_out_file, l1); |
822 | |
823 | /* Now that the CIE pointer is PC-relative for EH, |
824 | use 0 to identify the CIE. */ |
825 | dw2_asm_output_data ((for_eh ? 4 : dwarf_offset_size), |
826 | (for_eh ? 0 : DWARF_CIE_ID), |
827 | "CIE Identifier Tag" ); |
828 | |
829 | /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to |
830 | use CIE version 1, unless that would produce incorrect results |
831 | due to overflowing the return register column. */ |
832 | return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh); |
833 | dw_cie_version = 1; |
834 | if (return_reg >= 256 || dwarf_version > 2) |
835 | dw_cie_version = 3; |
836 | dw2_asm_output_data (1, dw_cie_version, "CIE Version" ); |
837 | |
838 | augmentation[0] = 0; |
839 | augmentation_size = 0; |
840 | |
841 | personality = current_unit_personality; |
842 | if (for_eh) |
843 | { |
844 | char *p; |
845 | |
846 | /* Augmentation: |
847 | z Indicates that a uleb128 is present to size the |
848 | augmentation section. |
849 | L Indicates the encoding (and thus presence) of |
850 | an LSDA pointer in the FDE augmentation. |
851 | R Indicates a non-default pointer encoding for |
852 | FDE code pointers. |
853 | P Indicates the presence of an encoding + language |
854 | personality routine in the CIE augmentation. */ |
855 | |
856 | fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0); |
857 | per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1); |
858 | lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0); |
859 | |
860 | p = augmentation + 1; |
861 | if (personality) |
862 | { |
863 | *p++ = 'P'; |
864 | augmentation_size += 1 + size_of_encoded_value (per_encoding); |
865 | assemble_external_libcall (personality); |
866 | } |
867 | if (any_lsda_needed) |
868 | { |
869 | *p++ = 'L'; |
870 | augmentation_size += 1; |
871 | } |
872 | if (fde_encoding != DW_EH_PE_absptr) |
873 | { |
874 | *p++ = 'R'; |
875 | augmentation_size += 1; |
876 | } |
877 | if (p > augmentation + 1) |
878 | { |
879 | augmentation[0] = 'z'; |
880 | *p = '\0'; |
881 | } |
882 | |
883 | /* Ug. Some platforms can't do unaligned dynamic relocations at all. */ |
884 | if (personality && per_encoding == DW_EH_PE_aligned) |
885 | { |
886 | int offset = ( 4 /* Length */ |
887 | + 4 /* CIE Id */ |
888 | + 1 /* CIE version */ |
889 | + strlen (s: augmentation) + 1 /* Augmentation */ |
890 | + size_of_uleb128 (1) /* Code alignment */ |
891 | + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT) |
892 | + 1 /* RA column */ |
893 | + 1 /* Augmentation size */ |
894 | + 1 /* Personality encoding */ ); |
895 | int pad = -offset & (PTR_SIZE - 1); |
896 | |
897 | augmentation_size += pad; |
898 | |
899 | /* Augmentations should be small, so there's scarce need to |
900 | iterate for a solution. Die if we exceed one uleb128 byte. */ |
901 | gcc_assert (size_of_uleb128 (augmentation_size) == 1); |
902 | } |
903 | } |
904 | |
905 | dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation" ); |
906 | if (dw_cie_version >= 4) |
907 | { |
908 | dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size" ); |
909 | dw2_asm_output_data (1, 0, "CIE Segment Size" ); |
910 | } |
911 | dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor" ); |
912 | dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT, |
913 | "CIE Data Alignment Factor" ); |
914 | |
915 | if (dw_cie_version == 1) |
916 | dw2_asm_output_data (1, return_reg, "CIE RA Column" ); |
917 | else |
918 | dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column" ); |
919 | |
920 | if (augmentation[0]) |
921 | { |
922 | dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size" ); |
923 | if (personality) |
924 | { |
925 | dw2_asm_output_data (1, per_encoding, "Personality (%s)" , |
926 | eh_data_format_name (per_encoding)); |
927 | dw2_asm_output_encoded_addr_rtx (per_encoding, |
928 | personality, |
929 | true, NULL); |
930 | } |
931 | |
932 | if (any_lsda_needed) |
933 | dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)" , |
934 | eh_data_format_name (lsda_encoding)); |
935 | |
936 | if (fde_encoding != DW_EH_PE_absptr) |
937 | dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)" , |
938 | eh_data_format_name (fde_encoding)); |
939 | } |
940 | |
941 | FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi) |
942 | output_cfi (cfi, NULL, for_eh); |
943 | |
944 | /* Pad the CIE out to an address sized boundary. */ |
945 | ASM_OUTPUT_ALIGN (asm_out_file, |
946 | floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)); |
947 | ASM_OUTPUT_LABEL (asm_out_file, l2); |
948 | |
949 | /* Loop through all of the FDE's. */ |
950 | FOR_EACH_VEC_ELT (*fde_vec, i, fde) |
951 | { |
952 | unsigned int k; |
953 | |
954 | /* Don't emit EH unwind info for leaf functions that don't need it. */ |
955 | if (for_eh && !fde_needed_for_eh_p (fde)) |
956 | continue; |
957 | |
958 | for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++) |
959 | output_fde (fde, for_eh, second: k, section_start_label, fde_encoding, |
960 | augmentation, any_lsda_needed, lsda_encoding); |
961 | } |
962 | |
963 | if (for_eh && targetm.terminate_dw2_eh_frame_info) |
964 | dw2_asm_output_data (4, 0, "End of Table" ); |
965 | |
966 | /* Turn off app to make assembly quicker. */ |
967 | if (flag_debug_asm) |
968 | app_disable (); |
969 | } |
970 | |
971 | /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */ |
972 | |
static void
dwarf2out_do_cfi_startproc (bool second)
{
  int enc;
  rtx ref;

  /* Open a CFI region for the current function (or for its second,
     cold partition when SECOND is true).  */
  fprintf (stream: asm_out_file, format: "\t.cfi_startproc\n" );

  /* Give the target a chance to emit anything it wants right after
     .cfi_startproc.  */
  targetm.asm_out.post_cfi_startproc (asm_out_file, current_function_decl);

  /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
     eh unwinders.  */
  if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
    return;

  rtx personality = get_personality_function (current_function_decl);

  if (personality)
    {
      enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
      ref = personality;

      /* ??? The GAS support isn't entirely consistent.  We have to
	 handle indirect support ourselves, but PC-relative is done
	 in the assembler.  Further, the assembler can't handle any
	 of the weirder relocation types.  */
      if (enc & DW_EH_PE_indirect)
	{
	  if (targetm.asm_out.make_eh_symbol_indirect != NULL)
	    ref = targetm.asm_out.make_eh_symbol_indirect (ref, true);
	  else
	    ref = dw2_force_const_mem (ref, true);
	}

      fprintf (stream: asm_out_file, format: "\t.cfi_personality %#x," , enc);
      output_addr_const (asm_out_file, ref);
      fputc (c: '\n', stream: asm_out_file);
    }

  if (crtl->uses_eh_lsda)
    {
      char lab[MAX_ARTIFICIAL_LABEL_BYTES];

      enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
      /* The LSDA label differs between the first ("LLSDA") and second
	 ("LLSDAC") partitions of a split function.  */
      ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA" ,
				   current_function_funcdef_no);
      ref = gen_rtx_SYMBOL_REF (Pmode, lab);
      SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;

      /* As for the personality above, indirect references must be
	 resolved by hand.  */
      if (enc & DW_EH_PE_indirect)
	{
	  if (targetm.asm_out.make_eh_symbol_indirect != NULL)
	    ref = targetm.asm_out.make_eh_symbol_indirect (ref, true);
	  else
	    ref = dw2_force_const_mem (ref, true);
	}

      fprintf (stream: asm_out_file, format: "\t.cfi_lsda %#x," , enc);
      output_addr_const (asm_out_file, ref);
      fputc (c: '\n', stream: asm_out_file);
    }
}
1035 | |
1036 | /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that |
1037 | this allocation may be done before pass_final. */ |
1038 | |
dw_fde_ref
dwarf2out_alloc_current_fde (void)
{
  dw_fde_ref fde;

  /* The node is allocated cleared, so every field not explicitly set
     below starts out NULL/0/false.  */
  fde = ggc_cleared_alloc<dw_fde_node> ();
  fde->decl = current_function_decl;
  fde->funcdef_number = current_function_funcdef_no;
  fde->fde_index = vec_safe_length (v: fde_vec);
  /* Snapshot the per-function EH properties from CRTL while it is
     still live.  */
  fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
  fde->uses_eh_lsda = crtl->uses_eh_lsda;
  fde->nothrow = crtl->nothrow;
  fde->drap_reg = INVALID_REGNUM;
  fde->vdrap_reg = INVALID_REGNUM;

  /* Record the FDE associated with this function.  */
  cfun->fde = fde;
  vec_safe_push (v&: fde_vec, obj: fde);

  return fde;
}
1060 | |
1061 | /* Output a marker (i.e. a label) for the beginning of a function, before |
1062 | the prologue. */ |
1063 | |
void
dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
			  unsigned int column ATTRIBUTE_UNUSED,
			  const char *file ATTRIBUTE_UNUSED)
{
  char label[MAX_ARTIFICIAL_LABEL_BYTES];
  char * dup_label;
  dw_fde_ref fde;
  section *fnsec;
  bool do_frame;

  current_function_func_begin_label = NULL;

  do_frame = dwarf2out_do_frame ();

  /* ??? current_function_func_begin_label is also used by except.cc for
     call-site information.  We must emit this label if it might be used.  */
  if (!do_frame
      && (!flag_exceptions
	  || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
    return;

  fnsec = function_section (current_function_decl);
  switch_to_section (fnsec);
  ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
			       current_function_funcdef_no);
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
			  current_function_funcdef_no);
  /* LABEL is a stack buffer; keep a permanent copy of the name.  */
  dup_label = xstrdup (label);
  current_function_func_begin_label = dup_label;

  /* We can elide FDE allocation if we're not emitting frame unwind info.  */
  if (!do_frame)
    return;

  /* Unlike the debug version, the EH version of frame unwind info is a per-
     function setting so we need to record whether we need it for the unit.  */
  do_eh_frame |= dwarf2out_do_eh_frame ();

  /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
     emit insns as rtx but bypass the bulk of rest_of_compilation, which
     would include pass_dwarf2_frame.  If we've not created the FDE yet,
     do so now.  */
  fde = cfun->fde;
  if (fde == NULL)
    fde = dwarf2out_alloc_current_fde ();

  /* Initialize the bits of CURRENT_FDE that were not available earlier.  */
  fde->dw_fde_begin = dup_label;
  fde->dw_fde_current_label = dup_label;
  fde->in_std_section = (fnsec == text_section
			 || (cold_text_section && fnsec == cold_text_section));
  fde->ignored_debug = DECL_IGNORED_P (current_function_decl);
  in_text_section_p = fnsec == text_section;

  /* We only want to output line number information for the genuine dwarf2
     prologue case, not the eh frame case.  */
#ifdef DWARF2_DEBUGGING_INFO
  if (file)
    dwarf2out_source_line (line, column, file, 0, true);
#endif

  if (dwarf2out_do_cfi_asm ())
    dwarf2out_do_cfi_startproc (second: false);
  else
    {
      rtx personality = get_personality_function (current_function_decl);
      if (!current_unit_personality)
	current_unit_personality = personality;

      /* We cannot keep a current personality per function as without CFI
	 asm, at the point where we emit the CFI data, there is no current
	 function anymore.  */
      if (personality && current_unit_personality != personality)
	sorry ("multiple EH personalities are supported only with assemblers "
	       "supporting %<.cfi_personality%> directive" );
    }
}
1142 | |
1143 | /* Output a marker (i.e. a label) for the end of the generated code |
1144 | for a function prologue. This gets called *after* the prologue code has |
1145 | been generated. */ |
1146 | |
void
dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
			    const char *file ATTRIBUTE_UNUSED)
{
  char label[MAX_ARTIFICIAL_LABEL_BYTES];

  /* Output a label to mark the point where the prologue code for this
     function ends.  */
  ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
			       current_function_funcdef_no);
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
			  current_function_funcdef_no);
  /* Record a permanent copy of the label in the FDE for later use.  */
  cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
}
1161 | |
/* Output a marker (i.e. a label) for the beginning of the generated code
   for a function epilogue.  This gets called *before* the epilogue code has
   been generated.  */

void
dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
			      const char *file ATTRIBUTE_UNUSED)
{
  dw_fde_ref fde = cfun->fde;
  char label[MAX_ARTIFICIAL_LABEL_BYTES];

  /* Only record the first epilogue-begin label for this function.  */
  if (fde->dw_fde_vms_begin_epilogue)
    return;

  /* Output a label to mark the point where the epilogue code for this
     function begins.  */
  ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
			       current_function_funcdef_no);
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
			  current_function_funcdef_no);
  fde->dw_fde_vms_begin_epilogue = xstrdup (label);
}
1184 | |
/* Mark the ranges of non-debug subsections in the std text sections.  */

static void
mark_ignored_debug_section (dw_fde_ref fde, bool second)
{
  bool std_section;
  const char *begin_label, *end_label;
  const char **last_end_label;
  vec<const char *, va_gc> **switch_ranges;

  /* Pick the labels for the partition of FDE being processed: the
     second (split) range when SECOND, otherwise the first.  */
  if (second)
    {
      std_section = fde->second_in_std_section;
      begin_label = fde->dw_fde_second_begin;
      end_label = fde->dw_fde_second_end;
    }
  else
    {
      std_section = fde->in_std_section;
      begin_label = fde->dw_fde_begin;
      end_label = fde->dw_fde_end;
    }

  /* Only ranges inside the standard text sections are tracked.  */
  if (!std_section)
    return;

  /* Each standard section keeps its own label/range state.  */
  if (in_text_section_p)
    {
      last_end_label = &last_text_label;
      switch_ranges = &switch_text_ranges;
    }
  else
    {
      last_end_label = &last_cold_label;
      switch_ranges = &switch_cold_ranges;
    }

  /* *SWITCH_RANGES accumulates alternating labels delimiting the
     ignored (non-debug) stretches: <end of last debug-relevant code,
     begin of next debug-relevant code> pairs.  An odd length means an
     ignored stretch is currently open.  */
  if (fde->ignored_debug)
    {
      /* Open an ignored stretch at the end of the last debug-relevant
	 range, unless one is already open or none has been seen yet.  */
      if (*switch_ranges && !(vec_safe_length (v: *switch_ranges) & 1))
	vec_safe_push (v&: *switch_ranges, obj: *last_end_label);
    }
  else
    {
      *last_end_label = end_label;

      if (!*switch_ranges)
	vec_alloc (v&: *switch_ranges, nelems: 16);
      else if (vec_safe_length (v: *switch_ranges) & 1)
	/* Close the open ignored stretch at this range's begin label.  */
	vec_safe_push (v&: *switch_ranges, obj: begin_label);
    }
}
1237 | |
1238 | /* Output a marker (i.e. a label) for the absolute end of the generated code |
1239 | for a function definition. This gets called *after* the epilogue code has |
1240 | been generated. */ |
1241 | |
void
dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
			const char *file ATTRIBUTE_UNUSED)
{
  dw_fde_ref fde;
  char label[MAX_ARTIFICIAL_LABEL_BYTES];

  /* Reset the variable-location bookkeeping for the next function.  */
  last_var_location_insn = NULL;
  cached_next_real_insn = NULL;

  if (dwarf2out_do_cfi_asm ())
    fprintf (stream: asm_out_file, format: "\t.cfi_endproc\n" );

  /* Output a label to mark the endpoint of the code generated for this
     function.  */
  ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
			       current_function_funcdef_no);
  ASM_OUTPUT_LABEL (asm_out_file, label);
  fde = cfun->fde;
  gcc_assert (fde != NULL);
  /* For a partitioned function, dw_fde_end was already recorded in
     dwarf2out_switch_text_section.  */
  if (fde->dw_fde_second_begin == NULL)
    fde->dw_fde_end = xstrdup (label);

  mark_ignored_debug_section (fde, second: fde->dw_fde_second_begin != NULL);
}
1267 | |
/* Output the unit's call frame information at the end of compilation:
   once for the debugger when GCC emits the unwind info itself, and
   once more for the EH unwinder when any function required it.  */

void
dwarf2out_frame_finish (void)
{
  /* Output call frame information.  */
  if (targetm.debug_unwind_info () == UI_DWARF2)
    output_call_frame_info (for_eh: 0);

  /* Output another copy for the unwinder.  */
  if (do_eh_frame)
    output_call_frame_info (for_eh: 1);
}
1279 | |
1280 | static void var_location_switch_text_section (void); |
1281 | static void set_cur_line_info_table (section *); |
1282 | |
/* Called when the current function is split between two text sections:
   record the second range's labels in the FDE, end the first CFI region
   and start a new one, then perform the actual section switch.  */

void
dwarf2out_switch_text_section (void)
{
  char label[MAX_ARTIFICIAL_LABEL_BYTES];
  section *sect;
  dw_fde_ref fde = cfun->fde;

  /* A function may be split at most once.  */
  gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);

  ASM_GENERATE_INTERNAL_LABEL (label, FUNC_SECOND_SECT_LABEL,
			       current_function_funcdef_no);

  fde->dw_fde_second_begin = ggc_strdup (label);
  /* Which subsection end label terminates each range depends on the
     section we are currently in.  */
  if (!in_cold_section_p)
    {
      fde->dw_fde_end = crtl->subsections.cold_section_end_label;
      fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
    }
  else
    {
      fde->dw_fde_end = crtl->subsections.hot_section_end_label;
      fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
    }
  have_multiple_function_sections = true;

  /* Each text range gets its own CFI region when emitting CFI asm.  */
  if (dwarf2out_do_cfi_asm ())
    fprintf (stream: asm_out_file, format: "\t.cfi_endproc\n" );

  mark_ignored_debug_section (fde, second: false);

  /* Now do the real section switch.  */
  sect = current_function_section ();
  switch_to_section (sect);

  fde->second_in_std_section
    = (sect == text_section
       || (cold_text_section && sect == cold_text_section));
  in_text_section_p = sect == text_section;

  if (dwarf2out_do_cfi_asm ())
    dwarf2out_do_cfi_startproc (second: true);

  var_location_switch_text_section ();

  if (cold_text_section != NULL)
    set_cur_line_info_table (sect);
}
1330 | |
1331 | /* And now, the subset of the debugging information support code necessary |
1332 | for emitting location expressions. */ |
1333 | |
/* Describe an entry into the .debug_addr section.  */

enum ate_kind {
  ate_kind_rtx,		/* The address is given as an rtx expression.  */
  ate_kind_rtx_dtprel,	/* Likewise, but to be emitted DTP-relative
			   (thread-local).  */
  ate_kind_label	/* The address is given as an assembler label.  */
};
1341 | |
struct GTY((for_user)) addr_table_entry {
  enum ate_kind kind;		/* Discriminator for the ADDR union below.  */
  unsigned int refcount;	/* Number of references to this entry.  */
  unsigned int index;		/* Slot assigned in the address table —
				   presumably set once the table is laid
				   out; confirm at use sites.  */
  union addr_table_entry_struct_union
    {
      rtx GTY ((tag ("0" ))) rtl;
      char * GTY ((tag ("1" ))) label;
    }
  GTY ((desc ("%1.kind" ))) addr;
};
1353 | |
/* Numeric type used for location view identifiers.  */
typedef unsigned int var_loc_view;
1355 | |
/* Location lists are ranges + location descriptions for that range,
   so you can track variables that are in different places over
   their entire life.  */
typedef struct GTY(()) dw_loc_list_struct {
  dw_loc_list_ref dw_loc_next;	/* Next range in the list.  */
  const char *begin; /* Label and addr_entry for start of range */
  addr_table_entry *begin_entry;
  const char *end;  /* Label for end of range */
  addr_table_entry *end_entry;
  char *ll_symbol; /* Label for beginning of location list.
		      Only on head of list.  */
  char *vl_symbol; /* Label for beginning of view list.  Ditto.  */
  const char *section; /* Section this loclist is relative to */
  dw_loc_descr_ref expr;	/* Location expression valid over the range.  */
  var_loc_view vbegin, vend;	/* Location views at BEGIN and END.  */
  hashval_t hash;		/* See num_assigned below.  */
  /* True if all addresses in this and subsequent lists are known to be
     resolved.  */
  bool resolved_addr;
  /* True if this list has been replaced by dw_loc_next.  */
  bool replaced;
  /* True if it has been emitted into .debug_loc* / .debug_loclists*
     section.  */
  unsigned char emitted : 1;
  /* True if hash field is index rather than hash value.  */
  unsigned char num_assigned : 1;
  /* True if .debug_loclists.dwo offset has been emitted for it already.  */
  unsigned char offset_emitted : 1;
  /* True if note_variable_value_in_expr has been called on it.  */
  unsigned char noted_variable_value : 1;
  /* True if the range should be emitted even if begin and end
     are the same.  */
  bool force;
} dw_loc_list_node;
1390 | |
1391 | static dw_loc_descr_ref int_loc_descriptor (poly_int64); |
1392 | static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT); |
1393 | |
1394 | /* Convert a DWARF stack opcode into its string name. */ |
1395 | |
1396 | static const char * |
1397 | dwarf_stack_op_name (unsigned int op) |
1398 | { |
1399 | const char *name = get_DW_OP_name (op); |
1400 | |
1401 | if (name != NULL) |
1402 | return name; |
1403 | |
1404 | return "OP_<unknown>" ; |
1405 | } |
1406 | |
/* Return TRUE iff we're to output location view lists as a separate
   attribute next to the location lists, as an extension compatible
   with DWARF 2 and above.  */

static inline bool
dwarf2out_locviews_in_attribute ()
{
  /* A setting of 1 selects the separate-attribute form; -1 selects the
     in-loclist form handled by dwarf2out_locviews_in_loclist.  */
  return debug_variable_location_views == 1;
}
1416 | |
/* Return TRUE iff we're to output location view lists as part of the
   location lists, as proposed for standardization after DWARF 5.  */

static inline bool
dwarf2out_locviews_in_loclist ()
{
#ifndef DW_LLE_view_pair
  /* Without the DW_LLE_view_pair extension there is no encoding for
     views inside the location list itself.  */
  return false;
#else
  return debug_variable_location_views == -1;
#endif
}
1429 | |
/* Return a pointer to a newly allocated location description.  Location
   descriptions are simple expression terms that can be strung
   together to form more complicated location (address) descriptions.  */

static inline dw_loc_descr_ref
new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
	       unsigned HOST_WIDE_INT oprnd2)
{
  /* The node is allocated cleared, so dw_loc_next and the flag bits
     start out zero.  */
  dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();

  descr->dw_loc_opc = op;
  /* Both operands default to the unsigned-constant class, holding
     OPRND1 and OPRND2 respectively.  */
  descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
  descr->dw_loc_oprnd1.val_entry = NULL;
  descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
  descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
  descr->dw_loc_oprnd2.val_entry = NULL;
  descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;

  return descr;
}
1450 | |
1451 | /* Add a location description term to a location description expression. */ |
1452 | |
1453 | static inline void |
1454 | add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr) |
1455 | { |
1456 | dw_loc_descr_ref *d; |
1457 | |
1458 | /* Find the end of the chain. */ |
1459 | for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next) |
1460 | ; |
1461 | |
1462 | *d = descr; |
1463 | } |
1464 | |
/* Compare two location operands for exact equality.  */

static bool
dw_val_equal_p (dw_val_node *a, dw_val_node *b)
{
  /* Operands of different classes never compare equal.  */
  if (a->val_class != b->val_class)
    return false;
  switch (a->val_class)
    {
    case dw_val_class_none:
      return true;
    case dw_val_class_addr:
      return rtx_equal_p (a->v.val_addr, b->v.val_addr);

    case dw_val_class_offset:
    case dw_val_class_unsigned_const:
    case dw_val_class_const:
    case dw_val_class_unsigned_const_implicit:
    case dw_val_class_const_implicit:
    case dw_val_class_range_list:
      /* These are all HOST_WIDE_INT, signed or unsigned.  */
      return a->v.val_unsigned == b->v.val_unsigned;

    /* These classes compare by pointer identity.  */
    case dw_val_class_loc:
      return a->v.val_loc == b->v.val_loc;
    case dw_val_class_loc_list:
      return a->v.val_loc_list == b->v.val_loc_list;
    case dw_val_class_view_list:
      return a->v.val_view_list == b->v.val_view_list;
    case dw_val_class_die_ref:
      return a->v.val_die_ref.die == b->v.val_die_ref.die;
    case dw_val_class_fde_ref:
      return a->v.val_fde_index == b->v.val_fde_index;
    case dw_val_class_symview:
      return strcmp (s1: a->v.val_symbolic_view, s2: b->v.val_symbolic_view) == 0;
    /* Label-based classes compare by label string.  */
    case dw_val_class_lbl_id:
    case dw_val_class_lineptr:
    case dw_val_class_macptr:
    case dw_val_class_loclistsptr:
    case dw_val_class_high_pc:
      return strcmp (s1: a->v.val_lbl_id, s2: b->v.val_lbl_id) == 0;
    case dw_val_class_str:
      return a->v.val_str == b->v.val_str;
    case dw_val_class_flag:
      return a->v.val_flag == b->v.val_flag;
    case dw_val_class_file:
    case dw_val_class_file_implicit:
      return a->v.val_file == b->v.val_file;
    case dw_val_class_decl_ref:
      return a->v.val_decl_ref == b->v.val_decl_ref;

    case dw_val_class_const_double:
      return (a->v.val_double.high == b->v.val_double.high
	      && a->v.val_double.low == b->v.val_double.low);

    case dw_val_class_wide_int:
      return *a->v.val_wide == *b->v.val_wide;

    case dw_val_class_vec:
      {
	size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
	size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;

	return (a_len == b_len
		&& !memcmp (s1: a->v.val_vec.array, s2: b->v.val_vec.array, n: a_len));
      }

    case dw_val_class_data8:
      return memcmp (s1: a->v.val_data8, s2: b->v.val_data8, n: 8) == 0;

    case dw_val_class_vms_delta:
      return (!strcmp (s1: a->v.val_vms_delta.lbl1, s2: b->v.val_vms_delta.lbl1)
	      && !strcmp (s1: a->v.val_vms_delta.lbl2, s2: b->v.val_vms_delta.lbl2));

    case dw_val_class_discr_value:
      return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
	      && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
    case dw_val_class_discr_list:
      /* It makes no sense comparing two discriminant value lists.  */
      return false;
    }
  gcc_unreachable ();
}
1548 | |
1549 | /* Compare two location atoms for exact equality. */ |
1550 | |
1551 | static bool |
1552 | loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b) |
1553 | { |
1554 | if (a->dw_loc_opc != b->dw_loc_opc) |
1555 | return false; |
1556 | |
1557 | /* ??? This is only ever set for DW_OP_constNu, for N equal to the |
1558 | address size, but since we always allocate cleared storage it |
1559 | should be zero for other types of locations. */ |
1560 | if (a->dtprel != b->dtprel) |
1561 | return false; |
1562 | |
1563 | return (dw_val_equal_p (a: &a->dw_loc_oprnd1, b: &b->dw_loc_oprnd1) |
1564 | && dw_val_equal_p (a: &a->dw_loc_oprnd2, b: &b->dw_loc_oprnd2)); |
1565 | } |
1566 | |
1567 | /* Compare two complete location expressions for exact equality. */ |
1568 | |
1569 | bool |
1570 | loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b) |
1571 | { |
1572 | while (1) |
1573 | { |
1574 | if (a == b) |
1575 | return true; |
1576 | if (a == NULL || b == NULL) |
1577 | return false; |
1578 | if (!loc_descr_equal_p_1 (a, b)) |
1579 | return false; |
1580 | |
1581 | a = a->dw_loc_next; |
1582 | b = b->dw_loc_next; |
1583 | } |
1584 | } |
1585 | |
1586 | |
/* Add a constant POLY_OFFSET to a location expression.  */

static void
loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
{
  dw_loc_descr_ref loc;
  HOST_WIDE_INT *p;

  gcc_assert (*list_head != NULL);

  /* Adding zero is a no-op.  */
  if (known_eq (poly_offset, 0))
    return;

  /* Find the end of the chain.  */
  for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
    ;

  HOST_WIDE_INT offset;
  if (!poly_offset.is_constant (const_value: &offset))
    {
      /* A genuinely polynomial offset: push it as its own constant
	 term followed by DW_OP_plus.  */
      loc->dw_loc_next = int_loc_descriptor (poly_offset);
      add_loc_descr (list_head: &loc->dw_loc_next, descr: new_loc_descr (op: DW_OP_plus, oprnd1: 0, oprnd2: 0));
      return;
    }

  /* P points at the signed offset operand of the trailing atom, when
     that atom is one whose offset can simply be adjusted in place.  */
  p = NULL;
  if (loc->dw_loc_opc == DW_OP_fbreg
      || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
    p = &loc->dw_loc_oprnd1.v.val_int;
  else if (loc->dw_loc_opc == DW_OP_bregx)
    p = &loc->dw_loc_oprnd2.v.val_int;

  /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
     offset.  Don't optimize if an signed integer overflow would happen.  */
  if (p != NULL
      && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
	  || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
    *p += offset;

  else if (offset > 0)
    loc->dw_loc_next = new_loc_descr (op: DW_OP_plus_uconst, oprnd1: offset, oprnd2: 0);

  else
    {
      /* Negative offsets are emitted by subtracting the (unsigned)
	 magnitude, since DW_OP_plus_uconst cannot encode them.  */
      loc->dw_loc_next
	= uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
      add_loc_descr (list_head: &loc->dw_loc_next, descr: new_loc_descr (op: DW_OP_minus, oprnd1: 0, oprnd2: 0));
    }
}
1636 | |
/* Return a pointer to a newly allocated location description for
   REG and OFFSET.  */

static inline dw_loc_descr_ref
new_reg_loc_descr (unsigned int reg, poly_int64 offset)
{
  HOST_WIDE_INT const_offset;
  if (offset.is_constant (const_value: &const_offset))
    {
      /* Registers 0..31 have dedicated single-byte DW_OP_breg<n>
	 opcodes; higher register numbers need DW_OP_bregx.  */
      if (reg <= 31)
	return new_loc_descr (op: (enum dwarf_location_atom) (DW_OP_breg0 + reg),
			      oprnd1: const_offset, oprnd2: 0);
      else
	return new_loc_descr (op: DW_OP_bregx, oprnd1: reg, oprnd2: const_offset);
    }
  else
    {
      /* A polynomial offset: start from the register with offset 0 and
	 add the offset as a separate expression.  */
      dw_loc_descr_ref ret = new_reg_loc_descr (reg, offset: 0);
      loc_descr_plus_const (list_head: &ret, poly_offset: offset);
      return ret;
    }
}
1659 | |
1660 | /* Add a constant OFFSET to a location list. */ |
1661 | |
1662 | static void |
1663 | loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset) |
1664 | { |
1665 | dw_loc_list_ref d; |
1666 | for (d = list_head; d != NULL; d = d->dw_loc_next) |
1667 | loc_descr_plus_const (list_head: &d->expr, poly_offset: offset); |
1668 | } |
1669 | |
/* Size in bytes of a DWARF reference: address-sized in DWARF 2,
   offset-sized in DWARF 3 and later.  */
#define DWARF_REF_SIZE	\
  (dwarf_version == 2 ? DWARF2_ADDR_SIZE : dwarf_offset_size)
1672 | |
1673 | /* The number of bits that can be encoded by largest DW_FORM_dataN. |
1674 | In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5 |
1675 | DW_FORM_data16 with 128 bits. */ |
1676 | #define DWARF_LARGEST_DATA_FORM_BITS \ |
1677 | (dwarf_version >= 5 ? 128 : 64) |
1678 | |
1679 | /* Utility inline function for construction of ops that were GNU extension |
1680 | before DWARF 5. */ |
1681 | static inline enum dwarf_location_atom |
1682 | dwarf_OP (enum dwarf_location_atom op) |
1683 | { |
1684 | switch (op) |
1685 | { |
1686 | case DW_OP_implicit_pointer: |
1687 | if (dwarf_version < 5) |
1688 | return DW_OP_GNU_implicit_pointer; |
1689 | break; |
1690 | |
1691 | case DW_OP_entry_value: |
1692 | if (dwarf_version < 5) |
1693 | return DW_OP_GNU_entry_value; |
1694 | break; |
1695 | |
1696 | case DW_OP_const_type: |
1697 | if (dwarf_version < 5) |
1698 | return DW_OP_GNU_const_type; |
1699 | break; |
1700 | |
1701 | case DW_OP_regval_type: |
1702 | if (dwarf_version < 5) |
1703 | return DW_OP_GNU_regval_type; |
1704 | break; |
1705 | |
1706 | case DW_OP_deref_type: |
1707 | if (dwarf_version < 5) |
1708 | return DW_OP_GNU_deref_type; |
1709 | break; |
1710 | |
1711 | case DW_OP_convert: |
1712 | if (dwarf_version < 5) |
1713 | return DW_OP_GNU_convert; |
1714 | break; |
1715 | |
1716 | case DW_OP_reinterpret: |
1717 | if (dwarf_version < 5) |
1718 | return DW_OP_GNU_reinterpret; |
1719 | break; |
1720 | |
1721 | case DW_OP_addrx: |
1722 | if (dwarf_version < 5) |
1723 | return DW_OP_GNU_addr_index; |
1724 | break; |
1725 | |
1726 | case DW_OP_constx: |
1727 | if (dwarf_version < 5) |
1728 | return DW_OP_GNU_const_index; |
1729 | break; |
1730 | |
1731 | default: |
1732 | break; |
1733 | } |
1734 | return op; |
1735 | } |
1736 | |
1737 | /* Similarly for attributes. */ |
1738 | static inline enum dwarf_attribute |
1739 | dwarf_AT (enum dwarf_attribute at) |
1740 | { |
1741 | switch (at) |
1742 | { |
1743 | case DW_AT_call_return_pc: |
1744 | if (dwarf_version < 5) |
1745 | return DW_AT_low_pc; |
1746 | break; |
1747 | |
1748 | case DW_AT_call_tail_call: |
1749 | if (dwarf_version < 5) |
1750 | return DW_AT_GNU_tail_call; |
1751 | break; |
1752 | |
1753 | case DW_AT_call_origin: |
1754 | if (dwarf_version < 5) |
1755 | return DW_AT_abstract_origin; |
1756 | break; |
1757 | |
1758 | case DW_AT_call_target: |
1759 | if (dwarf_version < 5) |
1760 | return DW_AT_GNU_call_site_target; |
1761 | break; |
1762 | |
1763 | case DW_AT_call_target_clobbered: |
1764 | if (dwarf_version < 5) |
1765 | return DW_AT_GNU_call_site_target_clobbered; |
1766 | break; |
1767 | |
1768 | case DW_AT_call_parameter: |
1769 | if (dwarf_version < 5) |
1770 | return DW_AT_abstract_origin; |
1771 | break; |
1772 | |
1773 | case DW_AT_call_value: |
1774 | if (dwarf_version < 5) |
1775 | return DW_AT_GNU_call_site_value; |
1776 | break; |
1777 | |
1778 | case DW_AT_call_data_value: |
1779 | if (dwarf_version < 5) |
1780 | return DW_AT_GNU_call_site_data_value; |
1781 | break; |
1782 | |
1783 | case DW_AT_call_all_calls: |
1784 | if (dwarf_version < 5) |
1785 | return DW_AT_GNU_all_call_sites; |
1786 | break; |
1787 | |
1788 | case DW_AT_call_all_tail_calls: |
1789 | if (dwarf_version < 5) |
1790 | return DW_AT_GNU_all_tail_call_sites; |
1791 | break; |
1792 | |
1793 | case DW_AT_dwo_name: |
1794 | if (dwarf_version < 5) |
1795 | return DW_AT_GNU_dwo_name; |
1796 | break; |
1797 | |
1798 | case DW_AT_addr_base: |
1799 | if (dwarf_version < 5) |
1800 | return DW_AT_GNU_addr_base; |
1801 | break; |
1802 | |
1803 | default: |
1804 | break; |
1805 | } |
1806 | return at; |
1807 | } |
1808 | |
1809 | /* And similarly for tags. */ |
1810 | static inline enum dwarf_tag |
1811 | dwarf_TAG (enum dwarf_tag tag) |
1812 | { |
1813 | switch (tag) |
1814 | { |
1815 | case DW_TAG_call_site: |
1816 | if (dwarf_version < 5) |
1817 | return DW_TAG_GNU_call_site; |
1818 | break; |
1819 | |
1820 | case DW_TAG_call_site_parameter: |
1821 | if (dwarf_version < 5) |
1822 | return DW_TAG_GNU_call_site_parameter; |
1823 | break; |
1824 | |
1825 | default: |
1826 | break; |
1827 | } |
1828 | return tag; |
1829 | } |
1830 | |
1831 | /* And similarly for forms. */ |
1832 | static inline enum dwarf_form |
1833 | dwarf_FORM (enum dwarf_form form) |
1834 | { |
1835 | switch (form) |
1836 | { |
1837 | case DW_FORM_addrx: |
1838 | if (dwarf_version < 5) |
1839 | return DW_FORM_GNU_addr_index; |
1840 | break; |
1841 | |
1842 | case DW_FORM_strx: |
1843 | if (dwarf_version < 5) |
1844 | return DW_FORM_GNU_str_index; |
1845 | break; |
1846 | |
1847 | default: |
1848 | break; |
1849 | } |
1850 | return form; |
1851 | } |
1852 | |
1853 | static unsigned long int get_base_type_offset (dw_die_ref); |
1854 | |
/* Return the size in bytes of the location descriptor LOC: one byte
   for the opcode plus the encoded size of its operands, if any.  This
   must stay in exact agreement with what output_loc_operands emits.  */

static unsigned long
size_of_loc_descr (dw_loc_descr_ref loc)
{
  /* Every operation is at least its 1-byte opcode.  */
  unsigned long size = 1;

  switch (loc->dw_loc_opc)
    {
    case DW_OP_addr:
      /* A full target address.  */
      size += DWARF2_ADDR_SIZE;
      break;
    case DW_OP_GNU_addr_index:
    case DW_OP_addrx:
    case DW_OP_GNU_const_index:
    case DW_OP_constx:
      /* A ULEB128 index into .debug_addr; the index must already have
	 been assigned by this point.  */
      gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
      size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
      break;
    case DW_OP_const1u:
    case DW_OP_const1s:
      size += 1;
      break;
    case DW_OP_const2u:
    case DW_OP_const2s:
      size += 2;
      break;
    case DW_OP_const4u:
    case DW_OP_const4s:
      size += 4;
      break;
    case DW_OP_const8u:
    case DW_OP_const8s:
      size += 8;
      break;
    case DW_OP_constu:
      size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
      break;
    case DW_OP_consts:
      size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
      break;
    case DW_OP_pick:
      /* 1-byte stack index.  */
      size += 1;
      break;
    case DW_OP_plus_uconst:
      size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
      break;
    case DW_OP_skip:
    case DW_OP_bra:
      /* 2-byte self-relative branch offset.  */
      size += 2;
      break;
    case DW_OP_breg0:
    case DW_OP_breg1:
    case DW_OP_breg2:
    case DW_OP_breg3:
    case DW_OP_breg4:
    case DW_OP_breg5:
    case DW_OP_breg6:
    case DW_OP_breg7:
    case DW_OP_breg8:
    case DW_OP_breg9:
    case DW_OP_breg10:
    case DW_OP_breg11:
    case DW_OP_breg12:
    case DW_OP_breg13:
    case DW_OP_breg14:
    case DW_OP_breg15:
    case DW_OP_breg16:
    case DW_OP_breg17:
    case DW_OP_breg18:
    case DW_OP_breg19:
    case DW_OP_breg20:
    case DW_OP_breg21:
    case DW_OP_breg22:
    case DW_OP_breg23:
    case DW_OP_breg24:
    case DW_OP_breg25:
    case DW_OP_breg26:
    case DW_OP_breg27:
    case DW_OP_breg28:
    case DW_OP_breg29:
    case DW_OP_breg30:
    case DW_OP_breg31:
      /* SLEB128 offset from the register's value.  */
      size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
      break;
    case DW_OP_regx:
      size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
      break;
    case DW_OP_fbreg:
      size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
      break;
    case DW_OP_bregx:
      /* ULEB128 register number plus SLEB128 offset.  */
      size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
      size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
      break;
    case DW_OP_piece:
      size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
      break;
    case DW_OP_bit_piece:
      /* ULEB128 bit size plus ULEB128 bit offset.  */
      size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
      size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
      break;
    case DW_OP_deref_size:
    case DW_OP_xderef_size:
      size += 1;
      break;
    case DW_OP_call2:
      size += 2;
      break;
    case DW_OP_call4:
      size += 4;
      break;
    case DW_OP_call_ref:
    case DW_OP_GNU_variable_value:
      /* A reference into .debug_info.  */
      size += DWARF_REF_SIZE;
      break;
    case DW_OP_implicit_value:
      /* ULEB128 length followed by a block of that many bytes.  */
      size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
	      + loc->dw_loc_oprnd1.v.val_unsigned;
      break;
    case DW_OP_implicit_pointer:
    case DW_OP_GNU_implicit_pointer:
      /* DIE reference plus SLEB128 byte offset into the value.  */
      size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
      break;
    case DW_OP_entry_value:
    case DW_OP_GNU_entry_value:
      {
	/* ULEB128 size of the nested expression, then the expression
	   itself.  */
	unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
	size += size_of_uleb128 (op_size) + op_size;
	break;
      }
    case DW_OP_const_type:
    case DW_OP_GNU_const_type:
      {
	/* ULEB128 base-type DIE offset, a 1-byte constant size, then
	   the constant itself, whose size depends on the operand
	   class.  */
	unsigned long o
	  = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
	size += size_of_uleb128 (o) + 1;
	switch (loc->dw_loc_oprnd2.val_class)
	  {
	  case dw_val_class_vec:
	    size += loc->dw_loc_oprnd2.v.val_vec.length
		    * loc->dw_loc_oprnd2.v.val_vec.elt_size;
	    break;
	  case dw_val_class_const:
	    size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
	    break;
	  case dw_val_class_const_double:
	    size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
	    break;
	  case dw_val_class_wide_int:
	    size += (get_full_len (op: *loc->dw_loc_oprnd2.v.val_wide)
		     * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
	    break;
	  default:
	    gcc_unreachable ();
	  }
	break;
      }
    case DW_OP_regval_type:
    case DW_OP_GNU_regval_type:
      {
	/* ULEB128 register number plus ULEB128 base-type DIE offset.  */
	unsigned long o
	  = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
	size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
		+ size_of_uleb128 (o);
      }
      break;
    case DW_OP_deref_type:
    case DW_OP_GNU_deref_type:
      {
	/* 1-byte deref size plus ULEB128 base-type DIE offset.  */
	unsigned long o
	  = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
	size += 1 + size_of_uleb128 (o);
      }
      break;
    case DW_OP_convert:
    case DW_OP_reinterpret:
    case DW_OP_GNU_convert:
    case DW_OP_GNU_reinterpret:
      /* Either a raw ULEB128 constant or a base-type DIE offset.  */
      if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
	size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
      else
	{
	  unsigned long o
	    = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
	  size += size_of_uleb128 (o);
	}
      break;
    case DW_OP_GNU_parameter_ref:
      /* 4-byte DIE offset.  */
      size += 4;
      break;
    default:
      /* All other operations take no operands.  */
      break;
    }

  return size;
}
2052 | |
2053 | /* Return the size of a series of location descriptors. */ |
2054 | |
2055 | unsigned long |
2056 | size_of_locs (dw_loc_descr_ref loc) |
2057 | { |
2058 | dw_loc_descr_ref l; |
2059 | unsigned long size; |
2060 | |
2061 | /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr |
2062 | field, to avoid writing to a PCH file. */ |
2063 | for (size = 0, l = loc; l != NULL; l = l->dw_loc_next) |
2064 | { |
2065 | if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra) |
2066 | break; |
2067 | size += size_of_loc_descr (loc: l); |
2068 | } |
2069 | if (! l) |
2070 | return size; |
2071 | |
2072 | for (size = 0, l = loc; l != NULL; l = l->dw_loc_next) |
2073 | { |
2074 | l->dw_loc_addr = size; |
2075 | size += size_of_loc_descr (loc: l); |
2076 | } |
2077 | |
2078 | return size; |
2079 | } |
2080 | |
2081 | /* Return the size of the value in a DW_AT_discr_value attribute. */ |
2082 | |
2083 | static int |
2084 | size_of_discr_value (dw_discr_value *discr_value) |
2085 | { |
2086 | if (discr_value->pos) |
2087 | return size_of_uleb128 (discr_value->v.uval); |
2088 | else |
2089 | return size_of_sleb128 (discr_value->v.sval); |
2090 | } |
2091 | |
2092 | /* Return the size of the value in a DW_AT_discr_list attribute. */ |
2093 | |
2094 | static int |
2095 | size_of_discr_list (dw_discr_list_ref discr_list) |
2096 | { |
2097 | int size = 0; |
2098 | |
2099 | for (dw_discr_list_ref list = discr_list; |
2100 | list != NULL; |
2101 | list = list->dw_discr_next) |
2102 | { |
2103 | /* One byte for the discriminant value descriptor, and then one or two |
2104 | LEB128 numbers, depending on whether it's a single case label or a |
2105 | range label. */ |
2106 | size += 1; |
2107 | size += size_of_discr_value (discr_value: &list->dw_discr_lower_bound); |
2108 | if (list->dw_discr_range != 0) |
2109 | size += size_of_discr_value (discr_value: &list->dw_discr_upper_bound); |
2110 | } |
2111 | return size; |
2112 | } |
2113 | |
2114 | static HOST_WIDE_INT extract_int (const unsigned char *, unsigned); |
2115 | static void get_ref_die_offset_label (char *, dw_die_ref); |
2116 | static unsigned long int get_ref_die_offset (dw_die_ref); |
2117 | |
2118 | /* Output location description stack opcode's operands (if any). |
2119 | The for_eh_or_skip parameter controls whether register numbers are |
2120 | converted using DWARF2_FRAME_REG_OUT, which is needed in the case that |
2121 | hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind |
2122 | info). This should be suppressed for the cases that have not been converted |
2123 | (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */ |
2124 | |
static void
output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
{
  dw_val_ref val1 = &loc->dw_loc_oprnd1;
  dw_val_ref val2 = &loc->dw_loc_oprnd2;

  /* The emitted bytes must match size_of_loc_descr's accounting for
     the same opcode exactly.  */
  switch (loc->dw_loc_opc)
    {
#ifdef DWARF2_DEBUGGING_INFO
    case DW_OP_const2u:
    case DW_OP_const2s:
      dw2_asm_output_data (2, val1->v.val_int, NULL);
      break;
    case DW_OP_const4u:
      if (loc->dtprel)
	{
	  /* A DTP-relative (TLS) address is emitted via the target
	     hook so it gets the proper relocation.  */
	  gcc_assert (targetm.asm_out.output_dwarf_dtprel);
	  targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
					       val1->v.val_addr);
	  fputc (c: '\n', stream: asm_out_file);
	  break;
	}
      /* FALLTHRU */
    case DW_OP_const4s:
      dw2_asm_output_data (4, val1->v.val_int, NULL);
      break;
    case DW_OP_const8u:
      if (loc->dtprel)
	{
	  gcc_assert (targetm.asm_out.output_dwarf_dtprel);
	  targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
					       val1->v.val_addr);
	  fputc (c: '\n', stream: asm_out_file);
	  break;
	}
      /* FALLTHRU */
    case DW_OP_const8s:
      gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
      dw2_asm_output_data (8, val1->v.val_int, NULL);
      break;
    case DW_OP_skip:
    case DW_OP_bra:
      {
	int offset;

	gcc_assert (val1->val_class == dw_val_class_loc);
	/* The branch offset is relative to the end of this operation:
	   +3 accounts for the 1-byte opcode plus its 2-byte operand.  */
	offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);

	dw2_asm_output_data (2, offset, NULL);
      }
      break;
    case DW_OP_implicit_value:
      /* ULEB128 length, then the constant's bytes; their layout
	 depends on the operand class.  */
      dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
      switch (val2->val_class)
	{
	case dw_val_class_const:
	  dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
	  break;
	case dw_val_class_vec:
	  {
	    unsigned int elt_size = val2->v.val_vec.elt_size;
	    unsigned int len = val2->v.val_vec.length;
	    unsigned int i;
	    unsigned char *p;

	    /* Elements wider than a host word are emitted as two
	       half-sized words each.  */
	    if (elt_size > sizeof (HOST_WIDE_INT))
	      {
		elt_size /= 2;
		len *= 2;
	      }
	    for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
		 i < len;
		 i++, p += elt_size)
	      dw2_asm_output_data (elt_size, extract_int (p, elt_size),
				   "fp or vector constant word %u" , i);
	  }
	  break;
	case dw_val_class_const_double:
	  {
	    unsigned HOST_WIDE_INT first, second;

	    /* Emit the two halves in target word order.  */
	    if (WORDS_BIG_ENDIAN)
	      {
		first = val2->v.val_double.high;
		second = val2->v.val_double.low;
	      }
	    else
	      {
		first = val2->v.val_double.low;
		second = val2->v.val_double.high;
	      }
	    dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
				 first, NULL);
	    dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
				 second, NULL);
	  }
	  break;
	case dw_val_class_wide_int:
	  {
	    int i;
	    int len = get_full_len (op: *val2->v.val_wide);
	    /* Emit the wide-int elements in target word order.  */
	    if (WORDS_BIG_ENDIAN)
	      for (i = len - 1; i >= 0; --i)
		dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
				     val2->v.val_wide->elt (i), NULL);
	    else
	      for (i = 0; i < len; ++i)
		dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
				     val2->v.val_wide->elt (i), NULL);
	  }
	  break;
	case dw_val_class_addr:
	  gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
	  dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
	  break;
	default:
	  gcc_unreachable ();
	}
      break;
#else
    case DW_OP_const2u:
    case DW_OP_const2s:
    case DW_OP_const4u:
    case DW_OP_const4s:
    case DW_OP_const8u:
    case DW_OP_const8s:
    case DW_OP_skip:
    case DW_OP_bra:
    case DW_OP_implicit_value:
      /* We currently don't make any attempt to make sure these are
	 aligned properly like we do for the main unwind info, so
	 don't support emitting things larger than a byte if we're
	 only doing unwinding.  */
      gcc_unreachable ();
#endif
    case DW_OP_const1u:
    case DW_OP_const1s:
      dw2_asm_output_data (1, val1->v.val_int, NULL);
      break;
    case DW_OP_constu:
      dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
      break;
    case DW_OP_consts:
      dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
      break;
    case DW_OP_pick:
      dw2_asm_output_data (1, val1->v.val_int, NULL);
      break;
    case DW_OP_plus_uconst:
      dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
      break;
    case DW_OP_breg0:
    case DW_OP_breg1:
    case DW_OP_breg2:
    case DW_OP_breg3:
    case DW_OP_breg4:
    case DW_OP_breg5:
    case DW_OP_breg6:
    case DW_OP_breg7:
    case DW_OP_breg8:
    case DW_OP_breg9:
    case DW_OP_breg10:
    case DW_OP_breg11:
    case DW_OP_breg12:
    case DW_OP_breg13:
    case DW_OP_breg14:
    case DW_OP_breg15:
    case DW_OP_breg16:
    case DW_OP_breg17:
    case DW_OP_breg18:
    case DW_OP_breg19:
    case DW_OP_breg20:
    case DW_OP_breg21:
    case DW_OP_breg22:
    case DW_OP_breg23:
    case DW_OP_breg24:
    case DW_OP_breg25:
    case DW_OP_breg26:
    case DW_OP_breg27:
    case DW_OP_breg28:
    case DW_OP_breg29:
    case DW_OP_breg30:
    case DW_OP_breg31:
      /* SLEB128 offset; the register is encoded in the opcode, which
	 output_loc_sequence has already remapped if necessary.  */
      dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
      break;
    case DW_OP_regx:
      {
	unsigned r = val1->v.val_unsigned;
	if (for_eh_or_skip >= 0)
	  r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
	/* Remapping must not change the ULEB128 encoding length,
	   since sizes were computed before remapping.  */
	gcc_assert (size_of_uleb128 (r)
		    == size_of_uleb128 (val1->v.val_unsigned));
	dw2_asm_output_data_uleb128 (r, NULL);
      }
      break;
    case DW_OP_fbreg:
      dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
      break;
    case DW_OP_bregx:
      {
	unsigned r = val1->v.val_unsigned;
	if (for_eh_or_skip >= 0)
	  r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
	/* As for DW_OP_regx, remapping must preserve the encoded
	   length.  */
	gcc_assert (size_of_uleb128 (r)
		    == size_of_uleb128 (val1->v.val_unsigned));
	dw2_asm_output_data_uleb128 (r, NULL);
	dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
      }
      break;
    case DW_OP_piece:
      dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
      break;
    case DW_OP_bit_piece:
      dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
      dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
      break;
    case DW_OP_deref_size:
    case DW_OP_xderef_size:
      dw2_asm_output_data (1, val1->v.val_int, NULL);
      break;

    case DW_OP_addr:
      if (loc->dtprel)
	{
	  /* TLS addresses go through the target hook; there is no
	     generic fallback.  */
	  if (targetm.asm_out.output_dwarf_dtprel)
	    {
	      targetm.asm_out.output_dwarf_dtprel (asm_out_file,
						   DWARF2_ADDR_SIZE,
						   val1->v.val_addr);
	      fputc (c: '\n', stream: asm_out_file);
	    }
	  else
	    gcc_unreachable ();
	}
      else
	{
#ifdef DWARF2_DEBUGGING_INFO
	  dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
#else
	  gcc_unreachable ();
#endif
	}
      break;

    case DW_OP_GNU_addr_index:
    case DW_OP_addrx:
    case DW_OP_GNU_const_index:
    case DW_OP_constx:
      gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
      dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
                                   "(index into .debug_addr)" );
      break;

    case DW_OP_call2:
    case DW_OP_call4:
      {
	unsigned long die_offset
	  = get_ref_die_offset (val1->v.val_die_ref.die);
	/* Make sure the offset has been computed and that we can encode it as
	   an operand.  */
	gcc_assert (die_offset > 0
		    && die_offset <= (loc->dw_loc_opc == DW_OP_call2
				     ? 0xffff
				     : 0xffffffff));
	dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
			     die_offset, NULL);
      }
      break;

    case DW_OP_call_ref:
    case DW_OP_GNU_variable_value:
      {
	/* Emit a section-relative reference to the referenced DIE.  */
	char label[MAX_ARTIFICIAL_LABEL_BYTES
		   + HOST_BITS_PER_WIDE_INT / 2 + 2];
	gcc_assert (val1->val_class == dw_val_class_die_ref);
	get_ref_die_offset_label (label, val1->v.val_die_ref.die);
	dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
      }
      break;

    case DW_OP_implicit_pointer:
    case DW_OP_GNU_implicit_pointer:
      {
	/* DIE reference followed by a SLEB128 byte offset into the
	   pointed-to value.  */
	char label[MAX_ARTIFICIAL_LABEL_BYTES
		   + HOST_BITS_PER_WIDE_INT / 2 + 2];
	gcc_assert (val1->val_class == dw_val_class_die_ref);
	get_ref_die_offset_label (label, val1->v.val_die_ref.die);
	dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
	dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
      }
      break;

    case DW_OP_entry_value:
    case DW_OP_GNU_entry_value:
      /* ULEB128 size of the nested expression, then the expression
	 itself.  */
      dw2_asm_output_data_uleb128 (size_of_locs (loc: val1->v.val_loc), NULL);
      output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
      break;

    case DW_OP_const_type:
    case DW_OP_GNU_const_type:
      {
	/* ULEB128 base-type DIE offset, 1-byte constant size, then the
	   constant's bytes by operand class.  */
	unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
	gcc_assert (o);
	dw2_asm_output_data_uleb128 (o, NULL);
	switch (val2->val_class)
	  {
	  case dw_val_class_const:
	    l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
	    dw2_asm_output_data (1, l, NULL);
	    dw2_asm_output_data (l, val2->v.val_int, NULL);
	    break;
	  case dw_val_class_vec:
	    {
	      unsigned int elt_size = val2->v.val_vec.elt_size;
	      unsigned int len = val2->v.val_vec.length;
	      unsigned int i;
	      unsigned char *p;

	      l = len * elt_size;
	      dw2_asm_output_data (1, l, NULL);
	      /* Split oversized elements into two host words each.  */
	      if (elt_size > sizeof (HOST_WIDE_INT))
		{
		  elt_size /= 2;
		  len *= 2;
		}
	      for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
		   i < len;
		   i++, p += elt_size)
		dw2_asm_output_data (elt_size, extract_int (p, elt_size),
				     "fp or vector constant word %u" , i);
	    }
	    break;
	  case dw_val_class_const_double:
	    {
	      unsigned HOST_WIDE_INT first, second;
	      l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;

	      dw2_asm_output_data (1, 2 * l, NULL);
	      /* Emit the two halves in target word order.  */
	      if (WORDS_BIG_ENDIAN)
		{
		  first = val2->v.val_double.high;
		  second = val2->v.val_double.low;
		}
	      else
		{
		  first = val2->v.val_double.low;
		  second = val2->v.val_double.high;
		}
	      dw2_asm_output_data (l, first, NULL);
	      dw2_asm_output_data (l, second, NULL);
	    }
	    break;
	  case dw_val_class_wide_int:
	    {
	      int i;
	      int len = get_full_len (op: *val2->v.val_wide);
	      l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;

	      dw2_asm_output_data (1, len * l, NULL);
	      /* Emit the wide-int elements in target word order.  */
	      if (WORDS_BIG_ENDIAN)
		for (i = len - 1; i >= 0; --i)
		  dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
	      else
		for (i = 0; i < len; ++i)
		  dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
	    }
	    break;
	  default:
	    gcc_unreachable ();
	  }
      }
      break;
    case DW_OP_regval_type:
    case DW_OP_GNU_regval_type:
      {
	/* ULEB128 register number (possibly remapped) plus ULEB128
	   base-type DIE offset.  */
	unsigned r = val1->v.val_unsigned;
	unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
	gcc_assert (o);
	if (for_eh_or_skip >= 0)
	  {
	    r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
	    gcc_assert (size_of_uleb128 (r)
			== size_of_uleb128 (val1->v.val_unsigned));
	  }
	dw2_asm_output_data_uleb128 (r, NULL);
	dw2_asm_output_data_uleb128 (o, NULL);
      }
      break;
    case DW_OP_deref_type:
    case DW_OP_GNU_deref_type:
      {
	/* 1-byte deref size plus ULEB128 base-type DIE offset.  */
	unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
	gcc_assert (o);
	dw2_asm_output_data (1, val1->v.val_int, NULL);
	dw2_asm_output_data_uleb128 (o, NULL);
      }
      break;
    case DW_OP_convert:
    case DW_OP_reinterpret:
    case DW_OP_GNU_convert:
    case DW_OP_GNU_reinterpret:
      /* Either a raw ULEB128 constant or a base-type DIE offset.  */
      if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
	dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
      else
	{
	  unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
	  gcc_assert (o);
	  dw2_asm_output_data_uleb128 (o, NULL);
	}
      break;

    case DW_OP_GNU_parameter_ref:
      {
	/* 4-byte offset of the referenced parameter DIE.  */
	unsigned long o;
	gcc_assert (val1->val_class == dw_val_class_die_ref);
	o = get_ref_die_offset (val1->v.val_die_ref.die);
	dw2_asm_output_data (4, o, NULL);
      }
      break;

    default:
      /* Other codes have no operands.  */
      break;
    }
}
2550 | |
2551 | /* Output a sequence of location operations. |
2552 | The for_eh_or_skip parameter controls whether register numbers are |
2553 | converted using DWARF2_FRAME_REG_OUT, which is needed in the case that |
2554 | hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind |
2555 | info). This should be suppressed for the cases that have not been converted |
2556 | (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */ |
2557 | |
2558 | void |
2559 | output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip) |
2560 | { |
2561 | for (; loc != NULL; loc = loc->dw_loc_next) |
2562 | { |
2563 | enum dwarf_location_atom opc = loc->dw_loc_opc; |
2564 | /* Output the opcode. */ |
2565 | if (for_eh_or_skip >= 0 |
2566 | && opc >= DW_OP_breg0 && opc <= DW_OP_breg31) |
2567 | { |
2568 | unsigned r = (opc - DW_OP_breg0); |
2569 | r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip); |
2570 | gcc_assert (r <= 31); |
2571 | opc = (enum dwarf_location_atom) (DW_OP_breg0 + r); |
2572 | } |
2573 | else if (for_eh_or_skip >= 0 |
2574 | && opc >= DW_OP_reg0 && opc <= DW_OP_reg31) |
2575 | { |
2576 | unsigned r = (opc - DW_OP_reg0); |
2577 | r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip); |
2578 | gcc_assert (r <= 31); |
2579 | opc = (enum dwarf_location_atom) (DW_OP_reg0 + r); |
2580 | } |
2581 | |
2582 | dw2_asm_output_data (1, opc, |
2583 | "%s" , dwarf_stack_op_name (op: opc)); |
2584 | |
2585 | /* Output the operand(s) (if any). */ |
2586 | output_loc_operands (loc, for_eh_or_skip); |
2587 | } |
2588 | } |
2589 | |
/* Output location description stack opcode's operands (if any).
   The output is single bytes on a line, suitable for .cfi_escape.
   Register numbers are always remapped with DWARF2_FRAME_REG_OUT,
   since this path is only used for unwind info.  */

static void
output_loc_operands_raw (dw_loc_descr_ref loc)
{
  dw_val_ref val1 = &loc->dw_loc_oprnd1;
  dw_val_ref val2 = &loc->dw_loc_oprnd2;

  switch (loc->dw_loc_opc)
    {
    case DW_OP_addr:
    case DW_OP_GNU_addr_index:
    case DW_OP_addrx:
    case DW_OP_GNU_const_index:
    case DW_OP_constx:
    case DW_OP_implicit_value:
      /* We cannot output addresses in .cfi_escape, only bytes.  */
      gcc_unreachable ();

    case DW_OP_const1u:
    case DW_OP_const1s:
    case DW_OP_pick:
    case DW_OP_deref_size:
    case DW_OP_xderef_size:
      fputc (c: ',', stream: asm_out_file);
      dw2_asm_output_data_raw (1, val1->v.val_int);
      break;

    case DW_OP_const2u:
    case DW_OP_const2s:
      fputc (c: ',', stream: asm_out_file);
      dw2_asm_output_data_raw (2, val1->v.val_int);
      break;

    case DW_OP_const4u:
    case DW_OP_const4s:
      fputc (c: ',', stream: asm_out_file);
      dw2_asm_output_data_raw (4, val1->v.val_int);
      break;

    case DW_OP_const8u:
    case DW_OP_const8s:
      gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
      fputc (c: ',', stream: asm_out_file);
      dw2_asm_output_data_raw (8, val1->v.val_int);
      break;

    case DW_OP_skip:
    case DW_OP_bra:
      {
	int offset;

	gcc_assert (val1->val_class == dw_val_class_loc);
	/* The branch offset is relative to the end of this operation:
	   +3 accounts for the 1-byte opcode plus its 2-byte operand.  */
	offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);

	fputc (c: ',', stream: asm_out_file);
	dw2_asm_output_data_raw (2, offset);
      }
      break;

    case DW_OP_regx:
      {
	/* Remap to the output register numbering; this must not
	   change the ULEB128 encoding length.  */
	unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
	gcc_assert (size_of_uleb128 (r)
		    == size_of_uleb128 (val1->v.val_unsigned));
	fputc (c: ',', stream: asm_out_file);
	dw2_asm_output_data_uleb128_raw (r);
      }
      break;

    case DW_OP_constu:
    case DW_OP_plus_uconst:
    case DW_OP_piece:
      fputc (c: ',', stream: asm_out_file);
      dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
      break;

    case DW_OP_bit_piece:
      /* ULEB128 bit size plus ULEB128 bit offset.  */
      fputc (c: ',', stream: asm_out_file);
      dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
      dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
      break;

    case DW_OP_consts:
    case DW_OP_breg0:
    case DW_OP_breg1:
    case DW_OP_breg2:
    case DW_OP_breg3:
    case DW_OP_breg4:
    case DW_OP_breg5:
    case DW_OP_breg6:
    case DW_OP_breg7:
    case DW_OP_breg8:
    case DW_OP_breg9:
    case DW_OP_breg10:
    case DW_OP_breg11:
    case DW_OP_breg12:
    case DW_OP_breg13:
    case DW_OP_breg14:
    case DW_OP_breg15:
    case DW_OP_breg16:
    case DW_OP_breg17:
    case DW_OP_breg18:
    case DW_OP_breg19:
    case DW_OP_breg20:
    case DW_OP_breg21:
    case DW_OP_breg22:
    case DW_OP_breg23:
    case DW_OP_breg24:
    case DW_OP_breg25:
    case DW_OP_breg26:
    case DW_OP_breg27:
    case DW_OP_breg28:
    case DW_OP_breg29:
    case DW_OP_breg30:
    case DW_OP_breg31:
    case DW_OP_fbreg:
      /* SLEB128 offset; the register is encoded in the opcode.  */
      fputc (c: ',', stream: asm_out_file);
      dw2_asm_output_data_sleb128_raw (val1->v.val_int);
      break;

    case DW_OP_bregx:
      {
	/* ULEB128 register number (remapped) plus SLEB128 offset.  */
	unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
	gcc_assert (size_of_uleb128 (r)
		    == size_of_uleb128 (val1->v.val_unsigned));
	fputc (c: ',', stream: asm_out_file);
	dw2_asm_output_data_uleb128_raw (r);
	fputc (c: ',', stream: asm_out_file);
	dw2_asm_output_data_sleb128_raw (val2->v.val_int);
      }
      break;

    case DW_OP_implicit_pointer:
    case DW_OP_entry_value:
    case DW_OP_const_type:
    case DW_OP_regval_type:
    case DW_OP_deref_type:
    case DW_OP_convert:
    case DW_OP_reinterpret:
    case DW_OP_GNU_implicit_pointer:
    case DW_OP_GNU_entry_value:
    case DW_OP_GNU_const_type:
    case DW_OP_GNU_regval_type:
    case DW_OP_GNU_deref_type:
    case DW_OP_GNU_convert:
    case DW_OP_GNU_reinterpret:
    case DW_OP_GNU_parameter_ref:
      /* These require DIE references or nested expressions, which
	 cannot be expressed as raw .cfi_escape bytes.  */
      gcc_unreachable ();
      break;

    default:
      /* Other codes have no operands.  */
      break;
    }
}
2747 | |
2748 | void |
2749 | output_loc_sequence_raw (dw_loc_descr_ref loc) |
2750 | { |
2751 | while (1) |
2752 | { |
2753 | enum dwarf_location_atom opc = loc->dw_loc_opc; |
2754 | /* Output the opcode. */ |
2755 | if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31) |
2756 | { |
2757 | unsigned r = (opc - DW_OP_breg0); |
2758 | r = DWARF2_FRAME_REG_OUT (r, 1); |
2759 | gcc_assert (r <= 31); |
2760 | opc = (enum dwarf_location_atom) (DW_OP_breg0 + r); |
2761 | } |
2762 | else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31) |
2763 | { |
2764 | unsigned r = (opc - DW_OP_reg0); |
2765 | r = DWARF2_FRAME_REG_OUT (r, 1); |
2766 | gcc_assert (r <= 31); |
2767 | opc = (enum dwarf_location_atom) (DW_OP_reg0 + r); |
2768 | } |
2769 | /* Output the opcode. */ |
2770 | fprintf (stream: asm_out_file, format: "%#x" , opc); |
2771 | output_loc_operands_raw (loc); |
2772 | |
2773 | if (!loc->dw_loc_next) |
2774 | break; |
2775 | loc = loc->dw_loc_next; |
2776 | |
2777 | fputc (c: ',', stream: asm_out_file); |
2778 | } |
2779 | } |
2780 | |
2781 | static void |
2782 | build_breg_loc (struct dw_loc_descr_node **head, unsigned int regno) |
2783 | { |
2784 | if (regno <= 31) |
2785 | add_loc_descr (list_head: head, descr: new_loc_descr (op: (enum dwarf_location_atom) |
2786 | (DW_OP_breg0 + regno), oprnd1: 0, oprnd2: 0)); |
2787 | else |
2788 | add_loc_descr (list_head: head, descr: new_loc_descr (op: DW_OP_bregx, oprnd1: regno, oprnd2: 0)); |
2789 | } |
2790 | |
2791 | /* Build a dwarf location for a cfa_reg spanning multiple |
2792 | consecutive registers. */ |
2793 | |
2794 | struct dw_loc_descr_node * |
2795 | build_span_loc (struct cfa_reg reg) |
2796 | { |
2797 | struct dw_loc_descr_node *head = NULL; |
2798 | |
2799 | gcc_assert (reg.span_width > 0); |
2800 | gcc_assert (reg.span > 1); |
2801 | |
2802 | /* Start from the highest number register as it goes in the upper bits. */ |
2803 | unsigned int regno = reg.reg + reg.span - 1; |
2804 | build_breg_loc (head: &head, regno); |
2805 | |
2806 | /* Deal with the remaining registers in the span. */ |
2807 | for (int i = reg.span - 2; i >= 0; i--) |
2808 | { |
2809 | add_loc_descr (list_head: &head, descr: int_loc_descriptor (reg.span_width * 8)); |
2810 | add_loc_descr (list_head: &head, descr: new_loc_descr (op: DW_OP_shl, oprnd1: 0, oprnd2: 0)); |
2811 | regno--; |
2812 | build_breg_loc (head: &head, regno); |
2813 | add_loc_descr (list_head: &head, descr: new_loc_descr (op: DW_OP_plus, oprnd1: 0, oprnd2: 0)); |
2814 | } |
2815 | return head; |
2816 | } |
2817 | |
2818 | /* This function builds a dwarf location descriptor sequence from a |
2819 | dw_cfa_location, adding the given OFFSET to the result of the |
2820 | expression. */ |
2821 | |
2822 | struct dw_loc_descr_node * |
2823 | build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset) |
2824 | { |
2825 | struct dw_loc_descr_node *head, *tmp; |
2826 | |
2827 | offset += cfa->offset; |
2828 | |
2829 | if (cfa->reg.span > 1) |
2830 | { |
2831 | head = build_span_loc (reg: cfa->reg); |
2832 | |
2833 | if (maybe_ne (a: offset, b: 0)) |
2834 | loc_descr_plus_const (list_head: &head, poly_offset: offset); |
2835 | } |
2836 | else if (cfa->indirect) |
2837 | { |
2838 | head = new_reg_loc_descr (reg: cfa->reg.reg, offset: cfa->base_offset); |
2839 | head->dw_loc_oprnd1.val_class = dw_val_class_const; |
2840 | head->dw_loc_oprnd1.val_entry = NULL; |
2841 | tmp = new_loc_descr (op: DW_OP_deref, oprnd1: 0, oprnd2: 0); |
2842 | add_loc_descr (list_head: &head, descr: tmp); |
2843 | loc_descr_plus_const (list_head: &head, poly_offset: offset); |
2844 | } |
2845 | else |
2846 | head = new_reg_loc_descr (reg: cfa->reg.reg, offset); |
2847 | |
2848 | return head; |
2849 | } |
2850 | |
2851 | /* This function builds a dwarf location descriptor sequence for |
2852 | the address at OFFSET from the CFA when stack is aligned to |
2853 | ALIGNMENT byte. */ |
2854 | |
2855 | struct dw_loc_descr_node * |
2856 | build_cfa_aligned_loc (dw_cfa_location *cfa, |
2857 | poly_int64 offset, HOST_WIDE_INT alignment) |
2858 | { |
2859 | struct dw_loc_descr_node *head; |
2860 | unsigned int dwarf_fp |
2861 | = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM); |
2862 | |
2863 | /* When CFA is defined as FP+OFFSET, emulate stack alignment. */ |
2864 | if (cfa->reg.reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0) |
2865 | { |
2866 | head = new_reg_loc_descr (reg: dwarf_fp, offset: 0); |
2867 | add_loc_descr (list_head: &head, descr: int_loc_descriptor (alignment)); |
2868 | add_loc_descr (list_head: &head, descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0)); |
2869 | loc_descr_plus_const (list_head: &head, poly_offset: offset); |
2870 | } |
2871 | else |
2872 | head = new_reg_loc_descr (reg: dwarf_fp, offset); |
2873 | return head; |
2874 | } |
2875 | |
2876 | /* And now, the support for symbolic debugging information. */ |
2877 | |
2878 | /* .debug_str support. */ |
2879 | |
/* Forward declarations for the hook implementations installed in the
   dwarf2_debug_hooks table below.  */

/* Compilation-unit lifecycle.  */
static void dwarf2out_init (const char *);
static void dwarf2out_finish (const char *);
static void dwarf2out_early_finish (const char *);
static void dwarf2out_assembly_start (void);
/* Preprocessor macro and source-file tracking.  */
static void dwarf2out_define (unsigned int, const char *);
static void dwarf2out_undef (unsigned int, const char *);
static void dwarf2out_start_source_file (unsigned, const char *);
static void dwarf2out_end_source_file (unsigned);
/* Declarations, blocks, and locations.  */
static void dwarf2out_function_decl (tree);
static void dwarf2out_begin_block (unsigned, unsigned);
static void dwarf2out_end_block (unsigned, unsigned);
static bool dwarf2out_ignore_block (const_tree);
static void dwarf2out_set_ignored_loc (unsigned, unsigned, const char *);
static void dwarf2out_early_global_decl (tree);
static void dwarf2out_late_global_decl (tree);
static void dwarf2out_type_decl (tree, int);
static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
						 dw_die_ref);
static void dwarf2out_abstract_function (tree);
static void dwarf2out_var_location (rtx_insn *);
static void dwarf2out_inline_entry (tree);
static void dwarf2out_size_function (tree);
static void dwarf2out_begin_function (tree);
static void dwarf2out_end_function (unsigned int);
static void dwarf2out_register_main_translation_unit (tree unit);
static void dwarf2out_set_name (tree, tree);
/* Cross-unit DIE references (used by LTO).  */
static void dwarf2out_register_external_die (tree decl, const char *sym,
					     unsigned HOST_WIDE_INT off);
static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
					unsigned HOST_WIDE_INT *off);
2911 | |
2912 | /* The debug hooks structure. */ |
2913 | |
2914 | const struct gcc_debug_hooks dwarf2_debug_hooks = |
2915 | { |
2916 | .init: dwarf2out_init, |
2917 | .finish: dwarf2out_finish, |
2918 | .early_finish: dwarf2out_early_finish, |
2919 | .assembly_start: dwarf2out_assembly_start, |
2920 | .define: dwarf2out_define, |
2921 | .undef: dwarf2out_undef, |
2922 | .start_source_file: dwarf2out_start_source_file, |
2923 | .end_source_file: dwarf2out_end_source_file, |
2924 | .begin_block: dwarf2out_begin_block, |
2925 | .end_block: dwarf2out_end_block, |
2926 | .ignore_block: dwarf2out_ignore_block, |
2927 | .source_line: dwarf2out_source_line, |
2928 | .set_ignored_loc: dwarf2out_set_ignored_loc, |
2929 | .begin_prologue: dwarf2out_begin_prologue, |
2930 | #if VMS_DEBUGGING_INFO |
2931 | dwarf2out_vms_end_prologue, |
2932 | dwarf2out_vms_begin_epilogue, |
2933 | #else |
2934 | .end_prologue: debug_nothing_int_charstar, |
2935 | .begin_epilogue: debug_nothing_int_charstar, |
2936 | #endif |
2937 | .end_epilogue: dwarf2out_end_epilogue, |
2938 | .begin_function: dwarf2out_begin_function, |
2939 | .end_function: dwarf2out_end_function, /* end_function */ |
2940 | .register_main_translation_unit: dwarf2out_register_main_translation_unit, |
2941 | .function_decl: dwarf2out_function_decl, /* function_decl */ |
2942 | .early_global_decl: dwarf2out_early_global_decl, |
2943 | .late_global_decl: dwarf2out_late_global_decl, |
2944 | .type_decl: dwarf2out_type_decl, /* type_decl */ |
2945 | .imported_module_or_decl: dwarf2out_imported_module_or_decl, |
2946 | .die_ref_for_decl: dwarf2out_die_ref_for_decl, |
2947 | .register_external_die: dwarf2out_register_external_die, |
2948 | .deferred_inline_function: debug_nothing_tree, /* deferred_inline_function */ |
2949 | /* The DWARF 2 backend tries to reduce debugging bloat by not |
2950 | emitting the abstract description of inline functions until |
2951 | something tries to reference them. */ |
2952 | .outlining_inline_function: dwarf2out_abstract_function, /* outlining_inline_function */ |
2953 | .label: debug_nothing_rtx_code_label, /* label */ |
2954 | .handle_pch: debug_nothing_int, /* handle_pch */ |
2955 | .var_location: dwarf2out_var_location, |
2956 | .inline_entry: dwarf2out_inline_entry, /* inline_entry */ |
2957 | .size_function: dwarf2out_size_function, /* size_function */ |
2958 | .switch_text_section: dwarf2out_switch_text_section, |
2959 | .set_name: dwarf2out_set_name, |
2960 | .start_end_main_source_file: 1, /* start_end_main_source_file */ |
2961 | TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */ |
2962 | }; |
2963 | |
2964 | const struct gcc_debug_hooks dwarf2_lineno_debug_hooks = |
2965 | { |
2966 | .init: dwarf2out_init, |
2967 | .finish: debug_nothing_charstar, |
2968 | .early_finish: debug_nothing_charstar, |
2969 | .assembly_start: dwarf2out_assembly_start, |
2970 | .define: debug_nothing_int_charstar, |
2971 | .undef: debug_nothing_int_charstar, |
2972 | .start_source_file: debug_nothing_int_charstar, |
2973 | .end_source_file: debug_nothing_int, |
2974 | .begin_block: debug_nothing_int_int, /* begin_block */ |
2975 | .end_block: debug_nothing_int_int, /* end_block */ |
2976 | .ignore_block: debug_true_const_tree, /* ignore_block */ |
2977 | .source_line: dwarf2out_source_line, /* source_line */ |
2978 | .set_ignored_loc: debug_nothing_int_int_charstar, /* set_ignored_loc */ |
2979 | .begin_prologue: debug_nothing_int_int_charstar, /* begin_prologue */ |
2980 | .end_prologue: debug_nothing_int_charstar, /* end_prologue */ |
2981 | .begin_epilogue: debug_nothing_int_charstar, /* begin_epilogue */ |
2982 | .end_epilogue: debug_nothing_int_charstar, /* end_epilogue */ |
2983 | .begin_function: debug_nothing_tree, /* begin_function */ |
2984 | .end_function: debug_nothing_int, /* end_function */ |
2985 | .register_main_translation_unit: debug_nothing_tree, /* register_main_translation_unit */ |
2986 | .function_decl: debug_nothing_tree, /* function_decl */ |
2987 | .early_global_decl: debug_nothing_tree, /* early_global_decl */ |
2988 | .late_global_decl: debug_nothing_tree, /* late_global_decl */ |
2989 | .type_decl: debug_nothing_tree_int, /* type_decl */ |
2990 | .imported_module_or_decl: debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */ |
2991 | .die_ref_for_decl: debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */ |
2992 | .register_external_die: debug_nothing_tree_charstar_uhwi, /* register_external_die */ |
2993 | .deferred_inline_function: debug_nothing_tree, /* deferred_inline_function */ |
2994 | .outlining_inline_function: debug_nothing_tree, /* outlining_inline_function */ |
2995 | .label: debug_nothing_rtx_code_label, /* label */ |
2996 | .handle_pch: debug_nothing_int, /* handle_pch */ |
2997 | .var_location: debug_nothing_rtx_insn, /* var_location */ |
2998 | .inline_entry: debug_nothing_tree, /* inline_entry */ |
2999 | .size_function: debug_nothing_tree, /* size_function */ |
3000 | .switch_text_section: debug_nothing_void, /* switch_text_section */ |
3001 | .set_name: debug_nothing_tree_tree, /* set_name */ |
3002 | .start_end_main_source_file: 0, /* start_end_main_source_file */ |
3003 | TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */ |
3004 | }; |
3005 | |
3006 | /* NOTE: In the comments in this file, many references are made to |
3007 | "Debugging Information Entries". This term is abbreviated as `DIE' |
3008 | throughout the remainder of this file. */ |
3009 | |
3010 | /* An internal representation of the DWARF output is built, and then |
3011 | walked to generate the DWARF debugging info. The walk of the internal |
3012 | representation is done after the entire program has been compiled. |
3013 | The types below are used to describe the internal representation. */ |
3014 | |
3015 | /* Whether to put type DIEs into their own section .debug_types instead |
   of making them part of the .debug_info section.  Only supported for
   DWARF version 4 or higher, and only when the user has not disabled
   it via -fno-debug-types-section.  It is more efficient to put them in a
3019 | separate comdat sections since the linker will then be able to |
3020 | remove duplicates. But not all tools support .debug_types sections |
3021 | yet. For Dwarf V5 or higher .debug_types doesn't exist any more, |
3022 | it is DW_UT_type unit type in .debug_info section. For late LTO |
3023 | debug there should be almost no types emitted so avoid enabling |
3024 | -fdebug-types-section there. */ |
3025 | |
3026 | #define use_debug_types (dwarf_version >= 4 \ |
3027 | && flag_debug_types_section \ |
3028 | && !in_lto_p) |
3029 | |
/* Various DIEs use offsets relative to the beginning of the
   .debug_info section to refer to each other.  */
3032 | |
3033 | typedef long int dw_offset; |
3034 | |
3035 | struct comdat_type_node; |
3036 | |
3037 | /* The entries in the line_info table more-or-less mirror the opcodes |
3038 | that are used in the real dwarf line table. Arrays of these entries |
3039 | are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not |
3040 | supported. */ |
3041 | |
/* The VAL operand accompanying each entry (see dw_line_info_entry) is
   interpreted per-opcode as described below: a label index, a line,
   file, or column number, a discriminator, or nothing.  */
enum dw_line_info_opcode {
  /* Emit DW_LNE_set_address; the operand is the label index.  */
  LI_set_address,

  /* Emit a row to the matrix with the given line.  This may be done
     via any combination of DW_LNS_copy, DW_LNS_advance_line, and
     special opcodes.  */
  LI_set_line,

  /* Emit a DW_LNS_set_file.  */
  LI_set_file,

  /* Emit a DW_LNS_set_column.  */
  LI_set_column,

  /* Emit a DW_LNS_negate_stmt; the operand is ignored.  */
  LI_negate_stmt,

  /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored.  */
  LI_set_prologue_end,
  LI_set_epilogue_begin,

  /* Emit a DW_LNE_set_discriminator.  */
  LI_set_discriminator,

  /* Output a Fixed Advance PC; the target PC is the label index; the
     base PC is the previous LI_adv_address or LI_set_address entry.
     We only use this when emitting debug views without assembler
     support, at explicit user request.  Ideally, we should only use
     it when the offset might be zero but we can't tell: it's the only
     way to maybe change the PC without resetting the view number.  */
  LI_adv_address
};
3075 | |
/* One collected line-table operation: an opcode together with its
   single operand (meaning depends on the opcode; see above).  */
typedef struct GTY(()) dw_line_info_struct {
  enum dw_line_info_opcode opcode;
  unsigned int val;
} dw_line_info_entry;
3080 | |
3081 | |
/* Per-section accumulator for line-number information.  */
struct GTY(()) dw_line_info_table {
  /* The label that marks the end of this section.  */
  const char *end_label;

  /* The values for the last row of the matrix, as collected in the table.
     These are used to minimize the changes to the next row.  */
  unsigned int file_num;
  unsigned int line_num;
  unsigned int column_num;
  int discrim_num;
  bool is_stmt;
  /* NOTE(review): presumably set once this table has started collecting
     rows -- confirm against the table-allocation code outside this chunk.  */
  bool in_use;

  /* This denotes the NEXT view number.

     If it is 0, it is known that the NEXT view will be the first view
     at the given PC.

     If it is -1, we're forcing the view number to be reset, e.g. at a
     function entry.

     The meaning of other nonzero values depends on whether we're
     computing views internally or leaving it for the assembler to do
     so.  If we're emitting them internally, view denotes the view
     number since the last known advance of PC.  If we're leaving it
     for the assembler, it denotes the LVU label number that we're
     going to ask the assembler to assign.  */
  var_loc_view view;

  /* This counts the number of symbolic views emitted in this table
     since the latest view reset.  Its max value, over all tables,
     sets symview_upper_bound.  */
  var_loc_view symviews_since_reset;

  /* Helpers for the view field above: (var_loc_view)-1 is the forced
     reset marker, (var_loc_view)0 a known first-view-at-PC.  */
#define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
#define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
#define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
#define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))

  /* The line-table operations collected for this section, in order.  */
  vec<dw_line_info_entry, va_gc> *entries;
};
3123 | |
3124 | /* This is an upper bound for view numbers that the assembler may |
3125 | assign to symbolic views output in this translation. It is used to |
3126 | decide how big a field to use to represent view numbers in |
3127 | symview-classed attributes. */ |
3128 | |
3129 | static var_loc_view symview_upper_bound; |
3130 | |
/* If we're keeping track of location views and their reset points, and
   INSN is a reset point (i.e., it necessarily advances the PC), mark
   the next view in TABLE as reset.  */
3134 | |
3135 | static void |
3136 | maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table) |
3137 | { |
3138 | if (!debug_internal_reset_location_views) |
3139 | return; |
3140 | |
3141 | /* Maybe turn (part of?) this test into a default target hook. */ |
3142 | int reset = 0; |
3143 | |
3144 | if (targetm.reset_location_view) |
3145 | reset = targetm.reset_location_view (insn); |
3146 | |
3147 | if (reset) |
3148 | ; |
3149 | else if (JUMP_TABLE_DATA_P (insn)) |
3150 | reset = 1; |
3151 | else if (GET_CODE (insn) == USE |
3152 | || GET_CODE (insn) == CLOBBER |
3153 | || GET_CODE (insn) == ASM_INPUT |
3154 | || asm_noperands (insn) >= 0) |
3155 | ; |
3156 | else if (get_attr_min_length (insn) > 0) |
3157 | reset = 1; |
3158 | |
3159 | if (reset > 0 && !RESETTING_VIEW_P (table->view)) |
3160 | RESET_NEXT_VIEW (table->view); |
3161 | } |
3162 | |
3163 | /* The Debugging Information Entry (DIE) structure. DIEs form a tree. |
3164 | The children of each node form a circular list linked by |
3165 | die_sib. die_child points to the node *before* the "first" child node. */ |
3166 | |
typedef struct GTY((chain_circular ("%h.die_sib" ), for_user)) die_struct {
  /* Identification of this DIE: an external symbol name, or, when
     comdat_type_p is set, the comdat type node holding its signature.  */
  union die_symbol_or_type_node
    {
      const char * GTY ((tag ("0" ))) die_symbol;
      comdat_type_node *GTY ((tag ("1" ))) die_type_node;
    }
    GTY ((desc ("%0.comdat_type_p" ))) die_id;
  /* The attributes attached to this DIE.  */
  vec<dw_attr_node, va_gc> *die_attr;
  dw_die_ref die_parent;
  /* Children form a circular list linked through die_sib; die_child
     points to the node *before* the "first" child (see comment above).  */
  dw_die_ref die_child;
  dw_die_ref die_sib;
  dw_die_ref die_definition; /* ref from a specification to its definition */
  /* Offset of this DIE within its section; tracked by calc_die_sizes.  */
  dw_offset die_offset;
  /* Abbreviation-table entry assigned to this DIE.  */
  unsigned long die_abbrev;
  /* Scratch mark for DIE-walking code -- NOTE(review): its meaning
     depends on the particular walk; confirm before relying on it.  */
  int die_mark;
  /* NOTE(review): presumably the DECL_UID of the associated decl (cf.
     decl_die_table below) -- confirm.  */
  unsigned int decl_id;
  enum dwarf_tag die_tag;
  /* Die is used and must not be pruned as unused.  */
  BOOL_BITFIELD die_perennial_p : 1;
  BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
  /* For an external ref to die_symbol if die_offset contains an extra
     offset to that symbol.  */
  BOOL_BITFIELD with_offset : 1;
  /* Whether this DIE was removed from the DIE tree, for example via
     prune_unused_types.  We don't consider those present from the
     DIE lookup routines.  */
  BOOL_BITFIELD removed : 1;
  /* Lots of spare bits.  */
}
die_node;
3197 | |
/* Set to TRUE while dwarf2out_early_global_decl is running.  */
static bool early_dwarf;
static bool early_dwarf_finished;
/* RAII helper: raises early_dwarf for the lifetime of an instance and
   restores the previous value on destruction.  Constructing one after
   early dwarf generation has finished is a bug (asserted).  */
class set_early_dwarf {
public:
  /* The prior value of early_dwarf, restored by the destructor.  */
  bool saved;
  set_early_dwarf () : saved(early_dwarf)
    {
      gcc_assert (! early_dwarf_finished);
      early_dwarf = true;
    }
  ~set_early_dwarf () { early_dwarf = saved; }
};
3211 | |
/* Evaluate 'expr' while 'c' is set to each child of DIE in order.
   Since die_child points to the node *before* the first child in the
   circular sibling list, C is advanced before EXPR is evaluated, and
   iteration stops when it wraps back around to die_child.  */
#define FOR_EACH_CHILD(die, c, expr) do {	\
  c = die->die_child;				\
  if (c) do {					\
    c = c->die_sib;				\
    expr;					\
  } while (c != die->die_child);		\
} while (0)
3220 | |
/* The pubname structure: pairs a DIE with the name under which it is
   published.  */

typedef struct GTY(()) pubname_struct {
  dw_die_ref die;
  const char *name;
}
pubname_entry;

/* One entry of a range list.  */
struct GTY(()) dw_ranges {
  const char *label;
  /* If this is positive, it's a block number, otherwise it's a
     bitwise-negated index into dw_ranges_by_label.  */
  int num;
  /* If idx is equal to DW_RANGES_IDX_SKELETON, it should be emitted
     into .debug_rnglists section rather than .debug_rnglists.dwo
     for -gsplit-dwarf and DWARF >= 5.  */
#define DW_RANGES_IDX_SKELETON ((1U << 31) - 1)
  /* Index for the range list for DW_FORM_rnglistx.  */
  unsigned int idx : 31;
  /* True if this range might be possibly in a different section
     from previous entry.  */
  unsigned int maybe_new_sec : 1;
  /* NOTE(review): presumably address-table entries for the begin/end
     labels when addresses are emitted by index -- confirm against the
     addr_table code outside this chunk.  */
  addr_table_entry *begin_entry;
  addr_table_entry *end_entry;
};

/* A structure to hold a macinfo entry.  */

typedef struct GTY(()) macinfo_struct {
  /* NOTE(review): presumably a DW_MACINFO_*/DW_MACRO_* opcode -- confirm.  */
  unsigned char code;
  unsigned HOST_WIDE_INT lineno;
  const char *info;
}
macinfo_entry;

/* A begin/end label pair delimiting one address range.  */
struct GTY(()) dw_ranges_by_label {
  const char *begin;
  const char *end;
};
3262 | |
/* The comdat type node structure.  Nodes form a singly-linked list
   through the next field (see comdat_type_list below).  */
struct GTY(()) comdat_type_node
{
  dw_die_ref root_die;
  dw_die_ref type_die;
  dw_die_ref skeleton_die;
  /* The DWARF type signature identifying this type unit.  */
  char signature[DWARF_TYPE_SIGNATURE_SIZE];
  comdat_type_node *next;
};

/* A list of DIEs for which we can't determine ancestry (parent_die
   field) just yet.  Later in dwarf2out_finish we will fill in the
   missing bits.  */
typedef struct GTY(()) limbo_die_struct {
  dw_die_ref die;
  /* The tree for which this DIE was created.  We use this to
     determine ancestry later.  */
  tree created_for;
  struct limbo_die_struct *next;
}
limbo_die_node;

/* Links an old DIE to its replacement, with a pointer up the chain of
   enclosing pairs -- NOTE(review): confirm usage against the
   skeleton-building code, which lies outside this chunk.  */
typedef struct skeleton_chain_struct
{
  dw_die_ref old_die;
  dw_die_ref new_die;
  struct skeleton_chain_struct *parent;
}
skeleton_chain_node;
3292 | |
/* Define a macro which returns nonzero for a TYPE_DECL which was
   implicitly generated for a type.

   Note that, unlike the C front-end (which generates a NULL named
   TYPE_DECL node for each complete tagged type, each array type,
   and each function type node created) the C++ front-end generates
   a _named_ TYPE_DECL node for each tagged type node created.
   These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
   generate a DW_TAG_typedef DIE for them.  Likewise with the Ada
   front-end, but for each type, tagged or not.

   Beware: DECL is evaluated multiple times, so pass only a
   side-effect-free expression.  */

#define TYPE_DECL_IS_STUB(decl)				\
  (DECL_NAME (decl) == NULL_TREE			\
   || (DECL_ARTIFICIAL (decl)				\
       && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl)))	\
	   /* This is necessary for stub decls that	\
	      appear in nested inline functions.  */	\
	   || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE	\
	       && (decl_ultimate_origin (decl)		\
		   == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3313 | |
3314 | /* Information concerning the compilation unit's programming |
3315 | language, and compiler version. */ |
3316 | |
/* Fixed size portion of the DWARF compilation unit header.  */
#define DWARF_COMPILE_UNIT_HEADER_SIZE \
  (DWARF_INITIAL_LENGTH_SIZE + dwarf_offset_size \
   + (dwarf_version >= 5 ? 4 : 3))

/* Fixed size portion of the DWARF comdat type unit header.  */
#define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
  (DWARF_COMPILE_UNIT_HEADER_SIZE \
   + DWARF_TYPE_SIGNATURE_SIZE + dwarf_offset_size)

/* Fixed size portion of the DWARF skeleton compilation unit header.  */
#define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
  (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))

/* Fixed size portion of public names info.  */
#define DWARF_PUBNAMES_HEADER_SIZE (2 * dwarf_offset_size + 2)

/* Fixed size portion of the address range info.  */
#define DWARF_ARANGES_HEADER_SIZE \
  (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + dwarf_offset_size + 4, \
		DWARF2_ADDR_SIZE * 2) \
   - DWARF_INITIAL_LENGTH_SIZE)
3339 | |
3340 | /* Size of padding portion in the address range info. It must be |
3341 | aligned to twice the pointer size. */ |
3342 | #define DWARF_ARANGES_PAD_SIZE \ |
3343 | (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + dwarf_offset_size + 4, \ |
3344 | DWARF2_ADDR_SIZE * 2) \ |
3345 | - (DWARF_INITIAL_LENGTH_SIZE + dwarf_offset_size + 4)) |
3346 | |
3347 | /* Use assembler line directives if available. */ |
3348 | #ifndef DWARF2_ASM_LINE_DEBUG_INFO |
3349 | #ifdef HAVE_AS_DWARF2_DEBUG_LINE |
3350 | #define DWARF2_ASM_LINE_DEBUG_INFO 1 |
3351 | #else |
3352 | #define DWARF2_ASM_LINE_DEBUG_INFO 0 |
3353 | #endif |
3354 | #endif |
3355 | |
3356 | /* Use assembler views in line directives if available. */ |
3357 | #ifndef DWARF2_ASM_VIEW_DEBUG_INFO |
3358 | #ifdef HAVE_AS_DWARF2_DEBUG_VIEW |
3359 | #define DWARF2_ASM_VIEW_DEBUG_INFO 1 |
3360 | #else |
3361 | #define DWARF2_ASM_VIEW_DEBUG_INFO 0 |
3362 | #endif |
3363 | #endif |
3364 | |
3365 | /* Return true if GCC configure detected assembler support for .loc. */ |
3366 | |
bool
dwarf2out_default_as_loc_support (void)
{
  return DWARF2_ASM_LINE_DEBUG_INFO;
  /* This is the macro's last permitted use: poison it so any later
     reference is diagnosed; subsequent code must consult the
     dwarf2out_as_loc_support value instead (see
     output_asm_line_debug_info below).  */
#if (GCC_VERSION >= 3000)
# undef DWARF2_ASM_LINE_DEBUG_INFO
# pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
#endif
}
3376 | |
3377 | /* Return true if GCC configure detected assembler support for views |
3378 | in .loc directives. */ |
3379 | |
bool
dwarf2out_default_as_locview_support (void)
{
  return DWARF2_ASM_VIEW_DEBUG_INFO;
  /* Last permitted use of the macro: poison it so later code is forced
     to use the dwarf2out_as_locview_support value instead.  */
#if (GCC_VERSION >= 3000)
# undef DWARF2_ASM_VIEW_DEBUG_INFO
# pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
#endif
}
3389 | |
3390 | /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported |
3391 | view computation, and it refers to a view identifier for which we |
3392 | will not emit a label because it is known to map to a view number |
3393 | zero. We won't allocate the bitmap if we're not using assembler |
3394 | support for location views, but we have to make the variable |
3395 | visible for GGC and for code that will be optimized out for lack of |
3396 | support but that's still parsed and compiled. We could abstract it |
3397 | out with macros, but it's not worth it. */ |
static GTY(()) bitmap zero_view_p;

/* Evaluate to TRUE iff N is known to identify the first location view
   at its PC.  When not using assembler location view computation,
   that must be view number zero.  Otherwise, ZERO_VIEW_P is allocated
   and views label numbers recorded in it are the ones known to be
   zero.  (var_loc_view)-1 is the forced-reset marker (see
   FORCE_RESET_NEXT_VIEW) and thus also denotes a first view.  */
#define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
			|| (N) == (var_loc_view)-1 \
			|| (zero_view_p \
			    && bitmap_bit_p (zero_view_p, (N))))
3409 | |
3410 | /* Return true iff we're to emit .loc directives for the assembler to |
3411 | generate line number sections. |
3412 | |
3413 | When we're not emitting views, all we need from the assembler is |
3414 | support for .loc directives. |
3415 | |
3416 | If we are emitting views, we can only use the assembler's .loc |
3417 | support if it also supports views. |
3418 | |
3419 | When the compiler is emitting the line number programs and |
3420 | computing view numbers itself, it resets view numbers at known PC |
3421 | changes and counts from that, and then it emits view numbers as |
3422 | literal constants in locviewlists. There are cases in which the |
3423 | compiler is not sure about PC changes, e.g. when extra alignment is |
3424 | requested for a label. In these cases, the compiler may not reset |
3425 | the view counter, and the potential PC advance in the line number |
3426 | program will use an opcode that does not reset the view counter |
3427 | even if the PC actually changes, so that compiler and debug info |
3428 | consumer can keep view numbers in sync. |
3429 | |
3430 | When the compiler defers view computation to the assembler, it |
3431 | emits symbolic view numbers in locviewlists, with the exception of |
3432 | views known to be zero (forced resets, or reset after |
3433 | compiler-visible PC changes): instead of emitting symbols for |
3434 | these, we emit literal zero and assert the assembler agrees with |
3435 | the compiler's assessment. We could use symbolic views everywhere, |
3436 | instead of special-casing zero views, but then we'd be unable to |
3437 | optimize out locviewlists that contain only zeros. */ |
3438 | |
3439 | static bool |
3440 | output_asm_line_debug_info (void) |
3441 | { |
3442 | return (dwarf2out_as_loc_support |
3443 | && (dwarf2out_as_locview_support |
3444 | || !debug_variable_location_views)); |
3445 | } |
3446 | |
3447 | static bool asm_outputs_debug_line_str (void); |
3448 | |
3449 | /* Minimum line offset in a special line info. opcode. |
3450 | This value was chosen to give a reasonable range of values. */ |
3451 | #define DWARF_LINE_BASE -10 |
3452 | |
3453 | /* First special line opcode - leave room for the standard opcodes. */ |
3454 | #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1) |
3455 | |
3456 | /* Range of line offsets in a special line info. opcode. */ |
3457 | #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1) |
3458 | |
3459 | /* Flag that indicates the initial value of the is_stmt_start flag. |
3460 | In the present implementation, we do not mark any lines as |
3461 | the beginning of a source statement, because that information |
3462 | is not made available by the GCC front-end. */ |
3463 | #define DWARF_LINE_DEFAULT_IS_STMT_START 1 |
3464 | |
3465 | /* Maximum number of operations per instruction bundle. */ |
3466 | #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN |
3467 | #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1 |
3468 | #endif |
3469 | |
3470 | /* This location is used by calc_die_sizes() to keep track |
3471 | the offset of each DIE within the .debug_info section. */ |
3472 | static unsigned long next_die_offset; |
3473 | |
3474 | /* Record the root of the DIE's built for the current compilation unit. */ |
3475 | static GTY(()) dw_die_ref single_comp_unit_die; |
3476 | |
3477 | /* A list of type DIEs that have been separated into comdat sections. */ |
3478 | static GTY(()) comdat_type_node *comdat_type_list; |
3479 | |
3480 | /* A list of CU DIEs that have been separated. */ |
3481 | static GTY(()) limbo_die_node *cu_die_list; |
3482 | |
3483 | /* A list of DIEs with a NULL parent waiting to be relocated. */ |
3484 | static GTY(()) limbo_die_node *limbo_die_list; |
3485 | |
3486 | /* A list of DIEs for which we may have to generate |
3487 | DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */ |
3488 | static GTY(()) limbo_die_node *deferred_asm_name; |
3489 | |
3490 | struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data> |
3491 | { |
3492 | typedef const char *compare_type; |
3493 | |
3494 | static hashval_t hash (dwarf_file_data *); |
3495 | static bool equal (dwarf_file_data *, const char *); |
3496 | }; |
3497 | |
3498 | /* Filenames referenced by this compilation unit. */ |
3499 | static GTY(()) hash_table<dwarf_file_hasher> *file_table; |
3500 | |
/* Hash-table traits for decl_die_table below: DIEs describing
   declarations are looked up by the declaration tree node.  */
struct decl_die_hasher : ggc_ptr_hash<die_node>
{
  /* Lookups are performed with a decl tree as the key.  */
  typedef tree compare_type;

  static hashval_t hash (die_node *);
  static bool equal (die_node *, tree);
};
3508 | /* A hash table of references to DIE's that describe declarations. |
3509 | The key is a DECL_UID() which is a unique number identifying each decl. */ |
3510 | static GTY (()) hash_table<decl_die_hasher> *decl_die_table; |
3511 | |
/* Entry for variable_value_hash below: collects the DIEs that refer
   (via DW_OP_GNU_variable_value) to variables whose DECL_CONTEXT is
   the function with DECL_UID equal to DECL_ID.  */
struct GTY ((for_user)) variable_value_struct {
  /* DECL_UID of the referenced decl's context function.  */
  unsigned int decl_id;
  /* All DIEs recorded for that function.  */
  vec<dw_die_ref, va_gc> *dies;
};
3516 | |
/* Hash-table traits for variable_value_hash below: entries are looked
   up by the FUNCTION_DECL tree node.  */
struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
{
  /* Lookups are performed with a FUNCTION_DECL tree as the key.  */
  typedef tree compare_type;

  static hashval_t hash (variable_value_struct *);
  static bool equal (variable_value_struct *, tree);
};
3524 | /* A hash table of DIEs that contain DW_OP_GNU_variable_value with |
3525 | dw_val_class_decl_ref class, indexed by FUNCTION_DECLs which is |
3526 | DECL_CONTEXT of the referenced VAR_DECLs. */ |
3527 | static GTY (()) hash_table<variable_value_hasher> *variable_value_hash; |
3528 | |
/* Hash-table traits for common_block_die_table below; both hashing and
   comparison operate directly on the stored DIEs.  */
struct block_die_hasher : ggc_ptr_hash<die_struct>
{
  static hashval_t hash (die_struct *);
  static bool equal (die_struct *, die_struct *);
};
3534 | |
3535 | /* A hash table of references to DIE's that describe COMMON blocks. |
3536 | The key is DECL_UID() ^ die_parent. */ |
3537 | static GTY (()) hash_table<block_die_hasher> *common_block_die_table; |
3538 | |
/* Pairing of a DIE with a tree argument; elements of
   tmpl_value_parm_die_table below.  */
typedef struct GTY(()) die_arg_entry_struct {
  dw_die_ref die;
  tree arg;
} die_arg_entry;
3543 | |
3544 | |
3545 | /* Node of the variable location list. */ |
/* Node of the variable location list.  */
struct GTY ((chain_next ("%h.next" ))) var_loc_node {
  /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
     EXPR_LIST chain.  For small bitsizes, bitsize is encoded
     in mode of the EXPR_LIST node and first EXPR_LIST operand
     is either NOTE_INSN_VAR_LOCATION for a piece with a known
     location or NULL for padding.  For larger bitsizes,
     mode is 0 and first operand is a CONCAT with bitsize
     as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
     NULL as second operand.  */
  rtx GTY (()) loc;
  /* Label at which this location starts to apply.  */
  const char * GTY (()) label;
  /* Next node in the chain for the same variable.  */
  struct var_loc_node * GTY (()) next;
  /* Location view number associated with LABEL.  */
  var_loc_view view;
};
3560 | |
3561 | /* Variable location list. */ |
/* Variable location list.  */
struct GTY ((for_user)) var_loc_list_def {
  /* Head of the chained list of location notes for the variable.  */
  struct var_loc_node * GTY (()) first;

  /* Pointer to the last but one or last element of the
     chained list.  If the list is empty, both first and
     last are NULL, if the list contains just one node
     or the last node certainly is not redundant, it points
     to the last node, otherwise points to the last but one.
     Do not mark it for GC because it is marked through the chain.  */
  struct var_loc_node * GTY ((skip ("%h" ))) last;

  /* Pointer to the last element before section switch,
     if NULL, either sections weren't switched or first
     is after section switch.  */
  struct var_loc_node * GTY ((skip ("%h" ))) last_before_switch;

  /* DECL_UID of the variable decl.  */
  unsigned int decl_id;
};
typedef struct var_loc_list_def var_loc_list;
3582 | |
3583 | /* Call argument location list. */ |
/* Call argument location list.  */
struct GTY ((chain_next ("%h.next" ))) call_arg_loc_node {
  /* The NOTE recording the call's argument locations.  */
  rtx GTY (()) call_arg_loc_note;
  /* Label emitted at the call site.  */
  const char * GTY (()) label;
  /* Lexical block containing the call.  */
  tree GTY (()) block;
  /* True if the call site is a tail call.  */
  bool tail_call_p;
  /* SYMBOL_REF of the callee, if known.  */
  rtx GTY (()) symbol_ref;
  /* Next call site in the chain.  */
  struct call_arg_loc_node * GTY (()) next;
};
3592 | |
3593 | |
/* Hash-table traits for decl_loc_table below: variable location lists
   are looked up by the variable's decl tree node.  */
struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
{
  /* Lookups are performed with a decl tree as the key.  */
  typedef const_tree compare_type;

  static hashval_t hash (var_loc_list *);
  static bool equal (var_loc_list *, const_tree);
};
3601 | |
3602 | /* Table of decl location linked lists. */ |
3603 | static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table; |
3604 | |
3605 | /* Head and tail of call_arg_loc chain. */ |
3606 | static GTY (()) struct call_arg_loc_node *call_arg_locations; |
3607 | static struct call_arg_loc_node *call_arg_loc_last; |
3608 | |
3609 | /* Number of call sites in the current function. */ |
3610 | static int call_site_count = -1; |
3611 | /* Number of tail call sites in the current function. */ |
3612 | static int tail_call_site_count = -1; |
3613 | |
3614 | /* A cached location list. */ |
/* A cached location list.  */
struct GTY ((for_user)) cached_dw_loc_list_def {
  /* The DECL_UID of the decl that this entry describes.  */
  unsigned int decl_id;

  /* The cached location list.  */
  dw_loc_list_ref loc_list;
};
/* Convenience alias for the cached location list entry.  */
typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3623 | |
/* Hash-table traits for cached_dw_loc_list_table below: cached location
   lists are looked up by the described decl tree node.  */
struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
{

  /* Lookups are performed with a decl tree as the key.  */
  typedef const_tree compare_type;

  static hashval_t hash (cached_dw_loc_list *);
  static bool equal (cached_dw_loc_list *, const_tree);
};
3632 | |
3633 | /* Table of cached location lists. */ |
3634 | static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table; |
3635 | |
3636 | /* A vector of references to DIE's that are uniquely identified by their tag, |
3637 | presence/absence of children DIE's, and list of attribute/value pairs. */ |
3638 | static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table; |
3639 | |
/* A hash map to remember the stack usage for DWARF procedures.  The value
   stored is the stack size difference between before the DWARF procedure
   invocation and after it returned.  In other words, for a DWARF procedure
   that consumes N stack slots and that pushes M ones, this stores M - N.  */
3644 | static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map; |
3645 | |
3646 | /* A global counter for generating labels for line number data. */ |
3647 | static unsigned int line_info_label_num; |
3648 | |
3649 | /* The current table to which we should emit line number information |
3650 | for the current function. This will be set up at the beginning of |
3651 | assembly for the function. */ |
3652 | static GTY(()) dw_line_info_table *cur_line_info_table; |
3653 | |
3654 | /* The two default tables of line number info. */ |
3655 | static GTY(()) dw_line_info_table *text_section_line_info; |
3656 | static GTY(()) dw_line_info_table *cold_text_section_line_info; |
3657 | |
3658 | /* The set of all non-default tables of line number info. */ |
3659 | static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info; |
3660 | |
3661 | /* A flag to tell pubnames/types export if there is an info section to |
3662 | refer to. */ |
3663 | static bool info_section_emitted; |
3664 | |
3665 | /* A pointer to the base of a table that contains a list of publicly |
3666 | accessible names. */ |
3667 | static GTY (()) vec<pubname_entry, va_gc> *pubname_table; |
3668 | |
3669 | /* A pointer to the base of a table that contains a list of publicly |
3670 | accessible types. */ |
3671 | static GTY (()) vec<pubname_entry, va_gc> *pubtype_table; |
3672 | |
3673 | /* A pointer to the base of a table that contains a list of macro |
3674 | defines/undefines (and file start/end markers). */ |
3675 | static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table; |
3676 | |
3677 | /* True if .debug_macinfo or .debug_macros section is going to be |
3678 | emitted. */ |
3679 | #define have_macinfo \ |
3680 | ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \ |
3681 | && debug_info_level >= DINFO_LEVEL_VERBOSE \ |
3682 | && !macinfo_table->is_empty ()) |
3683 | |
3684 | /* Vector of dies for which we should generate .debug_ranges info. */ |
3685 | static GTY (()) vec<dw_ranges, va_gc> *ranges_table; |
3686 | |
3687 | /* Vector of pairs of labels referenced in ranges_table. */ |
3688 | static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label; |
3689 | |
3690 | /* Whether we have location lists that need outputting */ |
3691 | static GTY(()) bool have_location_lists; |
3692 | |
3693 | /* Unique label counter. */ |
3694 | static GTY(()) unsigned int loclabel_num; |
3695 | |
3696 | /* Unique label counter for point-of-call tables. */ |
3697 | static GTY(()) unsigned int poc_label_num; |
3698 | |
3699 | /* The last file entry emitted by maybe_emit_file(). */ |
3700 | static GTY(()) struct dwarf_file_data * last_emitted_file; |
3701 | |
3702 | /* Number of internal labels generated by gen_internal_sym(). */ |
3703 | static GTY(()) int label_num; |
3704 | |
3705 | static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table; |
3706 | |
3707 | /* Instances of generic types for which we need to generate debug |
3708 | info that describe their generic parameters and arguments. That |
3709 | generation needs to happen once all types are properly laid out so |
3710 | we do it at the end of compilation. */ |
3711 | static GTY(()) vec<tree, va_gc> *generic_type_instances; |
3712 | |
3713 | /* Offset from the "steady-state frame pointer" to the frame base, |
3714 | within the current function. */ |
3715 | static poly_int64 frame_pointer_fb_offset; |
3716 | static bool frame_pointer_fb_offset_valid; |
3717 | |
3718 | static vec<dw_die_ref> base_types; |
3719 | |
3720 | /* Flags to represent a set of attribute classes for attributes that represent |
3721 | a scalar value (bounds, pointers, ...). */ |
enum dw_scalar_form
{
  /* The value may be emitted as a literal constant.  */
  dw_scalar_form_constant = 0x01,
  /* The value may be emitted as a DWARF expression (exprloc).  */
  dw_scalar_form_exprloc = 0x02,
  /* The value may be emitted as a reference to another DIE.  */
  dw_scalar_form_reference = 0x04
};
3728 | |
3729 | /* Forward declarations for functions defined in this file. */ |
3730 | |
3731 | static bool is_pseudo_reg (const_rtx); |
3732 | static tree type_main_variant (tree); |
3733 | static bool is_tagged_type (const_tree); |
3734 | static const char *dwarf_tag_name (unsigned); |
3735 | static const char *dwarf_attr_name (unsigned); |
3736 | static const char *dwarf_form_name (unsigned); |
3737 | static tree decl_ultimate_origin (const_tree); |
3738 | static tree decl_class_context (tree); |
3739 | static void add_dwarf_attr (dw_die_ref, dw_attr_node *); |
3740 | static inline unsigned int AT_index (dw_attr_node *); |
3741 | static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned); |
3742 | static inline unsigned AT_flag (dw_attr_node *); |
3743 | static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT); |
3744 | static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT); |
3745 | static void add_AT_double (dw_die_ref, enum dwarf_attribute, |
3746 | HOST_WIDE_INT, unsigned HOST_WIDE_INT); |
3747 | static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int, |
3748 | unsigned int, unsigned char *); |
3749 | static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *); |
3750 | static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *); |
3751 | static inline const char *AT_string (dw_attr_node *); |
3752 | static enum dwarf_form AT_string_form (dw_attr_node *); |
3753 | static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref); |
3754 | static void add_AT_specification (dw_die_ref, dw_die_ref); |
3755 | static inline dw_die_ref AT_ref (dw_attr_node *); |
3756 | static inline int AT_ref_external (dw_attr_node *); |
3757 | static inline void set_AT_ref_external (dw_attr_node *, int); |
3758 | static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref); |
3759 | static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute, |
3760 | dw_loc_list_ref); |
3761 | static inline dw_loc_list_ref AT_loc_list (dw_attr_node *); |
3762 | static void add_AT_view_list (dw_die_ref, enum dwarf_attribute); |
3763 | static inline dw_loc_list_ref AT_loc_list (dw_attr_node *); |
3764 | static addr_table_entry *add_addr_table_entry (void *, enum ate_kind); |
3765 | static void remove_addr_table_entry (addr_table_entry *); |
3766 | static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool); |
3767 | static inline rtx AT_addr (dw_attr_node *); |
3768 | static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *); |
3769 | static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *); |
3770 | static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *); |
3771 | static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *); |
3772 | static void add_AT_range_list (dw_die_ref, enum dwarf_attribute, |
3773 | unsigned long, bool); |
3774 | static inline const char *AT_lbl (dw_attr_node *); |
3775 | static const char *get_AT_low_pc (dw_die_ref); |
3776 | static bool is_c (void); |
3777 | static bool is_cxx (void); |
3778 | static bool is_cxx (const_tree); |
3779 | static bool is_fortran (void); |
3780 | static bool is_ada (void); |
3781 | static bool remove_AT (dw_die_ref, enum dwarf_attribute); |
3782 | static void remove_child_TAG (dw_die_ref, enum dwarf_tag); |
3783 | static void add_child_die (dw_die_ref, dw_die_ref); |
3784 | static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree); |
3785 | static dw_die_ref strip_naming_typedef (tree, dw_die_ref); |
3786 | static dw_die_ref lookup_type_die_strip_naming_typedef (tree); |
3787 | static void equate_type_number_to_die (tree, dw_die_ref); |
3788 | static var_loc_list *lookup_decl_loc (const_tree); |
3789 | static void equate_decl_number_to_die (tree, dw_die_ref); |
3790 | static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view); |
3791 | static void print_spaces (FILE *); |
3792 | static void print_die (dw_die_ref, FILE *); |
3793 | static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *); |
3794 | static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *); |
3795 | static void die_checksum (dw_die_ref, struct md5_ctx *, int *); |
3796 | static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *); |
3797 | static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *); |
3798 | static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *); |
3799 | static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *, |
3800 | struct md5_ctx *, int *); |
3801 | struct checksum_attributes; |
3802 | static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref); |
3803 | static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *); |
3804 | static void checksum_die_context (dw_die_ref, struct md5_ctx *); |
3805 | static void generate_type_signature (dw_die_ref, comdat_type_node *); |
3806 | static bool same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *); |
3807 | static bool same_dw_val_p (const dw_val_node *, const dw_val_node *, int *); |
3808 | static bool same_attr_p (dw_attr_node *, dw_attr_node *, int *); |
3809 | static bool same_die_p (dw_die_ref, dw_die_ref, int *); |
3810 | static bool is_type_die (dw_die_ref); |
3811 | static inline bool is_template_instantiation (dw_die_ref); |
3812 | static bool is_declaration_die (dw_die_ref); |
3813 | static bool should_move_die_to_comdat (dw_die_ref); |
3814 | static dw_die_ref clone_as_declaration (dw_die_ref); |
3815 | static dw_die_ref clone_die (dw_die_ref); |
3816 | static dw_die_ref clone_tree (dw_die_ref); |
3817 | static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref); |
3818 | static void generate_skeleton_ancestor_tree (skeleton_chain_node *); |
3819 | static void generate_skeleton_bottom_up (skeleton_chain_node *); |
3820 | static dw_die_ref generate_skeleton (dw_die_ref); |
3821 | static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref, |
3822 | dw_die_ref, |
3823 | dw_die_ref); |
3824 | static void break_out_comdat_types (dw_die_ref); |
3825 | static void copy_decls_for_unworthy_types (dw_die_ref); |
3826 | |
3827 | static void add_sibling_attributes (dw_die_ref); |
3828 | static void output_location_lists (dw_die_ref); |
3829 | static int constant_size (unsigned HOST_WIDE_INT); |
3830 | static unsigned long size_of_die (dw_die_ref); |
3831 | static void calc_die_sizes (dw_die_ref); |
3832 | static void calc_base_type_die_sizes (void); |
3833 | static void mark_dies (dw_die_ref); |
3834 | static void unmark_dies (dw_die_ref); |
3835 | static void unmark_all_dies (dw_die_ref); |
3836 | static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *); |
3837 | static unsigned long size_of_aranges (void); |
3838 | static enum dwarf_form value_format (dw_attr_node *); |
3839 | static void output_value_format (dw_attr_node *); |
3840 | static void output_abbrev_section (void); |
3841 | static void output_die_abbrevs (unsigned long, dw_die_ref); |
3842 | static void output_die (dw_die_ref); |
3843 | static void output_compilation_unit_header (enum dwarf_unit_type); |
3844 | static void output_comp_unit (dw_die_ref, int, const unsigned char *); |
3845 | static void output_comdat_type_unit (comdat_type_node *, bool); |
3846 | static const char *dwarf2_name (tree, int); |
3847 | static void add_pubname (tree, dw_die_ref); |
3848 | static void add_enumerator_pubname (const char *, dw_die_ref); |
3849 | static void add_pubname_string (const char *, dw_die_ref); |
3850 | static void add_pubtype (tree, dw_die_ref); |
3851 | static void output_pubnames (vec<pubname_entry, va_gc> *); |
3852 | static void output_aranges (void); |
3853 | static unsigned int add_ranges (const_tree, bool = false); |
3854 | static void add_ranges_by_labels (dw_die_ref, const char *, const char *, |
3855 | bool *, bool); |
3856 | static void output_ranges (void); |
3857 | static dw_line_info_table *new_line_info_table (void); |
3858 | static void output_line_info (bool); |
3859 | static void output_file_names (void); |
3860 | static bool is_base_type (tree); |
3861 | static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref); |
3862 | static int decl_quals (const_tree); |
3863 | static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref); |
3864 | static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref); |
3865 | static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref); |
3866 | static unsigned int debugger_reg_number (const_rtx); |
3867 | static void add_loc_descr_op_piece (dw_loc_descr_ref *, int); |
3868 | static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status); |
3869 | static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int, |
3870 | enum var_init_status); |
3871 | static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx, |
3872 | enum var_init_status); |
3873 | static dw_loc_descr_ref based_loc_descr (rtx, poly_int64, |
3874 | enum var_init_status); |
3875 | static bool is_based_loc (const_rtx); |
3876 | static bool resolve_one_addr (rtx *); |
3877 | static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx, |
3878 | enum var_init_status); |
3879 | static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode, |
3880 | enum var_init_status); |
3881 | struct loc_descr_context; |
3882 | static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref); |
3883 | static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list); |
3884 | static dw_loc_list_ref loc_list_from_tree (tree, int, |
3885 | struct loc_descr_context *); |
3886 | static dw_loc_descr_ref loc_descriptor_from_tree (tree, int, |
3887 | struct loc_descr_context *); |
3888 | static tree field_type (const_tree); |
3889 | static unsigned int simple_type_align_in_bits (const_tree); |
3890 | static unsigned int simple_decl_align_in_bits (const_tree); |
3891 | static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree); |
3892 | struct vlr_context; |
3893 | static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *, |
3894 | HOST_WIDE_INT *); |
3895 | static void add_AT_location_description (dw_die_ref, enum dwarf_attribute, |
3896 | dw_loc_list_ref); |
3897 | static void add_data_member_location_attribute (dw_die_ref, tree, |
3898 | struct vlr_context *); |
3899 | static bool add_const_value_attribute (dw_die_ref, machine_mode, rtx); |
3900 | static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *); |
3901 | static void insert_wide_int (const wide_int_ref &, unsigned char *, int); |
3902 | static unsigned insert_float (const_rtx, unsigned char *); |
3903 | static rtx rtl_for_decl_location (tree); |
3904 | static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool); |
3905 | static bool tree_add_const_value_attribute (dw_die_ref, tree); |
3906 | static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree); |
3907 | static void add_desc_attribute (dw_die_ref, tree); |
3908 | static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref); |
3909 | static void add_comp_dir_attribute (dw_die_ref); |
3910 | static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int, |
3911 | struct loc_descr_context *); |
3912 | static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree, |
3913 | struct loc_descr_context *); |
3914 | static void add_subscript_info (dw_die_ref, tree, bool); |
3915 | static void add_byte_size_attribute (dw_die_ref, tree); |
3916 | static void add_alignment_attribute (dw_die_ref, tree); |
3917 | static void add_bit_offset_attribute (dw_die_ref, tree); |
3918 | static void add_bit_size_attribute (dw_die_ref, tree); |
3919 | static void add_prototyped_attribute (dw_die_ref, tree); |
3920 | static void add_abstract_origin_attribute (dw_die_ref, tree); |
3921 | static void add_pure_or_virtual_attribute (dw_die_ref, tree); |
3922 | static void add_src_coords_attributes (dw_die_ref, tree); |
3923 | static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false); |
3924 | static void add_discr_value (dw_die_ref, dw_discr_value *); |
3925 | static void add_discr_list (dw_die_ref, dw_discr_list_ref); |
3926 | static inline dw_discr_list_ref AT_discr_list (dw_attr_node *); |
3927 | static dw_die_ref scope_die_for (tree, dw_die_ref); |
3928 | static inline bool local_scope_p (dw_die_ref); |
3929 | static inline bool class_scope_p (dw_die_ref); |
3930 | static inline bool class_or_namespace_scope_p (dw_die_ref); |
3931 | static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref); |
3932 | static void add_calling_convention_attribute (dw_die_ref, tree); |
3933 | static const char *type_tag (const_tree); |
3934 | static tree member_declared_type (const_tree); |
3935 | #if 0 |
3936 | static const char *decl_start_label (tree); |
3937 | #endif |
3938 | static void gen_array_type_die (tree, dw_die_ref); |
3939 | static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref); |
3940 | #if 0 |
3941 | static void gen_entry_point_die (tree, dw_die_ref); |
3942 | #endif |
3943 | static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref); |
3944 | static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref); |
3945 | static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*); |
3946 | static void gen_unspecified_parameters_die (tree, dw_die_ref); |
3947 | static void gen_formal_types_die (tree, dw_die_ref); |
3948 | static void gen_subprogram_die (tree, dw_die_ref); |
3949 | static void gen_variable_die (tree, tree, dw_die_ref); |
3950 | static void gen_const_die (tree, dw_die_ref); |
3951 | static void gen_label_die (tree, dw_die_ref); |
3952 | static void gen_lexical_block_die (tree, dw_die_ref); |
3953 | static void gen_inlined_subroutine_die (tree, dw_die_ref); |
3954 | static void gen_field_die (tree, struct vlr_context *, dw_die_ref); |
3955 | static void gen_ptr_to_mbr_type_die (tree, dw_die_ref); |
3956 | static dw_die_ref gen_compile_unit_die (const char *); |
3957 | static void gen_inheritance_die (tree, tree, tree, dw_die_ref); |
3958 | static void gen_member_die (tree, dw_die_ref); |
3959 | static void gen_struct_or_union_type_die (tree, dw_die_ref, |
3960 | enum debug_info_usage); |
3961 | static void gen_subroutine_type_die (tree, dw_die_ref); |
3962 | static void gen_typedef_die (tree, dw_die_ref); |
3963 | static void gen_type_die (tree, dw_die_ref); |
3964 | static void gen_block_die (tree, dw_die_ref); |
3965 | static void decls_for_scope (tree, dw_die_ref, bool = true); |
3966 | static bool is_naming_typedef_decl (const_tree); |
3967 | static inline dw_die_ref get_context_die (tree); |
3968 | static void gen_namespace_die (tree, dw_die_ref); |
3969 | static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree); |
3970 | static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref); |
3971 | static dw_die_ref force_decl_die (tree); |
3972 | static dw_die_ref force_type_die (tree); |
3973 | static dw_die_ref setup_namespace_context (tree, dw_die_ref); |
3974 | static dw_die_ref declare_in_namespace (tree, dw_die_ref); |
3975 | static struct dwarf_file_data * lookup_filename (const char *); |
3976 | static void retry_incomplete_types (void); |
3977 | static void gen_type_die_for_member (tree, tree, dw_die_ref); |
3978 | static void gen_generic_params_dies (tree); |
3979 | static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage); |
3980 | static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage); |
3981 | static void splice_child_die (dw_die_ref, dw_die_ref); |
3982 | static int file_info_cmp (const void *, const void *); |
3983 | static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view, |
3984 | const char *, var_loc_view, const char *); |
3985 | static void output_loc_list (dw_loc_list_ref); |
3986 | static char *gen_internal_sym (const char *); |
3987 | static bool want_pubnames (void); |
3988 | |
3989 | static void prune_unmark_dies (dw_die_ref); |
3990 | static void prune_unused_types_mark_generic_parms_dies (dw_die_ref); |
3991 | static void prune_unused_types_mark (dw_die_ref, int); |
3992 | static void prune_unused_types_walk (dw_die_ref); |
3993 | static void prune_unused_types_walk_attribs (dw_die_ref); |
3994 | static void prune_unused_types_prune (dw_die_ref); |
3995 | static void prune_unused_types (void); |
3996 | static int maybe_emit_file (struct dwarf_file_data *fd); |
3997 | static inline const char *AT_vms_delta1 (dw_attr_node *); |
3998 | static inline const char *AT_vms_delta2 (dw_attr_node *); |
3999 | #if VMS_DEBUGGING_INFO |
4000 | static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute, |
4001 | const char *, const char *); |
4002 | #endif |
4003 | static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree); |
4004 | static void gen_remaining_tmpl_value_param_die_attribute (void); |
4005 | static bool generic_type_p (tree); |
4006 | static void schedule_generic_params_dies_gen (tree t); |
4007 | static void gen_scheduled_generic_parms_dies (void); |
4008 | static void resolve_variable_values (void); |
4009 | |
4010 | static const char *comp_dir_string (void); |
4011 | |
4012 | static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &); |
4013 | |
4014 | /* enum for tracking thread-local variables whose address is really an offset |
4015 | relative to the TLS pointer, which will need link-time relocation, but will |
4016 | not need relocation by the DWARF consumer. */ |
4017 | |
enum dtprel_bool
{
  /* Ordinary address, needs consumer-level relocation.  */
  dtprel_false = 0,
  /* TLS-pointer-relative offset, link-time relocation only.  */
  dtprel_true = 1
};
4023 | |
4024 | /* Return the operator to use for an address of a variable. For dtprel_true, we |
4025 | use DW_OP_const*. For regular variables, which need both link-time |
4026 | relocation and consumer-level relocation (e.g., to account for shared objects |
4027 | loaded at a random address), we use DW_OP_addr*. */ |
4028 | |
4029 | static inline enum dwarf_location_atom |
4030 | dw_addr_op (enum dtprel_bool dtprel) |
4031 | { |
4032 | if (dtprel == dtprel_true) |
4033 | return (dwarf_split_debug_info ? dwarf_OP (op: DW_OP_constx) |
4034 | : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u)); |
4035 | else |
4036 | return dwarf_split_debug_info ? dwarf_OP (op: DW_OP_addrx) : DW_OP_addr; |
4037 | } |
4038 | |
4039 | /* Return a pointer to a newly allocated address location description. If |
4040 | dwarf_split_debug_info is true, then record the address with the appropriate |
4041 | relocation. */ |
4042 | static inline dw_loc_descr_ref |
4043 | new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel) |
4044 | { |
4045 | dw_loc_descr_ref ref = new_loc_descr (op: dw_addr_op (dtprel), oprnd1: 0, oprnd2: 0); |
4046 | |
4047 | ref->dw_loc_oprnd1.val_class = dw_val_class_addr; |
4048 | ref->dw_loc_oprnd1.v.val_addr = addr; |
4049 | ref->dtprel = dtprel; |
4050 | if (dwarf_split_debug_info) |
4051 | ref->dw_loc_oprnd1.val_entry |
4052 | = add_addr_table_entry (addr, |
4053 | dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx); |
4054 | else |
4055 | ref->dw_loc_oprnd1.val_entry = NULL; |
4056 | |
4057 | return ref; |
4058 | } |
4059 | |
4060 | /* Section names used to hold DWARF debugging information. */ |
4061 | |
4062 | #ifndef DEBUG_INFO_SECTION |
4063 | #define DEBUG_INFO_SECTION ".debug_info" |
4064 | #endif |
4065 | #ifndef DEBUG_DWO_INFO_SECTION |
4066 | #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo" |
4067 | #endif |
4068 | #ifndef DEBUG_LTO_INFO_SECTION |
4069 | #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info" |
4070 | #endif |
4071 | #ifndef DEBUG_LTO_DWO_INFO_SECTION |
4072 | #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo" |
4073 | #endif |
4074 | #ifndef DEBUG_ABBREV_SECTION |
4075 | #define DEBUG_ABBREV_SECTION ".debug_abbrev" |
4076 | #endif |
4077 | #ifndef DEBUG_LTO_ABBREV_SECTION |
4078 | #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev" |
4079 | #endif |
4080 | #ifndef DEBUG_DWO_ABBREV_SECTION |
4081 | #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo" |
4082 | #endif |
4083 | #ifndef DEBUG_LTO_DWO_ABBREV_SECTION |
4084 | #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo" |
#endif
/* Default DWARF debug section names.  Any of these may be overridden by
   the target's tm.h.  The ".dwo" variants hold the portion that goes into
   the separate debug object with -gsplit-dwarf; the ".gnu.debuglto_"
   prefixed variants hold the early debug output used for LTO.  */
#ifndef DEBUG_ARANGES_SECTION
#define DEBUG_ARANGES_SECTION ".debug_aranges"
#endif
#ifndef DEBUG_ADDR_SECTION
#define DEBUG_ADDR_SECTION ".debug_addr"
#endif
/* Macro information: the obsolescent .debug_macinfo format and the
   newer .debug_macro format.  */
#ifndef DEBUG_MACINFO_SECTION
#define DEBUG_MACINFO_SECTION ".debug_macinfo"
#endif
#ifndef DEBUG_LTO_MACINFO_SECTION
#define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
#endif
#ifndef DEBUG_DWO_MACINFO_SECTION
#define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
#endif
#ifndef DEBUG_LTO_DWO_MACINFO_SECTION
#define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
#endif
#ifndef DEBUG_MACRO_SECTION
#define DEBUG_MACRO_SECTION ".debug_macro"
#endif
#ifndef DEBUG_LTO_MACRO_SECTION
#define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
#endif
#ifndef DEBUG_DWO_MACRO_SECTION
#define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
#endif
#ifndef DEBUG_LTO_DWO_MACRO_SECTION
#define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
#endif
#ifndef DEBUG_LINE_SECTION
#define DEBUG_LINE_SECTION ".debug_line"
#endif
#ifndef DEBUG_LTO_LINE_SECTION
#define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
#endif
#ifndef DEBUG_DWO_LINE_SECTION
#define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
#endif
#ifndef DEBUG_LTO_DWO_LINE_SECTION
#define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
#endif
/* Location lists: .debug_loc for DWARF < 5, .debug_loclists for
   DWARF 5 and later.  */
#ifndef DEBUG_LOC_SECTION
#define DEBUG_LOC_SECTION ".debug_loc"
#endif
#ifndef DEBUG_DWO_LOC_SECTION
#define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
#endif
#ifndef DEBUG_LOCLISTS_SECTION
#define DEBUG_LOCLISTS_SECTION ".debug_loclists"
#endif
#ifndef DEBUG_DWO_LOCLISTS_SECTION
#define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
#endif
/* With -ggnu-pubnames (debug_generate_pub_sections == 2) emit the GNU
   flavored tables instead of the standard ones.  */
#ifndef DEBUG_PUBNAMES_SECTION
#define DEBUG_PUBNAMES_SECTION \
  ((debug_generate_pub_sections == 2) \
   ? ".debug_gnu_pubnames" : ".debug_pubnames")
#endif
#ifndef DEBUG_PUBTYPES_SECTION
#define DEBUG_PUBTYPES_SECTION \
  ((debug_generate_pub_sections == 2) \
   ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
#endif
#ifndef DEBUG_STR_OFFSETS_SECTION
#define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
#endif
#ifndef DEBUG_DWO_STR_OFFSETS_SECTION
#define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
#endif
#ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
#define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
#endif
#ifndef DEBUG_STR_SECTION
#define DEBUG_STR_SECTION ".debug_str"
#endif
#ifndef DEBUG_LTO_STR_SECTION
#define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
#endif
#ifndef DEBUG_STR_DWO_SECTION
#define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
#endif
#ifndef DEBUG_LTO_STR_DWO_SECTION
#define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
#endif
/* Range lists: .debug_ranges for DWARF < 5, .debug_rnglists for
   DWARF 5 and later.  */
#ifndef DEBUG_RANGES_SECTION
#define DEBUG_RANGES_SECTION ".debug_ranges"
#endif
#ifndef DEBUG_RNGLISTS_SECTION
#define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
#endif
#ifndef DEBUG_DWO_RNGLISTS_SECTION
#define DEBUG_DWO_RNGLISTS_SECTION ".debug_rnglists.dwo"
#endif
#ifndef DEBUG_LINE_STR_SECTION
#define DEBUG_LINE_STR_SECTION ".debug_line_str"
#endif
#ifndef DEBUG_LTO_LINE_STR_SECTION
#define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
#endif
4186 | |
/* Section flags for .debug_str section.  When the assembler supports
   SHF_MERGE and string merging is enabled, mark the section mergeable;
   NOTE(review): the trailing "| 1" presumably encodes the mergeable
   entity size (1 byte) in the low flag bits — confirm against varasm's
   section-flag conventions.  */
#define DEBUG_STR_SECTION_FLAGS \
  (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
   ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
   : SECTION_DEBUG)

/* Section flags for .debug_str.dwo section.  SECTION_EXCLUDE keeps the
   split-DWARF string section out of the final executable image.  */
#define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)

/* Attribute used to refer to the macro section: DW_AT_macros for
   DWARF 5+, DW_AT_macro_info when strict pre-5 DWARF is requested,
   and the GNU extension otherwise.  */
#define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
			       : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)

/* Labels we insert at beginning sections we can reference instead of
   the section names themselves.  */

#ifndef TEXT_SECTION_LABEL
#define TEXT_SECTION_LABEL "Ltext"
#endif
#ifndef COLD_TEXT_SECTION_LABEL
#define COLD_TEXT_SECTION_LABEL "Ltext_cold"
#endif
#ifndef DEBUG_LINE_SECTION_LABEL
#define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
#endif
#ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
#define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
#endif
#ifndef DEBUG_INFO_SECTION_LABEL
#define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
#endif
#ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
#define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
#endif
#ifndef DEBUG_ABBREV_SECTION_LABEL
#define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
#endif
#ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
#define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
#endif
#ifndef DEBUG_ADDR_SECTION_LABEL
#define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
#endif
#ifndef DEBUG_LOC_SECTION_LABEL
#define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
#endif
#ifndef DEBUG_RANGES_SECTION_LABEL
#define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
#endif
#ifndef DEBUG_MACINFO_SECTION_LABEL
#define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
#endif
#ifndef DEBUG_MACRO_SECTION_LABEL
#define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
#endif
/* Fixed abbreviation numbers used for the skeleton compile-unit and
   type-unit DIEs emitted for split DWARF.  */
#define SKELETON_COMP_DIE_ABBREV 1
#define SKELETON_TYPE_DIE_ABBREV 2
/* Definitions of defaults for formats and names of various special
   (artificial) labels which may be generated within this file (when the -g
   option is used and DWARF2_DEBUGGING_INFO is in effect).
   If necessary, these may be overridden from within the tm.h file, but
   typically, overriding these defaults is unnecessary.  */

/* Buffers holding the generated begin/end labels for the text and cold
   text sections and for the debug sections referenced from the DIEs.  */
static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
/* NOTE(review): these two are double-sized, presumably to leave room for
   an appended offset expression — confirm at their use sites.  */
static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4266 | |
/* Prefixes for labels emitted into the code stream: end-of-text markers,
   lexical block begin/end, inline entry points, and line-number labels.  */
#ifndef TEXT_END_LABEL
#define TEXT_END_LABEL "Letext"
#endif
#ifndef COLD_END_LABEL
#define COLD_END_LABEL "Letext_cold"
#endif
#ifndef BLOCK_BEGIN_LABEL
#define BLOCK_BEGIN_LABEL "LBB"
#endif
#ifndef BLOCK_INLINE_ENTRY_LABEL
#define BLOCK_INLINE_ENTRY_LABEL "LBI"
#endif
#ifndef BLOCK_END_LABEL
#define BLOCK_END_LABEL "LBE"
#endif
#ifndef LINE_CODE_LABEL
#define LINE_CODE_LABEL "LM"
#endif
4285 | |
4286 | |
4287 | /* Return the root of the DIE's built for the current compilation unit. */ |
4288 | static dw_die_ref |
4289 | comp_unit_die (void) |
4290 | { |
4291 | if (!single_comp_unit_die) |
4292 | single_comp_unit_die = gen_compile_unit_die (NULL); |
4293 | return single_comp_unit_die; |
4294 | } |
4295 | |
/* We allow a language front-end to designate a function that is to be
   called to "demangle" any name before it is put into a DIE.  */

static const char *(*demangle_name_func) (const char *);

/* Register FUNC as the hook used to demangle DIE names.  */

void
dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
{
  demangle_name_func = func;
}
4306 | |
4307 | /* Test if rtl node points to a pseudo register. */ |
4308 | |
4309 | static inline bool |
4310 | is_pseudo_reg (const_rtx rtl) |
4311 | { |
4312 | return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER) |
4313 | || (GET_CODE (rtl) == SUBREG |
4314 | && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER)); |
4315 | } |
4316 | |
4317 | /* Return a reference to a type, with its const and volatile qualifiers |
4318 | removed. */ |
4319 | |
4320 | static inline tree |
4321 | type_main_variant (tree type) |
4322 | { |
4323 | type = TYPE_MAIN_VARIANT (type); |
4324 | |
4325 | /* ??? There really should be only one main variant among any group of |
4326 | variants of a given type (and all of the MAIN_VARIANT values for all |
4327 | members of the group should point to that one type) but sometimes the C |
4328 | front-end messes this up for array types, so we work around that bug |
4329 | here. */ |
4330 | if (TREE_CODE (type) == ARRAY_TYPE) |
4331 | while (type != TYPE_MAIN_VARIANT (type)) |
4332 | type = TYPE_MAIN_VARIANT (type); |
4333 | |
4334 | return type; |
4335 | } |
4336 | |
4337 | /* Return true if the given type node represents a tagged type. */ |
4338 | |
4339 | static inline bool |
4340 | is_tagged_type (const_tree type) |
4341 | { |
4342 | enum tree_code code = TREE_CODE (type); |
4343 | |
4344 | return (code == RECORD_TYPE || code == UNION_TYPE |
4345 | || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE); |
4346 | } |
4347 | |
4348 | /* Set label to debug_info_section_label + die_offset of a DIE reference. */ |
4349 | |
4350 | static void |
4351 | get_ref_die_offset_label (char *label, dw_die_ref ref) |
4352 | { |
4353 | sprintf (s: label, format: "%s+%ld" , debug_info_section_label, ref->die_offset); |
4354 | } |
4355 | |
4356 | /* Return die_offset of a DIE reference to a base type. */ |
4357 | |
4358 | static unsigned long int |
4359 | get_base_type_offset (dw_die_ref ref) |
4360 | { |
4361 | if (ref->die_offset) |
4362 | return ref->die_offset; |
4363 | if (comp_unit_die ()->die_abbrev) |
4364 | { |
4365 | calc_base_type_die_sizes (); |
4366 | gcc_assert (ref->die_offset); |
4367 | } |
4368 | return ref->die_offset; |
4369 | } |
4370 | |
4371 | /* Return die_offset of a DIE reference other than base type. */ |
4372 | |
4373 | static unsigned long int |
4374 | get_ref_die_offset (dw_die_ref ref) |
4375 | { |
4376 | gcc_assert (ref->die_offset); |
4377 | return ref->die_offset; |
4378 | } |
4379 | |
4380 | /* Convert a DIE tag into its string name. */ |
4381 | |
4382 | static const char * |
4383 | dwarf_tag_name (unsigned int tag) |
4384 | { |
4385 | const char *name = get_DW_TAG_name (tag); |
4386 | |
4387 | if (name != NULL) |
4388 | return name; |
4389 | |
4390 | return "DW_TAG_<unknown>" ; |
4391 | } |
4392 | |
/* Convert a DWARF attribute code into its string name.  A few vendor
   codes overlap numerically: the HP codes (used for VMS debugging) share
   values with the MIPS codes, so pick the spelling appropriate for this
   configuration before falling back to the generic lookup.  */

static const char *
dwarf_attr_name (unsigned int attr)
{
  const char *name;

  switch (attr)
    {
#if VMS_DEBUGGING_INFO
    case DW_AT_HP_prologue:
      return "DW_AT_HP_prologue";
#else
    case DW_AT_MIPS_loop_unroll_factor:
      return "DW_AT_MIPS_loop_unroll_factor";
#endif

#if VMS_DEBUGGING_INFO
    case DW_AT_HP_epilogue:
      return "DW_AT_HP_epilogue";
#else
    case DW_AT_MIPS_stride:
      return "DW_AT_MIPS_stride";
#endif
    }

  name = get_DW_AT_name (attr);

  if (name != NULL)
    return name;

  return "DW_AT_<unknown>";
}
4426 | |
4427 | /* Convert a DWARF value form code into its string name. */ |
4428 | |
4429 | static const char * |
4430 | dwarf_form_name (unsigned int form) |
4431 | { |
4432 | const char *name = get_DW_FORM_name (form); |
4433 | |
4434 | if (name != NULL) |
4435 | return name; |
4436 | |
4437 | return "DW_FORM_<unknown>" ; |
4438 | } |
4439 | |
/* Determine the "ultimate origin" of a decl.  The decl may be an inlined
   instance of an inlined instance of a decl which is local to an inline
   function, so we have to trace all of the way back through the origin chain
   to find out what sort of node actually served as the original seed for the
   given block.  Returns NULL_TREE when the decl has no abstract origin of
   its own.  */

static tree
decl_ultimate_origin (const_tree decl)
{
  /* Decls without the DECL_COMMON part have no abstract origin field.  */
  if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
    return NULL_TREE;

  /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
     we're trying to output the abstract instance of this function.  */
  if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
    return NULL_TREE;

  /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
     most distant ancestor, this should never happen.  */
  gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));

  return DECL_ABSTRACT_ORIGIN (decl);
}
4463 | |
4464 | /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT |
4465 | of a virtual function may refer to a base class, so we check the 'this' |
4466 | parameter. */ |
4467 | |
4468 | static tree |
4469 | decl_class_context (tree decl) |
4470 | { |
4471 | tree context = NULL_TREE; |
4472 | |
4473 | if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl)) |
4474 | context = DECL_CONTEXT (decl); |
4475 | else |
4476 | context = TYPE_MAIN_VARIANT |
4477 | (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl))))); |
4478 | |
4479 | if (context && !TYPE_P (context)) |
4480 | context = NULL_TREE; |
4481 | |
4482 | return context; |
4483 | } |
4484 | |
4485 | /* Add an attribute/value pair to a DIE. */ |
4486 | |
4487 | static inline void |
4488 | add_dwarf_attr (dw_die_ref die, dw_attr_node *attr) |
4489 | { |
4490 | /* Maybe this should be an assert? */ |
4491 | if (die == NULL) |
4492 | return; |
4493 | |
4494 | if (flag_checking) |
4495 | { |
4496 | /* Check we do not add duplicate attrs. Can't use get_AT here |
4497 | because that recurses to the specification/abstract origin DIE. */ |
4498 | dw_attr_node *a; |
4499 | unsigned ix; |
4500 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
4501 | gcc_assert (a->dw_attr != attr->dw_attr); |
4502 | } |
4503 | |
4504 | vec_safe_reserve (v&: die->die_attr, nelems: 1); |
4505 | vec_safe_push (v&: die->die_attr, obj: *attr); |
4506 | } |
4507 | |
/* Return the value class of attribute node A.  */

enum dw_val_class
AT_class (dw_attr_node *a)
{
  return a->dw_attr_val.val_class;
}
4513 | |
4514 | /* Return the index for any attribute that will be referenced with a |
4515 | DW_FORM_addrx/GNU_addr_index or DW_FORM_strx/GNU_str_index. String |
4516 | indices are stored in dw_attr_val.v.val_str for reference counting |
4517 | pruning. */ |
4518 | |
4519 | static inline unsigned int |
4520 | AT_index (dw_attr_node *a) |
4521 | { |
4522 | if (AT_class (a) == dw_val_class_str) |
4523 | return a->dw_attr_val.v.val_str->index; |
4524 | else if (a->dw_attr_val.val_entry != NULL) |
4525 | return a->dw_attr_val.val_entry->index; |
4526 | return NOT_INDEXED; |
4527 | } |
4528 | |
4529 | /* Add a flag value attribute to a DIE. */ |
4530 | |
4531 | static inline void |
4532 | add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag) |
4533 | { |
4534 | dw_attr_node attr; |
4535 | |
4536 | attr.dw_attr = attr_kind; |
4537 | attr.dw_attr_val.val_class = dw_val_class_flag; |
4538 | attr.dw_attr_val.val_entry = NULL; |
4539 | attr.dw_attr_val.v.val_flag = flag; |
4540 | add_dwarf_attr (die, attr: &attr); |
4541 | } |
4542 | |
4543 | static inline unsigned |
4544 | AT_flag (dw_attr_node *a) |
4545 | { |
4546 | gcc_assert (a && AT_class (a) == dw_val_class_flag); |
4547 | return a->dw_attr_val.v.val_flag; |
4548 | } |
4549 | |
4550 | /* Add a signed integer attribute value to a DIE. */ |
4551 | |
4552 | static inline void |
4553 | add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val) |
4554 | { |
4555 | dw_attr_node attr; |
4556 | |
4557 | attr.dw_attr = attr_kind; |
4558 | attr.dw_attr_val.val_class = dw_val_class_const; |
4559 | attr.dw_attr_val.val_entry = NULL; |
4560 | attr.dw_attr_val.v.val_int = int_val; |
4561 | add_dwarf_attr (die, attr: &attr); |
4562 | } |
4563 | |
4564 | HOST_WIDE_INT |
4565 | AT_int (dw_attr_node *a) |
4566 | { |
4567 | gcc_assert (a && (AT_class (a) == dw_val_class_const |
4568 | || AT_class (a) == dw_val_class_const_implicit)); |
4569 | return a->dw_attr_val.v.val_int; |
4570 | } |
4571 | |
4572 | /* Add an unsigned integer attribute value to a DIE. */ |
4573 | |
4574 | static inline void |
4575 | add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind, |
4576 | unsigned HOST_WIDE_INT unsigned_val) |
4577 | { |
4578 | dw_attr_node attr; |
4579 | |
4580 | attr.dw_attr = attr_kind; |
4581 | attr.dw_attr_val.val_class = dw_val_class_unsigned_const; |
4582 | attr.dw_attr_val.val_entry = NULL; |
4583 | attr.dw_attr_val.v.val_unsigned = unsigned_val; |
4584 | add_dwarf_attr (die, attr: &attr); |
4585 | } |
4586 | |
4587 | unsigned HOST_WIDE_INT |
4588 | AT_unsigned (dw_attr_node *a) |
4589 | { |
4590 | gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const |
4591 | || AT_class (a) == dw_val_class_unsigned_const_implicit)); |
4592 | return a->dw_attr_val.v.val_unsigned; |
4593 | } |
4594 | |
4595 | dw_wide_int * |
4596 | alloc_dw_wide_int (const wide_int_ref &w) |
4597 | { |
4598 | dw_wide_int *p |
4599 | = (dw_wide_int *) ggc_internal_alloc (s: sizeof (dw_wide_int) |
4600 | + ((w.get_len () - 1) |
4601 | * sizeof (HOST_WIDE_INT))); |
4602 | p->precision = w.get_precision (); |
4603 | p->len = w.get_len (); |
4604 | memcpy (dest: p->val, src: w.get_val (), n: p->len * sizeof (HOST_WIDE_INT)); |
4605 | return p; |
4606 | } |
4607 | |
4608 | /* Add an unsigned wide integer attribute value to a DIE. */ |
4609 | |
4610 | static inline void |
4611 | add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind, |
4612 | const wide_int_ref &w) |
4613 | { |
4614 | dw_attr_node attr; |
4615 | |
4616 | attr.dw_attr = attr_kind; |
4617 | attr.dw_attr_val.val_class = dw_val_class_wide_int; |
4618 | attr.dw_attr_val.val_entry = NULL; |
4619 | attr.dw_attr_val.v.val_wide = alloc_dw_wide_int (w); |
4620 | add_dwarf_attr (die, attr: &attr); |
4621 | } |
4622 | |
4623 | /* Add an unsigned double integer attribute value to a DIE. */ |
4624 | |
4625 | static inline void |
4626 | add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind, |
4627 | HOST_WIDE_INT high, unsigned HOST_WIDE_INT low) |
4628 | { |
4629 | dw_attr_node attr; |
4630 | |
4631 | attr.dw_attr = attr_kind; |
4632 | attr.dw_attr_val.val_class = dw_val_class_const_double; |
4633 | attr.dw_attr_val.val_entry = NULL; |
4634 | attr.dw_attr_val.v.val_double.high = high; |
4635 | attr.dw_attr_val.v.val_double.low = low; |
4636 | add_dwarf_attr (die, attr: &attr); |
4637 | } |
4638 | |
4639 | /* Add a floating point attribute value to a DIE and return it. */ |
4640 | |
4641 | static inline void |
4642 | add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind, |
4643 | unsigned int length, unsigned int elt_size, unsigned char *array) |
4644 | { |
4645 | dw_attr_node attr; |
4646 | |
4647 | attr.dw_attr = attr_kind; |
4648 | attr.dw_attr_val.val_class = dw_val_class_vec; |
4649 | attr.dw_attr_val.val_entry = NULL; |
4650 | attr.dw_attr_val.v.val_vec.length = length; |
4651 | attr.dw_attr_val.v.val_vec.elt_size = elt_size; |
4652 | attr.dw_attr_val.v.val_vec.array = array; |
4653 | add_dwarf_attr (die, attr: &attr); |
4654 | } |
4655 | |
4656 | /* Add an 8-byte data attribute value to a DIE. */ |
4657 | |
4658 | static inline void |
4659 | add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind, |
4660 | unsigned char data8[8]) |
4661 | { |
4662 | dw_attr_node attr; |
4663 | |
4664 | attr.dw_attr = attr_kind; |
4665 | attr.dw_attr_val.val_class = dw_val_class_data8; |
4666 | attr.dw_attr_val.val_entry = NULL; |
4667 | memcpy (dest: attr.dw_attr_val.v.val_data8, src: data8, n: 8); |
4668 | add_dwarf_attr (die, attr: &attr); |
4669 | } |
4670 | |
/* Add DW_AT_low_pc and DW_AT_high_pc to a DIE.  When using
   dwarf_split_debug_info, address attributes in dies destined for the
   final executable have force_direct set to avoid using indexed
   references.  */

static inline void
add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
		    bool force_direct)
{
  dw_attr_node attr;
  char * lbl_id;

  /* First the low PC, always emitted as a label reference; with split
     DWARF (and unless forced direct) it goes through the address table
     so the form can be DW_FORM_addrx.  */
  lbl_id = xstrdup (lbl_low);
  attr.dw_attr = DW_AT_low_pc;
  attr.dw_attr_val.val_class = dw_val_class_lbl_id;
  attr.dw_attr_val.v.val_lbl_id = lbl_id;
  if (dwarf_split_debug_info && !force_direct)
    attr.dw_attr_val.val_entry
      = add_addr_table_entry (lbl_id, ate_kind_label);
  else
    attr.dw_attr_val.val_entry = NULL;
  add_dwarf_attr (die, attr: &attr);

  /* Then the high PC, reusing the same attr node.  DWARF >= 4 allows
     DW_AT_high_pc to be an offset from low_pc (dw_val_class_high_pc);
     DWARF 2/3 requires an address, hence a label like low_pc.  Only the
     label form may be routed through the address table.  */
  attr.dw_attr = DW_AT_high_pc;
  if (dwarf_version < 4)
    attr.dw_attr_val.val_class = dw_val_class_lbl_id;
  else
    attr.dw_attr_val.val_class = dw_val_class_high_pc;
  lbl_id = xstrdup (lbl_high);
  attr.dw_attr_val.v.val_lbl_id = lbl_id;
  if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
      && dwarf_split_debug_info && !force_direct)
    attr.dw_attr_val.val_entry
      = add_addr_table_entry (lbl_id, ate_kind_label);
  else
    attr.dw_attr_val.val_entry = NULL;
  add_dwarf_attr (die, attr: &attr);
}
4709 | |
4710 | /* Hash and equality functions for debug_str_hash. */ |
4711 | |
4712 | hashval_t |
4713 | indirect_string_hasher::hash (indirect_string_node *x) |
4714 | { |
4715 | return htab_hash_string (x->str); |
4716 | } |
4717 | |
4718 | bool |
4719 | indirect_string_hasher::equal (indirect_string_node *x1, const char *x2) |
4720 | { |
4721 | return strcmp (s1: x1->str, s2: x2) == 0; |
4722 | } |
4723 | |
4724 | /* Add STR to the given string hash table. */ |
4725 | |
4726 | static struct indirect_string_node * |
4727 | find_AT_string_in_table (const char *str, |
4728 | hash_table<indirect_string_hasher> *table, |
4729 | enum insert_option insert = INSERT) |
4730 | { |
4731 | struct indirect_string_node *node; |
4732 | |
4733 | indirect_string_node **slot |
4734 | = table->find_slot_with_hash (comparable: str, hash: htab_hash_string (str), insert); |
4735 | if (*slot == NULL) |
4736 | { |
4737 | node = ggc_cleared_alloc<indirect_string_node> (); |
4738 | node->str = ggc_strdup (str); |
4739 | *slot = node; |
4740 | } |
4741 | else |
4742 | node = *slot; |
4743 | |
4744 | node->refcount++; |
4745 | return node; |
4746 | } |
4747 | |
4748 | /* Add STR to the indirect string hash table. */ |
4749 | |
4750 | static struct indirect_string_node * |
4751 | find_AT_string (const char *str, enum insert_option insert = INSERT) |
4752 | { |
4753 | if (! debug_str_hash) |
4754 | debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (n: 10); |
4755 | |
4756 | return find_AT_string_in_table (str, table: debug_str_hash, insert); |
4757 | } |
4758 | |
4759 | /* Add a string attribute value to a DIE. */ |
4760 | |
4761 | static inline void |
4762 | add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str) |
4763 | { |
4764 | dw_attr_node attr; |
4765 | struct indirect_string_node *node; |
4766 | |
4767 | node = find_AT_string (str); |
4768 | |
4769 | attr.dw_attr = attr_kind; |
4770 | attr.dw_attr_val.val_class = dw_val_class_str; |
4771 | attr.dw_attr_val.val_entry = NULL; |
4772 | attr.dw_attr_val.v.val_str = node; |
4773 | add_dwarf_attr (die, attr: &attr); |
4774 | } |
4775 | |
4776 | static inline const char * |
4777 | AT_string (dw_attr_node *a) |
4778 | { |
4779 | gcc_assert (a && AT_class (a) == dw_val_class_str); |
4780 | return a->dw_attr_val.v.val_str->str; |
4781 | } |
4782 | |
/* Call this function directly to bypass AT_string_form's logic to put
   the string inline in the die.  Marks NODE for out-of-line emission:
   assigns it a fresh LASF label and the appropriate indirect form
   (DW_FORM_strp normally, DW_FORM_strx/GNU_str_index for split DWARF).  */

static void
set_indirect_string (struct indirect_string_node *node)
{
  char label[MAX_ARTIFICIAL_LABEL_BYTES];
  /* Already indirect is a no op.  */
  if (node->form == DW_FORM_strp
      || node->form == DW_FORM_line_strp
      || node->form == dwarf_FORM (form: DW_FORM_strx))
    {
      gcc_assert (node->label);
      return;
    }
  ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
  ++dw2_string_counter;
  node->label = xstrdup (label);

  if (!dwarf_split_debug_info)
    {
      node->form = DW_FORM_strp;
      node->index = NOT_INDEXED;
    }
  else
    {
      /* Split DWARF strings are indexed; the actual index is assigned
	 later, hence NO_INDEX_ASSIGNED for now.  */
      node->form = dwarf_FORM (form: DW_FORM_strx);
      node->index = NO_INDEX_ASSIGNED;
    }
}
4813 | |
/* A helper function for dwarf2out_finish, called to reset indirect
   string decisions done for early LTO dwarf output before fat object
   dwarf output.  Traversal callback: clears any previously chosen
   indirect form, label and index so the choice can be made afresh.
   Always returns 1 so the table traversal continues.  */

int
reset_indirect_string (indirect_string_node **h, void *)
{
  struct indirect_string_node *node = *h;
  if (node->form == DW_FORM_strp
      || node->form == DW_FORM_line_strp
      || node->form == dwarf_FORM (form: DW_FORM_strx))
    {
      free (ptr: node->label);
      node->label = NULL;
      node->form = (dwarf_form) 0;
      node->index = 0;
    }
  return 1;
}
4833 | |
/* Add a string representing a file or filepath attribute value to a DIE.
   When the assembler can emit .debug_line_str, intern the string in the
   separate debug_line_str table and force the DW_FORM_line_strp form;
   otherwise fall back to an ordinary string attribute.  */

static inline void
add_filepath_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
			const char *str)
{
  if (! asm_outputs_debug_line_str ())
    add_AT_string (die, attr_kind, str);
  else
    {
      dw_attr_node attr;
      struct indirect_string_node *node;

      if (!debug_line_str_hash)
	debug_line_str_hash
	  = hash_table<indirect_string_hasher>::create_ggc (n: 10);

      node = find_AT_string_in_table (str, table: debug_line_str_hash);
      set_indirect_string (node);
      /* Override the form chosen by set_indirect_string: filepath
	 strings live in .debug_line_str.  */
      node->form = DW_FORM_line_strp;

      attr.dw_attr = attr_kind;
      attr.dw_attr_val.val_class = dw_val_class_str;
      attr.dw_attr_val.val_entry = NULL;
      attr.dw_attr_val.v.val_str = node;
      add_dwarf_attr (die, attr: &attr);
    }
}
4862 | |
/* Find out whether a string should be output inline in DIE
   or out-of-line in .debug_str section.  The decision is cached in
   NODE->form on the first call.  */

static enum dwarf_form
find_string_form (struct indirect_string_node *node)
{
  unsigned int len;

  /* Already decided.  */
  if (node->form)
    return node->form;

  len = strlen (s: node->str) + 1;

  /* If the string is shorter or equal to the size of the reference, it is
     always better to put it inline.  */
  if (len <= (unsigned) dwarf_offset_size || node->refcount == 0)
    return node->form = DW_FORM_string;

  /* If we cannot expect the linker to merge strings in .debug_str
     section, only put it into .debug_str if it is worth even in this
     single module: the bytes saved by sharing (len - offset size per
     extra reference) must exceed the string's own cost.  */
  if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
      || ((debug_str_section->common.flags & SECTION_MERGE) == 0
	  && (len - dwarf_offset_size) * node->refcount <= len))
    return node->form = DW_FORM_string;

  set_indirect_string (node);

  return node->form;
}
4893 | |
4894 | /* Find out whether the string referenced from the attribute should be |
4895 | output inline in DIE or out-of-line in .debug_str section. */ |
4896 | |
4897 | static enum dwarf_form |
4898 | AT_string_form (dw_attr_node *a) |
4899 | { |
4900 | gcc_assert (a && AT_class (a) == dw_val_class_str); |
4901 | return find_string_form (node: a->dw_attr_val.v.val_str); |
4902 | } |
4903 | |
/* Add a DIE reference attribute value to a DIE.  A NULL TARG_DIE trips
   the checking assert in checking builds, but release builds degrade
   gracefully by dropping the attribute instead of crashing later.  */

static inline void
add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
{
  dw_attr_node attr;
  gcc_checking_assert (targ_die != NULL);

  /* With LTO we can end up trying to reference something we didn't create
     a DIE for.  Avoid crashing later on a NULL referenced DIE.  */
  if (targ_die == NULL)
    return;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_die_ref;
  attr.dw_attr_val.val_entry = NULL;
  attr.dw_attr_val.v.val_die_ref.die = targ_die;
  attr.dw_attr_val.v.val_die_ref.external = 0;
  add_dwarf_attr (die, attr: &attr);
}
4924 | |
4925 | /* Change DIE reference REF to point to NEW_DIE instead. */ |
4926 | |
4927 | static inline void |
4928 | change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die) |
4929 | { |
4930 | gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref); |
4931 | ref->dw_attr_val.v.val_die_ref.die = new_die; |
4932 | ref->dw_attr_val.v.val_die_ref.external = 0; |
4933 | } |
4934 | |
4935 | /* Add an AT_specification attribute to a DIE, and also make the back |
4936 | pointer from the specification to the definition. */ |
4937 | |
4938 | static inline void |
4939 | add_AT_specification (dw_die_ref die, dw_die_ref targ_die) |
4940 | { |
4941 | add_AT_die_ref (die, attr_kind: DW_AT_specification, targ_die); |
4942 | gcc_assert (!targ_die->die_definition); |
4943 | targ_die->die_definition = die; |
4944 | } |
4945 | |
4946 | static inline dw_die_ref |
4947 | AT_ref (dw_attr_node *a) |
4948 | { |
4949 | gcc_assert (a && AT_class (a) == dw_val_class_die_ref); |
4950 | return a->dw_attr_val.v.val_die_ref.die; |
4951 | } |
4952 | |
4953 | static inline int |
4954 | AT_ref_external (dw_attr_node *a) |
4955 | { |
4956 | if (a && AT_class (a) == dw_val_class_die_ref) |
4957 | return a->dw_attr_val.v.val_die_ref.external; |
4958 | |
4959 | return 0; |
4960 | } |
4961 | |
4962 | static inline void |
4963 | set_AT_ref_external (dw_attr_node *a, int i) |
4964 | { |
4965 | gcc_assert (a && AT_class (a) == dw_val_class_die_ref); |
4966 | a->dw_attr_val.v.val_die_ref.external = i; |
4967 | } |
4968 | |
4969 | /* Add a location description attribute value to a DIE. */ |
4970 | |
4971 | static inline void |
4972 | add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc) |
4973 | { |
4974 | dw_attr_node attr; |
4975 | |
4976 | attr.dw_attr = attr_kind; |
4977 | attr.dw_attr_val.val_class = dw_val_class_loc; |
4978 | attr.dw_attr_val.val_entry = NULL; |
4979 | attr.dw_attr_val.v.val_loc = loc; |
4980 | add_dwarf_attr (die, attr: &attr); |
4981 | } |
4982 | |
4983 | dw_loc_descr_ref |
4984 | AT_loc (dw_attr_node *a) |
4985 | { |
4986 | gcc_assert (a && AT_class (a) == dw_val_class_loc); |
4987 | return a->dw_attr_val.v.val_loc; |
4988 | } |
4989 | |
4990 | static inline void |
4991 | add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list) |
4992 | { |
4993 | dw_attr_node attr; |
4994 | |
4995 | if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS) |
4996 | return; |
4997 | |
4998 | attr.dw_attr = attr_kind; |
4999 | attr.dw_attr_val.val_class = dw_val_class_loc_list; |
5000 | attr.dw_attr_val.val_entry = NULL; |
5001 | attr.dw_attr_val.v.val_loc_list = loc_list; |
5002 | add_dwarf_attr (die, attr: &attr); |
5003 | have_location_lists = true; |
5004 | } |
5005 | |
5006 | static inline dw_loc_list_ref |
5007 | AT_loc_list (dw_attr_node *a) |
5008 | { |
5009 | gcc_assert (a && AT_class (a) == dw_val_class_loc_list); |
5010 | return a->dw_attr_val.v.val_loc_list; |
5011 | } |
5012 | |
/* Add a view list attribute to DIE.  It must have a DW_AT_location
   attribute, because the view list complements the location list.
   The view-list value stores DIE itself so the DW_AT_location can be
   looked back up.  Dropped on XCOFF targets without DWARF extras.  */

static inline void
add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
{
  dw_attr_node attr;

  if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
    return;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_view_list;
  attr.dw_attr_val.val_entry = NULL;
  attr.dw_attr_val.v.val_view_list = die;
  add_dwarf_attr (die, attr: &attr);
  gcc_checking_assert (get_AT (die, DW_AT_location));
  gcc_assert (have_location_lists);
}
5032 | |
/* Return a pointer to the location list referenced by the attribute.
   If the named attribute is a view list, look up the corresponding
   DW_AT_location attribute and return its location list.  */

static inline dw_loc_list_ref *
AT_loc_list_ptr (dw_attr_node *a)
{
  gcc_assert (a);
  switch (AT_class (a))
    {
    case dw_val_class_loc_list:
      return &a->dw_attr_val.v.val_loc_list;
    case dw_val_class_view_list:
      {
	dw_attr_node *l;
	/* A view-list value stores the DIE it belongs to (see
	   add_AT_view_list); fetch that DIE's DW_AT_location.  */
	l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
	if (!l)
	  return NULL;
	/* Layout invariant: the view-list attribute node immediately
	   follows the DW_AT_location node in the attribute vector.  */
	gcc_checking_assert (l + 1 == a);
	return AT_loc_list_ptr (a: l);
      }
    default:
      gcc_unreachable ();
    }
}
5058 | |
/* Return the location attribute value associated with a view list
   attribute value.  */

static inline dw_val_node *
view_list_to_loc_list_val_node (dw_val_node *val)
{
  gcc_assert (val->val_class == dw_val_class_view_list);
  /* The view-list value records its owning DIE; the location list
     lives in that DIE's DW_AT_location attribute.  */
  dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
  if (!loc)
    return NULL;
  /* Layout invariant (same as in AT_loc_list_ptr): the view-list
     attribute node immediately follows the DW_AT_location node, so
     VAL must be the value of the very next attribute.  */
  gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
  gcc_assert (AT_class (loc) == dw_val_class_loc_list);
  return &loc->dw_attr_val;
}
5073 | |
/* Hasher for .debug_addr table entries; hashing and equality depend
   on the entry kind (rtx, dtprel rtx, or label).  */

struct addr_hasher : ggc_ptr_hash<addr_table_entry>
{
  static hashval_t hash (addr_table_entry *);
  static bool equal (addr_table_entry *, addr_table_entry *);
};

/* Table of entries into the .debug_addr section.  */

static GTY (()) hash_table<addr_hasher> *addr_index_table;
5083 | |
/* Hash an address_table_entry.  */

hashval_t
addr_hasher::hash (addr_table_entry *a)
{
  inchash::hash hstate;
  switch (a->kind)
    {
    case ate_kind_rtx:
      /* Mix the kind into the hash so that a plain rtx and a dtprel
	 rtx with identical RTL hash differently.  */
      hstate.add_int (v: 0);
      break;
    case ate_kind_rtx_dtprel:
      hstate.add_int (v: 1);
      break;
    case ate_kind_label:
      /* Label entries hash by name only; no RTL to mix in.  */
      return htab_hash_string (a->addr.label);
    default:
      gcc_unreachable ();
    }
  /* Both rtx kinds fall through to here and mix in the RTL itself.  */
  inchash::add_rtx (a->addr.rtl, hstate);
  return hstate.end ();
}
5106 | |
5107 | /* Determine equality for two address_table_entries. */ |
5108 | |
5109 | bool |
5110 | addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2) |
5111 | { |
5112 | if (a1->kind != a2->kind) |
5113 | return false; |
5114 | switch (a1->kind) |
5115 | { |
5116 | case ate_kind_rtx: |
5117 | case ate_kind_rtx_dtprel: |
5118 | return rtx_equal_p (a1->addr.rtl, a2->addr.rtl); |
5119 | case ate_kind_label: |
5120 | return strcmp (s1: a1->addr.label, s2: a2->addr.label) == 0; |
5121 | default: |
5122 | gcc_unreachable (); |
5123 | } |
5124 | } |
5125 | |
5126 | /* Initialize an addr_table_entry. */ |
5127 | |
5128 | void |
5129 | init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr) |
5130 | { |
5131 | e->kind = kind; |
5132 | switch (kind) |
5133 | { |
5134 | case ate_kind_rtx: |
5135 | case ate_kind_rtx_dtprel: |
5136 | e->addr.rtl = (rtx) addr; |
5137 | break; |
5138 | case ate_kind_label: |
5139 | e->addr.label = (char *) addr; |
5140 | break; |
5141 | } |
5142 | e->refcount = 0; |
5143 | e->index = NO_INDEX_ASSIGNED; |
5144 | } |
5145 | |
/* Add an entry for ADDR/KIND to the address table, creating the table
   first if necessary, and return the (possibly shared) entry.  Defer
   setting an index until output time.  */

static addr_table_entry *
add_addr_table_entry (void *addr, enum ate_kind kind)
{
  addr_table_entry *node;
  addr_table_entry finder;

  /* The address table only exists for split debug info.  */
  gcc_assert (dwarf_split_debug_info);
  if (! addr_index_table)
    addr_index_table = hash_table<addr_hasher>::create_ggc (n: 10);
  /* Probe with a stack-allocated key; only allocate a GC'd node when
     no matching entry exists yet.  */
  init_addr_table_entry (e: &finder, kind, addr);
  addr_table_entry **slot = addr_index_table->find_slot (value: &finder, insert: INSERT);

  if (*slot == HTAB_EMPTY_ENTRY)
    {
      node = ggc_cleared_alloc<addr_table_entry> ();
      init_addr_table_entry (e: node, kind, addr);
      *slot = node;
    }
  else
    node = *slot;

  /* Entries are reference-counted so unused ones can be skipped when
     indexes are assigned (see index_addr_table_entry).  */
  node->refcount++;
  return node;
}
5173 | |
5174 | /* Remove an entry from the addr table by decrementing its refcount. |
5175 | Strictly, decrementing the refcount would be enough, but the |
5176 | assertion that the entry is actually in the table has found |
5177 | bugs. */ |
5178 | |
5179 | static void |
5180 | remove_addr_table_entry (addr_table_entry *entry) |
5181 | { |
5182 | gcc_assert (dwarf_split_debug_info && addr_index_table); |
5183 | /* After an index is assigned, the table is frozen. */ |
5184 | gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED); |
5185 | entry->refcount--; |
5186 | } |
5187 | |
/* Given a location list, remove all addresses it refers to from the
   address_table.  */

static void
remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
{
  /* Walk the whole expression chain; only operands that were routed
     through the address table have a non-null val_entry.  */
  for (; descr; descr = descr->dw_loc_next)
    if (descr->dw_loc_oprnd1.val_entry != NULL)
      {
	/* Entries may only be removed before indexes are assigned.  */
	gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
	remove_addr_table_entry (entry: descr->dw_loc_oprnd1.val_entry);
      }
}
5201 | |
5202 | /* A helper function for dwarf2out_finish called through |
5203 | htab_traverse. Assign an addr_table_entry its index. All entries |
5204 | must be collected into the table when this function is called, |
5205 | because the indexing code relies on htab_traverse to traverse nodes |
5206 | in the same order for each run. */ |
5207 | |
5208 | int |
5209 | index_addr_table_entry (addr_table_entry **h, unsigned int *index) |
5210 | { |
5211 | addr_table_entry *node = *h; |
5212 | |
5213 | /* Don't index unreferenced nodes. */ |
5214 | if (node->refcount == 0) |
5215 | return 1; |
5216 | |
5217 | gcc_assert (node->index == NO_INDEX_ASSIGNED); |
5218 | node->index = *index; |
5219 | *index += 1; |
5220 | |
5221 | return 1; |
5222 | } |
5223 | |
/* Read-only accessor: return the tag of a given DIE.  */

enum dwarf_tag
dw_get_die_tag (dw_die_ref die)
{
  return die->die_tag;
}
5231 | |
/* Return a reference to the children list of a given DIE.  Children
   are linked circularly through die_sib, and die_child points at the
   last child added (see add_child_die).  */

dw_die_ref
dw_get_die_child (dw_die_ref die)
{
  return die->die_child;
}
5239 | |
/* Return a reference to the next sibling of a given DIE in the
   circular sibling list.  */

dw_die_ref
dw_get_die_sib (dw_die_ref die)
{
  return die->die_sib;
}
5247 | |
5248 | /* Add an address constant attribute value to a DIE. When using |
5249 | dwarf_split_debug_info, address attributes in dies destined for the |
5250 | final executable should be direct references--setting the parameter |
5251 | force_direct ensures this behavior. */ |
5252 | |
5253 | static inline void |
5254 | add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr, |
5255 | bool force_direct) |
5256 | { |
5257 | dw_attr_node attr; |
5258 | |
5259 | attr.dw_attr = attr_kind; |
5260 | attr.dw_attr_val.val_class = dw_val_class_addr; |
5261 | attr.dw_attr_val.v.val_addr = addr; |
5262 | if (dwarf_split_debug_info && !force_direct) |
5263 | attr.dw_attr_val.val_entry = add_addr_table_entry (addr, kind: ate_kind_rtx); |
5264 | else |
5265 | attr.dw_attr_val.val_entry = NULL; |
5266 | add_dwarf_attr (die, attr: &attr); |
5267 | } |
5268 | |
5269 | /* Get the RTX from to an address DIE attribute. */ |
5270 | |
5271 | static inline rtx |
5272 | AT_addr (dw_attr_node *a) |
5273 | { |
5274 | gcc_assert (a && AT_class (a) == dw_val_class_addr); |
5275 | return a->dw_attr_val.v.val_addr; |
5276 | } |
5277 | |
5278 | /* Add a file attribute value to a DIE. */ |
5279 | |
5280 | static inline void |
5281 | add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind, |
5282 | struct dwarf_file_data *fd) |
5283 | { |
5284 | dw_attr_node attr; |
5285 | |
5286 | attr.dw_attr = attr_kind; |
5287 | attr.dw_attr_val.val_class = dw_val_class_file; |
5288 | attr.dw_attr_val.val_entry = NULL; |
5289 | attr.dw_attr_val.v.val_file = fd; |
5290 | add_dwarf_attr (die, attr: &attr); |
5291 | } |
5292 | |
5293 | /* Get the dwarf_file_data from a file DIE attribute. */ |
5294 | |
5295 | static inline struct dwarf_file_data * |
5296 | AT_file (dw_attr_node *a) |
5297 | { |
5298 | gcc_assert (a && (AT_class (a) == dw_val_class_file |
5299 | || AT_class (a) == dw_val_class_file_implicit)); |
5300 | return a->dw_attr_val.v.val_file; |
5301 | } |
5302 | |
#if VMS_DEBUGGING_INFO
/* Add a vms delta attribute value to a DIE: the pair of labels LBL1
   and LBL2 whose difference forms the value (VMS only).  */

static inline void
add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
		  const char *lbl1, const char *lbl2)
{
  dw_attr_node attr;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_vms_delta;
  attr.dw_attr_val.val_entry = NULL;
  /* The labels are copied; the attribute owns its strings.  */
  attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
  attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
  add_dwarf_attr (die, &attr);
}
#endif
5320 | |
5321 | /* Add a symbolic view identifier attribute value to a DIE. */ |
5322 | |
5323 | static inline void |
5324 | add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind, |
5325 | const char *view_label) |
5326 | { |
5327 | dw_attr_node attr; |
5328 | |
5329 | attr.dw_attr = attr_kind; |
5330 | attr.dw_attr_val.val_class = dw_val_class_symview; |
5331 | attr.dw_attr_val.val_entry = NULL; |
5332 | attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label); |
5333 | add_dwarf_attr (die, attr: &attr); |
5334 | } |
5335 | |
5336 | /* Add a label identifier attribute value to a DIE. */ |
5337 | |
5338 | static inline void |
5339 | add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind, |
5340 | const char *lbl_id) |
5341 | { |
5342 | dw_attr_node attr; |
5343 | |
5344 | attr.dw_attr = attr_kind; |
5345 | attr.dw_attr_val.val_class = dw_val_class_lbl_id; |
5346 | attr.dw_attr_val.val_entry = NULL; |
5347 | attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id); |
5348 | if (dwarf_split_debug_info) |
5349 | attr.dw_attr_val.val_entry |
5350 | = add_addr_table_entry (addr: attr.dw_attr_val.v.val_lbl_id, |
5351 | kind: ate_kind_label); |
5352 | add_dwarf_attr (die, attr: &attr); |
5353 | } |
5354 | |
5355 | /* Add a section offset attribute value to a DIE, an offset into the |
5356 | debug_line section. */ |
5357 | |
5358 | static inline void |
5359 | add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind, |
5360 | const char *label) |
5361 | { |
5362 | dw_attr_node attr; |
5363 | |
5364 | attr.dw_attr = attr_kind; |
5365 | attr.dw_attr_val.val_class = dw_val_class_lineptr; |
5366 | attr.dw_attr_val.val_entry = NULL; |
5367 | attr.dw_attr_val.v.val_lbl_id = xstrdup (label); |
5368 | add_dwarf_attr (die, attr: &attr); |
5369 | } |
5370 | |
5371 | /* Add a section offset attribute value to a DIE, an offset into the |
5372 | debug_macinfo section. */ |
5373 | |
5374 | static inline void |
5375 | add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind, |
5376 | const char *label) |
5377 | { |
5378 | dw_attr_node attr; |
5379 | |
5380 | attr.dw_attr = attr_kind; |
5381 | attr.dw_attr_val.val_class = dw_val_class_macptr; |
5382 | attr.dw_attr_val.val_entry = NULL; |
5383 | attr.dw_attr_val.v.val_lbl_id = xstrdup (label); |
5384 | add_dwarf_attr (die, attr: &attr); |
5385 | } |
5386 | |
5387 | /* Add a range_list attribute value to a DIE. When using |
5388 | dwarf_split_debug_info, address attributes in dies destined for the |
5389 | final executable should be direct references--setting the parameter |
5390 | force_direct ensures this behavior. */ |
5391 | |
5392 | #define UNRELOCATED_OFFSET ((addr_table_entry *) 1) |
5393 | #define RELOCATED_OFFSET (NULL) |
5394 | |
5395 | static void |
5396 | add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind, |
5397 | long unsigned int offset, bool force_direct) |
5398 | { |
5399 | dw_attr_node attr; |
5400 | |
5401 | attr.dw_attr = attr_kind; |
5402 | attr.dw_attr_val.val_class = dw_val_class_range_list; |
5403 | /* For the range_list attribute, use val_entry to store whether the |
5404 | offset should follow split-debug-info or normal semantics. This |
5405 | value is read in output_range_list_offset. */ |
5406 | if (dwarf_split_debug_info && !force_direct) |
5407 | attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET; |
5408 | else |
5409 | attr.dw_attr_val.val_entry = RELOCATED_OFFSET; |
5410 | attr.dw_attr_val.v.val_offset = offset; |
5411 | add_dwarf_attr (die, attr: &attr); |
5412 | } |
5413 | |
5414 | /* Return the start label of a delta attribute. */ |
5415 | |
5416 | static inline const char * |
5417 | AT_vms_delta1 (dw_attr_node *a) |
5418 | { |
5419 | gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta)); |
5420 | return a->dw_attr_val.v.val_vms_delta.lbl1; |
5421 | } |
5422 | |
5423 | /* Return the end label of a delta attribute. */ |
5424 | |
5425 | static inline const char * |
5426 | AT_vms_delta2 (dw_attr_node *a) |
5427 | { |
5428 | gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta)); |
5429 | return a->dw_attr_val.v.val_vms_delta.lbl2; |
5430 | } |
5431 | |
/* Return the label string of an attribute whose value is emitted as a
   label: one of the lbl_id, lineptr, macptr, loclistsptr or high_pc
   value classes.  */

static inline const char *
AT_lbl (dw_attr_node *a)
{
  gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
		    || AT_class (a) == dw_val_class_lineptr
		    || AT_class (a) == dw_val_class_macptr
		    || AT_class (a) == dw_val_class_loclistsptr
		    || AT_class (a) == dw_val_class_high_pc));
  return a->dw_attr_val.v.val_lbl_id;
}
5442 | |
/* Get the attribute of type attr_kind.  Returns NULL if DIE is null
   or the attribute cannot be found, here or via the DIE's
   specification / abstract origin.  */

dw_attr_node *
get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
{
  dw_attr_node *a;
  unsigned ix;
  dw_die_ref spec = NULL;

  if (! die)
    return NULL;

  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    if (a->dw_attr == attr_kind)
      return a;
    else if (a->dw_attr == DW_AT_specification
	     || a->dw_attr == DW_AT_abstract_origin)
      spec = AT_ref (a);

  /* Not found here; recurse into the DIE this one is a specification
     or abstract instance of.  */
  if (spec)
    return get_AT (die: spec, attr_kind);

  return NULL;
}
5467 | |
5468 | /* Returns the parent of the declaration of DIE. */ |
5469 | |
5470 | static dw_die_ref |
5471 | get_die_parent (dw_die_ref die) |
5472 | { |
5473 | dw_die_ref t; |
5474 | |
5475 | if (!die) |
5476 | return NULL; |
5477 | |
5478 | if ((t = get_AT_ref (die, DW_AT_abstract_origin)) |
5479 | || (t = get_AT_ref (die, DW_AT_specification))) |
5480 | die = t; |
5481 | |
5482 | return die->die_parent; |
5483 | } |
5484 | |
5485 | /* Return the "low pc" attribute value, typically associated with a subprogram |
5486 | DIE. Return null if the "low pc" attribute is either not present, or if it |
5487 | cannot be represented as an assembler label identifier. */ |
5488 | |
5489 | static inline const char * |
5490 | get_AT_low_pc (dw_die_ref die) |
5491 | { |
5492 | dw_attr_node *a = get_AT (die, attr_kind: DW_AT_low_pc); |
5493 | |
5494 | return a ? AT_lbl (a) : NULL; |
5495 | } |
5496 | |
5497 | /* Return the value of the string attribute designated by ATTR_KIND, or |
5498 | NULL if it is not present. */ |
5499 | |
5500 | const char * |
5501 | get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind) |
5502 | { |
5503 | dw_attr_node *a = get_AT (die, attr_kind); |
5504 | |
5505 | return a ? AT_string (a) : NULL; |
5506 | } |
5507 | |
/* Return the value of the flag attribute designated by ATTR_KIND, or 0
   if it is not present.  (The comment formerly claimed -1, but the
   code returns 0 for a missing attribute.)  */

int
get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
{
  dw_attr_node *a = get_AT (die, attr_kind);

  return a ? AT_flag (a) : 0;
}
5518 | |
5519 | /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0 |
5520 | if it is not present. */ |
5521 | |
5522 | unsigned |
5523 | get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind) |
5524 | { |
5525 | dw_attr_node *a = get_AT (die, attr_kind); |
5526 | |
5527 | return a ? AT_unsigned (a) : 0; |
5528 | } |
5529 | |
5530 | dw_die_ref |
5531 | get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind) |
5532 | { |
5533 | dw_attr_node *a = get_AT (die, attr_kind); |
5534 | |
5535 | return a ? AT_ref (a) : NULL; |
5536 | } |
5537 | |
5538 | struct dwarf_file_data * |
5539 | get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind) |
5540 | { |
5541 | dw_attr_node *a = get_AT (die, attr_kind); |
5542 | |
5543 | return a ? AT_file (a) : NULL; |
5544 | } |
5545 | |
5546 | /* Return TRUE if the language is C. */ |
5547 | |
5548 | static inline bool |
5549 | is_c (void) |
5550 | { |
5551 | unsigned int lang = get_AT_unsigned (die: comp_unit_die (), attr_kind: DW_AT_language); |
5552 | |
5553 | return (lang == DW_LANG_C || lang == DW_LANG_C89 || lang == DW_LANG_C99 |
5554 | || lang == DW_LANG_C11 || lang == DW_LANG_ObjC); |
5555 | |
5556 | |
5557 | } |
5558 | |
5559 | /* Return TRUE if the language is C++. */ |
5560 | |
5561 | static inline bool |
5562 | is_cxx (void) |
5563 | { |
5564 | unsigned int lang = get_AT_unsigned (die: comp_unit_die (), attr_kind: DW_AT_language); |
5565 | |
5566 | return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus |
5567 | || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14); |
5568 | } |
5569 | |
/* Return TRUE if DECL was created by the C++ frontend.  */

static bool
is_cxx (const_tree decl)
{
  if (in_lto_p)
    {
      /* Under LTO, prefer the language string recorded on DECL's own
	 translation unit over the CU's DW_AT_language.  */
      const_tree context = get_ultimate_context (decl);
      if (context && TRANSLATION_UNIT_LANGUAGE (context))
	return startswith (TRANSLATION_UNIT_LANGUAGE (context), prefix: "GNU C++" );
    }
  /* Fall back to the CU-level language check.  */
  return is_cxx ();
}
5583 | |
5584 | /* Return TRUE if the language is Fortran. */ |
5585 | |
5586 | static inline bool |
5587 | is_fortran (void) |
5588 | { |
5589 | unsigned int lang = get_AT_unsigned (die: comp_unit_die (), attr_kind: DW_AT_language); |
5590 | |
5591 | return (lang == DW_LANG_Fortran77 |
5592 | || lang == DW_LANG_Fortran90 |
5593 | || lang == DW_LANG_Fortran95 |
5594 | || lang == DW_LANG_Fortran03 |
5595 | || lang == DW_LANG_Fortran08); |
5596 | } |
5597 | |
5598 | static inline bool |
5599 | is_fortran (const_tree decl) |
5600 | { |
5601 | if (in_lto_p) |
5602 | { |
5603 | const_tree context = get_ultimate_context (decl); |
5604 | if (context && TRANSLATION_UNIT_LANGUAGE (context)) |
5605 | return (strncmp (TRANSLATION_UNIT_LANGUAGE (context), |
5606 | s2: "GNU Fortran" , n: 11) == 0 |
5607 | || strcmp (TRANSLATION_UNIT_LANGUAGE (context), |
5608 | s2: "GNU F77" ) == 0); |
5609 | } |
5610 | return is_fortran (); |
5611 | } |
5612 | |
/* Return TRUE if the language is Rust.
   Note, returns FALSE for dwarf_version < 5 && dwarf_strict
   (NOTE(review): presumably because DW_LANG_Rust is a DWARF 5 code
   and is not emitted into strict pre-5 DWARF; the emission logic is
   elsewhere in this file).  */

static inline bool
is_rust (void)
{
  unsigned int lang = get_AT_unsigned (die: comp_unit_die (), attr_kind: DW_AT_language);

  return lang == DW_LANG_Rust;
}
5623 | |
5624 | /* Return TRUE if the language is Ada. */ |
5625 | |
5626 | static inline bool |
5627 | is_ada (void) |
5628 | { |
5629 | unsigned int lang = get_AT_unsigned (die: comp_unit_die (), attr_kind: DW_AT_language); |
5630 | |
5631 | return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83; |
5632 | } |
5633 | |
/* Return TRUE if the CU language recorded in DW_AT_language is D.  */

static inline bool
is_dlang (void)
{
  unsigned int lang = get_AT_unsigned (die: comp_unit_die (), attr_kind: DW_AT_language);

  return lang == DW_LANG_D;
}
5643 | |
/* Remove the specified attribute if present.  Return TRUE if removal
   was successful.  */

static bool
remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
{
  dw_attr_node *a;
  unsigned ix;

  if (! die)
    return false;

  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    if (a->dw_attr == attr_kind)
      {
	/* String values are reference-counted; drop this use.  */
	if (AT_class (a) == dw_val_class_str)
	  if (a->dw_attr_val.v.val_str->refcount)
	    a->dw_attr_val.v.val_str->refcount--;

	/* vec::ordered_remove should help reduce the number of abbrevs
	   that are needed.  */
	die->die_attr->ordered_remove (ix);
	return true;
      }
  return false;
}
5670 | |
/* Remove CHILD from its parent.  PREV must have the property that
   PREV->DIE_SIB == CHILD.  Does not alter CHILD.  */

static void
remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
{
  gcc_assert (child->die_parent == prev->die_parent);
  gcc_assert (prev->die_sib == child);
  if (prev == child)
    {
      /* CHILD is the only child: the circular sibling list collapses
	 to nothing.  */
      gcc_assert (child->die_parent->die_child == child);
      prev = NULL;
    }
  else
    prev->die_sib = child->die_sib;
  /* die_child points at the last child (see add_child_die); keep it
     accurate if CHILD was that last child.  */
  if (child->die_parent->die_child == child)
    child->die_parent->die_child = prev;
  child->die_sib = NULL;
}
5690 | |
/* Replace OLD_CHILD with NEW_CHILD.  PREV must have the property that
   PREV->DIE_SIB == OLD_CHILD.  Does not alter OLD_CHILD.  */

static void
replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
{
  dw_die_ref parent = old_child->die_parent;

  gcc_assert (parent == prev->die_parent);
  gcc_assert (prev->die_sib == old_child);

  new_child->die_parent = parent;
  if (prev == old_child)
    {
      /* OLD_CHILD was the only child; NEW_CHILD becomes a singleton
	 circular list.  */
      gcc_assert (parent->die_child == old_child);
      new_child->die_sib = new_child;
    }
  else
    {
      prev->die_sib = new_child;
      new_child->die_sib = old_child->die_sib;
    }
  /* die_child points at the last child; keep it accurate.  */
  if (old_child->die_parent->die_child == old_child)
    old_child->die_parent->die_child = new_child;
  old_child->die_sib = NULL;
}
5717 | |
/* Move all children from OLD_PARENT to NEW_PARENT.  */

static void
move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
{
  dw_die_ref c;
  /* Transfer the whole circular sibling list in one go, then fix up
     each child's parent pointer.  NOTE(review): any existing children
     of NEW_PARENT would be dropped here — presumably callers only use
     this on a childless NEW_PARENT; confirm at call sites.  */
  new_parent->die_child = old_parent->die_child;
  old_parent->die_child = NULL;
  FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
}
5728 | |
/* Remove child DIE whose die_tag is TAG.  Do nothing if no child
   matches TAG.  */

static void
remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
{
  dw_die_ref c;

  /* Walk the circular sibling list starting just after the last
     child; PREV always trails C by one position.  */
  c = die->die_child;
  if (c) do {
    dw_die_ref prev = c;
    c = c->die_sib;
    /* The inner loop removes consecutive matching children while PREV
       stays valid as their predecessor.  */
    while (c->die_tag == tag)
      {
	remove_child_with_prev (child: c, prev);
	c->die_parent = NULL;
	/* Might have removed every child.  */
	if (die->die_child == NULL)
	  return;
	c = prev->die_sib;
      }
  } while (c != die->die_child);
}
5752 | |
/* Add a CHILD_DIE as the last child of DIE.  */

static void
add_child_die (dw_die_ref die, dw_die_ref child_die)
{
  /* FIXME this should probably be an assert.  */
  if (! die || ! child_die)
    return;
  gcc_assert (die != child_die);

  child_die->die_parent = die;
  if (die->die_child)
    {
      /* Splice CHILD_DIE after the current last child in the circular
	 sibling list.  */
      child_die->die_sib = die->die_child->die_sib;
      die->die_child->die_sib = child_die;
    }
  else
    /* First child: it is its own sibling.  */
    child_die->die_sib = child_die;
  /* die_child always points at the last child added.  */
  die->die_child = child_die;
}
5773 | |
/* Like add_child_die, but put CHILD_DIE after AFTER_DIE.  */

static void
add_child_die_after (dw_die_ref die, dw_die_ref child_die,
		     dw_die_ref after_die)
{
  gcc_assert (die
	      && child_die
	      && after_die
	      && die->die_child
	      && die != child_die);

  child_die->die_parent = die;
  /* Splice CHILD_DIE into the circular sibling list after AFTER_DIE.  */
  child_die->die_sib = after_die->die_sib;
  after_die->die_sib = child_die;
  /* If AFTER_DIE was the last child, CHILD_DIE now is; die_child
     points at the last child.  */
  if (die->die_child == after_die)
    die->die_child = child_die;
}
5792 | |
/* Unassociate CHILD from its parent, and make its parent be
   NEW_PARENT.  */

static void
reparent_child (dw_die_ref child, dw_die_ref new_parent)
{
  /* Find CHILD's predecessor in the circular sibling list; the walk
     is guaranteed to come back around to CHILD.  */
  for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
    if (p->die_sib == child)
      {
	remove_child_with_prev (child, prev: p);
	break;
      }
  /* Append CHILD as NEW_PARENT's last child.  */
  add_child_die (die: new_parent, child_die: child);
}
5807 | |
/* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
   is the specification, to the end of PARENT's list of children.
   This is done by removing and re-adding it.  */

static void
splice_child_die (dw_die_ref parent, dw_die_ref child)
{
  /* We want the declaration DIE from inside the class, not the
     specification DIE at toplevel.  */
  if (child->die_parent != parent)
    {
      dw_die_ref tmp = get_AT_ref (die: child, attr_kind: DW_AT_specification);

      if (tmp)
	child = tmp;
    }

  gcc_assert (child->die_parent == parent
	      || (child->die_parent
		  == get_AT_ref (parent, DW_AT_specification)));

  /* Unlink from the old parent and append as PARENT's last child.  */
  reparent_child (child, new_parent: parent);
}
5831 | |
5832 | /* Create and return a new die with TAG_VALUE as tag. */ |
5833 | |
5834 | dw_die_ref |
5835 | new_die_raw (enum dwarf_tag tag_value) |
5836 | { |
5837 | dw_die_ref die = ggc_cleared_alloc<die_node> (); |
5838 | die->die_tag = tag_value; |
5839 | return die; |
5840 | } |
5841 | |
/* Create and return a new die with a parent of PARENT_DIE.  If
   PARENT_DIE is NULL, the new DIE is placed in limbo and an
   associated tree T must be supplied to determine parenthood
   later.  */

static inline dw_die_ref
new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
{
  dw_die_ref die = new_die_raw (tag_value);

  if (parent_die != NULL)
    add_child_die (die: parent_die, child_die: die);
  else
    {
      limbo_die_node *limbo_node;

      /* No DIEs created after early dwarf should end up in limbo,
	 because the limbo list should not persist past LTO
	 streaming.  */
      if (tag_value != DW_TAG_compile_unit
	  /* These are allowed because they're generated while
	     breaking out COMDAT units late.  */
	  && tag_value != DW_TAG_type_unit
	  && tag_value != DW_TAG_skeleton_unit
	  && !early_dwarf
	  /* Allow nested functions to live in limbo because they will
	     only temporarily live there, as decls_for_scope will fix
	     them up.  */
	  && (TREE_CODE (t) != FUNCTION_DECL
	      || !decl_function_context (t))
	  /* Same as nested functions above but for types.  Types that
	     are local to a function will be fixed in
	     decls_for_scope.  */
	  && (!RECORD_OR_UNION_TYPE_P (t)
	      || !TYPE_CONTEXT (t)
	      || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
	  /* FIXME debug-early: Allow late limbo DIE creation for LTO,
	     especially in the ltrans stage, but once we implement LTO
	     dwarf streaming, we should remove this exception.  */
	  && !in_lto_p)
	{
	  /* Diagnose and abort: a late DIE reached limbo.  */
	  fprintf (stderr, format: "symbol ended up in limbo too late:" );
	  debug_generic_stmt (t);
	  gcc_unreachable ();
	}

      /* Park the DIE on the limbo list; its parent is determined
	 later from T.  */
      limbo_node = ggc_cleared_alloc<limbo_die_node> ();
      limbo_node->die = die;
      limbo_node->created_for = t;
      limbo_node->next = limbo_die_list;
      limbo_die_list = limbo_node;
    }

  return die;
}
5897 | |
/* Return the DIE associated with the given type specifier, or NULL if
   none is cached (or the cached one has been removed).  */

dw_die_ref
lookup_type_die (tree type)
{
  dw_die_ref die = TYPE_SYMTAB_DIE (type);
  if (die && die->removed)
    {
      /* The cached DIE was removed; drop the stale association and
	 make the type look as if it was never emitted.  */
      TYPE_SYMTAB_DIE (type) = NULL;
      TREE_ASM_WRITTEN (type) = 0;
      return NULL;
    }
  return die;
}
5912 | |
/* Given a TYPE_DIE representing the type TYPE, if TYPE is an
   anonymous type named by the typedef TYPE_DIE, return the DIE of the
   anonymous type instead of the one of the naming typedef.  */

static inline dw_die_ref
strip_naming_typedef (tree type, dw_die_ref type_die)
{
  if (type
      && TREE_CODE (type) == RECORD_TYPE
      && type_die
      && type_die->die_tag == DW_TAG_typedef
      && is_naming_typedef_decl (TYPE_NAME (type)))
    /* The typedef DIE's DW_AT_type refers to the anonymous type.  */
    type_die = get_AT_ref (die: type_die, attr_kind: DW_AT_type);
  return type_die;
}
5928 | |
/* Like lookup_type_die, but if type is an anonymous type named by a
   typedef[1], return the DIE of the anonymous type instead of the one
   of the naming typedef.  This is because in gen_typedef_die, we did
   equate the anonymous struct named by the typedef with the DIE of
   the naming typedef.  So by default, lookup_type_die on an anonymous
   struct yields the DIE of the naming typedef.

   [1]: Read the comment of is_naming_typedef_decl to learn about what
   a naming typedef is.  */

static inline dw_die_ref
lookup_type_die_strip_naming_typedef (tree type)
{
  dw_die_ref die = lookup_type_die (type);
  return strip_naming_typedef (type, type_die: die);
}
5945 | |
/* Equate a DIE to a given type specifier.  */

static inline void
equate_type_number_to_die (tree type, dw_die_ref type_die)
{
  /* Cache the DIE directly on the type; this is the mapping that
     lookup_type_die reads back.  */
  TYPE_SYMTAB_DIE (type) = type_die;
}
5953 | |
static dw_die_ref maybe_create_die_with_external_ref (tree);
/* A symbol + offset pair naming a DIE in another unit's debug info;
   used to record external DIE references (see
   dwarf2out_die_ref_for_decl below).  */
struct GTY(()) sym_off_pair
{
  /* Section symbol of the referenced unit; not GC-managed.  */
  const char * GTY((skip)) sym;
  /* Offset of the DIE relative to that symbol.  */
  unsigned HOST_WIDE_INT off;
};
/* Map from trees to their external DIE references.  */
static GTY(()) hash_map<tree, sym_off_pair> *external_die_map;
5961 | |
/* Returns a hash value for X (which really is a die_struct).  */

inline hashval_t
decl_die_hasher::hash (die_node *x)
{
  /* The stored DECL_UID serves directly as the hash key.  */
  return (hashval_t) x->decl_id;
}
5969 | |
/* Return true if decl_id of die_struct X is the same as UID of decl *Y.  */

inline bool
decl_die_hasher::equal (die_node *x, tree y)
{
  /* DIEs are keyed in the decl-die table by DECL_UID.  */
  return (x->decl_id == DECL_UID (y));
}
5977 | |
/* Return the DIE associated with a given declaration, or NULL.  */

dw_die_ref
lookup_decl_die (tree decl)
{
  dw_die_ref *die = decl_die_table->find_slot_with_hash (comparable: decl, DECL_UID (decl),
                                                         insert: NO_INSERT);
  if (!die)
    {
      /* In LTO the DIE may exist only as an external reference from
	 early debug; try to materialize it.  */
      if (in_lto_p)
	return maybe_create_die_with_external_ref (decl);
      return NULL;
    }
  if ((*die)->removed)
    {
      /* Drop the stale association to a removed DIE.  */
      decl_die_table->clear_slot (slot: die);
      return NULL;
    }
  return *die;
}
5998 | |
5999 | |
6000 | /* Return the DIE associated with BLOCK. */ |
6001 | |
6002 | static inline dw_die_ref |
6003 | lookup_block_die (tree block) |
6004 | { |
6005 | dw_die_ref die = BLOCK_DIE (block); |
6006 | if (!die && in_lto_p) |
6007 | return maybe_create_die_with_external_ref (block); |
6008 | return die; |
6009 | } |
6010 | |
6011 | /* Associate DIE with BLOCK. */ |
6012 | |
6013 | static inline void |
6014 | equate_block_to_die (tree block, dw_die_ref die) |
6015 | { |
6016 | BLOCK_DIE (block) = die; |
6017 | } |
6018 | #undef BLOCK_DIE |
6019 | |
6020 | |
6021 | /* For DECL which might have early dwarf output query a SYMBOL + OFFSET |
6022 | style reference. Return true if we found one refering to a DIE for |
6023 | DECL, otherwise return false. */ |
6024 | |
6025 | static bool |
6026 | dwarf2out_die_ref_for_decl (tree decl, const char **sym, |
6027 | unsigned HOST_WIDE_INT *off) |
6028 | { |
6029 | dw_die_ref die; |
6030 | |
6031 | if (in_lto_p) |
6032 | { |
6033 | /* During WPA stage and incremental linking we use a hash-map |
6034 | to store the decl <-> label + offset map. */ |
6035 | if (!external_die_map) |
6036 | return false; |
6037 | sym_off_pair *desc = external_die_map->get (k: decl); |
6038 | if (!desc) |
6039 | return false; |
6040 | *sym = desc->sym; |
6041 | *off = desc->off; |
6042 | return true; |
6043 | } |
6044 | |
6045 | if (TREE_CODE (decl) == BLOCK) |
6046 | die = lookup_block_die (block: decl); |
6047 | else |
6048 | die = lookup_decl_die (decl); |
6049 | if (!die) |
6050 | return false; |
6051 | |
6052 | /* Similar to get_ref_die_offset_label, but using the "correct" |
6053 | label. */ |
6054 | *off = die->die_offset; |
6055 | while (die->die_parent) |
6056 | die = die->die_parent; |
6057 | /* For the containing CU DIE we compute a die_symbol in |
6058 | compute_comp_unit_symbol. */ |
6059 | gcc_assert (die->die_tag == DW_TAG_compile_unit |
6060 | && die->die_id.die_symbol != NULL); |
6061 | *sym = die->die_id.die_symbol; |
6062 | return true; |
6063 | } |
6064 | |
6065 | /* Add a reference of kind ATTR_KIND to a DIE at SYMBOL + OFFSET to DIE. */ |
6066 | |
6067 | static void |
6068 | add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, |
6069 | const char *symbol, HOST_WIDE_INT offset) |
6070 | { |
6071 | /* Create a fake DIE that contains the reference. Don't use |
6072 | new_die because we don't want to end up in the limbo list. */ |
6073 | /* ??? We probably want to share these, thus put a ref to the DIE |
6074 | we create here to the external_die_map entry. */ |
6075 | dw_die_ref ref = new_die_raw (tag_value: die->die_tag); |
6076 | ref->die_id.die_symbol = symbol; |
6077 | ref->die_offset = offset; |
6078 | ref->with_offset = 1; |
6079 | add_AT_die_ref (die, attr_kind, targ_die: ref); |
6080 | } |
6081 | |
6082 | /* Create a DIE for DECL if required and add a reference to a DIE |
6083 | at SYMBOL + OFFSET which contains attributes dumped early. */ |
6084 | |
6085 | static void |
6086 | dwarf2out_register_external_die (tree decl, const char *sym, |
6087 | unsigned HOST_WIDE_INT off) |
6088 | { |
6089 | if (debug_info_level == DINFO_LEVEL_NONE) |
6090 | return; |
6091 | |
6092 | if (!external_die_map) |
6093 | external_die_map = hash_map<tree, sym_off_pair>::create_ggc (size: 1000); |
6094 | gcc_checking_assert (!external_die_map->get (decl)); |
6095 | sym_off_pair p = { IDENTIFIER_POINTER (get_identifier (sym)), .off: off }; |
6096 | external_die_map->put (k: decl, v: p); |
6097 | } |
6098 | |
/* If we have a registered external DIE for DECL return a new DIE for
   the concrete instance with an appropriate abstract origin.  Returns
   NULL when no external reference was registered for DECL.  */

static dw_die_ref
maybe_create_die_with_external_ref (tree decl)
{
  if (!external_die_map)
    return NULL;
  sym_off_pair *desc = external_die_map->get (k: decl);
  if (!desc)
    return NULL;

  /* Consume the map entry: each registered reference is materialized
     at most once.  */
  const char *sym = desc->sym;
  unsigned HOST_WIDE_INT off = desc->off;
  external_die_map->remove (k: decl);

  /* Temporarily clear in_lto_p so the lookups below do not recurse
     back into this function; with it clear they return the raw table
     contents, which must be empty for DECL.  */
  in_lto_p = false;
  dw_die_ref die = (TREE_CODE (decl) == BLOCK
		    ? lookup_block_die (block: decl) : lookup_decl_die (decl));
  gcc_assert (!die);
  in_lto_p = true;

  tree ctx;
  dw_die_ref parent = NULL;
  /* Need to lookup a DIE for the decls context - the containing
     function or translation unit.  */
  if (TREE_CODE (decl) == BLOCK)
    {
      ctx = BLOCK_SUPERCONTEXT (decl);
      /* ??? We do not output DIEs for all scopes thus skip as
	 many DIEs as needed.  */
      while (TREE_CODE (ctx) == BLOCK
	     && !lookup_block_die (block: ctx))
	ctx = BLOCK_SUPERCONTEXT (ctx);
    }
  else
    ctx = DECL_CONTEXT (decl);
  /* Peel types in the context stack.  */
  while (ctx && TYPE_P (ctx))
    ctx = TYPE_CONTEXT (ctx);
  /* Likewise namespaces in case we do not want to emit DIEs for them.  */
  if (debug_info_level <= DINFO_LEVEL_TERSE)
    while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
      ctx = DECL_CONTEXT (ctx);
  if (ctx)
    {
      if (TREE_CODE (ctx) == BLOCK)
	parent = lookup_block_die (block: ctx);
      else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
	       /* Keep the 1:1 association during WPA.  */
	       && !flag_wpa
	       && flag_incremental_link != INCREMENTAL_LINK_LTO)
	/* Otherwise all late annotations go to the main CU which
	   imports the original CUs.  */
	parent = comp_unit_die ();
      else if (TREE_CODE (ctx) == FUNCTION_DECL
	       && TREE_CODE (decl) != FUNCTION_DECL
	       && TREE_CODE (decl) != PARM_DECL
	       && TREE_CODE (decl) != RESULT_DECL
	       && TREE_CODE (decl) != BLOCK)
	/* Leave function local entities parent determination to when
	   we process scope vars.  */
	;
      else
	parent = lookup_decl_die (decl: ctx);
    }
  else
    /* In some cases the FEs fail to set DECL_CONTEXT properly.
       Handle this case gracefully by globalizing stuff.  */
    parent = comp_unit_die ();
  /* Create a DIE "stub" whose tag mirrors DECL's tree code; its real
     attributes are referenced via DW_AT_abstract_origin below.  */
  switch (TREE_CODE (decl))
    {
    case TRANSLATION_UNIT_DECL:
      {
	die = comp_unit_die ();
	/* We re-target all CU decls to the LTRANS CU DIE, so no need
	   to create a DIE for the original CUs.  */
	return die;
      }
    case NAMESPACE_DECL:
      if (is_fortran (decl))
	die = new_die (tag_value: DW_TAG_module, parent_die: parent, t: decl);
      else
	die = new_die (tag_value: DW_TAG_namespace, parent_die: parent, t: decl);
      break;
    case FUNCTION_DECL:
      die = new_die (tag_value: DW_TAG_subprogram, parent_die: parent, t: decl);
      break;
    case VAR_DECL:
      die = new_die (tag_value: DW_TAG_variable, parent_die: parent, t: decl);
      break;
    case RESULT_DECL:
      die = new_die (tag_value: DW_TAG_variable, parent_die: parent, t: decl);
      break;
    case PARM_DECL:
      die = new_die (tag_value: DW_TAG_formal_parameter, parent_die: parent, t: decl);
      break;
    case CONST_DECL:
      die = new_die (tag_value: DW_TAG_constant, parent_die: parent, t: decl);
      break;
    case LABEL_DECL:
      die = new_die (tag_value: DW_TAG_label, parent_die: parent, t: decl);
      break;
    case BLOCK:
      die = new_die (tag_value: DW_TAG_lexical_block, parent_die: parent, t: decl);
      break;
    default:
      gcc_unreachable ();
    }
  /* Register the stub so subsequent lookups find it instead of
     re-entering this function.  */
  if (TREE_CODE (decl) == BLOCK)
    equate_block_to_die (block: decl, die);
  else
    equate_decl_number_to_die (decl, die);

  add_desc_attribute (die, decl);

  /* Add a reference to the DIE providing early debug at $sym + off.  */
  add_AT_external_die_ref (die, attr_kind: DW_AT_abstract_origin, symbol: sym, offset: off);

  return die;
}
6221 | |
6222 | /* Returns a hash value for X (which really is a var_loc_list). */ |
6223 | |
6224 | inline hashval_t |
6225 | decl_loc_hasher::hash (var_loc_list *x) |
6226 | { |
6227 | return (hashval_t) x->decl_id; |
6228 | } |
6229 | |
6230 | /* Return true if decl_id of var_loc_list X is the same as |
6231 | UID of decl *Y. */ |
6232 | |
6233 | inline bool |
6234 | decl_loc_hasher::equal (var_loc_list *x, const_tree y) |
6235 | { |
6236 | return (x->decl_id == DECL_UID (y)); |
6237 | } |
6238 | |
6239 | /* Return the var_loc list associated with a given declaration. */ |
6240 | |
6241 | static inline var_loc_list * |
6242 | lookup_decl_loc (const_tree decl) |
6243 | { |
6244 | if (!decl_loc_table) |
6245 | return NULL; |
6246 | return decl_loc_table->find_with_hash (comparable: decl, DECL_UID (decl)); |
6247 | } |
6248 | |
6249 | /* Returns a hash value for X (which really is a cached_dw_loc_list_list). */ |
6250 | |
6251 | inline hashval_t |
6252 | dw_loc_list_hasher::hash (cached_dw_loc_list *x) |
6253 | { |
6254 | return (hashval_t) x->decl_id; |
6255 | } |
6256 | |
6257 | /* Return true if decl_id of cached_dw_loc_list X is the same as |
6258 | UID of decl *Y. */ |
6259 | |
6260 | inline bool |
6261 | dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y) |
6262 | { |
6263 | return (x->decl_id == DECL_UID (y)); |
6264 | } |
6265 | |
6266 | /* Equate a DIE to a particular declaration. */ |
6267 | |
6268 | static void |
6269 | equate_decl_number_to_die (tree decl, dw_die_ref decl_die) |
6270 | { |
6271 | unsigned int decl_id = DECL_UID (decl); |
6272 | |
6273 | *decl_die_table->find_slot_with_hash (comparable: decl, hash: decl_id, insert: INSERT) = decl_die; |
6274 | decl_die->decl_id = decl_id; |
6275 | } |
6276 | |
6277 | /* Return how many bits covers PIECE EXPR_LIST. */ |
6278 | |
6279 | static HOST_WIDE_INT |
6280 | decl_piece_bitsize (rtx piece) |
6281 | { |
6282 | int ret = (int) GET_MODE (piece); |
6283 | if (ret) |
6284 | return ret; |
6285 | gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT |
6286 | && CONST_INT_P (XEXP (XEXP (piece, 0), 0))); |
6287 | return INTVAL (XEXP (XEXP (piece, 0), 0)); |
6288 | } |
6289 | |
6290 | /* Return pointer to the location of location note in PIECE EXPR_LIST. */ |
6291 | |
6292 | static rtx * |
6293 | decl_piece_varloc_ptr (rtx piece) |
6294 | { |
6295 | if ((int) GET_MODE (piece)) |
6296 | return &XEXP (piece, 0); |
6297 | else |
6298 | return &XEXP (XEXP (piece, 0), 1); |
6299 | } |
6300 | |
6301 | /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits. |
6302 | Next is the chain of following piece nodes. */ |
6303 | |
6304 | static rtx_expr_list * |
6305 | decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next) |
6306 | { |
6307 | if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE) |
6308 | return alloc_EXPR_LIST (bitsize, loc_note, next); |
6309 | else |
6310 | return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode, |
6311 | GEN_INT (bitsize), |
6312 | loc_note), next); |
6313 | } |
6314 | |
6315 | /* Return rtx that should be stored into loc field for |
6316 | LOC_NOTE and BITPOS/BITSIZE. */ |
6317 | |
6318 | static rtx |
6319 | construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos, |
6320 | HOST_WIDE_INT bitsize) |
6321 | { |
6322 | if (bitsize != -1) |
6323 | { |
6324 | loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX); |
6325 | if (bitpos != 0) |
6326 | loc_note = decl_piece_node (NULL_RTX, bitsize: bitpos, next: loc_note); |
6327 | } |
6328 | return loc_note; |
6329 | } |
6330 | |
/* This function either modifies location piece list *DEST in
   place (if SRC and INNER is NULL), or copies location piece list
   *SRC to *DEST while modifying it.  Location BITPOS is modified
   to contain LOC_NOTE, any pieces overlapping it are removed resp.
   not copied and if needed some padding around it is added.
   When modifying in place, DEST should point to EXPR_LIST where
   earlier pieces cover PIECE_BITPOS bits, when copying SRC points
   to the start of the whole list and INNER points to the EXPR_LIST
   where earlier pieces cover PIECE_BITPOS bits.  */

static void
adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
		   HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
		   HOST_WIDE_INT bitsize, rtx loc_note)
{
  HOST_WIDE_INT diff;
  bool copy = inner != NULL;

  if (copy)
    {
      /* First copy all nodes preceding the current bitpos.  */
      while (src != inner)
	{
	  *dest = decl_piece_node (loc_note: *decl_piece_varloc_ptr (piece: *src),
				   bitsize: decl_piece_bitsize (piece: *src), NULL_RTX);
	  dest = &XEXP (*dest, 1);
	  src = &XEXP (*src, 1);
	}
    }
  /* Add padding if needed.  */
  if (bitpos != piece_bitpos)
    {
      /* An empty (NULL_RTX) piece stands in for the uncovered bits
	 between PIECE_BITPOS and BITPOS.  */
      *dest = decl_piece_node (NULL_RTX, bitsize: bitpos - piece_bitpos,
			       next: copy ? NULL_RTX : *dest);
      dest = &XEXP (*dest, 1);
    }
  else if (*dest && decl_piece_bitsize (piece: *dest) == bitsize)
    {
      gcc_assert (!copy);
      /* A piece with correct bitpos and bitsize already exist,
	 just update the location for it and return.  */
      *decl_piece_varloc_ptr (piece: *dest) = loc_note;
      return;
    }
  /* Add the piece that changed.  */
  *dest = decl_piece_node (loc_note, bitsize, next: copy ? NULL_RTX : *dest);
  dest = &XEXP (*dest, 1);
  /* Skip over pieces that overlap it.  DIFF is the number of bits,
     counted from PIECE_BITPOS, that the new piece extends over; keep
     consuming old pieces until that span is covered.  */
  diff = bitpos - piece_bitpos + bitsize;
  if (!copy)
    /* In-place: the tail to scan starts right after the node just
       inserted (whose next field still chains the old pieces).  */
    src = dest;
  while (diff > 0 && *src)
    {
      rtx piece = *src;
      diff -= decl_piece_bitsize (piece);
      if (copy)
	src = &XEXP (piece, 1);
      else
	{
	  /* In-place: unlink and free the overlapped node.  */
	  *src = XEXP (piece, 1);
	  free_EXPR_LIST_node (piece);
	}
    }
  /* Add padding if needed.  DIFF < 0 means the last consumed piece
     extended past the new piece's end.  */
  if (diff < 0 && *src)
    {
      if (!copy)
	dest = src;
      *dest = decl_piece_node (NULL_RTX, bitsize: -diff, next: copy ? NULL_RTX : *dest);
      dest = &XEXP (*dest, 1);
    }
  if (!copy)
    return;
  /* Finally copy all nodes following it.  */
  while (*src)
    {
      *dest = decl_piece_node (loc_note: *decl_piece_varloc_ptr (piece: *src),
			       bitsize: decl_piece_bitsize (piece: *src), NULL_RTX);
      dest = &XEXP (*dest, 1);
      src = &XEXP (*src, 1);
    }
}
6413 | |
/* Add a variable location node to the linked list for DECL.  LOC_NOTE
   is the new location note, LABEL/VIEW identify the program point it
   was emitted at.  Returns the new (or reused) node, or NULL when the
   note was folded into an existing entry or dropped.  */

static struct var_loc_node *
add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
{
  unsigned int decl_id;
  var_loc_list *temp;
  struct var_loc_node *loc = NULL;
  HOST_WIDE_INT bitsize = -1, bitpos = -1;

  /* For SRA-style DEBUG_EXPR decls, record the location against the
     underlying decl at the corresponding bit range instead.  */
  if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
    {
      tree realdecl = DECL_DEBUG_EXPR (decl);
      if (handled_component_p (t: realdecl)
	  || (TREE_CODE (realdecl) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
	{
	  bool reverse;
	  tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
							&bitsize, &reverse);
	  /* Give up on bases we cannot track as piece lists (note the
	     256-bit cap on bitpos + bitsize).  */
	  if (!innerdecl
	      || !DECL_P (innerdecl)
	      || DECL_IGNORED_P (innerdecl)
	      || TREE_STATIC (innerdecl)
	      || bitsize == 0
	      || bitpos + bitsize > 256)
	    return NULL;
	  decl = innerdecl;
	}
    }

  decl_id = DECL_UID (decl);
  var_loc_list **slot
    = decl_loc_table->find_slot_with_hash (comparable: decl, hash: decl_id, insert: INSERT);
  if (*slot == NULL)
    {
      temp = ggc_cleared_alloc<var_loc_list> ();
      temp->decl_id = decl_id;
      *slot = temp;
    }
  else
    temp = *slot;

  /* For PARM_DECLs try to keep around the original incoming value,
     even if that means we'll emit a zero-range .debug_loc entry.  */
  if (temp->last
      && temp->first == temp->last
      && TREE_CODE (decl) == PARM_DECL
      && NOTE_P (temp->first->loc)
      && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
      && DECL_INCOMING_RTL (decl)
      && NOTE_VAR_LOCATION_LOC (temp->first->loc)
      && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
	 == GET_CODE (DECL_INCOMING_RTL (decl))
      && prev_real_insn (as_a<rtx_insn *> (p: temp->first->loc)) == NULL_RTX
      && (bitsize != -1
	  || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
			   NOTE_VAR_LOCATION_LOC (loc_note))
	  || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
	      != NOTE_VAR_LOCATION_STATUS (loc_note))))
    {
      /* Keep the sole existing (incoming-value) note and append the
	 new location after it.  */
      loc = ggc_cleared_alloc<var_loc_node> ();
      temp->first->next = loc;
      temp->last = loc;
      loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
    }
  else if (temp->last)
    {
      struct var_loc_node *last = temp->last, *unused = NULL;
      rtx *piece_loc = NULL, last_loc_note;
      HOST_WIDE_INT piece_bitpos = 0;
      if (last->next)
	{
	  last = last->next;
	  gcc_assert (last->next == NULL);
	}
      /* Locate the piece covering BITPOS in the last node's list.  */
      if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
	{
	  piece_loc = &last->loc;
	  do
	    {
	      HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (piece: *piece_loc);
	      if (piece_bitpos + cur_bitsize > bitpos)
		break;
	      piece_bitpos += cur_bitsize;
	      piece_loc = &XEXP (*piece_loc, 1);
	    }
	  while (*piece_loc);
	}
      /* TEMP->LAST here is either pointer to the last but one or
	 last element in the chained list, LAST is pointer to the
	 last element.  */
      if (label && strcmp (s1: last->label, s2: label) == 0 && last->view == view)
	{
	  /* For SRA optimized variables if there weren't any real
	     insns since last note, just modify the last node.  */
	  if (piece_loc != NULL)
	    {
	      adjust_piece_list (dest: piece_loc, NULL, NULL,
				 bitpos, piece_bitpos, bitsize, loc_note);
	      return NULL;
	    }
	  /* If the last note doesn't cover any instructions, remove it.  */
	  if (temp->last != last)
	    {
	      temp->last->next = NULL;
	      unused = last;
	      last = temp->last;
	      gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
	    }
	  else
	    {
	      /* Reuse the sole node in place for the new location.  */
	      gcc_assert (temp->first == temp->last
			  || (temp->first->next == temp->last
			      && TREE_CODE (decl) == PARM_DECL));
	      memset (s: temp->last, c: '\0', n: sizeof (*temp->last));
	      temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
	      return temp->last;
	    }
	}
      /* Determine the note currently in effect for the affected bits,
	 to detect redundant new notes.  */
      if (bitsize == -1 && NOTE_P (last->loc))
	last_loc_note = last->loc;
      else if (piece_loc != NULL
	       && *piece_loc != NULL_RTX
	       && piece_bitpos == bitpos
	       && decl_piece_bitsize (piece: *piece_loc) == bitsize)
	last_loc_note = *decl_piece_varloc_ptr (piece: *piece_loc);
      else
	last_loc_note = NULL_RTX;
      /* If the current location is the same as the end of the list,
	 and either both or neither of the locations is uninitialized,
	 we have nothing to do.  */
      if (last_loc_note == NULL_RTX
	  || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
			    NOTE_VAR_LOCATION_LOC (loc_note)))
	  || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
	       != NOTE_VAR_LOCATION_STATUS (loc_note))
	      && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
		   == VAR_INIT_STATUS_UNINITIALIZED)
		  || (NOTE_VAR_LOCATION_STATUS (loc_note)
		      == VAR_INIT_STATUS_UNINITIALIZED))))
	{
	  /* Add LOC to the end of list and update LAST.  If the last
	     element of the list has been removed above, reuse its
	     memory for the new node, otherwise allocate a new one.  */
	  if (unused)
	    {
	      loc = unused;
	      memset (s: loc, c: '\0', n: sizeof (*loc));
	    }
	  else
	    loc = ggc_cleared_alloc<var_loc_node> ();
	  if (bitsize == -1 || piece_loc == NULL)
	    loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
	  else
	    adjust_piece_list (dest: &loc->loc, src: &last->loc, inner: piece_loc,
			       bitpos, piece_bitpos, bitsize, loc_note);
	  last->next = loc;
	  /* Ensure TEMP->LAST will point either to the new last but one
	     element of the chain, or to the last element in it.  */
	  if (last != temp->last)
	    temp->last = last;
	}
      else if (unused)
	ggc_free (unused);
    }
  else
    {
      /* First note for this decl: start the list.  */
      loc = ggc_cleared_alloc<var_loc_node> ();
      temp->first = loc;
      temp->last = loc;
      loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
    }
  return loc;
}
6589 | |
/* Number of spaces the DIE-structure debug printers currently indent
   their output with; print_die bumps it by 4 for each nesting level.  */
static int print_indent;
6594 | |
6595 | /* Indent the line the number of spaces given by print_indent. */ |
6596 | |
6597 | static inline void |
6598 | print_spaces (FILE *outfile) |
6599 | { |
6600 | fprintf (stream: outfile, format: "%*s" , print_indent, "" ); |
6601 | } |
6602 | |
6603 | /* Print a type signature in hex. */ |
6604 | |
6605 | static inline void |
6606 | print_signature (FILE *outfile, char *sig) |
6607 | { |
6608 | int i; |
6609 | |
6610 | for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++) |
6611 | fprintf (stream: outfile, format: "%02x" , sig[i] & 0xff); |
6612 | } |
6613 | |
6614 | static inline void |
6615 | print_discr_value (FILE *outfile, dw_discr_value *discr_value) |
6616 | { |
6617 | if (discr_value->pos) |
6618 | fprintf (stream: outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.sval); |
6619 | else |
6620 | fprintf (stream: outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.uval); |
6621 | } |
6622 | |
6623 | static void print_loc_descr (dw_loc_descr_ref, FILE *); |
6624 | |
6625 | /* Print the value associated to the VAL DWARF value node to OUTFILE. If |
6626 | RECURSE, output location descriptor operations. */ |
6627 | |
6628 | static void |
6629 | print_dw_val (dw_val_node *val, bool recurse, FILE *outfile) |
6630 | { |
6631 | switch (val->val_class) |
6632 | { |
6633 | case dw_val_class_addr: |
6634 | fprintf (stream: outfile, format: "address" ); |
6635 | break; |
6636 | case dw_val_class_offset: |
6637 | fprintf (stream: outfile, format: "offset" ); |
6638 | break; |
6639 | case dw_val_class_loc: |
6640 | fprintf (stream: outfile, format: "location descriptor" ); |
6641 | if (val->v.val_loc == NULL) |
6642 | fprintf (stream: outfile, format: " -> <null>\n" ); |
6643 | else if (recurse) |
6644 | { |
6645 | fprintf (stream: outfile, format: ":\n" ); |
6646 | print_indent += 4; |
6647 | print_loc_descr (val->v.val_loc, outfile); |
6648 | print_indent -= 4; |
6649 | } |
6650 | else |
6651 | { |
6652 | if (flag_dump_noaddr || flag_dump_unnumbered) |
6653 | fprintf (stream: outfile, format: " #\n" ); |
6654 | else |
6655 | fprintf (stream: outfile, format: " (%p)\n" , (void *) val->v.val_loc); |
6656 | } |
6657 | break; |
6658 | case dw_val_class_loc_list: |
6659 | fprintf (stream: outfile, format: "location list -> label:%s" , |
6660 | val->v.val_loc_list->ll_symbol); |
6661 | break; |
6662 | case dw_val_class_view_list: |
6663 | val = view_list_to_loc_list_val_node (val); |
6664 | fprintf (stream: outfile, format: "location list with views -> labels:%s and %s" , |
6665 | val->v.val_loc_list->ll_symbol, |
6666 | val->v.val_loc_list->vl_symbol); |
6667 | break; |
6668 | case dw_val_class_range_list: |
6669 | fprintf (stream: outfile, format: "range list" ); |
6670 | break; |
6671 | case dw_val_class_const: |
6672 | case dw_val_class_const_implicit: |
6673 | fprintf (stream: outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int); |
6674 | break; |
6675 | case dw_val_class_unsigned_const: |
6676 | case dw_val_class_unsigned_const_implicit: |
6677 | fprintf (stream: outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned); |
6678 | break; |
6679 | case dw_val_class_const_double: |
6680 | fprintf (stream: outfile, format: "constant (" HOST_WIDE_INT_PRINT_DEC"," \ |
6681 | HOST_WIDE_INT_PRINT_UNSIGNED")" , |
6682 | val->v.val_double.high, |
6683 | val->v.val_double.low); |
6684 | break; |
6685 | case dw_val_class_wide_int: |
6686 | { |
6687 | int i = val->v.val_wide->get_len (); |
6688 | fprintf (stream: outfile, format: "constant (" ); |
6689 | gcc_assert (i > 0); |
6690 | if (val->v.val_wide->elt (i: i - 1) == 0) |
6691 | fprintf (stream: outfile, format: "0x" ); |
6692 | fprintf (stream: outfile, HOST_WIDE_INT_PRINT_HEX, |
6693 | val->v.val_wide->elt (i: --i)); |
6694 | while (--i >= 0) |
6695 | fprintf (stream: outfile, HOST_WIDE_INT_PRINT_PADDED_HEX, |
6696 | val->v.val_wide->elt (i)); |
6697 | fprintf (stream: outfile, format: ")" ); |
6698 | break; |
6699 | } |
6700 | case dw_val_class_vec: |
6701 | fprintf (stream: outfile, format: "floating-point or vector constant" ); |
6702 | break; |
6703 | case dw_val_class_flag: |
6704 | fprintf (stream: outfile, format: "%u" , val->v.val_flag); |
6705 | break; |
6706 | case dw_val_class_die_ref: |
6707 | if (val->v.val_die_ref.die != NULL) |
6708 | { |
6709 | dw_die_ref die = val->v.val_die_ref.die; |
6710 | |
6711 | if (die->comdat_type_p) |
6712 | { |
6713 | fprintf (stream: outfile, format: "die -> signature: " ); |
6714 | print_signature (outfile, |
6715 | sig: die->die_id.die_type_node->signature); |
6716 | } |
6717 | else if (die->die_id.die_symbol) |
6718 | { |
6719 | fprintf (stream: outfile, format: "die -> label: %s" , die->die_id.die_symbol); |
6720 | if (die->with_offset) |
6721 | fprintf (stream: outfile, format: " + %ld" , die->die_offset); |
6722 | } |
6723 | else |
6724 | fprintf (stream: outfile, format: "die -> %ld" , die->die_offset); |
6725 | if (flag_dump_noaddr || flag_dump_unnumbered) |
6726 | fprintf (stream: outfile, format: " #" ); |
6727 | else |
6728 | fprintf (stream: outfile, format: " (%p)" , (void *) die); |
6729 | } |
6730 | else |
6731 | fprintf (stream: outfile, format: "die -> <null>" ); |
6732 | break; |
6733 | case dw_val_class_vms_delta: |
6734 | fprintf (stream: outfile, format: "delta: @slotcount(%s-%s)" , |
6735 | val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1); |
6736 | break; |
6737 | case dw_val_class_symview: |
6738 | fprintf (stream: outfile, format: "view: %s" , val->v.val_symbolic_view); |
6739 | break; |
6740 | case dw_val_class_lbl_id: |
6741 | case dw_val_class_lineptr: |
6742 | case dw_val_class_macptr: |
6743 | case dw_val_class_loclistsptr: |
6744 | case dw_val_class_high_pc: |
6745 | fprintf (stream: outfile, format: "label: %s" , val->v.val_lbl_id); |
6746 | break; |
6747 | case dw_val_class_str: |
6748 | if (val->v.val_str->str != NULL) |
6749 | fprintf (stream: outfile, format: "\"%s\"" , val->v.val_str->str); |
6750 | else |
6751 | fprintf (stream: outfile, format: "<null>" ); |
6752 | break; |
6753 | case dw_val_class_file: |
6754 | case dw_val_class_file_implicit: |
6755 | fprintf (stream: outfile, format: "\"%s\" (%d)" , val->v.val_file->filename, |
6756 | val->v.val_file->emitted_number); |
6757 | break; |
6758 | case dw_val_class_data8: |
6759 | { |
6760 | int i; |
6761 | |
6762 | for (i = 0; i < 8; i++) |
6763 | fprintf (stream: outfile, format: "%02x" , val->v.val_data8[i]); |
6764 | break; |
6765 | } |
6766 | case dw_val_class_discr_value: |
6767 | print_discr_value (outfile, discr_value: &val->v.val_discr_value); |
6768 | break; |
6769 | case dw_val_class_discr_list: |
6770 | for (dw_discr_list_ref node = val->v.val_discr_list; |
6771 | node != NULL; |
6772 | node = node->dw_discr_next) |
6773 | { |
6774 | if (node->dw_discr_range) |
6775 | { |
6776 | fprintf (stream: outfile, format: " .. " ); |
6777 | print_discr_value (outfile, discr_value: &node->dw_discr_lower_bound); |
6778 | print_discr_value (outfile, discr_value: &node->dw_discr_upper_bound); |
6779 | } |
6780 | else |
6781 | print_discr_value (outfile, discr_value: &node->dw_discr_lower_bound); |
6782 | |
6783 | if (node->dw_discr_next != NULL) |
6784 | fprintf (stream: outfile, format: " | " ); |
6785 | } |
6786 | default: |
6787 | break; |
6788 | } |
6789 | } |
6790 | |
6791 | /* Likewise, for a DIE attribute. */ |
6792 | |
6793 | static void |
6794 | print_attribute (dw_attr_node *a, bool recurse, FILE *outfile) |
6795 | { |
6796 | print_dw_val (val: &a->dw_attr_val, recurse, outfile); |
6797 | } |
6798 | |
6799 | |
6800 | /* Print the list of operands in the LOC location description to OUTFILE. This |
6801 | routine is a debugging aid only. */ |
6802 | |
6803 | static void |
6804 | print_loc_descr (dw_loc_descr_ref loc, FILE *outfile) |
6805 | { |
6806 | dw_loc_descr_ref l = loc; |
6807 | |
6808 | if (loc == NULL) |
6809 | { |
6810 | print_spaces (outfile); |
6811 | fprintf (stream: outfile, format: "<null>\n" ); |
6812 | return; |
6813 | } |
6814 | |
6815 | for (l = loc; l != NULL; l = l->dw_loc_next) |
6816 | { |
6817 | print_spaces (outfile); |
6818 | if (flag_dump_noaddr || flag_dump_unnumbered) |
6819 | fprintf (stream: outfile, format: "#" ); |
6820 | else |
6821 | fprintf (stream: outfile, format: "(%p)" , (void *) l); |
6822 | fprintf (stream: outfile, format: " %s" , |
6823 | dwarf_stack_op_name (op: l->dw_loc_opc)); |
6824 | if (l->dw_loc_oprnd1.val_class != dw_val_class_none) |
6825 | { |
6826 | fprintf (stream: outfile, format: " " ); |
6827 | print_dw_val (val: &l->dw_loc_oprnd1, recurse: false, outfile); |
6828 | } |
6829 | if (l->dw_loc_oprnd2.val_class != dw_val_class_none) |
6830 | { |
6831 | fprintf (stream: outfile, format: ", " ); |
6832 | print_dw_val (val: &l->dw_loc_oprnd2, recurse: false, outfile); |
6833 | } |
6834 | fprintf (stream: outfile, format: "\n" ); |
6835 | } |
6836 | } |
6837 | |
6838 | /* Print the information associated with a given DIE, and its children. |
6839 | This routine is a debugging aid only. */ |
6840 | |
6841 | static void |
6842 | print_die (dw_die_ref die, FILE *outfile) |
6843 | { |
6844 | dw_attr_node *a; |
6845 | dw_die_ref c; |
6846 | unsigned ix; |
6847 | |
6848 | print_spaces (outfile); |
6849 | fprintf (stream: outfile, format: "DIE %4ld: %s " , |
6850 | die->die_offset, dwarf_tag_name (tag: die->die_tag)); |
6851 | if (flag_dump_noaddr || flag_dump_unnumbered) |
6852 | fprintf (stream: outfile, format: "#\n" ); |
6853 | else |
6854 | fprintf (stream: outfile, format: "(%p)\n" , (void*) die); |
6855 | print_spaces (outfile); |
6856 | fprintf (stream: outfile, format: " abbrev id: %lu" , die->die_abbrev); |
6857 | fprintf (stream: outfile, format: " offset: %ld" , die->die_offset); |
6858 | fprintf (stream: outfile, format: " mark: %d\n" , die->die_mark); |
6859 | |
6860 | if (die->comdat_type_p) |
6861 | { |
6862 | print_spaces (outfile); |
6863 | fprintf (stream: outfile, format: " signature: " ); |
6864 | print_signature (outfile, sig: die->die_id.die_type_node->signature); |
6865 | fprintf (stream: outfile, format: "\n" ); |
6866 | } |
6867 | |
6868 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
6869 | { |
6870 | print_spaces (outfile); |
6871 | fprintf (stream: outfile, format: " %s: " , dwarf_attr_name (attr: a->dw_attr)); |
6872 | |
6873 | print_attribute (a, recurse: true, outfile); |
6874 | fprintf (stream: outfile, format: "\n" ); |
6875 | } |
6876 | |
6877 | if (die->die_child != NULL) |
6878 | { |
6879 | print_indent += 4; |
6880 | FOR_EACH_CHILD (die, c, print_die (c, outfile)); |
6881 | print_indent -= 4; |
6882 | } |
6883 | if (print_indent == 0) |
6884 | fprintf (stream: outfile, format: "\n" ); |
6885 | } |
6886 | |
6887 | /* Print the list of operations in the LOC location description. */ |
6888 | |
6889 | DEBUG_FUNCTION void |
6890 | debug_dwarf_loc_descr (dw_loc_descr_ref loc) |
6891 | { |
6892 | print_loc_descr (loc, stderr); |
6893 | } |
6894 | |
6895 | /* Print the information collected for a given DIE. */ |
6896 | |
6897 | DEBUG_FUNCTION void |
6898 | debug_dwarf_die (dw_die_ref die) |
6899 | { |
6900 | print_die (die, stderr); |
6901 | } |
6902 | |
6903 | DEBUG_FUNCTION void |
6904 | debug (die_struct &ref) |
6905 | { |
6906 | print_die (die: &ref, stderr); |
6907 | } |
6908 | |
6909 | DEBUG_FUNCTION void |
6910 | debug (die_struct *ptr) |
6911 | { |
6912 | if (ptr) |
6913 | debug (ref&: *ptr); |
6914 | else |
6915 | fprintf (stderr, format: "<nil>\n" ); |
6916 | } |
6917 | |
6918 | |
6919 | /* Print all DWARF information collected for the compilation unit. |
6920 | This routine is a debugging aid only. */ |
6921 | |
6922 | DEBUG_FUNCTION void |
6923 | debug_dwarf (void) |
6924 | { |
6925 | print_indent = 0; |
6926 | print_die (die: comp_unit_die (), stderr); |
6927 | } |
6928 | |
/* Verify the DIE tree structure.  */

DEBUG_FUNCTION void
verify_die (dw_die_ref die)
{
  /* die_mark is used below as a visited flag; callers must hand us an
     unmarked DIE, and all marks are cleared again before returning.  */
  gcc_assert (!die->die_mark);
  /* A detached singleton DIE has no sibling ring to verify.  */
  if (die->die_parent == NULL
      && die->die_sib == NULL)
    return;
  /* Verify the die_sib list is cyclic.  */
  dw_die_ref x = die;
  do
    {
      x->die_mark = 1;
      x = x->die_sib;
    }
  while (x && !x->die_mark);
  /* The walk must arrive back at DIE itself; stopping anywhere else
     means the ring is broken or merges into another cycle.  */
  gcc_assert (x == die);
  x = die;
  do
    {
      /* Verify all dies have the same parent.  */
      gcc_assert (x->die_parent == die->die_parent);
      if (x->die_child)
	{
	  /* Verify the child has the proper parent and recurse.  */
	  gcc_assert (x->die_child->die_parent == x);
	  verify_die (die: x->die_child);
	}
      /* Second pass doubles as cleanup: clear the marks set above.  */
      x->die_mark = 0;
      x = x->die_sib;
    }
  while (x && x->die_mark);
}
6963 | |
6964 | /* Sanity checks on DIEs. */ |
6965 | |
6966 | static void |
6967 | check_die (dw_die_ref die) |
6968 | { |
6969 | unsigned ix; |
6970 | dw_attr_node *a; |
6971 | bool inline_found = false; |
6972 | int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0; |
6973 | int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0; |
6974 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
6975 | { |
6976 | switch (a->dw_attr) |
6977 | { |
6978 | case DW_AT_inline: |
6979 | if (a->dw_attr_val.v.val_unsigned) |
6980 | inline_found = true; |
6981 | break; |
6982 | case DW_AT_location: |
6983 | ++n_location; |
6984 | break; |
6985 | case DW_AT_low_pc: |
6986 | ++n_low_pc; |
6987 | break; |
6988 | case DW_AT_high_pc: |
6989 | ++n_high_pc; |
6990 | break; |
6991 | case DW_AT_artificial: |
6992 | ++n_artificial; |
6993 | break; |
6994 | case DW_AT_decl_column: |
6995 | ++n_decl_column; |
6996 | break; |
6997 | case DW_AT_decl_line: |
6998 | ++n_decl_line; |
6999 | break; |
7000 | case DW_AT_decl_file: |
7001 | ++n_decl_file; |
7002 | break; |
7003 | default: |
7004 | break; |
7005 | } |
7006 | } |
7007 | if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1 |
7008 | || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1) |
7009 | { |
7010 | fprintf (stderr, format: "Duplicate attributes in DIE:\n" ); |
7011 | debug_dwarf_die (die); |
7012 | gcc_unreachable (); |
7013 | } |
7014 | if (inline_found) |
7015 | { |
7016 | /* A debugging information entry that is a member of an abstract |
7017 | instance tree [that has DW_AT_inline] should not contain any |
7018 | attributes which describe aspects of the subroutine which vary |
7019 | between distinct inlined expansions or distinct out-of-line |
7020 | expansions. */ |
7021 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
7022 | gcc_assert (a->dw_attr != DW_AT_low_pc |
7023 | && a->dw_attr != DW_AT_high_pc |
7024 | && a->dw_attr != DW_AT_location |
7025 | && a->dw_attr != DW_AT_frame_base |
7026 | && a->dw_attr != DW_AT_call_all_calls |
7027 | && a->dw_attr != DW_AT_GNU_all_call_sites); |
7028 | } |
7029 | } |
7030 | |
7031 | #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx) |
7032 | #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx) |
7033 | #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx) |
7034 | |
7035 | /* Calculate the checksum of a location expression. */ |
7036 | |
7037 | static inline void |
7038 | loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx) |
7039 | { |
7040 | int tem; |
7041 | inchash::hash hstate; |
7042 | hashval_t hash; |
7043 | |
7044 | tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc); |
7045 | CHECKSUM (tem); |
7046 | hash_loc_operands (loc, hstate); |
7047 | hash = hstate.end(); |
7048 | CHECKSUM (hash); |
7049 | } |
7050 | |
/* Calculate the checksum of an attribute: the attribute code followed
   by a value determined by the attribute's value class.  References to
   other DIEs recurse via die_checksum, with MARK breaking cycles.  */

static void
attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
{
  dw_loc_descr_ref loc;
  rtx r;

  CHECKSUM (at->dw_attr);

  /* We don't care that this was compiled with a different compiler
     snapshot; if the output is the same, that's what matters.  */
  if (at->dw_attr == DW_AT_producer)
    return;

  switch (AT_class (a: at))
    {
    case dw_val_class_const:
    case dw_val_class_const_implicit:
      CHECKSUM (at->dw_attr_val.v.val_int);
      break;
    case dw_val_class_unsigned_const:
    case dw_val_class_unsigned_const_implicit:
      CHECKSUM (at->dw_attr_val.v.val_unsigned);
      break;
    case dw_val_class_const_double:
      CHECKSUM (at->dw_attr_val.v.val_double);
      break;
    case dw_val_class_wide_int:
      /* Checksum the raw value blocks of the wide int.  */
      CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
		      get_full_len (*at->dw_attr_val.v.val_wide)
		      * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
      break;
    case dw_val_class_vec:
      CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
		      (at->dw_attr_val.v.val_vec.length
		       * at->dw_attr_val.v.val_vec.elt_size));
      break;
    case dw_val_class_flag:
      CHECKSUM (at->dw_attr_val.v.val_flag);
      break;
    case dw_val_class_str:
      CHECKSUM_STRING (AT_string (at));
      break;

    case dw_val_class_addr:
      /* Only symbolic addresses are expected here; checksum the
	 symbol's name rather than its (unstable) address.  */
      r = AT_addr (a: at);
      gcc_assert (GET_CODE (r) == SYMBOL_REF);
      CHECKSUM_STRING (XSTR (r, 0));
      break;

    case dw_val_class_offset:
      CHECKSUM (at->dw_attr_val.v.val_offset);
      break;

    case dw_val_class_loc:
      for (loc = AT_loc (a: at); loc; loc = loc->dw_loc_next)
	loc_checksum (loc, ctx);
      break;

    case dw_val_class_die_ref:
      die_checksum (AT_ref (a: at), ctx, mark);
      break;

    /* Labels, section pointers and similar values vary between
       otherwise identical compilations; contribute nothing.  */
    case dw_val_class_fde_ref:
    case dw_val_class_vms_delta:
    case dw_val_class_symview:
    case dw_val_class_lbl_id:
    case dw_val_class_lineptr:
    case dw_val_class_macptr:
    case dw_val_class_loclistsptr:
    case dw_val_class_high_pc:
      break;

    case dw_val_class_file:
    case dw_val_class_file_implicit:
      CHECKSUM_STRING (AT_file (at)->filename);
      break;

    case dw_val_class_data8:
      CHECKSUM (at->dw_attr_val.v.val_data8);
      break;

    default:
      break;
    }
}
7138 | |
7139 | /* Calculate the checksum of a DIE. */ |
7140 | |
7141 | static void |
7142 | die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark) |
7143 | { |
7144 | dw_die_ref c; |
7145 | dw_attr_node *a; |
7146 | unsigned ix; |
7147 | |
7148 | /* To avoid infinite recursion. */ |
7149 | if (die->die_mark) |
7150 | { |
7151 | CHECKSUM (die->die_mark); |
7152 | return; |
7153 | } |
7154 | die->die_mark = ++(*mark); |
7155 | |
7156 | CHECKSUM (die->die_tag); |
7157 | |
7158 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
7159 | attr_checksum (at: a, ctx, mark); |
7160 | |
7161 | FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark)); |
7162 | } |
7163 | |
7164 | #undef CHECKSUM |
7165 | #undef CHECKSUM_BLOCK |
7166 | #undef CHECKSUM_STRING |
7167 | |
7168 | /* For DWARF-4 types, include the trailing NULL when checksumming strings. */ |
7169 | #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx) |
7170 | #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx) |
7171 | #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx) |
7172 | #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx) |
7173 | #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx) |
7174 | #define CHECKSUM_ATTR(FOO) \ |
7175 | if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark) |
7176 | |
7177 | /* Calculate the checksum of a number in signed LEB128 format. */ |
7178 | |
7179 | static void |
7180 | checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx) |
7181 | { |
7182 | unsigned char byte; |
7183 | bool more; |
7184 | |
7185 | while (1) |
7186 | { |
7187 | byte = (value & 0x7f); |
7188 | value >>= 7; |
7189 | more = !((value == 0 && (byte & 0x40) == 0) |
7190 | || (value == -1 && (byte & 0x40) != 0)); |
7191 | if (more) |
7192 | byte |= 0x80; |
7193 | CHECKSUM (byte); |
7194 | if (!more) |
7195 | break; |
7196 | } |
7197 | } |
7198 | |
7199 | /* Calculate the checksum of a number in unsigned LEB128 format. */ |
7200 | |
7201 | static void |
7202 | checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx) |
7203 | { |
7204 | while (1) |
7205 | { |
7206 | unsigned char byte = (value & 0x7f); |
7207 | value >>= 7; |
7208 | if (value != 0) |
7209 | /* More bytes to follow. */ |
7210 | byte |= 0x80; |
7211 | CHECKSUM (byte); |
7212 | if (value == 0) |
7213 | break; |
7214 | } |
7215 | } |
7216 | |
7217 | /* Checksum the context of the DIE. This adds the names of any |
7218 | surrounding namespaces or structures to the checksum. */ |
7219 | |
7220 | static void |
7221 | checksum_die_context (dw_die_ref die, struct md5_ctx *ctx) |
7222 | { |
7223 | const char *name; |
7224 | dw_die_ref spec; |
7225 | int tag = die->die_tag; |
7226 | |
7227 | if (tag != DW_TAG_namespace |
7228 | && tag != DW_TAG_structure_type |
7229 | && tag != DW_TAG_class_type) |
7230 | return; |
7231 | |
7232 | name = get_AT_string (die, attr_kind: DW_AT_name); |
7233 | |
7234 | spec = get_AT_ref (die, attr_kind: DW_AT_specification); |
7235 | if (spec != NULL) |
7236 | die = spec; |
7237 | |
7238 | if (die->die_parent != NULL) |
7239 | checksum_die_context (die: die->die_parent, ctx); |
7240 | |
7241 | CHECKSUM_ULEB128 ('C'); |
7242 | CHECKSUM_ULEB128 (tag); |
7243 | if (name != NULL) |
7244 | CHECKSUM_STRING (name); |
7245 | } |
7246 | |
7247 | /* Calculate the checksum of a location expression. */ |
7248 | |
7249 | static inline void |
7250 | loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx) |
7251 | { |
7252 | /* Special case for lone DW_OP_plus_uconst: checksum as if the location |
7253 | were emitted as a DW_FORM_sdata instead of a location expression. */ |
7254 | if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL) |
7255 | { |
7256 | CHECKSUM_ULEB128 (DW_FORM_sdata); |
7257 | CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned); |
7258 | return; |
7259 | } |
7260 | |
7261 | /* Otherwise, just checksum the raw location expression. */ |
7262 | while (loc != NULL) |
7263 | { |
7264 | inchash::hash hstate; |
7265 | hashval_t hash; |
7266 | |
7267 | CHECKSUM_ULEB128 (loc->dtprel); |
7268 | CHECKSUM_ULEB128 (loc->dw_loc_opc); |
7269 | hash_loc_operands (loc, hstate); |
7270 | hash = hstate.end (); |
7271 | CHECKSUM (hash); |
7272 | loc = loc->dw_loc_next; |
7273 | } |
7274 | } |
7275 | |
/* Calculate the checksum of an attribute, for the ordered (DWARF-4
   type signature) checksum.  Unlike attr_checksum, every value is
   preceded by a one-letter marker and encoded in a form-independent
   way (LEB128 where possible) so the signature is stable across
   encoding choices.  TAG is the tag of the DIE owning AT; MARK
   numbers visited DIEs for back references.  */

static void
attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
		       struct md5_ctx *ctx, int *mark)
{
  dw_loc_descr_ref loc;
  rtx r;

  if (AT_class (a: at) == dw_val_class_die_ref)
    {
      dw_die_ref target_die = AT_ref (a: at);

      /* For pointer and reference types, we checksum only the (qualified)
	 name of the target type (if there is a name).  For friend entries,
	 we checksum only the (qualified) name of the target type or function.
	 This allows the checksum to remain the same whether the target type
	 is complete or not.  */
      if ((at->dw_attr == DW_AT_type
	   && (tag == DW_TAG_pointer_type
	       || tag == DW_TAG_reference_type
	       || tag == DW_TAG_rvalue_reference_type
	       || tag == DW_TAG_ptr_to_member_type))
	  || (at->dw_attr == DW_AT_friend
	      && tag == DW_TAG_friend))
	{
	  dw_attr_node *name_attr = get_AT (die: target_die, attr_kind: DW_AT_name);

	  if (name_attr != NULL)
	    {
	      dw_die_ref decl = get_AT_ref (die: target_die, attr_kind: DW_AT_specification);

	      if (decl == NULL)
		decl = target_die;
	      /* 'N': reference by (context-qualified) name.  */
	      CHECKSUM_ULEB128 ('N');
	      CHECKSUM_ULEB128 (at->dw_attr);
	      if (decl->die_parent != NULL)
		checksum_die_context (die: decl->die_parent, ctx);
	      /* 'E': end of the context chain.  */
	      CHECKSUM_ULEB128 ('E');
	      CHECKSUM_STRING (AT_string (name_attr));
	      return;
	    }
	}

      /* For all other references to another DIE, we check to see if the
	 target DIE has already been visited.  If it has, we emit a
	 backward reference; if not, we descend recursively.  */
      if (target_die->die_mark > 0)
	{
	  /* 'R': backward reference to an already-numbered DIE.  */
	  CHECKSUM_ULEB128 ('R');
	  CHECKSUM_ULEB128 (at->dw_attr);
	  CHECKSUM_ULEB128 (target_die->die_mark);
	}
      else
	{
	  dw_die_ref decl = get_AT_ref (die: target_die, attr_kind: DW_AT_specification);

	  if (decl == NULL)
	    decl = target_die;
	  /* Number the target before recursing so cycles terminate.  */
	  target_die->die_mark = ++(*mark);
	  /* 'T': inline expansion of the referenced type DIE.  */
	  CHECKSUM_ULEB128 ('T');
	  CHECKSUM_ULEB128 (at->dw_attr);
	  if (decl->die_parent != NULL)
	    checksum_die_context (die: decl->die_parent, ctx);
	  die_checksum_ordered (target_die, ctx, mark);
	}
      return;
    }

  /* 'A': a plain (non-reference) attribute value follows.  */
  CHECKSUM_ULEB128 ('A');
  CHECKSUM_ULEB128 (at->dw_attr);

  switch (AT_class (a: at))
    {
    case dw_val_class_const:
    case dw_val_class_const_implicit:
      CHECKSUM_ULEB128 (DW_FORM_sdata);
      CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
      break;

    case dw_val_class_unsigned_const:
    case dw_val_class_unsigned_const_implicit:
      /* Encoded as sdata too, so signed/unsigned forms of the same
	 small value checksum identically.  */
      CHECKSUM_ULEB128 (DW_FORM_sdata);
      CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
      break;

    case dw_val_class_const_double:
      CHECKSUM_ULEB128 (DW_FORM_block);
      CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
      CHECKSUM (at->dw_attr_val.v.val_double);
      break;

    case dw_val_class_wide_int:
      CHECKSUM_ULEB128 (DW_FORM_block);
      CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
			* HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
      CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
		      get_full_len (*at->dw_attr_val.v.val_wide)
		      * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
      break;

    case dw_val_class_vec:
      CHECKSUM_ULEB128 (DW_FORM_block);
      CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
			* at->dw_attr_val.v.val_vec.elt_size);
      CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
		      (at->dw_attr_val.v.val_vec.length
		       * at->dw_attr_val.v.val_vec.elt_size));
      break;

    case dw_val_class_flag:
      CHECKSUM_ULEB128 (DW_FORM_flag);
      CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
      break;

    case dw_val_class_str:
      CHECKSUM_ULEB128 (DW_FORM_string);
      CHECKSUM_STRING (AT_string (at));
      break;

    case dw_val_class_addr:
      /* Only symbolic addresses are expected; use the symbol name.  */
      r = AT_addr (a: at);
      gcc_assert (GET_CODE (r) == SYMBOL_REF);
      CHECKSUM_ULEB128 (DW_FORM_string);
      CHECKSUM_STRING (XSTR (r, 0));
      break;

    case dw_val_class_offset:
      CHECKSUM_ULEB128 (DW_FORM_sdata);
      CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
      break;

    case dw_val_class_loc:
      for (loc = AT_loc (a: at); loc; loc = loc->dw_loc_next)
	loc_checksum_ordered (loc, ctx);
      break;

    /* Labels and section pointers vary between compilations and are
       deliberately excluded from the signature.  */
    case dw_val_class_fde_ref:
    case dw_val_class_symview:
    case dw_val_class_lbl_id:
    case dw_val_class_lineptr:
    case dw_val_class_macptr:
    case dw_val_class_loclistsptr:
    case dw_val_class_high_pc:
      break;

    case dw_val_class_file:
    case dw_val_class_file_implicit:
      CHECKSUM_ULEB128 (DW_FORM_string);
      CHECKSUM_STRING (AT_file (at)->filename);
      break;

    case dw_val_class_data8:
      CHECKSUM (at->dw_attr_val.v.val_data8);
      break;

    default:
      break;
    }
}
7436 | |
/* The subset of a DIE's attributes that participate in the ordered
   (DWARF-4 type signature) checksum.  collect_checksum_attributes
   fills each slot with the matching attribute node, or leaves it NULL
   when absent; die_checksum_ordered then checksums the slots in a
   fixed canonical order, independent of the attributes' order in the
   DIE itself.  */
struct checksum_attributes
{
  dw_attr_node *at_name;
  dw_attr_node *at_type;
  dw_attr_node *at_friend;
  dw_attr_node *at_accessibility;
  dw_attr_node *at_address_class;
  dw_attr_node *at_alignment;
  dw_attr_node *at_allocated;
  dw_attr_node *at_artificial;
  dw_attr_node *at_associated;
  dw_attr_node *at_binary_scale;
  dw_attr_node *at_bit_offset;
  dw_attr_node *at_bit_size;
  dw_attr_node *at_bit_stride;
  dw_attr_node *at_byte_size;
  dw_attr_node *at_byte_stride;
  dw_attr_node *at_const_value;
  dw_attr_node *at_containing_type;
  dw_attr_node *at_count;
  dw_attr_node *at_data_location;
  dw_attr_node *at_data_member_location;
  dw_attr_node *at_decimal_scale;
  dw_attr_node *at_decimal_sign;
  dw_attr_node *at_default_value;
  dw_attr_node *at_digit_count;
  dw_attr_node *at_discr;
  dw_attr_node *at_discr_list;
  dw_attr_node *at_discr_value;
  dw_attr_node *at_encoding;
  dw_attr_node *at_endianity;
  dw_attr_node *at_explicit;
  dw_attr_node *at_is_optional;
  dw_attr_node *at_location;
  dw_attr_node *at_lower_bound;
  dw_attr_node *at_mutable;
  dw_attr_node *at_ordering;
  dw_attr_node *at_picture_string;
  dw_attr_node *at_prototyped;
  dw_attr_node *at_small;
  dw_attr_node *at_segment;
  dw_attr_node *at_string_length;
  dw_attr_node *at_string_length_bit_size;
  dw_attr_node *at_string_length_byte_size;
  dw_attr_node *at_threads_scaled;
  dw_attr_node *at_upper_bound;
  dw_attr_node *at_use_location;
  dw_attr_node *at_use_UTF8;
  dw_attr_node *at_variable_parameter;
  dw_attr_node *at_virtuality;
  dw_attr_node *at_visibility;
  dw_attr_node *at_vtable_elem_location;
};
7490 | |
/* Collect the attributes that we will want to use for the checksum.
   Walks DIE's attribute list once and records, in ATTRS, a pointer to
   each attribute relevant to the DWARF-4 type signature.  Attributes
   not listed here are deliberately excluded from the signature.  If a
   DIE contains a duplicate attribute, the last occurrence wins.  */

static void
collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
{
  dw_attr_node *a;
  unsigned ix;

  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    {
      switch (a->dw_attr)
        {
        case DW_AT_name:
          attrs->at_name = a;
          break;
        case DW_AT_type:
          attrs->at_type = a;
          break;
        case DW_AT_friend:
          attrs->at_friend = a;
          break;
        case DW_AT_accessibility:
          attrs->at_accessibility = a;
          break;
        case DW_AT_address_class:
          attrs->at_address_class = a;
          break;
	case DW_AT_alignment:
	  attrs->at_alignment = a;
	  break;
        case DW_AT_allocated:
          attrs->at_allocated = a;
          break;
        case DW_AT_artificial:
          attrs->at_artificial = a;
          break;
        case DW_AT_associated:
          attrs->at_associated = a;
          break;
        case DW_AT_binary_scale:
          attrs->at_binary_scale = a;
          break;
        case DW_AT_bit_offset:
          attrs->at_bit_offset = a;
          break;
        case DW_AT_bit_size:
          attrs->at_bit_size = a;
          break;
        case DW_AT_bit_stride:
          attrs->at_bit_stride = a;
          break;
        case DW_AT_byte_size:
          attrs->at_byte_size = a;
          break;
        case DW_AT_byte_stride:
          attrs->at_byte_stride = a;
          break;
        case DW_AT_const_value:
          attrs->at_const_value = a;
          break;
        case DW_AT_containing_type:
          attrs->at_containing_type = a;
          break;
        case DW_AT_count:
          attrs->at_count = a;
          break;
        case DW_AT_data_location:
          attrs->at_data_location = a;
          break;
        case DW_AT_data_member_location:
          attrs->at_data_member_location = a;
          break;
        case DW_AT_decimal_scale:
          attrs->at_decimal_scale = a;
          break;
        case DW_AT_decimal_sign:
          attrs->at_decimal_sign = a;
          break;
        case DW_AT_default_value:
          attrs->at_default_value = a;
          break;
        case DW_AT_digit_count:
          attrs->at_digit_count = a;
          break;
        case DW_AT_discr:
          attrs->at_discr = a;
          break;
        case DW_AT_discr_list:
          attrs->at_discr_list = a;
          break;
        case DW_AT_discr_value:
          attrs->at_discr_value = a;
          break;
        case DW_AT_encoding:
          attrs->at_encoding = a;
          break;
        case DW_AT_endianity:
          attrs->at_endianity = a;
          break;
        case DW_AT_explicit:
          attrs->at_explicit = a;
          break;
        case DW_AT_is_optional:
          attrs->at_is_optional = a;
          break;
        case DW_AT_location:
          attrs->at_location = a;
          break;
        case DW_AT_lower_bound:
          attrs->at_lower_bound = a;
          break;
        case DW_AT_mutable:
          attrs->at_mutable = a;
          break;
        case DW_AT_ordering:
          attrs->at_ordering = a;
          break;
        case DW_AT_picture_string:
          attrs->at_picture_string = a;
          break;
        case DW_AT_prototyped:
          attrs->at_prototyped = a;
          break;
        case DW_AT_small:
          attrs->at_small = a;
          break;
        case DW_AT_segment:
          attrs->at_segment = a;
          break;
        case DW_AT_string_length:
          attrs->at_string_length = a;
          break;
	case DW_AT_string_length_bit_size:
	  attrs->at_string_length_bit_size = a;
	  break;
	case DW_AT_string_length_byte_size:
	  attrs->at_string_length_byte_size = a;
	  break;
        case DW_AT_threads_scaled:
          attrs->at_threads_scaled = a;
          break;
        case DW_AT_upper_bound:
          attrs->at_upper_bound = a;
          break;
        case DW_AT_use_location:
          attrs->at_use_location = a;
          break;
        case DW_AT_use_UTF8:
          attrs->at_use_UTF8 = a;
          break;
        case DW_AT_variable_parameter:
          attrs->at_variable_parameter = a;
          break;
        case DW_AT_virtuality:
          attrs->at_virtuality = a;
          break;
        case DW_AT_visibility:
          attrs->at_visibility = a;
          break;
        case DW_AT_vtable_elem_location:
          attrs->at_vtable_elem_location = a;
          break;
        default:
          break;
        }
    }
}
7658 | |
/* Calculate the checksum of a DIE, using an ordered subset of attributes.
   Attributes are checksummed in a fixed canonical order (not the order
   they appear in the DIE) so the signature is stable across producers.
   If the DIE has a DW_AT_specification, the declaration's attributes
   are merged in first, then overridden by the DIE's own.  MARK numbers
   visited DIEs for back references.  */

static void
die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
{
  dw_die_ref c;
  dw_die_ref decl;
  struct checksum_attributes attrs;

  /* 'D': start of a DIE.  */
  CHECKSUM_ULEB128 ('D');
  CHECKSUM_ULEB128 (die->die_tag);

  memset (s: &attrs, c: 0, n: sizeof (attrs));

  decl = get_AT_ref (die, attr_kind: DW_AT_specification);
  if (decl != NULL)
    collect_checksum_attributes (attrs: &attrs, die: decl);
  collect_checksum_attributes (attrs: &attrs, die);

  /* Canonical attribute order; each CHECKSUM_ATTR is a no-op when the
     attribute is absent.  */
  CHECKSUM_ATTR (attrs.at_name);
  CHECKSUM_ATTR (attrs.at_accessibility);
  CHECKSUM_ATTR (attrs.at_address_class);
  CHECKSUM_ATTR (attrs.at_allocated);
  CHECKSUM_ATTR (attrs.at_artificial);
  CHECKSUM_ATTR (attrs.at_associated);
  CHECKSUM_ATTR (attrs.at_binary_scale);
  CHECKSUM_ATTR (attrs.at_bit_offset);
  CHECKSUM_ATTR (attrs.at_bit_size);
  CHECKSUM_ATTR (attrs.at_bit_stride);
  CHECKSUM_ATTR (attrs.at_byte_size);
  CHECKSUM_ATTR (attrs.at_byte_stride);
  CHECKSUM_ATTR (attrs.at_const_value);
  CHECKSUM_ATTR (attrs.at_containing_type);
  CHECKSUM_ATTR (attrs.at_count);
  CHECKSUM_ATTR (attrs.at_data_location);
  CHECKSUM_ATTR (attrs.at_data_member_location);
  CHECKSUM_ATTR (attrs.at_decimal_scale);
  CHECKSUM_ATTR (attrs.at_decimal_sign);
  CHECKSUM_ATTR (attrs.at_default_value);
  CHECKSUM_ATTR (attrs.at_digit_count);
  CHECKSUM_ATTR (attrs.at_discr);
  CHECKSUM_ATTR (attrs.at_discr_list);
  CHECKSUM_ATTR (attrs.at_discr_value);
  CHECKSUM_ATTR (attrs.at_encoding);
  CHECKSUM_ATTR (attrs.at_endianity);
  CHECKSUM_ATTR (attrs.at_explicit);
  CHECKSUM_ATTR (attrs.at_is_optional);
  CHECKSUM_ATTR (attrs.at_location);
  CHECKSUM_ATTR (attrs.at_lower_bound);
  CHECKSUM_ATTR (attrs.at_mutable);
  CHECKSUM_ATTR (attrs.at_ordering);
  CHECKSUM_ATTR (attrs.at_picture_string);
  CHECKSUM_ATTR (attrs.at_prototyped);
  CHECKSUM_ATTR (attrs.at_small);
  CHECKSUM_ATTR (attrs.at_segment);
  CHECKSUM_ATTR (attrs.at_string_length);
  CHECKSUM_ATTR (attrs.at_string_length_bit_size);
  CHECKSUM_ATTR (attrs.at_string_length_byte_size);
  CHECKSUM_ATTR (attrs.at_threads_scaled);
  CHECKSUM_ATTR (attrs.at_upper_bound);
  CHECKSUM_ATTR (attrs.at_use_location);
  CHECKSUM_ATTR (attrs.at_use_UTF8);
  CHECKSUM_ATTR (attrs.at_variable_parameter);
  CHECKSUM_ATTR (attrs.at_virtuality);
  CHECKSUM_ATTR (attrs.at_visibility);
  CHECKSUM_ATTR (attrs.at_vtable_elem_location);
  CHECKSUM_ATTR (attrs.at_type);
  CHECKSUM_ATTR (attrs.at_friend);
  CHECKSUM_ATTR (attrs.at_alignment);

  /* Checksum the child DIEs.  */
  c = die->die_child;
  if (c) do {
    dw_attr_node *name_attr;

    c = c->die_sib;
    name_attr = get_AT (die: c, attr_kind: DW_AT_name);
    if (is_template_instantiation (c))
      {
	/* Ignore instantiations of member type and function templates.  */
      }
    else if (name_attr != NULL
	     && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
      {
	/* Use a shallow checksum for named nested types and member
	   functions.  */
	CHECKSUM_ULEB128 ('S');
	CHECKSUM_ULEB128 (c->die_tag);
	CHECKSUM_STRING (AT_string (name_attr));
      }
    else
      {
	/* Use a deep checksum for other children.  */
	/* Mark this DIE so it gets processed when unmarking.  */
	if (c->die_mark == 0)
	  c->die_mark = -1;
	die_checksum_ordered (die: c, ctx, mark);
      }
  } while (c != die->die_child);

  /* Terminator: no more children.  */
  CHECKSUM_ULEB128 (0);
}
7761 | |
/* Add a type name and tag to a hash.  TAG and NAME together identify
   the type for link-time one-definition-rule checking; the surrounding
   context is checksummed separately by the caller.  */
static void
die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
{
  CHECKSUM_ULEB128 (tag);
  CHECKSUM_STRING (name);
}
7769 | |
7770 | #undef CHECKSUM |
7771 | #undef CHECKSUM_STRING |
7772 | #undef CHECKSUM_ATTR |
7773 | #undef CHECKSUM_LEB128 |
7774 | #undef CHECKSUM_ULEB128 |
7775 | |
/* Generate the type signature for DIE.  This is computed by generating an
   MD5 checksum over the DIE's tag, its relevant attributes, and its
   children.  Attributes that are references to other DIEs are processed
   by recursion, using the MARK field to prevent infinite recursion.
   If the DIE is nested inside a namespace or another type, we also
   need to include that context in the signature.  The lower 64 bits
   of the resulting MD5 checksum comprise the signature.

   Side effects: fills in TYPE_NODE's signature, links DIE (and its
   declaration, if any) to TYPE_NODE, and sets comdat_type_p on them.
   For C++ named types, also attaches a DW_AT_GNU_odr_signature to the
   type unit's root DIE.  */

static void
generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
{
  int mark;
  const char *name;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  dw_die_ref decl;
  dw_die_ref parent;

  name = get_AT_string (die, attr_kind: DW_AT_name);
  decl = get_AT_ref (die, attr_kind: DW_AT_specification);
  parent = get_die_parent (die);

  /* First, compute a signature for just the type name (and its surrounding
     context, if any.  This is stored in the type unit DIE for link-time
     ODR (one-definition rule) checking.  */

  if (is_cxx () && name != NULL)
    {
      md5_init_ctx (ctx: &ctx);

      /* Checksum the names of surrounding namespaces and structures.  */
      if (parent != NULL)
        checksum_die_context (die: parent, ctx: &ctx);

      /* Checksum the current DIE.  */
      die_odr_checksum (tag: die->die_tag, name, ctx: &ctx);
      md5_finish_ctx (ctx: &ctx, resbuf: checksum);

      /* Only the low 8 bytes of the MD5 digest are kept.  */
      add_AT_data8 (die: type_node->root_die, attr_kind: DW_AT_GNU_odr_signature, data8: &checksum[8]);
    }

  /* Next, compute the complete type signature.  */

  md5_init_ctx (ctx: &ctx);
  mark = 1;
  /* Mark DIE itself first so self references become back references.  */
  die->die_mark = mark;

  /* Checksum the names of surrounding namespaces and structures.  */
  if (parent != NULL)
    checksum_die_context (die: parent, ctx: &ctx);

  /* Checksum the DIE and its children.  */
  die_checksum_ordered (die, ctx: &ctx, mark: &mark);
  /* Clear all the marks die_checksum_ordered left behind.  */
  unmark_all_dies (die);
  md5_finish_ctx (ctx: &ctx, resbuf: checksum);

  /* Store the signature in the type node and link the type DIE and the
     type node together.  */
  memcpy (dest: type_node->signature, src: &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
          DWARF_TYPE_SIGNATURE_SIZE);
  die->comdat_type_p = true;
  die->die_id.die_type_node = type_node;
  type_node->type_die = die;

  /* If the DIE is a specification, link its declaration to the type node
     as well.  */
  if (decl != NULL)
    {
      decl->comdat_type_p = true;
      decl->die_id.die_type_node = type_node;
    }
}
7848 | |
7849 | /* Do the location expressions look same? */ |
7850 | static inline bool |
7851 | same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark) |
7852 | { |
7853 | return loc1->dw_loc_opc == loc2->dw_loc_opc |
7854 | && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark) |
7855 | && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark); |
7856 | } |
7857 | |
7858 | /* Do the values look the same? */ |
7859 | static bool |
7860 | same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark) |
7861 | { |
7862 | dw_loc_descr_ref loc1, loc2; |
7863 | rtx r1, r2; |
7864 | |
7865 | if (v1->val_class != v2->val_class) |
7866 | return false; |
7867 | |
7868 | switch (v1->val_class) |
7869 | { |
7870 | case dw_val_class_const: |
7871 | case dw_val_class_const_implicit: |
7872 | return v1->v.val_int == v2->v.val_int; |
7873 | case dw_val_class_unsigned_const: |
7874 | case dw_val_class_unsigned_const_implicit: |
7875 | return v1->v.val_unsigned == v2->v.val_unsigned; |
7876 | case dw_val_class_const_double: |
7877 | return v1->v.val_double.high == v2->v.val_double.high |
7878 | && v1->v.val_double.low == v2->v.val_double.low; |
7879 | case dw_val_class_wide_int: |
7880 | return *v1->v.val_wide == *v2->v.val_wide; |
7881 | case dw_val_class_vec: |
7882 | if (v1->v.val_vec.length != v2->v.val_vec.length |
7883 | || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size) |
7884 | return false; |
7885 | if (memcmp (s1: v1->v.val_vec.array, s2: v2->v.val_vec.array, |
7886 | n: v1->v.val_vec.length * v1->v.val_vec.elt_size)) |
7887 | return false; |
7888 | return true; |
7889 | case dw_val_class_flag: |
7890 | return v1->v.val_flag == v2->v.val_flag; |
7891 | case dw_val_class_str: |
7892 | return !strcmp (s1: v1->v.val_str->str, s2: v2->v.val_str->str); |
7893 | |
7894 | case dw_val_class_addr: |
7895 | r1 = v1->v.val_addr; |
7896 | r2 = v2->v.val_addr; |
7897 | if (GET_CODE (r1) != GET_CODE (r2)) |
7898 | return false; |
7899 | return !rtx_equal_p (r1, r2); |
7900 | |
7901 | case dw_val_class_offset: |
7902 | return v1->v.val_offset == v2->v.val_offset; |
7903 | |
7904 | case dw_val_class_loc: |
7905 | for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc; |
7906 | loc1 && loc2; |
7907 | loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next) |
7908 | if (!same_loc_p (loc1, loc2, mark)) |
7909 | return false; |
7910 | return !loc1 && !loc2; |
7911 | |
7912 | case dw_val_class_die_ref: |
7913 | return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark); |
7914 | |
7915 | case dw_val_class_symview: |
7916 | return strcmp (s1: v1->v.val_symbolic_view, s2: v2->v.val_symbolic_view) == 0; |
7917 | |
7918 | case dw_val_class_fde_ref: |
7919 | case dw_val_class_vms_delta: |
7920 | case dw_val_class_lbl_id: |
7921 | case dw_val_class_lineptr: |
7922 | case dw_val_class_macptr: |
7923 | case dw_val_class_loclistsptr: |
7924 | case dw_val_class_high_pc: |
7925 | return true; |
7926 | |
7927 | case dw_val_class_file: |
7928 | case dw_val_class_file_implicit: |
7929 | return v1->v.val_file == v2->v.val_file; |
7930 | |
7931 | case dw_val_class_data8: |
7932 | return !memcmp (s1: v1->v.val_data8, s2: v2->v.val_data8, n: 8); |
7933 | |
7934 | default: |
7935 | return true; |
7936 | } |
7937 | } |
7938 | |
7939 | /* Do the attributes look the same? */ |
7940 | |
7941 | static bool |
7942 | same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark) |
7943 | { |
7944 | if (at1->dw_attr != at2->dw_attr) |
7945 | return false; |
7946 | |
7947 | /* We don't care that this was compiled with a different compiler |
7948 | snapshot; if the output is the same, that's what matters. */ |
7949 | if (at1->dw_attr == DW_AT_producer) |
7950 | return true; |
7951 | |
7952 | return same_dw_val_p (v1: &at1->dw_attr_val, v2: &at2->dw_attr_val, mark); |
7953 | } |
7954 | |
/* Do the dies look the same?  DIE1 and DIE2 are compared structurally:
   same tag, same attributes in the same order, and recursively equal
   children.  MARK supplies unique marks so that mutually-referencing
   DIEs terminate instead of recursing forever.  */

static bool
same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
{
  dw_die_ref c1, c2;
  dw_attr_node *a1;
  unsigned ix;

  /* To avoid infinite recursion.  A previously visited DIE1 compares
     equal iff DIE2 was visited at the same step (same mark number).  */
  if (die1->die_mark)
    return die1->die_mark == die2->die_mark;
  die1->die_mark = die2->die_mark = ++(*mark);

  if (die1->die_tag != die2->die_tag)
    return false;

  if (vec_safe_length (v: die1->die_attr) != vec_safe_length (v: die2->die_attr))
    return false;

  /* Attributes must match pairwise, in order.  */
  FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
    if (!same_attr_p (at1: a1, at2: &(*die2->die_attr)[ix], mark))
      return false;

  /* Children form a circular list rooted at die_child; walk both lists
     in lockstep until one wraps around to the start.  */
  c1 = die1->die_child;
  c2 = die2->die_child;
  if (! c1)
    {
      if (c2)
	return false;
    }
  else
    for (;;)
      {
	if (!same_die_p (die1: c1, die2: c2, mark))
	  return false;
	c1 = c1->die_sib;
	c2 = c2->die_sib;
	/* DIE1's children are exhausted; equal only if DIE2's are too.  */
	if (c1 == die1->die_child)
	  {
	    if (c2 == die2->die_child)
	      break;
	    else
	      return false;
	  }
      }

  return true;
}
8004 | |
8005 | /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its |
8006 | children, and set die_symbol. */ |
8007 | |
8008 | static void |
8009 | compute_comp_unit_symbol (dw_die_ref unit_die) |
8010 | { |
8011 | const char *die_name = get_AT_string (die: unit_die, attr_kind: DW_AT_name); |
8012 | const char *base = die_name ? lbasename (die_name) : "anonymous" ; |
8013 | char *name = XALLOCAVEC (char, strlen (base) + 64); |
8014 | char *p; |
8015 | int i, mark; |
8016 | unsigned char checksum[16]; |
8017 | struct md5_ctx ctx; |
8018 | |
8019 | /* Compute the checksum of the DIE, then append part of it as hex digits to |
8020 | the name filename of the unit. */ |
8021 | |
8022 | md5_init_ctx (ctx: &ctx); |
8023 | mark = 0; |
8024 | die_checksum (die: unit_die, ctx: &ctx, mark: &mark); |
8025 | unmark_all_dies (unit_die); |
8026 | md5_finish_ctx (ctx: &ctx, resbuf: checksum); |
8027 | |
8028 | /* When we this for comp_unit_die () we have a DW_AT_name that might |
8029 | not start with a letter but with anything valid for filenames and |
8030 | clean_symbol_name doesn't fix that up. Prepend 'g' if the first |
8031 | character is not a letter. */ |
8032 | sprintf (s: name, format: "%s%s." , ISALPHA (*base) ? "" : "g" , base); |
8033 | clean_symbol_name (name); |
8034 | |
8035 | p = name + strlen (s: name); |
8036 | for (i = 0; i < 4; i++) |
8037 | { |
8038 | sprintf (s: p, format: "%.2x" , checksum[i]); |
8039 | p += 2; |
8040 | } |
8041 | |
8042 | unit_die->die_id.die_symbol = xstrdup (name); |
8043 | } |
8044 | |
8045 | /* Returns true if DIE represents a type, in the sense of TYPE_P. */ |
8046 | |
8047 | static bool |
8048 | is_type_die (dw_die_ref die) |
8049 | { |
8050 | switch (die->die_tag) |
8051 | { |
8052 | case DW_TAG_array_type: |
8053 | case DW_TAG_class_type: |
8054 | case DW_TAG_interface_type: |
8055 | case DW_TAG_enumeration_type: |
8056 | case DW_TAG_pointer_type: |
8057 | case DW_TAG_reference_type: |
8058 | case DW_TAG_rvalue_reference_type: |
8059 | case DW_TAG_string_type: |
8060 | case DW_TAG_structure_type: |
8061 | case DW_TAG_subroutine_type: |
8062 | case DW_TAG_union_type: |
8063 | case DW_TAG_ptr_to_member_type: |
8064 | case DW_TAG_set_type: |
8065 | case DW_TAG_subrange_type: |
8066 | case DW_TAG_base_type: |
8067 | case DW_TAG_const_type: |
8068 | case DW_TAG_file_type: |
8069 | case DW_TAG_packed_type: |
8070 | case DW_TAG_volatile_type: |
8071 | case DW_TAG_typedef: |
8072 | return true; |
8073 | default: |
8074 | return false; |
8075 | } |
8076 | } |
8077 | |
8078 | /* Returns true iff C is a compile-unit DIE. */ |
8079 | |
8080 | static inline bool |
8081 | is_cu_die (dw_die_ref c) |
8082 | { |
8083 | return c && (c->die_tag == DW_TAG_compile_unit |
8084 | || c->die_tag == DW_TAG_skeleton_unit); |
8085 | } |
8086 | |
8087 | /* Returns true iff C is a unit DIE of some sort. */ |
8088 | |
8089 | static inline bool |
8090 | is_unit_die (dw_die_ref c) |
8091 | { |
8092 | return c && (c->die_tag == DW_TAG_compile_unit |
8093 | || c->die_tag == DW_TAG_partial_unit |
8094 | || c->die_tag == DW_TAG_type_unit |
8095 | || c->die_tag == DW_TAG_skeleton_unit); |
8096 | } |
8097 | |
8098 | /* Returns true iff C is a namespace DIE. */ |
8099 | |
8100 | static inline bool |
8101 | is_namespace_die (dw_die_ref c) |
8102 | { |
8103 | return c && c->die_tag == DW_TAG_namespace; |
8104 | } |
8105 | |
8106 | /* Return true if this DIE is a template parameter. */ |
8107 | |
8108 | static inline bool |
8109 | is_template_parameter (dw_die_ref die) |
8110 | { |
8111 | switch (die->die_tag) |
8112 | { |
8113 | case DW_TAG_template_type_param: |
8114 | case DW_TAG_template_value_param: |
8115 | case DW_TAG_GNU_template_template_param: |
8116 | case DW_TAG_GNU_template_parameter_pack: |
8117 | return true; |
8118 | default: |
8119 | return false; |
8120 | } |
8121 | } |
8122 | |
8123 | /* Return true if this DIE represents a template instantiation. */ |
8124 | |
8125 | static inline bool |
8126 | is_template_instantiation (dw_die_ref die) |
8127 | { |
8128 | dw_die_ref c; |
8129 | |
8130 | if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram) |
8131 | return false; |
8132 | FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true); |
8133 | return false; |
8134 | } |
8135 | |
/* Generate a fresh internal (assembler-local) symbol name with the given
   PREFIX, using the global label_num counter to make it unique.  Returns
   a heap-allocated copy owned by the caller.  */
static char *
gen_internal_sym (const char *prefix)
{
  char buf[MAX_ARTIFICIAL_LABEL_BYTES];

  ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
  return xstrdup (buf);
}
8144 | |
8145 | /* Return true if this DIE is a declaration. */ |
8146 | |
8147 | static bool |
8148 | is_declaration_die (dw_die_ref die) |
8149 | { |
8150 | dw_attr_node *a; |
8151 | unsigned ix; |
8152 | |
8153 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
8154 | if (a->dw_attr == DW_AT_declaration) |
8155 | return true; |
8156 | |
8157 | return false; |
8158 | } |
8159 | |
8160 | /* Return true if this DIE is nested inside a subprogram. */ |
8161 | |
8162 | static bool |
8163 | is_nested_in_subprogram (dw_die_ref die) |
8164 | { |
8165 | dw_die_ref decl = get_AT_ref (die, attr_kind: DW_AT_specification); |
8166 | |
8167 | if (decl == NULL) |
8168 | decl = die; |
8169 | return local_scope_p (decl); |
8170 | } |
8171 | |
8172 | /* Return true if this DIE contains a defining declaration of a |
8173 | subprogram. */ |
8174 | |
8175 | static bool |
8176 | contains_subprogram_definition (dw_die_ref die) |
8177 | { |
8178 | dw_die_ref c; |
8179 | |
8180 | if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die)) |
8181 | return true; |
8182 | FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1); |
8183 | return false; |
8184 | } |
8185 | |
/* Return true if this is a type DIE that should be moved to a
   COMDAT .debug_types section or .debug_info section with DW_UT_*type
   unit type.  Only aggregate and enumeration types are candidates;
   everything else stays in the main compile unit.  */

static bool
should_move_die_to_comdat (dw_die_ref die)
{
  switch (die->die_tag)
    {
    case DW_TAG_class_type:
    case DW_TAG_structure_type:
    case DW_TAG_enumeration_type:
    case DW_TAG_union_type:
      /* Don't move declarations, inlined instances, types nested in a
	 subprogram, or types that contain subprogram definitions.  */
      if (is_declaration_die (die)
	  || get_AT (die, attr_kind: DW_AT_abstract_origin)
	  || is_nested_in_subprogram (die)
	  || contains_subprogram_definition (die))
	return false;
      return true;
    /* The remaining type tags are listed explicitly for documentation
       purposes; they all fall through to the default and are never
       moved to a comdat unit.  */
    case DW_TAG_array_type:
    case DW_TAG_interface_type:
    case DW_TAG_pointer_type:
    case DW_TAG_reference_type:
    case DW_TAG_rvalue_reference_type:
    case DW_TAG_string_type:
    case DW_TAG_subroutine_type:
    case DW_TAG_ptr_to_member_type:
    case DW_TAG_set_type:
    case DW_TAG_subrange_type:
    case DW_TAG_base_type:
    case DW_TAG_const_type:
    case DW_TAG_file_type:
    case DW_TAG_packed_type:
    case DW_TAG_volatile_type:
    case DW_TAG_typedef:
    default:
      return false;
    }
}
8227 | |
8228 | /* Make a clone of DIE. */ |
8229 | |
8230 | static dw_die_ref |
8231 | clone_die (dw_die_ref die) |
8232 | { |
8233 | dw_die_ref clone = new_die_raw (tag_value: die->die_tag); |
8234 | dw_attr_node *a; |
8235 | unsigned ix; |
8236 | |
8237 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
8238 | add_dwarf_attr (die: clone, attr: a); |
8239 | |
8240 | return clone; |
8241 | } |
8242 | |
8243 | /* Make a clone of the tree rooted at DIE. */ |
8244 | |
8245 | static dw_die_ref |
8246 | clone_tree (dw_die_ref die) |
8247 | { |
8248 | dw_die_ref c; |
8249 | dw_die_ref clone = clone_die (die); |
8250 | |
8251 | FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c))); |
8252 | |
8253 | return clone; |
8254 | } |
8255 | |
/* Make a clone of DIE as a declaration: a copy carrying only the
   attributes that are meaningful for a declaration, plus
   DW_AT_declaration itself (and a DW_AT_signature back-reference when
   DIE lives in a comdat type unit).  */

static dw_die_ref
clone_as_declaration (dw_die_ref die)
{
  dw_die_ref clone;
  dw_die_ref decl;
  dw_attr_node *a;
  unsigned ix;

  /* If the DIE is already a declaration, just clone it.  */
  if (is_declaration_die (die))
    return clone_die (die);

  /* If the DIE is a specification, just clone its declaration DIE.  */
  decl = get_AT_ref (die, attr_kind: DW_AT_specification);
  if (decl != NULL)
    {
      clone = clone_die (die: decl);
      if (die->comdat_type_p)
	add_AT_die_ref (die: clone, attr_kind: DW_AT_signature, targ_die: die);
      return clone;
    }

  clone = new_die_raw (tag_value: die->die_tag);

  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    {
      /* We don't want to copy over all attributes.
	 For example we don't want DW_AT_byte_size because otherwise we will no
	 longer have a declaration and GDB will treat it as a definition.  */

      switch (a->dw_attr)
	{
	/* Identity and type attributes that remain valid on a
	   declaration are copied over.  */
	case DW_AT_abstract_origin:
	case DW_AT_artificial:
	case DW_AT_containing_type:
	case DW_AT_external:
	case DW_AT_name:
	case DW_AT_type:
	case DW_AT_virtuality:
	case DW_AT_linkage_name:
	case DW_AT_MIPS_linkage_name:
	  add_dwarf_attr (die: clone, attr: a);
	  break;
	/* Size/alignment attributes (and anything unlisted) are
	   deliberately dropped so the clone stays a declaration.  */
	case DW_AT_byte_size:
	case DW_AT_alignment:
	default:
	  break;
	}
    }

  /* Point back to the comdat type unit so references can be resolved.  */
  if (die->comdat_type_p)
    add_AT_die_ref (die: clone, attr_kind: DW_AT_signature, targ_die: die);

  add_AT_flag (die: clone, attr_kind: DW_AT_declaration, flag: 1);
  return clone;
}
8314 | |
8315 | |
/* Structure to map a DIE in one CU to its copy in a comdat type unit.  */

struct decl_table_entry
{
  dw_die_ref orig;	/* The original DIE in the main compile unit.  */
  dw_die_ref copy;	/* Its copy placed in the comdat type unit.  */
};
8323 | |
/* Helpers to manipulate hash table of copied declarations.  */

/* Hashtable helpers.  Entries are keyed by the original DIE pointer, so
   lookups compare a decl_table_entry against a die_struct.  */

struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
{
  typedef die_struct *compare_type;
  static inline hashval_t hash (const decl_table_entry *);
  static inline bool equal (const decl_table_entry *, const die_struct *);
};
8334 | |
/* Hash an entry by the address of the original DIE.  */
inline hashval_t
decl_table_entry_hasher::hash (const decl_table_entry *entry)
{
  return htab_hash_pointer (entry->orig);
}
8340 | |
/* An entry matches a lookup key iff its original DIE is that key.  */
inline bool
decl_table_entry_hasher::equal (const decl_table_entry *entry1,
                                const die_struct *entry2)
{
  return entry1->orig == entry2;
}
8347 | |
8348 | typedef hash_table<decl_table_entry_hasher> decl_hash_type; |
8349 | |
/* Copy DIE and its ancestors, up to, but not including, the compile unit
   or type unit entry, to a new tree.  Adds the new tree to UNIT and returns
   a pointer to the copy of DIE.  If DECL_TABLE is provided, it is used
   to check if the ancestor has already been copied into UNIT.  */

static dw_die_ref
copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
		    decl_hash_type *decl_table)
{
  dw_die_ref parent = die->die_parent;
  dw_die_ref new_parent = unit;
  dw_die_ref copy;
  decl_table_entry **slot = NULL;
  struct decl_table_entry *entry = NULL;

  /* If DIE refers to a stub unfold that so we get the appropriate
     DIE registered as orig in decl_table.  */
  if (dw_die_ref c = get_AT_ref (die, attr_kind: DW_AT_signature))
    die = c;

  if (decl_table)
    {
      /* Check if the entry has already been copied to UNIT.  */
      slot = decl_table->find_slot_with_hash (comparable: die, hash: htab_hash_pointer (die),
					      insert: INSERT);
      if (*slot != HTAB_EMPTY_ENTRY)
	{
	  entry = *slot;
	  return entry->copy;
	}

      /* Record in DECL_TABLE that DIE has been copied to UNIT.  Note the
	 entry is inserted with a NULL copy *before* recursing so the
	 recursion below cannot insert a duplicate.  */
      entry = XCNEW (struct decl_table_entry);
      entry->orig = die;
      entry->copy = NULL;
      *slot = entry;
    }

  if (parent != NULL)
    {
      /* An ancestor with a DW_AT_specification is copied via the
	 declaration it points at.  */
      dw_die_ref spec = get_AT_ref (die: parent, attr_kind: DW_AT_specification);
      if (spec != NULL)
	parent = spec;
      /* Recurse until we reach the enclosing unit DIE, which becomes
	 the root of the copied ancestor chain.  */
      if (!is_unit_die (c: parent))
	new_parent = copy_ancestor_tree (unit, die: parent, decl_table);
    }

  copy = clone_as_declaration (die);
  add_child_die (die: new_parent, child_die: copy);

  if (decl_table)
    {
      /* Record the pointer to the copy.  */
      entry->copy = copy;
    }

  return copy;
}
/* Copy the declaration context to the new type unit DIE.  This includes
   any surrounding namespace or type declarations.  If the DIE has an
   AT_specification attribute, it also includes attributes and children
   attached to the specification, and returns a pointer to the original
   parent of the declaration DIE.  Returns NULL otherwise.  */

static dw_die_ref
copy_declaration_context (dw_die_ref unit, dw_die_ref die)
{
  dw_die_ref decl;
  dw_die_ref new_decl;
  dw_die_ref orig_parent = NULL;

  decl = get_AT_ref (die, attr_kind: DW_AT_specification);
  if (decl == NULL)
    decl = die;
  else
    {
      unsigned ix;
      dw_die_ref c;
      dw_attr_node *a;

      /* The original DIE will be changed to a declaration, and must
	 be moved to be a child of the original declaration DIE.  */
      orig_parent = decl->die_parent;

      /* Copy the type node pointer from the new DIE to the original
	 declaration DIE so we can forward references later.  */
      decl->comdat_type_p = true;
      decl->die_id.die_type_node = die->die_id.die_type_node;

      /* DIE absorbs its specification, so the link is no longer
	 needed.  */
      remove_AT (die, attr_kind: DW_AT_specification);

      /* Merge the declaration's attributes into DIE, except those that
	 a definition already provides.  */
      FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
	{
	  if (a->dw_attr != DW_AT_name
	      && a->dw_attr != DW_AT_declaration
	      && a->dw_attr != DW_AT_external)
	    add_dwarf_attr (die, attr: a);
	}

      /* Bring the declaration's children along as well.  */
      FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
    }

  /* If the (possibly substituted) declaration is nested below the unit,
     reproduce its ancestor chain inside UNIT and link DIE to the copied
     declaration via DW_AT_specification.  */
  if (decl->die_parent != NULL
      && !is_unit_die (c: decl->die_parent))
    {
      new_decl = copy_ancestor_tree (unit, die: decl, NULL);
      if (new_decl != NULL)
	{
	  remove_AT (die: new_decl, attr_kind: DW_AT_signature);
	  add_AT_specification (die, targ_die: new_decl);
	}
    }

  return orig_parent;
}
8465 | |
8466 | /* Generate the skeleton ancestor tree for the given NODE, then clone |
8467 | the DIE and add the clone into the tree. */ |
8468 | |
8469 | static void |
8470 | generate_skeleton_ancestor_tree (skeleton_chain_node *node) |
8471 | { |
8472 | if (node->new_die != NULL) |
8473 | return; |
8474 | |
8475 | node->new_die = clone_as_declaration (die: node->old_die); |
8476 | |
8477 | if (node->parent != NULL) |
8478 | { |
8479 | generate_skeleton_ancestor_tree (node: node->parent); |
8480 | add_child_die (die: node->parent->new_die, child_die: node->new_die); |
8481 | } |
8482 | } |
8483 | |
/* Generate a skeleton tree of DIEs containing any declarations that are
   found in the original tree.  We traverse the tree looking for declaration
   DIEs, and construct the skeleton from the bottom up whenever we find one.  */

static void
generate_skeleton_bottom_up (skeleton_chain_node *parent)
{
  skeleton_chain_node node;
  dw_die_ref c;
  dw_die_ref first;
  dw_die_ref prev = NULL;
  dw_die_ref next = NULL;

  node.parent = parent;

  /* Children form a circular list; PREV lags one element behind C so
     that C can be removed or replaced in place.  NEXT is fetched before
     any surgery on C so the walk survives C being moved.  */
  first = c = parent->old_die->die_child;
  if (c)
    next = c->die_sib;
  if (c) do {
    /* Only advance PREV when the previous iteration left C in the
       list (i.e. it was not removed or replaced).  */
    if (prev == NULL || prev->die_sib == c)
      prev = c;
    c = next;
    next = (c == first ? NULL : c->die_sib);
    node.old_die = c;
    node.new_die = NULL;
    if (is_declaration_die (die: c))
      {
	if (is_template_instantiation (die: c))
	  {
	    /* Instantiated templates do not need to be cloned into the
	       type unit.  Just move the DIE and its children back to
	       the skeleton tree (in the main CU).  */
	    remove_child_with_prev (child: c, prev);
	    add_child_die (die: parent->new_die, child_die: c);
	    /* C was removed from the list, so back up to PREV.  */
	    c = prev;
	  }
	else if (c->comdat_type_p)
	  {
	    /* This is the skeleton of earlier break_out_comdat_types
	       type.  Clone the existing DIE, but keep the children
	       under the original (which is in the main CU).  */
	    dw_die_ref clone = clone_die (die: c);

	    replace_child (old_child: c, new_child: clone, prev);
	    generate_skeleton_ancestor_tree (node: parent);
	    add_child_die (die: parent->new_die, child_die: c);
	    c = clone;
	    /* The children stayed with the original; do not recurse
	       into the clone.  */
	    continue;
	  }
	else
	  {
	    /* Clone the existing DIE, move the original to the skeleton
	       tree (which is in the main CU), and put the clone, with
	       all the original's children, where the original came from
	       (which is about to be moved to the type unit).  */
	    dw_die_ref clone = clone_die (die: c);
	    move_all_children (old_parent: c, new_parent: clone);

	    /* If the original has a DW_AT_object_pointer attribute,
	       it would now point to a child DIE just moved to the
	       cloned tree, so we need to remove that attribute from
	       the original.  */
	    remove_AT (die: c, attr_kind: DW_AT_object_pointer);

	    replace_child (old_child: c, new_child: clone, prev);
	    generate_skeleton_ancestor_tree (node: parent);
	    add_child_die (die: parent->new_die, child_die: c);
	    node.old_die = clone;
	    node.new_die = c;
	    c = clone;
	  }
      }
    /* Recurse into C's (possibly replaced) children.  */
    generate_skeleton_bottom_up (parent: &node);
  } while (next != NULL);
}
8559 | |
8560 | /* Wrapper function for generate_skeleton_bottom_up. */ |
8561 | |
8562 | static dw_die_ref |
8563 | generate_skeleton (dw_die_ref die) |
8564 | { |
8565 | skeleton_chain_node node; |
8566 | |
8567 | node.old_die = die; |
8568 | node.new_die = NULL; |
8569 | node.parent = NULL; |
8570 | |
8571 | /* If this type definition is nested inside another type, |
8572 | and is not an instantiation of a template, always leave |
8573 | at least a declaration in its place. */ |
8574 | if (die->die_parent != NULL |
8575 | && is_type_die (die: die->die_parent) |
8576 | && !is_template_instantiation (die)) |
8577 | node.new_die = clone_as_declaration (die); |
8578 | |
8579 | generate_skeleton_bottom_up (parent: &node); |
8580 | return node.new_die; |
8581 | } |
8582 | |
/* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
   declaration.  The original DIE is moved to a new compile unit so that
   existing references to it follow it to the new location.  If any of the
   original DIE's descendants is a declaration, we need to replace the
   original DIE with a skeleton tree and move the declarations back into the
   skeleton tree.  Returns the skeleton DIE left in CHILD's place, or NULL
   if CHILD was removed outright.  */

static dw_die_ref
remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
				       dw_die_ref prev)
{
  dw_die_ref skeleton, orig_parent;

  /* Copy the declaration context to the type unit DIE.  If the returned
     ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
     that DIE.  */
  orig_parent = copy_declaration_context (unit, die: child);

  skeleton = generate_skeleton (die: child);
  if (skeleton == NULL)
    remove_child_with_prev (child, prev);
  else
    {
      /* Link the skeleton to the same comdat type unit as CHILD.  */
      skeleton->comdat_type_p = true;
      skeleton->die_id.die_type_node = child->die_id.die_type_node;

      /* If the original DIE was a specification, we need to put
	 the skeleton under the parent DIE of the declaration.
	 This leaves the original declaration in the tree, but
	 it will be pruned later since there are no longer any
	 references to it.  */
      if (orig_parent != NULL)
	{
	  remove_child_with_prev (child, prev);
	  add_child_die (die: orig_parent, child_die: skeleton);
	}
      else
	replace_child (old_child: child, new_child: skeleton, prev);
    }

  return skeleton;
}
8625 | |
/* Forward declaration: copy_dwarf_procedure and this function are
   mutually recursive.  See the definition below.  */
static void
copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
			       comdat_type_node *type_node,
			       hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8630 | |
8631 | /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF |
8632 | procedure, put it under TYPE_NODE and return the copy. Continue looking for |
8633 | DWARF procedure references in the DW_AT_location attribute. */ |
8634 | |
8635 | static dw_die_ref |
8636 | copy_dwarf_procedure (dw_die_ref die, |
8637 | comdat_type_node *type_node, |
8638 | hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs) |
8639 | { |
8640 | gcc_assert (die->die_tag == DW_TAG_dwarf_procedure); |
8641 | |
8642 | /* DWARF procedures are not supposed to have children... */ |
8643 | gcc_assert (die->die_child == NULL); |
8644 | |
8645 | /* ... and they are supposed to have only one attribute: DW_AT_location. */ |
8646 | gcc_assert (vec_safe_length (die->die_attr) == 1 |
8647 | && ((*die->die_attr)[0].dw_attr == DW_AT_location)); |
8648 | |
8649 | /* Do not copy more than once DWARF procedures. */ |
8650 | bool existed; |
8651 | dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (k: die, existed: &existed); |
8652 | if (existed) |
8653 | return die_copy; |
8654 | |
8655 | die_copy = clone_die (die); |
8656 | add_child_die (die: type_node->root_die, child_die: die_copy); |
8657 | copy_dwarf_procs_ref_in_attrs (die: die_copy, type_node, copied_dwarf_procs); |
8658 | return die_copy; |
8659 | } |
8660 | |
8661 | /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF |
8662 | procedures in DIE's attributes. */ |
8663 | |
8664 | static void |
8665 | copy_dwarf_procs_ref_in_attrs (dw_die_ref die, |
8666 | comdat_type_node *type_node, |
8667 | hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs) |
8668 | { |
8669 | dw_attr_node *a; |
8670 | unsigned i; |
8671 | |
8672 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a) |
8673 | { |
8674 | dw_loc_descr_ref loc; |
8675 | |
8676 | if (a->dw_attr_val.val_class != dw_val_class_loc) |
8677 | continue; |
8678 | |
8679 | for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next) |
8680 | { |
8681 | switch (loc->dw_loc_opc) |
8682 | { |
8683 | case DW_OP_call2: |
8684 | case DW_OP_call4: |
8685 | case DW_OP_call_ref: |
8686 | gcc_assert (loc->dw_loc_oprnd1.val_class |
8687 | == dw_val_class_die_ref); |
8688 | loc->dw_loc_oprnd1.v.val_die_ref.die |
8689 | = copy_dwarf_procedure (die: loc->dw_loc_oprnd1.v.val_die_ref.die, |
8690 | type_node, |
8691 | copied_dwarf_procs); |
8692 | |
8693 | default: |
8694 | break; |
8695 | } |
8696 | } |
8697 | } |
8698 | } |
8699 | |
/* Copy DWARF procedures that are referenced by the DIE tree to TREE_NODE and
   rewrite references to point to the copies.

   References are looked for in DIE's attributes and recursively in all its
   children attributes that are location descriptions.  COPIED_DWARF_PROCS is a
   mapping from old DWARF procedures to their copy.  It is used not to copy
   twice the same DWARF procedure under TYPE_NODE.  */

static void
copy_dwarf_procs_ref_in_dies (dw_die_ref die,
			      comdat_type_node *type_node,
			      hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
{
  /* Process this DIE's own attributes, then recurse into each child.  */
  dw_die_ref c;

  copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
  FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
							type_node,
							copied_dwarf_procs));
}
8720 | |
/* Traverse the DIE and set up additional .debug_types or .debug_info
   DW_UT_*type sections for each type worthy of being placed in a COMDAT
   section.  */

static void
break_out_comdat_types (dw_die_ref die)
{
  dw_die_ref c;
  dw_die_ref first;
  dw_die_ref prev = NULL;
  dw_die_ref next = NULL;
  dw_die_ref unit = NULL;

  /* Children form a circular list; PREV lags one element behind C so
     that C can be removed or replaced in place, and NEXT is fetched
     before the surgery so the walk survives C being moved.  */
  first = c = die->die_child;
  if (c)
    next = c->die_sib;
  if (c) do {
    /* Only advance PREV when the previous iteration left C in place.  */
    if (prev == NULL || prev->die_sib == c)
      prev = c;
    c = next;
    next = (c == first ? NULL : c->die_sib);
    if (should_move_die_to_comdat (die: c))
      {
	dw_die_ref replacement;
	comdat_type_node *type_node;

	/* Break out nested types into their own type units.  */
	break_out_comdat_types (die: c);

	/* Create a new type unit DIE as the root for the new tree.  */
	unit = new_die (tag_value: DW_TAG_type_unit, NULL, NULL);
	add_AT_unsigned (die: unit, attr_kind: DW_AT_language,
			 unsigned_val: get_AT_unsigned (die: comp_unit_die (), attr_kind: DW_AT_language));

	/* Add the new unit's type DIE into the comdat type list.  */
	type_node = ggc_cleared_alloc<comdat_type_node> ();
	type_node->root_die = unit;
	type_node->next = comdat_type_list;
	comdat_type_list = type_node;

	/* Generate the type signature.  */
	generate_type_signature (die: c, type_node);

	/* Copy the declaration context, attributes, and children of the
	   declaration into the new type unit DIE, then remove this DIE
	   from the main CU (or replace it with a skeleton if necessary).  */
	replacement = remove_child_or_replace_with_skeleton (unit, child: c, prev);
	type_node->skeleton_die = replacement;

	/* Add the DIE to the new compunit.  */
	add_child_die (die: unit, child_die: c);

	/* Types can reference DWARF procedures for type size or data location
	   expressions.  Calls in DWARF expressions cannot target procedures
	   that are not in the same section.  So we must copy DWARF procedures
	   along with this type and then rewrite references to them.  */
	hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
	copy_dwarf_procs_ref_in_dies (die: c, type_node, copied_dwarf_procs);

	/* Continue the walk from the skeleton left in C's old place,
	   if any; otherwise C was removed and PREV still anchors us.  */
	if (replacement != NULL)
	  c = replacement;
      }
    else if (c->die_tag == DW_TAG_namespace
	     || c->die_tag == DW_TAG_class_type
	     || c->die_tag == DW_TAG_structure_type
	     || c->die_tag == DW_TAG_union_type)
      {
	/* Look for nested types that can be broken out.  */
	break_out_comdat_types (die: c);
      }
  } while (next != NULL);
}
8793 | |
/* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
   Enter all the cloned children into the hash table decl_table.
   Returns the clone of DIE; the caller attaches it to the new tree.  */

static dw_die_ref
clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
{
  dw_die_ref c;
  dw_die_ref clone;
  struct decl_table_entry *entry;
  decl_table_entry **slot;

  /* Subprograms are copied as bare declarations; everything else is
     cloned with all of its attributes.  */
  if (die->die_tag == DW_TAG_subprogram)
    clone = clone_as_declaration (die);
  else
    clone = clone_die (die);

  slot = decl_table->find_slot_with_hash (comparable: die,
					  hash: htab_hash_pointer (die), insert: INSERT);

  /* Assert that DIE isn't in the hash table yet.  If it would be there
     before, the ancestors would be necessarily there as well, therefore
     clone_tree_partial wouldn't be called.  */
  gcc_assert (*slot == HTAB_EMPTY_ENTRY);

  /* Record the ORIG -> COPY mapping so later references to DIE can be
     redirected to the clone.  */
  entry = XCNEW (struct decl_table_entry);
  entry->orig = die;
  entry->copy = clone;
  *slot = entry;

  /* Recurse into children, except below a subprogram, whose children
     are deliberately not copied into the declaration clone.  */
  if (die->die_tag != DW_TAG_subprogram)
    FOR_EACH_CHILD (die, c,
		    add_child_die (clone, clone_tree_partial (c, decl_table)));

  return clone;
}
8829 | |
/* Walk the DIE and its children, looking for references to incomplete
   or trivial types that are unmarked (i.e., that are not in the current
   type_unit).  Each such referenced type is copied (with ancestors) into
   UNIT and the reference redirected, so the type unit has no external
   reference to an unsigned type.  DECL_TABLE maps already-copied DIEs
   to their copies.  */

static void
copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
{
  dw_die_ref c;
  dw_attr_node *a;
  unsigned ix;

  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    {
      if (AT_class (a) == dw_val_class_die_ref)
	{
	  dw_die_ref targ = AT_ref (a);
	  decl_table_entry **slot;
	  struct decl_table_entry *entry;

	  /* Marked DIEs are already part of this unit; comdat types
	     live in their own type units and are referenced by
	     signature, so neither needs copying.  */
	  if (targ->die_mark != 0 || targ->comdat_type_p)
	    continue;

	  slot = decl_table->find_slot_with_hash (comparable: targ,
						  hash: htab_hash_pointer (targ),
						  insert: INSERT);

	  if (*slot != HTAB_EMPTY_ENTRY)
	    {
	      /* TARG has already been copied, so we just need to
		 modify the reference to point to the copy.  */
	      entry = *slot;
	      a->dw_attr_val.v.val_die_ref.die = entry->copy;
	    }
	  else
	    {
	      dw_die_ref parent = unit;
	      dw_die_ref copy = clone_die (die: targ);

	      /* Record in DECL_TABLE that TARG has been copied.
		 Need to do this now, before the recursive call,
		 because DECL_TABLE may be expanded and SLOT
		 would no longer be a valid pointer.  */
	      entry = XCNEW (struct decl_table_entry);
	      entry->orig = targ;
	      entry->copy = copy;
	      *slot = entry;

	      /* If TARG is not a declaration DIE, we need to copy its
		 children.  */
	      if (!is_declaration_die (die: targ))
		{
		  FOR_EACH_CHILD (
		    targ, c,
		    add_child_die (copy,
				   clone_tree_partial (c, decl_table)));
		}

	      /* Make sure the cloned tree is marked as part of the
		 type unit.  */
	      mark_dies (copy);

	      /* If TARG has surrounding context, copy its ancestor tree
		 into the new type unit.  */
	      if (targ->die_parent != NULL
		  && !is_unit_die (c: targ->die_parent))
		parent = copy_ancestor_tree (unit, die: targ->die_parent,
					     decl_table);

	      add_child_die (die: parent, child_die: copy);
	      a->dw_attr_val.v.val_die_ref.die = copy;

	      /* Make sure the newly-copied DIE is walked.  If it was
		 installed in a previously-added context, it won't
		 get visited otherwise.  */
	      if (parent != unit)
		{
		  /* Find the highest point of the newly-added tree,
		     mark each node along the way, and walk from there.  */
		  parent->die_mark = 1;
		  while (parent->die_parent
			 && parent->die_parent->die_mark == 0)
		    {
		      parent = parent->die_parent;
		      parent->die_mark = 1;
		    }
		  copy_decls_walk (unit, die: parent, decl_table);
		}
	    }
	}
    }

  FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
}
8923 | |
/* Collect skeleton dies in DIE created by break_out_comdat_types already
   and record them in DECL_TABLE.  This pre-seeds the table so that
   copy_decls_walk reuses the existing skeletons instead of cloning the
   broken-out types a second time.  */

static void
collect_skeleton_dies (dw_die_ref die, decl_hash_type *decl_table)
{
  dw_die_ref c;

  /* A skeleton DIE is recognized by its DW_AT_signature reference to a
     comdat type that is not part of the current unit (unmarked).  */
  if (dw_attr_node *a = get_AT (die, attr_kind: DW_AT_signature))
    {
      dw_die_ref targ = AT_ref (a);
      gcc_assert (targ->die_mark == 0 && targ->comdat_type_p);
      decl_table_entry **slot
        = decl_table->find_slot_with_hash (comparable: targ,
					   hash: htab_hash_pointer (targ),
					   insert: INSERT);
      /* Each broken-out type should have exactly one skeleton.  */
      gcc_assert (*slot == HTAB_EMPTY_ENTRY);
      /* Record in DECL_TABLE that TARG has been already copied
	 by remove_child_or_replace_with_skeleton.  */
      decl_table_entry *entry = XCNEW (struct decl_table_entry);
      entry->orig = targ;
      entry->copy = die;
      *slot = entry;
    }
  FOR_EACH_CHILD (die, c, collect_skeleton_dies (c, decl_table));
}
8950 | |
8951 | /* Copy declarations for "unworthy" types into the new comdat section. |
8952 | Incomplete types, modified types, and certain other types aren't broken |
8953 | out into comdat sections of their own, so they don't have a signature, |
8954 | and we need to copy the declaration into the same section so that we |
8955 | don't have an external reference. */ |
8956 | |
8957 | static void |
8958 | copy_decls_for_unworthy_types (dw_die_ref unit) |
8959 | { |
8960 | mark_dies (unit); |
8961 | decl_hash_type decl_table (10); |
8962 | collect_skeleton_dies (die: unit, decl_table: &decl_table); |
8963 | copy_decls_walk (unit, die: unit, decl_table: &decl_table); |
8964 | unmark_dies (unit); |
8965 | } |
8966 | |
8967 | /* Traverse the DIE and add a sibling attribute if it may have the |
8968 | effect of speeding up access to siblings. To save some space, |
8969 | avoid generating sibling attributes for DIE's without children. */ |
8970 | |
8971 | static void |
8972 | add_sibling_attributes (dw_die_ref die) |
8973 | { |
8974 | dw_die_ref c; |
8975 | |
8976 | if (! die->die_child) |
8977 | return; |
8978 | |
8979 | if (die->die_parent && die != die->die_parent->die_child) |
8980 | add_AT_die_ref (die, attr_kind: DW_AT_sibling, targ_die: die->die_sib); |
8981 | |
8982 | FOR_EACH_CHILD (die, c, add_sibling_attributes (c)); |
8983 | } |
8984 | |
8985 | /* Output all location lists for the DIE and its children. */ |
8986 | |
8987 | static void |
8988 | output_location_lists (dw_die_ref die) |
8989 | { |
8990 | dw_die_ref c; |
8991 | dw_attr_node *a; |
8992 | unsigned ix; |
8993 | |
8994 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
8995 | if (AT_class (a) == dw_val_class_loc_list) |
8996 | output_loc_list (AT_loc_list (a)); |
8997 | |
8998 | FOR_EACH_CHILD (die, c, output_location_lists (c)); |
8999 | } |
9000 | |
/* During assign_location_list_indexes and output_loclists_offsets this is
   the current index; afterwards it holds the number of assigned indexes
   (i.e. how large the .debug_loclists* offset table should be).  */
9004 | static unsigned int loc_list_idx; |
9005 | |
9006 | /* Output all location list offsets for the DIE and its children. */ |
9007 | |
9008 | static void |
9009 | output_loclists_offsets (dw_die_ref die) |
9010 | { |
9011 | dw_die_ref c; |
9012 | dw_attr_node *a; |
9013 | unsigned ix; |
9014 | |
9015 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
9016 | if (AT_class (a) == dw_val_class_loc_list) |
9017 | { |
9018 | dw_loc_list_ref l = AT_loc_list (a); |
9019 | if (l->offset_emitted) |
9020 | continue; |
9021 | dw2_asm_output_delta (dwarf_offset_size, l->ll_symbol, |
9022 | loc_section_label, NULL); |
9023 | gcc_assert (l->hash == loc_list_idx); |
9024 | loc_list_idx++; |
9025 | l->offset_emitted = true; |
9026 | } |
9027 | |
9028 | FOR_EACH_CHILD (die, c, output_loclists_offsets (c)); |
9029 | } |
9030 | |
9031 | /* Recursively set indexes of location lists. */ |
9032 | |
9033 | static void |
9034 | assign_location_list_indexes (dw_die_ref die) |
9035 | { |
9036 | dw_die_ref c; |
9037 | dw_attr_node *a; |
9038 | unsigned ix; |
9039 | |
9040 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
9041 | if (AT_class (a) == dw_val_class_loc_list) |
9042 | { |
9043 | dw_loc_list_ref list = AT_loc_list (a); |
9044 | if (!list->num_assigned) |
9045 | { |
9046 | list->num_assigned = true; |
9047 | list->hash = loc_list_idx++; |
9048 | } |
9049 | } |
9050 | |
9051 | FOR_EACH_CHILD (die, c, assign_location_list_indexes (c)); |
9052 | } |
9053 | |
/* We want to limit the number of external references, because they are
   larger than local references: a relocation takes multiple words, and
   even a sig8 reference is always eight bytes, whereas a local reference
   can be as small as one byte (though DW_FORM_ref is usually 4 in GCC).
   So if we encounter multiple external references to the same type DIE, we
   make a local typedef stub for it and redirect all references there.

   This is the element of the hash table for keeping track of these
   references.  */

struct external_ref
{
  /* The external type DIE being referenced.  */
  dw_die_ref type;
  /* Local stub (skeleton or unnamed typedef) for TYPE, if any.  */
  dw_die_ref stub;
  /* How many references to TYPE have been seen.  */
  unsigned n_refs;
};
9070 | |
/* Hashtable helpers.  Entries hash and compare on the referenced type
   DIE; see the definitions below.  */

struct external_ref_hasher : free_ptr_hash <external_ref>
{
  static inline hashval_t hash (const external_ref *);
  static inline bool equal (const external_ref *, const external_ref *);
};
9078 | |
/* Hash an external_ref by a run-to-run stable property of its type DIE
   (symbol name or type signature), never by pointer value.  */

inline hashval_t
external_ref_hasher::hash (const external_ref *r)
{
  dw_die_ref die = r->type;
  hashval_t h = 0;

  /* We can't use the address of the DIE for hashing, because
     that will make the order of the stub DIEs non-deterministic.  */
  if (! die->comdat_type_p)
    /* We have a symbol; use it to compute a hash.  */
    h = htab_hash_string (die->die_id.die_symbol);
  else
    {
      /* We have a type signature; use a subset of the bits as the hash.
	 The 8-byte signature is at least as large as hashval_t.  */
      comdat_type_node *type_node = die->die_id.die_type_node;
      memcpy (dest: &h, src: type_node->signature, n: sizeof (h));
    }
  return h;
}
9099 | |
/* Two entries are equal iff they refer to the same type DIE.  */

inline bool
external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
{
  return r1->type == r2->type;
}
9105 | |
typedef hash_table<external_ref_hasher> external_ref_hash_type;

/* Return a pointer to the external_ref for references to DIE.
   Allocates and records a zero-initialized entry on first lookup,
   so callers always get a valid entry back.  */

static struct external_ref *
lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
{
  struct external_ref ref, *ref_p;
  external_ref **slot;

  ref.type = die;
  slot = map->find_slot (value: &ref, insert: INSERT);
  if (*slot != HTAB_EMPTY_ENTRY)
    return *slot;

  /* First time we see DIE: create its counter entry.  */
  ref_p = XCNEW (struct external_ref);
  ref_p->type = die;
  *slot = ref_p;
  return ref_p;
}
9126 | |
/* Subroutine of optimize_external_refs, below.

   If we see a type skeleton, record it as our stub.  If we see external
   references, remember how many we've seen.  Walks DIE and all its
   children, accumulating into MAP.  */

static void
optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
{
  dw_die_ref c;
  dw_attr_node *a;
  unsigned ix;
  struct external_ref *ref_p;

  /* A type DIE with a DW_AT_signature is a local skeleton for the
     referenced comdat type; remember it so we don't build a stub.  */
  if (is_type_die (die)
      && (c = get_AT_ref (die, attr_kind: DW_AT_signature)))
    {
      /* This is a local skeleton; use it for local references.  */
      ref_p = lookup_external_ref (map, die: c);
      ref_p->stub = die;
    }

  /* Scan the DIE references, and remember any that refer to DIEs from
     other CUs (i.e. those which are not marked).  */
  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    if (AT_class (a) == dw_val_class_die_ref
	&& (c = AT_ref (a))->die_mark == 0
	&& is_type_die (die: c))
      {
	ref_p = lookup_external_ref (map, die: c);
	ref_p->n_refs++;
      }

  FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
}
9161 | |
/* htab_traverse callback function for optimize_external_refs, below.  SLOT
   points to an external_ref, DATA is the CU we're processing.  If we don't
   already have a local stub, and we have multiple refs, build a stub.
   Always returns 1 so the traversal continues over all entries.  */

int
dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
{
  struct external_ref *ref_p = *slot;

  /* Only worthwhile with multiple references and no existing skeleton;
     stubs are also skipped in strict-DWARF mode (see comment below).  */
  if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
    {
      /* We have multiple references to this type, so build a small stub.
	 Both of these forms are a bit dodgy from the perspective of the
	 DWARF standard, since technically they should have names.  */
      dw_die_ref cu = data;
      dw_die_ref type = ref_p->type;
      dw_die_ref stub = NULL;

      if (type->comdat_type_p)
	{
	  /* If we refer to this type via sig8, use AT_signature.  */
	  stub = new_die (tag_value: type->die_tag, parent_die: cu, NULL_TREE);
	  add_AT_die_ref (die: stub, attr_kind: DW_AT_signature, targ_die: type);
	}
      else
	{
	  /* Otherwise, use a typedef with no name.  */
	  stub = new_die (tag_value: DW_TAG_typedef, parent_die: cu, NULL_TREE);
	  add_AT_die_ref (die: stub, attr_kind: DW_AT_type, targ_die: type);
	}

      /* Mark the stub so references to it count as local.  */
      stub->die_mark++;
      ref_p->stub = stub;
    }
  return 1;
}
9198 | |
9199 | /* DIE is a unit; look through all the DIE references to see if there are |
9200 | any external references to types, and if so, create local stubs for |
9201 | them which will be applied in build_abbrev_table. This is useful because |
9202 | references to local DIEs are smaller. */ |
9203 | |
9204 | static external_ref_hash_type * |
9205 | optimize_external_refs (dw_die_ref die) |
9206 | { |
9207 | external_ref_hash_type *map = new external_ref_hash_type (10); |
9208 | optimize_external_refs_1 (die, map); |
9209 | map->traverse <dw_die_ref, dwarf2_build_local_stub> (argument: die); |
9210 | return map; |
9211 | } |
9212 | |
/* The following 4 variables are temporaries that are computed only during the
   build_abbrev_table call and used and released during the following
   optimize_abbrev_table call.  */

/* First abbrev_id that can be optimized based on usage.  */
static unsigned int abbrev_opt_start;

/* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
   abbrev_id smaller than this, because they must be already sized
   during build_abbrev_table).  */
static unsigned int abbrev_opt_base_type_end;

/* Vector of usage counts during build_abbrev_table.  Indexed by
   abbrev_id - abbrev_opt_start.  */
static vec<unsigned int> abbrev_usage_count;

/* Vector of all DIEs added with die_abbrev >= abbrev_opt_start,
   later sorted by usage in optimize_abbrev_table.  */
static vec<dw_die_ref> sorted_abbrev_dies;
9231 | |
/* The format of each DIE (and its attribute value pairs) is encoded in an
   abbreviation table.  This routine builds the abbreviation table and assigns
   a unique abbreviation id for each abbreviation entry.  The children of each
   die are visited recursively.  EXTERN_MAP holds the stubs built by
   optimize_external_refs; external type references are redirected to them
   here.  */

static void
build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
{
  unsigned int abbrev_id = 0;
  dw_die_ref c;
  dw_attr_node *a;
  unsigned ix;
  dw_die_ref abbrev;

  /* Scan the DIE references, and replace any that refer to
     DIEs from other CUs (i.e. those which are not marked) with
     the local stubs we built in optimize_external_refs.  */
  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    if (AT_class (a) == dw_val_class_die_ref
	&& (c = AT_ref (a))->die_mark == 0)
      {
	struct external_ref *ref_p;
	gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);

	if (is_type_die (die: c)
	    && (ref_p = lookup_external_ref (map: extern_map, die: c))
	    && ref_p->stub && ref_p->stub != die)
	  {
	    /* DW_AT_signature on a skeleton must keep pointing at the
	       comdat type itself, never at a stub.  */
	    gcc_assert (a->dw_attr != DW_AT_signature);
	    change_AT_die_ref (ref: a, new_die: ref_p->stub);
	  }
	else
	  /* We aren't changing this reference, so mark it external.  */
	  set_AT_ref_external (a, i: 1);
      }

  /* Look for an existing abbreviation with the same tag, has-children
     flag, and exact attribute/form sequence.  */
  FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
    {
      dw_attr_node *die_a, *abbrev_a;
      unsigned ix;
      bool ok = true;

      /* Abbrev id 0 is reserved (terminator), skip it.  */
      if (abbrev_id == 0)
	continue;
      if (abbrev->die_tag != die->die_tag)
	continue;
      if ((abbrev->die_child != NULL) != (die->die_child != NULL))
	continue;

      if (vec_safe_length (v: abbrev->die_attr) != vec_safe_length (v: die->die_attr))
	continue;

      FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
	{
	  abbrev_a = &(*abbrev->die_attr)[ix];
	  if ((abbrev_a->dw_attr != die_a->dw_attr)
	      || (value_format (abbrev_a) != value_format (die_a)))
	    {
	      ok = false;
	      break;
	    }
	}
      if (ok)
	break;
    }

  /* No match: DIE itself becomes the representative of a new
     abbreviation entry.  */
  if (abbrev_id >= vec_safe_length (v: abbrev_die_table))
    {
      vec_safe_push (v&: abbrev_die_table, obj: die);
      if (abbrev_opt_start)
	abbrev_usage_count.safe_push (obj: 0);
    }
  /* Track usage for the later usage-based renumbering.  */
  if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
    {
      abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
      sorted_abbrev_dies.safe_push (obj: die);
    }

  die->die_abbrev = abbrev_id;
  FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
}
9313 | |
9314 | /* Callback function for sorted_abbrev_dies vector sorting. We sort |
9315 | by die_abbrev's usage count, from the most commonly used |
9316 | abbreviation to the least. */ |
9317 | |
9318 | static int |
9319 | die_abbrev_cmp (const void *p1, const void *p2) |
9320 | { |
9321 | dw_die_ref die1 = *(const dw_die_ref *) p1; |
9322 | dw_die_ref die2 = *(const dw_die_ref *) p2; |
9323 | |
9324 | gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start); |
9325 | gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start); |
9326 | |
9327 | if (die1->die_abbrev >= abbrev_opt_base_type_end |
9328 | && die2->die_abbrev >= abbrev_opt_base_type_end) |
9329 | { |
9330 | if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start] |
9331 | > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start]) |
9332 | return -1; |
9333 | if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start] |
9334 | < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start]) |
9335 | return 1; |
9336 | } |
9337 | |
9338 | /* Stabilize the sort. */ |
9339 | if (die1->die_abbrev < die2->die_abbrev) |
9340 | return -1; |
9341 | if (die1->die_abbrev > die2->die_abbrev) |
9342 | return 1; |
9343 | |
9344 | return 0; |
9345 | } |
9346 | |
/* Convert dw_val_class_const and dw_val_class_unsigned_const class attributes
   of the DIEs in between sorted_abbrev_dies[first_id] and
   sorted_abbrev_dies[end - 1] into dw_val_class_const_implicit or
   dw_val_class_unsigned_const_implicit.  */
9351 | |
static void
optimize_implicit_const (unsigned int first_id, unsigned int end,
			 vec<bool> &implicit_consts)
{
  /* It never makes sense if there is just one DIE using the abbreviation.  */
  if (end < first_id + 2)
    return;

  dw_attr_node *a;
  unsigned ix, i;
  /* All DIEs in the range share the same abbreviation, so the first
     one's attribute vector is representative for all of them.  */
  dw_die_ref die = sorted_abbrev_dies[first_id];
  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    if (implicit_consts[ix])
      {
	enum dw_val_class new_class = dw_val_class_none;
	switch (AT_class (a))
	  {
	  case dw_val_class_unsigned_const:
	    /* DW_FORM_implicit_const stores a signed sleb128, so an
	       unsigned value with the high bit set can't be encoded.  */
	    if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
	      continue;

	    /* The .debug_abbrev section will grow by
	       size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
	       in all the DIEs using that abbreviation.  */
	    if (constant_size (AT_unsigned (a)) * (end - first_id)
		<= (unsigned) size_of_sleb128 (AT_unsigned (a)))
	      continue;

	    new_class = dw_val_class_unsigned_const_implicit;
	    break;

	  case dw_val_class_const:
	    new_class = dw_val_class_const_implicit;
	    break;

	  case dw_val_class_file:
	    new_class = dw_val_class_file_implicit;
	    break;

	  default:
	    continue;
	  }
	/* Retag the attribute in every DIE of the range; the shared value
	   will be emitted once in .debug_abbrev instead.  */
	for (i = first_id; i < end; i++)
	  (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
	    = new_class;
      }
}
9399 | |
/* Attempt to optimize abbreviation table from abbrev_opt_start
   abbreviation above.  Reassigns abbreviation ids so the most used
   abbreviations get the smallest (shortest uleb128) ids, and for
   DWARF 5 converts shared constant attributes to implicit-const
   forms.  Releases the build_abbrev_table temporaries when done.  */

static void
optimize_abbrev_table (void)
{
  /* Only worth doing when renumbering can shrink encodings: for
     pre-DWARF5 that needs more than 127 abbrevs (2-byte uleb128s).  */
  if (abbrev_opt_start
      && vec_safe_length (v: abbrev_die_table) > abbrev_opt_start
      && (dwarf_version >= 5 || vec_safe_length (v: abbrev_die_table) > 127))
    {
      /* implicit_consts[ix] tracks whether attribute IX still has the
	 same value in every DIE of the current abbreviation run.  */
      auto_vec<bool, 32> implicit_consts;
      sorted_abbrev_dies.qsort (die_abbrev_cmp);

      unsigned int abbrev_id = abbrev_opt_start - 1;
      unsigned int first_id = ~0U;
      unsigned int last_abbrev_id = 0;
      unsigned int i;
      dw_die_ref die;
      if (abbrev_opt_base_type_end > abbrev_opt_start)
	abbrev_id = abbrev_opt_base_type_end - 1;
      /* Reassign abbreviation ids from abbrev_opt_start above, so that
	 most commonly used abbreviations come first.  */
      FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
	{
	  dw_attr_node *a;
	  unsigned ix;

	  /* If calc_base_type_die_sizes has been called, the CU and
	     base types after it can't be optimized, because we've already
	     calculated their DIE offsets.  We've sorted them first.  */
	  if (die->die_abbrev < abbrev_opt_base_type_end)
	    continue;
	  /* First DIE of a new abbreviation run: finish the previous run
	     and start tracking this one.  */
	  if (die->die_abbrev != last_abbrev_id)
	    {
	      last_abbrev_id = die->die_abbrev;
	      if (dwarf_version >= 5 && first_id != ~0U)
		optimize_implicit_const (first_id, end: i, implicit_consts);
	      abbrev_id++;
	      (*abbrev_die_table)[abbrev_id] = die;
	      if (dwarf_version >= 5)
		{
		  first_id = i;
		  implicit_consts.truncate (size: 0);

		  /* Seed candidate classes; later DIEs may veto.  */
		  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
		    switch (AT_class (a))
		      {
		      case dw_val_class_const:
		      case dw_val_class_unsigned_const:
		      case dw_val_class_file:
			implicit_consts.safe_push (obj: true);
			break;
		      default:
			implicit_consts.safe_push (obj: false);
			break;
		      }
		}
	    }
	  else if (dwarf_version >= 5)
	    {
	      /* Same abbreviation as the run's representative: veto any
		 attribute whose value differs from the representative's.  */
	      FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
		if (!implicit_consts[ix])
		  continue;
		else
		  {
		    dw_attr_node *other_a
		      = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
		    if (!dw_val_equal_p (a: &a->dw_attr_val,
					 b: &other_a->dw_attr_val))
		      implicit_consts[ix] = false;
		  }
	    }
	  die->die_abbrev = abbrev_id;
	}
      gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
      /* Finish the last run.  */
      if (dwarf_version >= 5 && first_id != ~0U)
	optimize_implicit_const (first_id, end: i, implicit_consts);
    }

  /* Release the temporaries computed by build_abbrev_table.  */
  abbrev_opt_start = 0;
  abbrev_opt_base_type_end = 0;
  abbrev_usage_count.release ();
  sorted_abbrev_dies.release ();
}
9484 | |
/* Return the power-of-two number of bytes necessary to represent VALUE:
   1 for values below 2^8, 2 below 2^16, 4 below 2^32, 8 otherwise.  */

static int
constant_size (unsigned HOST_WIDE_INT value)
{
  int log;

  if (value == 0)
    log = 0;
  else
    /* Index of the highest set bit of VALUE.  */
    log = floor_log2 (x: value);

  /* Bit index -> byte index, then round up to the next power of two.
     NOTE(review): for LOG/8 == 0 this relies on floor_log2 (0)
     returning -1 so that 1 << (-1 + 1) == 1 — confirm against GCC's
     floor_log2 definition.  */
  log = log / 8;
  log = 1 << (floor_log2 (x: log) + 1);

  return log;
}
9502 | |
/* Return the size of a DIE as it is represented in the
   .debug_info section.  The size depends on the form chosen for each
   attribute value class (see value_format) and on global emission
   options such as dwarf_version and dwarf_split_debug_info.  */

static unsigned long
size_of_die (dw_die_ref die)
{
  unsigned long size = 0;
  dw_attr_node *a;
  unsigned ix;
  enum dwarf_form form;

  /* Every DIE starts with the uleb128 abbreviation code.  */
  size += size_of_uleb128 (die->die_abbrev);
  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    {
      switch (AT_class (a))
	{
	case dw_val_class_addr:
	  /* Either a uleb128 index into .debug_addr (split DWARF) or a
	     full target address.  */
	  if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
	    {
	      gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
	      size += size_of_uleb128 (AT_index (a));
	    }
	  else
	    size += DWARF2_ADDR_SIZE;
	  break;
	case dw_val_class_offset:
	  size += dwarf_offset_size;
	  break;
	case dw_val_class_loc:
	  {
	    unsigned long lsize = size_of_locs (loc: AT_loc (a));

	    /* Block length.  */
	    if (dwarf_version >= 4)
	      size += size_of_uleb128 (lsize);
	    else
	      size += constant_size (value: lsize);
	    size += lsize;
	  }
	  break;
	case dw_val_class_loc_list:
	  /* DW_FORM_loclistx index, or an offset into .debug_loc.  */
	  if (dwarf_split_debug_info && dwarf_version >= 5)
	    {
	      gcc_assert (AT_loc_list (a)->num_assigned);
	      size += size_of_uleb128 (AT_loc_list (a)->hash);
	    }
	  else
	    size += dwarf_offset_size;
	  break;
	case dw_val_class_view_list:
	  size += dwarf_offset_size;
	  break;
	case dw_val_class_range_list:
	  if (value_format (a) == DW_FORM_rnglistx)
	    {
	      gcc_assert (rnglist_idx);
	      dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
	      size += size_of_uleb128 (r->idx);
	    }
	  else
	    size += dwarf_offset_size;
	  break;
	case dw_val_class_const:
	  size += size_of_sleb128 (AT_int (a));
	  break;
	case dw_val_class_unsigned_const:
	  {
	    int csize = constant_size (value: AT_unsigned (a));
	    /* DWARF 3 has no 8-byte data form for
	       DW_AT_data_member_location, so a uleb128 is used instead.  */
	    if (dwarf_version == 3
		&& a->dw_attr == DW_AT_data_member_location
		&& csize >= 4)
	      size += size_of_uleb128 (AT_unsigned (a));
	    else
	      size += csize;
	  }
	  break;
	case dw_val_class_symview:
	  /* Smallest fixed-size data form that fits the largest view.  */
	  if (symview_upper_bound <= 0xff)
	    size += 1;
	  else if (symview_upper_bound <= 0xffff)
	    size += 2;
	  else if (symview_upper_bound <= 0xffffffff)
	    size += 4;
	  else
	    size += 8;
	  break;
	case dw_val_class_const_implicit:
	case dw_val_class_unsigned_const_implicit:
	case dw_val_class_file_implicit:
	  /* These occupy no size in the DIE, just an extra sleb128 in
	     .debug_abbrev.  */
	  break;
	case dw_val_class_const_double:
	  size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
	  if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
	    size++; /* block */
	  break;
	case dw_val_class_wide_int:
	  size += (get_full_len (op: *a->dw_attr_val.v.val_wide)
		   * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
	  if (get_full_len (op: *a->dw_attr_val.v.val_wide)
	      * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
	    size++; /* block */
	  break;
	case dw_val_class_vec:
	  size += constant_size (value: a->dw_attr_val.v.val_vec.length
				 * a->dw_attr_val.v.val_vec.elt_size)
		  + a->dw_attr_val.v.val_vec.length
		    * a->dw_attr_val.v.val_vec.elt_size; /* block */
	  break;
	case dw_val_class_flag:
	  if (dwarf_version >= 4)
	    /* Currently all add_AT_flag calls pass in 1 as last argument,
	       so DW_FORM_flag_present can be used.  If that ever changes,
	       we'll need to use DW_FORM_flag and have some optimization
	       in build_abbrev_table that will change those to
	       DW_FORM_flag_present if it is set to 1 in all DIEs using
	       the same abbrev entry.  */
	    gcc_assert (a->dw_attr_val.v.val_flag == 1);
	  else
	    size += 1;
	  break;
	case dw_val_class_die_ref:
	  if (AT_ref_external (a))
	    {
	      /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
		 we use DW_FORM_ref_addr.  In DWARF2, DW_FORM_ref_addr
		 is sized by target address length, whereas in DWARF3
		 it's always sized as an offset.  */
	      if (AT_ref (a)->comdat_type_p)
		size += DWARF_TYPE_SIGNATURE_SIZE;
	      else if (dwarf_version == 2)
		size += DWARF2_ADDR_SIZE;
	      else
		size += dwarf_offset_size;
	    }
	  else
	    size += dwarf_offset_size;
	  break;
	case dw_val_class_fde_ref:
	  size += dwarf_offset_size;
	  break;
	case dw_val_class_lbl_id:
	  /* Either a uleb128 .debug_addr index or a full address.  */
	  if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
	    {
	      gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
	      size += size_of_uleb128 (AT_index (a));
	    }
	  else
	    size += DWARF2_ADDR_SIZE;
	  break;
	case dw_val_class_lineptr:
	case dw_val_class_macptr:
	case dw_val_class_loclistsptr:
	  size += dwarf_offset_size;
	  break;
	case dw_val_class_str:
	  /* Offset into a string section, a string index, or the
	     inline NUL-terminated string itself.  */
	  form = AT_string_form (a);
	  if (form == DW_FORM_strp || form == DW_FORM_line_strp)
	    size += dwarf_offset_size;
	  else if (form == dwarf_FORM (form: DW_FORM_strx))
	    size += size_of_uleb128 (AT_index (a));
	  else
	    size += strlen (s: a->dw_attr_val.v.val_str->str) + 1;
	  break;
	case dw_val_class_file:
	  size += constant_size (value: maybe_emit_file (fd: a->dw_attr_val.v.val_file));
	  break;
	case dw_val_class_data8:
	  size += 8;
	  break;
	case dw_val_class_vms_delta:
	  size += dwarf_offset_size;
	  break;
	case dw_val_class_high_pc:
	  size += DWARF2_ADDR_SIZE;
	  break;
	case dw_val_class_discr_value:
	  size += size_of_discr_value (discr_value: &a->dw_attr_val.v.val_discr_value);
	  break;
	case dw_val_class_discr_list:
	  {
	    unsigned block_size = size_of_discr_list (discr_list: AT_discr_list (a));

	    /* This is a block, so we have the block length and then its
	       data.  */
	    size += constant_size (value: block_size) + block_size;
	  }
	  break;
	default:
	  gcc_unreachable ();
	}
    }

  return size;
}
9699 | |
9700 | /* Size the debugging information associated with a given DIE. Visits the |
9701 | DIE's children recursively. Updates the global variable next_die_offset, on |
9702 | each time through. Uses the current value of next_die_offset to update the |
9703 | die_offset field in each DIE. */ |
9704 | |
9705 | static void |
9706 | calc_die_sizes (dw_die_ref die) |
9707 | { |
9708 | dw_die_ref c; |
9709 | |
9710 | gcc_assert (die->die_offset == 0 |
9711 | || (unsigned long int) die->die_offset == next_die_offset); |
9712 | die->die_offset = next_die_offset; |
9713 | next_die_offset += size_of_die (die); |
9714 | |
9715 | FOR_EACH_CHILD (die, c, calc_die_sizes (c)); |
9716 | |
9717 | if (die->die_child != NULL) |
9718 | /* Count the null byte used to terminate sibling lists. */ |
9719 | next_die_offset += 1; |
9720 | } |
9721 | |
/* Size just the base type children at the start of the CU.
   This is needed because build_abbrev needs to size locs
   and sizing of type based stack ops needs to know die_offset
   values for the base types.  */

static void
calc_base_type_die_sizes (void)
{
  /* Offsets start right after the (possibly skeleton) CU header.  */
  unsigned long die_offset = (dwarf_split_debug_info
			      ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
			      : DWARF_COMPILE_UNIT_HEADER_SIZE);
  unsigned int i;
  dw_die_ref base_type;
#if ENABLE_ASSERT_CHECKING
  /* Used only to verify that base types form the leading run of the
     CU DIE's children, in order.  */
  dw_die_ref prev = comp_unit_die ()->die_child;
#endif

  die_offset += size_of_die (die: comp_unit_die ());
  for (i = 0; base_types.iterate (ix: i, ptr: &base_type); i++)
    {
#if ENABLE_ASSERT_CHECKING
      /* Each base type must be unsized so far, childless, the next
	 sibling in sequence, and already have an abbrev assigned.  */
      gcc_assert (base_type->die_offset == 0
		  && prev->die_sib == base_type
		  && base_type->die_child == NULL
		  && base_type->die_abbrev);
      prev = base_type;
#endif
      /* Track one past the highest abbrev number used by base types,
	 so abbrev optimization knows where the base-type range ends.  */
      if (abbrev_opt_start
	  && base_type->die_abbrev >= abbrev_opt_base_type_end)
	abbrev_opt_base_type_end = base_type->die_abbrev + 1;
      base_type->die_offset = die_offset;
      die_offset += size_of_die (die: base_type);
    }
}
9756 | |
9757 | /* Set the marks for a die and its children. We do this so |
9758 | that we know whether or not a reference needs to use FORM_ref_addr; only |
9759 | DIEs in the same CU will be marked. We used to clear out the offset |
9760 | and use that as the flag, but ran into ordering problems. */ |
9761 | |
9762 | static void |
9763 | mark_dies (dw_die_ref die) |
9764 | { |
9765 | dw_die_ref c; |
9766 | |
9767 | gcc_assert (!die->die_mark); |
9768 | |
9769 | die->die_mark = 1; |
9770 | FOR_EACH_CHILD (die, c, mark_dies (c)); |
9771 | } |
9772 | |
9773 | /* Clear the marks for a die and its children. */ |
9774 | |
9775 | static void |
9776 | unmark_dies (dw_die_ref die) |
9777 | { |
9778 | dw_die_ref c; |
9779 | |
9780 | if (! use_debug_types) |
9781 | gcc_assert (die->die_mark); |
9782 | |
9783 | die->die_mark = 0; |
9784 | FOR_EACH_CHILD (die, c, unmark_dies (c)); |
9785 | } |
9786 | |
9787 | /* Clear the marks for a die, its children and referred dies. */ |
9788 | |
9789 | static void |
9790 | unmark_all_dies (dw_die_ref die) |
9791 | { |
9792 | dw_die_ref c; |
9793 | dw_attr_node *a; |
9794 | unsigned ix; |
9795 | |
9796 | if (!die->die_mark) |
9797 | return; |
9798 | die->die_mark = 0; |
9799 | |
9800 | FOR_EACH_CHILD (die, c, unmark_all_dies (c)); |
9801 | |
9802 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
9803 | if (AT_class (a) == dw_val_class_die_ref) |
9804 | unmark_all_dies (die: AT_ref (a)); |
9805 | } |
9806 | |
9807 | /* Calculate if the entry should appear in the final output file. It may be |
9808 | from a pruned a type. */ |
9809 | |
9810 | static bool |
9811 | include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p) |
9812 | { |
9813 | /* By limiting gnu pubnames to definitions only, gold can generate a |
9814 | gdb index without entries for declarations, which don't include |
9815 | enough information to be useful. */ |
9816 | if (debug_generate_pub_sections == 2 && is_declaration_die (die: p->die)) |
9817 | return false; |
9818 | |
9819 | if (table == pubname_table) |
9820 | { |
9821 | /* Enumerator names are part of the pubname table, but the |
9822 | parent DW_TAG_enumeration_type die may have been pruned. |
9823 | Don't output them if that is the case. */ |
9824 | if (p->die->die_tag == DW_TAG_enumerator && |
9825 | (p->die->die_parent == NULL |
9826 | || !p->die->die_parent->die_perennial_p)) |
9827 | return false; |
9828 | |
9829 | /* Everything else in the pubname table is included. */ |
9830 | return true; |
9831 | } |
9832 | |
9833 | /* The pubtypes table shouldn't include types that have been |
9834 | pruned. */ |
9835 | return (p->die->die_offset != 0 |
9836 | || !flag_eliminate_unused_debug_types); |
9837 | } |
9838 | |
9839 | /* Return the size of the .debug_pubnames or .debug_pubtypes table |
9840 | generated for the compilation unit. */ |
9841 | |
9842 | static unsigned long |
9843 | size_of_pubnames (vec<pubname_entry, va_gc> *names) |
9844 | { |
9845 | unsigned long size; |
9846 | unsigned i; |
9847 | pubname_entry *p; |
9848 | int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0; |
9849 | |
9850 | size = DWARF_PUBNAMES_HEADER_SIZE; |
9851 | FOR_EACH_VEC_ELT (*names, i, p) |
9852 | if (include_pubname_in_output (table: names, p)) |
9853 | size += strlen (s: p->name) + dwarf_offset_size + 1 + space_for_flags; |
9854 | |
9855 | size += dwarf_offset_size; |
9856 | return size; |
9857 | } |
9858 | |
9859 | /* Return the size of the information in the .debug_aranges section. */ |
9860 | |
9861 | static unsigned long |
9862 | size_of_aranges (void) |
9863 | { |
9864 | unsigned long size; |
9865 | |
9866 | size = DWARF_ARANGES_HEADER_SIZE; |
9867 | |
9868 | /* Count the address/length pair for this compilation unit. */ |
9869 | if (switch_text_ranges) |
9870 | size += 2 * DWARF2_ADDR_SIZE |
9871 | * (vec_safe_length (v: switch_text_ranges) / 2 + 1); |
9872 | if (switch_cold_ranges) |
9873 | size += 2 * DWARF2_ADDR_SIZE |
9874 | * (vec_safe_length (v: switch_cold_ranges) / 2 + 1); |
9875 | if (have_multiple_function_sections) |
9876 | { |
9877 | unsigned fde_idx; |
9878 | dw_fde_ref fde; |
9879 | |
9880 | FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde) |
9881 | { |
9882 | if (fde->ignored_debug) |
9883 | continue; |
9884 | if (!fde->in_std_section) |
9885 | size += 2 * DWARF2_ADDR_SIZE; |
9886 | if (fde->dw_fde_second_begin && !fde->second_in_std_section) |
9887 | size += 2 * DWARF2_ADDR_SIZE; |
9888 | } |
9889 | } |
9890 | |
9891 | /* Count the two zero words used to terminated the address range table. */ |
9892 | size += 2 * DWARF2_ADDR_SIZE; |
9893 | return size; |
9894 | } |
9895 | |
/* Select the encoding of an attribute value.  Returns the DW_FORM_*
   code that will be used both in the abbrev table and when sizing and
   emitting the value itself, so this must stay in sync with
   size_of_die and output_die.  */

static enum dwarf_form
value_format (dw_attr_node *a)
{
  switch (AT_class (a))
    {
    case dw_val_class_addr:
      /* Only very few attributes allow DW_FORM_addr.  */
      switch (a->dw_attr)
	{
	case DW_AT_low_pc:
	case DW_AT_high_pc:
	case DW_AT_entry_pc:
	case DW_AT_trampoline:
	  /* Indexed addresses are emitted through .debug_addr using
	     DW_FORM_addrx (or the GNU pre-DWARF5 equivalent).  */
	  return (AT_index (a) == NOT_INDEXED
		  ? DW_FORM_addr : dwarf_FORM (form: DW_FORM_addrx));
	default:
	  break;
	}
      /* Other address-class values are emitted as plain data of the
	 target address size.  */
      switch (DWARF2_ADDR_SIZE)
	{
	case 1:
	  return DW_FORM_data1;
	case 2:
	  return DW_FORM_data2;
	case 4:
	  return DW_FORM_data4;
	case 8:
	  return DW_FORM_data8;
	default:
	  gcc_unreachable ();
	}
    case dw_val_class_loc_list:
      /* Split DWARF 5 location lists that already have an index
	 assigned are referenced via DW_FORM_loclistx.  */
      if (dwarf_split_debug_info
	  && dwarf_version >= 5
	  && AT_loc_list (a)->num_assigned)
	return DW_FORM_loclistx;
      /* FALLTHRU */
    case dw_val_class_view_list:
    case dw_val_class_range_list:
      /* For range lists in DWARF 5, use DW_FORM_rnglistx from .debug_info.dwo
	 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
	 care about sizes of .debug* sections in shared libraries and
	 executables and don't take into account relocations that affect just
	 relocatable objects - for DW_FORM_rnglistx we'd have to emit offset
	 table in the .debug_rnglists section.  */
      if (dwarf_split_debug_info
	  && dwarf_version >= 5
	  && AT_class (a) == dw_val_class_range_list
	  && rnglist_idx
	  && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
	return DW_FORM_rnglistx;
      if (dwarf_version >= 4)
	return DW_FORM_sec_offset;
      /* FALLTHRU */
    case dw_val_class_vms_delta:
    case dw_val_class_offset:
      /* Pre-DWARF4 section offsets are emitted as data of the offset
	 size (4 for 32-bit DWARF, 8 for 64-bit DWARF).  */
      switch (dwarf_offset_size)
	{
	case 4:
	  return DW_FORM_data4;
	case 8:
	  return DW_FORM_data8;
	default:
	  gcc_unreachable ();
	}
    case dw_val_class_loc:
      if (dwarf_version >= 4)
	return DW_FORM_exprloc;
      /* Pre-DWARF4 location expressions are blocks whose length prefix
	 is the smallest that fits the expression size.  */
      switch (constant_size (value: size_of_locs (loc: AT_loc (a))))
	{
	case 1:
	  return DW_FORM_block1;
	case 2:
	  return DW_FORM_block2;
	case 4:
	  return DW_FORM_block4;
	default:
	  gcc_unreachable ();
	}
    case dw_val_class_const:
      return DW_FORM_sdata;
    case dw_val_class_unsigned_const:
      switch (constant_size (value: AT_unsigned (a)))
	{
	case 1:
	  return DW_FORM_data1;
	case 2:
	  return DW_FORM_data2;
	case 4:
	  /* In DWARF3 DW_AT_data_member_location with
	     DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
	     constant, so we need to use DW_FORM_udata if we need
	     a large constant.  */
	  if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
	    return DW_FORM_udata;
	  return DW_FORM_data4;
	case 8:
	  if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
	    return DW_FORM_udata;
	  return DW_FORM_data8;
	default:
	  gcc_unreachable ();
	}
    case dw_val_class_const_implicit:
    case dw_val_class_unsigned_const_implicit:
    case dw_val_class_file_implicit:
      /* The value lives in the abbrev table, not in the DIE.  */
      return DW_FORM_implicit_const;
    case dw_val_class_const_double:
      switch (HOST_BITS_PER_WIDE_INT)
	{
	case 8:
	  return DW_FORM_data2;
	case 16:
	  return DW_FORM_data4;
	case 32:
	  return DW_FORM_data8;
	case 64:
	  if (dwarf_version >= 5)
	    return DW_FORM_data16;
	  /* FALLTHRU */
	default:
	  return DW_FORM_block1;
	}
    case dw_val_class_wide_int:
      switch (get_full_len (op: *a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
	{
	case 8:
	  return DW_FORM_data1;
	case 16:
	  return DW_FORM_data2;
	case 32:
	  return DW_FORM_data4;
	case 64:
	  return DW_FORM_data8;
	case 128:
	  if (dwarf_version >= 5)
	    return DW_FORM_data16;
	  /* FALLTHRU */
	default:
	  return DW_FORM_block1;
	}
    case dw_val_class_symview:
      /* ??? We might use uleb128, but then we'd have to compute
	 .debug_info offsets in the assembler.  */
      if (symview_upper_bound <= 0xff)
	return DW_FORM_data1;
      else if (symview_upper_bound <= 0xffff)
	return DW_FORM_data2;
      else if (symview_upper_bound <= 0xffffffff)
	return DW_FORM_data4;
      else
	return DW_FORM_data8;
    case dw_val_class_vec:
      switch (constant_size (value: a->dw_attr_val.v.val_vec.length
			     * a->dw_attr_val.v.val_vec.elt_size))
	{
	case 1:
	  return DW_FORM_block1;
	case 2:
	  return DW_FORM_block2;
	case 4:
	  return DW_FORM_block4;
	default:
	  gcc_unreachable ();
	}
    case dw_val_class_flag:
      if (dwarf_version >= 4)
	{
	  /* Currently all add_AT_flag calls pass in 1 as last argument,
	     so DW_FORM_flag_present can be used.  If that ever changes,
	     we'll need to use DW_FORM_flag and have some optimization
	     in build_abbrev_table that will change those to
	     DW_FORM_flag_present if it is set to 1 in all DIEs using
	     the same abbrev entry.  */
	  gcc_assert (a->dw_attr_val.v.val_flag == 1);
	  return DW_FORM_flag_present;
	}
      return DW_FORM_flag;
    case dw_val_class_die_ref:
      if (AT_ref_external (a))
	{
	  /* Cross-unit references: by type signature for comdat type
	     units, by section offset otherwise.  */
	  if (AT_ref (a)->comdat_type_p)
	    return DW_FORM_ref_sig8;
	  else
	    return DW_FORM_ref_addr;
	}
      else
	return DW_FORM_ref;
    case dw_val_class_fde_ref:
      return DW_FORM_data;
    case dw_val_class_lbl_id:
      return (AT_index (a) == NOT_INDEXED
	      ? DW_FORM_addr : dwarf_FORM (form: DW_FORM_addrx));
    case dw_val_class_lineptr:
    case dw_val_class_macptr:
    case dw_val_class_loclistsptr:
      return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
    case dw_val_class_str:
      return AT_string_form (a);
    case dw_val_class_file:
      switch (constant_size (value: maybe_emit_file (fd: a->dw_attr_val.v.val_file)))
	{
	case 1:
	  return DW_FORM_data1;
	case 2:
	  return DW_FORM_data2;
	case 4:
	  return DW_FORM_data4;
	default:
	  gcc_unreachable ();
	}

    case dw_val_class_data8:
      return DW_FORM_data8;

    case dw_val_class_high_pc:
      switch (DWARF2_ADDR_SIZE)
	{
	case 1:
	  return DW_FORM_data1;
	case 2:
	  return DW_FORM_data2;
	case 4:
	  return DW_FORM_data4;
	case 8:
	  return DW_FORM_data8;
	default:
	  gcc_unreachable ();
	}

    case dw_val_class_discr_value:
      /* Discriminant values keep their signedness.  */
      return (a->dw_attr_val.v.val_discr_value.pos
	      ? DW_FORM_udata
	      : DW_FORM_sdata);
    case dw_val_class_discr_list:
      switch (constant_size (value: size_of_discr_list (discr_list: AT_discr_list (a))))
	{
	case 1:
	  return DW_FORM_block1;
	case 2:
	  return DW_FORM_block2;
	case 4:
	  return DW_FORM_block4;
	default:
	  gcc_unreachable ();
	}

    default:
      gcc_unreachable ();
    }
}
10149 | |
/* Output the encoding of an attribute value: its DW_FORM_* code as a
   uleb128, annotated with the form's name.  */

static void
output_value_format (dw_attr_node *a)
{
  enum dwarf_form form = value_format (a);

  dw2_asm_output_data_uleb128 (form, "(%s)" , dwarf_form_name (form));
}
10159 | |
10160 | /* Given a die and id, produce the appropriate abbreviations. */ |
10161 | |
10162 | static void |
10163 | output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev) |
10164 | { |
10165 | unsigned ix; |
10166 | dw_attr_node *a_attr; |
10167 | |
10168 | dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)" ); |
10169 | dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)" , |
10170 | dwarf_tag_name (tag: abbrev->die_tag)); |
10171 | |
10172 | if (abbrev->die_child != NULL) |
10173 | dw2_asm_output_data (1, DW_children_yes, "DW_children_yes" ); |
10174 | else |
10175 | dw2_asm_output_data (1, DW_children_no, "DW_children_no" ); |
10176 | |
10177 | for (ix = 0; vec_safe_iterate (v: abbrev->die_attr, ix, ptr: &a_attr); ix++) |
10178 | { |
10179 | dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)" , |
10180 | dwarf_attr_name (attr: a_attr->dw_attr)); |
10181 | output_value_format (a: a_attr); |
10182 | if (value_format (a: a_attr) == DW_FORM_implicit_const) |
10183 | { |
10184 | if (AT_class (a: a_attr) == dw_val_class_file_implicit) |
10185 | { |
10186 | int f = maybe_emit_file (fd: a_attr->dw_attr_val.v.val_file); |
10187 | const char *filename = a_attr->dw_attr_val.v.val_file->filename; |
10188 | dw2_asm_output_data_sleb128 (f, "(%s)" , filename); |
10189 | } |
10190 | else |
10191 | dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL); |
10192 | } |
10193 | } |
10194 | |
10195 | dw2_asm_output_data (1, 0, NULL); |
10196 | dw2_asm_output_data (1, 0, NULL); |
10197 | } |
10198 | |
10199 | |
10200 | /* Output the .debug_abbrev section which defines the DIE abbreviation |
10201 | table. */ |
10202 | |
10203 | static void |
10204 | output_abbrev_section (void) |
10205 | { |
10206 | unsigned int abbrev_id; |
10207 | dw_die_ref abbrev; |
10208 | |
10209 | FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev) |
10210 | if (abbrev_id != 0) |
10211 | output_die_abbrevs (abbrev_id, abbrev); |
10212 | |
10213 | /* Terminate the table. */ |
10214 | dw2_asm_output_data (1, 0, NULL); |
10215 | } |
10216 | |
10217 | /* Return a new location list, given the begin and end range, and the |
10218 | expression. */ |
10219 | |
10220 | static inline dw_loc_list_ref |
10221 | new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin, |
10222 | const char *end, var_loc_view vend, |
10223 | const char *section) |
10224 | { |
10225 | dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> (); |
10226 | |
10227 | retlist->begin = begin; |
10228 | retlist->begin_entry = NULL; |
10229 | retlist->end = end; |
10230 | retlist->end_entry = NULL; |
10231 | retlist->expr = expr; |
10232 | retlist->section = section; |
10233 | retlist->vbegin = vbegin; |
10234 | retlist->vend = vend; |
10235 | |
10236 | return retlist; |
10237 | } |
10238 | |
10239 | /* Return true iff there's any nonzero view number in the loc list. |
10240 | |
10241 | ??? When views are not enabled, we'll often extend a single range |
10242 | to the entire function, so that we emit a single location |
10243 | expression rather than a location list. With views, even with a |
10244 | single range, we'll output a list if start or end have a nonzero |
10245 | view. If we change this, we may want to stop splitting a single |
10246 | range in dw_loc_list just because of a nonzero view, even if it |
10247 | straddles across hot/cold partitions. */ |
10248 | |
10249 | static bool |
10250 | loc_list_has_views (dw_loc_list_ref list) |
10251 | { |
10252 | if (!debug_variable_location_views) |
10253 | return false; |
10254 | |
10255 | for (dw_loc_list_ref loc = list; |
10256 | loc != NULL; loc = loc->dw_loc_next) |
10257 | if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend)) |
10258 | return true; |
10259 | |
10260 | return false; |
10261 | } |
10262 | |
/* Generate a new internal symbol for this location list node, if it
   hasn't got one yet.  Also creates the view-list symbol when the
   list carries nonzero location views.  */

static inline void
gen_llsym (dw_loc_list_ref list)
{
  gcc_assert (!list->ll_symbol);
  list->ll_symbol = gen_internal_sym (prefix: "LLST" );

  /* Without any nonzero views there is no view list to label.  */
  if (!loc_list_has_views (list))
    return;

  if (dwarf2out_locviews_in_attribute ())
    {
      /* Use the same label_num for the view list.  */
      label_num--;
      list->vl_symbol = gen_internal_sym (prefix: "LVUS" );
    }
  else
    /* Views are interleaved in the location list itself, so the two
       symbols coincide.  */
    list->vl_symbol = list->ll_symbol;
}
10284 | |
10285 | /* Generate a symbol for the list, but only if we really want to emit |
10286 | it as a list. */ |
10287 | |
10288 | static inline void |
10289 | maybe_gen_llsym (dw_loc_list_ref list) |
10290 | { |
10291 | if (!list || (!list->dw_loc_next && !loc_list_has_views (list))) |
10292 | return; |
10293 | |
10294 | gen_llsym (list); |
10295 | } |
10296 | |
10297 | /* Determine whether or not to skip loc_list entry CURR. If SIZEP is |
10298 | NULL, don't consider size of the location expression. If we're not |
10299 | to skip it, and SIZEP is non-null, store the size of CURR->expr's |
10300 | representation in *SIZEP. */ |
10301 | |
10302 | static bool |
10303 | skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL) |
10304 | { |
10305 | /* Don't output an entry that starts and ends at the same address. */ |
10306 | if (strcmp (s1: curr->begin, s2: curr->end) == 0 |
10307 | && curr->vbegin == curr->vend && !curr->force) |
10308 | return true; |
10309 | |
10310 | if (!sizep) |
10311 | return false; |
10312 | |
10313 | unsigned long size = size_of_locs (loc: curr->expr); |
10314 | |
10315 | /* If the expression is too large, drop it on the floor. We could |
10316 | perhaps put it into DW_TAG_dwarf_procedure and refer to that |
10317 | in the expression, but >= 64KB expressions for a single value |
10318 | in a single range are unlikely very useful. */ |
10319 | if (dwarf_version < 5 && size > 0xffff) |
10320 | return true; |
10321 | |
10322 | *sizep = size; |
10323 | |
10324 | return false; |
10325 | } |
10326 | |
/* Output a view pair loclist entry for CURR, if it requires one.
   Emits nothing unless views are interleaved in the location list and
   at least one of CURR's views is nonzero.  */

static void
dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
{
  if (!dwarf2out_locviews_in_loclist ())
    return;

  /* A pair of zero views carries no information.  */
  if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
    return;

#ifdef DW_LLE_view_pair
  dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair" );

  if (dwarf2out_as_locview_support)
    {
      /* The assembler resolves view numbers; reference the LVU labels
	 symbolically, with zero views emitted as literals.  */
      if (ZERO_VIEW_P (curr->vbegin))
	dw2_asm_output_data_uleb128 (0, "Location view begin" );
      else
	{
	  char label[MAX_ARTIFICIAL_LABEL_BYTES];
	  ASM_GENERATE_INTERNAL_LABEL (label, "LVU" , curr->vbegin);
	  dw2_asm_output_symname_uleb128 (label, "Location view begin" );
	}

      if (ZERO_VIEW_P (curr->vend))
	dw2_asm_output_data_uleb128 (0, "Location view end" );
      else
	{
	  char label[MAX_ARTIFICIAL_LABEL_BYTES];
	  ASM_GENERATE_INTERNAL_LABEL (label, "LVU" , curr->vend);
	  dw2_asm_output_symname_uleb128 (label, "Location view end" );
	}
    }
  else
    {
      /* The view numbers are known here; emit them directly.  */
      dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin" );
      dw2_asm_output_data_uleb128 (curr->vend, "Location view end" );
    }
#endif /* DW_LLE_view_pair */

  return;
}
10370 | |
10371 | /* Output the location list given to us. */ |
10372 | |
10373 | static void |
10374 | output_loc_list (dw_loc_list_ref list_head) |
10375 | { |
10376 | int vcount = 0, lcount = 0; |
10377 | |
10378 | if (list_head->emitted) |
10379 | return; |
10380 | list_head->emitted = true; |
10381 | |
10382 | if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ()) |
10383 | { |
10384 | ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol); |
10385 | |
10386 | for (dw_loc_list_ref curr = list_head; curr != NULL; |
10387 | curr = curr->dw_loc_next) |
10388 | { |
10389 | unsigned long size; |
10390 | |
10391 | if (skip_loc_list_entry (curr, sizep: &size)) |
10392 | continue; |
10393 | |
10394 | vcount++; |
10395 | |
10396 | /* ?? dwarf_split_debug_info? */ |
10397 | if (dwarf2out_as_locview_support) |
10398 | { |
10399 | char label[MAX_ARTIFICIAL_LABEL_BYTES]; |
10400 | |
10401 | if (!ZERO_VIEW_P (curr->vbegin)) |
10402 | { |
10403 | ASM_GENERATE_INTERNAL_LABEL (label, "LVU" , curr->vbegin); |
10404 | dw2_asm_output_symname_uleb128 (label, |
10405 | "View list begin (%s)" , |
10406 | list_head->vl_symbol); |
10407 | } |
10408 | else |
10409 | dw2_asm_output_data_uleb128 (0, |
10410 | "View list begin (%s)" , |
10411 | list_head->vl_symbol); |
10412 | |
10413 | if (!ZERO_VIEW_P (curr->vend)) |
10414 | { |
10415 | ASM_GENERATE_INTERNAL_LABEL (label, "LVU" , curr->vend); |
10416 | dw2_asm_output_symname_uleb128 (label, |
10417 | "View list end (%s)" , |
10418 | list_head->vl_symbol); |
10419 | } |
10420 | else |
10421 | dw2_asm_output_data_uleb128 (0, |
10422 | "View list end (%s)" , |
10423 | list_head->vl_symbol); |
10424 | } |
10425 | else |
10426 | { |
10427 | dw2_asm_output_data_uleb128 (curr->vbegin, |
10428 | "View list begin (%s)" , |
10429 | list_head->vl_symbol); |
10430 | dw2_asm_output_data_uleb128 (curr->vend, |
10431 | "View list end (%s)" , |
10432 | list_head->vl_symbol); |
10433 | } |
10434 | } |
10435 | } |
10436 | |
10437 | ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol); |
10438 | |
10439 | const char *last_section = NULL; |
10440 | const char *base_label = NULL; |
10441 | |
10442 | /* Walk the location list, and output each range + expression. */ |
10443 | for (dw_loc_list_ref curr = list_head; curr != NULL; |
10444 | curr = curr->dw_loc_next) |
10445 | { |
10446 | unsigned long size; |
10447 | |
10448 | /* Skip this entry? If we skip it here, we must skip it in the |
10449 | view list above as well. */ |
10450 | if (skip_loc_list_entry (curr, sizep: &size)) |
10451 | continue; |
10452 | |
10453 | lcount++; |
10454 | |
10455 | if (dwarf_version >= 5) |
10456 | { |
10457 | if (dwarf_split_debug_info && HAVE_AS_LEB128) |
10458 | { |
10459 | dwarf2out_maybe_output_loclist_view_pair (curr); |
10460 | /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has |
10461 | uleb128 index into .debug_addr and uleb128 length. */ |
10462 | dw2_asm_output_data (1, DW_LLE_startx_length, |
10463 | "DW_LLE_startx_length (%s)" , |
10464 | list_head->ll_symbol); |
10465 | dw2_asm_output_data_uleb128 (curr->begin_entry->index, |
10466 | "Location list range start index " |
10467 | "(%s)" , curr->begin); |
10468 | dw2_asm_output_delta_uleb128 (curr->end, curr->begin, |
10469 | "Location list length (%s)" , |
10470 | list_head->ll_symbol); |
10471 | } |
10472 | else if (dwarf_split_debug_info) |
10473 | { |
10474 | dwarf2out_maybe_output_loclist_view_pair (curr); |
10475 | /* For -gsplit-dwarf without usable .uleb128 support, emit |
10476 | DW_LLE_startx_endx, which has two uleb128 indexes into |
10477 | .debug_addr. */ |
10478 | dw2_asm_output_data (1, DW_LLE_startx_endx, |
10479 | "DW_LLE_startx_endx (%s)" , |
10480 | list_head->ll_symbol); |
10481 | dw2_asm_output_data_uleb128 (curr->begin_entry->index, |
10482 | "Location list range start index " |
10483 | "(%s)" , curr->begin); |
10484 | dw2_asm_output_data_uleb128 (curr->end_entry->index, |
10485 | "Location list range end index " |
10486 | "(%s)" , curr->end); |
10487 | } |
10488 | else if (!have_multiple_function_sections && HAVE_AS_LEB128) |
10489 | { |
10490 | dwarf2out_maybe_output_loclist_view_pair (curr); |
10491 | /* If all code is in .text section, the base address is |
10492 | already provided by the CU attributes. Use |
10493 | DW_LLE_offset_pair where both addresses are uleb128 encoded |
10494 | offsets against that base. */ |
10495 | dw2_asm_output_data (1, DW_LLE_offset_pair, |
10496 | "DW_LLE_offset_pair (%s)" , |
10497 | list_head->ll_symbol); |
10498 | dw2_asm_output_delta_uleb128 (curr->begin, curr->section, |
10499 | "Location list begin address (%s)" , |
10500 | list_head->ll_symbol); |
10501 | dw2_asm_output_delta_uleb128 (curr->end, curr->section, |
10502 | "Location list end address (%s)" , |
10503 | list_head->ll_symbol); |
10504 | } |
10505 | else if (HAVE_AS_LEB128) |
10506 | { |
10507 | /* Otherwise, find out how many consecutive entries could share |
10508 | the same base entry. If just one, emit DW_LLE_start_length, |
10509 | otherwise emit DW_LLE_base_address for the base address |
10510 | followed by a series of DW_LLE_offset_pair. */ |
10511 | if (last_section == NULL || curr->section != last_section) |
10512 | { |
10513 | dw_loc_list_ref curr2; |
10514 | for (curr2 = curr->dw_loc_next; curr2 != NULL; |
10515 | curr2 = curr2->dw_loc_next) |
10516 | { |
10517 | if (strcmp (s1: curr2->begin, s2: curr2->end) == 0 |
10518 | && !curr2->force) |
10519 | continue; |
10520 | break; |
10521 | } |
10522 | if (curr2 == NULL || curr->section != curr2->section) |
10523 | last_section = NULL; |
10524 | else |
10525 | { |
10526 | last_section = curr->section; |
10527 | base_label = curr->begin; |
10528 | dw2_asm_output_data (1, DW_LLE_base_address, |
10529 | "DW_LLE_base_address (%s)" , |
10530 | list_head->ll_symbol); |
10531 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label, |
10532 | "Base address (%s)" , |
10533 | list_head->ll_symbol); |
10534 | } |
10535 | } |
10536 | /* Only one entry with the same base address. Use |
10537 | DW_LLE_start_length with absolute address and uleb128 |
10538 | length. */ |
10539 | if (last_section == NULL) |
10540 | { |
10541 | dwarf2out_maybe_output_loclist_view_pair (curr); |
10542 | dw2_asm_output_data (1, DW_LLE_start_length, |
10543 | "DW_LLE_start_length (%s)" , |
10544 | list_head->ll_symbol); |
10545 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin, |
10546 | "Location list begin address (%s)" , |
10547 | list_head->ll_symbol); |
10548 | dw2_asm_output_delta_uleb128 (curr->end, curr->begin, |
10549 | "Location list length " |
10550 | "(%s)" , list_head->ll_symbol); |
10551 | } |
10552 | /* Otherwise emit DW_LLE_offset_pair, relative to above emitted |
10553 | DW_LLE_base_address. */ |
10554 | else |
10555 | { |
10556 | dwarf2out_maybe_output_loclist_view_pair (curr); |
10557 | dw2_asm_output_data (1, DW_LLE_offset_pair, |
10558 | "DW_LLE_offset_pair (%s)" , |
10559 | list_head->ll_symbol); |
10560 | dw2_asm_output_delta_uleb128 (curr->begin, base_label, |
10561 | "Location list begin address " |
10562 | "(%s)" , list_head->ll_symbol); |
10563 | dw2_asm_output_delta_uleb128 (curr->end, base_label, |
10564 | "Location list end address " |
10565 | "(%s)" , list_head->ll_symbol); |
10566 | } |
10567 | } |
10568 | /* The assembler does not support .uleb128 directive. Emit |
10569 | DW_LLE_start_end with a pair of absolute addresses. */ |
10570 | else |
10571 | { |
10572 | dwarf2out_maybe_output_loclist_view_pair (curr); |
10573 | dw2_asm_output_data (1, DW_LLE_start_end, |
10574 | "DW_LLE_start_end (%s)" , |
10575 | list_head->ll_symbol); |
10576 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin, |
10577 | "Location list begin address (%s)" , |
10578 | list_head->ll_symbol); |
10579 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end, |
10580 | "Location list end address (%s)" , |
10581 | list_head->ll_symbol); |
10582 | } |
10583 | } |
10584 | else if (dwarf_split_debug_info) |
10585 | { |
10586 | /* For -gsplit-dwarf -gdwarf-{2,3,4} emit index into .debug_addr |
10587 | and 4 byte length. */ |
10588 | dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry, |
10589 | "Location list start/length entry (%s)" , |
10590 | list_head->ll_symbol); |
10591 | dw2_asm_output_data_uleb128 (curr->begin_entry->index, |
10592 | "Location list range start index (%s)" , |
10593 | curr->begin); |
10594 | /* The length field is 4 bytes. If we ever need to support |
10595 | an 8-byte length, we can add a new DW_LLE code or fall back |
10596 | to DW_LLE_GNU_start_end_entry. */ |
10597 | dw2_asm_output_delta (4, curr->end, curr->begin, |
10598 | "Location list range length (%s)" , |
10599 | list_head->ll_symbol); |
10600 | } |
10601 | else if (!have_multiple_function_sections) |
10602 | { |
10603 | /* Pair of relative addresses against start of text section. */ |
10604 | dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section, |
10605 | "Location list begin address (%s)" , |
10606 | list_head->ll_symbol); |
10607 | dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section, |
10608 | "Location list end address (%s)" , |
10609 | list_head->ll_symbol); |
10610 | } |
10611 | else |
10612 | { |
10613 | /* Pair of absolute addresses. */ |
10614 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin, |
10615 | "Location list begin address (%s)" , |
10616 | list_head->ll_symbol); |
10617 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end, |
10618 | "Location list end address (%s)" , |
10619 | list_head->ll_symbol); |
10620 | } |
10621 | |
10622 | /* Output the block length for this list of location operations. */ |
10623 | if (dwarf_version >= 5) |
10624 | dw2_asm_output_data_uleb128 (size, "Location expression size" ); |
10625 | else |
10626 | { |
10627 | gcc_assert (size <= 0xffff); |
10628 | dw2_asm_output_data (2, size, "Location expression size" ); |
10629 | } |
10630 | |
10631 | output_loc_sequence (loc: curr->expr, for_eh_or_skip: -1); |
10632 | } |
10633 | |
10634 | /* And finally list termination. */ |
10635 | if (dwarf_version >= 5) |
10636 | dw2_asm_output_data (1, DW_LLE_end_of_list, |
10637 | "DW_LLE_end_of_list (%s)" , list_head->ll_symbol); |
10638 | else if (dwarf_split_debug_info) |
10639 | dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry, |
10640 | "Location list terminator (%s)" , |
10641 | list_head->ll_symbol); |
10642 | else |
10643 | { |
10644 | dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, |
10645 | "Location list terminator begin (%s)" , |
10646 | list_head->ll_symbol); |
10647 | dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, |
10648 | "Location list terminator end (%s)" , |
10649 | list_head->ll_symbol); |
10650 | } |
10651 | |
10652 | gcc_assert (!list_head->vl_symbol |
10653 | || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0)); |
10654 | } |
10655 | |
10656 | /* Output a range_list offset into the .debug_ranges or .debug_rnglists |
10657 | section. Emit a relocated reference if val_entry is NULL, otherwise, |
10658 | emit an indirect reference. */ |
10659 | |
10660 | static void |
10661 | output_range_list_offset (dw_attr_node *a) |
10662 | { |
10663 | const char *name = dwarf_attr_name (attr: a->dw_attr); |
10664 | |
10665 | if (a->dw_attr_val.val_entry == RELOCATED_OFFSET) |
10666 | { |
10667 | if (dwarf_version >= 5) |
10668 | { |
10669 | dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset]; |
10670 | dw2_asm_output_offset (dwarf_offset_size, r->label, |
10671 | debug_ranges_section, "%s" , name); |
10672 | } |
10673 | else |
10674 | { |
10675 | char *p = strchr (s: ranges_section_label, c: '\0'); |
10676 | sprintf (s: p, format: "+" HOST_WIDE_INT_PRINT_HEX, |
10677 | a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE); |
10678 | dw2_asm_output_offset (dwarf_offset_size, ranges_section_label, |
10679 | debug_ranges_section, "%s" , name); |
10680 | *p = '\0'; |
10681 | } |
10682 | } |
10683 | else if (dwarf_version >= 5) |
10684 | { |
10685 | dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset]; |
10686 | gcc_assert (rnglist_idx); |
10687 | dw2_asm_output_data_uleb128 (r->idx, "%s" , name); |
10688 | } |
10689 | else |
10690 | dw2_asm_output_data (dwarf_offset_size, |
10691 | a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE, |
10692 | "%s (offset from %s)" , name, ranges_section_label); |
10693 | } |
10694 | |
10695 | /* Output the offset into the debug_loc section. */ |
10696 | |
10697 | static void |
10698 | output_loc_list_offset (dw_attr_node *a) |
10699 | { |
10700 | char *sym = AT_loc_list (a)->ll_symbol; |
10701 | |
10702 | gcc_assert (sym); |
10703 | if (!dwarf_split_debug_info) |
10704 | dw2_asm_output_offset (dwarf_offset_size, sym, debug_loc_section, |
10705 | "%s" , dwarf_attr_name (attr: a->dw_attr)); |
10706 | else if (dwarf_version >= 5) |
10707 | { |
10708 | gcc_assert (AT_loc_list (a)->num_assigned); |
10709 | dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)" , |
10710 | dwarf_attr_name (attr: a->dw_attr), |
10711 | sym); |
10712 | } |
10713 | else |
10714 | dw2_asm_output_delta (dwarf_offset_size, sym, loc_section_label, |
10715 | "%s" , dwarf_attr_name (attr: a->dw_attr)); |
10716 | } |
10717 | |
10718 | /* Output the offset into the debug_loc section. */ |
10719 | |
10720 | static void |
10721 | output_view_list_offset (dw_attr_node *a) |
10722 | { |
10723 | char *sym = (*AT_loc_list_ptr (a))->vl_symbol; |
10724 | |
10725 | gcc_assert (sym); |
10726 | if (dwarf_split_debug_info) |
10727 | dw2_asm_output_delta (dwarf_offset_size, sym, loc_section_label, |
10728 | "%s" , dwarf_attr_name (attr: a->dw_attr)); |
10729 | else |
10730 | dw2_asm_output_offset (dwarf_offset_size, sym, debug_loc_section, |
10731 | "%s" , dwarf_attr_name (attr: a->dw_attr)); |
10732 | } |
10733 | |
10734 | /* Output an attribute's index or value appropriately. */ |
10735 | |
10736 | static void |
10737 | output_attr_index_or_value (dw_attr_node *a) |
10738 | { |
10739 | const char *name = dwarf_attr_name (attr: a->dw_attr); |
10740 | |
10741 | if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED) |
10742 | { |
10743 | dw2_asm_output_data_uleb128 (AT_index (a), "%s" , name); |
10744 | return; |
10745 | } |
10746 | switch (AT_class (a)) |
10747 | { |
10748 | case dw_val_class_addr: |
10749 | dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s" , name); |
10750 | break; |
10751 | case dw_val_class_high_pc: |
10752 | case dw_val_class_lbl_id: |
10753 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s" , name); |
10754 | break; |
10755 | default: |
10756 | gcc_unreachable (); |
10757 | } |
10758 | } |
10759 | |
10760 | /* Output a type signature. */ |
10761 | |
10762 | static inline void |
10763 | output_signature (const char *sig, const char *name) |
10764 | { |
10765 | int i; |
10766 | |
10767 | for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++) |
10768 | dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name); |
10769 | } |
10770 | |
10771 | /* Output a discriminant value. */ |
10772 | |
10773 | static inline void |
10774 | output_discr_value (dw_discr_value *discr_value, const char *name) |
10775 | { |
10776 | if (discr_value->pos) |
10777 | dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s" , name); |
10778 | else |
10779 | dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s" , name); |
10780 | } |
10781 | |
/* Output the DIE and its attributes.  Called recursively to generate
   the definitions of each child DIE.

   The DIE's abbreviation code is emitted first, then the encoded value
   of each attribute (dispatched on its value class), then all children,
   and finally -- when there are children -- a terminating null byte.  */

static void
output_die (dw_die_ref die)
{
  dw_attr_node *a;
  dw_die_ref c;
  unsigned long size;
  unsigned ix;

  dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)" ,
                               (unsigned long)die->die_offset,
                               dwarf_tag_name (tag: die->die_tag));

  /* Emit the value of each attribute, encoded according to its
     value class.  */
  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    {
      const char *name = dwarf_attr_name (attr: a->dw_attr);

      switch (AT_class (a))
        {
        case dw_val_class_addr:
          output_attr_index_or_value (a);
          break;

        case dw_val_class_offset:
          dw2_asm_output_data (dwarf_offset_size, a->dw_attr_val.v.val_offset,
                               "%s" , name);
          break;

        case dw_val_class_range_list:
          output_range_list_offset (a);
          break;

        case dw_val_class_loc:
          size = size_of_locs (loc: AT_loc (a));

          /* Output the block length for this list of location operations.  */
          if (dwarf_version >= 4)
            dw2_asm_output_data_uleb128 (size, "%s" , name);
          else
            dw2_asm_output_data (constant_size (value: size), size, "%s" , name);

          output_loc_sequence (loc: AT_loc (a), for_eh_or_skip: -1);
          break;

        case dw_val_class_const:
          /* ??? It would be slightly more efficient to use a scheme like is
             used for unsigned constants below, but gdb 4.x does not sign
             extend.  Gdb 5.x does sign extend.  */
          dw2_asm_output_data_sleb128 (AT_int (a), "%s" , name);
          break;

        case dw_val_class_unsigned_const:
          {
            int csize = constant_size (value: AT_unsigned (a));
            /* DWARF 3 reads DW_AT_data_member_location of 4+ bytes as a
               location description, so use uleb128 there instead.  */
            if (dwarf_version == 3
                && a->dw_attr == DW_AT_data_member_location
                && csize >= 4)
              dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s" , name);
            else
              dw2_asm_output_data (csize, AT_unsigned (a), "%s" , name);
          }
          break;

        case dw_val_class_symview:
          {
            /* Use the smallest size that can represent every symbolic
               view up to the upper bound.  */
            int vsize;
            if (symview_upper_bound <= 0xff)
              vsize = 1;
            else if (symview_upper_bound <= 0xffff)
              vsize = 2;
            else if (symview_upper_bound <= 0xffffffff)
              vsize = 4;
            else
              vsize = 8;
            dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
                                 "%s" , name);
          }
          break;

        case dw_val_class_const_implicit:
          /* Implicit constants live in the abbreviation table; only a
             comment is emitted here.  */
          if (flag_debug_asm)
            fprintf (stream: asm_out_file, format: "\t\t\t%s %s ("
                     HOST_WIDE_INT_PRINT_DEC ")\n" ,
                     ASM_COMMENT_START, name, AT_int (a));
          break;

        case dw_val_class_unsigned_const_implicit:
          if (flag_debug_asm)
            fprintf (stream: asm_out_file, format: "\t\t\t%s %s ("
                     HOST_WIDE_INT_PRINT_HEX ")\n" ,
                     ASM_COMMENT_START, name, AT_unsigned (a));
          break;

        case dw_val_class_const_double:
          {
            unsigned HOST_WIDE_INT first, second;

            /* When the value cannot use a fixed-size data form, it is
               emitted as a block; output the one-byte block length
               first.  */
            if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
              dw2_asm_output_data (1,
                                   HOST_BITS_PER_DOUBLE_INT
                                   / HOST_BITS_PER_CHAR,
                                   NULL);

            /* Emit the two halves in target word order.  */
            if (WORDS_BIG_ENDIAN)
              {
                first = a->dw_attr_val.v.val_double.high;
                second = a->dw_attr_val.v.val_double.low;
              }
            else
              {
                first = a->dw_attr_val.v.val_double.low;
                second = a->dw_attr_val.v.val_double.high;
              }

            dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
                                 first, "%s" , name);
            dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
                                 second, NULL);
          }
          break;

        case dw_val_class_wide_int:
          {
            int i;
            int len = get_full_len (op: *a->dw_attr_val.v.val_wide);
            int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
            /* Likewise: a one-byte block length prefix when the value is
               too wide for a fixed-size data form.  */
            if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
              dw2_asm_output_data (1, get_full_len (op: *a->dw_attr_val.v.val_wide)
                                      * l, NULL);

            /* Emit the words in target order; only the first word gets
               the attribute-name comment.  */
            if (WORDS_BIG_ENDIAN)
              for (i = len - 1; i >= 0; --i)
                {
                  dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
                                       "%s" , name);
                  name = "" ;
                }
            else
              for (i = 0; i < len; ++i)
                {
                  dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
                                       "%s" , name);
                  name = "" ;
                }
          }
          break;

        case dw_val_class_vec:
          {
            unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
            unsigned int len = a->dw_attr_val.v.val_vec.length;
            unsigned int i;
            unsigned char *p;

            /* Total byte count first, then the raw element data.  */
            dw2_asm_output_data (constant_size (value: len * elt_size),
                                 len * elt_size, "%s" , name);
            /* Elements wider than a HOST_WIDE_INT are emitted as two
               half-size words each.  */
            if (elt_size > sizeof (HOST_WIDE_INT))
              {
                elt_size /= 2;
                len *= 2;
              }
            for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
                 i < len;
                 i++, p += elt_size)
              dw2_asm_output_data (elt_size, extract_int (p, elt_size),
                                   "fp or vector constant word %u" , i);
            break;
          }

        case dw_val_class_flag:
          if (dwarf_version >= 4)
            {
              /* Currently all add_AT_flag calls pass in 1 as last argument,
                 so DW_FORM_flag_present can be used.  If that ever changes,
                 we'll need to use DW_FORM_flag and have some optimization
                 in build_abbrev_table that will change those to
                 DW_FORM_flag_present if it is set to 1 in all DIEs using
                 the same abbrev entry.  */
              gcc_assert (AT_flag (a) == 1);
              if (flag_debug_asm)
                fprintf (stream: asm_out_file, format: "\t\t\t%s %s\n" ,
                         ASM_COMMENT_START, name);
              break;
            }
          dw2_asm_output_data (1, AT_flag (a), "%s" , name);
          break;

        case dw_val_class_loc_list:
          output_loc_list_offset (a);
          break;

        case dw_val_class_view_list:
          output_view_list_offset (a);
          break;

        case dw_val_class_die_ref:
          if (AT_ref_external (a))
            {
              if (AT_ref (a)->comdat_type_p)
                {
                  /* External reference into a comdat type unit: use the
                     type signature.  */
                  comdat_type_node *type_node
                    = AT_ref (a)->die_id.die_type_node;

                  gcc_assert (type_node);
                  output_signature (sig: type_node->signature, name);
                }
              else
                {
                  const char *sym = AT_ref (a)->die_id.die_symbol;
                  int size;

                  gcc_assert (sym);
                  /* In DWARF2, DW_FORM_ref_addr is sized by target address
                     length, whereas in DWARF3 it's always sized as an
                     offset.  */
                  if (dwarf_version == 2)
                    size = DWARF2_ADDR_SIZE;
                  else
                    size = dwarf_offset_size;
                  /* ??? We cannot unconditionally output die_offset if
                     non-zero - others might create references to those
                     DIEs via symbols.
                     And we do not clear its DIE offset after outputting it
                     (and the label refers to the actual DIEs, not the
                     DWARF CU unit header which is when using label + offset
                     would be the correct thing to do).
                     ??? This is the reason for the with_offset flag.  */
                  if (AT_ref (a)->with_offset)
                    dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
                                           debug_info_section, "%s" , name);
                  else
                    dw2_asm_output_offset (size, sym, debug_info_section, "%s" ,
                                           name);
                }
            }
          else
            {
              /* Local reference: a plain offset within this unit.  */
              gcc_assert (AT_ref (a)->die_offset);
              dw2_asm_output_data (dwarf_offset_size, AT_ref (a)->die_offset,
                                   "%s" , name);
            }
          break;

        case dw_val_class_fde_ref:
          {
            char l1[MAX_ARTIFICIAL_LABEL_BYTES];

            ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
                                         a->dw_attr_val.v.val_fde_index * 2);
            dw2_asm_output_offset (dwarf_offset_size, l1, debug_frame_section,
                                   "%s" , name);
          }
          break;

        case dw_val_class_vms_delta:
#ifdef ASM_OUTPUT_DWARF_VMS_DELTA
          dw2_asm_output_vms_delta (dwarf_offset_size,
                                    AT_vms_delta2 (a), AT_vms_delta1 (a),
                                    "%s" , name);
#else
          dw2_asm_output_delta (dwarf_offset_size,
                                AT_vms_delta2 (a), AT_vms_delta1 (a),
                                "%s" , name);
#endif
          break;

        case dw_val_class_lbl_id:
          output_attr_index_or_value (a);
          break;

        case dw_val_class_lineptr:
          dw2_asm_output_offset (dwarf_offset_size, AT_lbl (a),
                                 debug_line_section, "%s" , name);
          break;

        case dw_val_class_macptr:
          dw2_asm_output_offset (dwarf_offset_size, AT_lbl (a),
                                 debug_macinfo_section, "%s" , name);
          break;

        case dw_val_class_loclistsptr:
          dw2_asm_output_offset (dwarf_offset_size, AT_lbl (a),
                                 debug_loc_section, "%s" , name);
          break;

        case dw_val_class_str:
          /* The string form selects between the string sections, a
             string index, or an inline NUL-terminated string.  */
          if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
            dw2_asm_output_offset (dwarf_offset_size,
                                   a->dw_attr_val.v.val_str->label,
                                   debug_str_section,
                                   "%s: \"%s\"" , name, AT_string (a));
          else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
            dw2_asm_output_offset (dwarf_offset_size,
                                   a->dw_attr_val.v.val_str->label,
                                   debug_line_str_section,
                                   "%s: \"%s\"" , name, AT_string (a));
          else if (a->dw_attr_val.v.val_str->form == dwarf_FORM (form: DW_FORM_strx))
            dw2_asm_output_data_uleb128 (AT_index (a),
                                         "%s: \"%s\"" , name, AT_string (a));
          else
            dw2_asm_output_nstring (AT_string (a), -1, "%s" , name);
          break;

        case dw_val_class_file:
          {
            int f = maybe_emit_file (fd: a->dw_attr_val.v.val_file);

            dw2_asm_output_data (constant_size (value: f), f, "%s (%s)" , name,
                                 a->dw_attr_val.v.val_file->filename);
            break;
          }

        case dw_val_class_file_implicit:
          /* Implicit file numbers only appear as an assembler comment.  */
          if (flag_debug_asm)
            fprintf (stream: asm_out_file, format: "\t\t\t%s %s (%d, %s)\n" ,
                     ASM_COMMENT_START, name,
                     maybe_emit_file (fd: a->dw_attr_val.v.val_file),
                     a->dw_attr_val.v.val_file->filename);
          break;

        case dw_val_class_data8:
          {
            int i;

            for (i = 0; i < 8; i++)
              dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
                                   i == 0 ? "%s" : NULL, name);
            break;
          }

        case dw_val_class_high_pc:
          /* Emitted as a delta from this DIE's low_pc label.  */
          dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
                                get_AT_low_pc (die), "DW_AT_high_pc" );
          break;

        case dw_val_class_discr_value:
          output_discr_value (discr_value: &a->dw_attr_val.v.val_discr_value, name);
          break;

        case dw_val_class_discr_list:
          {
            dw_discr_list_ref list = AT_discr_list (a);
            const int size = size_of_discr_list (discr_list: list);

            /* This is a block, so output its length first.  */
            dw2_asm_output_data (constant_size (value: size), size,
                                 "%s: block size" , name);

            for (; list != NULL; list = list->dw_discr_next)
              {
                /* One byte for the discriminant value descriptor, and then as
                   many LEB128 numbers as required.  */
                if (list->dw_discr_range)
                  dw2_asm_output_data (1, DW_DSC_range,
                                       "%s: DW_DSC_range" , name);
                else
                  dw2_asm_output_data (1, DW_DSC_label,
                                       "%s: DW_DSC_label" , name);

                output_discr_value (discr_value: &list->dw_discr_lower_bound, name);
                if (list->dw_discr_range)
                  output_discr_value (discr_value: &list->dw_discr_upper_bound, name);
              }
            break;
          }

        default:
          gcc_unreachable ();
        }
    }

  FOR_EACH_CHILD (die, c, output_die (c));

  /* Add null byte to terminate sibling list.  */
  if (die->die_child != NULL)
    dw2_asm_output_data (1, 0, "end of children of DIE %#lx" ,
                         (unsigned long) die->die_offset);
}
11162 | |
11163 | /* Output the dwarf version number. */ |
11164 | |
11165 | static void |
11166 | output_dwarf_version () |
11167 | { |
11168 | /* ??? For now, if -gdwarf-6 is specified, we output version 5 with |
11169 | views in loclist. That will change eventually. */ |
11170 | if (dwarf_version == 6) |
11171 | { |
11172 | static bool once; |
11173 | if (!once) |
11174 | { |
11175 | warning (0, "%<-gdwarf-6%> is output as version 5 with " |
11176 | "incompatibilities" ); |
11177 | once = true; |
11178 | } |
11179 | dw2_asm_output_data (2, 5, "DWARF version number" ); |
11180 | } |
11181 | else |
11182 | dw2_asm_output_data (2, dwarf_version, "DWARF version number" ); |
11183 | } |
11184 | |
11185 | /* Output the compilation unit that appears at the beginning of the |
11186 | .debug_info section, and precedes the DIE descriptions. */ |
11187 | |
11188 | static void |
11189 | (enum dwarf_unit_type ut) |
11190 | { |
11191 | if (!XCOFF_DEBUGGING_INFO) |
11192 | { |
11193 | if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4) |
11194 | dw2_asm_output_data (4, 0xffffffff, |
11195 | "Initial length escape value indicating 64-bit DWARF extension" ); |
11196 | dw2_asm_output_data (dwarf_offset_size, |
11197 | next_die_offset - DWARF_INITIAL_LENGTH_SIZE, |
11198 | "Length of Compilation Unit Info" ); |
11199 | } |
11200 | |
11201 | output_dwarf_version (); |
11202 | if (dwarf_version >= 5) |
11203 | { |
11204 | const char *name; |
11205 | switch (ut) |
11206 | { |
11207 | case DW_UT_compile: name = "DW_UT_compile" ; break; |
11208 | case DW_UT_type: name = "DW_UT_type" ; break; |
11209 | case DW_UT_split_compile: name = "DW_UT_split_compile" ; break; |
11210 | case DW_UT_split_type: name = "DW_UT_split_type" ; break; |
11211 | default: gcc_unreachable (); |
11212 | } |
11213 | dw2_asm_output_data (1, ut, "%s" , name); |
11214 | dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)" ); |
11215 | } |
11216 | dw2_asm_output_offset (dwarf_offset_size, abbrev_section_label, |
11217 | debug_abbrev_section, |
11218 | "Offset Into Abbrev. Section" ); |
11219 | if (dwarf_version < 5) |
11220 | dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)" ); |
11221 | } |
11222 | |
/* Output the compilation unit DIE and its children.

   DIE is the compilation unit DIE to emit.  OUTPUT_IF_EMPTY forces
   output even when DIE has no children (used for the main CU).
   DWO_ID, when non-NULL, is the 8-byte DWO id of a split compile
   unit; it selects the skeleton-sized header, DW_UT_split_compile,
   and causes the id bytes to be emitted after the header.  */

static void
output_comp_unit (dw_die_ref die, int output_if_empty,
                  const unsigned char *dwo_id)
{
  const char *secname, *oldsym;
  char *tmp;

  /* Unless we are outputting main CU, we may throw away empty ones.  */
  if (!output_if_empty && die->die_child == NULL)
    return;

  /* Even if there are no children of this DIE, we must output the information
     about the compilation unit.  Otherwise, on an empty translation unit, we
     will generate a present, but empty, .debug_info section.  IRIX 6.5 `nm'
     will then complain when examining the file.  First mark all the DIEs in
     this CU so we know which get local refs.  */
  mark_dies (die);

  external_ref_hash_type *extern_map = optimize_external_refs (die);

  /* For now, optimize only the main CU, in order to optimize the rest
     we'd need to see all of them earlier.  Leave the rest for post-linking
     tools like DWZ.  */
  if (die == comp_unit_die ())
    abbrev_opt_start = vec_safe_length (v: abbrev_die_table);

  build_abbrev_table (die, extern_map);

  optimize_abbrev_table ();

  delete extern_map;

  /* Initialize the beginning DIE offset - and calculate sizes/offsets.  */
  next_die_offset = (dwo_id
                     ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
                     : DWARF_COMPILE_UNIT_HEADER_SIZE);
  calc_die_sizes (die);

  /* A COMDAT CU (non-NULL die_symbol) goes into its own
     .gnu.linkonce.wi.* section; the symbol is cleared while emitting
     and restored afterwards.  */
  oldsym = die->die_id.die_symbol;
  if (oldsym && die->comdat_type_p)
    {
      tmp = XALLOCAVEC (char, strlen (oldsym) + 24);

      sprintf (s: tmp, format: ".gnu.linkonce.wi.%s" , oldsym);
      secname = tmp;
      die->die_id.die_symbol = NULL;
      switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
    }
  else
    {
      switch_to_section (debug_info_section);
      ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
      info_section_emitted = true;
    }

  /* For LTO cross unit DIE refs we want a symbol on the start of the
     debuginfo section, not on the CU DIE.  */
  if ((flag_generate_lto || flag_generate_offload) && oldsym)
    {
      /* ??? No way to get visibility assembled without a decl.  */
      tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
                              get_identifier (oldsym), char_type_node);
      TREE_PUBLIC (decl) = true;
      TREE_STATIC (decl) = true;
      DECL_ARTIFICIAL (decl) = true;
      DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
      DECL_VISIBILITY_SPECIFIED (decl) = true;
      targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
#ifdef ASM_WEAKEN_LABEL
      /* We prefer a .weak because that handles duplicates from duplicate
         archive members in a graceful way.  */
      ASM_WEAKEN_LABEL (asm_out_file, oldsym);
#else
      targetm.asm_out.globalize_label (asm_out_file, oldsym);
#endif
      ASM_OUTPUT_LABEL (asm_out_file, oldsym);
    }

  /* Output debugging information.  */
  output_compilation_unit_header (ut: dwo_id
                                  ? DW_UT_split_compile : DW_UT_compile);
  if (dwarf_version >= 5)
    {
      /* The 8-byte DWO id follows the header for split units.  */
      if (dwo_id != NULL)
        for (int i = 0; i < 8; i++)
          dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
    }
  output_die (die);

  /* Leave the marks on the main CU, so we can check them in
     output_pubnames.  */
  if (oldsym)
    {
      unmark_dies (die);
      die->die_id.die_symbol = oldsym;
    }
}
11322 | |
11323 | /* Whether to generate the DWARF accelerator tables in .debug_pubnames |
11324 | and .debug_pubtypes. This is configured per-target, but can be |
11325 | overridden by the -gpubnames or -gno-pubnames options. */ |
11326 | |
11327 | static inline bool |
11328 | want_pubnames (void) |
11329 | { |
11330 | if (debug_info_level <= DINFO_LEVEL_TERSE |
11331 | /* Names and types go to the early debug part only. */ |
11332 | || in_lto_p) |
11333 | return false; |
11334 | if (debug_generate_pub_sections != -1) |
11335 | return debug_generate_pub_sections; |
11336 | return targetm.want_debug_pub_sections; |
11337 | } |
11338 | |
11339 | /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */ |
11340 | |
11341 | static void |
11342 | add_AT_pubnames (dw_die_ref die) |
11343 | { |
11344 | if (want_pubnames ()) |
11345 | add_AT_flag (die, attr_kind: DW_AT_GNU_pubnames, flag: 1); |
11346 | } |
11347 | |
11348 | /* Add a string attribute value to a skeleton DIE. */ |
11349 | |
11350 | static inline void |
11351 | add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, |
11352 | const char *str) |
11353 | { |
11354 | dw_attr_node attr; |
11355 | struct indirect_string_node *node; |
11356 | |
11357 | if (! skeleton_debug_str_hash) |
11358 | skeleton_debug_str_hash |
11359 | = hash_table<indirect_string_hasher>::create_ggc (n: 10); |
11360 | |
11361 | node = find_AT_string_in_table (str, table: skeleton_debug_str_hash); |
11362 | find_string_form (node); |
11363 | if (node->form == dwarf_FORM (form: DW_FORM_strx)) |
11364 | node->form = DW_FORM_strp; |
11365 | |
11366 | attr.dw_attr = attr_kind; |
11367 | attr.dw_attr_val.val_class = dw_val_class_str; |
11368 | attr.dw_attr_val.val_entry = NULL; |
11369 | attr.dw_attr_val.v.val_str = node; |
11370 | add_dwarf_attr (die, attr: &attr); |
11371 | } |
11372 | |
11373 | /* Helper function to generate top-level dies for skeleton debug_info and |
11374 | debug_types. */ |
11375 | |
11376 | static void |
11377 | add_top_level_skeleton_die_attrs (dw_die_ref die) |
11378 | { |
11379 | const char *dwo_file_name = concat (aux_base_name, ".dwo" , NULL); |
11380 | const char *comp_dir = comp_dir_string (); |
11381 | |
11382 | add_skeleton_AT_string (die, attr_kind: dwarf_AT (at: DW_AT_dwo_name), str: dwo_file_name); |
11383 | if (comp_dir != NULL) |
11384 | add_skeleton_AT_string (die, attr_kind: DW_AT_comp_dir, str: comp_dir); |
11385 | add_AT_pubnames (die); |
11386 | if (addr_index_table != NULL && addr_index_table->size () > 0) |
11387 | add_AT_lineptr (die, attr_kind: dwarf_AT (at: DW_AT_addr_base), label: debug_addr_section_label); |
11388 | } |
11389 | |
/* Output skeleton debug sections that point to the dwo file.

   COMP_UNIT is the skeleton CU DIE.  DWO_ID is the 8-byte id that ties
   the skeleton to its .dwo counterpart; it is emitted in the unit
   header for DWARF 5 and later.  */

static void
output_skeleton_debug_sections (dw_die_ref comp_unit,
                                const unsigned char *dwo_id)
{
  /* These attributes will be found in the full debug_info section.  */
  remove_AT (die: comp_unit, attr_kind: DW_AT_producer);
  remove_AT (die: comp_unit, attr_kind: DW_AT_language);

  switch_to_section (debug_skeleton_info_section);
  ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);

  /* Produce the skeleton compilation-unit header.  This one differs enough from
     a normal CU header that it's better not to call output_compilation_unit
     header.  */
  if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
    dw2_asm_output_data (4, 0xffffffff,
                         "Initial length escape value indicating 64-bit "
                         "DWARF extension" );

  /* Unit length excludes the initial-length field itself, but counts
     the skeleton CU DIE that follows the fixed header.  */
  dw2_asm_output_data (dwarf_offset_size,
                       DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
                       - DWARF_INITIAL_LENGTH_SIZE
                       + size_of_die (die: comp_unit),
                       "Length of Compilation Unit Info" );
  output_dwarf_version ();
  if (dwarf_version >= 5)
    {
      dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton" );
      dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)" );
    }
  dw2_asm_output_offset (dwarf_offset_size, debug_skeleton_abbrev_section_label,
                         debug_skeleton_abbrev_section,
                         "Offset Into Abbrev. Section" );
  if (dwarf_version < 5)
    dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)" );
  else
    /* DWARF 5: the DWO id closes the skeleton header.  */
    for (int i = 0; i < 8; i++)
      dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);

  comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
  output_die (die: comp_unit);

  /* Build the skeleton debug_abbrev section.  */
  switch_to_section (debug_skeleton_abbrev_section);
  ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);

  output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, abbrev: comp_unit);

  dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev" );
}
11442 | |
/* Output a comdat type unit DIE and its children.

   NODE carries the type unit's root DIE and signature.  EARLY_LTO_DEBUG
   selects the .gnu.debuglto_ section variants on ELF targets (it is
   unused elsewhere, hence ATTRIBUTE_UNUSED).  */

static void
output_comdat_type_unit (comdat_type_node *node,
                         bool early_lto_debug ATTRIBUTE_UNUSED)
{
  const char *secname;
  char *tmp;
  int i;
#if defined (OBJECT_FORMAT_ELF)
  tree comdat_key;
#endif

  /* First mark all the DIEs in this CU so we know which get local refs.  */
  mark_dies (die: node->root_die);

  external_ref_hash_type *extern_map = optimize_external_refs (die: node->root_die);

  build_abbrev_table (die: node->root_die, extern_map);

  delete extern_map;
  extern_map = NULL;

  /* Initialize the beginning DIE offset - and calculate sizes/offsets.  */
  next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
  calc_die_sizes (die: node->root_die);

#if defined (OBJECT_FORMAT_ELF)
  /* On ELF, place the unit in a COMDAT group keyed on the hex-encoded
     type signature; DWARF 5 folds type units into .debug_info while
     earlier versions use .debug_types.  */
  if (dwarf_version >= 5)
    {
      if (!dwarf_split_debug_info)
        secname = early_lto_debug ? DEBUG_LTO_INFO_SECTION : DEBUG_INFO_SECTION;
      else
        secname = (early_lto_debug
                   ? DEBUG_LTO_DWO_INFO_SECTION : DEBUG_DWO_INFO_SECTION);
    }
  else if (!dwarf_split_debug_info)
    secname = early_lto_debug ? ".gnu.debuglto_.debug_types" : ".debug_types" ;
  else
    secname = (early_lto_debug
               ? ".gnu.debuglto_.debug_types.dwo" : ".debug_types.dwo" );

  tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
  sprintf (s: tmp, dwarf_version >= 5 ? "wi." : "wt." );
  for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
    sprintf (s: tmp + 3 + i * 2, format: "%02x" , node->signature[i] & 0xff);
  comdat_key = get_identifier (tmp);
  targetm.asm_out.named_section (secname,
                                 SECTION_DEBUG | SECTION_LINKONCE,
                                 comdat_key);
#else
  /* Without ELF COMDAT groups, fall back to .gnu.linkonce sections
     named after the hex-encoded signature.  */
  tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
  sprintf (tmp, (dwarf_version >= 5
                 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt." ));
  for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
    sprintf (tmp + 17 + i * 2, "%02x" , node->signature[i] & 0xff);
  secname = tmp;
  switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
#endif

  /* Output debugging information.  */
  output_compilation_unit_header (dwarf_split_debug_info
                                  ? DW_UT_split_type : DW_UT_type);
  output_signature (sig: node->signature, name: "Type Signature" );
  dw2_asm_output_data (dwarf_offset_size, node->type_die->die_offset,
                       "Offset to Type DIE" );
  output_die (die: node->root_die);

  /* Unlike the main CU (see output_comp_unit), type units do not need
     their marks afterwards.  */
  unmark_dies (die: node->root_die);
}
11513 | |
11514 | /* Return the DWARF2/3 pubname associated with a decl. */ |
11515 | |
11516 | static const char * |
11517 | dwarf2_name (tree decl, int scope) |
11518 | { |
11519 | if (DECL_NAMELESS (decl)) |
11520 | return NULL; |
11521 | return lang_hooks.dwarf_name (decl, scope ? 1 : 0); |
11522 | } |
11523 | |
11524 | /* Add a new entry to .debug_pubnames if appropriate. */ |
11525 | |
11526 | static void |
11527 | add_pubname_string (const char *str, dw_die_ref die) |
11528 | { |
11529 | pubname_entry e; |
11530 | |
11531 | e.die = die; |
11532 | e.name = xstrdup (str); |
11533 | vec_safe_push (v&: pubname_table, obj: e); |
11534 | } |
11535 | |
11536 | static void |
11537 | add_pubname (tree decl, dw_die_ref die) |
11538 | { |
11539 | if (!want_pubnames ()) |
11540 | return; |
11541 | |
11542 | /* Don't add items to the table when we expect that the consumer will have |
11543 | just read the enclosing die. For example, if the consumer is looking at a |
11544 | class_member, it will either be inside the class already, or will have just |
11545 | looked up the class to find the member. Either way, searching the class is |
11546 | faster than searching the index. */ |
11547 | if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent)) |
11548 | || is_cu_die (c: die->die_parent) || is_namespace_die (c: die->die_parent)) |
11549 | { |
11550 | const char *name = dwarf2_name (decl, scope: 1); |
11551 | |
11552 | if (name) |
11553 | add_pubname_string (str: name, die); |
11554 | } |
11555 | } |
11556 | |
11557 | /* Add an enumerator to the pubnames section. */ |
11558 | |
11559 | static void |
11560 | add_enumerator_pubname (const char *scope_name, dw_die_ref die) |
11561 | { |
11562 | pubname_entry e; |
11563 | |
11564 | gcc_assert (scope_name); |
11565 | e.name = concat (scope_name, get_AT_string (die, attr_kind: DW_AT_name), NULL); |
11566 | e.die = die; |
11567 | vec_safe_push (v&: pubname_table, obj: e); |
11568 | } |
11569 | |
11570 | /* Add a new entry to .debug_pubtypes if appropriate. */ |
11571 | |
static void
add_pubtype (tree decl, dw_die_ref die)
{
  pubname_entry e;

  if (!want_pubnames ())
    return;

  /* Only record types that are public or live directly at CU/namespace
     scope, and only once complete; typedefs count as always complete.  */
  if ((TREE_PUBLIC (decl)
       || is_cu_die (c: die->die_parent) || is_namespace_die (c: die->die_parent))
      && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
    {
      tree scope = NULL;
      const char *scope_name = "";
      /* Scope separator per language: "::" for C++, "." otherwise.  */
      const char *sep = is_cxx () ? "::" : ".";
      const char *name;

      /* Build a namespace-qualified prefix if the type is namespace-scoped.  */
      scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
      if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
	{
	  scope_name = lang_hooks.dwarf_name (scope, 1);
	  if (scope_name != NULL && scope_name[0] != '\0')
	    scope_name = concat (scope_name, sep, NULL);
	  else
	    scope_name = "";
	}

      if (TYPE_P (decl))
	name = type_tag (decl);
      else
	name = lang_hooks.dwarf_name (decl, 1);

      /* If we don't have a name for the type, there's no point in adding
	 it to the table.  */
      if (name != NULL && name[0] != '\0')
	{
	  e.die = die;
	  e.name = concat (scope_name, name, NULL);
	  vec_safe_push (v&: pubtype_table, obj: e);
	}

      /* Although it might be more consistent to add the pubinfo for the
	 enumerators as their dies are created, they should only be added if the
	 enum type meets the criteria above.  So rather than re-check the parent
	 enum type whenever an enumerator die is created, just output them all
	 here.  This isn't protected by the name conditional because anonymous
	 enums don't have names.  */
      if (die->die_tag == DW_TAG_enumeration_type)
	{
	  dw_die_ref c;

	  FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
	}
    }
}
11627 | |
11628 | /* Output a single entry in the pubnames table. */ |
11629 | |
11630 | static void |
11631 | output_pubname (dw_offset die_offset, pubname_entry *entry) |
11632 | { |
11633 | dw_die_ref die = entry->die; |
11634 | int is_static = get_AT_flag (die, attr_kind: DW_AT_external) ? 0 : 1; |
11635 | |
11636 | dw2_asm_output_data (dwarf_offset_size, die_offset, "DIE offset" ); |
11637 | |
11638 | if (debug_generate_pub_sections == 2) |
11639 | { |
11640 | /* This logic follows gdb's method for determining the value of the flag |
11641 | byte. */ |
11642 | uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE; |
11643 | switch (die->die_tag) |
11644 | { |
11645 | case DW_TAG_typedef: |
11646 | case DW_TAG_base_type: |
11647 | case DW_TAG_subrange_type: |
11648 | GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE); |
11649 | GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1); |
11650 | break; |
11651 | case DW_TAG_enumerator: |
11652 | GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, |
11653 | GDB_INDEX_SYMBOL_KIND_VARIABLE); |
11654 | if (!is_cxx ()) |
11655 | GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1); |
11656 | break; |
11657 | case DW_TAG_subprogram: |
11658 | GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, |
11659 | GDB_INDEX_SYMBOL_KIND_FUNCTION); |
11660 | if (!is_ada ()) |
11661 | GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static); |
11662 | break; |
11663 | case DW_TAG_constant: |
11664 | GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, |
11665 | GDB_INDEX_SYMBOL_KIND_VARIABLE); |
11666 | GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static); |
11667 | break; |
11668 | case DW_TAG_variable: |
11669 | GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, |
11670 | GDB_INDEX_SYMBOL_KIND_VARIABLE); |
11671 | GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static); |
11672 | break; |
11673 | case DW_TAG_namespace: |
11674 | case DW_TAG_imported_declaration: |
11675 | GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE); |
11676 | break; |
11677 | case DW_TAG_class_type: |
11678 | case DW_TAG_interface_type: |
11679 | case DW_TAG_structure_type: |
11680 | case DW_TAG_union_type: |
11681 | case DW_TAG_enumeration_type: |
11682 | GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE); |
11683 | if (!is_cxx ()) |
11684 | GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1); |
11685 | break; |
11686 | default: |
11687 | /* An unusual tag. Leave the flag-byte empty. */ |
11688 | break; |
11689 | } |
11690 | dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE, |
11691 | "GDB-index flags" ); |
11692 | } |
11693 | |
11694 | dw2_asm_output_nstring (entry->name, -1, "external name" ); |
11695 | } |
11696 | |
11697 | |
11698 | /* Output the public names table used to speed up access to externally |
11699 | visible names; or the public types table used to find type definitions. */ |
11700 | |
static void
output_pubnames (vec<pubname_entry, va_gc> *names)
{
  unsigned i;
  unsigned long pubnames_length = size_of_pubnames (names);
  pubname_entry *pub;

  /* XCOFF manages section lengths itself; elsewhere emit the (possibly
     64-bit-extended) initial length field.  */
  if (!XCOFF_DEBUGGING_INFO)
    {
      if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
	dw2_asm_output_data (4, 0xffffffff,
	  "Initial length escape value indicating 64-bit DWARF extension");
      dw2_asm_output_data (dwarf_offset_size, pubnames_length,
			   "Pub Info Length");
    }

  /* Version number for pubnames/pubtypes is independent of dwarf version.  */
  dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");

  /* Point back at the CU this table indexes: the skeleton CU under
     -gsplit-dwarf, otherwise the regular .debug_info CU.  */
  if (dwarf_split_debug_info)
    dw2_asm_output_offset (dwarf_offset_size, debug_skeleton_info_section_label,
			   debug_skeleton_info_section,
			   "Offset of Compilation Unit Info");
  else
    dw2_asm_output_offset (dwarf_offset_size, debug_info_section_label,
			   debug_info_section,
			   "Offset of Compilation Unit Info");
  dw2_asm_output_data (dwarf_offset_size, next_die_offset,
		       "Compilation Unit Length");

  FOR_EACH_VEC_ELT (*names, i, pub)
    {
      if (include_pubname_in_output (table: names, p: pub))
	{
	  dw_offset die_offset = pub->die->die_offset;

	  /* We shouldn't see pubnames for DIEs outside of the main CU.  */
	  if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
	    gcc_assert (pub->die->die_mark);

	  /* If we're putting types in their own .debug_types sections,
	     the .debug_pubtypes table will still point to the compile
	     unit (not the type unit), so we want to use the offset of
	     the skeleton DIE (if there is one).  */
	  if (pub->die->comdat_type_p && names == pubtype_table)
	    {
	      comdat_type_node *type_node = pub->die->die_id.die_type_node;

	      if (type_node != NULL)
		die_offset = (type_node->skeleton_die != NULL
			      ? type_node->skeleton_die->die_offset
			      : comp_unit_die ()->die_offset);
	    }

	  output_pubname (die_offset, entry: pub);
	}
    }

  /* Terminate the table with a zero DIE offset.  */
  dw2_asm_output_data (dwarf_offset_size, 0, NULL);
}
11761 | |
11762 | /* Output public names and types tables if necessary. */ |
11763 | |
11764 | static void |
11765 | output_pubtables (void) |
11766 | { |
11767 | if (!want_pubnames () || !info_section_emitted) |
11768 | return; |
11769 | |
11770 | switch_to_section (debug_pubnames_section); |
11771 | output_pubnames (names: pubname_table); |
11772 | /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2. |
11773 | It shouldn't hurt to emit it always, since pure DWARF2 consumers |
11774 | simply won't look for the section. */ |
11775 | switch_to_section (debug_pubtypes_section); |
11776 | output_pubnames (names: pubtype_table); |
11777 | } |
11778 | |
11779 | |
11780 | /* Output the information that goes into the .debug_aranges table. |
11781 | Namely, define the beginning and ending address range of the |
11782 | text section generated for this compilation unit. */ |
11783 | |
static void
output_aranges (void)
{
  unsigned i;
  unsigned long aranges_length = size_of_aranges ();

  /* XCOFF manages section lengths itself; elsewhere emit the (possibly
     64-bit-extended) initial length field.  */
  if (!XCOFF_DEBUGGING_INFO)
    {
      if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
	dw2_asm_output_data (4, 0xffffffff,
	  "Initial length escape value indicating 64-bit DWARF extension");
      dw2_asm_output_data (dwarf_offset_size, aranges_length,
			   "Length of Address Ranges Info");
    }

  /* Version number for aranges is still 2, even up to DWARF5.  */
  dw2_asm_output_data (2, 2, "DWARF aranges version");
  /* Point back at the CU header this table indexes.  */
  if (dwarf_split_debug_info)
    dw2_asm_output_offset (dwarf_offset_size, debug_skeleton_info_section_label,
			   debug_skeleton_info_section,
			   "Offset of Compilation Unit Info");
  else
    dw2_asm_output_offset (dwarf_offset_size, debug_info_section_label,
			   debug_info_section,
			   "Offset of Compilation Unit Info");
  dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
  dw2_asm_output_data (1, 0, "Size of Segment Descriptor");

  /* We need to align to twice the pointer size here.  */
  if (DWARF_ARANGES_PAD_SIZE)
    {
      /* Pad using a 2 byte words so that padding is correct for any
	 pointer size.  */
      dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
			   2 * DWARF2_ADDR_SIZE);
      for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
	dw2_asm_output_data (2, 0, NULL);
    }

  /* It is necessary not to output these entries if the sections were
     not used; if the sections were not used, the length will be 0 and
     the address may end up as 0 if the section is discarded by ld
     --gc-sections, leaving an invalid (0, 0) entry that can be
     confused with the terminator.  */
  if (switch_text_ranges)
    {
      /* Walk the alternating labels, emitting one (address, length)
	 entry per begin/end pair, starting from the section start.  */
      const char *prev_loc = text_section_label;
      const char *loc;
      unsigned idx;

      FOR_EACH_VEC_ELT (*switch_text_ranges, idx, loc)
	if (prev_loc)
	  {
	    dw2_asm_output_addr (DWARF2_ADDR_SIZE, prev_loc, "Address");
	    dw2_asm_output_delta (DWARF2_ADDR_SIZE, loc, prev_loc, "Length");
	    prev_loc = NULL;
	  }
	else
	  prev_loc = loc;

      /* A trailing unmatched begin label extends to the end label.  */
      if (prev_loc)
	{
	  dw2_asm_output_addr (DWARF2_ADDR_SIZE, prev_loc, "Address");
	  dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
				prev_loc, "Length");
	}
    }

  if (switch_cold_ranges)
    {
      /* Same pairing scheme for the cold text section.  */
      const char *prev_loc = cold_text_section_label;
      const char *loc;
      unsigned idx;

      FOR_EACH_VEC_ELT (*switch_cold_ranges, idx, loc)
	if (prev_loc)
	  {
	    dw2_asm_output_addr (DWARF2_ADDR_SIZE, prev_loc, "Address");
	    dw2_asm_output_delta (DWARF2_ADDR_SIZE, loc, prev_loc, "Length");
	    prev_loc = NULL;
	  }
	else
	  prev_loc = loc;

      if (prev_loc)
	{
	  dw2_asm_output_addr (DWARF2_ADDR_SIZE, prev_loc, "Address");
	  dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
				prev_loc, "Length");
	}
    }

  if (have_multiple_function_sections)
    {
      unsigned fde_idx;
      dw_fde_ref fde;

      /* Emit an entry for every function (or second function part) that
	 lives outside the standard text/cold sections.  */
      FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
	{
	  if (fde->ignored_debug)
	    continue;
	  if (!fde->in_std_section)
	    {
	      dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
				   "Address");
	      dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
				    fde->dw_fde_begin, "Length");
	    }
	  if (fde->dw_fde_second_begin && !fde->second_in_std_section)
	    {
	      dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
				   "Address");
	      dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
				    fde->dw_fde_second_begin, "Length");
	    }
	}
    }

  /* Output the terminator words.  */
  dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
  dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
}
11906 | |
11907 | /* Add a new entry to .debug_ranges. Return its index into |
11908 | ranges_table vector. */ |
11909 | |
11910 | static unsigned int |
11911 | add_ranges_num (int num, bool maybe_new_sec) |
11912 | { |
11913 | dw_ranges r = { NULL, .num: num, .idx: 0, .maybe_new_sec: maybe_new_sec, NULL, NULL }; |
11914 | vec_safe_push (v&: ranges_table, obj: r); |
11915 | return vec_safe_length (v: ranges_table) - 1; |
11916 | } |
11917 | |
11918 | /* Add a new entry to .debug_ranges corresponding to a block, or a |
11919 | range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if |
11920 | this entry might be in a different section from previous range. */ |
11921 | |
11922 | static unsigned int |
11923 | add_ranges (const_tree block, bool maybe_new_sec) |
11924 | { |
11925 | return add_ranges_num (num: block ? BLOCK_NUMBER (block) : 0, maybe_new_sec); |
11926 | } |
11927 | |
11928 | /* Note that (*rnglist_table)[offset] is either a head of a rnglist |
11929 | chain, or middle entry of a chain that will be directly referred to. */ |
11930 | |
11931 | static void |
11932 | note_rnglist_head (unsigned int offset) |
11933 | { |
11934 | if (dwarf_version < 5 || (*ranges_table)[offset].label) |
11935 | return; |
11936 | (*ranges_table)[offset].label = gen_internal_sym (prefix: "LLRL" ); |
11937 | } |
11938 | |
11939 | /* Add a new entry to .debug_ranges corresponding to a pair of labels. |
11940 | When using dwarf_split_debug_info, address attributes in dies destined |
11941 | for the final executable should be direct references--setting the |
11942 | parameter force_direct ensures this behavior. */ |
11943 | |
11944 | static void |
11945 | add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end, |
11946 | bool *added, bool force_direct) |
11947 | { |
11948 | unsigned int in_use = vec_safe_length (v: ranges_by_label); |
11949 | unsigned int offset; |
11950 | dw_ranges_by_label rbl = { .begin: begin, .end: end }; |
11951 | vec_safe_push (v&: ranges_by_label, obj: rbl); |
11952 | offset = add_ranges_num (num: -(int)in_use - 1, maybe_new_sec: true); |
11953 | if (!*added) |
11954 | { |
11955 | add_AT_range_list (die, attr_kind: DW_AT_ranges, offset, force_direct); |
11956 | *added = true; |
11957 | note_rnglist_head (offset); |
11958 | if (dwarf_split_debug_info && force_direct) |
11959 | (*ranges_table)[offset].idx = DW_RANGES_IDX_SKELETON; |
11960 | } |
11961 | } |
11962 | |
11963 | /* Emit .debug_ranges section. */ |
11964 | |
static void
output_ranges (void)
{
  unsigned i;
  /* Annotate only the first entry of each list with its offset.  */
  static const char *const start_fmt = "Offset %#x";
  const char *fmt = start_fmt;
  dw_ranges *r;

  switch_to_section (debug_ranges_section);
  ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
  FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
    {
      int block_num = r->num;

      /* Positive numbers are lexical-block numbers; emit the block's
	 begin/end labels as a range pair.  */
      if (block_num > 0)
	{
	  char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
	  char elabel[MAX_ARTIFICIAL_LABEL_BYTES];

	  ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
	  ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);

	  /* If all code is in the text section, then the compilation
	     unit base address defaults to DW_AT_low_pc, which is the
	     base of the text section.  */
	  if (!have_multiple_function_sections)
	    {
	      dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
				    text_section_label,
				    fmt, i * 2 * DWARF2_ADDR_SIZE);
	      dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
				    text_section_label, NULL);
	    }

	  /* Otherwise, the compilation unit base address is zero,
	     which allows us to use absolute addresses, and not worry
	     about whether the target supports cross-section
	     arithmetic.  */
	  else
	    {
	      dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
				   fmt, i * 2 * DWARF2_ADDR_SIZE);
	      dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
	    }

	  fmt = NULL;
	}

      /* Negative block_num stands for an index into ranges_by_label.  */
      else if (block_num < 0)
	{
	  int lab_idx = - block_num - 1;

	  if (!have_multiple_function_sections)
	    {
	      gcc_unreachable ();
#if 0
	      /* If we ever use add_ranges_by_labels () for a single
		 function section, all we have to do is to take out
		 the #if 0 above.  */
	      dw2_asm_output_delta (DWARF2_ADDR_SIZE,
				    (*ranges_by_label)[lab_idx].begin,
				    text_section_label,
				    fmt, i * 2 * DWARF2_ADDR_SIZE);
	      dw2_asm_output_delta (DWARF2_ADDR_SIZE,
				    (*ranges_by_label)[lab_idx].end,
				    text_section_label, NULL);
#endif
	    }
	  else
	    {
	      dw2_asm_output_addr (DWARF2_ADDR_SIZE,
				   (*ranges_by_label)[lab_idx].begin,
				   fmt, i * 2 * DWARF2_ADDR_SIZE);
	      dw2_asm_output_addr (DWARF2_ADDR_SIZE,
				   (*ranges_by_label)[lab_idx].end,
				   NULL);
	    }
	}

      /* block_num == 0: end-of-list terminator; the next entry (if any)
	 starts a new range list, so reset the offset annotation.  */
      else
	{
	  dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
	  dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
	  fmt = start_fmt;
	}
    }
}
12052 | |
/* Non-zero if .debug_line_str should be used for .debug_line section
   strings or strings that are likely shareable with those.  Requires a
   target with indirect-string support and a mergeable .debug_str
   section, and is not usable with -gsplit-dwarf (see FIXME below).  */
#define DWARF5_USE_DEBUG_LINE_STR \
  (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET		\
   && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0		\
   /* FIXME: there is no .debug_line_str.dwo section,		\
      for -gsplit-dwarf we should use DW_FORM_strx instead.  */	\
   && !dwarf_split_debug_info)
12061 | |
12062 | |
12063 | /* Returns TRUE if we are outputting DWARF5 and the assembler supports |
12064 | DWARF5 .debug_line tables using .debug_line_str or we generate |
12065 | it ourselves, except for split-dwarf which doesn't have a |
12066 | .debug_line_str. */ |
12067 | static bool |
12068 | asm_outputs_debug_line_str (void) |
12069 | { |
12070 | if (dwarf_version >= 5 |
12071 | && ! output_asm_line_debug_info () |
12072 | && DWARF5_USE_DEBUG_LINE_STR) |
12073 | return true; |
12074 | else |
12075 | { |
12076 | #if defined(HAVE_AS_GDWARF_5_DEBUG_FLAG) && defined(HAVE_AS_WORKING_DWARF_N_FLAG) |
12077 | return !dwarf_split_debug_info && dwarf_version >= 5; |
12078 | #else |
12079 | return false; |
12080 | #endif |
12081 | } |
12082 | } |
12083 | |
12084 | /* Return true if it is beneficial to use DW_RLE_base_address{,x}. |
12085 | I is index of the following range. */ |
12086 | |
12087 | static bool |
12088 | use_distinct_base_address_for_range (unsigned int i) |
12089 | { |
12090 | if (i >= vec_safe_length (v: ranges_table)) |
12091 | return false; |
12092 | |
12093 | dw_ranges *r2 = &(*ranges_table)[i]; |
12094 | /* Use DW_RLE_base_address{,x} if there is a next range in the |
12095 | range list and is guaranteed to be in the same section. */ |
12096 | return r2->num != 0 && r2->label == NULL && !r2->maybe_new_sec; |
12097 | } |
12098 | |
12099 | /* Assign .debug_rnglists indexes and unique indexes into the debug_addr |
12100 | section when needed. */ |
12101 | |
static void
index_rnglists (void)
{
  unsigned i;
  dw_ranges *r;
  /* Whether a DW_RLE_base_address{,x} is currently in effect.  */
  bool base = false;

  FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
    {
      /* List heads (other than the skeleton's) get sequential indexes
	 for use with DW_FORM_rnglistx.  */
      if (r->label && r->idx != DW_RANGES_IDX_SKELETON)
	r->idx = rnglist_idx++;

      int block_num = r->num;
      /* With a single function section, block ranges (and, given LEB128
	 support, label ranges too) are emitted relative to the text
	 label and need no .debug_addr entries.  */
      if ((HAVE_AS_LEB128 || block_num < 0)
	  && !have_multiple_function_sections)
	continue;
      /* A new list or a possible section change invalidates the base.  */
      if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
	base = false;
      if (block_num > 0)
	{
	  char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
	  char elabel[MAX_ARTIFICIAL_LABEL_BYTES];

	  ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
	  ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);

	  if (HAVE_AS_LEB128)
	    {
	      /* Establish a base address when it will be shared by the
		 following range(s) in the same section.  */
	      if (!base && use_distinct_base_address_for_range (i: i + 1))
		{
		  r->begin_entry = add_addr_table_entry (addr: xstrdup (blabel),
							 kind: ate_kind_label);
		  base = true;
		}
	      if (base)
		/* If we have a base, no need for further
		   begin_entry/end_entry, as DW_RLE_offset_pair will be
		   used.  */
		continue;
	      r->begin_entry
		= add_addr_table_entry (addr: xstrdup (blabel), kind: ate_kind_label);
	      /* No need for end_entry, DW_RLE_start{,x}_length will use
		 length as opposed to a pair of addresses.  */
	    }
	  else
	    {
	      /* Without LEB128, both endpoints go through .debug_addr.  */
	      r->begin_entry
		= add_addr_table_entry (addr: xstrdup (blabel), kind: ate_kind_label);
	      r->end_entry
		= add_addr_table_entry (addr: xstrdup (elabel), kind: ate_kind_label);
	    }
	}

      /* Negative block_num stands for an index into ranges_by_label.  */
      else if (block_num < 0)
	{
	  int lab_idx = - block_num - 1;
	  const char *blabel = (*ranges_by_label)[lab_idx].begin;
	  const char *elabel = (*ranges_by_label)[lab_idx].end;

	  r->begin_entry
	    = add_addr_table_entry (addr: xstrdup (blabel), kind: ate_kind_label);
	  if (!HAVE_AS_LEB128)
	    r->end_entry
	      = add_addr_table_entry (addr: xstrdup (elabel), kind: ate_kind_label);
	}
    }
}
12170 | |
12171 | /* Emit .debug_rnglists or (when DWO is true) .debug_rnglists.dwo section. */ |
12172 | |
12173 | static bool |
12174 | output_rnglists (unsigned generation, bool dwo) |
12175 | { |
12176 | unsigned i; |
12177 | dw_ranges *r; |
12178 | char l1[MAX_ARTIFICIAL_LABEL_BYTES]; |
12179 | char l2[MAX_ARTIFICIAL_LABEL_BYTES]; |
12180 | char basebuf[MAX_ARTIFICIAL_LABEL_BYTES]; |
12181 | |
12182 | if (dwo) |
12183 | switch_to_section (debug_ranges_dwo_section); |
12184 | else |
12185 | { |
12186 | switch_to_section (debug_ranges_section); |
12187 | ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label); |
12188 | } |
12189 | /* There are up to 4 unique ranges labels per generation. |
12190 | See also init_sections_and_labels. */ |
12191 | ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL, |
12192 | 2 + 2 * dwo + generation * 6); |
12193 | ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL, |
12194 | 3 + 2 * dwo + generation * 6); |
12195 | if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4) |
12196 | dw2_asm_output_data (4, 0xffffffff, |
12197 | "Initial length escape value indicating " |
12198 | "64-bit DWARF extension" ); |
12199 | dw2_asm_output_delta (dwarf_offset_size, l2, l1, |
12200 | "Length of Range Lists" ); |
12201 | ASM_OUTPUT_LABEL (asm_out_file, l1); |
12202 | output_dwarf_version (); |
12203 | dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size" ); |
12204 | dw2_asm_output_data (1, 0, "Segment Size" ); |
12205 | /* Emit the offset table only for -gsplit-dwarf. If we don't care |
12206 | about relocation sizes and primarily care about the size of .debug* |
12207 | sections in linked shared libraries and executables, then |
12208 | the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes |
12209 | into it are usually larger than just DW_FORM_sec_offset offsets |
12210 | into the .debug_rnglists section. */ |
12211 | dw2_asm_output_data (4, dwo ? rnglist_idx : 0, |
12212 | "Offset Entry Count" ); |
12213 | if (dwo) |
12214 | { |
12215 | ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label); |
12216 | FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r) |
12217 | if (r->label && r->idx != DW_RANGES_IDX_SKELETON) |
12218 | dw2_asm_output_delta (dwarf_offset_size, r->label, |
12219 | ranges_base_label, NULL); |
12220 | } |
12221 | |
12222 | const char *lab = "" ; |
12223 | const char *base = NULL; |
12224 | bool skipping = false; |
12225 | bool ret = false; |
12226 | FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r) |
12227 | { |
12228 | int block_num = r->num; |
12229 | |
12230 | if (r->label) |
12231 | { |
12232 | if (dwarf_split_debug_info |
12233 | && (r->idx == DW_RANGES_IDX_SKELETON) == dwo) |
12234 | { |
12235 | ret = true; |
12236 | skipping = true; |
12237 | continue; |
12238 | } |
12239 | ASM_OUTPUT_LABEL (asm_out_file, r->label); |
12240 | lab = r->label; |
12241 | } |
12242 | if (skipping) |
12243 | { |
12244 | if (block_num == 0) |
12245 | skipping = false; |
12246 | continue; |
12247 | } |
12248 | if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec)) |
12249 | base = NULL; |
12250 | if (block_num > 0) |
12251 | { |
12252 | char blabel[MAX_ARTIFICIAL_LABEL_BYTES]; |
12253 | char elabel[MAX_ARTIFICIAL_LABEL_BYTES]; |
12254 | |
12255 | ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num); |
12256 | ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num); |
12257 | |
12258 | if (HAVE_AS_LEB128) |
12259 | { |
12260 | /* If all code is in the text section, then the compilation |
12261 | unit base address defaults to DW_AT_low_pc, which is the |
12262 | base of the text section. */ |
12263 | if (!have_multiple_function_sections) |
12264 | { |
12265 | dw2_asm_output_data (1, DW_RLE_offset_pair, |
12266 | "DW_RLE_offset_pair (%s)" , lab); |
12267 | dw2_asm_output_delta_uleb128 (blabel, text_section_label, |
12268 | "Range begin address (%s)" , lab); |
12269 | dw2_asm_output_delta_uleb128 (elabel, text_section_label, |
12270 | "Range end address (%s)" , lab); |
12271 | continue; |
12272 | } |
12273 | if (base == NULL && use_distinct_base_address_for_range (i: i + 1)) |
12274 | { |
12275 | if (dwarf_split_debug_info) |
12276 | { |
12277 | dw2_asm_output_data (1, DW_RLE_base_addressx, |
12278 | "DW_RLE_base_addressx (%s)" , lab); |
12279 | dw2_asm_output_data_uleb128 (r->begin_entry->index, |
12280 | "Base address index (%s)" , |
12281 | blabel); |
12282 | } |
12283 | else |
12284 | { |
12285 | dw2_asm_output_data (1, DW_RLE_base_address, |
12286 | "DW_RLE_base_address (%s)" , lab); |
12287 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel, |
12288 | "Base address (%s)" , lab); |
12289 | } |
12290 | strcpy (dest: basebuf, src: blabel); |
12291 | base = basebuf; |
12292 | } |
12293 | if (base) |
12294 | { |
12295 | dw2_asm_output_data (1, DW_RLE_offset_pair, |
12296 | "DW_RLE_offset_pair (%s)" , lab); |
12297 | dw2_asm_output_delta_uleb128 (blabel, base, |
12298 | "Range begin address (%s)" , lab); |
12299 | dw2_asm_output_delta_uleb128 (elabel, base, |
12300 | "Range end address (%s)" , lab); |
12301 | continue; |
12302 | } |
12303 | if (dwarf_split_debug_info) |
12304 | { |
12305 | dw2_asm_output_data (1, DW_RLE_startx_length, |
12306 | "DW_RLE_startx_length (%s)" , lab); |
12307 | dw2_asm_output_data_uleb128 (r->begin_entry->index, |
12308 | "Range begin address index " |
12309 | "(%s)" , blabel); |
12310 | } |
12311 | else |
12312 | { |
12313 | dw2_asm_output_data (1, DW_RLE_start_length, |
12314 | "DW_RLE_start_length (%s)" , lab); |
12315 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel, |
12316 | "Range begin address (%s)" , lab); |
12317 | } |
12318 | dw2_asm_output_delta_uleb128 (elabel, blabel, |
12319 | "Range length (%s)" , lab); |
12320 | } |
12321 | else if (dwarf_split_debug_info) |
12322 | { |
12323 | dw2_asm_output_data (1, DW_RLE_startx_endx, |
12324 | "DW_RLE_startx_endx (%s)" , lab); |
12325 | dw2_asm_output_data_uleb128 (r->begin_entry->index, |
12326 | "Range begin address index " |
12327 | "(%s)" , blabel); |
12328 | dw2_asm_output_data_uleb128 (r->end_entry->index, |
12329 | "Range end address index " |
12330 | "(%s)" , elabel); |
12331 | } |
12332 | else |
12333 | { |
12334 | dw2_asm_output_data (1, DW_RLE_start_end, |
12335 | "DW_RLE_start_end (%s)" , lab); |
12336 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel, |
12337 | "Range begin address (%s)" , lab); |
12338 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, |
12339 | "Range end address (%s)" , lab); |
12340 | } |
12341 | } |
12342 | |
12343 | /* Negative block_num stands for an index into ranges_by_label. */ |
12344 | else if (block_num < 0) |
12345 | { |
12346 | int lab_idx = - block_num - 1; |
12347 | const char *blabel = (*ranges_by_label)[lab_idx].begin; |
12348 | const char *elabel = (*ranges_by_label)[lab_idx].end; |
12349 | |
12350 | if (!have_multiple_function_sections) |
12351 | gcc_unreachable (); |
12352 | if (HAVE_AS_LEB128) |
12353 | { |
12354 | if (dwarf_split_debug_info) |
12355 | { |
12356 | dw2_asm_output_data (1, DW_RLE_startx_length, |
12357 | "DW_RLE_startx_length (%s)" , lab); |
12358 | dw2_asm_output_data_uleb128 (r->begin_entry->index, |
12359 | "Range begin address index " |
12360 | "(%s)" , blabel); |
12361 | } |
12362 | else |
12363 | { |
12364 | dw2_asm_output_data (1, DW_RLE_start_length, |
12365 | "DW_RLE_start_length (%s)" , lab); |
12366 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel, |
12367 | "Range begin address (%s)" , lab); |
12368 | } |
12369 | dw2_asm_output_delta_uleb128 (elabel, blabel, |
12370 | "Range length (%s)" , lab); |
12371 | } |
12372 | else if (dwarf_split_debug_info) |
12373 | { |
12374 | dw2_asm_output_data (1, DW_RLE_startx_endx, |
12375 | "DW_RLE_startx_endx (%s)" , lab); |
12376 | dw2_asm_output_data_uleb128 (r->begin_entry->index, |
12377 | "Range begin address index " |
12378 | "(%s)" , blabel); |
12379 | dw2_asm_output_data_uleb128 (r->end_entry->index, |
12380 | "Range end address index " |
12381 | "(%s)" , elabel); |
12382 | } |
12383 | else |
12384 | { |
12385 | dw2_asm_output_data (1, DW_RLE_start_end, |
12386 | "DW_RLE_start_end (%s)" , lab); |
12387 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel, |
12388 | "Range begin address (%s)" , lab); |
12389 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, |
12390 | "Range end address (%s)" , lab); |
12391 | } |
12392 | } |
12393 | else |
12394 | dw2_asm_output_data (1, DW_RLE_end_of_list, |
12395 | "DW_RLE_end_of_list (%s)" , lab); |
12396 | } |
12397 | ASM_OUTPUT_LABEL (asm_out_file, l2); |
12398 | return ret; |
12399 | } |
12400 | |
/* Data structure containing information about input files.
   FNAME always points into PATH (at the character after the last
   directory separator, or at PATH itself when there is no directory
   component); file_info_cmp and the directory-discovery code rely on
   that aliasing.  */
struct file_info
{
  const char *path;		/* Complete file name.  */
  const char *fname;		/* File name part; points into PATH.  */
  int length;			/* Length of entire string.  */
  struct dwarf_file_data * file_idx;	/* Index in input file table.  */
  int dir_idx;			/* Index in directory table.  */
};
12410 | |
/* Data structure containing information about directories with source
   files.  Built by output_file_names from the sorted file_info array;
   PATH/LENGTH alias the file_info entry the directory was first seen
   in.  */
struct dir_info
{
  const char *path;		/* Path including directory name.  */
  int length;			/* Path length.  */
  int prefix;			/* Index of directory entry which is a prefix,
				   or -1 when no shorter entry is a prefix.  */
  int count;			/* Number of files in this directory.  */
  int dir_idx;			/* Index of directory used as base; initially
				   the entry's own index, later rewritten to a
				   prefix index by the savings heuristic.  */
};
12421 | |
12422 | /* Callback function for file_info comparison. We sort by looking at |
12423 | the directories in the path. */ |
12424 | |
12425 | static int |
12426 | file_info_cmp (const void *p1, const void *p2) |
12427 | { |
12428 | const struct file_info *const s1 = (const struct file_info *) p1; |
12429 | const struct file_info *const s2 = (const struct file_info *) p2; |
12430 | const unsigned char *cp1; |
12431 | const unsigned char *cp2; |
12432 | |
12433 | /* Take care of file names without directories. We need to make sure that |
12434 | we return consistent values to qsort since some will get confused if |
12435 | we return the same value when identical operands are passed in opposite |
12436 | orders. So if neither has a directory, return 0 and otherwise return |
12437 | 1 or -1 depending on which one has the directory. We want the one with |
12438 | the directory to sort after the one without, so all no directory files |
12439 | are at the start (normally only the compilation unit file). */ |
12440 | if ((s1->path == s1->fname || s2->path == s2->fname)) |
12441 | return (s2->path == s2->fname) - (s1->path == s1->fname); |
12442 | |
12443 | cp1 = (const unsigned char *) s1->path; |
12444 | cp2 = (const unsigned char *) s2->path; |
12445 | |
12446 | while (1) |
12447 | { |
12448 | ++cp1; |
12449 | ++cp2; |
12450 | /* Reached the end of the first path? If so, handle like above, |
12451 | but now we want longer directory prefixes before shorter ones. */ |
12452 | if ((cp1 == (const unsigned char *) s1->fname) |
12453 | || (cp2 == (const unsigned char *) s2->fname)) |
12454 | return ((cp1 == (const unsigned char *) s1->fname) |
12455 | - (cp2 == (const unsigned char *) s2->fname)); |
12456 | |
12457 | /* Character of current path component the same? */ |
12458 | else if (*cp1 != *cp2) |
12459 | return *cp1 - *cp2; |
12460 | } |
12461 | } |
12462 | |
/* Bookkeeping passed to file_name_acquire while traversing the file
   hash table.  */
struct file_name_acquire_data
{
  struct file_info *files;	/* Destination array being filled.  */
  int used_files;		/* Number of entries filled so far.  */
  int max_files;		/* Capacity of FILES.  */
};
12469 | |
12470 | /* Traversal function for the hash table. */ |
12471 | |
12472 | int |
12473 | file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad) |
12474 | { |
12475 | struct dwarf_file_data *d = *slot; |
12476 | struct file_info *fi; |
12477 | const char *f; |
12478 | |
12479 | gcc_assert (fnad->max_files >= d->emitted_number); |
12480 | |
12481 | if (! d->emitted_number) |
12482 | return 1; |
12483 | |
12484 | gcc_assert (fnad->max_files != fnad->used_files); |
12485 | |
12486 | fi = fnad->files + fnad->used_files++; |
12487 | |
12488 | f = d->filename; |
12489 | |
12490 | /* Skip all leading "./". */ |
12491 | while (f[0] == '.' && IS_DIR_SEPARATOR (f[1])) |
12492 | f += 2; |
12493 | |
12494 | /* Create a new array entry. */ |
12495 | fi->path = f; |
12496 | fi->length = strlen (s: f); |
12497 | fi->file_idx = d; |
12498 | |
12499 | /* Search for the file name part. */ |
12500 | f = strrchr (s: f, DIR_SEPARATOR); |
12501 | #if defined (DIR_SEPARATOR_2) |
12502 | { |
12503 | const char *g = strrchr (fi->path, DIR_SEPARATOR_2); |
12504 | |
12505 | if (g != NULL) |
12506 | { |
12507 | if (f == NULL || f < g) |
12508 | f = g; |
12509 | } |
12510 | } |
12511 | #endif |
12512 | |
12513 | fi->fname = f == NULL ? fi->path : f + 1; |
12514 | return 1; |
12515 | } |
12516 | |
12517 | /* Helper function for output_file_names. Emit a FORM encoded |
12518 | string STR, with assembly comment start ENTRY_KIND and |
12519 | index IDX */ |
12520 | |
12521 | static void |
12522 | output_line_string (enum dwarf_form form, const char *str, |
12523 | const char *entry_kind, unsigned int idx) |
12524 | { |
12525 | switch (form) |
12526 | { |
12527 | case DW_FORM_string: |
12528 | dw2_asm_output_nstring (str, -1, "%s: %#x" , entry_kind, idx); |
12529 | break; |
12530 | case DW_FORM_line_strp: |
12531 | if (!debug_line_str_hash) |
12532 | debug_line_str_hash |
12533 | = hash_table<indirect_string_hasher>::create_ggc (n: 10); |
12534 | |
12535 | struct indirect_string_node *node; |
12536 | node = find_AT_string_in_table (str, table: debug_line_str_hash); |
12537 | set_indirect_string (node); |
12538 | node->form = form; |
12539 | dw2_asm_output_offset (dwarf_offset_size, node->label, |
12540 | debug_line_str_section, "%s: %#x: \"%s\"" , |
12541 | entry_kind, 0, node->str); |
12542 | break; |
12543 | default: |
12544 | gcc_unreachable (); |
12545 | } |
12546 | } |
12547 | |
/* Output the directory table and the file name table.  We try to minimize
   the total amount of memory needed.  A heuristic is used to avoid large
   slowdowns with many input files.

   For DWARF 5 the tables are emitted with explicit entry-format
   descriptors and a count (directory/file entry 0 describing the
   compilation directory / primary source file); for earlier versions
   the classic NUL-terminated-list layout is used.  */

static void
output_file_names (void)
{
  struct file_name_acquire_data fnad;
  int numfiles;
  struct file_info *files;
  struct dir_info *dirs;
  int *saved;
  int *savehere;
  int *backmap;
  int ndirs;
  int idx_offset;
  int i;

  /* No file was ever assigned an emitted number: emit minimal tables
     (just the comp dir / primary file for DWARF 5, or empty lists for
     older versions) and return.  */
  if (!last_emitted_file)
    {
      if (dwarf_version >= 5)
	{
	  const char *comp_dir = comp_dir_string ();
	  if (comp_dir == NULL)
	    comp_dir = "";
	  dw2_asm_output_data (1, 1, "Directory entry format count");
	  enum dwarf_form str_form = DW_FORM_string;
	  if (DWARF5_USE_DEBUG_LINE_STR)
	    str_form = DW_FORM_line_strp;
	  dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
	  dw2_asm_output_data_uleb128 (str_form, "%s",
				       get_DW_FORM_name (form: str_form));
	  dw2_asm_output_data_uleb128 (1, "Directories count");
	  if (str_form == DW_FORM_string)
	    dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
	  else
	    output_line_string (form: str_form, str: comp_dir, entry_kind: "Directory Entry", idx: 0);
	  const char *filename0 = get_AT_string (die: comp_unit_die (), attr_kind: DW_AT_name);
	  if (filename0 == NULL)
	    filename0 = "";
	  /* VMS additionally records timestamp and size columns, hence
	     4 format descriptors instead of 2.  */
#ifdef VMS_DEBUGGING_INFO
	  dw2_asm_output_data (1, 4, "File name entry format count");
#else
	  dw2_asm_output_data (1, 2, "File name entry format count");
#endif
	  dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
	  dw2_asm_output_data_uleb128 (str_form, "%s",
				       get_DW_FORM_name (form: str_form));
	  dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
				       "DW_LNCT_directory_index");
	  dw2_asm_output_data_uleb128 (DW_FORM_data1, "%s",
				       get_DW_FORM_name (form: DW_FORM_data1));
#ifdef VMS_DEBUGGING_INFO
	  dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
	  dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
	  dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
	  dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
#endif
	  dw2_asm_output_data_uleb128 (1, "File names count");

	  /* File entry 0: the primary source file, directory index 0.  */
	  output_line_string (form: str_form, str: filename0, entry_kind: "File Entry", idx: 0);
	  dw2_asm_output_data (1, 0, NULL);
#ifdef VMS_DEBUGGING_INFO
	  dw2_asm_output_data_uleb128 (0, NULL);
	  dw2_asm_output_data_uleb128 (0, NULL);
#endif
	}
      else
	{
	  dw2_asm_output_data (1, 0, "End directory table");
	  dw2_asm_output_data (1, 0, "End file name table");
	}
      return;
    }

  numfiles = last_emitted_file->emitted_number;

  /* Allocate the various arrays we need.  */
  files = XALLOCAVEC (struct file_info, numfiles);
  dirs = XALLOCAVEC (struct dir_info, numfiles);

  /* Collect every emitted file from the hash table into FILES.  */
  fnad.files = files;
  fnad.used_files = 0;
  fnad.max_files = numfiles;
  file_table->traverse<file_name_acquire_data *, file_name_acquire> (argument: &fnad);
  gcc_assert (fnad.used_files == fnad.max_files);

  /* Sort so that files sharing a directory become adjacent.  */
  qsort (files, numfiles, sizeof (files[0]), file_info_cmp);

  /* Find all the different directories used.  Thanks to the sort,
     files of the same directory are consecutive, so one linear pass
     suffices.  */
  dirs[0].path = files[0].path;
  dirs[0].length = files[0].fname - files[0].path;
  dirs[0].prefix = -1;
  dirs[0].count = 1;
  dirs[0].dir_idx = 0;
  files[0].dir_idx = 0;
  ndirs = 1;

  for (i = 1; i < numfiles; i++)
    if (files[i].fname - files[i].path == dirs[ndirs - 1].length
	&& memcmp (s1: dirs[ndirs - 1].path, s2: files[i].path,
		   n: dirs[ndirs - 1].length) == 0)
      {
	/* Same directory as last entry.  */
	files[i].dir_idx = ndirs - 1;
	++dirs[ndirs - 1].count;
      }
    else
      {
	int j;

	/* This is a new directory.  */
	dirs[ndirs].path = files[i].path;
	dirs[ndirs].length = files[i].fname - files[i].path;
	dirs[ndirs].count = 1;
	dirs[ndirs].dir_idx = ndirs;
	files[i].dir_idx = ndirs;

	/* Search for a prefix: the longest earlier directory (of more
	   than one character) whose path is an initial substring of
	   this one.  */
	dirs[ndirs].prefix = -1;
	for (j = 0; j < ndirs; j++)
	  if (dirs[j].length < dirs[ndirs].length
	      && dirs[j].length > 1
	      && (dirs[ndirs].prefix == -1
		  || dirs[j].length > dirs[dirs[ndirs].prefix].length)
	      && memcmp (s1: dirs[j].path, s2: dirs[ndirs].path, n: dirs[j].length) == 0)
	    dirs[ndirs].prefix = j;

	++ndirs;
      }

  /* Now to the actual work.  We have to find a subset of the directories which
     allow expressing the file name using references to the directory table
     with the least amount of characters.  We do not do an exhaustive search
     where we would have to check out every combination of every single
     possible prefix.  Instead we use a heuristic which provides nearly optimal
     results in most cases and never is much off.  */
  saved = XALLOCAVEC (int, ndirs);
  savehere = XALLOCAVEC (int, ndirs);

  /* SAVED[j] tracks how many characters are already saved for dirs[j]
     by prefixes committed so far; SAVEHERE[j] is the candidate saving
     if dirs[i] were used as the base of dirs[j].  */
  memset (s: saved, c: '\0', n: ndirs * sizeof (saved[0]));
  for (i = 0; i < ndirs; i++)
    {
      int j;
      int total;

      /* We can always save some space for the current directory.  But this
	 does not mean it will be enough to justify adding the directory.  */
      savehere[i] = dirs[i].length;
      total = (savehere[i] - saved[i]) * dirs[i].count;

      for (j = i + 1; j < ndirs; j++)
	{
	  savehere[j] = 0;
	  if (saved[j] < dirs[i].length)
	    {
	      /* Determine whether the dirs[i] path is a prefix of the
		 dirs[j] path.  Walk the prefix chain built above.  */
	      int k;

	      k = dirs[j].prefix;
	      while (k != -1 && k != (int) i)
		k = dirs[k].prefix;

	      if (k == (int) i)
		{
		  /* Yes it is.  We can possibly save some memory by
		     writing the filenames in dirs[j] relative to
		     dirs[i].  */
		  savehere[j] = dirs[i].length;
		  total += (savehere[j] - saved[j]) * dirs[j].count;
		}
	    }
	}

      /* Check whether we can save enough to justify adding the dirs[i]
	 directory.  The +1 accounts for the directory string's own
	 terminator in the table.  */
      if (total > dirs[i].length + 1)
	{
	  /* It's worthwhile adding.  */
	  for (j = i; j < ndirs; j++)
	    if (savehere[j] > 0)
	      {
		/* Remember how much we saved for this directory so far.  */
		saved[j] = savehere[j];

		/* Remember the prefix directory.  */
		dirs[j].dir_idx = i;
	      }
	}
    }

  /* Emit the directory name table.  IDX_OFFSET is 1 when the first
     sorted entry has a directory component, i.e. no directory-less
     slot occupies index 0; emitted directory indices are then shifted
     by one.  NOTE(review): index 0 is reserved for the compilation
     directory in DWARF 5 -- confirm against consumers for < 5.  */
  idx_offset = dirs[0].length > 0 ? 1 : 0;
  enum dwarf_form str_form = DW_FORM_string;
  enum dwarf_form idx_form = DW_FORM_udata;
  if (dwarf_version >= 5)
    {
      const char *comp_dir = comp_dir_string ();
      if (comp_dir == NULL)
	comp_dir = "";
      dw2_asm_output_data (1, 1, "Directory entry format count");
      if (DWARF5_USE_DEBUG_LINE_STR)
	str_form = DW_FORM_line_strp;
      dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
      dw2_asm_output_data_uleb128 (str_form, "%s",
				   get_DW_FORM_name (form: str_form));
      dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
      if (str_form == DW_FORM_string)
	{
	  /* Entry 0 is the compilation directory.  */
	  dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
	  for (i = 1 - idx_offset; i < ndirs; i++)
	    dw2_asm_output_nstring (dirs[i].path,
				    dirs[i].length
				    - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
				    "Directory Entry: %#x", i + idx_offset);
	}
      else
	{
	  output_line_string (form: str_form, str: comp_dir, entry_kind: "Directory Entry", idx: 0);
	  for (i = 1 - idx_offset; i < ndirs; i++)
	    {
	      /* line_strp strings must be NUL-terminated copies; make
		 one of the wanted prefix length in GC memory.  */
	      const char *str
		= ggc_alloc_string (contents: dirs[i].path,
				    length: dirs[i].length
				    - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
	      output_line_string (form: str_form, str, entry_kind: "Directory Entry",
				  idx: (unsigned) i + idx_offset);
	    }
	}
    }
  else
    {
      for (i = 1 - idx_offset; i < ndirs; i++)
	dw2_asm_output_nstring (dirs[i].path,
				dirs[i].length
				- !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
				"Directory Entry: %#x", i + idx_offset);

      dw2_asm_output_data (1, 0, "End directory table");
    }

  /* We have to emit them in the order of emitted_number since that's
     used in the debug info generation.  To do this efficiently we
     generate a back-mapping of the indices first.  */
  backmap = XALLOCAVEC (int, numfiles);
  for (i = 0; i < numfiles; i++)
    backmap[files[i].file_idx->emitted_number - 1] = i;

  if (dwarf_version >= 5)
    {
      const char *filename0 = get_AT_string (die: comp_unit_die (), attr_kind: DW_AT_name);
      if (filename0 == NULL)
	filename0 = "";
      /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
	 DW_FORM_data2.  Choose one based on the number of directories
	 and how much space would they occupy in each encoding.
	 If we have at most 256 directories, all indexes fit into
	 a single byte, so DW_FORM_data1 is most compact (if there
	 are at most 128 directories, DW_FORM_udata would be as
	 compact as that, but not shorter and slower to decode).  */
      if (ndirs + idx_offset <= 256)
	idx_form = DW_FORM_data1;
      /* If there are more than 65536 directories, we have to use
	 DW_FORM_udata, DW_FORM_data2 can't refer to them.
	 Otherwise, compute what space would occupy if all the indexes
	 used DW_FORM_udata - sum - and compare that to how large would
	 be DW_FORM_data2 encoding, and pick the more efficient one.  */
      else if (ndirs + idx_offset <= 65536)
	{
	  /* Starts at 1 -- apparently for file entry 0's single-byte
	     index emitted below; the right-hand side is the data2 cost
	     of numfiles + 1 entries.  */
	  unsigned HOST_WIDE_INT sum = 1;
	  for (i = 0; i < numfiles; i++)
	    {
	      int file_idx = backmap[i];
	      int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
	      sum += size_of_uleb128 (dir_idx);
	    }
	  if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
	    idx_form = DW_FORM_data2;
	}
#ifdef VMS_DEBUGGING_INFO
      dw2_asm_output_data (1, 4, "File name entry format count");
#else
      dw2_asm_output_data (1, 2, "File name entry format count");
#endif
      dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
      dw2_asm_output_data_uleb128 (str_form, "%s",
				   get_DW_FORM_name (form: str_form));
      dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
				   "DW_LNCT_directory_index");
      dw2_asm_output_data_uleb128 (idx_form, "%s",
				   get_DW_FORM_name (form: idx_form));
#ifdef VMS_DEBUGGING_INFO
      dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
      dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
      dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
      dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
#endif
      dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");

      /* File entry 0: the primary source file.  */
      output_line_string (form: str_form, str: filename0, entry_kind: "File Entry", idx: 0);

      /* Include directory index.  */
      if (idx_form != DW_FORM_udata)
	dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
			     0, NULL);
      else
	dw2_asm_output_data_uleb128 (0, NULL);

#ifdef VMS_DEBUGGING_INFO
      dw2_asm_output_data_uleb128 (0, NULL);
      dw2_asm_output_data_uleb128 (0, NULL);
#endif
    }

  /* Now write all the file names, in emitted_number order via BACKMAP.
     Each path is emitted relative to its chosen base directory.  */
  for (i = 0; i < numfiles; i++)
    {
      int file_idx = backmap[i];
      int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;

#ifdef VMS_DEBUGGING_INFO
#define MAX_VMS_VERSION_LEN 6 /* ";32768" */

      /* Setting these fields can lead to debugger miscomparisons,
	 but VMS Debug requires them to be set correctly.  */

      int ver;
      long long cdt;
      long siz;
      int maxfilelen = (strlen (files[file_idx].path)
			+ dirs[dir_idx].length
			+ MAX_VMS_VERSION_LEN + 1);
      char *filebuf = XALLOCAVEC (char, maxfilelen);

      vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
      snprintf (filebuf, maxfilelen, "%s;%d",
		files[file_idx].path + dirs[dir_idx].length, ver);

      output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);

      /* Include directory index.  */
      if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
	dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
			     dir_idx + idx_offset, NULL);
      else
	dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);

      /* Modification time.  */
      dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
							 &cdt, 0, 0, 0) == 0)
				   ? cdt : 0, NULL);

      /* File length in bytes.  */
      dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
							 0, &siz, 0, 0) == 0)
				   ? siz : 0, NULL);
#else
      output_line_string (form: str_form,
			  str: files[file_idx].path + dirs[dir_idx].length,
			  entry_kind: "File Entry", idx: (unsigned) i + 1);

      /* Include directory index.  */
      if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
	dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
			     dir_idx + idx_offset, NULL);
      else
	dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);

      /* DWARF 5 file entries have no timestamp/size columns here.  */
      if (dwarf_version >= 5)
	continue;

      /* Modification time.  */
      dw2_asm_output_data_uleb128 (0, NULL);

      /* File length in bytes.  */
      dw2_asm_output_data_uleb128 (0, NULL);
#endif /* VMS_DEBUGGING_INFO */
    }

  if (dwarf_version < 5)
    dw2_asm_output_data (1, 0, "End file name table");
}
12931 | |
12932 | |
/* Output one line number table into the .debug_line section.  Walks
   TABLE's entries, translating each LI_* record into the corresponding
   DWARF line-number-program opcode, then closes the sequence with a
   final set_address/end_sequence pair at TABLE's end label.  */

static void
output_one_line_info_table (dw_line_info_table *table)
{
  char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
  /* DWARF line-number state machine registers we track: the line
     register starts at 1 and is_stmt at the header default.  */
  unsigned int current_line = 1;
  bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
  dw_line_info_entry *ent, *prev_addr = NULL;
  size_t i;
  unsigned int view;

  view = 0;

  FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
    {
      switch (ent->opcode)
	{
	case LI_set_address:
	  /* ??? Unfortunately, we have little choice here currently, and
	     must always use the most general form.  GCC does not know the
	     address delta itself, so we can't use DW_LNS_advance_pc.  Many
	     ports do have length attributes which will give an upper bound
	     on the address range.  We could perhaps use length attributes
	     to determine when it is safe to use DW_LNS_fixed_advance_pc.  */
	  ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);

	  /* A new address starts a new view sequence.  */
	  view = 0;

	  /* This can handle any delta.  This takes
	     4+DWARF2_ADDR_SIZE bytes.  Emitted as an extended opcode:
	     prefix 0, length, DW_LNE_set_address, the address.  */
	  dw2_asm_output_data (1, 0, "set address %s%s", line_label,
			       debug_variable_location_views
			       ? ", reset view to 0" : "");
	  dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
	  dw2_asm_output_data (1, DW_LNE_set_address, NULL);
	  dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);

	  prev_addr = ent;
	  break;

	case LI_adv_address:
	  {
	    /* NOTE(review): assumes a LI_set_address (or earlier
	       LI_adv_address) preceded this entry so PREV_ADDR is
	       non-null -- table construction appears to guarantee it;
	       confirm.  */
	    ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
	    char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
	    ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);

	    /* Advancing the address within a sequence bumps the view.  */
	    view++;

	    dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
	    dw2_asm_output_delta (2, line_label, prev_label,
				  "from %s to %s", prev_label, line_label);

	    prev_addr = ent;
	    break;
	  }

	case LI_set_line:
	  if (ent->val == current_line)
	    {
	      /* We still need to start a new row, so output a copy insn.  */
	      dw2_asm_output_data (1, DW_LNS_copy,
				   "copy line %u", current_line);
	    }
	  else
	    {
	      int line_offset = ent->val - current_line;
	      int line_delta = line_offset - DWARF_LINE_BASE;

	      current_line = ent->val;
	      if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
		{
		  /* This can handle deltas from -10 to 234, using the current
		     definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
		     This takes 1 byte.  */
		  dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
				       "line %u", current_line);
		}
	      else
		{
		  /* This can handle any delta.  This takes at least 4 bytes,
		     depending on the value being encoded.  */
		  dw2_asm_output_data (1, DW_LNS_advance_line,
				       "advance to line %u", current_line);
		  dw2_asm_output_data_sleb128 (line_offset, NULL);
		  dw2_asm_output_data (1, DW_LNS_copy, NULL);
		}
	    }
	  break;

	case LI_set_file:
	  dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
	  dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
	  break;

	case LI_set_column:
	  dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
	  dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
	  break;

	case LI_negate_stmt:
	  current_is_stmt = !current_is_stmt;
	  dw2_asm_output_data (1, DW_LNS_negate_stmt,
			       "is_stmt %d", current_is_stmt);
	  break;

	case LI_set_prologue_end:
	  dw2_asm_output_data (1, DW_LNS_set_prologue_end,
			       "set prologue end");
	  break;

	case LI_set_epilogue_begin:
	  dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
			       "set epilogue begin");
	  break;

	case LI_set_discriminator:
	  /* Extended opcode: prefix 0, payload length, sub-opcode,
	     uleb128 discriminator value.  */
	  dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
	  dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
	  dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
	  dw2_asm_output_data_uleb128 (ent->val, NULL);
	  break;
	}
    }

  /* Emit debug info for the address of the end of the table.  */
  dw2_asm_output_data (1, 0, "set address %s", table->end_label);
  dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
  dw2_asm_output_data (1, DW_LNE_set_address, NULL);
  dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);

  /* Close the sequence (extended opcode DW_LNE_end_sequence).  */
  dw2_asm_output_data (1, 0, "end sequence");
  dw2_asm_output_data_uleb128 (1, NULL);
  dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
}
13068 | |
/* Counter making the internal labels (begin/end/prologue) of each
   .debug_line table emitted by output_line_info unique; incremented
   once per emitted table.  */
static unsigned int output_line_info_generation;
13070 | |
13071 | /* Output the source line number correspondence information. This |
13072 | information goes into the .debug_line section. */ |
13073 | |
13074 | static void |
13075 | output_line_info (bool prologue_only) |
13076 | { |
13077 | char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES]; |
13078 | char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES]; |
13079 | bool saw_one = false; |
13080 | int opc; |
13081 | |
13082 | ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, |
13083 | output_line_info_generation); |
13084 | ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, |
13085 | output_line_info_generation); |
13086 | ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, |
13087 | output_line_info_generation); |
13088 | ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, |
13089 | output_line_info_generation++); |
13090 | |
13091 | if (!XCOFF_DEBUGGING_INFO) |
13092 | { |
13093 | if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4) |
13094 | dw2_asm_output_data (4, 0xffffffff, |
13095 | "Initial length escape value indicating 64-bit DWARF extension" ); |
13096 | dw2_asm_output_delta (dwarf_offset_size, l2, l1, |
13097 | "Length of Source Line Info" ); |
13098 | } |
13099 | |
13100 | ASM_OUTPUT_LABEL (asm_out_file, l1); |
13101 | |
13102 | output_dwarf_version (); |
13103 | if (dwarf_version >= 5) |
13104 | { |
13105 | dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size" ); |
13106 | dw2_asm_output_data (1, 0, "Segment Size" ); |
13107 | } |
13108 | dw2_asm_output_delta (dwarf_offset_size, p2, p1, "Prolog Length" ); |
13109 | ASM_OUTPUT_LABEL (asm_out_file, p1); |
13110 | |
13111 | /* Define the architecture-dependent minimum instruction length (in bytes). |
13112 | In this implementation of DWARF, this field is used for information |
13113 | purposes only. Since GCC generates assembly language, we have no |
13114 | a priori knowledge of how many instruction bytes are generated for each |
13115 | source line, and therefore can use only the DW_LNE_set_address and |
13116 | DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix |
13117 | this as '1', which is "correct enough" for all architectures, |
13118 | and don't let the target override. */ |
13119 | dw2_asm_output_data (1, 1, "Minimum Instruction Length" ); |
13120 | |
13121 | if (dwarf_version >= 4) |
13122 | dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN, |
13123 | "Maximum Operations Per Instruction" ); |
13124 | dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START, |
13125 | "Default is_stmt_start flag" ); |
13126 | dw2_asm_output_data (1, DWARF_LINE_BASE, |
13127 | "Line Base Value (Special Opcodes)" ); |
13128 | dw2_asm_output_data (1, DWARF_LINE_RANGE, |
13129 | "Line Range Value (Special Opcodes)" ); |
13130 | dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE, |
13131 | "Special Opcode Base" ); |
13132 | |
13133 | for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++) |
13134 | { |
13135 | int n_op_args; |
13136 | switch (opc) |
13137 | { |
13138 | case DW_LNS_advance_pc: |
13139 | case DW_LNS_advance_line: |
13140 | case DW_LNS_set_file: |
13141 | case DW_LNS_set_column: |
13142 | case DW_LNS_fixed_advance_pc: |
13143 | case DW_LNS_set_isa: |
13144 | n_op_args = 1; |
13145 | break; |
13146 | default: |
13147 | n_op_args = 0; |
13148 | break; |
13149 | } |
13150 | |
13151 | dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args" , |
13152 | opc, n_op_args); |
13153 | } |
13154 | |
13155 | /* Write out the information about the files we use. */ |
13156 | output_file_names (); |
13157 | ASM_OUTPUT_LABEL (asm_out_file, p2); |
13158 | if (prologue_only) |
13159 | { |
13160 | /* Output the marker for the end of the line number info. */ |
13161 | ASM_OUTPUT_LABEL (asm_out_file, l2); |
13162 | return; |
13163 | } |
13164 | |
13165 | if (separate_line_info) |
13166 | { |
13167 | dw_line_info_table *table; |
13168 | size_t i; |
13169 | |
13170 | FOR_EACH_VEC_ELT (*separate_line_info, i, table) |
13171 | if (table->in_use) |
13172 | { |
13173 | output_one_line_info_table (table); |
13174 | saw_one = true; |
13175 | } |
13176 | } |
13177 | if (cold_text_section_line_info && cold_text_section_line_info->in_use) |
13178 | { |
13179 | output_one_line_info_table (table: cold_text_section_line_info); |
13180 | saw_one = true; |
13181 | } |
13182 | |
13183 | /* ??? Some Darwin linkers crash on a .debug_line section with no |
13184 | sequences. Further, merely a DW_LNE_end_sequence entry is not |
13185 | sufficient -- the address column must also be initialized. |
13186 | Make sure to output at least one set_address/end_sequence pair, |
13187 | choosing .text since that section is always present. */ |
13188 | if (text_section_line_info->in_use || !saw_one) |
13189 | output_one_line_info_table (table: text_section_line_info); |
13190 | |
13191 | /* Output the marker for the end of the line number info. */ |
13192 | ASM_OUTPUT_LABEL (asm_out_file, l2); |
13193 | } |
13194 | |
13195 | /* Return true if DW_AT_endianity should be emitted according to REVERSE. */ |
13196 | |
13197 | static inline bool |
13198 | need_endianity_attribute_p (bool reverse) |
13199 | { |
13200 | return reverse && (dwarf_version >= 3 || !dwarf_strict); |
13201 | } |
13202 | |
/* Given a pointer to a tree node for some base type, return a pointer to
   a DIE that describes the given type.  REVERSE is true if the type is
   to be interpreted in the reverse storage order wrt the target order.

   This routine must only be called for GCC type nodes that correspond to
   Dwarf base (fundamental) types.  */

dw_die_ref
base_type_die (tree type, bool reverse)
{
  dw_die_ref base_type_result;
  enum dwarf_type encoding;
  /* Set when the language hook identifies TYPE as a fixed-point type;
     FPT_INFO then holds its scale-factor description.  */
  bool fpt_used = false;
  struct fixed_point_type_info fpt_info;
  /* Bias reported by the language hook (if any); emitted below as the
     DW_AT_GNU_bias extension.  */
  tree type_bias = NULL_TREE;

  /* If this is a subtype that should not be emitted as a subrange type,
     use the base type.  See subrange_type_for_debug_p.  */
  if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
    type = TREE_TYPE (type);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
      /* The built-in char8_t/char16_t/char32_t types get the DWARF 4
	 DW_ATE_UTF encoding (also emitted as an extension when not in
	 strict pre-DWARF-4 mode).  */
      if ((dwarf_version >= 4 || !dwarf_strict)
	  && TYPE_NAME (type)
	  && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	  && DECL_IS_UNDECLARED_BUILTIN (TYPE_NAME (type))
	  && DECL_NAME (TYPE_NAME (type)))
	{
	  const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
	  if (strcmp (s1: name, s2: "char16_t" ) == 0
	      || strcmp (s1: name, s2: "char8_t" ) == 0
	      || strcmp (s1: name, s2: "char32_t" ) == 0)
	    {
	      encoding = DW_ATE_UTF;
	      break;
	    }
	}
      /* Some front ends represent fixed-point values with an
	 INTEGER_TYPE; ask the language hook whether that is the case
	 here, and remember the answer for the scale-factor attributes
	 added after the switch.  */
      if ((dwarf_version >= 3 || !dwarf_strict)
	  && lang_hooks.types.get_fixed_point_type_info)
	{
	  memset (s: &fpt_info, c: 0, n: sizeof (fpt_info));
	  if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
	    {
	      fpt_used = true;
	      encoding = ((TYPE_UNSIGNED (type))
			  ? DW_ATE_unsigned_fixed
			  : DW_ATE_signed_fixed);
	      break;
	    }
	}
      if (TYPE_STRING_FLAG (type))
	{
	  /* Rust's 4-byte character type is Unicode, not a plain char.  */
	  if ((dwarf_version >= 4 || !dwarf_strict)
	      && is_rust ()
	      && int_size_in_bytes (type) == 4)
	    encoding = DW_ATE_UTF;
	  else if (TYPE_UNSIGNED (type))
	    encoding = DW_ATE_unsigned_char;
	  else
	    encoding = DW_ATE_signed_char;
	}
      else if (TYPE_UNSIGNED (type))
	encoding = DW_ATE_unsigned;
      else
	encoding = DW_ATE_signed;

      /* DW_AT_GNU_bias is a GNU extension, so strict mode omits it.  */
      if (!dwarf_strict
	  && lang_hooks.types.get_type_bias)
	type_bias = lang_hooks.types.get_type_bias (type);
      break;

    case REAL_TYPE:
      if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
	{
	  /* DW_ATE_decimal_float is DWARF 3; fall back on the
	     user-defined range for strict DWARF 2.  */
	  if (dwarf_version >= 3 || !dwarf_strict)
	    encoding = DW_ATE_decimal_float;
	  else
	    encoding = DW_ATE_lo_user;
	}
      else
	encoding = DW_ATE_float;
      break;

    case FIXED_POINT_TYPE:
      /* DW_ATE_*_fixed encodings are DWARF 3 additions.  */
      if (!(dwarf_version >= 3 || !dwarf_strict))
	encoding = DW_ATE_lo_user;
      else if (TYPE_UNSIGNED (type))
	encoding = DW_ATE_unsigned_fixed;
      else
	encoding = DW_ATE_signed_fixed;
      break;

      /* Dwarf2 doesn't know anything about complex ints, so use
	 a user defined type for it.  */
    case COMPLEX_TYPE:
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (type)))
	encoding = DW_ATE_complex_float;
      else
	encoding = DW_ATE_lo_user;
      break;

    case BOOLEAN_TYPE:
      /* GNU FORTRAN/Ada/C++ BOOLEAN type.  */
      encoding = DW_ATE_boolean;
      break;

    case BITINT_TYPE:
      /* C23 _BitInt(N).  */
      if (TYPE_UNSIGNED (type))
	encoding = DW_ATE_unsigned;
      else
	encoding = DW_ATE_signed;
      break;

    default:
      /* No other TREE_CODEs are Dwarf fundamental types.  */
      gcc_unreachable ();
    }

  base_type_result = new_die_raw (tag_value: DW_TAG_base_type);

  add_AT_unsigned (die: base_type_result, attr_kind: DW_AT_byte_size,
		   unsigned_val: int_size_in_bytes (type));
  add_AT_unsigned (die: base_type_result, attr_kind: DW_AT_encoding, unsigned_val: encoding);
  /* For _BitInt(N) the byte size may include padding; DW_AT_bit_size
     records the exact precision N.  */
  if (TREE_CODE (type) == BITINT_TYPE)
    add_AT_unsigned (die: base_type_result, attr_kind: DW_AT_bit_size, TYPE_PRECISION (type));

  /* REVERSE means the scalar is stored in the byte order opposite the
     target default, hence the inverted DW_END_* choice below.  */
  if (need_endianity_attribute_p (reverse))
    add_AT_unsigned (die: base_type_result, attr_kind: DW_AT_endianity,
		     BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);

  add_alignment_attribute (base_type_result, type);

  if (fpt_used)
    {
      /* Describe the fixed-point scale factor reported by the language
	 hook earlier.  */
      switch (fpt_info.scale_factor_kind)
	{
	case fixed_point_scale_factor_binary:
	  add_AT_int (die: base_type_result, attr_kind: DW_AT_binary_scale,
		      int_val: fpt_info.scale_factor.binary);
	  break;

	case fixed_point_scale_factor_decimal:
	  add_AT_int (die: base_type_result, attr_kind: DW_AT_decimal_scale,
		      int_val: fpt_info.scale_factor.decimal);
	  break;

	case fixed_point_scale_factor_arbitrary:
	  /* Arbitrary scale factors cannot be described in standard DWARF.  */
	  if (!dwarf_strict)
	    {
	      /* Describe the scale factor as a rational constant.  */
	      const dw_die_ref scale_factor
		= new_die (tag_value: DW_TAG_constant, parent_die: comp_unit_die (), t: type);

	      add_scalar_info (scale_factor, DW_AT_GNU_numerator,
			       fpt_info.scale_factor.arbitrary.numerator,
			       dw_scalar_form_constant, NULL);
	      add_scalar_info (scale_factor, DW_AT_GNU_denominator,
			       fpt_info.scale_factor.arbitrary.denominator,
			       dw_scalar_form_constant, NULL);

	      add_AT_die_ref (die: base_type_result, attr_kind: DW_AT_small, targ_die: scale_factor);
	    }
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  if (type_bias)
    add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
		     dw_scalar_form_constant
		     | dw_scalar_form_exprloc
		     | dw_scalar_form_reference,
		     NULL);

  return base_type_result;
}
13385 | |
13386 | /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM |
13387 | named 'auto' in its type: return true for it, false otherwise. */ |
13388 | |
13389 | static inline bool |
13390 | is_cxx_auto (tree type) |
13391 | { |
13392 | if (is_cxx ()) |
13393 | { |
13394 | tree name = TYPE_IDENTIFIER (type); |
13395 | if (name == get_identifier ("auto" ) |
13396 | || name == get_identifier ("decltype(auto)" )) |
13397 | return true; |
13398 | } |
13399 | return false; |
13400 | } |
13401 | |
13402 | /* Given a pointer to an arbitrary ..._TYPE tree node, return true if the |
13403 | given input type is a Dwarf "fundamental" type. Otherwise return null. */ |
13404 | |
13405 | static inline bool |
13406 | is_base_type (tree type) |
13407 | { |
13408 | switch (TREE_CODE (type)) |
13409 | { |
13410 | case INTEGER_TYPE: |
13411 | case REAL_TYPE: |
13412 | case FIXED_POINT_TYPE: |
13413 | case COMPLEX_TYPE: |
13414 | case BOOLEAN_TYPE: |
13415 | case BITINT_TYPE: |
13416 | return true; |
13417 | |
13418 | case VOID_TYPE: |
13419 | case OPAQUE_TYPE: |
13420 | case ARRAY_TYPE: |
13421 | case RECORD_TYPE: |
13422 | case UNION_TYPE: |
13423 | case QUAL_UNION_TYPE: |
13424 | case ENUMERAL_TYPE: |
13425 | case FUNCTION_TYPE: |
13426 | case METHOD_TYPE: |
13427 | case POINTER_TYPE: |
13428 | case REFERENCE_TYPE: |
13429 | case NULLPTR_TYPE: |
13430 | case OFFSET_TYPE: |
13431 | case LANG_TYPE: |
13432 | case VECTOR_TYPE: |
13433 | return false; |
13434 | |
13435 | default: |
13436 | if (is_cxx_auto (type)) |
13437 | return false; |
13438 | gcc_unreachable (); |
13439 | } |
13440 | } |
13441 | |
13442 | /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE |
13443 | node, return the size in bits for the type if it is a constant, or else |
13444 | return the alignment for the type if the type's size is not constant, or |
13445 | else return BITS_PER_WORD if the type actually turns out to be an |
13446 | ERROR_MARK node. */ |
13447 | |
13448 | static inline unsigned HOST_WIDE_INT |
13449 | simple_type_size_in_bits (const_tree type) |
13450 | { |
13451 | if (TREE_CODE (type) == ERROR_MARK) |
13452 | return BITS_PER_WORD; |
13453 | else if (TYPE_SIZE (type) == NULL_TREE) |
13454 | return 0; |
13455 | else if (tree_fits_uhwi_p (TYPE_SIZE (type))) |
13456 | return tree_to_uhwi (TYPE_SIZE (type)); |
13457 | else |
13458 | return TYPE_ALIGN (type); |
13459 | } |
13460 | |
13461 | /* Similarly, but return an offset_int instead of UHWI. */ |
13462 | |
13463 | static inline offset_int |
13464 | offset_int_type_size_in_bits (const_tree type) |
13465 | { |
13466 | if (TREE_CODE (type) == ERROR_MARK) |
13467 | return BITS_PER_WORD; |
13468 | else if (TYPE_SIZE (type) == NULL_TREE) |
13469 | return 0; |
13470 | else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST) |
13471 | return wi::to_offset (TYPE_SIZE (type)); |
13472 | else |
13473 | return TYPE_ALIGN (type); |
13474 | } |
13475 | |
13476 | /* Given a pointer to a tree node for a subrange type, return a pointer |
13477 | to a DIE that describes the given type. */ |
13478 | |
13479 | static dw_die_ref |
13480 | subrange_type_die (tree type, tree low, tree high, tree bias, |
13481 | dw_die_ref context_die) |
13482 | { |
13483 | dw_die_ref subrange_die; |
13484 | const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type); |
13485 | |
13486 | if (context_die == NULL) |
13487 | context_die = comp_unit_die (); |
13488 | |
13489 | subrange_die = new_die (tag_value: DW_TAG_subrange_type, parent_die: context_die, t: type); |
13490 | |
13491 | if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes) |
13492 | { |
13493 | /* The size of the subrange type and its base type do not match, |
13494 | so we need to generate a size attribute for the subrange type. */ |
13495 | add_AT_unsigned (die: subrange_die, attr_kind: DW_AT_byte_size, unsigned_val: size_in_bytes); |
13496 | } |
13497 | |
13498 | add_alignment_attribute (subrange_die, type); |
13499 | |
13500 | if (low) |
13501 | add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL); |
13502 | if (high) |
13503 | add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL); |
13504 | if (bias && !dwarf_strict) |
13505 | add_scalar_info (subrange_die, DW_AT_GNU_bias, bias, |
13506 | dw_scalar_form_constant |
13507 | | dw_scalar_form_exprloc |
13508 | | dw_scalar_form_reference, |
13509 | NULL); |
13510 | |
13511 | return subrange_die; |
13512 | } |
13513 | |
13514 | /* Returns the (const and/or volatile) cv_qualifiers associated with |
13515 | the decl node. This will normally be augmented with the |
13516 | cv_qualifiers of the underlying type in add_type_attribute. */ |
13517 | |
13518 | static int |
13519 | decl_quals (const_tree decl) |
13520 | { |
13521 | return ((TREE_READONLY (decl) |
13522 | /* The C++ front-end correctly marks reference-typed |
13523 | variables as readonly, but from a language (and debug |
13524 | info) standpoint they are not const-qualified. */ |
13525 | && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE |
13526 | ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED) |
13527 | | (TREE_THIS_VOLATILE (decl) |
13528 | ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED)); |
13529 | } |
13530 | |
/* Determine the TYPE whose qualifiers match the largest strict subset
   of the given TYPE_QUALS, and return its qualifiers.  Ignore all
   qualifiers outside QUAL_MASK.  */

static int
get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
{
  tree t;
  int best_rank = 0, best_qual = 0, max_rank;

  type_quals &= qual_mask;
  /* A strict subset can carry at most one fewer qualifier bit.  */
  max_rank = popcount_hwi (x: type_quals) - 1;

  /* Walk the variant list of TYPE looking for the variant with the
     largest set of qualifier bits wholly contained in TYPE_QUALS.
     Stop early once no larger subset is possible.  */
  for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
       t = TYPE_NEXT_VARIANT (t))
    {
      int q = TYPE_QUALS (t) & qual_mask;

      /* Candidate must be a strict subset of TYPE_QUALS and share the
	 same base type.  */
      if ((q & type_quals) == q && q != type_quals
	  && check_base_type (cand: t, base: type))
	{
	  int rank = popcount_hwi (x: q);

	  if (rank > best_rank)
	    {
	      best_rank = rank;
	      best_qual = q;
	    }
	}
    }

  return best_qual;
}
13564 | |
/* Mapping from a cv-qualifier bit to the DWARF tag that represents it.
   The table order defines the canonical emission order of qualifier
   DIEs when type units are in use (see modified_type_die).  */
struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
static const dwarf_qual_info_t dwarf_qual_info[] =
{
  { .q: TYPE_QUAL_CONST, .t: DW_TAG_const_type },
  { .q: TYPE_QUAL_VOLATILE, .t: DW_TAG_volatile_type },
  { .q: TYPE_QUAL_RESTRICT, .t: DW_TAG_restrict_type },
  { .q: TYPE_QUAL_ATOMIC, .t: DW_TAG_atomic_type }
};
/* Number of entries in dwarf_qual_info.  */
static const unsigned int dwarf_qual_info_size = ARRAY_SIZE (dwarf_qual_info);
13574 | |
/* If DIE is a qualified DIE of some base DIE with the same parent,
   return the base DIE, otherwise return NULL.  Set MASK to the
   qualifiers added compared to the returned DIE.  */

static dw_die_ref
qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
{
  unsigned int i;
  /* Check whether DIE's tag is one of the known qualifier tags.  */
  for (i = 0; i < dwarf_qual_info_size; i++)
    if (die->die_tag == dwarf_qual_info[i].t)
      break;
  if (i == dwarf_qual_info_size)
    return NULL;
  /* A pure qualifier DIE carries exactly one attribute, DW_AT_type.  */
  if (vec_safe_length (v: die->die_attr) != 1)
    return NULL;
  dw_die_ref type = get_AT_ref (die, attr_kind: DW_AT_type);
  /* The base DIE must live in the same scope as DIE.  */
  if (type == NULL || type->die_parent != die->die_parent)
    return NULL;
  /* Record the qualifier this DIE adds on top of the base DIE.  */
  *mask |= dwarf_qual_info[i].q;
  /* Peel up to DEPTH further layers of qualifier DIEs, accumulating
     their qualifier bits in MASK.  */
  if (depth)
    {
      dw_die_ref ret = qualified_die_p (die: type, mask, depth: depth - 1);
      if (ret)
	return ret;
    }
  return type;
}
13602 | |
/* If TYPE is long double or complex long double that
   should be emitted as artificial typedef to _Float128 or
   complex _Float128, return the type it should be emitted as.
   This is done in case the target already supports 16-byte
   composite floating point type (ibm_extended_format).  */

static tree
long_double_as_float128 (tree type)
{
  /* Only (complex) long double is ever remapped.  */
  if (type != long_double_type_node
      && type != complex_long_double_type_node)
    return NULL_TREE;

  machine_mode mode, fmode;
  /* For a complex type, the interesting mode is that of the component.  */
  if (TREE_CODE (type) == COMPLEX_TYPE)
    mode = TYPE_MODE (TREE_TYPE (type));
  else
    mode = TYPE_MODE (type);
  /* Remap only when TYPE itself is a 16-byte non-composite float but the
     target also has a 16-byte composite float mode, i.e. the two formats
     coexist and need distinguishing in the debug info.  */
  if (known_eq (GET_MODE_SIZE (mode), 16) && !MODE_COMPOSITE_P (mode))
    FOR_EACH_MODE_IN_CLASS (fmode, MODE_FLOAT)
      if (known_eq (GET_MODE_SIZE (fmode), 16)
	  && MODE_COMPOSITE_P (fmode))
	{
	  if (type == long_double_type_node)
	    {
	      /* Use _Float128 when it shares long double's mode.  */
	      if (float128_type_node
		  && (TYPE_MODE (float128_type_node)
		      == TYPE_MODE (type)))
		return float128_type_node;
	      return NULL_TREE;
	    }
	  /* Complex case: look for the complex _FloatN/_FloatNx type
	     with a matching mode.  */
	  for (int i = 0; i < NUM_FLOATN_NX_TYPES; i++)
	    if (COMPLEX_FLOATN_NX_TYPE_NODE (i) != NULL_TREE
		&& (TYPE_MODE (COMPLEX_FLOATN_NX_TYPE_NODE (i))
		    == TYPE_MODE (type)))
	      return COMPLEX_FLOATN_NX_TYPE_NODE (i);
	}

  return NULL_TREE;
}
13643 | |
13644 | /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging |
13645 | entry that chains the modifiers specified by CV_QUALS in front of the |
13646 | given type. REVERSE is true if the type is to be interpreted in the |
13647 | reverse storage order wrt the target order. */ |
13648 | |
13649 | static dw_die_ref |
13650 | modified_type_die (tree type, int cv_quals, bool reverse, |
13651 | dw_die_ref context_die) |
13652 | { |
13653 | enum tree_code code = TREE_CODE (type); |
13654 | dw_die_ref mod_type_die; |
13655 | dw_die_ref sub_die = NULL; |
13656 | tree item_type = NULL; |
13657 | tree qualified_type; |
13658 | tree name, low, high; |
13659 | dw_die_ref mod_scope; |
13660 | struct array_descr_info info; |
13661 | /* Only these cv-qualifiers are currently handled. */ |
13662 | const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE |
13663 | | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC | |
13664 | ENCODE_QUAL_ADDR_SPACE(~0U)); |
13665 | const bool reverse_base_type |
13666 | = need_endianity_attribute_p (reverse) && is_base_type (type); |
13667 | |
13668 | if (code == ERROR_MARK) |
13669 | return NULL; |
13670 | |
13671 | if (lang_hooks.types.get_debug_type) |
13672 | { |
13673 | tree debug_type = lang_hooks.types.get_debug_type (type); |
13674 | |
13675 | if (debug_type != NULL_TREE && debug_type != type) |
13676 | return modified_type_die (type: debug_type, cv_quals, reverse, context_die); |
13677 | } |
13678 | |
13679 | cv_quals &= cv_qual_mask; |
13680 | |
13681 | /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type |
13682 | tag modifier (and not an attribute) old consumers won't be able |
13683 | to handle it. */ |
13684 | if (dwarf_version < 3) |
13685 | cv_quals &= ~TYPE_QUAL_RESTRICT; |
13686 | |
13687 | /* Likewise for DW_TAG_atomic_type for DWARFv5. */ |
13688 | if (dwarf_version < 5) |
13689 | cv_quals &= ~TYPE_QUAL_ATOMIC; |
13690 | |
13691 | /* See if we already have the appropriately qualified variant of |
13692 | this type. */ |
13693 | qualified_type = get_qualified_type (type, cv_quals); |
13694 | |
13695 | if (qualified_type == sizetype) |
13696 | { |
13697 | /* Try not to expose the internal sizetype type's name. */ |
13698 | if (TYPE_NAME (qualified_type) |
13699 | && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL) |
13700 | { |
13701 | tree t = TREE_TYPE (TYPE_NAME (qualified_type)); |
13702 | |
13703 | gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE |
13704 | && (TYPE_PRECISION (t) |
13705 | == TYPE_PRECISION (qualified_type)) |
13706 | && (TYPE_UNSIGNED (t) |
13707 | == TYPE_UNSIGNED (qualified_type))); |
13708 | qualified_type = t; |
13709 | } |
13710 | else if (qualified_type == sizetype |
13711 | && TREE_CODE (sizetype) == TREE_CODE (size_type_node) |
13712 | && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node) |
13713 | && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node)) |
13714 | qualified_type = size_type_node; |
13715 | if (type == sizetype) |
13716 | type = qualified_type; |
13717 | } |
13718 | |
13719 | /* If we do, then we can just use its DIE, if it exists. */ |
13720 | if (qualified_type) |
13721 | { |
13722 | mod_type_die = lookup_type_die (type: qualified_type); |
13723 | |
13724 | /* DW_AT_endianity doesn't come from a qualifier on the type, so it is |
13725 | dealt with specially: the DIE with the attribute, if it exists, is |
13726 | placed immediately after the regular DIE for the same base type. */ |
13727 | if (mod_type_die |
13728 | && (!reverse_base_type |
13729 | || ((mod_type_die = mod_type_die->die_sib) != NULL |
13730 | && get_AT_unsigned (die: mod_type_die, attr_kind: DW_AT_endianity)))) |
13731 | return mod_type_die; |
13732 | } |
13733 | |
13734 | name = qualified_type ? TYPE_NAME (qualified_type) : NULL; |
13735 | |
13736 | /* Handle C typedef types. */ |
13737 | if (name |
13738 | && TREE_CODE (name) == TYPE_DECL |
13739 | && DECL_ORIGINAL_TYPE (name) |
13740 | && !DECL_ARTIFICIAL (name)) |
13741 | { |
13742 | tree dtype = TREE_TYPE (name); |
13743 | |
13744 | /* Skip the typedef for base types with DW_AT_endianity, no big deal. */ |
13745 | if (qualified_type == dtype && !reverse_base_type) |
13746 | { |
13747 | tree origin = decl_ultimate_origin (decl: name); |
13748 | |
13749 | /* Typedef variants that have an abstract origin don't get their own |
13750 | type DIE (see gen_typedef_die), so fall back on the ultimate |
13751 | abstract origin instead. */ |
13752 | if (origin != NULL && origin != name) |
13753 | return modified_type_die (TREE_TYPE (origin), cv_quals, reverse, |
13754 | context_die); |
13755 | |
13756 | /* For a named type, use the typedef. */ |
13757 | gen_type_die (qualified_type, context_die); |
13758 | return lookup_type_die (type: qualified_type); |
13759 | } |
13760 | else |
13761 | { |
13762 | int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype); |
13763 | dquals &= cv_qual_mask; |
13764 | if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED |
13765 | || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type)) |
13766 | /* cv-unqualified version of named type. Just use |
13767 | the unnamed type to which it refers. */ |
13768 | return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals, |
13769 | reverse, context_die); |
13770 | /* Else cv-qualified version of named type; fall through. */ |
13771 | } |
13772 | } |
13773 | |
13774 | mod_scope = scope_die_for (type, context_die); |
13775 | |
13776 | if (cv_quals) |
13777 | { |
13778 | int sub_quals = 0, first_quals = 0; |
13779 | unsigned i; |
13780 | dw_die_ref first = NULL, last = NULL; |
13781 | |
13782 | /* Determine a lesser qualified type that most closely matches |
13783 | this one. Then generate DW_TAG_* entries for the remaining |
13784 | qualifiers. */ |
13785 | sub_quals = get_nearest_type_subqualifiers (type, type_quals: cv_quals, |
13786 | qual_mask: cv_qual_mask); |
13787 | if (sub_quals && use_debug_types) |
13788 | { |
13789 | bool needed = false; |
13790 | /* If emitting type units, make sure the order of qualifiers |
13791 | is canonical. Thus, start from unqualified type if |
13792 | an earlier qualifier is missing in sub_quals, but some later |
13793 | one is present there. */ |
13794 | for (i = 0; i < dwarf_qual_info_size; i++) |
13795 | if (dwarf_qual_info[i].q & cv_quals & ~sub_quals) |
13796 | needed = true; |
13797 | else if (needed && (dwarf_qual_info[i].q & cv_quals)) |
13798 | { |
13799 | sub_quals = 0; |
13800 | break; |
13801 | } |
13802 | } |
13803 | mod_type_die = modified_type_die (type, cv_quals: sub_quals, reverse, context_die); |
13804 | if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope) |
13805 | { |
13806 | /* As not all intermediate qualified DIEs have corresponding |
13807 | tree types, ensure that qualified DIEs in the same scope |
13808 | as their DW_AT_type are emitted after their DW_AT_type, |
13809 | only with other qualified DIEs for the same type possibly |
13810 | in between them. Determine the range of such qualified |
13811 | DIEs now (first being the base type, last being corresponding |
13812 | last qualified DIE for it). */ |
13813 | unsigned int count = 0; |
13814 | first = qualified_die_p (die: mod_type_die, mask: &first_quals, |
13815 | depth: dwarf_qual_info_size); |
13816 | if (first == NULL) |
13817 | first = mod_type_die; |
13818 | gcc_assert ((first_quals & ~sub_quals) == 0); |
13819 | for (count = 0, last = first; |
13820 | count < (1U << dwarf_qual_info_size); |
13821 | count++, last = last->die_sib) |
13822 | { |
13823 | int quals = 0; |
13824 | if (last == mod_scope->die_child) |
13825 | break; |
13826 | if (qualified_die_p (die: last->die_sib, mask: &quals, depth: dwarf_qual_info_size) |
13827 | != first) |
13828 | break; |
13829 | } |
13830 | } |
13831 | |
13832 | for (i = 0; i < dwarf_qual_info_size; i++) |
13833 | if (dwarf_qual_info[i].q & cv_quals & ~sub_quals) |
13834 | { |
13835 | dw_die_ref d; |
13836 | if (first && first != last) |
13837 | { |
13838 | for (d = first->die_sib; ; d = d->die_sib) |
13839 | { |
13840 | int quals = 0; |
13841 | qualified_die_p (die: d, mask: &quals, depth: dwarf_qual_info_size); |
13842 | if (quals == (first_quals | dwarf_qual_info[i].q)) |
13843 | break; |
13844 | if (d == last) |
13845 | { |
13846 | d = NULL; |
13847 | break; |
13848 | } |
13849 | } |
13850 | if (d) |
13851 | { |
13852 | mod_type_die = d; |
13853 | continue; |
13854 | } |
13855 | } |
13856 | if (first) |
13857 | { |
13858 | d = new_die_raw (tag_value: dwarf_qual_info[i].t); |
13859 | add_child_die_after (die: mod_scope, child_die: d, after_die: last); |
13860 | last = d; |
13861 | } |
13862 | else |
13863 | d = new_die (tag_value: dwarf_qual_info[i].t, parent_die: mod_scope, t: type); |
13864 | if (mod_type_die) |
13865 | add_AT_die_ref (die: d, attr_kind: DW_AT_type, targ_die: mod_type_die); |
13866 | mod_type_die = d; |
13867 | first_quals |= dwarf_qual_info[i].q; |
13868 | } |
13869 | } |
13870 | else if (code == POINTER_TYPE || code == REFERENCE_TYPE) |
13871 | { |
13872 | dwarf_tag tag = DW_TAG_pointer_type; |
13873 | if (code == REFERENCE_TYPE) |
13874 | { |
13875 | if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4) |
13876 | tag = DW_TAG_rvalue_reference_type; |
13877 | else |
13878 | tag = DW_TAG_reference_type; |
13879 | } |
13880 | mod_type_die = new_die (tag_value: tag, parent_die: mod_scope, t: type); |
13881 | |
13882 | add_AT_unsigned (die: mod_type_die, attr_kind: DW_AT_byte_size, |
13883 | unsigned_val: simple_type_size_in_bits (type) / BITS_PER_UNIT); |
13884 | add_alignment_attribute (mod_type_die, type); |
13885 | item_type = TREE_TYPE (type); |
13886 | |
13887 | addr_space_t as = TYPE_ADDR_SPACE (item_type); |
13888 | if (!ADDR_SPACE_GENERIC_P (as)) |
13889 | { |
13890 | int action = targetm.addr_space.debug (as); |
13891 | if (action >= 0) |
13892 | { |
13893 | /* Positive values indicate an address_class. */ |
13894 | add_AT_unsigned (die: mod_type_die, attr_kind: DW_AT_address_class, unsigned_val: action); |
13895 | } |
13896 | else |
13897 | { |
13898 | /* Negative values indicate an (inverted) segment base reg. */ |
13899 | dw_loc_descr_ref d |
13900 | = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED); |
13901 | add_AT_loc (die: mod_type_die, attr_kind: DW_AT_segment, loc: d); |
13902 | } |
13903 | } |
13904 | } |
13905 | else if (code == ARRAY_TYPE |
13906 | || (lang_hooks.types.get_array_descr_info |
13907 | && lang_hooks.types.get_array_descr_info (type, &info))) |
13908 | { |
13909 | gen_type_die (type, context_die); |
13910 | return lookup_type_die (type); |
13911 | } |
13912 | else if (code == INTEGER_TYPE |
13913 | && TREE_TYPE (type) != NULL_TREE |
13914 | && subrange_type_for_debug_p (type, &low, &high)) |
13915 | { |
13916 | tree bias = NULL_TREE; |
13917 | if (lang_hooks.types.get_type_bias) |
13918 | bias = lang_hooks.types.get_type_bias (type); |
13919 | mod_type_die = subrange_type_die (type, low, high, bias, context_die); |
13920 | item_type = TREE_TYPE (type); |
13921 | } |
13922 | else if (is_base_type (type)) |
13923 | { |
13924 | /* If a target supports long double as different floating point |
13925 | modes with the same 16-byte size, use normal DW_TAG_base_type |
13926 | only for the composite (ibm_extended_real_format) type and |
13927 | for the other for the time being emit instead a "_Float128" |
13928 | or "complex _Float128" DW_TAG_base_type and a "long double" |
13929 | or "complex long double" typedef to it. */ |
13930 | if (tree other_type = long_double_as_float128 (type)) |
13931 | { |
13932 | dw_die_ref other_die; |
13933 | if (TYPE_NAME (other_type)) |
13934 | other_die |
13935 | = modified_type_die (type: other_type, cv_quals: TYPE_UNQUALIFIED, reverse, |
13936 | context_die); |
13937 | else |
13938 | { |
13939 | other_die = base_type_die (type, reverse); |
13940 | add_child_die (die: comp_unit_die (), child_die: other_die); |
13941 | add_name_attribute (other_die, |
13942 | TREE_CODE (type) == COMPLEX_TYPE |
13943 | ? "complex _Float128" : "_Float128" ); |
13944 | } |
13945 | mod_type_die = new_die_raw (tag_value: DW_TAG_typedef); |
13946 | add_AT_die_ref (die: mod_type_die, attr_kind: DW_AT_type, targ_die: other_die); |
13947 | } |
13948 | else |
13949 | mod_type_die = base_type_die (type, reverse); |
13950 | |
13951 | /* The DIE with DW_AT_endianity is placed right after the naked DIE. */ |
13952 | if (reverse_base_type) |
13953 | { |
13954 | dw_die_ref after_die |
13955 | = modified_type_die (type, cv_quals, reverse: false, context_die); |
13956 | add_child_die_after (die: comp_unit_die (), child_die: mod_type_die, after_die); |
13957 | } |
13958 | else |
13959 | add_child_die (die: comp_unit_die (), child_die: mod_type_die); |
13960 | |
13961 | add_pubtype (decl: type, die: mod_type_die); |
13962 | } |
13963 | else |
13964 | { |
13965 | gen_type_die (type, context_die); |
13966 | |
13967 | /* We have to get the type_main_variant here (and pass that to the |
13968 | `lookup_type_die' routine) because the ..._TYPE node we have |
13969 | might simply be a *copy* of some original type node (where the |
13970 | copy was created to help us keep track of typedef names) and |
13971 | that copy might have a different TYPE_UID from the original |
13972 | ..._TYPE node. */ |
13973 | if (code == FUNCTION_TYPE || code == METHOD_TYPE) |
13974 | { |
13975 | /* For function/method types, can't just use type_main_variant here, |
13976 | because that can have different ref-qualifiers for C++, |
13977 | but try to canonicalize. */ |
13978 | tree main = TYPE_MAIN_VARIANT (type); |
13979 | for (tree t = main; t; t = TYPE_NEXT_VARIANT (t)) |
13980 | if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0 |
13981 | && check_base_type (cand: t, base: main) |
13982 | && check_lang_type (cand: t, base: type)) |
13983 | return lookup_type_die (type: t); |
13984 | return lookup_type_die (type); |
13985 | } |
13986 | /* Vectors have the debugging information in the type, |
13987 | not the main variant. */ |
13988 | else if (code == VECTOR_TYPE) |
13989 | return lookup_type_die (type); |
13990 | else |
13991 | return lookup_type_die (type: type_main_variant (type)); |
13992 | } |
13993 | |
13994 | /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those, |
13995 | don't output a DW_TAG_typedef, since there isn't one in the |
13996 | user's program; just attach a DW_AT_name to the type. |
13997 | Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type |
13998 | if the base type already has the same name. */ |
13999 | if (name |
14000 | && ((TREE_CODE (name) != TYPE_DECL |
14001 | && (qualified_type == TYPE_MAIN_VARIANT (type) |
14002 | || (cv_quals == TYPE_UNQUALIFIED))) |
14003 | || (TREE_CODE (name) == TYPE_DECL |
14004 | && TREE_TYPE (name) == qualified_type |
14005 | && DECL_NAME (name)))) |
14006 | { |
14007 | if (TREE_CODE (name) == TYPE_DECL) |
14008 | /* Could just call add_name_and_src_coords_attributes here, |
14009 | but since this is a builtin type it doesn't have any |
14010 | useful source coordinates anyway. */ |
14011 | name = DECL_NAME (name); |
14012 | add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name)); |
14013 | } |
14014 | else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type) |
14015 | { |
14016 | if (TREE_CODE (type) == BITINT_TYPE) |
14017 | { |
14018 | char name_buf[sizeof ("unsigned _BitInt(2147483647)" )]; |
14019 | snprintf (s: name_buf, maxlen: sizeof (name_buf), |
14020 | format: "%s_BitInt(%d)" , TYPE_UNSIGNED (type) ? "unsigned " : "" , |
14021 | TYPE_PRECISION (type)); |
14022 | add_name_attribute (mod_type_die, name_buf); |
14023 | } |
14024 | else |
14025 | { |
14026 | /* This probably indicates a bug. */ |
14027 | name = TYPE_IDENTIFIER (type); |
14028 | add_name_attribute (mod_type_die, |
14029 | name |
14030 | ? IDENTIFIER_POINTER (name) : "__unknown__" ); |
14031 | } |
14032 | } |
14033 | |
14034 | if (qualified_type && !reverse_base_type) |
14035 | equate_type_number_to_die (type: qualified_type, type_die: mod_type_die); |
14036 | |
14037 | if (item_type) |
14038 | /* We must do this after the equate_type_number_to_die call, in case |
14039 | this is a recursive type. This ensures that the modified_type_die |
14040 | recursion will terminate even if the type is recursive. Recursive |
14041 | types are possible in Ada. */ |
14042 | sub_die = modified_type_die (type: item_type, |
14043 | TYPE_QUALS_NO_ADDR_SPACE (item_type), |
14044 | reverse, |
14045 | context_die); |
14046 | |
14047 | if (sub_die != NULL) |
14048 | add_AT_die_ref (die: mod_type_die, attr_kind: DW_AT_type, targ_die: sub_die); |
14049 | |
14050 | add_gnat_descriptive_type_attribute (mod_type_die, type, context_die); |
14051 | if (TYPE_ARTIFICIAL (type)) |
14052 | add_AT_flag (die: mod_type_die, attr_kind: DW_AT_artificial, flag: 1); |
14053 | |
14054 | return mod_type_die; |
14055 | } |
14056 | |
14057 | /* Generate DIEs for the generic parameters of T. |
14058 | T must be either a generic type or a generic function. |
14059 | See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */ |
14060 | |
14061 | static void |
14062 | gen_generic_params_dies (tree t) |
14063 | { |
14064 | tree parms, args; |
14065 | int parms_num, i; |
14066 | dw_die_ref die = NULL; |
14067 | int non_default; |
14068 | |
14069 | if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t))) |
14070 | return; |
14071 | |
14072 | if (TYPE_P (t)) |
14073 | die = lookup_type_die (type: t); |
14074 | else if (DECL_P (t)) |
14075 | die = lookup_decl_die (decl: t); |
14076 | |
14077 | gcc_assert (die); |
14078 | |
14079 | parms = lang_hooks.get_innermost_generic_parms (t); |
14080 | if (!parms) |
14081 | /* T has no generic parameter. It means T is neither a generic type |
14082 | or function. End of story. */ |
14083 | return; |
14084 | |
14085 | parms_num = TREE_VEC_LENGTH (parms); |
14086 | args = lang_hooks.get_innermost_generic_args (t); |
14087 | if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST) |
14088 | non_default = int_cst_value (TREE_CHAIN (args)); |
14089 | else |
14090 | non_default = TREE_VEC_LENGTH (args); |
14091 | for (i = 0; i < parms_num; i++) |
14092 | { |
14093 | tree parm, arg, arg_pack_elems; |
14094 | dw_die_ref parm_die; |
14095 | |
14096 | parm = TREE_VEC_ELT (parms, i); |
14097 | arg = TREE_VEC_ELT (args, i); |
14098 | arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg); |
14099 | gcc_assert (parm && TREE_VALUE (parm) && arg); |
14100 | |
14101 | if (parm && TREE_VALUE (parm) && arg) |
14102 | { |
14103 | /* If PARM represents a template parameter pack, |
14104 | emit a DW_TAG_GNU_template_parameter_pack DIE, followed |
14105 | by DW_TAG_template_*_parameter DIEs for the argument |
14106 | pack elements of ARG. Note that ARG would then be |
14107 | an argument pack. */ |
14108 | if (arg_pack_elems) |
14109 | parm_die = template_parameter_pack_die (TREE_VALUE (parm), |
14110 | arg_pack_elems, |
14111 | die); |
14112 | else |
14113 | parm_die = generic_parameter_die (TREE_VALUE (parm), arg, |
14114 | true /* emit name */, die); |
14115 | if (i >= non_default) |
14116 | add_AT_flag (die: parm_die, attr_kind: DW_AT_default_value, flag: 1); |
14117 | } |
14118 | } |
14119 | } |
14120 | |
14121 | /* Create and return a DIE for PARM which should be |
14122 | the representation of a generic type parameter. |
14123 | For instance, in the C++ front end, PARM would be a template parameter. |
14124 | ARG is the argument to PARM. |
   EMIT_NAME_P: if true, the DIE will have its DW_AT_name attribute set to
   the name of PARM.
14127 | PARENT_DIE is the parent DIE which the new created DIE should be added to, |
14128 | as a child node. */ |
14129 | |
static dw_die_ref
generic_parameter_die (tree parm, tree arg,
		       bool emit_name_p,
		       dw_die_ref parent_die)
{
  dw_die_ref tmpl_die = NULL;
  const char *name = NULL;

  /* C++20 accepts class literals as template parameters, and var
     decls with initializers represent them.  The VAR_DECLs would be
     rejected, but we can take the DECL_INITIAL constructor and
     attempt to expand it.  */
  if (arg && VAR_P (arg))
    arg = DECL_INITIAL (arg);

  /* Without a named parameter and an argument there is nothing to emit.  */
  if (!parm || !DECL_NAME (parm) || !arg)
    return NULL;

  /* We support non-type generic parameters and arguments,
     type generic parameters and arguments, as well as
     generic generic parameters (a.k.a. template template parameters in C++)
     and arguments.  */
  if (TREE_CODE (parm) == PARM_DECL)
    /* PARM is a nontype generic parameter  */
    tmpl_die = new_die (tag_value: DW_TAG_template_value_param, parent_die, t: parm);
  else if (TREE_CODE (parm) == TYPE_DECL)
    /* PARM is a type generic parameter.  */
    tmpl_die = new_die (tag_value: DW_TAG_template_type_param, parent_die, t: parm);
  else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
    /* PARM is a generic generic parameter.
       Its DIE is a GNU extension.  It shall have a
       DW_AT_name attribute to represent the name of the template template
       parameter, and a DW_AT_GNU_template_name attribute to represent the
       name of the template template argument.  */
    tmpl_die = new_die (tag_value: DW_TAG_GNU_template_template_param,
			parent_die, t: parm);
  else
    gcc_unreachable ();

  if (tmpl_die)
    {
      tree tmpl_type;

      /* If PARM is a generic parameter pack, it means we are
	 emitting debug info for a template argument pack element.
	 In other terms, ARG is a template argument pack element.
	 In that case, we don't emit any DW_AT_name attribute for
	 the die.  */
      if (emit_name_p)
	{
	  name = IDENTIFIER_POINTER (DECL_NAME (parm));
	  gcc_assert (name);
	  add_AT_string (die: tmpl_die, attr_kind: DW_AT_name, str: name);
	}

      if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
	{
	  /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
	     TMPL_DIE should have a child DW_AT_type attribute that is set
	     to the type of the argument to PARM, which is ARG.
	     If PARM is a type generic parameter, TMPL_DIE should have a
	     child DW_AT_type that is set to ARG.  */
	  tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
	  add_type_attribute (tmpl_die, tmpl_type,
			      (TREE_THIS_VOLATILE (tmpl_type)
			       ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
			      false, parent_die);
	}
      else
	{
	  /* So TMPL_DIE is a DIE representing a generic generic template
	     parameter, a.k.a template template parameter in C++, and ARG
	     is a template.  */

	  /* The DW_AT_GNU_template_name attribute of the DIE must be set
	     to the name of the argument.  */
	  name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, scope: 1);
	  if (name)
	    add_AT_string (die: tmpl_die, attr_kind: DW_AT_GNU_template_name, str: name);
	}

      if (TREE_CODE (parm) == PARM_DECL)
	/* So PARM is a non-type generic parameter.
	   DWARF3 5.6.8 says we must set a DW_AT_const_value child
	   attribute of TMPL_DIE which value represents the value
	   of ARG.
	   We must be careful here:
	   The value of ARG might reference some function decls.
	   We might currently be emitting debug info for a generic
	   type and types are emitted before function decls, we don't
	   know if the function decls referenced by ARG will actually be
	   emitted after cgraph computations.
	   So must defer the generation of the DW_AT_const_value to
	   after cgraph is ready.  */
	append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
    }

  return tmpl_die;
}
14229 | |
/* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
   PARM_PACK.  PARM_PACK must be a template parameter pack.  The returned DIE
14232 | will be child DIE of PARENT_DIE. */ |
14233 | |
14234 | static dw_die_ref |
14235 | template_parameter_pack_die (tree parm_pack, |
14236 | tree parm_pack_args, |
14237 | dw_die_ref parent_die) |
14238 | { |
14239 | dw_die_ref die; |
14240 | int j; |
14241 | |
14242 | gcc_assert (parent_die && parm_pack); |
14243 | |
14244 | die = new_die (tag_value: DW_TAG_GNU_template_parameter_pack, parent_die, t: parm_pack); |
14245 | add_name_and_src_coords_attributes (die, parm_pack); |
14246 | for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++) |
14247 | generic_parameter_die (parm: parm_pack, |
14248 | TREE_VEC_ELT (parm_pack_args, j), |
14249 | emit_name_p: false /* Don't emit DW_AT_name */, |
14250 | parent_die: die); |
14251 | return die; |
14252 | } |
14253 | |
14254 | /* Return the debugger register number described by a given RTL node. */ |
14255 | |
14256 | static unsigned int |
14257 | debugger_reg_number (const_rtx rtl) |
14258 | { |
14259 | unsigned regno = REGNO (rtl); |
14260 | |
14261 | gcc_assert (regno < FIRST_PSEUDO_REGISTER); |
14262 | |
14263 | #ifdef LEAF_REG_REMAP |
14264 | if (crtl->uses_only_leaf_regs) |
14265 | { |
14266 | int leaf_reg = LEAF_REG_REMAP (regno); |
14267 | if (leaf_reg != -1) |
14268 | regno = (unsigned) leaf_reg; |
14269 | } |
14270 | #endif |
14271 | |
14272 | regno = DEBUGGER_REGNO (regno); |
14273 | gcc_assert (regno != INVALID_REGNUM); |
14274 | return regno; |
14275 | } |
14276 | |
14277 | /* Optionally add a DW_OP_piece term to a location description expression. |
14278 | DW_OP_piece is only added if the location description expression already |
14279 | doesn't end with DW_OP_piece. */ |
14280 | |
14281 | static void |
14282 | add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size) |
14283 | { |
14284 | dw_loc_descr_ref loc; |
14285 | |
14286 | if (*list_head != NULL) |
14287 | { |
14288 | /* Find the end of the chain. */ |
14289 | for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next) |
14290 | ; |
14291 | |
14292 | if (loc->dw_loc_opc != DW_OP_piece) |
14293 | loc->dw_loc_next = new_loc_descr (op: DW_OP_piece, oprnd1: size, oprnd2: 0); |
14294 | } |
14295 | } |
14296 | |
14297 | /* Return a location descriptor that designates a machine register or |
14298 | zero if there is none. */ |
14299 | |
static dw_loc_descr_ref
reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
{
  rtx regs;

  /* Pseudo registers have no fixed hardware location: no descriptor.  */
  if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
    return 0;

  /* We only use "frame base" when we're sure we're talking about the
     post-prologue local stack frame.  We do this by *not* running
     register elimination until this point, and recognizing the special
     argument pointer and soft frame pointer rtx's.
     Use DW_OP_fbreg offset DW_OP_stack_value in this case.  */
  if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
      && (ira_use_lra_p
	  ? lra_eliminate_regs (rtl, VOIDmode, NULL_RTX)
	  : eliminate_regs (rtl, VOIDmode, NULL_RTX)) != rtl)
    {
      dw_loc_descr_ref result = NULL;

      /* DW_OP_stack_value is a DWARF 4 opcode; only emit it for DWARF 4+
	 or when GNU extensions are acceptable.  */
      if (dwarf_version >= 4 || !dwarf_strict)
	{
	  result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
				       initialized);
	  if (result)
	    add_loc_descr (list_head: &result,
			   descr: new_loc_descr (op: DW_OP_stack_value, oprnd1: 0, oprnd2: 0));
	}
      return result;
    }

  /* Let the target describe values spread over several DWARF registers.  */
  regs = targetm.dwarf_register_span (rtl);

  if (REG_NREGS (rtl) > 1 || regs)
    return multiple_reg_loc_descriptor (rtl, regs, initialized);
  else
    {
      unsigned int debugger_regnum = debugger_reg_number (rtl);
      /* Registers the debugger cannot see get no descriptor.  */
      if (debugger_regnum == IGNORED_DWARF_REGNUM)
	return 0;
      return one_reg_loc_descriptor (debugger_regnum, initialized);
    }
}
14343 | |
14344 | /* Return a location descriptor that designates a machine register for |
14345 | a given hard register number. */ |
14346 | |
14347 | static dw_loc_descr_ref |
14348 | one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized) |
14349 | { |
14350 | dw_loc_descr_ref reg_loc_descr; |
14351 | |
14352 | if (regno <= 31) |
14353 | reg_loc_descr |
14354 | = new_loc_descr (op: (enum dwarf_location_atom) (DW_OP_reg0 + regno), oprnd1: 0, oprnd2: 0); |
14355 | else |
14356 | reg_loc_descr = new_loc_descr (op: DW_OP_regx, oprnd1: regno, oprnd2: 0); |
14357 | |
14358 | if (initialized == VAR_INIT_STATUS_UNINITIALIZED) |
14359 | add_loc_descr (list_head: ®_loc_descr, descr: new_loc_descr (op: DW_OP_GNU_uninit, oprnd1: 0, oprnd2: 0)); |
14360 | |
14361 | return reg_loc_descr; |
14362 | } |
14363 | |
14364 | /* Given an RTL of a register, return a location descriptor that |
14365 | designates a value that spans more than one register. */ |
14366 | |
static dw_loc_descr_ref
multiple_reg_loc_descriptor (rtx rtl, rtx regs,
			     enum var_init_status initialized)
{
  int size, i;
  dw_loc_descr_ref loc_result = NULL;

  /* Simple, contiguous registers.  */
  if (regs == NULL_RTX)
    {
      unsigned reg = REGNO (rtl);
      int nregs;

#ifdef LEAF_REG_REMAP
      /* Apply the same leaf-function register remapping that
	 debugger_reg_number performs, so the assertion below holds.  */
      if (crtl->uses_only_leaf_regs)
	{
	  int leaf_reg = LEAF_REG_REMAP (reg);
	  if (leaf_reg != -1)
	    reg = (unsigned) leaf_reg;
	}
#endif

      gcc_assert ((unsigned) DEBUGGER_REGNO (reg) == debugger_reg_number (rtl));
      nregs = REG_NREGS (rtl);

      /* At present we only track constant-sized pieces.  */
      if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (const_value: &size))
	return NULL;
      /* SIZE becomes the byte size of each per-register piece.  */
      size /= nregs;

      /* Emit one register location plus a DW_OP_piece per register.  */
      loc_result = NULL;
      while (nregs--)
	{
	  dw_loc_descr_ref t;

	  t = one_reg_loc_descriptor (DEBUGGER_REGNO (reg),
				      initialized: VAR_INIT_STATUS_INITIALIZED);
	  add_loc_descr (list_head: &loc_result, descr: t);
	  add_loc_descr_op_piece (list_head: &loc_result, size);
	  ++reg;
	}
      return loc_result;
    }

  /* Now onto stupid register sets in non contiguous locations.  */

  gcc_assert (GET_CODE (regs) == PARALLEL);

  /* At present we only track constant-sized pieces.  */
  if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (const_value: &size))
    return NULL;
  loc_result = NULL;

  /* One piece per element of the PARALLEL the target hook returned.  */
  for (i = 0; i < XVECLEN (regs, 0); ++i)
    {
      dw_loc_descr_ref t;

      t = one_reg_loc_descriptor (regno: debugger_reg_number (XVECEXP (regs, 0, i)),
				  initialized: VAR_INIT_STATUS_INITIALIZED);
      add_loc_descr (list_head: &loc_result, descr: t);
      add_loc_descr_op_piece (list_head: &loc_result, size);
    }

  /* The uninitialized marker applies to the value as a whole, so it is
     appended once, after all the pieces.  */
  if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
    add_loc_descr (list_head: &loc_result, descr: new_loc_descr (op: DW_OP_GNU_uninit, oprnd1: 0, oprnd2: 0));
  return loc_result;
}
14434 | |
14435 | static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT); |
14436 | |
14437 | /* Return a location descriptor that designates a constant i, |
14438 | as a compound operation from constant (i >> shift), constant shift |
14439 | and DW_OP_shl. */ |
14440 | |
14441 | static dw_loc_descr_ref |
14442 | int_shift_loc_descriptor (HOST_WIDE_INT i, int shift) |
14443 | { |
14444 | dw_loc_descr_ref ret = int_loc_descriptor (i >> shift); |
14445 | add_loc_descr (list_head: &ret, descr: int_loc_descriptor (shift)); |
14446 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_shl, oprnd1: 0, oprnd2: 0)); |
14447 | return ret; |
14448 | } |
14449 | |
14450 | /* Return a location descriptor that designates constant POLY_I. */ |
14451 | |
static dw_loc_descr_ref
int_loc_descriptor (poly_int64 poly_i)
{
  enum dwarf_location_atom op;

  HOST_WIDE_INT i;
  if (!poly_i.is_constant (const_value: &i))
    {
      /* Create location descriptions for the non-constant part and
	 add any constant offset at the end.  */
      dw_loc_descr_ref ret = NULL;
      HOST_WIDE_INT constant = poly_i.coeffs[0];
      for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
	{
	  HOST_WIDE_INT coeff = poly_i.coeffs[j];
	  if (coeff != 0)
	    {
	      dw_loc_descr_ref start = ret;
	      unsigned int factor;
	      int bias;
	      /* Ask the target which register encodes this runtime
		 indeterminate, and how to scale/bias it.  */
	      unsigned int regno = targetm.dwarf_poly_indeterminate_value
		(j, &factor, &bias);

	      /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
		 add COEFF * (REGNO / FACTOR) now and subtract
		 COEFF * BIAS from the final constant part.  */
	      constant -= coeff * bias;
	      add_loc_descr (list_head: &ret, descr: new_reg_loc_descr (reg: regno, offset: 0));
	      if (coeff % factor == 0)
		coeff /= factor;
	      else
		{
		  /* FACTOR doesn't divide COEFF: divide the register
		     value instead, via a right shift (FACTOR must be a
		     power of two for this).  */
		  int amount = exact_log2 (x: factor);
		  gcc_assert (amount >= 0);
		  add_loc_descr (list_head: &ret, descr: int_loc_descriptor (poly_i: amount));
		  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_shr, oprnd1: 0, oprnd2: 0));
		}
	      if (coeff != 1)
		{
		  add_loc_descr (list_head: &ret, descr: int_loc_descriptor (poly_i: coeff));
		  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_mul, oprnd1: 0, oprnd2: 0));
		}
	      /* Accumulate into the value built by earlier iterations.  */
	      if (start)
		add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_plus, oprnd1: 0, oprnd2: 0));
	    }
	}
      loc_descr_plus_const (list_head: &ret, poly_offset: constant);
      return ret;
    }

  /* Pick the smallest representation of a constant, rather than just
     defaulting to the LEB encoding.  */
  if (i >= 0)
    {
      int clz = clz_hwi (x: i);
      int ctz = ctz_hwi (x: i);
      if (i <= 31)
	op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
      else if (i <= 0xff)
	op = DW_OP_const1u;
      else if (i <= 0xffff)
	op = DW_OP_const2u;
      else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
	       && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
	/* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
	   DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
	   while DW_OP_const4u is 5 bytes.  */
	return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
      else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
	       && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
	/* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
	   while DW_OP_const4u is 5 bytes.  */
	return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);

      else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
	       && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
		  <= 4)
	{
	  /* As i >= 2**31, the double cast above will yield a negative number.
	     Since wrapping is defined in DWARF expressions we can output big
	     positive integers as small negative ones, regardless of the size
	     of host wide ints.

	     Here, since the evaluator will handle 32-bit values and since i >=
	     2**31, we know it's going to be interpreted as a negative literal:
	     store it this way if we can do better than 5 bytes this way.  */
	  return int_loc_descriptor (poly_i: (HOST_WIDE_INT) (int32_t) i);
	}
      else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
	op = DW_OP_const4u;

      /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
	 least 6 bytes: see if we can do better before falling back to it.  */
      else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
	       && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
	/* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes.  */
	return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
      else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
	       && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
		  >= HOST_BITS_PER_WIDE_INT)
	/* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
	   DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes.  */
	return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
      else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
	       && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
	       && size_of_uleb128 (i) > 6)
	/* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes.  */
	return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
      else
	op = DW_OP_constu;
    }
  else
    {
      if (i >= -0x80)
	op = DW_OP_const1s;
      else if (i >= -0x8000)
	op = DW_OP_const2s;
      else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
	{
	  /* Emitting -i then DW_OP_neg may be shorter than DW_OP_const4s
	     (5 bytes): use it when it wins.  */
	  if (size_of_int_loc_descriptor (i) < 5)
	    {
	      dw_loc_descr_ref ret = int_loc_descriptor (poly_i: -i);
	      add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_neg, oprnd1: 0, oprnd2: 0));
	      return ret;
	    }
	  op = DW_OP_const4s;
	}
      else
	{
	  /* Likewise, compare against DW_OP_consts (1 byte + SLEB128).  */
	  if (size_of_int_loc_descriptor (i)
	      < (unsigned long) 1 + size_of_sleb128 (i))
	    {
	      dw_loc_descr_ref ret = int_loc_descriptor (poly_i: -i);
	      add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_neg, oprnd1: 0, oprnd2: 0));
	      return ret;
	    }
	  op = DW_OP_consts;
	}
    }

  return new_loc_descr (op, oprnd1: i, oprnd2: 0);
}
14594 | |
14595 | /* Likewise, for unsigned constants. */ |
14596 | |
static dw_loc_descr_ref
uint_loc_descriptor (unsigned HOST_WIDE_INT i)
{
  const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
  const unsigned HOST_WIDE_INT max_uint
    = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);

  /* If possible, use the clever signed constants handling.  */
  if (i <= max_int)
    return int_loc_descriptor (poly_i: (HOST_WIDE_INT) i);

  /* Here, we are left with positive numbers that cannot be represented as
     HOST_WIDE_INT, i.e.:
	 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)

     Using DW_OP_const4/8/./u operation to encode them consumes a lot of bytes
     whereas it may be better to output a negative integer: thanks to integer
     wrapping, we know that:
	x = x - 2 ** (DWARF2_ADDR_SIZE * 8)
	  = x - 2 * (max (HOST_WIDE_INT) + 1)
     So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
     small negative integers.  Let's try that in cases it will clearly improve
     the encoding: there is no gain turning DW_OP_const4u into
     DW_OP_const4s.  */
  if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
      && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
	  || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
    {
      /* Subtract max_int + 1 in two steps so neither subtraction
	 overflows signed arithmetic.  */
      const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;

      /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
	 i.e.  0 <= first_shift <= max (HOST_WIDE_INT).  */
      const HOST_WIDE_INT second_shift
	= (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;

      /* So we finally have:
	     -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
	 i.e. min (HOST_WIDE_INT)     <= second_shift <  0.  */
      return int_loc_descriptor (poly_i: second_shift);
    }

  /* Last chance: fallback to a simple constant operation.  */
  return new_loc_descr
     (op: (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
      ? DW_OP_const4u
      : DW_OP_const8u,
      oprnd1: i, oprnd2: 0);
}
14645 | |
14646 | /* Generate and return a location description that computes the unsigned |
14647 | comparison of the two stack top entries (a OP b where b is the top-most |
14648 | entry and a is the second one). The KIND of comparison can be LT_EXPR, |
14649 | LE_EXPR, GT_EXPR or GE_EXPR. */ |
14650 | |
static dw_loc_descr_ref
uint_comparison_loc_list (enum tree_code kind)
{
  enum dwarf_location_atom op, flip_op;
  dw_loc_descr_ref ret, bra_node, jmp_node, tmp;

  /* Map the tree comparison onto the (signed) DWARF comparison op.  */
  switch (kind)
    {
    case LT_EXPR:
      op = DW_OP_lt;
      break;
    case LE_EXPR:
      op = DW_OP_le;
      break;
    case GT_EXPR:
      op = DW_OP_gt;
      break;
    case GE_EXPR:
      op = DW_OP_ge;
      break;
    default:
      gcc_unreachable ();
    }

  /* Forward branches: their targets are patched below, once the
     destination descriptors exist.  */
  bra_node = new_loc_descr (op: DW_OP_bra, oprnd1: 0, oprnd2: 0);
  jmp_node = new_loc_descr (op: DW_OP_skip, oprnd1: 0, oprnd2: 0);

  /* Until DWARFv4, operations all work on signed integers.  It is nevertheless
     possible to perform unsigned comparisons: we just have to distinguish
     three cases:

       1. when a and b have the same sign (as signed integers); then we should
	  return: a OP(signed) b;

       2. when a is a negative signed integer while b is a positive one, then a
	  is a greater unsigned integer than b; likewise when a and b's roles
	  are flipped.

     So first, compare the sign of the two operands.  */
  ret = new_loc_descr (op: DW_OP_over, oprnd1: 0, oprnd2: 0);
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_over, oprnd1: 0, oprnd2: 0));
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_xor, oprnd1: 0, oprnd2: 0));
  /* If they have different signs (i.e. they have different sign bits), then
     the stack top value has now the sign bit set and thus it's smaller than
     zero.  */
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_lit0, oprnd1: 0, oprnd2: 0));
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_lt, oprnd1: 0, oprnd2: 0));
  add_loc_descr (list_head: &ret, descr: bra_node);

  /* We are in case 1.  At this point, we know both operands have the same
     sign, so it's safe to use the built-in signed comparison.  */
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0));
  add_loc_descr (list_head: &ret, descr: jmp_node);

  /* We are in case 2.  Here, we know both operands do not have the same sign,
     so we have to flip the signed comparison.  */
  flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
  tmp = new_loc_descr (op: flip_op, oprnd1: 0, oprnd2: 0);
  /* Patch the conditional branch to target the flipped comparison.  */
  bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
  bra_node->dw_loc_oprnd1.v.val_loc = tmp;
  add_loc_descr (list_head: &ret, descr: tmp);

  /* This dummy operation is necessary to make the two branches join.  */
  tmp = new_loc_descr (op: DW_OP_nop, oprnd1: 0, oprnd2: 0);
  jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
  jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
  add_loc_descr (list_head: &ret, descr: tmp);

  return ret;
}
14721 | |
14722 | /* Likewise, but takes the location description lists (might be destructive on |
14723 | them). Return NULL if either is NULL or if concatenation fails. */ |
14724 | |
14725 | static dw_loc_list_ref |
14726 | loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right, |
14727 | enum tree_code kind) |
14728 | { |
14729 | if (left == NULL || right == NULL) |
14730 | return NULL; |
14731 | |
14732 | add_loc_list (ret: &left, list: right); |
14733 | if (left == NULL) |
14734 | return NULL; |
14735 | |
14736 | add_loc_descr_to_each (list: left, ref: uint_comparison_loc_list (kind)); |
14737 | return left; |
14738 | } |
14739 | |
14740 | /* Return size_of_locs (int_shift_loc_descriptor (i, shift)) |
14741 | without actually allocating it. */ |
14742 | |
14743 | static unsigned long |
14744 | size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift) |
14745 | { |
14746 | return size_of_int_loc_descriptor (i >> shift) |
14747 | + size_of_int_loc_descriptor (shift) |
14748 | + 1; |
14749 | } |
14750 | |
/* Return size_of_locs (int_loc_descriptor (i)) without
   actually allocating it.  The case analysis below must stay in sync
   with int_loc_descriptor's choice of encoding for each value range.  */

static unsigned long
size_of_int_loc_descriptor (HOST_WIDE_INT i)
{
  unsigned long s;

  if (i >= 0)
    {
      int clz, ctz;
      /* DW_OP_lit0 .. DW_OP_lit31: a single opcode byte.  */
      if (i <= 31)
	return 1;
      /* DW_OP_const1u: opcode plus one operand byte.  */
      else if (i <= 0xff)
	return 2;
      /* DW_OP_const2u: opcode plus two operand bytes.  */
      else if (i <= 0xffff)
	return 3;
      /* For wider values, consider emitting a narrower constant followed
	 by a left shift when the significant bits form a short-enough
	 window (counted via leading/trailing zeros).  */
      clz = clz_hwi (x: i);
      ctz = ctz_hwi (x: i);
      /* Significant window fits in 5 bits and the shifted-down value is
	 still representable: lit/const1u plus shift.  */
      if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
	  && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
	return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
						 - clz - 5);
      /* Window fits in 8 bits with a literal shift count.  */
      else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
	       && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
	return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
						 - clz - 8);
      /* On 32-bit targets a value with the high bit set may be cheaper
	 as its sign-extended 32-bit form (DW_OP_const4s or smaller).  */
      else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
	       && size_of_int_loc_descriptor (i: (HOST_WIDE_INT) (int32_t) i)
		  <= 4)
	return size_of_int_loc_descriptor (i: (HOST_WIDE_INT) (int32_t) i);
      /* DW_OP_const4u: opcode plus four operand bytes.  */
      else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
	return 5;
      /* Beyond 32 bits, compare the DW_OP_constu ULEB128 encoding against
	 the remaining shifted alternatives and pick the cheapest.  */
      s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
      if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
	  && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
	return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
						 - clz - 8);
      else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
	       && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
	return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
						 - clz - 16);
      else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
	       && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
	       && s > 6)
	return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
						 - clz - 32);
      else
	/* DW_OP_constu: opcode plus ULEB128 operand.  */
	return 1 + s;
    }
  else
    {
      /* DW_OP_const1s: opcode plus one signed operand byte.  */
      if (i >= -0x80)
	return 2;
      /* DW_OP_const2s: opcode plus two signed operand bytes.  */
      else if (i >= -0x8000)
	return 3;
      else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
	{
	  /* Encoding -I and negating (one extra opcode byte) may beat
	     DW_OP_const4s; guard against I == HOST_WIDE_INT_MIN where
	     -I overflows back to I.  */
	  if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
	    {
	      s = size_of_int_loc_descriptor (i: -i) + 1;
	      if (s < 5)
		return s;
	    }
	  return 5;
	}
      else
	{
	  /* DW_OP_consts: opcode plus SLEB128 operand, unless negate
	     form is shorter (same HOST_WIDE_INT_MIN guard as above).  */
	  unsigned long r = 1 + size_of_sleb128 (i);
	  if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
	    {
	      s = size_of_int_loc_descriptor (i: -i) + 1;
	      if (s < r)
		return s;
	    }
	  return r;
	}
    }
}
14830 | |
14831 | /* Return loc description representing "address" of integer value. |
14832 | This can appear only as toplevel expression. */ |
14833 | |
14834 | static dw_loc_descr_ref |
14835 | address_of_int_loc_descriptor (int size, HOST_WIDE_INT i) |
14836 | { |
14837 | int litsize; |
14838 | dw_loc_descr_ref loc_result = NULL; |
14839 | |
14840 | if (!(dwarf_version >= 4 || !dwarf_strict)) |
14841 | return NULL; |
14842 | |
14843 | litsize = size_of_int_loc_descriptor (i); |
14844 | /* Determine if DW_OP_stack_value or DW_OP_implicit_value |
14845 | is more compact. For DW_OP_stack_value we need: |
14846 | litsize + 1 (DW_OP_stack_value) |
14847 | and for DW_OP_implicit_value: |
14848 | 1 (DW_OP_implicit_value) + 1 (length) + size. */ |
14849 | if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size) |
14850 | { |
14851 | loc_result = int_loc_descriptor (poly_i: i); |
14852 | add_loc_descr (list_head: &loc_result, |
14853 | descr: new_loc_descr (op: DW_OP_stack_value, oprnd1: 0, oprnd2: 0)); |
14854 | return loc_result; |
14855 | } |
14856 | |
14857 | loc_result = new_loc_descr (op: DW_OP_implicit_value, |
14858 | oprnd1: size, oprnd2: 0); |
14859 | loc_result->dw_loc_oprnd2.val_class = dw_val_class_const; |
14860 | loc_result->dw_loc_oprnd2.v.val_int = i; |
14861 | return loc_result; |
14862 | } |
14863 | |
/* Return a location descriptor that designates a base+offset location.
   REG is the base register, OFFSET the displacement from it, and
   INITIALIZED records whether the variable is known to be initialized
   (uninitialized locations get a trailing DW_OP_GNU_uninit marker).  */

static dw_loc_descr_ref
based_loc_descr (rtx reg, poly_int64 offset,
		 enum var_init_status initialized)
{
  unsigned int regno;
  dw_loc_descr_ref result;
  dw_fde_ref fde = cfun->fde;

  /* We only use "frame base" when we're sure we're talking about the
     post-prologue local stack frame.  We do this by *not* running
     register elimination until this point, and recognizing the special
     argument pointer and soft frame pointer rtx's.  */
  if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
    {
      /* Ask the register allocator what the virtual register actually
	 eliminates to.  */
      rtx elim = (ira_use_lra_p
		  ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
		  : eliminate_regs (reg, VOIDmode, NULL_RTX));

      if (elim != reg)
	{
	  /* Allow hard frame pointer here even if frame pointer
	     isn't used since hard frame pointer is encoded with
	     DW_OP_fbreg which uses the DW_AT_frame_base attribute,
	     not hard frame pointer directly.  */
	  elim = strip_offset_and_add (x: elim, offset: &offset);
	  gcc_assert (elim == hard_frame_pointer_rtx
		      || elim == stack_pointer_rtx);

	  /* If drap register is used to align stack, use frame
	     pointer + offset to access stack variables.  If stack
	     is aligned without drap, use stack pointer + offset to
	     access stack variables.  */
	  if (crtl->stack_realign_tried
	      && reg == frame_pointer_rtx)
	    {
	      int base_reg
		= DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
				      ? HARD_FRAME_POINTER_REGNUM
				      : REGNO (elim));
	      return new_reg_loc_descr (reg: base_reg, offset);
	    }

	  /* Otherwise express the location relative to the frame base
	     (DW_OP_fbreg), folding in the frame-base displacement.  */
	  gcc_assert (frame_pointer_fb_offset_valid);
	  offset += frame_pointer_fb_offset;
	  HOST_WIDE_INT const_offset;
	  if (offset.is_constant (const_value: &const_offset))
	    return new_loc_descr (op: DW_OP_fbreg, oprnd1: const_offset, oprnd2: 0);
	  else
	    {
	      /* Polynomial (runtime-variable) offset: emit DW_OP_fbreg 0
		 and add the offset as a separate computation.  */
	      dw_loc_descr_ref ret = new_loc_descr (op: DW_OP_fbreg, oprnd1: 0, oprnd2: 0);
	      loc_descr_plus_const (list_head: &ret, poly_offset: offset);
	      return ret;
	    }
	}
    }

  regno = REGNO (reg);
#ifdef LEAF_REG_REMAP
  /* On targets with leaf-register windows (e.g. SPARC), remap to the
     register actually used by a leaf function.  */
  if (crtl->uses_only_leaf_regs)
    {
      int leaf_reg = LEAF_REG_REMAP (regno);
      if (leaf_reg != -1)
	regno = (unsigned) leaf_reg;
    }
#endif
  regno = DWARF_FRAME_REGNUM (regno);

  HOST_WIDE_INT const_offset;
  if (!optimize && fde
      && (fde->drap_reg == regno || fde->vdrap_reg == regno)
      && offset.is_constant (const_value: &const_offset))
    {
      /* Use cfa+offset to represent the location of arguments passed
	 on the stack when drap is used to align stack.
	 Only do this when not optimizing, for optimized code var-tracking
	 is supposed to track where the arguments live and the register
	 used as vdrap or drap in some spot might be used for something
	 else in other part of the routine.  */
      return new_loc_descr (op: DW_OP_fbreg, oprnd1: const_offset, oprnd2: 0);
    }

  result = new_reg_loc_descr (reg: regno, offset);

  if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
    add_loc_descr (list_head: &result, descr: new_loc_descr (op: DW_OP_GNU_uninit, oprnd1: 0, oprnd2: 0));

  return result;
}
14954 | |
14955 | /* Return true if this RTL expression describes a base+offset calculation. */ |
14956 | |
14957 | static inline bool |
14958 | is_based_loc (const_rtx rtl) |
14959 | { |
14960 | return (GET_CODE (rtl) == PLUS |
14961 | && ((REG_P (XEXP (rtl, 0)) |
14962 | && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER |
14963 | && CONST_INT_P (XEXP (rtl, 1))))); |
14964 | } |
14965 | |
14966 | /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0) |
14967 | failed. */ |
14968 | |
14969 | static dw_loc_descr_ref |
14970 | tls_mem_loc_descriptor (rtx mem) |
14971 | { |
14972 | tree base; |
14973 | dw_loc_descr_ref loc_result; |
14974 | |
14975 | if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem)) |
14976 | return NULL; |
14977 | |
14978 | base = get_base_address (MEM_EXPR (mem)); |
14979 | if (base == NULL |
14980 | || !VAR_P (base) |
14981 | || !DECL_THREAD_LOCAL_P (base)) |
14982 | return NULL; |
14983 | |
14984 | loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL); |
14985 | if (loc_result == NULL) |
14986 | return NULL; |
14987 | |
14988 | if (maybe_ne (MEM_OFFSET (mem), b: 0)) |
14989 | loc_descr_plus_const (list_head: &loc_result, MEM_OFFSET (mem)); |
14990 | |
14991 | return loc_result; |
14992 | } |
14993 | |
14994 | /* Output debug info about reason why we failed to expand expression as dwarf |
14995 | expression. */ |
14996 | |
14997 | static void |
14998 | expansion_failed (tree expr, rtx rtl, char const *reason) |
14999 | { |
15000 | if (dump_file && (dump_flags & TDF_DETAILS)) |
15001 | { |
15002 | fprintf (stream: dump_file, format: "Failed to expand as dwarf: " ); |
15003 | if (expr) |
15004 | print_generic_expr (dump_file, expr, dump_flags); |
15005 | if (rtl) |
15006 | { |
15007 | fprintf (stream: dump_file, format: "\n" ); |
15008 | print_rtl (dump_file, rtl); |
15009 | } |
15010 | fprintf (stream: dump_file, format: "\nReason: %s\n" , reason); |
15011 | } |
15012 | } |
15013 | |
/* Helper function for const_ok_for_output.  Checks a single SYMBOL_REF
   or other constant sub-rtx RTL and returns false when it cannot be
   represented in DW_OP_addr / DW_AT_const_value.  */

static bool
const_ok_for_output_1 (rtx rtl)
{
  /* First let the target veto the constant outright.  */
  if (targetm.const_not_ok_for_debug_p (rtl))
    {
      if (GET_CODE (rtl) != UNSPEC)
	{
	  expansion_failed (NULL_TREE, rtl,
			    reason: "Expression rejected for debug by the backend.\n");
	  return false;
	}

      /* If delegitimize_address couldn't do anything with the UNSPEC, and
	 the target hook doesn't explicitly allow it in debug info, assume
	 we can't express it in the debug info.  */
      /* Don't complain about TLS UNSPECs, those are just too hard to
	 delegitimize.  Note this could be a non-decl SYMBOL_REF such as
	 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
	 rather than DECL_THREAD_LOCAL_P is not just an optimization.  */
      if (flag_checking
	  && (XVECLEN (rtl, 0) == 0
	      || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
	      || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
	/* Under checking, report the UNSPEC (by name when the target
	   provides unspec_strings) so the missing delegitimization can
	   be fixed.  */
	inform (current_function_decl
		? DECL_SOURCE_LOCATION (current_function_decl)
		: UNKNOWN_LOCATION,
#if NUM_UNSPEC_VALUES > 0
		"non-delegitimized UNSPEC %s (%d) found in variable location",
		((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
		 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
#else
		"non-delegitimized UNSPEC %d found in variable location",
#endif
		XINT (rtl, 1));
      expansion_failed (NULL_TREE, rtl,
			reason: "UNSPEC hasn't been delegitimized.\n");
      return false;
    }

  /* Polynomial constants have no DWARF representation here.  */
  if (CONST_POLY_INT_P (rtl))
    return false;

  /* FIXME: Refer to PR60655. It is possible for simplification
     of rtl expressions in var tracking to produce such expressions.
     We should really identify / validate expressions
     enclosed in CONST that can be handled by assemblers on various
     targets and only handle legitimate cases here.  */
  switch (GET_CODE (rtl))
    {
    case SYMBOL_REF:
      /* Fall through to the SYMBOL_REF checks below.  */
      break;
    case NOT:
    case NEG:
      return false;
    case PLUS:
      {
	/* Make sure SYMBOL_REFs/UNSPECs are at most in one of the
	   operands.  */
	subrtx_var_iterator::array_type array;
	bool first = false;
	FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
	  if (SYMBOL_REF_P (*iter)
	      || LABEL_P (*iter)
	      || GET_CODE (*iter) == UNSPEC)
	    {
	      first = true;
	      break;
	    }
	if (!first)
	  return true;
	FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 1), ALL)
	  if (SYMBOL_REF_P (*iter)
	      || LABEL_P (*iter)
	      || GET_CODE (*iter) == UNSPEC)
	    return false;
	return true;
      }
    case MINUS:
      {
	/* Disallow negation of SYMBOL_REFs or UNSPECs when they
	   appear in the second operand of MINUS.  */
	subrtx_var_iterator::array_type array;
	FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 1), ALL)
	  if (SYMBOL_REF_P (*iter)
	      || LABEL_P (*iter)
	      || GET_CODE (*iter) == UNSPEC)
	    return false;
	return true;
      }
    default:
      return true;
    }

  /* From here on RTL is a SYMBOL_REF.  */
  if (CONSTANT_POOL_ADDRESS_P (rtl))
    {
      bool marked;
      get_pool_constant_mark (rtl, &marked);
      /* If all references to this pool constant were optimized away,
	 it was not output and thus we can't represent it.  */
      if (!marked)
	{
	  expansion_failed (NULL_TREE, rtl,
			    reason: "Constant was removed from constant pool.\n");
	  return false;
	}
    }

  /* Thread-local symbols need runtime TLS resolution, which a plain
     address constant cannot express.  */
  if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
    return false;

  /* Avoid references to external symbols in debug info, on several targets
     the linker might even refuse to link when linking a shared library,
     and in many other cases the relocations for .debug_info/.debug_loc are
     dropped, so the address becomes zero anyway.  Hidden symbols, guaranteed
     to be defined within the same shared library or executable are fine.  */
  if (SYMBOL_REF_EXTERNAL_P (rtl))
    {
      tree decl = SYMBOL_REF_DECL (rtl);

      if (decl == NULL || !targetm.binds_local_p (decl))
	{
	  expansion_failed (NULL_TREE, rtl,
			    reason: "Symbol not defined in current TU.\n");
	  return false;
	}
    }

  return true;
}
15145 | |
15146 | /* Return true if constant RTL can be emitted in DW_OP_addr or |
15147 | DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or |
15148 | non-marked constant pool SYMBOL_REFs can't be referenced in it. */ |
15149 | |
15150 | static bool |
15151 | const_ok_for_output (rtx rtl) |
15152 | { |
15153 | if (GET_CODE (rtl) == SYMBOL_REF) |
15154 | return const_ok_for_output_1 (rtl); |
15155 | |
15156 | if (GET_CODE (rtl) == CONST) |
15157 | { |
15158 | subrtx_var_iterator::array_type array; |
15159 | FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL) |
15160 | if (!const_ok_for_output_1 (rtl: *iter)) |
15161 | return false; |
15162 | return true; |
15163 | } |
15164 | |
15165 | return true; |
15166 | } |
15167 | |
15168 | /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP |
15169 | if possible, NULL otherwise. */ |
15170 | |
15171 | static dw_die_ref |
15172 | base_type_for_mode (machine_mode mode, bool unsignedp) |
15173 | { |
15174 | dw_die_ref type_die; |
15175 | tree type = lang_hooks.types.type_for_mode (mode, unsignedp); |
15176 | |
15177 | if (type == NULL) |
15178 | return NULL; |
15179 | switch (TREE_CODE (type)) |
15180 | { |
15181 | case INTEGER_TYPE: |
15182 | case REAL_TYPE: |
15183 | break; |
15184 | default: |
15185 | return NULL; |
15186 | } |
15187 | type_die = lookup_type_die (type); |
15188 | if (!type_die) |
15189 | type_die = modified_type_die (type, cv_quals: TYPE_UNQUALIFIED, reverse: false, |
15190 | context_die: comp_unit_die ()); |
15191 | if (type_die == NULL || type_die->die_tag != DW_TAG_base_type) |
15192 | return NULL; |
15193 | return type_die; |
15194 | } |
15195 | |
15196 | /* For OP descriptor assumed to be in unsigned MODE, convert it to a unsigned |
15197 | type matching MODE, or, if MODE is narrower than or as wide as |
15198 | DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not |
15199 | possible. */ |
15200 | |
15201 | static dw_loc_descr_ref |
15202 | convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op) |
15203 | { |
15204 | machine_mode outer_mode = mode; |
15205 | dw_die_ref type_die; |
15206 | dw_loc_descr_ref cvt; |
15207 | |
15208 | if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE) |
15209 | { |
15210 | add_loc_descr (list_head: &op, descr: new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0)); |
15211 | return op; |
15212 | } |
15213 | type_die = base_type_for_mode (mode: outer_mode, unsignedp: 1); |
15214 | if (type_die == NULL) |
15215 | return NULL; |
15216 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
15217 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
15218 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
15219 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
15220 | add_loc_descr (list_head: &op, descr: cvt); |
15221 | return op; |
15222 | } |
15223 | |
15224 | /* Return location descriptor for comparison OP with operands OP0 and OP1. */ |
15225 | |
15226 | static dw_loc_descr_ref |
15227 | compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0, |
15228 | dw_loc_descr_ref op1) |
15229 | { |
15230 | dw_loc_descr_ref ret = op0; |
15231 | add_loc_descr (list_head: &ret, descr: op1); |
15232 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
15233 | if (STORE_FLAG_VALUE != 1) |
15234 | { |
15235 | add_loc_descr (list_head: &ret, descr: int_loc_descriptor (STORE_FLAG_VALUE)); |
15236 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_mul, oprnd1: 0, oprnd2: 0)); |
15237 | } |
15238 | return ret; |
15239 | } |
15240 | |
15241 | /* Subroutine of scompare_loc_descriptor for the case in which we're |
15242 | comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE, |
15243 | and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */ |
15244 | |
15245 | static dw_loc_descr_ref |
15246 | scompare_loc_descriptor_wide (enum dwarf_location_atom op, |
15247 | scalar_int_mode op_mode, |
15248 | dw_loc_descr_ref op0, dw_loc_descr_ref op1) |
15249 | { |
15250 | dw_die_ref type_die = base_type_for_mode (mode: op_mode, unsignedp: 0); |
15251 | dw_loc_descr_ref cvt; |
15252 | |
15253 | if (type_die == NULL) |
15254 | return NULL; |
15255 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
15256 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
15257 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
15258 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
15259 | add_loc_descr (list_head: &op0, descr: cvt); |
15260 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
15261 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
15262 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
15263 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
15264 | add_loc_descr (list_head: &op1, descr: cvt); |
15265 | return compare_loc_descriptor (op, op0, op1); |
15266 | } |
15267 | |
/* Subroutine of scompare_loc_descriptor for the case in which we're
   comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
   and in which OP_MODE is smaller than DWARF2_ADDR_SIZE.  The general
   strategy is to shift both operands up so their sign bit lands in the
   top bit of the address-sized stack slot, making the generic signed
   comparison give the right answer.  */

static dw_loc_descr_ref
scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
				scalar_int_mode op_mode,
				dw_loc_descr_ref op0, dw_loc_descr_ref op1)
{
  int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (mode: op_mode)) * BITS_PER_UNIT;
  /* For eq/ne, if the operands are known to be zero-extended,
     there is no need to do the fancy shifting up.  */
  if (op == DW_OP_eq || op == DW_OP_ne)
    {
      dw_loc_descr_ref last0, last1;
      /* Find the final operation of each operand's descriptor chain.  */
      for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
	;
      for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
	;
      /* deref_size zero extends, and for constants we can check
	 whether they are zero extended or not.  */
      if (((last0->dw_loc_opc == DW_OP_deref_size
	    && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (mode: op_mode))
	   || (CONST_INT_P (XEXP (rtl, 0))
	       && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
		  == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
	  && ((last1->dw_loc_opc == DW_OP_deref_size
	       && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (mode: op_mode))
	      || (CONST_INT_P (XEXP (rtl, 1))
		  && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
		     == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
	return compare_loc_descriptor (op, op0, op1);

      /* EQ/NE comparison against constant in narrower type than
	 DWARF2_ADDR_SIZE can be performed either as
	 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
	 DW_OP_{eq,ne}
	 or
	 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
	 DW_OP_{eq,ne}.  Pick whatever is shorter.  */
      if (CONST_INT_P (XEXP (rtl, 1))
	  && GET_MODE_BITSIZE (mode: op_mode) < HOST_BITS_PER_WIDE_INT
	  && (size_of_int_loc_descriptor (i: shift) + 1
	      + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
	      >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
		 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
					       & GET_MODE_MASK (op_mode))))
	{
	  /* Mask-and-compare form won (or tied): mask OP0 down to
	     OP_MODE and compare against the masked constant.  */
	  add_loc_descr (list_head: &op0, descr: int_loc_descriptor (GET_MODE_MASK (op_mode)));
	  add_loc_descr (list_head: &op0, descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0));
	  op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
				    & GET_MODE_MASK (op_mode));
	  return compare_loc_descriptor (op, op0, op1);
	}
    }
  /* General case: shift both operands left so the mode's sign bit
     becomes the stack slot's sign bit, then compare.  */
  add_loc_descr (list_head: &op0, descr: int_loc_descriptor (poly_i: shift));
  add_loc_descr (list_head: &op0, descr: new_loc_descr (op: DW_OP_shl, oprnd1: 0, oprnd2: 0));
  if (CONST_INT_P (XEXP (rtl, 1)))
    /* A constant second operand can be pre-shifted at compile time.  */
    op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
  else
    {
      add_loc_descr (list_head: &op1, descr: int_loc_descriptor (poly_i: shift));
      add_loc_descr (list_head: &op1, descr: new_loc_descr (op: DW_OP_shl, oprnd1: 0, oprnd2: 0));
    }
  return compare_loc_descriptor (op, op0, op1);
}
15334 | |
15335 | /* Return location descriptor for signed comparison OP RTL. */ |
15336 | |
15337 | static dw_loc_descr_ref |
15338 | scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl, |
15339 | machine_mode mem_mode) |
15340 | { |
15341 | machine_mode op_mode = GET_MODE (XEXP (rtl, 0)); |
15342 | dw_loc_descr_ref op0, op1; |
15343 | |
15344 | if (op_mode == VOIDmode) |
15345 | op_mode = GET_MODE (XEXP (rtl, 1)); |
15346 | if (op_mode == VOIDmode) |
15347 | return NULL; |
15348 | |
15349 | scalar_int_mode int_op_mode; |
15350 | if (dwarf_strict |
15351 | && dwarf_version < 5 |
15352 | && (!is_a <scalar_int_mode> (m: op_mode, result: &int_op_mode) |
15353 | || GET_MODE_SIZE (mode: int_op_mode) > DWARF2_ADDR_SIZE)) |
15354 | return NULL; |
15355 | |
15356 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode: op_mode, mem_mode, |
15357 | VAR_INIT_STATUS_INITIALIZED); |
15358 | op1 = mem_loc_descriptor (XEXP (rtl, 1), mode: op_mode, mem_mode, |
15359 | VAR_INIT_STATUS_INITIALIZED); |
15360 | |
15361 | if (op0 == NULL || op1 == NULL) |
15362 | return NULL; |
15363 | |
15364 | if (is_a <scalar_int_mode> (m: op_mode, result: &int_op_mode)) |
15365 | { |
15366 | if (GET_MODE_SIZE (mode: int_op_mode) < DWARF2_ADDR_SIZE) |
15367 | return scompare_loc_descriptor_narrow (op, rtl, op_mode: int_op_mode, op0, op1); |
15368 | |
15369 | if (GET_MODE_SIZE (mode: int_op_mode) > DWARF2_ADDR_SIZE) |
15370 | return scompare_loc_descriptor_wide (op, op_mode: int_op_mode, op0, op1); |
15371 | } |
15372 | return compare_loc_descriptor (op, op0, op1); |
15373 | } |
15374 | |
/* Return location descriptor for unsigned comparison OP RTL.  MEM_MODE
   is the mode of the enclosing MEM, if any.  Since generic DWARF stack
   comparisons are signed, narrow operands are zero-extended by masking
   and address-sized operands are biased so the signed compare gives the
   unsigned result.  */

static dw_loc_descr_ref
ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
			 machine_mode mem_mode)
{
  dw_loc_descr_ref op0, op1;

  /* Take the mode from whichever operand carries one.  */
  machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
  if (test_op_mode == VOIDmode)
    test_op_mode = GET_MODE (XEXP (rtl, 1));

  scalar_int_mode op_mode;
  if (!is_a <scalar_int_mode> (m: test_op_mode, result: &op_mode))
    return NULL;

  /* Strict pre-DWARF5 output has no typed stack values for wide modes.  */
  if (dwarf_strict
      && dwarf_version < 5
      && GET_MODE_SIZE (mode: op_mode) > DWARF2_ADDR_SIZE)
    return NULL;

  op0 = mem_loc_descriptor (XEXP (rtl, 0), mode: op_mode, mem_mode,
			    VAR_INIT_STATUS_INITIALIZED);
  op1 = mem_loc_descriptor (XEXP (rtl, 1), mode: op_mode, mem_mode,
			    VAR_INIT_STATUS_INITIALIZED);

  if (op0 == NULL || op1 == NULL)
    return NULL;

  if (GET_MODE_SIZE (mode: op_mode) < DWARF2_ADDR_SIZE)
    {
      /* Narrower than an address: zero-extend both operands by masking,
	 after which the signed comparison is correct (both values are
	 non-negative).  */
      HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
      dw_loc_descr_ref last0, last1;
      /* Find the final operation of each operand's descriptor chain.  */
      for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
	;
      for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
	;
      if (CONST_INT_P (XEXP (rtl, 0)))
	op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
      /* deref_size zero extends, so no need to mask it again.  */
      else if (last0->dw_loc_opc != DW_OP_deref_size
	       || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (mode: op_mode))
	{
	  add_loc_descr (list_head: &op0, descr: int_loc_descriptor (poly_i: mask));
	  add_loc_descr (list_head: &op0, descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0));
	}
      if (CONST_INT_P (XEXP (rtl, 1)))
	op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
      /* deref_size zero extends, so no need to mask it again.  */
      else if (last1->dw_loc_opc != DW_OP_deref_size
	       || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (mode: op_mode))
	{
	  add_loc_descr (list_head: &op1, descr: int_loc_descriptor (poly_i: mask));
	  add_loc_descr (list_head: &op1, descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0));
	}
    }
  else if (GET_MODE_SIZE (mode: op_mode) == DWARF2_ADDR_SIZE)
    {
      /* Exactly address-sized: add 2^(bits-1) to both operands, which
	 flips the sign bit and turns unsigned order into signed order.  */
      HOST_WIDE_INT bias = 1;
      bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
      add_loc_descr (list_head: &op0, descr: new_loc_descr (op: DW_OP_plus_uconst, oprnd1: bias, oprnd2: 0));
      if (CONST_INT_P (XEXP (rtl, 1)))
	/* Bias a constant operand at compile time.  */
	op1 = int_loc_descriptor (poly_i: (unsigned HOST_WIDE_INT) bias
				  + INTVAL (XEXP (rtl, 1)));
      else
	add_loc_descr (list_head: &op1, descr: new_loc_descr (op: DW_OP_plus_uconst,
				      oprnd1: bias, oprnd2: 0));
    }
  return compare_loc_descriptor (op, op0, op1);
}
15445 | |
/* Return location descriptor for {U,S}{MIN,MAX}.  The sequence computes
   both operands, duplicates them on the DWARF stack, compares, and uses
   DW_OP_bra to keep whichever operand won the comparison.  */

static dw_loc_descr_ref
minmax_loc_descriptor (rtx rtl, machine_mode mode,
		       machine_mode mem_mode)
{
  enum dwarf_location_atom op;
  dw_loc_descr_ref op0, op1, ret;
  dw_loc_descr_ref bra_node, drop_node;

  /* Strict pre-DWARF5 output has no typed stack values for wide modes.  */
  scalar_int_mode int_mode;
  if (dwarf_strict
      && dwarf_version < 5
      && (!is_a <scalar_int_mode> (m: mode, result: &int_mode)
	  || GET_MODE_SIZE (mode: int_mode) > DWARF2_ADDR_SIZE))
    return NULL;

  op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
			    VAR_INIT_STATUS_INITIALIZED);
  op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
			    VAR_INIT_STATUS_INITIALIZED);

  if (op0 == NULL || op1 == NULL)
    return NULL;

  /* Arrange the stack as A B A B so the comparison consumes one copy
     of each operand and leaves A B for the selection below.  */
  add_loc_descr (list_head: &op0, descr: new_loc_descr (op: DW_OP_dup, oprnd1: 0, oprnd2: 0));
  add_loc_descr (list_head: &op1, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0));
  add_loc_descr (list_head: &op1, descr: new_loc_descr (op: DW_OP_over, oprnd1: 0, oprnd2: 0));
  if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
    {
      /* Checked by the caller.  */
      int_mode = as_a <scalar_int_mode> (m: mode);
      if (GET_MODE_SIZE (mode: int_mode) < DWARF2_ADDR_SIZE)
	{
	  /* Unsigned, narrower than an address: zero-extend by masking
	     so the signed stack comparison is correct.  */
	  HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
	  add_loc_descr (list_head: &op0, descr: int_loc_descriptor (poly_i: mask));
	  add_loc_descr (list_head: &op0, descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0));
	  add_loc_descr (list_head: &op1, descr: int_loc_descriptor (poly_i: mask));
	  add_loc_descr (list_head: &op1, descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0));
	}
      else if (GET_MODE_SIZE (mode: int_mode) == DWARF2_ADDR_SIZE)
	{
	  /* Unsigned, address-sized: bias both operands by 2^(bits-1)
	     so signed order matches unsigned order.  */
	  HOST_WIDE_INT bias = 1;
	  bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
	  add_loc_descr (list_head: &op0, descr: new_loc_descr (op: DW_OP_plus_uconst, oprnd1: bias, oprnd2: 0));
	  add_loc_descr (list_head: &op1, descr: new_loc_descr (op: DW_OP_plus_uconst, oprnd1: bias, oprnd2: 0));
	}
    }
  else if (is_a <scalar_int_mode> (m: mode, result: &int_mode)
	   && GET_MODE_SIZE (mode: int_mode) < DWARF2_ADDR_SIZE)
    {
      /* Signed, narrower than an address: shift both operands up so the
	 mode's sign bit lands in the slot's sign bit.  */
      int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (mode: int_mode)) * BITS_PER_UNIT;
      add_loc_descr (list_head: &op0, descr: int_loc_descriptor (poly_i: shift));
      add_loc_descr (list_head: &op0, descr: new_loc_descr (op: DW_OP_shl, oprnd1: 0, oprnd2: 0));
      add_loc_descr (list_head: &op1, descr: int_loc_descriptor (poly_i: shift));
      add_loc_descr (list_head: &op1, descr: new_loc_descr (op: DW_OP_shl, oprnd1: 0, oprnd2: 0));
    }
  else if (is_a <scalar_int_mode> (m: mode, result: &int_mode)
	   && GET_MODE_SIZE (mode: int_mode) > DWARF2_ADDR_SIZE)
    {
      /* Wider than an address: convert both operands to the signed base
	 type of INT_MODE for a typed comparison.  */
      dw_die_ref type_die = base_type_for_mode (mode: int_mode, unsignedp: 0);
      dw_loc_descr_ref cvt;
      if (type_die == NULL)
	return NULL;
      cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0);
      cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
      cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
      cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
      add_loc_descr (list_head: &op0, descr: cvt);
      cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0);
      cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
      cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
      cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
      add_loc_descr (list_head: &op1, descr: cvt);
    }

  /* MIN keeps the smaller value, so branch when A < B; MAX when A > B.  */
  if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
    op = DW_OP_lt;
  else
    op = DW_OP_gt;
  ret = op0;
  add_loc_descr (list_head: &ret, descr: op1);
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0));
  /* Stack here is A B flag.  If the flag says A wins, branch past the
     swap so the final DW_OP_drop discards B; otherwise swap first so
     the drop discards A.  */
  bra_node = new_loc_descr (op: DW_OP_bra, oprnd1: 0, oprnd2: 0);
  add_loc_descr (list_head: &ret, descr: bra_node);
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0));
  drop_node = new_loc_descr (op: DW_OP_drop, oprnd1: 0, oprnd2: 0);
  add_loc_descr (list_head: &ret, descr: drop_node);
  bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
  bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
  /* Wide signed results were converted to a typed value above; convert
     the winner back for the caller.  */
  if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
      && is_a <scalar_int_mode> (m: mode, result: &int_mode)
      && GET_MODE_SIZE (mode: int_mode) > DWARF2_ADDR_SIZE)
    ret = convert_descriptor_to_mode (mode: int_mode, op: ret);
  return ret;
}
15542 | |
15543 | /* Helper function for mem_loc_descriptor. Perform OP binary op, |
15544 | but after converting arguments to type_die, afterwards |
15545 | convert back to unsigned. */ |
15546 | |
15547 | static dw_loc_descr_ref |
15548 | typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die, |
15549 | scalar_int_mode mode, machine_mode mem_mode) |
15550 | { |
15551 | dw_loc_descr_ref cvt, op0, op1; |
15552 | |
15553 | if (type_die == NULL) |
15554 | return NULL; |
15555 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, |
15556 | VAR_INIT_STATUS_INITIALIZED); |
15557 | op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, |
15558 | VAR_INIT_STATUS_INITIALIZED); |
15559 | if (op0 == NULL || op1 == NULL) |
15560 | return NULL; |
15561 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
15562 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
15563 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
15564 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
15565 | add_loc_descr (list_head: &op0, descr: cvt); |
15566 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
15567 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
15568 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
15569 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
15570 | add_loc_descr (list_head: &op1, descr: cvt); |
15571 | add_loc_descr (list_head: &op0, descr: op1); |
15572 | add_loc_descr (list_head: &op0, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
15573 | return convert_descriptor_to_mode (mode, op: op0); |
15574 | } |
15575 | |
15576 | /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value, |
15577 | const0 is DW_OP_lit0 or corresponding typed constant, |
15578 | const1 is DW_OP_lit1 or corresponding typed constant |
15579 | and constMSB is constant with just the MSB bit set |
15580 | for the mode): |
15581 | DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4> |
15582 | L1: const0 DW_OP_swap |
15583 | L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl |
15584 | DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2> |
15585 | L3: DW_OP_drop |
15586 | L4: DW_OP_nop |
15587 | |
15588 | CTZ is similar: |
15589 | DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4> |
15590 | L1: const0 DW_OP_swap |
15591 | L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr |
15592 | DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2> |
15593 | L3: DW_OP_drop |
15594 | L4: DW_OP_nop |
15595 | |
15596 | FFS is similar: |
15597 | DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4> |
15598 | L1: const1 DW_OP_swap |
15599 | L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr |
15600 | DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2> |
15601 | L3: DW_OP_drop |
15602 | L4: DW_OP_nop */ |
15603 | |
static dw_loc_descr_ref
clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
		    machine_mode mem_mode)
{
  /* Emit the bit-scanning loop described in the comment above this
     function; the same skeleton handles CLZ, CTZ and FFS, differing only
     in the zero-input result, the probe mask and the shift direction.  */
  dw_loc_descr_ref op0, ret, tmp;
  HOST_WIDE_INT valv;
  /* One jump/label pair per branch target L1..L4 in the scheme above;
     the jump operands are patched at the end once the labels exist.  */
  dw_loc_descr_ref l1jump, l1label;
  dw_loc_descr_ref l2jump, l2label;
  dw_loc_descr_ref l3jump, l3label;
  dw_loc_descr_ref l4jump, l4label;
  rtx msb;

  if (GET_MODE (XEXP (rtl, 0)) != mode)
    return NULL;

  op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
			    VAR_INIT_STATUS_INITIALIZED);
  if (op0 == NULL)
    return NULL;
  ret = op0;
  /* VALV is the value the expression must yield for a zero input:
     the target-defined CLZ/CTZ value at zero when there is one,
     otherwise the mode bitsize; for FFS it is 0 by definition.  */
  if (GET_CODE (rtl) == CLZ)
    {
      if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
	valv = GET_MODE_BITSIZE (mode);
    }
  else if (GET_CODE (rtl) == FFS)
    valv = 0;
  else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
    valv = GET_MODE_BITSIZE (mode);
  /* If the operand is non-zero, branch to L1 and run the loop;
     otherwise drop it, push VALV and skip to L4 (the end).  */
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_dup, oprnd1: 0, oprnd2: 0));
  l1jump = new_loc_descr (op: DW_OP_bra, oprnd1: 0, oprnd2: 0);
  add_loc_descr (list_head: &ret, descr: l1jump);
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_drop, oprnd1: 0, oprnd2: 0));
  tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
			    VAR_INIT_STATUS_INITIALIZED);
  if (tmp == NULL)
    return NULL;
  add_loc_descr (list_head: &ret, descr: tmp);
  l4jump = new_loc_descr (op: DW_OP_skip, oprnd1: 0, oprnd2: 0);
  add_loc_descr (list_head: &ret, descr: l4jump);
  /* L1: seed the running count — 1 for FFS (bit positions are
     1-based there), 0 for CLZ/CTZ — and swap it under the value.  */
  l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
				? const1_rtx : const0_rtx,
				mode, mem_mode,
				VAR_INIT_STATUS_INITIALIZED);
  if (l1label == NULL)
    return NULL;
  add_loc_descr (list_head: &ret, descr: l1label);
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0));
  /* L2: loop head — duplicate the value for the mask test.  */
  l2label = new_loc_descr (op: DW_OP_dup, oprnd1: 0, oprnd2: 0);
  add_loc_descr (list_head: &ret, descr: l2label);
  /* The probe mask: CLZ scans from the most significant bit, so build a
     constant with only the MSB set; CTZ/FFS probe the low bit with 1.  */
  if (GET_CODE (rtl) != CLZ)
    msb = const1_rtx;
  else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    msb = GEN_INT (HOST_WIDE_INT_1U
		   << (GET_MODE_BITSIZE (mode) - 1));
  else
    msb = immed_wide_int_const
      (wi::set_bit_in_zero (bit: GET_MODE_PRECISION (mode) - 1,
			    precision: GET_MODE_PRECISION (mode)), mode);
  /* A negative CONST_INT would be sign-extended by the generic path, so
     emit it explicitly as an unsigned constant of the right width.  */
  if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
    tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
			 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
			 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), oprnd2: 0);
  else
    tmp = mem_loc_descriptor (msb, mode, mem_mode,
			      VAR_INIT_STATUS_INITIALIZED);
  if (tmp == NULL)
    return NULL;
  add_loc_descr (list_head: &ret, descr: tmp);
  /* If the probed bit is set we are done: branch to L3.  */
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0));
  l3jump = new_loc_descr (op: DW_OP_bra, oprnd1: 0, oprnd2: 0);
  add_loc_descr (list_head: &ret, descr: l3jump);
  /* Otherwise shift the value one position toward the probed end
     (left for CLZ, right for CTZ/FFS), bump the count, loop to L2.  */
  tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
			    VAR_INIT_STATUS_INITIALIZED);
  if (tmp == NULL)
    return NULL;
  add_loc_descr (list_head: &ret, descr: tmp);
  add_loc_descr (list_head: &ret, descr: new_loc_descr (GET_CODE (rtl) == CLZ
				       ? DW_OP_shl : DW_OP_shr, oprnd1: 0, oprnd2: 0));
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0));
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_plus_uconst, oprnd1: 1, oprnd2: 0));
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0));
  l2jump = new_loc_descr (op: DW_OP_skip, oprnd1: 0, oprnd2: 0);
  add_loc_descr (list_head: &ret, descr: l2jump);
  /* L3: discard the shifted value, leaving the count on top.  */
  l3label = new_loc_descr (op: DW_OP_drop, oprnd1: 0, oprnd2: 0);
  add_loc_descr (list_head: &ret, descr: l3label);
  /* L4: common exit point for both the loop and the zero-input path.  */
  l4label = new_loc_descr (op: DW_OP_nop, oprnd1: 0, oprnd2: 0);
  add_loc_descr (list_head: &ret, descr: l4label);
  /* Now that every label descriptor exists, patch the branch targets.  */
  l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
  l1jump->dw_loc_oprnd1.v.val_loc = l1label;
  l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
  l2jump->dw_loc_oprnd1.v.val_loc = l2label;
  l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
  l3jump->dw_loc_oprnd1.v.val_loc = l3label;
  l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
  l4jump->dw_loc_oprnd1.v.val_loc = l4label;
  return ret;
}
15702 | |
15703 | /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant, |
15704 | const1 is DW_OP_lit1 or corresponding typed constant): |
15705 | const0 DW_OP_swap |
15706 | L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and |
15707 | DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1> |
15708 | L2: DW_OP_drop |
15709 | |
15710 | PARITY is similar: |
15711 | L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and |
15712 | DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1> |
15713 | L2: DW_OP_drop */ |
15714 | |
15715 | static dw_loc_descr_ref |
15716 | popcount_loc_descriptor (rtx rtl, scalar_int_mode mode, |
15717 | machine_mode mem_mode) |
15718 | { |
15719 | dw_loc_descr_ref op0, ret, tmp; |
15720 | dw_loc_descr_ref l1jump, l1label; |
15721 | dw_loc_descr_ref l2jump, l2label; |
15722 | |
15723 | if (GET_MODE (XEXP (rtl, 0)) != mode) |
15724 | return NULL; |
15725 | |
15726 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, |
15727 | VAR_INIT_STATUS_INITIALIZED); |
15728 | if (op0 == NULL) |
15729 | return NULL; |
15730 | ret = op0; |
15731 | tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode, |
15732 | VAR_INIT_STATUS_INITIALIZED); |
15733 | if (tmp == NULL) |
15734 | return NULL; |
15735 | add_loc_descr (list_head: &ret, descr: tmp); |
15736 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0)); |
15737 | l1label = new_loc_descr (op: DW_OP_dup, oprnd1: 0, oprnd2: 0); |
15738 | add_loc_descr (list_head: &ret, descr: l1label); |
15739 | l2jump = new_loc_descr (op: DW_OP_bra, oprnd1: 0, oprnd2: 0); |
15740 | add_loc_descr (list_head: &ret, descr: l2jump); |
15741 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_dup, oprnd1: 0, oprnd2: 0)); |
15742 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_rot, oprnd1: 0, oprnd2: 0)); |
15743 | tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode, |
15744 | VAR_INIT_STATUS_INITIALIZED); |
15745 | if (tmp == NULL) |
15746 | return NULL; |
15747 | add_loc_descr (list_head: &ret, descr: tmp); |
15748 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0)); |
15749 | add_loc_descr (list_head: &ret, descr: new_loc_descr (GET_CODE (rtl) == POPCOUNT |
15750 | ? DW_OP_plus : DW_OP_xor, oprnd1: 0, oprnd2: 0)); |
15751 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0)); |
15752 | tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode, |
15753 | VAR_INIT_STATUS_INITIALIZED); |
15754 | add_loc_descr (list_head: &ret, descr: tmp); |
15755 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_shr, oprnd1: 0, oprnd2: 0)); |
15756 | l1jump = new_loc_descr (op: DW_OP_skip, oprnd1: 0, oprnd2: 0); |
15757 | add_loc_descr (list_head: &ret, descr: l1jump); |
15758 | l2label = new_loc_descr (op: DW_OP_drop, oprnd1: 0, oprnd2: 0); |
15759 | add_loc_descr (list_head: &ret, descr: l2label); |
15760 | l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc; |
15761 | l1jump->dw_loc_oprnd1.v.val_loc = l1label; |
15762 | l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc; |
15763 | l2jump->dw_loc_oprnd1.v.val_loc = l2label; |
15764 | return ret; |
15765 | } |
15766 | |
15767 | /* BSWAP (constS is initial shift count, either 56 or 24): |
15768 | constS const0 |
15769 | L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr |
15770 | const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or |
15771 | DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8 |
15772 | DW_OP_minus DW_OP_swap DW_OP_skip <L1> |
15773 | L2: DW_OP_drop DW_OP_swap DW_OP_drop */ |
15774 | |
15775 | static dw_loc_descr_ref |
15776 | bswap_loc_descriptor (rtx rtl, scalar_int_mode mode, |
15777 | machine_mode mem_mode) |
15778 | { |
15779 | dw_loc_descr_ref op0, ret, tmp; |
15780 | dw_loc_descr_ref l1jump, l1label; |
15781 | dw_loc_descr_ref l2jump, l2label; |
15782 | |
15783 | if (BITS_PER_UNIT != 8 |
15784 | || (GET_MODE_BITSIZE (mode) != 32 |
15785 | && GET_MODE_BITSIZE (mode) != 64)) |
15786 | return NULL; |
15787 | |
15788 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, |
15789 | VAR_INIT_STATUS_INITIALIZED); |
15790 | if (op0 == NULL) |
15791 | return NULL; |
15792 | |
15793 | ret = op0; |
15794 | tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8), |
15795 | mode, mem_mode, |
15796 | VAR_INIT_STATUS_INITIALIZED); |
15797 | if (tmp == NULL) |
15798 | return NULL; |
15799 | add_loc_descr (list_head: &ret, descr: tmp); |
15800 | tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode, |
15801 | VAR_INIT_STATUS_INITIALIZED); |
15802 | if (tmp == NULL) |
15803 | return NULL; |
15804 | add_loc_descr (list_head: &ret, descr: tmp); |
15805 | l1label = new_loc_descr (op: DW_OP_pick, oprnd1: 2, oprnd2: 0); |
15806 | add_loc_descr (list_head: &ret, descr: l1label); |
15807 | tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8), |
15808 | mode, mem_mode, |
15809 | VAR_INIT_STATUS_INITIALIZED); |
15810 | add_loc_descr (list_head: &ret, descr: tmp); |
15811 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_pick, oprnd1: 3, oprnd2: 0)); |
15812 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_minus, oprnd1: 0, oprnd2: 0)); |
15813 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_shr, oprnd1: 0, oprnd2: 0)); |
15814 | tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode, |
15815 | VAR_INIT_STATUS_INITIALIZED); |
15816 | if (tmp == NULL) |
15817 | return NULL; |
15818 | add_loc_descr (list_head: &ret, descr: tmp); |
15819 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0)); |
15820 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_pick, oprnd1: 2, oprnd2: 0)); |
15821 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_shl, oprnd1: 0, oprnd2: 0)); |
15822 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_or, oprnd1: 0, oprnd2: 0)); |
15823 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0)); |
15824 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_dup, oprnd1: 0, oprnd2: 0)); |
15825 | tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode, |
15826 | VAR_INIT_STATUS_INITIALIZED); |
15827 | add_loc_descr (list_head: &ret, descr: tmp); |
15828 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_eq, oprnd1: 0, oprnd2: 0)); |
15829 | l2jump = new_loc_descr (op: DW_OP_bra, oprnd1: 0, oprnd2: 0); |
15830 | add_loc_descr (list_head: &ret, descr: l2jump); |
15831 | tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode, |
15832 | VAR_INIT_STATUS_INITIALIZED); |
15833 | add_loc_descr (list_head: &ret, descr: tmp); |
15834 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_minus, oprnd1: 0, oprnd2: 0)); |
15835 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0)); |
15836 | l1jump = new_loc_descr (op: DW_OP_skip, oprnd1: 0, oprnd2: 0); |
15837 | add_loc_descr (list_head: &ret, descr: l1jump); |
15838 | l2label = new_loc_descr (op: DW_OP_drop, oprnd1: 0, oprnd2: 0); |
15839 | add_loc_descr (list_head: &ret, descr: l2label); |
15840 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0)); |
15841 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_drop, oprnd1: 0, oprnd2: 0)); |
15842 | l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc; |
15843 | l1jump->dw_loc_oprnd1.v.val_loc = l1label; |
15844 | l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc; |
15845 | l2jump->dw_loc_oprnd1.v.val_loc = l2label; |
15846 | return ret; |
15847 | } |
15848 | |
15849 | /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode): |
15850 | DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot |
15851 | [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg |
15852 | DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or |
15853 | |
15854 | ROTATERT is similar: |
15855 | DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE> |
15856 | DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot |
15857 | [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */ |
15858 | |
static dw_loc_descr_ref
rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
		       machine_mode mem_mode)
{
  /* Emit the ROTATE/ROTATERT expansion described in the comment above:
     shift left and right by complementary amounts and OR the halves.
     The bracketed constMASK DW_OP_and steps are emitted only for modes
     narrower than the DWARF address size, to clip the shifted halves
     back to the mode's width.  */
  rtx rtlop1 = XEXP (rtl, 1);
  dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
  int i;

  /* Widen a narrower rotate count so both operands are in MODE.  */
  if (is_narrower_int_mode (GET_MODE (rtlop1), limit: mode))
    rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
  op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
			    VAR_INIT_STATUS_INITIALIZED);
  op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
			    VAR_INIT_STATUS_INITIALIZED);
  if (op0 == NULL || op1 == NULL)
    return NULL;
  /* For sub-address-size modes, build two "constMASK DW_OP_and"
     fragments (one per shifted half).  Give up if the mode mask does
     not fit a HOST_WIDE_INT.  */
  if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
    for (i = 0; i < 2; i++)
      {
	if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
	  mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
					mode, mem_mode,
					VAR_INIT_STATUS_INITIALIZED);
	else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
	  /* A full-width mask is negative as a CONST_INT, so emit it
	     explicitly as an unsigned constant of the right width.  */
	  mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
				   ? DW_OP_const4u
				   : HOST_BITS_PER_WIDE_INT == 64
				   ? DW_OP_const8u : DW_OP_constu,
				   GET_MODE_MASK (mode), oprnd2: 0);
	else
	  mask[i] = NULL;
	if (mask[i] == NULL)
	  return NULL;
	add_loc_descr (list_head: &mask[i], descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0));
      }
  /* Stack: value, count.  Duplicate both for the two shifts.  */
  ret = op0;
  add_loc_descr (list_head: &ret, descr: op1);
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_over, oprnd1: 0, oprnd2: 0));
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_over, oprnd1: 0, oprnd2: 0));
  /* ROTATERT shifts left by (BITSIZE - count): negate the count and
     add BITSIZE before the DW_OP_shl.  */
  if (GET_CODE (rtl) == ROTATERT)
    {
      add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_neg, oprnd1: 0, oprnd2: 0));
      add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_plus_uconst,
					   oprnd1: GET_MODE_BITSIZE (mode), oprnd2: 0));
    }
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_shl, oprnd1: 0, oprnd2: 0));
  if (mask[0] != NULL)
    add_loc_descr (list_head: &ret, descr: mask[0]);
  /* Bring the saved value/count pair to the top for the right shift.  */
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_rot, oprnd1: 0, oprnd2: 0));
  if (mask[1] != NULL)
    {
      add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0));
      add_loc_descr (list_head: &ret, descr: mask[1]);
      add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0));
    }
  /* ROTATE shifts right by (BITSIZE - count), mirroring the left case.  */
  if (GET_CODE (rtl) == ROTATE)
    {
      add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_neg, oprnd1: 0, oprnd2: 0));
      add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_plus_uconst,
					   oprnd1: GET_MODE_BITSIZE (mode), oprnd2: 0));
    }
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_shr, oprnd1: 0, oprnd2: 0));
  /* Combine the two shifted halves into the rotated result.  */
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_or, oprnd1: 0, oprnd2: 0));
  return ret;
}
15924 | |
15925 | /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref |
15926 | for DEBUG_PARAMETER_REF RTL. */ |
15927 | |
15928 | static dw_loc_descr_ref |
15929 | parameter_ref_descriptor (rtx rtl) |
15930 | { |
15931 | dw_loc_descr_ref ret; |
15932 | dw_die_ref ref; |
15933 | |
15934 | if (dwarf_strict) |
15935 | return NULL; |
15936 | gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL); |
15937 | /* With LTO during LTRANS we get the late DIE that refers to the early |
15938 | DIE, thus we add another indirection here. This seems to confuse |
15939 | gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */ |
15940 | ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl)); |
15941 | ret = new_loc_descr (op: DW_OP_GNU_parameter_ref, oprnd1: 0, oprnd2: 0); |
15942 | if (ref) |
15943 | { |
15944 | ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
15945 | ret->dw_loc_oprnd1.v.val_die_ref.die = ref; |
15946 | ret->dw_loc_oprnd1.v.val_die_ref.external = 0; |
15947 | } |
15948 | else |
15949 | { |
15950 | ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref; |
15951 | ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl); |
15952 | } |
15953 | return ret; |
15954 | } |
15955 | |
15956 | /* The following routine converts the RTL for a variable or parameter |
15957 | (resident in memory) into an equivalent Dwarf representation of a |
15958 | mechanism for getting the address of that same variable onto the top of a |
15959 | hypothetical "address evaluation" stack. |
15960 | |
15961 | When creating memory location descriptors, we are effectively transforming |
15962 | the RTL for a memory-resident object into its Dwarf postfix expression |
15963 | equivalent. This routine recursively descends an RTL tree, turning |
15964 | it into Dwarf postfix code as it goes. |
15965 | |
15966 | MODE is the mode that should be assumed for the rtl if it is VOIDmode. |
15967 | |
15968 | MEM_MODE is the mode of the memory reference, needed to handle some |
15969 | autoincrement addressing modes. |
15970 | |
15971 | Return 0 if we can't represent the location. */ |
15972 | |
15973 | dw_loc_descr_ref |
15974 | mem_loc_descriptor (rtx rtl, machine_mode mode, |
15975 | machine_mode mem_mode, |
15976 | enum var_init_status initialized) |
15977 | { |
15978 | dw_loc_descr_ref mem_loc_result = NULL; |
15979 | enum dwarf_location_atom op; |
15980 | dw_loc_descr_ref op0, op1; |
15981 | rtx inner = NULL_RTX; |
15982 | |
15983 | if (mode == VOIDmode) |
15984 | mode = GET_MODE (rtl); |
15985 | |
15986 | /* Note that for a dynamically sized array, the location we will generate a |
15987 | description of here will be the lowest numbered location which is |
15988 | actually within the array. That's *not* necessarily the same as the |
15989 | zeroth element of the array. */ |
15990 | |
15991 | rtl = targetm.delegitimize_address (rtl); |
15992 | |
15993 | if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode) |
15994 | return NULL; |
15995 | |
15996 | scalar_int_mode int_mode = BImode, inner_mode, op1_mode; |
15997 | switch (GET_CODE (rtl)) |
15998 | { |
15999 | case POST_INC: |
16000 | case POST_DEC: |
16001 | case POST_MODIFY: |
16002 | return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized); |
16003 | |
16004 | case SUBREG: |
16005 | /* The case of a subreg may arise when we have a local (register) |
16006 | variable or a formal (register) parameter which doesn't quite fill |
16007 | up an entire register. For now, just assume that it is |
16008 | legitimate to make the Dwarf info refer to the whole register which |
16009 | contains the given subreg. */ |
16010 | if (!subreg_lowpart_p (rtl)) |
16011 | break; |
16012 | inner = SUBREG_REG (rtl); |
16013 | /* FALLTHRU */ |
16014 | case TRUNCATE: |
16015 | if (inner == NULL_RTX) |
16016 | inner = XEXP (rtl, 0); |
16017 | if (is_a <scalar_int_mode> (m: mode, result: &int_mode) |
16018 | && is_a <scalar_int_mode> (GET_MODE (inner), result: &inner_mode) |
16019 | && (GET_MODE_SIZE (mode: int_mode) <= DWARF2_ADDR_SIZE |
16020 | #ifdef POINTERS_EXTEND_UNSIGNED |
16021 | || (int_mode == Pmode && mem_mode != VOIDmode) |
16022 | #endif |
16023 | ) |
16024 | && GET_MODE_SIZE (mode: inner_mode) <= DWARF2_ADDR_SIZE) |
16025 | { |
16026 | mem_loc_result = mem_loc_descriptor (rtl: inner, |
16027 | mode: inner_mode, |
16028 | mem_mode, initialized); |
16029 | break; |
16030 | } |
16031 | if (dwarf_strict && dwarf_version < 5) |
16032 | break; |
16033 | if (is_a <scalar_int_mode> (m: mode, result: &int_mode) |
16034 | && is_a <scalar_int_mode> (GET_MODE (inner), result: &inner_mode) |
16035 | ? GET_MODE_SIZE (mode: int_mode) <= GET_MODE_SIZE (mode: inner_mode) |
16036 | : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner)))) |
16037 | { |
16038 | dw_die_ref type_die; |
16039 | dw_loc_descr_ref cvt; |
16040 | |
16041 | mem_loc_result = mem_loc_descriptor (rtl: inner, |
16042 | GET_MODE (inner), |
16043 | mem_mode, initialized); |
16044 | if (mem_loc_result == NULL) |
16045 | break; |
16046 | type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode)); |
16047 | if (type_die == NULL) |
16048 | { |
16049 | mem_loc_result = NULL; |
16050 | break; |
16051 | } |
16052 | if (maybe_ne (a: GET_MODE_SIZE (mode), b: GET_MODE_SIZE (GET_MODE (inner)))) |
16053 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
16054 | else |
16055 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_reinterpret), oprnd1: 0, oprnd2: 0); |
16056 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
16057 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
16058 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
16059 | add_loc_descr (list_head: &mem_loc_result, descr: cvt); |
16060 | if (is_a <scalar_int_mode> (m: mode, result: &int_mode) |
16061 | && GET_MODE_SIZE (mode: int_mode) <= DWARF2_ADDR_SIZE) |
16062 | { |
16063 | /* Convert it to untyped afterwards. */ |
16064 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
16065 | add_loc_descr (list_head: &mem_loc_result, descr: cvt); |
16066 | } |
16067 | } |
16068 | break; |
16069 | |
16070 | case REG: |
16071 | if (!is_a <scalar_int_mode> (m: mode, result: &int_mode) |
16072 | || (GET_MODE_SIZE (mode: int_mode) > DWARF2_ADDR_SIZE |
16073 | && rtl != arg_pointer_rtx |
16074 | && rtl != frame_pointer_rtx |
16075 | #ifdef POINTERS_EXTEND_UNSIGNED |
16076 | && (int_mode != Pmode || mem_mode == VOIDmode) |
16077 | #endif |
16078 | )) |
16079 | { |
16080 | dw_die_ref type_die; |
16081 | unsigned int debugger_regnum; |
16082 | |
16083 | if (dwarf_strict && dwarf_version < 5) |
16084 | break; |
16085 | if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER) |
16086 | break; |
16087 | type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode)); |
16088 | if (type_die == NULL) |
16089 | break; |
16090 | |
16091 | debugger_regnum = debugger_reg_number (rtl); |
16092 | if (debugger_regnum == IGNORED_DWARF_REGNUM) |
16093 | break; |
16094 | mem_loc_result = new_loc_descr (op: dwarf_OP (op: DW_OP_regval_type), |
16095 | oprnd1: debugger_regnum, oprnd2: 0); |
16096 | mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref; |
16097 | mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die; |
16098 | mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0; |
16099 | break; |
16100 | } |
16101 | /* Whenever a register number forms a part of the description of the |
16102 | method for calculating the (dynamic) address of a memory resident |
16103 | object, DWARF rules require the register number be referred to as |
16104 | a "base register". This distinction is not based in any way upon |
16105 | what category of register the hardware believes the given register |
16106 | belongs to. This is strictly DWARF terminology we're dealing with |
16107 | here. Note that in cases where the location of a memory-resident |
16108 | data object could be expressed as: OP_ADD (OP_BASEREG (basereg), |
16109 | OP_CONST (0)) the actual DWARF location descriptor that we generate |
16110 | may just be OP_BASEREG (basereg). This may look deceptively like |
16111 | the object in question was allocated to a register (rather than in |
16112 | memory) so DWARF consumers need to be aware of the subtle |
16113 | distinction between OP_REG and OP_BASEREG. */ |
16114 | if (REGNO (rtl) < FIRST_PSEUDO_REGISTER) |
16115 | mem_loc_result = based_loc_descr (reg: rtl, offset: 0, initialized: VAR_INIT_STATUS_INITIALIZED); |
16116 | else if (stack_realign_drap |
16117 | && crtl->drap_reg |
16118 | && crtl->args.internal_arg_pointer == rtl |
16119 | && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER) |
16120 | { |
16121 | /* If RTL is internal_arg_pointer, which has been optimized |
16122 | out, use DRAP instead. */ |
16123 | mem_loc_result = based_loc_descr (crtl->drap_reg, offset: 0, |
16124 | initialized: VAR_INIT_STATUS_INITIALIZED); |
16125 | } |
16126 | break; |
16127 | |
16128 | case SIGN_EXTEND: |
16129 | case ZERO_EXTEND: |
16130 | if (!is_a <scalar_int_mode> (m: mode, result: &int_mode) |
16131 | || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), result: &inner_mode)) |
16132 | break; |
16133 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode: inner_mode, |
16134 | mem_mode, initialized: VAR_INIT_STATUS_INITIALIZED); |
16135 | if (op0 == 0) |
16136 | break; |
16137 | else if (GET_CODE (rtl) == ZERO_EXTEND |
16138 | && GET_MODE_SIZE (mode: int_mode) <= DWARF2_ADDR_SIZE |
16139 | && GET_MODE_BITSIZE (mode: inner_mode) < HOST_BITS_PER_WIDE_INT |
16140 | /* If DW_OP_const{1,2,4}u won't be used, it is shorter |
16141 | to expand zero extend as two shifts instead of |
16142 | masking. */ |
16143 | && GET_MODE_SIZE (mode: inner_mode) <= 4) |
16144 | { |
16145 | mem_loc_result = op0; |
16146 | add_loc_descr (list_head: &mem_loc_result, |
16147 | descr: int_loc_descriptor (GET_MODE_MASK (inner_mode))); |
16148 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0)); |
16149 | } |
16150 | else if (GET_MODE_SIZE (mode: int_mode) <= DWARF2_ADDR_SIZE) |
16151 | { |
16152 | int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (mode: inner_mode); |
16153 | shift *= BITS_PER_UNIT; |
16154 | if (GET_CODE (rtl) == SIGN_EXTEND) |
16155 | op = DW_OP_shra; |
16156 | else |
16157 | op = DW_OP_shr; |
16158 | mem_loc_result = op0; |
16159 | add_loc_descr (list_head: &mem_loc_result, descr: int_loc_descriptor (poly_i: shift)); |
16160 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_shl, oprnd1: 0, oprnd2: 0)); |
16161 | add_loc_descr (list_head: &mem_loc_result, descr: int_loc_descriptor (poly_i: shift)); |
16162 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
16163 | } |
16164 | else if (!dwarf_strict || dwarf_version >= 5) |
16165 | { |
16166 | dw_die_ref type_die1, type_die2; |
16167 | dw_loc_descr_ref cvt; |
16168 | |
16169 | type_die1 = base_type_for_mode (mode: inner_mode, |
16170 | GET_CODE (rtl) == ZERO_EXTEND); |
16171 | if (type_die1 == NULL) |
16172 | break; |
16173 | type_die2 = base_type_for_mode (mode: int_mode, unsignedp: 1); |
16174 | if (type_die2 == NULL) |
16175 | break; |
16176 | mem_loc_result = op0; |
16177 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
16178 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
16179 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1; |
16180 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
16181 | add_loc_descr (list_head: &mem_loc_result, descr: cvt); |
16182 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
16183 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
16184 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2; |
16185 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
16186 | add_loc_descr (list_head: &mem_loc_result, descr: cvt); |
16187 | } |
16188 | break; |
16189 | |
16190 | case MEM: |
16191 | { |
16192 | rtx new_rtl = avoid_constant_pool_reference (rtl); |
16193 | if (new_rtl != rtl) |
16194 | { |
16195 | mem_loc_result = mem_loc_descriptor (rtl: new_rtl, mode, mem_mode, |
16196 | initialized); |
16197 | if (mem_loc_result != NULL) |
16198 | return mem_loc_result; |
16199 | } |
16200 | } |
16201 | mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), |
16202 | mode: get_address_mode (mem: rtl), mem_mode: mode, |
16203 | initialized: VAR_INIT_STATUS_INITIALIZED); |
16204 | if (mem_loc_result == NULL) |
16205 | mem_loc_result = tls_mem_loc_descriptor (mem: rtl); |
16206 | if (mem_loc_result != NULL) |
16207 | { |
16208 | if (!is_a <scalar_int_mode> (m: mode, result: &int_mode) |
16209 | || GET_MODE_SIZE (mode: int_mode) > DWARF2_ADDR_SIZE) |
16210 | { |
16211 | dw_die_ref type_die; |
16212 | dw_loc_descr_ref deref; |
16213 | HOST_WIDE_INT size; |
16214 | |
16215 | if (dwarf_strict && dwarf_version < 5) |
16216 | return NULL; |
16217 | if (!GET_MODE_SIZE (mode).is_constant (const_value: &size)) |
16218 | return NULL; |
16219 | type_die |
16220 | = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode)); |
16221 | if (type_die == NULL) |
16222 | return NULL; |
16223 | deref = new_loc_descr (op: dwarf_OP (op: DW_OP_deref_type), oprnd1: size, oprnd2: 0); |
16224 | deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref; |
16225 | deref->dw_loc_oprnd2.v.val_die_ref.die = type_die; |
16226 | deref->dw_loc_oprnd2.v.val_die_ref.external = 0; |
16227 | add_loc_descr (list_head: &mem_loc_result, descr: deref); |
16228 | } |
16229 | else if (GET_MODE_SIZE (mode: int_mode) == DWARF2_ADDR_SIZE) |
16230 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_deref, oprnd1: 0, oprnd2: 0)); |
16231 | else |
16232 | add_loc_descr (list_head: &mem_loc_result, |
16233 | descr: new_loc_descr (op: DW_OP_deref_size, |
16234 | oprnd1: GET_MODE_SIZE (mode: int_mode), oprnd2: 0)); |
16235 | } |
16236 | break; |
16237 | |
16238 | case LO_SUM: |
16239 | return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized); |
16240 | |
16241 | case LABEL_REF: |
16242 | /* Some ports can transform a symbol ref into a label ref, because |
16243 | the symbol ref is too far away and has to be dumped into a constant |
16244 | pool. */ |
16245 | case CONST: |
16246 | case SYMBOL_REF: |
16247 | case UNSPEC: |
16248 | if (!is_a <scalar_int_mode> (m: mode, result: &int_mode) |
16249 | || (GET_MODE_SIZE (mode: int_mode) > DWARF2_ADDR_SIZE |
16250 | #ifdef POINTERS_EXTEND_UNSIGNED |
16251 | && (int_mode != Pmode || mem_mode == VOIDmode) |
16252 | #endif |
16253 | )) |
16254 | break; |
16255 | |
16256 | if (GET_CODE (rtl) == UNSPEC) |
16257 | { |
16258 | /* If delegitimize_address couldn't do anything with the UNSPEC, we |
16259 | can't express it in the debug info. This can happen e.g. with some |
16260 | TLS UNSPECs. Allow UNSPECs formerly from CONST that the backend |
16261 | approves. */ |
16262 | bool not_ok = false; |
16263 | subrtx_var_iterator::array_type array; |
16264 | FOR_EACH_SUBRTX_VAR (iter, array, rtl, ALL) |
16265 | if (*iter != rtl && !CONSTANT_P (*iter)) |
16266 | { |
16267 | not_ok = true; |
16268 | break; |
16269 | } |
16270 | |
16271 | if (not_ok) |
16272 | break; |
16273 | |
16274 | FOR_EACH_SUBRTX_VAR (iter, array, rtl, ALL) |
16275 | if (!const_ok_for_output_1 (rtl: *iter)) |
16276 | { |
16277 | not_ok = true; |
16278 | break; |
16279 | } |
16280 | |
16281 | if (not_ok) |
16282 | break; |
16283 | |
16284 | rtl = gen_rtx_CONST (GET_MODE (rtl), rtl); |
16285 | goto symref; |
16286 | } |
16287 | |
16288 | if (GET_CODE (rtl) == SYMBOL_REF |
16289 | && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE) |
16290 | { |
16291 | dw_loc_descr_ref temp; |
16292 | |
16293 | /* If this is not defined, we have no way to emit the data. */ |
16294 | if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel) |
16295 | break; |
16296 | |
16297 | temp = new_addr_loc_descr (addr: rtl, dtprel: dtprel_true); |
16298 | |
16299 | /* We check for DWARF 5 here because gdb did not implement |
16300 | DW_OP_form_tls_address until after 7.12. */ |
16301 | mem_loc_result = new_loc_descr (op: (dwarf_version >= 5 |
16302 | ? DW_OP_form_tls_address |
16303 | : DW_OP_GNU_push_tls_address), |
16304 | oprnd1: 0, oprnd2: 0); |
16305 | add_loc_descr (list_head: &mem_loc_result, descr: temp); |
16306 | |
16307 | break; |
16308 | } |
16309 | |
16310 | if (!const_ok_for_output (rtl)) |
16311 | { |
16312 | if (GET_CODE (rtl) == CONST) |
16313 | switch (GET_CODE (XEXP (rtl, 0))) |
16314 | { |
16315 | case NOT: |
16316 | op = DW_OP_not; |
16317 | goto try_const_unop; |
16318 | case NEG: |
16319 | op = DW_OP_neg; |
16320 | goto try_const_unop; |
16321 | try_const_unop: |
16322 | rtx arg; |
16323 | arg = XEXP (XEXP (rtl, 0), 0); |
16324 | if (!CONSTANT_P (arg)) |
16325 | arg = gen_rtx_CONST (int_mode, arg); |
16326 | op0 = mem_loc_descriptor (rtl: arg, mode: int_mode, mem_mode, |
16327 | initialized); |
16328 | if (op0) |
16329 | { |
16330 | mem_loc_result = op0; |
16331 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
16332 | } |
16333 | break; |
16334 | default: |
16335 | mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode: int_mode, |
16336 | mem_mode, initialized); |
16337 | break; |
16338 | } |
16339 | break; |
16340 | } |
16341 | |
16342 | symref: |
16343 | mem_loc_result = new_addr_loc_descr (addr: rtl, dtprel: dtprel_false); |
16344 | vec_safe_push (v&: used_rtx_array, obj: rtl); |
16345 | break; |
16346 | |
16347 | case CONCAT: |
16348 | case CONCATN: |
16349 | case VAR_LOCATION: |
16350 | case DEBUG_IMPLICIT_PTR: |
16351 | expansion_failed (NULL_TREE, rtl, |
16352 | reason: "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor" ); |
16353 | return 0; |
16354 | |
16355 | case ENTRY_VALUE: |
16356 | if (dwarf_strict && dwarf_version < 5) |
16357 | return NULL; |
16358 | if (REG_P (ENTRY_VALUE_EXP (rtl))) |
16359 | { |
16360 | if (!is_a <scalar_int_mode> (m: mode, result: &int_mode) |
16361 | || GET_MODE_SIZE (mode: int_mode) > DWARF2_ADDR_SIZE) |
16362 | op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode, |
16363 | VOIDmode, initialized: VAR_INIT_STATUS_INITIALIZED); |
16364 | else |
16365 | { |
16366 | unsigned int debugger_regnum = debugger_reg_number (ENTRY_VALUE_EXP (rtl)); |
16367 | if (debugger_regnum == IGNORED_DWARF_REGNUM) |
16368 | return NULL; |
16369 | op0 = one_reg_loc_descriptor (regno: debugger_regnum, |
16370 | initialized: VAR_INIT_STATUS_INITIALIZED); |
16371 | } |
16372 | } |
16373 | else if (MEM_P (ENTRY_VALUE_EXP (rtl)) |
16374 | && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0))) |
16375 | { |
16376 | op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode, |
16377 | VOIDmode, initialized: VAR_INIT_STATUS_INITIALIZED); |
16378 | if (op0 && op0->dw_loc_opc == DW_OP_fbreg) |
16379 | return NULL; |
16380 | } |
16381 | else |
16382 | gcc_unreachable (); |
16383 | if (op0 == NULL) |
16384 | return NULL; |
16385 | mem_loc_result = new_loc_descr (op: dwarf_OP (op: DW_OP_entry_value), oprnd1: 0, oprnd2: 0); |
16386 | mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc; |
16387 | mem_loc_result->dw_loc_oprnd1.v.val_loc = op0; |
16388 | break; |
16389 | |
16390 | case DEBUG_PARAMETER_REF: |
16391 | mem_loc_result = parameter_ref_descriptor (rtl); |
16392 | break; |
16393 | |
16394 | case PRE_MODIFY: |
16395 | /* Extract the PLUS expression nested inside and fall into |
16396 | PLUS code below. */ |
16397 | rtl = XEXP (rtl, 1); |
16398 | goto plus; |
16399 | |
16400 | case PRE_INC: |
16401 | case PRE_DEC: |
16402 | /* Turn these into a PLUS expression and fall into the PLUS code |
16403 | below. */ |
16404 | rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0), |
16405 | gen_int_mode (GET_CODE (rtl) == PRE_INC |
16406 | ? GET_MODE_UNIT_SIZE (mem_mode) |
16407 | : -GET_MODE_UNIT_SIZE (mem_mode), |
16408 | mode)); |
16409 | |
16410 | /* fall through */ |
16411 | |
16412 | case PLUS: |
16413 | plus: |
16414 | if (is_based_loc (rtl) |
16415 | && is_a <scalar_int_mode> (m: mode, result: &int_mode) |
16416 | && (GET_MODE_SIZE (mode: int_mode) <= DWARF2_ADDR_SIZE |
16417 | || XEXP (rtl, 0) == arg_pointer_rtx |
16418 | || XEXP (rtl, 0) == frame_pointer_rtx)) |
16419 | mem_loc_result = based_loc_descr (XEXP (rtl, 0), |
16420 | INTVAL (XEXP (rtl, 1)), |
16421 | initialized: VAR_INIT_STATUS_INITIALIZED); |
16422 | else |
16423 | { |
16424 | mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, |
16425 | initialized: VAR_INIT_STATUS_INITIALIZED); |
16426 | if (mem_loc_result == 0) |
16427 | break; |
16428 | |
16429 | if (CONST_INT_P (XEXP (rtl, 1)) |
16430 | && (GET_MODE_SIZE (mode: as_a <scalar_int_mode> (m: mode)) |
16431 | <= DWARF2_ADDR_SIZE)) |
16432 | loc_descr_plus_const (list_head: &mem_loc_result, INTVAL (XEXP (rtl, 1))); |
16433 | else |
16434 | { |
16435 | op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, |
16436 | initialized: VAR_INIT_STATUS_INITIALIZED); |
16437 | if (op1 == 0) |
16438 | return NULL; |
16439 | add_loc_descr (list_head: &mem_loc_result, descr: op1); |
16440 | add_loc_descr (list_head: &mem_loc_result, |
16441 | descr: new_loc_descr (op: DW_OP_plus, oprnd1: 0, oprnd2: 0)); |
16442 | } |
16443 | } |
16444 | break; |
16445 | |
16446 | /* If a pseudo-reg is optimized away, it is possible for it to |
16447 | be replaced with a MEM containing a multiply or shift. */ |
16448 | case MINUS: |
16449 | op = DW_OP_minus; |
16450 | goto do_binop; |
16451 | |
16452 | case MULT: |
16453 | op = DW_OP_mul; |
16454 | goto do_binop; |
16455 | |
16456 | case DIV: |
16457 | if ((!dwarf_strict || dwarf_version >= 5) |
16458 | && is_a <scalar_int_mode> (m: mode, result: &int_mode) |
16459 | && GET_MODE_SIZE (mode: int_mode) > DWARF2_ADDR_SIZE) |
16460 | { |
16461 | mem_loc_result = typed_binop (op: DW_OP_div, rtl, |
16462 | type_die: base_type_for_mode (mode, unsignedp: 0), |
16463 | mode: int_mode, mem_mode); |
16464 | break; |
16465 | } |
16466 | op = DW_OP_div; |
16467 | goto do_binop; |
16468 | |
16469 | case UMOD: |
16470 | op = DW_OP_mod; |
16471 | goto do_binop; |
16472 | |
16473 | case ASHIFT: |
16474 | op = DW_OP_shl; |
16475 | goto do_shift; |
16476 | |
16477 | case ASHIFTRT: |
16478 | op = DW_OP_shra; |
16479 | goto do_shift; |
16480 | |
16481 | case LSHIFTRT: |
16482 | op = DW_OP_shr; |
16483 | goto do_shift; |
16484 | |
16485 | do_shift: |
16486 | if (!is_a <scalar_int_mode> (m: mode, result: &int_mode)) |
16487 | break; |
16488 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode: int_mode, mem_mode, |
16489 | initialized: VAR_INIT_STATUS_INITIALIZED); |
16490 | { |
16491 | rtx rtlop1 = XEXP (rtl, 1); |
16492 | if (is_a <scalar_int_mode> (GET_MODE (rtlop1), result: &op1_mode) |
16493 | && GET_MODE_BITSIZE (mode: op1_mode) < GET_MODE_BITSIZE (mode: int_mode)) |
16494 | rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1); |
16495 | op1 = mem_loc_descriptor (rtl: rtlop1, mode: int_mode, mem_mode, |
16496 | initialized: VAR_INIT_STATUS_INITIALIZED); |
16497 | } |
16498 | |
16499 | if (op0 == 0 || op1 == 0) |
16500 | break; |
16501 | |
16502 | mem_loc_result = op0; |
16503 | add_loc_descr (list_head: &mem_loc_result, descr: op1); |
16504 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
16505 | break; |
16506 | |
16507 | case AND: |
16508 | op = DW_OP_and; |
16509 | goto do_binop; |
16510 | |
16511 | case IOR: |
16512 | op = DW_OP_or; |
16513 | goto do_binop; |
16514 | |
16515 | case XOR: |
16516 | op = DW_OP_xor; |
16517 | goto do_binop; |
16518 | |
16519 | do_binop: |
16520 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, |
16521 | initialized: VAR_INIT_STATUS_INITIALIZED); |
16522 | if (XEXP (rtl, 0) == XEXP (rtl, 1)) |
16523 | { |
16524 | if (op0 == 0) |
16525 | break; |
16526 | mem_loc_result = op0; |
16527 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_dup, oprnd1: 0, oprnd2: 0)); |
16528 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
16529 | break; |
16530 | } |
16531 | op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, |
16532 | initialized: VAR_INIT_STATUS_INITIALIZED); |
16533 | |
16534 | if (op0 == 0 || op1 == 0) |
16535 | break; |
16536 | |
16537 | mem_loc_result = op0; |
16538 | add_loc_descr (list_head: &mem_loc_result, descr: op1); |
16539 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
16540 | break; |
16541 | |
16542 | case MOD: |
16543 | if ((!dwarf_strict || dwarf_version >= 5) |
16544 | && is_a <scalar_int_mode> (m: mode, result: &int_mode) |
16545 | && GET_MODE_SIZE (mode: int_mode) > DWARF2_ADDR_SIZE) |
16546 | { |
16547 | mem_loc_result = typed_binop (op: DW_OP_mod, rtl, |
16548 | type_die: base_type_for_mode (mode, unsignedp: 0), |
16549 | mode: int_mode, mem_mode); |
16550 | break; |
16551 | } |
16552 | |
16553 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, |
16554 | initialized: VAR_INIT_STATUS_INITIALIZED); |
16555 | op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, |
16556 | initialized: VAR_INIT_STATUS_INITIALIZED); |
16557 | |
16558 | if (op0 == 0 || op1 == 0) |
16559 | break; |
16560 | |
16561 | mem_loc_result = op0; |
16562 | add_loc_descr (list_head: &mem_loc_result, descr: op1); |
16563 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_over, oprnd1: 0, oprnd2: 0)); |
16564 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_over, oprnd1: 0, oprnd2: 0)); |
16565 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_div, oprnd1: 0, oprnd2: 0)); |
16566 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_mul, oprnd1: 0, oprnd2: 0)); |
16567 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_minus, oprnd1: 0, oprnd2: 0)); |
16568 | break; |
16569 | |
16570 | case UDIV: |
16571 | if ((!dwarf_strict || dwarf_version >= 5) |
16572 | && is_a <scalar_int_mode> (m: mode, result: &int_mode)) |
16573 | { |
16574 | /* We can use a signed divide if the sign bit is not set. */ |
16575 | if (GET_MODE_SIZE (mode: int_mode) < DWARF2_ADDR_SIZE) |
16576 | { |
16577 | op = DW_OP_div; |
16578 | goto do_binop; |
16579 | } |
16580 | |
16581 | mem_loc_result = typed_binop (op: DW_OP_div, rtl, |
16582 | type_die: base_type_for_mode (mode: int_mode, unsignedp: 1), |
16583 | mode: int_mode, mem_mode); |
16584 | } |
16585 | break; |
16586 | |
16587 | case NOT: |
16588 | op = DW_OP_not; |
16589 | goto do_unop; |
16590 | |
16591 | case ABS: |
16592 | op = DW_OP_abs; |
16593 | goto do_unop; |
16594 | |
16595 | case NEG: |
16596 | op = DW_OP_neg; |
16597 | goto do_unop; |
16598 | |
16599 | do_unop: |
16600 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, |
16601 | initialized: VAR_INIT_STATUS_INITIALIZED); |
16602 | |
16603 | if (op0 == 0) |
16604 | break; |
16605 | |
16606 | mem_loc_result = op0; |
16607 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
16608 | break; |
16609 | |
16610 | case CONST_INT: |
16611 | if (!is_a <scalar_int_mode> (m: mode, result: &int_mode) |
16612 | || GET_MODE_SIZE (mode: int_mode) <= DWARF2_ADDR_SIZE |
16613 | #ifdef POINTERS_EXTEND_UNSIGNED |
16614 | || (int_mode == Pmode |
16615 | && mem_mode != VOIDmode |
16616 | && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl)) |
16617 | #endif |
16618 | ) |
16619 | { |
16620 | mem_loc_result = int_loc_descriptor (INTVAL (rtl)); |
16621 | break; |
16622 | } |
16623 | if ((!dwarf_strict || dwarf_version >= 5) |
16624 | && (GET_MODE_BITSIZE (mode: int_mode) == HOST_BITS_PER_WIDE_INT |
16625 | || GET_MODE_BITSIZE (mode: int_mode) == HOST_BITS_PER_DOUBLE_INT)) |
16626 | { |
16627 | dw_die_ref type_die = base_type_for_mode (mode: int_mode, unsignedp: 1); |
16628 | scalar_int_mode amode; |
16629 | if (type_die == NULL) |
16630 | return NULL; |
16631 | if (INTVAL (rtl) >= 0 |
16632 | && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, limit: 0) |
16633 | .exists (mode: &amode)) |
16634 | && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl) |
16635 | /* const DW_OP_convert <XXX> vs. |
16636 | DW_OP_const_type <XXX, 1, const>. */ |
16637 | && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1 |
16638 | < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (mode: int_mode)) |
16639 | { |
16640 | mem_loc_result = int_loc_descriptor (INTVAL (rtl)); |
16641 | op0 = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
16642 | op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
16643 | op0->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
16644 | op0->dw_loc_oprnd1.v.val_die_ref.external = 0; |
16645 | add_loc_descr (list_head: &mem_loc_result, descr: op0); |
16646 | return mem_loc_result; |
16647 | } |
16648 | mem_loc_result = new_loc_descr (op: dwarf_OP (op: DW_OP_const_type), oprnd1: 0, |
16649 | INTVAL (rtl)); |
16650 | mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
16651 | mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
16652 | mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0; |
16653 | if (GET_MODE_BITSIZE (mode: int_mode) == HOST_BITS_PER_WIDE_INT) |
16654 | mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const; |
16655 | else |
16656 | { |
16657 | mem_loc_result->dw_loc_oprnd2.val_class |
16658 | = dw_val_class_const_double; |
16659 | mem_loc_result->dw_loc_oprnd2.v.val_double |
16660 | = double_int::from_shwi (INTVAL (rtl)); |
16661 | } |
16662 | } |
16663 | break; |
16664 | |
16665 | case CONST_DOUBLE: |
16666 | if (!dwarf_strict || dwarf_version >= 5) |
16667 | { |
16668 | dw_die_ref type_die; |
16669 | |
16670 | /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a |
16671 | CONST_DOUBLE rtx could represent either a large integer |
16672 | or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0, |
16673 | the value is always a floating point constant. |
16674 | |
16675 | When it is an integer, a CONST_DOUBLE is used whenever |
16676 | the constant requires 2 HWIs to be adequately represented. |
16677 | We output CONST_DOUBLEs as blocks. */ |
16678 | if (mode == VOIDmode |
16679 | || (GET_MODE (rtl) == VOIDmode |
16680 | && maybe_ne (a: GET_MODE_BITSIZE (mode), |
16681 | HOST_BITS_PER_DOUBLE_INT))) |
16682 | break; |
16683 | type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode)); |
16684 | if (type_die == NULL) |
16685 | return NULL; |
16686 | mem_loc_result = new_loc_descr (op: dwarf_OP (op: DW_OP_const_type), oprnd1: 0, oprnd2: 0); |
16687 | mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
16688 | mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
16689 | mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0; |
16690 | #if TARGET_SUPPORTS_WIDE_INT == 0 |
16691 | if (!SCALAR_FLOAT_MODE_P (mode)) |
16692 | { |
16693 | mem_loc_result->dw_loc_oprnd2.val_class |
16694 | = dw_val_class_const_double; |
16695 | mem_loc_result->dw_loc_oprnd2.v.val_double |
16696 | = rtx_to_double_int (rtl); |
16697 | } |
16698 | else |
16699 | #endif |
16700 | { |
16701 | scalar_float_mode float_mode = as_a <scalar_float_mode> (m: mode); |
16702 | unsigned int length = GET_MODE_SIZE (mode: float_mode); |
16703 | unsigned char *array = ggc_vec_alloc<unsigned char> (c: length); |
16704 | unsigned int elt_size = insert_float (rtl, array); |
16705 | |
16706 | mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec; |
16707 | mem_loc_result->dw_loc_oprnd2.v.val_vec.length |
16708 | = length / elt_size; |
16709 | mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size; |
16710 | mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array; |
16711 | } |
16712 | } |
16713 | break; |
16714 | |
16715 | case CONST_WIDE_INT: |
16716 | if (!dwarf_strict || dwarf_version >= 5) |
16717 | { |
16718 | dw_die_ref type_die; |
16719 | |
16720 | type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode)); |
16721 | if (type_die == NULL) |
16722 | return NULL; |
16723 | mem_loc_result = new_loc_descr (op: dwarf_OP (op: DW_OP_const_type), oprnd1: 0, oprnd2: 0); |
16724 | mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
16725 | mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
16726 | mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0; |
16727 | mem_loc_result->dw_loc_oprnd2.val_class |
16728 | = dw_val_class_wide_int; |
16729 | mem_loc_result->dw_loc_oprnd2.v.val_wide |
16730 | = alloc_dw_wide_int (w: rtx_mode_t (rtl, mode)); |
16731 | } |
16732 | break; |
16733 | |
16734 | case CONST_POLY_INT: |
16735 | mem_loc_result = int_loc_descriptor (poly_i: rtx_to_poly_int64 (x: rtl)); |
16736 | break; |
16737 | |
16738 | case EQ: |
16739 | mem_loc_result = scompare_loc_descriptor (op: DW_OP_eq, rtl, mem_mode); |
16740 | break; |
16741 | |
16742 | case GE: |
16743 | mem_loc_result = scompare_loc_descriptor (op: DW_OP_ge, rtl, mem_mode); |
16744 | break; |
16745 | |
16746 | case GT: |
16747 | mem_loc_result = scompare_loc_descriptor (op: DW_OP_gt, rtl, mem_mode); |
16748 | break; |
16749 | |
16750 | case LE: |
16751 | mem_loc_result = scompare_loc_descriptor (op: DW_OP_le, rtl, mem_mode); |
16752 | break; |
16753 | |
16754 | case LT: |
16755 | mem_loc_result = scompare_loc_descriptor (op: DW_OP_lt, rtl, mem_mode); |
16756 | break; |
16757 | |
16758 | case NE: |
16759 | mem_loc_result = scompare_loc_descriptor (op: DW_OP_ne, rtl, mem_mode); |
16760 | break; |
16761 | |
16762 | case GEU: |
16763 | mem_loc_result = ucompare_loc_descriptor (op: DW_OP_ge, rtl, mem_mode); |
16764 | break; |
16765 | |
16766 | case GTU: |
16767 | mem_loc_result = ucompare_loc_descriptor (op: DW_OP_gt, rtl, mem_mode); |
16768 | break; |
16769 | |
16770 | case LEU: |
16771 | mem_loc_result = ucompare_loc_descriptor (op: DW_OP_le, rtl, mem_mode); |
16772 | break; |
16773 | |
16774 | case LTU: |
16775 | mem_loc_result = ucompare_loc_descriptor (op: DW_OP_lt, rtl, mem_mode); |
16776 | break; |
16777 | |
16778 | case UMIN: |
16779 | case UMAX: |
16780 | if (!SCALAR_INT_MODE_P (mode)) |
16781 | break; |
16782 | /* FALLTHRU */ |
16783 | case SMIN: |
16784 | case SMAX: |
16785 | mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode); |
16786 | break; |
16787 | |
16788 | case ZERO_EXTRACT: |
16789 | case SIGN_EXTRACT: |
16790 | if (CONST_INT_P (XEXP (rtl, 1)) |
16791 | && CONST_INT_P (XEXP (rtl, 2)) |
16792 | && is_a <scalar_int_mode> (m: mode, result: &int_mode) |
16793 | && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), result: &inner_mode) |
16794 | && GET_MODE_SIZE (mode: int_mode) <= DWARF2_ADDR_SIZE |
16795 | && GET_MODE_SIZE (mode: inner_mode) <= DWARF2_ADDR_SIZE |
16796 | && ((unsigned) INTVAL (XEXP (rtl, 1)) |
16797 | + (unsigned) INTVAL (XEXP (rtl, 2)) |
16798 | <= GET_MODE_BITSIZE (mode: int_mode))) |
16799 | { |
16800 | int shift, size; |
16801 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode: inner_mode, |
16802 | mem_mode, initialized: VAR_INIT_STATUS_INITIALIZED); |
16803 | if (op0 == 0) |
16804 | break; |
16805 | if (GET_CODE (rtl) == SIGN_EXTRACT) |
16806 | op = DW_OP_shra; |
16807 | else |
16808 | op = DW_OP_shr; |
16809 | mem_loc_result = op0; |
16810 | size = INTVAL (XEXP (rtl, 1)); |
16811 | shift = INTVAL (XEXP (rtl, 2)); |
16812 | if (BITS_BIG_ENDIAN) |
16813 | shift = GET_MODE_BITSIZE (mode: inner_mode) - shift - size; |
16814 | if (shift + size != (int) DWARF2_ADDR_SIZE) |
16815 | { |
16816 | add_loc_descr (list_head: &mem_loc_result, |
16817 | descr: int_loc_descriptor (DWARF2_ADDR_SIZE |
16818 | - shift - size)); |
16819 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_shl, oprnd1: 0, oprnd2: 0)); |
16820 | } |
16821 | if (size != (int) DWARF2_ADDR_SIZE) |
16822 | { |
16823 | add_loc_descr (list_head: &mem_loc_result, |
16824 | descr: int_loc_descriptor (DWARF2_ADDR_SIZE - size)); |
16825 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
16826 | } |
16827 | } |
16828 | break; |
16829 | |
16830 | case IF_THEN_ELSE: |
16831 | { |
16832 | dw_loc_descr_ref op2, bra_node, drop_node; |
16833 | op0 = mem_loc_descriptor (XEXP (rtl, 0), |
16834 | GET_MODE (XEXP (rtl, 0)) == VOIDmode |
16835 | ? word_mode : GET_MODE (XEXP (rtl, 0)), |
16836 | mem_mode, initialized: VAR_INIT_STATUS_INITIALIZED); |
16837 | op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, |
16838 | initialized: VAR_INIT_STATUS_INITIALIZED); |
16839 | op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode, |
16840 | initialized: VAR_INIT_STATUS_INITIALIZED); |
16841 | if (op0 == NULL || op1 == NULL || op2 == NULL) |
16842 | break; |
16843 | |
16844 | mem_loc_result = op1; |
16845 | add_loc_descr (list_head: &mem_loc_result, descr: op2); |
16846 | add_loc_descr (list_head: &mem_loc_result, descr: op0); |
16847 | bra_node = new_loc_descr (op: DW_OP_bra, oprnd1: 0, oprnd2: 0); |
16848 | add_loc_descr (list_head: &mem_loc_result, descr: bra_node); |
16849 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0)); |
16850 | drop_node = new_loc_descr (op: DW_OP_drop, oprnd1: 0, oprnd2: 0); |
16851 | add_loc_descr (list_head: &mem_loc_result, descr: drop_node); |
16852 | bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc; |
16853 | bra_node->dw_loc_oprnd1.v.val_loc = drop_node; |
16854 | } |
16855 | break; |
16856 | |
16857 | case FLOAT_EXTEND: |
16858 | case FLOAT_TRUNCATE: |
16859 | case FLOAT: |
16860 | case UNSIGNED_FLOAT: |
16861 | case FIX: |
16862 | case UNSIGNED_FIX: |
16863 | if (!dwarf_strict || dwarf_version >= 5) |
16864 | { |
16865 | dw_die_ref type_die; |
16866 | dw_loc_descr_ref cvt; |
16867 | |
16868 | op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)), |
16869 | mem_mode, initialized: VAR_INIT_STATUS_INITIALIZED); |
16870 | if (op0 == NULL) |
16871 | break; |
16872 | if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), result: &int_mode) |
16873 | && (GET_CODE (rtl) == FLOAT |
16874 | || GET_MODE_SIZE (mode: int_mode) <= DWARF2_ADDR_SIZE)) |
16875 | { |
16876 | type_die = base_type_for_mode (mode: int_mode, |
16877 | GET_CODE (rtl) == UNSIGNED_FLOAT); |
16878 | if (type_die == NULL) |
16879 | break; |
16880 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
16881 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
16882 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
16883 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
16884 | add_loc_descr (list_head: &op0, descr: cvt); |
16885 | } |
16886 | type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX); |
16887 | if (type_die == NULL) |
16888 | break; |
16889 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
16890 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
16891 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
16892 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
16893 | add_loc_descr (list_head: &op0, descr: cvt); |
16894 | if (is_a <scalar_int_mode> (m: mode, result: &int_mode) |
16895 | && (GET_CODE (rtl) == FIX |
16896 | || GET_MODE_SIZE (mode: int_mode) < DWARF2_ADDR_SIZE)) |
16897 | { |
16898 | op0 = convert_descriptor_to_mode (mode: int_mode, op: op0); |
16899 | if (op0 == NULL) |
16900 | break; |
16901 | } |
16902 | mem_loc_result = op0; |
16903 | } |
16904 | break; |
16905 | |
16906 | case CLZ: |
16907 | case CTZ: |
16908 | case FFS: |
16909 | if (is_a <scalar_int_mode> (m: mode, result: &int_mode)) |
16910 | mem_loc_result = clz_loc_descriptor (rtl, mode: int_mode, mem_mode); |
16911 | break; |
16912 | |
16913 | case POPCOUNT: |
16914 | case PARITY: |
16915 | if (is_a <scalar_int_mode> (m: mode, result: &int_mode)) |
16916 | mem_loc_result = popcount_loc_descriptor (rtl, mode: int_mode, mem_mode); |
16917 | break; |
16918 | |
16919 | case BSWAP: |
16920 | if (is_a <scalar_int_mode> (m: mode, result: &int_mode)) |
16921 | mem_loc_result = bswap_loc_descriptor (rtl, mode: int_mode, mem_mode); |
16922 | break; |
16923 | |
16924 | case ROTATE: |
16925 | case ROTATERT: |
16926 | if (is_a <scalar_int_mode> (m: mode, result: &int_mode)) |
16927 | mem_loc_result = rotate_loc_descriptor (rtl, mode: int_mode, mem_mode); |
16928 | break; |
16929 | |
16930 | case COMPARE: |
16931 | /* In theory, we could implement the above. */ |
16932 | /* DWARF cannot represent the unsigned compare operations |
16933 | natively. */ |
16934 | case SS_MULT: |
16935 | case US_MULT: |
16936 | case SS_DIV: |
16937 | case US_DIV: |
16938 | case SS_PLUS: |
16939 | case US_PLUS: |
16940 | case SS_MINUS: |
16941 | case US_MINUS: |
16942 | case SS_NEG: |
16943 | case US_NEG: |
16944 | case SS_ABS: |
16945 | case SS_ASHIFT: |
16946 | case US_ASHIFT: |
16947 | case SS_TRUNCATE: |
16948 | case US_TRUNCATE: |
16949 | case UNORDERED: |
16950 | case ORDERED: |
16951 | case UNEQ: |
16952 | case UNGE: |
16953 | case UNGT: |
16954 | case UNLE: |
16955 | case UNLT: |
16956 | case LTGT: |
16957 | case FRACT_CONVERT: |
16958 | case UNSIGNED_FRACT_CONVERT: |
16959 | case SAT_FRACT: |
16960 | case UNSIGNED_SAT_FRACT: |
16961 | case SQRT: |
16962 | case ASM_OPERANDS: |
16963 | case VEC_MERGE: |
16964 | case VEC_SELECT: |
16965 | case VEC_CONCAT: |
16966 | case VEC_DUPLICATE: |
16967 | case VEC_SERIES: |
16968 | case HIGH: |
16969 | case FMA: |
16970 | case STRICT_LOW_PART: |
16971 | case CONST_VECTOR: |
16972 | case CONST_FIXED: |
16973 | case CLRSB: |
16974 | case CLOBBER: |
16975 | case SMUL_HIGHPART: |
16976 | case UMUL_HIGHPART: |
16977 | case BITREVERSE: |
16978 | case COPYSIGN: |
16979 | break; |
16980 | |
16981 | case CONST_STRING: |
16982 | resolve_one_addr (&rtl); |
16983 | goto symref; |
16984 | |
16985 | /* RTL sequences inside PARALLEL record a series of DWARF operations for |
16986 | the expression. An UNSPEC rtx represents a raw DWARF operation, |
16987 | new_loc_descr is called for it to build the operation directly. |
16988 | Otherwise mem_loc_descriptor is called recursively. */ |
16989 | case PARALLEL: |
16990 | { |
16991 | int index = 0; |
16992 | dw_loc_descr_ref exp_result = NULL; |
16993 | |
16994 | for (; index < XVECLEN (rtl, 0); index++) |
16995 | { |
16996 | rtx elem = XVECEXP (rtl, 0, index); |
16997 | if (GET_CODE (elem) == UNSPEC) |
16998 | { |
16999 | /* Each DWARF operation UNSPEC contain two operands, if |
17000 | one operand is not used for the operation, const0_rtx is |
17001 | passed. */ |
17002 | gcc_assert (XVECLEN (elem, 0) == 2); |
17003 | |
17004 | HOST_WIDE_INT dw_op = XINT (elem, 1); |
17005 | HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0)); |
17006 | HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1)); |
17007 | exp_result |
17008 | = new_loc_descr (op: (enum dwarf_location_atom) dw_op, oprnd1, |
17009 | oprnd2); |
17010 | } |
17011 | else |
17012 | exp_result |
17013 | = mem_loc_descriptor (rtl: elem, mode, mem_mode, |
17014 | initialized: VAR_INIT_STATUS_INITIALIZED); |
17015 | |
17016 | if (!mem_loc_result) |
17017 | mem_loc_result = exp_result; |
17018 | else |
17019 | add_loc_descr (list_head: &mem_loc_result, descr: exp_result); |
17020 | } |
17021 | |
17022 | break; |
17023 | } |
17024 | |
17025 | default: |
17026 | if (flag_checking) |
17027 | { |
17028 | print_rtl (stderr, rtl); |
17029 | gcc_unreachable (); |
17030 | } |
17031 | break; |
17032 | } |
17033 | |
17034 | if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED) |
17035 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_GNU_uninit, oprnd1: 0, oprnd2: 0)); |
17036 | |
17037 | return mem_loc_result; |
17038 | } |
17039 | |
/* Return a descriptor that describes the concatenation of two locations.
   This is typically a complex variable.  X0 and X1 are the two parts
   (e.g. the real and imaginary halves); each is described independently
   and the results are stitched together with DW_OP_piece operations.
   Return 0 if either part has a non-constant size or cannot itself be
   described.  */

static dw_loc_descr_ref
concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
{
  /* At present we only track constant-sized pieces.  */
  unsigned int size0, size1;
  if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (const_value: &size0)
      || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (const_value: &size1))
    return 0;

  dw_loc_descr_ref cc_loc_result = NULL;
  /* Each part is located independently of the uninitialized status of
     the whole; DW_OP_GNU_uninit, if needed, is appended once at the end.  */
  dw_loc_descr_ref x0_ref
    = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
  dw_loc_descr_ref x1_ref
    = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);

  /* If either part is undescribable, punt on the whole object.  */
  if (x0_ref == 0 || x1_ref == 0)
    return 0;

  cc_loc_result = x0_ref;
  add_loc_descr_op_piece (list_head: &cc_loc_result, size: size0);

  add_loc_descr (list_head: &cc_loc_result, descr: x1_ref);
  add_loc_descr_op_piece (list_head: &cc_loc_result, size: size1);

  if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
    add_loc_descr (list_head: &cc_loc_result, descr: new_loc_descr (op: DW_OP_GNU_uninit, oprnd1: 0, oprnd2: 0));

  return cc_loc_result;
}
17072 | |
/* Return a descriptor that describes the concatenation of N
   locations.  CONCATN is a CONCATN rtx whose vector elements are the
   parts; each part is described independently and joined with
   DW_OP_piece operations.  Return NULL if any part has a non-constant
   size or cannot be described.  */

static dw_loc_descr_ref
concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
{
  unsigned int i;
  dw_loc_descr_ref cc_loc_result = NULL;
  unsigned int n = XVECLEN (concatn, 0);
  unsigned int size;

  for (i = 0; i < n; ++i)
    {
      dw_loc_descr_ref ref;
      rtx x = XVECEXP (concatn, 0, i);

      /* At present we only track constant-sized pieces.  */
      if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (const_value: &size))
	return NULL;

      /* One undescribable part makes the whole location unusable.  */
      ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
      if (ref == NULL)
	return NULL;

      add_loc_descr (list_head: &cc_loc_result, descr: ref);
      add_loc_descr_op_piece (list_head: &cc_loc_result, size);
    }

  /* Mark the whole concatenation uninitialized if requested.  */
  if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
    add_loc_descr (list_head: &cc_loc_result, descr: new_loc_descr (op: DW_OP_GNU_uninit, oprnd1: 0, oprnd2: 0));

  return cc_loc_result;
}
17106 | |
/* Helper function for loc_descriptor.  Return DW_OP_implicit_pointer
   for DEBUG_IMPLICIT_PTR RTL, pointing OFFSET bytes into the object
   described by the referenced decl's DIE.  Return NULL when strict
   pre-DWARF-5 output is requested, since the operation (or its GNU
   extension equivalent) is not available there.  */

static dw_loc_descr_ref
implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
{
  dw_loc_descr_ref ret;
  dw_die_ref ref;

  if (dwarf_strict && dwarf_version < 5)
    return NULL;
  gcc_assert (VAR_P (DEBUG_IMPLICIT_PTR_DECL (rtl))
	      || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
	      || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
  ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
  ret = new_loc_descr (op: dwarf_OP (op: DW_OP_implicit_pointer), oprnd1: 0, oprnd2: offset);
  ret->dw_loc_oprnd2.val_class = dw_val_class_const;
  /* If the decl already has a DIE, reference it directly; otherwise
     record the decl so the reference can be resolved later, once its
     DIE has been created.  */
  if (ref)
    {
      ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
      ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
      ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
    }
  else
    {
      ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
      ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
    }
  return ret;
}
17137 | |
/* Output a proper Dwarf location descriptor for a variable or parameter
   which is either allocated in a register or in a memory location.  For a
   register, we just generate an OP_REG and the register number.  For a
   memory location we provide a Dwarf postfix expression describing how to
   generate the (dynamic) address of the object onto the address stack.

   MODE is mode of the decl if this loc_descriptor is going to be used in
   .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
   allowed, VOIDmode otherwise.

   INITIALIZED, when VAR_INIT_STATUS_UNINITIALIZED, causes the resulting
   expression to be tagged with DW_OP_GNU_uninit where applicable.

   If we don't know how to describe it, return 0.  */

static dw_loc_descr_ref
loc_descriptor (rtx rtl, machine_mode mode,
		enum var_init_status initialized)
{
  dw_loc_descr_ref loc_result = NULL;
  scalar_int_mode int_mode;

  switch (GET_CODE (rtl))
    {
    case SUBREG:
      /* The case of a subreg may arise when we have a local (register)
	 variable or a formal (register) parameter which doesn't quite fill
	 up an entire register.  For now, just assume that it is
	 legitimate to make the Dwarf info refer to the whole register which
	 contains the given subreg.  */
      if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
	loc_result = loc_descriptor (SUBREG_REG (rtl),
				     GET_MODE (SUBREG_REG (rtl)), initialized);
      else
	goto do_default;
      break;

    case REG:
      loc_result = reg_loc_descriptor (rtl, initialized);
      break;

    case MEM:
      loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode: get_address_mode (mem: rtl),
				       GET_MODE (rtl), initialized);
      /* A TLS access may have an address form mem_loc_descriptor cannot
	 handle; try the TLS-specific fallback before giving up.  */
      if (loc_result == NULL)
	loc_result = tls_mem_loc_descriptor (mem: rtl);
      /* A constant-pool reference may be describable via the constant
	 itself even when its address is not.  */
      if (loc_result == NULL)
	{
	  rtx new_rtl = avoid_constant_pool_reference (rtl);
	  if (new_rtl != rtl)
	    loc_result = loc_descriptor (rtl: new_rtl, mode, initialized);
	}
      break;

    case CONCAT:
      loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
					  initialized);
      break;

    case CONCATN:
      loc_result = concatn_loc_descriptor (concatn: rtl, initialized);
      break;

    case VAR_LOCATION:
      /* Single part.  */
      if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
	{
	  rtx loc = PAT_VAR_LOCATION_LOC (rtl);
	  if (GET_CODE (loc) == EXPR_LIST)
	    loc = XEXP (loc, 0);
	  loc_result = loc_descriptor (rtl: loc, mode, initialized);
	  break;
	}

      /* Multiple parts: handle the inner PARALLEL below.  */
      rtl = XEXP (rtl, 1);
      /* FALLTHRU */

    case PARALLEL:
      {
	rtvec par_elems = XVEC (rtl, 0);
	int num_elem = GET_NUM_ELEM (par_elems);
	machine_mode mode;
	int i, size;

	/* Create the first one, so we have something to add to.  */
	loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
				     VOIDmode, initialized);
	if (loc_result == NULL)
	  return NULL;
	mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
	/* At present we only track constant-sized pieces.  */
	if (!GET_MODE_SIZE (mode).is_constant (const_value: &size))
	  return NULL;
	add_loc_descr_op_piece (list_head: &loc_result, size);
	/* Describe the remaining elements, each followed by a
	   DW_OP_piece of its size.  */
	for (i = 1; i < num_elem; i++)
	  {
	    dw_loc_descr_ref temp;

	    temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
				   VOIDmode, initialized);
	    if (temp == NULL)
	      return NULL;
	    add_loc_descr (list_head: &loc_result, descr: temp);
	    mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
	    /* At present we only track constant-sized pieces.  */
	    if (!GET_MODE_SIZE (mode).is_constant (const_value: &size))
	      return NULL;
	    add_loc_descr_op_piece (list_head: &loc_result, size);
	  }
      }
      break;

    case CONST_INT:
      /* A bare constant is only meaningful when a mode is known (i.e. in
	 .debug_loc context, per the function comment).  */
      if (mode != VOIDmode && mode != BLKmode)
	{
	  int_mode = as_a <scalar_int_mode> (m: mode);
	  loc_result = address_of_int_loc_descriptor (size: GET_MODE_SIZE (mode: int_mode),
						      INTVAL (rtl));
	}
      break;

    case CONST_DOUBLE:
      if (mode == VOIDmode)
	mode = GET_MODE (rtl);

      /* DW_OP_implicit_value requires DWARF 4 or GNU extensions.  */
      if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
	{
	  gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));

	  /* Note that a CONST_DOUBLE rtx could represent either an integer
	     or a floating-point constant.  A CONST_DOUBLE is used whenever
	     the constant requires more than one word in order to be
	     adequately represented.  We output CONST_DOUBLEs as blocks.  */
	  scalar_mode smode = as_a <scalar_mode> (m: mode);
	  loc_result = new_loc_descr (op: DW_OP_implicit_value,
				      oprnd1: GET_MODE_SIZE (mode: smode), oprnd2: 0);
#if TARGET_SUPPORTS_WIDE_INT == 0
	  if (!SCALAR_FLOAT_MODE_P (smode))
	    {
	      loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
	      loc_result->dw_loc_oprnd2.v.val_double
	        = rtx_to_double_int (rtl);
	    }
	  else
#endif
	    {
	      /* Serialize the float into a byte array for emission.  */
	      unsigned int length = GET_MODE_SIZE (mode: smode);
	      unsigned char *array = ggc_vec_alloc<unsigned char> (c: length);
	      unsigned int elt_size = insert_float (rtl, array);

	      loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
	      loc_result->dw_loc_oprnd2.v.val_vec.length = length / elt_size;
	      loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
	      loc_result->dw_loc_oprnd2.v.val_vec.array = array;
	    }
	}
      break;

    case CONST_WIDE_INT:
      if (mode == VOIDmode)
	mode = GET_MODE (rtl);

      if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
	{
	  int_mode = as_a <scalar_int_mode> (m: mode);
	  loc_result = new_loc_descr (op: DW_OP_implicit_value,
				      oprnd1: GET_MODE_SIZE (mode: int_mode), oprnd2: 0);
	  loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
	  loc_result->dw_loc_oprnd2.v.val_wide
	    = alloc_dw_wide_int (w: rtx_mode_t (rtl, int_mode));
	}
      break;

    case CONST_VECTOR:
      if (mode == VOIDmode)
	mode = GET_MODE (rtl);

      if (mode != VOIDmode
	  /* The combination of a length and byte elt_size doesn't extend
	     naturally to boolean vectors, where several elements are packed
	     into the same byte.  */
	  && GET_MODE_CLASS (mode) != MODE_VECTOR_BOOL
	  && (dwarf_version >= 4 || !dwarf_strict))
	{
	  unsigned int length;
	  if (!CONST_VECTOR_NUNITS (rtl).is_constant (const_value: &length))
	    return NULL;

	  unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
	  unsigned char *array
	    = ggc_vec_alloc<unsigned char> (c: length * elt_size);
	  unsigned int i;
	  unsigned char *p;
	  machine_mode imode = GET_MODE_INNER (mode);

	  gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
	  /* Serialize each element into the byte array, by element class.  */
	  switch (GET_MODE_CLASS (mode))
	    {
	    case MODE_VECTOR_INT:
	      for (i = 0, p = array; i < length; i++, p += elt_size)
		{
		  rtx elt = CONST_VECTOR_ELT (rtl, i);
		  insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
		}
	      break;

	    case MODE_VECTOR_FLOAT:
	      for (i = 0, p = array; i < length; i++, p += elt_size)
		{
		  rtx elt = CONST_VECTOR_ELT (rtl, i);
		  insert_float (elt, p);
		}
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  loc_result = new_loc_descr (op: DW_OP_implicit_value,
				      oprnd1: length * elt_size, oprnd2: 0);
	  loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
	  loc_result->dw_loc_oprnd2.v.val_vec.length = length;
	  loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
	  loc_result->dw_loc_oprnd2.v.val_vec.array = array;
	}
      break;

    case CONST:
      /* Unwrap the CONST when its operand can be described directly.  */
      if (mode == VOIDmode
	  || CONST_SCALAR_INT_P (XEXP (rtl, 0))
	  || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
	  || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
	{
	  loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
	  break;
	}
      /* FALLTHROUGH */
    case SYMBOL_REF:
      if (!const_ok_for_output (rtl))
	break;
      /* FALLTHROUGH */
    case LABEL_REF:
      /* An address-sized symbolic constant can be emitted as an address
	 followed by DW_OP_stack_value (DWARF 4+ or non-strict).  */
      if (is_a <scalar_int_mode> (m: mode, result: &int_mode)
	  && GET_MODE_SIZE (mode: int_mode) == DWARF2_ADDR_SIZE
	  && (dwarf_version >= 4 || !dwarf_strict))
	{
	  loc_result = new_addr_loc_descr (addr: rtl, dtprel: dtprel_false);
	  add_loc_descr (list_head: &loc_result, descr: new_loc_descr (op: DW_OP_stack_value, oprnd1: 0, oprnd2: 0));
	  vec_safe_push (v&: used_rtx_array, obj: rtl);
	}
      break;

    case DEBUG_IMPLICIT_PTR:
      loc_result = implicit_ptr_descriptor (rtl, offset: 0);
      break;

    case PLUS:
      /* (plus (debug_implicit_ptr) (const_int)) becomes an implicit
	 pointer with the constant as its byte offset.  */
      if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
	  && CONST_INT_P (XEXP (rtl, 1)))
	{
	  loc_result
	    = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
	  break;
	}
      /* FALLTHRU */
    do_default:
    default:
      if ((is_a <scalar_int_mode> (m: mode, result: &int_mode)
	   && GET_MODE (rtl) == int_mode
	   && GET_MODE_SIZE (mode: int_mode) <= DWARF2_ADDR_SIZE
	   && dwarf_version >= 4)
	  || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
	{
	  /* Value expression.  */
	  loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
	  if (loc_result)
	    add_loc_descr (list_head: &loc_result,
			   descr: new_loc_descr (op: DW_OP_stack_value, oprnd1: 0, oprnd2: 0));
	}
      break;
    }

  return loc_result;
}
17419 | |
/* We need to figure out what section we should use as the base for the
   address ranges where a given location is valid.
   1. If this particular DECL has a section associated with it, use that.
   2. If this function has a section associated with it, use that.
   3. Otherwise, use the text section.
   XXX: If you split a variable across multiple sections, we won't notice.  */

static const char *
secname_for_decl (const_tree decl)
{
  const char *secname;

  /* Case 1: the decl itself names a section (only meaningful for
     variables/functions with static storage or external linkage).  */
  if (VAR_OR_FUNCTION_DECL_P (decl)
      && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
      && DECL_SECTION_NAME (decl))
    secname = DECL_SECTION_NAME (decl);
  /* Case 2: fall back to the current function's section.  */
  else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
    {
      /* When emitting into the cold partition, prefer the actual named
	 section of that partition over the function's nominal section.  */
      if (in_cold_section_p)
	{
	  section *sec = current_function_section ();
	  if (sec->common.flags & SECTION_NAMED)
	    return sec->named.name;
	}
      secname = DECL_SECTION_NAME (current_function_decl);
    }
  /* Case 3: anonymous cold partition or the plain text section.  */
  else if (cfun && in_cold_section_p)
    secname = crtl->subsections.cold_section_label;
  else
    secname = text_section_label;

  return secname;
}
17453 | |
17454 | /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */ |
17455 | |
17456 | static bool |
17457 | decl_by_reference_p (tree decl) |
17458 | { |
17459 | return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL |
17460 | || VAR_P (decl)) |
17461 | && DECL_BY_REFERENCE (decl)); |
17462 | } |
17463 | |
/* Helper function for dw_loc_list.  Compute proper Dwarf location descriptor
   for VARLOC.  LOC is the declaration's tree, used for its type/size.
   WANT_ADDRESS is 0 when the value itself is wanted, nonzero when its
   address is wanted; the special value 2 means the caller accepts a
   DW_OP_*piece composite (loc_descriptor) rather than a single-part
   expression.  Return 0 on failure.  */

static dw_loc_descr_ref
dw_loc_list_1 (tree loc, rtx varloc, int want_address,
	       enum var_init_status initialized)
{
  /* Nonzero when DESCR computes an address rather than the value.  */
  int have_address = 0;
  dw_loc_descr_ref descr;
  machine_mode mode;

  if (want_address != 2)
    {
      gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
      /* Single part.  */
      if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
	{
	  varloc = PAT_VAR_LOCATION_LOC (varloc);
	  if (GET_CODE (varloc) == EXPR_LIST)
	    varloc = XEXP (varloc, 0);
	  mode = GET_MODE (varloc);
	  if (MEM_P (varloc))
	    {
	      /* Describe the address of the MEM; if that fails, try to
		 describe the value behind a constant-pool reference.  */
	      rtx addr = XEXP (varloc, 0);
	      descr = mem_loc_descriptor (rtl: addr, mode: get_address_mode (mem: varloc),
					  mem_mode: mode, initialized);
	      if (descr)
		have_address = 1;
	      else
		{
		  rtx x = avoid_constant_pool_reference (varloc);
		  if (x != varloc)
		    descr = mem_loc_descriptor (rtl: x, mode, VOIDmode,
						initialized);
		}
	    }
	  else
	    descr = mem_loc_descriptor (rtl: varloc, mode, VOIDmode, initialized);
	}
      else
	return 0;
    }
  else
    {
      /* want_address == 2: use the full loc_descriptor machinery, which
	 may produce a multi-piece location.  */
      if (GET_CODE (varloc) == VAR_LOCATION)
	mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
      else
	mode = DECL_MODE (loc);
      descr = loc_descriptor (rtl: varloc, mode, initialized);
      have_address = 1;
    }

  if (!descr)
    return 0;

  /* Promote a value to an "address" by marking it DW_OP_stack_value,
     where the DWARF version permits it.  */
  if (want_address == 2 && !have_address
      && (dwarf_version >= 4 || !dwarf_strict))
    {
      if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
	{
	  expansion_failed (expr: loc, NULL_RTX,
			    reason: "DWARF address size mismatch");
	  return 0;
	}
      add_loc_descr (list_head: &descr, descr: new_loc_descr (op: DW_OP_stack_value, oprnd1: 0, oprnd2: 0));
      have_address = 1;
    }
  /* Show if we can't fill the request for an address.  */
  if (want_address && !have_address)
    {
      expansion_failed (expr: loc, NULL_RTX,
			reason: "Want address and only have value");
      return 0;
    }

  /* If we've got an address and don't want one, dereference.  */
  if (!want_address && have_address)
    {
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
      enum dwarf_location_atom op;

      /* size == -1 means the type's size isn't a compile-time constant.  */
      if (size > DWARF2_ADDR_SIZE || size == -1)
	{
	  expansion_failed (expr: loc, NULL_RTX,
			    reason: "DWARF address size mismatch");
	  return 0;
	}
      else if (size == DWARF2_ADDR_SIZE)
	op = DW_OP_deref;
      else
	op = DW_OP_deref_size;

      add_loc_descr (list_head: &descr, descr: new_loc_descr (op, oprnd1: size, oprnd2: 0));
    }

  return descr;
}
17561 | |
17562 | /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL |
17563 | if it is not possible. */ |
17564 | |
17565 | static dw_loc_descr_ref |
17566 | new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset) |
17567 | { |
17568 | if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0) |
17569 | return new_loc_descr (op: DW_OP_piece, oprnd1: bitsize / BITS_PER_UNIT, oprnd2: 0); |
17570 | else if (dwarf_version >= 3 || !dwarf_strict) |
17571 | return new_loc_descr (op: DW_OP_bit_piece, oprnd1: bitsize, oprnd2: offset); |
17572 | else |
17573 | return NULL; |
17574 | } |
17575 | |
/* Helper function for dw_loc_list.  Compute proper Dwarf location descriptor
   for VAR_LOC_NOTE for variable DECL that has been optimized by SRA.
   LOC is an EXPR_LIST chain of pieces; each piece carries its bit size
   and a var-location note (or none, meaning that range is optimized
   out).  The pieces are concatenated with DW_OP_*piece operations, with
   padding pieces emitted for holes.  Return NULL on failure.  */

static dw_loc_descr_ref
dw_sra_loc_expr (tree decl, rtx loc)
{
  rtx p;
  /* Accumulated bits of consecutive undescribable pieces, flushed as a
     single empty DW_OP_*piece before the next real piece.  */
  unsigned HOST_WIDE_INT padsize = 0;
  dw_loc_descr_ref descr, *descr_tail;
  /* Bits of the decl not yet covered; used to detect overruns and to
     pad the tail.  */
  unsigned HOST_WIDE_INT decl_size;
  rtx varloc;
  enum var_init_status initialized;

  if (DECL_SIZE (decl) == NULL
      || !tree_fits_uhwi_p (DECL_SIZE (decl)))
    return NULL;

  decl_size = tree_to_uhwi (DECL_SIZE (decl));
  descr = NULL;
  descr_tail = &descr;

  for (p = loc; p; p = XEXP (p, 1))
    {
      unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (piece: p);
      rtx loc_note = *decl_piece_varloc_ptr (piece: p);
      dw_loc_descr_ref cur_descr;
      dw_loc_descr_ref *tail, last = NULL;
      /* Sum of DW_OP_*piece sizes found inside CUR_DESCR, in bits.  */
      unsigned HOST_WIDE_INT opsize = 0;

      /* A piece without a location is a hole: account it as padding.  */
      if (loc_note == NULL_RTX
	  || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
	{
	  padsize += bitsize;
	  continue;
	}
      initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
      varloc = NOTE_VAR_LOCATION (loc_note);
      cur_descr = dw_loc_list_1 (loc: decl, varloc, want_address: 2, initialized);
      if (cur_descr == NULL)
	{
	  padsize += bitsize;
	  continue;
	}

      /* Check that cur_descr either doesn't use
	 DW_OP_*piece operations, or their sum is equal
	 to bitsize.  Otherwise we can't embed it.  */
      for (tail = &cur_descr; *tail != NULL;
	   tail = &(*tail)->dw_loc_next)
	if ((*tail)->dw_loc_opc == DW_OP_piece)
	  {
	    opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
		      * BITS_PER_UNIT;
	    last = *tail;
	  }
	else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
	  {
	    opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
	    last = *tail;
	  }

      if (last != NULL && opsize != bitsize)
	{
	  padsize += bitsize;
	  /* Discard the current piece of the descriptor and release any
	     addr_table entries it uses.  */
	  remove_loc_list_addr_table_entries (descr: cur_descr);
	  continue;
	}

      /* If there is a hole, add DW_OP_*piece after empty DWARF
	 expression, which means that those bits are optimized out.  */
      if (padsize)
	{
	  if (padsize > decl_size)
	    {
	      remove_loc_list_addr_table_entries (descr: cur_descr);
	      goto discard_descr;
	    }
	  decl_size -= padsize;
	  *descr_tail = new_loc_descr_op_bit_piece (bitsize: padsize, offset: 0);
	  if (*descr_tail == NULL)
	    {
	      remove_loc_list_addr_table_entries (descr: cur_descr);
	      goto discard_descr;
	    }
	  descr_tail = &(*descr_tail)->dw_loc_next;
	  padsize = 0;
	}
      /* Splice the piece's expression onto the composite.  */
      *descr_tail = cur_descr;
      descr_tail = tail;
      if (bitsize > decl_size)
	goto discard_descr;
      decl_size -= bitsize;
      /* If the expression contained no DW_OP_*piece of its own, add one
	 covering BITSIZE bits, computing any big-endian adjustment.  */
      if (last == NULL)
	{
	  HOST_WIDE_INT offset = 0;
	  if (GET_CODE (varloc) == VAR_LOCATION
	      && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
	    {
	      varloc = PAT_VAR_LOCATION_LOC (varloc);
	      if (GET_CODE (varloc) == EXPR_LIST)
		varloc = XEXP (varloc, 0);
	    }
	  /* Strip wrappers to find the underlying MEM or REG.  */
	  do
	    {
	      if (GET_CODE (varloc) == CONST
		  || GET_CODE (varloc) == SIGN_EXTEND
		  || GET_CODE (varloc) == ZERO_EXTEND)
		varloc = XEXP (varloc, 0);
	      else if (GET_CODE (varloc) == SUBREG)
		varloc = SUBREG_REG (varloc);
	      else
		break;
	    }
	  while (1);
	  /* DW_OP_bit_size offset should be zero for register
	     or implicit location descriptions and empty location
	     descriptions, but for memory addresses needs big endian
	     adjustment.  */
	  if (MEM_P (varloc))
	    {
	      unsigned HOST_WIDE_INT memsize;
	      if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (const_value: &memsize))
		goto discard_descr;
	      memsize *= BITS_PER_UNIT;
	      if (memsize != bitsize)
		{
		  if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
		      && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
		    goto discard_descr;
		  if (memsize < bitsize)
		    goto discard_descr;
		  if (BITS_BIG_ENDIAN)
		    offset = memsize - bitsize;
		}
	    }

	  *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
	  if (*descr_tail == NULL)
	    goto discard_descr;
	  descr_tail = &(*descr_tail)->dw_loc_next;
	}
    }

  /* If there were any non-empty expressions, add padding till the end of
     the decl.  */
  if (descr != NULL && decl_size != 0)
    {
      *descr_tail = new_loc_descr_op_bit_piece (bitsize: decl_size, offset: 0);
      if (*descr_tail == NULL)
	goto discard_descr;
    }
  return descr;

discard_descr:
  /* Discard the descriptor and release any addr_table entries it uses.  */
  remove_loc_list_addr_table_entries (descr);
  return NULL;
}
17736 | |
/* Return the dwarf representation of the location list LOC_LIST of
   DECL.  WANT_ADDRESS has the same meaning as in loc_list_from_tree
   function.  Each var_loc_node becomes one range entry; ranges that
   straddle a hot/cold section split are emitted as two entries.  */

static dw_loc_list_ref
dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
{
  const char *endname, *secname;
  var_loc_view endview;
  rtx varloc;
  enum var_init_status initialized;
  struct var_loc_node *node;
  dw_loc_descr_ref descr;
  char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
  dw_loc_list_ref list = NULL;
  /* Tail pointer for appending to LIST in order.  */
  dw_loc_list_ref *listp = &list;

  /* Now that we know what section we are using for a base,
     actually construct the list of locations.
     The first location information is what is passed to the
     function that creates the location list, and the remaining
     locations just get added on to that list.
     Note that we only know the start address for a location
     (IE location changes), so to build the range, we use
     the range [current location start, next location start].
     This means we have to special case the last node, and generate
     a range of [last location start, end of function label].  */

  /* With a partitioned function, pick the section of the partition the
     first ranges live in; secname is recomputed at the switch below.  */
  if (cfun && crtl->has_bb_partition)
    {
      bool save_in_cold_section_p = in_cold_section_p;
      in_cold_section_p = first_function_block_is_cold;
      if (loc_list->last_before_switch == NULL)
	in_cold_section_p = !in_cold_section_p;
      secname = secname_for_decl (decl);
      in_cold_section_p = save_in_cold_section_p;
    }
  else
    secname = secname_for_decl (decl);

  for (node = loc_list->first; node; node = node->next)
    {
      bool range_across_switch = false;
      /* Skip nodes whose location was lost (NULL loc in the note).  */
      if (GET_CODE (node->loc) == EXPR_LIST
	  || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
	{
	  if (GET_CODE (node->loc) == EXPR_LIST)
	    {
	      descr = NULL;
	      /* This requires DW_OP_{,bit_}piece, which is not usable
		 inside DWARF expressions.  */
	      if (want_address == 2)
		descr = dw_sra_loc_expr (decl, loc: node->loc);
	    }
	  else
	    {
	      initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
	      varloc = NOTE_VAR_LOCATION (node->loc);
	      descr = dw_loc_list_1 (loc: decl, varloc, want_address, initialized);
	    }
	  if (descr)
	    {
	      /* If section switch happens in between node->label
		 and node->next->label (or end of function) and
		 we can't emit it as a single entry list,
		 emit two ranges, first one ending at the end
		 of first partition and second one starting at the
		 beginning of second partition.  */
	      if (node == loc_list->last_before_switch
		  && (node != loc_list->first || loc_list->first->next
		      /* If we are to emit a view number, we will emit
			 a loclist rather than a single location
			 expression for the entire function (see
			 loc_list_has_views), so we have to split the
			 range that straddles across partitions.  */
		      || !ZERO_VIEW_P (node->view))
		  && current_function_decl)
		{
		  endname = cfun->fde->dw_fde_end;
		  endview = 0;
		  range_across_switch = true;
		}
	      /* The variable has a location between NODE->LABEL and
		 NODE->NEXT->LABEL.  */
	      else if (node->next)
		endname = node->next->label, endview = node->next->view;
	      /* If the variable has a location at the last label
		 it keeps its location until the end of function.  */
	      else if (!current_function_decl)
		endname = text_end_label, endview = 0;
	      else
		{
		  ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
					       current_function_funcdef_no);
		  endname = ggc_strdup (label_id);
		  endview = 0;
		}

	      *listp = new_loc_list (expr: descr, begin: node->label, vbegin: node->view,
				     end: endname, vend: endview, section: secname);
	      /* Force output of an entry covering a whole-function range
		 for a parameter's initial location, even if empty.  */
	      if (TREE_CODE (decl) == PARM_DECL
		  && node == loc_list->first
		  && NOTE_P (node->loc)
		  && strcmp (s1: node->label, s2: endname) == 0)
		(*listp)->force = true;
	      listp = &(*listp)->dw_loc_next;
	    }
	}

      /* Past the partition switch, subsequent ranges are based in the
	 other partition's section.  */
      if (cfun
	  && crtl->has_bb_partition
	  && node == loc_list->last_before_switch)
	{
	  bool save_in_cold_section_p = in_cold_section_p;
	  in_cold_section_p = !first_function_block_is_cold;
	  secname = secname_for_decl (decl);
	  in_cold_section_p = save_in_cold_section_p;
	}

      /* Emit the second half of a range split across the partition
	 boundary, starting at the second partition's begin label.  */
      if (range_across_switch)
	{
	  if (GET_CODE (node->loc) == EXPR_LIST)
	    descr = dw_sra_loc_expr (decl, loc: node->loc);
	  else
	    {
	      initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
	      varloc = NOTE_VAR_LOCATION (node->loc);
	      descr = dw_loc_list_1 (loc: decl, varloc, want_address,
				     initialized);
	    }
	  gcc_assert (descr);
	  /* The variable has a location between NODE->LABEL and
	     NODE->NEXT->LABEL.  */
	  if (node->next)
	    endname = node->next->label, endview = node->next->view;
	  else
	    endname = cfun->fde->dw_fde_second_end, endview = 0;
	  *listp = new_loc_list (expr: descr, cfun->fde->dw_fde_second_begin, vbegin: 0,
				 end: endname, vend: endview, section: secname);
	  listp = &(*listp)->dw_loc_next;
	}
    }

  /* Try to avoid the overhead of a location list emitting a location
     expression instead, but only if we didn't have more than one
     location entry in the first place.  If some entries were not
     representable, we don't want to pretend a single entry that was
     applies to the entire scope in which the variable is
     available.  */
  if (list && loc_list->first->next)
    gen_llsym (list);
  else
    maybe_gen_llsym (list);

  return list;
}
17893 | |
17894 | /* Return true if the loc_list has only single element and thus |
17895 | can be represented as location description. */ |
17896 | |
17897 | static bool |
17898 | single_element_loc_list_p (dw_loc_list_ref list) |
17899 | { |
17900 | gcc_assert (!list->dw_loc_next || list->ll_symbol); |
17901 | return !list->ll_symbol; |
17902 | } |
17903 | |
17904 | /* Duplicate a single element of location list. */ |
17905 | |
17906 | static inline dw_loc_descr_ref |
17907 | copy_loc_descr (dw_loc_descr_ref ref) |
17908 | { |
17909 | dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> (); |
17910 | memcpy (dest: copy, src: ref, n: sizeof (dw_loc_descr_node)); |
17911 | return copy; |
17912 | } |
17913 | |
17914 | /* To each location in list LIST append loc descr REF. */ |
17915 | |
17916 | static void |
17917 | add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref) |
17918 | { |
17919 | dw_loc_descr_ref copy; |
17920 | add_loc_descr (list_head: &list->expr, descr: ref); |
17921 | list = list->dw_loc_next; |
17922 | while (list) |
17923 | { |
17924 | copy = copy_loc_descr (ref); |
17925 | add_loc_descr (list_head: &list->expr, descr: copy); |
17926 | while (copy->dw_loc_next) |
17927 | copy = copy->dw_loc_next = copy_loc_descr (ref: copy->dw_loc_next); |
17928 | list = list->dw_loc_next; |
17929 | } |
17930 | } |
17931 | |
/* To each location in list LIST prepend loc descr REF.  The first list
   element consumes REF itself; every other element gets a deep copy of
   REF's chain spliced in front of its own expression.  */

static void
prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
{
  dw_loc_descr_ref copy;
  /* Remember the original head of the first element's expression: copies
     made below must duplicate REF's chain only up to (and excluding) this
     node, since everything from here on belongs to the first element.  */
  dw_loc_descr_ref ref_end = list->expr;
  /* First element: splice its old expression after REF and make REF the
     new head.  */
  add_loc_descr (list_head: &ref, descr: list->expr);
  list->expr = ref;
  list = list->dw_loc_next;
  while (list)
    {
      dw_loc_descr_ref end = list->expr;
      /* Deep-copy REF's chain (which now terminates at REF_END) and hook
	 this element's own expression onto its tail.  */
      list->expr = copy = copy_loc_descr (ref);
      while (copy->dw_loc_next != ref_end)
	copy = copy->dw_loc_next = copy_loc_descr (ref: copy->dw_loc_next);
      copy->dw_loc_next = end;
      list = list->dw_loc_next;
    }
}
17952 | |
17953 | /* Given two lists RET and LIST |
17954 | produce location list that is result of adding expression in LIST |
17955 | to expression in RET on each position in program. |
17956 | Might be destructive on both RET and LIST. |
17957 | |
17958 | TODO: We handle only simple cases of RET or LIST having at most one |
17959 | element. General case would involve sorting the lists in program order |
17960 | and merging them that will need some additional work. |
17961 | Adding that will improve quality of debug info especially for SRA-ed |
17962 | structures. */ |
17963 | |
17964 | static void |
17965 | add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list) |
17966 | { |
17967 | if (!list) |
17968 | return; |
17969 | if (!*ret) |
17970 | { |
17971 | *ret = list; |
17972 | return; |
17973 | } |
17974 | if (!list->dw_loc_next) |
17975 | { |
17976 | add_loc_descr_to_each (list: *ret, ref: list->expr); |
17977 | return; |
17978 | } |
17979 | if (!(*ret)->dw_loc_next) |
17980 | { |
17981 | prepend_loc_descr_to_each (list, ref: (*ret)->expr); |
17982 | *ret = list; |
17983 | return; |
17984 | } |
17985 | expansion_failed (NULL_TREE, NULL_RTX, |
17986 | reason: "Don't know how to merge two non-trivial" |
17987 | " location lists.\n" ); |
17988 | *ret = NULL; |
17989 | return; |
17990 | } |
17991 | |
17992 | /* LOC is constant expression. Try a luck, look it up in constant |
17993 | pool and return its loc_descr of its address. */ |
17994 | |
17995 | static dw_loc_descr_ref |
17996 | cst_pool_loc_descr (tree loc) |
17997 | { |
17998 | /* Get an RTL for this, if something has been emitted. */ |
17999 | rtx rtl = lookup_constant_def (loc); |
18000 | |
18001 | if (!rtl || !MEM_P (rtl)) |
18002 | { |
18003 | gcc_assert (!rtl); |
18004 | return 0; |
18005 | } |
18006 | gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF); |
18007 | |
18008 | /* TODO: We might get more coverage if we was actually delaying expansion |
18009 | of all expressions till end of compilation when constant pools are fully |
18010 | populated. */ |
18011 | if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0)))) |
18012 | { |
18013 | expansion_failed (expr: loc, NULL_RTX, |
18014 | reason: "CST value in contant pool but not marked." ); |
18015 | return 0; |
18016 | } |
18017 | return mem_loc_descriptor (XEXP (rtl, 0), mode: get_address_mode (mem: rtl), |
18018 | GET_MODE (rtl), initialized: VAR_INIT_STATUS_INITIALIZED); |
18019 | } |
18020 | |
/* Return dw_loc_list representing address of addr_expr LOC
   by looking for inner INDIRECT_REF expression and turning
   it into simple arithmetic.  Return 0 on failure.

   See loc_list_from_tree for the meaning of CONTEXT.  */

static dw_loc_list_ref
loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
						   loc_descr_context *context)
{
  tree obj, offset;
  poly_int64 bitsize, bitpos, bytepos;
  machine_mode mode;
  int unsignedp, reversep, volatilep = 0;
  dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;

  /* Decompose the operand of the ADDR_EXPR into a base object plus bit
     position and (possibly variable) offset.  */
  obj = get_inner_reference (TREE_OPERAND (loc, 0),
			     &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);
  STRIP_NOPS (obj);
  /* Addresses are byte-granular; a residual sub-byte bit offset cannot be
     represented.  */
  if (!multiple_p (a: bitpos, BITS_PER_UNIT, multiple: &bytepos))
    {
      expansion_failed (expr: loc, NULL_RTX, reason: "bitfield access");
      return 0;
    }
  if (!INDIRECT_REF_P (obj))
    {
      expansion_failed (expr: obj,
			NULL_RTX, reason: "no indirect ref in inner refrence");
      return 0;
    }
  if (!offset && known_eq (bitpos, 0))
    /* No displacement: the address is just the dereferenced pointer's
       value.  */
    list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
				   context);
  else if (toplev
	   && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
	   && (dwarf_version >= 4 || !dwarf_strict))
    {
      /* Compute pointer + variable offset + constant byte position and
	 mark it as a DW_OP_stack_value (needs DWARFv4 or non-strict).  */
      list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
      if (!list_ret)
	return 0;
      if (offset)
	{
	  /* Variable offset.  */
	  list_ret1 = loc_list_from_tree (offset, 0, context);
	  if (list_ret1 == 0)
	    return 0;
	  add_loc_list (ret: &list_ret, list: list_ret1);
	  if (!list_ret)
	    return 0;
	  add_loc_descr_to_each (list: list_ret,
				 ref: new_loc_descr (op: DW_OP_plus, oprnd1: 0, oprnd2: 0));
	}
      HOST_WIDE_INT value;
      if (bytepos.is_constant (const_value: &value) && value > 0)
	add_loc_descr_to_each (list: list_ret,
			       ref: new_loc_descr (op: DW_OP_plus_uconst, oprnd1: value, oprnd2: 0));
      else if (maybe_ne (a: bytepos, b: 0))
	loc_list_plus_const (list_head: list_ret, offset: bytepos);
      add_loc_descr_to_each (list: list_ret,
			     ref: new_loc_descr (op: DW_OP_stack_value, oprnd1: 0, oprnd2: 0));
    }
  return list_ret;
}
18085 | |
18086 | /* Set LOC to the next operation that is not a DW_OP_nop operation. In the case |
18087 | all operations from LOC are nops, move to the last one. Insert in NOPS all |
18088 | operations that are skipped. */ |
18089 | |
18090 | static void |
18091 | loc_descr_to_next_no_nop (dw_loc_descr_ref &loc, |
18092 | hash_set<dw_loc_descr_ref> &nops) |
18093 | { |
18094 | while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop) |
18095 | { |
18096 | nops.add (k: loc); |
18097 | loc = loc->dw_loc_next; |
18098 | } |
18099 | } |
18100 | |
/* Helper for loc_descr_without_nops: free the location description operation
   LOC.  Always returns true so that hash_set::traverse visits every
   element.  DATA is unused.  */

bool
free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
{
  ggc_free (loc);
  return true;
}
18110 | |
/* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
   finishes LOC.  Works by rewiring every forward link (including branch
   targets) past the nops, then freeing the skipped nodes.  */

static void
loc_descr_without_nops (dw_loc_descr_ref &loc)
{
  /* A lone trailing nop is deliberately kept (it can be a branch target or
     a required terminator), so a single-nop expression needs no work.  */
  if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
    return;

  /* Set of all DW_OP_nop operations we remove.  */
  hash_set<dw_loc_descr_ref> nops;

  /* First, strip all prefix NOP operations in order to keep the head of the
     operations list.  */
  loc_descr_to_next_no_nop (loc, nops);

  for (dw_loc_descr_ref cur = loc; cur != NULL;)
    {
      /* For control flow operations: strip "prefix" nops in destination
	 labels.  */
      if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
	loc_descr_to_next_no_nop (loc&: cur->dw_loc_oprnd1.v.val_loc, nops);
      if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
	loc_descr_to_next_no_nop (loc&: cur->dw_loc_oprnd2.v.val_loc, nops);

      /* Do the same for the operations that follow, then move to the next
	 iteration.  */
      if (cur->dw_loc_next != NULL)
	loc_descr_to_next_no_nop (loc&: cur->dw_loc_next, nops);
      cur = cur->dw_loc_next;
    }

  /* All links now bypass the recorded nops; release them.  */
  nops.traverse<void *, free_loc_descr> (NULL);
}
18145 | |
18146 | |
struct dwarf_procedure_info;

/* Helper structure for location descriptions generation.  Carries the state
   that customizes how trees are translated into DWARF expressions.  */
struct loc_descr_context
{
  /* The type that is implicitly referenced by DW_OP_push_object_address, or
     NULL_TREE if DW_OP_push_object_address is invalid for this location
     description.  This is used when processing PLACEHOLDER_EXPR nodes.  */
  tree context_type;
  /* The ..._DECL node that should be translated as a
     DW_OP_push_object_address operation.  */
  tree base_decl;
  /* Information about the DWARF procedure we are currently generating.  NULL if
     we are not generating a DWARF procedure.  */
  struct dwarf_procedure_info *dpi;
  /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
     by consumer.  Used for DW_TAG_generic_subrange attributes.  */
  bool placeholder_arg;
  /* True if PLACEHOLDER_EXPR has been seen.  */
  bool placeholder_seen;
  /* True if strict preservation of signedness has been requested.  */
  bool strict_signedness;
};
18170 | |
18171 | /* DWARF procedures generation |
18172 | |
18173 | DWARF expressions (aka. location descriptions) are used to encode variable |
18174 | things such as sizes or offsets. Such computations can have redundant parts |
18175 | that can be factorized in order to reduce the size of the output debug |
18176 | information. This is the whole point of DWARF procedures. |
18177 | |
18178 | Thanks to stor-layout.cc, size and offset expressions in GENERIC trees are |
18179 | already factorized into functions ("size functions") in order to handle very |
18180 | big and complex types. Such functions are quite simple: they have integral |
18181 | arguments, they return an integral result and their body contains only a |
18182 | return statement with arithmetic expressions. This is the only kind of |
18183 | function we are interested in translating into DWARF procedures, here. |
18184 | |
   DWARF expressions and DWARF procedures are executed using a stack, so we
   have to define some calling convention for them to interact.  Let's say
   that:
18187 | |
18188 | - Before calling a DWARF procedure, DWARF expressions must push on the stack |
18189 | all arguments in reverse order (right-to-left) so that when the DWARF |
18190 | procedure execution starts, the first argument is the top of the stack. |
18191 | |
18192 | - Then, when returning, the DWARF procedure must have consumed all arguments |
18193 | on the stack, must have pushed the result and touched nothing else. |
18194 | |
   - Each integral argument and the result are integral values that can be
     held in a single stack slot.
18197 | |
18198 | - We call "frame offset" the number of stack slots that are "under DWARF |
18199 | procedure control": it includes the arguments slots, the temporaries and |
18200 | the result slot. Thus, it is equal to the number of arguments when the |
18201 | procedure execution starts and must be equal to one (the result) when it |
18202 | returns. */ |
18203 | |
/* Helper structure used when generating operations for a DWARF procedure.
   See the "DWARF procedures generation" commentary above for the calling
   convention this supports.  */
struct dwarf_procedure_info
{
  /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
     currently translated.  */
  tree fndecl;
  /* The number of arguments FNDECL takes.  Each argument occupies one
     stack slot under the procedure calling convention.  */
  unsigned args_count;
};
18213 | |
18214 | /* Return a pointer to a newly created DIE node for a DWARF procedure. Add |
18215 | LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE, |
18216 | equate it to this DIE. */ |
18217 | |
18218 | static dw_die_ref |
18219 | new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl, |
18220 | dw_die_ref parent_die) |
18221 | { |
18222 | dw_die_ref dwarf_proc_die; |
18223 | |
18224 | if ((dwarf_version < 3 && dwarf_strict) |
18225 | || location == NULL) |
18226 | return NULL; |
18227 | |
18228 | dwarf_proc_die = new_die (tag_value: DW_TAG_dwarf_procedure, parent_die, t: fndecl); |
18229 | if (fndecl) |
18230 | equate_decl_number_to_die (decl: fndecl, decl_die: dwarf_proc_die); |
18231 | add_AT_loc (die: dwarf_proc_die, attr_kind: DW_AT_location, loc: location); |
18232 | return dwarf_proc_die; |
18233 | } |
18234 | |
18235 | /* Return whether TYPE is a supported type as a DWARF procedure argument |
18236 | type or return type (we handle only scalar types and pointer types that |
18237 | aren't wider than the DWARF expression evaluation stack). */ |
18238 | |
18239 | static bool |
18240 | is_handled_procedure_type (tree type) |
18241 | { |
18242 | return ((INTEGRAL_TYPE_P (type) |
18243 | || TREE_CODE (type) == OFFSET_TYPE |
18244 | || TREE_CODE (type) == POINTER_TYPE) |
18245 | && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE); |
18246 | } |
18247 | |
/* Helper for resolve_args_picking: do the same but stop when coming across
   visited nodes.  For each node we visit, register in FRAME_OFFSETS the frame
   offset *before* evaluating the corresponding operation.  Return false when
   a picking offset cannot be encoded (> 255) or when a called DWARF
   procedure's stack usage is unknown.  */

static bool
resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
			struct dwarf_procedure_info *dpi,
			hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
{
  /* The "frame_offset" identifier is already used to name a macro... */
  unsigned frame_offset_ = initial_frame_offset;
  dw_loc_descr_ref l;

  for (l = loc; l != NULL;)
    {
      bool existed;
      unsigned &l_frame_offset = frame_offsets.get_or_insert (k: l, existed: &existed);

      /* If we already met this node, there is nothing to compute anymore.  */
      if (existed)
	{
	  /* Make sure that the stack size is consistent wherever the execution
	     flow comes from.  */
	  gcc_assert ((unsigned) l_frame_offset == frame_offset_);
	  break;
	}
      l_frame_offset = frame_offset_;

      /* If needed, relocate the picking offset with respect to the frame
	 offset.  */
      if (l->frame_offset_rel)
	{
	  unsigned HOST_WIDE_INT off;
	  switch (l->dw_loc_opc)
	    {
	    case DW_OP_pick:
	      off = l->dw_loc_oprnd1.v.val_unsigned;
	      break;
	    case DW_OP_dup:
	      off = 0;
	      break;
	    case DW_OP_over:
	      off = 1;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  /* frame_offset_ is the size of the current stack frame, including
	     incoming arguments.  Besides, the arguments are pushed
	     right-to-left.  Thus, in order to access the Nth argument from
	     this operation node, the picking has to skip temporaries *plus*
	     one stack slot per argument (0 for the first one, 1 for the second
	     one, etc.).

	     The targeted argument number (N) is already set as the operand,
	     and the number of temporaries can be computed with:
	       frame_offsets_ - dpi->args_count */
	  off += frame_offset_ - dpi->args_count;

	  /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)...  */
	  if (off > 255)
	    return false;

	  /* Use the shortest encoding available for the computed depth.  */
	  if (off == 0)
	    {
	      l->dw_loc_opc = DW_OP_dup;
	      l->dw_loc_oprnd1.v.val_unsigned = 0;
	    }
	  else if (off == 1)
	    {
	      l->dw_loc_opc = DW_OP_over;
	      l->dw_loc_oprnd1.v.val_unsigned = 0;
	    }
	  else
	    {
	      l->dw_loc_opc = DW_OP_pick;
	      l->dw_loc_oprnd1.v.val_unsigned = off;
	    }
	}

      /* Update frame_offset according to the effect the current operation has
	 on the stack.  */
      switch (l->dw_loc_opc)
	{
	/* Neutral operations: the stack depth is unchanged.  */
	case DW_OP_deref:
	case DW_OP_swap:
	case DW_OP_rot:
	case DW_OP_abs:
	case DW_OP_neg:
	case DW_OP_not:
	case DW_OP_plus_uconst:
	case DW_OP_skip:
	case DW_OP_reg0:
	case DW_OP_reg1:
	case DW_OP_reg2:
	case DW_OP_reg3:
	case DW_OP_reg4:
	case DW_OP_reg5:
	case DW_OP_reg6:
	case DW_OP_reg7:
	case DW_OP_reg8:
	case DW_OP_reg9:
	case DW_OP_reg10:
	case DW_OP_reg11:
	case DW_OP_reg12:
	case DW_OP_reg13:
	case DW_OP_reg14:
	case DW_OP_reg15:
	case DW_OP_reg16:
	case DW_OP_reg17:
	case DW_OP_reg18:
	case DW_OP_reg19:
	case DW_OP_reg20:
	case DW_OP_reg21:
	case DW_OP_reg22:
	case DW_OP_reg23:
	case DW_OP_reg24:
	case DW_OP_reg25:
	case DW_OP_reg26:
	case DW_OP_reg27:
	case DW_OP_reg28:
	case DW_OP_reg29:
	case DW_OP_reg30:
	case DW_OP_reg31:
	case DW_OP_bregx:
	case DW_OP_piece:
	case DW_OP_deref_size:
	case DW_OP_nop:
	case DW_OP_bit_piece:
	case DW_OP_implicit_value:
	case DW_OP_stack_value:
	case DW_OP_deref_type:
	case DW_OP_convert:
	case DW_OP_reinterpret:
	case DW_OP_GNU_deref_type:
	case DW_OP_GNU_convert:
	case DW_OP_GNU_reinterpret:
	  break;

	/* Operations that push one value on the stack.  */
	case DW_OP_addr:
	case DW_OP_const1u:
	case DW_OP_const1s:
	case DW_OP_const2u:
	case DW_OP_const2s:
	case DW_OP_const4u:
	case DW_OP_const4s:
	case DW_OP_const8u:
	case DW_OP_const8s:
	case DW_OP_constu:
	case DW_OP_consts:
	case DW_OP_dup:
	case DW_OP_over:
	case DW_OP_pick:
	case DW_OP_lit0:
	case DW_OP_lit1:
	case DW_OP_lit2:
	case DW_OP_lit3:
	case DW_OP_lit4:
	case DW_OP_lit5:
	case DW_OP_lit6:
	case DW_OP_lit7:
	case DW_OP_lit8:
	case DW_OP_lit9:
	case DW_OP_lit10:
	case DW_OP_lit11:
	case DW_OP_lit12:
	case DW_OP_lit13:
	case DW_OP_lit14:
	case DW_OP_lit15:
	case DW_OP_lit16:
	case DW_OP_lit17:
	case DW_OP_lit18:
	case DW_OP_lit19:
	case DW_OP_lit20:
	case DW_OP_lit21:
	case DW_OP_lit22:
	case DW_OP_lit23:
	case DW_OP_lit24:
	case DW_OP_lit25:
	case DW_OP_lit26:
	case DW_OP_lit27:
	case DW_OP_lit28:
	case DW_OP_lit29:
	case DW_OP_lit30:
	case DW_OP_lit31:
	case DW_OP_breg0:
	case DW_OP_breg1:
	case DW_OP_breg2:
	case DW_OP_breg3:
	case DW_OP_breg4:
	case DW_OP_breg5:
	case DW_OP_breg6:
	case DW_OP_breg7:
	case DW_OP_breg8:
	case DW_OP_breg9:
	case DW_OP_breg10:
	case DW_OP_breg11:
	case DW_OP_breg12:
	case DW_OP_breg13:
	case DW_OP_breg14:
	case DW_OP_breg15:
	case DW_OP_breg16:
	case DW_OP_breg17:
	case DW_OP_breg18:
	case DW_OP_breg19:
	case DW_OP_breg20:
	case DW_OP_breg21:
	case DW_OP_breg22:
	case DW_OP_breg23:
	case DW_OP_breg24:
	case DW_OP_breg25:
	case DW_OP_breg26:
	case DW_OP_breg27:
	case DW_OP_breg28:
	case DW_OP_breg29:
	case DW_OP_breg30:
	case DW_OP_breg31:
	case DW_OP_fbreg:
	case DW_OP_push_object_address:
	case DW_OP_call_frame_cfa:
	case DW_OP_GNU_variable_value:
	case DW_OP_GNU_addr_index:
	case DW_OP_GNU_const_index:
	  ++frame_offset_;
	  break;

	/* Operations that consume one value (net) from the stack.  */
	case DW_OP_drop:
	case DW_OP_xderef:
	case DW_OP_and:
	case DW_OP_div:
	case DW_OP_minus:
	case DW_OP_mod:
	case DW_OP_mul:
	case DW_OP_or:
	case DW_OP_plus:
	case DW_OP_shl:
	case DW_OP_shr:
	case DW_OP_shra:
	case DW_OP_xor:
	case DW_OP_bra:
	case DW_OP_eq:
	case DW_OP_ge:
	case DW_OP_gt:
	case DW_OP_le:
	case DW_OP_lt:
	case DW_OP_ne:
	case DW_OP_regx:
	case DW_OP_xderef_size:
	  --frame_offset_;
	  break;

	case DW_OP_call2:
	case DW_OP_call4:
	case DW_OP_call_ref:
	  {
	    /* Account for the callee's net stack effect, recorded when its
	       DIE was created.  Unknown usage means we cannot validate.  */
	    dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
	    int *stack_usage = dwarf_proc_stack_usage_map->get (k: dwarf_proc);

	    if (stack_usage == NULL)
	      return false;
	    frame_offset_ += *stack_usage;
	    break;
	  }

	case DW_OP_implicit_pointer:
	case DW_OP_entry_value:
	case DW_OP_const_type:
	case DW_OP_regval_type:
	case DW_OP_form_tls_address:
	case DW_OP_GNU_push_tls_address:
	case DW_OP_GNU_uninit:
	case DW_OP_GNU_encoded_addr:
	case DW_OP_GNU_implicit_pointer:
	case DW_OP_GNU_entry_value:
	case DW_OP_GNU_const_type:
	case DW_OP_GNU_regval_type:
	case DW_OP_GNU_parameter_ref:
	  /* loc_list_from_tree will probably not output these operations for
	     size functions, so assume they will not appear here.  */
	  /* Fall through...  */

	default:
	  gcc_unreachable ();
	}

      /* Now, follow the control flow (except subroutine calls).  */
      switch (l->dw_loc_opc)
	{
	case DW_OP_bra:
	  /* Conditional branch: recurse down the fall-through edge, then
	     continue on the taken edge below.  */
	  if (!resolve_args_picking_1 (loc: l->dw_loc_next, initial_frame_offset: frame_offset_, dpi,
				       frame_offsets))
	    return false;
	  /* Fall through.  */

	case DW_OP_skip:
	  l = l->dw_loc_oprnd1.v.val_loc;
	  break;

	case DW_OP_stack_value:
	  return true;

	default:
	  l = l->dw_loc_next;
	  break;
	}
    }

  return true;
}
18557 | |
18558 | /* Make a DFS over operations reachable through LOC (i.e. follow branch |
18559 | operations) in order to resolve the operand of DW_OP_pick operations that |
18560 | target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame |
18561 | offset *before* LOC is executed. Return if all relocations were |
18562 | successful. */ |
18563 | |
18564 | static bool |
18565 | resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset, |
18566 | struct dwarf_procedure_info *dpi) |
18567 | { |
18568 | /* Associate to all visited operations the frame offset *before* evaluating |
18569 | this operation. */ |
18570 | hash_map<dw_loc_descr_ref, unsigned> frame_offsets; |
18571 | |
18572 | return |
18573 | resolve_args_picking_1 (loc, initial_frame_offset, dpi, frame_offsets); |
18574 | } |
18575 | |
/* Try to generate a DWARF procedure that computes the same result as FNDECL.
   FNDECL is expected to be a stor-layout "size function": its body must be a
   single RETURN_EXPR of a MODIFY_EXPR into DECL_RESULT.  Return NULL if it
   is not possible.  */

static dw_die_ref
function_to_dwarf_procedure (tree fndecl)
{
  struct dwarf_procedure_info dpi;
  struct loc_descr_context ctx = {
    NULL_TREE,	/* context_type */
    NULL_TREE,	/* base_decl */
    .dpi: &dpi,	/* dpi */
    .placeholder_arg: false,	/* placeholder_arg */
    .placeholder_seen: false,	/* placeholder_seen */
    .strict_signedness: true	/* strict_signedness */
  };
  dw_die_ref dwarf_proc_die;
  tree tree_body = DECL_SAVED_TREE (fndecl);
  dw_loc_descr_ref loc_body, epilogue;

  tree cursor;
  unsigned i;

  /* Do not generate multiple DWARF procedures for the same function
     declaration.  */
  dwarf_proc_die = lookup_decl_die (decl: fndecl);
  if (dwarf_proc_die != NULL)
    return dwarf_proc_die;

  /* DWARF procedures are available starting with the DWARFv3 standard.  */
  if (dwarf_version < 3 && dwarf_strict)
    return NULL;

  /* We handle only functions for which we still have a body, that return a
     supported type and that takes arguments with supported types.  Note that
     there is no point translating functions that return nothing.  */
  if (tree_body == NULL_TREE
      || DECL_RESULT (fndecl) == NULL_TREE
      || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
    return NULL;

  for (cursor = DECL_ARGUMENTS (fndecl);
       cursor != NULL_TREE;
       cursor = TREE_CHAIN (cursor))
    if (!is_handled_procedure_type (TREE_TYPE (cursor)))
      return NULL;

  /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)).  */
  if (TREE_CODE (tree_body) != RETURN_EXPR)
    return NULL;
  tree_body = TREE_OPERAND (tree_body, 0);
  if (TREE_CODE (tree_body) != MODIFY_EXPR
      || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
    return NULL;
  tree_body = TREE_OPERAND (tree_body, 1);

  /* Try to translate the body expression itself.  Note that this will probably
     cause an infinite recursion if its call graph has a cycle.  This is very
     unlikely for size functions, however, so don't bother with such things at
     the moment.  */
  dpi.fndecl = fndecl;
  dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
  loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
  if (!loc_body)
    return NULL;

  /* After evaluating all operands in "loc_body", we should still have on the
     stack all arguments plus the desired function result (top of the stack).
     Generate code in order to keep only the result in our stack frame.
     Each DW_OP_swap/DW_OP_drop pair discards one argument from under the
     result.  */
  epilogue = NULL;
  for (i = 0; i < dpi.args_count; ++i)
    {
      dw_loc_descr_ref op_couple = new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0);
      op_couple->dw_loc_next = new_loc_descr (op: DW_OP_drop, oprnd1: 0, oprnd2: 0);
      op_couple->dw_loc_next->dw_loc_next = epilogue;
      epilogue = op_couple;
    }
  add_loc_descr (list_head: &loc_body, descr: epilogue);
  if (!resolve_args_picking (loc: loc_body, initial_frame_offset: dpi.args_count, dpi: &dpi))
    return NULL;

  /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
     because they are considered useful.  Now there is an epilogue, they are
     not anymore, so give it another try.   */
  loc_descr_without_nops (loc&: loc_body);

  /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
     a DW_TAG_dwarf_procedure, so we may have a conflict, here.  It's unlikely,
     though, given that size functions do not come from source, so they should
     not have a dedicated DW_TAG_subprogram DIE.  */
  dwarf_proc_die
    = new_dwarf_proc_die (location: loc_body, fndecl,
			  parent_die: get_context_die (DECL_CONTEXT (fndecl)));

  /* The called DWARF procedure consumes one stack slot per argument and
     returns one stack slot.  */
  dwarf_proc_stack_usage_map->put (k: dwarf_proc_die, v: 1 - dpi.args_count);

  return dwarf_proc_die;
}
18675 | |
18676 | /* Helper function for loc_list_from_tree. Perform OP binary op, |
18677 | but after converting arguments to type_die, afterwards convert |
18678 | back to unsigned. */ |
18679 | |
18680 | static dw_loc_list_ref |
18681 | typed_binop_from_tree (enum dwarf_location_atom op, tree loc, |
18682 | dw_die_ref type_die, scalar_int_mode mode, |
18683 | struct loc_descr_context *context) |
18684 | { |
18685 | dw_loc_list_ref op0, op1; |
18686 | dw_loc_descr_ref cvt, binop; |
18687 | |
18688 | if (type_die == NULL) |
18689 | return NULL; |
18690 | |
18691 | op0 = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context); |
18692 | op1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context); |
18693 | if (op0 == NULL || op1 == NULL) |
18694 | return NULL; |
18695 | |
18696 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
18697 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
18698 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
18699 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
18700 | add_loc_descr_to_each (list: op0, ref: cvt); |
18701 | |
18702 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
18703 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
18704 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
18705 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
18706 | add_loc_descr_to_each (list: op1, ref: cvt); |
18707 | |
18708 | add_loc_list (ret: &op0, list: op1); |
18709 | if (op0 == NULL) |
18710 | return NULL; |
18711 | |
18712 | binop = new_loc_descr (op, oprnd1: 0, oprnd2: 0); |
18713 | convert_descriptor_to_mode (mode, op: binop); |
18714 | add_loc_descr_to_each (list: op0, ref: binop); |
18715 | |
18716 | return op0; |
18717 | } |
18718 | |
18719 | /* Generate Dwarf location list representing LOC. |
18720 | If WANT_ADDRESS is false, expression computing LOC will be computed |
18721 | If WANT_ADDRESS is 1, expression computing address of LOC will be returned |
18722 | if WANT_ADDRESS is 2, expression computing address useable in location |
18723 | will be returned (i.e. DW_OP_reg can be used |
18724 | to refer to register values). |
18725 | |
18726 | CONTEXT provides information to customize the location descriptions |
18727 | generation. Its context_type field specifies what type is implicitly |
18728 | referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation |
18729 | will not be generated. |
18730 | |
18731 | Its DPI field determines whether we are generating a DWARF expression for a |
18732 | DWARF procedure, so PARM_DECL references are processed specifically. |
18733 | |
18734 | If CONTEXT is NULL, the behavior is the same as if context_type, base_decl |
18735 | and dpi fields were null. */ |
18736 | |
18737 | static dw_loc_list_ref |
18738 | loc_list_from_tree_1 (tree loc, int want_address, |
18739 | struct loc_descr_context *context) |
18740 | { |
18741 | dw_loc_descr_ref ret = NULL, ret1 = NULL; |
18742 | dw_loc_list_ref list_ret = NULL, list_ret1 = NULL; |
18743 | int have_address = 0; |
18744 | enum dwarf_location_atom op; |
18745 | |
18746 | /* ??? Most of the time we do not take proper care for sign/zero |
18747 | extending the values properly. Hopefully this won't be a real |
18748 | problem... */ |
18749 | |
18750 | if (context != NULL |
18751 | && context->base_decl == loc |
18752 | && want_address == 0) |
18753 | { |
18754 | if (dwarf_version >= 3 || !dwarf_strict) |
18755 | return new_loc_list (expr: new_loc_descr (op: DW_OP_push_object_address, oprnd1: 0, oprnd2: 0), |
18756 | NULL, vbegin: 0, NULL, vend: 0, NULL); |
18757 | else |
18758 | return NULL; |
18759 | } |
18760 | |
18761 | switch (TREE_CODE (loc)) |
18762 | { |
18763 | case ERROR_MARK: |
18764 | expansion_failed (expr: loc, NULL_RTX, reason: "ERROR_MARK" ); |
18765 | return 0; |
18766 | |
18767 | case PLACEHOLDER_EXPR: |
18768 | /* This case involves extracting fields from an object to determine the |
18769 | position of other fields. It is supposed to appear only as the first |
18770 | operand of COMPONENT_REF nodes and to reference precisely the type |
18771 | that the context allows or its enclosing type. */ |
18772 | if (context != NULL |
18773 | && (TREE_TYPE (loc) == context->context_type |
18774 | || TREE_TYPE (loc) == TYPE_CONTEXT (context->context_type)) |
18775 | && want_address >= 1) |
18776 | { |
18777 | if (dwarf_version >= 3 || !dwarf_strict) |
18778 | { |
18779 | ret = new_loc_descr (op: DW_OP_push_object_address, oprnd1: 0, oprnd2: 0); |
18780 | have_address = 1; |
18781 | break; |
18782 | } |
18783 | else |
18784 | return NULL; |
18785 | } |
18786 | /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for |
18787 | the single argument passed by consumer. */ |
18788 | else if (context != NULL |
18789 | && context->placeholder_arg |
18790 | && INTEGRAL_TYPE_P (TREE_TYPE (loc)) |
18791 | && want_address == 0) |
18792 | { |
18793 | ret = new_loc_descr (op: DW_OP_pick, oprnd1: 0, oprnd2: 0); |
18794 | ret->frame_offset_rel = 1; |
18795 | context->placeholder_seen = true; |
18796 | break; |
18797 | } |
18798 | else |
18799 | expansion_failed (expr: loc, NULL_RTX, |
18800 | reason: "PLACEHOLDER_EXPR for an unexpected type" ); |
18801 | break; |
18802 | |
18803 | case CALL_EXPR: |
18804 | { |
18805 | tree callee = get_callee_fndecl (loc); |
18806 | dw_die_ref dwarf_proc; |
18807 | |
18808 | if (callee |
18809 | && is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))) |
18810 | && (dwarf_proc = function_to_dwarf_procedure (fndecl: callee))) |
18811 | { |
18812 | /* DWARF procedures are used for size functions, which are built |
18813 | when size expressions contain conditional constructs, so we |
18814 | request strict preservation of signedness for comparisons. */ |
18815 | bool old_strict_signedness; |
18816 | if (context) |
18817 | { |
18818 | old_strict_signedness = context->strict_signedness; |
18819 | context->strict_signedness = true; |
18820 | } |
18821 | |
18822 | /* Evaluate arguments right-to-left so that the first argument |
18823 | will be the top-most one on the stack. */ |
18824 | for (int i = call_expr_nargs (loc) - 1; i >= 0; --i) |
18825 | { |
18826 | tree arg = CALL_EXPR_ARG (loc, i); |
18827 | ret1 = loc_descriptor_from_tree (arg, 0, context); |
18828 | if (!ret1) |
18829 | { |
18830 | expansion_failed (expr: arg, NULL_RTX, reason: "CALL_EXPR argument" ); |
18831 | return NULL; |
18832 | } |
18833 | add_loc_descr (list_head: &ret, descr: ret1); |
18834 | } |
18835 | |
18836 | ret1 = new_loc_descr (op: DW_OP_call4, oprnd1: 0, oprnd2: 0); |
18837 | ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
18838 | ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc; |
18839 | ret1->dw_loc_oprnd1.v.val_die_ref.external = 0; |
18840 | add_loc_descr (list_head: &ret, descr: ret1); |
18841 | if (context) |
18842 | context->strict_signedness = old_strict_signedness; |
18843 | } |
18844 | else |
18845 | expansion_failed (expr: loc, NULL_RTX, reason: "CALL_EXPR target" ); |
18846 | break; |
18847 | } |
18848 | |
18849 | case PREINCREMENT_EXPR: |
18850 | case PREDECREMENT_EXPR: |
18851 | case POSTINCREMENT_EXPR: |
18852 | case POSTDECREMENT_EXPR: |
18853 | expansion_failed (expr: loc, NULL_RTX, reason: "PRE/POST INDCREMENT/DECREMENT" ); |
18854 | /* There are no opcodes for these operations. */ |
18855 | return 0; |
18856 | |
18857 | case ADDR_EXPR: |
18858 | /* If we already want an address, see if there is INDIRECT_REF inside |
18859 | e.g. for &this->field. */ |
18860 | if (want_address) |
18861 | { |
18862 | list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref |
18863 | (loc, toplev: want_address == 2, context); |
18864 | if (list_ret) |
18865 | have_address = 1; |
18866 | else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0)) |
18867 | && (ret = cst_pool_loc_descr (loc))) |
18868 | have_address = 1; |
18869 | } |
18870 | /* Otherwise, process the argument and look for the address. */ |
18871 | if (!list_ret && !ret) |
18872 | list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address: 1, context); |
18873 | else |
18874 | { |
18875 | if (want_address) |
18876 | expansion_failed (expr: loc, NULL_RTX, reason: "need address of ADDR_EXPR" ); |
18877 | return NULL; |
18878 | } |
18879 | break; |
18880 | |
18881 | case VAR_DECL: |
18882 | if (DECL_THREAD_LOCAL_P (loc)) |
18883 | { |
18884 | rtx rtl; |
18885 | enum dwarf_location_atom tls_op; |
18886 | enum dtprel_bool dtprel = dtprel_false; |
18887 | |
18888 | if (targetm.have_tls) |
18889 | { |
18890 | /* If this is not defined, we have no way to emit the |
18891 | data. */ |
18892 | if (!targetm.asm_out.output_dwarf_dtprel) |
18893 | return 0; |
18894 | |
18895 | /* The way DW_OP_GNU_push_tls_address is specified, we |
18896 | can only look up addresses of objects in the current |
18897 | module. We used DW_OP_addr as first op, but that's |
18898 | wrong, because DW_OP_addr is relocated by the debug |
18899 | info consumer, while DW_OP_GNU_push_tls_address |
18900 | operand shouldn't be. */ |
18901 | if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc)) |
18902 | return 0; |
18903 | dtprel = dtprel_true; |
18904 | /* We check for DWARF 5 here because gdb did not implement |
18905 | DW_OP_form_tls_address until after 7.12. */ |
18906 | tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address |
18907 | : DW_OP_GNU_push_tls_address); |
18908 | } |
18909 | else |
18910 | { |
18911 | if (!targetm.emutls.debug_form_tls_address |
18912 | || !(dwarf_version >= 3 || !dwarf_strict)) |
18913 | return 0; |
18914 | /* We stuffed the control variable into the DECL_VALUE_EXPR |
18915 | to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should |
18916 | no longer appear in gimple code. We used the control |
18917 | variable in specific so that we could pick it up here. */ |
18918 | loc = DECL_VALUE_EXPR (loc); |
18919 | tls_op = DW_OP_form_tls_address; |
18920 | } |
18921 | |
18922 | rtl = rtl_for_decl_location (loc); |
18923 | if (rtl == NULL_RTX) |
18924 | return 0; |
18925 | |
18926 | if (!MEM_P (rtl)) |
18927 | return 0; |
18928 | rtl = XEXP (rtl, 0); |
18929 | if (! CONSTANT_P (rtl)) |
18930 | return 0; |
18931 | |
18932 | ret = new_addr_loc_descr (addr: rtl, dtprel); |
18933 | ret1 = new_loc_descr (op: tls_op, oprnd1: 0, oprnd2: 0); |
18934 | add_loc_descr (list_head: &ret, descr: ret1); |
18935 | |
18936 | have_address = 1; |
18937 | break; |
18938 | } |
18939 | /* FALLTHRU */ |
18940 | |
18941 | case PARM_DECL: |
18942 | if (context != NULL && context->dpi != NULL |
18943 | && DECL_CONTEXT (loc) == context->dpi->fndecl) |
18944 | { |
18945 | /* We are generating code for a DWARF procedure and we want to access |
18946 | one of its arguments: find the appropriate argument offset and let |
18947 | the resolve_args_picking pass compute the offset that complies |
18948 | with the stack frame size. */ |
18949 | unsigned i = 0; |
18950 | tree cursor; |
18951 | |
18952 | for (cursor = DECL_ARGUMENTS (context->dpi->fndecl); |
18953 | cursor != NULL_TREE && cursor != loc; |
18954 | cursor = TREE_CHAIN (cursor), ++i) |
18955 | ; |
18956 | /* If we are translating a DWARF procedure, all referenced parameters |
18957 | must belong to the current function. */ |
18958 | gcc_assert (cursor != NULL_TREE); |
18959 | |
18960 | ret = new_loc_descr (op: DW_OP_pick, oprnd1: i, oprnd2: 0); |
18961 | ret->frame_offset_rel = 1; |
18962 | break; |
18963 | } |
18964 | /* FALLTHRU */ |
18965 | |
18966 | case RESULT_DECL: |
18967 | if (DECL_HAS_VALUE_EXPR_P (loc)) |
18968 | { |
18969 | tree value_expr = DECL_VALUE_EXPR (loc); |
18970 | |
18971 | /* Non-local frame structures are DECL_IGNORED_P variables so we need |
18972 | to wait until they get an RTX in order to reference them. */ |
18973 | if (early_dwarf |
18974 | && TREE_CODE (value_expr) == COMPONENT_REF |
18975 | && VAR_P (TREE_OPERAND (value_expr, 0)) |
18976 | && DECL_NONLOCAL_FRAME (TREE_OPERAND (value_expr, 0))) |
18977 | ; |
18978 | else |
18979 | return loc_list_from_tree_1 (loc: value_expr, want_address, context); |
18980 | } |
18981 | |
18982 | /* FALLTHRU */ |
18983 | |
18984 | case FUNCTION_DECL: |
18985 | { |
18986 | rtx rtl; |
18987 | var_loc_list *loc_list = lookup_decl_loc (decl: loc); |
18988 | |
18989 | if (loc_list && loc_list->first) |
18990 | { |
18991 | list_ret = dw_loc_list (loc_list, decl: loc, want_address); |
18992 | have_address = want_address != 0; |
18993 | break; |
18994 | } |
18995 | rtl = rtl_for_decl_location (loc); |
18996 | if (rtl == NULL_RTX) |
18997 | { |
18998 | if (TREE_CODE (loc) != FUNCTION_DECL |
18999 | && early_dwarf |
19000 | && want_address != 1 |
19001 | && ! DECL_IGNORED_P (loc) |
19002 | && (INTEGRAL_TYPE_P (TREE_TYPE (loc)) |
19003 | || POINTER_TYPE_P (TREE_TYPE (loc))) |
19004 | && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc))) |
19005 | <= DWARF2_ADDR_SIZE)) |
19006 | { |
19007 | dw_die_ref ref = lookup_decl_die (decl: loc); |
19008 | if (ref) |
19009 | { |
19010 | ret = new_loc_descr (op: DW_OP_GNU_variable_value, oprnd1: 0, oprnd2: 0); |
19011 | ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
19012 | ret->dw_loc_oprnd1.v.val_die_ref.die = ref; |
19013 | ret->dw_loc_oprnd1.v.val_die_ref.external = 0; |
19014 | } |
19015 | else if (current_function_decl |
19016 | && DECL_CONTEXT (loc) == current_function_decl) |
19017 | { |
19018 | ret = new_loc_descr (op: DW_OP_GNU_variable_value, oprnd1: 0, oprnd2: 0); |
19019 | ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref; |
19020 | ret->dw_loc_oprnd1.v.val_decl_ref = loc; |
19021 | } |
19022 | break; |
19023 | } |
19024 | expansion_failed (expr: loc, NULL_RTX, reason: "DECL has no RTL" ); |
19025 | return 0; |
19026 | } |
19027 | else if (CONST_INT_P (rtl)) |
19028 | { |
19029 | HOST_WIDE_INT val = INTVAL (rtl); |
19030 | if (TYPE_UNSIGNED (TREE_TYPE (loc))) |
19031 | val &= GET_MODE_MASK (DECL_MODE (loc)); |
19032 | ret = int_loc_descriptor (poly_i: val); |
19033 | } |
19034 | else if (GET_CODE (rtl) == CONST_STRING) |
19035 | { |
19036 | expansion_failed (expr: loc, NULL_RTX, reason: "CONST_STRING" ); |
19037 | return 0; |
19038 | } |
19039 | else if (CONSTANT_P (rtl) && const_ok_for_output (rtl)) |
19040 | ret = new_addr_loc_descr (addr: rtl, dtprel: dtprel_false); |
19041 | else |
19042 | { |
19043 | machine_mode mode, mem_mode; |
19044 | |
19045 | /* Certain constructs can only be represented at top-level. */ |
19046 | if (want_address == 2) |
19047 | { |
19048 | ret = loc_descriptor (rtl, VOIDmode, |
19049 | initialized: VAR_INIT_STATUS_INITIALIZED); |
19050 | have_address = 1; |
19051 | } |
19052 | else |
19053 | { |
19054 | mode = GET_MODE (rtl); |
19055 | mem_mode = VOIDmode; |
19056 | if (MEM_P (rtl)) |
19057 | { |
19058 | mem_mode = mode; |
19059 | mode = get_address_mode (mem: rtl); |
19060 | rtl = XEXP (rtl, 0); |
19061 | have_address = 1; |
19062 | } |
19063 | ret = mem_loc_descriptor (rtl, mode, mem_mode, |
19064 | initialized: VAR_INIT_STATUS_INITIALIZED); |
19065 | } |
19066 | if (!ret) |
19067 | expansion_failed (expr: loc, rtl, |
19068 | reason: "failed to produce loc descriptor for rtl" ); |
19069 | } |
19070 | } |
19071 | break; |
19072 | |
19073 | case MEM_REF: |
19074 | if (!integer_zerop (TREE_OPERAND (loc, 1))) |
19075 | { |
19076 | have_address = 1; |
19077 | goto do_plus; |
19078 | } |
19079 | /* Fallthru. */ |
19080 | case INDIRECT_REF: |
19081 | list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address: 0, context); |
19082 | have_address = 1; |
19083 | break; |
19084 | |
19085 | case TARGET_MEM_REF: |
19086 | case SSA_NAME: |
19087 | case DEBUG_EXPR_DECL: |
19088 | return NULL; |
19089 | |
19090 | case COMPOUND_EXPR: |
19091 | return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address, |
19092 | context); |
19093 | |
19094 | CASE_CONVERT: |
19095 | case VIEW_CONVERT_EXPR: |
19096 | case SAVE_EXPR: |
19097 | case MODIFY_EXPR: |
19098 | case NON_LVALUE_EXPR: |
19099 | return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address, |
19100 | context); |
19101 | |
19102 | case COMPONENT_REF: |
19103 | case BIT_FIELD_REF: |
19104 | case ARRAY_REF: |
19105 | case ARRAY_RANGE_REF: |
19106 | case REALPART_EXPR: |
19107 | case IMAGPART_EXPR: |
19108 | { |
19109 | tree obj, offset; |
19110 | poly_int64 bitsize, bitpos, bytepos; |
19111 | machine_mode mode; |
19112 | int unsignedp, reversep, volatilep = 0; |
19113 | |
19114 | obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode, |
19115 | &unsignedp, &reversep, &volatilep); |
19116 | |
19117 | gcc_assert (obj != loc); |
19118 | |
19119 | list_ret = loc_list_from_tree_1 (loc: obj, |
19120 | want_address: want_address == 2 |
19121 | && known_eq (bitpos, 0) |
19122 | && !offset ? 2 : 1, |
19123 | context); |
19124 | /* TODO: We can extract value of the small expression via shifting even |
19125 | for nonzero bitpos. */ |
19126 | if (list_ret == 0) |
19127 | return 0; |
19128 | if (!multiple_p (a: bitpos, BITS_PER_UNIT, multiple: &bytepos) |
19129 | || !multiple_p (a: bitsize, BITS_PER_UNIT)) |
19130 | { |
19131 | expansion_failed (expr: loc, NULL_RTX, |
19132 | reason: "bitfield access" ); |
19133 | return 0; |
19134 | } |
19135 | |
19136 | if (offset != NULL_TREE) |
19137 | { |
19138 | /* Variable offset. */ |
19139 | list_ret1 = loc_list_from_tree_1 (loc: offset, want_address: 0, context); |
19140 | if (list_ret1 == 0) |
19141 | return 0; |
19142 | add_loc_list (ret: &list_ret, list: list_ret1); |
19143 | if (!list_ret) |
19144 | return 0; |
19145 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op: DW_OP_plus, oprnd1: 0, oprnd2: 0)); |
19146 | } |
19147 | |
19148 | HOST_WIDE_INT value; |
19149 | if (bytepos.is_constant (const_value: &value) && value > 0) |
19150 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op: DW_OP_plus_uconst, |
19151 | oprnd1: value, oprnd2: 0)); |
19152 | else if (maybe_ne (a: bytepos, b: 0)) |
19153 | loc_list_plus_const (list_head: list_ret, offset: bytepos); |
19154 | |
19155 | have_address = 1; |
19156 | break; |
19157 | } |
19158 | |
19159 | case INTEGER_CST: |
19160 | if ((want_address || !tree_fits_shwi_p (loc)) |
19161 | && (ret = cst_pool_loc_descr (loc))) |
19162 | have_address = 1; |
19163 | else if (want_address == 2 |
19164 | && tree_fits_shwi_p (loc) |
19165 | && (ret = address_of_int_loc_descriptor |
19166 | (size: int_size_in_bytes (TREE_TYPE (loc)), |
19167 | i: tree_to_shwi (loc)))) |
19168 | have_address = 1; |
19169 | else if (tree_fits_shwi_p (loc)) |
19170 | ret = int_loc_descriptor (poly_i: tree_to_shwi (loc)); |
19171 | else if (tree_fits_uhwi_p (loc)) |
19172 | ret = uint_loc_descriptor (i: tree_to_uhwi (loc)); |
19173 | else |
19174 | { |
19175 | expansion_failed (expr: loc, NULL_RTX, |
19176 | reason: "Integer operand is not host integer" ); |
19177 | return 0; |
19178 | } |
19179 | break; |
19180 | |
19181 | case POLY_INT_CST: |
19182 | { |
19183 | if (want_address) |
19184 | { |
19185 | expansion_failed (expr: loc, NULL_RTX, |
19186 | reason: "constant address with a runtime component" ); |
19187 | return 0; |
19188 | } |
19189 | poly_int64 value; |
19190 | if (!poly_int_tree_p (t: loc, value: &value)) |
19191 | { |
19192 | expansion_failed (expr: loc, NULL_RTX, reason: "constant too big" ); |
19193 | return 0; |
19194 | } |
19195 | ret = int_loc_descriptor (poly_i: value); |
19196 | } |
19197 | break; |
19198 | |
19199 | case CONSTRUCTOR: |
19200 | case REAL_CST: |
19201 | case STRING_CST: |
19202 | case COMPLEX_CST: |
19203 | if ((ret = cst_pool_loc_descr (loc))) |
19204 | have_address = 1; |
19205 | else if (TREE_CODE (loc) == CONSTRUCTOR) |
19206 | { |
19207 | tree type = TREE_TYPE (loc); |
19208 | unsigned HOST_WIDE_INT size = int_size_in_bytes (type); |
19209 | unsigned HOST_WIDE_INT offset = 0; |
19210 | unsigned HOST_WIDE_INT cnt; |
19211 | constructor_elt *ce; |
19212 | |
19213 | if (TREE_CODE (type) == RECORD_TYPE) |
19214 | { |
19215 | /* This is very limited, but it's enough to output |
19216 | pointers to member functions, as long as the |
19217 | referenced function is defined in the current |
19218 | translation unit. */ |
19219 | FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce) |
19220 | { |
19221 | tree val = ce->value; |
19222 | |
19223 | tree field = ce->index; |
19224 | |
19225 | if (val) |
19226 | STRIP_NOPS (val); |
19227 | |
19228 | if (!field || DECL_BIT_FIELD (field)) |
19229 | { |
19230 | expansion_failed (expr: loc, NULL_RTX, |
19231 | reason: "bitfield in record type constructor" ); |
19232 | size = offset = (unsigned HOST_WIDE_INT)-1; |
19233 | ret = NULL; |
19234 | break; |
19235 | } |
19236 | |
19237 | HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field)); |
19238 | unsigned HOST_WIDE_INT pos = int_byte_position (field); |
19239 | gcc_assert (pos + fieldsize <= size); |
19240 | if (pos < offset) |
19241 | { |
19242 | expansion_failed (expr: loc, NULL_RTX, |
19243 | reason: "out-of-order fields in record constructor" ); |
19244 | size = offset = (unsigned HOST_WIDE_INT)-1; |
19245 | ret = NULL; |
19246 | break; |
19247 | } |
19248 | if (pos > offset) |
19249 | { |
19250 | ret1 = new_loc_descr (op: DW_OP_piece, oprnd1: pos - offset, oprnd2: 0); |
19251 | add_loc_descr (list_head: &ret, descr: ret1); |
19252 | offset = pos; |
19253 | } |
19254 | if (val && fieldsize != 0) |
19255 | { |
19256 | ret1 = loc_descriptor_from_tree (val, want_address, context); |
19257 | if (!ret1) |
19258 | { |
19259 | expansion_failed (expr: loc, NULL_RTX, |
19260 | reason: "unsupported expression in field" ); |
19261 | size = offset = (unsigned HOST_WIDE_INT)-1; |
19262 | ret = NULL; |
19263 | break; |
19264 | } |
19265 | add_loc_descr (list_head: &ret, descr: ret1); |
19266 | } |
19267 | if (fieldsize) |
19268 | { |
19269 | ret1 = new_loc_descr (op: DW_OP_piece, oprnd1: fieldsize, oprnd2: 0); |
19270 | add_loc_descr (list_head: &ret, descr: ret1); |
19271 | offset = pos + fieldsize; |
19272 | } |
19273 | } |
19274 | |
19275 | if (offset != size) |
19276 | { |
19277 | ret1 = new_loc_descr (op: DW_OP_piece, oprnd1: size - offset, oprnd2: 0); |
19278 | add_loc_descr (list_head: &ret, descr: ret1); |
19279 | offset = size; |
19280 | } |
19281 | |
19282 | have_address = !!want_address; |
19283 | } |
19284 | else |
19285 | expansion_failed (expr: loc, NULL_RTX, |
19286 | reason: "constructor of non-record type" ); |
19287 | } |
19288 | else |
19289 | /* We can construct small constants here using int_loc_descriptor. */ |
19290 | expansion_failed (expr: loc, NULL_RTX, |
19291 | reason: "constructor or constant not in constant pool" ); |
19292 | break; |
19293 | |
19294 | case TRUTH_AND_EXPR: |
19295 | case TRUTH_ANDIF_EXPR: |
19296 | case BIT_AND_EXPR: |
19297 | op = DW_OP_and; |
19298 | goto do_binop; |
19299 | |
19300 | case TRUTH_XOR_EXPR: |
19301 | case BIT_XOR_EXPR: |
19302 | op = DW_OP_xor; |
19303 | goto do_binop; |
19304 | |
19305 | case TRUTH_OR_EXPR: |
19306 | case TRUTH_ORIF_EXPR: |
19307 | case BIT_IOR_EXPR: |
19308 | op = DW_OP_or; |
19309 | goto do_binop; |
19310 | |
19311 | case EXACT_DIV_EXPR: |
19312 | case FLOOR_DIV_EXPR: |
19313 | case TRUNC_DIV_EXPR: |
19314 | /* Turn a divide by a power of 2 into a shift when possible. */ |
19315 | if (TYPE_UNSIGNED (TREE_TYPE (loc)) |
19316 | && tree_fits_uhwi_p (TREE_OPERAND (loc, 1))) |
19317 | { |
19318 | const int log2 = exact_log2 (x: tree_to_uhwi (TREE_OPERAND (loc, 1))); |
19319 | if (log2 > 0) |
19320 | { |
19321 | list_ret |
19322 | = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address: 0, context); |
19323 | if (list_ret == 0) |
19324 | return 0; |
19325 | |
19326 | add_loc_descr_to_each (list: list_ret, ref: uint_loc_descriptor (i: log2)); |
19327 | add_loc_descr_to_each (list: list_ret, |
19328 | ref: new_loc_descr (op: DW_OP_shr, oprnd1: 0, oprnd2: 0)); |
19329 | break; |
19330 | } |
19331 | } |
19332 | |
19333 | /* fall through */ |
19334 | |
19335 | case CEIL_DIV_EXPR: |
19336 | case ROUND_DIV_EXPR: |
19337 | if (TYPE_UNSIGNED (TREE_TYPE (loc))) |
19338 | { |
19339 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (loc)); |
19340 | scalar_int_mode int_mode; |
19341 | |
19342 | if ((dwarf_strict && dwarf_version < 5) |
19343 | || !is_a <scalar_int_mode> (m: mode, result: &int_mode)) |
19344 | return 0; |
19345 | |
19346 | /* We can use a signed divide if the sign bit is not set. */ |
19347 | if (GET_MODE_SIZE (mode: int_mode) < DWARF2_ADDR_SIZE) |
19348 | { |
19349 | op = DW_OP_div; |
19350 | goto do_binop; |
19351 | } |
19352 | |
19353 | list_ret = typed_binop_from_tree (op: DW_OP_div, loc, |
19354 | type_die: base_type_for_mode (mode: int_mode, unsignedp: 1), |
19355 | mode: int_mode, context); |
19356 | break; |
19357 | } |
19358 | op = DW_OP_div; |
19359 | goto do_binop; |
19360 | |
19361 | case MINUS_EXPR: |
19362 | op = DW_OP_minus; |
19363 | goto do_binop; |
19364 | |
19365 | case FLOOR_MOD_EXPR: |
19366 | case CEIL_MOD_EXPR: |
19367 | case ROUND_MOD_EXPR: |
19368 | case TRUNC_MOD_EXPR: |
19369 | if (TYPE_UNSIGNED (TREE_TYPE (loc))) |
19370 | { |
19371 | op = DW_OP_mod; |
19372 | goto do_binop; |
19373 | } |
19374 | list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address: 0, context); |
19375 | list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address: 0, context); |
19376 | if (list_ret == 0 || list_ret1 == 0) |
19377 | return 0; |
19378 | |
19379 | add_loc_list (ret: &list_ret, list: list_ret1); |
19380 | if (list_ret == 0) |
19381 | return 0; |
19382 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op: DW_OP_over, oprnd1: 0, oprnd2: 0)); |
19383 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op: DW_OP_over, oprnd1: 0, oprnd2: 0)); |
19384 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op: DW_OP_div, oprnd1: 0, oprnd2: 0)); |
19385 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op: DW_OP_mul, oprnd1: 0, oprnd2: 0)); |
19386 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op: DW_OP_minus, oprnd1: 0, oprnd2: 0)); |
19387 | break; |
19388 | |
19389 | case MULT_EXPR: |
19390 | op = DW_OP_mul; |
19391 | goto do_binop; |
19392 | |
19393 | case LSHIFT_EXPR: |
19394 | op = DW_OP_shl; |
19395 | goto do_binop; |
19396 | |
19397 | case RSHIFT_EXPR: |
19398 | op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra); |
19399 | goto do_binop; |
19400 | |
19401 | case POINTER_PLUS_EXPR: |
19402 | case PLUS_EXPR: |
19403 | do_plus: |
19404 | if (tree_fits_shwi_p (TREE_OPERAND (loc, 1))) |
19405 | { |
19406 | /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be |
19407 | smarter to encode their opposite. The DW_OP_plus_uconst operation |
19408 | takes 1 + X bytes, X being the size of the ULEB128 addend. On the |
19409 | other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y |
19410 | bytes, Y being the size of the operation that pushes the opposite |
19411 | of the addend. So let's choose the smallest representation. */ |
19412 | const tree tree_addend = TREE_OPERAND (loc, 1); |
19413 | offset_int wi_addend; |
19414 | HOST_WIDE_INT shwi_addend; |
19415 | dw_loc_descr_ref loc_naddend; |
19416 | |
19417 | list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address: 0, context); |
19418 | if (list_ret == 0) |
19419 | return 0; |
19420 | |
19421 | /* Try to get the literal to push. It is the opposite of the addend, |
19422 | so as we rely on wrapping during DWARF evaluation, first decode |
19423 | the literal as a "DWARF-sized" signed number. */ |
19424 | wi_addend = wi::to_offset (t: tree_addend); |
19425 | wi_addend = wi::sext (x: wi_addend, DWARF2_ADDR_SIZE * 8); |
19426 | shwi_addend = wi_addend.to_shwi (); |
19427 | loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT)) |
19428 | ? int_loc_descriptor (poly_i: -shwi_addend) |
19429 | : NULL; |
19430 | |
19431 | if (loc_naddend != NULL |
19432 | && ((unsigned) size_of_uleb128 (shwi_addend) |
19433 | > size_of_loc_descr (loc: loc_naddend))) |
19434 | { |
19435 | add_loc_descr_to_each (list: list_ret, ref: loc_naddend); |
19436 | add_loc_descr_to_each (list: list_ret, |
19437 | ref: new_loc_descr (op: DW_OP_minus, oprnd1: 0, oprnd2: 0)); |
19438 | } |
19439 | else |
19440 | { |
19441 | for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; ) |
19442 | { |
19443 | loc_naddend = loc_cur; |
19444 | loc_cur = loc_cur->dw_loc_next; |
19445 | ggc_free (loc_naddend); |
19446 | } |
19447 | loc_list_plus_const (list_head: list_ret, offset: wi_addend.to_shwi ()); |
19448 | } |
19449 | break; |
19450 | } |
19451 | |
19452 | op = DW_OP_plus; |
19453 | goto do_binop; |
19454 | |
19455 | case LE_EXPR: |
19456 | op = DW_OP_le; |
19457 | goto do_comp_binop; |
19458 | |
19459 | case GE_EXPR: |
19460 | op = DW_OP_ge; |
19461 | goto do_comp_binop; |
19462 | |
19463 | case LT_EXPR: |
19464 | op = DW_OP_lt; |
19465 | goto do_comp_binop; |
19466 | |
19467 | case GT_EXPR: |
19468 | op = DW_OP_gt; |
19469 | goto do_comp_binop; |
19470 | |
19471 | do_comp_binop: |
19472 | if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0)))) |
19473 | { |
19474 | list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context); |
19475 | list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context); |
19476 | list_ret = loc_list_from_uint_comparison (left: list_ret, right: list_ret1, |
19477 | TREE_CODE (loc)); |
19478 | break; |
19479 | } |
19480 | else |
19481 | goto do_binop; |
19482 | |
19483 | case EQ_EXPR: |
19484 | op = DW_OP_eq; |
19485 | goto do_binop; |
19486 | |
19487 | case NE_EXPR: |
19488 | op = DW_OP_ne; |
19489 | goto do_binop; |
19490 | |
19491 | do_binop: |
19492 | list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address: 0, context); |
19493 | list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address: 0, context); |
19494 | if (list_ret == 0 || list_ret1 == 0) |
19495 | return 0; |
19496 | |
19497 | add_loc_list (ret: &list_ret, list: list_ret1); |
19498 | if (list_ret == 0) |
19499 | return 0; |
19500 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
19501 | break; |
19502 | |
19503 | case TRUTH_NOT_EXPR: |
19504 | list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address: 0, context); |
19505 | if (list_ret == 0) |
19506 | return 0; |
19507 | |
19508 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op: DW_OP_lit0, oprnd1: 0, oprnd2: 0)); |
19509 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op: DW_OP_eq, oprnd1: 0, oprnd2: 0)); |
19510 | break; |
19511 | |
19512 | case BIT_NOT_EXPR: |
19513 | op = DW_OP_not; |
19514 | goto do_unop; |
19515 | |
19516 | case ABS_EXPR: |
19517 | op = DW_OP_abs; |
19518 | goto do_unop; |
19519 | |
19520 | case NEGATE_EXPR: |
19521 | op = DW_OP_neg; |
19522 | goto do_unop; |
19523 | |
19524 | do_unop: |
19525 | list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address: 0, context); |
19526 | if (list_ret == 0) |
19527 | return 0; |
19528 | |
19529 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
19530 | break; |
19531 | |
19532 | case MIN_EXPR: |
19533 | case MAX_EXPR: |
19534 | { |
19535 | const enum tree_code code = |
19536 | TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR; |
19537 | |
19538 | loc = build3 (COND_EXPR, TREE_TYPE (loc), |
19539 | build2 (code, integer_type_node, |
19540 | TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)), |
19541 | TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0)); |
19542 | } |
19543 | |
19544 | /* fall through */ |
19545 | |
19546 | case COND_EXPR: |
19547 | { |
19548 | dw_loc_descr_ref lhs |
19549 | = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context); |
19550 | dw_loc_list_ref rhs |
19551 | = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), want_address: 0, context); |
19552 | dw_loc_descr_ref bra_node, jump_node, tmp; |
19553 | |
19554 | /* DW_OP_bra is branch-on-nonzero so avoid doing useless work. */ |
19555 | if (TREE_CODE (TREE_OPERAND (loc, 0)) == NE_EXPR |
19556 | && integer_zerop (TREE_OPERAND (TREE_OPERAND (loc, 0), 1))) |
19557 | list_ret |
19558 | = loc_list_from_tree_1 (TREE_OPERAND (TREE_OPERAND (loc, 0), 0), |
19559 | want_address: 0, context); |
19560 | /* Likewise, swap the operands for a logically negated condition. */ |
19561 | else if (TREE_CODE (TREE_OPERAND (loc, 0)) == TRUTH_NOT_EXPR) |
19562 | { |
19563 | lhs = loc_descriptor_from_tree (TREE_OPERAND (loc, 2), 0, context); |
19564 | rhs = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address: 0, context); |
19565 | list_ret |
19566 | = loc_list_from_tree_1 (TREE_OPERAND (TREE_OPERAND (loc, 0), 0), |
19567 | want_address: 0, context); |
19568 | } |
19569 | else |
19570 | list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address: 0, context); |
19571 | if (list_ret == 0 || lhs == 0 || rhs == 0) |
19572 | return 0; |
19573 | |
19574 | bra_node = new_loc_descr (op: DW_OP_bra, oprnd1: 0, oprnd2: 0); |
19575 | add_loc_descr_to_each (list: list_ret, ref: bra_node); |
19576 | |
19577 | add_loc_list (ret: &list_ret, list: rhs); |
19578 | jump_node = new_loc_descr (op: DW_OP_skip, oprnd1: 0, oprnd2: 0); |
19579 | add_loc_descr_to_each (list: list_ret, ref: jump_node); |
19580 | |
19581 | add_loc_descr_to_each (list: list_ret, ref: lhs); |
19582 | bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc; |
19583 | bra_node->dw_loc_oprnd1.v.val_loc = lhs; |
19584 | |
19585 | /* ??? Need a node to point the skip at. Use a nop. */ |
19586 | tmp = new_loc_descr (op: DW_OP_nop, oprnd1: 0, oprnd2: 0); |
19587 | add_loc_descr_to_each (list: list_ret, ref: tmp); |
19588 | jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc; |
19589 | jump_node->dw_loc_oprnd1.v.val_loc = tmp; |
19590 | } |
19591 | break; |
19592 | |
19593 | case FIX_TRUNC_EXPR: |
19594 | return 0; |
19595 | |
19596 | case COMPOUND_LITERAL_EXPR: |
19597 | return loc_list_from_tree_1 (COMPOUND_LITERAL_EXPR_DECL (loc), |
19598 | want_address: 0, context); |
19599 | |
19600 | default: |
19601 | /* Leave front-end specific codes as simply unknown. This comes |
19602 | up, for instance, with the C STMT_EXPR. */ |
19603 | if ((unsigned int) TREE_CODE (loc) |
19604 | >= (unsigned int) LAST_AND_UNUSED_TREE_CODE) |
19605 | { |
19606 | expansion_failed (expr: loc, NULL_RTX, |
19607 | reason: "language specific tree node" ); |
19608 | return 0; |
19609 | } |
19610 | |
19611 | /* Otherwise this is a generic code; we should just lists all of |
19612 | these explicitly. We forgot one. */ |
19613 | if (flag_checking) |
19614 | gcc_unreachable (); |
19615 | |
19616 | /* In a release build, we want to degrade gracefully: better to |
19617 | generate incomplete debugging information than to crash. */ |
19618 | return NULL; |
19619 | } |
19620 | |
19621 | if (!ret && !list_ret) |
19622 | return 0; |
19623 | |
19624 | if (want_address == 2 && !have_address |
19625 | && (dwarf_version >= 4 || !dwarf_strict)) |
19626 | { |
19627 | if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE) |
19628 | { |
19629 | expansion_failed (expr: loc, NULL_RTX, |
19630 | reason: "DWARF address size mismatch" ); |
19631 | return 0; |
19632 | } |
19633 | if (ret) |
19634 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_stack_value, oprnd1: 0, oprnd2: 0)); |
19635 | else |
19636 | add_loc_descr_to_each (list: list_ret, |
19637 | ref: new_loc_descr (op: DW_OP_stack_value, oprnd1: 0, oprnd2: 0)); |
19638 | have_address = 1; |
19639 | } |
19640 | /* Show if we can't fill the request for an address. */ |
19641 | if (want_address && !have_address) |
19642 | { |
19643 | expansion_failed (expr: loc, NULL_RTX, |
19644 | reason: "Want address and only have value" ); |
19645 | return 0; |
19646 | } |
19647 | |
19648 | gcc_assert (!ret || !list_ret); |
19649 | |
19650 | /* If we've got an address and don't want one, dereference. */ |
19651 | if (!want_address && have_address) |
19652 | { |
19653 | HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc)); |
19654 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (loc)); |
19655 | scalar_int_mode int_mode; |
19656 | dw_die_ref type_die; |
19657 | dw_loc_descr_ref deref; |
19658 | |
19659 | /* If the size is greater than DWARF2_ADDR_SIZE, bail out. */ |
19660 | if (size > DWARF2_ADDR_SIZE || size == -1) |
19661 | { |
19662 | expansion_failed (expr: loc, NULL_RTX, |
19663 | reason: "DWARF address size mismatch" ); |
19664 | return 0; |
19665 | } |
19666 | |
19667 | /* If it is equal to DWARF2_ADDR_SIZE, extension does not matter. */ |
19668 | else if (size == DWARF2_ADDR_SIZE) |
19669 | deref = new_loc_descr (op: DW_OP_deref, oprnd1: size, oprnd2: 0); |
19670 | |
19671 | /* If it is lower than DWARF2_ADDR_SIZE, DW_OP_deref_size will zero- |
19672 | extend the value, which is really OK for unsigned types only. */ |
19673 | else if (!(context && context->strict_signedness) |
19674 | || TYPE_UNSIGNED (TREE_TYPE (loc)) |
19675 | || (dwarf_strict && dwarf_version < 5) |
19676 | || !is_a <scalar_int_mode> (m: mode, result: &int_mode) |
19677 | || !(type_die = base_type_for_mode (mode, unsignedp: false))) |
19678 | deref = new_loc_descr (op: DW_OP_deref_size, oprnd1: size, oprnd2: 0); |
19679 | |
19680 | /* Use DW_OP_deref_type for signed integral types if possible, but |
19681 | convert back to the generic type to avoid type mismatches later. */ |
19682 | else |
19683 | { |
19684 | deref = new_loc_descr (op: dwarf_OP (op: DW_OP_deref_type), oprnd1: size, oprnd2: 0); |
19685 | deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref; |
19686 | deref->dw_loc_oprnd2.v.val_die_ref.die = type_die; |
19687 | deref->dw_loc_oprnd2.v.val_die_ref.external = 0; |
19688 | add_loc_descr (list_head: &deref, |
19689 | descr: new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0)); |
19690 | } |
19691 | |
19692 | if (ret) |
19693 | add_loc_descr (list_head: &ret, descr: deref); |
19694 | else |
19695 | add_loc_descr_to_each (list: list_ret, ref: deref); |
19696 | } |
19697 | |
19698 | if (ret) |
19699 | list_ret = new_loc_list (expr: ret, NULL, vbegin: 0, NULL, vend: 0, NULL); |
19700 | |
19701 | return list_ret; |
19702 | } |
19703 | |
19704 | /* Likewise, but strip useless DW_OP_nop operations in the resulting |
19705 | expressions. */ |
19706 | |
19707 | static dw_loc_list_ref |
19708 | loc_list_from_tree (tree loc, int want_address, |
19709 | struct loc_descr_context *context) |
19710 | { |
19711 | dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context); |
19712 | |
19713 | for (dw_loc_list_ref loc_cur = result; |
19714 | loc_cur != NULL; loc_cur = loc_cur->dw_loc_next) |
19715 | loc_descr_without_nops (loc&: loc_cur->expr); |
19716 | return result; |
19717 | } |
19718 | |
19719 | /* Same as above but return only single location expression. */ |
19720 | static dw_loc_descr_ref |
19721 | loc_descriptor_from_tree (tree loc, int want_address, |
19722 | struct loc_descr_context *context) |
19723 | { |
19724 | dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context); |
19725 | if (!ret) |
19726 | return NULL; |
19727 | if (ret->dw_loc_next) |
19728 | { |
19729 | expansion_failed (expr: loc, NULL_RTX, |
19730 | reason: "Location list where only loc descriptor needed" ); |
19731 | return NULL; |
19732 | } |
19733 | return ret->expr; |
19734 | } |
19735 | |
19736 | /* Given a pointer to what is assumed to be a FIELD_DECL node, return a |
19737 | pointer to the declared type for the relevant field variable, or return |
19738 | `integer_type_node' if the given node turns out to be an |
19739 | ERROR_MARK node. */ |
19740 | |
19741 | static inline tree |
19742 | field_type (const_tree decl) |
19743 | { |
19744 | tree type; |
19745 | |
19746 | if (TREE_CODE (decl) == ERROR_MARK) |
19747 | return integer_type_node; |
19748 | |
19749 | type = DECL_BIT_FIELD_TYPE (decl); |
19750 | if (type == NULL_TREE) |
19751 | type = TREE_TYPE (decl); |
19752 | |
19753 | return type; |
19754 | } |
19755 | |
19756 | /* Given a pointer to a tree node, return the alignment in bits for |
19757 | it, or else return BITS_PER_WORD if the node actually turns out to |
19758 | be an ERROR_MARK node. */ |
19759 | |
19760 | static inline unsigned |
19761 | simple_type_align_in_bits (const_tree type) |
19762 | { |
19763 | return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD; |
19764 | } |
19765 | |
19766 | static inline unsigned |
19767 | simple_decl_align_in_bits (const_tree decl) |
19768 | { |
19769 | return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD; |
19770 | } |
19771 | |
19772 | /* Return the result of rounding T up to ALIGN. */ |
19773 | |
19774 | static inline offset_int |
19775 | round_up_to_align (const offset_int &t, unsigned int align) |
19776 | { |
19777 | return wi::udiv_trunc (x: t + align - 1, y: align) * align; |
19778 | } |
19779 | |
/* Helper structure for RECORD_TYPE processing.  Carried through the
   FIELD_DECL processing routines (e.g. field_byte_offset) so they can
   relate a field to the record it ultimately belongs to.  */
struct vlr_context
{
  /* Root RECORD_TYPE.  It is needed to generate data member location
     descriptions in variable-length records (VLR), but also to cope with
     variants, which are composed of nested structures multiplexed with
     QUAL_UNION_TYPE nodes.  Each time such a structure is passed to a
     function processing a FIELD_DECL, it is required to be non null.  */
  tree struct_type;

  /* When generating a variant part in a RECORD_TYPE (i.e. a nested
     QUAL_UNION_TYPE), this holds an expression that computes the offset for
     this variant part as part of the root record (in storage units).  For
     regular records, it must be NULL_TREE.  */
  tree variant_part_offset;
};
19796 | |
19797 | /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest |
19798 | addressed byte of the "containing object" for the given FIELD_DECL. If |
19799 | possible, return a native constant through CST_OFFSET (in which case NULL is |
19800 | returned); otherwise return a DWARF expression that computes the offset. |
19801 | |
19802 | Set *CST_OFFSET to 0 and return NULL if we are unable to determine what |
19803 | that offset is, either because the argument turns out to be a pointer to an |
19804 | ERROR_MARK node, or because the offset expression is too complex for us. |
19805 | |
19806 | CTX is required: see the comment for VLR_CONTEXT. */ |
19807 | |
static dw_loc_descr_ref
field_byte_offset (const_tree decl, struct vlr_context *ctx,
		   HOST_WIDE_INT *cst_offset)
{
  tree tree_result;
  dw_loc_list_ref loc_result;

  /* Default to "no constant offset known"; overwritten on success.  */
  *cst_offset = 0;

  if (TREE_CODE (decl) == ERROR_MARK)
    return NULL;
  else
    gcc_assert (TREE_CODE (decl) == FIELD_DECL);

  /* We cannot handle variable bit offsets at the moment, so abort if it's the
     case.  */
  if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
    return NULL;

  /* We used to handle only constant offsets in all cases.  Now, we handle
     properly dynamic byte offsets only when PCC bitfield type doesn't
     matter.  */
  if (PCC_BITFIELD_TYPE_MATTERS
      && DECL_BIT_FIELD_TYPE (decl)
      && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
    {
      offset_int object_offset_in_bits;
      offset_int object_offset_in_bytes;
      offset_int bitpos_int;
      tree type;
      tree field_size_tree;
      offset_int deepest_bitpos;
      offset_int field_size_in_bits;
      unsigned int type_align_in_bits;
      unsigned int decl_align_in_bits;
      offset_int type_size_in_bits;

      bitpos_int = wi::to_offset (t: bit_position (decl));
      type = field_type (decl);
      type_size_in_bits = offset_int_type_size_in_bits (type);
      type_align_in_bits = simple_type_align_in_bits (type);

      field_size_tree = DECL_SIZE (decl);

      /* The size could be unspecified if there was an error, or for
	 a flexible array member.  */
      if (!field_size_tree)
	field_size_tree = bitsize_zero_node;

      /* If the size of the field is not constant, use the type size.  */
      if (TREE_CODE (field_size_tree) == INTEGER_CST)
	field_size_in_bits = wi::to_offset (t: field_size_tree);
      else
	field_size_in_bits = type_size_in_bits;

      decl_align_in_bits = simple_decl_align_in_bits (decl);

      /* The GCC front-end doesn't make any attempt to keep track of the
	 starting bit offset (relative to the start of the containing
	 structure type) of the hypothetical "containing object" for a
	 bit-field.  Thus, when computing the byte offset value for the
	 start of the "containing object" of a bit-field, we must deduce
	 this information on our own.  This can be rather tricky to do in
	 some cases.  For example, handling the following structure type
	 definition when compiling for an i386/i486 target (which only
	 aligns long long's to 32-bit boundaries) can be very tricky:

	     struct S { int field1; long long field2:31; };

	 Fortunately, there is a simple rule-of-thumb which can be used
	 in such cases.  When compiling for an i386/i486, GCC will
	 allocate 8 bytes for the structure shown above.  It decides to
	 do this based upon one simple rule for bit-field allocation.
	 GCC allocates each "containing object" for each bit-field at
	 the first (i.e. lowest addressed) legitimate alignment boundary
	 (based upon the required minimum alignment for the declared
	 type of the field) which it can possibly use, subject to the
	 condition that there is still enough available space remaining
	 in the containing object (when allocated at the selected point)
	 to fully accommodate all of the bits of the bit-field itself.

	 This simple rule makes it obvious why GCC allocates 8 bytes for
	 each object of the structure type shown above.  When looking
	 for a place to allocate the "containing object" for `field2',
	 the compiler simply tries to allocate a 64-bit "containing
	 object" at each successive 32-bit boundary (starting at zero)
	 until it finds a place to allocate that 64- bit field such that
	 at least 31 contiguous (and previously unallocated) bits remain
	 within that selected 64 bit field.  (As it turns out, for the
	 example above, the compiler finds it is OK to allocate the
	 "containing object" 64-bit field at bit-offset zero within the
	 structure type.)

	 Here we attempt to work backwards from the limited set of facts
	 we're given, and we try to deduce from those facts, where GCC
	 must have believed that the containing object started (within
	 the structure type).  The value we deduce is then used (by the
	 callers of this routine) to generate DW_AT_location and
	 DW_AT_bit_offset attributes for fields (both bit-fields and, in
	 the case of DW_AT_location, regular fields as well).  */

      /* Figure out the bit-distance from the start of the structure to
	 the "deepest" bit of the bit-field.  */
      deepest_bitpos = bitpos_int + field_size_in_bits;

      /* This is the tricky part.  Use some fancy footwork to deduce
	 where the lowest addressed bit of the containing object must
	 be.  */
      object_offset_in_bits = deepest_bitpos - type_size_in_bits;

      /* Round up to type_align by default.  This works best for
	 bitfields.  */
      object_offset_in_bits
	= round_up_to_align (t: object_offset_in_bits, align: type_align_in_bits);

      /* If rounding to the type alignment pushed the containing object
	 past the field itself, retry with the decl alignment instead.  */
      if (wi::gtu_p (x: object_offset_in_bits, y: bitpos_int))
	{
	  object_offset_in_bits = deepest_bitpos - type_size_in_bits;

	  /* Round up to decl_align instead.  */
	  object_offset_in_bits
	    = round_up_to_align (t: object_offset_in_bits, align: decl_align_in_bits);
	}

      /* Convert bits to bytes.  */
      object_offset_in_bytes
	= wi::lrshift (x: object_offset_in_bits, LOG2_BITS_PER_UNIT);
      if (ctx->variant_part_offset == NULL_TREE)
	{
	  *cst_offset = object_offset_in_bytes.to_shwi ();
	  return NULL;
	}
      tree_result = wide_int_to_tree (sizetype, cst: object_offset_in_bytes);
    }
  else
    tree_result = byte_position (decl);

  /* Inside a variant part, the field offset is relative to the variant,
     so add the variant's own offset within the root record.  */
  if (ctx->variant_part_offset != NULL_TREE)
    tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
			       ctx->variant_part_offset, tree_result);

  /* If the byte offset is a constant, it's simplier to handle a native
     constant rather than a DWARF expression.  */
  if (TREE_CODE (tree_result) == INTEGER_CST)
    {
      *cst_offset = wi::to_offset (t: tree_result).to_shwi ();
      return NULL;
    }

  struct loc_descr_context loc_ctx = {
    .context_type: ctx->struct_type, /* context_type */
    NULL_TREE,	/* base_decl */
    NULL,	/* dpi */
    .placeholder_arg: false,	/* placeholder_arg */
    .placeholder_seen: false,	/* placeholder_seen */
    .strict_signedness: false	/* strict_signedness */
  };
  loc_result = loc_list_from_tree (loc: tree_result, want_address: 0, context: &loc_ctx);

  /* We want a DWARF expression: abort if we only have a location list with
     multiple elements.  */
  if (!loc_result || !single_element_loc_list_p (list: loc_result))
    return NULL;
  else
    return loc_result->expr;
}
19973 | |
19974 | /* The following routines define various Dwarf attributes and any data |
19975 | associated with them. */ |
19976 | |
19977 | /* Add a location description attribute value to a DIE. |
19978 | |
19979 | This emits location attributes suitable for whole variables and |
19980 | whole parameters. Note that the location attributes for struct fields are |
19981 | generated by the routine `data_member_location_attribute' below. */ |
19982 | |
19983 | static inline void |
19984 | add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind, |
19985 | dw_loc_list_ref descr) |
19986 | { |
19987 | bool check_no_locviews = true; |
19988 | if (descr == 0) |
19989 | return; |
19990 | if (single_element_loc_list_p (list: descr)) |
19991 | add_AT_loc (die, attr_kind, loc: descr->expr); |
19992 | else |
19993 | { |
19994 | add_AT_loc_list (die, attr_kind, loc_list: descr); |
19995 | gcc_assert (descr->ll_symbol); |
19996 | if (attr_kind == DW_AT_location && descr->vl_symbol |
19997 | && dwarf2out_locviews_in_attribute ()) |
19998 | { |
19999 | add_AT_view_list (die, attr_kind: DW_AT_GNU_locviews); |
20000 | check_no_locviews = false; |
20001 | } |
20002 | } |
20003 | |
20004 | if (check_no_locviews) |
20005 | gcc_assert (!get_AT (die, DW_AT_GNU_locviews)); |
20006 | } |
20007 | |
20008 | /* Add DW_AT_accessibility attribute to DIE if needed. */ |
20009 | |
20010 | static void |
20011 | add_accessibility_attribute (dw_die_ref die, tree decl) |
20012 | { |
20013 | /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type |
20014 | children, otherwise the default is DW_ACCESS_public. In DWARF2 |
20015 | the default has always been DW_ACCESS_public. */ |
20016 | if (TREE_PROTECTED (decl)) |
20017 | add_AT_unsigned (die, attr_kind: DW_AT_accessibility, unsigned_val: DW_ACCESS_protected); |
20018 | else if (TREE_PRIVATE (decl)) |
20019 | { |
20020 | if (dwarf_version == 2 |
20021 | || die->die_parent == NULL |
20022 | || die->die_parent->die_tag != DW_TAG_class_type) |
20023 | add_AT_unsigned (die, attr_kind: DW_AT_accessibility, unsigned_val: DW_ACCESS_private); |
20024 | } |
20025 | else if (dwarf_version > 2 |
20026 | && die->die_parent |
20027 | && die->die_parent->die_tag == DW_TAG_class_type) |
20028 | add_AT_unsigned (die, attr_kind: DW_AT_accessibility, unsigned_val: DW_ACCESS_public); |
20029 | } |
20030 | |
20031 | /* Attach the specialized form of location attribute used for data members of |
20032 | struct and union types. In the special case of a FIELD_DECL node which |
20033 | represents a bit-field, the "offset" part of this special location |
20034 | descriptor must indicate the distance in bytes from the lowest-addressed |
20035 | byte of the containing struct or union type to the lowest-addressed byte of |
20036 | the "containing object" for the bit-field. (See the `field_byte_offset' |
20037 | function above). |
20038 | |
20039 | For any given bit-field, the "containing object" is a hypothetical object |
20040 | (of some integral or enum type) within which the given bit-field lives. The |
20041 | type of this hypothetical "containing object" is always the same as the |
20042 | declared type of the individual bit-field itself (for GCC anyway... the |
20043 | DWARF spec doesn't actually mandate this). Note that it is the size (in |
20044 | bytes) of the hypothetical "containing object" which will be given in the |
20045 | DW_AT_byte_size attribute for this bit-field. (See the |
20046 | `byte_size_attribute' function below.) It is also used when calculating the |
20047 | value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute' |
20048 | function below.) |
20049 | |
20050 | CTX is required: see the comment for VLR_CONTEXT. */ |
20051 | |
static void
add_data_member_location_attribute (dw_die_ref die,
				    tree decl,
				    struct vlr_context *ctx)
{
  /* Constant byte offset, when one can be computed.  */
  HOST_WIDE_INT offset;
  /* DWARF expression for a dynamic offset, or NULL when OFFSET is used.  */
  dw_loc_descr_ref loc_descr = 0;

  /* DECL is either a TREE_BINFO (base-class inheritance) or a
     FIELD_DECL (regular data member).  */
  if (TREE_CODE (decl) == TREE_BINFO)
    {
      /* We're working on the TAG_inheritance for a base class.  */
      if (BINFO_VIRTUAL_P (decl) && is_cxx ())
	{
	  /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
	     aren't at a fixed offset from all (sub)objects of the same
	     type.  We need to extract the appropriate offset from our
	     vtable.  The following dwarf expression means

	       BaseAddr = ObAddr + *((*ObAddr) - Offset)

	     This is specific to the V3 ABI, of course.  */

	  dw_loc_descr_ref tmp;

	  /* Make a copy of the object address.  */
	  tmp = new_loc_descr (op: DW_OP_dup, oprnd1: 0, oprnd2: 0);
	  add_loc_descr (list_head: &loc_descr, descr: tmp);

	  /* Extract the vtable address.  */
	  tmp = new_loc_descr (op: DW_OP_deref, oprnd1: 0, oprnd2: 0);
	  add_loc_descr (list_head: &loc_descr, descr: tmp);

	  /* Calculate the address of the offset.  */
	  offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
	  gcc_assert (offset < 0);

	  tmp = int_loc_descriptor (poly_i: -offset);
	  add_loc_descr (list_head: &loc_descr, descr: tmp);
	  tmp = new_loc_descr (op: DW_OP_minus, oprnd1: 0, oprnd2: 0);
	  add_loc_descr (list_head: &loc_descr, descr: tmp);

	  /* Extract the offset.  */
	  tmp = new_loc_descr (op: DW_OP_deref, oprnd1: 0, oprnd2: 0);
	  add_loc_descr (list_head: &loc_descr, descr: tmp);

	  /* Add it to the object address.  */
	  tmp = new_loc_descr (op: DW_OP_plus, oprnd1: 0, oprnd2: 0);
	  add_loc_descr (list_head: &loc_descr, descr: tmp);
	}
      else
	offset = tree_to_shwi (BINFO_OFFSET (decl));
    }
  else
    {
      loc_descr = field_byte_offset (decl, ctx, cst_offset: &offset);

      if (!loc_descr)
	;

      /* If loc_descr is available, then we know the offset is dynamic.  */
      else if (gnat_encodings == DWARF_GNAT_ENCODINGS_ALL)
	{
	  /* GNAT encodings cannot represent dynamic offsets; drop the
	     expression and emit a zero constant instead.  */
	  loc_descr = NULL;
	  offset = 0;
	}

      /* Data member location evaluation starts with the base address on the
	 stack.  Compute the field offset and add it to this base address.  */
      else
	add_loc_descr (list_head: &loc_descr, descr: new_loc_descr (op: DW_OP_plus, oprnd1: 0, oprnd2: 0));
    }

  /* A constant offset: pick the most compact encoding the DWARF
     version allows.  */
  if (!loc_descr)
    {
      /* While DW_AT_data_bit_offset has been added already in DWARF4,
	 e.g. GDB only added support to it in November 2016.  For DWARF5
	 we need newer debug info consumers anyway.  We might change this
	 to dwarf_version >= 4 once most consumers catched up.  */
      if (dwarf_version >= 5
	  && TREE_CODE (decl) == FIELD_DECL
	  && DECL_BIT_FIELD_TYPE (decl)
	  && (ctx->variant_part_offset == NULL_TREE
	      || TREE_CODE (ctx->variant_part_offset) == INTEGER_CST))
	{
	  tree off = bit_position (decl);
	  if (ctx->variant_part_offset)
	    off = bit_from_pos (ctx->variant_part_offset, off);
	  if (tree_fits_uhwi_p (off) && get_AT (die, attr_kind: DW_AT_bit_size))
	    {
	      /* DW_AT_data_bit_offset supersedes the DWARF2/3
		 byte_size/bit_offset pair for bit-fields.  */
	      remove_AT (die, attr_kind: DW_AT_byte_size);
	      remove_AT (die, attr_kind: DW_AT_bit_offset);
	      add_AT_unsigned (die, attr_kind: DW_AT_data_bit_offset, unsigned_val: tree_to_uhwi (off));
	      return;
	    }
	}
      if (dwarf_version > 2)
	{
	  /* Don't need to output a location expression, just the constant. */
	  if (offset < 0)
	    add_AT_int (die, attr_kind: DW_AT_data_member_location, int_val: offset);
	  else
	    add_AT_unsigned (die, attr_kind: DW_AT_data_member_location, unsigned_val: offset);
	  return;
	}
      else
	{
	  enum dwarf_location_atom op;

	  /* The DWARF2 standard says that we should assume that the structure
	     address is already on the stack, so we can specify a structure
	     field address by using DW_OP_plus_uconst.  */
	  op = DW_OP_plus_uconst;
	  loc_descr = new_loc_descr (op, oprnd1: offset, oprnd2: 0);
	}
    }

  add_AT_loc (die, attr_kind: DW_AT_data_member_location, loc: loc_descr);
}
20170 | |
20171 | /* Writes integer values to dw_vec_const array. */ |
20172 | |
20173 | static void |
20174 | insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest) |
20175 | { |
20176 | while (size != 0) |
20177 | { |
20178 | *dest++ = val & 0xff; |
20179 | val >>= 8; |
20180 | --size; |
20181 | } |
20182 | } |
20183 | |
20184 | /* Reads integers from dw_vec_const array. Inverse of insert_int. */ |
20185 | |
20186 | static HOST_WIDE_INT |
20187 | (const unsigned char *src, unsigned int size) |
20188 | { |
20189 | HOST_WIDE_INT val = 0; |
20190 | |
20191 | src += size; |
20192 | while (size != 0) |
20193 | { |
20194 | val <<= 8; |
20195 | val |= *--src & 0xff; |
20196 | --size; |
20197 | } |
20198 | return val; |
20199 | } |
20200 | |
20201 | /* Writes wide_int values to dw_vec_const array. */ |
20202 | |
static void
insert_wide_int (const wide_int_ref &val, unsigned char *dest, int elt_size)
{
  int i;

  /* Fast path: the whole element fits in a single host wide integer.  */
  if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
    {
      insert_int (val: (HOST_WIDE_INT) val.elt (i: 0), size: elt_size, dest);
      return;
    }

  /* We'd have to extend this code to support odd sizes.  */
  gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);

  /* Number of host-wide-int chunks making up the element.  */
  int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);

  /* Emit the chunks in target word order: most-significant chunk first
     on big-endian-word targets, least-significant first otherwise.  */
  if (WORDS_BIG_ENDIAN)
    for (i = n - 1; i >= 0; i--)
      {
	insert_int (val: (HOST_WIDE_INT) val.elt (i), size: sizeof (HOST_WIDE_INT), dest);
	dest += sizeof (HOST_WIDE_INT);
      }
  else
    for (i = 0; i < n; i++)
      {
	insert_int (val: (HOST_WIDE_INT) val.elt (i), size: sizeof (HOST_WIDE_INT), dest);
	dest += sizeof (HOST_WIDE_INT);
      }
}
20232 | |
20233 | /* Writes floating point values to dw_vec_const array. */ |
20234 | |
20235 | static unsigned |
20236 | insert_float (const_rtx rtl, unsigned char *array) |
20237 | { |
20238 | long val[4]; |
20239 | int i; |
20240 | scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl)); |
20241 | |
20242 | real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode); |
20243 | |
20244 | /* real_to_target puts 32-bit pieces in each long. Pack them. */ |
20245 | if (GET_MODE_SIZE (mode) < 4) |
20246 | { |
20247 | gcc_assert (GET_MODE_SIZE (mode) == 2); |
20248 | insert_int (val: val[0], size: 2, dest: array); |
20249 | return 2; |
20250 | } |
20251 | |
20252 | for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++) |
20253 | { |
20254 | insert_int (val: val[i], size: 4, dest: array); |
20255 | array += 4; |
20256 | } |
20257 | return 4; |
20258 | } |
20259 | |
20260 | /* Attach a DW_AT_const_value attribute for a variable or a parameter which |
20261 | does not have a "location" either in memory or in a register. These |
20262 | things can arise in GNU C when a constant is passed as an actual parameter |
20263 | to an inlined function. They can also arise in C++ where declared |
20264 | constants do not necessarily get memory "homes". */ |
20265 | |
static bool
add_const_value_attribute (dw_die_ref die, machine_mode mode, rtx rtl)
{
  scalar_mode int_mode;

  /* Returns true when an attribute was attached, false when this kind
     of constant cannot be represented.  */
  switch (GET_CODE (rtl))
    {
    case CONST_INT:
      {
	HOST_WIDE_INT val = INTVAL (rtl);

	/* Pick the signed or unsigned form so the value round-trips.  */
	if (val < 0)
	  add_AT_int (die, attr_kind: DW_AT_const_value, int_val: val);
	else
	  add_AT_unsigned (die, attr_kind: DW_AT_const_value, unsigned_val: (unsigned HOST_WIDE_INT) val);
      }
      return true;

    case CONST_WIDE_INT:
      /* Only handle precisions that are a whole number of host wide
	 ints; anything else is left unrepresented.  */
      if (is_int_mode (mode, int_mode: &int_mode)
	  && (GET_MODE_PRECISION (mode: int_mode)
	      & (HOST_BITS_PER_WIDE_INT - 1)) == 0)
	{
	  add_AT_wide (die, attr_kind: DW_AT_const_value, w: rtx_mode_t (rtl, int_mode));
	  return true;
	}
      return false;

    case CONST_DOUBLE:
      /* Note that a CONST_DOUBLE rtx could represent either an integer or a
	 floating-point constant.  A CONST_DOUBLE is used whenever the
	 constant requires more than one word in order to be adequately
	 represented.  */
      if (TARGET_SUPPORTS_WIDE_INT == 0
	  && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
	add_AT_double (die, attr_kind: DW_AT_const_value,
		       CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
      else
	{
	  /* Floating-point: serialize the target representation into a
	     byte vector.  */
	  scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
	  unsigned int length = GET_MODE_SIZE (mode);
	  unsigned char *array = ggc_vec_alloc<unsigned char> (c: length);
	  unsigned int elt_size = insert_float (rtl, array);

	  add_AT_vec (die, attr_kind: DW_AT_const_value, length: length / elt_size, elt_size,
		      array);
	}
      return true;

    case CONST_VECTOR:
      {
	/* Variable-length vectors cannot be serialized.  */
	unsigned int length;
	if (!CONST_VECTOR_NUNITS (rtl).is_constant (const_value: &length))
	  return false;

	machine_mode mode = GET_MODE (rtl);
	/* The combination of a length and byte elt_size doesn't extend
	   naturally to boolean vectors, where several elements are packed
	   into the same byte.  */
	if (GET_MODE_CLASS (mode) == MODE_VECTOR_BOOL)
	  return false;

	unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
	unsigned char *array
	  = ggc_vec_alloc<unsigned char> (c: length * elt_size);
	unsigned int i;
	unsigned char *p;
	machine_mode imode = GET_MODE_INNER (mode);

	/* Serialize each element according to the vector's element
	   class.  */
	switch (GET_MODE_CLASS (mode))
	  {
	  case MODE_VECTOR_INT:
	    for (i = 0, p = array; i < length; i++, p += elt_size)
	      {
		rtx elt = CONST_VECTOR_ELT (rtl, i);
		insert_wide_int (val: rtx_mode_t (elt, imode), dest: p, elt_size);
	      }
	    break;

	  case MODE_VECTOR_FLOAT:
	    for (i = 0, p = array; i < length; i++, p += elt_size)
	      {
		rtx elt = CONST_VECTOR_ELT (rtl, i);
		insert_float (rtl: elt, array: p);
	      }
	    break;

	  default:
	    gcc_unreachable ();
	  }

	add_AT_vec (die, attr_kind: DW_AT_const_value, length, elt_size, array);
      }
      return true;

    case CONST_STRING:
      if (dwarf_version >= 4 || !dwarf_strict)
	{
	  dw_loc_descr_ref loc_result;
	  resolve_one_addr (&rtl);
	  /* The rtl_addr label is also the target of gotos from the
	     SYMBOL_REF/LABEL_REF cases below: emit the address as a
	     DW_OP_stack_value location.  */
	rtl_addr:
	  loc_result = new_addr_loc_descr (addr: rtl, dtprel: dtprel_false);
	  add_loc_descr (list_head: &loc_result, descr: new_loc_descr (op: DW_OP_stack_value, oprnd1: 0, oprnd2: 0));
	  add_AT_loc (die, attr_kind: DW_AT_location, loc: loc_result);
	  vec_safe_push (v&: used_rtx_array, obj: rtl);
	  return true;
	}
      return false;

    case CONST:
      if (CONSTANT_P (XEXP (rtl, 0)))
	return add_const_value_attribute (die, mode, XEXP (rtl, 0));
      /* FALLTHROUGH */
    case SYMBOL_REF:
      if (!const_ok_for_output (rtl))
	return false;
      /* FALLTHROUGH */
    case LABEL_REF:
      if (dwarf_version >= 4 || !dwarf_strict)
	goto rtl_addr;
      return false;

    case PLUS:
      /* In cases where an inlined instance of an inline function is passed
	 the address of an `auto' variable (which is local to the caller) we
	 can get a situation where the DECL_RTL of the artificial local
	 variable (for the inlining) which acts as a stand-in for the
	 corresponding formal parameter (of the inline function) will look
	 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)).  This is not
	 exactly a compile-time constant expression, but it isn't the address
	 of the (artificial) local variable either.  Rather, it represents the
	 *value* which the artificial local variable always has during its
	 lifetime.  We currently have no way to represent such quasi-constant
	 values in Dwarf, so for now we just punt and generate nothing.  */
      return false;

    case HIGH:
    case CONST_FIXED:
    case MINUS:
    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case CONST_POLY_INT:
      /* These rtx kinds have no DWARF constant representation here.  */
      return false;

    case MEM:
      /* A read-only BLKmode string in memory can be emitted directly as
	 a string constant.  */
      if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
	  && MEM_READONLY_P (rtl)
	  && GET_MODE (rtl) == BLKmode)
	{
	  add_AT_string (die, attr_kind: DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
	  return true;
	}
      return false;

    default:
      /* No other kinds of rtx should be possible here.  */
      gcc_unreachable ();
    }
}
20425 | |
/* walk_tree callback: determine whether the evaluation of the tree rooted
   at *TP references any variables or functions which aren't otherwise used
   (and therefore may not be output).  Returns the offending node, which
   stops the walk, or NULL_TREE if everything referenced will be emitted.  */
static tree
reference_to_unused (tree * tp, int * walk_subtrees,
		     void * data ATTRIBUTE_UNUSED)
{
  /* Only expressions and constants can contain further decl references;
     don't descend into anything else.  */
  if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
    *walk_subtrees = 0;

  /* A non-public decl that is neither used nor already written to the
     assembly output will not be emitted at all.  */
  if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
      && ! TREE_ASM_WRITTEN (*tp))
    return *tp;
  /* ??? The C++ FE emits debug information for using decls, so
     putting gcc_unreachable here falls over.  See PR31899.  For now
     be conservative.  */
  else if (!symtab->global_info_ready && VAR_P (*tp))
    return *tp;
  else if (VAR_P (*tp))
    {
      /* Once global info is ready, a variable lacking a varpool
	 definition will not be emitted in this CU.  */
      varpool_node *node = varpool_node::get (decl: *tp);
      if (!node || !node->definition)
	return *tp;
    }
  else if (TREE_CODE (*tp) == FUNCTION_DECL
	   && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
    {
      /* The call graph machinery must have finished analyzing,
	 optimizing and gimplifying the CU by now.
	 So if *TP has no call graph node associated
	 to it, it means *TP will not be emitted.  */
      if (!symtab->global_info_ready || !cgraph_node::get (decl: *tp))
	return *tp;
    }
  else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
    return *tp;

  return NULL_TREE;
}
20465 | |
/* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
   for use in a later add_const_value_attribute call.  Returns NULL_RTX when
   the initializer cannot be represented as an immediate RTL constant.  */

static rtx
rtl_for_decl_init (tree init, tree type)
{
  rtx rtl = NULL_RTX;

  STRIP_NOPS (init);

  /* If a variable is initialized with a string constant without embedded
     zeros, build CONST_STRING.  */
  if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
    {
      tree enttype = TREE_TYPE (type);
      tree domain = TYPE_DOMAIN (type);
      scalar_int_mode mode;

      /* Require a byte-sized element type, a zero-based constant domain
	 whose upper bound matches the string length, and no embedded
	 NUL bytes (checked by comparing against strlen + 1).  */
      if (is_int_mode (TYPE_MODE (enttype), int_mode: &mode)
	  && GET_MODE_SIZE (mode) == 1
	  && domain
	  && TYPE_MAX_VALUE (domain)
	  && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
	  && integer_zerop (TYPE_MIN_VALUE (domain))
	  && compare_tree_int (TYPE_MAX_VALUE (domain),
			       TREE_STRING_LENGTH (init) - 1) == 0
	  && ((size_t) TREE_STRING_LENGTH (init)
	      == strlen (TREE_STRING_POINTER (init)) + 1))
	{
	  rtl = gen_rtx_CONST_STRING (VOIDmode,
				      ggc_strdup (TREE_STRING_POINTER (init)));
	  rtl = gen_rtx_MEM (BLKmode, rtl);
	  MEM_READONLY_P (rtl) = 1;
	}
    }
  /* Other aggregates, and complex values, could be represented using
     CONCAT: FIXME!
     If this changes, please adjust tree_add_const_value_attribute
     so that for early_dwarf it will for such initializers mangle referenced
     decls.  */
  else if (AGGREGATE_TYPE_P (type)
	   || (TREE_CODE (init) == VIEW_CONVERT_EXPR
	       && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
	   || TREE_CODE (type) == COMPLEX_TYPE)
    ;
  /* Vectors only work if their mode is supported by the target.
     FIXME: generic vectors ought to work too.  */
  else if (TREE_CODE (type) == VECTOR_TYPE
	   && !VECTOR_MODE_P (TYPE_MODE (type)))
    ;
  /* If the initializer is something that we know will expand into an
     immediate RTL constant, expand it now.  We must be careful not to
     reference variables which won't be output.  */
  else if (initializer_constant_valid_p (init, type)
	   && ! walk_tree (&init, reference_to_unused, NULL, NULL))
    {
      /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
	 possible.  */
      if (TREE_CODE (type) == VECTOR_TYPE)
	switch (TREE_CODE (init))
	  {
	  case VECTOR_CST:
	    break;
	  case CONSTRUCTOR:
	    if (TREE_CONSTANT (init))
	      {
		vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
		bool constant_p = true;
		tree value;
		unsigned HOST_WIDE_INT ix;

		/* Even when ctor is constant, it might contain non-*_CST
		   elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
		   belong into VECTOR_CST nodes.  */
		FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
		  if (!CONSTANT_CLASS_P (value))
		    {
		      constant_p = false;
		      break;
		    }

		if (constant_p)
		  {
		    init = build_vector_from_ctor (type, elts);
		    break;
		  }
	      }
	    /* FALLTHRU */

	  default:
	    return NULL;
	  }

      /* Large _BitInt BLKmode INTEGER_CSTs would yield a MEM.  */
      if (TREE_CODE (init) == INTEGER_CST
	  && TREE_CODE (TREE_TYPE (init)) == BITINT_TYPE
	  && TYPE_MODE (TREE_TYPE (init)) == BLKmode)
	{
	  /* Only values that fit a HOST_WIDE_INT can be represented;
	     otherwise punt rather than expand to a MEM.  */
	  if (tree_fits_shwi_p (init))
	    return GEN_INT (tree_to_shwi (init));
	  else
	    return NULL;
	}

      rtl = expand_expr (exp: init, NULL_RTX, VOIDmode, modifier: EXPAND_INITIALIZER);

      /* If expand_expr returns a MEM, it wasn't immediate.  */
      gcc_assert (!rtl || !MEM_P (rtl));
    }

  return rtl;
}
20578 | |
/* Generate RTL for the variable DECL to represent its location.
   Returns NULL_RTX when no usable location or constant value can be
   determined.  */

static rtx
rtl_for_decl_location (tree decl)
{
  rtx rtl;

  /* Here we have to decide where we are going to say the parameter "lives"
     (as far as the debugger is concerned).  We only have a couple of
     choices.  GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.

     DECL_RTL normally indicates where the parameter lives during most of the
     activation of the function.  If optimization is enabled however, this
     could be either NULL or else a pseudo-reg.  Both of those cases indicate
     that the parameter doesn't really live anywhere (as far as the code
     generation parts of GCC are concerned) during most of the function's
     activation.  That will happen (for example) if the parameter is never
     referenced within the function.

     We could just generate a location descriptor here for all non-NULL
     non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
     a little nicer than that if we also consider DECL_INCOMING_RTL in cases
     where DECL_RTL is NULL or is a pseudo-reg.

     Note however that we can only get away with using DECL_INCOMING_RTL as
     a backup substitute for DECL_RTL in certain limited cases.  In cases
     where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
     we can be sure that the parameter was passed using the same type as it is
     declared to have within the function, and that its DECL_INCOMING_RTL
     points us to a place where a value of that type is passed.

     In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
     we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
     because in these cases DECL_INCOMING_RTL points us to a value of some
     type which is *different* from the type of the parameter itself.  Thus,
     if we tried to use DECL_INCOMING_RTL to generate a location attribute in
     such cases, the debugger would end up (for example) trying to fetch a
     `float' from a place which actually contains the first part of a
     `double'.  That would lead to really incorrect and confusing
     output at debug-time.

     So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
     in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl).  There
     are a couple of exceptions however.  On little-endian machines we can
     get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
     not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
     an integral type that is smaller than TREE_TYPE (decl).  These cases arise
     when (on a little-endian machine) a non-prototyped function has a
     parameter declared to be of type `short' or `char'.  In such cases,
     TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
     be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
     passed `int' value.  If the debugger then uses that address to fetch
     a `short' or a `char' (on a little-endian machine) the result will be
     the correct data, so we allow for such exceptional cases below.

     Note that our goal here is to describe the place where the given formal
     parameter lives during most of the function's activation (i.e. between the
     end of the prologue and the start of the epilogue).  We'll do that as best
     as we can.  Note however that if the given formal parameter is modified
     sometime during the execution of the function, then a stack backtrace (at
     debug-time) will show the function as having been called with the *new*
     value rather than the value which was originally passed in.  This happens
     rarely enough that it is not a major problem, but it *is* a problem, and
     I'd like to fix it.

     A future version of dwarf2out.cc may generate two additional attributes for
     any given DW_TAG_formal_parameter DIE which will describe the "passed
     type" and the "passed location" for the given formal parameter in addition
     to the attributes we now generate to indicate the "declared type" and the
     "active location" for each parameter.  This additional set of attributes
     could be used by debuggers for stack backtraces.  Separately, note that
     sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
     This happens (for example) for inlined-instances of inline function formal
     parameters which are never referenced.  This really shouldn't be
     happening.  All PARM_DECL nodes should get valid non-NULL
     DECL_INCOMING_RTL values.  FIXME.  */

  /* Use DECL_RTL as the "location" unless we find something better.  */
  rtl = DECL_RTL_IF_SET (decl);

  /* When generating abstract instances, ignore everything except
     constants, symbols living in memory, and symbols living in
     fixed registers.  */
  if (! reload_completed)
    {
      if (rtl
	  && (CONSTANT_P (rtl)
	      || (MEM_P (rtl)
		  && CONSTANT_P (XEXP (rtl, 0)))
	      || (REG_P (rtl)
		  && VAR_P (decl)
		  && TREE_STATIC (decl))))
	{
	  rtl = targetm.delegitimize_address (rtl);
	  return rtl;
	}
      /* Anything else is not stable before reload; drop it.  */
      rtl = NULL_RTX;
    }
  else if (TREE_CODE (decl) == PARM_DECL)
    {
      /* DECL_RTL is missing or useless (pseudo-reg); see whether
	 DECL_INCOMING_RTL can stand in for it, per the rules in the
	 long comment above.  */
      if (rtl == NULL_RTX
	  || is_pseudo_reg (rtl)
	  || (MEM_P (rtl)
	      && is_pseudo_reg (XEXP (rtl, 0))
	      && DECL_INCOMING_RTL (decl)
	      && MEM_P (DECL_INCOMING_RTL (decl))
	      && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
	{
	  tree declared_type = TREE_TYPE (decl);
	  tree passed_type = DECL_ARG_TYPE (decl);
	  machine_mode dmode = TYPE_MODE (declared_type);
	  machine_mode pmode = TYPE_MODE (passed_type);

	  /* This decl represents a formal parameter which was optimized out.
	     Note that DECL_INCOMING_RTL may be NULL in here, but we handle
	     all cases where (rtl == NULL_RTX) just below.  */
	  if (dmode == pmode)
	    rtl = DECL_INCOMING_RTL (decl);
	  else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
		   && SCALAR_INT_MODE_P (dmode)
		   && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
		   && DECL_INCOMING_RTL (decl))
	    {
	      /* Declared type is a scalar int no wider than the passed
		 type: the incoming value covers the declared one.  On
		 big-endian targets the declared value sits at the high
		 end of the slot, so adjust the address.  */
	      rtx inc = DECL_INCOMING_RTL (decl);
	      if (REG_P (inc))
		rtl = inc;
	      else if (MEM_P (inc))
		{
		  if (BYTES_BIG_ENDIAN)
		    rtl = adjust_address_nv (inc, dmode,
					     GET_MODE_SIZE (pmode)
					     - GET_MODE_SIZE (dmode));
		  else
		    rtl = inc;
		}
	    }
	}

      /* If the parm was passed in registers, but lives on the stack, then
	 make a big endian correction if the mode of the type of the
	 parameter is not the same as the mode of the rtl.  */
      /* ??? This is the same series of checks that are made in dbxout.cc before
	 we reach the big endian correction code there.  It isn't clear if all
	 of these checks are necessary here, but keeping them all is the safe
	 thing to do.  */
      else if (MEM_P (rtl)
	       && XEXP (rtl, 0) != const0_rtx
	       && ! CONSTANT_P (XEXP (rtl, 0))
	       /* Not passed in memory.  */
	       && !MEM_P (DECL_INCOMING_RTL (decl))
	       /* Not passed by invisible reference.  */
	       && (!REG_P (XEXP (rtl, 0))
		   || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
		   || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
#if !HARD_FRAME_POINTER_IS_ARG_POINTER
		   || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
#endif
		     )
	       /* Big endian correction check.  */
	       && BYTES_BIG_ENDIAN
	       && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
	       && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
			    UNITS_PER_WORD))
	{
	  machine_mode addr_mode = get_address_mode (mem: rtl);
	  poly_int64 offset = (UNITS_PER_WORD
			       - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));

	  rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
			     plus_constant (addr_mode, XEXP (rtl, 0), offset));
	}
    }
  else if (VAR_P (decl)
	   && rtl
	   && MEM_P (rtl)
	   && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
    {
      machine_mode addr_mode = get_address_mode (mem: rtl);
      poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
					       GET_MODE (rtl));

      /* If a variable is declared "register" yet is smaller than
	 a register, then if we store the variable to memory, it
	 looks like we're storing a register-sized value, when in
	 fact we are not.  We need to adjust the offset of the
	 storage location to reflect the actual value's bytes,
	 else gdb will not be able to display it.  */
      if (maybe_ne (a: offset, b: 0))
	rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
			   plus_constant (addr_mode, XEXP (rtl, 0), offset));
    }

  /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
     and will have been substituted directly into all expressions that use it.
     C does not have such a concept, but C++ and other languages do.  */
  if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
    rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));

  if (rtl)
    rtl = targetm.delegitimize_address (rtl);

  /* If we don't look past the constant pool, we risk emitting a
     reference to a constant pool entry that isn't referenced from
     code, and thus is not emitted.  */
  if (rtl)
    rtl = avoid_constant_pool_reference (rtl);

  /* Try harder to get a rtl.  If this symbol ends up not being emitted
     in the current CU, resolve_addr will remove the expression referencing
     it.  */
  if (rtl == NULL_RTX
      && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
      && VAR_P (decl)
      && !DECL_EXTERNAL (decl)
      && TREE_STATIC (decl)
      && DECL_NAME (decl)
      && !DECL_HARD_REGISTER (decl)
      && DECL_MODE (decl) != VOIDmode)
    {
      /* Build a debug-only RTL; accept it only if it came out as the
	 expected symbolic memory reference for this very decl.  */
      rtl = make_decl_rtl_for_debug (decl);
      if (!MEM_P (rtl)
	  || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
	  || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
	rtl = NULL_RTX;
    }

  return rtl;
}
20807 | |
20808 | /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is |
20809 | returned. If so, the decl for the COMMON block is returned, and the |
20810 | value is the offset into the common block for the symbol. */ |
20811 | |
20812 | static tree |
20813 | fortran_common (tree decl, HOST_WIDE_INT *value) |
20814 | { |
20815 | tree val_expr, cvar; |
20816 | machine_mode mode; |
20817 | poly_int64 bitsize, bitpos; |
20818 | tree offset; |
20819 | HOST_WIDE_INT cbitpos; |
20820 | int unsignedp, reversep, volatilep = 0; |
20821 | |
20822 | /* If the decl isn't a VAR_DECL, or if it isn't static, or if |
20823 | it does not have a value (the offset into the common area), or if it |
20824 | is thread local (as opposed to global) then it isn't common, and shouldn't |
20825 | be handled as such. */ |
20826 | if (!VAR_P (decl) |
20827 | || !TREE_STATIC (decl) |
20828 | || !DECL_HAS_VALUE_EXPR_P (decl) |
20829 | || !is_fortran ()) |
20830 | return NULL_TREE; |
20831 | |
20832 | val_expr = DECL_VALUE_EXPR (decl); |
20833 | if (TREE_CODE (val_expr) != COMPONENT_REF) |
20834 | return NULL_TREE; |
20835 | |
20836 | cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode, |
20837 | &unsignedp, &reversep, &volatilep); |
20838 | |
20839 | if (cvar == NULL_TREE |
20840 | || !VAR_P (cvar) |
20841 | || DECL_ARTIFICIAL (cvar) |
20842 | || !TREE_PUBLIC (cvar) |
20843 | /* We don't expect to have to cope with variable offsets, |
20844 | since at present all static data must have a constant size. */ |
20845 | || !bitpos.is_constant (const_value: &cbitpos)) |
20846 | return NULL_TREE; |
20847 | |
20848 | *value = 0; |
20849 | if (offset != NULL) |
20850 | { |
20851 | if (!tree_fits_shwi_p (offset)) |
20852 | return NULL_TREE; |
20853 | *value = tree_to_shwi (offset); |
20854 | } |
20855 | if (cbitpos != 0) |
20856 | *value += cbitpos / BITS_PER_UNIT; |
20857 | |
20858 | return cvar; |
20859 | } |
20860 | |
/* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
   data attribute for a variable or a parameter.  We generate the
   DW_AT_const_value attribute only in those cases where the given variable
   or parameter does not have a true "location" either in memory or in a
   register.  This can happen (for example) when a constant is passed as an
   actual argument in a call to an inline function.  (It's possible that
   these things can crop up in other ways also.)  Note that one type of
   constant value which can be passed into an inlined function is a constant
   pointer.  This can happen for example if an actual argument in an inlined
   function call evaluates to a compile-time constant address.

   CACHE_P is true if it is worth caching the location list for DECL,
   so that future calls can reuse it rather than regenerate it from scratch.
   This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
   since we will need to refer to them each time the function is inlined.

   Returns true if an attribute was added to DIE (or was already there).  */

static bool
add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
{
  rtx rtl;
  dw_loc_list_ref list;
  var_loc_list *loc_list;
  cached_dw_loc_list *cache;

  /* Locations are a late-dwarf concern; nothing to do early.  */
  if (early_dwarf)
    return false;

  if (TREE_CODE (decl) == ERROR_MARK)
    return false;

  /* Nothing to do if a location or constant value is already present.  */
  if (get_AT (die, attr_kind: DW_AT_location)
      || get_AT (die, attr_kind: DW_AT_const_value))
    return true;

  gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);

  /* Try to get some constant RTL for this decl, and use that as the value of
     the location.  */

  rtl = rtl_for_decl_location (decl);
  if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
      && add_const_value_attribute (die, DECL_MODE (decl), rtl))
    return true;

  /* See if we have single element location list that is equivalent to
     a constant value.  That way we are better to use add_const_value_attribute
     rather than expanding constant value equivalent.  */
  loc_list = lookup_decl_loc (decl);
  if (loc_list
      && loc_list->first
      && loc_list->first->next == NULL
      && NOTE_P (loc_list->first->loc)
      && NOTE_VAR_LOCATION (loc_list->first->loc)
      && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
    {
      struct var_loc_node *node;

      node = loc_list->first;
      rtl = NOTE_VAR_LOCATION_LOC (node->loc);
      /* An EXPR_LIST wraps the actual location; unwrap it.  */
      if (GET_CODE (rtl) == EXPR_LIST)
	rtl = XEXP (rtl, 0);
      if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
	  && add_const_value_attribute (die, DECL_MODE (decl), rtl))
	return true;
    }
  /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
     list several times.  See if we've already cached the contents.  */
  list = NULL;
  if (loc_list == NULL || cached_dw_loc_list_table == NULL)
    cache_p = false;
  if (cache_p)
    {
      cache = cached_dw_loc_list_table->find_with_hash (comparable: decl, DECL_UID (decl));
      if (cache)
	list = cache->loc_list;
    }
  if (list == NULL)
    {
      /* want_address 0 for by-reference decls (the location already is
	 an address), 2 otherwise.  */
      list = loc_list_from_tree (loc: decl, want_address: decl_by_reference_p (decl) ? 0 : 2,
				 NULL);
      /* It is usually worth caching this result if the decl is from
	 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements.  */
      if (cache_p && list && list->dw_loc_next)
	{
	  cached_dw_loc_list **slot
	    = cached_dw_loc_list_table->find_slot_with_hash (comparable: decl,
							     DECL_UID (decl),
							     insert: INSERT);
	  cache = ggc_cleared_alloc<cached_dw_loc_list> ();
	  cache->decl_id = DECL_UID (decl);
	  cache->loc_list = list;
	  *slot = cache;
	}
    }
  if (list)
    {
      add_AT_location_description (die, attr_kind: DW_AT_location, descr: list);
      return true;
    }
  /* None of that worked, so it must not really have a location;
     try adding a constant value attribute from the DECL_INITIAL.  */
  return tree_add_const_value_attribute_for_decl (die, decl);
}
20965 | |
20966 | /* Mangle referenced decls. */ |
20967 | static tree |
20968 | mangle_referenced_decls (tree *tp, int *walk_subtrees, void *) |
20969 | { |
20970 | if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp)) |
20971 | *walk_subtrees = 0; |
20972 | |
20973 | if (VAR_OR_FUNCTION_DECL_P (*tp)) |
20974 | assign_assembler_name_if_needed (*tp); |
20975 | |
20976 | return NULL_TREE; |
20977 | } |
20978 | |
20979 | /* Attach a DW_AT_const_value attribute to DIE. The value of the |
20980 | attribute is the const value T. */ |
20981 | |
20982 | static bool |
20983 | tree_add_const_value_attribute (dw_die_ref die, tree t) |
20984 | { |
20985 | tree init; |
20986 | tree type = TREE_TYPE (t); |
20987 | |
20988 | if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node) |
20989 | return false; |
20990 | |
20991 | init = t; |
20992 | gcc_assert (!DECL_P (init)); |
20993 | |
20994 | if (TREE_CODE (init) == INTEGER_CST) |
20995 | { |
20996 | if (tree_fits_uhwi_p (init)) |
20997 | { |
20998 | add_AT_unsigned (die, attr_kind: DW_AT_const_value, unsigned_val: tree_to_uhwi (init)); |
20999 | return true; |
21000 | } |
21001 | if (tree_fits_shwi_p (init)) |
21002 | { |
21003 | add_AT_int (die, attr_kind: DW_AT_const_value, int_val: tree_to_shwi (init)); |
21004 | return true; |
21005 | } |
21006 | } |
21007 | if (!early_dwarf) |
21008 | { |
21009 | rtx rtl = rtl_for_decl_init (init, type); |
21010 | if (rtl) |
21011 | return add_const_value_attribute (die, TYPE_MODE (type), rtl); |
21012 | } |
21013 | else |
21014 | { |
21015 | /* For early_dwarf force mangling of all referenced symbols. */ |
21016 | tree initializer = init; |
21017 | STRIP_NOPS (initializer); |
21018 | /* rtl_for_decl_init punts on other aggregates, and complex values. */ |
21019 | if (AGGREGATE_TYPE_P (type) |
21020 | || (TREE_CODE (initializer) == VIEW_CONVERT_EXPR |
21021 | && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (initializer, 0)))) |
21022 | || TREE_CODE (type) == COMPLEX_TYPE) |
21023 | ; |
21024 | else if (initializer_constant_valid_p (initializer, type)) |
21025 | walk_tree (&initializer, mangle_referenced_decls, NULL, NULL); |
21026 | } |
21027 | /* If the host and target are sane, try harder. */ |
21028 | if (CHAR_BIT == 8 && BITS_PER_UNIT == 8 |
21029 | && initializer_constant_valid_p (init, type)) |
21030 | { |
21031 | HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init)); |
21032 | if (size > 0 && (int) size == size) |
21033 | { |
21034 | unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (c: size); |
21035 | |
21036 | if (native_encode_initializer (init, array, size) == size) |
21037 | { |
21038 | add_AT_vec (die, attr_kind: DW_AT_const_value, length: size, elt_size: 1, array); |
21039 | return true; |
21040 | } |
21041 | ggc_free (array); |
21042 | } |
21043 | } |
21044 | return false; |
21045 | } |
21046 | |
21047 | /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the |
21048 | attribute is the const value of T, where T is an integral constant |
21049 | variable with static storage duration |
21050 | (so it can't be a PARM_DECL or a RESULT_DECL). */ |
21051 | |
21052 | static bool |
21053 | tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl) |
21054 | { |
21055 | |
21056 | if (!decl |
21057 | || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL) |
21058 | || (VAR_P (decl) && !TREE_STATIC (decl))) |
21059 | return false; |
21060 | |
21061 | if (TREE_READONLY (decl) |
21062 | && ! TREE_THIS_VOLATILE (decl) |
21063 | && DECL_INITIAL (decl)) |
21064 | /* OK */; |
21065 | else |
21066 | return false; |
21067 | |
21068 | /* Don't add DW_AT_const_value if abstract origin already has one. */ |
21069 | if (get_AT (die: var_die, attr_kind: DW_AT_const_value)) |
21070 | return false; |
21071 | |
21072 | return tree_add_const_value_attribute (die: var_die, DECL_INITIAL (decl)); |
21073 | } |
21074 | |
/* Convert the CFI instructions for the current function into a
   location list.  This is used for DW_AT_frame_base when we targeting
   a dwarf2 consumer that does not support the dwarf3
   DW_OP_call_frame_cfa.  OFFSET is a constant to be added to all CFA
   expressions.

   The FDE's CFI opcodes are replayed as a little state machine: advance
   opcodes close the current location-list entry (if the CFA changed) and
   open a new one; all other opcodes update the tracked CFA.  */

static dw_loc_list_ref
convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
{
  int ix;
  dw_fde_ref fde;
  dw_loc_list_ref list, *list_tail;
  dw_cfi_ref cfi;
  dw_cfa_location last_cfa, next_cfa;
  const char *start_label, *last_label, *section;
  dw_cfa_location remember;

  fde = cfun->fde;
  gcc_assert (fde != NULL);

  section = secname_for_decl (decl: current_function_decl);
  list_tail = &list;
  list = NULL;

  /* Start from an invalid CFA so the first real definition registers
     as a change.  */
  memset (s: &next_cfa, c: 0, n: sizeof (next_cfa));
  next_cfa.reg.set_by_dwreg (INVALID_REGNUM);
  remember = next_cfa;

  start_label = fde->dw_fde_begin;

  /* ??? Bald assumption that the CIE opcode list does not contain
     advance opcodes.  */
  FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
    lookup_cfa_1 (cfi, loc: &next_cfa, remember: &remember);

  last_cfa = next_cfa;
  last_label = start_label;

  if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
    {
      /* If the first partition contained no CFI adjustments, the
	 CIE opcodes apply to the whole first partition.  */
      *list_tail = new_loc_list (expr: build_cfa_loc (cfa: &last_cfa, offset),
				 begin: fde->dw_fde_begin, vbegin: 0, end: fde->dw_fde_end, vend: 0, section);
      list_tail =&(*list_tail)->dw_loc_next;
      start_label = last_label = fde->dw_fde_second_begin;
    }

  FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
    {
      switch (cfi->dw_cfi_opc)
	{
	case DW_CFA_set_loc:
	case DW_CFA_advance_loc1:
	case DW_CFA_advance_loc2:
	case DW_CFA_advance_loc4:
	  /* An advance: if the CFA changed since the last emitted entry,
	     close that entry at the current label and start a new one.  */
	  if (!cfa_equal_p (&last_cfa, &next_cfa))
	    {
	      *list_tail = new_loc_list (expr: build_cfa_loc (cfa: &last_cfa, offset),
					 begin: start_label, vbegin: 0, end: last_label, vend: 0, section);

	      list_tail = &(*list_tail)->dw_loc_next;
	      last_cfa = next_cfa;
	      start_label = last_label;
	    }
	  last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
	  break;

	case DW_CFA_advance_loc:
	  /* The encoding is complex enough that we should never emit this.  */
	  gcc_unreachable ();

	default:
	  /* Non-advance opcode: fold it into the tracked CFA.  */
	  lookup_cfa_1 (cfi, loc: &next_cfa, remember: &remember);
	  break;
	}
      /* At the hot/cold partition switch point, flush the pending entry
	 up to the end of the first partition and restart in the second.  */
      if (ix + 1 == fde->dw_fde_switch_cfi_index)
	{
	  if (!cfa_equal_p (&last_cfa, &next_cfa))
	    {
	      *list_tail = new_loc_list (expr: build_cfa_loc (cfa: &last_cfa, offset),
					 begin: start_label, vbegin: 0, end: last_label, vend: 0, section);

	      list_tail = &(*list_tail)->dw_loc_next;
	      last_cfa = next_cfa;
	      start_label = last_label;
	    }
	  *list_tail = new_loc_list (expr: build_cfa_loc (cfa: &last_cfa, offset),
				     begin: start_label, vbegin: 0, end: fde->dw_fde_end, vend: 0, section);
	  list_tail = &(*list_tail)->dw_loc_next;
	  start_label = last_label = fde->dw_fde_second_begin;
	}
    }

  /* Emit the final pending change, if any ...  */
  if (!cfa_equal_p (&last_cfa, &next_cfa))
    {
      *list_tail = new_loc_list (expr: build_cfa_loc (cfa: &last_cfa, offset),
				 begin: start_label, vbegin: 0, end: last_label, vend: 0, section);
      list_tail = &(*list_tail)->dw_loc_next;
      start_label = last_label;
    }

  /* ... and the closing entry covering the rest of the function.  */
  *list_tail = new_loc_list (expr: build_cfa_loc (cfa: &next_cfa, offset),
			     begin: start_label, vbegin: 0,
			     end: fde->dw_fde_second_begin
			     ? fde->dw_fde_second_end : fde->dw_fde_end, vend: 0,
			     section);

  maybe_gen_llsym (list);

  return list;
}
21187 | |
21188 | /* Compute a displacement from the "steady-state frame pointer" to the |
21189 | frame base (often the same as the CFA), and store it in |
21190 | frame_pointer_fb_offset. OFFSET is added to the displacement |
21191 | before the latter is negated. */ |
21192 | |
21193 | static void |
21194 | compute_frame_pointer_to_fb_displacement (poly_int64 offset) |
21195 | { |
21196 | rtx reg, elim; |
21197 | |
21198 | #ifdef FRAME_POINTER_CFA_OFFSET |
21199 | reg = frame_pointer_rtx; |
21200 | offset += FRAME_POINTER_CFA_OFFSET (current_function_decl); |
21201 | #else |
21202 | reg = arg_pointer_rtx; |
21203 | offset += ARG_POINTER_CFA_OFFSET (current_function_decl); |
21204 | #endif |
21205 | |
21206 | elim = (ira_use_lra_p |
21207 | ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX) |
21208 | : eliminate_regs (reg, VOIDmode, NULL_RTX)); |
21209 | elim = strip_offset_and_add (x: elim, offset: &offset); |
21210 | |
21211 | frame_pointer_fb_offset = -offset; |
21212 | |
21213 | /* ??? AVR doesn't set up valid eliminations when there is no stack frame |
21214 | in which to eliminate. This is because it's stack pointer isn't |
21215 | directly accessible as a register within the ISA. To work around |
21216 | this, assume that while we cannot provide a proper value for |
21217 | frame_pointer_fb_offset, we won't need one either. We can use |
21218 | hard frame pointer in debug info even if frame pointer isn't used |
21219 | since hard frame pointer in debug info is encoded with DW_OP_fbreg |
21220 | which uses the DW_AT_frame_base attribute, not hard frame pointer |
21221 | directly. */ |
21222 | frame_pointer_fb_offset_valid |
21223 | = (elim == hard_frame_pointer_rtx || elim == stack_pointer_rtx); |
21224 | } |
21225 | |
21226 | /* Generate a DW_AT_name attribute given some string value to be included as |
21227 | the value of the attribute. */ |
21228 | |
21229 | void |
21230 | add_name_attribute (dw_die_ref die, const char *name_string) |
21231 | { |
21232 | if (name_string != NULL && *name_string != 0) |
21233 | { |
21234 | if (demangle_name_func) |
21235 | name_string = (*demangle_name_func) (name_string); |
21236 | |
21237 | add_AT_string (die, attr_kind: DW_AT_name, str: name_string); |
21238 | } |
21239 | } |
21240 | |
21241 | /* Generate a DW_AT_name attribute given some string value representing a |
21242 | file or filepath to be included as value of the attribute. */ |
21243 | static void |
21244 | add_filename_attribute (dw_die_ref die, const char *name_string) |
21245 | { |
21246 | if (name_string != NULL && *name_string != 0) |
21247 | add_filepath_AT_string (die, attr_kind: DW_AT_name, str: name_string); |
21248 | } |
21249 | |
21250 | /* Generate a DW_AT_description attribute given some string value to be included |
21251 | as the value of the attribute. */ |
21252 | |
21253 | static void |
21254 | add_desc_attribute (dw_die_ref die, const char *name_string) |
21255 | { |
21256 | if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict)) |
21257 | return; |
21258 | |
21259 | if (name_string == NULL || *name_string == 0) |
21260 | return; |
21261 | |
21262 | if (demangle_name_func) |
21263 | name_string = (*demangle_name_func) (name_string); |
21264 | |
21265 | add_AT_string (die, attr_kind: DW_AT_description, str: name_string); |
21266 | } |
21267 | |
21268 | /* Generate a DW_AT_description attribute given some decl to be included |
21269 | as the value of the attribute. */ |
21270 | |
21271 | static void |
21272 | add_desc_attribute (dw_die_ref die, tree decl) |
21273 | { |
21274 | tree decl_name; |
21275 | |
21276 | if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict)) |
21277 | return; |
21278 | |
21279 | if (decl == NULL_TREE || !DECL_P (decl)) |
21280 | return; |
21281 | decl_name = DECL_NAME (decl); |
21282 | |
21283 | if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL) |
21284 | { |
21285 | const char *name = dwarf2_name (decl, scope: 0); |
21286 | add_desc_attribute (die, name_string: name ? name : IDENTIFIER_POINTER (decl_name)); |
21287 | } |
21288 | else |
21289 | { |
21290 | char *desc = print_generic_expr_to_str (decl); |
21291 | add_desc_attribute (die, name_string: desc); |
21292 | free (ptr: desc); |
21293 | } |
21294 | } |
21295 | |
21296 | /* Retrieve the descriptive type of TYPE, if any, make sure it has a |
21297 | DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE |
21298 | of TYPE accordingly. |
21299 | |
21300 | ??? This is a temporary measure until after we're able to generate |
21301 | regular DWARF for the complex Ada type system. */ |
21302 | |
21303 | static void |
21304 | add_gnat_descriptive_type_attribute (dw_die_ref die, tree type, |
21305 | dw_die_ref context_die) |
21306 | { |
21307 | tree dtype; |
21308 | dw_die_ref dtype_die; |
21309 | |
21310 | if (!lang_hooks.types.descriptive_type) |
21311 | return; |
21312 | |
21313 | dtype = lang_hooks.types.descriptive_type (type); |
21314 | if (!dtype) |
21315 | return; |
21316 | |
21317 | dtype_die = lookup_type_die (type: dtype); |
21318 | if (!dtype_die) |
21319 | { |
21320 | gen_type_die (dtype, context_die); |
21321 | dtype_die = lookup_type_die (type: dtype); |
21322 | gcc_assert (dtype_die); |
21323 | } |
21324 | |
21325 | add_AT_die_ref (die, attr_kind: DW_AT_GNAT_descriptive_type, targ_die: dtype_die); |
21326 | } |
21327 | |
21328 | /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */ |
21329 | |
21330 | static const char * |
21331 | comp_dir_string (void) |
21332 | { |
21333 | const char *wd; |
21334 | char *wd_plus_sep = NULL; |
21335 | static const char *cached_wd = NULL; |
21336 | |
21337 | if (cached_wd != NULL) |
21338 | return cached_wd; |
21339 | |
21340 | wd = get_src_pwd (); |
21341 | if (wd == NULL) |
21342 | return NULL; |
21343 | |
21344 | if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR) |
21345 | { |
21346 | size_t wdlen = strlen (s: wd); |
21347 | wd_plus_sep = XNEWVEC (char, wdlen + 2); |
21348 | strcpy (dest: wd_plus_sep, src: wd); |
21349 | wd_plus_sep [wdlen] = DIR_SEPARATOR; |
21350 | wd_plus_sep [wdlen + 1] = 0; |
21351 | wd = wd_plus_sep; |
21352 | } |
21353 | |
21354 | cached_wd = remap_debug_filename (wd); |
21355 | |
21356 | /* remap_debug_filename can just pass through wd or return a new gc string. |
21357 | These two types can't be both stored in a GTY(())-tagged string, but since |
21358 | the cached value lives forever just copy it if needed. */ |
21359 | if (cached_wd != wd) |
21360 | { |
21361 | cached_wd = xstrdup (cached_wd); |
21362 | if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR && wd_plus_sep != NULL) |
21363 | free (ptr: wd_plus_sep); |
21364 | } |
21365 | |
21366 | return cached_wd; |
21367 | } |
21368 | |
21369 | /* Generate a DW_AT_comp_dir attribute for DIE. */ |
21370 | |
21371 | static void |
21372 | add_comp_dir_attribute (dw_die_ref die) |
21373 | { |
21374 | const char * wd = comp_dir_string (); |
21375 | if (wd != NULL) |
21376 | add_filepath_AT_string (die, attr_kind: DW_AT_comp_dir, str: wd); |
21377 | } |
21378 | |
/* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
   pointer computation, ...), output a representation for that bound according
   to the accepted FORMS (see enum dw_scalar_form) and add it to DIE.  See
   loc_list_from_tree for the meaning of CONTEXT.

   FORMS is a bitmask of dw_scalar_form_{constant,reference,exprloc}; the
   forms are tried in that order and the first one that applies wins.  If no
   permitted form can represent VALUE, no attribute is added at all.  */

static void
add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
		 int forms, struct loc_descr_context *context)
{
  dw_die_ref context_die, decl_die = NULL;
  dw_loc_list_ref list;
  bool strip_conversions = true;
  bool placeholder_seen = false;

  /* First peel any conversions wrapped around VALUE, giving up entirely
     on trees we can never describe.  */
  while (strip_conversions)
    switch (TREE_CODE (value))
      {
      case ERROR_MARK:
      case SAVE_EXPR:
	return;

      CASE_CONVERT:
      case VIEW_CONVERT_EXPR:
	value = TREE_OPERAND (value, 0);
	break;

      default:
	strip_conversions = false;
	break;
      }

  /* If possible and permitted, output the attribute as a constant.  */
  if ((forms & dw_scalar_form_constant) != 0
      && TREE_CODE (value) == INTEGER_CST)
    {
      unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));

      /* If HOST_WIDE_INT is big enough then represent the bound as
	 a constant value.  We need to choose a form based on
	 whether the type is signed or unsigned.  We cannot just
	 call add_AT_unsigned if the value itself is positive
	 (add_AT_unsigned might add the unsigned value encoded as
	 DW_FORM_data[1248]).  Some DWARF consumers will lookup the
	 bounds type and then sign extend any unsigned values found
	 for signed types.  This is needed only for
	 DW_AT_{lower,upper}_bound, since for most other attributes,
	 consumers will treat DW_FORM_data[1248] as unsigned values,
	 regardless of the underlying type.  */
      if (prec <= HOST_BITS_PER_WIDE_INT
	  || tree_fits_uhwi_p (value))
	{
	  if (TYPE_UNSIGNED (TREE_TYPE (value)))
	    add_AT_unsigned (die, attr_kind: attr, TREE_INT_CST_LOW (value));
	  else
	    add_AT_int (die, attr_kind: attr, TREE_INT_CST_LOW (value));
	}
      else if (dwarf_version >= 5
	       && TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (value))) == 128)
	/* Otherwise represent the bound as an unsigned value with
	   the precision of its type.  The precision and signedness
	   of the type will be necessary to re-interpret it
	   unambiguously.  */
	add_AT_wide (die, attr_kind: attr, w: wi::to_wide (t: value));
      else
	{
	  /* Wide constant that cannot be emitted directly: describe it
	     with a location expression computing the value instead.  */
	  rtx v = immed_wide_int_const (wi::to_wide (t: value),
					TYPE_MODE (TREE_TYPE (value)));
	  dw_loc_descr_ref loc
	    = loc_descriptor (rtl: v, TYPE_MODE (TREE_TYPE (value)),
			      initialized: VAR_INIT_STATUS_INITIALIZED);
	  if (loc)
	    add_AT_loc (die, attr_kind: attr, loc);
	}
      return;
    }

  /* Otherwise, if it's possible and permitted too, output a reference to
     another DIE.  */
  if ((forms & dw_scalar_form_reference) != 0)
    {
      tree decl = NULL_TREE;

      /* Some type attributes reference an outer type.  For instance, the upper
	 bound of an array may reference an embedding record (this happens in
	 Ada).  */
      if (TREE_CODE (value) == COMPONENT_REF
	  && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
	  && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
	decl = TREE_OPERAND (value, 1);

      else if (VAR_P (value)
	       || TREE_CODE (value) == PARM_DECL
	       || TREE_CODE (value) == RESULT_DECL)
	decl = value;

      if (decl != NULL_TREE)
	{
	  decl_die = lookup_decl_die (decl);

	  /* ??? Can this happen, or should the variable have been bound
	     first?  Probably it can, since I imagine that we try to create
	     the types of parameters in the order in which they exist in
	     the list, and won't have created a forward reference to a
	     later parameter.  */
	  if (decl_die != NULL)
	    {
	      /* Only reference the DIE when it carries an attribute that
		 actually locates or evaluates the entity.  */
	      if (get_AT (die: decl_die, attr_kind: DW_AT_location)
		  || get_AT (die: decl_die, attr_kind: DW_AT_data_member_location)
		  || get_AT (die: decl_die, attr_kind: DW_AT_data_bit_offset)
		  || get_AT (die: decl_die, attr_kind: DW_AT_const_value))
		{
		  add_AT_die_ref (die, attr_kind: attr, targ_die: decl_die);
		  return;
		}
	    }
	}
    }

  /* Last chance: try to create a stack operation procedure to evaluate the
     value.  Do nothing if even that is not possible or permitted.  */
  if ((forms & dw_scalar_form_exprloc) == 0)
    return;

  list = loc_list_from_tree (loc: value, want_address: 2, context);
  /* Capture and clear the placeholder flag so that a PLACEHOLDER_EXPR seen
     while computing LIST does not leak into a later call using the same
     CONTEXT.  */
  if (context && context->placeholder_arg)
    {
      placeholder_seen = context->placeholder_seen;
      context->placeholder_seen = false;
    }
  if (list == NULL || single_element_loc_list_p (list))
    {
      /* If this attribute is not a reference nor constant, it is
	 a DWARF expression rather than location description.  For that
	 loc_list_from_tree (value, 0, &context) is needed.  */
      dw_loc_list_ref list2 = loc_list_from_tree (loc: value, want_address: 0, context);
      if (list2 && single_element_loc_list_p (list: list2))
	{
	  if (placeholder_seen)
	    {
	      /* Validate the expression's DW_OP_pick usage against a
		 one-argument DWARF procedure frame (args_count == 1).  */
	      struct dwarf_procedure_info dpi;
	      dpi.fndecl = NULL_TREE;
	      dpi.args_count = 1;
	      if (!resolve_args_picking (loc: list2->expr, initial_frame_offset: 1, dpi: &dpi))
		return;
	    }
	  add_AT_loc (die, attr_kind: attr, loc: list2->expr);
	  return;
	}
    }

  /* If that failed to give a single element location list, fall back to
     outputting this as a reference... still if permitted.  */
  if (list == NULL
      || (forms & dw_scalar_form_reference) == 0
      || placeholder_seen)
    return;

  if (!decl_die)
    {
      /* Materialize an artificial variable DIE to carry the location
	 list, then reference it from ATTR.  */
      if (current_function_decl == 0)
	context_die = comp_unit_die ();
      else
	context_die = lookup_decl_die (decl: current_function_decl);

      decl_die = new_die (tag_value: DW_TAG_variable, parent_die: context_die, t: value);
      add_AT_flag (die: decl_die, attr_kind: DW_AT_artificial, flag: 1);
      add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
			  context_die);
    }

  add_AT_location_description (die: decl_die, attr_kind: DW_AT_location, descr: list);
  add_AT_die_ref (die, attr_kind: attr, targ_die: decl_die);
}
21552 | |
21553 | /* Return the default for DW_AT_lower_bound, or -1 if there is not any |
21554 | default. */ |
21555 | |
21556 | static int |
21557 | lower_bound_default (void) |
21558 | { |
21559 | switch (get_AT_unsigned (die: comp_unit_die (), attr_kind: DW_AT_language)) |
21560 | { |
21561 | case DW_LANG_C: |
21562 | case DW_LANG_C89: |
21563 | case DW_LANG_C99: |
21564 | case DW_LANG_C11: |
21565 | case DW_LANG_C_plus_plus: |
21566 | case DW_LANG_C_plus_plus_11: |
21567 | case DW_LANG_C_plus_plus_14: |
21568 | case DW_LANG_ObjC: |
21569 | case DW_LANG_ObjC_plus_plus: |
21570 | return 0; |
21571 | case DW_LANG_Fortran77: |
21572 | case DW_LANG_Fortran90: |
21573 | case DW_LANG_Fortran95: |
21574 | case DW_LANG_Fortran03: |
21575 | case DW_LANG_Fortran08: |
21576 | return 1; |
21577 | case DW_LANG_UPC: |
21578 | case DW_LANG_D: |
21579 | case DW_LANG_Python: |
21580 | return dwarf_version >= 4 ? 0 : -1; |
21581 | case DW_LANG_Ada95: |
21582 | case DW_LANG_Ada83: |
21583 | case DW_LANG_Cobol74: |
21584 | case DW_LANG_Cobol85: |
21585 | case DW_LANG_Modula2: |
21586 | case DW_LANG_PLI: |
21587 | return dwarf_version >= 4 ? 1 : -1; |
21588 | default: |
21589 | return -1; |
21590 | } |
21591 | } |
21592 | |
/* Given a tree node describing an array bound (either lower or upper) output
   a representation for that bound.  BOUND_ATTR is the attribute to emit
   (DW_AT_lower_bound, DW_AT_upper_bound or DW_AT_count); CONTEXT is as for
   loc_list_from_tree.  */

static void
add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
		tree bound, struct loc_descr_context *context)
{
  int dflt;

  /* This loop only exits through the `return's below: each iteration
     either strips one conversion off BOUND or emits (or omits) the
     bound and returns.  */
  while (1)
    switch (TREE_CODE (bound))
      {
      /* Strip all conversions.  */
      CASE_CONVERT:
      case VIEW_CONVERT_EXPR:
	bound = TREE_OPERAND (bound, 0);
	break;

      /* All fixed-bounds are represented by INTEGER_CST nodes.  Lower bounds
	 are even omitted when they are the default.  */
      case INTEGER_CST:
	/* If the value for this bound is the default one, we can even omit the
	   attribute.  */
	if (bound_attr == DW_AT_lower_bound
	    && tree_fits_shwi_p (bound)
	    && (dflt = lower_bound_default ()) != -1
	    && tree_to_shwi (bound) == dflt)
	  return;

	/* FALLTHRU */

      default:
	/* Let GNAT encodings do the magic for self-referential bounds.  */
	if (is_ada ()
	    && gnat_encodings == DWARF_GNAT_ENCODINGS_ALL
	    && contains_placeholder_p (bound))
	  return;

	/* Emit the bound in whichever permitted scalar form fits.  */
	add_scalar_info (die: subrange_die, attr: bound_attr, value: bound,
			 forms: dw_scalar_form_constant
			 | dw_scalar_form_exprloc
			 | dw_scalar_form_reference,
			 context);
	return;
      }
}
21639 | |
/* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
   possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.

   This function reuses previously set type and bound information if
   available.  */

static void
add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
{
  dw_die_ref child = type_die->die_child;
  struct array_descr_info info;
  int dimension_number;

  /* Ask the language front end for an array descriptor; when none is
     provided, INFO records zero dimensions and the TYPE_DOMAIN data
     below is used as-is.  */
  if (lang_hooks.types.get_array_descr_info)
    {
      memset (s: &info, c: 0, n: sizeof (info));
      if (lang_hooks.types.get_array_descr_info (type, &info))
	/* Fortran sometimes emits array types with no dimension.  */
	gcc_assert (info.ndimensions >= 0
		    && info.ndimensions
		       <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN);
    }
  else
    info.ndimensions = 0;

  /* Walk the (possibly nested) ARRAY_TYPEs, one DW_TAG_subrange_type per
     dimension; only the outermost dimension is handled when COLLAPSE_P
     is false.  */
  for (dimension_number = 0;
       TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
       type = TREE_TYPE (type), dimension_number++)
    {
      tree domain = TYPE_DOMAIN (type);

      /* For Fortran, stop at an inner string-flagged type: it is not a
	 further subscript of the array.  */
      if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
	break;

      /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
	 and (in GNU C only) variable bounds.  Handle all three forms
	 here.  */

      /* Find and reuse a previously generated DW_TAG_subrange_type if
	 available.

	 For multi-dimensional arrays, as we iterate through the
	 various dimensions in the enclosing for loop above, we also
	 iterate through the DIE children and pick at each
	 DW_TAG_subrange_type previously generated (if available).
	 Each child DW_TAG_subrange_type DIE describes the range of
	 the current dimension.  At this point we should have as many
	 DW_TAG_subrange_type's as we have dimensions in the
	 array.  */
      dw_die_ref subrange_die = NULL;
      if (child)
	while (1)
	  {
	    /* The child list is circular: advancing from the last child
	       wraps back to the first (type_die->die_child).  */
	    child = child->die_sib;
	    if (child->die_tag == DW_TAG_subrange_type)
	      subrange_die = child;
	    if (child == type_die->die_child)
	      {
		/* If we wrapped around, stop looking next time.  */
		child = NULL;
		break;
	      }
	    if (child->die_tag == DW_TAG_subrange_type)
	      break;
	  }
      if (!subrange_die)
	subrange_die = new_die (tag_value: DW_TAG_subrange_type, parent_die: type_die, NULL);

      if (domain)
	{
	  /* We have an array type with specified bounds.  */
	  tree lower = TYPE_MIN_VALUE (domain);
	  tree upper = TYPE_MAX_VALUE (domain);
	  tree index_type = TREE_TYPE (domain);

	  /* Prefer the front end's descriptor for this dimension when it
	     provided one.  */
	  if (dimension_number <= info.ndimensions - 1)
	    {
	      lower = info.dimen[dimension_number].lower_bound;
	      upper = info.dimen[dimension_number].upper_bound;
	      index_type = info.dimen[dimension_number].bounds_type;
	    }

	  /* Define the index type.  */
	  if (index_type && !get_AT (die: subrange_die, attr_kind: DW_AT_type))
	    add_type_attribute (subrange_die, index_type, TYPE_UNQUALIFIED,
				false, type_die);

	  /* ??? If upper is NULL, the array has unspecified length,
	     but it does have a lower bound.  This happens with Fortran
	       dimension arr(N:*)
	     Since the debugger is definitely going to need to know N
	     to produce useful results, go ahead and output the lower
	     bound solo, and hope the debugger can cope.  */

	  if (lower && !get_AT (die: subrange_die, attr_kind: DW_AT_lower_bound))
	    add_bound_info (subrange_die, bound_attr: DW_AT_lower_bound, bound: lower, NULL);

	  if (!get_AT (die: subrange_die, attr_kind: DW_AT_upper_bound)
	      && !get_AT (die: subrange_die, attr_kind: DW_AT_count))
	    {
	      if (upper)
		add_bound_info (subrange_die, bound_attr: DW_AT_upper_bound, bound: upper, NULL);
	      else if ((is_c () || is_cxx ()) && COMPLETE_TYPE_P (type))
		/* Zero-length array.  */
		add_bound_info (subrange_die, bound_attr: DW_AT_count,
				bound: build_int_cst (TREE_TYPE (lower), 0), NULL);
	    }
	}

      /* Otherwise we have an array type with an unspecified length.  The
	 DWARF-2 spec does not say how to handle this; let's just leave out the
	 bounds.  */
    }
}
21754 | |
/* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size.
   TREE_NODE is either an aggregate type, a FIELD_DECL, or an ERROR_MARK.
   For a dynamically sized type the attribute may become a DIE reference
   or a DWARF expression rather than a constant.  */

static void
add_byte_size_attribute (dw_die_ref die, tree tree_node)
{
  dw_die_ref decl_die;
  HOST_WIDE_INT size;

  switch (TREE_CODE (tree_node))
    {
    case ERROR_MARK:
      size = 0;
      break;
    case ENUMERAL_TYPE:
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* If the size lives in a variable which already has a DIE, refer
	 to that DIE instead of emitting a constant.  */
      if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
	  && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
	{
	  add_AT_die_ref (die, attr_kind: DW_AT_byte_size, targ_die: decl_die);
	  return;
	}
      size = int_size_in_bytes (tree_node);
      break;
    case FIELD_DECL:
      /* For a data member of a struct or union, the DW_AT_byte_size is
	 generally given as the number of bytes normally allocated for an
	 object of the *declared* type of the member itself.  This is true
	 even for bit-fields.  */
      size = int_size_in_bytes (field_type (decl: tree_node));
      break;
    default:
      gcc_unreachable ();
    }

  /* Note that `size' might be -1 when we get to this point.  If it is, that
     indicates that the byte size of the entity in question is variable.  */
  if (size >= 0)
    add_AT_unsigned (die, attr_kind: DW_AT_byte_size, unsigned_val: size);

  /* Support for dynamically-sized objects was introduced in DWARF3.  */
  else if (TYPE_P (tree_node)
	   && (dwarf_version >= 3 || !dwarf_strict)
	   && gnat_encodings != DWARF_GNAT_ENCODINGS_ALL)
    {
      /* Evaluate the size expression in the context of the type itself,
	 so self-referential sizes can be expressed.  */
      struct loc_descr_context ctx = {
	.context_type: const_cast<tree> (tree_node),	/* context_type */
	NULL_TREE,			/* base_decl */
	NULL,				/* dpi */
	.placeholder_arg: false,		/* placeholder_arg */
	.placeholder_seen: false,		/* placeholder_seen */
	.strict_signedness: false		/* strict_signedness */
      };

      tree tree_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (tree_node));
      add_scalar_info (die, attr: DW_AT_byte_size, value: tree_size,
		       forms: dw_scalar_form_constant
		       | dw_scalar_form_exprloc
		       | dw_scalar_form_reference,
		       context: &ctx);
    }
}
21818 | |
21819 | /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default |
21820 | alignment. */ |
21821 | |
21822 | static void |
21823 | add_alignment_attribute (dw_die_ref die, tree tree_node) |
21824 | { |
21825 | if (dwarf_version < 5 && dwarf_strict) |
21826 | return; |
21827 | |
21828 | unsigned align; |
21829 | |
21830 | if (DECL_P (tree_node)) |
21831 | { |
21832 | if (!DECL_USER_ALIGN (tree_node)) |
21833 | return; |
21834 | |
21835 | align = DECL_ALIGN_UNIT (tree_node); |
21836 | } |
21837 | else if (TYPE_P (tree_node)) |
21838 | { |
21839 | if (!TYPE_USER_ALIGN (tree_node)) |
21840 | return; |
21841 | |
21842 | align = TYPE_ALIGN_UNIT (tree_node); |
21843 | } |
21844 | else |
21845 | gcc_unreachable (); |
21846 | |
21847 | add_AT_unsigned (die, attr_kind: DW_AT_alignment, unsigned_val: align); |
21848 | } |
21849 | |
/* For a FIELD_DECL node which represents a bit-field, output an attribute
   which specifies the distance in bits from the highest order bit of the
   "containing object" for the bit-field to the highest order bit of the
   bit-field itself.

   For any given bit-field, the "containing object" is a hypothetical object
   (of some integral or enum type) within which the given bit-field lives.  The
   type of this hypothetical "containing object" is always the same as the
   declared type of the individual bit-field itself.  The determination of the
   exact location of the "containing object" for a bit-field is rather
   complicated.  It's handled by the `field_byte_offset' function (above).

   Note that it is the size (in bytes) of the hypothetical "containing object"
   which will be given in the DW_AT_byte_size attribute for this bit-field.
   (See `byte_size_attribute' above).  */

static inline void
add_bit_offset_attribute (dw_die_ref die, tree decl)
{
  HOST_WIDE_INT object_offset_in_bytes;
  tree original_type = DECL_BIT_FIELD_TYPE (decl);
  HOST_WIDE_INT bitpos_int;
  HOST_WIDE_INT highest_order_object_bit_offset;
  HOST_WIDE_INT highest_order_field_bit_offset;
  HOST_WIDE_INT bit_offset;

  /* The containing object is within the DECL_CONTEXT.  */
  struct vlr_context ctx = { DECL_CONTEXT (decl), NULL_TREE };

  field_byte_offset (decl, ctx: &ctx, cst_offset: &object_offset_in_bytes);

  /* Must be a field and a bit field.  */
  gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);

  /* We can't yet handle bit-fields whose offsets are variable, so if we
     encounter such things, just return without generating any attribute
     whatsoever.  Likewise for variable or too large size.  */
  if (! tree_fits_shwi_p (bit_position (decl))
      || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
    return;

  bitpos_int = int_bit_position (field: decl);

  /* Note that the bit offset is always the distance (in bits) from the
     highest-order bit of the "containing object" to the highest-order bit of
     the bit-field itself.  Since the "high-order end" of any object or field
     is different on big-endian and little-endian machines, the computation
     below must take account of these differences.  */
  highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
  highest_order_field_bit_offset = bitpos_int;

  if (! BYTES_BIG_ENDIAN)
    {
      /* On little-endian targets the high-order bit sits at the END of
	 each object, so advance both offsets by the corresponding
	 sizes before taking the difference.  */
      highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
      highest_order_object_bit_offset +=
        simple_type_size_in_bits (type: original_type);
    }

  /* The subtraction order flips with endianness so that the result is
     the object-to-field distance in both cases.  */
  bit_offset
    = (! BYTES_BIG_ENDIAN
       ? highest_order_object_bit_offset - highest_order_field_bit_offset
       : highest_order_field_bit_offset - highest_order_object_bit_offset);

  /* A negative offset needs the signed form; otherwise emit unsigned.  */
  if (bit_offset < 0)
    add_AT_int (die, attr_kind: DW_AT_bit_offset, int_val: bit_offset);
  else
    add_AT_unsigned (die, attr_kind: DW_AT_bit_offset, unsigned_val: (unsigned HOST_WIDE_INT) bit_offset);
}
21918 | |
21919 | /* For a FIELD_DECL node which represents a bit field, output an attribute |
21920 | which specifies the length in bits of the given field. */ |
21921 | |
21922 | static inline void |
21923 | add_bit_size_attribute (dw_die_ref die, tree decl) |
21924 | { |
21925 | /* Must be a field and a bit field. */ |
21926 | gcc_assert (TREE_CODE (decl) == FIELD_DECL |
21927 | && DECL_BIT_FIELD_TYPE (decl)); |
21928 | |
21929 | if (tree_fits_uhwi_p (DECL_SIZE (decl))) |
21930 | add_AT_unsigned (die, attr_kind: DW_AT_bit_size, unsigned_val: tree_to_uhwi (DECL_SIZE (decl))); |
21931 | } |
21932 | |
21933 | /* If the compiled language is ANSI C, then add a 'prototyped' |
21934 | attribute, if arg types are given for the parameters of a function. */ |
21935 | |
21936 | static inline void |
21937 | add_prototyped_attribute (dw_die_ref die, tree func_type) |
21938 | { |
21939 | switch (get_AT_unsigned (die: comp_unit_die (), attr_kind: DW_AT_language)) |
21940 | { |
21941 | case DW_LANG_C: |
21942 | case DW_LANG_C89: |
21943 | case DW_LANG_C99: |
21944 | case DW_LANG_C11: |
21945 | case DW_LANG_ObjC: |
21946 | if (prototype_p (func_type)) |
21947 | add_AT_flag (die, attr_kind: DW_AT_prototyped, flag: 1); |
21948 | break; |
21949 | default: |
21950 | break; |
21951 | } |
21952 | } |
21953 | |
/* Add an 'abstract_origin' attribute below a given DIE.  The DIE is found
   by looking in the type declaration, the object declaration equate table or
   the block mapping.  ORIGIN may be a decl, a type or a BLOCK; if no DIE
   can be found for it, no attribute is added (see the XXX note below).  */

static inline void
add_abstract_origin_attribute (dw_die_ref die, tree origin)
{
  dw_die_ref origin_die = NULL;

  /* For late LTO debug output we want to refer directly to the abstract
     DIE in the early debug rather to the possibly existing concrete
     instance and avoid creating that just for this purpose.  */
  sym_off_pair *desc;
  if (in_lto_p
      && external_die_map
      && (desc = external_die_map->get (k: origin)))
    {
      add_AT_external_die_ref (die, attr_kind: DW_AT_abstract_origin,
			       symbol: desc->sym, offset: desc->off);
      return;
    }

  /* Pick the lookup table matching the kind of tree node ORIGIN is.  */
  if (DECL_P (origin))
    origin_die = lookup_decl_die (decl: origin);
  else if (TYPE_P (origin))
    origin_die = lookup_type_die (type: origin);
  else if (TREE_CODE (origin) == BLOCK)
    origin_die = lookup_block_die (block: origin);

  /* XXX: Functions that are never lowered don't always have correct block
     trees (in the case of java, they simply have no block tree, in some other
     languages).  For these functions, there is nothing we can really do to
     output correct debug info for inlined functions in all cases.  Rather
     than die, we'll just produce deficient debug info now, in that we will
     have variables without a proper abstract origin.  In the future, when all
     functions are lowered, we should re-add a gcc_assert (origin_die)
     here.  */

  if (origin_die)
    {
      dw_attr_node *a;
      /* Like above, if we already created a concrete instance DIE
	 do not use that for the abstract origin but the early DIE
	 if present.  */
      if (in_lto_p
	  && (a = get_AT (die: origin_die, attr_kind: DW_AT_abstract_origin)))
	origin_die = AT_ref (a);
      add_AT_die_ref (die, attr_kind: DW_AT_abstract_origin, targ_die: origin_die);
    }
}
22004 | |
22005 | /* We do not currently support the pure_virtual attribute. */ |
22006 | |
22007 | static inline void |
22008 | add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl) |
22009 | { |
22010 | if (DECL_VINDEX (func_decl)) |
22011 | { |
22012 | add_AT_unsigned (die, attr_kind: DW_AT_virtuality, unsigned_val: DW_VIRTUALITY_virtual); |
22013 | |
22014 | if (tree_fits_shwi_p (DECL_VINDEX (func_decl))) |
22015 | add_AT_loc (die, attr_kind: DW_AT_vtable_elem_location, |
22016 | loc: new_loc_descr (op: DW_OP_constu, |
22017 | oprnd1: tree_to_shwi (DECL_VINDEX (func_decl)), |
22018 | oprnd2: 0)); |
22019 | |
22020 | /* GNU extension: Record what type this method came from originally. */ |
22021 | if (debug_info_level > DINFO_LEVEL_TERSE |
22022 | && DECL_CONTEXT (func_decl)) |
22023 | add_AT_die_ref (die, attr_kind: DW_AT_containing_type, |
22024 | targ_die: lookup_type_die (DECL_CONTEXT (func_decl))); |
22025 | } |
22026 | } |
22027 | |
22028 | /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the |
22029 | given decl. This used to be a vendor extension until after DWARF 4 |
22030 | standardized it. */ |
22031 | |
22032 | static void |
22033 | add_linkage_attr (dw_die_ref die, tree decl) |
22034 | { |
22035 | const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)); |
22036 | |
22037 | /* Mimic what assemble_name_raw does with a leading '*'. */ |
22038 | if (name[0] == '*') |
22039 | name = &name[1]; |
22040 | |
22041 | if (dwarf_version >= 4) |
22042 | add_AT_string (die, attr_kind: DW_AT_linkage_name, str: name); |
22043 | else |
22044 | add_AT_string (die, attr_kind: DW_AT_MIPS_linkage_name, str: name); |
22045 | } |
22046 | |
22047 | /* Add source coordinate attributes for the given decl. */ |
22048 | |
22049 | static void |
22050 | add_src_coords_attributes (dw_die_ref die, tree decl) |
22051 | { |
22052 | expanded_location s; |
22053 | |
22054 | if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION) |
22055 | return; |
22056 | s = expand_location (DECL_SOURCE_LOCATION (decl)); |
22057 | add_AT_file (die, attr_kind: DW_AT_decl_file, fd: lookup_filename (s.file)); |
22058 | add_AT_unsigned (die, attr_kind: DW_AT_decl_line, unsigned_val: s.line); |
22059 | if (debug_column_info && s.column) |
22060 | add_AT_unsigned (die, attr_kind: DW_AT_decl_column, unsigned_val: s.column); |
22061 | } |
22062 | |
22063 | /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */ |
22064 | |
22065 | static void |
22066 | add_linkage_name_raw (dw_die_ref die, tree decl) |
22067 | { |
22068 | /* Defer until we have an assembler name set. */ |
22069 | if (!DECL_ASSEMBLER_NAME_SET_P (decl)) |
22070 | { |
22071 | limbo_die_node *asm_name; |
22072 | |
22073 | asm_name = ggc_cleared_alloc<limbo_die_node> (); |
22074 | asm_name->die = die; |
22075 | asm_name->created_for = decl; |
22076 | asm_name->next = deferred_asm_name; |
22077 | deferred_asm_name = asm_name; |
22078 | } |
22079 | else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)) |
22080 | add_linkage_attr (die, decl); |
22081 | } |
22082 | |
22083 | /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */ |
22084 | |
22085 | static void |
22086 | add_linkage_name (dw_die_ref die, tree decl) |
22087 | { |
22088 | if (debug_info_level > DINFO_LEVEL_NONE |
22089 | && VAR_OR_FUNCTION_DECL_P (decl) |
22090 | && TREE_PUBLIC (decl) |
22091 | && !(VAR_P (decl) && DECL_REGISTER (decl)) |
22092 | && die->die_tag != DW_TAG_member) |
22093 | add_linkage_name_raw (die, decl); |
22094 | } |
22095 | |
/* Add a DW_AT_name attribute and source coordinate attribute for the
   given decl, but only if it actually has a name.  NO_LINKAGE_NAME
   suppresses the DW_AT_linkage_name attribute even for public decls.
   Nameless decls get a DW_AT_description instead.  */

static void
add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
				    bool no_linkage_name)
{
  tree decl_name;

  decl_name = DECL_NAME (decl);
  if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
    {
      const char *name = dwarf2_name (decl, scope: 0);
      if (name)
	add_name_attribute (die, name_string: name);
      else
	/* The language hook declined to produce a printable name;
	   fall back to a description.  */
	add_desc_attribute (die, decl);

      /* Compiler-generated decls have no meaningful source location.  */
      if (! DECL_ARTIFICIAL (decl))
	add_src_coords_attributes (die, decl);

      if (!no_linkage_name)
	add_linkage_name (die, decl);
    }
  else
    add_desc_attribute (die, decl);

#ifdef VMS_DEBUGGING_INFO
  /* Get the function's name, as described by its RTL.  This may be different
     from the DECL_NAME name used in the source file.  */
  if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
    {
      add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
		   XEXP (DECL_RTL (decl), 0), false);
      vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
    }
#endif /* VMS_DEBUGGING_INFO */
}
22134 | |
22135 | /* Add VALUE as a DW_AT_discr_value attribute to DIE. */ |
22136 | |
22137 | static void |
22138 | add_discr_value (dw_die_ref die, dw_discr_value *value) |
22139 | { |
22140 | dw_attr_node attr; |
22141 | |
22142 | attr.dw_attr = DW_AT_discr_value; |
22143 | attr.dw_attr_val.val_class = dw_val_class_discr_value; |
22144 | attr.dw_attr_val.val_entry = NULL; |
22145 | attr.dw_attr_val.v.val_discr_value.pos = value->pos; |
22146 | if (value->pos) |
22147 | attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval; |
22148 | else |
22149 | attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval; |
22150 | add_dwarf_attr (die, attr: &attr); |
22151 | } |
22152 | |
22153 | /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */ |
22154 | |
22155 | static void |
22156 | add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list) |
22157 | { |
22158 | dw_attr_node attr; |
22159 | |
22160 | attr.dw_attr = DW_AT_discr_list; |
22161 | attr.dw_attr_val.val_class = dw_val_class_discr_list; |
22162 | attr.dw_attr_val.val_entry = NULL; |
22163 | attr.dw_attr_val.v.val_discr_list = discr_list; |
22164 | add_dwarf_attr (die, attr: &attr); |
22165 | } |
22166 | |
/* Return the discriminant list held by ATTR, which must be a
   dw_val_class_discr_list attribute.  */

static inline dw_discr_list_ref
AT_discr_list (dw_attr_node *attr)
{
  return attr->dw_attr_val.v.val_discr_list;
}
22172 | |
22173 | #ifdef VMS_DEBUGGING_INFO |
/* Output the debug main pointer die for VMS: a synthetic DW_TAG_subprogram
   whose entry point is the end-of-prologue label of the current function,
   inserted as the first child of the compilation unit DIE.  */

void
dwarf2out_vms_debug_main_pointer (void)
{
  char label[MAX_ARTIFICIAL_LABEL_BYTES];
  dw_die_ref die;

  /* Allocate the VMS debug main subprogram die.  */
  die = new_die_raw (DW_TAG_subprogram);
  add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
  ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
			       current_function_funcdef_no);
  add_AT_lbl_id (die, DW_AT_entry_pc, label);

  /* Make it the first child of comp_unit_die ().  */
  die->die_parent = comp_unit_die ();
  if (comp_unit_die ()->die_child)
    {
      /* NOTE(review): this splices DIE right after die_child, which
	 appears to be the last child of a circular sibling list (so DIE
	 becomes the first child) — confirm against the die_sib
	 invariants documented elsewhere in this file.  */
      die->die_sib = comp_unit_die ()->die_child->die_sib;
      comp_unit_die ()->die_child->die_sib = die;
    }
  else
    {
      /* Sole child: the circular sibling list points at itself.  */
      die->die_sib = die;
      comp_unit_die ()->die_child = die;
    }
}
22202 | #endif /* VMS_DEBUGGING_INFO */ |
22203 | |
22204 | /* walk_tree helper function for uses_local_type, below. */ |
22205 | |
22206 | static tree |
22207 | uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) |
22208 | { |
22209 | if (!TYPE_P (*tp)) |
22210 | *walk_subtrees = 0; |
22211 | else |
22212 | { |
22213 | tree name = TYPE_NAME (*tp); |
22214 | if (name && DECL_P (name) && decl_function_context (name)) |
22215 | return *tp; |
22216 | } |
22217 | return NULL_TREE; |
22218 | } |
22219 | |
22220 | /* If TYPE involves a function-local type (including a local typedef to a |
22221 | non-local type), returns that type; otherwise returns NULL_TREE. */ |
22222 | |
22223 | static tree |
22224 | uses_local_type (tree type) |
22225 | { |
22226 | tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL); |
22227 | return used; |
22228 | } |
22229 | |
/* Return the DIE for the scope that immediately contains this type.
   Non-named types that do not involve a function-local type get global
   scope.  Named types nested in namespaces or other types get their
   containing scope.  All other types (i.e. function-local named types) get
   the current active scope, CONTEXT_DIE.  */

static dw_die_ref
scope_die_for (tree t, dw_die_ref context_die)
{
  dw_die_ref scope_die = NULL;
  tree containing_scope;

  /* Non-types always go in the current scope.  */
  gcc_assert (TYPE_P (t));

  /* Use the scope of the typedef, rather than the scope of the type
     it refers to.  */
  if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
    containing_scope = DECL_CONTEXT (TYPE_NAME (t));
  else
    containing_scope = TYPE_CONTEXT (t);

  /* Use the containing namespace if there is one.  */
  if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
    {
      if (context_die == lookup_decl_die (decl: containing_scope))
	/* OK */;
      else if (debug_info_level > DINFO_LEVEL_TERSE)
	context_die = get_context_die (containing_scope);
      else
	/* At -g1 we don't emit namespace DIEs; drop the scope.  */
	containing_scope = NULL_TREE;
    }

  /* Ignore function type "scopes" from the C frontend.  They mean that
     a tagged type is local to a parmlist of a function declarator, but
     that isn't useful to DWARF.  */
  if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
    containing_scope = NULL_TREE;

  if (SCOPE_FILE_SCOPE_P (containing_scope))
    {
      /* If T uses a local type keep it local as well, to avoid references
	 to function-local DIEs from outside the function.  */
      if (current_function_decl && uses_local_type (type: t))
	scope_die = context_die;
      else
	scope_die = comp_unit_die ();
    }
  else if (TYPE_P (containing_scope))
    {
      /* For types, we can just look up the appropriate DIE.  */
      if (debug_info_level > DINFO_LEVEL_TERSE)
	scope_die = get_context_die (containing_scope);
      else
	{
	  /* At -g1 the enclosing type's DIE may not exist; fall back to
	     the compilation unit rather than forcing it into being.  */
	  scope_die = lookup_type_die_strip_naming_typedef (type: containing_scope);
	  if (scope_die == NULL)
	    scope_die = comp_unit_die ();
	}
    }
  else
    scope_die = context_die;

  return scope_die;
}
22295 | |
22296 | /* Returns true if CONTEXT_DIE is internal to a function. */ |
22297 | |
22298 | static inline bool |
22299 | local_scope_p (dw_die_ref context_die) |
22300 | { |
22301 | for (; context_die; context_die = context_die->die_parent) |
22302 | if (context_die->die_tag == DW_TAG_inlined_subroutine |
22303 | || context_die->die_tag == DW_TAG_subprogram) |
22304 | return true; |
22305 | |
22306 | return false; |
22307 | } |
22308 | |
22309 | /* Returns true if CONTEXT_DIE is a class. */ |
22310 | |
22311 | static inline bool |
22312 | class_scope_p (dw_die_ref context_die) |
22313 | { |
22314 | return (context_die |
22315 | && (context_die->die_tag == DW_TAG_structure_type |
22316 | || context_die->die_tag == DW_TAG_class_type |
22317 | || context_die->die_tag == DW_TAG_interface_type |
22318 | || context_die->die_tag == DW_TAG_union_type)); |
22319 | } |
22320 | |
22321 | /* Returns true if CONTEXT_DIE is a class or namespace, for deciding |
22322 | whether or not to treat a DIE in this context as a declaration. */ |
22323 | |
22324 | static inline bool |
22325 | class_or_namespace_scope_p (dw_die_ref context_die) |
22326 | { |
22327 | return (class_scope_p (context_die) |
22328 | || (context_die && context_die->die_tag == DW_TAG_namespace)); |
22329 | } |
22330 | |
/* Many forms of DIEs require a "type description" attribute.  This
   routine locates the proper "type descriptor" die for the type given
   by 'type' plus any additional qualifiers given by 'cv_quals', and
   adds a DW_AT_type attribute below the given die.  REVERSE selects
   the reverse-storage-order variant of the type.  No attribute is
   emitted at -g1, for erroneous types, or for void.  */

static void
add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
		    bool reverse, dw_die_ref context_die)
{
  enum tree_code code = TREE_CODE (type);
  dw_die_ref type_die = NULL;

  if (debug_info_level <= DINFO_LEVEL_TERSE)
    return;

  /* ??? If this type is an unnamed subrange type of an integral, floating-point
     or fixed-point type, use the inner type.  This is because we have no
     support for unnamed types in base_type_die.  This can happen if this is
     an Ada subrange type.  Correct solution is emit a subrange type die.  */
  if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
      && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
    type = TREE_TYPE (type), code = TREE_CODE (type);

  if (code == ERROR_MARK
      /* Handle a special case.  For functions whose return type is void, we
	 generate *no* type attribute.  (Note that no object may have type
	 `void', so this only applies to function return types).  */
      || code == VOID_TYPE)
    return;

  /* Merge the requested qualifiers with those already on the type.  */
  type_die = modified_type_die (type,
				cv_quals: cv_quals | TYPE_QUALS (type),
				reverse,
				context_die);

  if (type_die != NULL)
    add_AT_die_ref (die: object_die, attr_kind: DW_AT_type, targ_die: type_die);
}
22369 | |
/* Given an object die, add the calling convention attribute for the
   function call type.  The value comes from the target hook; the Fortran
   main program additionally gets DW_CC_program and, when allowed,
   DW_AT_main_subprogram.  */
static void
add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
{
  enum dwarf_calling_convention value = DW_CC_normal;

  /* Ask the target whether this function uses a non-default convention.  */
  value = ((enum dwarf_calling_convention)
	   targetm.dwarf_calling_convention (TREE_TYPE (decl)));

  if (is_fortran ()
      && id_equal (DECL_ASSEMBLER_NAME (decl), str: "MAIN__" ))
    {
      /* DWARF 2 doesn't provide a way to identify a program's source-level
	 entry point.  DW_AT_calling_convention attributes are only meant
	 to describe functions' calling conventions.  However, lacking a
	 better way to signal the Fortran main program, we used this for
	 a long time, following existing custom.  Now, DWARF 4 has
	 DW_AT_main_subprogram, which we add below, but some tools still
	 rely on the old way, which we thus keep.  */
      value = DW_CC_program;

      if (dwarf_version >= 4 || !dwarf_strict)
	add_AT_flag (die: subr_die, attr_kind: DW_AT_main_subprogram, flag: 1);
    }

  /* Only add the attribute if the backend requests it, and
     is not DW_CC_normal.  */
  if (value && (value != DW_CC_normal))
    add_AT_unsigned (die: subr_die, attr_kind: DW_AT_calling_convention, unsigned_val: value);
}
22401 | |
22402 | /* Given a tree pointer to a struct, class, union, or enum type node, return |
22403 | a pointer to the (string) tag name for the given type, or zero if the type |
22404 | was declared without a tag. */ |
22405 | |
22406 | static const char * |
22407 | type_tag (const_tree type) |
22408 | { |
22409 | const char *name = 0; |
22410 | |
22411 | if (TYPE_NAME (type) != 0) |
22412 | { |
22413 | tree t = 0; |
22414 | |
22415 | /* Find the IDENTIFIER_NODE for the type name. */ |
22416 | if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE |
22417 | && !TYPE_NAMELESS (type)) |
22418 | t = TYPE_NAME (type); |
22419 | |
22420 | /* The g++ front end makes the TYPE_NAME of *each* tagged type point to |
22421 | a TYPE_DECL node, regardless of whether or not a `typedef' was |
22422 | involved. */ |
22423 | else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL |
22424 | && ! DECL_IGNORED_P (TYPE_NAME (type))) |
22425 | { |
22426 | /* We want to be extra verbose. Don't call dwarf_name if |
22427 | DECL_NAME isn't set. The default hook for decl_printable_name |
22428 | doesn't like that, and in this context it's correct to return |
22429 | 0, instead of "<anonymous>" or the like. */ |
22430 | if (DECL_NAME (TYPE_NAME (type)) |
22431 | && !DECL_NAMELESS (TYPE_NAME (type))) |
22432 | name = lang_hooks.dwarf_name (TYPE_NAME (type), 2); |
22433 | } |
22434 | |
22435 | /* Now get the name as a string, or invent one. */ |
22436 | if (!name && t != 0) |
22437 | name = IDENTIFIER_POINTER (t); |
22438 | } |
22439 | |
22440 | return (name == 0 || *name == '\0') ? 0 : name; |
22441 | } |
22442 | |
22443 | /* Return the type associated with a data member, make a special check |
22444 | for bit field types. */ |
22445 | |
22446 | static inline tree |
22447 | member_declared_type (const_tree member) |
22448 | { |
22449 | return (DECL_BIT_FIELD_TYPE (member) |
22450 | ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member)); |
22451 | } |
22452 | |
22453 | /* Get the decl's label, as described by its RTL. This may be different |
22454 | from the DECL_NAME name used in the source file. */ |
22455 | |
22456 | #if 0 |
22457 | static const char * |
22458 | decl_start_label (tree decl) |
22459 | { |
22460 | rtx x; |
22461 | const char *fnname; |
22462 | |
22463 | x = DECL_RTL (decl); |
22464 | gcc_assert (MEM_P (x)); |
22465 | |
22466 | x = XEXP (x, 0); |
22467 | gcc_assert (GET_CODE (x) == SYMBOL_REF); |
22468 | |
22469 | fnname = XSTR (x, 0); |
22470 | return fnname; |
22471 | } |
22472 | #endif |
22473 | |
22474 | /* For variable-length arrays that have been previously generated, but |
22475 | may be incomplete due to missing subscript info, fill the subscript |
22476 | info. Return TRUE if this is one of those cases. */ |
22477 | |
22478 | static bool |
22479 | fill_variable_array_bounds (tree type) |
22480 | { |
22481 | if (TREE_ASM_WRITTEN (type) |
22482 | && TREE_CODE (type) == ARRAY_TYPE |
22483 | && variably_modified_type_p (type, NULL)) |
22484 | { |
22485 | dw_die_ref array_die = lookup_type_die (type); |
22486 | if (!array_die) |
22487 | return false; |
22488 | add_subscript_info (type_die: array_die, type, collapse_p: !is_ada ()); |
22489 | return true; |
22490 | } |
22491 | return false; |
22492 | } |
22493 | |
/* These routines generate the internal representation of the DIE's for
   the compilation unit.  Debugging information is collected by walking
   the declaration trees passed in from dwarf2out_decl().  */

/* Generate a DIE for TYPE, an array (or vector) type, under CONTEXT_DIE.
   Fortran kind-1 character arrays are emitted as DW_TAG_string_type;
   everything else gets DW_TAG_array_type with subscript children.  */

static void
gen_array_type_die (tree type, dw_die_ref context_die)
{
  dw_die_ref array_die;

  /* GNU compilers represent multidimensional array types as sequences of one
     dimensional array types whose element types are themselves array types.
     We sometimes squish that down to a single array_type DIE with multiple
     subscripts in the Dwarf debugging info.  The draft Dwarf specification
     say that we are allowed to do this kind of compression in C, because
     there is no difference between an array of arrays and a multidimensional
     array.  We don't do this for Ada to remain as close as possible to the
     actual representation, which is especially important against the language
     flexibilty wrt arrays of variable size.  */

  bool collapse_nested_arrays = !is_ada ();

  /* A previously-emitted VLA DIE may only need its bounds filled in.  */
  if (fill_variable_array_bounds (type))
    return;

  dw_die_ref scope_die = scope_die_for (t: type, context_die);
  tree element_type;

  /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
     DW_TAG_string_type doesn't have DW_AT_type attribute).  */
  if (TREE_CODE (type) == ARRAY_TYPE
      && TYPE_STRING_FLAG (type)
      && is_fortran ()
      && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
    {
      HOST_WIDE_INT size;

      array_die = new_die (tag_value: DW_TAG_string_type, parent_die: scope_die, t: type);
      add_name_attribute (die: array_die, name_string: type_tag (type));
      equate_type_number_to_die (type, type_die: array_die);
      size = int_size_in_bytes (type);
      if (size >= 0)
	add_AT_unsigned (die: array_die, attr_kind: DW_AT_byte_size, unsigned_val: size);
      /* ??? We can't annotate types late, but for LTO we may not
	 generate a location early either (gfortran.dg/save_6.f90).  */
      else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
	       && TYPE_DOMAIN (type) != NULL_TREE
	       && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
	{
	  /* Variable length: describe it via DW_AT_string_length, a
	     location referring to the decl holding the length.  */
	  tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  tree rszdecl = szdecl;

	  size = int_size_in_bytes (TREE_TYPE (szdecl));
	  if (!DECL_P (szdecl))
	    {
	      /* The length may be accessed through a pointer decl; only
		 usable when that pointer is address-sized.  */
	      if (INDIRECT_REF_P (szdecl)
		  && DECL_P (TREE_OPERAND (szdecl, 0)))
		{
		  rszdecl = TREE_OPERAND (szdecl, 0);
		  if (int_size_in_bytes (TREE_TYPE (rszdecl))
		      != DWARF2_ADDR_SIZE)
		    size = 0;
		}
	      else
		size = 0;
	    }
	  if (size > 0)
	    {
	      dw_loc_list_ref loc
		= loc_list_from_tree (loc: rszdecl, want_address: szdecl == rszdecl ? 2 : 0,
				      NULL);
	      if (loc)
		{
		  add_AT_location_description (die: array_die, attr_kind: DW_AT_string_length,
					       descr: loc);
		  if (size != DWARF2_ADDR_SIZE)
		    add_AT_unsigned (die: array_die, dwarf_version >= 5
				     ? DW_AT_string_length_byte_size
				     : DW_AT_byte_size, unsigned_val: size);
		}
	    }
	}
      return;
    }

  array_die = new_die (tag_value: DW_TAG_array_type, parent_die: scope_die, t: type);
  add_name_attribute (die: array_die, name_string: type_tag (type));
  equate_type_number_to_die (type, type_die: array_die);

  if (VECTOR_TYPE_P (type))
    add_AT_flag (die: array_die, attr_kind: DW_AT_GNU_vector, flag: 1);

  /* For Fortran multidimensional arrays use DW_ORD_col_major ordering.  */
  if (is_fortran ()
      && TREE_CODE (type) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
      && !TYPE_STRING_FLAG (TREE_TYPE (type)))
    add_AT_unsigned (die: array_die, attr_kind: DW_AT_ordering, unsigned_val: DW_ORD_col_major);

#if 0
  /* We default the array ordering.  Debuggers will probably do the right
     things even if DW_AT_ordering is not present.  It's not even an issue
     until we start to get into multidimensional arrays anyway.  If a debugger
     is ever caught doing the Wrong Thing for multi-dimensional arrays,
     then we'll have to put the DW_AT_ordering attribute back in.  (But if
     and when we find out that we need to put these in, we will only do so
     for multidimensional arrays.  */
  add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
#endif

  if (VECTOR_TYPE_P (type))
    {
      /* For VECTOR_TYPEs we use an array DIE with appropriate bounds.  */
      dw_die_ref subrange_die = new_die (tag_value: DW_TAG_subrange_type, parent_die: array_die, NULL);
      int lb = lower_bound_default ();
      if (lb == -1)
	lb = 0;
      add_bound_info (subrange_die, bound_attr: DW_AT_lower_bound, size_int (lb), NULL);
      add_bound_info (subrange_die, bound_attr: DW_AT_upper_bound,
		      size_int (lb + TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
    }
  else
    add_subscript_info (type_die: array_die, type, collapse_p: collapse_nested_arrays);

  /* Add representation of the type of the elements of this array type and
     emit the corresponding DIE if we haven't done it already.  */
  element_type = TREE_TYPE (type);
  if (collapse_nested_arrays)
    while (TREE_CODE (element_type) == ARRAY_TYPE)
      {
	/* Fortran character arrays keep their string element type.  */
	if (TYPE_STRING_FLAG (element_type) && is_fortran ())
	  break;
	element_type = TREE_TYPE (element_type);
      }

  add_type_attribute (object_die: array_die, type: element_type, cv_quals: TYPE_UNQUALIFIED,
		      TREE_CODE (type) == ARRAY_TYPE
		      && TYPE_REVERSE_STORAGE_ORDER (type),
		      context_die);

  add_gnat_descriptive_type_attribute (die: array_die, type, context_die);
  if (TYPE_ARTIFICIAL (type))
    add_AT_flag (die: array_die, attr_kind: DW_AT_artificial, flag: 1);

  if (get_AT (die: array_die, attr_kind: DW_AT_name))
    add_pubtype (decl: type, die: array_die);

  add_alignment_attribute (die: array_die, tree_node: type);
}
22642 | |
/* This routine generates DIE for array with hidden descriptor, details
   are filled into *info by a langhook.  Descriptor fields (data location,
   allocation status, strides, rank) are emitted only under the DWARF
   versions that support them.  */

static void
gen_descr_array_type_die (tree type, struct array_descr_info *info,
			  dw_die_ref context_die)
{
  const dw_die_ref scope_die = scope_die_for (t: type, context_die);
  const dw_die_ref array_die = new_die (tag_value: DW_TAG_array_type, parent_die: scope_die, t: type);
  /* Evaluation context for location expressions taken from *INFO.  */
  struct loc_descr_context context = {
    .context_type: type,	/* context_type */
    .base_decl: info->base_decl,	/* base_decl */
    NULL,	/* dpi */
    .placeholder_arg: false,	/* placeholder_arg */
    .placeholder_seen: false,	/* placeholder_seen */
    .strict_signedness: false	/* strict_signedness */
  };
  enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
  int dim;

  add_name_attribute (die: array_die, name_string: type_tag (type));
  equate_type_number_to_die (type, type_die: array_die);

  /* Ordering only matters for multidimensional arrays.  */
  if (info->ndimensions > 1)
    switch (info->ordering)
      {
      case array_descr_ordering_row_major:
	add_AT_unsigned (die: array_die, attr_kind: DW_AT_ordering, unsigned_val: DW_ORD_row_major);
	break;
      case array_descr_ordering_column_major:
	add_AT_unsigned (die: array_die, attr_kind: DW_AT_ordering, unsigned_val: DW_ORD_col_major);
	break;
      default:
	break;
      }

  /* These descriptor attributes are DWARF 3 additions.  */
  if (dwarf_version >= 3 || !dwarf_strict)
    {
      if (info->data_location)
	add_scalar_info (die: array_die, attr: DW_AT_data_location, value: info->data_location,
			 forms: dw_scalar_form_exprloc, context: &context);
      if (info->associated)
	add_scalar_info (die: array_die, attr: DW_AT_associated, value: info->associated,
			 forms: dw_scalar_form_constant
			 | dw_scalar_form_exprloc
			 | dw_scalar_form_reference, context: &context);
      if (info->allocated)
	add_scalar_info (die: array_die, attr: DW_AT_allocated, value: info->allocated,
			 forms: dw_scalar_form_constant
			 | dw_scalar_form_exprloc
			 | dw_scalar_form_reference, context: &context);
      if (info->stride)
	{
	  const enum dwarf_attribute attr
	    = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
	  const int forms
	    = (info->stride_in_bits)
	      ? dw_scalar_form_constant
	      : (dw_scalar_form_constant
		 | dw_scalar_form_exprloc
		 | dw_scalar_form_reference);

	  add_scalar_info (die: array_die, attr, value: info->stride, forms, context: &context);
	}
    }
  /* DW_AT_rank (assumed-rank arrays) is a DWARF 5 addition; such
     dimensions use DW_TAG_generic_subrange children.  */
  if (dwarf_version >= 5)
    {
      if (info->rank)
	{
	  add_scalar_info (die: array_die, attr: DW_AT_rank, value: info->rank,
			   forms: dw_scalar_form_constant
			   | dw_scalar_form_exprloc, context: &context);
	  subrange_tag = DW_TAG_generic_subrange;
	  context.placeholder_arg = true;
	}
    }

  add_gnat_descriptive_type_attribute (die: array_die, type, context_die);

  /* One subrange child per dimension, with bounds and stride taken
     from the descriptor.  */
  for (dim = 0; dim < info->ndimensions; dim++)
    {
      dw_die_ref subrange_die = new_die (tag_value: subrange_tag, parent_die: array_die, NULL);

      if (info->dimen[dim].bounds_type)
	add_type_attribute (object_die: subrange_die,
			    type: info->dimen[dim].bounds_type, cv_quals: TYPE_UNQUALIFIED,
			    reverse: false, context_die);
      if (info->dimen[dim].lower_bound)
	add_bound_info (subrange_die, bound_attr: DW_AT_lower_bound,
			bound: info->dimen[dim].lower_bound, context: &context);
      if (info->dimen[dim].upper_bound)
	add_bound_info (subrange_die, bound_attr: DW_AT_upper_bound,
			bound: info->dimen[dim].upper_bound, context: &context);
      if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
	add_scalar_info (die: subrange_die, attr: DW_AT_byte_stride,
			 value: info->dimen[dim].stride,
			 forms: dw_scalar_form_constant
			 | dw_scalar_form_exprloc
			 | dw_scalar_form_reference,
			 context: &context);
    }

  gen_type_die (info->element_type, context_die);
  add_type_attribute (object_die: array_die, type: info->element_type, cv_quals: TYPE_UNQUALIFIED,
		      TREE_CODE (type) == ARRAY_TYPE
		      && TYPE_REVERSE_STORAGE_ORDER (type),
		      context_die);

  if (get_AT (die: array_die, attr_kind: DW_AT_name))
    add_pubtype (decl: type, die: array_die);

  add_alignment_attribute (die: array_die, tree_node: type);
}
22756 | |
22757 | #if 0 |
22758 | static void |
22759 | gen_entry_point_die (tree decl, dw_die_ref context_die) |
22760 | { |
22761 | tree origin = decl_ultimate_origin (decl); |
22762 | dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl); |
22763 | |
22764 | if (origin != NULL) |
22765 | add_abstract_origin_attribute (decl_die, origin); |
22766 | else |
22767 | { |
22768 | add_name_and_src_coords_attributes (decl_die, decl); |
22769 | add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)), |
22770 | TYPE_UNQUALIFIED, false, context_die); |
22771 | } |
22772 | |
22773 | if (DECL_ABSTRACT_P (decl)) |
22774 | equate_decl_number_to_die (decl, decl_die); |
22775 | else |
22776 | add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl)); |
22777 | } |
22778 | #endif |
22779 | |
22780 | /* Walk through the list of incomplete types again, trying once more to |
22781 | emit full debugging info for them. */ |
22782 | |
22783 | static void |
22784 | retry_incomplete_types (void) |
22785 | { |
22786 | set_early_dwarf s; |
22787 | int i; |
22788 | |
22789 | for (i = vec_safe_length (v: incomplete_types) - 1; i >= 0; i--) |
22790 | if (should_emit_struct_debug (type: (*incomplete_types)[i], usage: DINFO_USAGE_DIR_USE)) |
22791 | gen_type_die ((*incomplete_types)[i], comp_unit_die ()); |
22792 | vec_safe_truncate (v: incomplete_types, size: 0); |
22793 | } |
22794 | |
22795 | /* Determine what tag to use for a record type. */ |
22796 | |
22797 | static enum dwarf_tag |
22798 | record_type_tag (tree type) |
22799 | { |
22800 | if (! lang_hooks.types.classify_record) |
22801 | return DW_TAG_structure_type; |
22802 | |
22803 | switch (lang_hooks.types.classify_record (type)) |
22804 | { |
22805 | case RECORD_IS_STRUCT: |
22806 | return DW_TAG_structure_type; |
22807 | |
22808 | case RECORD_IS_CLASS: |
22809 | return DW_TAG_class_type; |
22810 | |
22811 | case RECORD_IS_INTERFACE: |
22812 | if (dwarf_version >= 3 || !dwarf_strict) |
22813 | return DW_TAG_interface_type; |
22814 | return DW_TAG_structure_type; |
22815 | |
22816 | default: |
22817 | gcc_unreachable (); |
22818 | } |
22819 | } |
22820 | |
/* Generate a DIE to represent an enumeration type.  Note that these DIEs
   include all of the information about the enumeration values also.  Each
   enumerated type name/value is listed as a child of the enumerated type
   DIE.  Returns the type's DIE, which may have existed already.  */

static dw_die_ref
gen_enumeration_type_die (tree type, dw_die_ref context_die)
{
  dw_die_ref type_die = lookup_type_die (type);
  /* Non-NULL iff we are completing a previously created DIE; several
     attributes below are only added when not already present on it.  */
  dw_die_ref orig_type_die = type_die;

  if (type_die == NULL)
    {
      /* First time we see this enumeration: create its DIE in the
	 appropriate scope and register it.  */
      type_die = new_die (tag_value: DW_TAG_enumeration_type,
			  parent_die: scope_die_for (t: type, context_die), t: type);
      equate_type_number_to_die (type, type_die);
      add_name_attribute (die: type_die, name_string: type_tag (type));
      /* DW_AT_enum_class marks C++11 scoped enums; a DWARF 4 addition.  */
      if ((dwarf_version >= 4 || !dwarf_strict)
	  && ENUM_IS_SCOPED (type))
	add_AT_flag (die: type_die, attr_kind: DW_AT_enum_class, flag: 1);
      /* An opaque enum, even with a known size, is only a declaration.  */
      if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
	add_AT_flag (die: type_die, attr_kind: DW_AT_declaration, flag: 1);
      if (!dwarf_strict)
	add_AT_unsigned (die: type_die, attr_kind: DW_AT_encoding,
			 TYPE_UNSIGNED (type)
			 ? DW_ATE_unsigned
			 : DW_ATE_signed);
    }
  else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
    /* Still incomplete: nothing new to add to the existing DIE.  */
    return type_die;
  else
    /* The type is complete now; drop any stale declaration flag.  */
    remove_AT (die: type_die, attr_kind: DW_AT_declaration);

  /* Handle a GNU C/C++ extension, i.e. incomplete enum types.  If the
     given enum type is incomplete, do not generate the DW_AT_byte_size
     attribute or the DW_AT_element_list attribute.  */
  if (TYPE_SIZE (type))
    {
      tree link;

      if (!ENUM_IS_OPAQUE (type))
	TREE_ASM_WRITTEN (type) = 1;
      /* For pre-existing DIEs, add only the attributes still missing.  */
      if (!orig_type_die || !get_AT (die: type_die, attr_kind: DW_AT_byte_size))
	add_byte_size_attribute (die: type_die, tree_node: type);
      if (!orig_type_die || !get_AT (die: type_die, attr_kind: DW_AT_alignment))
	add_alignment_attribute (die: type_die, tree_node: type);
      /* The underlying base type of an enumeration (via DW_AT_type)
	 is a DWARF 3 addition.  */
      if ((dwarf_version >= 3 || !dwarf_strict)
	  && (!orig_type_die || !get_AT (die: type_die, attr_kind: DW_AT_type)))
	{
	  tree underlying = lang_hooks.types.enum_underlying_base_type (type);
	  add_type_attribute (object_die: type_die, type: underlying, cv_quals: TYPE_UNQUALIFIED, reverse: false,
			      context_die);
	}
      if (TYPE_STUB_DECL (type) != NULL_TREE)
	{
	  if (!orig_type_die || !get_AT (die: type_die, attr_kind: DW_AT_decl_file))
	    add_src_coords_attributes (die: type_die, TYPE_STUB_DECL (type));
	  if (!orig_type_die || !get_AT (die: type_die, attr_kind: DW_AT_accessibility))
	    add_accessibility_attribute (die: type_die, TYPE_STUB_DECL (type));
	}

      /* If the first reference to this type was as the return type of an
	 inline function, then it may not have a parent.  Fix this now.  */
      if (type_die->die_parent == NULL)
	add_child_die (die: scope_die_for (t: type, context_die), child_die: type_die);

      /* Emit one DW_TAG_enumerator child per enumeration value.  */
      for (link = TYPE_VALUES (type);
	   link != NULL; link = TREE_CHAIN (link))
	{
	  dw_die_ref enum_die = new_die (tag_value: DW_TAG_enumerator, parent_die: type_die, t: link);
	  tree value = TREE_VALUE (link);

	  if (DECL_P (value))
	    equate_decl_number_to_die (decl: value, decl_die: enum_die);

	  gcc_assert (!ENUM_IS_OPAQUE (type));
	  add_name_attribute (die: enum_die,
			      IDENTIFIER_POINTER (TREE_PURPOSE (link)));

	  /* In C++ the list holds CONST_DECLs; fetch the actual value.  */
	  if (TREE_CODE (value) == CONST_DECL)
	    value = DECL_INITIAL (value);

	  if (simple_type_size_in_bits (TREE_TYPE (value))
	      <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
	    {
	      /* For constant forms created by add_AT_unsigned DWARF
		 consumers (GDB, elfutils, etc.) always zero extend
		 the value.  Only when the actual value is negative
		 do we need to use add_AT_int to generate a constant
		 form that can represent negative values.  */
	      HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
	      if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
		add_AT_unsigned (die: enum_die, attr_kind: DW_AT_const_value,
				 unsigned_val: (unsigned HOST_WIDE_INT) val);
	      else
		add_AT_int (die: enum_die, attr_kind: DW_AT_const_value, int_val: val);
	    }
	  else
	    /* Enumeration constants may be wider than HOST_WIDE_INT.  Handle
	       that here.  TODO: This should be re-worked to use correct
	       signed/unsigned double tags for all cases.  */
	    add_AT_wide (die: enum_die, attr_kind: DW_AT_const_value, w: wi::to_wide (t: value));
	}

      add_gnat_descriptive_type_attribute (die: type_die, type, context_die);
      if (TYPE_ARTIFICIAL (type)
	  && (!orig_type_die || !get_AT (die: type_die, attr_kind: DW_AT_artificial)))
	add_AT_flag (die: type_die, attr_kind: DW_AT_artificial, flag: 1);
    }
  else
    /* Incomplete enumeration: emit only a declaration.  */
    add_AT_flag (die: type_die, attr_kind: DW_AT_declaration, flag: 1);

  add_pubtype (decl: type, die: type_die);

  return type_die;
}
22937 | |
/* Generate a DIE to represent either a real live formal parameter decl or to
   represent just the type of some formal parameter position in some function
   type.

   Note that this routine is a bit unusual because its argument may be a
   ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
   represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
   node.  If it's the former then this function is being called to output a
   DIE to represent a formal parameter object (or some inlining thereof).  If
   it's the latter, then this function is only being called to output a
   DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
   argument type of some subprogram type.
   If EMIT_NAME_P is true, name and source coordinate attributes
   are emitted.  */

static dw_die_ref
gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
			  dw_die_ref context_die)
{
  /* NODE may be NULL when only the abstract ORIGIN is known.  */
  tree node_or_origin = node ? node : origin;
  tree ultimate_origin;
  dw_die_ref parm_die = NULL;

  if (DECL_P (node_or_origin))
    {
      parm_die = lookup_decl_die (decl: node);

      /* If the contexts differ, we may not be talking about the same
	 thing.
	 ??? When in LTO the DIE parent is the "abstract" copy and the
	 context_die is the specification "copy".  */
      if (parm_die
	  && parm_die->die_parent != context_die
	  && (parm_die->die_parent->die_tag != DW_TAG_GNU_formal_parameter_pack
	      || parm_die->die_parent->die_parent != context_die)
	  && !in_lto_p)
	{
	  gcc_assert (!DECL_ABSTRACT_P (node));
	  /* This can happen when creating a concrete instance, in
	     which case we need to create a new DIE that will get
	     annotated with DW_AT_abstract_origin.  */
	  parm_die = NULL;
	}

      if (parm_die && parm_die->die_parent == NULL)
	{
	  /* Check that parm_die already has the right attributes that
	     we would have added below.  If any attributes are
	     missing, fall through to add them.  */
	  if (! DECL_ABSTRACT_P (node_or_origin)
	      && !get_AT (die: parm_die, attr_kind: DW_AT_location)
	      && !get_AT (die: parm_die, attr_kind: DW_AT_const_value))
	    /* We are missing location info, and are about to add it.  */
	    ;
	  else
	    {
	      add_child_die (die: context_die, child_die: parm_die);
	      return parm_die;
	    }
	}
    }

  /* If we have a previously generated DIE, use it, unless this is a
     concrete instance (origin != NULL), in which case we need a new
     DIE with a corresponding DW_AT_abstract_origin.  */
  bool reusing_die;
  if (parm_die && origin == NULL)
    reusing_die = true;
  else
    {
      parm_die = new_die (tag_value: DW_TAG_formal_parameter, parent_die: context_die, t: node);
      reusing_die = false;
    }

  switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
    {
    case tcc_declaration:
      ultimate_origin = decl_ultimate_origin (decl: node_or_origin);
      if (node || ultimate_origin)
	origin = ultimate_origin;

      /* A reused DIE already carries its static attributes; only
	 location information may still need to be filled in.  */
      if (reusing_die)
	goto add_location;

      if (origin != NULL)
	add_abstract_origin_attribute (die: parm_die, origin);
      else if (emit_name_p)
	add_name_and_src_coords_attributes (die: parm_die, decl: node);
      /* Emit the type unless it is inherited via the abstract origin;
	 variably modified types must be re-emitted per instance.  */
      if (origin == NULL
	  || (! DECL_ABSTRACT_P (node_or_origin)
	      && variably_modified_type_p (TREE_TYPE (node_or_origin),
					   decl_function_context
					   (node_or_origin)))))
	{
	  tree type = TREE_TYPE (node_or_origin);
	  if (decl_by_reference_p (decl: node_or_origin))
	    add_type_attribute (object_die: parm_die, TREE_TYPE (type),
				cv_quals: TYPE_UNQUALIFIED,
				reverse: false, context_die);
	  else
	    add_type_attribute (object_die: parm_die, type,
				cv_quals: decl_quals (decl: node_or_origin),
				reverse: false, context_die);
	}
      if (origin == NULL && DECL_ARTIFICIAL (node))
	add_AT_flag (die: parm_die, attr_kind: DW_AT_artificial, flag: 1);
    add_location:
      if (node && node != origin)
	equate_decl_number_to_die (decl: node, decl_die: parm_die);
      if (! DECL_ABSTRACT_P (node_or_origin))
	add_location_or_const_value_attribute (die: parm_die, decl: node_or_origin,
					       cache_p: node == NULL);

      break;

    case tcc_type:
      /* We were called with some kind of a ..._TYPE node.  */
      add_type_attribute (object_die: parm_die, type: node_or_origin, cv_quals: TYPE_UNQUALIFIED, reverse: false,
			  context_die);
      break;

    default:
      gcc_unreachable ();
    }

  return parm_die;
}
23065 | |
23066 | /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate |
23067 | children DW_TAG_formal_parameter DIEs representing the arguments of the |
23068 | parameter pack. |
23069 | |
23070 | PARM_PACK must be a function parameter pack. |
23071 | PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN |
23072 | must point to the subsequent arguments of the function PACK_ARG belongs to. |
23073 | SUBR_DIE is the DIE of the function PACK_ARG belongs to. |
23074 | If NEXT_ARG is non NULL, *NEXT_ARG is set to the function argument |
23075 | following the last one for which a DIE was generated. */ |
23076 | |
23077 | static dw_die_ref |
23078 | gen_formal_parameter_pack_die (tree parm_pack, |
23079 | tree pack_arg, |
23080 | dw_die_ref subr_die, |
23081 | tree *next_arg) |
23082 | { |
23083 | tree arg; |
23084 | dw_die_ref parm_pack_die; |
23085 | |
23086 | gcc_assert (parm_pack |
23087 | && lang_hooks.function_parameter_pack_p (parm_pack) |
23088 | && subr_die); |
23089 | |
23090 | parm_pack_die = new_die (tag_value: DW_TAG_GNU_formal_parameter_pack, parent_die: subr_die, t: parm_pack); |
23091 | add_src_coords_attributes (die: parm_pack_die, decl: parm_pack); |
23092 | |
23093 | for (arg = pack_arg; arg; arg = DECL_CHAIN (arg)) |
23094 | { |
23095 | if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg, |
23096 | parm_pack)) |
23097 | break; |
23098 | gen_formal_parameter_die (node: arg, NULL, |
23099 | emit_name_p: false /* Don't emit name attribute. */, |
23100 | context_die: parm_pack_die); |
23101 | } |
23102 | if (next_arg) |
23103 | *next_arg = arg; |
23104 | return parm_pack_die; |
23105 | } |
23106 | |
23107 | /* Generate a special type of DIE used as a stand-in for a trailing ellipsis |
23108 | at the end of an (ANSI prototyped) formal parameters list. */ |
23109 | |
23110 | static void |
23111 | gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die) |
23112 | { |
23113 | new_die (tag_value: DW_TAG_unspecified_parameters, parent_die: context_die, t: decl_or_type); |
23114 | } |
23115 | |
/* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
   DW_TAG_unspecified_parameters DIE) to represent the types of the formal
   parameters as specified in some function type specification (except for
   those which appear as part of a function *definition*).  */

static void
gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
{
  tree link;
  tree formal_type = NULL;
  tree first_parm_type;
  tree arg;

  /* When given a FUNCTION_DECL, walk its argument decls in parallel with
     the type list so DECL_ARTIFICIAL parameters can be flagged below.  */
  if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
    {
      arg = DECL_ARGUMENTS (function_or_method_type);
      function_or_method_type = TREE_TYPE (function_or_method_type);
    }
  else
    arg = NULL_TREE;

  first_parm_type = TYPE_ARG_TYPES (function_or_method_type);

  /* Make our first pass over the list of formal parameter types and output a
     DW_TAG_formal_parameter DIE for each one.  */
  for (link = first_parm_type; link; )
    {
      dw_die_ref parm_die;

      formal_type = TREE_VALUE (link);
      /* A trailing void_type_node marks a prototyped, non-variadic list.  */
      if (formal_type == void_type_node)
	break;

      /* Output a (nameless) DIE to represent the formal parameter itself.  */
      parm_die = gen_formal_parameter_die (node: formal_type, NULL,
					   emit_name_p: true /* Emit name attribute.  */,
					   context_die);
      /* The first parameter of a METHOD_TYPE is the implicit object
	 pointer; mark it artificial and, where DWARF permits, record
	 it as the object pointer of the enclosing DIE.  */
      if (TREE_CODE (function_or_method_type) == METHOD_TYPE
	  && link == first_parm_type)
	{
	  add_AT_flag (die: parm_die, attr_kind: DW_AT_artificial, flag: 1);
	  if (dwarf_version >= 3 || !dwarf_strict)
	    add_AT_die_ref (die: context_die, attr_kind: DW_AT_object_pointer, targ_die: parm_die);
	}
      else if (arg && DECL_ARTIFICIAL (arg))
	add_AT_flag (die: parm_die, attr_kind: DW_AT_artificial, flag: 1);

      link = TREE_CHAIN (link);
      if (arg)
	arg = DECL_CHAIN (arg);
    }

  /* If this function type has an ellipsis, add a
     DW_TAG_unspecified_parameters DIE to the end of the parameter list.  */
  if (formal_type != void_type_node)
    gen_unspecified_parameters_die (decl_or_type: function_or_method_type, context_die);

  /* Make our second (and final) pass over the list of formal parameter types
     and output DIEs to represent those types (as necessary).  */
  for (link = TYPE_ARG_TYPES (function_or_method_type);
       link && TREE_VALUE (link);
       link = TREE_CHAIN (link))
    gen_type_die (TREE_VALUE (link), context_die);
}
23180 | |
/* We want to generate the DIE for TYPE so that we can generate the
   die for MEMBER, which has been defined; we will need to refer back
   to the member declaration nested within TYPE.  If we're trying to
   generate minimal debug info for TYPE, processing TYPE won't do the
   trick; we need to attach the member declaration by hand.  */

static void
gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
{
  gen_type_die (type, context_die);

  /* If we're trying to avoid duplicate debug info, we may not have
     emitted the member decl for this function.  Emit it now.  */
  if (TYPE_STUB_DECL (type)
      && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
      && ! lookup_decl_die (decl: member))
    {
      dw_die_ref type_die;
      /* The member must be its own abstract origin here.  */
      gcc_assert (!decl_ultimate_origin (member));

      type_die = lookup_type_die_strip_naming_typedef (type);
      if (TREE_CODE (member) == FUNCTION_DECL)
	gen_subprogram_die (member, type_die);
      else if (TREE_CODE (member) == FIELD_DECL)
	{
	  /* Ignore the nameless fields that are used to skip bits but handle
	     C++ anonymous unions and structs.  */
	  if (DECL_NAME (member) != NULL_TREE
	      || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
	      || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
	    {
	      struct vlr_context vlr_ctx = {
		DECL_CONTEXT (member), /* struct_type */
		NULL_TREE /* variant_part_offset */
	      };
	      /* Make sure the field's type has a DIE before the field.  */
	      gen_type_die (member_declared_type (member), type_die);
	      gen_field_die (member, &vlr_ctx, type_die);
	    }
	}
      else
	/* Neither a function nor a field — presumably a static data
	   member / VAR_DECL (TODO confirm); emit as a variable.  */
	gen_variable_die (member, NULL_TREE, type_die);
    }
}
23224 | |
23225 | /* Forward declare these functions, because they are mutually recursive |
23226 | with their set_block_* pairing functions. */ |
23227 | static void set_decl_origin_self (tree); |
23228 | |
23229 | /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the |
23230 | given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so |
23231 | that it points to the node itself, thus indicating that the node is its |
23232 | own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for |
23233 | the given node is NULL, recursively descend the decl/block tree which |
23234 | it is the root of, and for each other ..._DECL or BLOCK node contained |
23235 | therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also |
23236 | still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN |
23237 | values to point to themselves. */ |
23238 | |
23239 | static void |
23240 | set_block_origin_self (tree stmt) |
23241 | { |
23242 | if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE) |
23243 | { |
23244 | BLOCK_ABSTRACT_ORIGIN (stmt) = stmt; |
23245 | |
23246 | { |
23247 | tree local_decl; |
23248 | |
23249 | for (local_decl = BLOCK_VARS (stmt); |
23250 | local_decl != NULL_TREE; |
23251 | local_decl = DECL_CHAIN (local_decl)) |
23252 | /* Do not recurse on nested functions since the inlining status |
23253 | of parent and child can be different as per the DWARF spec. */ |
23254 | if (TREE_CODE (local_decl) != FUNCTION_DECL |
23255 | && !DECL_EXTERNAL (local_decl)) |
23256 | set_decl_origin_self (local_decl); |
23257 | } |
23258 | |
23259 | { |
23260 | tree subblock; |
23261 | |
23262 | for (subblock = BLOCK_SUBBLOCKS (stmt); |
23263 | subblock != NULL_TREE; |
23264 | subblock = BLOCK_CHAIN (subblock)) |
23265 | set_block_origin_self (subblock); /* Recurse. */ |
23266 | } |
23267 | } |
23268 | } |
23269 | |
23270 | /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for |
23271 | the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the |
23272 | node to so that it points to the node itself, thus indicating that the |
23273 | node represents its own (abstract) origin. Additionally, if the |
23274 | DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend |
23275 | the decl/block tree of which the given node is the root of, and for |
23276 | each other ..._DECL or BLOCK node contained therein whose |
23277 | DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL, |
23278 | set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to |
23279 | point to themselves. */ |
23280 | |
23281 | static void |
23282 | set_decl_origin_self (tree decl) |
23283 | { |
23284 | if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE) |
23285 | { |
23286 | DECL_ABSTRACT_ORIGIN (decl) = decl; |
23287 | if (TREE_CODE (decl) == FUNCTION_DECL) |
23288 | { |
23289 | tree arg; |
23290 | |
23291 | for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg)) |
23292 | DECL_ABSTRACT_ORIGIN (arg) = arg; |
23293 | if (DECL_INITIAL (decl) != NULL_TREE |
23294 | && DECL_INITIAL (decl) != error_mark_node) |
23295 | set_block_origin_self (DECL_INITIAL (decl)); |
23296 | } |
23297 | } |
23298 | } |
23299 | |
23300 | /* Mark the early DIE for DECL as the abstract instance. */ |
23301 | |
23302 | static void |
23303 | dwarf2out_abstract_function (tree decl) |
23304 | { |
23305 | dw_die_ref old_die; |
23306 | |
23307 | /* Make sure we have the actual abstract inline, not a clone. */ |
23308 | decl = DECL_ORIGIN (decl); |
23309 | |
23310 | if (DECL_IGNORED_P (decl)) |
23311 | return; |
23312 | |
23313 | /* In LTO we're all set. We already created abstract instances |
23314 | early and we want to avoid creating a concrete instance of that |
23315 | if we don't output it. */ |
23316 | if (in_lto_p) |
23317 | return; |
23318 | |
23319 | old_die = lookup_decl_die (decl); |
23320 | gcc_assert (old_die != NULL); |
23321 | if (get_AT (die: old_die, attr_kind: DW_AT_inline)) |
23322 | /* We've already generated the abstract instance. */ |
23323 | return; |
23324 | |
23325 | /* Go ahead and put DW_AT_inline on the DIE. */ |
23326 | if (DECL_DECLARED_INLINE_P (decl)) |
23327 | { |
23328 | if (cgraph_function_possibly_inlined_p (decl)) |
23329 | add_AT_unsigned (die: old_die, attr_kind: DW_AT_inline, unsigned_val: DW_INL_declared_inlined); |
23330 | else |
23331 | add_AT_unsigned (die: old_die, attr_kind: DW_AT_inline, unsigned_val: DW_INL_declared_not_inlined); |
23332 | } |
23333 | else |
23334 | { |
23335 | if (cgraph_function_possibly_inlined_p (decl)) |
23336 | add_AT_unsigned (die: old_die, attr_kind: DW_AT_inline, unsigned_val: DW_INL_inlined); |
23337 | else |
23338 | add_AT_unsigned (die: old_die, attr_kind: DW_AT_inline, unsigned_val: DW_INL_not_inlined); |
23339 | } |
23340 | |
23341 | if (DECL_DECLARED_INLINE_P (decl) |
23342 | && lookup_attribute (attr_name: "artificial" , DECL_ATTRIBUTES (decl))) |
23343 | add_AT_flag (die: old_die, attr_kind: DW_AT_artificial, flag: 1); |
23344 | |
23345 | set_decl_origin_self (decl); |
23346 | } |
23347 | |
23348 | /* Helper function of premark_used_types() which gets called through |
23349 | htab_traverse. |
23350 | |
23351 | Marks the DIE of a given type in *SLOT as perennial, so it never gets |
23352 | marked as unused by prune_unused_types. */ |
23353 | |
23354 | bool |
23355 | (tree const &type, void *) |
23356 | { |
23357 | dw_die_ref die; |
23358 | |
23359 | die = lookup_type_die (type); |
23360 | if (die != NULL) |
23361 | die->die_perennial_p = 1; |
23362 | return true; |
23363 | } |
23364 | |
23365 | /* Helper function of premark_types_used_by_global_vars which gets called |
23366 | through htab_traverse. |
23367 | |
23368 | Marks the DIE of a given type in *SLOT as perennial, so it never gets |
23369 | marked as unused by prune_unused_types. The DIE of the type is marked |
23370 | only if the global variable using the type will actually be emitted. */ |
23371 | |
23372 | int |
23373 | (types_used_by_vars_entry **slot, |
23374 | void *) |
23375 | { |
23376 | struct types_used_by_vars_entry *entry; |
23377 | dw_die_ref die; |
23378 | |
23379 | entry = (struct types_used_by_vars_entry *) *slot; |
23380 | gcc_assert (entry->type != NULL |
23381 | && entry->var_decl != NULL); |
23382 | die = lookup_type_die (type: entry->type); |
23383 | if (die) |
23384 | { |
23385 | /* Ask cgraph if the global variable really is to be emitted. |
23386 | If yes, then we'll keep the DIE of ENTRY->TYPE. */ |
23387 | varpool_node *node = varpool_node::get (decl: entry->var_decl); |
23388 | if (node && node->definition) |
23389 | { |
23390 | die->die_perennial_p = 1; |
23391 | /* Keep the parent DIEs as well. */ |
23392 | while ((die = die->die_parent) && die->die_perennial_p == 0) |
23393 | die->die_perennial_p = 1; |
23394 | } |
23395 | } |
23396 | return 1; |
23397 | } |
23398 | |
23399 | /* Mark all members of used_types_hash as perennial. */ |
23400 | |
23401 | static void |
23402 | (struct function *fun) |
23403 | { |
23404 | if (fun && fun->used_types_hash) |
23405 | fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL); |
23406 | } |
23407 | |
23408 | /* Mark all members of types_used_by_vars_entry as perennial. */ |
23409 | |
23410 | static void |
23411 | (void) |
23412 | { |
23413 | if (types_used_by_vars_hash) |
23414 | types_used_by_vars_hash |
23415 | ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL); |
23416 | } |
23417 | |
23418 | /* Mark all variables used by the symtab as perennial. */ |
23419 | |
23420 | static void |
23421 | (void) |
23422 | { |
23423 | /* Mark DIEs in the symtab as used. */ |
23424 | varpool_node *var; |
23425 | FOR_EACH_VARIABLE (var) |
23426 | { |
23427 | dw_die_ref die = lookup_decl_die (decl: var->decl); |
23428 | if (die) |
23429 | die->die_perennial_p = 1; |
23430 | } |
23431 | } |
23432 | |
/* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
   for CA_LOC call arg loc node.  Returns the new DIE.  */

static dw_die_ref
gen_call_site_die (tree decl, dw_die_ref subr_die,
		   struct call_arg_loc_node *ca_loc)
{
  dw_die_ref stmt_die = NULL, die;
  tree block = ca_loc->block;

  /* Find the innermost enclosing lexical block that already has a DIE;
     the call-site DIE is nested under it, or under the subprogram DIE
     itself if no block DIE is found.  */
  while (block
	 && block != DECL_INITIAL (decl)
	 && TREE_CODE (block) == BLOCK)
    {
      stmt_die = lookup_block_die (block);
      if (stmt_die)
	break;
      block = BLOCK_SUPERCONTEXT (block);
    }
  if (stmt_die == NULL)
    stmt_die = subr_die;
  die = new_die (tag_value: dwarf_TAG (tag: DW_TAG_call_site), parent_die: stmt_die, NULL_TREE);
  add_AT_lbl_id (die, attr_kind: dwarf_AT (at: DW_AT_call_return_pc), lbl_id: ca_loc->label);
  if (ca_loc->tail_call_p)
    add_AT_flag (die, attr_kind: dwarf_AT (at: DW_AT_call_tail_call), flag: 1);
  if (ca_loc->symbol_ref)
    {
      /* Prefer a reference to the callee's DIE; fall back to recording
	 the callee's address when no DIE is available.  */
      dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
      if (tdie)
	add_AT_die_ref (die, attr_kind: dwarf_AT (at: DW_AT_call_origin), targ_die: tdie);
      else
	add_AT_addr (die, attr_kind: dwarf_AT (at: DW_AT_call_origin), addr: ca_loc->symbol_ref,
		     force_direct: false);
    }
  return die;
}
23469 | |
23470 | /* Generate a DIE to represent a declared function (either file-scope or |
23471 | block-local). */ |
23472 | |
23473 | static void |
23474 | gen_subprogram_die (tree decl, dw_die_ref context_die) |
23475 | { |
23476 | tree origin = decl_ultimate_origin (decl); |
23477 | dw_die_ref subr_die; |
23478 | dw_die_ref old_die = lookup_decl_die (decl); |
23479 | bool old_die_had_no_children = false; |
23480 | |
23481 | /* This function gets called multiple times for different stages of |
23482 | the debug process. For example, for func() in this code: |
23483 | |
23484 | namespace S |
23485 | { |
23486 | void func() { ... } |
23487 | } |
23488 | |
23489 | ...we get called 4 times. Twice in early debug and twice in |
23490 | late debug: |
23491 | |
23492 | Early debug |
23493 | ----------- |
23494 | |
23495 | 1. Once while generating func() within the namespace. This is |
23496 | the declaration. The declaration bit below is set, as the |
23497 | context is the namespace. |
23498 | |
23499 | A new DIE will be generated with DW_AT_declaration set. |
23500 | |
23501 | 2. Once for func() itself. This is the specification. The |
23502 | declaration bit below is clear as the context is the CU. |
23503 | |
23504 | We will use the cached DIE from (1) to create a new DIE with |
23505 | DW_AT_specification pointing to the declaration in (1). |
23506 | |
23507 | Late debug via rest_of_handle_final() |
23508 | ------------------------------------- |
23509 | |
23510 | 3. Once generating func() within the namespace. This is also the |
23511 | declaration, as in (1), but this time we will early exit below |
23512 | as we have a cached DIE and a declaration needs no additional |
23513 | annotations (no locations), as the source declaration line |
23514 | info is enough. |
23515 | |
23516 | 4. Once for func() itself. As in (2), this is the specification, |
23517 | but this time we will re-use the cached DIE, and just annotate |
23518 | it with the location information that should now be available. |
23519 | |
23520 | For something without namespaces, but with abstract instances, we |
23521 | are also called a multiple times: |
23522 | |
23523 | class Base |
23524 | { |
23525 | public: |
23526 | Base (); // constructor declaration (1) |
23527 | }; |
23528 | |
23529 | Base::Base () { } // constructor specification (2) |
23530 | |
23531 | Early debug |
23532 | ----------- |
23533 | |
23534 | 1. Once for the Base() constructor by virtue of it being a |
23535 | member of the Base class. This is done via |
23536 | rest_of_type_compilation. |
23537 | |
23538 | This is a declaration, so a new DIE will be created with |
23539 | DW_AT_declaration. |
23540 | |
23541 | 2. Once for the Base() constructor definition, but this time |
23542 | while generating the abstract instance of the base |
23543 | constructor (__base_ctor) which is being generated via early |
23544 | debug of reachable functions. |
23545 | |
23546 | Even though we have a cached version of the declaration (1), |
23547 | we will create a DW_AT_specification of the declaration DIE |
23548 | in (1). |
23549 | |
23550 | 3. Once for the __base_ctor itself, but this time, we generate |
23551 | an DW_AT_abstract_origin version of the DW_AT_specification in |
23552 | (2). |
23553 | |
23554 | Late debug via rest_of_handle_final |
23555 | ----------------------------------- |
23556 | |
23557 | 4. One final time for the __base_ctor (which will have a cached |
23558 | DIE with DW_AT_abstract_origin created in (3). This time, |
23559 | we will just annotate the location information now |
23560 | available. |
23561 | */ |
23562 | int declaration = (current_function_decl != decl |
23563 | || (!DECL_INITIAL (decl) && !origin) |
23564 | || class_or_namespace_scope_p (context_die)); |
23565 | |
23566 | /* A declaration that has been previously dumped needs no |
23567 | additional information. */ |
23568 | if (old_die && declaration) |
23569 | return; |
23570 | |
23571 | if (in_lto_p && old_die && old_die->die_child == NULL) |
23572 | old_die_had_no_children = true; |
23573 | |
23574 | /* Now that the C++ front end lazily declares artificial member fns, we |
23575 | might need to retrofit the declaration into its class. */ |
23576 | if (!declaration && !origin && !old_die |
23577 | && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl)) |
23578 | && !class_or_namespace_scope_p (context_die) |
23579 | && debug_info_level > DINFO_LEVEL_TERSE) |
23580 | old_die = force_decl_die (decl); |
23581 | |
23582 | /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */ |
23583 | if (origin != NULL) |
23584 | { |
23585 | gcc_assert (!declaration || local_scope_p (context_die)); |
23586 | |
23587 | /* Fixup die_parent for the abstract instance of a nested |
23588 | inline function. */ |
23589 | if (old_die && old_die->die_parent == NULL) |
23590 | add_child_die (die: context_die, child_die: old_die); |
23591 | |
23592 | if (old_die && get_AT_ref (die: old_die, attr_kind: DW_AT_abstract_origin)) |
23593 | { |
23594 | /* If we have a DW_AT_abstract_origin we have a working |
23595 | cached version. */ |
23596 | subr_die = old_die; |
23597 | } |
23598 | else |
23599 | { |
23600 | subr_die = new_die (tag_value: DW_TAG_subprogram, parent_die: context_die, t: decl); |
23601 | add_abstract_origin_attribute (die: subr_die, origin); |
23602 | /* This is where the actual code for a cloned function is. |
23603 | Let's emit linkage name attribute for it. This helps |
23604 | debuggers to e.g, set breakpoints into |
23605 | constructors/destructors when the user asks "break |
23606 | K::K". */ |
23607 | add_linkage_name (die: subr_die, decl); |
23608 | } |
23609 | } |
23610 | /* A cached copy, possibly from early dwarf generation. Reuse as |
23611 | much as possible. */ |
23612 | else if (old_die) |
23613 | { |
23614 | if (!get_AT_flag (die: old_die, attr_kind: DW_AT_declaration) |
23615 | /* We can have a normal definition following an inline one in the |
23616 | case of redefinition of GNU C extern inlines. |
23617 | It seems reasonable to use AT_specification in this case. */ |
23618 | && !get_AT (die: old_die, attr_kind: DW_AT_inline)) |
23619 | { |
23620 | /* Detect and ignore this case, where we are trying to output |
23621 | something we have already output. */ |
23622 | if (get_AT (die: old_die, attr_kind: DW_AT_low_pc) |
23623 | || get_AT (die: old_die, attr_kind: DW_AT_ranges)) |
23624 | return; |
23625 | |
23626 | /* If we have no location information, this must be a |
23627 | partially generated DIE from early dwarf generation. |
23628 | Fall through and generate it. */ |
23629 | } |
23630 | |
23631 | /* If the definition comes from the same place as the declaration, |
23632 | maybe use the old DIE. We always want the DIE for this function |
23633 | that has the *_pc attributes to be under comp_unit_die so the |
23634 | debugger can find it. We also need to do this for abstract |
23635 | instances of inlines, since the spec requires the out-of-line copy |
23636 | to have the same parent. For local class methods, this doesn't |
23637 | apply; we just use the old DIE. */ |
23638 | expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl)); |
23639 | struct dwarf_file_data * file_index = lookup_filename (s.file); |
23640 | if (((is_unit_die (c: old_die->die_parent) |
23641 | /* This condition fixes the inconsistency/ICE with the |
23642 | following Fortran test (or some derivative thereof) while |
23643 | building libgfortran: |
23644 | |
23645 | module some_m |
23646 | contains |
23647 | logical function funky (FLAG) |
23648 | funky = .true. |
23649 | end function |
23650 | end module |
23651 | */ |
23652 | || (old_die->die_parent |
23653 | && old_die->die_parent->die_tag == DW_TAG_module) |
23654 | || local_scope_p (context_die: old_die->die_parent) |
23655 | || context_die == NULL) |
23656 | && (DECL_ARTIFICIAL (decl) |
23657 | || (get_AT_file (die: old_die, attr_kind: DW_AT_decl_file) == file_index |
23658 | && (get_AT_unsigned (die: old_die, attr_kind: DW_AT_decl_line) |
23659 | == (unsigned) s.line) |
23660 | && (!debug_column_info |
23661 | || s.column == 0 |
23662 | || (get_AT_unsigned (die: old_die, attr_kind: DW_AT_decl_column) |
23663 | == (unsigned) s.column))))) |
23664 | /* With LTO if there's an abstract instance for |
23665 | the old DIE, this is a concrete instance and |
23666 | thus re-use the DIE. */ |
23667 | || get_AT (die: old_die, attr_kind: DW_AT_abstract_origin)) |
23668 | { |
23669 | subr_die = old_die; |
23670 | |
23671 | /* Clear out the declaration attribute, but leave the |
23672 | parameters so they can be augmented with location |
23673 | information later. Unless this was a declaration, in |
23674 | which case, wipe out the nameless parameters and recreate |
23675 | them further down. */ |
23676 | if (remove_AT (die: subr_die, attr_kind: DW_AT_declaration)) |
23677 | { |
23678 | |
23679 | remove_AT (die: subr_die, attr_kind: DW_AT_object_pointer); |
23680 | remove_child_TAG (die: subr_die, tag: DW_TAG_formal_parameter); |
23681 | } |
23682 | } |
23683 | /* Make a specification pointing to the previously built |
23684 | declaration. */ |
23685 | else |
23686 | { |
23687 | subr_die = new_die (tag_value: DW_TAG_subprogram, parent_die: context_die, t: decl); |
23688 | add_AT_specification (die: subr_die, targ_die: old_die); |
23689 | add_pubname (decl, die: subr_die); |
23690 | if (get_AT_file (die: old_die, attr_kind: DW_AT_decl_file) != file_index) |
23691 | add_AT_file (die: subr_die, attr_kind: DW_AT_decl_file, fd: file_index); |
23692 | if (get_AT_unsigned (die: old_die, attr_kind: DW_AT_decl_line) != (unsigned) s.line) |
23693 | add_AT_unsigned (die: subr_die, attr_kind: DW_AT_decl_line, unsigned_val: s.line); |
23694 | if (debug_column_info |
23695 | && s.column |
23696 | && (get_AT_unsigned (die: old_die, attr_kind: DW_AT_decl_column) |
23697 | != (unsigned) s.column)) |
23698 | add_AT_unsigned (die: subr_die, attr_kind: DW_AT_decl_column, unsigned_val: s.column); |
23699 | |
23700 | /* If the prototype had an 'auto' or 'decltype(auto)' in |
23701 | the return type, emit the real type on the definition die. */ |
23702 | if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE) |
23703 | { |
23704 | dw_die_ref die = get_AT_ref (die: old_die, attr_kind: DW_AT_type); |
23705 | while (die |
23706 | && (die->die_tag == DW_TAG_reference_type |
23707 | || die->die_tag == DW_TAG_rvalue_reference_type |
23708 | || die->die_tag == DW_TAG_pointer_type |
23709 | || die->die_tag == DW_TAG_const_type |
23710 | || die->die_tag == DW_TAG_volatile_type |
23711 | || die->die_tag == DW_TAG_restrict_type |
23712 | || die->die_tag == DW_TAG_array_type |
23713 | || die->die_tag == DW_TAG_ptr_to_member_type |
23714 | || die->die_tag == DW_TAG_subroutine_type)) |
23715 | die = get_AT_ref (die, attr_kind: DW_AT_type); |
23716 | if (die == auto_die || die == decltype_auto_die) |
23717 | add_type_attribute (object_die: subr_die, TREE_TYPE (TREE_TYPE (decl)), |
23718 | cv_quals: TYPE_UNQUALIFIED, reverse: false, context_die); |
23719 | } |
23720 | |
23721 | /* When we process the method declaration, we haven't seen |
23722 | the out-of-class defaulted definition yet, so we have to |
23723 | recheck now. */ |
23724 | if ((dwarf_version >= 5 || ! dwarf_strict) |
23725 | && !get_AT (die: subr_die, attr_kind: DW_AT_defaulted)) |
23726 | { |
23727 | int defaulted |
23728 | = lang_hooks.decls.decl_dwarf_attribute (decl, |
23729 | DW_AT_defaulted); |
23730 | if (defaulted != -1) |
23731 | { |
23732 | /* Other values must have been handled before. */ |
23733 | gcc_assert (defaulted == DW_DEFAULTED_out_of_class); |
23734 | add_AT_unsigned (die: subr_die, attr_kind: DW_AT_defaulted, unsigned_val: defaulted); |
23735 | } |
23736 | } |
23737 | } |
23738 | } |
23739 | /* Create a fresh DIE for anything else. */ |
23740 | else |
23741 | { |
23742 | subr_die = new_die (tag_value: DW_TAG_subprogram, parent_die: context_die, t: decl); |
23743 | |
23744 | if (TREE_PUBLIC (decl)) |
23745 | add_AT_flag (die: subr_die, attr_kind: DW_AT_external, flag: 1); |
23746 | |
23747 | add_name_and_src_coords_attributes (die: subr_die, decl); |
23748 | add_pubname (decl, die: subr_die); |
23749 | if (debug_info_level > DINFO_LEVEL_TERSE) |
23750 | { |
23751 | add_prototyped_attribute (die: subr_die, TREE_TYPE (decl)); |
23752 | add_type_attribute (object_die: subr_die, TREE_TYPE (TREE_TYPE (decl)), |
23753 | cv_quals: TYPE_UNQUALIFIED, reverse: false, context_die); |
23754 | } |
23755 | |
23756 | add_pure_or_virtual_attribute (die: subr_die, func_decl: decl); |
23757 | if (DECL_ARTIFICIAL (decl)) |
23758 | add_AT_flag (die: subr_die, attr_kind: DW_AT_artificial, flag: 1); |
23759 | |
23760 | if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict)) |
23761 | add_AT_flag (die: subr_die, attr_kind: DW_AT_noreturn, flag: 1); |
23762 | |
23763 | add_alignment_attribute (die: subr_die, tree_node: decl); |
23764 | |
23765 | add_accessibility_attribute (die: subr_die, decl); |
23766 | } |
23767 | |
23768 | /* Unless we have an existing non-declaration DIE, equate the new |
23769 | DIE. */ |
23770 | if (!old_die || is_declaration_die (die: old_die)) |
23771 | equate_decl_number_to_die (decl, decl_die: subr_die); |
23772 | |
23773 | if (declaration) |
23774 | { |
23775 | if (!old_die || !get_AT (die: old_die, attr_kind: DW_AT_inline)) |
23776 | { |
23777 | add_AT_flag (die: subr_die, attr_kind: DW_AT_declaration, flag: 1); |
23778 | |
23779 | /* If this is an explicit function declaration then generate |
23780 | a DW_AT_explicit attribute. */ |
23781 | if ((dwarf_version >= 3 || !dwarf_strict) |
23782 | && lang_hooks.decls.decl_dwarf_attribute (decl, |
23783 | DW_AT_explicit) == 1) |
23784 | add_AT_flag (die: subr_die, attr_kind: DW_AT_explicit, flag: 1); |
23785 | |
23786 | /* If this is a C++11 deleted special function member then generate |
23787 | a DW_AT_deleted attribute. */ |
23788 | if ((dwarf_version >= 5 || !dwarf_strict) |
23789 | && lang_hooks.decls.decl_dwarf_attribute (decl, |
23790 | DW_AT_deleted) == 1) |
23791 | add_AT_flag (die: subr_die, attr_kind: DW_AT_deleted, flag: 1); |
23792 | |
23793 | /* If this is a C++11 defaulted special function member then |
23794 | generate a DW_AT_defaulted attribute. */ |
23795 | if (dwarf_version >= 5 || !dwarf_strict) |
23796 | { |
23797 | int defaulted |
23798 | = lang_hooks.decls.decl_dwarf_attribute (decl, |
23799 | DW_AT_defaulted); |
23800 | if (defaulted != -1) |
23801 | add_AT_unsigned (die: subr_die, attr_kind: DW_AT_defaulted, unsigned_val: defaulted); |
23802 | } |
23803 | |
23804 | /* If this is a C++11 non-static member function with & ref-qualifier |
23805 | then generate a DW_AT_reference attribute. */ |
23806 | if ((dwarf_version >= 5 || !dwarf_strict) |
23807 | && lang_hooks.decls.decl_dwarf_attribute (decl, |
23808 | DW_AT_reference) == 1) |
23809 | add_AT_flag (die: subr_die, attr_kind: DW_AT_reference, flag: 1); |
23810 | |
23811 | /* If this is a C++11 non-static member function with && |
23812 | ref-qualifier then generate a DW_AT_reference attribute. */ |
23813 | if ((dwarf_version >= 5 || !dwarf_strict) |
23814 | && lang_hooks.decls.decl_dwarf_attribute (decl, |
23815 | DW_AT_rvalue_reference) |
23816 | == 1) |
23817 | add_AT_flag (die: subr_die, attr_kind: DW_AT_rvalue_reference, flag: 1); |
23818 | } |
23819 | } |
23820 | /* For non DECL_EXTERNALs, if range information is available, fill |
23821 | the DIE with it. */ |
23822 | else if (!DECL_EXTERNAL (decl) && !early_dwarf) |
23823 | { |
23824 | HOST_WIDE_INT cfa_fb_offset; |
23825 | |
23826 | struct function *fun = DECL_STRUCT_FUNCTION (decl); |
23827 | |
23828 | if (!crtl->has_bb_partition) |
23829 | { |
23830 | dw_fde_ref fde = fun->fde; |
23831 | if (fde->dw_fde_begin) |
23832 | { |
23833 | /* We have already generated the labels. */ |
23834 | add_AT_low_high_pc (die: subr_die, lbl_low: fde->dw_fde_begin, |
23835 | lbl_high: fde->dw_fde_end, force_direct: false); |
23836 | } |
23837 | else |
23838 | { |
23839 | /* Create start/end labels and add the range. */ |
23840 | char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES]; |
23841 | char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES]; |
23842 | ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL, |
23843 | current_function_funcdef_no); |
23844 | ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL, |
23845 | current_function_funcdef_no); |
23846 | add_AT_low_high_pc (die: subr_die, lbl_low: label_id_low, lbl_high: label_id_high, |
23847 | force_direct: false); |
23848 | } |
23849 | |
23850 | #if VMS_DEBUGGING_INFO |
23851 | /* HP OpenVMS Industry Standard 64: DWARF Extensions |
23852 | Section 2.3 Prologue and Epilogue Attributes: |
23853 | When a breakpoint is set on entry to a function, it is generally |
23854 | desirable for execution to be suspended, not on the very first |
23855 | instruction of the function, but rather at a point after the |
23856 | function's frame has been set up, after any language defined local |
23857 | declaration processing has been completed, and before execution of |
23858 | the first statement of the function begins. Debuggers generally |
23859 | cannot properly determine where this point is. Similarly for a |
23860 | breakpoint set on exit from a function. The prologue and epilogue |
23861 | attributes allow a compiler to communicate the location(s) to use. */ |
23862 | |
23863 | { |
23864 | if (fde->dw_fde_vms_end_prologue) |
23865 | add_AT_vms_delta (subr_die, DW_AT_HP_prologue, |
23866 | fde->dw_fde_begin, fde->dw_fde_vms_end_prologue); |
23867 | |
23868 | if (fde->dw_fde_vms_begin_epilogue) |
23869 | add_AT_vms_delta (subr_die, DW_AT_HP_epilogue, |
23870 | fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue); |
23871 | } |
23872 | #endif |
23873 | |
23874 | } |
23875 | else |
23876 | { |
23877 | /* Generate pubnames entries for the split function code ranges. */ |
23878 | dw_fde_ref fde = fun->fde; |
23879 | |
23880 | if (fde->dw_fde_second_begin) |
23881 | { |
23882 | if (dwarf_version >= 3 || !dwarf_strict) |
23883 | { |
23884 | /* We should use ranges for non-contiguous code section |
23885 | addresses. Use the actual code range for the initial |
23886 | section, since the HOT/COLD labels might precede an |
23887 | alignment offset. */ |
23888 | bool range_list_added = false; |
23889 | add_ranges_by_labels (die: subr_die, begin: fde->dw_fde_begin, |
23890 | end: fde->dw_fde_end, added: &range_list_added, |
23891 | force_direct: false); |
23892 | add_ranges_by_labels (die: subr_die, begin: fde->dw_fde_second_begin, |
23893 | end: fde->dw_fde_second_end, |
23894 | added: &range_list_added, force_direct: false); |
23895 | if (range_list_added) |
23896 | add_ranges (NULL); |
23897 | } |
23898 | else |
23899 | { |
23900 | /* There is no real support in DW2 for this .. so we make |
23901 | a work-around. First, emit the pub name for the segment |
23902 | containing the function label. Then make and emit a |
23903 | simplified subprogram DIE for the second segment with the |
23904 | name pre-fixed by __hot/cold_sect_of_. We use the same |
23905 | linkage name for the second die so that gdb will find both |
23906 | sections when given "b foo". */ |
23907 | const char *name = NULL; |
23908 | tree decl_name = DECL_NAME (decl); |
23909 | dw_die_ref seg_die; |
23910 | |
23911 | /* Do the 'primary' section. */ |
23912 | add_AT_low_high_pc (die: subr_die, lbl_low: fde->dw_fde_begin, |
23913 | lbl_high: fde->dw_fde_end, force_direct: false); |
23914 | |
23915 | /* Build a minimal DIE for the secondary section. */ |
23916 | seg_die = new_die (tag_value: DW_TAG_subprogram, |
23917 | parent_die: subr_die->die_parent, t: decl); |
23918 | |
23919 | if (TREE_PUBLIC (decl)) |
23920 | add_AT_flag (die: seg_die, attr_kind: DW_AT_external, flag: 1); |
23921 | |
23922 | if (decl_name != NULL |
23923 | && IDENTIFIER_POINTER (decl_name) != NULL) |
23924 | { |
23925 | name = dwarf2_name (decl, scope: 1); |
23926 | if (! DECL_ARTIFICIAL (decl)) |
23927 | add_src_coords_attributes (die: seg_die, decl); |
23928 | |
23929 | add_linkage_name (die: seg_die, decl); |
23930 | } |
23931 | gcc_assert (name != NULL); |
23932 | add_pure_or_virtual_attribute (die: seg_die, func_decl: decl); |
23933 | if (DECL_ARTIFICIAL (decl)) |
23934 | add_AT_flag (die: seg_die, attr_kind: DW_AT_artificial, flag: 1); |
23935 | |
23936 | name = concat ("__second_sect_of_" , name, NULL); |
23937 | add_AT_low_high_pc (die: seg_die, lbl_low: fde->dw_fde_second_begin, |
23938 | lbl_high: fde->dw_fde_second_end, force_direct: false); |
23939 | add_name_attribute (die: seg_die, name_string: name); |
23940 | if (want_pubnames ()) |
23941 | add_pubname_string (str: name, die: seg_die); |
23942 | } |
23943 | } |
23944 | else |
23945 | add_AT_low_high_pc (die: subr_die, lbl_low: fde->dw_fde_begin, lbl_high: fde->dw_fde_end, |
23946 | force_direct: false); |
23947 | } |
23948 | |
23949 | cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl); |
23950 | |
23951 | /* We define the "frame base" as the function's CFA. This is more |
23952 | convenient for several reasons: (1) It's stable across the prologue |
23953 | and epilogue, which makes it better than just a frame pointer, |
23954 | (2) With dwarf3, there exists a one-byte encoding that allows us |
23955 | to reference the .debug_frame data by proxy, but failing that, |
23956 | (3) We can at least reuse the code inspection and interpretation |
23957 | code that determines the CFA position at various points in the |
23958 | function. */ |
23959 | if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2) |
23960 | { |
23961 | dw_loc_descr_ref op = new_loc_descr (op: DW_OP_call_frame_cfa, oprnd1: 0, oprnd2: 0); |
23962 | add_AT_loc (die: subr_die, attr_kind: DW_AT_frame_base, loc: op); |
23963 | } |
23964 | else |
23965 | { |
23966 | dw_loc_list_ref list = convert_cfa_to_fb_loc_list (offset: cfa_fb_offset); |
23967 | if (list->dw_loc_next) |
23968 | add_AT_loc_list (die: subr_die, attr_kind: DW_AT_frame_base, loc_list: list); |
23969 | else |
23970 | add_AT_loc (die: subr_die, attr_kind: DW_AT_frame_base, loc: list->expr); |
23971 | } |
23972 | |
23973 | /* Compute a displacement from the "steady-state frame pointer" to |
23974 | the CFA. The former is what all stack slots and argument slots |
23975 | will reference in the rtl; the latter is what we've told the |
23976 | debugger about. We'll need to adjust all frame_base references |
23977 | by this displacement. */ |
23978 | compute_frame_pointer_to_fb_displacement (offset: cfa_fb_offset); |
23979 | |
23980 | if (fun->static_chain_decl) |
23981 | { |
23982 | /* DWARF requires here a location expression that computes the |
23983 | address of the enclosing subprogram's frame base. The machinery |
23984 | in tree-nested.cc is supposed to store this specific address in the |
23985 | last field of the FRAME record. */ |
23986 | const tree frame_type |
23987 | = TREE_TYPE (TREE_TYPE (fun->static_chain_decl)); |
23988 | const tree fb_decl = tree_last (TYPE_FIELDS (frame_type)); |
23989 | |
23990 | tree fb_expr |
23991 | = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl); |
23992 | fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl), |
23993 | fb_expr, fb_decl, NULL_TREE); |
23994 | |
23995 | add_AT_location_description (die: subr_die, attr_kind: DW_AT_static_link, |
23996 | descr: loc_list_from_tree (loc: fb_expr, want_address: 0, NULL)); |
23997 | } |
23998 | |
23999 | resolve_variable_values (); |
24000 | } |
24001 | |
24002 | /* Generate child dies for template parameters. */ |
24003 | if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE) |
24004 | gen_generic_params_dies (t: decl); |
24005 | |
24006 | /* Now output descriptions of the arguments for this function. This gets |
24007 | (unnecessarily?) complex because of the fact that the DECL_ARGUMENT list |
24008 | for a FUNCTION_DECL doesn't indicate cases where there was a trailing |
24009 | `...' at the end of the formal parameter list. In order to find out if |
24010 | there was a trailing ellipsis or not, we must instead look at the type |
24011 | associated with the FUNCTION_DECL. This will be a node of type |
24012 | FUNCTION_TYPE. If the chain of type nodes hanging off of this |
24013 | FUNCTION_TYPE node ends with a void_type_node then there should *not* be |
24014 | an ellipsis at the end. */ |
24015 | |
24016 | /* In the case where we are describing a mere function declaration, all we |
24017 | need to do here (and all we *can* do here) is to describe the *types* of |
24018 | its formal parameters. */ |
24019 | if (debug_info_level <= DINFO_LEVEL_TERSE) |
24020 | ; |
24021 | else if (declaration) |
24022 | gen_formal_types_die (function_or_method_type: decl, context_die: subr_die); |
24023 | else |
24024 | { |
24025 | /* Generate DIEs to represent all known formal parameters. */ |
24026 | tree parm = DECL_ARGUMENTS (decl); |
24027 | tree generic_decl = early_dwarf |
24028 | ? lang_hooks.decls.get_generic_function_decl (decl) : NULL; |
24029 | tree generic_decl_parm = generic_decl |
24030 | ? DECL_ARGUMENTS (generic_decl) |
24031 | : NULL; |
24032 | |
24033 | /* Now we want to walk the list of parameters of the function and |
24034 | emit their relevant DIEs. |
24035 | |
24036 | We consider the case of DECL being an instance of a generic function |
24037 | as well as it being a normal function. |
24038 | |
24039 | If DECL is an instance of a generic function we walk the |
24040 | parameters of the generic function declaration _and_ the parameters of |
24041 | DECL itself. This is useful because we want to emit specific DIEs for |
24042 | function parameter packs and those are declared as part of the |
24043 | generic function declaration. In that particular case, |
24044 | the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE. |
24045 | That DIE has children DIEs representing the set of arguments |
24046 | of the pack. Note that the set of pack arguments can be empty. |
24047 | In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any |
24048 | children DIE. |
24049 | |
24050 | Otherwise, we just consider the parameters of DECL. */ |
24051 | while (generic_decl_parm || parm) |
24052 | { |
24053 | if (generic_decl_parm |
24054 | && lang_hooks.function_parameter_pack_p (generic_decl_parm)) |
24055 | gen_formal_parameter_pack_die (parm_pack: generic_decl_parm, |
24056 | pack_arg: parm, subr_die, |
24057 | next_arg: &parm); |
24058 | else if (parm) |
24059 | { |
24060 | dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die); |
24061 | |
24062 | if (early_dwarf |
24063 | && parm == DECL_ARGUMENTS (decl) |
24064 | && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE |
24065 | && parm_die |
24066 | && (dwarf_version >= 3 || !dwarf_strict)) |
24067 | add_AT_die_ref (die: subr_die, attr_kind: DW_AT_object_pointer, targ_die: parm_die); |
24068 | |
24069 | parm = DECL_CHAIN (parm); |
24070 | } |
24071 | |
24072 | if (generic_decl_parm) |
24073 | generic_decl_parm = DECL_CHAIN (generic_decl_parm); |
24074 | } |
24075 | |
24076 | /* Decide whether we need an unspecified_parameters DIE at the end. |
24077 | There are 2 more cases to do this for: 1) the ansi ... declaration - |
24078 | this is detectable when the end of the arg list is not a |
24079 | void_type_node 2) an unprototyped function declaration (not a |
24080 | definition). This just means that we have no info about the |
24081 | parameters at all. */ |
24082 | if (early_dwarf) |
24083 | { |
24084 | if (prototype_p (TREE_TYPE (decl))) |
24085 | { |
24086 | /* This is the prototyped case, check for.... */ |
24087 | if (stdarg_p (TREE_TYPE (decl))) |
24088 | gen_unspecified_parameters_die (decl_or_type: decl, context_die: subr_die); |
24089 | } |
24090 | else if (DECL_INITIAL (decl) == NULL_TREE) |
24091 | gen_unspecified_parameters_die (decl_or_type: decl, context_die: subr_die); |
24092 | } |
24093 | else if ((subr_die != old_die || old_die_had_no_children) |
24094 | && prototype_p (TREE_TYPE (decl)) |
24095 | && stdarg_p (TREE_TYPE (decl))) |
24096 | gen_unspecified_parameters_die (decl_or_type: decl, context_die: subr_die); |
24097 | } |
24098 | |
24099 | if (subr_die != old_die) |
24100 | /* Add the calling convention attribute if requested. */ |
24101 | add_calling_convention_attribute (subr_die, decl); |
24102 | |
24103 | /* Output Dwarf info for all of the stuff within the body of the function |
24104 | (if it has one - it may be just a declaration). |
24105 | |
24106 | OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent |
24107 | a function. This BLOCK actually represents the outermost binding contour |
24108 | for the function, i.e. the contour in which the function's formal |
24109 | parameters and labels get declared. Curiously, it appears that the front |
24110 | end doesn't actually put the PARM_DECL nodes for the current function onto |
24111 | the BLOCK_VARS list for this outer scope, but are strung off of the |
24112 | DECL_ARGUMENTS list for the function instead. |
24113 | |
24114 | The BLOCK_VARS list for the `outer_scope' does provide us with a list of |
24115 | the LABEL_DECL nodes for the function however, and we output DWARF info |
24116 | for those in decls_for_scope. Just within the `outer_scope' there will be |
24117 | a BLOCK node representing the function's outermost pair of curly braces, |
24118 | and any blocks used for the base and member initializers of a C++ |
24119 | constructor function. */ |
24120 | tree outer_scope = DECL_INITIAL (decl); |
24121 | if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK) |
24122 | { |
24123 | int call_site_note_count = 0; |
24124 | int tail_call_site_note_count = 0; |
24125 | |
24126 | /* Emit a DW_TAG_variable DIE for a named return value. */ |
24127 | if (DECL_NAME (DECL_RESULT (decl))) |
24128 | gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die); |
24129 | |
24130 | /* The first time through decls_for_scope we will generate the |
24131 | DIEs for the locals. The second time, we fill in the |
24132 | location info. */ |
24133 | decls_for_scope (outer_scope, subr_die); |
24134 | |
24135 | if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5)) |
24136 | { |
24137 | struct call_arg_loc_node *ca_loc; |
24138 | for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next) |
24139 | { |
24140 | dw_die_ref die = NULL; |
24141 | rtx tloc = NULL_RTX, tlocc = NULL_RTX; |
24142 | rtx arg, next_arg; |
24143 | tree arg_decl = NULL_TREE; |
24144 | |
24145 | for (arg = (ca_loc->call_arg_loc_note != NULL_RTX |
24146 | ? XEXP (ca_loc->call_arg_loc_note, 0) |
24147 | : NULL_RTX); |
24148 | arg; arg = next_arg) |
24149 | { |
24150 | dw_loc_descr_ref reg, val; |
24151 | machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1)); |
24152 | dw_die_ref cdie, tdie = NULL; |
24153 | |
24154 | next_arg = XEXP (arg, 1); |
24155 | if (REG_P (XEXP (XEXP (arg, 0), 0)) |
24156 | && next_arg |
24157 | && MEM_P (XEXP (XEXP (next_arg, 0), 0)) |
24158 | && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)) |
24159 | && REGNO (XEXP (XEXP (arg, 0), 0)) |
24160 | == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))) |
24161 | next_arg = XEXP (next_arg, 1); |
24162 | if (mode == VOIDmode) |
24163 | { |
24164 | mode = GET_MODE (XEXP (XEXP (arg, 0), 0)); |
24165 | if (mode == VOIDmode) |
24166 | mode = GET_MODE (XEXP (arg, 0)); |
24167 | } |
24168 | if (mode == VOIDmode || mode == BLKmode) |
24169 | continue; |
24170 | /* Get dynamic information about call target only if we |
24171 | have no static information: we cannot generate both |
24172 | DW_AT_call_origin and DW_AT_call_target |
24173 | attributes. */ |
24174 | if (ca_loc->symbol_ref == NULL_RTX) |
24175 | { |
24176 | if (XEXP (XEXP (arg, 0), 0) == pc_rtx) |
24177 | { |
24178 | tloc = XEXP (XEXP (arg, 0), 1); |
24179 | continue; |
24180 | } |
24181 | else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER |
24182 | && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx) |
24183 | { |
24184 | tlocc = XEXP (XEXP (arg, 0), 1); |
24185 | continue; |
24186 | } |
24187 | } |
24188 | reg = NULL; |
24189 | if (REG_P (XEXP (XEXP (arg, 0), 0))) |
24190 | reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0), |
24191 | initialized: VAR_INIT_STATUS_INITIALIZED); |
24192 | else if (MEM_P (XEXP (XEXP (arg, 0), 0))) |
24193 | { |
24194 | rtx mem = XEXP (XEXP (arg, 0), 0); |
24195 | reg = mem_loc_descriptor (XEXP (mem, 0), |
24196 | mode: get_address_mode (mem), |
24197 | GET_MODE (mem), |
24198 | initialized: VAR_INIT_STATUS_INITIALIZED); |
24199 | } |
24200 | else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) |
24201 | == DEBUG_PARAMETER_REF) |
24202 | { |
24203 | tree tdecl |
24204 | = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0)); |
24205 | tdie = lookup_decl_die (decl: tdecl); |
24206 | if (tdie == NULL) |
24207 | continue; |
24208 | arg_decl = tdecl; |
24209 | } |
24210 | else |
24211 | continue; |
24212 | if (reg == NULL |
24213 | && GET_CODE (XEXP (XEXP (arg, 0), 0)) |
24214 | != DEBUG_PARAMETER_REF) |
24215 | continue; |
24216 | val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode, |
24217 | VOIDmode, |
24218 | initialized: VAR_INIT_STATUS_INITIALIZED); |
24219 | if (val == NULL) |
24220 | continue; |
24221 | if (die == NULL) |
24222 | die = gen_call_site_die (decl, subr_die, ca_loc); |
24223 | cdie = new_die (tag_value: dwarf_TAG (tag: DW_TAG_call_site_parameter), parent_die: die, |
24224 | NULL_TREE); |
24225 | add_desc_attribute (die: cdie, decl: arg_decl); |
24226 | if (reg != NULL) |
24227 | add_AT_loc (die: cdie, attr_kind: DW_AT_location, loc: reg); |
24228 | else if (tdie != NULL) |
24229 | add_AT_die_ref (die: cdie, attr_kind: dwarf_AT (at: DW_AT_call_parameter), |
24230 | targ_die: tdie); |
24231 | add_AT_loc (die: cdie, attr_kind: dwarf_AT (at: DW_AT_call_value), loc: val); |
24232 | if (next_arg != XEXP (arg, 1)) |
24233 | { |
24234 | mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1)); |
24235 | if (mode == VOIDmode) |
24236 | mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0)); |
24237 | val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1), |
24238 | 0), 1), |
24239 | mode, VOIDmode, |
24240 | initialized: VAR_INIT_STATUS_INITIALIZED); |
24241 | if (val != NULL) |
24242 | add_AT_loc (die: cdie, attr_kind: dwarf_AT (at: DW_AT_call_data_value), |
24243 | loc: val); |
24244 | } |
24245 | } |
24246 | if (die == NULL |
24247 | && (ca_loc->symbol_ref || tloc)) |
24248 | die = gen_call_site_die (decl, subr_die, ca_loc); |
24249 | if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX)) |
24250 | { |
24251 | dw_loc_descr_ref tval = NULL; |
24252 | |
24253 | if (tloc != NULL_RTX) |
24254 | tval = mem_loc_descriptor (rtl: tloc, |
24255 | GET_MODE (tloc) == VOIDmode |
24256 | ? Pmode : GET_MODE (tloc), |
24257 | VOIDmode, |
24258 | initialized: VAR_INIT_STATUS_INITIALIZED); |
24259 | if (tval) |
24260 | add_AT_loc (die, attr_kind: dwarf_AT (at: DW_AT_call_target), loc: tval); |
24261 | else if (tlocc != NULL_RTX) |
24262 | { |
24263 | tval = mem_loc_descriptor (rtl: tlocc, |
24264 | GET_MODE (tlocc) == VOIDmode |
24265 | ? Pmode : GET_MODE (tlocc), |
24266 | VOIDmode, |
24267 | initialized: VAR_INIT_STATUS_INITIALIZED); |
24268 | if (tval) |
24269 | add_AT_loc (die, |
24270 | attr_kind: dwarf_AT (at: DW_AT_call_target_clobbered), |
24271 | loc: tval); |
24272 | } |
24273 | } |
24274 | if (die != NULL) |
24275 | { |
24276 | call_site_note_count++; |
24277 | if (ca_loc->tail_call_p) |
24278 | tail_call_site_note_count++; |
24279 | } |
24280 | } |
24281 | } |
24282 | call_arg_locations = NULL; |
24283 | call_arg_loc_last = NULL; |
24284 | if (tail_call_site_count >= 0 |
24285 | && tail_call_site_count == tail_call_site_note_count |
24286 | && (!dwarf_strict || dwarf_version >= 5)) |
24287 | { |
24288 | if (call_site_count >= 0 |
24289 | && call_site_count == call_site_note_count) |
24290 | add_AT_flag (die: subr_die, attr_kind: dwarf_AT (at: DW_AT_call_all_calls), flag: 1); |
24291 | else |
24292 | add_AT_flag (die: subr_die, attr_kind: dwarf_AT (at: DW_AT_call_all_tail_calls), flag: 1); |
24293 | } |
24294 | call_site_count = -1; |
24295 | tail_call_site_count = -1; |
24296 | } |
24297 | |
24298 | /* Mark used types after we have created DIEs for the functions scopes. */ |
24299 | premark_used_types (DECL_STRUCT_FUNCTION (decl)); |
24300 | } |
24301 | |
24302 | /* Returns a hash value for X (which really is a die_struct). */ |
24303 | |
24304 | hashval_t |
24305 | block_die_hasher::hash (die_struct *d) |
24306 | { |
24307 | return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent); |
24308 | } |
24309 | |
24310 | /* Return true if decl_id and die_parent of die_struct X is the same |
24311 | as decl_id and die_parent of die_struct Y. */ |
24312 | |
24313 | bool |
24314 | block_die_hasher::equal (die_struct *x, die_struct *y) |
24315 | { |
24316 | return x->decl_id == y->decl_id && x->die_parent == y->die_parent; |
24317 | } |
24318 | |
/* Hold information about markers for inlined entry points.  One record
   is kept per inlined-function outer scope until its
   DW_TAG_inlined_subroutine DIE is created.  */
struct GTY ((for_user)) inline_entry_data
{
  /* The block that's the inlined_function_outer_scope for an inlined
     function.  Used as the hash key.  */
  tree block;

  /* The label at the inlined entry point (prefix plus number, combined
     via ASM_GENERATE_INTERNAL_LABEL).  */
  const char *label_pfx;
  unsigned int label_num;

  /* The view number to be used as the inlined entry point.  */
  var_loc_view view;
};
24333 | |
/* Hasher for inline_entry_data records, keyed on the BLOCK tree so
   lookups can be done directly from a block pointer.  */
struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
{
  typedef tree compare_type;
  static inline hashval_t hash (const inline_entry_data *);
  static inline bool equal (const inline_entry_data *, const_tree);
};
24340 | |
24341 | /* Hash table routines for inline_entry_data. */ |
24342 | |
24343 | inline hashval_t |
24344 | inline_entry_data_hasher::hash (const inline_entry_data *data) |
24345 | { |
24346 | return htab_hash_pointer (data->block); |
24347 | } |
24348 | |
24349 | inline bool |
24350 | inline_entry_data_hasher::equal (const inline_entry_data *data, |
24351 | const_tree block) |
24352 | { |
24353 | return data->block == block; |
24354 | } |
24355 | |
24356 | /* Inlined entry points pending DIE creation in this compilation unit. */ |
24357 | |
24358 | static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table; |
24359 | |
24360 | |
24361 | /* Return TRUE if DECL, which may have been previously generated as |
24362 | OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is |
24363 | true if decl (or its origin) is either an extern declaration or a |
24364 | class/namespace scoped declaration. |
24365 | |
24366 | The declare_in_namespace support causes us to get two DIEs for one |
24367 | variable, both of which are declarations. We want to avoid |
24368 | considering one to be a specification, so we must test for |
24369 | DECLARATION and DW_AT_declaration. */ |
24370 | static inline bool |
24371 | decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration) |
24372 | { |
24373 | return (old_die && TREE_STATIC (decl) && !declaration |
24374 | && get_AT_flag (die: old_die, attr_kind: DW_AT_declaration) == 1); |
24375 | } |
24376 | |
24377 | /* Return true if DECL is a local static. */ |
24378 | |
24379 | static inline bool |
24380 | local_function_static (tree decl) |
24381 | { |
24382 | gcc_assert (VAR_P (decl)); |
24383 | return TREE_STATIC (decl) |
24384 | && DECL_CONTEXT (decl) |
24385 | && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL; |
24386 | } |
24387 | |
24388 | /* Return true iff DECL overrides (presumably completes) the type of |
24389 | OLD_DIE within CONTEXT_DIE. */ |
24390 | |
24391 | static bool |
24392 | override_type_for_decl_p (tree decl, dw_die_ref old_die, |
24393 | dw_die_ref context_die) |
24394 | { |
24395 | tree type = TREE_TYPE (decl); |
24396 | int cv_quals; |
24397 | |
24398 | if (decl_by_reference_p (decl)) |
24399 | { |
24400 | type = TREE_TYPE (type); |
24401 | cv_quals = TYPE_UNQUALIFIED; |
24402 | } |
24403 | else |
24404 | cv_quals = decl_quals (decl); |
24405 | |
24406 | dw_die_ref type_die = modified_type_die (type, |
24407 | cv_quals: cv_quals | TYPE_QUALS (type), |
24408 | reverse: false, |
24409 | context_die); |
24410 | |
24411 | dw_die_ref old_type_die = get_AT_ref (die: old_die, attr_kind: DW_AT_type); |
24412 | |
24413 | return type_die != old_type_die; |
24414 | } |
24415 | |
/* Generate a DIE to represent a declared data object.
   Either DECL or ORIGIN must be non-null.  DECL is the variable
   declaration itself (NULL when emitting an inlined instance from its
   abstract ORIGIN), and CONTEXT_DIE is the DIE of the enclosing
   scope.  Handles Fortran common-block members, C++ static data
   members (declaration + DW_AT_specification pairs), inlined
   instances, and ordinary variables.  */

static void
gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
{
  HOST_WIDE_INT off = 0;
  tree com_decl;
  tree decl_or_origin = decl ? decl : origin;
  tree ultimate_origin;
  dw_die_ref var_die;
  dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
  bool declaration = (DECL_EXTERNAL (decl_or_origin)
		      || class_or_namespace_scope_p (context_die));
  bool specialization_p = false;
  bool no_linkage_name = false;

  /* While C++ inline static data members have definitions inside of the
     class, force the first DIE to be a declaration, then let gen_member_die
     reparent it to the class context and call gen_variable_die again
     to create the outside of the class DIE for the definition.  */
  if (!declaration
      && old_die == NULL
      && decl
      && DECL_CONTEXT (decl)
      && TYPE_P (DECL_CONTEXT (decl))
      && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
    {
      declaration = true;
      if (dwarf_version < 5)
	no_linkage_name = true;
    }

  ultimate_origin = decl_ultimate_origin (decl: decl_or_origin);
  if (decl || ultimate_origin)
    origin = ultimate_origin;
  com_decl = fortran_common (decl: decl_or_origin, value: &off);

  /* Symbol in common gets emitted as a child of the common block, in the form
     of a data member.  */
  if (com_decl)
    {
      dw_die_ref com_die;
      dw_loc_list_ref loc = NULL;
      die_node com_die_arg;

      var_die = lookup_decl_die (decl: decl_or_origin);
      if (var_die)
	{
	  /* Already have a DIE from the early pass; just fill in the
	     location (offset by OFF within the common block) late.  */
	  if (! early_dwarf && get_AT (die: var_die, attr_kind: DW_AT_location) == NULL)
	    {
	      loc = loc_list_from_tree (loc: com_decl, want_address: off ? 1 : 2, NULL);
	      if (loc)
		{
		  if (off)
		    {
		      /* Optimize the common case.  */
		      if (single_element_loc_list_p (list: loc)
			  && loc->expr->dw_loc_opc == DW_OP_addr
			  && loc->expr->dw_loc_next == NULL
			  && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
			     == SYMBOL_REF)
			{
			  rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
			  loc->expr->dw_loc_oprnd1.v.val_addr
			    = plus_constant (GET_MODE (x), x , off);
			}
		      else
			loc_list_plus_const (list_head: loc, offset: off);
		    }
		  add_AT_location_description (die: var_die, attr_kind: DW_AT_location, descr: loc);
		  remove_AT (die: var_die, attr_kind: DW_AT_declaration);
		}
	    }
	  return;
	}

      if (common_block_die_table == NULL)
	common_block_die_table = hash_table<block_die_hasher>::create_ggc (n: 10);

      com_die_arg.decl_id = DECL_UID (com_decl);
      com_die_arg.die_parent = context_die;
      com_die = common_block_die_table->find (value: &com_die_arg);
      if (! early_dwarf)
	loc = loc_list_from_tree (loc: com_decl, want_address: 2, NULL);
      if (com_die == NULL)
	{
	  /* First member seen for this common block: create the
	     DW_TAG_common_block DIE and cache it.  */
	  const char *cnam
	    = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
	  die_node **slot;

	  com_die = new_die (tag_value: DW_TAG_common_block, parent_die: context_die, t: decl);
	  add_name_and_src_coords_attributes (die: com_die, decl: com_decl);
	  if (loc)
	    {
	      add_AT_location_description (die: com_die, attr_kind: DW_AT_location, descr: loc);
	      /* Avoid sharing the same loc descriptor between
		 DW_TAG_common_block and DW_TAG_variable.  */
	      loc = loc_list_from_tree (loc: com_decl, want_address: 2, NULL);
	    }
	  else if (DECL_EXTERNAL (decl_or_origin))
	    add_AT_flag (die: com_die, attr_kind: DW_AT_declaration, flag: 1);
	  if (want_pubnames ())
	    add_pubname_string (str: cnam, die: com_die); /* ??? needed? */
	  com_die->decl_id = DECL_UID (com_decl);
	  slot = common_block_die_table->find_slot (value: com_die, insert: INSERT);
	  *slot = com_die;
	}
      else if (get_AT (die: com_die, attr_kind: DW_AT_location) == NULL && loc)
	{
	  add_AT_location_description (die: com_die, attr_kind: DW_AT_location, descr: loc);
	  /* Re-fetch so the member's location below doesn't share the
	     descriptor just attached to the common block.  */
	  loc = loc_list_from_tree (loc: com_decl, want_address: 2, NULL);
	  remove_AT (die: com_die, attr_kind: DW_AT_declaration);
	}
      var_die = new_die (tag_value: DW_TAG_variable, parent_die: com_die, t: decl);
      add_name_and_src_coords_attributes (die: var_die, decl: decl_or_origin);
      add_type_attribute (object_die: var_die, TREE_TYPE (decl_or_origin),
			  cv_quals: decl_quals (decl: decl_or_origin), reverse: false,
			  context_die);
      add_alignment_attribute (die: var_die, tree_node: decl);
      add_AT_flag (die: var_die, attr_kind: DW_AT_external, flag: 1);
      if (loc)
	{
	  if (off)
	    {
	      /* Optimize the common case.  */
	      if (single_element_loc_list_p (list: loc)
		  && loc->expr->dw_loc_opc == DW_OP_addr
		  && loc->expr->dw_loc_next == NULL
		  && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
		{
		  rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
		  loc->expr->dw_loc_oprnd1.v.val_addr
		    = plus_constant (GET_MODE (x), x, off);
		}
	      else
		loc_list_plus_const (list_head: loc, offset: off);
	    }
	  add_AT_location_description (die: var_die, attr_kind: DW_AT_location, descr: loc);
	}
      else if (DECL_EXTERNAL (decl_or_origin))
	add_AT_flag (die: var_die, attr_kind: DW_AT_declaration, flag: 1);
      if (decl)
	equate_decl_number_to_die (decl, decl_die: var_die);
      return;
    }

  if (old_die)
    {
      if (declaration)
	{
	  /* A declaration that has been previously dumped, needs no
	     further annotations, since it doesn't need location on
	     the second pass.  */
	  return;
	}
      else if (decl_will_get_specification_p (old_die, decl, declaration)
	       && !get_AT (die: old_die, attr_kind: DW_AT_specification))
	{
	  /* Fall-thru so we can make a new variable die along with a
	     DW_AT_specification.  */
	}
      else if (origin && old_die->die_parent != context_die)
	{
	  /* If we will be creating an inlined instance, we need a
	     new DIE that will get annotated with
	     DW_AT_abstract_origin.  */
	  gcc_assert (!DECL_ABSTRACT_P (decl));
	}
      else
	{
	  /* If a DIE was dumped early, it still needs location info.
	     Skip to where we fill the location bits.  */
	  var_die = old_die;

	  /* ??? In LTRANS we cannot annotate early created variably
	     modified type DIEs without copying them and adjusting all
	     references to them.  Thus we dumped them again.  Also add a
	     reference to them but beware of -g0 compile and -g link
	     in which case the reference will be already present.  */
	  tree type = TREE_TYPE (decl_or_origin);
	  if (in_lto_p
	      && ! get_AT (die: var_die, attr_kind: DW_AT_type)
	      && variably_modified_type_p
		   (type, decl_function_context (decl_or_origin)))
	    {
	      if (decl_by_reference_p (decl: decl_or_origin))
		add_type_attribute (object_die: var_die, TREE_TYPE (type),
				    cv_quals: TYPE_UNQUALIFIED, reverse: false, context_die);
	      else
		add_type_attribute (object_die: var_die, type, cv_quals: decl_quals (decl: decl_or_origin),
				    reverse: false, context_die);
	    }

	  goto gen_variable_die_location;
	}
    }

  /* For static data members, the declaration in the class is supposed
     to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
     also in DWARF2; the specification should still be DW_TAG_variable
     referencing the DW_TAG_member DIE.  */
  if (declaration && class_scope_p (context_die) && dwarf_version < 5)
    var_die = new_die (tag_value: DW_TAG_member, parent_die: context_die, t: decl);
  else
    var_die = new_die (tag_value: DW_TAG_variable, parent_die: context_die, t: decl);

  if (origin != NULL)
    add_abstract_origin_attribute (die: var_die, origin);

  /* Loop unrolling can create multiple blocks that refer to the same
     static variable, so we must test for the DW_AT_declaration flag.

     ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
     copy decls and set the DECL_ABSTRACT_P flag on them instead of
     sharing them.

     ??? Duplicated blocks have been rewritten to use .debug_ranges.  */
  else if (decl_will_get_specification_p (old_die, decl, declaration))
    {
      /* This is a definition of a C++ class level static.  */
      add_AT_specification (die: var_die, targ_die: old_die);
      specialization_p = true;
      if (DECL_NAME (decl))
	{
	  /* Only re-emit source coordinates when they differ from the
	     ones already on the declaration DIE.  */
	  expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
	  struct dwarf_file_data * file_index = lookup_filename (s.file);

	  if (get_AT_file (die: old_die, attr_kind: DW_AT_decl_file) != file_index)
	    add_AT_file (die: var_die, attr_kind: DW_AT_decl_file, fd: file_index);

	  if (get_AT_unsigned (die: old_die, attr_kind: DW_AT_decl_line) != (unsigned) s.line)
	    add_AT_unsigned (die: var_die, attr_kind: DW_AT_decl_line, unsigned_val: s.line);

	  if (debug_column_info
	      && s.column
	      && (get_AT_unsigned (die: old_die, attr_kind: DW_AT_decl_column)
		  != (unsigned) s.column))
	    add_AT_unsigned (die: var_die, attr_kind: DW_AT_decl_column, unsigned_val: s.column);

	  if (old_die->die_tag == DW_TAG_member)
	    add_linkage_name (die: var_die, decl);
	}
    }
  else
    add_name_and_src_coords_attributes (die: var_die, decl, no_linkage_name);

  if ((origin == NULL && !specialization_p)
      || (origin != NULL
	  && !DECL_ABSTRACT_P (decl_or_origin)
	  && variably_modified_type_p (TREE_TYPE (decl_or_origin),
				       decl_function_context
							(decl_or_origin)))
      || (old_die && specialization_p
	  && override_type_for_decl_p (decl: decl_or_origin, old_die, context_die)))
    {
      tree type = TREE_TYPE (decl_or_origin);

      if (decl_by_reference_p (decl: decl_or_origin))
	add_type_attribute (object_die: var_die, TREE_TYPE (type), cv_quals: TYPE_UNQUALIFIED, reverse: false,
			    context_die);
      else
	add_type_attribute (object_die: var_die, type, cv_quals: decl_quals (decl: decl_or_origin), reverse: false,
			    context_die);
    }

  if (origin == NULL && !specialization_p)
    {
      if (TREE_PUBLIC (decl))
	add_AT_flag (die: var_die, attr_kind: DW_AT_external, flag: 1);

      if (DECL_ARTIFICIAL (decl))
	add_AT_flag (die: var_die, attr_kind: DW_AT_artificial, flag: 1);

      add_alignment_attribute (die: var_die, tree_node: decl);

      add_accessibility_attribute (die: var_die, decl);
    }

  if (declaration)
    add_AT_flag (die: var_die, attr_kind: DW_AT_declaration, flag: 1);

  if (decl && (DECL_ABSTRACT_P (decl)
	       || !old_die || is_declaration_die (die: old_die)))
    equate_decl_number_to_die (decl, decl_die: var_die);

 gen_variable_die_location:
  /* Attach location (or constant value) information; also reached via
     goto for early-created DIEs that only need late location info.  */
  if (! declaration
      && (! DECL_ABSTRACT_P (decl_or_origin)
	  /* Local static vars are shared between all clones/inlines,
	     so emit DW_AT_location on the abstract DIE if DECL_RTL is
	     already set.  */
	  || (VAR_P (decl_or_origin)
	      && TREE_STATIC (decl_or_origin)
	      && DECL_RTL_SET_P (decl_or_origin))))
    {
      if (early_dwarf)
	{
	  add_pubname (decl: decl_or_origin, die: var_die);
	  /* For global register variables, emit DW_AT_location if possible
	     already during early_dwarf, as late_global_decl won't be usually
	     called.  */
	  if (DECL_HARD_REGISTER (decl_or_origin)
	      && TREE_STATIC (decl_or_origin)
	      && !decl_by_reference_p (decl: decl_or_origin)
	      && !get_AT (die: var_die, attr_kind: DW_AT_location)
	      && !get_AT (die: var_die, attr_kind: DW_AT_const_value)
	      && DECL_RTL_SET_P (decl_or_origin)
	      && REG_P (DECL_RTL (decl_or_origin)))
	    {
	      dw_loc_descr_ref descr
		= reg_loc_descriptor (DECL_RTL (decl_or_origin),
				      initialized: VAR_INIT_STATUS_INITIALIZED);
	      if (descr)
		add_AT_loc (die: var_die, attr_kind: DW_AT_location, loc: descr);
	    }
	}
      else
	add_location_or_const_value_attribute (die: var_die, decl: decl_or_origin,
					       cache_p: decl == NULL);
    }
  else
    tree_add_const_value_attribute_for_decl (var_die, decl: decl_or_origin);

  if ((dwarf_version >= 4 || !dwarf_strict)
      && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
						DW_AT_const_expr) == 1
      && !get_AT (die: var_die, attr_kind: DW_AT_const_expr)
      && !specialization_p)
    add_AT_flag (die: var_die, attr_kind: DW_AT_const_expr, flag: 1);

  if (!dwarf_strict)
    {
      int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
						       DW_AT_inline);
      if (inl != -1
	  && !get_AT (die: var_die, attr_kind: DW_AT_inline)
	  && !specialization_p)
	add_AT_unsigned (die: var_die, attr_kind: DW_AT_inline, unsigned_val: inl);
    }
}
24757 | |
24758 | /* Generate a DIE to represent a named constant. */ |
24759 | |
24760 | static void |
24761 | gen_const_die (tree decl, dw_die_ref context_die) |
24762 | { |
24763 | dw_die_ref const_die; |
24764 | tree type = TREE_TYPE (decl); |
24765 | |
24766 | const_die = lookup_decl_die (decl); |
24767 | if (const_die) |
24768 | return; |
24769 | |
24770 | const_die = new_die (tag_value: DW_TAG_constant, parent_die: context_die, t: decl); |
24771 | equate_decl_number_to_die (decl, decl_die: const_die); |
24772 | add_name_and_src_coords_attributes (die: const_die, decl); |
24773 | add_type_attribute (object_die: const_die, type, cv_quals: TYPE_QUAL_CONST, reverse: false, context_die); |
24774 | if (TREE_PUBLIC (decl)) |
24775 | add_AT_flag (die: const_die, attr_kind: DW_AT_external, flag: 1); |
24776 | if (DECL_ARTIFICIAL (decl)) |
24777 | add_AT_flag (die: const_die, attr_kind: DW_AT_artificial, flag: 1); |
24778 | tree_add_const_value_attribute_for_decl (var_die: const_die, decl); |
24779 | } |
24780 | |
/* Generate a DIE to represent a label identifier.  DECL is the
   LABEL_DECL and CONTEXT_DIE the DIE of the containing scope.  For
   concrete instances emitted late, also attach the label's address
   (DW_AT_low_pc) from its RTL, including deleted labels kept so that
   breakpoints can still be set on them.  */

static void
gen_label_die (tree decl, dw_die_ref context_die)
{
  tree origin = decl_ultimate_origin (decl);
  dw_die_ref lbl_die = lookup_decl_die (decl);
  rtx insn;
  char label[MAX_ARTIFICIAL_LABEL_BYTES];

  if (!lbl_die)
    {
      lbl_die = new_die (tag_value: DW_TAG_label, parent_die: context_die, t: decl);
      equate_decl_number_to_die (decl, decl_die: lbl_die);

      if (origin != NULL)
	add_abstract_origin_attribute (die: lbl_die, origin);
      else
	add_name_and_src_coords_attributes (die: lbl_die, decl);
    }

  /* Abstract instances carry no address; concrete ones get DW_AT_low_pc
     from the RTL during late dwarf generation only.  */
  if (DECL_ABSTRACT_P (decl))
    equate_decl_number_to_die (decl, decl_die: lbl_die);
  else if (! early_dwarf)
    {
      insn = DECL_RTL_IF_SET (decl);

      /* Deleted labels are programmer specified labels which have been
	 eliminated because of various optimizations.  We still emit them
	 here so that it is possible to put breakpoints on them.  */
      if (insn
	  && (LABEL_P (insn)
	      || ((NOTE_P (insn)
		   && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
	{
	  /* When optimization is enabled (via -O) some parts of the compiler
	     (e.g. jump.cc and cse.cc) may try to delete CODE_LABEL insns which
	     represent source-level labels which were explicitly declared by
	     the user.  This really shouldn't be happening though, so catch
	     it if it ever does happen.  */
	  gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());

	  ASM_GENERATE_INTERNAL_LABEL (label, "L" , CODE_LABEL_NUMBER (insn));
	  add_AT_lbl_id (die: lbl_die, attr_kind: DW_AT_low_pc, lbl_id: label);
	}
      else if (insn
	       && NOTE_P (insn)
	       && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
	       && CODE_LABEL_NUMBER (insn) != -1)
	{
	  ASM_GENERATE_INTERNAL_LABEL (label, "LDL" , CODE_LABEL_NUMBER (insn));
	  add_AT_lbl_id (die: lbl_die, attr_kind: DW_AT_low_pc, lbl_id: label);
	}
    }
}
24836 | |
24837 | /* A helper function for gen_inlined_subroutine_die. Add source coordinate |
24838 | attributes to the DIE for a block STMT, to describe where the inlined |
24839 | function was called from. This is similar to add_src_coords_attributes. */ |
24840 | |
24841 | static inline void |
24842 | add_call_src_coords_attributes (tree stmt, dw_die_ref die) |
24843 | { |
24844 | /* We can end up with BUILTINS_LOCATION here. */ |
24845 | if (RESERVED_LOCATION_P (BLOCK_SOURCE_LOCATION (stmt))) |
24846 | return; |
24847 | |
24848 | location_t locus = BLOCK_SOURCE_LOCATION (stmt); |
24849 | expanded_location s = expand_location (locus); |
24850 | |
24851 | if (dwarf_version >= 3 || !dwarf_strict) |
24852 | { |
24853 | add_AT_file (die, attr_kind: DW_AT_call_file, fd: lookup_filename (s.file)); |
24854 | add_AT_unsigned (die, attr_kind: DW_AT_call_line, unsigned_val: s.line); |
24855 | if (debug_column_info && s.column) |
24856 | add_AT_unsigned (die, attr_kind: DW_AT_call_column, unsigned_val: s.column); |
24857 | unsigned discr = get_discriminator_from_loc (locus); |
24858 | if (discr != 0) |
24859 | add_AT_unsigned (die, attr_kind: DW_AT_GNU_discriminator, unsigned_val: discr); |
24860 | } |
24861 | } |
24862 | |

/* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
   Add low_pc and high_pc attributes to the DIE for a block STMT.  For
   blocks split by hot/cold partitioning a DW_AT_ranges list is emitted
   instead, sharing the supercontext's range-list tail when possible.
   For inlined outer scopes with a recorded entry marker, also emits
   DW_AT_entry_pc (and possibly an entry view).  */

static inline void
add_high_low_attributes (tree stmt, dw_die_ref die)
{
  char label[MAX_ARTIFICIAL_LABEL_BYTES];

  /* If an inlined-entry marker was recorded for STMT, emit its label as
     DW_AT_entry_pc and drop the table entry.  */
  if (inline_entry_data **iedp
      = !inline_entry_data_table ? NULL
      : inline_entry_data_table->find_slot_with_hash (comparable: stmt,
						      hash: htab_hash_pointer (stmt),
						      insert: NO_INSERT))
    {
      inline_entry_data *ied = *iedp;
      gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
      gcc_assert (debug_inline_points);
      gcc_assert (inlined_function_outer_scope_p (stmt));

      ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
      add_AT_lbl_id (die, attr_kind: DW_AT_entry_pc, lbl_id: label);

      if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
	  && !dwarf_strict)
	{
	  if (!output_asm_line_debug_info ())
	    add_AT_unsigned (die, attr_kind: DW_AT_GNU_entry_view, unsigned_val: ied->view);
	  else
	    {
	      ASM_GENERATE_INTERNAL_LABEL (label, "LVU" , ied->view);
	      /* FIXME: this will resolve to a small number.  Could we
		 possibly emit smaller data?  Ideally we'd emit a
		 uleb128, but that would make the size of DIEs
		 impossible for the compiler to compute, since it's
		 the assembler that computes the value of the view
		 label in this case.  Ideally, we'd have a single form
		 encompassing both the address and the view, and
		 indirecting them through a table might make things
		 easier, but even that would be more wasteful,
		 space-wise, than what we have now.  */
	      add_AT_symview (die, attr_kind: DW_AT_GNU_entry_view, view_label: label);
	    }
	}

      inline_entry_data_table->clear_slot (slot: iedp);
    }

  if (BLOCK_FRAGMENT_CHAIN (stmt)
      && (dwarf_version >= 3 || !dwarf_strict))
    {
      /* STMT was split into fragments (hot/cold partitioning); describe
	 it with a DW_AT_ranges list rather than low/high pc.  */
      tree chain, superblock = NULL_TREE;
      dw_die_ref pdie;
      dw_attr_node *attr = NULL;

      if (!debug_inline_points && inlined_function_outer_scope_p (block: stmt))
	{
	  ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
				       BLOCK_NUMBER (stmt));
	  add_AT_lbl_id (die, attr_kind: DW_AT_entry_pc, lbl_id: label);
	}

      /* Optimize duplicate .debug_ranges lists or even tails of
	 lists.  If this BLOCK has same ranges as its supercontext,
	 lookup DW_AT_ranges attribute in the supercontext (and
	 recursively so), verify that the ranges_table contains the
	 right values and use it instead of adding a new .debug_range.  */
      for (chain = stmt, pdie = die;
	   BLOCK_SAME_RANGE (chain);
	   chain = BLOCK_SUPERCONTEXT (chain))
	{
	  dw_attr_node *new_attr;

	  pdie = pdie->die_parent;
	  if (pdie == NULL)
	    break;
	  if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
	    break;
	  new_attr = get_AT (die: pdie, attr_kind: DW_AT_ranges);
	  if (new_attr == NULL
	      || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
	    break;
	  attr = new_attr;
	  superblock = BLOCK_SUPERCONTEXT (chain);
	}
      if (attr != NULL
	  && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
	      == (int)BLOCK_NUMBER (superblock))
	  && BLOCK_FRAGMENT_CHAIN (superblock))
	{
	  /* This block's fragments form a suffix of the superblock's
	     range list; reference that tail instead of a new list.  */
	  unsigned long off = attr->dw_attr_val.v.val_offset;
	  unsigned long supercnt = 0, thiscnt = 0;
	  for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
	       chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
	    {
	      ++supercnt;
	      gcc_checking_assert ((*ranges_table)[off + supercnt].num
				   == (int)BLOCK_NUMBER (chain));
	    }
	  gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
	  for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
	       chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
	    ++thiscnt;
	  gcc_assert (supercnt >= thiscnt);
	  add_AT_range_list (die, attr_kind: DW_AT_ranges, offset: off + supercnt - thiscnt,
			     force_direct: false);
	  note_rnglist_head (offset: off + supercnt - thiscnt);
	  return;
	}

      unsigned int offset = add_ranges (block: stmt, maybe_new_sec: true);
      add_AT_range_list (die, attr_kind: DW_AT_ranges, offset, force_direct: false);
      note_rnglist_head (offset);

      /* Emit one range per fragment, starting a new sub-section each
	 time a fragment crosses the hot/cold boundary.  */
      bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
      chain = BLOCK_FRAGMENT_CHAIN (stmt);
      do
	{
	  add_ranges (block: chain, maybe_new_sec: prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
	  prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
	  chain = BLOCK_FRAGMENT_CHAIN (chain);
	}
      while (chain);
      add_ranges (NULL);
    }
  else
    {
      /* Contiguous block: a plain low_pc/high_pc pair suffices.  */
      char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
      ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
				   BLOCK_NUMBER (stmt));
      ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
				   BLOCK_NUMBER (stmt));
      add_AT_low_high_pc (die, lbl_low: label, lbl_high: label_high, force_direct: false);
    }
}
24998 | |
/* Generate a DIE for a lexical block.  STMT is the BLOCK tree and
   CONTEXT_DIE the DIE of the enclosing scope.  Reuses an early-created
   DIE when one exists, except for inlined/concrete instances, which
   always get a fresh DIE carrying DW_AT_abstract_origin.  */

static void
gen_lexical_block_die (tree stmt, dw_die_ref context_die)
{
  dw_die_ref old_die = lookup_block_die (block: stmt);
  dw_die_ref stmt_die = NULL;
  if (!old_die)
    {
      stmt_die = new_die (tag_value: DW_TAG_lexical_block, parent_die: context_die, t: stmt);
      equate_block_to_die (block: stmt, die: stmt_die);
    }

  if (BLOCK_ABSTRACT_ORIGIN (stmt))
    {
      /* If this is an inlined or concrete instance, create a new lexical
	 die for anything below to attach DW_AT_abstract_origin to.  */
      if (old_die)
	stmt_die = new_die (tag_value: DW_TAG_lexical_block, parent_die: context_die, t: stmt);

      tree origin = block_ultimate_origin (stmt);
      if (origin != NULL_TREE && (origin != stmt || old_die))
	add_abstract_origin_attribute (die: stmt_die, origin);

      old_die = NULL;
    }

  if (old_die)
    stmt_die = old_die;

  /* A non abstract block whose blocks have already been reordered
     should have the instruction range for this block.  If so, set the
     high/low attributes.  */
  if (!early_dwarf && TREE_ASM_WRITTEN (stmt))
    {
      gcc_assert (stmt_die);
      add_high_low_attributes (stmt, die: stmt_die);
    }

  decls_for_scope (stmt, stmt_die);
}
25040 | |
/* Generate a DIE for an inlined subprogram.  STMT is the BLOCK that is
   the outermost scope of the inlined body; CONTEXT_DIE is the DIE of
   the scope the call appears in.  Emits DW_TAG_inlined_subroutine with
   abstract origin, code ranges and call-site coordinates, then recurses
   into the body's scopes.  */

static void
gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
{
  tree decl = block_ultimate_origin (stmt);

  /* Make sure any inlined functions are known to be inlineable.  */
  gcc_checking_assert (DECL_ABSTRACT_P (decl)
		       || cgraph_function_possibly_inlined_p (decl));

  dw_die_ref subr_die = new_die (tag_value: DW_TAG_inlined_subroutine, parent_die: context_die, t: stmt);

  if (call_arg_locations || debug_inline_points)
    equate_block_to_die (block: stmt, die: subr_die);
  add_abstract_origin_attribute (die: subr_die, origin: decl);
  if (TREE_ASM_WRITTEN (stmt))
    add_high_low_attributes (stmt, die: subr_die);
  add_call_src_coords_attributes (stmt, die: subr_die);

  /* The inliner creates an extra BLOCK for the parameter setup,
     we want to merge that with the actual outermost BLOCK of the
     inlined function to avoid duplicate locals in consumers.
     Do that by doing the recursion to subblocks on the single subblock
     of STMT.  */
  bool unwrap_one = false;
  if (BLOCK_SUBBLOCKS (stmt) && !BLOCK_CHAIN (BLOCK_SUBBLOCKS (stmt)))
    {
      tree origin = block_ultimate_origin (BLOCK_SUBBLOCKS (stmt));
      if (origin
	  && TREE_CODE (origin) == BLOCK
	  && BLOCK_SUPERCONTEXT (origin) == decl)
	unwrap_one = true;
    }
  decls_for_scope (stmt, subr_die, !unwrap_one);
  if (unwrap_one)
    decls_for_scope (BLOCK_SUBBLOCKS (stmt), subr_die);
}
25079 | |
25080 | /* Generate a DIE for a field in a record, or structure. CTX is required: see |
25081 | the comment for VLR_CONTEXT. */ |
25082 | |
25083 | static void |
25084 | gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die) |
25085 | { |
25086 | dw_die_ref decl_die; |
25087 | |
25088 | if (TREE_TYPE (decl) == error_mark_node) |
25089 | return; |
25090 | |
25091 | decl_die = new_die (tag_value: DW_TAG_member, parent_die: context_die, t: decl); |
25092 | add_name_and_src_coords_attributes (die: decl_die, decl); |
25093 | add_type_attribute (object_die: decl_die, type: member_declared_type (member: decl), cv_quals: decl_quals (decl), |
25094 | TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)), |
25095 | context_die); |
25096 | |
25097 | if (DECL_BIT_FIELD_TYPE (decl)) |
25098 | { |
25099 | add_byte_size_attribute (die: decl_die, tree_node: decl); |
25100 | add_bit_size_attribute (die: decl_die, decl); |
25101 | add_bit_offset_attribute (die: decl_die, decl); |
25102 | } |
25103 | |
25104 | add_alignment_attribute (die: decl_die, tree_node: decl); |
25105 | |
25106 | if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE) |
25107 | add_data_member_location_attribute (die: decl_die, decl, ctx); |
25108 | |
25109 | if (DECL_ARTIFICIAL (decl)) |
25110 | add_AT_flag (die: decl_die, attr_kind: DW_AT_artificial, flag: 1); |
25111 | |
25112 | add_accessibility_attribute (die: decl_die, decl); |
25113 | |
25114 | /* Equate decl number to die, so that we can look up this decl later on. */ |
25115 | equate_decl_number_to_die (decl, decl_die); |
25116 | } |
25117 | |
25118 | /* Generate a DIE for a pointer to a member type. TYPE can be an |
25119 | OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a |
25120 | pointer to member function. */ |
25121 | |
25122 | static void |
25123 | gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die) |
25124 | { |
25125 | if (lookup_type_die (type)) |
25126 | return; |
25127 | |
25128 | dw_die_ref ptr_die = new_die (tag_value: DW_TAG_ptr_to_member_type, |
25129 | parent_die: scope_die_for (t: type, context_die), t: type); |
25130 | |
25131 | equate_type_number_to_die (type, type_die: ptr_die); |
25132 | add_AT_die_ref (die: ptr_die, attr_kind: DW_AT_containing_type, |
25133 | targ_die: lookup_type_die (TYPE_OFFSET_BASETYPE (type))); |
25134 | add_type_attribute (object_die: ptr_die, TREE_TYPE (type), cv_quals: TYPE_UNQUALIFIED, reverse: false, |
25135 | context_die); |
25136 | add_alignment_attribute (die: ptr_die, tree_node: type); |
25137 | |
25138 | if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE |
25139 | && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE) |
25140 | { |
25141 | dw_loc_descr_ref op = new_loc_descr (op: DW_OP_plus, oprnd1: 0, oprnd2: 0); |
25142 | add_AT_loc (die: ptr_die, attr_kind: DW_AT_use_location, loc: op); |
25143 | } |
25144 | } |
25145 | |
25146 | static char *producer_string; |
25147 | |
25148 | /* Given a C and/or C++ language/version string return the "highest". |
25149 | C++ is assumed to be "higher" than C in this case. Used for merging |
25150 | LTO translation unit languages. */ |
25151 | static const char * |
25152 | highest_c_language (const char *lang1, const char *lang2) |
25153 | { |
25154 | if (strcmp (s1: "GNU C++26" , s2: lang1) == 0 || strcmp (s1: "GNU C++26" , s2: lang2) == 0) |
25155 | return "GNU C++26" ; |
25156 | if (strcmp (s1: "GNU C++23" , s2: lang1) == 0 || strcmp (s1: "GNU C++23" , s2: lang2) == 0) |
25157 | return "GNU C++23" ; |
25158 | if (strcmp (s1: "GNU C++20" , s2: lang1) == 0 || strcmp (s1: "GNU C++20" , s2: lang2) == 0) |
25159 | return "GNU C++20" ; |
25160 | if (strcmp (s1: "GNU C++17" , s2: lang1) == 0 || strcmp (s1: "GNU C++17" , s2: lang2) == 0) |
25161 | return "GNU C++17" ; |
25162 | if (strcmp (s1: "GNU C++14" , s2: lang1) == 0 || strcmp (s1: "GNU C++14" , s2: lang2) == 0) |
25163 | return "GNU C++14" ; |
25164 | if (strcmp (s1: "GNU C++11" , s2: lang1) == 0 || strcmp (s1: "GNU C++11" , s2: lang2) == 0) |
25165 | return "GNU C++11" ; |
25166 | if (strcmp (s1: "GNU C++98" , s2: lang1) == 0 || strcmp (s1: "GNU C++98" , s2: lang2) == 0) |
25167 | return "GNU C++98" ; |
25168 | |
25169 | if (strcmp (s1: "GNU C23" , s2: lang1) == 0 || strcmp (s1: "GNU C23" , s2: lang2) == 0) |
25170 | return "GNU C23" ; |
25171 | if (strcmp (s1: "GNU C17" , s2: lang1) == 0 || strcmp (s1: "GNU C17" , s2: lang2) == 0) |
25172 | return "GNU C17" ; |
25173 | if (strcmp (s1: "GNU C11" , s2: lang1) == 0 || strcmp (s1: "GNU C11" , s2: lang2) == 0) |
25174 | return "GNU C11" ; |
25175 | if (strcmp (s1: "GNU C99" , s2: lang1) == 0 || strcmp (s1: "GNU C99" , s2: lang2) == 0) |
25176 | return "GNU C99" ; |
25177 | if (strcmp (s1: "GNU C89" , s2: lang1) == 0 || strcmp (s1: "GNU C89" , s2: lang2) == 0) |
25178 | return "GNU C89" ; |
25179 | |
25180 | gcc_unreachable (); |
25181 | } |
25182 | |
25183 | |
25184 | /* Generate the DIE for the compilation unit. */ |
25185 | |
static dw_die_ref
gen_compile_unit_die (const char *filename)
{
  dw_die_ref die;
  const char *language_string = lang_hooks.name;
  int language;

  /* The CU DIE is the root of the debug information tree: it has no
     parent DIE and no associated tree node.  */
  die = new_die (tag_value: DW_TAG_compile_unit, NULL, NULL);

  if (filename)
    {
      add_filename_attribute (die, name_string: filename);
      /* Don't add cwd for <built-in>.  */
      if (filename[0] != '<')
	add_comp_dir_attribute (die);
    }

  /* producer_string may not have been computed yet; fall back to "".  */
  add_AT_string (die, attr_kind: DW_AT_producer, str: producer_string ? producer_string : "" );

  /* If our producer is LTO try to figure out a common language to use
     from the global list of translation units.  */
  if (strcmp (s1: language_string, s2: "GNU GIMPLE" ) == 0)
    {
      unsigned i;
      tree t;
      const char *common_lang = NULL;

      FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
	{
	  if (!TRANSLATION_UNIT_LANGUAGE (t))
	    continue;
	  if (!common_lang)
	    common_lang = TRANSLATION_UNIT_LANGUAGE (t);
	  else if (strcmp (s1: common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
	    ;
	  else if (startswith (str: common_lang, prefix: "GNU C" )
		   && startswith (TRANSLATION_UNIT_LANGUAGE (t), prefix: "GNU C" ))
	    /* Mixing C and C++ is ok, use C++ in that case.  */
	    common_lang = highest_c_language (lang1: common_lang,
					      TRANSLATION_UNIT_LANGUAGE (t));
	  else
	    {
	      /* Fall back to C.  */
	      common_lang = NULL;
	      break;
	    }
	}

      if (common_lang)
	language_string = common_lang;
    }

  /* Map the front-end language name onto a DWARF language code.
     Language codes introduced by later DWARF versions are only used
     when the requested version (or non-strict mode, where noted)
     permits them; otherwise a degraded but recognizable code is
     emitted.  */
  language = DW_LANG_C;
  if (startswith (str: language_string, prefix: "GNU C" )
      && ISDIGIT (language_string[5]))
    {
      language = DW_LANG_C89;
      if (dwarf_version >= 3 || !dwarf_strict)
	{
	  if (strcmp (s1: language_string, s2: "GNU C89" ) != 0)
	    language = DW_LANG_C99;

	  if (dwarf_version >= 5 /* || !dwarf_strict */)
	    if (strcmp (s1: language_string, s2: "GNU C11" ) == 0
		|| strcmp (s1: language_string, s2: "GNU C17" ) == 0
		|| strcmp (s1: language_string, s2: "GNU C23" ) == 0)
	      language = DW_LANG_C11;
	}
    }
  else if (startswith (str: language_string, prefix: "GNU C++" ))
    {
      language = DW_LANG_C_plus_plus;
      if (dwarf_version >= 5 /* || !dwarf_strict */)
	{
	  if (strcmp (s1: language_string, s2: "GNU C++11" ) == 0)
	    language = DW_LANG_C_plus_plus_11;
	  else if (strcmp (s1: language_string, s2: "GNU C++14" ) == 0)
	    language = DW_LANG_C_plus_plus_14;
	  else if (strcmp (s1: language_string, s2: "GNU C++17" ) == 0
		   || strcmp (s1: language_string, s2: "GNU C++20" ) == 0
		   || strcmp (s1: language_string, s2: "GNU C++23" ) == 0
		   || strcmp (s1: language_string, s2: "GNU C++26" ) == 0)
	    /* For now.  */
	    language = DW_LANG_C_plus_plus_14;
	}
    }
  else if (strcmp (s1: language_string, s2: "GNU F77" ) == 0)
    language = DW_LANG_Fortran77;
  else if (strcmp (s1: language_string, s2: "GNU Modula-2" ) == 0)
    language = DW_LANG_Modula2;
  else if (dwarf_version >= 3 || !dwarf_strict)
    {
      if (strcmp (s1: language_string, s2: "GNU Ada" ) == 0)
	language = DW_LANG_Ada95;
      else if (startswith (str: language_string, prefix: "GNU Fortran" ))
	{
	  language = DW_LANG_Fortran95;
	  if (dwarf_version >= 5 /* || !dwarf_strict */)
	    {
	      if (strcmp (s1: language_string, s2: "GNU Fortran2003" ) == 0)
		language = DW_LANG_Fortran03;
	      else if (strcmp (s1: language_string, s2: "GNU Fortran2008" ) == 0)
		language = DW_LANG_Fortran08;
	    }
	}
      else if (strcmp (s1: language_string, s2: "GNU Objective-C" ) == 0)
	language = DW_LANG_ObjC;
      else if (strcmp (s1: language_string, s2: "GNU Objective-C++" ) == 0)
	language = DW_LANG_ObjC_plus_plus;
      else if (strcmp (s1: language_string, s2: "GNU D" ) == 0)
	language = DW_LANG_D;
      else if (dwarf_version >= 5 || !dwarf_strict)
	{
	  if (strcmp (s1: language_string, s2: "GNU Go" ) == 0)
	    language = DW_LANG_Go;
	  else if (strcmp (s1: language_string, s2: "GNU Rust" ) == 0)
	    language = DW_LANG_Rust;
	}
    }
  /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works.  */
  else if (startswith (str: language_string, prefix: "GNU Fortran" ))
    language = DW_LANG_Fortran90;
  /* Likewise for Ada.  */
  else if (strcmp (s1: language_string, s2: "GNU Ada" ) == 0)
    language = DW_LANG_Ada83;

  add_AT_unsigned (die, attr_kind: DW_AT_language, unsigned_val: language);

  switch (language)
    {
    case DW_LANG_Fortran77:
    case DW_LANG_Fortran90:
    case DW_LANG_Fortran95:
    case DW_LANG_Fortran03:
    case DW_LANG_Fortran08:
      /* Fortran has case insensitive identifiers and the front-end
	 lowercases everything.  */
      add_AT_unsigned (die, attr_kind: DW_AT_identifier_case, unsigned_val: DW_ID_down_case);
      break;
    default:
      /* The default DW_ID_case_sensitive doesn't need to be specified.  */
      break;
    }
  return die;
}
25331 | |
25332 | /* Generate the DIE for a base class. */ |
25333 | |
25334 | static void |
25335 | gen_inheritance_die (tree binfo, tree access, tree type, |
25336 | dw_die_ref context_die) |
25337 | { |
25338 | dw_die_ref die = new_die (tag_value: DW_TAG_inheritance, parent_die: context_die, t: binfo); |
25339 | struct vlr_context ctx = { .struct_type: type, NULL }; |
25340 | |
25341 | add_type_attribute (object_die: die, BINFO_TYPE (binfo), cv_quals: TYPE_UNQUALIFIED, reverse: false, |
25342 | context_die); |
25343 | add_data_member_location_attribute (die, decl: binfo, ctx: &ctx); |
25344 | |
25345 | if (BINFO_VIRTUAL_P (binfo)) |
25346 | add_AT_unsigned (die, attr_kind: DW_AT_virtuality, unsigned_val: DW_VIRTUALITY_virtual); |
25347 | |
25348 | /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type |
25349 | children, otherwise the default is DW_ACCESS_public. In DWARF2 |
25350 | the default has always been DW_ACCESS_private. */ |
25351 | if (access == access_public_node) |
25352 | { |
25353 | if (dwarf_version == 2 |
25354 | || context_die->die_tag == DW_TAG_class_type) |
25355 | add_AT_unsigned (die, attr_kind: DW_AT_accessibility, unsigned_val: DW_ACCESS_public); |
25356 | } |
25357 | else if (access == access_protected_node) |
25358 | add_AT_unsigned (die, attr_kind: DW_AT_accessibility, unsigned_val: DW_ACCESS_protected); |
25359 | else if (dwarf_version > 2 |
25360 | && context_die->die_tag != DW_TAG_class_type) |
25361 | add_AT_unsigned (die, attr_kind: DW_AT_accessibility, unsigned_val: DW_ACCESS_private); |
25362 | } |
25363 | |
25364 | /* Return whether DECL is a FIELD_DECL that represents the variant part of a |
25365 | structure. */ |
25366 | |
25367 | static bool |
25368 | is_variant_part (tree decl) |
25369 | { |
25370 | return (TREE_CODE (decl) == FIELD_DECL |
25371 | && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE); |
25372 | } |
25373 | |
25374 | /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is, |
25375 | return the FIELD_DECL. Return NULL_TREE otherwise. */ |
25376 | |
25377 | static tree |
25378 | analyze_discr_in_predicate (tree operand, tree struct_type) |
25379 | { |
25380 | while (CONVERT_EXPR_P (operand)) |
25381 | operand = TREE_OPERAND (operand, 0); |
25382 | |
25383 | /* Match field access to members of struct_type only. */ |
25384 | if (TREE_CODE (operand) == COMPONENT_REF |
25385 | && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR |
25386 | && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type |
25387 | && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL) |
25388 | return TREE_OPERAND (operand, 1); |
25389 | else |
25390 | return NULL_TREE; |
25391 | } |
25392 | |
25393 | /* Check that SRC is a constant integer that can be represented as a native |
25394 | integer constant (either signed or unsigned). If so, store it into DEST and |
25395 | return true. Return false otherwise. */ |
25396 | |
25397 | static bool |
25398 | get_discr_value (tree src, dw_discr_value *dest) |
25399 | { |
25400 | tree discr_type = TREE_TYPE (src); |
25401 | |
25402 | if (lang_hooks.types.get_debug_type) |
25403 | { |
25404 | tree debug_type = lang_hooks.types.get_debug_type (discr_type); |
25405 | if (debug_type != NULL) |
25406 | discr_type = debug_type; |
25407 | } |
25408 | |
25409 | if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type)) |
25410 | return false; |
25411 | |
25412 | /* Signedness can vary between the original type and the debug type. This |
25413 | can happen for character types in Ada for instance: the character type |
25414 | used for code generation can be signed, to be compatible with the C one, |
25415 | but from a debugger point of view, it must be unsigned. */ |
25416 | bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src)); |
25417 | bool is_debug_unsigned = TYPE_UNSIGNED (discr_type); |
25418 | |
25419 | if (is_orig_unsigned != is_debug_unsigned) |
25420 | src = fold_convert (discr_type, src); |
25421 | |
25422 | if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src))) |
25423 | return false; |
25424 | |
25425 | dest->pos = is_debug_unsigned; |
25426 | if (is_debug_unsigned) |
25427 | dest->v.uval = tree_to_uhwi (src); |
25428 | else |
25429 | dest->v.sval = tree_to_shwi (src); |
25430 | |
25431 | return true; |
25432 | } |
25433 | |
25434 | /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a |
25435 | FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful, |
25436 | store NULL_TREE in DISCR_DECL. Otherwise: |
25437 | |
25438 | - store the discriminant field in STRUCT_TYPE that controls the variant |
25439 | part to *DISCR_DECL |
25440 | |
25441 | - put in *DISCR_LISTS_P an array where for each variant, the item |
25442 | represents the corresponding matching list of discriminant values. |
25443 | |
25444 | - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of |
25445 | the above array. |
25446 | |
25447 | Note that when the array is allocated (i.e. when the analysis is |
25448 | successful), it is up to the caller to free the array. */ |
25449 | |
static void
analyze_variants_discr (tree variant_part_decl,
			tree struct_type,
			tree *discr_decl,
			dw_discr_list_ref **discr_lists_p,
			unsigned *discr_lists_length)
{
  tree variant_part_type = TREE_TYPE (variant_part_decl);
  tree variant;
  dw_discr_list_ref *discr_lists;
  unsigned i;

  /* Compute how many variants there are in this variant part.  */
  *discr_lists_length = 0;
  for (variant = TYPE_FIELDS (variant_part_type);
       variant != NULL_TREE;
       variant = DECL_CHAIN (variant))
    ++*discr_lists_length;

  /* Allocate one (initially empty) discriminant value list per variant;
     xcalloc leaves every list pointer NULL.  */
  *discr_decl = NULL_TREE;
  *discr_lists_p
    = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
				     sizeof (**discr_lists_p));
  discr_lists = *discr_lists_p;

  /* And then analyze all variants to extract discriminant information for all
     of them.  This analysis is conservative: as soon as we detect something we
     do not support, abort everything and pretend we found nothing.  */
  for (variant = TYPE_FIELDS (variant_part_type), i = 0;
       variant != NULL_TREE;
       variant = DECL_CHAIN (variant), ++i)
    {
      tree match_expr = DECL_QUALIFIER (variant);

      /* Now, try to analyze the predicate and deduce a discriminant for
	 it.  */
      if (match_expr == boolean_true_node)
	/* Typically happens for the default variant: it matches all cases that
	   previous variants rejected.  Don't output any matching value for
	   this one.  */
	continue;

      /* The following loop tries to iterate over each discriminant
	 possibility: single values or ranges.  */
      while (match_expr != NULL_TREE)
	{
	  tree next_round_match_expr;
	  tree candidate_discr = NULL_TREE;
	  dw_discr_list_ref new_node = NULL;

	  /* Possibilities are matched one after the other by nested
	     TRUTH_ORIF_EXPR expressions.  Process the current possibility and
	     continue with the rest at next iteration.  */
	  if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
	    {
	      next_round_match_expr = TREE_OPERAND (match_expr, 0);
	      match_expr = TREE_OPERAND (match_expr, 1);
	    }
	  else
	    next_round_match_expr = NULL_TREE;

	  if (match_expr == boolean_false_node)
	    /* This sub-expression matches nothing: just wait for the next
	       one.  */
	    ;

	  else if (TREE_CODE (match_expr) == EQ_EXPR)
	    {
	      /* We are matching:  <discr_field> == <integer_cst>
		 This sub-expression matches a single value.  */
	      tree integer_cst = TREE_OPERAND (match_expr, 1);

	      candidate_discr
	        = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
					      struct_type);

	      new_node = ggc_cleared_alloc<dw_discr_list_node> ();
	      if (!get_discr_value (src: integer_cst,
				    dest: &new_node->dw_discr_lower_bound))
		goto abort;
	      new_node->dw_discr_range = false;
	    }

	  else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
	    {
	      /* We are matching:
		   <discr_field> > <integer_cst>
		   && <discr_field> < <integer_cst>.
		 This sub-expression matches the range of values between the
		 two matched integer constants.  Note that comparisons can be
		 inclusive or exclusive.  */
	      tree candidate_discr_1, candidate_discr_2;
	      tree lower_cst, upper_cst;
	      bool lower_cst_included, upper_cst_included;
	      tree lower_op = TREE_OPERAND (match_expr, 0);
	      tree upper_op = TREE_OPERAND (match_expr, 1);

	      /* When the comparison is exclusive, the integer constant is not
		 the discriminant range bound we are looking for: we will have
		 to increment or decrement it.  */
	      if (TREE_CODE (lower_op) == GE_EXPR)
		lower_cst_included = true;
	      else if (TREE_CODE (lower_op) == GT_EXPR)
		lower_cst_included = false;
	      else
		goto abort;

	      if (TREE_CODE (upper_op) == LE_EXPR)
		upper_cst_included = true;
	      else if (TREE_CODE (upper_op) == LT_EXPR)
		upper_cst_included = false;
	      else
		goto abort;

	      /* Extract the discriminant from the first operand and check it
		 is consistent with the same analysis in the second
		 operand.  */
	      candidate_discr_1
	        = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
					      struct_type);
	      candidate_discr_2
	        = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
					      struct_type);
	      if (candidate_discr_1 == candidate_discr_2)
		candidate_discr = candidate_discr_1;
	      else
		goto abort;

	      /* Extract bounds from both.  */
	      new_node = ggc_cleared_alloc<dw_discr_list_node> ();
	      lower_cst = TREE_OPERAND (lower_op, 1);
	      upper_cst = TREE_OPERAND (upper_op, 1);

	      /* Normalize exclusive bounds into inclusive ones before
		 recording them.  */
	      if (!lower_cst_included)
		lower_cst
		  = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
				 build_int_cst (TREE_TYPE (lower_cst), 1));
	      if (!upper_cst_included)
		upper_cst
		  = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
				 build_int_cst (TREE_TYPE (upper_cst), 1));

	      if (!get_discr_value (src: lower_cst,
				    dest: &new_node->dw_discr_lower_bound)
		  || !get_discr_value (src: upper_cst,
				       dest: &new_node->dw_discr_upper_bound))
		goto abort;

	      new_node->dw_discr_range = true;
	    }

	  else if ((candidate_discr
		      = analyze_discr_in_predicate (operand: match_expr, struct_type))
		   && (TREE_TYPE (candidate_discr) == boolean_type_node
		       || TREE_TYPE (TREE_TYPE (candidate_discr))
			  == boolean_type_node))
	    {
	      /* We are matching:  <discr_field> for a boolean discriminant.
		 This sub-expression matches boolean_true_node.  */
	      new_node = ggc_cleared_alloc<dw_discr_list_node> ();
	      if (!get_discr_value (boolean_true_node,
				    dest: &new_node->dw_discr_lower_bound))
		goto abort;
	      new_node->dw_discr_range = false;
	    }

	  else
	    /* Unsupported sub-expression: we cannot determine the set of
	       matching discriminant values.  Abort everything.  */
	    goto abort;

	  /* If the discriminant info is not consistent with what we saw so
	     far, consider the analysis failed and abort everything.  */
	  if (candidate_discr == NULL_TREE
	      || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
	    goto abort;
	  else
	    *discr_decl = candidate_discr;

	  /* Prepend the freshly-built node to this variant's list of
	     matching values.  */
	  if (new_node != NULL)
	    {
	      new_node->dw_discr_next = discr_lists[i];
	      discr_lists[i] = new_node;
	    }
	  match_expr = next_round_match_expr;
	}
    }

  /* If we reach this point, we could match everything we were interested
     in.  */
  return;

abort:
  /* Clean all data structure and return no result.  The discriminant
     list nodes themselves are GC-allocated and need no explicit
     freeing.  */
  free (ptr: *discr_lists_p);
  *discr_lists_p = NULL;
  *discr_decl = NULL_TREE;
}
25648 | |
25649 | /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part |
25650 | of STRUCT_TYPE, a record type. This new DIE is emitted as the next child |
25651 | under CONTEXT_DIE. |
25652 | |
25653 | Variant parts are supposed to be implemented as a FIELD_DECL whose type is a |
25654 | QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for |
25655 | this type, which are record types, represent the available variants and each |
25656 | has a DECL_QUALIFIER attribute. The discriminant and the discriminant |
25657 | values are inferred from these attributes. |
25658 | |
25659 | In trees, the offsets for the fields inside these sub-records are relative |
25660 | to the variant part itself, whereas the corresponding DIEs should have |
25661 | offset attributes that are relative to the embedding record base address. |
25662 | This is why the caller must provide a VARIANT_PART_OFFSET expression: it |
25663 | must be an expression that computes the offset of the variant part to |
25664 | describe in DWARF. */ |
25665 | |
static void
gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
		  dw_die_ref context_die)
{
  const tree variant_part_type = TREE_TYPE (variant_part_decl);
  tree variant_part_offset = vlr_ctx->variant_part_offset;

  /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
     NULL_TREE if there is no such field.  */
  tree discr_decl = NULL_TREE;
  dw_discr_list_ref *discr_lists;
  unsigned discr_lists_length = 0;
  unsigned i;

  dw_die_ref dwarf_proc_die = NULL;
  dw_die_ref variant_part_die
    = new_die (tag_value: DW_TAG_variant_part, parent_die: context_die, t: variant_part_type);

  equate_decl_number_to_die (decl: variant_part_decl, decl_die: variant_part_die);

  /* Determine the discriminant field and, per variant, the list of
     discriminant values it matches.  On failure DISCR_DECL is set to
     NULL_TREE and the lists stay empty.  */
  analyze_variants_discr (variant_part_decl, struct_type: vlr_ctx->struct_type,
			  discr_decl: &discr_decl, discr_lists_p: &discr_lists, discr_lists_length: &discr_lists_length);

  if (discr_decl != NULL_TREE)
    {
      dw_die_ref discr_die = lookup_decl_die (decl: discr_decl);

      if (discr_die)
	add_AT_die_ref (die: variant_part_die, attr_kind: DW_AT_discr, targ_die: discr_die);
      else
	/* We have no DIE for the discriminant, so just discard all
	   discriminant information in the output.  */
	discr_decl = NULL_TREE;
    }

  /* If the offset for this variant part is more complex than a constant,
     create a DWARF procedure for it so that we will not have to generate
     DWARF expressions for it for each member.  */
  if (TREE_CODE (variant_part_offset) != INTEGER_CST
      && (dwarf_version >= 3 || !dwarf_strict))
    {
      struct loc_descr_context ctx = {
	.context_type: vlr_ctx->struct_type, /* context_type */
	NULL_TREE,		     /* base_decl */
	NULL,			     /* dpi */
	.placeholder_arg: false,			     /* placeholder_arg */
	.placeholder_seen: false,			     /* placeholder_seen */
	.strict_signedness: false			     /* strict_signedness */
      };
      /* Wrap the offset computation in an artificial function so a
	 DWARF procedure DIE can be built for it; members then just
	 call that procedure.  */
      const tree dwarf_proc_fndecl
	= build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
		      build_function_type (TREE_TYPE (variant_part_offset),
					   NULL_TREE));
      const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
      const dw_loc_descr_ref dwarf_proc_body
	= loc_descriptor_from_tree (loc: variant_part_offset, want_address: 0, context: &ctx);

      dwarf_proc_die = new_dwarf_proc_die (location: dwarf_proc_body,
					   fndecl: dwarf_proc_fndecl, parent_die: context_die);
      if (dwarf_proc_die != NULL)
	variant_part_offset = dwarf_proc_call;
    }

  /* Output DIEs for all variants.  */
  i = 0;
  for (tree variant = TYPE_FIELDS (variant_part_type);
       variant != NULL_TREE;
       variant = DECL_CHAIN (variant), ++i)
    {
      tree variant_type = TREE_TYPE (variant);
      dw_die_ref variant_die;

      /* All variants (i.e. members of a variant part) are supposed to be
	 encoded as structures.  Sub-variant parts are QUAL_UNION_TYPE fields
	 under these records.  */
      gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);

      variant_die = new_die (tag_value: DW_TAG_variant, parent_die: variant_part_die, t: variant_type);
      equate_decl_number_to_die (decl: variant, decl_die: variant_die);

      /* Output discriminant values this variant matches, if any.  */
      if (discr_decl == NULL || discr_lists[i] == NULL)
	/* Either we have no discriminant information at all, or this
	   variant matched no explicit value: it is probably the default
	   variant.  As the standard says, don't output any discriminant
	   value/list attribute.  */
	;
      else if (discr_lists[i]->dw_discr_next == NULL
	       && !discr_lists[i]->dw_discr_range)
	/* If there is only one accepted value, don't bother outputting a
	   list.  */
	add_discr_value (die: variant_die, value: &discr_lists[i]->dw_discr_lower_bound);
      else
	add_discr_list (die: variant_die, discr_list: discr_lists[i]);

      for (tree member = TYPE_FIELDS (variant_type);
	   member != NULL_TREE;
	   member = DECL_CHAIN (member))
	{
	  struct vlr_context vlr_sub_ctx = {
	    .struct_type: vlr_ctx->struct_type, /* struct_type */
	    NULL		   /* variant_part_offset */
	  };
	  if (is_variant_part (decl: member))
	    {
	      /* All offsets for fields inside variant parts are relative to
		 the top-level embedding RECORD_TYPE's base address.  On the
		 other hand, offsets in GCC's types are relative to the
		 nested-most variant part.  So we have to sum offsets each time
		 we recurse.  */

	      vlr_sub_ctx.variant_part_offset
	        = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
			       variant_part_offset, byte_position (member));
	      gen_variant_part (variant_part_decl: member, vlr_ctx: &vlr_sub_ctx, context_die: variant_die);
	    }
	  else
	    {
	      vlr_sub_ctx.variant_part_offset = variant_part_offset;
	      gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
	    }
	}
    }

  /* Only the array itself was malloc'ed; the list nodes are GC-managed.  */
  free (ptr: discr_lists);
}
25791 | |
/* Generate DIEs for the members of aggregate TYPE (base classes first, then
   fields and other member declarations) as children of CONTEXT_DIE, which is
   TYPE's own DIE.  */

static void
gen_member_die (tree type, dw_die_ref context_die)
{
  tree member;
  tree binfo = TYPE_BINFO (type);

  /* Members hang off the main variant only.  */
  gcc_assert (TYPE_MAIN_VARIANT (type) == type);

  /* If this is not an incomplete type, output descriptions of each of its
     members. Note that as we output the DIEs necessary to represent the
     members of this record or union type, we will also be trying to output
     DIEs to represent the *types* of those members. However the `type'
     function (above) will specifically avoid generating type DIEs for member
     types *within* the list of member DIEs for this (containing) type except
     for those types (of members) which are explicitly marked as also being
     members of this (containing) type themselves. The g++ front- end can
     force any given type to be treated as a member of some other (containing)
     type by setting the TYPE_CONTEXT of the given (member) type to point to
     the TREE node representing the appropriate (containing) type. */

  /* First output info about the base classes. */
  if (binfo && early_dwarf)
    {
      vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
      int i;
      tree base;

      /* With no explicit access vector, bases default to public access.  */
      for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
	gen_inheritance_die (binfo: base,
			     access: (accesses ? (*accesses)[i] : access_public_node),
			     type,
			     context_die);
    }

  /* Now output info about the members. */
  for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
    {
      /* Ignore clones. */
      if (DECL_ABSTRACT_ORIGIN (member))
	continue;

      struct vlr_context vlr_ctx = { .struct_type: type, NULL_TREE };
      /* A static data member counts as "inline" when the language hook can
	 answer DW_AT_inline for it (C++17 inline variables).  */
      bool static_inline_p
	= (VAR_P (member)
	   && TREE_STATIC (member)
	   && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
	       != -1));

      /* If we thought we were generating minimal debug info for TYPE
	 and then changed our minds, some of the member declarations
	 may have already been defined. Don't define them again, but
	 do put them in the right order. */

      if (dw_die_ref child = lookup_decl_die (decl: member))
	{
	  /* Handle inline static data members, which only have in-class
	     declarations. */
	  bool splice = true;

	  dw_die_ref ref = NULL;
	  if (child->die_tag == DW_TAG_variable
	      && child->die_parent == comp_unit_die ())
	    {
	      ref = get_AT_ref (die: child, attr_kind: DW_AT_specification);

	      /* For C++17 inline static data members followed by redundant
		 out of class redeclaration, we might get here with
		 child being the DIE created for the out of class
		 redeclaration and with its DW_AT_specification being
		 the DIE created for in-class definition. We want to
		 reparent the latter, and don't want to create another
		 DIE with DW_AT_specification in that case, because
		 we already have one. */
	      if (ref
		  && static_inline_p
		  && ref->die_tag == DW_TAG_variable
		  && ref->die_parent == comp_unit_die ()
		  && get_AT (die: ref, attr_kind: DW_AT_specification) == NULL)
		{
		  child = ref;
		  ref = NULL;
		  static_inline_p = false;
		}

	      if (!ref)
		{
		  /* Move the existing toplevel DW_TAG_variable under TYPE's
		     DIE; before DWARF 5 it must be retagged as a member.  */
		  reparent_child (child, new_parent: context_die);
		  if (dwarf_version < 5)
		    child->die_tag = DW_TAG_member;
		  splice = false;
		}
	    }
	  else if (child->die_tag == DW_TAG_enumerator)
	    /* Enumerators remain under their enumeration even if
	       their names are introduced in the enclosing scope. */
	    splice = false;

	  if (splice)
	    splice_child_die (parent: context_die, child);
	}

      /* Do not generate DWARF for variant parts if we are generating the
	 corresponding GNAT encodings: DIEs generated for the two schemes
	 would conflict in our mappings. */
      else if (is_variant_part (decl: member)
	       && gnat_encodings != DWARF_GNAT_ENCODINGS_ALL)
	{
	  vlr_ctx.variant_part_offset = byte_position (member);
	  gen_variant_part (variant_part_decl: member, vlr_ctx: &vlr_ctx, context_die);
	}
      else
	{
	  vlr_ctx.variant_part_offset = NULL_TREE;
	  gen_decl_die (member, NULL, &vlr_ctx, context_die);
	}

      /* For C++ inline static data members emit immediately a DW_TAG_variable
	 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
	 DW_AT_specification. */
      if (static_inline_p)
	{
	  /* Temporarily clear DECL_EXTERNAL so the member is emitted as a
	     definition rather than a mere declaration.  */
	  int old_extern = DECL_EXTERNAL (member);
	  DECL_EXTERNAL (member) = 0;
	  gen_decl_die (member, NULL, NULL, comp_unit_die ());
	  DECL_EXTERNAL (member) = old_extern;
	}
    }
}
25922 | |
/* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
   is set, we pretend that the type was never defined, so we only get the
   member DIEs needed by later specification DIEs. */

static void
gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
			      enum debug_info_usage usage)
{
  if (TREE_ASM_WRITTEN (type))
    {
      /* Fill in the bound of variable-length fields in late dwarf if
	 still incomplete. */
      if (!early_dwarf && variably_modified_type_p (type, NULL))
	for (tree member = TYPE_FIELDS (type);
	     member;
	     member = DECL_CHAIN (member))
	  fill_variable_array_bounds (TREE_TYPE (member));
      return;
    }

  dw_die_ref type_die = lookup_type_die (type);
  dw_die_ref scope_die = 0;
  bool nested = false;
  /* The type is complete when it has a size and its stub TYPE_DECL does not
     suppress debug info for it.  */
  bool complete = (TYPE_SIZE (type)
		   && (! TYPE_STUB_DECL (type)
		       || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
  bool ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
  complete = complete && should_emit_struct_debug (type, usage);

  /* A pre-existing DIE for a still-incomplete type means a declaration was
     already emitted; nothing more to do.  */
  if (type_die && ! complete)
    return;

  if (TYPE_CONTEXT (type) != NULL_TREE
      && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
	  || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
    nested = true;

  scope_die = scope_die_for (t: type, context_die);

  /* Generate child dies for template parameters. */
  if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
    schedule_generic_params_dies_gen (t: type);

  if (! type_die || (nested && is_cu_die (c: scope_die)))
    /* First occurrence of type or toplevel definition of nested class. */
    {
      dw_die_ref old_die = type_die;

      type_die = new_die (TREE_CODE (type) == RECORD_TYPE
			  ? record_type_tag (type) : DW_TAG_union_type,
			  parent_die: scope_die, t: type);
      equate_type_number_to_die (type, type_die);
      if (old_die)
	add_AT_specification (die: type_die, targ_die: old_die);
      else
	add_name_attribute (die: type_die, name_string: type_tag (type));
    }
  else
    remove_AT (die: type_die, attr_kind: DW_AT_declaration);

  /* If this type has been completed, then give it a byte_size attribute and
     then give a list of members. */
  if (complete && !ns_decl)
    {
      /* Prevent infinite recursion in cases where the type of some member of
	 this type is expressed in terms of this type itself. */
      TREE_ASM_WRITTEN (type) = 1;
      add_byte_size_attribute (die: type_die, tree_node: type);
      add_alignment_attribute (die: type_die, tree_node: type);
      if (TYPE_STUB_DECL (type) != NULL_TREE)
	{
	  add_src_coords_attributes (die: type_die, TYPE_STUB_DECL (type));
	  add_accessibility_attribute (die: type_die, TYPE_STUB_DECL (type));
	}

      /* If the first reference to this type was as the return type of an
	 inline function, then it may not have a parent. Fix this now. */
      if (type_die->die_parent == NULL)
	add_child_die (die: scope_die, child_die: type_die);

      gen_member_die (type, context_die: type_die);

      add_gnat_descriptive_type_attribute (die: type_die, type, context_die);
      if (TYPE_ARTIFICIAL (type))
	add_AT_flag (die: type_die, attr_kind: DW_AT_artificial, flag: 1);

      /* GNU extension: Record what type our vtable lives in. */
      if (TYPE_VFIELD (type))
	{
	  tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));

	  gen_type_die (vtype, context_die);
	  add_AT_die_ref (die: type_die, attr_kind: DW_AT_containing_type,
			  targ_die: lookup_type_die (type: vtype));
	}
    }
  else
    {
      /* Incomplete (or suppressed) type: emit only a declaration and
	 remember it so it can be completed later if a definition shows
	 up.  */
      add_AT_flag (die: type_die, attr_kind: DW_AT_declaration, flag: 1);

      /* We don't need to do this for function-local types. */
      if (TYPE_STUB_DECL (type)
	  && ! decl_function_context (TYPE_STUB_DECL (type)))
	vec_safe_push (v&: incomplete_types, obj: type);
    }

  if (get_AT (die: type_die, attr_kind: DW_AT_name))
    add_pubtype (decl: type, die: type_die);
}
26032 | |
26033 | /* Generate a DIE for a subroutine _type_. */ |
26034 | |
26035 | static void |
26036 | gen_subroutine_type_die (tree type, dw_die_ref context_die) |
26037 | { |
26038 | tree return_type = TREE_TYPE (type); |
26039 | dw_die_ref subr_die |
26040 | = new_die (tag_value: DW_TAG_subroutine_type, |
26041 | parent_die: scope_die_for (t: type, context_die), t: type); |
26042 | |
26043 | equate_type_number_to_die (type, type_die: subr_die); |
26044 | add_prototyped_attribute (die: subr_die, func_type: type); |
26045 | add_type_attribute (object_die: subr_die, type: return_type, cv_quals: TYPE_UNQUALIFIED, reverse: false, |
26046 | context_die); |
26047 | add_alignment_attribute (die: subr_die, tree_node: type); |
26048 | gen_formal_types_die (function_or_method_type: type, context_die: subr_die); |
26049 | |
26050 | if (get_AT (die: subr_die, attr_kind: DW_AT_name)) |
26051 | add_pubtype (decl: type, die: subr_die); |
26052 | if ((dwarf_version >= 5 || !dwarf_strict) |
26053 | && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1) |
26054 | add_AT_flag (die: subr_die, attr_kind: DW_AT_reference, flag: 1); |
26055 | if ((dwarf_version >= 5 || !dwarf_strict) |
26056 | && lang_hooks.types.type_dwarf_attribute (type, |
26057 | DW_AT_rvalue_reference) != -1) |
26058 | add_AT_flag (die: subr_die, attr_kind: DW_AT_rvalue_reference, flag: 1); |
26059 | } |
26060 | |
/* Generate a DW_TAG_typedef DIE for the TYPE_DECL DECL under CONTEXT_DIE.  */

static void
gen_typedef_die (tree decl, dw_die_ref context_die)
{
  dw_die_ref type_die;
  tree type;

  /* Already emitted: at most refresh variable array bounds for the
     underlying type and return.  */
  if (TREE_ASM_WRITTEN (decl))
    {
      if (DECL_ORIGINAL_TYPE (decl))
	fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
      return;
    }

  /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
     checks in process_scope_var and modified_type_die), this should be called
     only for original types. */
  gcc_assert (decl_ultimate_origin (decl) == NULL
	      || decl_ultimate_origin (decl) == decl);

  TREE_ASM_WRITTEN (decl) = 1;
  type_die = new_die (tag_value: DW_TAG_typedef, parent_die: context_die, t: decl);

  add_name_and_src_coords_attributes (die: type_die, decl);
  if (DECL_ORIGINAL_TYPE (decl))
    {
      /* DECL renames an existing type; DECL_ORIGINAL_TYPE is that type.  */
      type = DECL_ORIGINAL_TYPE (decl);
      if (type == error_mark_node)
	return;

      gcc_assert (type != TREE_TYPE (decl));
      equate_type_number_to_die (TREE_TYPE (decl), type_die);
    }
  else
    {
      type = TREE_TYPE (decl);
      if (type == error_mark_node)
	return;

      if (is_naming_typedef_decl (TYPE_NAME (type)))
	{
	  /* Here, we are in the case of decl being a typedef naming
	     an anonymous type, e.g:
	         typedef struct {...} foo;
	     In that case TREE_TYPE (decl) is not a typedef variant
	     type and TYPE_NAME of the anonymous type is set to the
	     TYPE_DECL of the typedef. This construct is emitted by
	     the C++ FE.

	     TYPE is the anonymous struct named by the typedef
	     DECL. As we need the DW_AT_type attribute of the
	     DW_TAG_typedef to point to the DIE of TYPE, let's
	     generate that DIE right away. add_type_attribute
	     called below will then pick (via lookup_type_die) that
	     anonymous struct DIE. */
	  if (!TREE_ASM_WRITTEN (type))
	    gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);

	  /* This is a GNU Extension. We are adding a
	     DW_AT_linkage_name attribute to the DIE of the
	     anonymous struct TYPE. The value of that attribute
	     is the name of the typedef decl naming the anonymous
	     struct. This greatly eases the work of consumers of
	     this debug info. */
	  add_linkage_name_raw (die: lookup_type_die (type), decl);
	}
    }

  add_type_attribute (object_die: type_die, type, cv_quals: decl_quals (decl), reverse: false,
		      context_die);

  if (is_naming_typedef_decl (decl))
    /* We want that all subsequent calls to lookup_type_die with
       TYPE in argument yield the DW_TAG_typedef we have just
       created. */
    equate_type_number_to_die (type, type_die);

  add_alignment_attribute (die: type_die, TREE_TYPE (decl));

  add_accessibility_attribute (die: type_die, decl);

  /* In an abstract instance, later concrete instances must be able to
     find this typedef's DIE by decl.  */
  if (DECL_ABSTRACT_P (decl))
    equate_decl_number_to_die (decl, decl_die: type_die);

  if (get_AT (die: type_die, attr_kind: DW_AT_name))
    add_pubtype (decl, die: type_die);
}
26149 | |
/* Generate a DIE for a struct, class, enum or union type.  TYPE is first
   placed in the proper context (enclosing class, function, or namespace),
   then dispatched to the enumeration or struct/union emitter.  */

static void
gen_tagged_type_die (tree type,
		     dw_die_ref context_die,
		     enum debug_info_usage usage)
{
  /* Only tagged types are handled here.  */
  if (type == NULL_TREE
      || !is_tagged_type (type))
    return;

  if (TREE_ASM_WRITTEN (type))
    ;
  /* If this is a nested type whose containing class hasn't been written
     out yet, writing it out will cover this one, too. This does not apply
     to instantiations of member class templates; they need to be added to
     the containing class as they are generated. FIXME: This hurts the
     idea of combining type decls from multiple TUs, since we can't predict
     what set of template instantiations we'll get. */
  else if (TYPE_CONTEXT (type)
	   && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
	   && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
    {
      gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);

      if (TREE_ASM_WRITTEN (type))
	return;

      /* If that failed, attach ourselves to the stub. */
      context_die = lookup_type_die (TYPE_CONTEXT (type));
    }
  else if (TYPE_CONTEXT (type) != NULL_TREE
	   && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
    {
      /* If this type is local to a function that hasn't been written
	 out yet, use a NULL context for now; it will be fixed up in
	 decls_for_scope. */
      context_die = lookup_decl_die (TYPE_CONTEXT (type));
      /* A declaration DIE doesn't count; nested types need to go in the
	 specification. */
      if (context_die && is_declaration_die (die: context_die))
	context_die = NULL;
    }
  else
    context_die = declare_in_namespace (type, context_die);

  if (TREE_CODE (type) == ENUMERAL_TYPE)
    {
      /* This might have been written out by the call to
	 declare_in_namespace. */
      if (!TREE_ASM_WRITTEN (type))
	gen_enumeration_type_die (type, context_die);
    }
  else
    gen_struct_or_union_type_die (type, context_die, usage);

  /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
     it up if it is ever completed. gen_*_type_die will set it for us
     when appropriate. */
}
26210 | |
/* Generate a type description DIE for TYPE under CONTEXT_DIE.  USAGE says
   whether the type is used directly or indirectly, which feeds the
   should-emit heuristics for aggregates.  */

static void
gen_type_die_with_usage (tree type, dw_die_ref context_die,
			 enum debug_info_usage usage)
{
  struct array_descr_info info;

  if (type == NULL_TREE || type == error_mark_node)
    return;

  if (flag_checking && type)
    verify_type (t: type);

  if (TYPE_NAME (type) != NULL_TREE
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && is_redundant_typedef (TYPE_NAME (type))
      && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
    /* The DECL of this type is a typedef we don't want to emit debug
       info for but we want debug info for its underlying typedef.
       This can happen for e.g, the injected-class-name of a C++
       type. */
    type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));

  /* If TYPE is a typedef type variant, let's generate debug info
     for the parent typedef which TYPE is a type of. */
  if (typedef_variant_p (type))
    {
      if (TREE_ASM_WRITTEN (type))
	return;

      tree name = TYPE_NAME (type);
      tree origin = decl_ultimate_origin (decl: name);
      if (origin != NULL && origin != name)
	{
	  /* Local typedef in an inlined/cloned body: emit via the
	     abstract origin instead.  */
	  gen_decl_die (origin, NULL, NULL, context_die);
	  return;
	}

      /* Prevent broken recursion; we can't hand off to the same type. */
      gcc_assert (DECL_ORIGINAL_TYPE (name) != type);

      /* Give typedefs the right scope. */
      context_die = scope_die_for (t: type, context_die);

      TREE_ASM_WRITTEN (type) = 1;

      gen_decl_die (name, NULL, NULL, context_die);
      return;
    }

  /* If type is an anonymous tagged type named by a typedef, let's
     generate debug info for the typedef. */
  if (is_naming_typedef_decl (TYPE_NAME (type)))
    {
      /* Give typedefs the right scope. */
      context_die = scope_die_for (t: type, context_die);

      gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
      return;
    }

  /* A front end may substitute a debug-only type for this one.  */
  if (lang_hooks.types.get_debug_type)
    {
      tree debug_type = lang_hooks.types.get_debug_type (type);

      if (debug_type != NULL_TREE && debug_type != type)
	{
	  gen_type_die_with_usage (type: debug_type, context_die, usage);
	  return;
	}
    }

  /* We are going to output a DIE to represent the unqualified version
     of this type (i.e. without any const or volatile qualifiers) so
     get the main variant (i.e. the unqualified version) of this type
     now. (Vectors and arrays are special because the debugging info is in the
     cloned type itself. Similarly function/method types can contain extra
     ref-qualification). */
  if (FUNC_OR_METHOD_TYPE_P (type))
    {
      /* For function/method types, can't use type_main_variant here,
	 because that can have different ref-qualifiers for C++,
	 but try to canonicalize. */
      tree main = TYPE_MAIN_VARIANT (type);
      for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
	if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
	    && check_base_type (cand: t, base: main)
	    && check_lang_type (cand: t, base: type))
	  {
	    type = t;
	    break;
	  }
    }
  else if (TREE_CODE (type) != VECTOR_TYPE
	   && TREE_CODE (type) != ARRAY_TYPE)
    type = type_main_variant (type);

  /* If this is an array type with hidden descriptor, handle it first. */
  if (!TREE_ASM_WRITTEN (type)
      && lang_hooks.types.get_array_descr_info)
    {
      memset (s: &info, c: 0, n: sizeof (info));
      if (lang_hooks.types.get_array_descr_info (type, &info))
	{
	  /* Fortran sometimes emits array types with no dimension. */
	  gcc_assert (info.ndimensions >= 0
		      && (info.ndimensions
			  <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
	  gen_descr_array_type_die (type, info: &info, context_die);
	  TREE_ASM_WRITTEN (type) = 1;
	  return;
	}
    }

  if (TREE_ASM_WRITTEN (type))
    {
      /* Variable-length types may be incomplete even if
	 TREE_ASM_WRITTEN. For such types, fall through to
	 gen_array_type_die() and possibly fill in
	 DW_AT_{upper,lower}_bound attributes. */
      if ((TREE_CODE (type) != ARRAY_TYPE
	   && TREE_CODE (type) != RECORD_TYPE
	   && TREE_CODE (type) != UNION_TYPE
	   && TREE_CODE (type) != QUAL_UNION_TYPE)
	  || !variably_modified_type_p (type, NULL))
	return;
    }

  /* Dispatch on the kind of type.  */
  switch (TREE_CODE (type))
    {
    case ERROR_MARK:
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
	 ensures that the gen_type_die recursion will terminate even if the
	 type is recursive. Recursive types are possible in Ada. */
      /* ??? We could perhaps do this for all types before the switch
	 statement. */
      TREE_ASM_WRITTEN (type) = 1;

      /* For these types, all that is required is that we output a DIE (or a
	 set of DIEs) to represent the "basis" type. */
      gen_type_die_with_usage (TREE_TYPE (type), context_die,
			       usage: DINFO_USAGE_IND_USE);
      break;

    case OFFSET_TYPE:
      /* This code is used for C++ pointer-to-data-member types.
	 Output a description of the relevant class type. */
      gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
			       usage: DINFO_USAGE_IND_USE);

      /* Output a description of the type of the object pointed to. */
      gen_type_die_with_usage (TREE_TYPE (type), context_die,
			       usage: DINFO_USAGE_IND_USE);

      /* Now output a DIE to represent this pointer-to-data-member type
	 itself. */
      gen_ptr_to_mbr_type_die (type, context_die);
      break;

    case FUNCTION_TYPE:
      /* Force out return type (in case it wasn't forced out already). */
      gen_type_die_with_usage (TREE_TYPE (type), context_die,
			       usage: DINFO_USAGE_DIR_USE);
      gen_subroutine_type_die (type, context_die);
      break;

    case METHOD_TYPE:
      /* Force out return type (in case it wasn't forced out already). */
      gen_type_die_with_usage (TREE_TYPE (type), context_die,
			       usage: DINFO_USAGE_DIR_USE);
      gen_subroutine_type_die (type, context_die);
      break;

    case ARRAY_TYPE:
    case VECTOR_TYPE:
      gen_array_type_die (type, context_die);
      break;

    case ENUMERAL_TYPE:
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Note the early return: gen_tagged_type_die decides for itself
	 whether to set TREE_ASM_WRITTEN (incomplete types must stay
	 unmarked so they can be fixed up when completed).  */
      gen_tagged_type_die (type, context_die, usage);
      return;

    case VOID_TYPE:
    case OPAQUE_TYPE:
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case BOOLEAN_TYPE:
    case BITINT_TYPE:
      /* No DIEs needed for fundamental types. */
      break;

    case NULLPTR_TYPE:
    case LANG_TYPE:
      /* Just use DW_TAG_unspecified_type. */
      {
	dw_die_ref type_die = lookup_type_die (type);
	if (type_die == NULL)
	  {
	    tree name = TYPE_IDENTIFIER (type);
	    type_die = new_die (tag_value: DW_TAG_unspecified_type, parent_die: comp_unit_die (),
				t: type);
	    add_name_attribute (die: type_die, IDENTIFIER_POINTER (name));
	    equate_type_number_to_die (type, type_die);
	  }
      }
      break;

    default:
      if (is_cxx_auto (type))
	{
	  /* C++ `auto'/`decltype(auto)': share one
	     DW_TAG_unspecified_type DIE per kind across the CU.  */
	  tree name = TYPE_IDENTIFIER (type);
	  dw_die_ref *die = (name == get_identifier ("auto" )
			     ? &auto_die : &decltype_auto_die);
	  if (!*die)
	    {
	      *die = new_die (tag_value: DW_TAG_unspecified_type,
			      parent_die: comp_unit_die (), NULL_TREE);
	      add_name_attribute (die: *die, IDENTIFIER_POINTER (name));
	    }
	  equate_type_number_to_die (type, type_die: *die);
	  break;
	}
      gcc_unreachable ();
    }

  TREE_ASM_WRITTEN (type) = 1;
}
26448 | |
26449 | static void |
26450 | gen_type_die (tree type, dw_die_ref context_die) |
26451 | { |
26452 | if (type != error_mark_node) |
26453 | { |
26454 | gen_type_die_with_usage (type, context_die, usage: DINFO_USAGE_DIR_USE); |
26455 | if (flag_checking) |
26456 | { |
26457 | dw_die_ref die = lookup_type_die (type); |
26458 | if (die) |
26459 | check_die (die); |
26460 | } |
26461 | } |
26462 | } |
26463 | |
26464 | /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the |
26465 | things which are local to the given block. */ |
26466 | |
26467 | static void |
26468 | gen_block_die (tree stmt, dw_die_ref context_die) |
26469 | { |
26470 | int must_output_die = 0; |
26471 | bool inlined_func; |
26472 | |
26473 | /* Ignore blocks that are NULL. */ |
26474 | if (stmt == NULL_TREE) |
26475 | return; |
26476 | |
26477 | inlined_func = inlined_function_outer_scope_p (block: stmt); |
26478 | |
26479 | /* If the block is one fragment of a non-contiguous block, do not |
26480 | process the variables, since they will have been done by the |
26481 | origin block. Do process subblocks. */ |
26482 | if (BLOCK_FRAGMENT_ORIGIN (stmt)) |
26483 | { |
26484 | tree sub; |
26485 | |
26486 | for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub)) |
26487 | gen_block_die (stmt: sub, context_die); |
26488 | |
26489 | return; |
26490 | } |
26491 | |
26492 | /* Determine if we need to output any Dwarf DIEs at all to represent this |
26493 | block. */ |
26494 | if (inlined_func) |
26495 | /* The outer scopes for inlinings *must* always be represented. We |
26496 | generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */ |
26497 | must_output_die = 1; |
26498 | else if (lookup_block_die (block: stmt)) |
26499 | /* If we already have a DIE then it was filled early. Meanwhile |
26500 | we might have pruned all BLOCK_VARS as optimized out but we |
26501 | still want to generate high/low PC attributes so output it. */ |
26502 | must_output_die = 1; |
26503 | else if (TREE_USED (stmt) |
26504 | || TREE_ASM_WRITTEN (stmt)) |
26505 | { |
26506 | /* Determine if this block directly contains any "significant" |
26507 | local declarations which we will need to output DIEs for. */ |
26508 | if (debug_info_level > DINFO_LEVEL_TERSE) |
26509 | { |
26510 | /* We are not in terse mode so any local declaration that |
26511 | is not ignored for debug purposes counts as being a |
26512 | "significant" one. */ |
26513 | if (BLOCK_NUM_NONLOCALIZED_VARS (stmt)) |
26514 | must_output_die = 1; |
26515 | else |
26516 | for (tree var = BLOCK_VARS (stmt); var; var = DECL_CHAIN (var)) |
26517 | if (!DECL_IGNORED_P (var)) |
26518 | { |
26519 | must_output_die = 1; |
26520 | break; |
26521 | } |
26522 | } |
26523 | else if (!dwarf2out_ignore_block (stmt)) |
26524 | must_output_die = 1; |
26525 | } |
26526 | |
26527 | /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block |
26528 | DIE for any block which contains no significant local declarations at |
26529 | all. Rather, in such cases we just call `decls_for_scope' so that any |
26530 | needed Dwarf info for any sub-blocks will get properly generated. Note |
26531 | that in terse mode, our definition of what constitutes a "significant" |
26532 | local declaration gets restricted to include only inlined function |
26533 | instances and local (nested) function definitions. */ |
26534 | if (must_output_die) |
26535 | { |
26536 | if (inlined_func) |
26537 | gen_inlined_subroutine_die (stmt, context_die); |
26538 | else |
26539 | gen_lexical_block_die (stmt, context_die); |
26540 | } |
26541 | else |
26542 | decls_for_scope (stmt, context_die); |
26543 | } |
26544 | |
/* Process variable DECL (or variable with origin ORIGIN) within
   block STMT and add it to CONTEXT_DIE.  Exactly one of DECL/ORIGIN is
   expected to be non-NULL when the other is unavailable.  */
static void
process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
{
  dw_die_ref die;
  tree decl_or_origin = decl ? decl : origin;

  /* Find any DIE already created for the decl (or, for a type stub, for
     its type).  */
  if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
    die = lookup_decl_die (decl: decl_or_origin);
  else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
    {
      if (TYPE_DECL_IS_STUB (decl_or_origin))
	die = lookup_type_die (TREE_TYPE (decl_or_origin));
      else
	die = lookup_decl_die (decl: decl_or_origin);
      /* Avoid re-creating the DIE late if it was optimized as unused early. */
      if (! die && ! early_dwarf)
	return;
    }
  else
    die = NULL;

  /* Avoid creating DIEs for local typedefs and concrete static variables that
     will only be pruned later. */
  if ((origin || decl_ultimate_origin (decl))
      && (TREE_CODE (decl_or_origin) == TYPE_DECL
	  || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
    {
      origin = decl_ultimate_origin (decl: decl_or_origin);
      /* Map DECL to its origin's DIE so later lookups by DECL succeed.  */
      if (decl && VAR_P (decl) && die != NULL)
	{
	  die = lookup_decl_die (decl: origin);
	  if (die != NULL)
	    equate_decl_number_to_die (decl, decl_die: die);
	}
      return;
    }

  /* A parentless pre-existing DIE is adopted by this scope.  */
  if (die != NULL && die->die_parent == NULL)
    add_child_die (die: context_die, child_die: die);

  if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
    {
      /* Imported modules/decls are only generated during early dwarf.  */
      if (early_dwarf)
	dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
					     stmt, context_die);
    }
  else
    {
      if (decl && DECL_P (decl))
	{
	  die = lookup_decl_die (decl);

	  /* Early created DIEs do not have a parent as the decls refer
	     to the function as DECL_CONTEXT rather than the BLOCK. */
	  if (die && die->die_parent == NULL)
	    {
	      gcc_assert (in_lto_p);
	      add_child_die (die: context_die, child_die: die);
	    }
	}

      gen_decl_die (decl, origin, NULL, context_die);
    }
}
26611 | |
26612 | /* Generate all of the decls declared within a given scope and (recursively) |
26613 | all of its sub-blocks. */ |
26614 | |
26615 | static void |
26616 | decls_for_scope (tree stmt, dw_die_ref context_die, bool recurse) |
26617 | { |
26618 | tree decl; |
26619 | unsigned int i; |
26620 | tree subblocks; |
26621 | |
26622 | /* Ignore NULL blocks. */ |
26623 | if (stmt == NULL_TREE) |
26624 | return; |
26625 | |
26626 | /* Output the DIEs to represent all of the data objects and typedefs |
26627 | declared directly within this block but not within any nested |
26628 | sub-blocks. Also, nested function and tag DIEs have been |
26629 | generated with a parent of NULL; fix that up now. We don't |
26630 | have to do this if we're at -g1. */ |
26631 | if (debug_info_level > DINFO_LEVEL_TERSE) |
26632 | { |
26633 | for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl)) |
26634 | process_scope_var (stmt, decl, NULL_TREE, context_die); |
26635 | /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract |
26636 | origin - avoid doing this twice as we have no good way to see |
26637 | if we've done it once already. */ |
26638 | if (! early_dwarf) |
26639 | for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++) |
26640 | { |
26641 | decl = BLOCK_NONLOCALIZED_VAR (stmt, i); |
26642 | if (decl == current_function_decl) |
26643 | /* Ignore declarations of the current function, while they |
26644 | are declarations, gen_subprogram_die would treat them |
26645 | as definitions again, because they are equal to |
26646 | current_function_decl and endlessly recurse. */; |
26647 | else if (TREE_CODE (decl) == FUNCTION_DECL) |
26648 | process_scope_var (stmt, decl, NULL_TREE, context_die); |
26649 | else |
26650 | process_scope_var (stmt, NULL_TREE, origin: decl, context_die); |
26651 | } |
26652 | } |
26653 | |
26654 | /* Even if we're at -g1, we need to process the subblocks in order to get |
26655 | inlined call information. */ |
26656 | |
26657 | /* Output the DIEs to represent all sub-blocks (and the items declared |
26658 | therein) of this block. */ |
26659 | if (recurse) |
26660 | for (subblocks = BLOCK_SUBBLOCKS (stmt); |
26661 | subblocks != NULL; |
26662 | subblocks = BLOCK_CHAIN (subblocks)) |
26663 | gen_block_die (stmt: subblocks, context_die); |
26664 | } |
26665 | |
26666 | /* Is this a typedef we can avoid emitting? */ |
26667 | |
26668 | static bool |
26669 | is_redundant_typedef (const_tree decl) |
26670 | { |
26671 | if (TYPE_DECL_IS_STUB (decl)) |
26672 | return true; |
26673 | |
26674 | if (DECL_ARTIFICIAL (decl) |
26675 | && DECL_CONTEXT (decl) |
26676 | && is_tagged_type (DECL_CONTEXT (decl)) |
26677 | && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL |
26678 | && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl)))) |
26679 | /* Also ignore the artificial member typedef for the class name. */ |
26680 | return true; |
26681 | |
26682 | return false; |
26683 | } |
26684 | |
26685 | /* Return TRUE if TYPE is a typedef that names a type for linkage |
26686 | purposes. This kind of typedefs is produced by the C++ FE for |
26687 | constructs like: |
26688 | |
26689 | typedef struct {...} foo; |
26690 | |
26691 | In that case, there is no typedef variant type produced for foo. |
26692 | Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous |
26693 | struct type. */ |
26694 | |
26695 | static bool |
26696 | is_naming_typedef_decl (const_tree decl) |
26697 | { |
26698 | if (decl == NULL_TREE |
26699 | || TREE_CODE (decl) != TYPE_DECL |
26700 | || DECL_NAMELESS (decl) |
26701 | || !is_tagged_type (TREE_TYPE (decl)) |
26702 | || DECL_IS_UNDECLARED_BUILTIN (decl) |
26703 | || is_redundant_typedef (decl) |
26704 | /* It looks like Ada produces TYPE_DECLs that are very similar |
26705 | to C++ naming typedefs but that have different |
26706 | semantics. Let's be specific to c++ for now. */ |
26707 | || !is_cxx (decl)) |
26708 | return false; |
26709 | |
26710 | return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE |
26711 | && TYPE_NAME (TREE_TYPE (decl)) == decl |
26712 | && (TYPE_STUB_DECL (TREE_TYPE (decl)) |
26713 | != TYPE_NAME (TREE_TYPE (decl)))); |
26714 | } |
26715 | |
26716 | /* Looks up the DIE for a context. */ |
26717 | |
26718 | static inline dw_die_ref |
26719 | lookup_context_die (tree context) |
26720 | { |
26721 | if (context) |
26722 | { |
26723 | /* Find die that represents this context. */ |
26724 | if (TYPE_P (context)) |
26725 | { |
26726 | context = TYPE_MAIN_VARIANT (context); |
26727 | dw_die_ref ctx = lookup_type_die (type: context); |
26728 | if (!ctx) |
26729 | return NULL; |
26730 | return strip_naming_typedef (type: context, type_die: ctx); |
26731 | } |
26732 | else |
26733 | return lookup_decl_die (decl: context); |
26734 | } |
26735 | return comp_unit_die (); |
26736 | } |
26737 | |
26738 | /* Returns the DIE for a context. */ |
26739 | |
26740 | static inline dw_die_ref |
26741 | get_context_die (tree context) |
26742 | { |
26743 | if (context) |
26744 | { |
26745 | /* Find die that represents this context. */ |
26746 | if (TYPE_P (context)) |
26747 | { |
26748 | context = TYPE_MAIN_VARIANT (context); |
26749 | return strip_naming_typedef (type: context, type_die: force_type_die (context)); |
26750 | } |
26751 | else |
26752 | return force_decl_die (context); |
26753 | } |
26754 | return comp_unit_die (); |
26755 | } |
26756 | |
26757 | /* Returns the DIE for decl. A DIE will always be returned. */ |
26758 | |
static dw_die_ref
force_decl_die (tree decl)
{
  dw_die_ref decl_die;
  unsigned saved_external_flag;
  tree save_fn = NULL_TREE;
  decl_die = lookup_decl_die (decl);
  if (!decl_die)
    {
      /* Creating the context DIE may recursively create DECL's DIE as a
	 side effect (e.g. a namespace forcing out its members), so look
	 the decl up again before generating anything ourselves.  */
      dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));

      decl_die = lookup_decl_die (decl);
      if (decl_die)
	return decl_die;

      /* How the DIE is generated depends on the kind of declaration;
	 some kinds need temporary global-state tweaks to force a
	 declaration (rather than definition) DIE.  */
      switch (TREE_CODE (decl))
	{
	case FUNCTION_DECL:
	  /* Clear current_function_decl, so that gen_subprogram_die thinks
	     that this is a declaration. At this point, we just want to force
	     declaration die. */
	  save_fn = current_function_decl;
	  current_function_decl = NULL_TREE;
	  gen_subprogram_die (decl, context_die);
	  current_function_decl = save_fn;
	  break;

	case VAR_DECL:
	  /* Set external flag to force declaration die. Restore it after
	     gen_decl_die() call. */
	  saved_external_flag = DECL_EXTERNAL (decl);
	  DECL_EXTERNAL (decl) = 1;
	  gen_decl_die (decl, NULL, NULL, context_die);
	  DECL_EXTERNAL (decl) = saved_external_flag;
	  break;

	case NAMESPACE_DECL:
	  if (dwarf_version >= 3 || !dwarf_strict)
	    dwarf2out_decl (decl);
	  else
	    /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
	    decl_die = comp_unit_die ();
	  break;

	case CONST_DECL:
	  /* Enumerators shouldn't need force_decl_die. */
	  gcc_assert (DECL_CONTEXT (decl) == NULL_TREE
		      || TREE_CODE (DECL_CONTEXT (decl)) != ENUMERAL_TYPE);
	  gen_decl_die (decl, NULL, NULL, context_die);
	  break;

	case TRANSLATION_UNIT_DECL:
	  decl_die = comp_unit_die ();
	  break;

	default:
	  gcc_unreachable ();
	}

      /* We should be able to find the DIE now.  (The NAMESPACE_DECL and
	 TRANSLATION_UNIT_DECL cases may already have set decl_die
	 directly above; don't overwrite it.)  */
      if (!decl_die)
	decl_die = lookup_decl_die (decl);
      gcc_assert (decl_die);
    }

  return decl_die;
}
26826 | |
26827 | /* Returns the DIE for TYPE, that must not be a base type. A DIE is |
26828 | always returned. */ |
26829 | |
26830 | static dw_die_ref |
26831 | force_type_die (tree type) |
26832 | { |
26833 | dw_die_ref type_die; |
26834 | |
26835 | type_die = lookup_type_die (type); |
26836 | if (!type_die) |
26837 | { |
26838 | dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type)); |
26839 | |
26840 | type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type), |
26841 | reverse: false, context_die); |
26842 | gcc_assert (type_die); |
26843 | } |
26844 | return type_die; |
26845 | } |
26846 | |
26847 | /* Force out any required namespaces to be able to output DECL, |
26848 | and return the new context_die for it, if it's changed. */ |
26849 | |
26850 | static dw_die_ref |
26851 | setup_namespace_context (tree thing, dw_die_ref context_die) |
26852 | { |
26853 | tree context = (DECL_P (thing) |
26854 | ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing)); |
26855 | if (context && TREE_CODE (context) == NAMESPACE_DECL) |
26856 | /* Force out the namespace. */ |
26857 | context_die = force_decl_die (decl: context); |
26858 | |
26859 | return context_die; |
26860 | } |
26861 | |
26862 | /* Emit a declaration DIE for THING (which is either a DECL or a tagged |
26863 | type) within its namespace, if appropriate. |
26864 | |
26865 | For compatibility with older debuggers, namespace DIEs only contain |
26866 | declarations; all definitions are emitted at CU scope, with |
26867 | DW_AT_specification pointing to the declaration (like with class |
26868 | members). */ |
26869 | |
26870 | static dw_die_ref |
26871 | declare_in_namespace (tree thing, dw_die_ref context_die) |
26872 | { |
26873 | dw_die_ref ns_context; |
26874 | |
26875 | if (debug_info_level <= DINFO_LEVEL_TERSE) |
26876 | return context_die; |
26877 | |
26878 | /* External declarations in the local scope only need to be emitted |
26879 | once, not once in the namespace and once in the scope. |
26880 | |
26881 | This avoids declaring the `extern' below in the |
26882 | namespace DIE as well as in the innermost scope: |
26883 | |
26884 | namespace S |
26885 | { |
26886 | int i=5; |
26887 | int foo() |
26888 | { |
26889 | int i=8; |
26890 | extern int i; |
26891 | return i; |
26892 | } |
26893 | } |
26894 | */ |
26895 | if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die)) |
26896 | return context_die; |
26897 | |
26898 | /* If this decl is from an inlined function, then don't try to emit it in its |
26899 | namespace, as we will get confused. It would have already been emitted |
26900 | when the abstract instance of the inline function was emitted anyways. */ |
26901 | if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing)) |
26902 | return context_die; |
26903 | |
26904 | ns_context = setup_namespace_context (thing, context_die); |
26905 | |
26906 | if (ns_context != context_die) |
26907 | { |
26908 | if (is_fortran () || is_dlang ()) |
26909 | return ns_context; |
26910 | if (DECL_P (thing)) |
26911 | gen_decl_die (thing, NULL, NULL, ns_context); |
26912 | else |
26913 | gen_type_die (type: thing, context_die: ns_context); |
26914 | } |
26915 | return context_die; |
26916 | } |
26917 | |
26918 | /* Generate a DIE for a namespace or namespace alias. */ |
26919 | |
26920 | static void |
26921 | gen_namespace_die (tree decl, dw_die_ref context_die) |
26922 | { |
26923 | dw_die_ref namespace_die; |
26924 | |
26925 | /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace |
26926 | they are an alias of. */ |
26927 | if (DECL_ABSTRACT_ORIGIN (decl) == NULL) |
26928 | { |
26929 | /* Output a real namespace or module. */ |
26930 | context_die = setup_namespace_context (thing: decl, context_die: comp_unit_die ()); |
26931 | namespace_die = new_die (tag_value: is_fortran () || is_dlang () |
26932 | ? DW_TAG_module : DW_TAG_namespace, |
26933 | parent_die: context_die, t: decl); |
26934 | /* For Fortran modules defined in different CU don't add src coords. */ |
26935 | if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl)) |
26936 | { |
26937 | const char *name = dwarf2_name (decl, scope: 0); |
26938 | if (name) |
26939 | add_name_attribute (die: namespace_die, name_string: name); |
26940 | } |
26941 | else |
26942 | add_name_and_src_coords_attributes (die: namespace_die, decl); |
26943 | if (DECL_EXTERNAL (decl)) |
26944 | add_AT_flag (die: namespace_die, attr_kind: DW_AT_declaration, flag: 1); |
26945 | equate_decl_number_to_die (decl, decl_die: namespace_die); |
26946 | } |
26947 | else |
26948 | { |
26949 | /* Output a namespace alias. */ |
26950 | |
26951 | /* Force out the namespace we are an alias of, if necessary. */ |
26952 | dw_die_ref origin_die |
26953 | = force_decl_die (DECL_ABSTRACT_ORIGIN (decl)); |
26954 | |
26955 | if (DECL_FILE_SCOPE_P (decl) |
26956 | || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL) |
26957 | context_die = setup_namespace_context (thing: decl, context_die: comp_unit_die ()); |
26958 | /* Now create the namespace alias DIE. */ |
26959 | namespace_die = new_die (tag_value: DW_TAG_imported_declaration, parent_die: context_die, t: decl); |
26960 | add_name_and_src_coords_attributes (die: namespace_die, decl); |
26961 | add_AT_die_ref (die: namespace_die, attr_kind: DW_AT_import, targ_die: origin_die); |
26962 | equate_decl_number_to_die (decl, decl_die: namespace_die); |
26963 | } |
26964 | if ((dwarf_version >= 5 || !dwarf_strict) |
26965 | && lang_hooks.decls.decl_dwarf_attribute (decl, |
26966 | DW_AT_export_symbols) == 1) |
26967 | add_AT_flag (die: namespace_die, attr_kind: DW_AT_export_symbols, flag: 1); |
26968 | |
26969 | /* Bypass dwarf2_name's check for DECL_NAMELESS. */ |
26970 | if (want_pubnames ()) |
26971 | add_pubname_string (str: lang_hooks.dwarf_name (decl, 1), die: namespace_die); |
26972 | } |
26973 | |
26974 | /* Generate Dwarf debug information for a decl described by DECL. |
26975 | The return value is currently only meaningful for PARM_DECLs, |
26976 | for all other decls it returns NULL. |
26977 | |
26978 | If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT. |
26979 | It can be NULL otherwise. */ |
26980 | |
static dw_die_ref
gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
	      dw_die_ref context_die)
{
  /* DECL may be NULL when we are emitting a DIE stub for an inlined
     instance; in that case ORIGIN carries the abstract decl.  */
  tree decl_or_origin = decl ? decl : origin;
  tree class_origin = NULL, ultimate_origin;

  if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
    return NULL;

  switch (TREE_CODE (decl_or_origin))
    {
    case ERROR_MARK:
      break;

    case CONST_DECL:
      if (!is_fortran () && !is_ada () && !is_dlang ())
	{
	  /* The individual enumerators of an enum type get output when we output
	     the Dwarf representation of the relevant enum type itself.  */
	  break;
	}

      /* Emit its type.  */
      gen_type_die (TREE_TYPE (decl), context_die);

      /* And its containing namespace.  */
      context_die = declare_in_namespace (thing: decl, context_die);

      gen_const_die (decl, context_die);
      break;

    case FUNCTION_DECL:
#if 0
      /* FIXME */
      /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
	 on local redeclarations of global functions.  That seems broken.  */
      if (current_function_decl != decl)
	/* This is only a declaration.  */;
#endif

      /* We should have abstract copies already and should not generate
	 stray type DIEs in late LTO dumping.  */
      if (! early_dwarf)
	;

      /* If we're emitting a clone, emit info for the abstract instance.  */
      else if (origin || DECL_ORIGIN (decl) != decl)
	dwarf2out_abstract_function (decl: origin
				     ? DECL_ORIGIN (origin)
				     : DECL_ABSTRACT_ORIGIN (decl));

      /* If we're emitting a possibly inlined function emit it as
	 abstract instance.  */
      else if (cgraph_function_possibly_inlined_p (decl)
	       && ! DECL_ABSTRACT_P (decl)
	       && ! class_or_namespace_scope_p (context_die)
	       /* dwarf2out_abstract_function won't emit a die if this is just
		  a declaration.  We must avoid setting DECL_ABSTRACT_ORIGIN in
		  that case, because that works only if we have a die.  */
	       && DECL_INITIAL (decl) != NULL_TREE)
	dwarf2out_abstract_function (decl);

      /* Otherwise we're emitting the primary DIE for this decl.  */
      else if (debug_info_level > DINFO_LEVEL_TERSE)
	{
	  /* Before we describe the FUNCTION_DECL itself, make sure that we
	     have its containing type.  */
	  if (!origin)
	    origin = decl_class_context (decl);
	  if (origin != NULL_TREE)
	    gen_type_die (type: origin, context_die);

	  /* And its return type.  */
	  gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);

	  /* And its virtual context.  */
	  if (DECL_VINDEX (decl) != NULL_TREE)
	    gen_type_die (DECL_CONTEXT (decl), context_die);

	  /* Make sure we have a member DIE for decl.  */
	  if (origin != NULL_TREE)
	    gen_type_die_for_member (type: origin, member: decl, context_die);

	  /* And its containing namespace.  */
	  context_die = declare_in_namespace (thing: decl, context_die);
	}

      /* Now output a DIE to represent the function itself.  */
      if (decl)
	gen_subprogram_die (decl, context_die);
      break;

    case TYPE_DECL:
      /* If we are in terse mode, don't generate any DIEs to represent any
	 actual typedefs.  */
      if (debug_info_level <= DINFO_LEVEL_TERSE)
	break;

      /* In the special case of a TYPE_DECL node representing the declaration
	 of some type tag, if the given TYPE_DECL is marked as having been
	 instantiated from some other (original) TYPE_DECL node (e.g. one which
	 was generated within the original definition of an inline function) we
	 used to generate a special (abbreviated) DW_TAG_structure_type,
	 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here.  But nothing
	 should be actually referencing those DIEs, as variable DIEs with that
	 type would be emitted already in the abstract origin, so it was always
	 removed during unused type prunning.  Don't add anything in this
	 case.  */
      if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
	break;

      if (is_redundant_typedef (decl))
	gen_type_die (TREE_TYPE (decl), context_die);
      else
	/* Output a DIE to represent the typedef itself.  */
	gen_typedef_die (decl, context_die);
      break;

    case LABEL_DECL:
      if (debug_info_level >= DINFO_LEVEL_NORMAL)
	gen_label_die (decl, context_die);
      break;

    case VAR_DECL:
    case RESULT_DECL:
      /* If we are in terse mode, don't generate any DIEs to represent any
	 variable declarations or definitions unless it is external.  */
      if (debug_info_level < DINFO_LEVEL_TERSE
	  || (debug_info_level == DINFO_LEVEL_TERSE
	      && !TREE_PUBLIC (decl_or_origin)))
	break;

      if (debug_info_level > DINFO_LEVEL_TERSE)
	{
	  /* Avoid generating stray type DIEs during late dwarf dumping.
	     All types have been dumped early.  */
	  if (early_dwarf
	      /* ??? But in LTRANS we cannot annotate early created variably
		 modified type DIEs without copying them and adjusting all
		 references to them.  Dump them again as happens for inlining
		 which copies both the decl and the types.  */
	      /* ??? And even non-LTO needs to re-visit type DIEs to fill
		 in VLA bound information for example.  */
	      || (decl && variably_modified_type_p (TREE_TYPE (decl),
						    current_function_decl)))
	    {
	      /* Output any DIEs that are needed to specify the type of this data
		 object.  */
	      if (decl_by_reference_p (decl: decl_or_origin))
		gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
	      else
		gen_type_die (TREE_TYPE (decl_or_origin), context_die);
	    }

	  if (early_dwarf)
	    {
	      /* And its containing type.  */
	      class_origin = decl_class_context (decl: decl_or_origin);
	      if (class_origin != NULL_TREE)
		gen_type_die_for_member (type: class_origin, member: decl_or_origin, context_die);

	      /* And its containing namespace.  */
	      context_die = declare_in_namespace (thing: decl_or_origin, context_die);
	    }
	}

      /* Now output the DIE to represent the data object itself.  This gets
	 complicated because of the possibility that the VAR_DECL really
	 represents an inlined instance of a formal parameter for an inline
	 function.  */
      ultimate_origin = decl_ultimate_origin (decl: decl_or_origin);
      if (ultimate_origin != NULL_TREE
	  && TREE_CODE (ultimate_origin) == PARM_DECL)
	gen_formal_parameter_die (node: decl, origin,
				  emit_name_p: true /* Emit name attribute.  */,
				  context_die);
      else
	gen_variable_die (decl, origin, context_die);
      break;

    case FIELD_DECL:
      gcc_assert (ctx != NULL && ctx->struct_type != NULL);
      /* Ignore the nameless fields that are used to skip bits but handle C++
	 anonymous unions and structs.  */
      if (DECL_NAME (decl) != NULL_TREE
	  || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
	  || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
	{
	  gen_type_die (type: member_declared_type (member: decl), context_die);
	  gen_field_die (decl, ctx, context_die);
	}
      break;

    case PARM_DECL:
      /* Avoid generating stray type DIEs during late dwarf dumping.
	 All types have been dumped early.  */
      if (early_dwarf
	  /* ??? But in LTRANS we cannot annotate early created variably
	     modified type DIEs without copying them and adjusting all
	     references to them.  Dump them again as happens for inlining
	     which copies both the decl and the types.  */
	  /* ??? And even non-LTO needs to re-visit type DIEs to fill
	     in VLA bound information for example.  */
	  || (decl && variably_modified_type_p (TREE_TYPE (decl),
						current_function_decl)))
	{
	  if (DECL_BY_REFERENCE (decl_or_origin))
	    gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
	  else
	    gen_type_die (TREE_TYPE (decl_or_origin), context_die);
	}
      /* PARM_DECL is the only case with a meaningful return value: the
	 caller (gen_formal_types_die etc.) may need the parameter DIE.  */
      return gen_formal_parameter_die (node: decl, origin,
				       emit_name_p: true /* Emit name attribute.  */,
				       context_die);

    case NAMESPACE_DECL:
      /* DWARF2 has no DW_TAG_namespace; only emit for DWARF3+ or with
	 GNU extensions allowed.  */
      if (dwarf_version >= 3 || !dwarf_strict)
	gen_namespace_die (decl, context_die);
      break;

    case IMPORTED_DECL:
      dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
					   DECL_CONTEXT (decl), context_die);
      break;

    case NAMELIST_DECL:
      gen_namelist_decl (DECL_NAME (decl), context_die,
			 NAMELIST_DECL_ASSOCIATED_DECL (decl));
      break;

    default:
      /* Probably some frontend-internal decl.  Assume we don't care.  */
      gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
      break;
    }

  return NULL;
}
27220 | |
27221 | /* Output initial debug information for global DECL. Called at the |
27222 | end of the parsing process. |
27223 | |
27224 | This is the initial debug generation process. As such, the DIEs |
27225 | generated may be incomplete. A later debug generation pass |
27226 | (dwarf2out_late_global_decl) will augment the information generated |
27227 | in this pass (e.g., with complete location info). */ |
27228 | |
static void
dwarf2out_early_global_decl (tree decl)
{
  /* RAII object: marks the early_dwarf flag for the duration of this
     function.  */
  set_early_dwarf s;

  /* gen_decl_die() will set DECL_ABSTRACT because
     cgraph_function_possibly_inlined_p() returns true.  This is in
     turn will cause DW_AT_inline attributes to be set.

     This happens because at early dwarf generation, there is no
     cgraph information, causing cgraph_function_possibly_inlined_p()
     to return true.  Trick cgraph_function_possibly_inlined_p()
     while we generate dwarf early.  */
  bool save = symtab->global_info_ready;
  symtab->global_info_ready = true;

  /* We don't handle TYPE_DECLs.  If required, they'll be reached via
     other DECLs and they can point to template types or other things
     that dwarf2out can't handle when done via dwarf2out_decl.  */
  if (TREE_CODE (decl) != TYPE_DECL
      && TREE_CODE (decl) != PARM_DECL)
    {
      if (TREE_CODE (decl) == FUNCTION_DECL)
	{
	  /* Saved so the containing-context/abstract-origin emission
	     below can temporarily retarget current_function_decl.  */
	  tree save_fndecl = current_function_decl;

	  /* For nested functions, make sure we have DIEs for the parents first
	     so that all nested DIEs are generated at the proper scope in the
	     first shot.  */
	  tree context = decl_function_context (decl);
	  if (context != NULL)
	    {
	      dw_die_ref context_die = lookup_decl_die (decl: context);
	      current_function_decl = context;

	      /* Avoid emitting DIEs multiple times, but still process CONTEXT
		 enough so that it lands in its own context.  This avoids type
		 pruning issues later on.  */
	      if (context_die == NULL || is_declaration_die (die: context_die))
		dwarf2out_early_global_decl (decl: context);
	    }

	  /* Emit an abstract origin of a function first.  This happens
	     with C++ constructor clones for example and makes
	     dwarf2out_abstract_function happy which requires the early
	     DIE of the abstract instance to be present.  */
	  tree origin = DECL_ABSTRACT_ORIGIN (decl);
	  dw_die_ref origin_die;
	  if (origin != NULL
	      /* Do not emit the DIE multiple times but make sure to
		 process it fully here in case we just saw a declaration.  */
	      && ((origin_die = lookup_decl_die (decl: origin)) == NULL
		  || is_declaration_die (die: origin_die)))
	    {
	      current_function_decl = origin;
	      dwarf2out_decl (origin);
	    }

	  /* Emit the DIE for decl but avoid doing that multiple times.  */
	  dw_die_ref old_die;
	  if ((old_die = lookup_decl_die (decl)) == NULL
	      || is_declaration_die (die: old_die))
	    {
	      current_function_decl = decl;
	      dwarf2out_decl (decl);
	    }

	  /* Restore whatever function the caller was compiling.  */
	  current_function_decl = save_fndecl;
	}
      else
	dwarf2out_decl (decl);
    }
  symtab->global_info_ready = save;
}
27303 | |
27304 | /* Return whether EXPR is an expression with the following pattern: |
27305 | INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */ |
27306 | |
27307 | static bool |
27308 | is_trivial_indirect_ref (tree expr) |
27309 | { |
27310 | if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF) |
27311 | return false; |
27312 | |
27313 | tree nop = TREE_OPERAND (expr, 0); |
27314 | if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR) |
27315 | return false; |
27316 | |
27317 | tree int_cst = TREE_OPERAND (nop, 0); |
27318 | return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST; |
27319 | } |
27320 | |
27321 | /* Output debug information for global decl DECL. Called from |
27322 | toplev.cc after compilation proper has finished. */ |
27323 | |
27324 | static void |
27325 | dwarf2out_late_global_decl (tree decl) |
27326 | { |
27327 | /* Fill-in any location information we were unable to determine |
27328 | on the first pass. */ |
27329 | if (VAR_P (decl)) |
27330 | { |
27331 | dw_die_ref die = lookup_decl_die (decl); |
27332 | |
27333 | /* We may have to generate full debug late for LTO in case debug |
27334 | was not enabled at compile-time or the target doesn't support |
27335 | the LTO early debug scheme. */ |
27336 | if (! die && in_lto_p |
27337 | /* Function scope variables are emitted when emitting the |
27338 | DIE for the function. */ |
27339 | && ! local_function_static (decl)) |
27340 | dwarf2out_decl (decl); |
27341 | else if (die) |
27342 | { |
27343 | /* We get called via the symtab code invoking late_global_decl |
27344 | for symbols that are optimized out. |
27345 | |
27346 | Do not add locations for those, except if they have a |
27347 | DECL_VALUE_EXPR, in which case they are relevant for debuggers. |
27348 | Still don't add a location if the DECL_VALUE_EXPR is not a trivial |
27349 | INDIRECT_REF expression, as this could generate relocations to |
27350 | text symbols in LTO object files, which is invalid. */ |
27351 | varpool_node *node = varpool_node::get (decl); |
27352 | if ((! node || ! node->definition) |
27353 | && ! (DECL_HAS_VALUE_EXPR_P (decl) |
27354 | && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl)))) |
27355 | tree_add_const_value_attribute_for_decl (var_die: die, decl); |
27356 | else |
27357 | add_location_or_const_value_attribute (die, decl, cache_p: false); |
27358 | } |
27359 | } |
27360 | } |
27361 | |
27362 | /* Output debug information for type decl DECL. Called from toplev.cc |
27363 | and from language front ends (to record built-in types). */ |
27364 | static void |
27365 | dwarf2out_type_decl (tree decl, int local) |
27366 | { |
27367 | if (!local) |
27368 | { |
27369 | set_early_dwarf s; |
27370 | dwarf2out_decl (decl); |
27371 | } |
27372 | } |
27373 | |
27374 | /* Output debug information for imported module or decl DECL. |
27375 | NAME is non-NULL name in the lexical block if the decl has been renamed. |
27376 | LEXICAL_BLOCK is the lexical block (which TREE_CODE is a BLOCK) |
27377 | that DECL belongs to. |
27378 | LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */ |
static void
dwarf2out_imported_module_or_decl_1 (tree decl,
				     tree name,
				     tree lexical_block,
				     dw_die_ref lexical_block_die)
{
  expanded_location xloc;
  dw_die_ref imported_die = NULL;
  dw_die_ref at_import_die;

  /* For an IMPORTED_DECL wrapper, take the source location from the
     wrapper itself but emit the import for the associated decl.  */
  if (TREE_CODE (decl) == IMPORTED_DECL)
    {
      xloc = expand_location (DECL_SOURCE_LOCATION (decl));
      decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
      gcc_assert (decl);
    }
  else
    xloc = expand_location (input_location);

  /* Find (or force creation of) the DIE the DW_AT_import attribute
     should reference.  */
  if (TREE_CODE (decl) == TYPE_DECL)
    {
      at_import_die = force_type_die (TREE_TYPE (decl));
      /* For namespace N { typedef void T; } using N::T; base_type_die
	 returns NULL, but DW_TAG_imported_declaration requires
	 the DW_AT_import tag.  Force creation of DW_TAG_typedef.  */
      if (!at_import_die)
	{
	  gcc_assert (TREE_CODE (decl) == TYPE_DECL);
	  gen_typedef_die (decl, context_die: get_context_die (DECL_CONTEXT (decl)));
	  at_import_die = lookup_type_die (TREE_TYPE (decl));
	  gcc_assert (at_import_die);
	}
    }
  else
    {
      at_import_die = lookup_decl_die (decl);
      if (!at_import_die)
	{
	  /* If we're trying to avoid duplicate debug info, we may not have
	     emitted the member decl for this field.  Emit it now.  */
	  if (TREE_CODE (decl) == FIELD_DECL)
	    {
	      tree type = DECL_CONTEXT (decl);

	      if (TYPE_CONTEXT (type)
		  && TYPE_P (TYPE_CONTEXT (type))
		  && !should_emit_struct_debug (TYPE_CONTEXT (type),
						usage: DINFO_USAGE_DIR_USE))
		return;
	      gen_type_die_for_member (type, member: decl,
				       context_die: get_context_die (TYPE_CONTEXT (type)));
	    }
	  if (TREE_CODE (decl) == CONST_DECL)
	    {
	      /* Individual enumerators of an enum type do not get output here
		 (see gen_decl_die), so we cannot call force_decl_die.  */
	      if (!is_fortran () && !is_ada () && !is_dlang ())
		return;
	    }
	  if (TREE_CODE (decl) == NAMELIST_DECL)
	    at_import_die = gen_namelist_decl (DECL_NAME (decl),
					       get_context_die (DECL_CONTEXT (decl)),
					       NULL_TREE);
	  else
	    at_import_die = force_decl_die (decl);
	}
    }

  /* Create the import DIE itself: DW_TAG_imported_module for whole
     namespaces, DW_TAG_imported_declaration for individual decls.  */
  if (TREE_CODE (decl) == NAMESPACE_DECL)
    {
      if (dwarf_version >= 3 || !dwarf_strict)
	imported_die = new_die (tag_value: DW_TAG_imported_module,
				parent_die: lexical_block_die,
				t: lexical_block);
      else
	/* Strict DWARF2 has no way to express a namespace import.  */
	return;
    }
  else
    imported_die = new_die (tag_value: DW_TAG_imported_declaration,
			    parent_die: lexical_block_die,
			    t: lexical_block);

  /* Attach the source coordinates, the (possibly renamed) name, and
     the reference to the imported entity.  */
  add_AT_file (die: imported_die, attr_kind: DW_AT_decl_file, fd: lookup_filename (xloc.file));
  add_AT_unsigned (die: imported_die, attr_kind: DW_AT_decl_line, unsigned_val: xloc.line);
  if (debug_column_info && xloc.column)
    add_AT_unsigned (die: imported_die, attr_kind: DW_AT_decl_column, unsigned_val: xloc.column);
  if (name)
    add_AT_string (die: imported_die, attr_kind: DW_AT_name,
		   IDENTIFIER_POINTER (name));
  add_AT_die_ref (die: imported_die, attr_kind: DW_AT_import, targ_die: at_import_die);
}
27470 | |
27471 | /* Output debug information for imported module or decl DECL. |
27472 | NAME is non-NULL name in context if the decl has been renamed. |
27473 | CHILD is true if decl is one of the renamed decls as part of |
27474 | importing whole module. |
27475 | IMPLICIT is set if this hook is called for an implicit import |
27476 | such as inline namespace. */ |
27477 | |
static void
dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
				   bool child, bool implicit)
{
  /* dw_die_ref at_import_die; */
  dw_die_ref scope_die;

  /* Imports carry no value in terse debug info.  */
  if (debug_info_level <= DINFO_LEVEL_TERSE)
    return;

  gcc_assert (decl);

  /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
     should be enough, for DWARF4 and older even if we emit as extension
     DW_AT_export_symbols add the implicit DW_TAG_imported_module anyway
     for the benefit of consumers unaware of DW_AT_export_symbols.  */
  if (implicit
      && dwarf_version >= 5
      && lang_hooks.decls.decl_dwarf_attribute (decl,
						DW_AT_export_symbols) == 1)
    return;

  /* NOTE(review): set_early_dwarf looks like an RAII helper that switches
     DIE creation to early-debug mode for the rest of this function —
     confirm against its definition.  */
  set_early_dwarf s;

  /* To emit DW_TAG_imported_module or DW_TAG_imported_decl, we need two DIEs.
     We need decl DIE for reference and scope die. First, get DIE for the decl
     itself.  */

  /* Get the scope die for decl context. Use comp_unit_die for global module
     or decl. If die is not found for non globals, force new die.  */
  if (context
      && TYPE_P (context)
      && !should_emit_struct_debug (type: context, usage: DINFO_USAGE_DIR_USE))
    return;

  scope_die = get_context_die (context);

  if (child)
    {
      /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
	 there is nothing we can do, here.  */
      if (dwarf_version < 3 && dwarf_strict)
	return;

      /* CHILD means DECL is one of the renamed decls of a whole-module
	 import; the DW_TAG_imported_module created for that import is
	 expected at scope_die->die_child and becomes our scope.  */
      gcc_assert (scope_die->die_child);
      gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
      gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
      scope_die = scope_die->die_child;
    }

  /* OK, now we have DIEs for decl as well as scope. Emit imported die.  */
  dwarf2out_imported_module_or_decl_1 (decl, name, lexical_block: context, lexical_block_die: scope_die);
}
27531 | |
27532 | /* Output debug information for namelists. */ |
27533 | |
27534 | static dw_die_ref |
27535 | gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls) |
27536 | { |
27537 | dw_die_ref nml_die, nml_item_die, nml_item_ref_die; |
27538 | tree value; |
27539 | unsigned i; |
27540 | |
27541 | if (debug_info_level <= DINFO_LEVEL_TERSE) |
27542 | return NULL; |
27543 | |
27544 | gcc_assert (scope_die != NULL); |
27545 | nml_die = new_die (tag_value: DW_TAG_namelist, parent_die: scope_die, NULL); |
27546 | add_AT_string (die: nml_die, attr_kind: DW_AT_name, IDENTIFIER_POINTER (name)); |
27547 | |
27548 | /* If there are no item_decls, we have a nondefining namelist, e.g. |
27549 | with USE association; hence, set DW_AT_declaration. */ |
27550 | if (item_decls == NULL_TREE) |
27551 | { |
27552 | add_AT_flag (die: nml_die, attr_kind: DW_AT_declaration, flag: 1); |
27553 | return nml_die; |
27554 | } |
27555 | |
27556 | FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value) |
27557 | { |
27558 | nml_item_ref_die = lookup_decl_die (decl: value); |
27559 | if (!nml_item_ref_die) |
27560 | nml_item_ref_die = force_decl_die (decl: value); |
27561 | |
27562 | nml_item_die = new_die (tag_value: DW_TAG_namelist_item, parent_die: nml_die, NULL); |
27563 | add_AT_die_ref (die: nml_item_die, attr_kind: DW_AT_namelist_item, targ_die: nml_item_ref_die); |
27564 | } |
27565 | return nml_die; |
27566 | } |
27567 | |
27568 | |
/* Write the debugging output for DECL (the function returns nothing;
   look the resulting DIE up via lookup_decl_die if needed).  */
27570 | |
static void
dwarf2out_decl (tree decl)
{
  /* New DIEs default to children of the compilation unit; the cases below
     may override the context or reject DECL outright.  */
  dw_die_ref context_die = comp_unit_die ();

  switch (TREE_CODE (decl))
    {
    case ERROR_MARK:
      return;

    case FUNCTION_DECL:
      /* If we're a nested function, initially use a parent of NULL; if we're
	 a plain function, this will be fixed up in decls_for_scope. If
	 we're a method, it will be ignored, since we already have a DIE.
	 Avoid doing this late though since clones of class methods may
	 otherwise end up in limbo and create type DIEs late.  */
      if (early_dwarf
	  && decl_function_context (decl)
	  /* But if we're in terse mode, we don't care about scope.  */
	  && debug_info_level > DINFO_LEVEL_TERSE)
	context_die = NULL;
      break;

    case VAR_DECL:
      /* For local statics lookup proper context die.  */
      if (local_function_static (decl))
	context_die = lookup_decl_die (DECL_CONTEXT (decl));

      /* If we are in terse mode, don't generate any DIEs to represent any
	 variable declarations or definitions unless it is external.  */
      if (debug_info_level < DINFO_LEVEL_TERSE
	  || (debug_info_level == DINFO_LEVEL_TERSE
	      && !TREE_PUBLIC (decl)))
	return;
      break;

    case CONST_DECL:
      if (debug_info_level <= DINFO_LEVEL_TERSE)
	return;
      /* Only these front ends represent constants as CONST_DECLs that get
	 standalone DIEs (the same language check appears in
	 dwarf2out_imported_module_or_decl_1).  */
      if (!is_fortran () && !is_ada () && !is_dlang ())
	return;
      if (TREE_STATIC (decl) && decl_function_context (decl))
	context_die = lookup_decl_die (DECL_CONTEXT (decl));
      break;

    case NAMESPACE_DECL:
    case IMPORTED_DECL:
      if (debug_info_level <= DINFO_LEVEL_TERSE)
	return;
      /* Don't create a duplicate DIE for an already-emitted namespace or
	 import.  */
      if (lookup_decl_die (decl) != NULL)
	return;
      break;

    case TYPE_DECL:
      /* Don't emit stubs for types unless they are needed by other DIEs.  */
      if (TYPE_DECL_SUPPRESS_DEBUG (decl))
	return;

      /* Don't bother trying to generate any DIEs to represent any of the
	 normal built-in types for the language we are compiling.  */
      if (DECL_IS_UNDECLARED_BUILTIN (decl))
	return;

      /* If we are in terse mode, don't generate any DIEs for types.  */
      if (debug_info_level <= DINFO_LEVEL_TERSE)
	return;

      /* If we're a function-scope tag, initially use a parent of NULL;
	 this will be fixed up in decls_for_scope.  */
      if (decl_function_context (decl))
	context_die = NULL;

      break;

    case NAMELIST_DECL:
      /* Namelist decls are handled by gen_decl_die below.  */
      break;

    default:
      return;
    }

  gen_decl_die (decl, NULL, NULL, context_die);

  /* With checking enabled, validate the DIE we just created.  */
  if (flag_checking)
    {
      dw_die_ref die = lookup_decl_die (decl);
      if (die)
	check_die (die);
    }
}
27661 | |
27662 | /* Write the debugging output for DECL. */ |
27663 | |
static void
dwarf2out_function_decl (tree decl)
{
  dwarf2out_decl (decl);

  /* Reset the per-function call-site bookkeeping accumulated during the
     final scan of this function...  */
  call_arg_locations = NULL;
  call_arg_loc_last = NULL;
  call_site_count = -1;
  tail_call_site_count = -1;
  /* ...and flush the per-function location caches.  */
  decl_loc_table->empty ();
  cached_dw_loc_list_table->empty ();
}
27675 | |
27676 | /* Output a marker (i.e. a label) for the beginning of the generated code for |
27677 | a lexical block. */ |
27678 | |
static void
dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
		       unsigned int blocknum)
{
  /* Make sure the label lands in the section currently holding this
     function's code, then emit BLOCK_BEGIN_LABEL<blocknum>.  */
  switch_to_section (current_function_section ());
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
}
27686 | |
27687 | /* Output a marker (i.e. a label) for the end of the generated code for a |
27688 | lexical block. */ |
27689 | |
static void
dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
{
  /* Emit BLOCK_END_LABEL<blocknum> in the function's current section,
     mirroring dwarf2out_begin_block.  */
  switch_to_section (current_function_section ());
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
}
27696 | |
27697 | /* Returns true if it is appropriate not to emit any debugging |
27698 | information for BLOCK, because it doesn't contain any instructions. |
27699 | |
27700 | Don't allow this for blocks with nested functions or local classes |
27701 | as we would end up with orphans, and in the presence of scheduling |
27702 | we may end up calling them anyway. */ |
27703 | |
27704 | static bool |
27705 | dwarf2out_ignore_block (const_tree block) |
27706 | { |
27707 | tree decl; |
27708 | unsigned int i; |
27709 | |
27710 | for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl)) |
27711 | if (TREE_CODE (decl) == FUNCTION_DECL |
27712 | || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl))) |
27713 | return false; |
27714 | for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++) |
27715 | { |
27716 | decl = BLOCK_NONLOCALIZED_VAR (block, i); |
27717 | if (TREE_CODE (decl) == FUNCTION_DECL |
27718 | || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl))) |
27719 | return false; |
27720 | } |
27721 | |
27722 | return true; |
27723 | } |
27724 | |
27725 | /* Hash table routines for file_hash. */ |
27726 | |
bool
dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
{
  /* Match a table entry against a raw filename using filename_cmp,
     which applies the host's filename comparison rules.  */
  return filename_cmp (s1: p1->key, s2: p2) == 0;
}
27732 | |
hashval_t
dwarf_file_hasher::hash (dwarf_file_data *p)
{
  /* Hash the same key string that equal () compares.
     NOTE(review): hashing is byte-wise while equality goes through
     filename_cmp; on hosts where filename_cmp is not a plain byte
     comparison these could disagree — confirm this is intended.  */
  return htab_hash_string (p->key);
}
27738 | |
/* Lookup FILE_NAME (in the list of filenames that we know about here in
   dwarf2out.cc) and return its "index".  The index of each (known) filename is
   just a unique number which is associated with only that one filename.  We
   need such numbers for the sake of generating labels and file-number
   references in the debug sections (e.g. .debug_line and .debug_macinfo).
   If the filename given as an argument is not found in our current list,
   add it to the list and assign it the next available unique index
   number.  */
27747 | |
27748 | static struct dwarf_file_data * |
27749 | lookup_filename (const char *file_name) |
27750 | { |
27751 | struct dwarf_file_data * created; |
27752 | |
27753 | if (!file_name) |
27754 | return NULL; |
27755 | |
27756 | if (!file_name[0]) |
27757 | file_name = "<stdin>" ; |
27758 | |
27759 | dwarf_file_data **slot |
27760 | = file_table->find_slot_with_hash (comparable: file_name, hash: htab_hash_string (file_name), |
27761 | insert: INSERT); |
27762 | if (*slot) |
27763 | return *slot; |
27764 | |
27765 | created = ggc_alloc<dwarf_file_data> (); |
27766 | created->key = file_name; |
27767 | created->filename = remap_debug_filename (file_name); |
27768 | created->emitted_number = 0; |
27769 | *slot = created; |
27770 | return created; |
27771 | } |
27772 | |
27773 | /* If the assembler will construct the file table, then translate the compiler |
27774 | internal file table number into the assembler file table number, and emit |
27775 | a .file directive if we haven't already emitted one yet. The file table |
27776 | numbers are different because we prune debug info for unused variables and |
27777 | types, which may include filenames. */ |
27778 | |
27779 | static int |
27780 | maybe_emit_file (struct dwarf_file_data * fd) |
27781 | { |
27782 | if (! fd->emitted_number) |
27783 | { |
27784 | if (last_emitted_file) |
27785 | fd->emitted_number = last_emitted_file->emitted_number + 1; |
27786 | else |
27787 | fd->emitted_number = 1; |
27788 | last_emitted_file = fd; |
27789 | |
27790 | if (output_asm_line_debug_info ()) |
27791 | { |
27792 | fprintf (stream: asm_out_file, format: "\t.file %u " , fd->emitted_number); |
27793 | output_quoted_string (asm_out_file, fd->filename); |
27794 | fputc (c: '\n', stream: asm_out_file); |
27795 | } |
27796 | } |
27797 | |
27798 | return fd->emitted_number; |
27799 | } |
27800 | |
27801 | /* Schedule generation of a DW_AT_const_value attribute to DIE. |
27802 | That generation should happen after function debug info has been |
27803 | generated. The value of the attribute is the constant value of ARG. */ |
27804 | |
27805 | static void |
27806 | append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg) |
27807 | { |
27808 | die_arg_entry entry; |
27809 | |
27810 | if (!die || !arg) |
27811 | return; |
27812 | |
27813 | gcc_assert (early_dwarf); |
27814 | |
27815 | if (!tmpl_value_parm_die_table) |
27816 | vec_alloc (v&: tmpl_value_parm_die_table, nelems: 32); |
27817 | |
27818 | entry.die = die; |
27819 | entry.arg = arg; |
27820 | vec_safe_push (v&: tmpl_value_parm_die_table, obj: entry); |
27821 | } |
27822 | |
27823 | /* Return TRUE if T is an instance of generic type, FALSE |
27824 | otherwise. */ |
27825 | |
27826 | static bool |
27827 | generic_type_p (tree t) |
27828 | { |
27829 | if (t == NULL_TREE || !TYPE_P (t)) |
27830 | return false; |
27831 | return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE; |
27832 | } |
27833 | |
27834 | /* Schedule the generation of the generic parameter dies for the |
27835 | instance of generic type T. The proper generation itself is later |
27836 | done by gen_scheduled_generic_parms_dies. */ |
27837 | |
27838 | static void |
27839 | schedule_generic_params_dies_gen (tree t) |
27840 | { |
27841 | if (!generic_type_p (t)) |
27842 | return; |
27843 | |
27844 | gcc_assert (early_dwarf); |
27845 | |
27846 | if (!generic_type_instances) |
27847 | vec_alloc (v&: generic_type_instances, nelems: 256); |
27848 | |
27849 | vec_safe_push (v&: generic_type_instances, obj: t); |
27850 | } |
27851 | |
27852 | /* Add a DW_AT_const_value attribute to DIEs that were scheduled |
27853 | by append_entry_to_tmpl_value_parm_die_table. This function must |
27854 | be called after function DIEs have been generated. */ |
27855 | |
static void
gen_remaining_tmpl_value_param_die_attribute (void)
{
  if (tmpl_value_parm_die_table)
    {
      unsigned i, j;
      die_arg_entry *e;

      /* We do this in two phases - first get the cases we can
	 handle during early-finish, preserving those we cannot
	 (containing symbolic constants where we don't yet know
	 whether we are going to output the referenced symbols).
	 For those we try again at late-finish.  */
      j = 0;
      FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
	{
	  if (!e->die->removed
	      && !tree_add_const_value_attribute (die: e->die, t: e->arg))
	    {
	      /* Couldn't emit a constant value; at late-finish try a
		 location expression instead.  */
	      dw_loc_descr_ref loc = NULL;
	      if (! early_dwarf
		  && (dwarf_version >= 5 || !dwarf_strict))
		loc = loc_descriptor_from_tree (loc: e->arg, want_address: 2, NULL);
	      if (loc)
		add_AT_loc (die: e->die, attr_kind: DW_AT_location, loc);
	      else
		/* Still unresolved: compact it back into the table for
		   another attempt later.  */
		(*tmpl_value_parm_die_table)[j++] = *e;
	    }
	}
      /* Keep only the entries deferred above.  */
      tmpl_value_parm_die_table->truncate (size: j);
    }
}
27888 | |
27889 | /* Generate generic parameters DIEs for instances of generic types |
27890 | that have been previously scheduled by |
27891 | schedule_generic_params_dies_gen. This function must be called |
27892 | after all the types of the CU have been laid out. */ |
27893 | |
27894 | static void |
27895 | gen_scheduled_generic_parms_dies (void) |
27896 | { |
27897 | unsigned i; |
27898 | tree t; |
27899 | |
27900 | if (!generic_type_instances) |
27901 | return; |
27902 | |
27903 | FOR_EACH_VEC_ELT (*generic_type_instances, i, t) |
27904 | if (COMPLETE_TYPE_P (t)) |
27905 | gen_generic_params_dies (t); |
27906 | |
27907 | generic_type_instances = NULL; |
27908 | } |
27909 | |
27910 | |
27911 | /* Replace DW_AT_name for the decl with name. */ |
27912 | |
static void
dwarf2out_set_name (tree decl, tree name)
{
  dw_die_ref die;
  dw_attr_node *attr;
  const char *dname;

  /* NOTE(review): despite the parameter name, DECL is accessed through
     TYPE_SYMTAB_DIE, so callers appear to pass a type node — confirm.  */
  die = TYPE_SYMTAB_DIE (decl);
  if (!die)
    return;

  /* Compute the DWARF name string; bail if there is none.  */
  dname = dwarf2_name (decl: name, scope: 0);
  if (!dname)
    return;

  attr = get_AT (die, attr_kind: DW_AT_name);
  if (attr)
    {
      struct indirect_string_node *node;

      node = find_AT_string (str: dname);
      /* Replace the existing DW_AT_name string in place.  */
      attr->dw_attr_val.v.val_str = node;
    }

  else
    add_name_attribute (die, name_string: dname);
}
27941 | |
/* True if before or during processing of the first function being emitted.  */
static bool in_first_function_p = true;
/* True if the loc_note seen by dwarf2out_var_location might still come
   before the first real instruction, i.e. at an address equal to .Ltext0.  */
static bool maybe_at_text_label_p = true;
/* One above the highest N for which the .LVLN label might equal .Ltext0.  */
static unsigned int first_loclabel_num_not_at_text_label;
27949 | |
27950 | /* Look ahead for a real insn. */ |
27951 | |
27952 | static rtx_insn * |
27953 | dwarf2out_next_real_insn (rtx_insn *loc_note) |
27954 | { |
27955 | rtx_insn *next_real = NEXT_INSN (insn: loc_note); |
27956 | |
27957 | while (next_real) |
27958 | if (INSN_P (next_real)) |
27959 | break; |
27960 | else |
27961 | next_real = NEXT_INSN (insn: next_real); |
27962 | |
27963 | return next_real; |
27964 | } |
27965 | |
27966 | /* Called by the final INSN scan whenever we see a var location. We |
27967 | use it to drop labels in the right places, and throw the location in |
27968 | our lookup table. */ |
27969 | |
static void
dwarf2out_var_location (rtx_insn *loc_note)
{
  char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
  struct var_loc_node *newloc;
  rtx_insn *next_real;
  rtx_insn *call_insn = NULL;
  /* State carried across consecutive calls during the final scan of one
     function; invalidated below when a real insn intervenes or the
     section changes.  */
  static const char *last_label;
  static const char *last_postcall_label;
  static bool last_in_cold_section_p;
  static rtx_insn *expected_next_loc_note;
  tree decl;
  bool var_loc_p;
  var_loc_view view = 0;

  /* A non-note is either a call insn (call-site handling below) or a
     view-reset opportunity.  */
  if (!NOTE_P (loc_note))
    {
      if (CALL_P (loc_note))
	{
	  maybe_reset_location_view (insn: loc_note, table: cur_line_info_table);
	  call_site_count++;
	  if (SIBLING_CALL_P (loc_note))
	    tail_call_site_count++;
	  if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
	    {
	      /* Calls carrying argument-location notes get a label too.  */
	      call_insn = loc_note;
	      loc_note = NULL;
	      var_loc_p = false;

	      next_real = dwarf2out_next_real_insn (loc_note: call_insn);
	      cached_next_real_insn = NULL;
	      goto create_label;
	    }
	  if (optimize == 0 && !flag_var_tracking)
	    {
	      /* When the var-tracking pass is not running, there is no note
		 for indirect calls whose target is compile-time known. In this
		 case, process such calls specifically so that we generate call
		 sites for them anyway.  */
	      rtx x = PATTERN (insn: loc_note);
	      if (GET_CODE (x) == PARALLEL)
		x = XVECEXP (x, 0, 0);
	      if (GET_CODE (x) == SET)
		x = SET_SRC (x);
	      if (GET_CODE (x) == CALL)
		x = XEXP (x, 0);
	      if (!MEM_P (x)
		  || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
		  || !SYMBOL_REF_DECL (XEXP (x, 0))
		  || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
		      != FUNCTION_DECL))
		{
		  call_insn = loc_note;
		  loc_note = NULL;
		  var_loc_p = false;

		  next_real = dwarf2out_next_real_insn (loc_note: call_insn);
		  cached_next_real_insn = NULL;
		  goto create_label;
		}
	    }
	}
      else if (!debug_variable_location_views)
	gcc_unreachable ();
      else
	maybe_reset_location_view (insn: loc_note, table: cur_line_info_table);

      return;
    }

  var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
  if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
    return;

  /* Optimize processing a large consecutive sequence of location
     notes so we don't spend too much time in next_real_insn. If the
     next insn is another location note, remember the next_real_insn
     calculation for next time.  */
  next_real = cached_next_real_insn;
  if (next_real)
    {
      if (expected_next_loc_note != loc_note)
	next_real = NULL;
    }

  if (! next_real)
    next_real = dwarf2out_next_real_insn (loc_note);

  if (next_real)
    {
      /* Look for another var-location note between this one and the next
	 real insn; if found, the cached next_real stays valid for it.  */
      rtx_insn *next_note = NEXT_INSN (insn: loc_note);
      while (next_note != next_real)
	{
	  if (! next_note->deleted ()
	      && NOTE_P (next_note)
	      && NOTE_KIND (next_note) == NOTE_INSN_VAR_LOCATION)
	    break;
	  next_note = NEXT_INSN (insn: next_note);
	}

      if (next_note == next_real)
	cached_next_real_insn = NULL;
      else
	{
	  expected_next_loc_note = next_note;
	  cached_next_real_insn = next_real;
	}
    }
  else
    cached_next_real_insn = NULL;

  /* If there are no instructions which would be affected by this note,
     don't do anything.  */
  if (var_loc_p
      && next_real == NULL_RTX
      && !NOTE_DURING_CALL_P (loc_note))
    return;

 create_label:

  if (next_real == NULL_RTX)
    next_real = get_last_insn ();

  /* If there were any real insns between note we processed last time
     and this note (or if it is the first note), clear
     last_{,postcall_}label so that they are not reused this time.  */
  if (last_var_location_insn == NULL_RTX
      || last_var_location_insn != next_real
      || last_in_cold_section_p != in_cold_section_p)
    {
      last_label = NULL;
      last_postcall_label = NULL;
    }

  if (var_loc_p)
    {
      const char *label
	= NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
      view = cur_line_info_table->view;
      decl = NOTE_VAR_LOCATION_DECL (loc_note);
      newloc = add_var_loc_to_decl (decl, loc_note, label, view);
      if (newloc == NULL)
	return;
    }
  else
    {
      decl = NULL_TREE;
      newloc = NULL;
    }

  /* If there were no real insns between note we processed last time
     and this note, use the label we emitted last time. Otherwise
     create a new label and emit it.  */
  if (last_label == NULL)
    {
      ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL" , loclabel_num);
      ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL" , loclabel_num);
      loclabel_num++;
      last_label = ggc_strdup (loclabel);
      /* See if loclabel might be equal to .Ltext0. If yes,
	 bump first_loclabel_num_not_at_text_label.  */
      if (!have_multiple_function_sections
	  && in_first_function_p
	  && maybe_at_text_label_p)
	{
	  /* Scan backwards for any insn with non-zero length between the
	     previously checked start point and this note.  */
	  static rtx_insn *last_start;
	  rtx_insn *insn;
	  for (insn = loc_note; insn; insn = previous_insn (insn))
	    if (insn == last_start)
	      break;
	    else if (!NONDEBUG_INSN_P (insn))
	      continue;
	    else
	      {
		rtx body = PATTERN (insn);
		if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
		  continue;
		/* Inline asm could occupy zero bytes.  */
		else if (GET_CODE (body) == ASM_INPUT
			 || asm_noperands (body) >= 0)
		  continue;
#ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h.  */
		else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
		  continue;
#endif
		else
		  {
		    /* Assume insn has non-zero length.  */
		    maybe_at_text_label_p = false;
		    break;
		  }
	      }
	  if (maybe_at_text_label_p)
	    {
	      last_start = loc_note;
	      first_loclabel_num_not_at_text_label = loclabel_num;
	    }
	}
    }

  /* Exactly one of loc_note / call_insn is live at this point.  */
  gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
	      || (loc_note != NULL_RTX && call_insn == NULL_RTX));

  if (!var_loc_p)
    {
      /* Record a call-site entry for call_insn on the per-function list.  */
      struct call_arg_loc_node *ca_loc
	= ggc_cleared_alloc<call_arg_loc_node> ();
      rtx_insn *prev = call_insn;

      ca_loc->call_arg_loc_note
	= find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
      ca_loc->next = NULL;
      ca_loc->label = last_label;
      gcc_assert (prev
		  && (CALL_P (prev)
		      || (NONJUMP_INSN_P (prev)
			  && GET_CODE (PATTERN (prev)) == SEQUENCE
			  && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
      if (!CALL_P (prev))
	prev = as_a <rtx_sequence *> (p: PATTERN (insn: prev))->insn (index: 0);
      ca_loc->tail_call_p = SIBLING_CALL_P (prev);

      /* Look for a SYMBOL_REF in the "prev" instruction.  */
      rtx x = get_call_rtx_from (prev);
      if (x)
	{
	  /* Try to get the call symbol, if any.  */
	  if (MEM_P (XEXP (x, 0)))
	    x = XEXP (x, 0);
	  /* First, look for a memory access to a symbol_ref.  */
	  if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	      && SYMBOL_REF_DECL (XEXP (x, 0))
	      && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
	    ca_loc->symbol_ref = XEXP (x, 0);
	  /* Otherwise, look at a compile-time known user-level function
	     declaration.  */
	  else if (MEM_P (x)
		   && MEM_EXPR (x)
		   && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
	    ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
	}

      ca_loc->block = insn_scope (prev);
      if (call_arg_locations)
	call_arg_loc_last->next = ca_loc;
      else
	call_arg_locations = ca_loc;
      call_arg_loc_last = ca_loc;
    }
  else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
    {
      newloc->label = last_label;
      newloc->view = view;
    }
  else
    {
      /* A during-call location gets a "label-1" pseudo-label so it takes
	 effect just inside the call insn.  */
      if (!last_postcall_label)
	{
	  sprintf (s: loclabel, format: "%s-1" , last_label);
	  last_postcall_label = ggc_strdup (loclabel);
	}
      newloc->label = last_postcall_label;
      /* ??? This view is at last_label, not last_label-1, but we
	 could only assume view at last_label-1 is zero if we could
	 assume calls always have length greater than one. This is
	 probably true in general, though there might be a rare
	 exception to this rule, e.g. if a call insn is optimized out
	 by target magic. Then, even the -1 in the label will be
	 wrong, which might invalidate the range. Anyway, using view,
	 though technically possibly incorrect, will work as far as
	 ranges go: since L-1 is in the middle of the call insn,
	 (L-1).0 and (L-1).V shouldn't make any difference, and having
	 the loclist entry refer to the .loc entry might be useful, so
	 leave it like this.  */
      newloc->view = view;
    }

  /* With -dA, leave a human-readable trace of the location note in the
     assembly output.  */
  if (var_loc_p && flag_debug_asm)
    {
      const char *name, *sep, *patstr;
      if (decl && DECL_NAME (decl))
	name = IDENTIFIER_POINTER (DECL_NAME (decl));
      else
	name = "" ;
      if (NOTE_VAR_LOCATION_LOC (loc_note))
	{
	  sep = " => " ;
	  patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
	}
      else
	{
	  sep = " " ;
	  patstr = "RESET" ;
	}
      fprintf (stream: asm_out_file, format: "\t%s DEBUG %s%s%s\n" , ASM_COMMENT_START,
	       name, sep, patstr);
    }

  last_var_location_insn = next_real;
  last_in_cold_section_p = in_cold_section_p;
}
28271 | |
28272 | /* Check whether BLOCK, a lexical block, is nested within OUTER, or is |
28273 | OUTER itself. If BOTHWAYS, check not only that BLOCK can reach |
28274 | OUTER through BLOCK_SUPERCONTEXT links, but also that there is a |
28275 | path from OUTER to BLOCK through BLOCK_SUBBLOCKs and |
28276 | BLOCK_FRAGMENT_ORIGIN links. */ |
static bool
block_within_block_p (tree block, tree outer, bool bothways)
{
  if (block == outer)
    return true;

  /* Quickly check that OUTER is up BLOCK's supercontext chain.  */
  for (tree context = BLOCK_SUPERCONTEXT (block);
       context != outer;
       context = BLOCK_SUPERCONTEXT (context))
    if (!context || TREE_CODE (context) != BLOCK)
      return false;

  if (!bothways)
    return true;

  /* Now check that each block is actually referenced by its
     parent.  */
  for (tree context = BLOCK_SUPERCONTEXT (block); ;
       context = BLOCK_SUPERCONTEXT (context))
    {
      /* A block fragment keeps its subblocks on the origin block, so
	 chase the origin before scanning the subblock chain.  */
      if (BLOCK_FRAGMENT_ORIGIN (context))
	{
	  gcc_assert (!BLOCK_SUBBLOCKS (context));
	  context = BLOCK_FRAGMENT_ORIGIN (context);
	}
      /* BLOCK must appear on CONTEXT's subblock chain; hitting NULL
	 means the downward link is missing.  */
      for (tree sub = BLOCK_SUBBLOCKS (context);
	   sub != block;
	   sub = BLOCK_CHAIN (sub))
	if (!sub)
	  return false;
      if (context == outer)
	return true;
      else
	block = context;
    }
}
28314 | |
28315 | /* Called during final while assembling the marker of the entry point |
28316 | for an inlined function. */ |
28317 | |
static void
dwarf2out_inline_entry (tree block)
{
  gcc_assert (debug_inline_points);

  /* If we can't represent it, don't bother.  Inline entry points need
     DWARF 3 or the GNU extensions allowed by !dwarf_strict.  */
  if (!(dwarf_version >= 3 || !dwarf_strict))
    return;

  gcc_assert (DECL_P (block_ultimate_origin (block)));

  /* Sanity check the block tree.  This would catch a case in which
     BLOCK got removed from the tree reachable from the outermost
     lexical block, but got retained in markers.  It would still link
     back to its parents, but some ancestor would be missing a link
     down the path to the sub BLOCK.  If the block got removed, its
     BLOCK_NUMBER will not be a usable value.  */
  if (flag_checking)
    gcc_assert (block_within_block_p (block,
				      DECL_INITIAL (current_function_decl),
				      true));

  gcc_assert (inlined_function_outer_scope_p (block));
  gcc_assert (!lookup_block_die (block));

  /* Record against the fragment origin so all fragments of the same
     block share one entry.  */
  if (BLOCK_FRAGMENT_ORIGIN (block))
    block = BLOCK_FRAGMENT_ORIGIN (block);
  /* Can the entry point ever not be at the beginning of an
     unfragmented lexical block?  */
  else if (!(BLOCK_FRAGMENT_CHAIN (block)
	     || (cur_line_info_table
		 && !ZERO_VIEW_P (cur_line_info_table->view))))
    return;

  if (!inline_entry_data_table)
    inline_entry_data_table
      = hash_table<inline_entry_data_hasher>::create_ggc (n: 10);


  inline_entry_data **iedp
    = inline_entry_data_table->find_slot_with_hash (comparable: block,
						    hash: htab_hash_pointer (block),
						    insert: INSERT);
  if (*iedp)
    /* ??? Ideally, we'd record all entry points for the same inlined
       function (some may have been duplicated by e.g. unrolling), but
       we have no way to represent that ATM.  */
    return;

  /* First time we see this block: record label/view data and emit the
     inline entry label into the assembly stream.  */
  inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
  ied->block = block;
  ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
  ied->label_num = BLOCK_NUMBER (block);
  if (cur_line_info_table)
    ied->view = cur_line_info_table->view;

  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_INLINE_ENTRY_LABEL,
			  BLOCK_NUMBER (block));
}
28377 | |
28378 | /* Called from finalize_size_functions for size functions so that their body |
28379 | can be encoded in the debug info to describe the layout of variable-length |
28380 | structures. */ |
28381 | |
static void
dwarf2out_size_function (tree decl)
{
  /* set_early_dwarf is a scoped guard: DWARF procedures for size
     functions must be generated in early-dwarf mode.  */
  set_early_dwarf s;
  function_to_dwarf_procedure (fndecl: decl);
}
28388 | |
28389 | /* Note in one location list that text section has changed. */ |
28390 | |
28391 | int |
28392 | var_location_switch_text_section_1 (var_loc_list **slot, void *) |
28393 | { |
28394 | var_loc_list *list = *slot; |
28395 | if (list->first) |
28396 | list->last_before_switch |
28397 | = list->last->next ? list->last->next : list->last; |
28398 | return 1; |
28399 | } |
28400 | |
28401 | /* Note in all location lists that text section has changed. */ |
28402 | |
28403 | static void |
28404 | var_location_switch_text_section (void) |
28405 | { |
28406 | if (decl_loc_table == NULL) |
28407 | return; |
28408 | |
28409 | decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL); |
28410 | } |
28411 | |
28412 | /* Create a new line number table. */ |
28413 | |
28414 | static dw_line_info_table * |
28415 | new_line_info_table (void) |
28416 | { |
28417 | dw_line_info_table *table; |
28418 | |
28419 | table = ggc_cleared_alloc<dw_line_info_table> (); |
28420 | table->file_num = 1; |
28421 | table->line_num = 1; |
28422 | table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START; |
28423 | FORCE_RESET_NEXT_VIEW (table->view); |
28424 | table->symviews_since_reset = 0; |
28425 | |
28426 | return table; |
28427 | } |
28428 | |
28429 | /* Lookup the "current" table into which we emit line info, so |
28430 | that we don't have to do it for every source line. */ |
28431 | |
static void
set_cur_line_info_table (section *sec)
{
  dw_line_info_table *table;

  if (sec == text_section)
    table = text_section_line_info;
  else if (sec == cold_text_section)
    {
      /* The cold-section table is created lazily, on first use.  */
      table = cold_text_section_line_info;
      if (!table)
	{
	  cold_text_section_line_info = table = new_line_info_table ();
	  table->end_label = cold_end_label;
	}
    }
  else
    {
      /* Any other section gets its own table, remembered in
	 separate_line_info.  Pick the label that will mark the end of
	 the code this table covers.  */
      const char *end_label;

      if (crtl->has_bb_partition)
	{
	  if (in_cold_section_p)
	    end_label = crtl->subsections.cold_section_end_label;
	  else
	    end_label = crtl->subsections.hot_section_end_label;
	}
      else
	{
	  char label[MAX_ARTIFICIAL_LABEL_BYTES];
	  ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
				       current_function_funcdef_no);
	  end_label = ggc_strdup (label);
	}

      table = new_line_info_table ();
      table->end_label = end_label;

      vec_safe_push (v&: separate_line_info, obj: table);
    }

  /* When the assembler computes line info, is_stmt state carries over
     from the previously current table rather than resetting.  */
  if (output_asm_line_debug_info ())
    table->is_stmt = (cur_line_info_table
		      ? cur_line_info_table->is_stmt
		      : DWARF_LINE_DEFAULT_IS_STMT_START);
  cur_line_info_table = table;
}
28479 | |
28480 | |
28481 | /* We need to reset the locations at the beginning of each |
28482 | function. We can't do this in the end_function hook, because the |
28483 | declarations that use the locations won't have been output when |
28484 | that hook is called. Also compute have_multiple_function_sections here. */ |
28485 | |
static void
dwarf2out_begin_function (tree fun)
{
  section *sec = function_section (fun);

  if (sec != text_section)
    have_multiple_function_sections = true;

  /* First partitioned function: create the cold section and emit its
     start label, then switch back to the function's own section.  */
  if (crtl->has_bb_partition && !cold_text_section)
    {
      gcc_assert (current_function_decl == fun);
      cold_text_section = unlikely_text_section ();
      switch_to_section (cold_text_section);
      ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
      switch_to_section (sec);
    }

  /* Per-function call-site counters, reset for each new function.  */
  call_site_count = 0;
  tail_call_site_count = 0;

  set_cur_line_info_table (sec);
  /* Start the function with a forced view reset ("-0" view).  */
  FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
}
28509 | |
28510 | /* Helper function of dwarf2out_end_function, called only after emitting |
28511 | the very first function into assembly. Check if some .debug_loc range |
28512 | might end with a .LVL* label that could be equal to .Ltext0. |
28513 | In that case we must force using absolute addresses in .debug_loc ranges, |
28514 | because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for |
28515 | .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc |
28516 | list terminator. |
28517 | Set have_multiple_function_sections to true in that case and |
28518 | terminate htab traversal. */ |
28519 | |
int
find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
{
  var_loc_list *entry = *slot;
  struct var_loc_node *node;

  /* Only the second node's label can start a range that might be
     empty against .Ltext0; the first node has no preceding range.  */
  node = entry->first;
  if (node && node->next && node->next->label)
    {
      unsigned int i;
      const char *label = node->next->label;
      char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];

      /* Compare against every .LVL label known to coincide with the
	 start of the text section.  */
      for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
	{
	  ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL" , i);
	  if (strcmp (s1: label, s2: loclabel) == 0)
	    {
	      /* Force absolute addresses in .debug_loc and stop the
		 traversal (returning 0 terminates it).  */
	      have_multiple_function_sections = true;
	      return 0;
	    }
	}
    }
  return 1;
}
28545 | |
28546 | /* Hook called after emitting a function into assembly. |
28547 | This does something only for the very first function emitted. */ |
28548 | |
static void
dwarf2out_end_function (unsigned int)
{
  /* Only the very first function needs the empty-range scan; see
     find_empty_loc_ranges_at_text_label above.  */
  if (in_first_function_p
      && !have_multiple_function_sections
      && first_loclabel_num_not_at_text_label
      && decl_loc_table)
    decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (argument: 0);
  /* From now on we are past the first function and past .Ltext0.  */
  in_first_function_p = false;
  maybe_at_text_label_p = false;
}
28560 | |
/* Temporary holder for dwarf2out_register_main_translation_unit.  Lets
   front-ends register a translation unit even before dwarf2out_init is
   called; dwarf2out_init then picks it up from here.  */
static tree main_translation_unit = NULL_TREE;
28565 | |
28566 | /* Hook called by front-ends after they built their main translation unit. |
28567 | Associate comp_unit_die to UNIT. */ |
28568 | |
28569 | static void |
28570 | dwarf2out_register_main_translation_unit (tree unit) |
28571 | { |
28572 | gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL |
28573 | && main_translation_unit == NULL_TREE); |
28574 | main_translation_unit = unit; |
28575 | /* If dwarf2out_init has not been called yet, it will perform the association |
28576 | itself looking at main_translation_unit. */ |
28577 | if (decl_die_table != NULL) |
28578 | equate_decl_number_to_die (decl: unit, decl_die: comp_unit_die ()); |
28579 | } |
28580 | |
28581 | /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */ |
28582 | |
28583 | static void |
28584 | push_dw_line_info_entry (dw_line_info_table *table, |
28585 | enum dw_line_info_opcode opcode, unsigned int val) |
28586 | { |
28587 | dw_line_info_entry e; |
28588 | e.opcode = opcode; |
28589 | e.val = val; |
28590 | vec_safe_push (v&: table->entries, obj: e); |
28591 | } |
28592 | |
28593 | /* Output a label to mark the beginning of a source code line entry |
28594 | and record information relating to this source line, in |
28595 | 'line_info_table' for later output of the .debug_line section. */ |
28596 | /* ??? The discriminator parameter ought to be unsigned. */ |
28597 | |
static void
dwarf2out_source_line (unsigned int line, unsigned int column,
		       const char *filename,
		       int discriminator, bool is_stmt)
{
  unsigned int file_num;
  dw_line_info_table *table;
  /* Counter for assembler-computed view ids; bumped each time a
     symbolic view label (or an omitted/zero view) is issued.  */
  static var_loc_view lvugid;

  /* 'line_info_table' information gathering is not needed when the debug
     info level is set to the lowest value.  Also, the current DWARF-based
     debug formats do not use this info.  */
  if (debug_info_level < DINFO_LEVEL_TERSE || !dwarf_debuginfo_p ())
    return;

  table = cur_line_info_table;

  /* Line zero means "no source location"; no .loc can be issued.  */
  if (line == 0)
    {
      if (debug_variable_location_views
	  && output_asm_line_debug_info ()
	  && table && !RESETTING_VIEW_P (table->view))
	{
	  /* If we're using the assembler to compute view numbers, we
	     can't issue a .loc directive for line zero, so we can't
	     get a view number at this point.  We might attempt to
	     compute it from the previous view, or equate it to a
	     subsequent view (though it might not be there!), but
	     since we're omitting the line number entry, we might as
	     well omit the view number as well.  That means pretending
	     it's a view number zero, which might very well turn out
	     to be correct.  ??? Extend the assembler so that the
	     compiler could emit e.g. ".locview .LVU#", to output a
	     view without changing line number information.  We'd then
	     have to count it in symviews_since_reset; when it's omitted,
	     it doesn't count.  */
	  if (!zero_view_p)
	    zero_view_p = BITMAP_GGC_ALLOC ();
	  bitmap_set_bit (zero_view_p, table->view);
	  if (flag_debug_asm)
	    {
	      char label[MAX_ARTIFICIAL_LABEL_BYTES];
	      ASM_GENERATE_INTERNAL_LABEL (label, "LVU" , table->view);
	      fprintf (stream: asm_out_file, format: "\t%s line 0, omitted view " ,
		       ASM_COMMENT_START);
	      assemble_name (asm_out_file, label);
	      putc (c: '\n', stream: asm_out_file);
	    }
	  table->view = ++lvugid;
	}
      return;
    }

  /* The discriminator column was added in dwarf4.  Simplify the below
     by simply removing it if we're not supposed to output it.  */
  if (dwarf_version < 4 && dwarf_strict)
    discriminator = 0;

  if (!debug_column_info)
    column = 0;

  file_num = maybe_emit_file (fd: lookup_filename (file_name: filename));

  /* ??? TODO: Elide duplicate line number entries.  Traditionally,
     the debugger has used the second (possibly duplicate) line number
     at the beginning of the function to mark the end of the prologue.
     We could eliminate any other duplicates within the function.  For
     Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
     that second line number entry.  */
  /* Recall that this end-of-prologue indication is *not* the same thing
     as the end_prologue debug hook.  The NOTE_INSN_PROLOGUE_END note,
     to which the hook corresponds, follows the last insn that was
     emitted by gen_prologue.  What we need is to precede the first insn
     that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
     insn that corresponds to something the user wrote.  These may be
     very different locations once scheduling is enabled.  */

  /* NOTE: this early-out is deliberately disabled ("0 &&"); eliding
     duplicate entries would defeat the end-of-prologue heuristic
     described above.  */
  if (0 && file_num == table->file_num
      && line == table->line_num
      && column == table->column_num
      && discriminator == table->discrim_num
      && is_stmt == table->is_stmt)
    return;

  switch_to_section (current_function_section ());

  /* If requested, emit something human-readable.  */
  if (flag_debug_asm)
    {
      if (debug_column_info)
	fprintf (stream: asm_out_file, format: "\t%s %s:%d:%d\n" , ASM_COMMENT_START,
		 filename, line, column);
      else
	fprintf (stream: asm_out_file, format: "\t%s %s:%d\n" , ASM_COMMENT_START,
		 filename, line);
    }

  if (output_asm_line_debug_info ())
    {
      /* Emit the .loc directive understood by GNU as.  */
      /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
	 file_num, line, is_stmt, discriminator */
      fputs (s: "\t.loc " , stream: asm_out_file);
      fprint_ul (asm_out_file, file_num);
      putc (c: ' ', stream: asm_out_file);
      fprint_ul (asm_out_file, line);
      putc (c: ' ', stream: asm_out_file);
      fprint_ul (asm_out_file, column);

      if (is_stmt != table->is_stmt)
	{
#if HAVE_GAS_LOC_STMT
	  fputs (s: " is_stmt " , stream: asm_out_file);
	  putc (c: is_stmt ? '1' : '0', stream: asm_out_file);
#endif
	}
      if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
	{
	  gcc_assert (discriminator > 0);
	  fputs (s: " discriminator " , stream: asm_out_file);
	  fprint_ul (asm_out_file, (unsigned long) discriminator);
	}
      if (debug_variable_location_views)
	{
	  if (!RESETTING_VIEW_P (table->view))
	    {
	      table->symviews_since_reset++;
	      if (table->symviews_since_reset > symview_upper_bound)
		symview_upper_bound = table->symviews_since_reset;
	      /* When we're using the assembler to compute view
		 numbers, we output symbolic labels after "view" in
		 .loc directives, and the assembler will set them for
		 us, so that we can refer to the view numbers in
		 location lists.  The only exceptions are when we know
		 a view will be zero: "-0" is a forced reset, used
		 e.g. in the beginning of functions, whereas "0" tells
		 the assembler to check that there was a PC change
		 since the previous view, in a way that implicitly
		 resets the next view.  */
	      fputs (s: " view " , stream: asm_out_file);
	      char label[MAX_ARTIFICIAL_LABEL_BYTES];
	      ASM_GENERATE_INTERNAL_LABEL (label, "LVU" , table->view);
	      assemble_name (asm_out_file, label);
	      table->view = ++lvugid;
	    }
	  else
	    {
	      table->symviews_since_reset = 0;
	      if (FORCE_RESETTING_VIEW_P (table->view))
		fputs (s: " view -0" , stream: asm_out_file);
	      else
		fputs (s: " view 0" , stream: asm_out_file);
	      /* Mark the present view as a zero view.  Earlier debug
		 binds may have already added its id to loclists to be
		 emitted later, so we can't reuse the id for something
		 else.  However, it's good to know whether a view is
		 known to be zero, because then we may be able to
		 optimize out locviews that are all zeros, so take
		 note of it in zero_view_p.  */
	      if (!zero_view_p)
		zero_view_p = BITMAP_GGC_ALLOC ();
	      bitmap_set_bit (zero_view_p, lvugid);
	      table->view = ++lvugid;
	    }
	}
      putc (c: '\n', stream: asm_out_file);
    }
  else
    {
      /* Compiler-computed line info: emit a code label here and record
	 the line-program opcodes for later .debug_line output.  */
      unsigned int label_num = ++line_info_label_num;

      targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);

      if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
	push_dw_line_info_entry (table, opcode: LI_adv_address, val: label_num);
      else
	push_dw_line_info_entry (table, opcode: LI_set_address, val: label_num);
      if (debug_variable_location_views)
	{
	  bool resetting = FORCE_RESETTING_VIEW_P (table->view);
	  if (resetting)
	    table->view = 0;

	  if (flag_debug_asm)
	    fprintf (stream: asm_out_file, format: "\t%s view %s%d\n" ,
		     ASM_COMMENT_START,
		     resetting ? "-" : "" ,
		     table->view);

	  table->view++;
	}
      /* Only emit opcodes for state that actually changed.  */
      if (file_num != table->file_num)
	push_dw_line_info_entry (table, opcode: LI_set_file, val: file_num);
      if (discriminator != table->discrim_num)
	push_dw_line_info_entry (table, opcode: LI_set_discriminator, val: discriminator);
      if (is_stmt != table->is_stmt)
	push_dw_line_info_entry (table, opcode: LI_negate_stmt, val: 0);
      push_dw_line_info_entry (table, opcode: LI_set_line, val: line);
      if (debug_column_info)
	push_dw_line_info_entry (table, opcode: LI_set_column, val: column);
    }

  /* Cache the emitted state so the next call can detect changes.  */
  table->file_num = file_num;
  table->line_num = line;
  table->column_num = column;
  table->discrim_num = discriminator;
  table->is_stmt = is_stmt;
  table->in_use = true;
}
28807 | |
28808 | /* Record a source file location for a DECL_IGNORED_P function. */ |
28809 | |
static void
dwarf2out_set_ignored_loc (unsigned int line, unsigned int column,
			   const char *filename)
{
  dw_fde_ref fde = cfun->fde;

  /* This FDE now has a real source location, so stop ignoring it.  */
  fde->ignored_debug = false;
  set_cur_line_info_table (function_section (fde->decl));

  dwarf2out_source_line (line, column, filename, discriminator: 0, is_stmt: true);
}
28821 | |
28822 | /* Record the beginning of a new source file. */ |
28823 | |
28824 | static void |
28825 | dwarf2out_start_source_file (unsigned int lineno, const char *filename) |
28826 | { |
28827 | if (debug_info_level >= DINFO_LEVEL_VERBOSE) |
28828 | { |
28829 | macinfo_entry e; |
28830 | e.code = DW_MACINFO_start_file; |
28831 | e.lineno = lineno; |
28832 | e.info = ggc_strdup (filename); |
28833 | vec_safe_push (v&: macinfo_table, obj: e); |
28834 | } |
28835 | } |
28836 | |
28837 | /* Record the end of a source file. */ |
28838 | |
28839 | static void |
28840 | dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED) |
28841 | { |
28842 | if (debug_info_level >= DINFO_LEVEL_VERBOSE) |
28843 | { |
28844 | macinfo_entry e; |
28845 | e.code = DW_MACINFO_end_file; |
28846 | e.lineno = lineno; |
28847 | e.info = NULL; |
28848 | vec_safe_push (v&: macinfo_table, obj: e); |
28849 | } |
28850 | } |
28851 | |
28852 | /* Called from debug_define in toplev.cc. The `buffer' parameter contains |
28853 | the tail part of the directive line, i.e. the part which is past the |
28854 | initial whitespace, #, whitespace, directive-name, whitespace part. */ |
28855 | |
28856 | static void |
28857 | dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED, |
28858 | const char *buffer ATTRIBUTE_UNUSED) |
28859 | { |
28860 | if (debug_info_level >= DINFO_LEVEL_VERBOSE) |
28861 | { |
28862 | macinfo_entry e; |
28863 | /* Insert a dummy first entry to be able to optimize the whole |
28864 | predefined macro block using DW_MACRO_import. */ |
28865 | if (macinfo_table->is_empty () && lineno <= 1) |
28866 | { |
28867 | e.code = 0; |
28868 | e.lineno = 0; |
28869 | e.info = NULL; |
28870 | vec_safe_push (v&: macinfo_table, obj: e); |
28871 | } |
28872 | e.code = DW_MACINFO_define; |
28873 | e.lineno = lineno; |
28874 | e.info = ggc_strdup (buffer); |
28875 | vec_safe_push (v&: macinfo_table, obj: e); |
28876 | } |
28877 | } |
28878 | |
28879 | /* Called from debug_undef in toplev.cc. The `buffer' parameter contains |
28880 | the tail part of the directive line, i.e. the part which is past the |
28881 | initial whitespace, #, whitespace, directive-name, whitespace part. */ |
28882 | |
28883 | static void |
28884 | dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED, |
28885 | const char *buffer ATTRIBUTE_UNUSED) |
28886 | { |
28887 | if (debug_info_level >= DINFO_LEVEL_VERBOSE) |
28888 | { |
28889 | macinfo_entry e; |
28890 | /* Insert a dummy first entry to be able to optimize the whole |
28891 | predefined macro block using DW_MACRO_import. */ |
28892 | if (macinfo_table->is_empty () && lineno <= 1) |
28893 | { |
28894 | e.code = 0; |
28895 | e.lineno = 0; |
28896 | e.info = NULL; |
28897 | vec_safe_push (v&: macinfo_table, obj: e); |
28898 | } |
28899 | e.code = DW_MACINFO_undef; |
28900 | e.lineno = lineno; |
28901 | e.info = ggc_strdup (buffer); |
28902 | vec_safe_push (v&: macinfo_table, obj: e); |
28903 | } |
28904 | } |
28905 | |
/* Hasher for a table of macinfo entries, keyed on the macro string;
   used to share identical define/undef runs across compilation units
   (the old comment said "CUs", but the entries hashed here are
   macinfo_entry objects).  */

struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
{
  /* Hash on the entry's macro string (INFO field).  */
  static inline hashval_t hash (const macinfo_entry *);
  /* Entries are equal when their macro strings match exactly.  */
  static inline bool equal (const macinfo_entry *, const macinfo_entry *);
};
28913 | |
28914 | inline hashval_t |
28915 | macinfo_entry_hasher::hash (const macinfo_entry *entry) |
28916 | { |
28917 | return htab_hash_string (entry->info); |
28918 | } |
28919 | |
28920 | inline bool |
28921 | macinfo_entry_hasher::equal (const macinfo_entry *entry1, |
28922 | const macinfo_entry *entry2) |
28923 | { |
28924 | return !strcmp (s1: entry1->info, s2: entry2->info); |
28925 | } |
28926 | |
28927 | typedef hash_table<macinfo_entry_hasher> macinfo_hash_type; |
28928 | |
28929 | /* Output a single .debug_macinfo entry. */ |
28930 | |
static void
output_macinfo_op (macinfo_entry *ref)
{
  int file_num;
  size_t len;
  struct indirect_string_node *node;
  char label[MAX_ARTIFICIAL_LABEL_BYTES];
  struct dwarf_file_data *fd;

  switch (ref->code)
    {
    case DW_MACINFO_start_file:
      fd = lookup_filename (file_name: ref->info);
      file_num = maybe_emit_file (fd);
      dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file" );
      dw2_asm_output_data_uleb128 (ref->lineno,
				   "Included from line number %lu" ,
				   (unsigned long) ref->lineno);
      dw2_asm_output_data_uleb128 (file_num, "file %s" , ref->info);
      break;
    case DW_MACINFO_end_file:
      dw2_asm_output_data (1, DW_MACINFO_end_file, "End file" );
      break;
    case DW_MACINFO_define:
    case DW_MACINFO_undef:
      len = strlen (s: ref->info) + 1;
      /* Long strings in a mergeable .debug_str section are cheaper as
	 indirect references; rewrite the opcode to the strp/strx form
	 and recurse to emit it.  */
      if ((!dwarf_strict || dwarf_version >= 5)
	  && len > (size_t) dwarf_offset_size
	  && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
	  && (debug_str_section->common.flags & SECTION_MERGE) != 0)
	{
	  if (dwarf_split_debug_info && dwarf_version >= 5)
	    ref->code = ref->code == DW_MACINFO_define
			? DW_MACRO_define_strx : DW_MACRO_undef_strx;
	  else
	    ref->code = ref->code == DW_MACINFO_define
			? DW_MACRO_define_strp : DW_MACRO_undef_strp;
	  output_macinfo_op (ref);
	  return;
	}
      /* Short strings are emitted inline.  */
      dw2_asm_output_data (1, ref->code,
			   ref->code == DW_MACINFO_define
			   ? "Define macro" : "Undefine macro" );
      dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu" ,
				   (unsigned long) ref->lineno);
      dw2_asm_output_nstring (ref->info, -1, "The macro" );
      break;
    case DW_MACRO_define_strp:
      dw2_asm_output_data (1, ref->code, "Define macro strp" );
      goto do_DW_MACRO_define_strpx;
    case DW_MACRO_undef_strp:
      dw2_asm_output_data (1, ref->code, "Undefine macro strp" );
      goto do_DW_MACRO_define_strpx;
    case DW_MACRO_define_strx:
      dw2_asm_output_data (1, ref->code, "Define macro strx" );
      goto do_DW_MACRO_define_strpx;
    case DW_MACRO_undef_strx:
      dw2_asm_output_data (1, ref->code, "Undefine macro strx" );
      /* FALLTHRU */
    do_DW_MACRO_define_strpx:
      /* NB: dwarf2out_finish performs:
	   1. save_macinfo_strings
	   2. hash table traverse of index_string
	   3. output_macinfo -> output_macinfo_op
	   4. output_indirect_strings
	      -> hash table traverse of output_index_string

	 When output_macinfo_op is called, all index strings have been
	 added to hash table by save_macinfo_strings and we can't pass
	 INSERT to find_slot_with_hash which may expand hash table, even
	 if no insertion is needed, and change hash table traverse order
	 between index_string and output_index_string.  */
      node = find_AT_string (str: ref->info, insert: NO_INSERT);
      gcc_assert (node
		  && (node->form == DW_FORM_strp
		      || node->form == dwarf_FORM (DW_FORM_strx)));
      dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu" ,
				   (unsigned long) ref->lineno);
      if (node->form == DW_FORM_strp)
	dw2_asm_output_offset (dwarf_offset_size, node->label,
			       debug_str_section, "The macro: \"%s\"" ,
			       ref->info);
      else
	dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"" ,
				     ref->info);
      break;
    case DW_MACRO_import:
      /* For imports, LINENO holds the label index of the target comdat
	 section (set by optimize_macinfo_range), not a line number.  */
      dw2_asm_output_data (1, ref->code, "Import" );
      ASM_GENERATE_INTERNAL_LABEL (label,
				   DEBUG_MACRO_SECTION_LABEL,
				   ref->lineno + macinfo_label_base);
      dw2_asm_output_offset (dwarf_offset_size, label, NULL, NULL);
      break;
    default:
      fprintf (stream: asm_out_file, format: "%s unrecognized macinfo code %lu\n" ,
	       ASM_COMMENT_START, (unsigned long) ref->code);
      break;
    }
}
29030 | |
29031 | /* Attempt to make a sequence of define/undef macinfo ops shareable with |
29032 | other compilation unit .debug_macinfo sections. IDX is the first |
29033 | index of a define/undef, return the number of ops that should be |
29034 | emitted in a comdat .debug_macinfo section and emit |
29035 | a DW_MACRO_import entry referencing it. |
29036 | If the define/undef entry should be emitted normally, return 0. */ |
29037 | |
static unsigned
optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
			macinfo_hash_type **macinfo_htab)
{
  macinfo_entry *first, *second, *cur, *inc;
  char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
  unsigned char checksum[16];
  struct md5_ctx ctx;
  char *grp_name, *tail;
  const char *base;
  unsigned int i, count, encoded_filename_len, linebuf_len;
  macinfo_entry **slot;

  first = &(*macinfo_table)[idx];
  second = &(*macinfo_table)[idx + 1];

  /* Optimize only if there are at least two consecutive define/undef ops,
     and either all of them are before first DW_MACINFO_start_file
     with lineno {0,1} (i.e. predefined macro block), or all of them are
     in some included header file.  */
  if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
    return 0;
  if (vec_safe_is_empty (v: files))
    {
      if (first->lineno > 1 || second->lineno > 1)
	return 0;
    }
  else if (first->lineno == 0)
    return 0;

  /* Find the last define/undef entry that can be grouped together
     with first and at the same time compute md5 checksum of their
     codes, linenumbers and strings.  */
  md5_init_ctx (ctx: &ctx);
  for (i = idx; macinfo_table->iterate (ix: i, ptr: &cur); i++)
    if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
      break;
    else if (vec_safe_is_empty (v: files) && cur->lineno > 1)
      break;
    else
      {
	unsigned char code = cur->code;
	md5_process_bytes (buffer: &code, len: 1, ctx: &ctx);
	checksum_uleb128 (value: cur->lineno, ctx: &ctx);
	md5_process_bytes (buffer: cur->info, len: strlen (s: cur->info) + 1, ctx: &ctx);
      }
  md5_finish_ctx (ctx: &ctx, resbuf: checksum);
  /* COUNT is the number of consecutive define/undef ops to group.  */
  count = i - idx;

  /* From the containing include filename (if any) pick up just
     usable characters from its basename.  */
  if (vec_safe_is_empty (v: files))
    base = "" ;
  else
    base = lbasename (files->last ().info);
  for (encoded_filename_len = 0, i = 0; base[i]; i++)
    if (ISIDNUM (base[i]) || base[i] == '.')
      encoded_filename_len++;
  /* Count . at the end.  */
  if (encoded_filename_len)
    encoded_filename_len++;

  sprintf (s: linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
  linebuf_len = strlen (s: linebuf);

  /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
  grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
			 + 16 * 2 + 1);
  memcpy (dest: grp_name, dwarf_offset_size == 4 ? "wm4." : "wm8." , n: 4);
  tail = grp_name + 4;
  if (encoded_filename_len)
    {
      for (i = 0; base[i]; i++)
	if (ISIDNUM (base[i]) || base[i] == '.')
	  *tail++ = base[i];
      *tail++ = '.';
    }
  memcpy (dest: tail, src: linebuf, n: linebuf_len);
  tail += linebuf_len;
  *tail++ = '.';
  for (i = 0; i < 16; i++)
    sprintf (s: tail + i * 2, format: "%02x" , checksum[i] & 0xff);

  /* Construct a macinfo_entry for DW_MACRO_import
     in the empty vector entry before the first define/undef.  */
  inc = &(*macinfo_table)[idx - 1];
  inc->code = DW_MACRO_import;
  inc->lineno = 0;
  inc->info = ggc_strdup (grp_name);
  if (!*macinfo_htab)
    *macinfo_htab = new macinfo_hash_type (10);
  /* Avoid emitting duplicates.  */
  slot = (*macinfo_htab)->find_slot (value: inc, insert: INSERT);
  if (*slot != NULL)
    {
      inc->code = 0;
      inc->info = NULL;
      /* If such an entry has been used before, just emit
	 a DW_MACRO_import op.  */
      inc = *slot;
      output_macinfo_op (ref: inc);
      /* And clear all macinfo_entry in the range to avoid emitting them
	 in the second pass.  */
      for (i = idx; macinfo_table->iterate (ix: i, ptr: &cur) && i < idx + count; i++)
	{
	  cur->code = 0;
	  cur->info = NULL;
	}
    }
  else
    {
      /* First occurrence: record it and assign it the next label index
	 (stored in LINENO, see output_macinfo_op's DW_MACRO_import
	 case).  */
      *slot = inc;
      inc->lineno = (*macinfo_htab)->elements ();
      output_macinfo_op (ref: inc);
    }
  return count;
}
29155 | |
29156 | /* Save any strings needed by the macinfo table in the debug str |
29157 | table. All strings must be collected into the table by the time |
29158 | index_string is called. */ |
29159 | |
static void
save_macinfo_strings (void)
{
  unsigned len;
  unsigned i;
  macinfo_entry *ref;

  /* Walk all recorded macinfo ops and pre-register in the debug string
     table every string that output_macinfo_op will later reference
     indirectly, so the table is complete before index_string runs.  */
  for (i = 0; macinfo_table && macinfo_table->iterate (ix: i, ptr: &ref); i++)
    {
      switch (ref->code)
        {
          /* Match the logic in output_macinfo_op to decide on
             indirect strings.  */
          case DW_MACINFO_define:
          case DW_MACINFO_undef:
            /* Use an indirect string only when .debug_macro is in use,
               the string (incl. terminator) is longer than an offset,
               and the target can merge identical strings in
               .debug_str.  */
            len = strlen (s: ref->info) + 1;
            if ((!dwarf_strict || dwarf_version >= 5)
                && len > (unsigned) dwarf_offset_size
                && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
                && (debug_str_section->common.flags & SECTION_MERGE) != 0)
              set_indirect_string (find_AT_string (str: ref->info));
            break;
          case DW_MACINFO_start_file:
            /* -gsplit-dwarf -g3 will also output filename as indirect
               string.  */
            if (!dwarf_split_debug_info)
              break;
            /* Fall through.  */
          case DW_MACRO_define_strp:
          case DW_MACRO_undef_strp:
          case DW_MACRO_define_strx:
          case DW_MACRO_undef_strx:
            /* These ops always refer to their strings indirectly.  */
            set_indirect_string (find_AT_string (str: ref->info));
            break;
          default:
            break;
        }
    }
}
29199 | |
29200 | /* Output macinfo section(s). */ |
29201 | |
static void
output_macinfo (const char *debug_line_label, bool early_lto_debug)
{
  unsigned i;
  unsigned long length = vec_safe_length (v: macinfo_table);
  macinfo_entry *ref;
  vec<macinfo_entry, va_gc> *files = NULL;
  macinfo_hash_type *macinfo_htab = NULL;
  char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];

  /* No macro ops were recorded, nothing to emit.  */
  if (! length)
    return;

  /* output_macinfo* uses these interchangeably.  */
  gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
              && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
              && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
              && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);

  /* AIX Assembler inserts the length, so adjust the reference to match the
     offset expected by debuggers.  */
  strcpy (dest: dl_section_ref, src: debug_line_label);
  if (XCOFF_DEBUGGING_INFO)
    strcat (dest: dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);

  /* For .debug_macro emit the section header.  */
  if (!dwarf_strict || dwarf_version >= 5)
    {
      dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
                           "DWARF macro version number" );
      if (dwarf_offset_size == 8)
        dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present" );
      else
        dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present" );
      dw2_asm_output_offset (dwarf_offset_size, debug_line_label,
                             debug_line_section, NULL);
    }

  /* In the first loop, it emits the primary .debug_macinfo section
     and after each emitted op the macinfo_entry is cleared.
     If a longer range of define/undef ops can be optimized using
     DW_MACRO_import, the DW_MACRO_import op is emitted and kept in
     the vector before the first define/undef in the range and the
     whole range of define/undef ops is not emitted and kept.  */
  for (i = 0; macinfo_table->iterate (ix: i, ptr: &ref); i++)
    {
      switch (ref->code)
        {
        case DW_MACINFO_start_file:
          /* Track the include stack; the top of FILES is the file
             the following define/undef ops belong to.  */
          vec_safe_push (v&: files, obj: *ref);
          break;
        case DW_MACINFO_end_file:
          if (!vec_safe_is_empty (v: files))
            files->pop ();
          break;
        case DW_MACINFO_define:
        case DW_MACINFO_undef:
          /* A define/undef right after a cleared (code 0) entry and
             inside an include file (FILES depth != 1) may start a
             mergeable range; try to turn it into a comdat
             DW_MACRO_import group.  */
          if ((!dwarf_strict || dwarf_version >= 5)
              && !dwarf_split_debug_info
              && HAVE_COMDAT_GROUP
              && vec_safe_length (v: files) != 1
              && i > 0
              && i + 1 < length
              && (*macinfo_table)[i - 1].code == 0)
            {
              unsigned count = optimize_macinfo_range (idx: i, files, macinfo_htab: &macinfo_htab);
              if (count)
                {
                  /* The whole range was replaced by one import op;
                     skip over it.  */
                  i += count - 1;
                  continue;
                }
            }
          break;
        case 0:
          /* A dummy entry may be inserted at the beginning to be able
             to optimize the whole block of predefined macros.  */
          if (i == 0)
            continue;
        default:
          break;
        }
      output_macinfo_op (ref);
      ref->info = NULL;
      ref->code = 0;
    }

  /* No DW_MACRO_import ops were created, so there is no second pass.  */
  if (!macinfo_htab)
    return;

  /* Save the number of transparent includes so we can adjust the
     label number for the fat LTO object DWARF.  */
  unsigned macinfo_label_base_adj = macinfo_htab->elements ();

  delete macinfo_htab;
  macinfo_htab = NULL;

  /* If any DW_MACRO_import were used, on those DW_MACRO_import entries
     terminate the current chain and switch to a new comdat .debug_macinfo
     section and emit the define/undef entries within it.  */
  for (i = 0; macinfo_table->iterate (ix: i, ptr: &ref); i++)
    switch (ref->code)
      {
      case 0:
        /* Already emitted in the first pass.  */
        continue;
      case DW_MACRO_import:
        {
          char label[MAX_ARTIFICIAL_LABEL_BYTES];
          /* The group name computed by optimize_macinfo_range keys the
             comdat section.  */
          tree comdat_key = get_identifier (ref->info);
          /* Terminate the previous .debug_macinfo section.  */
          dw2_asm_output_data (1, 0, "End compilation unit" );
          targetm.asm_out.named_section (debug_macinfo_section_name,
                                         SECTION_DEBUG
                                         | SECTION_LINKONCE
                                         | (early_lto_debug
                                            ? SECTION_EXCLUDE : 0),
                                         comdat_key);
          ASM_GENERATE_INTERNAL_LABEL (label,
                                       DEBUG_MACRO_SECTION_LABEL,
                                       ref->lineno + macinfo_label_base);
          ASM_OUTPUT_LABEL (asm_out_file, label);
          ref->code = 0;
          ref->info = NULL;
          /* Each comdat group gets its own .debug_macro header.  */
          dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
                               "DWARF macro version number" );
          if (dwarf_offset_size == 8)
            dw2_asm_output_data (1, 1, "Flags: 64-bit" );
          else
            dw2_asm_output_data (1, 0, "Flags: 32-bit" );
        }
        break;
      case DW_MACINFO_define:
      case DW_MACINFO_undef:
        output_macinfo_op (ref);
        ref->code = 0;
        ref->info = NULL;
        break;
      default:
        gcc_unreachable ();
      }

  /* Keep the import-group label numbers unique across the fat LTO
     object DWARF.  */
  macinfo_label_base += macinfo_label_base_adj;
}
29344 | |
/* As init_sections_and_labels may get called multiple times, have a
   generation count for labels; it keeps the internal label names
   generated on each call unique.  */
static unsigned init_sections_and_labels_generation;
29348 | |
/* Initialize the various sections and labels for dwarf output, either
   for the regular debug sections or, when EARLY_LTO_DEBUG, for the
   early LTO debug sections.  Returns the generation (zero based
   number of times the function was called).  */
29352 | |
static unsigned
init_sections_and_labels (bool early_lto_debug)
{
  if (early_lto_debug)
    {
      /* Early LTO debug sections are marked SECTION_EXCLUDE so the
         final link does not copy them into the executable.  */
      if (!dwarf_split_debug_info)
        {
          debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
                                            SECTION_DEBUG | SECTION_EXCLUDE,
                                            NULL);
          debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
                                              SECTION_DEBUG | SECTION_EXCLUDE,
                                              NULL);
          /* .debug_macinfo for strict pre-v5 DWARF, .debug_macro
             otherwise.  */
          debug_macinfo_section_name
            = ((dwarf_strict && dwarf_version < 5)
               ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
          debug_macinfo_section = get_section (debug_macinfo_section_name,
                                               SECTION_DEBUG
                                               | SECTION_EXCLUDE, NULL);
        }
      else
        {
          /* ??? Which of the following do we need early?  */
          debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
                                            SECTION_DEBUG | SECTION_EXCLUDE,
                                            NULL);
          debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
                                              SECTION_DEBUG | SECTION_EXCLUDE,
                                              NULL);
          debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
                                                     SECTION_DEBUG
                                                     | SECTION_EXCLUDE, NULL);
          debug_skeleton_abbrev_section
            = get_section (DEBUG_LTO_ABBREV_SECTION,
                           SECTION_DEBUG | SECTION_EXCLUDE, NULL);
          ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
                                       DEBUG_SKELETON_ABBREV_SECTION_LABEL,
                                       init_sections_and_labels_generation);

          /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
             stay in the main .o, but the skeleton_line goes into the split
             off dwo.  */
          debug_skeleton_line_section
            = get_section (DEBUG_LTO_LINE_SECTION,
                           SECTION_DEBUG | SECTION_EXCLUDE, NULL);
          ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
                                       DEBUG_SKELETON_LINE_SECTION_LABEL,
                                       init_sections_and_labels_generation);
          debug_str_offsets_section
            = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
                           SECTION_DEBUG | SECTION_EXCLUDE,
                           NULL);
          ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
                                       DEBUG_SKELETON_INFO_SECTION_LABEL,
                                       init_sections_and_labels_generation);
          debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
                                               DEBUG_STR_DWO_SECTION_FLAGS,
                                               NULL);
          debug_macinfo_section_name
            = ((dwarf_strict && dwarf_version < 5)
               ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
          debug_macinfo_section = get_section (debug_macinfo_section_name,
                                               SECTION_DEBUG | SECTION_EXCLUDE,
                                               NULL);
        }
      /* For macro info and the file table we have to refer to a
         debug_line section.  */
      debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
                                        SECTION_DEBUG | SECTION_EXCLUDE, NULL);
      ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
                                   DEBUG_LINE_SECTION_LABEL,
                                   init_sections_and_labels_generation);

      debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
                                       DEBUG_STR_SECTION_FLAGS
                                       | SECTION_EXCLUDE, NULL);
      if (!dwarf_split_debug_info)
        debug_line_str_section
          = get_section (DEBUG_LTO_LINE_STR_SECTION,
                         DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
    }
  else
    {
      /* Regular (non-LTO) debug output.  */
      if (!dwarf_split_debug_info)
        {
          debug_info_section = get_section (DEBUG_INFO_SECTION,
                                            SECTION_DEBUG, NULL);
          debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
                                              SECTION_DEBUG, NULL);
          /* DWARF 5 renamed .debug_loc to .debug_loclists.  */
          debug_loc_section = get_section (dwarf_version >= 5
                                           ? DEBUG_LOCLISTS_SECTION
                                           : DEBUG_LOC_SECTION,
                                           SECTION_DEBUG, NULL);
          debug_macinfo_section_name
            = ((dwarf_strict && dwarf_version < 5)
               ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
          debug_macinfo_section = get_section (debug_macinfo_section_name,
                                               SECTION_DEBUG, NULL);
        }
      else
        {
          /* -gsplit-dwarf: most sections go into the .dwo part and are
             excluded from the final link.  */
          debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
                                            SECTION_DEBUG | SECTION_EXCLUDE,
                                            NULL);
          debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
                                              SECTION_DEBUG | SECTION_EXCLUDE,
                                              NULL);
          debug_addr_section = get_section (DEBUG_ADDR_SECTION,
                                            SECTION_DEBUG, NULL);
          debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
                                                     SECTION_DEBUG, NULL);
          debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
                                                       SECTION_DEBUG, NULL);
          ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
                                       DEBUG_SKELETON_ABBREV_SECTION_LABEL,
                                       init_sections_and_labels_generation);

          /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
             stay in the main .o, but the skeleton_line goes into the
             split off dwo.  */
          debug_skeleton_line_section
            = get_section (DEBUG_DWO_LINE_SECTION,
                           SECTION_DEBUG | SECTION_EXCLUDE, NULL);
          ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
                                       DEBUG_SKELETON_LINE_SECTION_LABEL,
                                       init_sections_and_labels_generation);
          debug_str_offsets_section
            = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
                           SECTION_DEBUG | SECTION_EXCLUDE, NULL);
          ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
                                       DEBUG_SKELETON_INFO_SECTION_LABEL,
                                       init_sections_and_labels_generation);
          debug_loc_section = get_section (dwarf_version >= 5
                                           ? DEBUG_DWO_LOCLISTS_SECTION
                                           : DEBUG_DWO_LOC_SECTION,
                                           SECTION_DEBUG | SECTION_EXCLUDE,
                                           NULL);
          debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
                                               DEBUG_STR_DWO_SECTION_FLAGS,
                                               NULL);
          debug_macinfo_section_name
            = ((dwarf_strict && dwarf_version < 5)
               ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
          debug_macinfo_section = get_section (debug_macinfo_section_name,
                                               SECTION_DEBUG | SECTION_EXCLUDE,
                                               NULL);
          if (dwarf_version >= 5)
            debug_ranges_dwo_section
              = get_section (DEBUG_DWO_RNGLISTS_SECTION,
                             SECTION_DEBUG | SECTION_EXCLUDE, NULL);
        }
      debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
                                           SECTION_DEBUG, NULL);
      debug_line_section = get_section (DEBUG_LINE_SECTION,
                                        SECTION_DEBUG, NULL);
      debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
                                            SECTION_DEBUG, NULL);
      debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
                                            SECTION_DEBUG, NULL);
      debug_str_section = get_section (DEBUG_STR_SECTION,
                                       DEBUG_STR_SECTION_FLAGS, NULL);
      if ((!dwarf_split_debug_info && !output_asm_line_debug_info ())
          || asm_outputs_debug_line_str ())
        debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
                                              DEBUG_STR_SECTION_FLAGS, NULL);

      debug_ranges_section = get_section (dwarf_version >= 5
                                          ? DEBUG_RNGLISTS_SECTION
                                          : DEBUG_RANGES_SECTION,
                                          SECTION_DEBUG, NULL);
      debug_frame_section = get_section (DEBUG_FRAME_SECTION,
                                         SECTION_DEBUG, NULL);
    }

  /* Generate the per-generation internal labels common to both
     configurations.  */
  ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
                               DEBUG_ABBREV_SECTION_LABEL,
                               init_sections_and_labels_generation);
  ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
                               DEBUG_INFO_SECTION_LABEL,
                               init_sections_and_labels_generation);
  info_section_emitted = false;
  ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
                               DEBUG_LINE_SECTION_LABEL,
                               init_sections_and_labels_generation);
  /* There are up to 6 unique ranges labels per generation.
     See also output_rnglists.  */
  ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
                               DEBUG_RANGES_SECTION_LABEL,
                               init_sections_and_labels_generation * 6);
  if (dwarf_version >= 5 && dwarf_split_debug_info)
    ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
                                 DEBUG_RANGES_SECTION_LABEL,
                                 1 + init_sections_and_labels_generation * 6);
  ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
                               DEBUG_ADDR_SECTION_LABEL,
                               init_sections_and_labels_generation);
  ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
                               (dwarf_strict && dwarf_version < 5)
                               ? DEBUG_MACINFO_SECTION_LABEL
                               : DEBUG_MACRO_SECTION_LABEL,
                               init_sections_and_labels_generation);
  ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
                               init_sections_and_labels_generation);

  /* Bump the generation and return the one that was just used.  */
  ++init_sections_and_labels_generation;
  return init_sections_and_labels_generation - 1;
}
29560 | |
29561 | /* Set up for Dwarf output at the start of compilation. */ |
29562 | |
static void
dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
{
  /* Allocate the file_table.  */
  file_table = hash_table<dwarf_file_hasher>::create_ggc (n: 50);

#ifndef DWARF2_LINENO_DEBUGGING_INFO
  /* Allocate the decl_die_table.  */
  decl_die_table = hash_table<decl_die_hasher>::create_ggc (n: 10);

  /* Allocate the decl_loc_table.  */
  decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (n: 10);

  /* Allocate the cached_dw_loc_list_table.  */
  cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (n: 10);

  /* Allocate the initial hunk of the abbrev_die_table.  */
  vec_alloc (v&: abbrev_die_table, nelems: 256);
  /* Zero-th entry is allocated, but unused.  */
  abbrev_die_table->quick_push (NULL);

  /* Allocate the dwarf_proc_stack_usage_map.  */
  dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;

  /* Allocate the pubtypes and pubnames vectors.  */
  vec_alloc (v&: pubname_table, nelems: 32);
  vec_alloc (v&: pubtype_table, nelems: 32);

  vec_alloc (v&: incomplete_types, nelems: 64);

  vec_alloc (v&: used_rtx_array, nelems: 32);

  /* The macinfo table is only needed when macro information is
     tracked, i.e. at -g3 (DINFO_LEVEL_VERBOSE).  */
  if (debug_info_level >= DINFO_LEVEL_VERBOSE)
    vec_alloc (v&: macinfo_table, nelems: 64);
#endif

  /* If front-ends already registered a main translation unit but we were not
     ready to perform the association, do this now.  */
  if (main_translation_unit != NULL_TREE)
    equate_decl_number_to_die (decl: main_translation_unit, decl_die: comp_unit_die ());
}
29604 | |
/* Called before compile () starts outputting functions, variables
   and toplevel asms into assembly.  */
29607 | |
static void
dwarf2out_assembly_start (void)
{
  /* May be called more than once; only the first call does the work
     (text_section_line_info doubles as the "already started" flag).  */
  if (text_section_line_info)
    return;

#ifndef DWARF2_LINENO_DEBUGGING_INFO
  ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
  ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
  ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
                               COLD_TEXT_SECTION_LABEL, 0);
  ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);

  /* Emit the start-of-.text label now so line/range info can refer
     to it.  */
  switch_to_section (text_section);
  ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
#endif

  /* Make sure the line number table for .text always exists.  */
  text_section_line_info = new_line_info_table ();
  text_section_line_info->end_label = text_end_label;

#ifdef DWARF2_LINENO_DEBUGGING_INFO
  cur_line_info_table = text_section_line_info;
#endif

  /* Ask the assembler to emit .debug_frame rather than .eh_frame when
     CFI is assembler-generated and no EH frame is needed.  */
  if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
      && dwarf2out_do_cfi_asm ()
      && !dwarf2out_do_eh_frame ())
    fprintf (stream: asm_out_file, format: "\t.cfi_sections\t.debug_frame\n" );

#if defined(HAVE_AS_GDWARF_5_DEBUG_FLAG) && defined(HAVE_AS_WORKING_DWARF_N_FLAG)
  if (output_asm_line_debug_info () && dwarf_version >= 5)
    {
      /* When gas outputs DWARF5 .debug_line[_str] then we have to
         tell it the comp_dir and main file name for the zero entry
         line table.  */
      const char *comp_dir, *filename0;

      comp_dir = comp_dir_string ();
      if (comp_dir == NULL)
        comp_dir = "" ;

      filename0 = get_AT_string (die: comp_unit_die (), attr_kind: DW_AT_name);
      if (filename0 == NULL)
        filename0 = "" ;

      fprintf (stream: asm_out_file, format: "\t.file 0 " );
      output_quoted_string (asm_out_file, remap_debug_filename (comp_dir));
      fputc (c: ' ', stream: asm_out_file);
      output_quoted_string (asm_out_file, remap_debug_filename (filename0));
      fputc (c: '\n', stream: asm_out_file);
    }
  else
#endif
  /* Work around for PR101575: output a dummy .file directive.  */
  if (!last_emitted_file && dwarf_debuginfo_p ()
      && debug_info_level >= DINFO_LEVEL_TERSE)
    {
      const char *filename0 = get_AT_string (die: comp_unit_die (), attr_kind: DW_AT_name);

      if (filename0 == NULL)
        filename0 = "<dummy>" ;
      maybe_emit_file (fd: lookup_filename (file_name: filename0));
    }
}
29673 | |
29674 | /* A helper function for dwarf2out_finish called through |
29675 | htab_traverse. Assign a string its index. All strings must be |
29676 | collected into the table by the time index_string is called, |
29677 | because the indexing code relies on htab_traverse to traverse nodes |
29678 | in the same order for each run. */ |
29679 | |
29680 | int |
29681 | index_string (indirect_string_node **h, unsigned int *index) |
29682 | { |
29683 | indirect_string_node *node = *h; |
29684 | |
29685 | find_string_form (node); |
29686 | if (node->form == dwarf_FORM (form: DW_FORM_strx) && node->refcount > 0) |
29687 | { |
29688 | gcc_assert (node->index == NO_INDEX_ASSIGNED); |
29689 | node->index = *index; |
29690 | *index += 1; |
29691 | } |
29692 | return 1; |
29693 | } |
29694 | |
29695 | /* A helper function for output_indirect_strings called through |
29696 | htab_traverse. Output the offset to a string and update the |
29697 | current offset. */ |
29698 | |
29699 | int |
29700 | output_index_string_offset (indirect_string_node **h, unsigned int *offset) |
29701 | { |
29702 | indirect_string_node *node = *h; |
29703 | |
29704 | if (node->form == dwarf_FORM (form: DW_FORM_strx) && node->refcount > 0) |
29705 | { |
29706 | /* Assert that this node has been assigned an index. */ |
29707 | gcc_assert (node->index != NO_INDEX_ASSIGNED |
29708 | && node->index != NOT_INDEXED); |
29709 | dw2_asm_output_data (dwarf_offset_size, *offset, |
29710 | "indexed string 0x%x: %s" , node->index, node->str); |
29711 | *offset += strlen (s: node->str) + 1; |
29712 | } |
29713 | return 1; |
29714 | } |
29715 | |
29716 | /* A helper function for dwarf2out_finish called through |
29717 | htab_traverse. Output the indexed string. */ |
29718 | |
29719 | int |
29720 | output_index_string (indirect_string_node **h, unsigned int *cur_idx) |
29721 | { |
29722 | struct indirect_string_node *node = *h; |
29723 | |
29724 | if (node->form == dwarf_FORM (form: DW_FORM_strx) && node->refcount > 0) |
29725 | { |
29726 | /* Assert that the strings are output in the same order as their |
29727 | indexes were assigned. */ |
29728 | gcc_assert (*cur_idx == node->index); |
29729 | assemble_string (node->str, strlen (s: node->str) + 1); |
29730 | *cur_idx += 1; |
29731 | } |
29732 | return 1; |
29733 | } |
29734 | |
/* A helper function for output_indirect_strings.  Counts the number
   of indexed string offsets.  Must match the logic of the functions
   output_index_string[_offset] above.  */
29738 | int |
29739 | count_index_strings (indirect_string_node **h, unsigned int *last_idx) |
29740 | { |
29741 | struct indirect_string_node *node = *h; |
29742 | |
29743 | if (node->form == dwarf_FORM (form: DW_FORM_strx) && node->refcount > 0) |
29744 | *last_idx += 1; |
29745 | return 1; |
29746 | } |
29747 | |
29748 | /* A helper function for dwarf2out_finish called through |
29749 | htab_traverse. Emit one queued .debug_str string. */ |
29750 | |
29751 | int |
29752 | output_indirect_string (indirect_string_node **h, enum dwarf_form form) |
29753 | { |
29754 | struct indirect_string_node *node = *h; |
29755 | |
29756 | node->form = find_string_form (node); |
29757 | if (node->form == form && node->refcount > 0) |
29758 | { |
29759 | ASM_OUTPUT_LABEL (asm_out_file, node->label); |
29760 | assemble_string (node->str, strlen (s: node->str) + 1); |
29761 | } |
29762 | |
29763 | return 1; |
29764 | } |
29765 | |
29766 | /* Output the indexed string table. */ |
29767 | |
static void
output_indirect_strings (void)
{
  switch_to_section (debug_str_section);
  if (!dwarf_split_debug_info)
    /* Plain DWARF: emit every queued DW_FORM_strp string with its
       label into .debug_str.  */
    debug_str_hash->traverse<enum dwarf_form,
                             output_indirect_string> (argument: DW_FORM_strp);
  else
    {
      unsigned int offset = 0;
      unsigned int cur_idx = 0;

      /* Strings referenced from the skeleton CU still go into the
         regular .debug_str section.  */
      if (skeleton_debug_str_hash)
        skeleton_debug_str_hash->traverse<enum dwarf_form,
                                          output_indirect_string> (argument: DW_FORM_strp);

      switch_to_section (debug_str_offsets_section);
      /* For DWARF5 the .debug_str_offsets[.dwo] section needs a unit
         header.  Note that we don't need to generate a label to the
         actual index table following the header here, because this is
         for the split dwarf case only.  In an .dwo file there is only
         one string offsets table (and one debug info section).  But
         if we would start using string offset tables for the main (or
         skeleton) unit, then we have to add a DW_AT_str_offsets_base
         pointing to the actual index after the header.  Split dwarf
         units will never have a string offsets base attribute.  When
         a split unit is moved into a .dwp file the string offsets can
         be found through the .debug_cu_index section table.  */
      if (dwarf_version >= 5)
        {
          unsigned int last_idx = 0;
          unsigned long str_offsets_length;

          /* Count the indexed strings to compute the unit length:
             one offset each plus 4 bytes of version/padding.  */
          debug_str_hash->traverse_noresize
            <unsigned int *, count_index_strings> (argument: &last_idx);
          str_offsets_length = last_idx * dwarf_offset_size + 4;
          if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
            dw2_asm_output_data (4, 0xffffffff,
                                 "Escape value for 64-bit DWARF extension" );
          dw2_asm_output_data (dwarf_offset_size, str_offsets_length,
                               "Length of string offsets unit" );
          dw2_asm_output_data (2, 5, "DWARF string offsets version" );
          dw2_asm_output_data (2, 0, "Header zero padding" );
        }
      /* First the offsets table, then the strings themselves in the
         .dwo string section, in matching index order.  */
      debug_str_hash->traverse_noresize
        <unsigned int *, output_index_string_offset> (argument: &offset);
      switch_to_section (debug_str_dwo_section);
      debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
        (argument: &cur_idx);
    }
}
29819 | |
29820 | /* Callback for htab_traverse to assign an index to an entry in the |
29821 | table, and to write that entry to the .debug_addr section. */ |
29822 | |
int
output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
{
  addr_table_entry *entry = *slot;

  /* Entries that ended up unreferenced get neither an index nor any
     output.  */
  if (entry->refcount == 0)
    {
      gcc_assert (entry->index == NO_INDEX_ASSIGNED
                  || entry->index == NOT_INDEXED);
      return 1;
    }

  /* Indexes were assigned in traversal order, so they must come out
     sequentially here.  */
  gcc_assert (entry->index == *cur_index);
  (*cur_index)++;

  switch (entry->kind)
    {
    case ate_kind_rtx:
      dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
                               "0x%x" , entry->index);
      break;
    case ate_kind_rtx_dtprel:
      /* DTP-relative addresses are emitted via the target hook.  */
      gcc_assert (targetm.asm_out.output_dwarf_dtprel);
      targetm.asm_out.output_dwarf_dtprel (asm_out_file,
                                           DWARF2_ADDR_SIZE,
                                           entry->addr.rtl);
      fputc (c: '\n', stream: asm_out_file);
      break;
    case ate_kind_label:
      dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
                           "0x%x" , entry->index);
      break;
    default:
      gcc_unreachable ();
    }
  return 1;
}
29860 | |
/* A helper function for dwarf2out_finish.  Counts the number
   of indexed addresses.  Must match the logic of the function
   output_addr_table_entry above.  */
29864 | int |
29865 | count_index_addrs (addr_table_entry **slot, unsigned int *last_idx) |
29866 | { |
29867 | addr_table_entry *entry = *slot; |
29868 | |
29869 | if (entry->refcount > 0) |
29870 | *last_idx += 1; |
29871 | return 1; |
29872 | } |
29873 | |
29874 | /* Produce the .debug_addr section. */ |
29875 | |
static void
output_addr_table (void)
{
  unsigned int index = 0;
  /* Without any recorded address entries there is nothing to emit.  */
  if (addr_index_table == NULL || addr_index_table->size () == 0)
    return;

  switch_to_section (debug_addr_section);
  /* GNU DebugFission https://gcc.gnu.org/wiki/DebugFission
     which GCC uses to implement -gsplit-dwarf as DWARF GNU extension
     before DWARF5, didn't have a header for .debug_addr units.
     DWARF5 specifies a small header when address tables are used.  */
  if (dwarf_version >= 5)
    {
      unsigned int last_idx = 0;
      unsigned long addrs_length;

      /* Count the live entries to compute the unit length field:
         one address each plus 4 bytes of version/size fields.  */
      addr_index_table->traverse_noresize
        <unsigned int *, count_index_addrs> (argument: &last_idx);
      addrs_length = last_idx * DWARF2_ADDR_SIZE + 4;

      if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
        dw2_asm_output_data (4, 0xffffffff,
                             "Escape value for 64-bit DWARF extension" );
      dw2_asm_output_data (dwarf_offset_size, addrs_length,
                           "Length of Address Unit" );
      dw2_asm_output_data (2, 5, "DWARF addr version" );
      dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address" );
      dw2_asm_output_data (1, 0, "Size of Segment Descriptor" );
    }
  /* The base label follows the header; DW_AT_addr_base-style
     references point here.  */
  ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);

  addr_index_table
    ->traverse_noresize<unsigned int *, output_addr_table_entry> (argument: &index);
}
29911 | |
29912 | #if ENABLE_ASSERT_CHECKING |
29913 | /* Verify that all marks are clear. */ |
29914 | |
29915 | static void |
29916 | verify_marks_clear (dw_die_ref die) |
29917 | { |
29918 | dw_die_ref c; |
29919 | |
29920 | gcc_assert (! die->die_mark); |
29921 | FOR_EACH_CHILD (die, c, verify_marks_clear (c)); |
29922 | } |
29923 | #endif /* ENABLE_ASSERT_CHECKING */ |
29924 | |
29925 | /* Clear the marks for a die and its children. |
29926 | Be cool if the mark isn't set. */ |
29927 | |
29928 | static void |
29929 | prune_unmark_dies (dw_die_ref die) |
29930 | { |
29931 | dw_die_ref c; |
29932 | |
29933 | if (die->die_mark) |
29934 | die->die_mark = 0; |
29935 | FOR_EACH_CHILD (die, c, prune_unmark_dies (c)); |
29936 | } |
29937 | |
29938 | /* Given LOC that is referenced by a DIE we're marking as used, find all |
29939 | referenced DWARF procedures it references and mark them as used. */ |
29940 | |
static void
prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
{
  /* Walk the location expression one operation at a time.  */
  for (; loc != NULL; loc = loc->dw_loc_next)
    switch (loc->dw_loc_opc)
      {
      /* Operations whose first operand MAY be a DIE reference.  */
      case DW_OP_implicit_pointer:
      case DW_OP_convert:
      case DW_OP_reinterpret:
      case DW_OP_GNU_implicit_pointer:
      case DW_OP_GNU_convert:
      case DW_OP_GNU_reinterpret:
	if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
	  prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
	break;
      case DW_OP_GNU_variable_value:
	if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
	  {
	    /* The operand still refers to a decl; resolve it to the
	       decl's DIE in place so the fallthrough below can mark it.
	       If no DIE exists yet, leave the operand unresolved.  */
	    dw_die_ref ref
	      = lookup_decl_die (decl: loc->dw_loc_oprnd1.v.val_decl_ref);
	    if (ref == NULL)
	      break;
	    loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
	    loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
	    loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
	  }
	/* FALLTHRU */
      /* Operations whose first operand is ALWAYS a DIE reference.  */
      case DW_OP_call2:
      case DW_OP_call4:
      case DW_OP_call_ref:
      case DW_OP_const_type:
      case DW_OP_GNU_const_type:
      case DW_OP_GNU_parameter_ref:
	gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
	prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
	break;
      /* Operations carrying the base-type DIE in the second operand.  */
      case DW_OP_regval_type:
      case DW_OP_deref_type:
      case DW_OP_GNU_regval_type:
      case DW_OP_GNU_deref_type:
	gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
	prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
	break;
      /* Entry values carry a nested location expression; recurse.  */
      case DW_OP_entry_value:
      case DW_OP_GNU_entry_value:
	gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
	prune_unused_types_walk_loc_descr (loc: loc->dw_loc_oprnd1.v.val_loc);
	break;
      default:
	break;
      }
}
29993 | |
29994 | /* Given DIE that we're marking as used, find any other dies |
29995 | it references as attributes and mark them as used. */ |
29996 | |
static void
prune_unused_types_walk_attribs (dw_die_ref die)
{
  dw_attr_node *a;
  unsigned ix;

  /* Scan every attribute of DIE and mark whatever it references.  */
  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    {
      switch (AT_class (a))
	{
	/* Make sure DWARF procedures referenced by location descriptions will
	   get emitted.  */
	case dw_val_class_loc:
	  prune_unused_types_walk_loc_descr (loc: AT_loc (a));
	  break;
	case dw_val_class_loc_list:
	  /* Same, but for every expression of the location list.  */
	  for (dw_loc_list_ref list = AT_loc_list (a);
	       list != NULL;
	       list = list->dw_loc_next)
	    prune_unused_types_walk_loc_descr (loc: list->expr);
	  break;

	case dw_val_class_view_list:
	  /* This points to a loc_list in another attribute, so it's
	     already covered.  */
	  break;

	case dw_val_class_die_ref:
	  /* A reference to another DIE.
	     Make sure that it will get emitted.
	     If it was broken out into a comdat group, don't follow it.  */
	  if (! AT_ref (a)->comdat_type_p
	      || a->dw_attr == DW_AT_specification)
	    prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
	  break;

	case dw_val_class_str:
	  /* Set the string's refcount to 0 so that prune_unused_types_mark
	     accounts properly for it.  */
	  a->dw_attr_val.v.val_str->refcount = 0;
	  break;

	default:
	  break;
	}
    }
}
30044 | |
30045 | /* Mark the generic parameters and arguments children DIEs of DIE. */ |
30046 | |
static void
prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
{
  dw_die_ref c;

  /* Nothing to do for a missing DIE or one with no children.  */
  if (die == NULL || die->die_child == NULL)
    return;
  /* Children live on a circular die_sib list; walk it once, starting
     at die_child, and mark every template parameter DIE found.  */
  c = die->die_child;
  do
    {
      if (is_template_parameter (die: c))
	prune_unused_types_mark (c, 1);
      c = c->die_sib;
    } while (c && c != die->die_child);
}
30062 | |
30063 | /* Mark DIE as being used. If DOKIDS is true, then walk down |
30064 | to DIE's children. */ |
30065 | |
static void
prune_unused_types_mark (dw_die_ref die, int dokids)
{
  /* die_mark protocol here: 0 = not yet visited, 1 = marked as used
     but children not walked, 2 = marked and children walked.  */
  dw_die_ref c;

  if (die->die_mark == 0)
    {
      /* We haven't done this node yet.  Mark it as used.  */
      die->die_mark = 1;
      /* If this is the DIE of a generic type instantiation,
	 mark the children DIEs that describe its generic parms and
	 args.  */
      prune_unused_types_mark_generic_parms_dies (die);

      /* We also have to mark its parents as used.
	 (But we don't want to mark our parent's kids due to this,
	 unless it is a class.)  */
      if (die->die_parent)
	prune_unused_types_mark (die: die->die_parent,
				 dokids: class_scope_p (context_die: die->die_parent));

      /* Mark any referenced nodes.  */
      prune_unused_types_walk_attribs (die);

      /* If this node is a specification,
	 also mark the definition, if it exists.  */
      if (get_AT_flag (die, attr_kind: DW_AT_declaration) && die->die_definition)
	prune_unused_types_mark (die: die->die_definition, dokids: 1);
    }

  if (dokids && die->die_mark != 2)
    {
      /* We need to walk the children, but haven't done so yet.
	 Remember that we've walked the kids.  */
      die->die_mark = 2;

      /* If this is an array type, we need to make sure our
	 kids get marked, even if they're types.  If we're
	 breaking out types into comdat sections, do this
	 for all type definitions.  */
      if (die->die_tag == DW_TAG_array_type
	  || (use_debug_types
	      && is_type_die (die) && ! is_declaration_die (die)))
	FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
      else
	FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
    }
}
30114 | |
30115 | /* For local classes, look if any static member functions were emitted |
30116 | and if so, mark them. */ |
30117 | |
static void
prune_unused_types_walk_local_classes (dw_die_ref die)
{
  dw_die_ref c;

  /* die_mark == 2 means this subtree was already fully processed.  */
  if (die->die_mark == 2)
    return;

  switch (die->die_tag)
    {
    /* Class-like containers: fall through to the child walk below.  */
    case DW_TAG_structure_type:
    case DW_TAG_union_type:
    case DW_TAG_class_type:
    case DW_TAG_interface_type:
      break;

    case DW_TAG_subprogram:
      /* Mark a member function if it is a definition, or a declaration
	 that has an associated definition DIE.  */
      if (!get_AT_flag (die, attr_kind: DW_AT_declaration)
	  || die->die_definition != NULL)
	prune_unused_types_mark (die, dokids: 1);
      return;

    default:
      /* Anything else is of no interest here.  */
      return;
    }

  /* Mark children.  */
  FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
}
30147 | |
30148 | /* Walk the tree DIE and mark types that we actually use. */ |
30149 | |
static void
prune_unused_types_walk (dw_die_ref die)
{
  dw_die_ref c;

  /* Don't do anything if this node is already marked and
     children have been marked as well.  */
  if (die->die_mark == 2)
    return;

  switch (die->die_tag)
    {
    case DW_TAG_structure_type:
    case DW_TAG_union_type:
    case DW_TAG_class_type:
    case DW_TAG_interface_type:
      /* Perennial types must survive pruning; mark below.  */
      if (die->die_perennial_p)
	break;

      /* Check whether this class is nested inside a subprogram,
	 i.e. whether it is a local class.  */
      for (c = die->die_parent; c; c = c->die_parent)
	if (c->die_tag == DW_TAG_subprogram)
	  break;

      /* Finding used static member functions inside of classes
	 is needed just for local classes, because for other classes
	 static member function DIEs with DW_AT_specification
	 are emitted outside of the DW_TAG_*_type.  If we ever change
	 it, we'd need to call this even for non-local classes.  */
      if (c)
	prune_unused_types_walk_local_classes (die);

      /* It's a type node --- don't mark it.  */
      return;

    case DW_TAG_const_type:
    case DW_TAG_packed_type:
    case DW_TAG_pointer_type:
    case DW_TAG_reference_type:
    case DW_TAG_rvalue_reference_type:
    case DW_TAG_volatile_type:
    case DW_TAG_restrict_type:
    case DW_TAG_shared_type:
    case DW_TAG_atomic_type:
    case DW_TAG_immutable_type:
    case DW_TAG_typedef:
    case DW_TAG_array_type:
    case DW_TAG_coarray_type:
    case DW_TAG_friend:
    case DW_TAG_enumeration_type:
    case DW_TAG_subroutine_type:
    case DW_TAG_string_type:
    case DW_TAG_set_type:
    case DW_TAG_subrange_type:
    case DW_TAG_ptr_to_member_type:
    case DW_TAG_file_type:
    case DW_TAG_unspecified_type:
    case DW_TAG_dynamic_type:
      /* Type nodes are useful only when other DIEs reference them --- don't
	 mark them.  */
      /* FALLTHROUGH */

    case DW_TAG_dwarf_procedure:
      /* Likewise for DWARF procedures.  */

      /* ... except perennial ones, which are marked below.  */
      if (die->die_perennial_p)
	break;

      return;

    case DW_TAG_variable:
      if (flag_debug_only_used_symbols)
	{
	  if (die->die_perennial_p)
	    break;

	  /* For static data members, the declaration in the class is supposed
	     to have DW_TAG_member tag in DWARF{3,4} but DW_TAG_variable in
	     DWARF5.  DW_TAG_member will be marked, so mark even such
	     DW_TAG_variables in DWARF5, as long as it has DW_AT_const_value
	     attribute.  */
	  if (dwarf_version >= 5
	      && class_scope_p (context_die: die->die_parent)
	      && get_AT (die, attr_kind: DW_AT_const_value))
	    break;

	  /* premark_used_variables marks external variables --- don't mark
	     them here.  But function-local externals are always considered
	     used.  */
	  if (get_AT (die, attr_kind: DW_AT_external))
	    {
	      for (c = die->die_parent; c; c = c->die_parent)
		if (c->die_tag == DW_TAG_subprogram)
		  break;
	      if (!c)
		return;
	    }
	}
      /* FALLTHROUGH */

    default:
      /* Mark everything else.  */
      break;
    }

  if (die->die_mark == 0)
    {
      die->die_mark = 1;

      /* Now, mark any dies referenced from here.  */
      prune_unused_types_walk_attribs (die);
    }

  /* Record that children have been walked too.  */
  die->die_mark = 2;

  /* Mark children.  */
  FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
}
30267 | |
30268 | /* Increment the string counts on strings referred to from DIE's |
30269 | attributes. */ |
30270 | |
static void
prune_unused_types_update_strings (dw_die_ref die)
{
  dw_attr_node *a;
  unsigned ix;

  /* Bump the refcount for every string attribute; refcounts were reset
     to zero by prune_unused_types_walk_attribs.  */
  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    if (AT_class (a) == dw_val_class_str)
      {
	struct indirect_string_node *s = a->dw_attr_val.v.val_str;
	s->refcount++;
	/* Avoid unnecessarily putting strings that are used less than
	   twice in the hash table.  With a mergeable string section the
	   threshold is one use, otherwise two; .debug_line_str strings
	   are never re-added here.  */
	if (s->form != DW_FORM_line_strp
	    && (s->refcount
		== ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2)))
	  {
	    indirect_string_node **slot
	      = debug_str_hash->find_slot_with_hash (comparable: s->str,
						     hash: htab_hash_string (s->str),
						     insert: INSERT);
	    /* The table was emptied before pruning, so the string must
	       not already be present.  */
	    gcc_assert (*slot == NULL);
	    *slot = s;
	  }
      }
}
30297 | |
30298 | /* Mark DIE and its children as removed. */ |
30299 | |
30300 | static void |
30301 | mark_removed (dw_die_ref die) |
30302 | { |
30303 | dw_die_ref c; |
30304 | die->removed = true; |
30305 | FOR_EACH_CHILD (die, c, mark_removed (c)); |
30306 | } |
30307 | |
30308 | /* Remove from the tree DIE any dies that aren't marked. */ |
30309 | |
static void
prune_unused_types_prune (dw_die_ref die)
{
  dw_die_ref c;

  /* The caller only recurses into marked dies.  */
  gcc_assert (die->die_mark);
  prune_unused_types_update_strings (die);

  if (! die->die_child)
    return;

  /* Children form a circular die_sib list with die_child pointing into
     it; PREV trails C so unmarked dies can be unlinked in place.  */
  c = die->die_child;
  do {
    dw_die_ref prev = c, next;
    /* Skip (and unlink) the run of unmarked dies following PREV.  */
    for (c = c->die_sib; ! c->die_mark; c = next)
      if (c == die->die_child)
	{
	  /* No marked children between 'prev' and the end of the list.  */
	  if (prev == c)
	    /* No marked children at all.  */
	    die->die_child = NULL;
	  else
	    {
	      prev->die_sib = c->die_sib;
	      die->die_child = prev;
	    }
	  /* Detach the removed run before marking it, so mark_removed
	     doesn't wander back into kept dies.  */
	  c->die_sib = NULL;
	  mark_removed (die: c);
	  return;
	}
      else
	{
	  next = c->die_sib;
	  c->die_sib = NULL;
	  mark_removed (die: c);
	}

    /* Splice the surviving die C in after PREV and recurse into it.  */
    if (c != prev->die_sib)
      prev->die_sib = c;
    prune_unused_types_prune (die: c);
  } while (c != die->die_child);
}
30352 | |
30353 | /* Remove dies representing declarations that we never use. */ |
30354 | |
static void
prune_unused_types (void)
{
  unsigned int i;
  limbo_die_node *node;
  comdat_type_node *ctnode;
  pubname_entry *pub;
  dw_die_ref base_type;

#if ENABLE_ASSERT_CHECKING
  /* All the marks should already be clear.  */
  verify_marks_clear (die: comp_unit_die ());
  for (node = limbo_die_list; node; node = node->next)
    verify_marks_clear (die: node->die);
  for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
    verify_marks_clear (die: ctnode->root_die);
#endif /* ENABLE_ASSERT_CHECKING */

  /* Mark types that are used in global variables.  */
  premark_types_used_by_global_vars ();

  /* Mark variables used in the symtab.  */
  if (flag_debug_only_used_symbols)
    premark_used_variables ();

  /* Set the mark on nodes that are actually used.  Walk the main CU,
     every limbo die, and every comdat type unit root.  */
  prune_unused_types_walk (die: comp_unit_die ());
  for (node = limbo_die_list; node; node = node->next)
    prune_unused_types_walk (die: node->die);
  for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
    {
      prune_unused_types_walk (die: ctnode->root_die);
      prune_unused_types_mark (die: ctnode->type_die, dokids: 1);
    }

  /* Also set the mark on nodes referenced from the pubname_table.  Enumerators
     are unusual in that they are pubnames that are the children of pubtypes.
     They should only be marked via their parent DW_TAG_enumeration_type die,
     not as roots in themselves.  */
  FOR_EACH_VEC_ELT (*pubname_table, i, pub)
    if (pub->die->die_tag != DW_TAG_enumerator)
      prune_unused_types_mark (die: pub->die, dokids: 1);
  for (i = 0; base_types.iterate (ix: i, ptr: &base_type); i++)
    prune_unused_types_mark (die: base_type, dokids: 1);

  /* Also set the mark on nodes that could be referenced by
     DW_TAG_call_site DW_AT_call_origin (i.e. direct call callees) or
     by DW_TAG_inlined_subroutine origins.  */
  cgraph_node *cnode;
  FOR_EACH_FUNCTION (cnode)
    if (cnode->referred_to_p (include_self: false))
      {
	dw_die_ref die = lookup_decl_die (decl: cnode->decl);
	if (die == NULL || die->die_mark)
	  continue;
	/* Only mark functions with at least one caller other than
	   themselves (self-recursion alone doesn't count).  */
	for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
	  if (e->caller != cnode)
	    {
	      prune_unused_types_mark (die, dokids: 1);
	      break;
	    }
      }

  /* Empty the string hashes so prune_unused_types_update_strings can
     rebuild them from the surviving dies only.  */
  if (debug_str_hash)
    debug_str_hash->empty ();
  if (skeleton_debug_str_hash)
    skeleton_debug_str_hash->empty ();
  prune_unused_types_prune (die: comp_unit_die ());
  for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
    {
      node = *pnode;
      /* Drop limbo nodes whose die got pruned away entirely.  */
      if (!node->die->die_mark)
	*pnode = node->next;
      else
	{
	  prune_unused_types_prune (die: node->die);
	  pnode = &node->next;
	}
    }
  for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
    prune_unused_types_prune (die: ctnode->root_die);

  /* Leave the marks clear.  */
  prune_unmark_dies (die: comp_unit_die ());
  for (node = limbo_die_list; node; node = node->next)
    prune_unmark_dies (die: node->die);
  for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
    prune_unmark_dies (die: ctnode->root_die);
}
30444 | |
/* Helpers to manipulate hash table of comdat type units.  Nodes are
   keyed by their DWARF type signature.  */

struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
{
  static inline hashval_t hash (const comdat_type_node *);
  static inline bool equal (const comdat_type_node *, const comdat_type_node *);
};

/* Hash TYPE_NODE by reinterpreting the leading sizeof (hashval_t)
   bytes of its type signature as the hash value.  */

inline hashval_t
comdat_type_hasher::hash (const comdat_type_node *type_node)
{
  hashval_t h;
  memcpy (dest: &h, src: type_node->signature, n: sizeof (h));
  return h;
}

/* Two comdat type nodes compare equal iff their full signatures
   match byte for byte.  */

inline bool
comdat_type_hasher::equal (const comdat_type_node *type_node_1,
                           const comdat_type_node *type_node_2)
{
  return (! memcmp (s1: type_node_1->signature, s2: type_node_2->signature,
                    DWARF_TYPE_SIGNATURE_SIZE));
}
30468 | |
30469 | /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref |
30470 | to the location it would have been added, should we know its |
30471 | DECL_ASSEMBLER_NAME when we added other attributes. This will |
30472 | probably improve compactness of debug info, removing equivalent |
30473 | abbrevs, and hide any differences caused by deferring the |
30474 | computation of the assembler name, triggered by e.g. PCH. */ |
30475 | |
static inline void
move_linkage_attr (dw_die_ref die)
{
  /* The linkage-name attribute was just pushed, so it is the last
     element of the attribute vector.  */
  unsigned ix = vec_safe_length (v: die->die_attr);
  dw_attr_node linkage = (*die->die_attr)[ix - 1];

  gcc_assert (linkage.dw_attr == DW_AT_linkage_name
	      || linkage.dw_attr == DW_AT_MIPS_linkage_name);

  /* Scan backwards for the slot right after DW_AT_decl_line,
     DW_AT_decl_column or DW_AT_name — where the attribute would have
     landed had the assembler name been known earlier.  */
  while (--ix > 0)
    {
      dw_attr_node *prev = &(*die->die_attr)[ix - 1];

      if (prev->dw_attr == DW_AT_decl_line
	  || prev->dw_attr == DW_AT_decl_column
	  || prev->dw_attr == DW_AT_name)
	break;
    }

  /* Relocate only if the found slot differs from the current (last)
     position.  */
  if (ix != vec_safe_length (v: die->die_attr) - 1)
    {
      die->die_attr->pop ();
      die->die_attr->quick_insert (ix, obj: linkage);
    }
}
30501 | |
30502 | /* Helper function for resolve_addr, mark DW_TAG_base_type nodes |
30503 | referenced from typed stack ops and count how often they are used. */ |
30504 | |
static void
mark_base_types (dw_loc_descr_ref loc)
{
  dw_die_ref base_type = NULL;

  for (; loc; loc = loc->dw_loc_next)
    {
      switch (loc->dw_loc_opc)
	{
	/* Typed register/memory accesses carry the base type DIE in
	   the second operand.  */
	case DW_OP_regval_type:
	case DW_OP_deref_type:
	case DW_OP_GNU_regval_type:
	case DW_OP_GNU_deref_type:
	  base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
	  break;
	case DW_OP_convert:
	case DW_OP_reinterpret:
	case DW_OP_GNU_convert:
	case DW_OP_GNU_reinterpret:
	  /* Conversions to the generic type encode an unsigned constant
	     instead of a base type DIE; nothing to mark then.  */
	  if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
	    continue;
	  /* FALLTHRU */
	case DW_OP_const_type:
	case DW_OP_GNU_const_type:
	  base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
	  break;
	case DW_OP_entry_value:
	case DW_OP_GNU_entry_value:
	  /* Recurse into the nested expression.  */
	  mark_base_types (loc: loc->dw_loc_oprnd1.v.val_loc);
	  continue;
	default:
	  continue;
	}
      gcc_assert (base_type->die_parent == comp_unit_die ());
      /* die_mark doubles as the usage counter here; push each base
	 type into BASE_TYPES on first sight only.  */
      if (base_type->die_mark)
	base_type->die_mark++;
      else
	{
	  base_types.safe_push (obj: base_type);
	  base_type->die_mark = 1;
	}
    }
}
30548 | |
30549 | /* Stripped-down variant of resolve_addr, mark DW_TAG_base_type nodes |
30550 | referenced from typed stack ops and count how often they are used. */ |
30551 | |
static void
mark_base_types (dw_die_ref die)
{
  dw_die_ref c;
  dw_attr_node *a;
  dw_loc_list_ref *curr;
  unsigned ix;

  /* Run the location-expression overload over every location-bearing
     attribute of DIE.  */
  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    switch (AT_class (a))
      {
      case dw_val_class_loc_list:
	curr = AT_loc_list_ptr (a);
	while (*curr)
	  {
	    mark_base_types (loc: (*curr)->expr);
	    curr = &(*curr)->dw_loc_next;
	  }
	break;

      case dw_val_class_loc:
	mark_base_types (loc: AT_loc (a));
	break;

      default:
	break;
      }

  /* Recurse over the whole subtree.  */
  FOR_EACH_CHILD (die, c, mark_base_types (c));
}
30582 | |
30583 | /* Comparison function for sorting marked base types. */ |
30584 | |
/* qsort callback: order base type dies by decreasing usage count
   (die_mark), then decreasing byte size, encoding and alignment, so
   the most-used types end up first and ties sort deterministically.  */
static int
base_type_cmp (const void *x, const void *y)
{
  dw_die_ref dx = *(const dw_die_ref *) x;
  dw_die_ref dy = *(const dw_die_ref *) y;
  unsigned int byte_size1, byte_size2;
  unsigned int encoding1, encoding2;
  unsigned int align1, align2;
  /* Primary key: usage count accumulated in die_mark, descending.  */
  if (dx->die_mark > dy->die_mark)
    return -1;
  if (dx->die_mark < dy->die_mark)
    return 1;
  byte_size1 = get_AT_unsigned (die: dx, attr_kind: DW_AT_byte_size);
  byte_size2 = get_AT_unsigned (die: dy, attr_kind: DW_AT_byte_size);
  if (byte_size1 < byte_size2)
    return 1;
  if (byte_size1 > byte_size2)
    return -1;
  encoding1 = get_AT_unsigned (die: dx, attr_kind: DW_AT_encoding);
  encoding2 = get_AT_unsigned (die: dy, attr_kind: DW_AT_encoding);
  if (encoding1 < encoding2)
    return 1;
  if (encoding1 > encoding2)
    return -1;
  align1 = get_AT_unsigned (die: dx, attr_kind: DW_AT_alignment);
  align2 = get_AT_unsigned (die: dy, attr_kind: DW_AT_alignment);
  if (align1 < align2)
    return 1;
  if (align1 > align2)
    return -1;
  return 0;
}
30617 | |
30618 | /* Move base types marked by mark_base_types as early as possible |
30619 | in the CU, sorted by decreasing usage count both to make the |
30620 | uleb128 references as small as possible and to make sure they |
30621 | will have die_offset already computed by calc_die_sizes when |
30622 | sizes of typed stack loc ops is computed. */ |
30623 | |
static void
move_marked_base_types (void)
{
  unsigned int i;
  dw_die_ref base_type, die, c;

  if (base_types.is_empty ())
    return;

  /* Sort by decreasing usage count, they will be added again in that
     order later on.  */
  base_types.qsort (base_type_cmp);
  /* First pass: unlink every marked base type (die_mark != 0) from the
     CU's circular child list.  */
  die = comp_unit_die ();
  c = die->die_child;
  do
    {
      dw_die_ref prev = c;
      c = c->die_sib;
      while (c->die_mark)
	{
	  remove_child_with_prev (child: c, prev);
	  /* As base types got marked, there must be at least
	     one node other than DW_TAG_base_type.  */
	  gcc_assert (die->die_child != NULL);
	  c = prev->die_sib;
	}
    }
  while (c != die->die_child);
  gcc_assert (die->die_child);
  /* Second pass: re-insert the sorted base types at the front of the
     CU (right after die_child, which points to the last child in the
     circular list), clearing the usage-count marks along the way.  */
  c = die->die_child;
  for (i = 0; base_types.iterate (ix: i, ptr: &base_type); i++)
    {
      base_type->die_mark = 0;
      base_type->die_sib = c->die_sib;
      c->die_sib = base_type;
      c = base_type;
    }
}
30662 | |
30663 | /* Helper function for resolve_addr, attempt to resolve |
30664 | one CONST_STRING, return true if successful. Similarly verify that |
30665 | SYMBOL_REFs refer to variables emitted in the current CU. */ |
30666 | |
static bool
resolve_one_addr (rtx *addr)
{
  rtx rtl = *addr;

  if (GET_CODE (rtl) == CONST_STRING)
    {
      /* Rebuild the STRING_CST (including the terminating NUL) and
	 look up its constant pool entry; fail if it was never
	 emitted.  */
      size_t len = strlen (XSTR (rtl, 0)) + 1;
      tree t = build_string (len, XSTR (rtl, 0));
      tree tlen = size_int (len - 1);
      TREE_TYPE (t)
	= build_array_type (char_type_node, build_index_type (tlen));
      rtl = lookup_constant_def (t);
      if (!rtl || !MEM_P (rtl))
	return false;
      rtl = XEXP (rtl, 0);
      if (GET_CODE (rtl) == SYMBOL_REF
	  && SYMBOL_REF_DECL (rtl)
	  && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
	return false;
      /* Remember the symbol so it is kept alive for debug output.  */
      vec_safe_push (v&: used_rtx_array, obj: rtl);
      *addr = rtl;
      return true;
    }

  if (GET_CODE (rtl) == SYMBOL_REF
      && SYMBOL_REF_DECL (rtl))
    {
      if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
	{
	  /* Constant pool entries check the initializer's emission.  */
	  if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
	    return false;
	}
      else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
	return false;
    }

  if (GET_CODE (rtl) == CONST)
    {
      /* Verify every sub-expression of the CONST recursively.  */
      subrtx_ptr_iterator::array_type array;
      FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
	if (!resolve_one_addr (addr: *iter))
	  return false;
    }

  return true;
}
30714 | |
30715 | /* For STRING_CST, return SYMBOL_REF of its constant pool entry, |
30716 | if possible, and create DW_TAG_dwarf_procedure that can be referenced |
30717 | from DW_OP_implicit_pointer if the string hasn't been seen yet. */ |
30718 | |
static rtx
string_cst_pool_decl (tree t)
{
  /* Force the string into the constant pool (defer == 1).  */
  rtx rtl = output_constant_def (t, 1);
  unsigned char *array;
  dw_loc_descr_ref l;
  tree decl;
  size_t len;
  dw_die_ref ref;

  if (!rtl || !MEM_P (rtl))
    return NULL_RTX;
  rtl = XEXP (rtl, 0);
  if (GET_CODE (rtl) != SYMBOL_REF
      || SYMBOL_REF_DECL (rtl) == NULL_TREE)
    return NULL_RTX;

  decl = SYMBOL_REF_DECL (rtl);
  if (!lookup_decl_die (decl))
    {
      /* First time we see this string: create a DW_TAG_dwarf_procedure
	 whose DW_AT_location is a DW_OP_implicit_value holding a GC'ed
	 copy of the string bytes, and associate it with DECL.  */
      len = TREE_STRING_LENGTH (t);
      vec_safe_push (v&: used_rtx_array, obj: rtl);
      ref = new_die (tag_value: DW_TAG_dwarf_procedure, parent_die: comp_unit_die (), t: decl);
      array = ggc_vec_alloc<unsigned char> (c: len);
      memcpy (dest: array, TREE_STRING_POINTER (t), n: len);
      l = new_loc_descr (op: DW_OP_implicit_value, oprnd1: len, oprnd2: 0);
      l->dw_loc_oprnd2.val_class = dw_val_class_vec;
      l->dw_loc_oprnd2.v.val_vec.length = len;
      l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
      l->dw_loc_oprnd2.v.val_vec.array = array;
      add_AT_loc (die: ref, attr_kind: DW_AT_location, loc: l);
      equate_decl_number_to_die (decl, decl_die: ref);
    }
  return rtl;
}
30754 | |
30755 | /* Helper function of resolve_addr_in_expr. LOC is |
30756 | a DW_OP_addr followed by DW_OP_stack_value, either at the start |
30757 | of exprloc or after DW_OP_{,bit_}piece, and val_addr can't be |
30758 | resolved. Replace it (both DW_OP_addr and DW_OP_stack_value) |
30759 | with DW_OP_implicit_pointer if possible |
30760 | and return true, if unsuccessful, return false. */ |
30761 | |
static bool
optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
{
  rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
  HOST_WIDE_INT offset = 0;
  dw_die_ref ref = NULL;
  tree decl;

  /* Peel a (const (plus SYM N)) into SYM plus byte OFFSET, which
     becomes the implicit pointer's offset operand.  */
  if (GET_CODE (rtl) == CONST
      && GET_CODE (XEXP (rtl, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
    {
      offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
      rtl = XEXP (XEXP (rtl, 0), 0);
    }
  if (GET_CODE (rtl) == CONST_STRING)
    {
      /* Rebuild the STRING_CST and get (or create) a dwarf procedure
	 holding its bytes, so we have a DIE to point into.  */
      size_t len = strlen (XSTR (rtl, 0)) + 1;
      tree t = build_string (len, XSTR (rtl, 0));
      tree tlen = size_int (len - 1);

      TREE_TYPE (t)
	= build_array_type (char_type_node, build_index_type (tlen));
      rtl = string_cst_pool_decl (t);
      if (!rtl)
	return false;
    }
  if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
    {
      decl = SYMBOL_REF_DECL (rtl);
      if (VAR_P (decl) && !DECL_EXTERNAL (decl))
	{
	  ref = lookup_decl_die (decl);
	  /* Only rewrite when the variable's DIE actually carries a
	     location or constant value to point at.  */
	  if (ref && (get_AT (die: ref, attr_kind: DW_AT_location)
		      || get_AT (die: ref, attr_kind: DW_AT_const_value)))
	    {
	      /* Rewrite DW_OP_addr into DW_OP_implicit_pointer (REF,
		 OFFSET) in place, and drop the following
		 DW_OP_stack_value from the chain.  */
	      loc->dw_loc_opc = dwarf_OP (op: DW_OP_implicit_pointer);
	      loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
	      loc->dw_loc_oprnd1.val_entry = NULL;
	      loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
	      loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
	      loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
	      loc->dw_loc_oprnd2.v.val_int = offset;
	      return true;
	    }
	}
    }
  return false;
}
30811 | |
30812 | /* Helper function for resolve_addr, handle one location |
30813 | expression, return false if at least one CONST_STRING or SYMBOL_REF in |
30814 | the location list couldn't be resolved. */ |
30815 | |
static bool
resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
{
  /* KEEP, when non-NULL, marks a DW_OP_convert that follows a
     non-integer (e.g. floating point) typed stack entry and therefore
     must not be folded away by the conversion-merging code below.  */
  dw_loc_descr_ref keep = NULL;
  for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
    switch (loc->dw_loc_opc)
      {
      case DW_OP_addr:
	if (!resolve_one_addr (addr: &loc->dw_loc_oprnd1.v.val_addr))
	  {
	    /* The referenced symbol was not emitted.  If this DW_OP_addr
	       starts the expression (or a piece of it) and is immediately
	       followed by DW_OP_stack_value, try turning it into
	       DW_OP_implicit_pointer before failing the whole
	       expression.  Not allowed in strict pre-DWARF 5 mode.  */
	    if ((prev == NULL
		 || prev->dw_loc_opc == DW_OP_piece
		 || prev->dw_loc_opc == DW_OP_bit_piece)
		&& loc->dw_loc_next
		&& loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
		&& (!dwarf_strict || dwarf_version >= 5)
		&& optimize_one_addr_into_implicit_ptr (loc))
	      break;
	    return false;
	  }
	break;
      case DW_OP_GNU_addr_index:
      case DW_OP_addrx:
      case DW_OP_GNU_const_index:
      case DW_OP_constx:
	if ((loc->dw_loc_opc == DW_OP_GNU_addr_index
	     || loc->dw_loc_opc == DW_OP_addrx)
	    || ((loc->dw_loc_opc == DW_OP_GNU_const_index
		 || loc->dw_loc_opc == DW_OP_constx)
		&& loc->dtprel))
	  {
	    /* Resolve the RTL behind the address table entry and replace
	       the entry with one referring to the resolved address.  For
	       the const_index/constx forms this is only needed when the
	       dtprel flag marks the operand as an address.  */
	    rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
	    if (!resolve_one_addr (addr: &rtl))
	      return false;
	    remove_addr_table_entry (entry: loc->dw_loc_oprnd1.val_entry);
	    loc->dw_loc_oprnd1.val_entry
	      = add_addr_table_entry (addr: rtl, kind: ate_kind_rtx);
	  }
	break;
      case DW_OP_const4u:
      case DW_OP_const8u:
	/* With the dtprel flag set the "constant" operand is really an
	   address (a DTP-relative one) and needs resolving as well.  */
	if (loc->dtprel
	    && !resolve_one_addr (addr: &loc->dw_loc_oprnd1.v.val_addr))
	  return false;
	break;
      case DW_OP_plus_uconst:
	/* If pushing the constant via a literal op plus DW_OP_plus
	   encodes shorter than DW_OP_plus_uconst's ULEB128 operand,
	   substitute the shorter form in place.  */
	if (size_of_loc_descr (loc)
	    > size_of_int_loc_descriptor (i: loc->dw_loc_oprnd1.v.val_unsigned)
	      + 1
	    && loc->dw_loc_oprnd1.v.val_unsigned > 0)
	  {
	    dw_loc_descr_ref repl
	      = int_loc_descriptor (poly_i: loc->dw_loc_oprnd1.v.val_unsigned);
	    add_loc_descr (list_head: &repl, descr: new_loc_descr (op: DW_OP_plus, oprnd1: 0, oprnd2: 0));
	    add_loc_descr (list_head: &repl, descr: loc->dw_loc_next);
	    /* Overwrite *LOC so references held by the caller stay
	       valid; the loop then continues down the spliced chain.  */
	    *loc = *repl;
	  }
	break;
      case DW_OP_implicit_value:
	if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
	    && !resolve_one_addr (addr: &loc->dw_loc_oprnd2.v.val_addr))
	  return false;
	break;
      case DW_OP_implicit_pointer:
      case DW_OP_GNU_implicit_pointer:
      case DW_OP_GNU_parameter_ref:
      case DW_OP_GNU_variable_value:
	if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
	  {
	    /* Now that DIEs exist, turn the decl reference recorded
	       earlier into a DIE reference; fail if the decl never got
	       a DIE.  */
	    dw_die_ref ref
	      = lookup_decl_die (decl: loc->dw_loc_oprnd1.v.val_decl_ref);
	    if (ref == NULL)
	      return false;
	    loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
	    loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
	    loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
	  }
	if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
	  {
	    if (prev == NULL
		&& loc->dw_loc_next == NULL
		&& AT_class (a) == dw_val_class_loc)
	      switch (a->dw_attr)
		{
		/* Following attributes allow both exprloc and reference,
		   so if the whole expression is DW_OP_GNU_variable_value
		   alone we could transform it into reference.  */
		case DW_AT_byte_size:
		case DW_AT_bit_size:
		case DW_AT_lower_bound:
		case DW_AT_upper_bound:
		case DW_AT_bit_stride:
		case DW_AT_count:
		case DW_AT_allocated:
		case DW_AT_associated:
		case DW_AT_byte_stride:
		  a->dw_attr_val.val_class = dw_val_class_die_ref;
		  a->dw_attr_val.val_entry = NULL;
		  a->dw_attr_val.v.val_die_ref.die
		    = loc->dw_loc_oprnd1.v.val_die_ref.die;
		  a->dw_attr_val.v.val_die_ref.external = 0;
		  return true;
		default:
		  break;
		}
	    /* DW_OP_GNU_variable_value is a GNU extension, so it cannot
	       survive into strict-DWARF output.  */
	    if (dwarf_strict)
	      return false;
	  }
	break;
      case DW_OP_const_type:
      case DW_OP_regval_type:
      case DW_OP_deref_type:
      case DW_OP_convert:
      case DW_OP_reinterpret:
      case DW_OP_GNU_const_type:
      case DW_OP_GNU_regval_type:
      case DW_OP_GNU_deref_type:
      case DW_OP_GNU_convert:
      case DW_OP_GNU_reinterpret:
	/* Fold away a following DW_OP_convert when both base types have
	   the same byte size and compatible encodings, by retargeting
	   this op's base type reference to the conversion's target.  */
	while (loc->dw_loc_next
	       && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
		   || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
	  {
	    dw_die_ref base1, base2;
	    unsigned enc1, enc2, size1, size2;
	    /* The regval/deref forms keep the base type DIE in operand 2,
	       the other forms in operand 1.  An unsigned-const operand
	       means there is no base type DIE to merge, so stop.  */
	    if (loc->dw_loc_opc == DW_OP_regval_type
		|| loc->dw_loc_opc == DW_OP_deref_type
		|| loc->dw_loc_opc == DW_OP_GNU_regval_type
		|| loc->dw_loc_opc == DW_OP_GNU_deref_type)
	      base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
	    else if (loc->dw_loc_oprnd1.val_class
		     == dw_val_class_unsigned_const)
	      break;
	    else
	      base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
	    if (loc->dw_loc_next->dw_loc_oprnd1.val_class
		== dw_val_class_unsigned_const)
	      break;
	    base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
	    gcc_assert (base1->die_tag == DW_TAG_base_type
			&& base2->die_tag == DW_TAG_base_type);
	    enc1 = get_AT_unsigned (die: base1, attr_kind: DW_AT_encoding);
	    enc2 = get_AT_unsigned (die: base2, attr_kind: DW_AT_encoding);
	    size1 = get_AT_unsigned (die: base1, attr_kind: DW_AT_byte_size);
	    size2 = get_AT_unsigned (die: base2, attr_kind: DW_AT_byte_size);
	    if (size1 == size2
		&& (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
		     && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
		     && loc != keep)
		    || enc1 == enc2))
	      {
		/* Optimize away next DW_OP_convert after
		   adjusting LOC's base type die reference.  */
		if (loc->dw_loc_opc == DW_OP_regval_type
		    || loc->dw_loc_opc == DW_OP_deref_type
		    || loc->dw_loc_opc == DW_OP_GNU_regval_type
		    || loc->dw_loc_opc == DW_OP_GNU_deref_type)
		  loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
		else
		  loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
		loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
		continue;
	      }
	    /* Don't change integer DW_OP_convert after e.g. floating
	       point typed stack entry.  */
	    else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
	      keep = loc->dw_loc_next;
	    break;
	  }
	break;
      default:
	break;
      }
  return true;
}
30991 | |
30992 | /* Helper function of resolve_addr. DIE had DW_AT_location of |
30993 | DW_OP_addr alone, which referred to DECL in DW_OP_addr's operand |
30994 | and DW_OP_addr couldn't be resolved. resolve_addr has already |
30995 | removed the DW_AT_location attribute. This function attempts to |
30996 | add a new DW_AT_location attribute with DW_OP_implicit_pointer |
30997 | to it or DW_AT_const_value attribute, if possible. */ |
30998 | |
static void
optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
{
  /* Only handle a static, non-external variable whose DIE is DIE
     itself, with an inspectable (non-decl) initializer and no
     DW_AT_const_value already attached.  */
  if (!VAR_P (decl)
      || lookup_decl_die (decl) != die
      || DECL_EXTERNAL (decl)
      || !TREE_STATIC (decl)
      || DECL_INITIAL (decl) == NULL_TREE
      || DECL_P (DECL_INITIAL (decl))
      || get_AT (die, attr_kind: DW_AT_const_value))
    return;

  tree init = DECL_INITIAL (decl);
  HOST_WIDE_INT offset = 0;
  /* For variables that have been optimized away and thus
     don't have a memory location, see if we can emit
     DW_AT_const_value instead.  */
  if (tree_add_const_value_attribute (die, t: init))
    return;
  /* DW_OP_implicit_pointer is DWARF 5 (a GNU extension before that),
     so it cannot be used in strict pre-5 DWARF.  */
  if (dwarf_strict && dwarf_version < 5)
    return;
  /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
     and ADDR_EXPR refers to a decl that has DW_AT_location or
     DW_AT_const_value (but isn't addressable, otherwise
     resolving the original DW_OP_addr wouldn't fail), see if
     we can add DW_OP_implicit_pointer.  */
  STRIP_NOPS (init);
  if (TREE_CODE (init) == POINTER_PLUS_EXPR
      && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
    {
      /* Fold the constant displacement into the implicit pointer's
	 byte offset operand.  */
      offset = tree_to_shwi (TREE_OPERAND (init, 1));
      init = TREE_OPERAND (init, 0);
      STRIP_NOPS (init);
    }
  if (TREE_CODE (init) != ADDR_EXPR)
    return;
  /* The target must be either a string literal that was never written
     to the assembly output, or a defined variable other than DECL
     itself (self-reference cannot be expressed this way).  */
  if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
       && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
      || (VAR_P (TREE_OPERAND (init, 0))
	  && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
	  && TREE_OPERAND (init, 0) != decl))
    {
      dw_die_ref ref;
      dw_loc_descr_ref l;

      if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
	{
	  /* Force the string literal into the constant pool so it has
	     a decl (and thus potentially a DIE) to point at.  */
	  rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
	  if (!rtl)
	    return;
	  decl = SYMBOL_REF_DECL (rtl);
	}
      else
	decl = TREE_OPERAND (init, 0);
      /* The implicit pointer is only useful if the target DIE exists
	 and carries a location or constant value to dereference.  */
      ref = lookup_decl_die (decl);
      if (ref == NULL
	  || (!get_AT (die: ref, attr_kind: DW_AT_location)
	      && !get_AT (die: ref, attr_kind: DW_AT_const_value)))
	return;
      l = new_loc_descr (op: dwarf_OP (op: DW_OP_implicit_pointer), oprnd1: 0, oprnd2: offset);
      l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
      l->dw_loc_oprnd1.v.val_die_ref.die = ref;
      l->dw_loc_oprnd1.v.val_die_ref.external = 0;
      add_AT_loc (die, attr_kind: DW_AT_location, loc: l);
    }
}
31065 | |
31066 | /* Return NULL if l is a DWARF expression, or first op that is not |
31067 | valid DWARF expression. */ |
31068 | |
31069 | static dw_loc_descr_ref |
31070 | non_dwarf_expression (dw_loc_descr_ref l) |
31071 | { |
31072 | while (l) |
31073 | { |
31074 | if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31) |
31075 | return l; |
31076 | switch (l->dw_loc_opc) |
31077 | { |
31078 | case DW_OP_regx: |
31079 | case DW_OP_implicit_value: |
31080 | case DW_OP_stack_value: |
31081 | case DW_OP_implicit_pointer: |
31082 | case DW_OP_GNU_implicit_pointer: |
31083 | case DW_OP_GNU_parameter_ref: |
31084 | case DW_OP_piece: |
31085 | case DW_OP_bit_piece: |
31086 | return l; |
31087 | default: |
31088 | break; |
31089 | } |
31090 | l = l->dw_loc_next; |
31091 | } |
31092 | return NULL; |
31093 | } |
31094 | |
31095 | /* Return adjusted copy of EXPR: |
31096 | If it is empty DWARF expression, return it. |
31097 | If it is valid non-empty DWARF expression, |
31098 | return copy of EXPR with DW_OP_deref appended to it. |
31099 | If it is DWARF expression followed by DW_OP_reg{N,x}, return |
31100 | copy of the DWARF expression with DW_OP_breg{N,x} <0> appended. |
31101 | If it is DWARF expression followed by DW_OP_stack_value, return |
31102 | copy of the DWARF expression without anything appended. |
31103 | Otherwise, return NULL. */ |
31104 | |
static dw_loc_descr_ref
copy_deref_exprloc (dw_loc_descr_ref expr)
{
  /* TAIL is the single op appended after the copied prefix: DW_OP_deref,
     a DW_OP_breg{N,x} <0>, or nothing for a DW_OP_stack_value tail.  */
  dw_loc_descr_ref tail = NULL;

  if (expr == NULL)
    return NULL;

  /* L is the first non-expression op, if any.  It must also be the
     last op of EXPR, otherwise the shape isn't one we can handle.  */
  dw_loc_descr_ref l = non_dwarf_expression (l: expr);
  if (l && l->dw_loc_next)
    return NULL;

  if (l)
    {
      /* Register location: turn DW_OP_reg{N,x} into the matching
	 DW_OP_breg{N,x} <0> so the register's value (the address) is
	 pushed instead of naming the register as a location.  */
      if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
	tail = new_loc_descr (op: (enum dwarf_location_atom)
			      (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
			      oprnd1: 0, oprnd2: 0);
      else
	switch (l->dw_loc_opc)
	  {
	  case DW_OP_regx:
	    tail = new_loc_descr (op: DW_OP_bregx,
				  oprnd1: l->dw_loc_oprnd1.v.val_unsigned, oprnd2: 0);
	    break;
	  case DW_OP_stack_value:
	    /* The value is already on the stack; append nothing.  */
	    break;
	  default:
	    return NULL;
	  }
    }
  else
    /* Pure DWARF expression computing an address: dereference it.  */
    tail = new_loc_descr (op: DW_OP_deref, oprnd1: 0, oprnd2: 0);

  /* Copy every op before L (or the whole of EXPR when L is NULL) into a
     fresh chain, then hang TAIL off the end.  */
  dw_loc_descr_ref ret = NULL, *p = &ret;
  while (expr != l)
    {
      *p = new_loc_descr (op: expr->dw_loc_opc, oprnd1: 0, oprnd2: 0);
      (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
      (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
      p = &(*p)->dw_loc_next;
      expr = expr->dw_loc_next;
    }
  *p = tail;
  return ret;
}
31151 | |
31152 | /* For DW_AT_string_length attribute with DW_OP_GNU_variable_value |
31153 | reference to a variable or argument, adjust it if needed and return: |
31154 | -1 if the DW_AT_string_length attribute and DW_AT_{string_length_,}byte_size |
31155 | attribute if present should be removed |
31156 | 0 keep the attribute perhaps with minor modifications, no need to rescan |
31157 | 1 if the attribute has been successfully adjusted. */ |
31158 | |
static int
optimize_string_length (dw_attr_node *a)
{
  dw_loc_descr_ref l = AT_loc (a), lv;
  dw_die_ref die;
  /* First make sure the DW_OP_GNU_variable_value operand is a DIE
     reference; a decl without a DIE means the attribute must go.  */
  if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
    {
      tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
      die = lookup_decl_die (decl);
      if (die)
	{
	  l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
	  l->dw_loc_oprnd1.v.val_die_ref.die = die;
	  l->dw_loc_oprnd1.v.val_die_ref.external = 0;
	}
      else
	return -1;
    }
  else
    die = l->dw_loc_oprnd1.v.val_die_ref.die;

  /* DWARF5 allows reference class, so we can then reference the DIE.
     Only do this for DW_OP_GNU_variable_value DW_OP_stack_value.  */
  if (l->dw_loc_next != NULL && dwarf_version >= 5)
    {
      a->dw_attr_val.val_class = dw_val_class_die_ref;
      a->dw_attr_val.val_entry = NULL;
      a->dw_attr_val.v.val_die_ref.die = die;
      a->dw_attr_val.v.val_die_ref.external = 0;
      return 0;
    }

  /* Inspect the referenced DIE's location to decide whether a
     DW_OP_call4-based rewrite is possible (it requires the callee's
     location to be a pure DWARF expression).  */
  dw_attr_node *av = get_AT (die, attr_kind: DW_AT_location);
  dw_loc_list_ref d;
  bool non_dwarf_expr = false;

  if (av == NULL)
    return dwarf_strict ? -1 : 0;
  switch (AT_class (a: av))
    {
    case dw_val_class_loc_list:
      for (d = AT_loc_list (a: av); d != NULL; d = d->dw_loc_next)
	if (d->expr && non_dwarf_expression (l: d->expr))
	  non_dwarf_expr = true;
      break;
    case dw_val_class_view_list:
      gcc_unreachable ();
    case dw_val_class_loc:
      lv = AT_loc (a: av);
      if (lv == NULL)
	return dwarf_strict ? -1 : 0;
      if (non_dwarf_expression (l: lv))
	non_dwarf_expr = true;
      break;
    default:
      return dwarf_strict ? -1 : 0;
    }

  /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
     into DW_OP_call4 or DW_OP_GNU_variable_value into
     DW_OP_call4 DW_OP_deref, do so.  */
  if (!non_dwarf_expr
      && (l->dw_loc_next != NULL || AT_class (a: av) == dw_val_class_loc))
    {
      l->dw_loc_opc = DW_OP_call4;
      if (l->dw_loc_next)
	l->dw_loc_next = NULL;
      else
	l->dw_loc_next = new_loc_descr (op: DW_OP_deref, oprnd1: 0, oprnd2: 0);
      return 0;
    }

  /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
     copy over the DW_AT_location attribute from die to a.  */
  if (l->dw_loc_next != NULL)
    {
      a->dw_attr_val = av->dw_attr_val;
      return 1;
    }

  /* Bare DW_OP_GNU_variable_value: rebuild the referenced location
     (list) with a dereference appended via copy_deref_exprloc.  */
  dw_loc_list_ref list, *p;
  switch (AT_class (a: av))
    {
    case dw_val_class_loc_list:
      p = &list;
      list = NULL;
      for (d = AT_loc_list (a: av); d != NULL; d = d->dw_loc_next)
	{
	  lv = copy_deref_exprloc (expr: d->expr);
	  if (lv)
	    {
	      *p = new_loc_list (expr: lv, begin: d->begin, vbegin: d->vbegin, end: d->end, vend: d->vend, section: d->section);
	      p = &(*p)->dw_loc_next;
	    }
	  else if (!dwarf_strict && d->expr)
	    /* A range we cannot convert: keep the attribute as-is when
	       GNU extensions are allowed.  */
	    return 0;
	}
      if (list == NULL)
	return dwarf_strict ? -1 : 0;
      a->dw_attr_val.val_class = dw_val_class_loc_list;
      gen_llsym (list);
      *AT_loc_list_ptr (a) = list;
      return 1;
    case dw_val_class_loc:
      lv = copy_deref_exprloc (expr: AT_loc (a: av));
      if (lv == NULL)
	return dwarf_strict ? -1 : 0;
      a->dw_attr_val.v.val_loc = lv;
      return 1;
    default:
      gcc_unreachable ();
    }
}
31272 | |
31273 | /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to |
31274 | an address in .rodata section if the string literal is emitted there, |
31275 | or remove the containing location list or replace DW_AT_const_value |
31276 | with DW_AT_location and empty location expression, if it isn't found |
31277 | in .rodata. Similarly for SYMBOL_REFs, keep only those that refer |
31278 | to something that has been emitted in the current CU. */ |
31279 | |
static void
resolve_addr (dw_die_ref die)
{
  dw_die_ref c;
  dw_attr_node *a;
  dw_loc_list_ref *curr, *start, loc;
  unsigned ix;
  bool remove_AT_byte_size = false;

  /* Note: whenever an attribute is removed below, IX is decremented so
     the FOR_EACH_VEC_SAFE_ELT loop revisits the slot that the removal
     shifted into position IX.  */
  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    switch (AT_class (a))
      {
      case dw_val_class_loc_list:
	start = curr = AT_loc_list_ptr (a);
	loc = *curr;
	gcc_assert (loc);
	/* The same list can be referenced more than once.  See if we have
	   already recorded the result from a previous pass.  */
	if (loc->replaced)
	  *curr = loc->dw_loc_next;
	else if (!loc->resolved_addr)
	  {
	    /* As things stand, we do not expect or allow one die to
	       reference a suffix of another die's location list chain.
	       References must be identical or completely separate.
	       There is therefore no need to cache the result of this
	       pass on any list other than the first; doing so
	       would lead to unnecessary writes.  */
	    while (*curr)
	      {
		gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
		if (!resolve_addr_in_expr (a, loc: (*curr)->expr))
		  {
		    /* Unresolvable range: unlink it, but carry its
		       list/view labels over to the next range so the
		       list head symbol stays valid.  */
		    dw_loc_list_ref next = (*curr)->dw_loc_next;
		    dw_loc_descr_ref l = (*curr)->expr;

		    if (next && (*curr)->ll_symbol)
		      {
			gcc_assert (!next->ll_symbol);
			next->ll_symbol = (*curr)->ll_symbol;
			next->vl_symbol = (*curr)->vl_symbol;
		      }
		    if (dwarf_split_debug_info)
		      remove_loc_list_addr_table_entries (descr: l);
		    *curr = next;
		  }
		else
		  {
		    mark_base_types (loc: (*curr)->expr);
		    curr = &(*curr)->dw_loc_next;
		  }
	      }
	    if (loc == *start)
	      loc->resolved_addr = 1;
	    else
	      {
		/* The head was removed; record the replacement so other
		   references to this list pick up the new head.  */
		loc->replaced = 1;
		loc->dw_loc_next = *start;
	      }
	  }
	/* Every range was dropped: remove the attribute entirely.  */
	if (!*start)
	  {
	    remove_AT (die, attr_kind: a->dw_attr);
	    ix--;
	  }
	break;
      case dw_val_class_view_list:
	{
	  gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
	  gcc_checking_assert (dwarf2out_locviews_in_attribute ());
	  dw_val_node *llnode
	    = view_list_to_loc_list_val_node (val: &a->dw_attr_val);
	  /* If we no longer have a loclist, or it no longer needs
	     views, drop this attribute.  */
	  if (!llnode || !llnode->v.val_loc_list->vl_symbol)
	    {
	      remove_AT (die, attr_kind: a->dw_attr);
	      ix--;
	    }
	  break;
	}
      case dw_val_class_loc:
	{
	  dw_loc_descr_ref l = AT_loc (a);
	  /* DW_OP_GNU_variable_value DW_OP_stack_value or
	     DW_OP_GNU_variable_value in DW_AT_string_length can be converted
	     into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
	     DWARF4 unlike DW_OP_GNU_variable_value.  Or for DWARF5
	     DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
	     with DW_FORM_ref referencing the same DIE as
	     DW_OP_GNU_variable_value used to reference.  */
	  if (a->dw_attr == DW_AT_string_length
	      && l
	      && l->dw_loc_opc == DW_OP_GNU_variable_value
	      && (l->dw_loc_next == NULL
		  || (l->dw_loc_next->dw_loc_next == NULL
		      && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
	    {
	      switch (optimize_string_length (a))
		{
		case -1:
		  remove_AT (die, attr_kind: a->dw_attr);
		  ix--;
		  /* If we drop DW_AT_string_length, we need to drop also
		     DW_AT_{string_length_,}byte_size.  */
		  remove_AT_byte_size = true;
		  continue;
		default:
		  break;
		case 1:
		  /* Even if we keep the optimized DW_AT_string_length,
		     it might have changed AT_class, so process it again.  */
		  ix--;
		  continue;
		}
	    }
	  /* For -gdwarf-2 don't attempt to optimize
	     DW_AT_data_member_location containing
	     DW_OP_plus_uconst - older consumers might
	     rely on it being that op instead of a more complex,
	     but shorter, location description.  */
	  if ((dwarf_version > 2
	       || a->dw_attr != DW_AT_data_member_location
	       || l == NULL
	       || l->dw_loc_opc != DW_OP_plus_uconst
	       || l->dw_loc_next != NULL)
	      && !resolve_addr_in_expr (a, loc: l))
	    {
	      if (dwarf_split_debug_info)
		remove_loc_list_addr_table_entries (descr: l);
	      /* A lone unresolvable DW_OP_addr of a known decl in
		 DW_AT_location may still be salvageable as an implicit
		 pointer or constant value.  */
	      if (l != NULL
		  && l->dw_loc_next == NULL
		  && l->dw_loc_opc == DW_OP_addr
		  && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
		  && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
		  && a->dw_attr == DW_AT_location)
		{
		  tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
		  remove_AT (die, attr_kind: a->dw_attr);
		  ix--;
		  optimize_location_into_implicit_ptr (die, decl);
		  break;
		}
	      if (a->dw_attr == DW_AT_string_length)
		/* If we drop DW_AT_string_length, we need to drop also
		   DW_AT_{string_length_,}byte_size.  */
		remove_AT_byte_size = true;
	      remove_AT (die, attr_kind: a->dw_attr);
	      ix--;
	    }
	  else
	    mark_base_types (loc: l);
	}
	break;
      case dw_val_class_addr:
	if (a->dw_attr == DW_AT_const_value
	    && !resolve_one_addr (addr: &a->dw_attr_val.v.val_addr))
	  {
	    if (AT_index (a) != NOT_INDEXED)
	      remove_addr_table_entry (entry: a->dw_attr_val.val_entry);
	    remove_AT (die, attr_kind: a->dw_attr);
	    ix--;
	  }
	/* Call-site origins are kept as addresses until now; turn them
	   into DIE references, creating a declaration stub for an
	   external callee that has none, or drop them.  */
	if ((die->die_tag == DW_TAG_call_site
	     && a->dw_attr == DW_AT_call_origin)
	    || (die->die_tag == DW_TAG_GNU_call_site
		&& a->dw_attr == DW_AT_abstract_origin))
	  {
	    tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
	    dw_die_ref tdie = lookup_decl_die (decl: tdecl);
	    dw_die_ref cdie;
	    if (tdie == NULL
		&& DECL_EXTERNAL (tdecl)
		&& DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
		&& (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
	      {
		dw_die_ref pdie = cdie;
		/* Make sure we don't add these DIEs into type units.
		   We could emit skeleton DIEs for context (namespaces,
		   outer structs/classes) and a skeleton DIE for the
		   innermost context with DW_AT_signature pointing to the
		   type unit.  See PR78835.  */
		while (pdie && pdie->die_tag != DW_TAG_type_unit)
		  pdie = pdie->die_parent;
		if (pdie == NULL)
		  {
		    /* Creating a full DIE for tdecl is overly expensive and
		       at this point even wrong when in the LTO phase
		       as it can end up generating new type DIEs we didn't
		       output and thus optimize_external_refs will crash.  */
		    tdie = new_die (tag_value: DW_TAG_subprogram, parent_die: cdie, NULL_TREE);
		    add_AT_flag (die: tdie, attr_kind: DW_AT_external, flag: 1);
		    add_AT_flag (die: tdie, attr_kind: DW_AT_declaration, flag: 1);
		    add_linkage_attr (die: tdie, decl: tdecl);
		    add_name_and_src_coords_attributes (die: tdie, decl: tdecl, no_linkage_name: true);
		    equate_decl_number_to_die (decl: tdecl, decl_die: tdie);
		  }
	      }
	    if (tdie)
	      {
		a->dw_attr_val.val_class = dw_val_class_die_ref;
		a->dw_attr_val.v.val_die_ref.die = tdie;
		a->dw_attr_val.v.val_die_ref.external = 0;
	      }
	    else
	      {
		if (AT_index (a) != NOT_INDEXED)
		  remove_addr_table_entry (entry: a->dw_attr_val.val_entry);
		remove_AT (die, attr_kind: a->dw_attr);
		ix--;
	      }
	  }
	break;
      default:
	break;
      }

  /* Deferred removal: dropping DW_AT_string_length above requires the
     matching byte-size attribute to go as well (its name differs
     between DWARF 5 and earlier versions).  */
  if (remove_AT_byte_size)
    remove_AT (die, dwarf_version >= 5
		    ? DW_AT_string_length_byte_size
		    : DW_AT_byte_size);

  /* Recurse over the whole DIE tree.  */
  FOR_EACH_CHILD (die, c, resolve_addr (c));
}
31504 | |
31505 | /* Helper routines for optimize_location_lists. |
31506 | This pass tries to share identical local lists in .debug_loc |
31507 | section. */ |
31508 | |
31509 | /* Iteratively hash operands of LOC opcode into HSTATE. */ |
31510 | |
static void
hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
{
  dw_val_ref val1 = &loc->dw_loc_oprnd1;
  dw_val_ref val2 = &loc->dw_loc_oprnd2;

  /* NOTE(review): this hash presumably has to stay consistent with the
     corresponding location-operand equality comparison used for
     .debug_loc sharing — confirm when modifying either.  */
  switch (loc->dw_loc_opc)
    {
    case DW_OP_const4u:
    case DW_OP_const8u:
      /* With the dtprel flag the operand is really an address.  */
      if (loc->dtprel)
	goto hash_addr;
      /* FALLTHRU */
    case DW_OP_const1u:
    case DW_OP_const1s:
    case DW_OP_const2u:
    case DW_OP_const2s:
    case DW_OP_const4s:
    case DW_OP_const8s:
    case DW_OP_constu:
    case DW_OP_consts:
    case DW_OP_pick:
    case DW_OP_plus_uconst:
    case DW_OP_breg0:
    case DW_OP_breg1:
    case DW_OP_breg2:
    case DW_OP_breg3:
    case DW_OP_breg4:
    case DW_OP_breg5:
    case DW_OP_breg6:
    case DW_OP_breg7:
    case DW_OP_breg8:
    case DW_OP_breg9:
    case DW_OP_breg10:
    case DW_OP_breg11:
    case DW_OP_breg12:
    case DW_OP_breg13:
    case DW_OP_breg14:
    case DW_OP_breg15:
    case DW_OP_breg16:
    case DW_OP_breg17:
    case DW_OP_breg18:
    case DW_OP_breg19:
    case DW_OP_breg20:
    case DW_OP_breg21:
    case DW_OP_breg22:
    case DW_OP_breg23:
    case DW_OP_breg24:
    case DW_OP_breg25:
    case DW_OP_breg26:
    case DW_OP_breg27:
    case DW_OP_breg28:
    case DW_OP_breg29:
    case DW_OP_breg30:
    case DW_OP_breg31:
    case DW_OP_regx:
    case DW_OP_fbreg:
    case DW_OP_piece:
    case DW_OP_deref_size:
    case DW_OP_xderef_size:
      /* Single integer operand.  */
      hstate.add_object (obj&: val1->v.val_int);
      break;
    case DW_OP_skip:
    case DW_OP_bra:
      {
	int offset;

	/* Hash the branch displacement rather than the raw target
	   pointer; dw_loc_addr was filled in by a prior size_of_locs
	   call.  The +3 accounts for the op's own size (1-byte opcode
	   plus 2-byte signed operand).  */
	gcc_assert (val1->val_class == dw_val_class_loc);
	offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
	hstate.add_object (obj&: offset);
      }
      break;
    case DW_OP_implicit_value:
      /* Hash the block length, then the payload per its value class.  */
      hstate.add_object (obj&: val1->v.val_unsigned);
      switch (val2->val_class)
	{
	case dw_val_class_const:
	  hstate.add_object (obj&: val2->v.val_int);
	  break;
	case dw_val_class_vec:
	  {
	    unsigned int elt_size = val2->v.val_vec.elt_size;
	    unsigned int len = val2->v.val_vec.length;

	    hstate.add_int (v: elt_size);
	    hstate.add_int (v: len);
	    hstate.add (data: val2->v.val_vec.array, len: len * elt_size);
	  }
	  break;
	case dw_val_class_const_double:
	  hstate.add_object (obj&: val2->v.val_double.low);
	  hstate.add_object (obj&: val2->v.val_double.high);
	  break;
	case dw_val_class_wide_int:
	  hstate.add (data: val2->v.val_wide->get_val (),
		      len: get_full_len (op: *val2->v.val_wide)
		      * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
	  break;
	case dw_val_class_addr:
	  inchash::add_rtx (val2->v.val_addr, hstate);
	  break;
	default:
	  gcc_unreachable ();
	}
      break;
    case DW_OP_bregx:
    case DW_OP_bit_piece:
      /* Two integer operands.  */
      hstate.add_object (obj&: val1->v.val_int);
      hstate.add_object (obj&: val2->v.val_int);
      break;
    case DW_OP_addr:
    hash_addr:
      if (loc->dtprel)
	{
	  /* Mix in a sentinel byte so a dtprel address never hashes
	     equal to the same address without dtprel.  */
	  unsigned char dtprel = 0xd1;
	  hstate.add_object (obj&: dtprel);
	}
      inchash::add_rtx (val1->v.val_addr, hstate);
      break;
    case DW_OP_GNU_addr_index:
    case DW_OP_addrx:
    case DW_OP_GNU_const_index:
    case DW_OP_constx:
      {
	if (loc->dtprel)
	  {
	    /* Same dtprel sentinel as for DW_OP_addr above.  */
	    unsigned char dtprel = 0xd1;
	    hstate.add_object (obj&: dtprel);
	  }
	/* Hash the RTL behind the address table entry, not the entry
	   itself (entries may be re-created during resolution).  */
	inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
      }
      break;
    case DW_OP_implicit_pointer:
    case DW_OP_GNU_implicit_pointer:
      /* Only the byte offset is hashed here; the DIE reference operand
	 is not folded in.  */
      hstate.add_int (v: val2->v.val_int);
      break;
    case DW_OP_entry_value:
    case DW_OP_GNU_entry_value:
      /* Hash the nested expression by pointer identity.  */
      hstate.add_object (obj&: val1->v.val_loc);
      break;
    case DW_OP_regval_type:
    case DW_OP_deref_type:
    case DW_OP_GNU_regval_type:
    case DW_OP_GNU_deref_type:
      {
	/* Hash the base type by its size/encoding rather than by DIE
	   pointer, so equivalent base types hash equal.  */
	unsigned int byte_size
	  = get_AT_unsigned (die: val2->v.val_die_ref.die, attr_kind: DW_AT_byte_size);
	unsigned int encoding
	  = get_AT_unsigned (die: val2->v.val_die_ref.die, attr_kind: DW_AT_encoding);
	hstate.add_object (obj&: val1->v.val_int);
	hstate.add_object (obj&: byte_size);
	hstate.add_object (obj&: encoding);
      }
      break;
    case DW_OP_convert:
    case DW_OP_reinterpret:
    case DW_OP_GNU_convert:
    case DW_OP_GNU_reinterpret:
      /* An unsigned-const operand is a DIE offset placeholder; hash it
	 directly.  Otherwise fall through to the base-type hashing.  */
      if (val1->val_class == dw_val_class_unsigned_const)
	{
	  hstate.add_object (obj&: val1->v.val_unsigned);
	  break;
	}
      /* FALLTHRU */
    case DW_OP_const_type:
    case DW_OP_GNU_const_type:
      {
	unsigned int byte_size
	  = get_AT_unsigned (die: val1->v.val_die_ref.die, attr_kind: DW_AT_byte_size);
	unsigned int encoding
	  = get_AT_unsigned (die: val1->v.val_die_ref.die, attr_kind: DW_AT_encoding);
	hstate.add_object (obj&: byte_size);
	hstate.add_object (obj&: encoding);
	if (loc->dw_loc_opc != DW_OP_const_type
	    && loc->dw_loc_opc != DW_OP_GNU_const_type)
	  break;
	/* DW_OP_const_type additionally carries a typed constant; hash
	   its class tag followed by the payload.  */
	hstate.add_object (obj&: val2->val_class);
	switch (val2->val_class)
	  {
	  case dw_val_class_const:
	    hstate.add_object (obj&: val2->v.val_int);
	    break;
	  case dw_val_class_vec:
	    {
	      unsigned int elt_size = val2->v.val_vec.elt_size;
	      unsigned int len = val2->v.val_vec.length;

	      hstate.add_object (obj&: elt_size);
	      hstate.add_object (obj&: len);
	      hstate.add (data: val2->v.val_vec.array, len: len * elt_size);
	    }
	    break;
	  case dw_val_class_const_double:
	    hstate.add_object (obj&: val2->v.val_double.low);
	    hstate.add_object (obj&: val2->v.val_double.high);
	    break;
	  case dw_val_class_wide_int:
	    hstate.add (data: val2->v.val_wide->get_val (),
			len: get_full_len (op: *val2->v.val_wide)
			* HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
	    break;
	  default:
	    gcc_unreachable ();
	  }
      }
      break;

    default:
      /* Other codes have no operands.  */
      break;
    }
}
31723 | |
31724 | /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */ |
31725 | |
31726 | static inline void |
31727 | hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate) |
31728 | { |
31729 | dw_loc_descr_ref l; |
31730 | bool sizes_computed = false; |
31731 | /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */ |
31732 | size_of_locs (loc); |
31733 | |
31734 | for (l = loc; l != NULL; l = l->dw_loc_next) |
31735 | { |
31736 | enum dwarf_location_atom opc = l->dw_loc_opc; |
31737 | hstate.add_object (obj&: opc); |
31738 | if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed) |
31739 | { |
31740 | size_of_locs (loc); |
31741 | sizes_computed = true; |
31742 | } |
31743 | hash_loc_operands (loc: l, hstate); |
31744 | } |
31745 | } |
31746 | |
31747 | /* Compute hash of the whole location list LIST_HEAD. */ |
31748 | |
31749 | static inline void |
31750 | hash_loc_list (dw_loc_list_ref list_head) |
31751 | { |
31752 | dw_loc_list_ref curr = list_head; |
31753 | inchash::hash hstate; |
31754 | |
31755 | for (curr = list_head; curr != NULL; curr = curr->dw_loc_next) |
31756 | { |
31757 | hstate.add (data: curr->begin, len: strlen (s: curr->begin) + 1); |
31758 | hstate.add (data: curr->end, len: strlen (s: curr->end) + 1); |
31759 | hstate.add_object (obj&: curr->vbegin); |
31760 | hstate.add_object (obj&: curr->vend); |
31761 | if (curr->section) |
31762 | hstate.add (data: curr->section, len: strlen (s: curr->section) + 1); |
31763 | hash_locs (loc: curr->expr, hstate); |
31764 | } |
31765 | list_head->hash = hstate.end (); |
31766 | } |
31767 | |
/* Return true if X and Y opcodes have the same operands.  The caller has
   already checked that the opcodes themselves (and dtprel flags) match;
   only the operand values are compared here.  */

static inline bool
compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
{
  /* Shorthands for the first and second operands of each descriptor.  */
  dw_val_ref valx1 = &x->dw_loc_oprnd1;
  dw_val_ref valx2 = &x->dw_loc_oprnd2;
  dw_val_ref valy1 = &y->dw_loc_oprnd1;
  dw_val_ref valy2 = &y->dw_loc_oprnd2;

  switch (x->dw_loc_opc)
    {
    case DW_OP_const4u:
    case DW_OP_const8u:
      /* With dtprel set, the "constant" is really a TLS-relative address;
	 compare it as an address (rtx) below.  */
      if (x->dtprel)
	goto hash_addr;
      /* FALLTHRU */
    case DW_OP_const1u:
    case DW_OP_const1s:
    case DW_OP_const2u:
    case DW_OP_const2s:
    case DW_OP_const4s:
    case DW_OP_const8s:
    case DW_OP_constu:
    case DW_OP_consts:
    case DW_OP_pick:
    case DW_OP_plus_uconst:
    case DW_OP_breg0:
    case DW_OP_breg1:
    case DW_OP_breg2:
    case DW_OP_breg3:
    case DW_OP_breg4:
    case DW_OP_breg5:
    case DW_OP_breg6:
    case DW_OP_breg7:
    case DW_OP_breg8:
    case DW_OP_breg9:
    case DW_OP_breg10:
    case DW_OP_breg11:
    case DW_OP_breg12:
    case DW_OP_breg13:
    case DW_OP_breg14:
    case DW_OP_breg15:
    case DW_OP_breg16:
    case DW_OP_breg17:
    case DW_OP_breg18:
    case DW_OP_breg19:
    case DW_OP_breg20:
    case DW_OP_breg21:
    case DW_OP_breg22:
    case DW_OP_breg23:
    case DW_OP_breg24:
    case DW_OP_breg25:
    case DW_OP_breg26:
    case DW_OP_breg27:
    case DW_OP_breg28:
    case DW_OP_breg29:
    case DW_OP_breg30:
    case DW_OP_breg31:
    case DW_OP_regx:
    case DW_OP_fbreg:
    case DW_OP_piece:
    case DW_OP_deref_size:
    case DW_OP_xderef_size:
      /* All of the above carry a single integer operand.  */
      return valx1->v.val_int == valy1->v.val_int;
    case DW_OP_skip:
    case DW_OP_bra:
      /* If splitting debug info, the use of DW_OP_GNU_addr_index
	 can cause irrelevant differences in dw_loc_addr.  */
      gcc_assert (valx1->val_class == dw_val_class_loc
		  && valy1->val_class == dw_val_class_loc
		  && (dwarf_split_debug_info
		      || x->dw_loc_addr == y->dw_loc_addr));
      /* Branch targets are compared by the offset of the target
	 descriptor within its expression.  */
      return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
    case DW_OP_implicit_value:
      /* Operand 1 is the value's byte length; operand 2 holds the value
	 itself, whose representation depends on its value class.  */
      if (valx1->v.val_unsigned != valy1->v.val_unsigned
	  || valx2->val_class != valy2->val_class)
	return false;
      switch (valx2->val_class)
	{
	case dw_val_class_const:
	  return valx2->v.val_int == valy2->v.val_int;
	case dw_val_class_vec:
	  /* Byte blob: same element size, same length, same bytes.  */
	  return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
		 && valx2->v.val_vec.length == valy2->v.val_vec.length
		 && memcmp (s1: valx2->v.val_vec.array, s2: valy2->v.val_vec.array,
			    n: valx2->v.val_vec.elt_size
			    * valx2->v.val_vec.length) == 0;
	case dw_val_class_const_double:
	  return valx2->v.val_double.low == valy2->v.val_double.low
		 && valx2->v.val_double.high == valy2->v.val_double.high;
	case dw_val_class_wide_int:
	  return *valx2->v.val_wide == *valy2->v.val_wide;
	case dw_val_class_addr:
	  return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
	default:
	  gcc_unreachable ();
	}
    case DW_OP_bregx:
    case DW_OP_bit_piece:
      /* Two integer operands (register + offset, or size + offset).  */
      return valx1->v.val_int == valy1->v.val_int
	     && valx2->v.val_int == valy2->v.val_int;
    case DW_OP_addr:
    hash_addr:
      /* Addresses are kept as rtl; compare structurally.  */
      return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
    case DW_OP_GNU_addr_index:
    case DW_OP_addrx:
    case DW_OP_GNU_const_index:
    case DW_OP_constx:
      {
	/* Address-table-indexed operands compare by the underlying rtl
	   of the table entry, not by the (not yet assigned) index.  */
	rtx ax1 = valx1->val_entry->addr.rtl;
	rtx ay1 = valy1->val_entry->addr.rtl;
	return rtx_equal_p (ax1, ay1);
      }
    case DW_OP_implicit_pointer:
    case DW_OP_GNU_implicit_pointer:
      /* A DIE reference plus a byte offset into the pointed-to value.  */
      return valx1->val_class == dw_val_class_die_ref
	     && valx1->val_class == valy1->val_class
	     && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
	     && valx2->v.val_int == valy2->v.val_int;
    case DW_OP_entry_value:
    case DW_OP_GNU_entry_value:
      /* The operand is a nested location expression; recurse on its
	 first descriptor.  */
      return compare_loc_operands (x: valx1->v.val_loc, y: valy1->v.val_loc);
    case DW_OP_const_type:
    case DW_OP_GNU_const_type:
      /* A base-type DIE reference plus a constant whose representation
	 depends on operand 2's value class.  */
      if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
	  || valx2->val_class != valy2->val_class)
	return false;
      switch (valx2->val_class)
	{
	case dw_val_class_const:
	  return valx2->v.val_int == valy2->v.val_int;
	case dw_val_class_vec:
	  return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
		 && valx2->v.val_vec.length == valy2->v.val_vec.length
		 && memcmp (s1: valx2->v.val_vec.array, s2: valy2->v.val_vec.array,
			    n: valx2->v.val_vec.elt_size
			    * valx2->v.val_vec.length) == 0;
	case dw_val_class_const_double:
	  return valx2->v.val_double.low == valy2->v.val_double.low
		 && valx2->v.val_double.high == valy2->v.val_double.high;
	case dw_val_class_wide_int:
	  return *valx2->v.val_wide == *valy2->v.val_wide;
	default:
	  gcc_unreachable ();
	}
    case DW_OP_regval_type:
    case DW_OP_deref_type:
    case DW_OP_GNU_regval_type:
    case DW_OP_GNU_deref_type:
      /* An integer (register number or deref size) plus a base-type DIE.  */
      return valx1->v.val_int == valy1->v.val_int
	     && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
    case DW_OP_convert:
    case DW_OP_reinterpret:
    case DW_OP_GNU_convert:
    case DW_OP_GNU_reinterpret:
      /* The operand is either an unsigned constant (conversion to the
	 generic type) or a base-type DIE reference.  */
      if (valx1->val_class != valy1->val_class)
	return false;
      if (valx1->val_class == dw_val_class_unsigned_const)
	return valx1->v.val_unsigned == valy1->v.val_unsigned;
      return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
    case DW_OP_GNU_parameter_ref:
      /* A reference to a formal parameter DIE.  */
      return valx1->val_class == dw_val_class_die_ref
	     && valx1->val_class == valy1->val_class
	     && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
    default:
      /* Other codes have no operands.  */
      return true;
    }
}
31938 | |
31939 | /* Return true if DWARF location expressions X and Y are the same. */ |
31940 | |
31941 | static inline bool |
31942 | compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y) |
31943 | { |
31944 | for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next) |
31945 | if (x->dw_loc_opc != y->dw_loc_opc |
31946 | || x->dtprel != y->dtprel |
31947 | || !compare_loc_operands (x, y)) |
31948 | break; |
31949 | return x == NULL && y == NULL; |
31950 | } |
31951 | |
/* Hashtable helpers for sharing identical location lists.  Pointers are
   not owned by the table (nofree), and the hash is precomputed by
   hash_loc_list.  */

struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
{
  /* Return the cached hash of a location list.  */
  static inline hashval_t hash (const dw_loc_list_struct *);
  /* Deep structural equality of two location lists.  */
  static inline bool equal (const dw_loc_list_struct *,
			    const dw_loc_list_struct *);
};
31960 | |
31961 | /* Return precomputed hash of location list X. */ |
31962 | |
31963 | inline hashval_t |
31964 | loc_list_hasher::hash (const dw_loc_list_struct *x) |
31965 | { |
31966 | return x->hash; |
31967 | } |
31968 | |
31969 | /* Return true if location lists A and B are the same. */ |
31970 | |
31971 | inline bool |
31972 | loc_list_hasher::equal (const dw_loc_list_struct *a, |
31973 | const dw_loc_list_struct *b) |
31974 | { |
31975 | if (a == b) |
31976 | return true; |
31977 | if (a->hash != b->hash) |
31978 | return false; |
31979 | for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next) |
31980 | if (strcmp (s1: a->begin, s2: b->begin) != 0 |
31981 | || strcmp (s1: a->end, s2: b->end) != 0 |
31982 | || (a->section == NULL) != (b->section == NULL) |
31983 | || (a->section && strcmp (s1: a->section, s2: b->section) != 0) |
31984 | || a->vbegin != b->vbegin || a->vend != b->vend |
31985 | || !compare_locs (x: a->expr, y: b->expr)) |
31986 | break; |
31987 | return a == NULL && b == NULL; |
31988 | } |
31989 | |
31990 | typedef hash_table<loc_list_hasher> loc_list_hash_type; |
31991 | |
31992 | |
/* Recursively optimize location lists referenced from DIE
   children and share them whenever possible.  Equivalent lists are
   deduplicated through HTAB; the first occurrence becomes the canonical
   copy and later duplicates are redirected to it.  */

static void
optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
{
  dw_die_ref c;
  dw_attr_node *a;
  unsigned ix;
  dw_loc_list_struct **slot;
  /* Set when a list loses its view-list symbol below, meaning this DIE's
     DW_AT_GNU_locviews attribute (if any) must be removed too.  */
  bool drop_locviews = false;
  /* Set when this DIE carries a DW_AT_GNU_locviews attribute.  */
  bool has_locviews = false;

  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    if (AT_class (a) == dw_val_class_loc_list)
      {
	dw_loc_list_ref list = AT_loc_list (a);
	/* TODO: perform some optimizations here, before hashing
	   it and storing into the hash table.  */
	hash_loc_list (list_head: list);
	slot = htab->find_slot_with_hash (comparable: list, hash: list->hash, insert: INSERT);
	if (*slot == NULL)
	  {
	    /* First occurrence: this list becomes the canonical copy.  */
	    *slot = list;
	    if (loc_list_has_views (list))
	      gcc_assert (list->vl_symbol);
	    else if (list->vl_symbol)
	      {
		/* The list has a view-list symbol but no nonzero views;
		   drop the symbol (and the locviews attribute below).  */
		drop_locviews = true;
		list->vl_symbol = NULL;
	      }
	  }
	else
	  {
	    /* Duplicate: share the canonical copy.  If ours had views
	       and the canonical one does not, the views go away.  */
	    if (list->vl_symbol && !(*slot)->vl_symbol)
	      drop_locviews = true;
	    a->dw_attr_val.v.val_loc_list = *slot;
	  }
      }
    else if (AT_class (a) == dw_val_class_view_list)
      {
	gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
	has_locviews = true;
      }


  if (drop_locviews && has_locviews)
    remove_AT (die, attr_kind: DW_AT_GNU_locviews);

  FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
}
32044 | |
32045 | |
32046 | /* Recursively assign each location list a unique index into the debug_addr |
32047 | section. */ |
32048 | |
32049 | static void |
32050 | index_location_lists (dw_die_ref die) |
32051 | { |
32052 | dw_die_ref c; |
32053 | dw_attr_node *a; |
32054 | unsigned ix; |
32055 | |
32056 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
32057 | if (AT_class (a) == dw_val_class_loc_list) |
32058 | { |
32059 | dw_loc_list_ref list = AT_loc_list (a); |
32060 | dw_loc_list_ref curr; |
32061 | for (curr = list; curr != NULL; curr = curr->dw_loc_next) |
32062 | { |
32063 | /* Don't index an entry that has already been indexed |
32064 | or won't be output. Make sure skip_loc_list_entry doesn't |
32065 | call size_of_locs, because that might cause circular dependency, |
32066 | index_location_lists requiring address table indexes to be |
32067 | computed, but adding new indexes through add_addr_table_entry |
32068 | and address table index computation requiring no new additions |
32069 | to the hash table. In the rare case of DWARF[234] >= 64KB |
32070 | location expression, we'll just waste unused address table entry |
32071 | for it. */ |
32072 | if (curr->begin_entry != NULL || skip_loc_list_entry (curr)) |
32073 | continue; |
32074 | |
32075 | curr->begin_entry |
32076 | = add_addr_table_entry (addr: xstrdup (curr->begin), kind: ate_kind_label); |
32077 | if (dwarf_version >= 5 && !HAVE_AS_LEB128) |
32078 | curr->end_entry |
32079 | = add_addr_table_entry (addr: xstrdup (curr->end), kind: ate_kind_label); |
32080 | } |
32081 | } |
32082 | |
32083 | FOR_EACH_CHILD (die, c, index_location_lists (c)); |
32084 | } |
32085 | |
32086 | /* Optimize location lists referenced from DIE |
32087 | children and share them whenever possible. */ |
32088 | |
32089 | static void |
32090 | optimize_location_lists (dw_die_ref die) |
32091 | { |
32092 | loc_list_hash_type htab (500); |
32093 | optimize_location_lists_1 (die, htab: &htab); |
32094 | } |
32095 | |
32096 | /* Traverse the limbo die list, and add parent/child links. The only |
32097 | dies without parents that should be here are concrete instances of |
32098 | inline functions, and the comp_unit_die. We can ignore the comp_unit_die. |
32099 | For concrete instances, we can get the parent die from the abstract |
32100 | instance. */ |
32101 | |
32102 | static void |
32103 | flush_limbo_die_list (void) |
32104 | { |
32105 | limbo_die_node *node; |
32106 | |
32107 | /* get_context_die calls force_decl_die, which can put new DIEs on the |
32108 | limbo list in LTO mode when nested functions are put in a different |
32109 | partition than that of their parent function. */ |
32110 | while ((node = limbo_die_list)) |
32111 | { |
32112 | dw_die_ref die = node->die; |
32113 | limbo_die_list = node->next; |
32114 | |
32115 | if (die->die_parent == NULL) |
32116 | { |
32117 | dw_die_ref origin = get_AT_ref (die, attr_kind: DW_AT_abstract_origin); |
32118 | |
32119 | if (origin && origin->die_parent) |
32120 | add_child_die (die: origin->die_parent, child_die: die); |
32121 | else if (is_cu_die (c: die)) |
32122 | ; |
32123 | else if (seen_error ()) |
32124 | /* It's OK to be confused by errors in the input. */ |
32125 | add_child_die (die: comp_unit_die (), child_die: die); |
32126 | else |
32127 | { |
32128 | /* In certain situations, the lexical block containing a |
32129 | nested function can be optimized away, which results |
32130 | in the nested function die being orphaned. Likewise |
32131 | with the return type of that nested function. Force |
32132 | this to be a child of the containing function. |
32133 | |
32134 | It may happen that even the containing function got fully |
32135 | inlined and optimized out. In that case we are lost and |
32136 | assign the empty child. This should not be big issue as |
32137 | the function is likely unreachable too. */ |
32138 | gcc_assert (node->created_for); |
32139 | |
32140 | if (DECL_P (node->created_for)) |
32141 | origin = get_context_die (DECL_CONTEXT (node->created_for)); |
32142 | else if (TYPE_P (node->created_for)) |
32143 | origin = scope_die_for (t: node->created_for, context_die: comp_unit_die ()); |
32144 | else |
32145 | origin = comp_unit_die (); |
32146 | |
32147 | add_child_die (die: origin, child_die: die); |
32148 | } |
32149 | } |
32150 | } |
32151 | } |
32152 | |
32153 | /* Reset DIEs so we can output them again. */ |
32154 | |
32155 | static void |
32156 | reset_dies (dw_die_ref die) |
32157 | { |
32158 | dw_die_ref c; |
32159 | |
32160 | /* Remove stuff we re-generate. */ |
32161 | die->die_mark = 0; |
32162 | die->die_offset = 0; |
32163 | die->die_abbrev = 0; |
32164 | remove_AT (die, attr_kind: DW_AT_sibling); |
32165 | |
32166 | FOR_EACH_CHILD (die, c, reset_dies (c)); |
32167 | } |
32168 | |
32169 | /* reset_indirect_string removed the references coming from DW_AT_name |
32170 | and DW_AT_comp_dir attributes on compilation unit DIEs. Readd them as |
32171 | .debug_line_str strings again. */ |
32172 | |
32173 | static void |
32174 | adjust_name_comp_dir (dw_die_ref die) |
32175 | { |
32176 | for (int i = 0; i < 2; i++) |
32177 | { |
32178 | dwarf_attribute attr_kind = i ? DW_AT_comp_dir : DW_AT_name; |
32179 | dw_attr_node *a = get_AT (die, attr_kind); |
32180 | if (a == NULL || a->dw_attr_val.val_class != dw_val_class_str) |
32181 | continue; |
32182 | |
32183 | if (!debug_line_str_hash) |
32184 | debug_line_str_hash |
32185 | = hash_table<indirect_string_hasher>::create_ggc (n: 10); |
32186 | |
32187 | struct indirect_string_node *node |
32188 | = find_AT_string_in_table (str: a->dw_attr_val.v.val_str->str, |
32189 | table: debug_line_str_hash); |
32190 | set_indirect_string (node); |
32191 | node->form = DW_FORM_line_strp; |
32192 | a->dw_attr_val.v.val_str = node; |
32193 | } |
32194 | } |
32195 | |
32196 | /* Output stuff that dwarf requires at the end of every file, |
32197 | and generate the DWARF-2 debugging info. */ |
32198 | |
32199 | static void |
32200 | dwarf2out_finish (const char *filename) |
32201 | { |
32202 | comdat_type_node *ctnode; |
32203 | dw_die_ref main_comp_unit_die; |
32204 | unsigned char checksum[16]; |
32205 | char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES]; |
32206 | |
32207 | /* Generate CTF/BTF debug info. */ |
32208 | if ((ctf_debug_info_level > CTFINFO_LEVEL_NONE |
32209 | || btf_debuginfo_p ()) && lang_GNU_C ()) |
32210 | ctf_debug_finish (filename); |
32211 | |
32212 | /* Skip emitting DWARF if not required. */ |
32213 | if (!dwarf_debuginfo_p ()) |
32214 | return; |
32215 | |
32216 | /* Flush out any latecomers to the limbo party. */ |
32217 | flush_limbo_die_list (); |
32218 | |
32219 | if (inline_entry_data_table) |
32220 | gcc_assert (inline_entry_data_table->is_empty ()); |
32221 | |
32222 | if (flag_checking) |
32223 | { |
32224 | verify_die (die: comp_unit_die ()); |
32225 | for (limbo_die_node *node = cu_die_list; node; node = node->next) |
32226 | verify_die (die: node->die); |
32227 | } |
32228 | |
32229 | /* We shouldn't have any symbols with delayed asm names for |
32230 | DIEs generated after early finish. */ |
32231 | gcc_assert (deferred_asm_name == NULL); |
32232 | |
32233 | gen_remaining_tmpl_value_param_die_attribute (); |
32234 | |
32235 | if (flag_generate_lto || flag_generate_offload) |
32236 | { |
32237 | gcc_assert (flag_fat_lto_objects || flag_generate_offload); |
32238 | |
32239 | /* Prune stuff so that dwarf2out_finish runs successfully |
32240 | for the fat part of the object. */ |
32241 | reset_dies (die: comp_unit_die ()); |
32242 | for (limbo_die_node *node = cu_die_list; node; node = node->next) |
32243 | reset_dies (die: node->die); |
32244 | |
32245 | hash_table<comdat_type_hasher> comdat_type_table (100); |
32246 | for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next) |
32247 | { |
32248 | comdat_type_node **slot |
32249 | = comdat_type_table.find_slot (value: ctnode, insert: INSERT); |
32250 | |
32251 | /* Don't reset types twice. */ |
32252 | if (*slot != HTAB_EMPTY_ENTRY) |
32253 | continue; |
32254 | |
32255 | /* Remove the pointer to the line table. */ |
32256 | remove_AT (die: ctnode->root_die, attr_kind: DW_AT_stmt_list); |
32257 | |
32258 | if (debug_info_level >= DINFO_LEVEL_TERSE) |
32259 | reset_dies (die: ctnode->root_die); |
32260 | |
32261 | *slot = ctnode; |
32262 | } |
32263 | |
32264 | /* Reset die CU symbol so we don't output it twice. */ |
32265 | comp_unit_die ()->die_id.die_symbol = NULL; |
32266 | |
32267 | /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */ |
32268 | remove_AT (die: comp_unit_die (), attr_kind: DW_AT_stmt_list); |
32269 | if (have_macinfo) |
32270 | remove_AT (die: comp_unit_die (), DEBUG_MACRO_ATTRIBUTE); |
32271 | |
32272 | /* Remove indirect string decisions. */ |
32273 | debug_str_hash->traverse<void *, reset_indirect_string> (NULL); |
32274 | if (debug_line_str_hash) |
32275 | { |
32276 | debug_line_str_hash->traverse<void *, reset_indirect_string> (NULL); |
32277 | debug_line_str_hash = NULL; |
32278 | if (asm_outputs_debug_line_str ()) |
32279 | { |
32280 | adjust_name_comp_dir (die: comp_unit_die ()); |
32281 | for (limbo_die_node *node = cu_die_list; node; node = node->next) |
32282 | adjust_name_comp_dir (die: node->die); |
32283 | } |
32284 | } |
32285 | } |
32286 | |
32287 | #if ENABLE_ASSERT_CHECKING |
32288 | { |
32289 | dw_die_ref die = comp_unit_die (), c; |
32290 | FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark)); |
32291 | } |
32292 | #endif |
32293 | base_types.truncate (size: 0); |
32294 | for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next) |
32295 | resolve_addr (die: ctnode->root_die); |
32296 | resolve_addr (die: comp_unit_die ()); |
32297 | move_marked_base_types (); |
32298 | |
32299 | if (dump_file) |
32300 | { |
32301 | fprintf (stream: dump_file, format: "DWARF for %s\n" , filename); |
32302 | print_die (die: comp_unit_die (), outfile: dump_file); |
32303 | } |
32304 | |
32305 | /* Initialize sections and labels used for actual assembler output. */ |
32306 | unsigned generation = init_sections_and_labels (early_lto_debug: false); |
32307 | |
32308 | /* Traverse the DIE's and add sibling attributes to those DIE's that |
32309 | have children. */ |
32310 | add_sibling_attributes (die: comp_unit_die ()); |
32311 | limbo_die_node *node; |
32312 | for (node = cu_die_list; node; node = node->next) |
32313 | add_sibling_attributes (die: node->die); |
32314 | for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next) |
32315 | add_sibling_attributes (die: ctnode->root_die); |
32316 | |
32317 | /* When splitting DWARF info, we put some attributes in the |
32318 | skeleton compile_unit DIE that remains in the .o, while |
32319 | most attributes go in the DWO compile_unit_die. */ |
32320 | if (dwarf_split_debug_info) |
32321 | { |
32322 | limbo_die_node *cu; |
32323 | main_comp_unit_die = gen_compile_unit_die (NULL); |
32324 | if (dwarf_version >= 5) |
32325 | main_comp_unit_die->die_tag = DW_TAG_skeleton_unit; |
32326 | cu = limbo_die_list; |
32327 | gcc_assert (cu->die == main_comp_unit_die); |
32328 | limbo_die_list = limbo_die_list->next; |
32329 | cu->next = cu_die_list; |
32330 | cu_die_list = cu; |
32331 | } |
32332 | else |
32333 | main_comp_unit_die = comp_unit_die (); |
32334 | |
32335 | /* Output a terminator label for the .text section. */ |
32336 | switch_to_section (text_section); |
32337 | targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0); |
32338 | if (cold_text_section) |
32339 | { |
32340 | switch_to_section (cold_text_section); |
32341 | targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0); |
32342 | } |
32343 | |
32344 | /* We can only use the low/high_pc attributes if all of the code was |
32345 | in .text. */ |
32346 | if ((!have_multiple_function_sections |
32347 | && vec_safe_length (v: switch_text_ranges) < 2) |
32348 | || (dwarf_version < 3 && dwarf_strict)) |
32349 | { |
32350 | const char *end_label = text_end_label; |
32351 | if (vec_safe_length (v: switch_text_ranges) == 1) |
32352 | end_label = (*switch_text_ranges)[0]; |
32353 | /* Don't add if the CU has no associated code. */ |
32354 | if (switch_text_ranges) |
32355 | add_AT_low_high_pc (die: main_comp_unit_die, lbl_low: text_section_label, |
32356 | lbl_high: end_label, force_direct: true); |
32357 | } |
32358 | else |
32359 | { |
32360 | unsigned fde_idx; |
32361 | dw_fde_ref fde; |
32362 | bool range_list_added = false; |
32363 | if (switch_text_ranges) |
32364 | { |
32365 | const char *prev_loc = text_section_label; |
32366 | const char *loc; |
32367 | unsigned idx; |
32368 | |
32369 | FOR_EACH_VEC_ELT (*switch_text_ranges, idx, loc) |
32370 | if (prev_loc) |
32371 | { |
32372 | add_ranges_by_labels (die: main_comp_unit_die, begin: prev_loc, |
32373 | end: loc, added: &range_list_added, force_direct: true); |
32374 | prev_loc = NULL; |
32375 | } |
32376 | else |
32377 | prev_loc = loc; |
32378 | |
32379 | if (prev_loc) |
32380 | add_ranges_by_labels (die: main_comp_unit_die, begin: prev_loc, |
32381 | end: text_end_label, added: &range_list_added, force_direct: true); |
32382 | } |
32383 | |
32384 | if (switch_cold_ranges) |
32385 | { |
32386 | const char *prev_loc = cold_text_section_label; |
32387 | const char *loc; |
32388 | unsigned idx; |
32389 | |
32390 | FOR_EACH_VEC_ELT (*switch_cold_ranges, idx, loc) |
32391 | if (prev_loc) |
32392 | { |
32393 | add_ranges_by_labels (die: main_comp_unit_die, begin: prev_loc, |
32394 | end: loc, added: &range_list_added, force_direct: true); |
32395 | prev_loc = NULL; |
32396 | } |
32397 | else |
32398 | prev_loc = loc; |
32399 | |
32400 | if (prev_loc) |
32401 | add_ranges_by_labels (die: main_comp_unit_die, begin: prev_loc, |
32402 | end: cold_end_label, added: &range_list_added, force_direct: true); |
32403 | } |
32404 | |
32405 | FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde) |
32406 | { |
32407 | if (fde->ignored_debug) |
32408 | continue; |
32409 | if (!fde->in_std_section) |
32410 | add_ranges_by_labels (die: main_comp_unit_die, begin: fde->dw_fde_begin, |
32411 | end: fde->dw_fde_end, added: &range_list_added, |
32412 | force_direct: true); |
32413 | if (fde->dw_fde_second_begin && !fde->second_in_std_section) |
32414 | add_ranges_by_labels (die: main_comp_unit_die, begin: fde->dw_fde_second_begin, |
32415 | end: fde->dw_fde_second_end, added: &range_list_added, |
32416 | force_direct: true); |
32417 | } |
32418 | |
32419 | if (range_list_added) |
32420 | { |
32421 | /* We need to give .debug_loc and .debug_ranges an appropriate |
32422 | "base address". Use zero so that these addresses become |
32423 | absolute. Historically, we've emitted the unexpected |
32424 | DW_AT_entry_pc instead of DW_AT_low_pc for this purpose. |
32425 | Emit both to give time for other tools to adapt. */ |
32426 | add_AT_addr (die: main_comp_unit_die, attr_kind: DW_AT_low_pc, const0_rtx, force_direct: true); |
32427 | if (! dwarf_strict && dwarf_version < 4) |
32428 | add_AT_addr (die: main_comp_unit_die, attr_kind: DW_AT_entry_pc, const0_rtx, force_direct: true); |
32429 | |
32430 | add_ranges (NULL); |
32431 | have_multiple_function_sections = true; |
32432 | } |
32433 | } |
32434 | |
32435 | /* AIX Assembler inserts the length, so adjust the reference to match the |
32436 | offset expected by debuggers. */ |
32437 | strcpy (dest: dl_section_ref, src: debug_line_section_label); |
32438 | if (XCOFF_DEBUGGING_INFO) |
32439 | strcat (dest: dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR); |
32440 | |
32441 | if (debug_info_level >= DINFO_LEVEL_TERSE) |
32442 | add_AT_lineptr (die: main_comp_unit_die, attr_kind: DW_AT_stmt_list, |
32443 | label: dl_section_ref); |
32444 | |
32445 | if (have_macinfo) |
32446 | add_AT_macptr (die: comp_unit_die (), DEBUG_MACRO_ATTRIBUTE, |
32447 | label: macinfo_section_label); |
32448 | |
32449 | if (dwarf_split_debug_info) |
32450 | { |
32451 | if (have_location_lists) |
32452 | { |
32453 | /* Since we generate the loclists in the split DWARF .dwo |
32454 | file itself, we don't need to generate a loclists_base |
32455 | attribute for the split compile unit DIE. That attribute |
32456 | (and using relocatable sec_offset FORMs) isn't allowed |
32457 | for a split compile unit. Only if the .debug_loclists |
32458 | section was in the main file, would we need to generate a |
32459 | loclists_base attribute here (for the full or skeleton |
32460 | unit DIE). */ |
32461 | |
32462 | /* optimize_location_lists calculates the size of the lists, |
32463 | so index them first, and assign indices to the entries. |
32464 | Although optimize_location_lists will remove entries from |
32465 | the table, it only does so for duplicates, and therefore |
32466 | only reduces ref_counts to 1. */ |
32467 | index_location_lists (die: comp_unit_die ()); |
32468 | } |
32469 | |
32470 | if (dwarf_version >= 5 && !vec_safe_is_empty (v: ranges_table)) |
32471 | index_rnglists (); |
32472 | |
32473 | if (addr_index_table != NULL) |
32474 | { |
32475 | unsigned int index = 0; |
32476 | addr_index_table |
32477 | ->traverse_noresize<unsigned int *, index_addr_table_entry> |
32478 | (argument: &index); |
32479 | } |
32480 | } |
32481 | |
32482 | loc_list_idx = 0; |
32483 | if (have_location_lists) |
32484 | { |
32485 | optimize_location_lists (die: comp_unit_die ()); |
32486 | /* And finally assign indexes to the entries for -gsplit-dwarf. */ |
32487 | if (dwarf_version >= 5 && dwarf_split_debug_info) |
32488 | assign_location_list_indexes (die: comp_unit_die ()); |
32489 | } |
32490 | |
32491 | save_macinfo_strings (); |
32492 | |
32493 | if (dwarf_split_debug_info) |
32494 | { |
32495 | unsigned int index = 0; |
32496 | |
32497 | /* Add attributes common to skeleton compile_units and |
32498 | type_units. Because these attributes include strings, it |
32499 | must be done before freezing the string table. Top-level |
32500 | skeleton die attrs are added when the skeleton type unit is |
32501 | created, so ensure it is created by this point. */ |
32502 | add_top_level_skeleton_die_attrs (die: main_comp_unit_die); |
32503 | debug_str_hash->traverse_noresize<unsigned int *, index_string> (argument: &index); |
32504 | } |
32505 | |
32506 | /* Output all of the compilation units. We put the main one last so that |
32507 | the offsets are available to output_pubnames. */ |
32508 | for (node = cu_die_list; node; node = node->next) |
32509 | output_comp_unit (die: node->die, output_if_empty: 0, NULL); |
32510 | |
32511 | hash_table<comdat_type_hasher> comdat_type_table (100); |
32512 | for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next) |
32513 | { |
32514 | comdat_type_node **slot = comdat_type_table.find_slot (value: ctnode, insert: INSERT); |
32515 | |
32516 | /* Don't output duplicate types. */ |
32517 | if (*slot != HTAB_EMPTY_ENTRY) |
32518 | continue; |
32519 | |
32520 | /* Add a pointer to the line table for the main compilation unit |
32521 | so that the debugger can make sense of DW_AT_decl_file |
32522 | attributes. */ |
32523 | if (debug_info_level >= DINFO_LEVEL_TERSE) |
32524 | add_AT_lineptr (die: ctnode->root_die, attr_kind: DW_AT_stmt_list, |
32525 | label: (!dwarf_split_debug_info |
32526 | ? dl_section_ref |
32527 | : debug_skeleton_line_section_label)); |
32528 | |
32529 | output_comdat_type_unit (node: ctnode, early_lto_debug: false); |
32530 | *slot = ctnode; |
32531 | } |
32532 | |
32533 | if (dwarf_split_debug_info) |
32534 | { |
32535 | int mark; |
32536 | struct md5_ctx ctx; |
32537 | |
32538 | /* Compute a checksum of the comp_unit to use as the dwo_id. */ |
32539 | md5_init_ctx (ctx: &ctx); |
32540 | mark = 0; |
32541 | die_checksum (die: comp_unit_die (), ctx: &ctx, mark: &mark); |
32542 | unmark_all_dies (die: comp_unit_die ()); |
32543 | md5_finish_ctx (ctx: &ctx, resbuf: checksum); |
32544 | |
32545 | if (dwarf_version < 5) |
32546 | { |
32547 | /* Use the first 8 bytes of the checksum as the dwo_id, |
32548 | and add it to both comp-unit DIEs. */ |
32549 | add_AT_data8 (die: main_comp_unit_die, attr_kind: DW_AT_GNU_dwo_id, data8: checksum); |
32550 | add_AT_data8 (die: comp_unit_die (), attr_kind: DW_AT_GNU_dwo_id, data8: checksum); |
32551 | } |
32552 | |
32553 | /* Add the base offset of the ranges table to the skeleton |
32554 | comp-unit DIE. */ |
32555 | if (!vec_safe_is_empty (v: ranges_table)) |
32556 | { |
32557 | if (dwarf_version < 5) |
32558 | add_AT_lineptr (die: main_comp_unit_die, attr_kind: DW_AT_GNU_ranges_base, |
32559 | label: ranges_section_label); |
32560 | } |
32561 | |
32562 | output_addr_table (); |
32563 | } |
32564 | |
32565 | /* Output the main compilation unit if non-empty or if .debug_macinfo |
32566 | or .debug_macro will be emitted. */ |
32567 | output_comp_unit (die: comp_unit_die (), have_macinfo, |
32568 | dwarf_split_debug_info ? checksum : NULL); |
32569 | |
32570 | if (dwarf_split_debug_info && info_section_emitted) |
32571 | output_skeleton_debug_sections (comp_unit: main_comp_unit_die, dwo_id: checksum); |
32572 | |
32573 | /* Output the abbreviation table. */ |
32574 | if (vec_safe_length (v: abbrev_die_table) != 1) |
32575 | { |
32576 | switch_to_section (debug_abbrev_section); |
32577 | ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label); |
32578 | output_abbrev_section (); |
32579 | } |
32580 | |
32581 | /* Output location list section if necessary. */ |
32582 | if (have_location_lists) |
32583 | { |
32584 | char l1[MAX_ARTIFICIAL_LABEL_BYTES]; |
32585 | char l2[MAX_ARTIFICIAL_LABEL_BYTES]; |
32586 | /* Output the location lists info. */ |
32587 | switch_to_section (debug_loc_section); |
32588 | if (dwarf_version >= 5) |
32589 | { |
32590 | ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 2); |
32591 | ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 3); |
32592 | if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4) |
32593 | dw2_asm_output_data (4, 0xffffffff, |
32594 | "Initial length escape value indicating " |
32595 | "64-bit DWARF extension" ); |
32596 | dw2_asm_output_delta (dwarf_offset_size, l2, l1, |
32597 | "Length of Location Lists" ); |
32598 | ASM_OUTPUT_LABEL (asm_out_file, l1); |
32599 | output_dwarf_version (); |
32600 | dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size" ); |
32601 | dw2_asm_output_data (1, 0, "Segment Size" ); |
32602 | dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0, |
32603 | "Offset Entry Count" ); |
32604 | } |
32605 | ASM_OUTPUT_LABEL (asm_out_file, loc_section_label); |
32606 | if (dwarf_version >= 5 && dwarf_split_debug_info) |
32607 | { |
32608 | unsigned int save_loc_list_idx = loc_list_idx; |
32609 | loc_list_idx = 0; |
32610 | output_loclists_offsets (die: comp_unit_die ()); |
32611 | gcc_assert (save_loc_list_idx == loc_list_idx); |
32612 | } |
32613 | output_location_lists (die: comp_unit_die ()); |
32614 | if (dwarf_version >= 5) |
32615 | ASM_OUTPUT_LABEL (asm_out_file, l2); |
32616 | } |
32617 | |
32618 | output_pubtables (); |
32619 | |
32620 | /* Output the address range information if a CU (.debug_info section) |
32621 | was emitted. We output an empty table even if we had no functions |
32622 | to put in it. This because the consumer has no way to tell the |
32623 | difference between an empty table that we omitted and failure to |
32624 | generate a table that would have contained data. */ |
32625 | if (info_section_emitted) |
32626 | { |
32627 | switch_to_section (debug_aranges_section); |
32628 | output_aranges (); |
32629 | } |
32630 | |
32631 | /* Output ranges section if necessary. */ |
32632 | if (!vec_safe_is_empty (v: ranges_table)) |
32633 | { |
32634 | if (dwarf_version >= 5) |
32635 | { |
32636 | if (dwarf_split_debug_info) |
32637 | { |
32638 | /* We don't know right now whether there are any |
32639 | ranges for .debug_rnglists and any for .debug_rnglists.dwo. |
32640 | Depending on into which of those two belongs the first |
32641 | ranges_table entry, emit that section first and that |
32642 | output_rnglists call will return true if the other kind of |
32643 | ranges needs to be emitted as well. */ |
32644 | bool dwo = (*ranges_table)[0].idx != DW_RANGES_IDX_SKELETON; |
32645 | if (output_rnglists (generation, dwo)) |
32646 | output_rnglists (generation, dwo: !dwo); |
32647 | } |
32648 | else |
32649 | output_rnglists (generation, dwo: false); |
32650 | } |
32651 | else |
32652 | output_ranges (); |
32653 | } |
32654 | |
32655 | /* Have to end the macro section. */ |
32656 | if (have_macinfo) |
32657 | { |
32658 | switch_to_section (debug_macinfo_section); |
32659 | ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label); |
32660 | output_macinfo (debug_line_label: !dwarf_split_debug_info ? debug_line_section_label |
32661 | : debug_skeleton_line_section_label, early_lto_debug: false); |
32662 | dw2_asm_output_data (1, 0, "End compilation unit" ); |
32663 | } |
32664 | |
32665 | /* Output the source line correspondence table. We must do this |
32666 | even if there is no line information. Otherwise, on an empty |
32667 | translation unit, we will generate a present, but empty, |
32668 | .debug_info section. IRIX 6.5 `nm' will then complain when |
32669 | examining the file. This is done late so that any filenames |
32670 | used by the debug_info section are marked as 'used'. */ |
32671 | switch_to_section (debug_line_section); |
32672 | ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label); |
32673 | if (! output_asm_line_debug_info ()) |
32674 | output_line_info (prologue_only: false); |
32675 | |
32676 | if (dwarf_split_debug_info && info_section_emitted) |
32677 | { |
32678 | switch_to_section (debug_skeleton_line_section); |
32679 | ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label); |
32680 | output_line_info (prologue_only: true); |
32681 | } |
32682 | |
32683 | /* If we emitted any indirect strings, output the string table too. */ |
32684 | if (debug_str_hash || skeleton_debug_str_hash) |
32685 | output_indirect_strings (); |
32686 | if (debug_line_str_hash) |
32687 | { |
32688 | switch_to_section (debug_line_str_section); |
32689 | const enum dwarf_form form = DW_FORM_line_strp; |
32690 | debug_line_str_hash->traverse<enum dwarf_form, |
32691 | output_indirect_string> (argument: form); |
32692 | } |
32693 | |
32694 | /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */ |
32695 | symview_upper_bound = 0; |
32696 | if (zero_view_p) |
32697 | bitmap_clear (zero_view_p); |
32698 | } |
32699 | |
32700 | /* Returns a hash value for X (which really is a variable_value_struct). */ |
32701 | |
32702 | inline hashval_t |
32703 | variable_value_hasher::hash (variable_value_struct *x) |
32704 | { |
32705 | return (hashval_t) x->decl_id; |
32706 | } |
32707 | |
32708 | /* Return true if decl_id of variable_value_struct X is the same as |
32709 | UID of decl Y. */ |
32710 | |
32711 | inline bool |
32712 | variable_value_hasher::equal (variable_value_struct *x, tree y) |
32713 | { |
32714 | return x->decl_id == DECL_UID (y); |
32715 | } |
32716 | |
32717 | /* Helper function for resolve_variable_value, handle |
32718 | DW_OP_GNU_variable_value in one location expression. |
32719 | Return true if exprloc has been changed into loclist. */ |
32720 | |
static bool
resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
{
  dw_loc_descr_ref next;
  /* Walk the singly linked location expression; PREV is tracked so a
     resolved DW_OP_GNU_variable_value operation can be spliced out.  */
  for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
    {
      next = loc->dw_loc_next;
      if (loc->dw_loc_opc != DW_OP_GNU_variable_value
	  || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
	continue;

      /* Only resolve values of temporaries belonging to the current
	 function here; others are noted for later (note_variable_value).  */
      tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
      if (DECL_CONTEXT (decl) != current_function_decl)
	continue;

      /* If the decl already has a DIE, just turn the decl reference into
	 a DIE reference and keep the DW_OP_GNU_variable_value opcode.  */
      dw_die_ref ref = lookup_decl_die (decl);
      if (ref)
	{
	  loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
	  loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
	  loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
	  continue;
	}
      /* Otherwise compute the decl's location directly.  */
      dw_loc_list_ref l = loc_list_from_tree (loc: decl, want_address: 0, NULL);
      if (l == NULL)
	continue;
      if (l->dw_loc_next)
	{
	  /* The decl's location is a list; an exprloc can only absorb it
	     by becoming a location list itself, which only certain
	     attributes permit.  */
	  if (AT_class (a) != dw_val_class_loc)
	    continue;
	  switch (a->dw_attr)
	    {
	    /* Following attributes allow both exprloc and loclist
	       classes, so we can change them into a loclist.  */
	    case DW_AT_location:
	    case DW_AT_string_length:
	    case DW_AT_return_addr:
	    case DW_AT_data_member_location:
	    case DW_AT_frame_base:
	    case DW_AT_segment:
	    case DW_AT_static_link:
	    case DW_AT_use_location:
	    case DW_AT_vtable_elem_location:
	      /* Prefix each list element with the ops before LOC and
		 append the ops after it, then install L as the
		 attribute's value.  */
	      if (prev)
		{
		  prev->dw_loc_next = NULL;
		  prepend_loc_descr_to_each (list: l, ref: AT_loc (a));
		}
	      if (next)
		add_loc_descr_to_each (list: l, ref: next);
	      a->dw_attr_val.val_class = dw_val_class_loc_list;
	      a->dw_attr_val.val_entry = NULL;
	      a->dw_attr_val.v.val_loc_list = l;
	      have_location_lists = true;
	      return true;
	    /* Following attributes allow both exprloc and reference,
	       so if the whole expression is DW_OP_GNU_variable_value alone
	       we could transform it into reference.  */
	    case DW_AT_byte_size:
	    case DW_AT_bit_size:
	    case DW_AT_lower_bound:
	    case DW_AT_upper_bound:
	    case DW_AT_bit_stride:
	    case DW_AT_count:
	    case DW_AT_allocated:
	    case DW_AT_associated:
	    case DW_AT_byte_stride:
	      if (prev == NULL && next == NULL)
		break;
	      /* FALLTHRU */
	    default:
	      if (dwarf_strict)
		continue;
	      break;
	    }
	  /* Create DW_TAG_variable that we can refer to.  */
	  gen_decl_die (decl, NULL_TREE, NULL,
			context_die: lookup_decl_die (decl: current_function_decl));
	  ref = lookup_decl_die (decl);
	  if (ref)
	    {
	      loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
	      loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
	      loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
	    }
	  continue;
	}
      /* Single-expression location: splice L's expression directly in
	 place of the DW_OP_GNU_variable_value operation.  */
      if (prev)
	{
	  prev->dw_loc_next = l->expr;
	  add_loc_descr (list_head: &prev->dw_loc_next, descr: next);
	  free_loc_descr (loc, NULL);
	  next = prev->dw_loc_next;
	}
      else
	{
	  memcpy (dest: loc, src: l->expr, n: sizeof (dw_loc_descr_node));
	  add_loc_descr (list_head: &loc, descr: next);
	  next = loc;
	}
      /* Step back so the loop increment rescans the spliced-in
	 expression (NEXT now points at its first op).  */
      loc = prev;
    }
  return false;
}
32825 | |
32826 | /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */ |
32827 | |
32828 | static void |
32829 | resolve_variable_value (dw_die_ref die) |
32830 | { |
32831 | dw_attr_node *a; |
32832 | dw_loc_list_ref loc; |
32833 | unsigned ix; |
32834 | |
32835 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
32836 | switch (AT_class (a)) |
32837 | { |
32838 | case dw_val_class_loc: |
32839 | if (!resolve_variable_value_in_expr (a, loc: AT_loc (a))) |
32840 | break; |
32841 | /* FALLTHRU */ |
32842 | case dw_val_class_loc_list: |
32843 | loc = AT_loc_list (a); |
32844 | gcc_assert (loc); |
32845 | for (; loc; loc = loc->dw_loc_next) |
32846 | resolve_variable_value_in_expr (a, loc: loc->expr); |
32847 | break; |
32848 | default: |
32849 | break; |
32850 | } |
32851 | } |
32852 | |
/* Attempt to optimize DW_OP_GNU_variable_value referring to
   temporaries in the current function.  */
32855 | |
32856 | static void |
32857 | resolve_variable_values (void) |
32858 | { |
32859 | if (!variable_value_hash || !current_function_decl) |
32860 | return; |
32861 | |
32862 | struct variable_value_struct *node |
32863 | = variable_value_hash->find_with_hash (comparable: current_function_decl, |
32864 | DECL_UID (current_function_decl)); |
32865 | |
32866 | if (node == NULL) |
32867 | return; |
32868 | |
32869 | unsigned int i; |
32870 | dw_die_ref die; |
32871 | FOR_EACH_VEC_SAFE_ELT (node->dies, i, die) |
32872 | resolve_variable_value (die); |
32873 | } |
32874 | |
32875 | /* Helper function for note_variable_value, handle one location |
32876 | expression. */ |
32877 | |
static void
note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
{
  for (; loc; loc = loc->dw_loc_next)
    if (loc->dw_loc_opc == DW_OP_GNU_variable_value
	&& loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
      {
	tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
	dw_die_ref ref = lookup_decl_die (decl);
	if (! ref && (flag_generate_lto || flag_generate_offload))
	  {
	    /* ??? This is somewhat a hack because we do not create DIEs
	       for variables not in BLOCK trees early but when generating
	       early LTO output we need the dw_val_class_decl_ref to be
	       fully resolved.  For fat LTO objects we'd also like to
	       undo this after LTO dwarf output.  */
	    gcc_assert (DECL_CONTEXT (decl));
	    dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
	    gcc_assert (ctx != NULL);
	    gen_decl_die (decl, NULL_TREE, NULL, context_die: ctx);
	    ref = lookup_decl_die (decl);
	    gcc_assert (ref != NULL);
	  }
	/* If the decl already has a DIE, resolve the reference to it
	   right away and keep the DW_OP_GNU_variable_value opcode.  */
	if (ref)
	  {
	    loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
	    loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
	    loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
	    continue;
	  }
	/* Otherwise, for a variable whose containing function has a DIE,
	   record DIE in a per-function table so resolve_variable_values
	   can retry while that function is current.  */
	if (VAR_P (decl)
	    && DECL_CONTEXT (decl)
	    && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
	    && lookup_decl_die (DECL_CONTEXT (decl)))
	  {
	    if (!variable_value_hash)
	      variable_value_hash
		= hash_table<variable_value_hasher>::create_ggc (n: 10);

	    tree fndecl = DECL_CONTEXT (decl);
	    struct variable_value_struct *node;
	    struct variable_value_struct **slot
	      = variable_value_hash->find_slot_with_hash (comparable: fndecl,
							  DECL_UID (fndecl),
							  insert: INSERT);
	    if (*slot == NULL)
	      {
		/* First entry for this function: allocate its node.  */
		node = ggc_cleared_alloc<variable_value_struct> ();
		node->decl_id = DECL_UID (fndecl);
		*slot = node;
	      }
	    else
	      node = *slot;

	    vec_safe_push (v&: node->dies, obj: die);
	  }
      }
}
32936 | |
32937 | /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still |
32938 | with dw_val_class_decl_ref operand. */ |
32939 | |
32940 | static void |
32941 | note_variable_value (dw_die_ref die) |
32942 | { |
32943 | dw_die_ref c; |
32944 | dw_attr_node *a; |
32945 | dw_loc_list_ref loc; |
32946 | unsigned ix; |
32947 | |
32948 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
32949 | switch (AT_class (a)) |
32950 | { |
32951 | case dw_val_class_loc_list: |
32952 | loc = AT_loc_list (a); |
32953 | gcc_assert (loc); |
32954 | if (!loc->noted_variable_value) |
32955 | { |
32956 | loc->noted_variable_value = 1; |
32957 | for (; loc; loc = loc->dw_loc_next) |
32958 | note_variable_value_in_expr (die, loc: loc->expr); |
32959 | } |
32960 | break; |
32961 | case dw_val_class_loc: |
32962 | note_variable_value_in_expr (die, loc: AT_loc (a)); |
32963 | break; |
32964 | default: |
32965 | break; |
32966 | } |
32967 | |
32968 | /* Mark children. */ |
32969 | FOR_EACH_CHILD (die, c, note_variable_value (c)); |
32970 | } |
32971 | |
32972 | /* Process DWARF dies for CTF generation. */ |
32973 | |
32974 | static void |
32975 | ctf_debug_do_cu (dw_die_ref die) |
32976 | { |
32977 | dw_die_ref c; |
32978 | |
32979 | if (!ctf_do_die (die)) |
32980 | return; |
32981 | |
32982 | FOR_EACH_CHILD (die, c, ctf_do_die (c)); |
32983 | } |
32984 | |
32985 | /* Perform any cleanups needed after the early debug generation pass |
32986 | has run. */ |
32987 | |
static void
dwarf2out_early_finish (const char *filename)
{
  comdat_type_node *ctnode;
  /* Mark that we are within the early debug phase for the duration of
     this function.  */
  set_early_dwarf s;
  char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];

  /* PCH might result in DW_AT_producer string being restored from the
     header compilation, so always fill it with empty string initially
     and overwrite only here.  */
  dw_attr_node *producer = get_AT (die: comp_unit_die (), attr_kind: DW_AT_producer);

  if (dwarf_record_gcc_switches)
    producer_string = gen_producer_string (language_string: lang_hooks.name,
					   options: save_decoded_options,
					   options_count: save_decoded_options_count);
  else
    producer_string = concat (lang_hooks.name, " " , version_string, NULL);

  /* Drop the reference to the placeholder string and install the real
     producer string computed above.  */
  producer->dw_attr_val.v.val_str->refcount--;
  producer->dw_attr_val.v.val_str = find_AT_string (str: producer_string);

  /* Add the name for the main input file now.  We delayed this from
     dwarf2out_init to avoid complications with PCH.  */
  add_filename_attribute (die: comp_unit_die (), name_string: remap_debug_filename (filename));
  add_comp_dir_attribute (die: comp_unit_die ());

  /* With LTO early dwarf was really finished at compile-time, so make
     sure to adjust the phase after annotating the LTRANS CU DIE.  */
  if (in_lto_p)
    {
      early_dwarf_finished = true;
      if (dump_file)
	{
	  fprintf (stream: dump_file, format: "LTO EARLY DWARF for %s\n" , filename);
	  print_die (die: comp_unit_die (), outfile: dump_file);
	}
      return;
    }

  /* Walk through the list of incomplete types again, trying once more to
     emit full debugging info for them.  */
  retry_incomplete_types ();

  gen_scheduled_generic_parms_dies ();
  gen_remaining_tmpl_value_param_die_attribute ();

  /* The point here is to flush out the limbo list so that it is empty
     and we don't need to stream it for LTO.  */
  flush_limbo_die_list ();

  /* Add DW_AT_linkage_name for all deferred DIEs.  */
  for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
    {
      tree decl = node->created_for;
      if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
	  /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
	     ended up in deferred_asm_name before we knew it was
	     constant and never written to disk.  */
	  && DECL_ASSEMBLER_NAME (decl))
	{
	  add_linkage_attr (die: node->die, decl);
	  move_linkage_attr (die: node->die);
	}
    }
  deferred_asm_name = NULL;

  if (flag_eliminate_unused_debug_types)
    prune_unused_types ();

  /* Generate separate COMDAT sections for type DIEs.  */
  if (use_debug_types)
    {
      break_out_comdat_types (die: comp_unit_die ());

      /* Each new type_unit DIE was added to the limbo die list when created.
	 Since these have all been added to comdat_type_list, clear the
	 limbo die list.  */
      limbo_die_list = NULL;

      /* For each new comdat type unit, copy declarations for incomplete
	 types to make the new unit self-contained (i.e., no direct
	 references to the main compile unit).  */
      for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
	copy_decls_for_unworthy_types (unit: ctnode->root_die);
      copy_decls_for_unworthy_types (unit: comp_unit_die ());

      /* In the process of copying declarations from one unit to another,
	 we may have left some declarations behind that are no longer
	 referenced.  Prune them.  */
      prune_unused_types ();
    }

  /* Traverse the DIE's and note DIEs with DW_OP_GNU_variable_value still
     with dw_val_class_decl_ref operand.  */
  note_variable_value (die: comp_unit_die ());
  for (limbo_die_node *node = cu_die_list; node; node = node->next)
    note_variable_value (die: node->die);
  for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
    note_variable_value (die: ctnode->root_die);
  for (limbo_die_node *node = limbo_die_list; node; node = node->next)
    note_variable_value (die: node->die);

  /* The AT_pubnames attribute needs to go in all skeleton dies, including
     both the main_cu and all skeleton TUs.  Making this call unconditional
     would end up either adding a second copy of the AT_pubnames attribute, or
     requiring a special case in add_top_level_skeleton_die_attrs.  */
  if (!dwarf_split_debug_info)
    add_AT_pubnames (die: comp_unit_die ());

  /* The early debug phase is now finished.  */
  early_dwarf_finished = true;
  if (dump_file)
    {
      fprintf (stream: dump_file, format: "EARLY DWARF for %s\n" , filename);
      print_die (die: comp_unit_die (), outfile: dump_file);
    }

  /* Generate CTF/BTF debug info.  */
  if ((ctf_debug_info_level > CTFINFO_LEVEL_NONE
       || btf_debuginfo_p ()) && lang_GNU_C ())
    {
      ctf_debug_init ();
      ctf_debug_do_cu (die: comp_unit_die ());
      for (limbo_die_node *node = limbo_die_list; node; node = node->next)
	ctf_debug_do_cu (die: node->die);
      /* Post process the debug data in the CTF container if necessary.  */
      ctf_debug_init_postprocess (btf_debuginfo_p ());

      ctf_debug_early_finish (filename);
    }

  /* Do not generate DWARF assembler now when not producing LTO bytecode.  */
  if ((!flag_generate_lto && !flag_generate_offload)
      /* FIXME: Disable debug info generation for (PE-)COFF targets since the
	 copy_lto_debug_sections operation of the simple object support in
	 libiberty is not implemented for them yet.  */
      || TARGET_PECOFF || TARGET_COFF)
    return;

  /* Now as we are going to output for LTO initialize sections and labels
     to the LTO variants.  We don't need a random-seed postfix as other
     LTO sections as linking the LTO debug sections into one in a partial
     link is fine.  */
  init_sections_and_labels (early_lto_debug: true);

  /* The output below is modeled after dwarf2out_finish with all
     location related output removed and some LTO specific changes.
     Some refactoring might make both smaller and easier to match up.  */

  base_types.truncate (size: 0);
  for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
    mark_base_types (die: ctnode->root_die);
  mark_base_types (die: comp_unit_die ());
  move_marked_base_types ();

  /* Traverse the DIE's and add sibling attributes to those DIE's
     that have children.  */
  add_sibling_attributes (die: comp_unit_die ());
  for (limbo_die_node *node = limbo_die_list; node; node = node->next)
    add_sibling_attributes (die: node->die);
  for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
    add_sibling_attributes (die: ctnode->root_die);

  /* AIX Assembler inserts the length, so adjust the reference to match the
     offset expected by debuggers.  */
  strcpy (dest: dl_section_ref, src: debug_line_section_label);
  if (XCOFF_DEBUGGING_INFO)
    strcat (dest: dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);

  if (debug_info_level >= DINFO_LEVEL_TERSE)
    add_AT_lineptr (die: comp_unit_die (), attr_kind: DW_AT_stmt_list, label: dl_section_ref);

  if (have_macinfo)
    add_AT_macptr (die: comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
		   label: macinfo_section_label);

  save_macinfo_strings ();

  if (dwarf_split_debug_info)
    {
      unsigned int index = 0;
      /* Assign string table indexes before the string table is frozen.  */
      debug_str_hash->traverse_noresize<unsigned int *, index_string> (argument: &index);
    }

  /* Output all of the compilation units.  We put the main one last so that
     the offsets are available to output_pubnames.  */
  for (limbo_die_node *node = limbo_die_list; node; node = node->next)
    output_comp_unit (die: node->die, output_if_empty: 0, NULL);

  hash_table<comdat_type_hasher> comdat_type_table (100);
  for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
    {
      comdat_type_node **slot = comdat_type_table.find_slot (value: ctnode, insert: INSERT);

      /* Don't output duplicate types.  */
      if (*slot != HTAB_EMPTY_ENTRY)
	continue;

      /* Add a pointer to the line table for the main compilation unit
	 so that the debugger can make sense of DW_AT_decl_file
	 attributes.  */
      if (debug_info_level >= DINFO_LEVEL_TERSE)
	add_AT_lineptr (die: ctnode->root_die, attr_kind: DW_AT_stmt_list,
			label: (!dwarf_split_debug_info
			       ? debug_line_section_label
			       : debug_skeleton_line_section_label));

      output_comdat_type_unit (node: ctnode, early_lto_debug: true);
      *slot = ctnode;
    }

  /* Stick a unique symbol to the main debuginfo section.  */
  compute_comp_unit_symbol (unit_die: comp_unit_die ());

  /* Output the main compilation unit.  We always need it if only for
     the CU symbol.  */
  output_comp_unit (die: comp_unit_die (), output_if_empty: true, NULL);

  /* Output the abbreviation table.  */
  if (vec_safe_length (v: abbrev_die_table) != 1)
    {
      switch_to_section (debug_abbrev_section);
      ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
      output_abbrev_section ();
    }

  /* Have to end the macro section.  */
  if (have_macinfo)
    {
      /* We have to save macinfo state if we need to output it again
	 for the FAT part of the object.  */
      vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
      if (flag_fat_lto_objects)
	macinfo_table = macinfo_table->copy ();

      switch_to_section (debug_macinfo_section);
      ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
      output_macinfo (debug_line_label: debug_line_section_label, early_lto_debug: true);
      dw2_asm_output_data (1, 0, "End compilation unit" );

      if (flag_fat_lto_objects)
	{
	  vec_free (v&: macinfo_table);
	  macinfo_table = saved_macinfo_table;
	}
    }

  /* Emit a skeleton debug_line section.  */
  switch_to_section (debug_line_section);
  ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
  output_line_info (prologue_only: true);

  /* If we emitted any indirect strings, output the string table too.  */
  if (debug_str_hash || skeleton_debug_str_hash)
    output_indirect_strings ();
  if (debug_line_str_hash)
    {
      switch_to_section (debug_line_str_section);
      const enum dwarf_form form = DW_FORM_line_strp;
      debug_line_str_hash->traverse<enum dwarf_form,
				    output_indirect_string> (argument: form);
    }

  /* Switch back to the text section.  */
  switch_to_section (text_section);
}
33255 | |
33256 | /* Reset all state within dwarf2out.cc so that we can rerun the compiler |
33257 | within the same process. For use by toplev::finalize. */ |
33258 | |
void
dwarf2out_cc_finalize (void)
{
  /* Variable-location and insn tracking state.  */
  last_var_location_insn = NULL;
  cached_next_real_insn = NULL;
  used_rtx_array = NULL;
  incomplete_types = NULL;

  /* Cached section handles; they will be recreated lazily.  */
  debug_info_section = NULL;
  debug_skeleton_info_section = NULL;
  debug_abbrev_section = NULL;
  debug_skeleton_abbrev_section = NULL;
  debug_aranges_section = NULL;
  debug_addr_section = NULL;
  debug_macinfo_section = NULL;
  debug_line_section = NULL;
  debug_skeleton_line_section = NULL;
  debug_loc_section = NULL;
  debug_pubnames_section = NULL;
  debug_pubtypes_section = NULL;
  debug_str_section = NULL;
  debug_line_str_section = NULL;
  debug_str_dwo_section = NULL;
  debug_str_offsets_section = NULL;
  debug_ranges_section = NULL;
  debug_ranges_dwo_section = NULL;
  debug_frame_section = NULL;
  fde_vec = NULL;

  /* String tables and text-section bookkeeping.  */
  debug_str_hash = NULL;
  debug_line_str_hash = NULL;
  skeleton_debug_str_hash = NULL;
  dw2_string_counter = 0;
  have_multiple_function_sections = false;
  in_text_section_p = false;
  cold_text_section = NULL;
  last_text_label = NULL;
  last_cold_label = NULL;
  switch_text_ranges = NULL;
  switch_cold_ranges = NULL;
  current_unit_personality = NULL;

  /* Early-debug phase flags.  */
  early_dwarf = false;
  early_dwarf_finished = false;

  /* DIE tables and per-unit state.  */
  next_die_offset = 0;
  single_comp_unit_die = NULL;
  comdat_type_list = NULL;
  limbo_die_list = NULL;
  file_table = NULL;
  decl_die_table = NULL;
  common_block_die_table = NULL;
  decl_loc_table = NULL;
  call_arg_locations = NULL;
  call_arg_loc_last = NULL;
  call_site_count = -1;
  tail_call_site_count = -1;
  cached_dw_loc_list_table = NULL;
  abbrev_die_table = NULL;
  /* This map is heap-allocated, so release it before dropping the
     pointer.  */
  delete dwarf_proc_stack_usage_map;
  dwarf_proc_stack_usage_map = NULL;

  /* Line-table and miscellaneous output state.  */
  line_info_label_num = 0;
  cur_line_info_table = NULL;
  text_section_line_info = NULL;
  cold_text_section_line_info = NULL;
  separate_line_info = NULL;
  info_section_emitted = false;
  pubname_table = NULL;
  pubtype_table = NULL;
  macinfo_table = NULL;
  ranges_table = NULL;
  ranges_by_label = NULL;
  rnglist_idx = 0;
  have_location_lists = false;
  loclabel_num = 0;
  poc_label_num = 0;
  last_emitted_file = NULL;
  label_num = 0;
  tmpl_value_parm_die_table = NULL;
  generic_type_instances = NULL;
  frame_pointer_fb_offset = 0;
  frame_pointer_fb_offset_valid = false;
  base_types.release ();
  XDELETEVEC (producer_string);
  producer_string = NULL;
  output_line_info_generation = 0;
  init_sections_and_labels_generation = 0;
}
33345 | |
33346 | #include "gt-dwarf2out.h" |
33347 | |