| 1 | /* Output Dwarf2 format symbol table information from GCC. |
| 2 | Copyright (C) 1992-2026 Free Software Foundation, Inc. |
| 3 | Contributed by Gary Funck (gary@intrepid.com). |
| 4 | Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com). |
| 5 | Extensively modified by Jason Merrill (jason@cygnus.com). |
| 6 | |
| 7 | This file is part of GCC. |
| 8 | |
| 9 | GCC is free software; you can redistribute it and/or modify it under |
| 10 | the terms of the GNU General Public License as published by the Free |
| 11 | Software Foundation; either version 3, or (at your option) any later |
| 12 | version. |
| 13 | |
| 14 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
| 15 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
| 16 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
| 17 | for more details. |
| 18 | |
| 19 | You should have received a copy of the GNU General Public License |
| 20 | along with GCC; see the file COPYING3. If not see |
| 21 | <http://www.gnu.org/licenses/>. */ |
| 22 | |
| 23 | /* TODO: Emit .debug_line header even when there are no functions, since |
| 24 | the file numbers are used by .debug_info. Alternately, leave |
| 25 | out locations for types and decls. |
| 26 | Avoid talking about ctors and op= for PODs. |
| 27 | Factor out common prologue sequences into multiple CIEs. */ |
| 28 | |
| 29 | /* The first part of this file deals with the DWARF 2 frame unwind |
| 30 | information, which is also used by the GCC efficient exception handling |
| 31 | mechanism. The second part, controlled only by an #ifdef |
| 32 | DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging |
| 33 | information. */ |
| 34 | |
| 35 | /* DWARF2 Abbreviation Glossary: |
| 36 | |
| 37 | CFA = Canonical Frame Address |
| 38 | a fixed address on the stack which identifies a call frame. |
| 39 | We define it to be the value of SP just before the call insn. |
| 40 | The CFA register and offset, which may change during the course |
| 41 | of the function, are used to calculate its value at runtime. |
| 42 | |
| 43 | CFI = Call Frame Instruction |
| 44 | an instruction for the DWARF2 abstract machine |
| 45 | |
| 46 | CIE = Common Information Entry |
| 47 | information describing information common to one or more FDEs |
| 48 | |
| 49 | DIE = Debugging Information Entry |
| 50 | |
| 51 | FDE = Frame Description Entry |
| 52 | information describing the stack call frame, in particular, |
| 53 | how to restore registers |
| 54 | |
| 55 | DW_CFA_... = DWARF2 CFA call frame instruction |
| 56 | DW_TAG_... = DWARF2 DIE tag */ |
| 57 | |
| 58 | #include "config.h" |
| 59 | #include "system.h" |
| 60 | #include "coretypes.h" |
| 61 | #include "target.h" |
| 62 | #include "function.h" |
| 63 | #include "rtl.h" |
| 64 | #include "tree.h" |
| 65 | #include "memmodel.h" |
| 66 | #include "tm_p.h" |
| 67 | #include "stringpool.h" |
| 68 | #include "insn-config.h" |
| 69 | #include "ira.h" |
| 70 | #include "cgraph.h" |
| 71 | #include "diagnostic.h" |
| 72 | #include "fold-const.h" |
| 73 | #include "stor-layout.h" |
| 74 | #include "varasm.h" |
| 75 | #include "version.h" |
| 76 | #include "flags.h" |
| 77 | #include "rtlhash.h" |
| 78 | #include "reload.h" |
| 79 | #include "output.h" |
| 80 | #include "expr.h" |
| 81 | #include "dwarf2out.h" |
| 82 | #include "dwarf2ctf.h" |
| 83 | #include "dwarf2codeview.h" |
| 84 | #include "dwarf2asm.h" |
| 85 | #include "toplev.h" |
| 86 | #include "md5.h" |
| 87 | #include "tree-pretty-print.h" |
| 88 | #include "print-rtl.h" |
| 89 | #include "debug.h" |
| 90 | #include "common/common-target.h" |
| 91 | #include "langhooks.h" |
| 92 | #include "lra.h" |
| 93 | #include "dumpfile.h" |
| 94 | #include "opts.h" |
| 95 | #include "tree-dfa.h" |
| 96 | #include "gdb/gdb-index.h" |
| 97 | #include "rtl-iter.h" |
| 98 | #include "stringpool.h" |
| 99 | #include "attribs.h" |
| 100 | #include "file-prefix-map.h" /* remap_debug_filename() */ |
| 101 | |
/* Forward declarations and file-scope state used before their
   definitions later in this file.  */
static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
				   int, bool);
/* Last insn for which a variable-location note was processed —
   presumably used to avoid redundant location output; the users are
   outside this chunk (TODO confirm).  */
static rtx_insn *last_var_location_insn;
/* Cache of the next real insn, to avoid rescanning the insn chain.  */
static rtx_insn *cached_next_real_insn;
static void dwarf2out_decl (tree);
static bool is_redundant_typedef (const_tree);
| 108 | |
/* Default the XCOFF-related configuration macros to 0 ("off") when the
   target headers have not defined them.  */
#ifndef XCOFF_DEBUGGING_INFO
#define XCOFF_DEBUGGING_INFO 0
#endif

/* BUG FIX: the original directive was "#define 0" — the macro name
   HAVE_XCOFF_DWARF_EXTRAS was missing, which is a preprocessor error.  */
#ifndef HAVE_XCOFF_DWARF_EXTRAS
#define HAVE_XCOFF_DWARF_EXTRAS 0
#endif
| 116 | |
#ifdef VMS_DEBUGGING_INFO
/* Gather file statistics for VMS debug output — presumably implemented
   in the VMS-specific debug backend (TODO confirm).  */
int vms_file_stats_name (const char *, long long *, long *, char *, int *);

/* Define this macro to be a nonzero value if the directory specifications
   which are output in the debug info should end with a separator.  */
#define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
/* Define this macro to evaluate to a nonzero value if GCC should refrain
   from generating indirect strings in DWARF2 debug information, for instance
   if your target is stuck with an old version of GDB that is unable to
   process them properly or uses VMS Debug.  */
#define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
#else
/* Non-VMS defaults: no trailing directory separator, and indirect
   strings are supported.  */
#define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
#define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
#endif
| 132 | |
/* ??? Poison these here until it can be done generically.  They've been
   totally replaced in this file; make sure it stays that way.  */
#undef DWARF2_UNWIND_INFO
#undef DWARF2_FRAME_INFO
#if (GCC_VERSION >= 3000)
#pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
#endif

/* The size of the target's pointer type, in bytes.  */
#ifndef PTR_SIZE
#define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
#endif

/* Array of RTXes referenced by the debugging information, which therefore
   must be kept around forever (the GTY marker tells the garbage
   collector about the vector).  */
static GTY(()) vec<rtx, va_gc> *used_rtx_array;

/* A pointer to the base of a list of incomplete types which might be
   completed at some later time.  incomplete_types_list needs to be a
   vec<tree, va_gc> *because we want to tell the garbage collector about
   it.  */
static GTY(()) vec<tree, va_gc> *incomplete_types;
| 155 | |
/* Pointers to various DWARF2 sections, created lazily elsewhere in the
   file.  */
static GTY(()) section *debug_info_section;
static GTY(()) section *debug_skeleton_info_section;
static GTY(()) section *debug_abbrev_section;
static GTY(()) section *debug_skeleton_abbrev_section;
static GTY(()) section *debug_aranges_section;
static GTY(()) section *debug_addr_section;
static GTY(()) section *debug_macinfo_section;
/* Name of the macro-info section in use (.debug_macinfo vs .debug_macro
   — presumably chosen per DWARF version; TODO confirm against users).  */
static const char *debug_macinfo_section_name;
/* Base value for macro-info internal labels.  */
static unsigned macinfo_label_base = 1;
static GTY(()) section *debug_line_section;
static GTY(()) section *debug_skeleton_line_section;
static GTY(()) section *debug_loc_section;
static GTY(()) section *debug_pubnames_section;
static GTY(()) section *debug_pubtypes_section;
static GTY(()) section *debug_str_section;
static GTY(()) section *debug_line_str_section;
static GTY(()) section *debug_str_dwo_section;
static GTY(()) section *debug_str_offsets_section;
static GTY(()) section *debug_ranges_section;
static GTY(()) section *debug_ranges_dwo_section;
static GTY(()) section *debug_frame_section;

/* Maximum size (in bytes) of an artificially generated label.  */
#define MAX_ARTIFICIAL_LABEL_BYTES	40
| 181 | |
/* According to the (draft) DWARF 3 specification, the initial length
   should either be 4 or 12 bytes.  When it's 12 bytes, the first 4
   bytes are 0xffffffff, followed by the length stored in the next 8
   bytes.

   However, the SGI/MIPS ABI uses an initial length which is equal to
   dwarf_offset_size.  It is defined (elsewhere) accordingly.  */

#ifndef DWARF_INITIAL_LENGTH_SIZE
#define DWARF_INITIAL_LENGTH_SIZE (dwarf_offset_size == 4 ? 4 : 12)
#endif

/* String form of the same size, for use in assembler expressions.  */
#ifndef DWARF_INITIAL_LENGTH_SIZE_STR
#define DWARF_INITIAL_LENGTH_SIZE_STR (dwarf_offset_size == 4 ? "-4" : "-12")
#endif

/* Round SIZE up to the nearest BOUNDARY.  */
#define DWARF_ROUND(SIZE,BOUNDARY) \
  ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))

/* CIE identifier: 32-bit or 64-bit depending on the DWARF offset size,
   when the host can represent the 64-bit value.  */
#if HOST_BITS_PER_WIDE_INT >= 64
#define DWARF_CIE_ID \
  (unsigned HOST_WIDE_INT) (dwarf_offset_size == 4 ? DW_CIE_ID : DW64_CIE_ID)
#else
#define DWARF_CIE_ID DW_CIE_ID
#endif


/* A vector for a table that contains frame description
   information for each routine.  */
/* Sentinel index values — presumably marking entries that get no index
   or have not been assigned one yet (TODO confirm against users).  */
#define NOT_INDEXED (-1U)
#define NO_INDEX_ASSIGNED (-2U)

static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
| 217 | |
/* An entry in the debug-string hash tables below.  */
struct GTY((for_user)) indirect_string_node {
  const char *str;		/* The string itself.  */
  unsigned int refcount;	/* Number of references to the string.  */
  enum dwarf_form form;		/* DW_FORM_* chosen for emitting it.  */
  char *label;			/* Label used when emitted indirectly.  */
  unsigned int index;		/* Index for indexed string forms —
				   presumably DW_FORM_strx/GNU_str_index
				   (TODO confirm).  */
};
| 225 | |
/* Hash-table traits for indirect_string_node: nodes are hashed and
   compared by their string contents.  */
struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
{
  typedef const char *compare_type;

  static hashval_t hash (indirect_string_node *);
  static bool equal (indirect_string_node *, const char *);
};
| 233 | |
/* Table of strings destined for the .debug_str section.  */
static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;

/* Table of strings destined for the .debug_line_str section.  */
static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;

/* With split_debug_info, both the comp_dir and dwo_name go in the
   main object file, rather than the dwo, similar to the force_direct
   parameter elsewhere but with additional complications:

   1) The string is needed in both the main object file and the dwo.
   That is, the comp_dir and dwo_name will appear in both places.

   2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
   DW_FORM_line_strp or DW_FORM_strx/GNU_str_index.

   3) GCC chooses the form to use late, depending on the size and
   reference count.

   Rather than forcing the all debug string handling functions and
   callers to deal with these complications, simply use a separate,
   special-cased string table for any attribute that should go in the
   main object file.  This limits the complexity to just the places
   that need it.  */

static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;

/* Counter used to generate unique labels for indirect strings.  */
static GTY(()) int dw2_string_counter;
| 260 | |
/* True if the compilation unit places functions in more than one section.  */
static GTY(()) bool have_multiple_function_sections = false;

/* The default cold text section.  */
static GTY(()) section *cold_text_section;

/* True if currently in text section.  */
static GTY(()) bool in_text_section_p = false;

/* Last debug-on location in corresponding section.  */
static GTY(()) const char *last_text_label;
static GTY(()) const char *last_cold_label;

/* Mark debug-on/off locations per section.
   NULL means the section is not used at all.  */
static GTY(()) vec<const char *, va_gc> *switch_text_ranges;
static GTY(()) vec<const char *, va_gc> *switch_cold_ranges;

/* The DIE for C++14 'auto' in a function return type.  */
static GTY(()) dw_die_ref auto_die;

/* The DIE for C++14 'decltype(auto)' in a function return type.  */
static GTY(()) dw_die_ref decltype_auto_die;

/* Forward declarations for functions defined in this file.  */

static void output_call_frame_info (int);

/* Personality decl of current unit.  Used only when assembler does not support
   personality CFI.  */
static GTY(()) rtx current_unit_personality;

/* Whether an eh_frame section is required.  */
static GTY(()) bool do_eh_frame = false;

/* .debug_rnglists next index.  */
static unsigned int rnglist_idx;

/* Data and reference forms for relocatable data: pick the 4- or 8-byte
   variant to match the DWARF offset size.  */
#define DW_FORM_data (dwarf_offset_size == 8 ? DW_FORM_data8 : DW_FORM_data4)
#define DW_FORM_ref (dwarf_offset_size == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
| 302 | |
/* Section and internal-label name prefixes, overridable by the target.  */
#ifndef DEBUG_FRAME_SECTION
#define DEBUG_FRAME_SECTION	".debug_frame"
#endif

#ifndef FUNC_BEGIN_LABEL
#define FUNC_BEGIN_LABEL	"LFB"
#endif

#ifndef FUNC_SECOND_SECT_LABEL
#define FUNC_SECOND_SECT_LABEL	"LFSB"
#endif

#ifndef FUNC_END_LABEL
#define FUNC_END_LABEL		"LFE"
#endif

#ifndef PROLOGUE_END_LABEL
#define PROLOGUE_END_LABEL	"LPE"
#endif

#ifndef EPILOGUE_BEGIN_LABEL
#define EPILOGUE_BEGIN_LABEL	"LEB"
#endif

#ifndef FRAME_BEGIN_LABEL
#define FRAME_BEGIN_LABEL	"Lframe"
#endif
/* Labels bracketing CIEs, FDEs, and line-number program pieces.  */
#define CIE_AFTER_SIZE_LABEL	"LSCIE"
#define CIE_END_LABEL		"LECIE"
#define FDE_LABEL		"LSFDE"
#define FDE_AFTER_SIZE_LABEL	"LASFDE"
#define FDE_END_LABEL		"LEFDE"
#define LINE_NUMBER_BEGIN_LABEL	"LSLT"
#define LINE_NUMBER_END_LABEL	"LELT"
#define LN_PROLOG_AS_LABEL	"LASLTP"
#define LN_PROLOG_END_LABEL	"LELTP"
#define DIE_LABEL_PREFIX	"DW"
| 340 | |
| 341 | /* Match the base name of a file to the base name of a compilation unit. */ |
| 342 | |
| 343 | static bool |
| 344 | matches_main_base (const char *path) |
| 345 | { |
| 346 | /* Cache the last query. */ |
| 347 | static const char *last_path = NULL; |
| 348 | static bool last_match = false; |
| 349 | if (path != last_path) |
| 350 | { |
| 351 | const char *base; |
| 352 | int length = base_of_path (path, base_out: &base); |
| 353 | last_path = path; |
| 354 | last_match = (length == main_input_baselength |
| 355 | && memcmp (s1: base, main_input_basename, n: length) == 0); |
| 356 | } |
| 357 | return last_match; |
| 358 | } |
| 359 | |
#ifdef DEBUG_DEBUG_STRUCT

/* Debug-only tracing helper: print to stderr one line describing a
   struct-debug-emission decision (criterion, header kind, genericity,
   usage, outcome, decl address, and name), then pass RESULT through
   unchanged so the call can be wrapped around a return expression.  */
static bool
dump_struct_debug (tree type, enum debug_info_usage usage,
		   enum debug_struct_file criterion, int generic,
		   bool matches, bool result)
{
  /* Find the type name.  */
  tree type_decl = TYPE_STUB_DECL (type);
  tree t = type_decl;
  const char *name = 0;
  if (TREE_CODE (t) == TYPE_DECL)
    t = DECL_NAME (t);
  if (t)
    name = IDENTIFIER_POINTER (t);

  fprintf (stderr, "	struct %d %s %s %s %s %d %p %s\n" ,
	   criterion,
	   DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr" ,
	   matches ? "bas" : "hdr" ,
	   generic ? "gen" : "ord" ,
	   usage == DINFO_USAGE_DFN ? ";" :
	     usage == DINFO_USAGE_DIR_USE ? "." : "*" ,
	   result,
	   (void*) type_decl, name);
  return result;
}
#define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
  dump_struct_debug (type, usage, criterion, generic, matches, result)

#else

/* Tracing disabled: the macro reduces to its RESULT argument.  */
#define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
  (result)

#endif
| 396 | |
| 397 | /* Get the number of HOST_WIDE_INTs needed to represent the precision |
| 398 | of the number. */ |
| 399 | |
| 400 | static unsigned int |
| 401 | get_full_len (const dw_wide_int &op) |
| 402 | { |
| 403 | return CEIL (op.get_precision (), HOST_BITS_PER_WIDE_INT); |
| 404 | } |
| 405 | |
| 406 | static bool |
| 407 | should_emit_struct_debug (tree type, enum debug_info_usage usage) |
| 408 | { |
| 409 | if (debug_info_level <= DINFO_LEVEL_TERSE) |
| 410 | return false; |
| 411 | |
| 412 | enum debug_struct_file criterion; |
| 413 | tree type_decl; |
| 414 | bool generic = lang_hooks.types.generic_p (type); |
| 415 | |
| 416 | if (generic) |
| 417 | criterion = debug_struct_generic[usage]; |
| 418 | else |
| 419 | criterion = debug_struct_ordinary[usage]; |
| 420 | |
| 421 | if (criterion == DINFO_STRUCT_FILE_NONE) |
| 422 | return DUMP_GSTRUCT (type, usage, criterion, generic, false, false); |
| 423 | if (criterion == DINFO_STRUCT_FILE_ANY) |
| 424 | return DUMP_GSTRUCT (type, usage, criterion, generic, false, true); |
| 425 | |
| 426 | type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type)); |
| 427 | |
| 428 | if (type_decl != NULL) |
| 429 | { |
| 430 | if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl)) |
| 431 | return DUMP_GSTRUCT (type, usage, criterion, generic, false, true); |
| 432 | |
| 433 | if (matches_main_base (DECL_SOURCE_FILE (type_decl))) |
| 434 | return DUMP_GSTRUCT (type, usage, criterion, generic, true, true); |
| 435 | } |
| 436 | |
| 437 | return DUMP_GSTRUCT (type, usage, criterion, generic, false, false); |
| 438 | } |
| 439 | |
/* Switch [BACK] to eh_frame_section.  If we don't have an eh_frame_section,
   switch to the data section instead, and write out a synthetic start label
   for collect2 the first time around.  */

static void
switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
{
  /* Lazily create/select the section on first use.  */
  if (eh_frame_section == 0)
    {
      int flags;

      if (EH_TABLES_CAN_BE_READ_ONLY)
	{
	  int fde_encoding;
	  int per_encoding;
	  int lsda_encoding;

	  fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
						       /*global=*/0);
	  per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
						       /*global=*/1);
	  lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
						       /*global=*/0);
	  /* The section can be read-only only if no encoding requires
	     absolute or aligned pointers under PIC (those need runtime
	     relocation, hence a writable section).  */
	  flags = ((! flag_pic
		    || ((fde_encoding & 0x70) != DW_EH_PE_absptr
			&& (fde_encoding & 0x70) != DW_EH_PE_aligned
			&& (per_encoding & 0x70) != DW_EH_PE_absptr
			&& (per_encoding & 0x70) != DW_EH_PE_aligned
			&& (lsda_encoding & 0x70) != DW_EH_PE_absptr
			&& (lsda_encoding & 0x70) != DW_EH_PE_aligned))
		   ? 0 : SECTION_WRITE);
	}
      else
	flags = SECTION_WRITE;

#ifdef EH_FRAME_SECTION_NAME
      eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
#else
      /* No dedicated section name: fall back to a data section matching
	 the required writability.  */
      eh_frame_section = ((flags == SECTION_WRITE)
			  ? data_section : readonly_data_section);
#endif /* EH_FRAME_SECTION_NAME */
    }

  switch_to_section (eh_frame_section);

#ifdef EH_FRAME_THROUGH_COLLECT2
  /* We have no special eh_frame section.  Emit special labels to guide
     collect2.  */
  if (!back)
    {
      tree label = get_file_function_name ("F");
      ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
      targetm.asm_out.globalize_label (asm_out_file,
				       IDENTIFIER_POINTER (label));
      ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
    }
#endif
}
| 498 | |
| 499 | /* Switch [BACK] to the eh or debug frame table section, depending on |
| 500 | FOR_EH. */ |
| 501 | |
| 502 | static void |
| 503 | switch_to_frame_table_section (int for_eh, bool back) |
| 504 | { |
| 505 | if (for_eh) |
| 506 | switch_to_eh_frame_section (back); |
| 507 | else |
| 508 | { |
| 509 | if (!debug_frame_section) |
| 510 | debug_frame_section = get_section (DEBUG_FRAME_SECTION, |
| 511 | SECTION_DEBUG, NULL); |
| 512 | switch_to_section (debug_frame_section); |
| 513 | } |
| 514 | } |
| 515 | |
/* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used.
   Returns the operand kind of the FIRST operand of call-frame
   instruction CFI, deferring to the target hook for any opcode not
   listed here.  */

enum dw_cfi_oprnd_type
dw_cfi_oprnd1_desc (dwarf_call_frame_info cfi)
{
  switch (cfi)
    {
    /* Instructions that take no first operand.  */
    case DW_CFA_nop:
    case DW_CFA_remember_state:
    case DW_CFA_restore_state:
      return dw_cfi_oprnd_unused;

    /* Instructions whose first operand is a code address or advance.  */
    case DW_CFA_set_loc:
    case DW_CFA_advance_loc1:
    case DW_CFA_advance_loc2:
    case DW_CFA_advance_loc4:
    case DW_CFA_MIPS_advance_loc8:
      return dw_cfi_oprnd_addr;

    /* Instructions whose first operand is a register number.  */
    case DW_CFA_offset:
    case DW_CFA_offset_extended:
    case DW_CFA_def_cfa:
    case DW_CFA_offset_extended_sf:
    case DW_CFA_def_cfa_sf:
    case DW_CFA_restore:
    case DW_CFA_restore_extended:
    case DW_CFA_undefined:
    case DW_CFA_same_value:
    case DW_CFA_def_cfa_register:
    case DW_CFA_register:
    case DW_CFA_expression:
    case DW_CFA_val_expression:
      return dw_cfi_oprnd_reg_num;

    /* Instructions whose first operand is an offset.  */
    case DW_CFA_def_cfa_offset:
    case DW_CFA_GNU_args_size:
    case DW_CFA_def_cfa_offset_sf:
      return dw_cfi_oprnd_offset;

    /* The CFA expression itself.  */
    case DW_CFA_def_cfa_expression:
      return dw_cfi_oprnd_loc;

    default:
      {
	/* Target-specific opcodes: ask the backend; anything else is a
	   programming error.  */
	dw_cfi_oprnd_type oprnd_type;
	if (targetm.dw_cfi_oprnd1_desc (cfi, oprnd_type))
	  return oprnd_type;
	else
	  gcc_unreachable ();
      }
    }
}
| 568 | |
| 569 | /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */ |
| 570 | |
| 571 | enum dw_cfi_oprnd_type |
| 572 | dw_cfi_oprnd2_desc (dwarf_call_frame_info cfi) |
| 573 | { |
| 574 | switch (cfi) |
| 575 | { |
| 576 | case DW_CFA_def_cfa: |
| 577 | case DW_CFA_def_cfa_sf: |
| 578 | case DW_CFA_offset: |
| 579 | case DW_CFA_offset_extended_sf: |
| 580 | case DW_CFA_offset_extended: |
| 581 | return dw_cfi_oprnd_offset; |
| 582 | |
| 583 | case DW_CFA_register: |
| 584 | return dw_cfi_oprnd_reg_num; |
| 585 | |
| 586 | case DW_CFA_expression: |
| 587 | case DW_CFA_val_expression: |
| 588 | return dw_cfi_oprnd_loc; |
| 589 | |
| 590 | case DW_CFA_def_cfa_expression: |
| 591 | return dw_cfi_oprnd_cfa_loc; |
| 592 | |
| 593 | default: |
| 594 | return dw_cfi_oprnd_unused; |
| 595 | } |
| 596 | } |
| 597 | |
| 598 | /* Output one FDE. */ |
| 599 | |
| 600 | static void |
| 601 | output_fde (dw_fde_ref fde, bool for_eh, bool second, |
| 602 | char *section_start_label, int fde_encoding, char *augmentation, |
| 603 | bool any_lsda_needed, int lsda_encoding) |
| 604 | { |
| 605 | const char *begin, *end; |
| 606 | static unsigned int j; |
| 607 | char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES]; |
| 608 | |
| 609 | targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh, |
| 610 | /* empty */ 0); |
| 611 | targetm.asm_out.internal_label (asm_out_file, FDE_LABEL, |
| 612 | for_eh + j); |
| 613 | ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j); |
| 614 | ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j); |
| 615 | if (!XCOFF_DEBUGGING_INFO || for_eh) |
| 616 | { |
| 617 | if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4 && !for_eh) |
| 618 | dw2_asm_output_data (4, 0xffffffff, "Initial length escape value" |
| 619 | " indicating 64-bit DWARF extension" ); |
| 620 | dw2_asm_output_delta (for_eh ? 4 : dwarf_offset_size, l2, l1, |
| 621 | "FDE Length" ); |
| 622 | } |
| 623 | ASM_OUTPUT_LABEL (asm_out_file, l1); |
| 624 | |
| 625 | if (for_eh) |
| 626 | dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset" ); |
| 627 | else |
| 628 | dw2_asm_output_offset (dwarf_offset_size, section_start_label, |
| 629 | debug_frame_section, "FDE CIE offset" ); |
| 630 | |
| 631 | begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin; |
| 632 | end = second ? fde->dw_fde_second_end : fde->dw_fde_end; |
| 633 | |
| 634 | if (for_eh) |
| 635 | { |
| 636 | rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin); |
| 637 | SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL; |
| 638 | dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false, |
| 639 | "FDE initial location" ); |
| 640 | dw2_asm_output_delta (size_of_encoded_value (fde_encoding), |
| 641 | end, begin, "FDE address range" ); |
| 642 | } |
| 643 | else |
| 644 | { |
| 645 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location" ); |
| 646 | dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range" ); |
| 647 | } |
| 648 | |
| 649 | if (augmentation[0]) |
| 650 | { |
| 651 | if (any_lsda_needed) |
| 652 | { |
| 653 | int size = size_of_encoded_value (lsda_encoding); |
| 654 | |
| 655 | if (lsda_encoding == DW_EH_PE_aligned) |
| 656 | { |
| 657 | int offset = ( 4 /* Length */ |
| 658 | + 4 /* CIE offset */ |
| 659 | + 2 * size_of_encoded_value (fde_encoding) |
| 660 | + 1 /* Augmentation size */ ); |
| 661 | int pad = -offset & (PTR_SIZE - 1); |
| 662 | |
| 663 | size += pad; |
| 664 | gcc_assert (size_of_uleb128 (size) == 1); |
| 665 | } |
| 666 | |
| 667 | dw2_asm_output_data_uleb128 (size, "Augmentation size" ); |
| 668 | |
| 669 | if (fde->uses_eh_lsda) |
| 670 | { |
| 671 | ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA" , |
| 672 | fde->funcdef_number); |
| 673 | dw2_asm_output_encoded_addr_rtx (lsda_encoding, |
| 674 | gen_rtx_SYMBOL_REF (Pmode, l1), |
| 675 | false, |
| 676 | "Language Specific Data Area" ); |
| 677 | } |
| 678 | else |
| 679 | { |
| 680 | if (lsda_encoding == DW_EH_PE_aligned) |
| 681 | ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE)); |
| 682 | dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0, |
| 683 | "Language Specific Data Area (none)" ); |
| 684 | } |
| 685 | } |
| 686 | else |
| 687 | dw2_asm_output_data_uleb128 (0, "Augmentation size" ); |
| 688 | } |
| 689 | |
| 690 | /* Loop through the Call Frame Instructions associated with this FDE. */ |
| 691 | fde->dw_fde_current_label = begin; |
| 692 | { |
| 693 | size_t from, until, i; |
| 694 | |
| 695 | from = 0; |
| 696 | until = vec_safe_length (v: fde->dw_fde_cfi); |
| 697 | |
| 698 | if (fde->dw_fde_second_begin == NULL) |
| 699 | ; |
| 700 | else if (!second) |
| 701 | until = fde->dw_fde_switch_cfi_index; |
| 702 | else |
| 703 | from = fde->dw_fde_switch_cfi_index; |
| 704 | |
| 705 | for (i = from; i < until; i++) |
| 706 | output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh); |
| 707 | } |
| 708 | |
| 709 | /* If we are to emit a ref/link from function bodies to their frame tables, |
| 710 | do it now. This is typically performed to make sure that tables |
| 711 | associated with functions are dragged with them and not discarded in |
| 712 | garbage collecting links. We need to do this on a per function basis to |
| 713 | cope with -ffunction-sections. */ |
| 714 | |
| 715 | #ifdef ASM_OUTPUT_DWARF_TABLE_REF |
| 716 | /* Switch to the function section, emit the ref to the tables, and |
| 717 | switch *back* into the table section. */ |
| 718 | switch_to_section (function_section (fde->decl)); |
| 719 | ASM_OUTPUT_DWARF_TABLE_REF (section_start_label); |
| 720 | switch_to_frame_table_section (for_eh, true); |
| 721 | #endif |
| 722 | |
| 723 | /* Pad the FDE out to an address sized boundary. */ |
| 724 | ASM_OUTPUT_ALIGN (asm_out_file, |
| 725 | floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE))); |
| 726 | ASM_OUTPUT_LABEL (asm_out_file, l2); |
| 727 | |
| 728 | j += 2; |
| 729 | } |
| 730 | |
| 731 | /* Return true if frame description entry FDE is needed for EH. */ |
| 732 | |
| 733 | static bool |
| 734 | fde_needed_for_eh_p (dw_fde_ref fde) |
| 735 | { |
| 736 | if (flag_asynchronous_unwind_tables) |
| 737 | return true; |
| 738 | |
| 739 | if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl)) |
| 740 | return true; |
| 741 | |
| 742 | if (fde->uses_eh_lsda) |
| 743 | return true; |
| 744 | |
| 745 | /* If exceptions are enabled, we have collected nothrow info. */ |
| 746 | if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow)) |
| 747 | return false; |
| 748 | |
| 749 | return true; |
| 750 | } |
| 751 | |
| 752 | /* Output the call frame information used to record information |
| 753 | that relates to calculating the frame pointer, and records the |
| 754 | location of saved registers. */ |
| 755 | |
| 756 | static void |
| 757 | output_call_frame_info (int for_eh) |
| 758 | { |
| 759 | unsigned int i; |
| 760 | dw_fde_ref fde; |
| 761 | dw_cfi_ref cfi; |
| 762 | char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES]; |
| 763 | char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES]; |
| 764 | bool any_lsda_needed = false; |
| 765 | char augmentation[6]; |
| 766 | int augmentation_size; |
| 767 | int fde_encoding = DW_EH_PE_absptr; |
| 768 | int per_encoding = DW_EH_PE_absptr; |
| 769 | int lsda_encoding = DW_EH_PE_absptr; |
| 770 | int return_reg; |
| 771 | rtx personality = NULL; |
| 772 | int dw_cie_version; |
| 773 | |
| 774 | /* Don't emit a CIE if there won't be any FDEs. */ |
| 775 | if (!fde_vec) |
| 776 | return; |
| 777 | |
| 778 | /* Nothing to do if the assembler's doing it all. */ |
| 779 | if (dwarf2out_do_cfi_asm ()) |
| 780 | return; |
| 781 | |
| 782 | /* If we don't have any functions we'll want to unwind out of, don't emit |
| 783 | any EH unwind information. If we make FDEs linkonce, we may have to |
| 784 | emit an empty label for an FDE that wouldn't otherwise be emitted. We |
| 785 | want to avoid having an FDE kept around when the function it refers to |
| 786 | is discarded. Example where this matters: a primary function template |
| 787 | in C++ requires EH information, an explicit specialization doesn't. */ |
| 788 | if (for_eh) |
| 789 | { |
| 790 | bool any_eh_needed = false; |
| 791 | |
| 792 | FOR_EACH_VEC_ELT (*fde_vec, i, fde) |
| 793 | { |
| 794 | if (fde->uses_eh_lsda) |
| 795 | any_eh_needed = any_lsda_needed = true; |
| 796 | else if (fde_needed_for_eh_p (fde)) |
| 797 | any_eh_needed = true; |
| 798 | else if (TARGET_USES_WEAK_UNWIND_INFO) |
| 799 | targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1); |
| 800 | } |
| 801 | |
| 802 | if (!any_eh_needed) |
| 803 | return; |
| 804 | } |
| 805 | |
| 806 | /* We're going to be generating comments, so turn on app. */ |
| 807 | if (flag_debug_asm) |
| 808 | app_enable (); |
| 809 | |
| 810 | /* Switch to the proper frame section, first time. */ |
| 811 | switch_to_frame_table_section (for_eh, back: false); |
| 812 | |
| 813 | ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh); |
| 814 | ASM_OUTPUT_LABEL (asm_out_file, section_start_label); |
| 815 | |
| 816 | /* Output the CIE. */ |
| 817 | ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh); |
| 818 | ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh); |
| 819 | if (!XCOFF_DEBUGGING_INFO || for_eh) |
| 820 | { |
| 821 | if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4 && !for_eh) |
| 822 | dw2_asm_output_data (4, 0xffffffff, |
| 823 | "Initial length escape value indicating 64-bit DWARF extension" ); |
| 824 | dw2_asm_output_delta (for_eh ? 4 : dwarf_offset_size, l2, l1, |
| 825 | "Length of Common Information Entry" ); |
| 826 | } |
| 827 | ASM_OUTPUT_LABEL (asm_out_file, l1); |
| 828 | |
| 829 | /* Now that the CIE pointer is PC-relative for EH, |
| 830 | use 0 to identify the CIE. */ |
| 831 | dw2_asm_output_data ((for_eh ? 4 : dwarf_offset_size), |
| 832 | (for_eh ? 0 : DWARF_CIE_ID), |
| 833 | "CIE Identifier Tag" ); |
| 834 | |
| 835 | /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to |
| 836 | use CIE version 1, unless that would produce incorrect results |
| 837 | due to overflowing the return register column. */ |
| 838 | return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh); |
| 839 | dw_cie_version = 1; |
| 840 | if (return_reg >= 256 || dwarf_version > 2) |
| 841 | dw_cie_version = 3; |
| 842 | dw2_asm_output_data (1, dw_cie_version, "CIE Version" ); |
| 843 | |
| 844 | augmentation[0] = 0; |
| 845 | augmentation_size = 0; |
| 846 | |
| 847 | personality = current_unit_personality; |
| 848 | if (for_eh) |
| 849 | { |
| 850 | char *p; |
| 851 | |
| 852 | /* Augmentation: |
| 853 | z Indicates that a uleb128 is present to size the |
| 854 | augmentation section. |
| 855 | L Indicates the encoding (and thus presence) of |
| 856 | an LSDA pointer in the FDE augmentation. |
| 857 | R Indicates a non-default pointer encoding for |
| 858 | FDE code pointers. |
| 859 | P Indicates the presence of an encoding + language |
| 860 | personality routine in the CIE augmentation. */ |
| 861 | |
| 862 | fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0); |
| 863 | per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1); |
| 864 | lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0); |
| 865 | |
| 866 | p = augmentation + 1; |
| 867 | if (personality) |
| 868 | { |
| 869 | *p++ = 'P'; |
| 870 | augmentation_size += 1 + size_of_encoded_value (per_encoding); |
| 871 | assemble_external_libcall (personality); |
| 872 | } |
| 873 | if (any_lsda_needed) |
| 874 | { |
| 875 | *p++ = 'L'; |
| 876 | augmentation_size += 1; |
| 877 | } |
| 878 | if (fde_encoding != DW_EH_PE_absptr) |
| 879 | { |
| 880 | *p++ = 'R'; |
| 881 | augmentation_size += 1; |
| 882 | } |
| 883 | if (p > augmentation + 1) |
| 884 | { |
| 885 | augmentation[0] = 'z'; |
| 886 | *p = '\0'; |
| 887 | } |
| 888 | |
| 889 | /* Ug. Some platforms can't do unaligned dynamic relocations at all. */ |
| 890 | if (personality && per_encoding == DW_EH_PE_aligned) |
| 891 | { |
| 892 | int offset = ( 4 /* Length */ |
| 893 | + 4 /* CIE Id */ |
| 894 | + 1 /* CIE version */ |
| 895 | + strlen (s: augmentation) + 1 /* Augmentation */ |
| 896 | + size_of_uleb128 (1) /* Code alignment */ |
| 897 | + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT) |
| 898 | + 1 /* RA column */ |
| 899 | + 1 /* Augmentation size */ |
| 900 | + 1 /* Personality encoding */ ); |
| 901 | int pad = -offset & (PTR_SIZE - 1); |
| 902 | |
| 903 | augmentation_size += pad; |
| 904 | |
| 905 | /* Augmentations should be small, so there's scarce need to |
| 906 | iterate for a solution. Die if we exceed one uleb128 byte. */ |
| 907 | gcc_assert (size_of_uleb128 (augmentation_size) == 1); |
| 908 | } |
| 909 | } |
| 910 | |
| 911 | dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation" ); |
| 912 | if (dw_cie_version >= 4) |
| 913 | { |
| 914 | dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size" ); |
| 915 | dw2_asm_output_data (1, 0, "CIE Segment Size" ); |
| 916 | } |
| 917 | dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor" ); |
| 918 | dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT, |
| 919 | "CIE Data Alignment Factor" ); |
| 920 | |
| 921 | if (dw_cie_version == 1) |
| 922 | dw2_asm_output_data (1, return_reg, "CIE RA Column" ); |
| 923 | else |
| 924 | dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column" ); |
| 925 | |
| 926 | if (augmentation[0]) |
| 927 | { |
| 928 | dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size" ); |
| 929 | if (personality) |
| 930 | { |
| 931 | dw2_asm_output_data (1, per_encoding, "Personality (%s)" , |
| 932 | eh_data_format_name (per_encoding)); |
| 933 | dw2_asm_output_encoded_addr_rtx (per_encoding, |
| 934 | personality, |
| 935 | true, NULL); |
| 936 | } |
| 937 | |
| 938 | if (any_lsda_needed) |
| 939 | dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)" , |
| 940 | eh_data_format_name (lsda_encoding)); |
| 941 | |
| 942 | if (fde_encoding != DW_EH_PE_absptr) |
| 943 | dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)" , |
| 944 | eh_data_format_name (fde_encoding)); |
| 945 | } |
| 946 | |
| 947 | FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi) |
| 948 | output_cfi (cfi, NULL, for_eh); |
| 949 | |
| 950 | /* Pad the CIE out to an address sized boundary. */ |
| 951 | ASM_OUTPUT_ALIGN (asm_out_file, |
| 952 | floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)); |
| 953 | ASM_OUTPUT_LABEL (asm_out_file, l2); |
| 954 | |
| 955 | /* Loop through all of the FDE's. */ |
| 956 | FOR_EACH_VEC_ELT (*fde_vec, i, fde) |
| 957 | { |
| 958 | unsigned int k; |
| 959 | |
| 960 | /* Don't emit EH unwind info for leaf functions that don't need it. */ |
| 961 | if (for_eh && !fde_needed_for_eh_p (fde)) |
| 962 | continue; |
| 963 | |
| 964 | for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++) |
| 965 | output_fde (fde, for_eh, second: k, section_start_label, fde_encoding, |
| 966 | augmentation, any_lsda_needed, lsda_encoding); |
| 967 | } |
| 968 | |
| 969 | if (for_eh && targetm.terminate_dw2_eh_frame_info) |
| 970 | dw2_asm_output_data (4, 0, "End of Table" ); |
| 971 | |
| 972 | /* Turn off app to make assembly quicker. */ |
| 973 | if (flag_debug_asm) |
| 974 | app_disable (); |
| 975 | } |
| 976 | |
| 977 | /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */ |
| 978 | |
| 979 | static void |
| 980 | dwarf2out_do_cfi_startproc (bool second) |
| 981 | { |
| 982 | int enc; |
| 983 | rtx ref; |
| 984 | |
| 985 | fprintf (stream: asm_out_file, format: "\t.cfi_startproc\n" ); |
| 986 | |
| 987 | targetm.asm_out.post_cfi_startproc (asm_out_file, current_function_decl); |
| 988 | |
| 989 | /* .cfi_personality and .cfi_lsda are only relevant to DWARF2 |
| 990 | eh unwinders. */ |
| 991 | if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2) |
| 992 | return; |
| 993 | |
| 994 | rtx personality = get_personality_function (current_function_decl); |
| 995 | |
| 996 | if (personality) |
| 997 | { |
| 998 | enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1); |
| 999 | ref = personality; |
| 1000 | |
| 1001 | /* ??? The GAS support isn't entirely consistent. We have to |
| 1002 | handle indirect support ourselves, but PC-relative is done |
| 1003 | in the assembler. Further, the assembler can't handle any |
| 1004 | of the weirder relocation types. */ |
| 1005 | if (enc & DW_EH_PE_indirect) |
| 1006 | { |
| 1007 | if (targetm.asm_out.make_eh_symbol_indirect != NULL) |
| 1008 | ref = targetm.asm_out.make_eh_symbol_indirect (ref, true); |
| 1009 | else |
| 1010 | ref = dw2_force_const_mem (ref, true); |
| 1011 | } |
| 1012 | |
| 1013 | fprintf (stream: asm_out_file, format: "\t.cfi_personality %#x," , enc); |
| 1014 | output_addr_const (asm_out_file, ref); |
| 1015 | fputc (c: '\n', stream: asm_out_file); |
| 1016 | } |
| 1017 | |
| 1018 | if (crtl->uses_eh_lsda) |
| 1019 | { |
| 1020 | char lab[MAX_ARTIFICIAL_LABEL_BYTES]; |
| 1021 | |
| 1022 | enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0); |
| 1023 | ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA" , |
| 1024 | current_function_funcdef_no); |
| 1025 | ref = gen_rtx_SYMBOL_REF (Pmode, lab); |
| 1026 | SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL; |
| 1027 | |
| 1028 | if (enc & DW_EH_PE_indirect) |
| 1029 | { |
| 1030 | if (targetm.asm_out.make_eh_symbol_indirect != NULL) |
| 1031 | ref = targetm.asm_out.make_eh_symbol_indirect (ref, true); |
| 1032 | else |
| 1033 | ref = dw2_force_const_mem (ref, true); |
| 1034 | } |
| 1035 | |
| 1036 | fprintf (stream: asm_out_file, format: "\t.cfi_lsda %#x," , enc); |
| 1037 | output_addr_const (asm_out_file, ref); |
| 1038 | fputc (c: '\n', stream: asm_out_file); |
| 1039 | } |
| 1040 | } |
| 1041 | |
| 1042 | /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that |
| 1043 | this allocation may be done before pass_final. */ |
| 1044 | |
| 1045 | dw_fde_ref |
| 1046 | dwarf2out_alloc_current_fde (void) |
| 1047 | { |
| 1048 | dw_fde_ref fde; |
| 1049 | |
| 1050 | fde = ggc_cleared_alloc<dw_fde_node> (); |
| 1051 | fde->decl = current_function_decl; |
| 1052 | fde->funcdef_number = current_function_funcdef_no; |
| 1053 | fde->fde_index = vec_safe_length (v: fde_vec); |
| 1054 | fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls; |
| 1055 | fde->uses_eh_lsda = crtl->uses_eh_lsda; |
| 1056 | fde->nothrow = crtl->nothrow; |
| 1057 | fde->drap_reg = INVALID_REGNUM; |
| 1058 | fde->vdrap_reg = INVALID_REGNUM; |
| 1059 | |
| 1060 | /* Record the FDE associated with this function. */ |
| 1061 | cfun->fde = fde; |
| 1062 | vec_safe_push (v&: fde_vec, obj: fde); |
| 1063 | |
| 1064 | return fde; |
| 1065 | } |
| 1066 | |
/* Output a marker (i.e. a label) for the beginning of a function, before
   the prologue.  LINE, COLUMN and FILE give the source location of the
   function's first line (FILE may be NULL).  Also records the begin label
   in the function's FDE (allocating one if necessary) and emits
   .cfi_startproc when CFI asm output is in use.  */

void
dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
			  unsigned int column ATTRIBUTE_UNUSED,
			  const char *file ATTRIBUTE_UNUSED)
{
  char label[MAX_ARTIFICIAL_LABEL_BYTES];
  char * dup_label;
  dw_fde_ref fde;
  section *fnsec;
  bool do_frame;

  current_function_func_begin_label = NULL;

  do_frame = dwarf2out_do_frame ();

  /* ??? current_function_func_begin_label is also used by except.cc for
     call-site information.  We must emit this label if it might be used.  */
  if (!do_frame
      && (!flag_exceptions
	  || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
    return;

  fnsec = function_section (current_function_decl);
  switch_to_section (fnsec);
  ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
			       current_function_funcdef_no);
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
			  current_function_funcdef_no);
  /* LABEL is a stack buffer; keep a permanent copy of the name.  */
  dup_label = xstrdup (label);
  current_function_func_begin_label = dup_label;

  /* We can elide FDE allocation if we're not emitting frame unwind info.  */
  if (!do_frame)
    return;

  /* Unlike the debug version, the EH version of frame unwind info is a per-
     function setting so we need to record whether we need it for the unit.  */
  do_eh_frame |= dwarf2out_do_eh_frame ();

  /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
     emit insns as rtx but bypass the bulk of rest_of_compilation, which
     would include pass_dwarf2_frame.  If we've not created the FDE yet,
     do so now.  */
  fde = cfun->fde;
  if (fde == NULL)
    fde = dwarf2out_alloc_current_fde ();

  /* Initialize the bits of CURRENT_FDE that were not available earlier.  */
  fde->dw_fde_begin = dup_label;
  fde->dw_fde_current_label = dup_label;
  fde->in_std_section = (fnsec == text_section
			 || (cold_text_section && fnsec == cold_text_section));
  fde->ignored_debug = DECL_IGNORED_P (current_function_decl);
  in_text_section_p = fnsec == text_section;

  /* We only want to output line number information for the genuine dwarf2
     prologue case, not the eh frame case.  */
#ifdef DWARF2_DEBUGGING_INFO
  if (file)
    dwarf2out_source_line (line, column, file, 0, true);
#endif

  if (dwarf2out_do_cfi_asm ())
    dwarf2out_do_cfi_startproc (second: false);
  else
    {
      rtx personality = get_personality_function (current_function_decl);
      if (!current_unit_personality)
	current_unit_personality = personality;

      /* We cannot keep a current personality per function as without CFI
	 asm, at the point where we emit the CFI data, there is no current
	 function anymore.  */
      if (personality && current_unit_personality != personality)
	sorry ("multiple EH personalities are supported only with assemblers "
	       "supporting %<.cfi_personality%> directive" );
    }
}
| 1148 | |
/* Output a marker (i.e. a label) for the end of the generated code
   for a function prologue.  This gets called *after* the prologue code has
   been generated.  LINE and FILE are unused; the label name is recorded
   in the function's FDE.  */

void
dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
			    const char *file ATTRIBUTE_UNUSED)
{
  char label[MAX_ARTIFICIAL_LABEL_BYTES];

  /* Output a label to mark the endpoint of the code generated for this
     function.  */
  ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
			       current_function_funcdef_no);
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
			  current_function_funcdef_no);
  /* LABEL is a stack buffer; keep a permanent copy in the FDE.  */
  cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
}
| 1167 | |
/* Output a marker (i.e. a label) for the beginning of the generated code
   for a function epilogue.  This gets called *before* the epilogue code has
   been generated.  LINE and FILE are unused.  Idempotent: only the first
   call for a function records a label.  */

void
dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
			      const char *file ATTRIBUTE_UNUSED)
{
  dw_fde_ref fde = cfun->fde;
  char label[MAX_ARTIFICIAL_LABEL_BYTES];

  /* Already recorded for this function; don't emit a second label.  */
  if (fde->dw_fde_vms_begin_epilogue)
    return;

  /* Output a label to mark the endpoint of the code generated for this
     function.  */
  ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
			       current_function_funcdef_no);
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
			  current_function_funcdef_no);
  fde->dw_fde_vms_begin_epilogue = xstrdup (label);
}
| 1190 | |
/* Mark the ranges of non-debug subsections in the std text sections.
   FDE describes the function just finished; SECOND selects its second
   (split) part.  The switch_text_ranges/switch_cold_ranges vectors
   accumulate labels in pairs bracketing runs of ignored-for-debug
   functions: an odd vector length means such a run is currently open.
   Functions placed outside the standard text sections are not
   tracked.  */

static void
mark_ignored_debug_section (dw_fde_ref fde, bool second)
{
  bool std_section;
  const char *begin_label, *end_label;
  const char **last_end_label;
  vec<const char *, va_gc> **switch_ranges;

  /* Select the labels for the part of the function being ended.  */
  if (second)
    {
      std_section = fde->second_in_std_section;
      begin_label = fde->dw_fde_second_begin;
      end_label = fde->dw_fde_second_end;
    }
  else
    {
      std_section = fde->in_std_section;
      begin_label = fde->dw_fde_begin;
      end_label = fde->dw_fde_end;
    }

  if (!std_section)
    return;

  /* Pick the bookkeeping for the section currently in use.  */
  if (in_text_section_p)
    {
      last_end_label = &last_text_label;
      switch_ranges = &switch_text_ranges;
    }
  else
    {
      last_end_label = &last_cold_label;
      switch_ranges = &switch_cold_ranges;
    }

  if (fde->ignored_debug)
    {
      /* Ignored function: open a skipped run at the end label of the
	 last tracked function, unless one is already open (odd length)
	 or no tracked function precedes (vector not yet allocated).  */
      if (*switch_ranges && !(vec_safe_length (v: *switch_ranges) & 1))
	vec_safe_push (v&: *switch_ranges, obj: *last_end_label);
    }
  else
    {
      /* Tracked function: remember where it ends, and if a skipped run
	 is open, close it at this function's begin label.  */
      *last_end_label = end_label;

      if (!*switch_ranges)
	vec_alloc (v&: *switch_ranges, nelems: 16);
      else if (vec_safe_length (v: *switch_ranges) & 1)
	vec_safe_push (v&: *switch_ranges, obj: begin_label);
    }
}
| 1243 | |
/* Output a marker (i.e. a label) for the absolute end of the generated code
   for a function definition.  This gets called *after* the epilogue code has
   been generated.  LINE and FILE are unused.  */

void
dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
			const char *file ATTRIBUTE_UNUSED)
{
  dw_fde_ref fde;
  char label[MAX_ARTIFICIAL_LABEL_BYTES];

  /* Reset per-function var-location caches for the next function.  */
  last_var_location_insn = NULL;
  cached_next_real_insn = NULL;

  if (dwarf2out_do_cfi_asm ())
    fprintf (stream: asm_out_file, format: "\t.cfi_endproc\n" );

#ifdef CODEVIEW_DEBUGGING_INFO
  if (codeview_debuginfo_p ())
    codeview_end_epilogue ();
#endif

  /* Output a label to mark the endpoint of the code generated for this
     function.  */
  ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
			       current_function_funcdef_no);
  ASM_OUTPUT_LABEL (asm_out_file, label);
  fde = cfun->fde;
  gcc_assert (fde != NULL);
  /* For a split function, dwarf2out_switch_text_section already recorded
     the end labels of both parts.  */
  if (fde->dw_fde_second_begin == NULL)
    fde->dw_fde_end = xstrdup (label);

  mark_ignored_debug_section (fde, second: fde->dw_fde_second_begin != NULL);
}
| 1278 | |
| 1279 | void |
| 1280 | dwarf2out_frame_finish (void) |
| 1281 | { |
| 1282 | /* Output call frame information. */ |
| 1283 | if (targetm.debug_unwind_info () == UI_DWARF2) |
| 1284 | output_call_frame_info (for_eh: 0); |
| 1285 | |
| 1286 | /* Output another copy for the unwinder. */ |
| 1287 | if (do_eh_frame) |
| 1288 | output_call_frame_info (for_eh: 1); |
| 1289 | } |
| 1290 | |
| 1291 | static void var_location_switch_text_section (void); |
| 1292 | static void set_cur_line_info_table (section *); |
| 1293 | |
/* Called when a function is split between the hot and cold text
   sections: record the second-section labels in the FDE, close the
   current CFI procedure, account the first part in the ignored-debug
   bookkeeping, perform the actual section switch, and restart CFI and
   line-table state for the second part.  */

void
dwarf2out_switch_text_section (void)
{
  char label[MAX_ARTIFICIAL_LABEL_BYTES];
  section *sect;
  dw_fde_ref fde = cfun->fde;

  /* Only one switch per function is supported.  */
  gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);

  ASM_GENERATE_INTERNAL_LABEL (label, FUNC_SECOND_SECT_LABEL,
			       current_function_funcdef_no);

  fde->dw_fde_second_begin = ggc_strdup (label);
  /* Record the end labels for both parts, depending on which section we
     are currently in.  */
  if (!in_cold_section_p)
    {
      fde->dw_fde_end = crtl->subsections.cold_section_end_label;
      fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
    }
  else
    {
      fde->dw_fde_end = crtl->subsections.hot_section_end_label;
      fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
    }
  have_multiple_function_sections = true;

#ifdef CODEVIEW_DEBUGGING_INFO
  if (codeview_debuginfo_p ())
    codeview_switch_text_section ();
#endif

  if (dwarf2out_do_cfi_asm ())
    fprintf (stream: asm_out_file, format: "\t.cfi_endproc\n" );

  /* The first part is finished; update the ignored-debug range
     bookkeeping before switching sections.  */
  mark_ignored_debug_section (fde, second: false);

  /* Now do the real section switch.  */
  sect = current_function_section ();
  switch_to_section (sect);

  fde->second_in_std_section
    = (sect == text_section
       || (cold_text_section && sect == cold_text_section));
  in_text_section_p = sect == text_section;

  if (dwarf2out_do_cfi_asm ())
    dwarf2out_do_cfi_startproc (second: true);

  var_location_switch_text_section ();

  if (cold_text_section != NULL)
    set_cur_line_info_table (sect);
}
| 1346 | |
| 1347 | /* And now, the subset of the debugging information support code necessary |
| 1348 | for emitting location expressions. */ |
| 1349 | |
/* Describe an entry into the .debug_addr section.  */

enum ate_kind {
  ate_kind_rtx,		/* addr.rtl holds an address as an rtx.  */
  ate_kind_rtx_dtprel,	/* addr.rtl holds an rtx; DTP-relative form
			   presumed from the name -- confirm at uses.  */
  ate_kind_label	/* addr.label holds an assembler label name.  */
};

struct GTY((for_user)) addr_table_entry {
  enum ate_kind kind;		/* Selects the active member of ADDR.  */
  unsigned int refcount;	/* Reference count for this entry.  */
  unsigned int index;		/* Index assigned in the address table.  */
  union addr_table_entry_struct_union
  {
    rtx GTY ((tag ("0" ))) rtl;
    char * GTY ((tag ("1" ))) label;
  }
  GTY ((desc ("%1.kind" ))) addr;
};
| 1369 | |
| 1370 | static dw_loc_descr_ref int_loc_descriptor (poly_int64); |
| 1371 | static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT); |
| 1372 | |
| 1373 | /* Convert a DWARF stack opcode into its string name. */ |
| 1374 | |
| 1375 | static const char * |
| 1376 | dwarf_stack_op_name (unsigned int op) |
| 1377 | { |
| 1378 | const char *name = get_DW_OP_name (op); |
| 1379 | |
| 1380 | if (name != NULL) |
| 1381 | return name; |
| 1382 | |
| 1383 | return "OP_<unknown>" ; |
| 1384 | } |
| 1385 | |
/* Return TRUE iff we're to output location view lists as a separate
   attribute next to the location lists, as an extension compatible
   with DWARF 2 and above.  */

static inline bool
dwarf2out_locviews_in_attribute ()
{
  /* Value 1 selects the separate-attribute representation; -1 selects
     the in-loclist one (see dwarf2out_locviews_in_loclist).  */
  return debug_variable_location_views == 1;
}
| 1395 | |
/* Return TRUE iff we're to output location view lists as part of the
   location lists, as proposed for standardization after DWARF 5.  */

static inline bool
dwarf2out_locviews_in_loclist ()
{
  /* Without the DW_LLE_view_pair extension defined in the DWARF headers
     this representation cannot be emitted at all.  */
#ifndef DW_LLE_view_pair
  return false;
#else
  return debug_variable_location_views == -1;
#endif
}
| 1408 | |
| 1409 | /* Return a pointer to a newly allocated location description. Location |
| 1410 | descriptions are simple expression terms that can be strung |
| 1411 | together to form more complicated location (address) descriptions. */ |
| 1412 | |
| 1413 | static inline dw_loc_descr_ref |
| 1414 | new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1, |
| 1415 | unsigned HOST_WIDE_INT oprnd2) |
| 1416 | { |
| 1417 | dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> (); |
| 1418 | |
| 1419 | descr->dw_loc_opc = op; |
| 1420 | descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const; |
| 1421 | descr->dw_loc_oprnd1.val_entry = NULL; |
| 1422 | descr->dw_loc_oprnd1.v.val_unsigned = oprnd1; |
| 1423 | descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const; |
| 1424 | descr->dw_loc_oprnd2.val_entry = NULL; |
| 1425 | descr->dw_loc_oprnd2.v.val_unsigned = oprnd2; |
| 1426 | |
| 1427 | return descr; |
| 1428 | } |
| 1429 | |
| 1430 | /* Add a location description term to a location description expression. */ |
| 1431 | |
| 1432 | static inline void |
| 1433 | add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr) |
| 1434 | { |
| 1435 | dw_loc_descr_ref *d; |
| 1436 | |
| 1437 | /* Find the end of the chain. */ |
| 1438 | for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next) |
| 1439 | ; |
| 1440 | |
| 1441 | *d = descr; |
| 1442 | } |
| 1443 | |
/* Compare two location operands for exact equality.  Operands of
   different classes never compare equal; within a class the comparison
   is by value, pointer identity, or string contents as appropriate.  */

static bool
dw_val_equal_p (dw_val_node *a, dw_val_node *b)
{
  if (a->val_class != b->val_class)
    return false;
  switch (a->val_class)
    {
    case dw_val_class_none:
      return true;
    case dw_val_class_addr:
      return rtx_equal_p (a->v.val_addr, b->v.val_addr);

    case dw_val_class_offset:
    case dw_val_class_unsigned_const:
    case dw_val_class_const:
    case dw_val_class_unsigned_const_implicit:
    case dw_val_class_const_implicit:
    case dw_val_class_range_list:
      /* These are all HOST_WIDE_INT, signed or unsigned.  */
      return a->v.val_unsigned == b->v.val_unsigned;

      /* For these classes pointer identity suffices: equal but distinct
	 objects compare unequal.  */
    case dw_val_class_loc:
      return a->v.val_loc == b->v.val_loc;
    case dw_val_class_loc_list:
      return a->v.val_loc_list == b->v.val_loc_list;
    case dw_val_class_view_list:
      return a->v.val_view_list == b->v.val_view_list;
    case dw_val_class_die_ref:
      return a->v.val_die_ref.die == b->v.val_die_ref.die;
    case dw_val_class_fde_ref:
      return a->v.val_fde_index == b->v.val_fde_index;
    case dw_val_class_symview:
      return strcmp (s1: a->v.val_symbolic_view, s2: b->v.val_symbolic_view) == 0;
    case dw_val_class_lbl_id:
    case dw_val_class_lineptr:
    case dw_val_class_macptr:
    case dw_val_class_loclistsptr:
    case dw_val_class_high_pc:
      /* Labels are compared by name, not by string identity.  */
      return strcmp (s1: a->v.val_lbl_id, s2: b->v.val_lbl_id) == 0;
    case dw_val_class_str:
      /* NOTE(review): compared by node identity -- presumably the string
	 table interns these; confirm.  */
      return a->v.val_str == b->v.val_str;
    case dw_val_class_flag:
      return a->v.val_flag == b->v.val_flag;
    case dw_val_class_file:
    case dw_val_class_file_implicit:
      return a->v.val_file == b->v.val_file;
    case dw_val_class_decl_ref:
      return a->v.val_decl_ref == b->v.val_decl_ref;

    case dw_val_class_const_double:
      return (a->v.val_double.high == b->v.val_double.high
	      && a->v.val_double.low == b->v.val_double.low);

    case dw_val_class_wide_int:
      return *a->v.val_wide == *b->v.val_wide;

    case dw_val_class_vec:
      {
	size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
	size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;

	return (a_len == b_len
		&& !memcmp (s1: a->v.val_vec.array, s2: b->v.val_vec.array, n: a_len));
      }

    case dw_val_class_data8:
      return memcmp (s1: a->v.val_data8, s2: b->v.val_data8, n: 8) == 0;

    case dw_val_class_vms_delta:
      return (!strcmp (s1: a->v.val_vms_delta.lbl1, s2: b->v.val_vms_delta.lbl1)
	      && !strcmp (s1: a->v.val_vms_delta.lbl2, s2: b->v.val_vms_delta.lbl2));

    case dw_val_class_discr_value:
      return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
	      && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
    case dw_val_class_discr_list:
      /* It makes no sense comparing two discriminant value lists.  */
      return false;
    }
  gcc_unreachable ();
}
| 1527 | |
| 1528 | /* Compare two location atoms for exact equality. */ |
| 1529 | |
| 1530 | static bool |
| 1531 | loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b) |
| 1532 | { |
| 1533 | if (a->dw_loc_opc != b->dw_loc_opc) |
| 1534 | return false; |
| 1535 | |
| 1536 | /* ??? This is only ever set for DW_OP_constNu, for N equal to the |
| 1537 | address size, but since we always allocate cleared storage it |
| 1538 | should be zero for other types of locations. */ |
| 1539 | if (a->dw_loc_dtprel != b->dw_loc_dtprel) |
| 1540 | return false; |
| 1541 | |
| 1542 | return (dw_val_equal_p (a: &a->dw_loc_oprnd1, b: &b->dw_loc_oprnd1) |
| 1543 | && dw_val_equal_p (a: &a->dw_loc_oprnd2, b: &b->dw_loc_oprnd2)); |
| 1544 | } |
| 1545 | |
| 1546 | /* Compare two complete location expressions for exact equality. */ |
| 1547 | |
| 1548 | bool |
| 1549 | loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b) |
| 1550 | { |
| 1551 | while (1) |
| 1552 | { |
| 1553 | if (a == b) |
| 1554 | return true; |
| 1555 | if (a == NULL || b == NULL) |
| 1556 | return false; |
| 1557 | if (!loc_descr_equal_p_1 (a, b)) |
| 1558 | return false; |
| 1559 | |
| 1560 | a = a->dw_loc_next; |
| 1561 | b = b->dw_loc_next; |
| 1562 | } |
| 1563 | } |
| 1564 | |
| 1565 | |
/* Add a constant POLY_OFFSET to a location expression.  The addition is
   folded into the trailing operation of *LIST_HEAD when possible, or
   appended as extra operations otherwise.  *LIST_HEAD must be
   non-empty.  */

static void
loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
{
  dw_loc_descr_ref loc;
  HOST_WIDE_INT *p;

  gcc_assert (*list_head != NULL);

  if (known_eq (poly_offset, 0))
    return;

  /* Find the end of the chain.  */
  for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
    ;

  HOST_WIDE_INT offset;
  if (!poly_offset.is_constant (const_value: &offset))
    {
      /* A non-constant polynomial offset cannot be folded into the
	 trailing op; push it as an explicit constant plus DW_OP_plus.  */
      loc->dw_loc_next = int_loc_descriptor (poly_offset);
      add_loc_descr (list_head: &loc->dw_loc_next, descr: new_loc_descr (op: DW_OP_plus, oprnd1: 0, oprnd2: 0));
      return;
    }

  /* P points at the offset operand of the trailing op, if that op has
     one we could adjust in place.  */
  p = NULL;
  if (loc->dw_loc_opc == DW_OP_fbreg
      || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
    p = &loc->dw_loc_oprnd1.v.val_int;
  else if (loc->dw_loc_opc == DW_OP_bregx)
    p = &loc->dw_loc_oprnd2.v.val_int;

  /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
     offset.  Don't optimize if a signed integer overflow would happen.  */
  if (p != NULL
      && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
	  || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
    *p += offset;

  else if (offset > 0)
    loc->dw_loc_next = new_loc_descr (op: DW_OP_plus_uconst, oprnd1: offset, oprnd2: 0);

  else
    {
      /* DW_OP_plus_uconst cannot express a negative addend; subtract
	 the unsigned magnitude instead.  */
      loc->dw_loc_next
	= uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
      add_loc_descr (list_head: &loc->dw_loc_next, descr: new_loc_descr (op: DW_OP_minus, oprnd1: 0, oprnd2: 0));
    }
}
| 1615 | |
| 1616 | /* Return a pointer to a newly allocated location description for |
| 1617 | REG and OFFSET. */ |
| 1618 | |
| 1619 | static inline dw_loc_descr_ref |
| 1620 | new_reg_loc_descr (unsigned int reg, poly_int64 offset) |
| 1621 | { |
| 1622 | HOST_WIDE_INT const_offset; |
| 1623 | if (offset.is_constant (const_value: &const_offset)) |
| 1624 | { |
| 1625 | if (reg <= 31) |
| 1626 | return new_loc_descr (op: (enum dwarf_location_atom) (DW_OP_breg0 + reg), |
| 1627 | oprnd1: const_offset, oprnd2: 0); |
| 1628 | else |
| 1629 | return new_loc_descr (op: DW_OP_bregx, oprnd1: reg, oprnd2: const_offset); |
| 1630 | } |
| 1631 | else |
| 1632 | { |
| 1633 | dw_loc_descr_ref ret = new_reg_loc_descr (reg, offset: 0); |
| 1634 | loc_descr_plus_const (list_head: &ret, poly_offset: offset); |
| 1635 | return ret; |
| 1636 | } |
| 1637 | } |
| 1638 | |
| 1639 | /* Add a constant OFFSET to a location list. */ |
| 1640 | |
| 1641 | static void |
| 1642 | loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset) |
| 1643 | { |
| 1644 | dw_loc_list_ref d; |
| 1645 | for (d = list_head; d != NULL; d = d->dw_loc_next) |
| 1646 | loc_descr_plus_const (list_head: &d->expr, poly_offset: offset); |
| 1647 | } |
| 1648 | |
| 1649 | #define DWARF_REF_SIZE \ |
| 1650 | (dwarf_version == 2 ? DWARF2_ADDR_SIZE : dwarf_offset_size) |
| 1651 | |
| 1652 | /* The number of bits that can be encoded by largest DW_FORM_dataN. |
| 1653 | In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5 |
| 1654 | DW_FORM_data16 with 128 bits. */ |
| 1655 | #define DWARF_LARGEST_DATA_FORM_BITS \ |
| 1656 | (dwarf_version >= 5 ? 128 : 64) |
| 1657 | |
| 1658 | /* Utility inline function for construction of ops that were GNU extension |
| 1659 | before DWARF 5. */ |
| 1660 | static inline enum dwarf_location_atom |
| 1661 | dwarf_OP (enum dwarf_location_atom op) |
| 1662 | { |
| 1663 | switch (op) |
| 1664 | { |
| 1665 | case DW_OP_implicit_pointer: |
| 1666 | if (dwarf_version < 5) |
| 1667 | return DW_OP_GNU_implicit_pointer; |
| 1668 | break; |
| 1669 | |
| 1670 | case DW_OP_entry_value: |
| 1671 | if (dwarf_version < 5) |
| 1672 | return DW_OP_GNU_entry_value; |
| 1673 | break; |
| 1674 | |
| 1675 | case DW_OP_const_type: |
| 1676 | if (dwarf_version < 5) |
| 1677 | return DW_OP_GNU_const_type; |
| 1678 | break; |
| 1679 | |
| 1680 | case DW_OP_regval_type: |
| 1681 | if (dwarf_version < 5) |
| 1682 | return DW_OP_GNU_regval_type; |
| 1683 | break; |
| 1684 | |
| 1685 | case DW_OP_deref_type: |
| 1686 | if (dwarf_version < 5) |
| 1687 | return DW_OP_GNU_deref_type; |
| 1688 | break; |
| 1689 | |
| 1690 | case DW_OP_convert: |
| 1691 | if (dwarf_version < 5) |
| 1692 | return DW_OP_GNU_convert; |
| 1693 | break; |
| 1694 | |
| 1695 | case DW_OP_reinterpret: |
| 1696 | if (dwarf_version < 5) |
| 1697 | return DW_OP_GNU_reinterpret; |
| 1698 | break; |
| 1699 | |
| 1700 | case DW_OP_addrx: |
| 1701 | if (dwarf_version < 5) |
| 1702 | return DW_OP_GNU_addr_index; |
| 1703 | break; |
| 1704 | |
| 1705 | case DW_OP_constx: |
| 1706 | if (dwarf_version < 5) |
| 1707 | return DW_OP_GNU_const_index; |
| 1708 | break; |
| 1709 | |
| 1710 | default: |
| 1711 | break; |
| 1712 | } |
| 1713 | return op; |
| 1714 | } |
| 1715 | |
| 1716 | /* Similarly for attributes. */ |
| 1717 | static inline enum dwarf_attribute |
| 1718 | dwarf_AT (enum dwarf_attribute at) |
| 1719 | { |
| 1720 | switch (at) |
| 1721 | { |
| 1722 | case DW_AT_call_return_pc: |
| 1723 | if (dwarf_version < 5) |
| 1724 | return DW_AT_low_pc; |
| 1725 | break; |
| 1726 | |
| 1727 | case DW_AT_call_tail_call: |
| 1728 | if (dwarf_version < 5) |
| 1729 | return DW_AT_GNU_tail_call; |
| 1730 | break; |
| 1731 | |
| 1732 | case DW_AT_call_origin: |
| 1733 | if (dwarf_version < 5) |
| 1734 | return DW_AT_abstract_origin; |
| 1735 | break; |
| 1736 | |
| 1737 | case DW_AT_call_target: |
| 1738 | if (dwarf_version < 5) |
| 1739 | return DW_AT_GNU_call_site_target; |
| 1740 | break; |
| 1741 | |
| 1742 | case DW_AT_call_target_clobbered: |
| 1743 | if (dwarf_version < 5) |
| 1744 | return DW_AT_GNU_call_site_target_clobbered; |
| 1745 | break; |
| 1746 | |
| 1747 | case DW_AT_call_parameter: |
| 1748 | if (dwarf_version < 5) |
| 1749 | return DW_AT_abstract_origin; |
| 1750 | break; |
| 1751 | |
| 1752 | case DW_AT_call_value: |
| 1753 | if (dwarf_version < 5) |
| 1754 | return DW_AT_GNU_call_site_value; |
| 1755 | break; |
| 1756 | |
| 1757 | case DW_AT_call_data_value: |
| 1758 | if (dwarf_version < 5) |
| 1759 | return DW_AT_GNU_call_site_data_value; |
| 1760 | break; |
| 1761 | |
| 1762 | case DW_AT_call_all_calls: |
| 1763 | if (dwarf_version < 5) |
| 1764 | return DW_AT_GNU_all_call_sites; |
| 1765 | break; |
| 1766 | |
| 1767 | case DW_AT_call_all_tail_calls: |
| 1768 | if (dwarf_version < 5) |
| 1769 | return DW_AT_GNU_all_tail_call_sites; |
| 1770 | break; |
| 1771 | |
| 1772 | case DW_AT_dwo_name: |
| 1773 | if (dwarf_version < 5) |
| 1774 | return DW_AT_GNU_dwo_name; |
| 1775 | break; |
| 1776 | |
| 1777 | case DW_AT_addr_base: |
| 1778 | if (dwarf_version < 5) |
| 1779 | return DW_AT_GNU_addr_base; |
| 1780 | break; |
| 1781 | |
| 1782 | default: |
| 1783 | break; |
| 1784 | } |
| 1785 | return at; |
| 1786 | } |
| 1787 | |
| 1788 | /* And similarly for tags. */ |
| 1789 | static inline enum dwarf_tag |
| 1790 | dwarf_TAG (enum dwarf_tag tag) |
| 1791 | { |
| 1792 | switch (tag) |
| 1793 | { |
| 1794 | case DW_TAG_call_site: |
| 1795 | if (dwarf_version < 5) |
| 1796 | return DW_TAG_GNU_call_site; |
| 1797 | break; |
| 1798 | |
| 1799 | case DW_TAG_call_site_parameter: |
| 1800 | if (dwarf_version < 5) |
| 1801 | return DW_TAG_GNU_call_site_parameter; |
| 1802 | break; |
| 1803 | |
| 1804 | default: |
| 1805 | break; |
| 1806 | } |
| 1807 | return tag; |
| 1808 | } |
| 1809 | |
| 1810 | /* And similarly for forms. */ |
| 1811 | static inline enum dwarf_form |
| 1812 | dwarf_FORM (enum dwarf_form form) |
| 1813 | { |
| 1814 | switch (form) |
| 1815 | { |
| 1816 | case DW_FORM_addrx: |
| 1817 | if (dwarf_version < 5) |
| 1818 | return DW_FORM_GNU_addr_index; |
| 1819 | break; |
| 1820 | |
| 1821 | case DW_FORM_strx: |
| 1822 | if (dwarf_version < 5) |
| 1823 | return DW_FORM_GNU_str_index; |
| 1824 | break; |
| 1825 | |
| 1826 | default: |
| 1827 | break; |
| 1828 | } |
| 1829 | return form; |
| 1830 | } |
| 1831 | |
| 1832 | static unsigned long int get_base_type_offset (dw_die_ref); |
| 1833 | |
/* Return the size of a location descriptor.  */

static unsigned long
size_of_loc_descr (dw_loc_descr_ref loc)
{
  /* Every operation is at least the one-byte opcode; the switch below adds
     the encoded size of any operands.  */
  unsigned long size = 1;

  switch (loc->dw_loc_opc)
    {
    case DW_OP_addr:
      size += DWARF2_ADDR_SIZE;
      break;
    /* These carry a ULEB128 index into .debug_addr, which must already
       have been assigned.  */
    case DW_OP_GNU_addr_index:
    case DW_OP_addrx:
    case DW_OP_GNU_const_index:
    case DW_OP_constx:
      gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
      size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
      break;
    case DW_OP_const1u:
    case DW_OP_const1s:
      size += 1;
      break;
    case DW_OP_const2u:
    case DW_OP_const2s:
      size += 2;
      break;
    case DW_OP_const4u:
    case DW_OP_const4s:
      size += 4;
      break;
    case DW_OP_const8u:
    case DW_OP_const8s:
      size += 8;
      break;
    case DW_OP_constu:
      size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
      break;
    case DW_OP_consts:
      size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
      break;
    case DW_OP_pick:
      size += 1;
      break;
    case DW_OP_plus_uconst:
      size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
      break;
    /* Branches use a fixed 2-byte displacement operand.  */
    case DW_OP_skip:
    case DW_OP_bra:
      size += 2;
      break;
    /* DW_OP_breg0..DW_OP_breg31 take a single SLEB128 offset operand.  */
    case DW_OP_breg0:
    case DW_OP_breg1:
    case DW_OP_breg2:
    case DW_OP_breg3:
    case DW_OP_breg4:
    case DW_OP_breg5:
    case DW_OP_breg6:
    case DW_OP_breg7:
    case DW_OP_breg8:
    case DW_OP_breg9:
    case DW_OP_breg10:
    case DW_OP_breg11:
    case DW_OP_breg12:
    case DW_OP_breg13:
    case DW_OP_breg14:
    case DW_OP_breg15:
    case DW_OP_breg16:
    case DW_OP_breg17:
    case DW_OP_breg18:
    case DW_OP_breg19:
    case DW_OP_breg20:
    case DW_OP_breg21:
    case DW_OP_breg22:
    case DW_OP_breg23:
    case DW_OP_breg24:
    case DW_OP_breg25:
    case DW_OP_breg26:
    case DW_OP_breg27:
    case DW_OP_breg28:
    case DW_OP_breg29:
    case DW_OP_breg30:
    case DW_OP_breg31:
      size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
      break;
    case DW_OP_regx:
      size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
      break;
    case DW_OP_fbreg:
      size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
      break;
    /* Register number (ULEB128) plus offset (SLEB128).  */
    case DW_OP_bregx:
      size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
      size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
      break;
    case DW_OP_piece:
      size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
      break;
    case DW_OP_bit_piece:
      size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
      size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
      break;
    case DW_OP_deref_size:
    case DW_OP_xderef_size:
      size += 1;
      break;
    case DW_OP_call2:
      size += 2;
      break;
    case DW_OP_call4:
      size += 4;
      break;
    case DW_OP_call_ref:
    case DW_OP_GNU_variable_value:
      size += DWARF_REF_SIZE;
      break;
    /* ULEB128 block length followed by that many data bytes.  */
    case DW_OP_implicit_value:
      size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
	      + loc->dw_loc_oprnd1.v.val_unsigned;
      break;
    case DW_OP_implicit_pointer:
    case DW_OP_GNU_implicit_pointer:
      size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
      break;
    /* A nested expression: ULEB128 length plus the expression itself.  */
    case DW_OP_entry_value:
    case DW_OP_GNU_entry_value:
      {
	unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
	size += size_of_uleb128 (op_size) + op_size;
	break;
      }
    /* Base-type DIE offset (ULEB128), a 1-byte constant size, then the
       constant bytes themselves, whose count depends on the value class.  */
    case DW_OP_const_type:
    case DW_OP_GNU_const_type:
      {
	unsigned long o
	  = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
	size += size_of_uleb128 (o) + 1;
	switch (loc->dw_loc_oprnd2.val_class)
	  {
	  case dw_val_class_vec:
	    size += loc->dw_loc_oprnd2.v.val_vec.length
		    * loc->dw_loc_oprnd2.v.val_vec.elt_size;
	    break;
	  case dw_val_class_const:
	    size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
	    break;
	  case dw_val_class_const_double:
	    size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
	    break;
	  case dw_val_class_wide_int:
	    size += (get_full_len (op: *loc->dw_loc_oprnd2.v.val_wide)
		     * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
	    break;
	  default:
	    gcc_unreachable ();
	  }
	break;
      }
    /* Register number (ULEB128) plus base-type DIE offset (ULEB128).  */
    case DW_OP_regval_type:
    case DW_OP_GNU_regval_type:
      {
	unsigned long o
	  = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
	size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
		+ size_of_uleb128 (o);
      }
      break;
    /* 1-byte deref size plus base-type DIE offset (ULEB128).  */
    case DW_OP_deref_type:
    case DW_OP_GNU_deref_type:
      {
	unsigned long o
	  = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
	size += 1 + size_of_uleb128 (o);
      }
      break;
    /* Either a raw unsigned constant or a base-type DIE offset,
       both ULEB128-encoded.  */
    case DW_OP_convert:
    case DW_OP_reinterpret:
    case DW_OP_GNU_convert:
    case DW_OP_GNU_reinterpret:
      if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
	size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
      else
	{
	  unsigned long o
	    = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
	  size += size_of_uleb128 (o);
	}
      break;
    /* 4-byte DIE reference.  */
    case DW_OP_GNU_parameter_ref:
      size += 4;
      break;
    default:
      /* Remaining opcodes have no operands.  */
      break;
    }

  return size;
}
| 2031 | |
| 2032 | /* Return the size of a series of location descriptors. */ |
| 2033 | |
| 2034 | unsigned long |
| 2035 | size_of_locs (dw_loc_descr_ref loc) |
| 2036 | { |
| 2037 | dw_loc_descr_ref l; |
| 2038 | unsigned long size; |
| 2039 | |
| 2040 | /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr |
| 2041 | field, to avoid writing to a PCH file. */ |
| 2042 | for (size = 0, l = loc; l != NULL; l = l->dw_loc_next) |
| 2043 | { |
| 2044 | if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra) |
| 2045 | break; |
| 2046 | size += size_of_loc_descr (loc: l); |
| 2047 | } |
| 2048 | if (! l) |
| 2049 | return size; |
| 2050 | |
| 2051 | for (size = 0, l = loc; l != NULL; l = l->dw_loc_next) |
| 2052 | { |
| 2053 | l->dw_loc_addr = size; |
| 2054 | size += size_of_loc_descr (loc: l); |
| 2055 | } |
| 2056 | |
| 2057 | return size; |
| 2058 | } |
| 2059 | |
| 2060 | /* Return the size of the value in a DW_AT_discr_value attribute. */ |
| 2061 | |
| 2062 | static int |
| 2063 | size_of_discr_value (dw_discr_value *discr_value) |
| 2064 | { |
| 2065 | if (discr_value->pos) |
| 2066 | return size_of_uleb128 (discr_value->v.uval); |
| 2067 | else |
| 2068 | return size_of_sleb128 (discr_value->v.sval); |
| 2069 | } |
| 2070 | |
| 2071 | /* Return the size of the value in a DW_AT_discr_list attribute. */ |
| 2072 | |
| 2073 | static int |
| 2074 | size_of_discr_list (dw_discr_list_ref discr_list) |
| 2075 | { |
| 2076 | int size = 0; |
| 2077 | |
| 2078 | for (dw_discr_list_ref list = discr_list; |
| 2079 | list != NULL; |
| 2080 | list = list->dw_discr_next) |
| 2081 | { |
| 2082 | /* One byte for the discriminant value descriptor, and then one or two |
| 2083 | LEB128 numbers, depending on whether it's a single case label or a |
| 2084 | range label. */ |
| 2085 | size += 1; |
| 2086 | size += size_of_discr_value (discr_value: &list->dw_discr_lower_bound); |
| 2087 | if (list->dw_discr_range != 0) |
| 2088 | size += size_of_discr_value (discr_value: &list->dw_discr_upper_bound); |
| 2089 | } |
| 2090 | return size; |
| 2091 | } |
| 2092 | |
| 2093 | static HOST_WIDE_INT extract_int (const unsigned char *, unsigned); |
| 2094 | static void get_ref_die_offset_label (char *, dw_die_ref); |
| 2095 | static unsigned long int get_ref_die_offset (dw_die_ref); |
| 2096 | |
/* Output location description stack opcode's operands (if any).
   The for_eh_or_skip parameter controls whether register numbers are
   converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
   hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
   info).  This should be suppressed for the cases that have not been converted
   (i.e. symbolic debug info), by setting the parameter < 0.  See PR47324.  */

static void
output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
{
  dw_val_ref val1 = &loc->dw_loc_oprnd1;
  dw_val_ref val2 = &loc->dw_loc_oprnd2;

  switch (loc->dw_loc_opc)
    {
#ifdef DWARF2_DEBUGGING_INFO
    case DW_OP_const2u:
    case DW_OP_const2s:
      dw2_asm_output_data (2, val1->v.val_int, NULL);
      break;
    case DW_OP_const4u:
      /* A dtprel-marked constant is a TLS offset and must be emitted via
	 the target's DTP-relative relocation hook.  */
      if (loc->dw_loc_dtprel)
	{
	  gcc_assert (targetm.asm_out.output_dwarf_dtprel);
	  targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
					       val1->v.val_addr);
	  fputc (c: '\n', stream: asm_out_file);
	  break;
	}
      /* FALLTHRU */
    case DW_OP_const4s:
      dw2_asm_output_data (4, val1->v.val_int, NULL);
      break;
    case DW_OP_const8u:
      if (loc->dw_loc_dtprel)
	{
	  gcc_assert (targetm.asm_out.output_dwarf_dtprel);
	  targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
					       val1->v.val_addr);
	  fputc (c: '\n', stream: asm_out_file);
	  break;
	}
      /* FALLTHRU */
    case DW_OP_const8s:
      gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
      dw2_asm_output_data (8, val1->v.val_int, NULL);
      break;
    case DW_OP_skip:
    case DW_OP_bra:
      {
	int offset;

	gcc_assert (val1->val_class == dw_val_class_loc);
	/* Displacement is relative to the end of this 3-byte instruction
	   (1-byte opcode + 2-byte operand); dw_loc_addr was filled in by
	   size_of_locs.  */
	offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);

	dw2_asm_output_data (2, offset, NULL);
      }
      break;
    case DW_OP_implicit_value:
      /* ULEB128 length, then the constant bytes in a class-dependent
	 representation.  */
      dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
      switch (val2->val_class)
	{
	case dw_val_class_const:
	  dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
	  break;
	case dw_val_class_vec:
	  {
	    unsigned int elt_size = val2->v.val_vec.elt_size;
	    unsigned int len = val2->v.val_vec.length;
	    unsigned int i;
	    unsigned char *p;

	    /* Elements wider than a host word are emitted as two
	       half-width words each.  */
	    if (elt_size > sizeof (HOST_WIDE_INT))
	      {
		elt_size /= 2;
		len *= 2;
	      }
	    for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
		 i < len;
		 i++, p += elt_size)
	      dw2_asm_output_data (elt_size, extract_int (p, elt_size),
				   "fp or vector constant word %u" , i);
	  }
	  break;
	case dw_val_class_const_double:
	  {
	    unsigned HOST_WIDE_INT first, second;

	    /* Emit the two halves in target word order.  */
	    if (WORDS_BIG_ENDIAN)
	      {
		first = val2->v.val_double.high;
		second = val2->v.val_double.low;
	      }
	    else
	      {
		first = val2->v.val_double.low;
		second = val2->v.val_double.high;
	      }
	    dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
				 first, NULL);
	    dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
				 second, NULL);
	  }
	  break;
	case dw_val_class_wide_int:
	  {
	    int i;
	    int len = get_full_len (op: *val2->v.val_wide);
	    /* Emit the words in target word order.  */
	    if (WORDS_BIG_ENDIAN)
	      for (i = len - 1; i >= 0; --i)
		dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
				     val2->v.val_wide->elt (i), NULL);
	    else
	      for (i = 0; i < len; ++i)
		dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
				     val2->v.val_wide->elt (i), NULL);
	  }
	  break;
	case dw_val_class_addr:
	  gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
	  dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
	  break;
	default:
	  gcc_unreachable ();
	}
      break;
#else
    case DW_OP_const2u:
    case DW_OP_const2s:
    case DW_OP_const4u:
    case DW_OP_const4s:
    case DW_OP_const8u:
    case DW_OP_const8s:
    case DW_OP_skip:
    case DW_OP_bra:
    case DW_OP_implicit_value:
      /* We currently don't make any attempt to make sure these are
	 aligned properly like we do for the main unwind info, so
	 don't support emitting things larger than a byte if we're
	 only doing unwinding.  */
      gcc_unreachable ();
#endif
    case DW_OP_const1u:
    case DW_OP_const1s:
      dw2_asm_output_data (1, val1->v.val_int, NULL);
      break;
    case DW_OP_constu:
      dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
      break;
    case DW_OP_consts:
      dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
      break;
    case DW_OP_pick:
      dw2_asm_output_data (1, val1->v.val_int, NULL);
      break;
    case DW_OP_plus_uconst:
      dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
      break;
    /* DW_OP_breg0..31: single SLEB128 offset operand.  The register is
       encoded in the opcode, remapped by output_loc_sequence if needed.  */
    case DW_OP_breg0:
    case DW_OP_breg1:
    case DW_OP_breg2:
    case DW_OP_breg3:
    case DW_OP_breg4:
    case DW_OP_breg5:
    case DW_OP_breg6:
    case DW_OP_breg7:
    case DW_OP_breg8:
    case DW_OP_breg9:
    case DW_OP_breg10:
    case DW_OP_breg11:
    case DW_OP_breg12:
    case DW_OP_breg13:
    case DW_OP_breg14:
    case DW_OP_breg15:
    case DW_OP_breg16:
    case DW_OP_breg17:
    case DW_OP_breg18:
    case DW_OP_breg19:
    case DW_OP_breg20:
    case DW_OP_breg21:
    case DW_OP_breg22:
    case DW_OP_breg23:
    case DW_OP_breg24:
    case DW_OP_breg25:
    case DW_OP_breg26:
    case DW_OP_breg27:
    case DW_OP_breg28:
    case DW_OP_breg29:
    case DW_OP_breg30:
    case DW_OP_breg31:
      dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
      break;
    case DW_OP_regx:
      {
	unsigned r = val1->v.val_unsigned;
	/* For unwind info, translate to the output register numbering;
	   the mapping must not change the encoded size (already counted
	   by size_of_loc_descr).  */
	if (for_eh_or_skip >= 0)
	  r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
	gcc_assert (size_of_uleb128 (r)
		    == size_of_uleb128 (val1->v.val_unsigned));
	dw2_asm_output_data_uleb128 (r, NULL);
      }
      break;
    case DW_OP_fbreg:
      dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
      break;
    case DW_OP_bregx:
      {
	unsigned r = val1->v.val_unsigned;
	if (for_eh_or_skip >= 0)
	  r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
	gcc_assert (size_of_uleb128 (r)
		    == size_of_uleb128 (val1->v.val_unsigned));
	dw2_asm_output_data_uleb128 (r, NULL);
	dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
      }
      break;
    case DW_OP_piece:
      dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
      break;
    case DW_OP_bit_piece:
      dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
      dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
      break;
    case DW_OP_deref_size:
    case DW_OP_xderef_size:
      dw2_asm_output_data (1, val1->v.val_int, NULL);
      break;

    case DW_OP_addr:
      if (loc->dw_loc_dtprel)
	{
	  if (targetm.asm_out.output_dwarf_dtprel)
	    {
	      targetm.asm_out.output_dwarf_dtprel (asm_out_file,
						   DWARF2_ADDR_SIZE,
						   val1->v.val_addr);
	      fputc (c: '\n', stream: asm_out_file);
	    }
	  else
	    gcc_unreachable ();
	}
      else
	{
#ifdef DWARF2_DEBUGGING_INFO
	  dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
#else
	  gcc_unreachable ();
#endif
	}
      break;

    case DW_OP_GNU_addr_index:
    case DW_OP_addrx:
    case DW_OP_GNU_const_index:
    case DW_OP_constx:
      gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
      dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
				   "(index into .debug_addr)" );
      break;

    case DW_OP_call2:
    case DW_OP_call4:
      {
	unsigned long die_offset
	  = get_ref_die_offset (val1->v.val_die_ref.die);
	/* Make sure the offset has been computed and that we can encode it as
	   an operand.  */
	gcc_assert (die_offset > 0
		    && die_offset <= (loc->dw_loc_opc == DW_OP_call2
				      ? 0xffff
				      : 0xffffffff));
	dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
			     die_offset, NULL);
      }
      break;

    case DW_OP_call_ref:
    case DW_OP_GNU_variable_value:
      {
	char label[MAX_ARTIFICIAL_LABEL_BYTES
		   + HOST_BITS_PER_WIDE_INT / 2 + 2];
	gcc_assert (val1->val_class == dw_val_class_die_ref);
	get_ref_die_offset_label (label, val1->v.val_die_ref.die);
	dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
      }
      break;

    case DW_OP_implicit_pointer:
    case DW_OP_GNU_implicit_pointer:
      {
	char label[MAX_ARTIFICIAL_LABEL_BYTES
		   + HOST_BITS_PER_WIDE_INT / 2 + 2];
	gcc_assert (val1->val_class == dw_val_class_die_ref);
	get_ref_die_offset_label (label, val1->v.val_die_ref.die);
	dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
	dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
      }
      break;

    case DW_OP_entry_value:
    case DW_OP_GNU_entry_value:
      /* ULEB128 length of the nested expression, then the expression.  */
      dw2_asm_output_data_uleb128 (size_of_locs (loc: val1->v.val_loc), NULL);
      output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
      break;

    case DW_OP_const_type:
    case DW_OP_GNU_const_type:
      {
	/* Base-type DIE offset (ULEB128), a 1-byte size, then the constant
	   bytes; representation depends on the operand's value class.  */
	unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
	gcc_assert (o);
	dw2_asm_output_data_uleb128 (o, NULL);
	switch (val2->val_class)
	  {
	  case dw_val_class_const:
	    l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
	    dw2_asm_output_data (1, l, NULL);
	    dw2_asm_output_data (l, val2->v.val_int, NULL);
	    break;
	  case dw_val_class_vec:
	    {
	      unsigned int elt_size = val2->v.val_vec.elt_size;
	      unsigned int len = val2->v.val_vec.length;
	      unsigned int i;
	      unsigned char *p;

	      l = len * elt_size;
	      dw2_asm_output_data (1, l, NULL);
	      if (elt_size > sizeof (HOST_WIDE_INT))
		{
		  elt_size /= 2;
		  len *= 2;
		}
	      for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
		   i < len;
		   i++, p += elt_size)
		dw2_asm_output_data (elt_size, extract_int (p, elt_size),
				     "fp or vector constant word %u" , i);
	    }
	    break;
	  case dw_val_class_const_double:
	    {
	      unsigned HOST_WIDE_INT first, second;
	      l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;

	      dw2_asm_output_data (1, 2 * l, NULL);
	      if (WORDS_BIG_ENDIAN)
		{
		  first = val2->v.val_double.high;
		  second = val2->v.val_double.low;
		}
	      else
		{
		  first = val2->v.val_double.low;
		  second = val2->v.val_double.high;
		}
	      dw2_asm_output_data (l, first, NULL);
	      dw2_asm_output_data (l, second, NULL);
	    }
	    break;
	  case dw_val_class_wide_int:
	    {
	      int i;
	      int len = get_full_len (op: *val2->v.val_wide);
	      l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;

	      dw2_asm_output_data (1, len * l, NULL);
	      if (WORDS_BIG_ENDIAN)
		for (i = len - 1; i >= 0; --i)
		  dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
	      else
		for (i = 0; i < len; ++i)
		  dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
	    }
	    break;
	  default:
	    gcc_unreachable ();
	  }
      }
      break;
    case DW_OP_regval_type:
    case DW_OP_GNU_regval_type:
      {
	unsigned r = val1->v.val_unsigned;
	unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
	gcc_assert (o);
	if (for_eh_or_skip >= 0)
	  {
	    r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
	    gcc_assert (size_of_uleb128 (r)
			== size_of_uleb128 (val1->v.val_unsigned));
	  }
	dw2_asm_output_data_uleb128 (r, NULL);
	dw2_asm_output_data_uleb128 (o, NULL);
      }
      break;
    case DW_OP_deref_type:
    case DW_OP_GNU_deref_type:
      {
	unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
	gcc_assert (o);
	dw2_asm_output_data (1, val1->v.val_int, NULL);
	dw2_asm_output_data_uleb128 (o, NULL);
      }
      break;
    case DW_OP_convert:
    case DW_OP_reinterpret:
    case DW_OP_GNU_convert:
    case DW_OP_GNU_reinterpret:
      /* Either a raw unsigned constant or a base-type DIE offset.  */
      if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
	dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
      else
	{
	  unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
	  gcc_assert (o);
	  dw2_asm_output_data_uleb128 (o, NULL);
	}
      break;

    case DW_OP_GNU_parameter_ref:
      {
	unsigned long o;
	gcc_assert (val1->val_class == dw_val_class_die_ref);
	o = get_ref_die_offset (val1->v.val_die_ref.die);
	dw2_asm_output_data (4, o, NULL);
      }
      break;

    default:
      /* Other codes have no operands.  */
      break;
    }
}
| 2529 | |
| 2530 | /* Output a sequence of location operations. |
| 2531 | The for_eh_or_skip parameter controls whether register numbers are |
| 2532 | converted using DWARF2_FRAME_REG_OUT, which is needed in the case that |
| 2533 | hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind |
| 2534 | info). This should be suppressed for the cases that have not been converted |
| 2535 | (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */ |
| 2536 | |
| 2537 | void |
| 2538 | output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip) |
| 2539 | { |
| 2540 | for (; loc != NULL; loc = loc->dw_loc_next) |
| 2541 | { |
| 2542 | enum dwarf_location_atom opc = loc->dw_loc_opc; |
| 2543 | /* Output the opcode. */ |
| 2544 | if (for_eh_or_skip >= 0 |
| 2545 | && opc >= DW_OP_breg0 && opc <= DW_OP_breg31) |
| 2546 | { |
| 2547 | unsigned r = (opc - DW_OP_breg0); |
| 2548 | r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip); |
| 2549 | gcc_assert (r <= 31); |
| 2550 | opc = (enum dwarf_location_atom) (DW_OP_breg0 + r); |
| 2551 | } |
| 2552 | else if (for_eh_or_skip >= 0 |
| 2553 | && opc >= DW_OP_reg0 && opc <= DW_OP_reg31) |
| 2554 | { |
| 2555 | unsigned r = (opc - DW_OP_reg0); |
| 2556 | r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip); |
| 2557 | gcc_assert (r <= 31); |
| 2558 | opc = (enum dwarf_location_atom) (DW_OP_reg0 + r); |
| 2559 | } |
| 2560 | |
| 2561 | dw2_asm_output_data (1, opc, |
| 2562 | "%s" , dwarf_stack_op_name (op: opc)); |
| 2563 | |
| 2564 | /* Output the operand(s) (if any). */ |
| 2565 | output_loc_operands (loc, for_eh_or_skip); |
| 2566 | } |
| 2567 | } |
| 2568 | |
| 2569 | /* Output location description stack opcode's operands (if any). |
| 2570 | The output is single bytes on a line, suitable for .cfi_escape. */ |
| 2571 | |
| 2572 | static void |
| 2573 | output_loc_operands_raw (dw_loc_descr_ref loc) |
| 2574 | { |
| 2575 | dw_val_ref val1 = &loc->dw_loc_oprnd1; |
| 2576 | dw_val_ref val2 = &loc->dw_loc_oprnd2; |
| 2577 | |
| 2578 | switch (loc->dw_loc_opc) |
| 2579 | { |
| 2580 | case DW_OP_addr: |
| 2581 | case DW_OP_GNU_addr_index: |
| 2582 | case DW_OP_addrx: |
| 2583 | case DW_OP_GNU_const_index: |
| 2584 | case DW_OP_constx: |
| 2585 | case DW_OP_implicit_value: |
| 2586 | /* We cannot output addresses in .cfi_escape, only bytes. */ |
| 2587 | gcc_unreachable (); |
| 2588 | |
| 2589 | case DW_OP_const1u: |
| 2590 | case DW_OP_const1s: |
| 2591 | case DW_OP_pick: |
| 2592 | case DW_OP_deref_size: |
| 2593 | case DW_OP_xderef_size: |
| 2594 | fputc (c: ',', stream: asm_out_file); |
| 2595 | dw2_asm_output_data_raw (1, val1->v.val_int); |
| 2596 | break; |
| 2597 | |
| 2598 | case DW_OP_const2u: |
| 2599 | case DW_OP_const2s: |
| 2600 | fputc (c: ',', stream: asm_out_file); |
| 2601 | dw2_asm_output_data_raw (2, val1->v.val_int); |
| 2602 | break; |
| 2603 | |
| 2604 | case DW_OP_const4u: |
| 2605 | case DW_OP_const4s: |
| 2606 | fputc (c: ',', stream: asm_out_file); |
| 2607 | dw2_asm_output_data_raw (4, val1->v.val_int); |
| 2608 | break; |
| 2609 | |
| 2610 | case DW_OP_const8u: |
| 2611 | case DW_OP_const8s: |
| 2612 | gcc_assert (HOST_BITS_PER_WIDE_INT >= 64); |
| 2613 | fputc (c: ',', stream: asm_out_file); |
| 2614 | dw2_asm_output_data_raw (8, val1->v.val_int); |
| 2615 | break; |
| 2616 | |
| 2617 | case DW_OP_skip: |
| 2618 | case DW_OP_bra: |
| 2619 | { |
| 2620 | int offset; |
| 2621 | |
| 2622 | gcc_assert (val1->val_class == dw_val_class_loc); |
| 2623 | offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3); |
| 2624 | |
| 2625 | fputc (c: ',', stream: asm_out_file); |
| 2626 | dw2_asm_output_data_raw (2, offset); |
| 2627 | } |
| 2628 | break; |
| 2629 | |
| 2630 | case DW_OP_regx: |
| 2631 | { |
| 2632 | unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1); |
| 2633 | gcc_assert (size_of_uleb128 (r) |
| 2634 | == size_of_uleb128 (val1->v.val_unsigned)); |
| 2635 | fputc (c: ',', stream: asm_out_file); |
| 2636 | dw2_asm_output_data_uleb128_raw (r); |
| 2637 | } |
| 2638 | break; |
| 2639 | |
| 2640 | case DW_OP_constu: |
| 2641 | case DW_OP_plus_uconst: |
| 2642 | case DW_OP_piece: |
| 2643 | fputc (c: ',', stream: asm_out_file); |
| 2644 | dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned); |
| 2645 | break; |
| 2646 | |
| 2647 | case DW_OP_bit_piece: |
| 2648 | fputc (c: ',', stream: asm_out_file); |
| 2649 | dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned); |
| 2650 | dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned); |
| 2651 | break; |
| 2652 | |
| 2653 | case DW_OP_consts: |
| 2654 | case DW_OP_breg0: |
| 2655 | case DW_OP_breg1: |
| 2656 | case DW_OP_breg2: |
| 2657 | case DW_OP_breg3: |
| 2658 | case DW_OP_breg4: |
| 2659 | case DW_OP_breg5: |
| 2660 | case DW_OP_breg6: |
| 2661 | case DW_OP_breg7: |
| 2662 | case DW_OP_breg8: |
| 2663 | case DW_OP_breg9: |
| 2664 | case DW_OP_breg10: |
| 2665 | case DW_OP_breg11: |
| 2666 | case DW_OP_breg12: |
| 2667 | case DW_OP_breg13: |
| 2668 | case DW_OP_breg14: |
| 2669 | case DW_OP_breg15: |
| 2670 | case DW_OP_breg16: |
| 2671 | case DW_OP_breg17: |
| 2672 | case DW_OP_breg18: |
| 2673 | case DW_OP_breg19: |
| 2674 | case DW_OP_breg20: |
| 2675 | case DW_OP_breg21: |
| 2676 | case DW_OP_breg22: |
| 2677 | case DW_OP_breg23: |
| 2678 | case DW_OP_breg24: |
| 2679 | case DW_OP_breg25: |
| 2680 | case DW_OP_breg26: |
| 2681 | case DW_OP_breg27: |
| 2682 | case DW_OP_breg28: |
| 2683 | case DW_OP_breg29: |
| 2684 | case DW_OP_breg30: |
| 2685 | case DW_OP_breg31: |
| 2686 | case DW_OP_fbreg: |
| 2687 | fputc (c: ',', stream: asm_out_file); |
| 2688 | dw2_asm_output_data_sleb128_raw (val1->v.val_int); |
| 2689 | break; |
| 2690 | |
| 2691 | case DW_OP_bregx: |
| 2692 | { |
| 2693 | unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1); |
| 2694 | gcc_assert (size_of_uleb128 (r) |
| 2695 | == size_of_uleb128 (val1->v.val_unsigned)); |
| 2696 | fputc (c: ',', stream: asm_out_file); |
| 2697 | dw2_asm_output_data_uleb128_raw (r); |
| 2698 | fputc (c: ',', stream: asm_out_file); |
| 2699 | dw2_asm_output_data_sleb128_raw (val2->v.val_int); |
| 2700 | } |
| 2701 | break; |
| 2702 | |
| 2703 | case DW_OP_implicit_pointer: |
| 2704 | case DW_OP_entry_value: |
| 2705 | case DW_OP_const_type: |
| 2706 | case DW_OP_regval_type: |
| 2707 | case DW_OP_deref_type: |
| 2708 | case DW_OP_convert: |
| 2709 | case DW_OP_reinterpret: |
| 2710 | case DW_OP_GNU_implicit_pointer: |
| 2711 | case DW_OP_GNU_entry_value: |
| 2712 | case DW_OP_GNU_const_type: |
| 2713 | case DW_OP_GNU_regval_type: |
| 2714 | case DW_OP_GNU_deref_type: |
| 2715 | case DW_OP_GNU_convert: |
| 2716 | case DW_OP_GNU_reinterpret: |
| 2717 | case DW_OP_GNU_parameter_ref: |
| 2718 | gcc_unreachable (); |
| 2719 | break; |
| 2720 | |
| 2721 | default: |
| 2722 | /* Other codes have no operands. */ |
| 2723 | break; |
| 2724 | } |
| 2725 | } |
| 2726 | |
| 2727 | void |
| 2728 | output_loc_sequence_raw (dw_loc_descr_ref loc) |
| 2729 | { |
| 2730 | while (1) |
| 2731 | { |
| 2732 | enum dwarf_location_atom opc = loc->dw_loc_opc; |
| 2733 | /* Output the opcode. */ |
| 2734 | if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31) |
| 2735 | { |
| 2736 | unsigned r = (opc - DW_OP_breg0); |
| 2737 | r = DWARF2_FRAME_REG_OUT (r, 1); |
| 2738 | gcc_assert (r <= 31); |
| 2739 | opc = (enum dwarf_location_atom) (DW_OP_breg0 + r); |
| 2740 | } |
| 2741 | else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31) |
| 2742 | { |
| 2743 | unsigned r = (opc - DW_OP_reg0); |
| 2744 | r = DWARF2_FRAME_REG_OUT (r, 1); |
| 2745 | gcc_assert (r <= 31); |
| 2746 | opc = (enum dwarf_location_atom) (DW_OP_reg0 + r); |
| 2747 | } |
| 2748 | /* Output the opcode. */ |
| 2749 | fprintf (stream: asm_out_file, format: "%#x" , opc); |
| 2750 | output_loc_operands_raw (loc); |
| 2751 | |
| 2752 | if (!loc->dw_loc_next) |
| 2753 | break; |
| 2754 | loc = loc->dw_loc_next; |
| 2755 | |
| 2756 | fputc (c: ',', stream: asm_out_file); |
| 2757 | } |
| 2758 | } |
| 2759 | |
| 2760 | static void |
| 2761 | build_breg_loc (struct dw_loc_descr_node **head, unsigned int regno) |
| 2762 | { |
| 2763 | if (regno <= 31) |
| 2764 | add_loc_descr (list_head: head, descr: new_loc_descr (op: (enum dwarf_location_atom) |
| 2765 | (DW_OP_breg0 + regno), oprnd1: 0, oprnd2: 0)); |
| 2766 | else |
| 2767 | add_loc_descr (list_head: head, descr: new_loc_descr (op: DW_OP_bregx, oprnd1: regno, oprnd2: 0)); |
| 2768 | } |
| 2769 | |
| 2770 | /* Build a dwarf location for a cfa_reg spanning multiple |
| 2771 | consecutive registers. */ |
| 2772 | |
| 2773 | struct dw_loc_descr_node * |
| 2774 | build_span_loc (struct cfa_reg reg) |
| 2775 | { |
| 2776 | struct dw_loc_descr_node *head = NULL; |
| 2777 | |
| 2778 | gcc_assert (reg.span_width > 0); |
| 2779 | gcc_assert (reg.span > 1); |
| 2780 | |
| 2781 | /* Start from the highest number register as it goes in the upper bits. */ |
| 2782 | unsigned int regno = reg.reg + reg.span - 1; |
| 2783 | build_breg_loc (head: &head, regno); |
| 2784 | |
| 2785 | /* Deal with the remaining registers in the span. */ |
| 2786 | for (int i = reg.span - 2; i >= 0; i--) |
| 2787 | { |
| 2788 | add_loc_descr (list_head: &head, descr: int_loc_descriptor (reg.span_width * 8)); |
| 2789 | add_loc_descr (list_head: &head, descr: new_loc_descr (op: DW_OP_shl, oprnd1: 0, oprnd2: 0)); |
| 2790 | regno--; |
| 2791 | build_breg_loc (head: &head, regno); |
| 2792 | add_loc_descr (list_head: &head, descr: new_loc_descr (op: DW_OP_plus, oprnd1: 0, oprnd2: 0)); |
| 2793 | } |
| 2794 | return head; |
| 2795 | } |
| 2796 | |
| 2797 | /* This function builds a dwarf location descriptor sequence from a |
| 2798 | dw_cfa_location, adding the given OFFSET to the result of the |
| 2799 | expression. */ |
| 2800 | |
| 2801 | struct dw_loc_descr_node * |
| 2802 | build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset) |
| 2803 | { |
| 2804 | struct dw_loc_descr_node *head, *tmp; |
| 2805 | |
| 2806 | offset += cfa->offset; |
| 2807 | |
| 2808 | if (cfa->reg.span > 1) |
| 2809 | { |
| 2810 | head = build_span_loc (reg: cfa->reg); |
| 2811 | |
| 2812 | if (maybe_ne (a: offset, b: 0)) |
| 2813 | loc_descr_plus_const (list_head: &head, poly_offset: offset); |
| 2814 | } |
| 2815 | else if (cfa->indirect) |
| 2816 | { |
| 2817 | head = new_reg_loc_descr (reg: cfa->reg.reg, offset: cfa->base_offset); |
| 2818 | head->dw_loc_oprnd1.val_class = dw_val_class_const; |
| 2819 | head->dw_loc_oprnd1.val_entry = NULL; |
| 2820 | tmp = new_loc_descr (op: DW_OP_deref, oprnd1: 0, oprnd2: 0); |
| 2821 | add_loc_descr (list_head: &head, descr: tmp); |
| 2822 | loc_descr_plus_const (list_head: &head, poly_offset: offset); |
| 2823 | } |
| 2824 | else |
| 2825 | head = new_reg_loc_descr (reg: cfa->reg.reg, offset); |
| 2826 | |
| 2827 | return head; |
| 2828 | } |
| 2829 | |
| 2830 | /* This function builds a dwarf location descriptor sequence for |
| 2831 | the address at OFFSET from the CFA when stack is aligned to |
| 2832 | ALIGNMENT byte. */ |
| 2833 | |
| 2834 | struct dw_loc_descr_node * |
| 2835 | build_cfa_aligned_loc (dw_cfa_location *cfa, |
| 2836 | poly_int64 offset, HOST_WIDE_INT alignment) |
| 2837 | { |
| 2838 | struct dw_loc_descr_node *head; |
| 2839 | unsigned int dwarf_fp |
| 2840 | = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM); |
| 2841 | |
| 2842 | /* When CFA is defined as FP+OFFSET, emulate stack alignment. */ |
| 2843 | if (cfa->reg.reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0) |
| 2844 | { |
| 2845 | head = new_reg_loc_descr (reg: dwarf_fp, offset: 0); |
| 2846 | add_loc_descr (list_head: &head, descr: int_loc_descriptor (alignment)); |
| 2847 | add_loc_descr (list_head: &head, descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0)); |
| 2848 | loc_descr_plus_const (list_head: &head, poly_offset: offset); |
| 2849 | } |
| 2850 | else |
| 2851 | head = new_reg_loc_descr (reg: dwarf_fp, offset); |
| 2852 | return head; |
| 2853 | } |
| 2854 | |
/* And now, the support for symbolic debugging information.  */

/* .debug_str support.  */

/* Forward declarations for the debug-hook implementations that are
   registered in dwarf2_debug_hooks below.  */

static void dwarf2out_init (const char *);
static void dwarf2out_finish (const char *);
static void dwarf2out_early_finish (const char *);
static void dwarf2out_assembly_start (void);
static void dwarf2out_define (unsigned int, const char *);
static void dwarf2out_undef (unsigned int, const char *);
static void dwarf2out_start_source_file (unsigned, const char *);
static void dwarf2out_end_source_file (unsigned);
static void dwarf2out_function_decl (tree);
static void dwarf2out_begin_block (unsigned, unsigned, tree);
static void dwarf2out_end_block (unsigned, unsigned);
static bool dwarf2out_ignore_block (const_tree);
static void dwarf2out_set_ignored_loc (unsigned, unsigned, const char *);
static void dwarf2out_early_global_decl (tree);
static void dwarf2out_late_global_decl (tree);
static void dwarf2out_type_decl (tree, int);
static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
						 dw_die_ref);
static void dwarf2out_abstract_function (tree);
static void dwarf2out_var_location (rtx_insn *);
static void dwarf2out_inline_entry (tree);
static void dwarf2out_size_function (tree);
static void dwarf2out_begin_function (tree);
static void dwarf2out_end_function (unsigned int);
static void dwarf2out_register_main_translation_unit (tree unit);
static void dwarf2out_set_name (tree, tree);
static void dwarf2out_register_external_die (tree decl, const char *sym,
					     unsigned HOST_WIDE_INT off);
static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
					unsigned HOST_WIDE_INT *off);
| 2890 | |
| 2891 | /* The debug hooks structure. */ |
| 2892 | |
| 2893 | const struct gcc_debug_hooks dwarf2_debug_hooks = |
| 2894 | { |
| 2895 | .init: dwarf2out_init, |
| 2896 | .finish: dwarf2out_finish, |
| 2897 | .early_finish: dwarf2out_early_finish, |
| 2898 | .assembly_start: dwarf2out_assembly_start, |
| 2899 | .define: dwarf2out_define, |
| 2900 | .undef: dwarf2out_undef, |
| 2901 | .start_source_file: dwarf2out_start_source_file, |
| 2902 | .end_source_file: dwarf2out_end_source_file, |
| 2903 | .begin_block: dwarf2out_begin_block, |
| 2904 | .end_block: dwarf2out_end_block, |
| 2905 | .ignore_block: dwarf2out_ignore_block, |
| 2906 | .source_line: dwarf2out_source_line, |
| 2907 | .set_ignored_loc: dwarf2out_set_ignored_loc, |
| 2908 | .begin_prologue: dwarf2out_begin_prologue, |
| 2909 | #if VMS_DEBUGGING_INFO |
| 2910 | dwarf2out_vms_end_prologue, |
| 2911 | dwarf2out_vms_begin_epilogue, |
| 2912 | #else |
| 2913 | .end_prologue: debug_nothing_int_charstar, |
| 2914 | .begin_epilogue: debug_nothing_int_charstar, |
| 2915 | #endif |
| 2916 | .end_epilogue: dwarf2out_end_epilogue, |
| 2917 | .begin_function: dwarf2out_begin_function, |
| 2918 | .end_function: dwarf2out_end_function, /* end_function */ |
| 2919 | .register_main_translation_unit: dwarf2out_register_main_translation_unit, |
| 2920 | .function_decl: dwarf2out_function_decl, /* function_decl */ |
| 2921 | .early_global_decl: dwarf2out_early_global_decl, |
| 2922 | .late_global_decl: dwarf2out_late_global_decl, |
| 2923 | .type_decl: dwarf2out_type_decl, /* type_decl */ |
| 2924 | .imported_module_or_decl: dwarf2out_imported_module_or_decl, |
| 2925 | .die_ref_for_decl: dwarf2out_die_ref_for_decl, |
| 2926 | .register_external_die: dwarf2out_register_external_die, |
| 2927 | .deferred_inline_function: debug_nothing_tree, /* deferred_inline_function */ |
| 2928 | /* The DWARF 2 backend tries to reduce debugging bloat by not |
| 2929 | emitting the abstract description of inline functions until |
| 2930 | something tries to reference them. */ |
| 2931 | .outlining_inline_function: dwarf2out_abstract_function, /* outlining_inline_function */ |
| 2932 | .label: debug_nothing_rtx_code_label, /* label */ |
| 2933 | .handle_pch: debug_nothing_int, /* handle_pch */ |
| 2934 | .var_location: dwarf2out_var_location, |
| 2935 | .inline_entry: dwarf2out_inline_entry, /* inline_entry */ |
| 2936 | .size_function: dwarf2out_size_function, /* size_function */ |
| 2937 | .switch_text_section: dwarf2out_switch_text_section, |
| 2938 | .set_name: dwarf2out_set_name, |
| 2939 | .start_end_main_source_file: 1, /* start_end_main_source_file */ |
| 2940 | TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */ |
| 2941 | }; |
| 2942 | |
| 2943 | const struct gcc_debug_hooks dwarf2_lineno_debug_hooks = |
| 2944 | { |
| 2945 | .init: dwarf2out_init, |
| 2946 | .finish: debug_nothing_charstar, |
| 2947 | .early_finish: debug_nothing_charstar, |
| 2948 | .assembly_start: dwarf2out_assembly_start, |
| 2949 | .define: debug_nothing_int_charstar, |
| 2950 | .undef: debug_nothing_int_charstar, |
| 2951 | .start_source_file: debug_nothing_int_charstar, |
| 2952 | .end_source_file: debug_nothing_int, |
| 2953 | .begin_block: debug_nothing_int_int_tree, /* begin_block */ |
| 2954 | .end_block: debug_nothing_int_int, /* end_block */ |
| 2955 | .ignore_block: debug_true_const_tree, /* ignore_block */ |
| 2956 | .source_line: dwarf2out_source_line, /* source_line */ |
| 2957 | .set_ignored_loc: debug_nothing_int_int_charstar, /* set_ignored_loc */ |
| 2958 | .begin_prologue: debug_nothing_int_int_charstar, /* begin_prologue */ |
| 2959 | .end_prologue: debug_nothing_int_charstar, /* end_prologue */ |
| 2960 | .begin_epilogue: debug_nothing_int_charstar, /* begin_epilogue */ |
| 2961 | .end_epilogue: debug_nothing_int_charstar, /* end_epilogue */ |
| 2962 | .begin_function: debug_nothing_tree, /* begin_function */ |
| 2963 | .end_function: debug_nothing_int, /* end_function */ |
| 2964 | .register_main_translation_unit: debug_nothing_tree, /* register_main_translation_unit */ |
| 2965 | .function_decl: debug_nothing_tree, /* function_decl */ |
| 2966 | .early_global_decl: debug_nothing_tree, /* early_global_decl */ |
| 2967 | .late_global_decl: debug_nothing_tree, /* late_global_decl */ |
| 2968 | .type_decl: debug_nothing_tree_int, /* type_decl */ |
| 2969 | .imported_module_or_decl: debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */ |
| 2970 | .die_ref_for_decl: debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */ |
| 2971 | .register_external_die: debug_nothing_tree_charstar_uhwi, /* register_external_die */ |
| 2972 | .deferred_inline_function: debug_nothing_tree, /* deferred_inline_function */ |
| 2973 | .outlining_inline_function: debug_nothing_tree, /* outlining_inline_function */ |
| 2974 | .label: debug_nothing_rtx_code_label, /* label */ |
| 2975 | .handle_pch: debug_nothing_int, /* handle_pch */ |
| 2976 | .var_location: debug_nothing_rtx_insn, /* var_location */ |
| 2977 | .inline_entry: debug_nothing_tree, /* inline_entry */ |
| 2978 | .size_function: debug_nothing_tree, /* size_function */ |
| 2979 | .switch_text_section: debug_nothing_void, /* switch_text_section */ |
| 2980 | .set_name: debug_nothing_tree_tree, /* set_name */ |
| 2981 | .start_end_main_source_file: 0, /* start_end_main_source_file */ |
| 2982 | TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */ |
| 2983 | }; |
| 2984 | |
| 2985 | /* NOTE: In the comments in this file, many references are made to |
| 2986 | "Debugging Information Entries". This term is abbreviated as `DIE' |
| 2987 | throughout the remainder of this file. */ |
| 2988 | |
| 2989 | /* An internal representation of the DWARF output is built, and then |
| 2990 | walked to generate the DWARF debugging info. The walk of the internal |
| 2991 | representation is done after the entire program has been compiled. |
| 2992 | The types below are used to describe the internal representation. */ |
| 2993 | |
/* Whether to put type DIEs into their own section .debug_types instead
   of making them part of the .debug_info section.  Only supported for
   Dwarf V4 or higher and the user didn't disable them through
   -fno-debug-types-section.  It is more efficient to put them in a
   separate comdat sections since the linker will then be able to
   remove duplicates.  But not all tools support .debug_types sections
   yet.  For Dwarf V5 or higher .debug_types doesn't exist any more,
   it is DW_UT_type unit type in .debug_info section.  For late LTO
   debug there should be almost no types emitted so avoid enabling
   -fdebug-types-section there.  */

#define use_debug_types (dwarf_version >= 4 \
			 && flag_debug_types_section \
			 && !in_lto_p)

/* Various DIE's use offsets relative to the beginning of the
   .debug_info section to refer to each other.  */

typedef long int dw_offset;

/* Defined below; forward-declared for use in die_struct.  */
struct comdat_type_node;
| 3015 | |
/* The entries in the line_info table more-or-less mirror the opcodes
   that are used in the real dwarf line table.  Arrays of these entries
   are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
   supported.  */

enum dw_line_info_opcode {
  /* Emit DW_LNE_set_address; the operand is the label index.  */
  LI_set_address,

  /* Emit a row to the matrix with the given line.  This may be done
     via any combination of DW_LNS_copy, DW_LNS_advance_line, and
     special opcodes.  */
  LI_set_line,

  /* Emit a DW_LNS_set_file.  */
  LI_set_file,

  /* Emit a DW_LNS_set_column.  */
  LI_set_column,

  /* Emit a DW_LNS_negate_stmt; the operand is ignored.  */
  LI_negate_stmt,

  /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored.  */
  LI_set_prologue_end,
  LI_set_epilogue_begin,

  /* Emit a DW_LNE_set_discriminator.  */
  LI_set_discriminator,

  /* Output a Fixed Advance PC; the target PC is the label index; the
     base PC is the previous LI_adv_address or LI_set_address entry.
     We only use this when emitting debug views without assembler
     support, at explicit user request.  Ideally, we should only use
     it when the offset might be zero but we can't tell: it's the only
     way to maybe change the PC without resetting the view number.  */
  LI_adv_address
};

/* One entry in the per-section line info array: an opcode plus its
   single operand (label index, line, file, column or discriminator
   number, depending on the opcode; see enum above).  */
typedef struct GTY(()) dw_line_info_struct {
  enum dw_line_info_opcode opcode;
  unsigned int val;
} dw_line_info_entry;
| 3059 | |
| 3060 | |
/* Per-section line number table being accumulated.  */
struct GTY(()) dw_line_info_table {
  /* The label that marks the end of this section.  */
  const char *end_label;

  /* The values for the last row of the matrix, as collected in the table.
     These are used to minimize the changes to the next row.  */
  unsigned int file_num;
  unsigned int line_num;
  unsigned int column_num;
  int discrim_num;
  bool is_stmt;
  bool in_use;

  /* This denotes the NEXT view number.

     If it is 0, it is known that the NEXT view will be the first view
     at the given PC.

     If it is -1, we're forcing the view number to be reset, e.g. at a
     function entry.

     The meaning of other nonzero values depends on whether we're
     computing views internally or leaving it for the assembler to do
     so.  If we're emitting them internally, view denotes the view
     number since the last known advance of PC.  If we're leaving it
     for the assembler, it denotes the LVU label number that we're
     going to ask the assembler to assign.  */
  var_loc_view view;

  /* This counts the number of symbolic views emitted in this table
     since the latest view reset.  Its max value, over all tables,
     sets symview_upper_bound.  */
  var_loc_view symviews_since_reset;

/* Helpers for manipulating the view field above.  */
#define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
#define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
#define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
#define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))

  /* The recorded line-program entries for this section.  */
  vec<dw_line_info_entry, va_gc> *entries;
};

/* This is an upper bound for view numbers that the assembler may
   assign to symbolic views output in this translation.  It is used to
   decide how big a field to use to represent view numbers in
   symview-classed attributes.  */

static var_loc_view symview_upper_bound;
| 3109 | |
| 3110 | /* If we're keep track of location views and their reset points, and |
| 3111 | INSN is a reset point (i.e., it necessarily advances the PC), mark |
| 3112 | the next view in TABLE as reset. */ |
| 3113 | |
| 3114 | static void |
| 3115 | maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table) |
| 3116 | { |
| 3117 | if (!debug_internal_reset_location_views) |
| 3118 | return; |
| 3119 | |
| 3120 | /* Maybe turn (part of?) this test into a default target hook. */ |
| 3121 | int reset = 0; |
| 3122 | |
| 3123 | if (targetm.reset_location_view) |
| 3124 | reset = targetm.reset_location_view (insn); |
| 3125 | |
| 3126 | if (reset) |
| 3127 | ; |
| 3128 | else if (JUMP_TABLE_DATA_P (insn)) |
| 3129 | reset = 1; |
| 3130 | else if (GET_CODE (insn) == USE |
| 3131 | || GET_CODE (insn) == CLOBBER |
| 3132 | || GET_CODE (insn) == ASM_INPUT |
| 3133 | || asm_noperands (insn) >= 0) |
| 3134 | ; |
| 3135 | else if (get_attr_min_length (insn) > 0) |
| 3136 | reset = 1; |
| 3137 | |
| 3138 | if (reset > 0 && !RESETTING_VIEW_P (table->view)) |
| 3139 | RESET_NEXT_VIEW (table->view); |
| 3140 | } |
| 3141 | |
| 3142 | /* The Debugging Information Entry (DIE) structure. DIEs form a tree. |
| 3143 | The children of each node form a circular list linked by |
| 3144 | die_sib. die_child points to the node *before* the "first" child node. */ |
| 3145 | |
| 3146 | typedef struct GTY((chain_circular ("%h.die_sib" ), for_user)) die_struct { |
| 3147 | union die_symbol_or_type_node |
| 3148 | { |
| 3149 | const char * GTY ((tag ("0" ))) die_symbol; |
| 3150 | comdat_type_node *GTY ((tag ("1" ))) die_type_node; |
| 3151 | } |
| 3152 | GTY ((desc ("%0.comdat_type_p" ))) die_id; |
| 3153 | vec<dw_attr_node, va_gc> *die_attr; |
| 3154 | dw_die_ref die_parent; |
| 3155 | dw_die_ref die_child; |
| 3156 | dw_die_ref die_sib; |
| 3157 | dw_die_ref die_definition; /* ref from a specification to its definition */ |
| 3158 | dw_offset die_offset; |
| 3159 | unsigned long die_abbrev; |
| 3160 | int die_mark; |
| 3161 | unsigned int decl_id; |
| 3162 | enum dwarf_tag die_tag; |
| 3163 | /* Die is used and must not be pruned as unused. */ |
| 3164 | BOOL_BITFIELD die_perennial_p : 1; |
| 3165 | BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */ |
| 3166 | /* For an external ref to die_symbol if die_offset contains an extra |
| 3167 | offset to that symbol. */ |
| 3168 | BOOL_BITFIELD with_offset : 1; |
| 3169 | /* Whether this DIE was removed from the DIE tree, for example via |
| 3170 | prune_unused_types. We don't consider those present from the |
| 3171 | DIE lookup routines. */ |
| 3172 | BOOL_BITFIELD removed : 1; |
| 3173 | /* Lots of spare bits. */ |
| 3174 | } |
| 3175 | die_node; |
| 3176 | |
/* Set to TRUE while dwarf2out_early_global_decl is running.  */
static bool early_dwarf;
static bool early_dwarf_finished;

/* RAII helper: sets early_dwarf for the lifetime of the object and
   restores the previous value on destruction.  May not be used once
   early_dwarf_finished has been set.  */
class set_early_dwarf {
public:
  bool saved;
  set_early_dwarf () : saved(early_dwarf)
    {
      gcc_assert (! early_dwarf_finished);
      early_dwarf = true;
    }
  ~set_early_dwarf () { early_dwarf = saved; }
};

/* Evaluate 'expr' while 'c' is set to each child of DIE in order.
   Relies on the circular sibling list: die_child is the node *before*
   the first child, so advancing first visits children in order.  */
#define FOR_EACH_CHILD(die, c, expr) do {	\
  c = die->die_child;				\
  if (c) do {					\
    c = c->die_sib;				\
    expr;					\
  } while (c != die->die_child);		\
} while (0)
| 3199 | |
/* The pubname structure: a DIE together with the name it is published
   under in .debug_pubnames/.debug_pubtypes.  */

typedef struct GTY(()) pubname_struct {
  dw_die_ref die;
  const char *name;
}
pubname_entry;

/* One entry of an address range list (.debug_ranges/.debug_rnglists).  */
struct GTY(()) dw_ranges {
  const char *label;
  /* If this is positive, it's a block number, otherwise it's a
     bitwise-negated index into dw_ranges_by_label.  */
  int num;
  /* If idx is equal to DW_RANGES_IDX_SKELETON, it should be emitted
     into .debug_rnglists section rather than .debug_rnglists.dwo
     for -gsplit-dwarf and DWARF >= 5.  */
#define DW_RANGES_IDX_SKELETON ((1U << 31) - 1)
  /* Index for the range list for DW_FORM_rnglistx.  */
  unsigned int idx : 31;
  /* True if this range might be possibly in a different section
     from previous entry.  */
  unsigned int maybe_new_sec : 1;
  /* Address-table entries for the begin/end labels, when using
     indexed addresses.  */
  addr_table_entry *begin_entry;
  addr_table_entry *end_entry;
};

/* A structure to hold a macinfo entry: the macro opcode, source line,
   and the macro text or file name.  */

typedef struct GTY(()) macinfo_struct {
  unsigned char code;
  unsigned HOST_WIDE_INT lineno;
  const char *info;
}
macinfo_entry;

/* A range delimited by a pair of labels.  */
struct GTY(()) dw_ranges_by_label {
  const char *begin;
  const char *end;
};

/* The comdat type node structure.  */
struct GTY(()) comdat_type_node
{
  dw_die_ref root_die;
  dw_die_ref type_die;
  dw_die_ref skeleton_die;
  char signature[DWARF_TYPE_SIGNATURE_SIZE];
  comdat_type_node *next;
};

/* A list of DIEs for which we can't determine ancestry (parent_die
   field) just yet.  Later in dwarf2out_finish we will fill in the
   missing bits.  */
typedef struct GTY(()) limbo_die_struct {
  dw_die_ref die;
  /* The tree for which this DIE was created.  We use this to
     determine ancestry later.  */
  tree created_for;
  struct limbo_die_struct *next;
}
limbo_die_node;

/* Pairs an old DIE with its skeleton copy, chained through parents,
   used while breaking out comdat type units.  */
typedef struct skeleton_chain_struct
{
  dw_die_ref old_die;
  dw_die_ref new_die;
  struct skeleton_chain_struct *parent;
}
skeleton_chain_node;
| 3271 | |
/* Define a macro which returns nonzero for a TYPE_DECL which was
   implicitly generated for a type.

   Note that, unlike the C front-end (which generates a NULL named
   TYPE_DECL node for each complete tagged type, each array type,
   and each function type node created) the C++ front-end generates
   a _named_ TYPE_DECL node for each tagged type node created.
   These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
   generate a DW_TAG_typedef DIE for them.  Likewise with the Ada
   front-end, but for each type, tagged or not.  */

/* A decl is a stub if it is unnamed, or if it is artificial and is
   (directly, or via its abstract origin) the stub decl of its own
   type.  */
#define TYPE_DECL_IS_STUB(decl) \
  (DECL_NAME (decl) == NULL_TREE \
   || (DECL_ARTIFICIAL (decl) \
       && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
	   /* This is necessary for stub decls that \
	      appear in nested inline functions.  */ \
	   || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
	       && (decl_ultimate_origin (decl) \
		   == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
| 3292 | |
/* Information concerning the compilation unit's programming
   language, and compiler version.  */

/* Fixed size portion of the DWARF compilation unit header.  */
#define DWARF_COMPILE_UNIT_HEADER_SIZE \
  (DWARF_INITIAL_LENGTH_SIZE + dwarf_offset_size \
   + (dwarf_version >= 5 ? 4 : 3))

/* Fixed size portion of the DWARF comdat type unit header.  */
#define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
  (DWARF_COMPILE_UNIT_HEADER_SIZE \
   + DWARF_TYPE_SIGNATURE_SIZE + dwarf_offset_size)

/* Fixed size portion of the DWARF skeleton compilation unit header.  */
#define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
  (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))

/* Fixed size portion of public names info.  */
#define DWARF_PUBNAMES_HEADER_SIZE (2 * dwarf_offset_size + 2)

/* Fixed size portion of the address range info.  */
#define DWARF_ARANGES_HEADER_SIZE \
  (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + dwarf_offset_size + 4, \
		DWARF2_ADDR_SIZE * 2) \
   - DWARF_INITIAL_LENGTH_SIZE)

/* Size of padding portion in the address range info.  It must be
   aligned to twice the pointer size.  */
#define DWARF_ARANGES_PAD_SIZE \
  (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + dwarf_offset_size + 4, \
		DWARF2_ADDR_SIZE * 2) \
   - (DWARF_INITIAL_LENGTH_SIZE + dwarf_offset_size + 4))
| 3325 | |
/* Use assembler line directives if available.  The #ifndef guards
   allow a target configuration to pre-define these.  */
#ifndef DWARF2_ASM_LINE_DEBUG_INFO
#ifdef HAVE_AS_DWARF2_DEBUG_LINE
#define DWARF2_ASM_LINE_DEBUG_INFO 1
#else
#define DWARF2_ASM_LINE_DEBUG_INFO 0
#endif
#endif

/* Use assembler views in line directives if available.  */
#ifndef DWARF2_ASM_VIEW_DEBUG_INFO
#ifdef HAVE_AS_DWARF2_DEBUG_VIEW
#define DWARF2_ASM_VIEW_DEBUG_INFO 1
#else
#define DWARF2_ASM_VIEW_DEBUG_INFO 0
#endif
#endif
| 3343 | |
/* Return true if GCC configure detected assembler support for .loc.  */

bool
dwarf2out_default_as_loc_support (void)
{
  return DWARF2_ASM_LINE_DEBUG_INFO;
  /* Poison the macro after its single intended use above, so that any
     later reference is forced to go through this function (which a
     target may override via the dwarf2out_as_loc_support hook).  */
#if (GCC_VERSION >= 3000)
# undef DWARF2_ASM_LINE_DEBUG_INFO
# pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
#endif
}
| 3355 | |
/* Return true if GCC configure detected assembler support for views
   in .loc directives.  */

bool
dwarf2out_default_as_locview_support (void)
{
  return DWARF2_ASM_VIEW_DEBUG_INFO;
  /* As above: poison the macro after its single use so later code must
     call this function instead of testing the macro directly.  */
#if (GCC_VERSION >= 3000)
# undef DWARF2_ASM_VIEW_DEBUG_INFO
# pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
#endif
}
| 3368 | |
/* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
   view computation, and it refers to a view identifier for which we
   will not emit a label because it is known to map to a view number
   zero.  We won't allocate the bitmap if we're not using assembler
   support for location views, but we have to make the variable
   visible for GGC and for code that will be optimized out for lack of
   support but that's still parsed and compiled.  We could abstract it
   out with macros, but it's not worth it.  */
static GTY(()) bitmap zero_view_p;

/* Evaluate to TRUE iff N is known to identify the first location view
   at its PC.  When not using assembler location view computation,
   that must be view number zero.  Otherwise, ZERO_VIEW_P is allocated
   and views label numbers recorded in it are the ones known to be
   zero.  N == -1 denotes a forced reset (see FORCE_RESET_NEXT_VIEW)
   and also counts as zero.  */
#define ZERO_VIEW_P(N) ((N) == (var_loc_view)0				\
			|| (N) == (var_loc_view)-1			\
			|| (zero_view_p					\
			    && bitmap_bit_p (zero_view_p, (N))))
| 3388 | |
| 3389 | /* Return true iff we're to emit .loc directives for the assembler to |
| 3390 | generate line number sections. |
| 3391 | |
| 3392 | When we're not emitting views, all we need from the assembler is |
| 3393 | support for .loc directives. |
| 3394 | |
| 3395 | If we are emitting views, we can only use the assembler's .loc |
| 3396 | support if it also supports views. |
| 3397 | |
| 3398 | When the compiler is emitting the line number programs and |
| 3399 | computing view numbers itself, it resets view numbers at known PC |
| 3400 | changes and counts from that, and then it emits view numbers as |
| 3401 | literal constants in locviewlists. There are cases in which the |
| 3402 | compiler is not sure about PC changes, e.g. when extra alignment is |
| 3403 | requested for a label. In these cases, the compiler may not reset |
| 3404 | the view counter, and the potential PC advance in the line number |
| 3405 | program will use an opcode that does not reset the view counter |
| 3406 | even if the PC actually changes, so that compiler and debug info |
| 3407 | consumer can keep view numbers in sync. |
| 3408 | |
| 3409 | When the compiler defers view computation to the assembler, it |
| 3410 | emits symbolic view numbers in locviewlists, with the exception of |
| 3411 | views known to be zero (forced resets, or reset after |
| 3412 | compiler-visible PC changes): instead of emitting symbols for |
| 3413 | these, we emit literal zero and assert the assembler agrees with |
| 3414 | the compiler's assessment. We could use symbolic views everywhere, |
| 3415 | instead of special-casing zero views, but then we'd be unable to |
| 3416 | optimize out locviewlists that contain only zeros. */ |
| 3417 | |
| 3418 | static bool |
| 3419 | output_asm_line_debug_info (void) |
| 3420 | { |
| 3421 | return (dwarf2out_as_loc_support |
| 3422 | && (dwarf2out_as_locview_support |
| 3423 | || !debug_variable_location_views)); |
| 3424 | } |
| 3425 | |
| 3426 | static bool asm_outputs_debug_line_str (void); |
| 3427 | |
/* Minimum line offset in a special line info. opcode.
   This value was chosen to give a reasonable range of values.  */
#define DWARF_LINE_BASE  -10

/* First special line opcode - leave room for the standard opcodes.
   Special opcodes start right after the highest standard opcode,
   DW_LNS_set_isa.  */
#define DWARF_LINE_OPCODE_BASE  ((int)DW_LNS_set_isa + 1)

/* Range of line offsets in a special line info. opcode.  */
#define DWARF_LINE_RANGE  (254-DWARF_LINE_OPCODE_BASE+1)

/* Flag that indicates the initial value of the is_stmt_start flag.
   In the present implementation, we do not mark any lines as
   the beginning of a source statement, because that information
   is not made available by the GCC front-end.  */
#define	DWARF_LINE_DEFAULT_IS_STMT_START 1

/* Maximum number of operations per instruction bundle.  Targets with
   bundled instructions (VLIW-style) may override this default of 1.  */
#ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
#define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
#endif
| 3448 | |
/* This location is used by calc_die_sizes() to keep track
   the offset of each DIE within the .debug_info section.  */
static unsigned long next_die_offset;

/* Record the root of the DIE's built for the current compilation unit.  */
static GTY(()) dw_die_ref single_comp_unit_die;

/* A list of type DIEs that have been separated into comdat sections.  */
static GTY(()) comdat_type_node *comdat_type_list;

/* A list of CU DIEs that have been separated.  */
static GTY(()) limbo_die_node *cu_die_list;

/* A list of DIEs with a NULL parent waiting to be relocated.  These
   are "in limbo" until a proper parent context is determined.  */
static GTY(()) limbo_die_node *limbo_die_list;

/* A list of DIEs for which we may have to generate
   DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set.  */
static GTY(()) limbo_die_node *deferred_asm_name;
| 3468 | |
/* Hash traits for file_table below: dwarf_file_data entries are hashed
   and compared against plain filename strings.  */
struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
{
  typedef const char *compare_type;

  static hashval_t hash (dwarf_file_data *);
  static bool equal (dwarf_file_data *, const char *);
};

/* Filenames referenced by this compilation unit.  */
static GTY(()) hash_table<dwarf_file_hasher> *file_table;
| 3479 | |
/* Hash traits for decl_die_table below: DIEs are looked up by the
   declaration tree node they describe.  */
struct decl_die_hasher : ggc_ptr_hash<die_node>
{
  typedef tree compare_type;

  static hashval_t hash (die_node *);
  static bool equal (die_node *, tree);
};
/* A hash table of references to DIE's that describe declarations.
   The key is a DECL_UID() which is a unique number identifying each decl.  */
static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
| 3490 | |
/* An entry in variable_value_hash below: the DECL_UID of a
   FUNCTION_DECL together with the DIEs recorded for it.  */
struct GTY ((for_user)) variable_value_struct {
  unsigned int decl_id;
  vec<dw_die_ref, va_gc> *dies;
};

/* Hash traits for variable_value_hash: entries are keyed by decl_id
   and compared against the FUNCTION_DECL tree itself.  */
struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
{
  typedef tree compare_type;

  static hashval_t hash (variable_value_struct *);
  static bool equal (variable_value_struct *, tree);
};
/* A hash table of DIEs that contain DW_OP_GNU_variable_value with
   dw_val_class_decl_ref class, indexed by FUNCTION_DECLs which is
   DECL_CONTEXT of the referenced VAR_DECLs.  */
static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
| 3507 | |
/* Hash traits for common_block_die_table below; both hash and
   comparison operate directly on DIEs.  */
struct block_die_hasher : ggc_ptr_hash<die_struct>
{
  static hashval_t hash (die_struct *);
  static bool equal (die_struct *, die_struct *);
};

/* A hash table of references to DIE's that describe COMMON blocks.
   The key is DECL_UID() ^ die_parent.  */
static GTY (()) hash_table<block_die_hasher> *common_block_die_table;

/* A (DIE, tree) pair, used to defer processing of the tree argument
   until its type information is fully laid out.  */
typedef struct GTY(()) die_arg_entry_struct {
    dw_die_ref die;
    tree arg;
} die_arg_entry;
| 3522 | |
| 3523 | |
/* Node of the variable location list.  */
struct GTY ((chain_next ("%h.next"))) var_loc_node {
  /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
     EXPR_LIST chain.  For small bitsizes, bitsize is encoded
     in mode of the EXPR_LIST node and first EXPR_LIST operand
     is either NOTE_INSN_VAR_LOCATION for a piece with a known
     location or NULL for padding.  For larger bitsizes,
     mode is 0 and first operand is a CONCAT with bitsize
     as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
     NULL as second operand.  */
  rtx GTY (()) loc;
  /* Assembly label at which this location takes effect.  */
  const char * GTY (()) label;
  /* Next node in the chain (see chain_next above for GC walking).  */
  struct var_loc_node * GTY (()) next;
  /* Location view identifier associated with LABEL.  */
  var_loc_view view;
};
| 3539 | |
/* Variable location list.  */
struct GTY ((for_user)) var_loc_list_def {
  /* Head of the chained list of location nodes.  */
  struct var_loc_node * GTY (()) first;

  /* Pointer to the last but one or last element of the
     chained list.  If the list is empty, both first and
     last are NULL, if the list contains just one node
     or the last node certainly is not redundant, it points
     to the last node, otherwise points to the last but one.
     Do not mark it for GC because it is marked through the chain.  */
  struct var_loc_node * GTY ((skip ("%h"))) last;

  /* Pointer to the last element before section switch,
     if NULL, either sections weren't switched or first
     is after section switch.  */
  struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;

  /* DECL_UID of the variable decl.  */
  unsigned int decl_id;
};
typedef struct var_loc_list_def var_loc_list;
| 3561 | |
/* Call argument location list.  One node per recorded call site.  */
struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
  /* The CALL_INSN this node describes.  */
  rtx_insn * GTY (()) call_insn;
  /* Assembly label for the call site's address.  */
  const char * GTY (()) label;
  /* Lexical block containing the call.  */
  tree GTY (()) block;
  /* True if this is a tail call.  */
  bool tail_call_p;
  /* SYMBOL_REF of the call target, when known.  */
  rtx GTY (()) symbol_ref;
  struct call_arg_loc_node * GTY (()) next;
};
| 3571 | |
| 3572 | |
/* Hash traits for decl_loc_table below: var_loc_lists are keyed by
   their decl_id and compared against the decl tree node.  */
struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
{
  typedef const_tree compare_type;

  static hashval_t hash (var_loc_list *);
  static bool equal (var_loc_list *, const_tree);
};

/* Table of decl location linked lists.  */
static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;

/* Head and tail of call_arg_loc chain.  The tail pointer is rebuilt
   each compilation, so it needs no GC marking.  */
static GTY (()) struct call_arg_loc_node *call_arg_locations;
static struct call_arg_loc_node *call_arg_loc_last;

/* Number of call sites in the current function.  -1 when no function
   is being processed.  */
static int call_site_count = -1;
/* Number of tail call sites in the current function.  */
static int tail_call_site_count = -1;
| 3592 | |
/* A cached location list.  */
struct GTY ((for_user)) cached_dw_loc_list_def {
  /* The DECL_UID of the decl that this entry describes.  */
  unsigned int decl_id;

  /* The cached location list.  */
  dw_loc_list_ref loc_list;
};
typedef struct cached_dw_loc_list_def cached_dw_loc_list;

/* Hash traits for cached_dw_loc_list_table below: entries are keyed
   by decl_id and compared against the decl tree node.  */
struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
{

  typedef const_tree compare_type;

  static hashval_t hash (cached_dw_loc_list *);
  static bool equal (cached_dw_loc_list *, const_tree);
};

/* Table of cached location lists.  */
static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
| 3614 | |
/* A vector of references to DIE's that are uniquely identified by their tag,
   presence/absence of children DIE's, and list of attribute/value pairs.  */
static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;

/* A hash map to remember the stack usage for DWARF procedures.  The value
   stored is the stack size difference between before the DWARF procedure
   invocation and after it returned.  In other words, for a DWARF procedure
   that consumes N stack slots and that pushes M ones, this stores M - N.  */
static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;

/* A global counter for generating labels for line number data.  */
static unsigned int line_info_label_num;

/* The current table to which we should emit line number information
   for the current function.  This will be set up at the beginning of
   assembly for the function.  */
static GTY(()) dw_line_info_table *cur_line_info_table;

/* The two default tables of line number info.  */
static GTY(()) dw_line_info_table *text_section_line_info;
static GTY(()) dw_line_info_table *cold_text_section_line_info;

/* The set of all non-default tables of line number info.  */
static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;

/* A flag to tell pubnames/types export if there is an info section to
   refer to.  */
static bool info_section_emitted;

/* A pointer to the base of a table that contains a list of publicly
   accessible names.  */
static GTY (()) vec<pubname_entry, va_gc> *pubname_table;

/* A pointer to the base of a table that contains a list of publicly
   accessible types.  */
static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;

/* A pointer to the base of a table that contains a list of macro
   defines/undefines (and file start/end markers).  */
static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;

/* True if .debug_macinfo or .debug_macros section is going to be
   emitted.  Requires verbose debug info and a non-empty macro table;
   on XCOFF targets also requires the DWARF extras.  */
#define have_macinfo \
  ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
   && debug_info_level >= DINFO_LEVEL_VERBOSE \
   && !macinfo_table->is_empty ())

/* Vector of dies for which we should generate .debug_ranges info.  */
static GTY (()) vec<dw_ranges, va_gc> *ranges_table;

/* Vector of pairs of labels referenced in ranges_table.  */
static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;

/* Whether we have location lists that need outputting.  */
static GTY(()) bool have_location_lists;

/* Unique label counter.  */
static GTY(()) unsigned int loclabel_num;

/* Unique label counter for point-of-call tables.  */
static GTY(()) unsigned int poc_label_num;

/* The last file entry emitted by maybe_emit_file().  */
static GTY(()) struct dwarf_file_data * last_emitted_file;

/* Number of internal labels generated by gen_internal_sym().  */
static GTY(()) int label_num;

/* (DIE, tree) pairs for deferred template value parameter handling;
   see tmpl_value_parm_die_table consumers.  */
static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;

/* Instances of generic types for which we need to generate debug
   info that describe their generic parameters and arguments.  That
   generation needs to happen once all types are properly laid out so
   we do it at the end of compilation.  */
static GTY(()) vec<tree, va_gc> *generic_type_instances;

/* Offset from the "steady-state frame pointer" to the frame base,
   within the current function.  */
static poly_int64 frame_pointer_fb_offset;
static bool frame_pointer_fb_offset_valid;

/* Base type DIEs; presumably collected for size calculation by
   calc_base_type_die_sizes -- confirm against its definition.  */
static vec<dw_die_ref> base_types;
| 3698 | |
/* A cached btf_type_tag or btf_decl_tag user annotation.  */
struct GTY ((for_user)) annotation_node
{
  /* Annotation name, e.g. "btf_type_tag" or "btf_decl_tag".  */
  const char *name;
  /* The user-supplied annotation string.  */
  const char *value;
  /* Cached hash of this node, used by annotation_node_hasher.  */
  hashval_t hash;
  /* The DIE emitted for this annotation (shared between equivalent
     annotations, see btf_tag_htab below).  */
  dw_die_ref die;
  struct annotation_node *next;
};

/* Hasher for btf_type_tag and btf_decl_tag annotation nodes.  */
struct annotation_node_hasher : ggc_ptr_hash<annotation_node>
{
  typedef const struct annotation_node *compare_type;

  static hashval_t hash (struct annotation_node *);
  static bool equal (const struct annotation_node *,
		     const struct annotation_node *);
};

/* A hash table of tag annotation nodes for btf_type_tag and btf_decl_tag C
   attributes.  DIEs for these user annotations may be reused if they are
   structurally equivalent; this hash table is used to ensure the DIEs are
   reused wherever possible.  */
static GTY (()) hash_table<annotation_node_hasher> *btf_tag_htab;
| 3724 | |
| 3725 | |
/* Flags to represent a set of attribute classes for attributes that represent
   a scalar value (bounds, pointers, ...).  The values are single bits so
   they can be OR'ed together into a mask of acceptable forms.  */
enum dw_scalar_form
{
  dw_scalar_form_constant = 0x01,
  dw_scalar_form_exprloc = 0x02,
  dw_scalar_form_reference = 0x04
};
| 3734 | |
| 3735 | /* Forward declarations for functions defined in this file. */ |
| 3736 | |
| 3737 | static bool is_pseudo_reg (const_rtx); |
| 3738 | static tree type_main_variant (tree); |
| 3739 | static bool is_tagged_type (const_tree); |
| 3740 | static const char *dwarf_tag_name (unsigned); |
| 3741 | static const char *dwarf_attr_name (unsigned); |
| 3742 | static const char *dwarf_form_name (unsigned); |
| 3743 | static tree decl_ultimate_origin (const_tree); |
| 3744 | static tree decl_class_context (tree); |
| 3745 | static void add_dwarf_attr (dw_die_ref, dw_attr_node *); |
| 3746 | static inline unsigned int AT_index (dw_attr_node *); |
| 3747 | static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned); |
| 3748 | static inline unsigned AT_flag (dw_attr_node *); |
| 3749 | static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT); |
| 3750 | static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT); |
| 3751 | static void add_AT_double (dw_die_ref, enum dwarf_attribute, |
| 3752 | HOST_WIDE_INT, unsigned HOST_WIDE_INT); |
| 3753 | static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int, |
| 3754 | unsigned int, unsigned char *); |
| 3755 | static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *); |
| 3756 | static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *); |
| 3757 | static inline const char *AT_string (dw_attr_node *); |
| 3758 | static enum dwarf_form AT_string_form (dw_attr_node *); |
| 3759 | static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref); |
| 3760 | static void add_AT_specification (dw_die_ref, dw_die_ref); |
| 3761 | static inline dw_die_ref AT_ref (dw_attr_node *); |
| 3762 | static inline int AT_ref_external (dw_attr_node *); |
| 3763 | static inline void set_AT_ref_external (dw_attr_node *, int); |
| 3764 | static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref); |
| 3765 | static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute, |
| 3766 | dw_loc_list_ref); |
| 3767 | static inline dw_loc_list_ref AT_loc_list (dw_attr_node *); |
| 3768 | static void add_AT_view_list (dw_die_ref, enum dwarf_attribute); |
| 3769 | static inline dw_loc_list_ref AT_loc_list (dw_attr_node *); |
| 3770 | static addr_table_entry *add_addr_table_entry (void *, enum ate_kind); |
| 3771 | static void remove_addr_table_entry (addr_table_entry *); |
| 3772 | static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool); |
| 3773 | static inline rtx AT_addr (dw_attr_node *); |
| 3774 | static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *); |
| 3775 | static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *, |
| 3776 | int = 0); |
| 3777 | static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *); |
| 3778 | static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *); |
| 3779 | static void add_AT_range_list (dw_die_ref, enum dwarf_attribute, |
| 3780 | unsigned long, bool); |
| 3781 | static inline const char *AT_lbl (dw_attr_node *); |
| 3782 | static const char *get_AT_low_pc (dw_die_ref); |
| 3783 | static bool is_c (void); |
| 3784 | static bool is_cxx (void); |
| 3785 | static bool is_cxx (const_tree); |
| 3786 | static bool is_fortran (void); |
| 3787 | static bool is_ada (void); |
| 3788 | static bool remove_AT (dw_die_ref, enum dwarf_attribute); |
| 3789 | static void remove_child_TAG (dw_die_ref, enum dwarf_tag); |
| 3790 | static void add_child_die (dw_die_ref, dw_die_ref); |
| 3791 | static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree); |
| 3792 | static dw_die_ref strip_naming_typedef (tree, dw_die_ref); |
| 3793 | static dw_die_ref lookup_type_die_strip_naming_typedef (tree); |
| 3794 | static void equate_type_number_to_die (tree, dw_die_ref); |
| 3795 | static var_loc_list *lookup_decl_loc (const_tree); |
| 3796 | static void equate_decl_number_to_die (tree, dw_die_ref); |
| 3797 | static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view); |
| 3798 | static void print_spaces (FILE *); |
| 3799 | static void print_die (dw_die_ref, FILE *); |
| 3800 | static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *); |
| 3801 | static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *); |
| 3802 | static void die_checksum (dw_die_ref, struct md5_ctx *, int *); |
| 3803 | static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *); |
| 3804 | static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *); |
| 3805 | static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *); |
| 3806 | static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *, |
| 3807 | struct md5_ctx *, int *); |
| 3808 | struct checksum_attributes; |
| 3809 | static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref); |
| 3810 | static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *); |
| 3811 | static void checksum_die_context (dw_die_ref, struct md5_ctx *); |
| 3812 | static void generate_type_signature (dw_die_ref, comdat_type_node *); |
| 3813 | static bool same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *); |
| 3814 | static bool same_dw_val_p (const dw_val_node *, const dw_val_node *, int *); |
| 3815 | static bool same_attr_p (dw_attr_node *, dw_attr_node *, int *); |
| 3816 | static bool same_die_p (dw_die_ref, dw_die_ref, int *); |
| 3817 | static bool is_type_die (dw_die_ref); |
| 3818 | static inline bool is_template_instantiation (dw_die_ref); |
| 3819 | static bool is_declaration_die (dw_die_ref); |
| 3820 | static bool should_move_die_to_comdat (dw_die_ref); |
| 3821 | static dw_die_ref clone_as_declaration (dw_die_ref); |
| 3822 | static dw_die_ref clone_die (dw_die_ref); |
| 3823 | static dw_die_ref clone_tree (dw_die_ref); |
| 3824 | static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref); |
| 3825 | static void generate_skeleton_ancestor_tree (skeleton_chain_node *); |
| 3826 | static void generate_skeleton_bottom_up (skeleton_chain_node *); |
| 3827 | static dw_die_ref generate_skeleton (dw_die_ref); |
| 3828 | static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref, |
| 3829 | dw_die_ref, |
| 3830 | dw_die_ref); |
| 3831 | static void break_out_comdat_types (dw_die_ref); |
| 3832 | static void copy_decls_for_unworthy_types (dw_die_ref); |
| 3833 | |
| 3834 | static void add_sibling_attributes (dw_die_ref); |
| 3835 | static void output_location_lists (dw_die_ref); |
| 3836 | static int constant_size (unsigned HOST_WIDE_INT); |
| 3837 | static unsigned long size_of_die (dw_die_ref); |
| 3838 | static void calc_die_sizes (dw_die_ref); |
| 3839 | static void calc_base_type_die_sizes (void); |
| 3840 | static void mark_dies (dw_die_ref); |
| 3841 | static void unmark_dies (dw_die_ref); |
| 3842 | static void unmark_all_dies (dw_die_ref); |
| 3843 | static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *); |
| 3844 | static unsigned long size_of_aranges (void); |
| 3845 | static enum dwarf_form value_format (dw_attr_node *); |
| 3846 | static void output_value_format (dw_attr_node *); |
| 3847 | static void output_abbrev_section (void); |
| 3848 | static void output_die_abbrevs (unsigned long, dw_die_ref); |
| 3849 | static void output_die (dw_die_ref); |
| 3850 | static void output_compilation_unit_header (enum dwarf_unit_type); |
| 3851 | static void output_comp_unit (dw_die_ref, int, const unsigned char *); |
| 3852 | static void output_comdat_type_unit (comdat_type_node *, bool); |
| 3853 | static const char *dwarf2_name (tree, int); |
| 3854 | static void add_pubname (tree, dw_die_ref); |
| 3855 | static void add_enumerator_pubname (const char *, dw_die_ref); |
| 3856 | static void add_pubname_string (const char *, dw_die_ref); |
| 3857 | static void add_pubtype (tree, dw_die_ref); |
| 3858 | static void output_pubnames (vec<pubname_entry, va_gc> *); |
| 3859 | static void output_aranges (void); |
| 3860 | static unsigned int add_ranges (const_tree, bool = false); |
| 3861 | static void add_ranges_by_labels (dw_die_ref, const char *, const char *, |
| 3862 | bool *, bool); |
| 3863 | static void output_ranges (void); |
| 3864 | static dw_line_info_table *new_line_info_table (void); |
| 3865 | static void output_line_info (bool); |
| 3866 | static void output_file_names (void); |
| 3867 | static bool is_base_type (tree); |
| 3868 | static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref); |
| 3869 | static int decl_quals (const_tree); |
| 3870 | static dw_die_ref modified_type_die (tree, int, tree, bool, dw_die_ref); |
| 3871 | static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref); |
| 3872 | static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref); |
| 3873 | static unsigned int debugger_reg_number (const_rtx); |
| 3874 | static void add_loc_descr_op_piece (dw_loc_descr_ref *, int); |
| 3875 | static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status); |
| 3876 | static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int, |
| 3877 | enum var_init_status); |
| 3878 | static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx, |
| 3879 | enum var_init_status); |
| 3880 | static dw_loc_descr_ref based_loc_descr (rtx, poly_int64, |
| 3881 | enum var_init_status); |
| 3882 | static bool is_based_loc (const_rtx); |
| 3883 | static bool resolve_one_addr (rtx *); |
| 3884 | static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx, |
| 3885 | enum var_init_status); |
| 3886 | static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode, |
| 3887 | enum var_init_status); |
| 3888 | struct loc_descr_context; |
| 3889 | static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref); |
| 3890 | static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list); |
| 3891 | static dw_loc_list_ref loc_list_from_tree (tree, int, |
| 3892 | struct loc_descr_context *); |
| 3893 | static dw_loc_descr_ref loc_descriptor_from_tree (tree, int, |
| 3894 | struct loc_descr_context *); |
| 3895 | static tree field_type (const_tree); |
| 3896 | static unsigned int simple_type_align_in_bits (const_tree); |
| 3897 | static unsigned int simple_decl_align_in_bits (const_tree); |
| 3898 | static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree); |
| 3899 | struct vlr_context; |
| 3900 | static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *, |
| 3901 | HOST_WIDE_INT *); |
| 3902 | static void add_AT_location_description (dw_die_ref, enum dwarf_attribute, |
| 3903 | dw_loc_list_ref); |
| 3904 | static void add_data_member_location_attribute (dw_die_ref, tree, |
| 3905 | struct vlr_context *); |
| 3906 | static bool add_const_value_attribute (dw_die_ref, machine_mode, rtx); |
| 3907 | static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *); |
| 3908 | static void insert_wide_int (const wide_int_ref &, unsigned char *, int); |
| 3909 | static unsigned insert_float (const_rtx, unsigned char *); |
| 3910 | static rtx rtl_for_decl_location (tree); |
| 3911 | static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool); |
| 3912 | static bool tree_add_const_value_attribute (dw_die_ref, tree); |
| 3913 | static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree); |
| 3914 | static void add_desc_attribute (dw_die_ref, tree); |
| 3915 | static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref); |
| 3916 | static void add_comp_dir_attribute (dw_die_ref); |
| 3917 | static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int, |
| 3918 | struct loc_descr_context *); |
| 3919 | static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree, |
| 3920 | struct loc_descr_context *); |
| 3921 | static void add_subscript_info (dw_die_ref, tree, bool); |
| 3922 | static void add_byte_size_attribute (dw_die_ref, tree); |
| 3923 | static void add_alignment_attribute (dw_die_ref, tree); |
| 3924 | static void add_bit_offset_attribute (dw_die_ref, tree); |
| 3925 | static void add_bit_size_attribute (dw_die_ref, tree); |
| 3926 | static void add_prototyped_attribute (dw_die_ref, tree); |
| 3927 | static void add_abstract_origin_attribute (dw_die_ref, tree); |
| 3928 | static void add_pure_or_virtual_attribute (dw_die_ref, tree); |
| 3929 | static void add_src_coords_attributes (dw_die_ref, tree); |
| 3930 | static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false); |
| 3931 | static void add_discr_value (dw_die_ref, dw_discr_value *); |
| 3932 | static void add_discr_list (dw_die_ref, dw_discr_list_ref); |
| 3933 | static inline dw_discr_list_ref AT_discr_list (dw_attr_node *); |
| 3934 | static dw_die_ref scope_die_for (tree, dw_die_ref); |
| 3935 | static inline bool local_scope_p (dw_die_ref); |
| 3936 | static inline bool class_scope_p (dw_die_ref); |
| 3937 | static inline bool class_or_namespace_scope_p (dw_die_ref); |
| 3938 | static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref); |
| 3939 | static void add_calling_convention_attribute (dw_die_ref, tree); |
| 3940 | static const char *type_tag (const_tree); |
| 3941 | static tree member_declared_type (const_tree); |
| 3942 | #if 0 |
| 3943 | static const char *decl_start_label (tree); |
| 3944 | #endif |
| 3945 | static void gen_array_type_die (tree, dw_die_ref); |
| 3946 | static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref); |
| 3947 | #if 0 |
| 3948 | static void gen_entry_point_die (tree, dw_die_ref); |
| 3949 | #endif |
| 3950 | static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref, bool); |
| 3951 | static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref); |
| 3952 | static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*); |
| 3953 | static void gen_unspecified_parameters_die (tree, dw_die_ref); |
| 3954 | static void gen_formal_types_die (tree, dw_die_ref); |
| 3955 | static void gen_subprogram_die (tree, dw_die_ref); |
| 3956 | static void gen_variable_die (tree, tree, dw_die_ref); |
| 3957 | static void gen_const_die (tree, dw_die_ref); |
| 3958 | static void gen_label_die (tree, dw_die_ref); |
| 3959 | static void gen_lexical_block_die (tree, dw_die_ref); |
| 3960 | static void gen_inlined_subroutine_die (tree, dw_die_ref); |
| 3961 | static void gen_field_die (tree, struct vlr_context *, dw_die_ref); |
| 3962 | static void gen_ptr_to_mbr_type_die (tree, dw_die_ref); |
| 3963 | static dw_die_ref gen_compile_unit_die (const char *); |
| 3964 | static void gen_inheritance_die (tree, tree, tree, dw_die_ref); |
| 3965 | static void gen_member_die (tree, dw_die_ref); |
| 3966 | static void gen_struct_or_union_type_die (tree, dw_die_ref, |
| 3967 | enum debug_info_usage); |
| 3968 | static void gen_subroutine_type_die (tree, dw_die_ref); |
| 3969 | static void gen_typedef_die (tree, dw_die_ref); |
| 3970 | static void gen_type_die (tree, dw_die_ref, bool = false); |
| 3971 | static void gen_block_die (tree, dw_die_ref); |
| 3972 | static void decls_for_scope (tree, dw_die_ref, bool = true); |
| 3973 | static bool is_naming_typedef_decl (const_tree); |
| 3974 | static inline dw_die_ref get_context_die (tree); |
| 3975 | static void gen_namespace_die (tree, dw_die_ref); |
| 3976 | static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree); |
| 3977 | static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref); |
| 3978 | static dw_die_ref force_decl_die (tree); |
| 3979 | static dw_die_ref force_type_die (tree); |
| 3980 | static dw_die_ref setup_namespace_context (tree, dw_die_ref); |
| 3981 | static dw_die_ref declare_in_namespace (tree, dw_die_ref); |
| 3982 | static struct dwarf_file_data * lookup_filename (const char *); |
| 3983 | static void retry_incomplete_types (void); |
| 3984 | static void gen_type_die_for_member (tree, tree, dw_die_ref); |
| 3985 | static void gen_generic_params_dies (tree); |
| 3986 | static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage, |
| 3987 | bool = false); |
| 3988 | static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage, |
| 3989 | bool = false); |
| 3990 | static void splice_child_die (dw_die_ref, dw_die_ref); |
| 3991 | static int file_info_cmp (const void *, const void *); |
| 3992 | static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view, |
| 3993 | const char *, var_loc_view, const char *); |
| 3994 | static void output_loc_list (dw_loc_list_ref); |
| 3995 | static char *gen_internal_sym (const char *); |
| 3996 | static bool want_pubnames (void); |
| 3997 | |
| 3998 | static void prune_unmark_dies (dw_die_ref); |
| 3999 | static void prune_unused_types_mark_generic_parms_dies (dw_die_ref); |
| 4000 | static void prune_unused_types_mark (dw_die_ref, int); |
| 4001 | static void prune_unused_types_walk (dw_die_ref); |
| 4002 | static void prune_unused_types_walk_attribs (dw_die_ref); |
| 4003 | static void prune_unused_types_prune (dw_die_ref); |
| 4004 | static void prune_unused_types (void); |
| 4005 | static int maybe_emit_file (struct dwarf_file_data *fd); |
| 4006 | static inline const char *AT_vms_delta1 (dw_attr_node *); |
| 4007 | static inline const char *AT_vms_delta2 (dw_attr_node *); |
| 4008 | #if VMS_DEBUGGING_INFO |
| 4009 | static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute, |
| 4010 | const char *, const char *); |
| 4011 | #endif |
| 4012 | static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree); |
| 4013 | static void gen_remaining_tmpl_value_param_die_attribute (void); |
| 4014 | static bool generic_type_p (tree); |
| 4015 | static void schedule_generic_params_dies_gen (tree t); |
| 4016 | static void gen_scheduled_generic_parms_dies (void); |
| 4017 | static void resolve_variable_values (void); |
| 4018 | |
| 4019 | static const char *comp_dir_string (void); |
| 4020 | |
| 4021 | static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &); |
| 4022 | |
| 4023 | /* enum for tracking thread-local variables whose address is really an offset |
| 4024 | relative to the TLS pointer, which will need link-time relocation, but will |
| 4025 | not need relocation by the DWARF consumer. */ |
| 4026 | |
enum dtprel_bool
{
  dtprel_false = 0,  /* Ordinary address: needs consumer-level relocation.  */
  dtprel_true = 1    /* TLS-pointer-relative offset: link-time reloc only.  */
};
| 4032 | |
| 4033 | /* Return the operator to use for an address of a variable. For dtprel_true, we |
| 4034 | use DW_OP_const*. For regular variables, which need both link-time |
| 4035 | relocation and consumer-level relocation (e.g., to account for shared objects |
| 4036 | loaded at a random address), we use DW_OP_addr*. */ |
| 4037 | |
| 4038 | static inline enum dwarf_location_atom |
| 4039 | dw_addr_op (enum dtprel_bool dtprel) |
| 4040 | { |
| 4041 | if (dtprel == dtprel_true) |
| 4042 | return (dwarf_split_debug_info ? dwarf_OP (op: DW_OP_constx) |
| 4043 | : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u)); |
| 4044 | else |
| 4045 | return dwarf_split_debug_info ? dwarf_OP (op: DW_OP_addrx) : DW_OP_addr; |
| 4046 | } |
| 4047 | |
| 4048 | /* Return a pointer to a newly allocated address location description. If |
| 4049 | dwarf_split_debug_info is true, then record the address with the appropriate |
| 4050 | relocation. */ |
| 4051 | static inline dw_loc_descr_ref |
| 4052 | new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel) |
| 4053 | { |
| 4054 | dw_loc_descr_ref ref = new_loc_descr (op: dw_addr_op (dtprel), oprnd1: 0, oprnd2: 0); |
| 4055 | |
| 4056 | ref->dw_loc_oprnd1.val_class = dw_val_class_addr; |
| 4057 | ref->dw_loc_oprnd1.v.val_addr = addr; |
| 4058 | ref->dw_loc_dtprel = dtprel; |
| 4059 | if (dwarf_split_debug_info) |
| 4060 | ref->dw_loc_oprnd1.val_entry |
| 4061 | = add_addr_table_entry (addr, |
| 4062 | dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx); |
| 4063 | else |
| 4064 | ref->dw_loc_oprnd1.val_entry = NULL; |
| 4065 | |
| 4066 | return ref; |
| 4067 | } |
| 4068 | |
| 4069 | /* Section names used to hold DWARF debugging information. */ |
| 4070 | |
| 4071 | #ifndef DEBUG_INFO_SECTION |
| 4072 | #define DEBUG_INFO_SECTION ".debug_info" |
| 4073 | #endif |
| 4074 | #ifndef DEBUG_DWO_INFO_SECTION |
| 4075 | #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo" |
| 4076 | #endif |
| 4077 | #ifndef DEBUG_LTO_INFO_SECTION |
| 4078 | #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info" |
| 4079 | #endif |
| 4080 | #ifndef DEBUG_LTO_DWO_INFO_SECTION |
| 4081 | #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo" |
| 4082 | #endif |
| 4083 | #ifndef DEBUG_ABBREV_SECTION |
| 4084 | #define DEBUG_ABBREV_SECTION ".debug_abbrev" |
| 4085 | #endif |
| 4086 | #ifndef DEBUG_LTO_ABBREV_SECTION |
| 4087 | #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev" |
| 4088 | #endif |
| 4089 | #ifndef DEBUG_DWO_ABBREV_SECTION |
| 4090 | #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo" |
| 4091 | #endif |
| 4092 | #ifndef DEBUG_LTO_DWO_ABBREV_SECTION |
| 4093 | #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo" |
| 4094 | #endif |
| 4095 | #ifndef DEBUG_ARANGES_SECTION |
| 4096 | #define DEBUG_ARANGES_SECTION ".debug_aranges" |
| 4097 | #endif |
| 4098 | #ifndef DEBUG_ADDR_SECTION |
| 4099 | #define DEBUG_ADDR_SECTION ".debug_addr" |
| 4100 | #endif |
| 4101 | #ifndef DEBUG_MACINFO_SECTION |
| 4102 | #define DEBUG_MACINFO_SECTION ".debug_macinfo" |
| 4103 | #endif |
| 4104 | #ifndef DEBUG_LTO_MACINFO_SECTION |
| 4105 | #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo" |
| 4106 | #endif |
| 4107 | #ifndef DEBUG_DWO_MACINFO_SECTION |
| 4108 | #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo" |
| 4109 | #endif |
| 4110 | #ifndef DEBUG_LTO_DWO_MACINFO_SECTION |
| 4111 | #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo" |
| 4112 | #endif |
| 4113 | #ifndef DEBUG_MACRO_SECTION |
| 4114 | #define DEBUG_MACRO_SECTION ".debug_macro" |
| 4115 | #endif |
| 4116 | #ifndef DEBUG_LTO_MACRO_SECTION |
| 4117 | #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro" |
| 4118 | #endif |
| 4119 | #ifndef DEBUG_DWO_MACRO_SECTION |
| 4120 | #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo" |
| 4121 | #endif |
| 4122 | #ifndef DEBUG_LTO_DWO_MACRO_SECTION |
| 4123 | #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo" |
| 4124 | #endif |
| 4125 | #ifndef DEBUG_LINE_SECTION |
| 4126 | #define DEBUG_LINE_SECTION ".debug_line" |
| 4127 | #endif |
| 4128 | #ifndef DEBUG_LTO_LINE_SECTION |
| 4129 | #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line" |
| 4130 | #endif |
| 4131 | #ifndef DEBUG_DWO_LINE_SECTION |
| 4132 | #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo" |
| 4133 | #endif |
| 4134 | #ifndef DEBUG_LTO_DWO_LINE_SECTION |
| 4135 | #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo" |
| 4136 | #endif |
| 4137 | #ifndef DEBUG_LOC_SECTION |
| 4138 | #define DEBUG_LOC_SECTION ".debug_loc" |
| 4139 | #endif |
| 4140 | #ifndef DEBUG_DWO_LOC_SECTION |
| 4141 | #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo" |
| 4142 | #endif |
| 4143 | #ifndef DEBUG_LOCLISTS_SECTION |
| 4144 | #define DEBUG_LOCLISTS_SECTION ".debug_loclists" |
| 4145 | #endif |
| 4146 | #ifndef DEBUG_DWO_LOCLISTS_SECTION |
| 4147 | #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo" |
| 4148 | #endif |
| 4149 | #ifndef DEBUG_PUBNAMES_SECTION |
| 4150 | #define DEBUG_PUBNAMES_SECTION \ |
| 4151 | ((debug_generate_pub_sections == 2) \ |
| 4152 | ? ".debug_gnu_pubnames" : ".debug_pubnames") |
| 4153 | #endif |
| 4154 | #ifndef DEBUG_PUBTYPES_SECTION |
| 4155 | #define DEBUG_PUBTYPES_SECTION \ |
| 4156 | ((debug_generate_pub_sections == 2) \ |
| 4157 | ? ".debug_gnu_pubtypes" : ".debug_pubtypes") |
| 4158 | #endif |
| 4159 | #ifndef DEBUG_STR_OFFSETS_SECTION |
| 4160 | #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets" |
| 4161 | #endif |
| 4162 | #ifndef DEBUG_DWO_STR_OFFSETS_SECTION |
| 4163 | #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo" |
| 4164 | #endif |
| 4165 | #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION |
| 4166 | #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo" |
| 4167 | #endif |
| 4168 | #ifndef DEBUG_STR_SECTION |
| 4169 | #define DEBUG_STR_SECTION ".debug_str" |
| 4170 | #endif |
| 4171 | #ifndef DEBUG_LTO_STR_SECTION |
| 4172 | #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str" |
| 4173 | #endif |
| 4174 | #ifndef DEBUG_STR_DWO_SECTION |
| 4175 | #define DEBUG_STR_DWO_SECTION ".debug_str.dwo" |
| 4176 | #endif |
| 4177 | #ifndef DEBUG_LTO_STR_DWO_SECTION |
| 4178 | #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo" |
| 4179 | #endif |
| 4180 | #ifndef DEBUG_RANGES_SECTION |
| 4181 | #define DEBUG_RANGES_SECTION ".debug_ranges" |
| 4182 | #endif |
| 4183 | #ifndef DEBUG_RNGLISTS_SECTION |
| 4184 | #define DEBUG_RNGLISTS_SECTION ".debug_rnglists" |
| 4185 | #endif |
| 4186 | #ifndef DEBUG_DWO_RNGLISTS_SECTION |
| 4187 | #define DEBUG_DWO_RNGLISTS_SECTION ".debug_rnglists.dwo" |
| 4188 | #endif |
| 4189 | #ifndef DEBUG_LINE_STR_SECTION |
| 4190 | #define DEBUG_LINE_STR_SECTION ".debug_line_str" |
| 4191 | #endif |
| 4192 | #ifndef DEBUG_LTO_LINE_STR_SECTION |
| 4193 | #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str" |
| 4194 | #endif |
| 4195 | |
| 4196 | /* Section flags for .debug_str section. */ |
| 4197 | #define DEBUG_STR_SECTION_FLAGS \ |
| 4198 | (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \ |
| 4199 | ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \ |
| 4200 | : SECTION_DEBUG) |
| 4201 | |
| 4202 | /* Section flags for .debug_str.dwo section. */ |
| 4203 | #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE) |
| 4204 | |
| 4205 | /* Attribute used to refer to the macro section. */ |
| 4206 | #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \ |
| 4207 | : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros) |
| 4208 | |
| 4209 | /* Labels we insert at beginning sections we can reference instead of |
| 4210 | the section names themselves. */ |
| 4211 | |
| 4212 | #ifndef TEXT_SECTION_LABEL |
| 4213 | #define TEXT_SECTION_LABEL "Ltext" |
| 4214 | #endif |
| 4215 | #ifndef COLD_TEXT_SECTION_LABEL |
| 4216 | #define COLD_TEXT_SECTION_LABEL "Ltext_cold" |
| 4217 | #endif |
| 4218 | #ifndef DEBUG_LINE_SECTION_LABEL |
| 4219 | #define DEBUG_LINE_SECTION_LABEL "Ldebug_line" |
| 4220 | #endif |
| 4221 | #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL |
| 4222 | #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line" |
| 4223 | #endif |
| 4224 | #ifndef DEBUG_INFO_SECTION_LABEL |
| 4225 | #define DEBUG_INFO_SECTION_LABEL "Ldebug_info" |
| 4226 | #endif |
| 4227 | #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL |
| 4228 | #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info" |
| 4229 | #endif |
| 4230 | #ifndef DEBUG_ABBREV_SECTION_LABEL |
| 4231 | #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev" |
| 4232 | #endif |
| 4233 | #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL |
| 4234 | #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev" |
| 4235 | #endif |
| 4236 | #ifndef DEBUG_ADDR_SECTION_LABEL |
| 4237 | #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr" |
| 4238 | #endif |
| 4239 | #ifndef DEBUG_LOC_SECTION_LABEL |
| 4240 | #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc" |
| 4241 | #endif |
| 4242 | #ifndef DEBUG_RANGES_SECTION_LABEL |
| 4243 | #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges" |
| 4244 | #endif |
| 4245 | #ifndef DEBUG_MACINFO_SECTION_LABEL |
| 4246 | #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo" |
| 4247 | #endif |
| 4248 | #ifndef DEBUG_MACRO_SECTION_LABEL |
| 4249 | #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro" |
| 4250 | #endif |
| 4251 | #define SKELETON_COMP_DIE_ABBREV 1 |
| 4252 | #define SKELETON_TYPE_DIE_ABBREV 2 |
| 4253 | |
/* Definitions of defaults for formats and names of various special
   (artificial) labels which may be generated within this file (when the -g
   option is used and DWARF2_DEBUGGING_INFO is in effect).
   If necessary, these may be overridden from within the tm.h file, but
   typically, overriding these defaults is unnecessary. */
| 4259 | |
/* End-of-text labels for the hot and cold text partitions.  */
static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
/* Labels placed at the start of the various debug sections so that
   references can be section-relative.  */
static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
/* NOTE(review): sized at twice the usual label length, presumably to hold
   composite "label+offset"-style strings -- confirm against users.  */
static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
| 4275 | |
| 4276 | #ifndef TEXT_END_LABEL |
| 4277 | #define TEXT_END_LABEL "Letext" |
| 4278 | #endif |
| 4279 | #ifndef COLD_END_LABEL |
| 4280 | #define COLD_END_LABEL "Letext_cold" |
| 4281 | #endif |
| 4282 | #ifndef BLOCK_BEGIN_LABEL |
| 4283 | #define BLOCK_BEGIN_LABEL "LBB" |
| 4284 | #endif |
| 4285 | #ifndef BLOCK_INLINE_ENTRY_LABEL |
| 4286 | #define BLOCK_INLINE_ENTRY_LABEL "LBI" |
| 4287 | #endif |
| 4288 | #ifndef BLOCK_END_LABEL |
| 4289 | #define BLOCK_END_LABEL "LBE" |
| 4290 | #endif |
| 4291 | #ifndef LINE_CODE_LABEL |
| 4292 | #define LINE_CODE_LABEL "LM" |
| 4293 | #endif |
| 4294 | |
| 4295 | |
| 4296 | /* Return the root of the DIE's built for the current compilation unit. */ |
| 4297 | static dw_die_ref |
| 4298 | comp_unit_die (void) |
| 4299 | { |
| 4300 | if (!single_comp_unit_die) |
| 4301 | single_comp_unit_die = gen_compile_unit_die (NULL); |
| 4302 | return single_comp_unit_die; |
| 4303 | } |
| 4304 | |
| 4305 | /* We allow a language front-end to designate a function that is to be |
| 4306 | called to "demangle" any name before it is put into a DIE. */ |
| 4307 | |
| 4308 | static const char *(*demangle_name_func) (const char *); |
| 4309 | |
void
dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
{
  /* FUNC will be applied to names before they are placed in DIEs.  */
  demangle_name_func = func;
}
| 4315 | |
| 4316 | /* Test if rtl node points to a pseudo register. */ |
| 4317 | |
| 4318 | static inline bool |
| 4319 | is_pseudo_reg (const_rtx rtl) |
| 4320 | { |
| 4321 | return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER) |
| 4322 | || (GET_CODE (rtl) == SUBREG |
| 4323 | && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER)); |
| 4324 | } |
| 4325 | |
| 4326 | /* Return a reference to a type, with its const and volatile qualifiers |
| 4327 | removed. */ |
| 4328 | |
| 4329 | static inline tree |
| 4330 | type_main_variant (tree type) |
| 4331 | { |
| 4332 | type = TYPE_MAIN_VARIANT (type); |
| 4333 | |
| 4334 | /* ??? There really should be only one main variant among any group of |
| 4335 | variants of a given type (and all of the MAIN_VARIANT values for all |
| 4336 | members of the group should point to that one type) but sometimes the C |
| 4337 | front-end messes this up for array types, so we work around that bug |
| 4338 | here. */ |
| 4339 | if (TREE_CODE (type) == ARRAY_TYPE) |
| 4340 | while (type != TYPE_MAIN_VARIANT (type)) |
| 4341 | type = TYPE_MAIN_VARIANT (type); |
| 4342 | |
| 4343 | return type; |
| 4344 | } |
| 4345 | |
| 4346 | /* Return true if the given type node represents a tagged type. */ |
| 4347 | |
| 4348 | static inline bool |
| 4349 | is_tagged_type (const_tree type) |
| 4350 | { |
| 4351 | enum tree_code code = TREE_CODE (type); |
| 4352 | |
| 4353 | return (code == RECORD_TYPE || code == UNION_TYPE |
| 4354 | || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE); |
| 4355 | } |
| 4356 | |
| 4357 | /* Set label to debug_info_section_label + die_offset of a DIE reference. */ |
| 4358 | |
| 4359 | static void |
| 4360 | get_ref_die_offset_label (char *label, dw_die_ref ref) |
| 4361 | { |
| 4362 | sprintf (s: label, format: "%s+%ld" , debug_info_section_label, ref->die_offset); |
| 4363 | } |
| 4364 | |
| 4365 | /* Return die_offset of a DIE reference to a base type. */ |
| 4366 | |
| 4367 | static unsigned long int |
| 4368 | get_base_type_offset (dw_die_ref ref) |
| 4369 | { |
| 4370 | if (ref->die_offset) |
| 4371 | return ref->die_offset; |
| 4372 | if (comp_unit_die ()->die_abbrev) |
| 4373 | { |
| 4374 | calc_base_type_die_sizes (); |
| 4375 | gcc_assert (ref->die_offset); |
| 4376 | } |
| 4377 | return ref->die_offset; |
| 4378 | } |
| 4379 | |
| 4380 | /* Return die_offset of a DIE reference other than base type. */ |
| 4381 | |
static unsigned long int
get_ref_die_offset (dw_die_ref ref)
{
  /* Unlike base types, a zero offset here can never be fixed up later,
     so it indicates a bug in the caller.  */
  gcc_assert (ref->die_offset);
  return ref->die_offset;
}
| 4388 | |
| 4389 | /* Convert a DIE tag into its string name. */ |
| 4390 | |
| 4391 | static const char * |
| 4392 | dwarf_tag_name (unsigned int tag) |
| 4393 | { |
| 4394 | const char *name = get_DW_TAG_name (tag); |
| 4395 | |
| 4396 | if (name != NULL) |
| 4397 | return name; |
| 4398 | |
| 4399 | return "DW_TAG_<unknown>" ; |
| 4400 | } |
| 4401 | |
| 4402 | /* Convert a DWARF attribute code into its string name. */ |
| 4403 | |
static const char *
dwarf_attr_name (unsigned int attr)
{
  const char *name;

  /* Some vendor attribute codes overlap: the same numeric value means
     different things for VMS (HP) and for MIPS targets, so pick the
     spelling matching this build's configuration before consulting the
     generic name table.  */
  switch (attr)
    {
#if VMS_DEBUGGING_INFO
    case DW_AT_HP_prologue:
      return "DW_AT_HP_prologue";
#else
    case DW_AT_MIPS_loop_unroll_factor:
      return "DW_AT_MIPS_loop_unroll_factor";
#endif

#if VMS_DEBUGGING_INFO
    case DW_AT_HP_epilogue:
      return "DW_AT_HP_epilogue";
#else
    case DW_AT_MIPS_stride:
      return "DW_AT_MIPS_stride";
#endif
    }

  name = get_DW_AT_name (attr);

  if (name != NULL)
    return name;

  return "DW_AT_<unknown>";
}
| 4435 | |
| 4436 | /* Convert a DWARF value form code into its string name. */ |
| 4437 | |
| 4438 | static const char * |
| 4439 | dwarf_form_name (unsigned int form) |
| 4440 | { |
| 4441 | const char *name = get_DW_FORM_name (form); |
| 4442 | |
| 4443 | if (name != NULL) |
| 4444 | return name; |
| 4445 | |
| 4446 | return "DW_FORM_<unknown>" ; |
| 4447 | } |
| 4448 | |
| 4449 | /* Determine the "ultimate origin" of a decl. The decl may be an inlined |
| 4450 | instance of an inlined instance of a decl which is local to an inline |
| 4451 | function, so we have to trace all of the way back through the origin chain |
| 4452 | to find out what sort of node actually served as the original seed for the |
| 4453 | given block. */ |
| 4454 | |
static tree
decl_ultimate_origin (const_tree decl)
{
  /* Only nodes with a DECL_COMMON part carry an abstract origin.  */
  if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
    return NULL_TREE;

  /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
     we're trying to output the abstract instance of this function.  */
  if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
    return NULL_TREE;

  /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
     most distant ancestor, this should never happen.  */
  gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));

  return DECL_ABSTRACT_ORIGIN (decl);
}
| 4472 | |
| 4473 | /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT |
| 4474 | of a virtual function may refer to a base class, so we check the 'this' |
| 4475 | parameter. */ |
| 4476 | |
| 4477 | static tree |
| 4478 | decl_class_context (tree decl) |
| 4479 | { |
| 4480 | tree context = NULL_TREE; |
| 4481 | |
| 4482 | if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl)) |
| 4483 | context = DECL_CONTEXT (decl); |
| 4484 | else |
| 4485 | context = TYPE_MAIN_VARIANT |
| 4486 | (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl))))); |
| 4487 | |
| 4488 | if (context && !TYPE_P (context)) |
| 4489 | context = NULL_TREE; |
| 4490 | |
| 4491 | return context; |
| 4492 | } |
| 4493 | |
| 4494 | /* Add an attribute/value pair to a DIE. */ |
| 4495 | |
| 4496 | static inline void |
| 4497 | add_dwarf_attr (dw_die_ref die, dw_attr_node *attr) |
| 4498 | { |
| 4499 | /* Maybe this should be an assert? */ |
| 4500 | if (die == NULL) |
| 4501 | return; |
| 4502 | |
| 4503 | if (flag_checking) |
| 4504 | { |
| 4505 | /* Check we do not add duplicate attrs. Can't use get_AT here |
| 4506 | because that recurses to the specification/abstract origin DIE. */ |
| 4507 | dw_attr_node *a; |
| 4508 | unsigned ix; |
| 4509 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
| 4510 | gcc_assert (a->dw_attr != attr->dw_attr); |
| 4511 | } |
| 4512 | |
| 4513 | vec_safe_reserve (v&: die->die_attr, nelems: 1); |
| 4514 | vec_safe_push (v&: die->die_attr, obj: *attr); |
| 4515 | } |
| 4516 | |
/* Return the value class of attribute node A.  */
enum dw_val_class
AT_class (dw_attr_node *a)
{
  return a->dw_attr_val.val_class;
}
| 4522 | |
| 4523 | /* Return the index for any attribute that will be referenced with a |
| 4524 | DW_FORM_addrx/GNU_addr_index or DW_FORM_strx/GNU_str_index. String |
| 4525 | indices are stored in dw_attr_val.v.val_str for reference counting |
| 4526 | pruning. */ |
| 4527 | |
| 4528 | static inline unsigned int |
| 4529 | AT_index (dw_attr_node *a) |
| 4530 | { |
| 4531 | if (AT_class (a) == dw_val_class_str) |
| 4532 | return a->dw_attr_val.v.val_str->index; |
| 4533 | else if (a->dw_attr_val.val_entry != NULL) |
| 4534 | return a->dw_attr_val.val_entry->index; |
| 4535 | return NOT_INDEXED; |
| 4536 | } |
| 4537 | |
| 4538 | /* Add a flag value attribute to a DIE. */ |
| 4539 | |
| 4540 | static inline void |
| 4541 | add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag) |
| 4542 | { |
| 4543 | dw_attr_node attr; |
| 4544 | |
| 4545 | attr.dw_attr = attr_kind; |
| 4546 | attr.dw_attr_val.val_class = dw_val_class_flag; |
| 4547 | attr.dw_attr_val.val_entry = NULL; |
| 4548 | attr.dw_attr_val.v.val_flag = flag; |
| 4549 | add_dwarf_attr (die, attr: &attr); |
| 4550 | } |
| 4551 | |
/* Return the flag value of attribute A; A must be dw_val_class_flag.  */
static inline unsigned
AT_flag (dw_attr_node *a)
{
  gcc_assert (a && AT_class (a) == dw_val_class_flag);
  return a->dw_attr_val.v.val_flag;
}
| 4558 | |
| 4559 | /* Add a signed integer attribute value to a DIE. */ |
| 4560 | |
| 4561 | static inline void |
| 4562 | add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val) |
| 4563 | { |
| 4564 | dw_attr_node attr; |
| 4565 | |
| 4566 | attr.dw_attr = attr_kind; |
| 4567 | attr.dw_attr_val.val_class = dw_val_class_const; |
| 4568 | attr.dw_attr_val.val_entry = NULL; |
| 4569 | attr.dw_attr_val.v.val_int = int_val; |
| 4570 | add_dwarf_attr (die, attr: &attr); |
| 4571 | } |
| 4572 | |
/* Return the signed constant of attribute A; A must be a (possibly
   implicit) signed-constant class.  */
HOST_WIDE_INT
AT_int (dw_attr_node *a)
{
  gcc_assert (a && (AT_class (a) == dw_val_class_const
		    || AT_class (a) == dw_val_class_const_implicit));
  return a->dw_attr_val.v.val_int;
}
| 4580 | |
| 4581 | /* Add an unsigned integer attribute value to a DIE. */ |
| 4582 | |
| 4583 | static inline void |
| 4584 | add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind, |
| 4585 | unsigned HOST_WIDE_INT unsigned_val) |
| 4586 | { |
| 4587 | dw_attr_node attr; |
| 4588 | |
| 4589 | attr.dw_attr = attr_kind; |
| 4590 | attr.dw_attr_val.val_class = dw_val_class_unsigned_const; |
| 4591 | attr.dw_attr_val.val_entry = NULL; |
| 4592 | attr.dw_attr_val.v.val_unsigned = unsigned_val; |
| 4593 | add_dwarf_attr (die, attr: &attr); |
| 4594 | } |
| 4595 | |
/* Return the unsigned constant of attribute A; A must be a (possibly
   implicit) unsigned-constant class.  */
unsigned HOST_WIDE_INT
AT_unsigned (dw_attr_node *a)
{
  gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
		    || AT_class (a) == dw_val_class_unsigned_const_implicit));
  return a->dw_attr_val.v.val_unsigned;
}
| 4603 | |
/* Allocate a dw_wide_int in GC-managed memory holding the value of W.
   The structure's trailing val[] array is over-allocated so it can hold
   w.get_len () HOST_WIDE_INT elements (one is already in sizeof).  */
dw_wide_int *
alloc_dw_wide_int (const wide_int_ref &w)
{
  dw_wide_int *p
    = (dw_wide_int *) ggc_internal_alloc (s: sizeof (dw_wide_int)
					  + ((w.get_len () - 1)
					     * sizeof (HOST_WIDE_INT)));
  p->precision = w.get_precision ();
  p->len = w.get_len ();
  memcpy (dest: p->val, src: w.get_val (), n: p->len * sizeof (HOST_WIDE_INT));
  return p;
}
| 4616 | |
| 4617 | /* Add an unsigned wide integer attribute value to a DIE. */ |
| 4618 | |
| 4619 | static inline void |
| 4620 | add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind, |
| 4621 | const wide_int_ref &w) |
| 4622 | { |
| 4623 | dw_attr_node attr; |
| 4624 | |
| 4625 | attr.dw_attr = attr_kind; |
| 4626 | attr.dw_attr_val.val_class = dw_val_class_wide_int; |
| 4627 | attr.dw_attr_val.val_entry = NULL; |
| 4628 | attr.dw_attr_val.v.val_wide = alloc_dw_wide_int (w); |
| 4629 | add_dwarf_attr (die, attr: &attr); |
| 4630 | } |
| 4631 | |
| 4632 | /* Add an unsigned double integer attribute value to a DIE. */ |
| 4633 | |
| 4634 | static inline void |
| 4635 | add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind, |
| 4636 | HOST_WIDE_INT high, unsigned HOST_WIDE_INT low) |
| 4637 | { |
| 4638 | dw_attr_node attr; |
| 4639 | |
| 4640 | attr.dw_attr = attr_kind; |
| 4641 | attr.dw_attr_val.val_class = dw_val_class_const_double; |
| 4642 | attr.dw_attr_val.val_entry = NULL; |
| 4643 | attr.dw_attr_val.v.val_double.high = high; |
| 4644 | attr.dw_attr_val.v.val_double.low = low; |
| 4645 | add_dwarf_attr (die, attr: &attr); |
| 4646 | } |
| 4647 | |
/* Add an array-of-bytes (dw_val_class_vec) attribute value to a DIE;
   LENGTH elements of ELT_SIZE bytes each, stored in ARRAY. */
| 4649 | |
| 4650 | static inline void |
| 4651 | add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind, |
| 4652 | unsigned int length, unsigned int elt_size, unsigned char *array) |
| 4653 | { |
| 4654 | dw_attr_node attr; |
| 4655 | |
| 4656 | attr.dw_attr = attr_kind; |
| 4657 | attr.dw_attr_val.val_class = dw_val_class_vec; |
| 4658 | attr.dw_attr_val.val_entry = NULL; |
| 4659 | attr.dw_attr_val.v.val_vec.length = length; |
| 4660 | attr.dw_attr_val.v.val_vec.elt_size = elt_size; |
| 4661 | attr.dw_attr_val.v.val_vec.array = array; |
| 4662 | add_dwarf_attr (die, attr: &attr); |
| 4663 | } |
| 4664 | |
| 4665 | /* Add an 8-byte data attribute value to a DIE. */ |
| 4666 | |
| 4667 | static inline void |
| 4668 | add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind, |
| 4669 | unsigned char data8[8]) |
| 4670 | { |
| 4671 | dw_attr_node attr; |
| 4672 | |
| 4673 | attr.dw_attr = attr_kind; |
| 4674 | attr.dw_attr_val.val_class = dw_val_class_data8; |
| 4675 | attr.dw_attr_val.val_entry = NULL; |
| 4676 | memcpy (dest: attr.dw_attr_val.v.val_data8, src: data8, n: 8); |
| 4677 | add_dwarf_attr (die, attr: &attr); |
| 4678 | } |
| 4679 | |
| 4680 | /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using |
| 4681 | dwarf_split_debug_info, address attributes in dies destined for the |
| 4682 | final executable have force_direct set to avoid using indexed |
| 4683 | references. */ |
| 4684 | |
static inline void
add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
		    bool force_direct)
{
  dw_attr_node attr;
  char * lbl_id;

  /* DW_AT_low_pc: always a label id; under split debug info (and unless
     forced direct) it additionally goes through the address table so it
     can be emitted as an index.  */
  lbl_id = xstrdup (lbl_low);
  attr.dw_attr = DW_AT_low_pc;
  attr.dw_attr_val.val_class = dw_val_class_lbl_id;
  attr.dw_attr_val.v.val_lbl_id = lbl_id;
  if (dwarf_split_debug_info && !force_direct)
    attr.dw_attr_val.val_entry
      = add_addr_table_entry (lbl_id, ate_kind_label);
  else
    attr.dw_attr_val.val_entry = NULL;
  add_dwarf_attr (die, attr: &attr);

  /* DW_AT_high_pc: before DWARF 4 it must be an address (label id); from
     DWARF 4 on it can be the dedicated high_pc class (offset form).  */
  attr.dw_attr = DW_AT_high_pc;
  if (dwarf_version < 4)
    attr.dw_attr_val.val_class = dw_val_class_lbl_id;
  else
    attr.dw_attr_val.val_class = dw_val_class_high_pc;
  lbl_id = xstrdup (lbl_high);
  attr.dw_attr_val.v.val_lbl_id = lbl_id;
  /* Only the label-id form is eligible for address-table indexing.  */
  if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
      && dwarf_split_debug_info && !force_direct)
    attr.dw_attr_val.val_entry
      = add_addr_table_entry (lbl_id, ate_kind_label);
  else
    attr.dw_attr_val.val_entry = NULL;
  add_dwarf_attr (die, attr: &attr);
}
| 4718 | |
| 4719 | /* Hash and equality functions for debug_str_hash. */ |
| 4720 | |
/* Hash an indirect string node by its string contents, so lookups by
   plain C string find the matching node.  */
hashval_t
indirect_string_hasher::hash (indirect_string_node *x)
{
  return htab_hash_string (x->str);
}
| 4726 | |
| 4727 | bool |
| 4728 | indirect_string_hasher::equal (indirect_string_node *x1, const char *x2) |
| 4729 | { |
| 4730 | return strcmp (s1: x1->str, s2: x2) == 0; |
| 4731 | } |
| 4732 | |
| 4733 | /* Add STR to the given string hash table. */ |
| 4734 | |
/* Look up (and with INSERT == INSERT, create) the node for STR in TABLE.
   Each successful lookup bumps the node's refcount; callers rely on that
   for string-pruning decisions.  */
static struct indirect_string_node *
find_AT_string_in_table (const char *str,
			 hash_table<indirect_string_hasher> *table,
			 enum insert_option insert = INSERT)
{
  struct indirect_string_node *node;

  indirect_string_node **slot
    = table->find_slot_with_hash (comparable: str, hash: htab_hash_string (str), insert);
  if (*slot == NULL)
    {
      /* First time we see STR: allocate a node with its own GC copy of
	 the string.  */
      node = ggc_cleared_alloc<indirect_string_node> ();
      node->str = ggc_strdup (str);
      *slot = node;
    }
  else
    node = *slot;

  node->refcount++;
  return node;
}
| 4756 | |
| 4757 | /* Add STR to the indirect string hash table. */ |
| 4758 | |
| 4759 | static struct indirect_string_node * |
| 4760 | find_AT_string (const char *str, enum insert_option insert = INSERT) |
| 4761 | { |
| 4762 | if (! debug_str_hash) |
| 4763 | debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (n: 10); |
| 4764 | |
| 4765 | return find_AT_string_in_table (str, table: debug_str_hash, insert); |
| 4766 | } |
| 4767 | |
| 4768 | /* Add a string attribute value to a DIE. */ |
| 4769 | |
| 4770 | static inline void |
| 4771 | add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str) |
| 4772 | { |
| 4773 | dw_attr_node attr; |
| 4774 | struct indirect_string_node *node; |
| 4775 | |
| 4776 | node = find_AT_string (str); |
| 4777 | |
| 4778 | attr.dw_attr = attr_kind; |
| 4779 | attr.dw_attr_val.val_class = dw_val_class_str; |
| 4780 | attr.dw_attr_val.val_entry = NULL; |
| 4781 | attr.dw_attr_val.v.val_str = node; |
| 4782 | add_dwarf_attr (die, attr: &attr); |
| 4783 | } |
| 4784 | |
/* Return the C string held by a string-class attribute.  */

static inline const char *
AT_string (dw_attr_node *a)
{
  gcc_assert (a && AT_class (a) == dw_val_class_str);
  return a->dw_attr_val.v.val_str->str;
}
| 4791 | |
| 4792 | /* Call this function directly to bypass AT_string_form's logic to put |
| 4793 | the string inline in the die. */ |
| 4794 | |
| 4795 | static void |
| 4796 | set_indirect_string (struct indirect_string_node *node) |
| 4797 | { |
| 4798 | char label[MAX_ARTIFICIAL_LABEL_BYTES]; |
| 4799 | /* Already indirect is a no op. */ |
| 4800 | if (node->form == DW_FORM_strp |
| 4801 | || node->form == DW_FORM_line_strp |
| 4802 | || node->form == dwarf_FORM (form: DW_FORM_strx)) |
| 4803 | { |
| 4804 | gcc_assert (node->label); |
| 4805 | return; |
| 4806 | } |
| 4807 | ASM_GENERATE_INTERNAL_LABEL (label, "LASF" , dw2_string_counter); |
| 4808 | ++dw2_string_counter; |
| 4809 | node->label = xstrdup (label); |
| 4810 | |
| 4811 | if (!dwarf_split_debug_info) |
| 4812 | { |
| 4813 | node->form = DW_FORM_strp; |
| 4814 | node->index = NOT_INDEXED; |
| 4815 | } |
| 4816 | else |
| 4817 | { |
| 4818 | node->form = dwarf_FORM (form: DW_FORM_strx); |
| 4819 | node->index = NO_INDEX_ASSIGNED; |
| 4820 | } |
| 4821 | } |
| 4822 | |
| 4823 | /* A helper function for dwarf2out_finish, called to reset indirect |
| 4824 | string decisions done for early LTO dwarf output before fat object |
| 4825 | dwarf output. */ |
| 4826 | |
| 4827 | int |
| 4828 | reset_indirect_string (indirect_string_node **h, void *) |
| 4829 | { |
| 4830 | struct indirect_string_node *node = *h; |
| 4831 | if (node->form == DW_FORM_strp |
| 4832 | || node->form == DW_FORM_line_strp |
| 4833 | || node->form == dwarf_FORM (form: DW_FORM_strx)) |
| 4834 | { |
| 4835 | free (ptr: node->label); |
| 4836 | node->label = NULL; |
| 4837 | node->form = (dwarf_form) 0; |
| 4838 | node->index = 0; |
| 4839 | } |
| 4840 | return 1; |
| 4841 | } |
| 4842 | |
| 4843 | /* Add a string representing a file or filepath attribute value to a DIE. */ |
| 4844 | |
| 4845 | static inline void |
| 4846 | add_filepath_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, |
| 4847 | const char *str) |
| 4848 | { |
| 4849 | if (! asm_outputs_debug_line_str ()) |
| 4850 | add_AT_string (die, attr_kind, str); |
| 4851 | else |
| 4852 | { |
| 4853 | dw_attr_node attr; |
| 4854 | struct indirect_string_node *node; |
| 4855 | |
| 4856 | if (!debug_line_str_hash) |
| 4857 | debug_line_str_hash |
| 4858 | = hash_table<indirect_string_hasher>::create_ggc (n: 10); |
| 4859 | |
| 4860 | node = find_AT_string_in_table (str, table: debug_line_str_hash); |
| 4861 | set_indirect_string (node); |
| 4862 | node->form = DW_FORM_line_strp; |
| 4863 | |
| 4864 | attr.dw_attr = attr_kind; |
| 4865 | attr.dw_attr_val.val_class = dw_val_class_str; |
| 4866 | attr.dw_attr_val.val_entry = NULL; |
| 4867 | attr.dw_attr_val.v.val_str = node; |
| 4868 | add_dwarf_attr (die, attr: &attr); |
| 4869 | } |
| 4870 | } |
| 4871 | |
/* Find out whether a string should be output inline in DIE
   or out-of-line in .debug_str section.  */

static enum dwarf_form
find_string_form (struct indirect_string_node *node)
{
  unsigned int len;

  /* A previously decided form is sticky.  */
  if (node->form)
    return node->form;

  /* Count the terminating NUL; that is what gets emitted.  */
  len = strlen (s: node->str) + 1;

  /* If the string is shorter or equal to the size of the reference, it is
     always better to put it inline.  */
  if (len <= (unsigned) dwarf_offset_size || node->refcount == 0)
    return node->form = DW_FORM_string;

  /* If we cannot expect the linker to merge strings in .debug_str
     section, only put it into .debug_str if it is worth even in this
     single module.  */
  if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
      || ((debug_str_section->common.flags & SECTION_MERGE) == 0
	  && (len - dwarf_offset_size) * node->refcount <= len))
    return node->form = DW_FORM_string;

  /* Otherwise emit out of line; this assigns a label and picks
     DW_FORM_strp or DW_FORM_strx depending on split-debug-info.  */
  set_indirect_string (node);

  return node->form;
}
| 4902 | |
/* Find out whether the string referenced from the attribute should be
   output inline in DIE or out-of-line in .debug_str section.  */

static enum dwarf_form
AT_string_form (dw_attr_node *a)
{
  /* Only valid on string-class attributes.  */
  gcc_assert (a && AT_class (a) == dw_val_class_str);
  return find_string_form (node: a->dw_attr_val.v.val_str);
}
| 4912 | |
/* Add a DIE reference attribute value to a DIE.  */

static inline void
add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
{
  dw_attr_node attr;
  /* NOTE: checking builds trap a NULL target right here; release
     builds compile this assert away and instead take the graceful
     early return below.  The guard is therefore not dead code.  */
  gcc_checking_assert (targ_die != NULL);
  /* A DIE may not be its own abstract origin or specification.  */
  gcc_assert (targ_die != die
	      || (attr_kind != DW_AT_abstract_origin
		  && attr_kind != DW_AT_specification));

  /* With LTO we can end up trying to reference something we didn't create
     a DIE for.  Avoid crashing later on a NULL referenced DIE.  */
  if (targ_die == NULL)
    return;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_die_ref;
  attr.dw_attr_val.val_entry = NULL;
  attr.dw_attr_val.v.val_die_ref.die = targ_die;
  /* References start out internal; set_AT_ref_external flips this.  */
  attr.dw_attr_val.v.val_die_ref.external = 0;
  add_dwarf_attr (die, attr: &attr);
}
| 4936 | |
/* Change DIE reference REF to point to NEW_DIE instead.  */

static inline void
change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
{
  gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
  ref->dw_attr_val.v.val_die_ref.die = new_die;
  /* Retargeting makes the reference internal again.  */
  ref->dw_attr_val.v.val_die_ref.external = 0;
}

/* Add an AT_specification attribute to a DIE, and also make the back
   pointer from the specification to the definition.  */

static inline void
add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
{
  add_AT_die_ref (die, attr_kind: DW_AT_specification, targ_die);
  /* A declaration may have at most one definition.  */
  gcc_assert (!targ_die->die_definition);
  targ_die->die_definition = die;
}

/* Return the DIE a die-reference attribute points to.  */

static inline dw_die_ref
AT_ref (dw_attr_node *a)
{
  gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
  return a->dw_attr_val.v.val_die_ref.die;
}

/* Return the external flag of a die-reference attribute, or 0 when A
   is null or not a die reference (deliberately lenient).  */

static inline int
AT_ref_external (dw_attr_node *a)
{
  if (a && AT_class (a) == dw_val_class_die_ref)
    return a->dw_attr_val.v.val_die_ref.external;

  return 0;
}

/* Set the external flag of die-reference attribute A to I.  */

static inline void
set_AT_ref_external (dw_attr_node *a, int i)
{
  gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
  a->dw_attr_val.v.val_die_ref.external = i;
}
| 4980 | |
/* Add a location description attribute value to a DIE.  */

static inline void
add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
{
  dw_attr_node attr;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_loc;
  attr.dw_attr_val.val_entry = NULL;
  attr.dw_attr_val.v.val_loc = loc;
  add_dwarf_attr (die, attr: &attr);
}

/* Return the location expression of a location-class attribute.  */

dw_loc_descr_ref
AT_loc (dw_attr_node *a)
{
  gcc_assert (a && AT_class (a) == dw_val_class_loc);
  return a->dw_attr_val.v.val_loc;
}

/* Add a location-list attribute value to a DIE and record that the
   compilation unit now needs a .debug_loc/.debug_loclists section.  */

static inline void
add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
{
  dw_attr_node attr;

  /* XCOFF without the DWARF extras cannot represent location lists.  */
  if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
    return;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_loc_list;
  attr.dw_attr_val.val_entry = NULL;
  attr.dw_attr_val.v.val_loc_list = loc_list;
  add_dwarf_attr (die, attr: &attr);
  have_location_lists = true;
}

/* Return the location list of a loc-list-class attribute.  */

static inline dw_loc_list_ref
AT_loc_list (dw_attr_node *a)
{
  gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
  return a->dw_attr_val.v.val_loc_list;
}
| 5024 | |
/* Add a view list attribute to DIE.  It must have a DW_AT_location
   attribute, because the view list complements the location list.  */

static inline void
add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
{
  dw_attr_node attr;

  /* Mirror add_AT_loc_list: no location lists on plain XCOFF.  */
  if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
    return;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_view_list;
  attr.dw_attr_val.val_entry = NULL;
  /* The view list value points back at its own DIE; the location list
     is found later via the DIE's DW_AT_location attribute.  */
  attr.dw_attr_val.v.val_view_list = die;
  add_dwarf_attr (die, attr: &attr);
  gcc_checking_assert (get_AT (die, DW_AT_location));
  gcc_assert (have_location_lists);
}
| 5044 | |
/* Return a pointer to the location list referenced by the attribute.
   If the named attribute is a view list, look up the corresponding
   DW_AT_location attribute and return its location list.  */

static inline dw_loc_list_ref *
AT_loc_list_ptr (dw_attr_node *a)
{
  gcc_assert (a);
  switch (AT_class (a))
    {
    case dw_val_class_loc_list:
      return &a->dw_attr_val.v.val_loc_list;
    case dw_val_class_view_list:
      {
	dw_attr_node *l;
	l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
	if (!l)
	  return NULL;
	/* The view list attribute is expected to sit immediately after
	   its DW_AT_location in the attribute vector.  */
	gcc_checking_assert (l + 1 == a);
	return AT_loc_list_ptr (a: l);
      }
    default:
      gcc_unreachable ();
    }
}
| 5070 | |
/* Return the location attribute value associated with a view list
   attribute value, or NULL if the DIE has no DW_AT_location.  */

static inline dw_val_node *
view_list_to_loc_list_val_node (dw_val_node *val)
{
  gcc_assert (val->val_class == dw_val_class_view_list);
  dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
  if (!loc)
    return NULL;
  /* Same adjacency invariant as AT_loc_list_ptr: the view list value
     immediately follows its DW_AT_location attribute.  */
  gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
  gcc_assert (AT_class (loc) == dw_val_class_loc_list);
  return &loc->dw_attr_val;
}
| 5085 | |
/* Hasher for entries in the .debug_addr address table.  */

struct addr_hasher : ggc_ptr_hash<addr_table_entry>
{
  static hashval_t hash (addr_table_entry *);
  static bool equal (addr_table_entry *, addr_table_entry *);
};

/* Table of entries into the .debug_addr section.  */

static GTY (()) hash_table<addr_hasher> *addr_index_table;
| 5095 | |
| 5096 | /* Hash an address_table_entry. */ |
| 5097 | |
| 5098 | hashval_t |
| 5099 | addr_hasher::hash (addr_table_entry *a) |
| 5100 | { |
| 5101 | inchash::hash hstate; |
| 5102 | switch (a->kind) |
| 5103 | { |
| 5104 | case ate_kind_rtx: |
| 5105 | hstate.add_int (v: 0); |
| 5106 | break; |
| 5107 | case ate_kind_rtx_dtprel: |
| 5108 | hstate.add_int (v: 1); |
| 5109 | break; |
| 5110 | case ate_kind_label: |
| 5111 | return htab_hash_string (a->addr.label); |
| 5112 | default: |
| 5113 | gcc_unreachable (); |
| 5114 | } |
| 5115 | inchash::add_rtx (a->addr.rtl, hstate); |
| 5116 | return hstate.end (); |
| 5117 | } |
| 5118 | |
| 5119 | /* Determine equality for two address_table_entries. */ |
| 5120 | |
| 5121 | bool |
| 5122 | addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2) |
| 5123 | { |
| 5124 | if (a1->kind != a2->kind) |
| 5125 | return false; |
| 5126 | switch (a1->kind) |
| 5127 | { |
| 5128 | case ate_kind_rtx: |
| 5129 | case ate_kind_rtx_dtprel: |
| 5130 | return rtx_equal_p (a1->addr.rtl, a2->addr.rtl); |
| 5131 | case ate_kind_label: |
| 5132 | return strcmp (s1: a1->addr.label, s2: a2->addr.label) == 0; |
| 5133 | default: |
| 5134 | gcc_unreachable (); |
| 5135 | } |
| 5136 | } |
| 5137 | |
| 5138 | /* Initialize an addr_table_entry. */ |
| 5139 | |
| 5140 | void |
| 5141 | init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr) |
| 5142 | { |
| 5143 | e->kind = kind; |
| 5144 | switch (kind) |
| 5145 | { |
| 5146 | case ate_kind_rtx: |
| 5147 | case ate_kind_rtx_dtprel: |
| 5148 | e->addr.rtl = (rtx) addr; |
| 5149 | break; |
| 5150 | case ate_kind_label: |
| 5151 | e->addr.label = (char *) addr; |
| 5152 | break; |
| 5153 | } |
| 5154 | e->refcount = 0; |
| 5155 | e->index = NO_INDEX_ASSIGNED; |
| 5156 | } |
| 5157 | |
/* Add an entry for ADDR to the address table, creating it if it is not
   already present.  Defer setting an index until output time.  */
| 5160 | |
static addr_table_entry *
add_addr_table_entry (void *addr, enum ate_kind kind)
{
  addr_table_entry *node;
  addr_table_entry finder;

  /* The address table only exists for split (DWO) debug info.  */
  gcc_assert (dwarf_split_debug_info);
  if (! addr_index_table)
    addr_index_table = hash_table<addr_hasher>::create_ggc (n: 10);
  /* Probe with a stack-allocated key; only allocate on a miss.  */
  init_addr_table_entry (e: &finder, kind, addr);
  addr_table_entry **slot = addr_index_table->find_slot (value: &finder, insert: INSERT);

  if (*slot == HTAB_EMPTY_ENTRY)
    {
      node = ggc_cleared_alloc<addr_table_entry> ();
      init_addr_table_entry (e: node, kind, addr);
      *slot = node;
    }
  else
    node = *slot;

  node->refcount++;
  return node;
}
| 5185 | |
/* Remove an entry from the addr table by decrementing its refcount.
   Strictly, decrementing the refcount would be enough, but the
   assertion that the entry is actually in the table has found
   bugs.  */

static void
remove_addr_table_entry (addr_table_entry *entry)
{
  gcc_assert (dwarf_split_debug_info && addr_index_table);
  /* After an index is assigned, the table is frozen.  */
  gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
  entry->refcount--;
}
| 5199 | |
| 5200 | /* Given a location list, remove all addresses it refers to from the |
| 5201 | address_table. */ |
| 5202 | |
| 5203 | static void |
| 5204 | remove_loc_list_addr_table_entries (dw_loc_descr_ref descr) |
| 5205 | { |
| 5206 | for (; descr; descr = descr->dw_loc_next) |
| 5207 | if (descr->dw_loc_oprnd1.val_entry != NULL) |
| 5208 | { |
| 5209 | gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED); |
| 5210 | remove_addr_table_entry (entry: descr->dw_loc_oprnd1.val_entry); |
| 5211 | } |
| 5212 | } |
| 5213 | |
| 5214 | /* A helper function for dwarf2out_finish called through |
| 5215 | htab_traverse. Assign an addr_table_entry its index. All entries |
| 5216 | must be collected into the table when this function is called, |
| 5217 | because the indexing code relies on htab_traverse to traverse nodes |
| 5218 | in the same order for each run. */ |
| 5219 | |
| 5220 | int |
| 5221 | index_addr_table_entry (addr_table_entry **h, unsigned int *index) |
| 5222 | { |
| 5223 | addr_table_entry *node = *h; |
| 5224 | |
| 5225 | /* Don't index unreferenced nodes. */ |
| 5226 | if (node->refcount == 0) |
| 5227 | return 1; |
| 5228 | |
| 5229 | gcc_assert (node->index == NO_INDEX_ASSIGNED); |
| 5230 | node->index = *index; |
| 5231 | *index += 1; |
| 5232 | |
| 5233 | return 1; |
| 5234 | } |
| 5235 | |
/* Return the tag of a given DIE.  */

enum dwarf_tag
dw_get_die_tag (dw_die_ref die)
{
  return die->die_tag;
}

/* Return a reference to the children list of a given DIE.  */

dw_die_ref
dw_get_die_child (dw_die_ref die)
{
  return die->die_child;
}

/* Return a reference to the sibling of a given DIE.  */

dw_die_ref
dw_get_die_sib (dw_die_ref die)
{
  return die->die_sib;
}
| 5259 | |
/* Add an address constant attribute value to a DIE.  When using
   dwarf_split_debug_info, address attributes in dies destined for the
   final executable should be direct references--setting the parameter
   force_direct ensures this behavior.  */

static inline void
add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
	     bool force_direct)
{
  dw_attr_node attr;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_addr;
  attr.dw_attr_val.v.val_addr = addr;
  /* Indirect addresses go through the .debug_addr table.  */
  if (dwarf_split_debug_info && !force_direct)
    attr.dw_attr_val.val_entry = add_addr_table_entry (addr, kind: ate_kind_rtx);
  else
    attr.dw_attr_val.val_entry = NULL;
  add_dwarf_attr (die, attr: &attr);
}
| 5280 | |
/* Get the RTX from an address DIE attribute.  */
| 5282 | |
static inline rtx
AT_addr (dw_attr_node *a)
{
  /* Only address-class attributes carry an rtx address value.  */
  gcc_assert (a && AT_class (a) == dw_val_class_addr);
  return a->dw_attr_val.v.val_addr;
}
| 5289 | |
/* Add a file attribute value to a DIE.  */

static inline void
add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
	     struct dwarf_file_data *fd)
{
  dw_attr_node attr;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_file;
  attr.dw_attr_val.val_entry = NULL;
  attr.dw_attr_val.v.val_file = fd;
  add_dwarf_attr (die, attr: &attr);
}

/* Get the dwarf_file_data from a file DIE attribute.  Accepts both the
   explicit and the implicit file value classes.  */

static inline struct dwarf_file_data *
AT_file (dw_attr_node *a)
{
  gcc_assert (a && (AT_class (a) == dw_val_class_file
		    || AT_class (a) == dw_val_class_file_implicit));
  return a->dw_attr_val.v.val_file;
}
| 5314 | |
#if VMS_DEBUGGING_INFO
/* Add a vms delta attribute value to a DIE: the difference between two
   labels, LBL1 (start) and LBL2 (end).  */

static inline void
add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
		  const char *lbl1, const char *lbl2)
{
  dw_attr_node attr;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_vms_delta;
  attr.dw_attr_val.val_entry = NULL;
  /* The attribute owns heap copies of both label names.  */
  attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
  attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
  add_dwarf_attr (die, &attr);
}
#endif
| 5332 | |
/* Add a symbolic view identifier attribute value to a DIE.  */

static inline void
add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
		const char *view_label)
{
  dw_attr_node attr;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_symview;
  attr.dw_attr_val.val_entry = NULL;
  /* The attribute owns a heap copy of the label.  */
  attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
  add_dwarf_attr (die, attr: &attr);
}
| 5347 | |
| 5348 | /* Add a label identifier attribute value to a DIE. */ |
| 5349 | |
| 5350 | static inline void |
| 5351 | add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind, |
| 5352 | const char *lbl_id, int offset) |
| 5353 | { |
| 5354 | dw_attr_node attr; |
| 5355 | |
| 5356 | attr.dw_attr = attr_kind; |
| 5357 | attr.dw_attr_val.val_class = dw_val_class_lbl_id; |
| 5358 | attr.dw_attr_val.val_entry = NULL; |
| 5359 | if (!offset) |
| 5360 | attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id); |
| 5361 | else |
| 5362 | attr.dw_attr_val.v.val_lbl_id = xasprintf ("%s%+i" , lbl_id, offset); |
| 5363 | if (dwarf_split_debug_info) |
| 5364 | attr.dw_attr_val.val_entry |
| 5365 | = add_addr_table_entry (addr: attr.dw_attr_val.v.val_lbl_id, |
| 5366 | kind: ate_kind_label); |
| 5367 | add_dwarf_attr (die, attr: &attr); |
| 5368 | } |
| 5369 | |
/* Add a section offset attribute value to a DIE, an offset into the
   debug_line section.  */

static inline void
add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
		const char *label)
{
  dw_attr_node attr;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_lineptr;
  attr.dw_attr_val.val_entry = NULL;
  /* The attribute owns a heap copy of the label.  */
  attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
  add_dwarf_attr (die, attr: &attr);
}

/* Add a section offset attribute value to a DIE, an offset into the
   debug_macinfo section.  */

static inline void
add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
	       const char *label)
{
  dw_attr_node attr;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_macptr;
  attr.dw_attr_val.val_entry = NULL;
  /* The attribute owns a heap copy of the label.  */
  attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
  add_dwarf_attr (die, attr: &attr);
}
| 5401 | |
/* Add a range_list attribute value to a DIE.  When using
   dwarf_split_debug_info, address attributes in dies destined for the
   final executable should be direct references--setting the parameter
   force_direct ensures this behavior.  */

#define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
#define RELOCATED_OFFSET (NULL)

static void
add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
		   long unsigned int offset, bool force_direct)
{
  dw_attr_node attr;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_range_list;
  /* For the range_list attribute, use val_entry to store whether the
     offset should follow split-debug-info or normal semantics.  This
     value is read in output_range_list_offset.  */
  if (dwarf_split_debug_info && !force_direct)
    attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
  else
    attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
  attr.dw_attr_val.v.val_offset = offset;
  add_dwarf_attr (die, attr: &attr);
}
| 5428 | |
/* Return the start label of a delta attribute.  */

static inline const char *
AT_vms_delta1 (dw_attr_node *a)
{
  gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
  return a->dw_attr_val.v.val_vms_delta.lbl1;
}

/* Return the end label of a delta attribute.  */

static inline const char *
AT_vms_delta2 (dw_attr_node *a)
{
  gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
  return a->dw_attr_val.v.val_vms_delta.lbl2;
}

/* Return the label text of any label-like attribute (label id, line
   pointer, macinfo pointer, loclists pointer, or high-pc).  */

static inline const char *
AT_lbl (dw_attr_node *a)
{
  gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
		    || AT_class (a) == dw_val_class_lineptr
		    || AT_class (a) == dw_val_class_macptr
		    || AT_class (a) == dw_val_class_loclistsptr
		    || AT_class (a) == dw_val_class_high_pc));
  return a->dw_attr_val.v.val_lbl_id;
}
| 5457 | |
| 5458 | /* Get the attribute of type attr_kind. */ |
| 5459 | |
| 5460 | dw_attr_node * |
| 5461 | get_AT (dw_die_ref die, enum dwarf_attribute attr_kind) |
| 5462 | { |
| 5463 | dw_attr_node *a; |
| 5464 | unsigned ix; |
| 5465 | dw_die_ref spec = NULL; |
| 5466 | |
| 5467 | if (! die) |
| 5468 | return NULL; |
| 5469 | |
| 5470 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
| 5471 | if (a->dw_attr == attr_kind) |
| 5472 | return a; |
| 5473 | else if (a->dw_attr == DW_AT_specification |
| 5474 | || a->dw_attr == DW_AT_abstract_origin) |
| 5475 | spec = AT_ref (a); |
| 5476 | |
| 5477 | if (spec) |
| 5478 | return get_AT (die: spec, attr_kind); |
| 5479 | |
| 5480 | return NULL; |
| 5481 | } |
| 5482 | |
| 5483 | /* Returns the parent of the declaration of DIE. */ |
| 5484 | |
| 5485 | dw_die_ref |
| 5486 | dw_get_die_parent (dw_die_ref die) |
| 5487 | { |
| 5488 | dw_die_ref t; |
| 5489 | |
| 5490 | if (!die) |
| 5491 | return NULL; |
| 5492 | |
| 5493 | if ((t = get_AT_ref (die, DW_AT_abstract_origin)) |
| 5494 | || (t = get_AT_ref (die, DW_AT_specification))) |
| 5495 | die = t; |
| 5496 | |
| 5497 | return die->die_parent; |
| 5498 | } |
| 5499 | |
/* Return the "low pc" attribute value, typically associated with a subprogram
   DIE.  Return null if the "low pc" attribute is either not present, or if it
   cannot be represented as an assembler label identifier.  */

static inline const char *
get_AT_low_pc (dw_die_ref die)
{
  dw_attr_node *a = get_AT (die, attr_kind: DW_AT_low_pc);

  return a ? AT_lbl (a) : NULL;
}

/* Return the value of the string attribute designated by ATTR_KIND, or
   NULL if it is not present.  */

const char *
get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
{
  dw_attr_node *a = get_AT (die, attr_kind);

  return a ? AT_string (a) : NULL;
}
| 5522 | |
/* Return the value of the flag attribute designated by ATTR_KIND, or 0
   if it is not present.  */
| 5525 | |
| 5526 | int |
| 5527 | get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind) |
| 5528 | { |
| 5529 | dw_attr_node *a = get_AT (die, attr_kind); |
| 5530 | |
| 5531 | return a ? AT_flag (a) : 0; |
| 5532 | } |
| 5533 | |
/* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
   if it is not present.  */

unsigned
get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
{
  dw_attr_node *a = get_AT (die, attr_kind);

  return a ? AT_unsigned (a) : 0;
}
| 5544 | |
/* Return the DIE referenced by the attribute designated by ATTR_KIND,
   or NULL if it is not present.  */

dw_die_ref
get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
{
  dw_attr_node *a = get_AT (die, attr_kind);

  return a ? AT_ref (a) : NULL;
}

/* Return the file data of the attribute designated by ATTR_KIND, or
   NULL if it is not present.  */

struct dwarf_file_data *
get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
{
  dw_attr_node *a = get_AT (die, attr_kind);

  return a ? AT_file (a) : NULL;
}
| 5560 | |
| 5561 | /* Return TRUE if the language is C. */ |
| 5562 | |
| 5563 | static inline bool |
| 5564 | is_c (void) |
| 5565 | { |
| 5566 | unsigned int lang = get_AT_unsigned (die: comp_unit_die (), attr_kind: DW_AT_language); |
| 5567 | |
| 5568 | return (lang == DW_LANG_C || lang == DW_LANG_C89 || lang == DW_LANG_C99 |
| 5569 | || lang == DW_LANG_C11 || lang == DW_LANG_ObjC); |
| 5570 | |
| 5571 | |
| 5572 | } |
| 5573 | |
| 5574 | /* Return TRUE if the language is C++. */ |
| 5575 | |
| 5576 | static inline bool |
| 5577 | is_cxx (void) |
| 5578 | { |
| 5579 | unsigned int lang = get_AT_unsigned (die: comp_unit_die (), attr_kind: DW_AT_language); |
| 5580 | |
| 5581 | return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus |
| 5582 | || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14); |
| 5583 | } |
| 5584 | |
/* Return TRUE if DECL was created by the C++ frontend.  */

static bool
is_cxx (const_tree decl)
{
  if (in_lto_p)
    {
      /* Under LTO, consult the producer language recorded on DECL's
	 translation unit rather than the CU DIE.  */
      const_tree context = get_ultimate_context (decl);
      if (context && TRANSLATION_UNIT_LANGUAGE (context))
	return startswith (TRANSLATION_UNIT_LANGUAGE (context), prefix: "GNU C++");
    }
  /* Fall back to the compilation unit's DW_AT_language.  */
  return is_cxx ();
}
| 5598 | |
| 5599 | /* Return TRUE if the language is Fortran. */ |
| 5600 | |
| 5601 | static inline bool |
| 5602 | is_fortran (void) |
| 5603 | { |
| 5604 | unsigned int lang = get_AT_unsigned (die: comp_unit_die (), attr_kind: DW_AT_language); |
| 5605 | |
| 5606 | return (lang == DW_LANG_Fortran77 |
| 5607 | || lang == DW_LANG_Fortran90 |
| 5608 | || lang == DW_LANG_Fortran95 |
| 5609 | || lang == DW_LANG_Fortran03 |
| 5610 | || lang == DW_LANG_Fortran08); |
| 5611 | } |
| 5612 | |
| 5613 | static inline bool |
| 5614 | is_fortran (const_tree decl) |
| 5615 | { |
| 5616 | if (in_lto_p) |
| 5617 | { |
| 5618 | const_tree context = get_ultimate_context (decl); |
| 5619 | if (context && TRANSLATION_UNIT_LANGUAGE (context)) |
| 5620 | return (strncmp (TRANSLATION_UNIT_LANGUAGE (context), |
| 5621 | s2: "GNU Fortran" , n: 11) == 0 |
| 5622 | || strcmp (TRANSLATION_UNIT_LANGUAGE (context), |
| 5623 | s2: "GNU F77" ) == 0); |
| 5624 | } |
| 5625 | return is_fortran (); |
| 5626 | } |
| 5627 | |
/* Return TRUE if the language is Rust.
   Note, returns FALSE for dwarf_version < 5 && dwarf_strict.  */

static inline bool
is_rust (void)
{
  unsigned int lang = get_AT_unsigned (die: comp_unit_die (), attr_kind: DW_AT_language);

  return lang == DW_LANG_Rust;
}

/* Return TRUE if the language is Ada.  */

static inline bool
is_ada (void)
{
  unsigned int lang = get_AT_unsigned (die: comp_unit_die (), attr_kind: DW_AT_language);

  return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
}

/* Return TRUE if the language is D.  */

static inline bool
is_dlang (void)
{
  unsigned int lang = get_AT_unsigned (die: comp_unit_die (), attr_kind: DW_AT_language);

  return lang == DW_LANG_D;
}
| 5658 | |
| 5659 | /* Remove the specified attribute if present. Return TRUE if removal |
| 5660 | was successful. */ |
| 5661 | |
static bool
remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
{
  dw_attr_node *a;
  unsigned ix;

  if (! die)
    return false;

  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    if (a->dw_attr == attr_kind)
      {
	/* String attribute values are reference-counted; release this
	   DIE's reference before dropping the attribute.  */
	if (AT_class (a) == dw_val_class_str)
	  if (a->dw_attr_val.v.val_str->refcount)
	    a->dw_attr_val.v.val_str->refcount--;

	/* vec::ordered_remove should help reduce the number of abbrevs
	   that are needed.  */
	die->die_attr->ordered_remove (ix);
	return true;
      }
  return false;
}
| 5685 | |
| 5686 | /* Remove CHILD from its parent. PREV must have the property that |
| 5687 | PREV->DIE_SIB == CHILD. Does not alter CHILD. */ |
| 5688 | |
static void
remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
{
  gcc_assert (child->die_parent == prev->die_parent);
  gcc_assert (prev->die_sib == child);
  /* Children are kept on a circular singly-linked list through
     die_sib; the parent's die_child points at the LAST child.  */
  if (prev == child)
    {
      /* PREV == CHILD means CHILD is the only child of its parent.  */
      gcc_assert (child->die_parent->die_child == child);
      prev = NULL;
    }
  else
    prev->die_sib = child->die_sib;
  /* If CHILD was the last child, PREV (possibly NULL for an empty
     list) becomes the new last child.  */
  if (child->die_parent->die_child == child)
    child->die_parent->die_child = prev;
  child->die_sib = NULL;
}
| 5705 | |
| 5706 | /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that |
| 5707 | PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */ |
| 5708 | |
static void
replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
{
  dw_die_ref parent = old_child->die_parent;

  gcc_assert (parent == prev->die_parent);
  gcc_assert (prev->die_sib == old_child);

  new_child->die_parent = parent;
  if (prev == old_child)
    {
      /* OLD_CHILD was the only child: NEW_CHILD becomes a
	 single-element circular list.  */
      gcc_assert (parent->die_child == old_child);
      new_child->die_sib = new_child;
    }
  else
    {
      /* Splice NEW_CHILD into the circular sibling list in
	 OLD_CHILD's position.  */
      prev->die_sib = new_child;
      new_child->die_sib = old_child->die_sib;
    }
  /* Keep the parent's last-child pointer up to date.  */
  if (old_child->die_parent->die_child == old_child)
    old_child->die_parent->die_child = new_child;
  old_child->die_sib = NULL;
}
| 5732 | |
| 5733 | /* Move all children from OLD_PARENT to NEW_PARENT. */ |
| 5734 | |
static void
move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
{
  dw_die_ref c;
  /* Transfer the whole circular child list in one go, then fix up
     each child's back-pointer to the new parent.  */
  new_parent->die_child = old_parent->die_child;
  old_parent->die_child = NULL;
  FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
}
| 5743 | |
| 5744 | /* Remove child DIE whose die_tag is TAG. Do nothing if no child |
| 5745 | matches TAG. */ |
| 5746 | |
static void
remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
{
  dw_die_ref c;

  /* Iterate the circular child list, always keeping PREV one node
     behind C so removal is O(1).  */
  c = die->die_child;
  if (c) do {
    dw_die_ref prev = c;
    c = c->die_sib;
    while (c->die_tag == tag)
      {
	remove_child_with_prev (child: c, prev);
	c->die_parent = NULL;
	/* Might have removed every child.  */
	if (die->die_child == NULL)
	  return;
	c = prev->die_sib;
      }
  } while (c != die->die_child);
}
| 5767 | |
| 5768 | /* Add a CHILD_DIE as the last child of DIE. */ |
| 5769 | |
static void
add_child_die (dw_die_ref die, dw_die_ref child_die)
{
  /* FIXME this should probably be an assert.  */
  if (! die || ! child_die)
    return;
  gcc_assert (die != child_die);

  child_die->die_parent = die;
  if (die->die_child)
    {
      /* Append after the current last child on the circular
	 sibling list.  */
      child_die->die_sib = die->die_child->die_sib;
      die->die_child->die_sib = child_die;
    }
  else
    /* First child: it is its own successor.  */
    child_die->die_sib = child_die;
  /* die_child always points at the LAST child.  */
  die->die_child = child_die;
}
| 5788 | |
| 5789 | /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */ |
| 5790 | |
static void
add_child_die_after (dw_die_ref die, dw_die_ref child_die,
		     dw_die_ref after_die)
{
  gcc_assert (die
	      && child_die
	      && after_die
	      && die->die_child
	      && die != child_die);

  /* Splice CHILD_DIE into the circular sibling list right after
     AFTER_DIE.  */
  child_die->die_parent = die;
  child_die->die_sib = after_die->die_sib;
  after_die->die_sib = child_die;
  /* If AFTER_DIE was the last child, CHILD_DIE now is.  */
  if (die->die_child == after_die)
    die->die_child = child_die;
}
| 5807 | |
| 5808 | /* Unassociate CHILD from its parent, and make its parent be |
| 5809 | NEW_PARENT. */ |
| 5810 | |
static void
reparent_child (dw_die_ref child, dw_die_ref new_parent)
{
  /* Walk the (circular) sibling list of CHILD's current parent to
     find CHILD's predecessor, which remove_child_with_prev needs.  */
  for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
    if (p->die_sib == child)
      {
	remove_child_with_prev (child, prev: p);
	break;
      }
  add_child_die (die: new_parent, child_die: child);
}
| 5822 | |
| 5823 | /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT |
| 5824 | is the specification, to the end of PARENT's list of children. |
| 5825 | This is done by removing and re-adding it. */ |
| 5826 | |
static void
splice_child_die (dw_die_ref parent, dw_die_ref child)
{
  /* We want the declaration DIE from inside the class, not the
     specification DIE at toplevel.  */
  if (child->die_parent != parent)
    {
      dw_die_ref tmp = get_AT_ref (die: child, attr_kind: DW_AT_specification);

      if (tmp)
	child = tmp;
    }

  /* CHILD must now belong either to PARENT or to the DIE that
     PARENT is the specification of.  */
  gcc_assert (child->die_parent == parent
	      || (child->die_parent
		  == get_AT_ref (parent, DW_AT_specification)));

  reparent_child (child, new_parent: parent);
}
| 5846 | |
| 5847 | /* Create and return a new die with TAG_VALUE as tag. */ |
| 5848 | |
| 5849 | dw_die_ref |
| 5850 | new_die_raw (enum dwarf_tag tag_value) |
| 5851 | { |
| 5852 | dw_die_ref die = ggc_cleared_alloc<die_node> (); |
| 5853 | die->die_tag = tag_value; |
| 5854 | return die; |
| 5855 | } |
| 5856 | |
| 5857 | /* Create and return a new die with a parent of PARENT_DIE. If |
| 5858 | PARENT_DIE is NULL, the new DIE is placed in limbo and an |
| 5859 | associated tree T must be supplied to determine parenthood |
| 5860 | later. */ |
| 5861 | |
static inline dw_die_ref
new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
{
  dw_die_ref die = new_die_raw (tag_value);

  if (parent_die != NULL)
    add_child_die (die: parent_die, child_die: die);
  else
    {
      limbo_die_node *limbo_node;

      /* No DIEs created after early dwarf should end up in limbo,
	 because the limbo list should not persist past LTO
	 streaming.  */
      if (tag_value != DW_TAG_compile_unit
	  /* These are allowed because they're generated while
	     breaking out COMDAT units late.  */
	  && tag_value != DW_TAG_type_unit
	  && tag_value != DW_TAG_skeleton_unit
	  && !early_dwarf
	  /* Allow nested functions to live in limbo because they will
	     only temporarily live there, as decls_for_scope will fix
	     them up.  */
	  && (TREE_CODE (t) != FUNCTION_DECL
	      || !decl_function_context (t))
	  /* Same as nested functions above but for types.  Types that
	     are local to a function will be fixed in
	     decls_for_scope.  */
	  && (!RECORD_OR_UNION_TYPE_P (t)
	      || !TYPE_CONTEXT (t)
	      || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
	  /* FIXME debug-early: Allow late limbo DIE creation for LTO,
	     especially in the ltrans stage, but once we implement LTO
	     dwarf streaming, we should remove this exception.  */
	  && !in_lto_p)
	{
	  /* Diagnose the offending tree and abort; this is a
	     compiler bug, not a user error.  */
	  fprintf (stderr, format: "symbol ended up in limbo too late:" );
	  debug_generic_stmt (t);
	  gcc_unreachable ();
	}

      /* Push the parentless DIE onto the limbo list; T is kept so
	 parenthood can be determined later.  */
      limbo_node = ggc_cleared_alloc<limbo_die_node> ();
      limbo_node->die = die;
      limbo_node->created_for = t;
      limbo_node->next = limbo_die_list;
      limbo_die_list = limbo_node;
    }

  return die;
}
| 5912 | |
| 5913 | /* Return the DIE associated with the given type specifier. */ |
| 5914 | |
dw_die_ref
lookup_type_die (tree type)
{
  dw_die_ref die = TYPE_SYMTAB_DIE (type);
  /* A DIE marked removed is stale: drop the association and clear
     the "already emitted" flag so the type can be regenerated.  */
  if (die && die->removed)
    {
      TYPE_SYMTAB_DIE (type) = NULL;
      TREE_ASM_WRITTEN (type) = 0;
      return NULL;
    }
  return die;
}
| 5927 | |
| 5928 | /* Given a TYPE_DIE representing the type TYPE, if TYPE is an |
| 5929 | anonymous type named by the typedef TYPE_DIE, return the DIE of the |
| 5930 | anonymous type instead the one of the naming typedef. */ |
| 5931 | |
static inline dw_die_ref
strip_naming_typedef (tree type, dw_die_ref type_die)
{
  /* Only anonymous RECORD_TYPEs equated to a DW_TAG_typedef by a
     naming typedef need stripping; follow the typedef's DW_AT_type
     to reach the underlying anonymous type's DIE.  */
  if (type
      && TREE_CODE (type) == RECORD_TYPE
      && type_die
      && type_die->die_tag == DW_TAG_typedef
      && is_naming_typedef_decl (TYPE_NAME (type)))
    type_die = get_AT_ref (die: type_die, attr_kind: DW_AT_type);
  return type_die;
}
| 5943 | |
| 5944 | /* Like lookup_type_die, but if type is an anonymous type named by a |
| 5945 | typedef[1], return the DIE of the anonymous type instead the one of |
| 5946 | the naming typedef. This is because in gen_typedef_die, we did |
| 5947 | equate the anonymous struct named by the typedef with the DIE of |
| 5948 | the naming typedef. So by default, lookup_type_die on an anonymous |
| 5949 | struct yields the DIE of the naming typedef. |
| 5950 | |
| 5951 | [1]: Read the comment of is_naming_typedef_decl to learn about what |
| 5952 | a naming typedef is. */ |
| 5953 | |
| 5954 | static inline dw_die_ref |
| 5955 | lookup_type_die_strip_naming_typedef (tree type) |
| 5956 | { |
| 5957 | dw_die_ref die = lookup_type_die (type); |
| 5958 | return strip_naming_typedef (type, type_die: die); |
| 5959 | } |
| 5960 | |
| 5961 | /* Equate a DIE to a given type specifier. */ |
| 5962 | |
static inline void
equate_type_number_to_die (tree type, dw_die_ref type_die)
{
  /* The association is stored directly on the type node.  */
  TYPE_SYMTAB_DIE (type) = type_die;
}
| 5968 | |
static dw_die_ref maybe_create_die_with_external_ref (tree);
/* A symbol + offset pair locating a DIE emitted during early debug.  */
struct GTY(()) sym_off_pair
{
  const char *sym;	/* Symbol of the unit containing the DIE.  */
  unsigned HOST_WIDE_INT off;	/* Offset of the DIE from SYM.  */
};
/* Map from trees to the symbol + offset of their early-debug DIE.  */
static GTY(()) hash_map<tree, sym_off_pair> *external_die_map;
| 5976 | |
| 5977 | /* Returns a hash value for X (which really is a die_struct). */ |
| 5978 | |
inline hashval_t
decl_die_hasher::hash (die_node *x)
{
  /* The decl's UID (stored as decl_id) serves as the hash value.  */
  return (hashval_t) x->decl_id;
}
| 5984 | |
| 5985 | /* Return true if decl_id of die_struct X is the same as UID of decl *Y. */ |
| 5986 | |
inline bool
decl_die_hasher::equal (die_node *x, tree y)
{
  /* A DIE matches a decl when it records that decl's UID.  */
  return (x->decl_id == DECL_UID (y));
}
| 5992 | |
| 5993 | /* Return the DIE associated with a given declaration. */ |
| 5994 | |
dw_die_ref
lookup_decl_die (tree decl)
{
  dw_die_ref *die = decl_die_table->find_slot_with_hash (comparable: decl, DECL_UID (decl),
							 insert: NO_INSERT);
  if (!die)
    {
      /* In LTO an early-debug DIE may exist externally; create a
	 local stub referencing it on demand.  */
      if (in_lto_p)
	return maybe_create_die_with_external_ref (decl);
      return NULL;
    }
  /* A removed DIE is stale: purge the table entry.  */
  if ((*die)->removed)
    {
      decl_die_table->clear_slot (slot: die);
      return NULL;
    }
  return *die;
}
| 6013 | |
| 6014 | |
| 6015 | /* Return the DIE associated with BLOCK. */ |
| 6016 | |
static inline dw_die_ref
lookup_block_die (tree block)
{
  dw_die_ref die = BLOCK_DIE (block);
  /* During LTO, fall back to a stub referencing the external
     early-debug DIE if one was registered for BLOCK.  */
  if (!die && in_lto_p)
    return maybe_create_die_with_external_ref (block);
  return die;
}
| 6025 | |
| 6026 | /* Associate DIE with BLOCK. */ |
| 6027 | |
static inline void
equate_block_to_die (tree block, dw_die_ref die)
{
  BLOCK_DIE (block) = die;
}
/* Past this point the macro must not be used directly; go through
   lookup_block_die / equate_block_to_die instead.  */
#undef BLOCK_DIE
| 6034 | |
| 6035 | |
| 6036 | /* For DECL which might have early dwarf output query a SYMBOL + OFFSET |
| 6037 | style reference. Return true if we found one referring to a DIE for |
| 6038 | DECL, otherwise return false. */ |
| 6039 | |
static bool
dwarf2out_die_ref_for_decl (tree decl, const char **sym,
			    unsigned HOST_WIDE_INT *off)
{
  dw_die_ref die;

  if (in_lto_p)
    {
      /* During WPA stage and incremental linking we use a hash-map
	 to store the decl <-> label + offset map.  */
      if (!external_die_map)
	return false;
      sym_off_pair *desc = external_die_map->get (k: decl);
      if (!desc)
	return false;
      *sym = desc->sym;
      *off = desc->off;
      return true;
    }

  if (TREE_CODE (decl) == BLOCK)
    die = lookup_block_die (block: decl);
  else
    die = lookup_decl_die (decl);
  if (!die)
    return false;

  /* Similar to get_ref_die_offset_label, but using the "correct"
     label.  */
  *off = die->die_offset;
  /* Walk up to the root DIE of the containing unit.  */
  while (die->die_parent)
    die = die->die_parent;
  /* For the containing CU DIE we compute a die_symbol in
     compute_comp_unit_symbol.  */
  if (die->die_tag == DW_TAG_compile_unit)
    {
      gcc_assert (die->die_id.die_symbol != NULL);
      *sym = die->die_id.die_symbol;
      return true;
    }
  /* While we can gracefully handle running into say a type unit
     we don't really want and consider this a bug.  */
  if (flag_checking)
    gcc_unreachable ();
  return false;
}
| 6086 | |
| 6087 | /* Add a reference of kind ATTR_KIND to a DIE at SYMBOL + OFFSET to DIE. */ |
| 6088 | |
static void
add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
			 const char *symbol, HOST_WIDE_INT offset)
{
  /* Create a fake DIE that contains the reference.  Don't use
     new_die because we don't want to end up in the limbo list.  */
  /* ??? We probably want to share these, thus put a ref to the DIE
     we create here to the external_die_map entry.  */
  dw_die_ref ref = new_die_raw (tag_value: die->die_tag);
  ref->die_id.die_symbol = symbol;
  ref->die_offset = offset;
  /* Mark the DIE as addressed by symbol + offset rather than by a
     plain section offset.  */
  ref->with_offset = 1;
  add_AT_die_ref (die, attr_kind, targ_die: ref);
}
| 6103 | |
| 6104 | /* Create a DIE for DECL if required and add a reference to a DIE |
| 6105 | at SYMBOL + OFFSET which contains attributes dumped early. */ |
| 6106 | |
static void
dwarf2out_register_external_die (tree decl, const char *sym,
				 unsigned HOST_WIDE_INT off)
{
  if (debug_info_level == DINFO_LEVEL_NONE)
    return;

  /* Lazily create the map on first registration.  */
  if (!external_die_map)
    external_die_map = hash_map<tree, sym_off_pair>::create_ggc (size: 1000);
  /* A decl must be registered at most once.  */
  gcc_checking_assert (!external_die_map->get (decl));
  /* Intern SYM in the identifier table so the string is GC-managed.  */
  sym_off_pair p = { IDENTIFIER_POINTER (get_identifier (sym)), .off: off };
  external_die_map->put (k: decl, v: p);
}
| 6120 | |
| 6121 | /* If we have a registered external DIE for DECL return a new DIE for |
| 6122 | the concrete instance with an appropriate abstract origin. */ |
| 6123 | |
static dw_die_ref
maybe_create_die_with_external_ref (tree decl)
{
  if (!external_die_map)
    return NULL;
  sym_off_pair *desc = external_die_map->get (k: decl);
  if (!desc)
    return NULL;

  /* Consume the map entry so we create the stub only once.  */
  const char *sym = desc->sym;
  unsigned HOST_WIDE_INT off = desc->off;
  external_die_map->remove (k: decl);

  /* Temporarily clear in_lto_p so the lookups below do not recurse
     back into this function; assert no DIE exists yet.  */
  in_lto_p = false;
  dw_die_ref die = (TREE_CODE (decl) == BLOCK
		    ? lookup_block_die (block: decl) : lookup_decl_die (decl));
  gcc_assert (!die);
  in_lto_p = true;

  tree ctx;
  dw_die_ref parent = NULL;
  /* Need to lookup a DIE for the decls context - the containing
     function or translation unit.  */
  if (TREE_CODE (decl) == BLOCK)
    {
      ctx = BLOCK_SUPERCONTEXT (decl);
      /* ??? We do not output DIEs for all scopes thus skip as
	 many DIEs as needed.  */
      while (TREE_CODE (ctx) == BLOCK
	     && !lookup_block_die (block: ctx))
	ctx = BLOCK_SUPERCONTEXT (ctx);
    }
  else
    ctx = DECL_CONTEXT (decl);
  /* Peel types in the context stack.  */
  while (ctx && TYPE_P (ctx))
    ctx = TYPE_CONTEXT (ctx);
  /* Likewise namespaces in case we do not want to emit DIEs for them.  */
  if (debug_info_level <= DINFO_LEVEL_TERSE)
    while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
      ctx = DECL_CONTEXT (ctx);
  if (ctx)
    {
      if (TREE_CODE (ctx) == BLOCK)
	parent = lookup_block_die (block: ctx);
      else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
	       /* Keep the 1:1 association during WPA.  */
	       && !flag_wpa
	       && flag_incremental_link != INCREMENTAL_LINK_LTO)
	/* Otherwise all late annotations go to the main CU which
	   imports the original CUs.  */
	parent = comp_unit_die ();
      else if (TREE_CODE (ctx) == FUNCTION_DECL
	       && TREE_CODE (decl) != FUNCTION_DECL
	       && TREE_CODE (decl) != PARM_DECL
	       && TREE_CODE (decl) != RESULT_DECL
	       && TREE_CODE (decl) != BLOCK)
	/* Leave function local entities parent determination to when
	   we process scope vars.  */
	;
      else
	parent = lookup_decl_die (decl: ctx);
    }
  else
    /* In some cases the FEs fail to set DECL_CONTEXT properly.
       Handle this case gracefully by globalizing stuff.  */
    parent = comp_unit_die ();
  /* Create a DIE "stub" with a tag chosen from DECL's tree code.  */
  switch (TREE_CODE (decl))
    {
    case TRANSLATION_UNIT_DECL:
      {
	die = comp_unit_die ();
	/* We re-target all CU decls to the LTRANS CU DIE, so no need
	   to create a DIE for the original CUs.  */
	return die;
      }
    case NAMESPACE_DECL:
      if (is_fortran (decl))
	die = new_die (tag_value: DW_TAG_module, parent_die: parent, t: decl);
      else
	die = new_die (tag_value: DW_TAG_namespace, parent_die: parent, t: decl);
      break;
    case FUNCTION_DECL:
      die = new_die (tag_value: DW_TAG_subprogram, parent_die: parent, t: decl);
      break;
    case VAR_DECL:
      die = new_die (tag_value: DW_TAG_variable, parent_die: parent, t: decl);
      break;
    case RESULT_DECL:
      die = new_die (tag_value: DW_TAG_variable, parent_die: parent, t: decl);
      break;
    case PARM_DECL:
      die = new_die (tag_value: DW_TAG_formal_parameter, parent_die: parent, t: decl);
      break;
    case CONST_DECL:
      die = new_die (tag_value: DW_TAG_constant, parent_die: parent, t: decl);
      break;
    case LABEL_DECL:
      die = new_die (tag_value: DW_TAG_label, parent_die: parent, t: decl);
      break;
    case BLOCK:
      die = new_die (tag_value: DW_TAG_lexical_block, parent_die: parent, t: decl);
      break;
    default:
      gcc_unreachable ();
    }
  /* Record the association so subsequent lookups find the stub.  */
  if (TREE_CODE (decl) == BLOCK)
    equate_block_to_die (block: decl, die);
  else
    equate_decl_number_to_die (decl, die);

  add_desc_attribute (die, decl);

  /* Add a reference to the DIE providing early debug at $sym + off.  */
  add_AT_external_die_ref (die, attr_kind: DW_AT_abstract_origin, symbol: sym, offset: off);

  return die;
}
| 6243 | |
| 6244 | /* Returns a hash value for X (which really is a var_loc_list). */ |
| 6245 | |
inline hashval_t
decl_loc_hasher::hash (var_loc_list *x)
{
  /* Hash by the decl's UID stored in the list header.  */
  return (hashval_t) x->decl_id;
}
| 6251 | |
| 6252 | /* Return true if decl_id of var_loc_list X is the same as |
| 6253 | UID of decl *Y. */ |
| 6254 | |
inline bool
decl_loc_hasher::equal (var_loc_list *x, const_tree y)
{
  /* A list matches a decl when it records that decl's UID.  */
  return (x->decl_id == DECL_UID (y));
}
| 6260 | |
| 6261 | /* Return the var_loc list associated with a given declaration. */ |
| 6262 | |
static inline var_loc_list *
lookup_decl_loc (const_tree decl)
{
  /* The table is only created when variable tracking is active.  */
  if (!decl_loc_table)
    return NULL;
  return decl_loc_table->find_with_hash (comparable: decl, DECL_UID (decl));
}
| 6270 | |
| 6271 | /* Returns a hash value for X (which really is a cached_dw_loc_list_list). */ |
| 6272 | |
inline hashval_t
dw_loc_list_hasher::hash (cached_dw_loc_list *x)
{
  /* Hash by the decl's UID stored in the cache entry.  */
  return (hashval_t) x->decl_id;
}
| 6278 | |
| 6279 | /* Return true if decl_id of cached_dw_loc_list X is the same as |
| 6280 | UID of decl *Y. */ |
| 6281 | |
inline bool
dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
{
  /* A cache entry matches a decl when it records that decl's UID.  */
  return (x->decl_id == DECL_UID (y));
}
| 6287 | |
| 6288 | /* Equate a DIE to a particular declaration. */ |
| 6289 | |
static void
equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
{
  unsigned int decl_id = DECL_UID (decl);

  /* Insert (or overwrite) the table slot and back-link the UID on
     the DIE so hashing can recover it.  */
  *decl_die_table->find_slot_with_hash (comparable: decl, hash: decl_id, insert: INSERT) = decl_die;
  decl_die->decl_id = decl_id;
}
| 6298 | |
| 6299 | /* Return how many bits covers PIECE EXPR_LIST. */ |
| 6300 | |
static HOST_WIDE_INT
decl_piece_bitsize (rtx piece)
{
  /* Small bitsizes are encoded directly in the EXPR_LIST's mode
     field; a zero mode means the size lives in a CONCAT wrapper.  */
  int ret = (int) GET_MODE (piece);
  if (ret)
    return ret;
  gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
	      && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
  return INTVAL (XEXP (XEXP (piece, 0), 0));
}
| 6311 | |
| 6312 | /* Return pointer to the location of location note in PIECE EXPR_LIST. */ |
| 6313 | |
static rtx *
decl_piece_varloc_ptr (rtx piece)
{
  /* Mirror of decl_piece_bitsize's encoding: nonzero mode means the
     note is the list operand directly, zero mode means it is the
     second operand of the CONCAT wrapper.  */
  if ((int) GET_MODE (piece))
    return &XEXP (piece, 0);
  else
    return &XEXP (XEXP (piece, 0), 1);
}
| 6322 | |
| 6323 | /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits. |
| 6324 | Next is the chain of following piece nodes. */ |
| 6325 | |
static rtx_expr_list *
decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
{
  /* Encode BITSIZE in the EXPR_LIST mode when it fits in the
     machine-mode range, otherwise wrap it in a CONCAT so any
     HOST_WIDE_INT size can be represented.  */
  if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
    return alloc_EXPR_LIST (bitsize, loc_note, next);
  else
    return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
					       GEN_INT (bitsize),
					       loc_note), next);
}
| 6336 | |
| 6337 | /* Return rtx that should be stored into loc field for |
| 6338 | LOC_NOTE and BITPOS/BITSIZE. */ |
| 6339 | |
static rtx
construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
		      HOST_WIDE_INT bitsize)
{
  /* BITSIZE of -1 means the whole decl: return the note unwrapped.
     Otherwise build a piece node, preceded by a padding piece (with
     NULL location) when the piece does not start at bit 0.  */
  if (bitsize != -1)
    {
      loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
      if (bitpos != 0)
	loc_note = decl_piece_node (NULL_RTX, bitsize: bitpos, next: loc_note);
    }
  return loc_note;
}
| 6352 | |
| 6353 | /* This function either modifies location piece list *DEST in |
| 6354 | place (if SRC and INNER is NULL), or copies location piece list |
| 6355 | *SRC to *DEST while modifying it. Location BITPOS is modified |
| 6356 | to contain LOC_NOTE, any pieces overlapping it are removed resp. |
| 6357 | not copied and if needed some padding around it is added. |
| 6358 | When modifying in place, DEST should point to EXPR_LIST where |
| 6359 | earlier pieces cover PIECE_BITPOS bits, when copying SRC points |
| 6360 | to the start of the whole list and INNER points to the EXPR_LIST |
| 6361 | where earlier pieces cover PIECE_BITPOS bits. */ |
| 6362 | |
static void
adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
		   HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
		   HOST_WIDE_INT bitsize, rtx loc_note)
{
  HOST_WIDE_INT diff;
  /* Copy mode iff INNER is given; otherwise modify *DEST in place.  */
  bool copy = inner != NULL;

  if (copy)
    {
      /* First copy all nodes preceding the current bitpos.  */
      while (src != inner)
	{
	  *dest = decl_piece_node (loc_note: *decl_piece_varloc_ptr (piece: *src),
				   bitsize: decl_piece_bitsize (piece: *src), NULL_RTX);
	  dest = &XEXP (*dest, 1);
	  src = &XEXP (*src, 1);
	}
    }
  /* Add padding if needed.  */
  if (bitpos != piece_bitpos)
    {
      *dest = decl_piece_node (NULL_RTX, bitsize: bitpos - piece_bitpos,
			       next: copy ? NULL_RTX : *dest);
      dest = &XEXP (*dest, 1);
    }
  else if (*dest && decl_piece_bitsize (piece: *dest) == bitsize)
    {
      gcc_assert (!copy);
      /* A piece with correct bitpos and bitsize already exist,
	 just update the location for it and return.  */
      *decl_piece_varloc_ptr (piece: *dest) = loc_note;
      return;
    }
  /* Add the piece that changed.  */
  *dest = decl_piece_node (loc_note, bitsize, next: copy ? NULL_RTX : *dest);
  dest = &XEXP (*dest, 1);
  /* Skip over pieces that overlap it.  DIFF is the number of bits
     (from PIECE_BITPOS) now covered by the new piece.  */
  diff = bitpos - piece_bitpos + bitsize;
  if (!copy)
    src = dest;
  while (diff > 0 && *src)
    {
      rtx piece = *src;
      diff -= decl_piece_bitsize (piece);
      if (copy)
	src = &XEXP (piece, 1);
      else
	{
	  /* In-place mode: unlink and recycle the overlapped node.  */
	  *src = XEXP (piece, 1);
	  free_EXPR_LIST_node (piece);
	}
    }
  /* Add padding if needed.  The last skipped piece extended past the
     new piece's end (DIFF < 0), so re-insert the uncovered tail.  */
  if (diff < 0 && *src)
    {
      if (!copy)
	dest = src;
      *dest = decl_piece_node (NULL_RTX, bitsize: -diff, next: copy ? NULL_RTX : *dest);
      dest = &XEXP (*dest, 1);
    }
  if (!copy)
    return;
  /* Finally copy all nodes following it.  */
  while (*src)
    {
      *dest = decl_piece_node (loc_note: *decl_piece_varloc_ptr (piece: *src),
			       bitsize: decl_piece_bitsize (piece: *src), NULL_RTX);
      dest = &XEXP (*dest, 1);
      src = &XEXP (*src, 1);
    }
}
| 6435 | |
| 6436 | /* Add a variable location node to the linked list for DECL. */ |
| 6437 | |
| 6438 | static struct var_loc_node * |
| 6439 | add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view) |
| 6440 | { |
| 6441 | unsigned int decl_id; |
| 6442 | var_loc_list *temp; |
| 6443 | struct var_loc_node *loc = NULL; |
| 6444 | HOST_WIDE_INT bitsize = -1, bitpos = -1; |
| 6445 | |
| 6446 | if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl)) |
| 6447 | { |
| 6448 | tree realdecl = DECL_DEBUG_EXPR (decl); |
| 6449 | if (handled_component_p (t: realdecl) |
| 6450 | || (TREE_CODE (realdecl) == MEM_REF |
| 6451 | && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR)) |
| 6452 | { |
| 6453 | bool reverse; |
| 6454 | tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos, |
| 6455 | &bitsize, &reverse); |
| 6456 | if (!innerdecl |
| 6457 | || !DECL_P (innerdecl) |
| 6458 | || DECL_IGNORED_P (innerdecl) |
| 6459 | || TREE_STATIC (innerdecl) |
| 6460 | || bitsize == 0 |
| 6461 | || bitpos + bitsize > 256) |
| 6462 | return NULL; |
| 6463 | decl = innerdecl; |
| 6464 | } |
| 6465 | } |
| 6466 | |
| 6467 | decl_id = DECL_UID (decl); |
| 6468 | var_loc_list **slot |
| 6469 | = decl_loc_table->find_slot_with_hash (comparable: decl, hash: decl_id, insert: INSERT); |
| 6470 | if (*slot == NULL) |
| 6471 | { |
| 6472 | temp = ggc_cleared_alloc<var_loc_list> (); |
| 6473 | temp->decl_id = decl_id; |
| 6474 | *slot = temp; |
| 6475 | } |
| 6476 | else |
| 6477 | temp = *slot; |
| 6478 | |
| 6479 | /* For PARM_DECLs try to keep around the original incoming value, |
| 6480 | even if that means we'll emit a zero-range .debug_loc entry. */ |
| 6481 | if (temp->last |
| 6482 | && temp->first == temp->last |
| 6483 | && TREE_CODE (decl) == PARM_DECL |
| 6484 | && NOTE_P (temp->first->loc) |
| 6485 | && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl |
| 6486 | && DECL_INCOMING_RTL (decl) |
| 6487 | && NOTE_VAR_LOCATION_LOC (temp->first->loc) |
| 6488 | && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc)) |
| 6489 | == GET_CODE (DECL_INCOMING_RTL (decl)) |
| 6490 | && prev_real_insn (as_a<rtx_insn *> (p: temp->first->loc)) == NULL_RTX |
| 6491 | && (bitsize != -1 |
| 6492 | || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc), |
| 6493 | NOTE_VAR_LOCATION_LOC (loc_note)) |
| 6494 | || (NOTE_VAR_LOCATION_STATUS (temp->first->loc) |
| 6495 | != NOTE_VAR_LOCATION_STATUS (loc_note)))) |
| 6496 | { |
| 6497 | loc = ggc_cleared_alloc<var_loc_node> (); |
| 6498 | temp->first->next = loc; |
| 6499 | temp->last = loc; |
| 6500 | loc->loc = construct_piece_list (loc_note, bitpos, bitsize); |
| 6501 | } |
| 6502 | else if (temp->last) |
| 6503 | { |
| 6504 | struct var_loc_node *last = temp->last, *unused = NULL; |
| 6505 | rtx *piece_loc = NULL, last_loc_note; |
| 6506 | HOST_WIDE_INT piece_bitpos = 0; |
| 6507 | if (last->next) |
| 6508 | { |
| 6509 | last = last->next; |
| 6510 | gcc_assert (last->next == NULL); |
| 6511 | } |
| 6512 | if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST) |
| 6513 | { |
| 6514 | piece_loc = &last->loc; |
| 6515 | do |
| 6516 | { |
| 6517 | HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (piece: *piece_loc); |
| 6518 | if (piece_bitpos + cur_bitsize > bitpos) |
| 6519 | break; |
| 6520 | piece_bitpos += cur_bitsize; |
| 6521 | piece_loc = &XEXP (*piece_loc, 1); |
| 6522 | } |
| 6523 | while (*piece_loc); |
| 6524 | } |
| 6525 | /* TEMP->LAST here is either pointer to the last but one or |
| 6526 | last element in the chained list, LAST is pointer to the |
| 6527 | last element. */ |
| 6528 | if (label && strcmp (s1: last->label, s2: label) == 0 && last->view == view) |
| 6529 | { |
| 6530 | /* For SRA optimized variables if there weren't any real |
| 6531 | insns since last note, just modify the last node. */ |
| 6532 | if (piece_loc != NULL) |
| 6533 | { |
| 6534 | adjust_piece_list (dest: piece_loc, NULL, NULL, |
| 6535 | bitpos, piece_bitpos, bitsize, loc_note); |
| 6536 | return NULL; |
| 6537 | } |
| 6538 | /* If the last note doesn't cover any instructions, remove it. */ |
| 6539 | if (temp->last != last) |
| 6540 | { |
| 6541 | temp->last->next = NULL; |
| 6542 | unused = last; |
| 6543 | last = temp->last; |
| 6544 | gcc_assert (strcmp (last->label, label) != 0 || last->view != view); |
| 6545 | } |
| 6546 | else |
| 6547 | { |
| 6548 | gcc_assert (temp->first == temp->last |
| 6549 | || (temp->first->next == temp->last |
| 6550 | && TREE_CODE (decl) == PARM_DECL)); |
| 6551 | memset (s: temp->last, c: '\0', n: sizeof (*temp->last)); |
| 6552 | temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize); |
| 6553 | return temp->last; |
| 6554 | } |
| 6555 | } |
| 6556 | if (bitsize == -1 && NOTE_P (last->loc)) |
| 6557 | last_loc_note = last->loc; |
| 6558 | else if (piece_loc != NULL |
| 6559 | && *piece_loc != NULL_RTX |
| 6560 | && piece_bitpos == bitpos |
| 6561 | && decl_piece_bitsize (piece: *piece_loc) == bitsize) |
| 6562 | last_loc_note = *decl_piece_varloc_ptr (piece: *piece_loc); |
| 6563 | else |
| 6564 | last_loc_note = NULL_RTX; |
| 6565 | /* If the current location is the same as the end of the list, |
| 6566 | and either both or neither of the locations is uninitialized, |
| 6567 | we have nothing to do. */ |
| 6568 | if (last_loc_note == NULL_RTX |
| 6569 | || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note), |
| 6570 | NOTE_VAR_LOCATION_LOC (loc_note))) |
| 6571 | || ((NOTE_VAR_LOCATION_STATUS (last_loc_note) |
| 6572 | != NOTE_VAR_LOCATION_STATUS (loc_note)) |
| 6573 | && ((NOTE_VAR_LOCATION_STATUS (last_loc_note) |
| 6574 | == VAR_INIT_STATUS_UNINITIALIZED) |
| 6575 | || (NOTE_VAR_LOCATION_STATUS (loc_note) |
| 6576 | == VAR_INIT_STATUS_UNINITIALIZED)))) |
| 6577 | { |
| 6578 | /* Add LOC to the end of list and update LAST. If the last |
| 6579 | element of the list has been removed above, reuse its |
| 6580 | memory for the new node, otherwise allocate a new one. */ |
| 6581 | if (unused) |
| 6582 | { |
| 6583 | loc = unused; |
| 6584 | memset (s: loc, c: '\0', n: sizeof (*loc)); |
| 6585 | } |
| 6586 | else |
| 6587 | loc = ggc_cleared_alloc<var_loc_node> (); |
| 6588 | if (bitsize == -1 || piece_loc == NULL) |
| 6589 | loc->loc = construct_piece_list (loc_note, bitpos, bitsize); |
| 6590 | else |
| 6591 | adjust_piece_list (dest: &loc->loc, src: &last->loc, inner: piece_loc, |
| 6592 | bitpos, piece_bitpos, bitsize, loc_note); |
| 6593 | last->next = loc; |
| 6594 | /* Ensure TEMP->LAST will point either to the new last but one |
| 6595 | element of the chain, or to the last element in it. */ |
| 6596 | if (last != temp->last) |
| 6597 | temp->last = last; |
| 6598 | } |
| 6599 | else if (unused) |
| 6600 | ggc_free (unused); |
| 6601 | } |
| 6602 | else |
| 6603 | { |
| 6604 | loc = ggc_cleared_alloc<var_loc_node> (); |
| 6605 | temp->first = loc; |
| 6606 | temp->last = loc; |
| 6607 | loc->loc = construct_piece_list (loc_note, bitpos, bitsize); |
| 6608 | } |
| 6609 | return loc; |
| 6610 | } |
| 6611 | |
| 6612 | /* Keep track of the number of spaces used to indent the |
| 6613 | output of the debugging routines that print the structure of |
| 6614 | the DIE internal representation. */ |
| 6615 | static int print_indent; |
| 6616 | |
| 6617 | /* Indent the line the number of spaces given by print_indent. */ |
| 6618 | |
| 6619 | static inline void |
| 6620 | print_spaces (FILE *outfile) |
| 6621 | { |
| 6622 | fprintf (stream: outfile, format: "%*s" , print_indent, "" ); |
| 6623 | } |
| 6624 | |
| 6625 | /* Print a type signature in hex. */ |
| 6626 | |
| 6627 | static inline void |
| 6628 | print_signature (FILE *outfile, char *sig) |
| 6629 | { |
| 6630 | int i; |
| 6631 | |
| 6632 | for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++) |
| 6633 | fprintf (stream: outfile, format: "%02x" , sig[i] & 0xff); |
| 6634 | } |
| 6635 | |
| 6636 | static inline void |
| 6637 | print_discr_value (FILE *outfile, dw_discr_value *discr_value) |
| 6638 | { |
| 6639 | if (discr_value->pos) |
| 6640 | fprintf (stream: outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.sval); |
| 6641 | else |
| 6642 | fprintf (stream: outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.uval); |
| 6643 | } |
| 6644 | |
| 6645 | static void print_loc_descr (dw_loc_descr_ref, FILE *); |
| 6646 | |
| 6647 | /* Print the value associated to the VAL DWARF value node to OUTFILE. If |
| 6648 | RECURSE, output location descriptor operations. */ |
| 6649 | |
| 6650 | static void |
| 6651 | print_dw_val (dw_val_node *val, bool recurse, FILE *outfile) |
| 6652 | { |
| 6653 | switch (val->val_class) |
| 6654 | { |
| 6655 | case dw_val_class_addr: |
| 6656 | fprintf (stream: outfile, format: "address" ); |
| 6657 | break; |
| 6658 | case dw_val_class_offset: |
| 6659 | fprintf (stream: outfile, format: "offset" ); |
| 6660 | break; |
| 6661 | case dw_val_class_loc: |
| 6662 | fprintf (stream: outfile, format: "location descriptor" ); |
| 6663 | if (val->v.val_loc == NULL) |
| 6664 | fprintf (stream: outfile, format: " -> <null>" ); |
| 6665 | else if (recurse) |
| 6666 | { |
| 6667 | fprintf (stream: outfile, format: ":\n" ); |
| 6668 | print_indent += 4; |
| 6669 | print_loc_descr (val->v.val_loc, outfile); |
| 6670 | print_indent -= 4; |
| 6671 | } |
| 6672 | else |
| 6673 | { |
| 6674 | if (flag_dump_noaddr || flag_dump_unnumbered) |
| 6675 | fprintf (stream: outfile, format: " #" ); |
| 6676 | else |
| 6677 | fprintf (stream: outfile, format: " (%p)" , (void *) val->v.val_loc); |
| 6678 | } |
| 6679 | break; |
| 6680 | case dw_val_class_loc_list: |
| 6681 | fprintf (stream: outfile, format: "location list -> label:%s" , |
| 6682 | val->v.val_loc_list->ll_symbol); |
| 6683 | break; |
| 6684 | case dw_val_class_view_list: |
| 6685 | val = view_list_to_loc_list_val_node (val); |
| 6686 | fprintf (stream: outfile, format: "location list with views -> labels:%s and %s" , |
| 6687 | val->v.val_loc_list->ll_symbol, |
| 6688 | val->v.val_loc_list->vl_symbol); |
| 6689 | break; |
| 6690 | case dw_val_class_range_list: |
| 6691 | fprintf (stream: outfile, format: "range list" ); |
| 6692 | break; |
| 6693 | case dw_val_class_const: |
| 6694 | case dw_val_class_const_implicit: |
| 6695 | fprintf (stream: outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int); |
| 6696 | break; |
| 6697 | case dw_val_class_unsigned_const: |
| 6698 | case dw_val_class_unsigned_const_implicit: |
| 6699 | fprintf (stream: outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned); |
| 6700 | break; |
| 6701 | case dw_val_class_const_double: |
| 6702 | fprintf (stream: outfile, format: "constant (" HOST_WIDE_INT_PRINT_DEC"," \ |
| 6703 | HOST_WIDE_INT_PRINT_UNSIGNED")" , |
| 6704 | val->v.val_double.high, |
| 6705 | val->v.val_double.low); |
| 6706 | break; |
| 6707 | case dw_val_class_wide_int: |
| 6708 | { |
| 6709 | int i = val->v.val_wide->get_len (); |
| 6710 | fprintf (stream: outfile, format: "constant (" ); |
| 6711 | gcc_assert (i > 0); |
| 6712 | if (val->v.val_wide->elt (i: i - 1) == 0) |
| 6713 | fprintf (stream: outfile, format: "0x" ); |
| 6714 | fprintf (stream: outfile, HOST_WIDE_INT_PRINT_HEX, |
| 6715 | val->v.val_wide->elt (i: --i)); |
| 6716 | while (--i >= 0) |
| 6717 | fprintf (stream: outfile, HOST_WIDE_INT_PRINT_PADDED_HEX, |
| 6718 | val->v.val_wide->elt (i)); |
| 6719 | fprintf (stream: outfile, format: ")" ); |
| 6720 | break; |
| 6721 | } |
| 6722 | case dw_val_class_vec: |
| 6723 | fprintf (stream: outfile, format: "floating-point or vector constant" ); |
| 6724 | break; |
| 6725 | case dw_val_class_flag: |
| 6726 | fprintf (stream: outfile, format: "%u" , val->v.val_flag); |
| 6727 | break; |
| 6728 | case dw_val_class_die_ref: |
| 6729 | if (val->v.val_die_ref.die != NULL) |
| 6730 | { |
| 6731 | dw_die_ref die = val->v.val_die_ref.die; |
| 6732 | |
| 6733 | if (die->comdat_type_p) |
| 6734 | { |
| 6735 | fprintf (stream: outfile, format: "die -> signature: " ); |
| 6736 | print_signature (outfile, |
| 6737 | sig: die->die_id.die_type_node->signature); |
| 6738 | } |
| 6739 | else if (die->die_id.die_symbol) |
| 6740 | { |
| 6741 | fprintf (stream: outfile, format: "die -> label: %s" , die->die_id.die_symbol); |
| 6742 | if (die->with_offset) |
| 6743 | fprintf (stream: outfile, format: " + %ld" , die->die_offset); |
| 6744 | } |
| 6745 | else |
| 6746 | fprintf (stream: outfile, format: "die -> %ld" , die->die_offset); |
| 6747 | if (flag_dump_noaddr || flag_dump_unnumbered) |
| 6748 | fprintf (stream: outfile, format: " #" ); |
| 6749 | else |
| 6750 | fprintf (stream: outfile, format: " (%p)" , (void *) die); |
| 6751 | } |
| 6752 | else |
| 6753 | fprintf (stream: outfile, format: "die -> <null>" ); |
| 6754 | break; |
| 6755 | case dw_val_class_vms_delta: |
| 6756 | fprintf (stream: outfile, format: "delta: @slotcount(%s-%s)" , |
| 6757 | val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1); |
| 6758 | break; |
| 6759 | case dw_val_class_symview: |
| 6760 | fprintf (stream: outfile, format: "view: %s" , val->v.val_symbolic_view); |
| 6761 | break; |
| 6762 | case dw_val_class_lbl_id: |
| 6763 | case dw_val_class_lineptr: |
| 6764 | case dw_val_class_macptr: |
| 6765 | case dw_val_class_loclistsptr: |
| 6766 | case dw_val_class_high_pc: |
| 6767 | fprintf (stream: outfile, format: "label: %s" , val->v.val_lbl_id); |
| 6768 | break; |
| 6769 | case dw_val_class_str: |
| 6770 | if (val->v.val_str->str != NULL) |
| 6771 | fprintf (stream: outfile, format: "\"%s\"" , val->v.val_str->str); |
| 6772 | else |
| 6773 | fprintf (stream: outfile, format: "<null>" ); |
| 6774 | break; |
| 6775 | case dw_val_class_file: |
| 6776 | case dw_val_class_file_implicit: |
| 6777 | fprintf (stream: outfile, format: "\"%s\" (%d)" , val->v.val_file->filename, |
| 6778 | val->v.val_file->emitted_number); |
| 6779 | break; |
| 6780 | case dw_val_class_data8: |
| 6781 | { |
| 6782 | int i; |
| 6783 | |
| 6784 | for (i = 0; i < 8; i++) |
| 6785 | fprintf (stream: outfile, format: "%02x" , val->v.val_data8[i]); |
| 6786 | break; |
| 6787 | } |
| 6788 | case dw_val_class_discr_value: |
| 6789 | print_discr_value (outfile, discr_value: &val->v.val_discr_value); |
| 6790 | break; |
| 6791 | case dw_val_class_discr_list: |
| 6792 | for (dw_discr_list_ref node = val->v.val_discr_list; |
| 6793 | node != NULL; |
| 6794 | node = node->dw_discr_next) |
| 6795 | { |
| 6796 | if (node->dw_discr_range) |
| 6797 | { |
| 6798 | fprintf (stream: outfile, format: " .. " ); |
| 6799 | print_discr_value (outfile, discr_value: &node->dw_discr_lower_bound); |
| 6800 | print_discr_value (outfile, discr_value: &node->dw_discr_upper_bound); |
| 6801 | } |
| 6802 | else |
| 6803 | print_discr_value (outfile, discr_value: &node->dw_discr_lower_bound); |
| 6804 | |
| 6805 | if (node->dw_discr_next != NULL) |
| 6806 | fprintf (stream: outfile, format: " | " ); |
| 6807 | } |
| 6808 | default: |
| 6809 | break; |
| 6810 | } |
| 6811 | } |
| 6812 | |
| 6813 | /* Likewise, for a DIE attribute. */ |
| 6814 | |
| 6815 | static void |
| 6816 | print_attribute (dw_attr_node *a, bool recurse, FILE *outfile) |
| 6817 | { |
| 6818 | print_dw_val (val: &a->dw_attr_val, recurse, outfile); |
| 6819 | } |
| 6820 | |
| 6821 | |
| 6822 | /* Print the list of operands in the LOC location description to OUTFILE. This |
| 6823 | routine is a debugging aid only. */ |
| 6824 | |
| 6825 | static void |
| 6826 | print_loc_descr (dw_loc_descr_ref loc, FILE *outfile) |
| 6827 | { |
| 6828 | dw_loc_descr_ref l = loc; |
| 6829 | |
| 6830 | if (loc == NULL) |
| 6831 | { |
| 6832 | print_spaces (outfile); |
| 6833 | fprintf (stream: outfile, format: "<null>\n" ); |
| 6834 | return; |
| 6835 | } |
| 6836 | |
| 6837 | for (l = loc; l != NULL; l = l->dw_loc_next) |
| 6838 | { |
| 6839 | print_spaces (outfile); |
| 6840 | if (flag_dump_noaddr || flag_dump_unnumbered) |
| 6841 | fprintf (stream: outfile, format: "#" ); |
| 6842 | else |
| 6843 | fprintf (stream: outfile, format: "(%p)" , (void *) l); |
| 6844 | fprintf (stream: outfile, format: " %s" , |
| 6845 | dwarf_stack_op_name (op: l->dw_loc_opc)); |
| 6846 | if (l->dw_loc_oprnd1.val_class != dw_val_class_none) |
| 6847 | { |
| 6848 | fprintf (stream: outfile, format: " " ); |
| 6849 | print_dw_val (val: &l->dw_loc_oprnd1, recurse: false, outfile); |
| 6850 | } |
| 6851 | if (l->dw_loc_oprnd2.val_class != dw_val_class_none) |
| 6852 | { |
| 6853 | fprintf (stream: outfile, format: ", " ); |
| 6854 | print_dw_val (val: &l->dw_loc_oprnd2, recurse: false, outfile); |
| 6855 | } |
| 6856 | fprintf (stream: outfile, format: "\n" ); |
| 6857 | } |
| 6858 | } |
| 6859 | |
| 6860 | /* Print the information associated with a given DIE, and its children. |
| 6861 | This routine is a debugging aid only. */ |
| 6862 | |
| 6863 | static void |
| 6864 | print_die (dw_die_ref die, FILE *outfile) |
| 6865 | { |
| 6866 | dw_attr_node *a; |
| 6867 | dw_die_ref c; |
| 6868 | unsigned ix; |
| 6869 | |
| 6870 | print_spaces (outfile); |
| 6871 | fprintf (stream: outfile, format: "DIE %4ld: %s " , |
| 6872 | die->die_offset, dwarf_tag_name (tag: die->die_tag)); |
| 6873 | if (flag_dump_noaddr || flag_dump_unnumbered) |
| 6874 | fprintf (stream: outfile, format: "#\n" ); |
| 6875 | else |
| 6876 | fprintf (stream: outfile, format: "(%p)\n" , (void*) die); |
| 6877 | print_spaces (outfile); |
| 6878 | fprintf (stream: outfile, format: " abbrev id: %lu" , die->die_abbrev); |
| 6879 | fprintf (stream: outfile, format: " offset: %ld" , die->die_offset); |
| 6880 | fprintf (stream: outfile, format: " mark: %d\n" , die->die_mark); |
| 6881 | |
| 6882 | if (die->comdat_type_p) |
| 6883 | { |
| 6884 | print_spaces (outfile); |
| 6885 | fprintf (stream: outfile, format: " signature: " ); |
| 6886 | print_signature (outfile, sig: die->die_id.die_type_node->signature); |
| 6887 | fprintf (stream: outfile, format: "\n" ); |
| 6888 | } |
| 6889 | |
| 6890 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
| 6891 | { |
| 6892 | print_spaces (outfile); |
| 6893 | fprintf (stream: outfile, format: " %s: " , dwarf_attr_name (attr: a->dw_attr)); |
| 6894 | |
| 6895 | print_attribute (a, recurse: true, outfile); |
| 6896 | fprintf (stream: outfile, format: "\n" ); |
| 6897 | } |
| 6898 | |
| 6899 | if (die->die_child != NULL) |
| 6900 | { |
| 6901 | print_indent += 4; |
| 6902 | FOR_EACH_CHILD (die, c, print_die (c, outfile)); |
| 6903 | print_indent -= 4; |
| 6904 | } |
| 6905 | if (print_indent == 0) |
| 6906 | fprintf (stream: outfile, format: "\n" ); |
| 6907 | } |
| 6908 | |
| 6909 | /* Print the list of operations in the LOC location description. */ |
| 6910 | |
| 6911 | DEBUG_FUNCTION void |
| 6912 | debug_dwarf_loc_descr (dw_loc_descr_ref loc) |
| 6913 | { |
| 6914 | print_loc_descr (loc, stderr); |
| 6915 | } |
| 6916 | |
| 6917 | /* Print the information collected for a given DIE. */ |
| 6918 | |
| 6919 | DEBUG_FUNCTION void |
| 6920 | debug_dwarf_die (dw_die_ref die) |
| 6921 | { |
| 6922 | print_die (die, stderr); |
| 6923 | } |
| 6924 | |
| 6925 | DEBUG_FUNCTION void |
| 6926 | debug (die_struct &ref) |
| 6927 | { |
| 6928 | print_die (die: &ref, stderr); |
| 6929 | } |
| 6930 | |
| 6931 | DEBUG_FUNCTION void |
| 6932 | debug (die_struct *ptr) |
| 6933 | { |
| 6934 | if (ptr) |
| 6935 | debug (ref&: *ptr); |
| 6936 | else |
| 6937 | fprintf (stderr, format: "<nil>\n" ); |
| 6938 | } |
| 6939 | |
| 6940 | |
| 6941 | /* Print all DWARF information collected for the compilation unit. |
| 6942 | This routine is a debugging aid only. */ |
| 6943 | |
| 6944 | DEBUG_FUNCTION void |
| 6945 | debug_dwarf (void) |
| 6946 | { |
| 6947 | print_indent = 0; |
| 6948 | print_die (die: comp_unit_die (), stderr); |
| 6949 | } |
| 6950 | |
| 6951 | /* Verify the DIE tree structure. */ |
| 6952 | |
DEBUG_FUNCTION void
verify_die (dw_die_ref die)
{
  /* DIE must come in unmarked: die_mark is borrowed below as a visited
     flag and restored to 0 before returning.  */
  gcc_assert (!die->die_mark);
  if (die->die_parent == NULL
      && die->die_sib == NULL)
    return;
  /* Verify the die_sib list is cyclic.  */
  dw_die_ref x = die;
  do
    {
      x->die_mark = 1;
      x = x->die_sib;
    }
  while (x && !x->die_mark);
  /* The walk must arrive back at DIE; stopping anywhere else means the
     sibling ring is broken (NULL link or a link into another ring).  */
  gcc_assert (x == die);
  x = die;
  do
    {
      /* Verify all dies have the same parent.  */
      gcc_assert (x->die_parent == die->die_parent);
      if (x->die_child)
	{
	  /* Verify the child has the proper parent and recurse.  */
	  gcc_assert (x->die_child->die_parent == x);
	  verify_die (die: x->die_child);
	}
      /* Clear the visited flag so the whole tree is left unmarked.  */
      x->die_mark = 0;
      x = x->die_sib;
    }
  while (x && x->die_mark);
}
| 6985 | |
| 6986 | /* Sanity checks on DIEs. */ |
| 6987 | |
| 6988 | static void |
| 6989 | check_die (dw_die_ref die) |
| 6990 | { |
| 6991 | unsigned ix; |
| 6992 | dw_attr_node *a; |
| 6993 | bool inline_found = false; |
| 6994 | int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0; |
| 6995 | int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0; |
| 6996 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
| 6997 | { |
| 6998 | switch (a->dw_attr) |
| 6999 | { |
| 7000 | case DW_AT_inline: |
| 7001 | if (a->dw_attr_val.v.val_unsigned) |
| 7002 | inline_found = true; |
| 7003 | break; |
| 7004 | case DW_AT_location: |
| 7005 | ++n_location; |
| 7006 | break; |
| 7007 | case DW_AT_low_pc: |
| 7008 | ++n_low_pc; |
| 7009 | break; |
| 7010 | case DW_AT_high_pc: |
| 7011 | ++n_high_pc; |
| 7012 | break; |
| 7013 | case DW_AT_artificial: |
| 7014 | ++n_artificial; |
| 7015 | break; |
| 7016 | case DW_AT_decl_column: |
| 7017 | ++n_decl_column; |
| 7018 | break; |
| 7019 | case DW_AT_decl_line: |
| 7020 | ++n_decl_line; |
| 7021 | break; |
| 7022 | case DW_AT_decl_file: |
| 7023 | ++n_decl_file; |
| 7024 | break; |
| 7025 | default: |
| 7026 | break; |
| 7027 | } |
| 7028 | } |
| 7029 | if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1 |
| 7030 | || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1) |
| 7031 | { |
| 7032 | fprintf (stderr, format: "Duplicate attributes in DIE:\n" ); |
| 7033 | debug_dwarf_die (die); |
| 7034 | gcc_unreachable (); |
| 7035 | } |
| 7036 | if (inline_found) |
| 7037 | { |
| 7038 | /* A debugging information entry that is a member of an abstract |
| 7039 | instance tree [that has DW_AT_inline] should not contain any |
| 7040 | attributes which describe aspects of the subroutine which vary |
| 7041 | between distinct inlined expansions or distinct out-of-line |
| 7042 | expansions. */ |
| 7043 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
| 7044 | gcc_assert (a->dw_attr != DW_AT_low_pc |
| 7045 | && a->dw_attr != DW_AT_high_pc |
| 7046 | && a->dw_attr != DW_AT_location |
| 7047 | && a->dw_attr != DW_AT_frame_base |
| 7048 | && a->dw_attr != DW_AT_call_all_calls |
| 7049 | && a->dw_attr != DW_AT_GNU_all_call_sites); |
| 7050 | } |
| 7051 | } |
| 7052 | |
| 7053 | #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx) |
| 7054 | #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx) |
| 7055 | #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx) |
| 7056 | |
| 7057 | /* Calculate the checksum of a location expression. */ |
| 7058 | |
| 7059 | static inline void |
| 7060 | loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx) |
| 7061 | { |
| 7062 | int tem; |
| 7063 | inchash::hash hstate; |
| 7064 | hashval_t hash; |
| 7065 | |
| 7066 | tem = (loc->dw_loc_dtprel << 8) | ((unsigned int) loc->dw_loc_opc); |
| 7067 | CHECKSUM (tem); |
| 7068 | hash_loc_operands (loc, hstate); |
| 7069 | hash = hstate.end(); |
| 7070 | CHECKSUM (hash); |
| 7071 | } |
| 7072 | |
| 7073 | /* Calculate the checksum of an attribute. */ |
| 7074 | |
static void
attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
{
  dw_loc_descr_ref loc;
  rtx r;

  /* The attribute code itself participates in the checksum.  NOTE: the
     exact byte sequence fed to MD5 is the contract here; any change to
     the order or representation changes DIE checksums.  */
  CHECKSUM (at->dw_attr);

  /* We don't care that this was compiled with a different compiler
     snapshot; if the output is the same, that's what matters.  */
  if (at->dw_attr == DW_AT_producer)
    return;

  /* Checksum the value according to its class.  */
  switch (AT_class (a: at))
    {
    case dw_val_class_const:
    case dw_val_class_const_implicit:
      CHECKSUM (at->dw_attr_val.v.val_int);
      break;
    case dw_val_class_unsigned_const:
    case dw_val_class_unsigned_const_implicit:
      CHECKSUM (at->dw_attr_val.v.val_unsigned);
      break;
    case dw_val_class_const_double:
      CHECKSUM (at->dw_attr_val.v.val_double);
      break;
    case dw_val_class_wide_int:
      /* Checksum the raw limb array of the wide int.  */
      CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
		      get_full_len (*at->dw_attr_val.v.val_wide)
		      * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
      break;
    case dw_val_class_vec:
      CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
		      (at->dw_attr_val.v.val_vec.length
		       * at->dw_attr_val.v.val_vec.elt_size));
      break;
    case dw_val_class_flag:
      CHECKSUM (at->dw_attr_val.v.val_flag);
      break;
    case dw_val_class_str:
      CHECKSUM_STRING (AT_string (at));
      break;

    case dw_val_class_addr:
      /* Addresses are checksummed by symbol name, not by value, so the
	 result is stable across relinks.  */
      r = AT_addr (a: at);
      gcc_assert (GET_CODE (r) == SYMBOL_REF);
      CHECKSUM_STRING (XSTR (r, 0));
      break;

    case dw_val_class_offset:
      CHECKSUM (at->dw_attr_val.v.val_offset);
      break;

    case dw_val_class_loc:
      /* Checksum each operation of the location expression.  */
      for (loc = AT_loc (a: at); loc; loc = loc->dw_loc_next)
	loc_checksum (loc, ctx);
      break;

    case dw_val_class_die_ref:
      /* Recurse into the referenced DIE; MARK guards against cycles.  */
      die_checksum (AT_ref (a: at), ctx, mark);
      break;

    case dw_val_class_fde_ref:
    case dw_val_class_vms_delta:
    case dw_val_class_symview:
    case dw_val_class_lbl_id:
    case dw_val_class_lineptr:
    case dw_val_class_macptr:
    case dw_val_class_loclistsptr:
    case dw_val_class_high_pc:
      /* Label-like values are deliberately excluded from the checksum;
	 they differ between otherwise-identical units.  */
      break;

    case dw_val_class_file:
    case dw_val_class_file_implicit:
      CHECKSUM_STRING (AT_file (at)->filename);
      break;

    case dw_val_class_data8:
      CHECKSUM (at->dw_attr_val.v.val_data8);
      break;

    default:
      break;
    }
}
| 7160 | |
| 7161 | /* Calculate the checksum of a DIE. */ |
| 7162 | |
static void
die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
{
  dw_die_ref c;
  dw_attr_node *a;
  unsigned ix;

  /* To avoid infinite recursion.  */
  if (die->die_mark)
    {
      /* Already visited: checksum the previously assigned mark number as
	 a back-reference instead of descending again.  */
      CHECKSUM (die->die_mark);
      return;
    }
  /* Assign this DIE the next visit number; callers are expected to clear
     die_mark over the whole tree when the walk is done.  */
  die->die_mark = ++(*mark);

  CHECKSUM (die->die_tag);

  /* Fold in every attribute, then recurse over all children.  */
  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    attr_checksum (at: a, ctx, mark);

  FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
}
| 7185 | |
| 7186 | #undef CHECKSUM |
| 7187 | #undef CHECKSUM_BLOCK |
| 7188 | #undef CHECKSUM_STRING |
| 7189 | |
| 7190 | /* For DWARF-4 types, include the trailing NULL when checksumming strings. */ |
| 7191 | #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx) |
| 7192 | #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx) |
| 7193 | #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx) |
| 7194 | #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx) |
| 7195 | #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx) |
| 7196 | #define CHECKSUM_ATTR(FOO) \ |
| 7197 | if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark) |
| 7198 | |
| 7199 | /* Calculate the checksum of a number in signed LEB128 format. */ |
| 7200 | |
| 7201 | static void |
| 7202 | checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx) |
| 7203 | { |
| 7204 | unsigned char byte; |
| 7205 | bool more; |
| 7206 | |
| 7207 | while (1) |
| 7208 | { |
| 7209 | byte = (value & 0x7f); |
| 7210 | value >>= 7; |
| 7211 | more = !((value == 0 && (byte & 0x40) == 0) |
| 7212 | || (value == -1 && (byte & 0x40) != 0)); |
| 7213 | if (more) |
| 7214 | byte |= 0x80; |
| 7215 | CHECKSUM (byte); |
| 7216 | if (!more) |
| 7217 | break; |
| 7218 | } |
| 7219 | } |
| 7220 | |
| 7221 | /* Calculate the checksum of a number in unsigned LEB128 format. */ |
| 7222 | |
| 7223 | static void |
| 7224 | checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx) |
| 7225 | { |
| 7226 | while (1) |
| 7227 | { |
| 7228 | unsigned char byte = (value & 0x7f); |
| 7229 | value >>= 7; |
| 7230 | if (value != 0) |
| 7231 | /* More bytes to follow. */ |
| 7232 | byte |= 0x80; |
| 7233 | CHECKSUM (byte); |
| 7234 | if (value == 0) |
| 7235 | break; |
| 7236 | } |
| 7237 | } |
| 7238 | |
| 7239 | /* Checksum the context of the DIE. This adds the names of any |
| 7240 | surrounding namespaces or structures to the checksum. */ |
| 7241 | |
static void
checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
{
  const char *name;
  dw_die_ref spec;
  int tag = die->die_tag;

  /* Only namespaces, structures and classes contribute to the qualified
     name; any other enclosing DIE is ignored.  */
  if (tag != DW_TAG_namespace
      && tag != DW_TAG_structure_type
      && tag != DW_TAG_class_type)
    return;

  name = get_AT_string (die, attr_kind: DW_AT_name);

  /* Follow a DW_AT_specification link so declarations and definitions
     checksum their context identically.  */
  spec = get_AT_ref (die, attr_kind: DW_AT_specification);
  if (spec != NULL)
    die = spec;

  /* Recurse first so outer scopes are folded in outermost-first,
     mirroring qualified-name order.  */
  if (die->die_parent != NULL)
    checksum_die_context (die: die->die_parent, ctx);

  /* 'C' marks a context entry in the checksum byte stream.  */
  CHECKSUM_ULEB128 ('C');
  CHECKSUM_ULEB128 (tag);
  if (name != NULL)
    CHECKSUM_STRING (name);
}
| 7268 | |
| 7269 | /* Calculate the checksum of a location expression. */ |
| 7270 | |
static inline void
loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
{
  /* Special case for lone DW_OP_plus_uconst: checksum as if the location
     were emitted as a DW_FORM_sdata instead of a location expression.  */
  if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
    {
      CHECKSUM_ULEB128 (DW_FORM_sdata);
      CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
      return;
    }

  /* Otherwise, just checksum the raw location expression.  */
  while (loc != NULL)
    {
      inchash::hash hstate;
      hashval_t hash;

      /* For each operation: the dtprel flag, the opcode, then a hash of
	 the operands.  The byte order here is the checksum contract.  */
      CHECKSUM_ULEB128 (loc->dw_loc_dtprel);
      CHECKSUM_ULEB128 (loc->dw_loc_opc);
      hash_loc_operands (loc, hstate);
      hash = hstate.end ();
      CHECKSUM (hash);
      loc = loc->dw_loc_next;
    }
}
| 7297 | |
| 7298 | /* Calculate the checksum of an attribute. */ |
| 7299 | |
static void
attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
		       struct md5_ctx *ctx, int *mark)
{
  dw_loc_descr_ref loc;
  rtx r;

  /* DIE references need special handling so the checksum is stable even
     when the referenced type is incomplete in one unit.  */
  if (AT_class (a: at) == dw_val_class_die_ref)
    {
      dw_die_ref target_die = AT_ref (a: at);

      /* For pointer and reference types, we checksum only the (qualified)
	 name of the target type (if there is a name).  For friend entries,
	 we checksum only the (qualified) name of the target type or function.
	 This allows the checksum to remain the same whether the target type
	 is complete or not.  */
      if ((at->dw_attr == DW_AT_type
	   && (tag == DW_TAG_pointer_type
	       || tag == DW_TAG_reference_type
	       || tag == DW_TAG_rvalue_reference_type
	       || tag == DW_TAG_ptr_to_member_type))
	  || (at->dw_attr == DW_AT_friend
	      && tag == DW_TAG_friend))
	{
	  dw_attr_node *name_attr = get_AT (die: target_die, attr_kind: DW_AT_name);

	  if (name_attr != NULL)
	    {
	      /* Checksum the context of the declaration DIE (following a
		 DW_AT_specification link when present), then the name.
		 'N' marks a by-name reference; 'E' ends the context.  */
	      dw_die_ref decl = get_AT_ref (die: target_die, attr_kind: DW_AT_specification);

	      if (decl == NULL)
		decl = target_die;
	      CHECKSUM_ULEB128 ('N');
	      CHECKSUM_ULEB128 (at->dw_attr);
	      if (decl->die_parent != NULL)
		checksum_die_context (die: decl->die_parent, ctx);
	      CHECKSUM_ULEB128 ('E');
	      CHECKSUM_STRING (AT_string (name_attr));
	      return;
	    }
	}

      /* For all other references to another DIE, we check to see if the
	 target DIE has already been visited.  If it has, we emit a
	 backward reference; if not, we descend recursively.  */
      if (target_die->die_mark > 0)
	{
	  /* 'R' marks a back-reference to an already-visited DIE, by its
	     visit number.  */
	  CHECKSUM_ULEB128 ('R');
	  CHECKSUM_ULEB128 (at->dw_attr);
	  CHECKSUM_ULEB128 (target_die->die_mark);
	}
      else
	{
	  dw_die_ref decl = get_AT_ref (die: target_die, attr_kind: DW_AT_specification);

	  if (decl == NULL)
	    decl = target_die;
	  /* Assign the visit number before recursing so cycles terminate
	     as back-references.  'T' marks an inline target DIE.  */
	  target_die->die_mark = ++(*mark);
	  CHECKSUM_ULEB128 ('T');
	  CHECKSUM_ULEB128 (at->dw_attr);
	  if (decl->die_parent != NULL)
	    checksum_die_context (die: decl->die_parent, ctx);
	  die_checksum_ordered (target_die, ctx, mark);
	}
      return;
    }

  /* Ordinary attribute: 'A' marker, the attribute code, then a form code
     and the value.  The form codes below normalize equivalent encodings
     (e.g. all integer constants checksum as DW_FORM_sdata).  */
  CHECKSUM_ULEB128 ('A');
  CHECKSUM_ULEB128 (at->dw_attr);

  switch (AT_class (a: at))
    {
    case dw_val_class_const:
    case dw_val_class_const_implicit:
      CHECKSUM_ULEB128 (DW_FORM_sdata);
      CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
      break;

    case dw_val_class_unsigned_const:
    case dw_val_class_unsigned_const_implicit:
      CHECKSUM_ULEB128 (DW_FORM_sdata);
      CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
      break;

    case dw_val_class_const_double:
      CHECKSUM_ULEB128 (DW_FORM_block);
      CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
      CHECKSUM (at->dw_attr_val.v.val_double);
      break;

    case dw_val_class_wide_int:
      /* Block form: length in bytes, then the raw limb array.  */
      CHECKSUM_ULEB128 (DW_FORM_block);
      CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
			* HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
      CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
		      get_full_len (*at->dw_attr_val.v.val_wide)
		      * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
      break;

    case dw_val_class_vec:
      CHECKSUM_ULEB128 (DW_FORM_block);
      CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
			* at->dw_attr_val.v.val_vec.elt_size);
      CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
		      (at->dw_attr_val.v.val_vec.length
		       * at->dw_attr_val.v.val_vec.elt_size));
      break;

    case dw_val_class_flag:
      CHECKSUM_ULEB128 (DW_FORM_flag);
      CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
      break;

    case dw_val_class_str:
      CHECKSUM_ULEB128 (DW_FORM_string);
      CHECKSUM_STRING (AT_string (at));
      break;

    case dw_val_class_addr:
      /* Addresses checksum by symbol name, not value, for stability.  */
      r = AT_addr (a: at);
      gcc_assert (GET_CODE (r) == SYMBOL_REF);
      CHECKSUM_ULEB128 (DW_FORM_string);
      CHECKSUM_STRING (XSTR (r, 0));
      break;

    case dw_val_class_offset:
      CHECKSUM_ULEB128 (DW_FORM_sdata);
      CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
      break;

    case dw_val_class_loc:
      for (loc = AT_loc (a: at); loc; loc = loc->dw_loc_next)
	loc_checksum_ordered (loc, ctx);
      break;

    case dw_val_class_fde_ref:
    case dw_val_class_symview:
    case dw_val_class_lbl_id:
    case dw_val_class_lineptr:
    case dw_val_class_macptr:
    case dw_val_class_loclistsptr:
    case dw_val_class_high_pc:
      /* Label-like values are deliberately left out of the checksum.  */
      break;

    case dw_val_class_file:
    case dw_val_class_file_implicit:
      CHECKSUM_ULEB128 (DW_FORM_string);
      CHECKSUM_STRING (AT_file (at)->filename);
      break;

    case dw_val_class_data8:
      CHECKSUM (at->dw_attr_val.v.val_data8);
      break;

    default:
      break;
    }
}
| 7458 | |
/* One slot per attribute that participates in the ordered type-signature
   checksum.  Each field is either NULL or points at the corresponding
   attribute of the DIE being checksummed (or of its DW_AT_specification
   declaration).  Filled in by collect_checksum_attributes and consumed,
   in a fixed order, by die_checksum_ordered.  */

struct checksum_attributes
{
  dw_attr_node *at_name;
  dw_attr_node *at_type;
  dw_attr_node *at_friend;
  dw_attr_node *at_accessibility;
  dw_attr_node *at_address_class;
  dw_attr_node *at_alignment;
  dw_attr_node *at_allocated;
  dw_attr_node *at_artificial;
  dw_attr_node *at_associated;
  dw_attr_node *at_binary_scale;
  dw_attr_node *at_bit_offset;
  dw_attr_node *at_bit_size;
  dw_attr_node *at_bit_stride;
  dw_attr_node *at_byte_size;
  dw_attr_node *at_byte_stride;
  dw_attr_node *at_const_value;
  dw_attr_node *at_containing_type;
  dw_attr_node *at_count;
  dw_attr_node *at_data_location;
  dw_attr_node *at_data_member_location;
  dw_attr_node *at_decimal_scale;
  dw_attr_node *at_decimal_sign;
  dw_attr_node *at_default_value;
  dw_attr_node *at_digit_count;
  dw_attr_node *at_discr;
  dw_attr_node *at_discr_list;
  dw_attr_node *at_discr_value;
  dw_attr_node *at_encoding;
  dw_attr_node *at_endianity;
  dw_attr_node *at_explicit;
  dw_attr_node *at_is_optional;
  dw_attr_node *at_location;
  dw_attr_node *at_lower_bound;
  dw_attr_node *at_mutable;
  dw_attr_node *at_ordering;
  dw_attr_node *at_picture_string;
  dw_attr_node *at_prototyped;
  dw_attr_node *at_small;
  dw_attr_node *at_segment;
  dw_attr_node *at_string_length;
  dw_attr_node *at_string_length_bit_size;
  dw_attr_node *at_string_length_byte_size;
  dw_attr_node *at_threads_scaled;
  dw_attr_node *at_upper_bound;
  dw_attr_node *at_use_location;
  dw_attr_node *at_use_UTF8;
  dw_attr_node *at_variable_parameter;
  dw_attr_node *at_virtuality;
  dw_attr_node *at_visibility;
  dw_attr_node *at_vtable_elem_location;
};
| 7512 | |
| 7513 | /* Collect the attributes that we will want to use for the checksum. */ |
| 7514 | |
| 7515 | static void |
| 7516 | collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die) |
| 7517 | { |
| 7518 | dw_attr_node *a; |
| 7519 | unsigned ix; |
| 7520 | |
| 7521 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
| 7522 | { |
| 7523 | switch (a->dw_attr) |
| 7524 | { |
| 7525 | case DW_AT_name: |
| 7526 | attrs->at_name = a; |
| 7527 | break; |
| 7528 | case DW_AT_type: |
| 7529 | attrs->at_type = a; |
| 7530 | break; |
| 7531 | case DW_AT_friend: |
| 7532 | attrs->at_friend = a; |
| 7533 | break; |
| 7534 | case DW_AT_accessibility: |
| 7535 | attrs->at_accessibility = a; |
| 7536 | break; |
| 7537 | case DW_AT_address_class: |
| 7538 | attrs->at_address_class = a; |
| 7539 | break; |
| 7540 | case DW_AT_alignment: |
| 7541 | attrs->at_alignment = a; |
| 7542 | break; |
| 7543 | case DW_AT_allocated: |
| 7544 | attrs->at_allocated = a; |
| 7545 | break; |
| 7546 | case DW_AT_artificial: |
| 7547 | attrs->at_artificial = a; |
| 7548 | break; |
| 7549 | case DW_AT_associated: |
| 7550 | attrs->at_associated = a; |
| 7551 | break; |
| 7552 | case DW_AT_binary_scale: |
| 7553 | attrs->at_binary_scale = a; |
| 7554 | break; |
| 7555 | case DW_AT_bit_offset: |
| 7556 | attrs->at_bit_offset = a; |
| 7557 | break; |
| 7558 | case DW_AT_bit_size: |
| 7559 | attrs->at_bit_size = a; |
| 7560 | break; |
| 7561 | case DW_AT_bit_stride: |
| 7562 | attrs->at_bit_stride = a; |
| 7563 | break; |
| 7564 | case DW_AT_byte_size: |
| 7565 | attrs->at_byte_size = a; |
| 7566 | break; |
| 7567 | case DW_AT_byte_stride: |
| 7568 | attrs->at_byte_stride = a; |
| 7569 | break; |
| 7570 | case DW_AT_const_value: |
| 7571 | attrs->at_const_value = a; |
| 7572 | break; |
| 7573 | case DW_AT_containing_type: |
| 7574 | attrs->at_containing_type = a; |
| 7575 | break; |
| 7576 | case DW_AT_count: |
| 7577 | attrs->at_count = a; |
| 7578 | break; |
| 7579 | case DW_AT_data_location: |
| 7580 | attrs->at_data_location = a; |
| 7581 | break; |
| 7582 | case DW_AT_data_member_location: |
| 7583 | attrs->at_data_member_location = a; |
| 7584 | break; |
| 7585 | case DW_AT_decimal_scale: |
| 7586 | attrs->at_decimal_scale = a; |
| 7587 | break; |
| 7588 | case DW_AT_decimal_sign: |
| 7589 | attrs->at_decimal_sign = a; |
| 7590 | break; |
| 7591 | case DW_AT_default_value: |
| 7592 | attrs->at_default_value = a; |
| 7593 | break; |
| 7594 | case DW_AT_digit_count: |
| 7595 | attrs->at_digit_count = a; |
| 7596 | break; |
| 7597 | case DW_AT_discr: |
| 7598 | attrs->at_discr = a; |
| 7599 | break; |
| 7600 | case DW_AT_discr_list: |
| 7601 | attrs->at_discr_list = a; |
| 7602 | break; |
| 7603 | case DW_AT_discr_value: |
| 7604 | attrs->at_discr_value = a; |
| 7605 | break; |
| 7606 | case DW_AT_encoding: |
| 7607 | attrs->at_encoding = a; |
| 7608 | break; |
| 7609 | case DW_AT_endianity: |
| 7610 | attrs->at_endianity = a; |
| 7611 | break; |
| 7612 | case DW_AT_explicit: |
| 7613 | attrs->at_explicit = a; |
| 7614 | break; |
| 7615 | case DW_AT_is_optional: |
| 7616 | attrs->at_is_optional = a; |
| 7617 | break; |
| 7618 | case DW_AT_location: |
| 7619 | attrs->at_location = a; |
| 7620 | break; |
| 7621 | case DW_AT_lower_bound: |
| 7622 | attrs->at_lower_bound = a; |
| 7623 | break; |
| 7624 | case DW_AT_mutable: |
| 7625 | attrs->at_mutable = a; |
| 7626 | break; |
| 7627 | case DW_AT_ordering: |
| 7628 | attrs->at_ordering = a; |
| 7629 | break; |
| 7630 | case DW_AT_picture_string: |
| 7631 | attrs->at_picture_string = a; |
| 7632 | break; |
| 7633 | case DW_AT_prototyped: |
| 7634 | attrs->at_prototyped = a; |
| 7635 | break; |
| 7636 | case DW_AT_small: |
| 7637 | attrs->at_small = a; |
| 7638 | break; |
| 7639 | case DW_AT_segment: |
| 7640 | attrs->at_segment = a; |
| 7641 | break; |
| 7642 | case DW_AT_string_length: |
| 7643 | attrs->at_string_length = a; |
| 7644 | break; |
| 7645 | case DW_AT_string_length_bit_size: |
| 7646 | attrs->at_string_length_bit_size = a; |
| 7647 | break; |
| 7648 | case DW_AT_string_length_byte_size: |
| 7649 | attrs->at_string_length_byte_size = a; |
| 7650 | break; |
| 7651 | case DW_AT_threads_scaled: |
| 7652 | attrs->at_threads_scaled = a; |
| 7653 | break; |
| 7654 | case DW_AT_upper_bound: |
| 7655 | attrs->at_upper_bound = a; |
| 7656 | break; |
| 7657 | case DW_AT_use_location: |
| 7658 | attrs->at_use_location = a; |
| 7659 | break; |
| 7660 | case DW_AT_use_UTF8: |
| 7661 | attrs->at_use_UTF8 = a; |
| 7662 | break; |
| 7663 | case DW_AT_variable_parameter: |
| 7664 | attrs->at_variable_parameter = a; |
| 7665 | break; |
| 7666 | case DW_AT_virtuality: |
| 7667 | attrs->at_virtuality = a; |
| 7668 | break; |
| 7669 | case DW_AT_visibility: |
| 7670 | attrs->at_visibility = a; |
| 7671 | break; |
| 7672 | case DW_AT_vtable_elem_location: |
| 7673 | attrs->at_vtable_elem_location = a; |
| 7674 | break; |
| 7675 | default: |
| 7676 | break; |
| 7677 | } |
| 7678 | } |
| 7679 | } |
| 7680 | |
/* Calculate the checksum of a DIE, using an ordered subset of attributes.
   The attributes are folded into CTX in a fixed order (independent of
   their order in the DIE) so that producers that emit the same attributes
   in a different order still yield the same type signature.  MARK is
   threaded through for the recursive deep checksums of children.  */

static void
die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
{
  dw_die_ref c;
  dw_die_ref decl;
  struct checksum_attributes attrs;

  /* 'D' tags the start of a DIE record in the checksum stream.  */
  CHECKSUM_ULEB128 ('D');
  CHECKSUM_ULEB128 (die->die_tag);

  memset (s: &attrs, c: 0, n: sizeof (attrs));

  /* If this DIE completes a declaration, collect the declaration's
     attributes first so DIE's own attributes can override them.  */
  decl = get_AT_ref (die, attr_kind: DW_AT_specification);
  if (decl != NULL)
    collect_checksum_attributes (attrs: &attrs, die: decl);
  collect_checksum_attributes (attrs: &attrs, die);

  /* Checksum the collected attributes in a fixed canonical order.
     NULL slots contribute nothing (CHECKSUM_ATTR skips them).  */
  CHECKSUM_ATTR (attrs.at_name);
  CHECKSUM_ATTR (attrs.at_accessibility);
  CHECKSUM_ATTR (attrs.at_address_class);
  CHECKSUM_ATTR (attrs.at_allocated);
  CHECKSUM_ATTR (attrs.at_artificial);
  CHECKSUM_ATTR (attrs.at_associated);
  CHECKSUM_ATTR (attrs.at_binary_scale);
  CHECKSUM_ATTR (attrs.at_bit_offset);
  CHECKSUM_ATTR (attrs.at_bit_size);
  CHECKSUM_ATTR (attrs.at_bit_stride);
  CHECKSUM_ATTR (attrs.at_byte_size);
  CHECKSUM_ATTR (attrs.at_byte_stride);
  CHECKSUM_ATTR (attrs.at_const_value);
  CHECKSUM_ATTR (attrs.at_containing_type);
  CHECKSUM_ATTR (attrs.at_count);
  CHECKSUM_ATTR (attrs.at_data_location);
  CHECKSUM_ATTR (attrs.at_data_member_location);
  CHECKSUM_ATTR (attrs.at_decimal_scale);
  CHECKSUM_ATTR (attrs.at_decimal_sign);
  CHECKSUM_ATTR (attrs.at_default_value);
  CHECKSUM_ATTR (attrs.at_digit_count);
  CHECKSUM_ATTR (attrs.at_discr);
  CHECKSUM_ATTR (attrs.at_discr_list);
  CHECKSUM_ATTR (attrs.at_discr_value);
  CHECKSUM_ATTR (attrs.at_encoding);
  CHECKSUM_ATTR (attrs.at_endianity);
  CHECKSUM_ATTR (attrs.at_explicit);
  CHECKSUM_ATTR (attrs.at_is_optional);
  CHECKSUM_ATTR (attrs.at_location);
  CHECKSUM_ATTR (attrs.at_lower_bound);
  CHECKSUM_ATTR (attrs.at_mutable);
  CHECKSUM_ATTR (attrs.at_ordering);
  CHECKSUM_ATTR (attrs.at_picture_string);
  CHECKSUM_ATTR (attrs.at_prototyped);
  CHECKSUM_ATTR (attrs.at_small);
  CHECKSUM_ATTR (attrs.at_segment);
  CHECKSUM_ATTR (attrs.at_string_length);
  CHECKSUM_ATTR (attrs.at_string_length_bit_size);
  CHECKSUM_ATTR (attrs.at_string_length_byte_size);
  CHECKSUM_ATTR (attrs.at_threads_scaled);
  CHECKSUM_ATTR (attrs.at_upper_bound);
  CHECKSUM_ATTR (attrs.at_use_location);
  CHECKSUM_ATTR (attrs.at_use_UTF8);
  CHECKSUM_ATTR (attrs.at_variable_parameter);
  CHECKSUM_ATTR (attrs.at_virtuality);
  CHECKSUM_ATTR (attrs.at_visibility);
  CHECKSUM_ATTR (attrs.at_vtable_elem_location);
  CHECKSUM_ATTR (attrs.at_type);
  CHECKSUM_ATTR (attrs.at_friend);
  CHECKSUM_ATTR (attrs.at_alignment);

  /* Checksum the child DIEs.  die_child points at the LAST child and the
     sibling list is circular, hence the do/while starting at
     die_child->die_sib (the first child).  */
  c = die->die_child;
  if (c) do {
    dw_attr_node *name_attr;

    c = c->die_sib;
    name_attr = get_AT (die: c, attr_kind: DW_AT_name);
    if (is_template_instantiation (c))
      {
	/* Ignore instantiations of member type and function templates.  */
      }
    else if (name_attr != NULL
	     && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
      {
	/* Use a shallow checksum for named nested types and member
	   functions.  'S' tags a shallow (name-only) child record.  */
	CHECKSUM_ULEB128 ('S');
	CHECKSUM_ULEB128 (c->die_tag);
	CHECKSUM_STRING (AT_string (name_attr));
      }
    else
      {
	/* Use a deep checksum for other children.  */
	/* Mark this DIE so it gets processed when unmarking.  */
	if (c->die_mark == 0)
	  c->die_mark = -1;
	die_checksum_ordered (die: c, ctx, mark);
      }
  } while (c != die->die_child);

  /* Terminator for this DIE's record.  */
  CHECKSUM_ULEB128 (0);
}
| 7783 | |
/* Add a type name and tag to a hash.  Folds TAG and the unqualified NAME
   into CTX; this (together with the enclosing-context names) is all that
   the link-time ODR signature of a named type is computed from.  */
static void
die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
{
  CHECKSUM_ULEB128 (tag);
  CHECKSUM_STRING (name);
}
| 7791 | |
| 7792 | #undef CHECKSUM |
| 7793 | #undef CHECKSUM_STRING |
| 7794 | #undef CHECKSUM_ATTR |
| 7795 | #undef CHECKSUM_LEB128 |
| 7796 | #undef CHECKSUM_ULEB128 |
| 7797 | |
/* Generate the type signature for DIE.  This is computed by generating an
   MD5 checksum over the DIE's tag, its relevant attributes, and its
   children.  Attributes that are references to other DIEs are processed
   by recursion, using the MARK field to prevent infinite recursion.
   If the DIE is nested inside a namespace or another type, we also
   need to include that context in the signature.  The lower 64 bits
   of the resulting MD5 checksum comprise the signature.  */

static void
generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
{
  int mark;
  const char *name;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  dw_die_ref decl;
  dw_die_ref parent;

  name = get_AT_string (die, attr_kind: DW_AT_name);
  decl = get_AT_ref (die, attr_kind: DW_AT_specification);
  parent = dw_get_die_parent (die);

  /* First, compute a signature for just the type name (and its surrounding
     context, if any).  This is stored in the type unit DIE for link-time
     ODR (one-definition rule) checking.  */

  if (is_cxx () && name != NULL)
    {
      md5_init_ctx (ctx: &ctx);

      /* Checksum the names of surrounding namespaces and structures.  */
      if (parent != NULL)
        checksum_die_context (die: parent, ctx: &ctx);

      /* Checksum the current DIE.  */
      die_odr_checksum (tag: die->die_tag, name, ctx: &ctx);
      md5_finish_ctx (ctx: &ctx, resbuf: checksum);

      /* The ODR signature is the low 64 bits, i.e. the last 8 bytes of
	 the 16-byte MD5 digest.  */
      add_AT_data8 (die: type_node->root_die, attr_kind: DW_AT_GNU_odr_signature, data8: &checksum[8]);
    }

  /* Next, compute the complete type signature.  */

  md5_init_ctx (ctx: &ctx);
  mark = 1;
  /* Mark the root so reference cycles back to DIE terminate.  */
  die->die_mark = mark;

  /* Checksum the names of surrounding namespaces and structures.  */
  if (parent != NULL)
    checksum_die_context (die: parent, ctx: &ctx);

  /* Checksum the DIE and its children.  */
  die_checksum_ordered (die, ctx: &ctx, mark: &mark);
  unmark_all_dies (die);
  md5_finish_ctx (ctx: &ctx, resbuf: checksum);

  /* Store the signature in the type node and link the type DIE and the
     type node together.  The signature is the trailing
     DWARF_TYPE_SIGNATURE_SIZE bytes of the digest.  */
  memcpy (dest: type_node->signature, src: &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
          DWARF_TYPE_SIGNATURE_SIZE);
  die->comdat_type_p = true;
  die->die_id.die_type_node = type_node;
  type_node->type_die = die;

  /* If the DIE is a specification, link its declaration to the type node
     as well.  */
  if (decl != NULL)
    {
      decl->comdat_type_p = true;
      decl->die_id.die_type_node = type_node;
    }
}
| 7870 | |
| 7871 | /* Do the location expressions look same? */ |
| 7872 | static inline bool |
| 7873 | same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark) |
| 7874 | { |
| 7875 | return loc1->dw_loc_opc == loc2->dw_loc_opc |
| 7876 | && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark) |
| 7877 | && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark); |
| 7878 | } |
| 7879 | |
| 7880 | /* Do the values look the same? */ |
| 7881 | static bool |
| 7882 | same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark) |
| 7883 | { |
| 7884 | dw_loc_descr_ref loc1, loc2; |
| 7885 | rtx r1, r2; |
| 7886 | |
| 7887 | if (v1->val_class != v2->val_class) |
| 7888 | return false; |
| 7889 | |
| 7890 | switch (v1->val_class) |
| 7891 | { |
| 7892 | case dw_val_class_const: |
| 7893 | case dw_val_class_const_implicit: |
| 7894 | return v1->v.val_int == v2->v.val_int; |
| 7895 | case dw_val_class_unsigned_const: |
| 7896 | case dw_val_class_unsigned_const_implicit: |
| 7897 | return v1->v.val_unsigned == v2->v.val_unsigned; |
| 7898 | case dw_val_class_const_double: |
| 7899 | return v1->v.val_double.high == v2->v.val_double.high |
| 7900 | && v1->v.val_double.low == v2->v.val_double.low; |
| 7901 | case dw_val_class_wide_int: |
| 7902 | return *v1->v.val_wide == *v2->v.val_wide; |
| 7903 | case dw_val_class_vec: |
| 7904 | if (v1->v.val_vec.length != v2->v.val_vec.length |
| 7905 | || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size) |
| 7906 | return false; |
| 7907 | if (memcmp (s1: v1->v.val_vec.array, s2: v2->v.val_vec.array, |
| 7908 | n: v1->v.val_vec.length * v1->v.val_vec.elt_size)) |
| 7909 | return false; |
| 7910 | return true; |
| 7911 | case dw_val_class_flag: |
| 7912 | return v1->v.val_flag == v2->v.val_flag; |
| 7913 | case dw_val_class_str: |
| 7914 | return !strcmp (s1: v1->v.val_str->str, s2: v2->v.val_str->str); |
| 7915 | |
| 7916 | case dw_val_class_addr: |
| 7917 | r1 = v1->v.val_addr; |
| 7918 | r2 = v2->v.val_addr; |
| 7919 | if (GET_CODE (r1) != GET_CODE (r2)) |
| 7920 | return false; |
| 7921 | return !rtx_equal_p (r1, r2); |
| 7922 | |
| 7923 | case dw_val_class_offset: |
| 7924 | return v1->v.val_offset == v2->v.val_offset; |
| 7925 | |
| 7926 | case dw_val_class_loc: |
| 7927 | for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc; |
| 7928 | loc1 && loc2; |
| 7929 | loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next) |
| 7930 | if (!same_loc_p (loc1, loc2, mark)) |
| 7931 | return false; |
| 7932 | return !loc1 && !loc2; |
| 7933 | |
| 7934 | case dw_val_class_die_ref: |
| 7935 | return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark); |
| 7936 | |
| 7937 | case dw_val_class_symview: |
| 7938 | return strcmp (s1: v1->v.val_symbolic_view, s2: v2->v.val_symbolic_view) == 0; |
| 7939 | |
| 7940 | case dw_val_class_fde_ref: |
| 7941 | case dw_val_class_vms_delta: |
| 7942 | case dw_val_class_lbl_id: |
| 7943 | case dw_val_class_lineptr: |
| 7944 | case dw_val_class_macptr: |
| 7945 | case dw_val_class_loclistsptr: |
| 7946 | case dw_val_class_high_pc: |
| 7947 | return true; |
| 7948 | |
| 7949 | case dw_val_class_file: |
| 7950 | case dw_val_class_file_implicit: |
| 7951 | return v1->v.val_file == v2->v.val_file; |
| 7952 | |
| 7953 | case dw_val_class_data8: |
| 7954 | return !memcmp (s1: v1->v.val_data8, s2: v2->v.val_data8, n: 8); |
| 7955 | |
| 7956 | default: |
| 7957 | return true; |
| 7958 | } |
| 7959 | } |
| 7960 | |
| 7961 | /* Do the attributes look the same? */ |
| 7962 | |
| 7963 | static bool |
| 7964 | same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark) |
| 7965 | { |
| 7966 | if (at1->dw_attr != at2->dw_attr) |
| 7967 | return false; |
| 7968 | |
| 7969 | /* We don't care that this was compiled with a different compiler |
| 7970 | snapshot; if the output is the same, that's what matters. */ |
| 7971 | if (at1->dw_attr == DW_AT_producer) |
| 7972 | return true; |
| 7973 | |
| 7974 | return same_dw_val_p (v1: &at1->dw_attr_val, v2: &at2->dw_attr_val, mark); |
| 7975 | } |
| 7976 | |
/* Do the dies look the same?  Compares tag, attribute lists, and children
   recursively.  Marks both DIEs with matching values from *MARK as it
   goes; an early false return leaves marks set, so the caller is
   responsible for unmarking afterwards.  */

static bool
same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
{
  dw_die_ref c1, c2;
  dw_attr_node *a1;
  unsigned ix;

  /* To avoid infinite recursion.  A previously visited pair is "same"
     exactly when it was marked with the same value.  */
  if (die1->die_mark)
    return die1->die_mark == die2->die_mark;
  die1->die_mark = die2->die_mark = ++(*mark);

  if (die1->die_tag != die2->die_tag)
    return false;

  if (vec_safe_length (v: die1->die_attr) != vec_safe_length (v: die2->die_attr))
    return false;

  /* Lengths match, so comparing position-by-position is safe.  */
  FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
    if (!same_attr_p (at1: a1, at2: &(*die2->die_attr)[ix], mark))
      return false;

  /* Walk both circular child lists in lockstep; they must agree
     element-wise and end together.  */
  c1 = die1->die_child;
  c2 = die2->die_child;
  if (! c1)
    {
      if (c2)
	return false;
    }
  else
    for (;;)
      {
	if (!same_die_p (die1: c1, die2: c2, mark))
	  return false;
	c1 = c1->die_sib;
	c2 = c2->die_sib;
	if (c1 == die1->die_child)
	  {
	    if (c2 == die2->die_child)
	      break;
	    else
	      return false;
	  }
      }

  return true;
}
| 8026 | |
/* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
   children, and set die_symbol.  The symbol is the (cleaned) basename of
   the unit, followed by '.' and the first four checksum bytes as eight
   hex digits.  */

static void
compute_comp_unit_symbol (dw_die_ref unit_die)
{
  const char *die_name = get_AT_string (die: unit_die, attr_kind: DW_AT_name);
  const char *base = die_name ? lbasename (die_name) : "anonymous" ;
  /* 64 bytes of slack covers the optional 'g', the '.', the 8 hex digits
     and the terminating NUL.  */
  char *name = XALLOCAVEC (char, strlen (base) + 64);
  char *p;
  int i, mark;
  unsigned char checksum[16];
  struct md5_ctx ctx;

  /* Compute the checksum of the DIE, then append part of it as hex digits to
     the name filename of the unit.  */

  md5_init_ctx (ctx: &ctx);
  mark = 0;
  die_checksum (die: unit_die, ctx: &ctx, mark: &mark);
  unmark_all_dies (unit_die);
  md5_finish_ctx (ctx: &ctx, resbuf: checksum);

  /* When we do this for comp_unit_die () we have a DW_AT_name that might
     not start with a letter but with anything valid for filenames and
     clean_symbol_name doesn't fix that up.  Prepend 'g' if the first
     character is not a letter.  */
  sprintf (s: name, format: "%s%s." , ISALPHA (*base) ? "" : "g" , base);
  clean_symbol_name (name);

  /* Append the first four checksum bytes as hex digits.  */
  p = name + strlen (s: name);
  for (i = 0; i < 4; i++)
    {
      sprintf (s: p, format: "%.2x" , checksum[i]);
      p += 2;
    }

  unit_die->die_id.die_symbol = xstrdup (name);
}
| 8066 | |
| 8067 | /* Returns true if DIE represents a type, in the sense of TYPE_P. */ |
| 8068 | |
| 8069 | static bool |
| 8070 | is_type_die (dw_die_ref die) |
| 8071 | { |
| 8072 | switch (die->die_tag) |
| 8073 | { |
| 8074 | case DW_TAG_array_type: |
| 8075 | case DW_TAG_class_type: |
| 8076 | case DW_TAG_interface_type: |
| 8077 | case DW_TAG_enumeration_type: |
| 8078 | case DW_TAG_pointer_type: |
| 8079 | case DW_TAG_reference_type: |
| 8080 | case DW_TAG_rvalue_reference_type: |
| 8081 | case DW_TAG_string_type: |
| 8082 | case DW_TAG_structure_type: |
| 8083 | case DW_TAG_subroutine_type: |
| 8084 | case DW_TAG_union_type: |
| 8085 | case DW_TAG_ptr_to_member_type: |
| 8086 | case DW_TAG_set_type: |
| 8087 | case DW_TAG_subrange_type: |
| 8088 | case DW_TAG_base_type: |
| 8089 | case DW_TAG_const_type: |
| 8090 | case DW_TAG_file_type: |
| 8091 | case DW_TAG_packed_type: |
| 8092 | case DW_TAG_volatile_type: |
| 8093 | case DW_TAG_typedef: |
| 8094 | return true; |
| 8095 | default: |
| 8096 | return false; |
| 8097 | } |
| 8098 | } |
| 8099 | |
| 8100 | /* Returns true iff C is a compile-unit DIE. */ |
| 8101 | |
| 8102 | static inline bool |
| 8103 | is_cu_die (dw_die_ref c) |
| 8104 | { |
| 8105 | return c && (c->die_tag == DW_TAG_compile_unit |
| 8106 | || c->die_tag == DW_TAG_skeleton_unit); |
| 8107 | } |
| 8108 | |
| 8109 | /* Returns true iff C is a unit DIE of some sort. */ |
| 8110 | |
| 8111 | static inline bool |
| 8112 | is_unit_die (dw_die_ref c) |
| 8113 | { |
| 8114 | return c && (c->die_tag == DW_TAG_compile_unit |
| 8115 | || c->die_tag == DW_TAG_partial_unit |
| 8116 | || c->die_tag == DW_TAG_type_unit |
| 8117 | || c->die_tag == DW_TAG_skeleton_unit); |
| 8118 | } |
| 8119 | |
| 8120 | /* Returns true iff C is a namespace DIE. */ |
| 8121 | |
| 8122 | static inline bool |
| 8123 | is_namespace_die (dw_die_ref c) |
| 8124 | { |
| 8125 | return c && c->die_tag == DW_TAG_namespace; |
| 8126 | } |
| 8127 | |
| 8128 | /* Return true if this DIE is a template parameter. */ |
| 8129 | |
| 8130 | static inline bool |
| 8131 | is_template_parameter (dw_die_ref die) |
| 8132 | { |
| 8133 | switch (die->die_tag) |
| 8134 | { |
| 8135 | case DW_TAG_template_type_param: |
| 8136 | case DW_TAG_template_value_param: |
| 8137 | case DW_TAG_GNU_template_template_param: |
| 8138 | case DW_TAG_GNU_template_parameter_pack: |
| 8139 | return true; |
| 8140 | default: |
| 8141 | return false; |
| 8142 | } |
| 8143 | } |
| 8144 | |
| 8145 | /* Return true if this DIE represents a template instantiation. */ |
| 8146 | |
| 8147 | static inline bool |
| 8148 | is_template_instantiation (dw_die_ref die) |
| 8149 | { |
| 8150 | dw_die_ref c; |
| 8151 | |
| 8152 | if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram) |
| 8153 | return false; |
| 8154 | FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true); |
| 8155 | return false; |
| 8156 | } |
| 8157 | |
/* Generate a fresh internal label name using PREFIX and the file-scope
   label_num counter (incremented here).  Returns a heap-allocated copy
   (xstrdup); ownership passes to the caller.  */
static char *
gen_internal_sym (const char *prefix)
{
  char buf[MAX_ARTIFICIAL_LABEL_BYTES];

  ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
  return xstrdup (buf);
}
| 8166 | |
| 8167 | /* Return true if this DIE is a declaration. */ |
| 8168 | |
| 8169 | static bool |
| 8170 | is_declaration_die (dw_die_ref die) |
| 8171 | { |
| 8172 | dw_attr_node *a; |
| 8173 | unsigned ix; |
| 8174 | |
| 8175 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
| 8176 | if (a->dw_attr == DW_AT_declaration) |
| 8177 | return true; |
| 8178 | |
| 8179 | return false; |
| 8180 | } |
| 8181 | |
| 8182 | /* Return true if this DIE is nested inside a subprogram. */ |
| 8183 | |
| 8184 | static bool |
| 8185 | is_nested_in_subprogram (dw_die_ref die) |
| 8186 | { |
| 8187 | dw_die_ref decl = get_AT_ref (die, attr_kind: DW_AT_specification); |
| 8188 | |
| 8189 | if (decl == NULL) |
| 8190 | decl = die; |
| 8191 | return local_scope_p (decl); |
| 8192 | } |
| 8193 | |
| 8194 | /* Return true if this DIE contains a defining declaration of a |
| 8195 | subprogram. */ |
| 8196 | |
| 8197 | static bool |
| 8198 | contains_subprogram_definition (dw_die_ref die) |
| 8199 | { |
| 8200 | dw_die_ref c; |
| 8201 | |
| 8202 | if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die)) |
| 8203 | return true; |
| 8204 | FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1); |
| 8205 | return false; |
| 8206 | } |
| 8207 | |
/* Return true if this is a type DIE that should be moved to a
   COMDAT .debug_types section or .debug_info section with DW_UT_*type
   unit type.  Only class-like and enumeration types qualify; every other
   tag (listed explicitly below for documentation) stays in place.  */

static bool
should_move_die_to_comdat (dw_die_ref die)
{
  switch (die->die_tag)
    {
    case DW_TAG_class_type:
    case DW_TAG_structure_type:
    case DW_TAG_enumeration_type:
    case DW_TAG_union_type:
      /* Don't move declarations, inlined instances, types nested in a
	 subprogram, or types that contain subprogram definitions.  */
      if (is_declaration_die (die)
	  || get_AT (die, attr_kind: DW_AT_abstract_origin)
	  || is_nested_in_subprogram (die)
	  || contains_subprogram_definition (die))
	return false;
      if (die->die_tag != DW_TAG_enumeration_type)
	{
	  /* Don't move non-constant size aggregates: DW_AT_byte_size must
	     be present and be a plain unsigned constant.  */
	  dw_attr_node *sz = get_AT (die, attr_kind: DW_AT_byte_size);
	  if (sz == NULL
	      || (AT_class (a: sz) != dw_val_class_unsigned_const
		  && AT_class (a: sz) != dw_val_class_unsigned_const_implicit))
	    return false;
	}
      return true;
    /* All other type tags deliberately stay in the main unit.  */
    case DW_TAG_array_type:
    case DW_TAG_interface_type:
    case DW_TAG_pointer_type:
    case DW_TAG_reference_type:
    case DW_TAG_rvalue_reference_type:
    case DW_TAG_string_type:
    case DW_TAG_subroutine_type:
    case DW_TAG_ptr_to_member_type:
    case DW_TAG_set_type:
    case DW_TAG_subrange_type:
    case DW_TAG_base_type:
    case DW_TAG_const_type:
    case DW_TAG_file_type:
    case DW_TAG_packed_type:
    case DW_TAG_volatile_type:
    case DW_TAG_typedef:
    default:
      return false;
    }
}
| 8258 | |
| 8259 | /* Make a clone of DIE. */ |
| 8260 | |
| 8261 | static dw_die_ref |
| 8262 | clone_die (dw_die_ref die) |
| 8263 | { |
| 8264 | dw_die_ref clone = new_die_raw (tag_value: die->die_tag); |
| 8265 | dw_attr_node *a; |
| 8266 | unsigned ix; |
| 8267 | |
| 8268 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
| 8269 | add_dwarf_attr (die: clone, attr: a); |
| 8270 | |
| 8271 | return clone; |
| 8272 | } |
| 8273 | |
| 8274 | /* Make a clone of the tree rooted at DIE. */ |
| 8275 | |
| 8276 | static dw_die_ref |
| 8277 | clone_tree (dw_die_ref die) |
| 8278 | { |
| 8279 | dw_die_ref c; |
| 8280 | dw_die_ref clone = clone_die (die); |
| 8281 | |
| 8282 | FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c))); |
| 8283 | |
| 8284 | return clone; |
| 8285 | } |
| 8286 | |
/* Make a clone of DIE as a declaration: a copy that carries only the
   identity-establishing attributes plus DW_AT_declaration, suitable for
   leaving behind when the full definition moves to a comdat unit.  */

static dw_die_ref
clone_as_declaration (dw_die_ref die)
{
  dw_die_ref clone;
  dw_die_ref decl;
  dw_attr_node *a;
  unsigned ix;

  /* If the DIE is already a declaration, just clone it.  */
  if (is_declaration_die (die))
    return clone_die (die);

  /* If the DIE is a specification, just clone its declaration DIE.  */
  decl = get_AT_ref (die, attr_kind: DW_AT_specification);
  if (decl != NULL)
    {
      clone = clone_die (die: decl);
      if (die->comdat_type_p)
        add_AT_die_ref (die: clone, attr_kind: DW_AT_signature, targ_die: die);
      return clone;
    }

  clone = new_die_raw (tag_value: die->die_tag);

  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    {
      /* We don't want to copy over all attributes.
	 For example we don't want DW_AT_byte_size because otherwise we will no
	 longer have a declaration and GDB will treat it as a definition.  */

      switch (a->dw_attr)
	{
	/* Whitelist of attributes a bare declaration keeps.  */
	case DW_AT_abstract_origin:
	case DW_AT_artificial:
	case DW_AT_containing_type:
	case DW_AT_external:
	case DW_AT_name:
	case DW_AT_type:
	case DW_AT_virtuality:
	case DW_AT_linkage_name:
	case DW_AT_MIPS_linkage_name:
	  add_dwarf_attr (die: clone, attr: a);
	  break;
	/* Everything else (byte size, alignment, ...) is dropped.  */
	case DW_AT_byte_size:
	case DW_AT_alignment:
	default:
	  break;
	}
    }

  if (die->comdat_type_p)
    add_AT_die_ref (die: clone, attr_kind: DW_AT_signature, targ_die: die);

  add_AT_flag (die: clone, attr_kind: DW_AT_declaration, flag: 1);
  return clone;
}
| 8345 | |
| 8346 | |
/* Structure to map a DIE in one CU to its copy in a comdat type unit.  */

struct decl_table_entry
{
  /* The DIE in the original compile unit (the hash key).  */
  dw_die_ref orig;
  /* Its copy in the comdat type unit; NULL while the copy is still
     being constructed (see copy_ancestor_tree).  */
  dw_die_ref copy;
};
| 8354 | |
/* Helpers to manipulate hash table of copied declarations.  */

/* Hashtable helpers.  */

/* Entries are keyed on the original DIE, so lookups can be done
   directly with a die_struct pointer (see compare_type).  */
struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
{
  typedef die_struct *compare_type;
  static inline hashval_t hash (const decl_table_entry *);
  static inline bool equal (const decl_table_entry *, const die_struct *);
};
| 8365 | |
/* Hash an entry by the address of its original DIE.  */

inline hashval_t
decl_table_entry_hasher::hash (const decl_table_entry *entry)
{
  return htab_hash_pointer (entry->orig);
}
| 8371 | |
/* An entry matches when its original DIE is the lookup key.  */

inline bool
decl_table_entry_hasher::equal (const decl_table_entry *entry1,
                                const die_struct *entry2)
{
  return entry1->orig == entry2;
}
| 8378 | |
/* Hash table mapping original DIEs to their copies in a type unit.  */
typedef hash_table<decl_table_entry_hasher> decl_hash_type;
| 8380 | |
/* Copy DIE and its ancestors, up to, but not including, the compile unit
   or type unit entry, to a new tree.  Adds the new tree to UNIT and returns
   a pointer to the copy of DIE.  If DECL_TABLE is provided, it is used
   to check if the ancestor has already been copied into UNIT.  */

static dw_die_ref
copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
                    decl_hash_type *decl_table)
{
  dw_die_ref parent = die->die_parent;
  dw_die_ref new_parent = unit;
  dw_die_ref copy;
  decl_table_entry **slot = NULL;
  struct decl_table_entry *entry = NULL;

  /* If DIE refers to a stub unfold that so we get the appropriate
     DIE registered as orig in decl_table.  */
  if (dw_die_ref c = get_AT_ref (die, attr_kind: DW_AT_signature))
    die = c;

  if (decl_table)
    {
      /* Check if the entry has already been copied to UNIT.  */
      slot = decl_table->find_slot_with_hash (comparable: die, hash: htab_hash_pointer (die),
                                              insert: INSERT);
      if (*slot != HTAB_EMPTY_ENTRY)
        {
          entry = *slot;
          return entry->copy;
        }

      /* Record in DECL_TABLE that DIE has been copied to UNIT.  The copy
	 field is filled in below, after the copy actually exists; the
	 entry is created first so the recursion terminates.  */
      entry = XCNEW (struct decl_table_entry);
      entry->orig = die;
      entry->copy = NULL;
      *slot = entry;
    }

  if (parent != NULL)
    {
      /* A parent that is itself a specification stands in for its
	 declaration DIE; climb from the declaration instead.  */
      dw_die_ref spec = get_AT_ref (die: parent, attr_kind: DW_AT_specification);
      if (spec != NULL)
        parent = spec;
      /* Recurse until the unit DIE is reached; ancestors at or above
	 the unit are not copied.  */
      if (!is_unit_die (c: parent))
        new_parent = copy_ancestor_tree (unit, die: parent, decl_table);
    }

  copy = clone_as_declaration (die);
  add_child_die (die: new_parent, child_die: copy);

  if (decl_table)
    {
      /* Record the pointer to the copy.  */
      entry->copy = copy;
    }

  return copy;
}
/* Copy the declaration context to the new type unit DIE.  This includes
   any surrounding namespace or type declarations.  If the DIE has an
   AT_specification attribute, it also includes attributes and children
   attached to the specification, and returns a pointer to the original
   parent of the declaration DIE.  Returns NULL otherwise.  */

static dw_die_ref
copy_declaration_context (dw_die_ref unit, dw_die_ref die)
{
  dw_die_ref decl;
  dw_die_ref new_decl;
  dw_die_ref orig_parent = NULL;

  decl = get_AT_ref (die, attr_kind: DW_AT_specification);
  if (decl == NULL)
    decl = die;
  else
    {
      unsigned ix;
      dw_die_ref c;
      dw_attr_node *a;

      /* The original DIE will be changed to a declaration, and must
         be moved to be a child of the original declaration DIE.  */
      orig_parent = decl->die_parent;

      /* Copy the type node pointer from the new DIE to the original
         declaration DIE so we can forward references later.  */
      decl->comdat_type_p = true;
      decl->die_id.die_type_node = die->die_id.die_type_node;

      /* DIE absorbs the declaration's attributes and children below, so
	 it no longer needs to point back at the declaration.  */
      remove_AT (die, attr_kind: DW_AT_specification);

      /* Merge the declaration's attributes into DIE, except those that
	 only make sense on the declaration itself.  */
      FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
        {
          if (a->dw_attr != DW_AT_name
              && a->dw_attr != DW_AT_declaration
              && a->dw_attr != DW_AT_external)
            add_dwarf_attr (die, attr: a);
        }

      /* Likewise clone the declaration's children into DIE.  */
      FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
    }

  /* If the declaration is nested inside a namespace or type, copy that
     surrounding context into the type unit and point DIE at it.  */
  if (decl->die_parent != NULL
      && !is_unit_die (c: decl->die_parent))
    {
      new_decl = copy_ancestor_tree (unit, die: decl, NULL);
      if (new_decl != NULL)
        {
	  /* The copied context is a plain declaration; it must not refer
	     back to a type unit via a signature.  */
          remove_AT (die: new_decl, attr_kind: DW_AT_signature);
          add_AT_specification (die, targ_die: new_decl);
        }
    }

  return orig_parent;
}
| 8496 | |
| 8497 | /* Generate the skeleton ancestor tree for the given NODE, then clone |
| 8498 | the DIE and add the clone into the tree. */ |
| 8499 | |
| 8500 | static void |
| 8501 | generate_skeleton_ancestor_tree (skeleton_chain_node *node) |
| 8502 | { |
| 8503 | if (node->new_die != NULL) |
| 8504 | return; |
| 8505 | |
| 8506 | node->new_die = clone_as_declaration (die: node->old_die); |
| 8507 | |
| 8508 | if (node->parent != NULL) |
| 8509 | { |
| 8510 | generate_skeleton_ancestor_tree (node: node->parent); |
| 8511 | add_child_die (die: node->parent->new_die, child_die: node->new_die); |
| 8512 | } |
| 8513 | } |
| 8514 | |
/* Generate a skeleton tree of DIEs containing any declarations that are
   found in the original tree.  We traverse the tree looking for declaration
   DIEs, and construct the skeleton from the bottom up whenever we find one.  */

static void
generate_skeleton_bottom_up (skeleton_chain_node *parent)
{
  skeleton_chain_node node;
  dw_die_ref c;
  dw_die_ref first;
  dw_die_ref prev = NULL;
  dw_die_ref next = NULL;

  node.parent = parent;

  /* Walk the child list by hand (rather than with FOR_EACH_CHILD)
     because the loop body may remove or replace the current child;
     PREV and NEXT let the iteration survive that.  The termination
     test against FIRST suggests the sibling chain wraps around --
     NOTE(review): confirm die_sib is circular.  */
  first = c = parent->old_die->die_child;
  if (c)
    next = c->die_sib;
  if (c) do {
    if (prev == NULL || prev->die_sib == c)
      prev = c;
    c = next;
    next = (c == first ? NULL : c->die_sib);
    node.old_die = c;
    node.new_die = NULL;
    if (is_declaration_die (die: c))
      {
        if (is_template_instantiation (die: c))
          {
            /* Instantiated templates do not need to be cloned into the
               type unit.  Just move the DIE and its children back to
               the skeleton tree (in the main CU).  */
            remove_child_with_prev (child: c, prev);
            generate_skeleton_ancestor_tree (node: parent);
            add_child_die (die: parent->new_die, child_die: c);
	    /* C was unlinked from this child list; resume from PREV.  */
            c = prev;
          }
        else if (c->comdat_type_p)
          {
            /* This is the skeleton of earlier break_out_comdat_types
               type.  Clone the existing DIE, but keep the children
               under the original (which is in the main CU).  */
            dw_die_ref clone = clone_die (die: c);

            replace_child (old_child: c, new_child: clone, prev);
            generate_skeleton_ancestor_tree (node: parent);
            add_child_die (die: parent->new_die, child_die: c);
            c = clone;
	    /* Skip the recursion below for this child.  */
            continue;
          }
        else
          {
            /* Clone the existing DIE, move the original to the skeleton
               tree (which is in the main CU), and put the clone, with
               all the original's children, where the original came from
               (which is about to be moved to the type unit).  */
            dw_die_ref clone = clone_die (die: c);
            move_all_children (old_parent: c, new_parent: clone);

            /* If the original has a DW_AT_object_pointer attribute,
               it would now point to a child DIE just moved to the
               cloned tree, so we need to remove that attribute from
               the original.  */
            remove_AT (die: c, attr_kind: DW_AT_object_pointer);

            replace_child (old_child: c, new_child: clone, prev);
            generate_skeleton_ancestor_tree (node: parent);
            add_child_die (die: parent->new_die, child_die: c);
            node.old_die = clone;
            node.new_die = c;
            c = clone;
          }
      }
    /* Recurse into the (possibly replaced) child's subtree.  */
    generate_skeleton_bottom_up (parent: &node);
  } while (next != NULL);
}
| 8591 | |
| 8592 | /* Wrapper function for generate_skeleton_bottom_up. */ |
| 8593 | |
| 8594 | static dw_die_ref |
| 8595 | generate_skeleton (dw_die_ref die) |
| 8596 | { |
| 8597 | skeleton_chain_node node; |
| 8598 | |
| 8599 | node.old_die = die; |
| 8600 | node.new_die = NULL; |
| 8601 | node.parent = NULL; |
| 8602 | |
| 8603 | /* If this type definition is nested inside another type, |
| 8604 | and is not an instantiation of a template, always leave |
| 8605 | at least a declaration in its place. */ |
| 8606 | if (die->die_parent != NULL |
| 8607 | && is_type_die (die: die->die_parent) |
| 8608 | && !is_template_instantiation (die)) |
| 8609 | node.new_die = clone_as_declaration (die); |
| 8610 | |
| 8611 | generate_skeleton_bottom_up (parent: &node); |
| 8612 | return node.new_die; |
| 8613 | } |
| 8614 | |
| 8615 | /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned |
| 8616 | declaration. The original DIE is moved to a new compile unit so that |
| 8617 | existing references to it follow it to the new location. If any of the |
| 8618 | original DIE's descendants is a declaration, we need to replace the |
| 8619 | original DIE with a skeleton tree and move the declarations back into the |
| 8620 | skeleton tree. */ |
| 8621 | |
| 8622 | static dw_die_ref |
| 8623 | remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child, |
| 8624 | dw_die_ref prev) |
| 8625 | { |
| 8626 | dw_die_ref skeleton, orig_parent; |
| 8627 | |
| 8628 | /* Copy the declaration context to the type unit DIE. If the returned |
| 8629 | ORIG_PARENT is not NULL, the skeleton needs to be added as a child of |
| 8630 | that DIE. */ |
| 8631 | orig_parent = copy_declaration_context (unit, die: child); |
| 8632 | |
| 8633 | skeleton = generate_skeleton (die: child); |
| 8634 | if (skeleton == NULL) |
| 8635 | remove_child_with_prev (child, prev); |
| 8636 | else |
| 8637 | { |
| 8638 | skeleton->comdat_type_p = true; |
| 8639 | skeleton->die_id.die_type_node = child->die_id.die_type_node; |
| 8640 | |
| 8641 | /* If the original DIE was a specification, we need to put |
| 8642 | the skeleton under the parent DIE of the declaration. |
| 8643 | This leaves the original declaration in the tree, but |
| 8644 | it will be pruned later since there are no longer any |
| 8645 | references to it. */ |
| 8646 | if (orig_parent != NULL) |
| 8647 | { |
| 8648 | remove_child_with_prev (child, prev); |
| 8649 | add_child_die (die: orig_parent, child_die: skeleton); |
| 8650 | } |
| 8651 | else |
| 8652 | replace_child (old_child: child, new_child: skeleton, prev); |
| 8653 | } |
| 8654 | |
| 8655 | return skeleton; |
| 8656 | } |
| 8657 | |
/* Forward declaration: copy_dwarf_procedure and
   copy_dwarf_procs_ref_in_attrs are mutually recursive.  */
static void
copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
                               comdat_type_node *type_node,
                               hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
| 8662 | |
| 8663 | /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF |
| 8664 | procedure, put it under TYPE_NODE and return the copy. Continue looking for |
| 8665 | DWARF procedure references in the DW_AT_location attribute. */ |
| 8666 | |
| 8667 | static dw_die_ref |
| 8668 | copy_dwarf_procedure (dw_die_ref die, |
| 8669 | comdat_type_node *type_node, |
| 8670 | hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs) |
| 8671 | { |
| 8672 | gcc_assert (die->die_tag == DW_TAG_dwarf_procedure); |
| 8673 | |
| 8674 | /* DWARF procedures are not supposed to have children... */ |
| 8675 | gcc_assert (die->die_child == NULL); |
| 8676 | |
| 8677 | /* ... and they are supposed to have only one attribute: DW_AT_location. */ |
| 8678 | gcc_assert (vec_safe_length (die->die_attr) == 1 |
| 8679 | && ((*die->die_attr)[0].dw_attr == DW_AT_location)); |
| 8680 | |
| 8681 | /* Do not copy more than once DWARF procedures. */ |
| 8682 | bool existed; |
| 8683 | dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (k: die, existed: &existed); |
| 8684 | if (existed) |
| 8685 | return die_copy; |
| 8686 | |
| 8687 | die_copy = clone_die (die); |
| 8688 | add_child_die (die: type_node->root_die, child_die: die_copy); |
| 8689 | copy_dwarf_procs_ref_in_attrs (die: die_copy, type_node, copied_dwarf_procs); |
| 8690 | return die_copy; |
| 8691 | } |
| 8692 | |
| 8693 | /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF |
| 8694 | procedures in DIE's attributes. */ |
| 8695 | |
| 8696 | static void |
| 8697 | copy_dwarf_procs_ref_in_attrs (dw_die_ref die, |
| 8698 | comdat_type_node *type_node, |
| 8699 | hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs) |
| 8700 | { |
| 8701 | dw_attr_node *a; |
| 8702 | unsigned i; |
| 8703 | |
| 8704 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a) |
| 8705 | { |
| 8706 | dw_loc_descr_ref loc; |
| 8707 | |
| 8708 | if (a->dw_attr_val.val_class != dw_val_class_loc) |
| 8709 | continue; |
| 8710 | |
| 8711 | for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next) |
| 8712 | { |
| 8713 | switch (loc->dw_loc_opc) |
| 8714 | { |
| 8715 | case DW_OP_call2: |
| 8716 | case DW_OP_call4: |
| 8717 | case DW_OP_call_ref: |
| 8718 | gcc_assert (loc->dw_loc_oprnd1.val_class |
| 8719 | == dw_val_class_die_ref); |
| 8720 | loc->dw_loc_oprnd1.v.val_die_ref.die |
| 8721 | = copy_dwarf_procedure (die: loc->dw_loc_oprnd1.v.val_die_ref.die, |
| 8722 | type_node, |
| 8723 | copied_dwarf_procs); |
| 8724 | |
| 8725 | default: |
| 8726 | break; |
| 8727 | } |
| 8728 | } |
| 8729 | } |
| 8730 | } |
| 8731 | |
| 8732 | /* Copy DWARF procedures that are referenced by the DIE tree to TREE_NODE and |
| 8733 | rewrite references to point to the copies. |
| 8734 | |
| 8735 | References are looked for in DIE's attributes and recursively in all its |
| 8736 | children attributes that are location descriptions. COPIED_DWARF_PROCS is a |
| 8737 | mapping from old DWARF procedures to their copy. It is used not to copy |
| 8738 | twice the same DWARF procedure under TYPE_NODE. */ |
| 8739 | |
| 8740 | static void |
| 8741 | copy_dwarf_procs_ref_in_dies (dw_die_ref die, |
| 8742 | comdat_type_node *type_node, |
| 8743 | hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs) |
| 8744 | { |
| 8745 | dw_die_ref c; |
| 8746 | |
| 8747 | copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs); |
| 8748 | FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c, |
| 8749 | type_node, |
| 8750 | copied_dwarf_procs)); |
| 8751 | } |
| 8752 | |
/* Traverse the DIE and set up additional .debug_types or .debug_info
   DW_UT_*type sections for each type worthy of being placed in a COMDAT
   section.  */

static void
break_out_comdat_types (dw_die_ref die)
{
  dw_die_ref c;
  dw_die_ref first;
  dw_die_ref prev = NULL;
  dw_die_ref next = NULL;
  dw_die_ref unit = NULL;

  /* Iterate the child list by hand so the current child can be removed
     or replaced mid-walk; PREV and NEXT keep the iteration valid.  The
     comparison with FIRST terminates the walk.  */
  first = c = die->die_child;
  if (c)
    next = c->die_sib;
  if (c) do {
    if (prev == NULL || prev->die_sib == c)
      prev = c;
    c = next;
    next = (c == first ? NULL : c->die_sib);
    if (should_move_die_to_comdat (die: c))
      {
        dw_die_ref replacement;
        comdat_type_node *type_node;

        /* Break out nested types into their own type units.  */
        break_out_comdat_types (die: c);

        /* Create a new type unit DIE as the root for the new tree.  */
        unit = new_die (tag_value: DW_TAG_type_unit, NULL, NULL);
	/* The type unit inherits the language attributes of the CU.  */
        add_AT_unsigned (die: unit, attr_kind: DW_AT_language,
                         unsigned_val: get_AT_unsigned (die: comp_unit_die (), attr_kind: DW_AT_language));
        if (unsigned lname = get_AT_unsigned (die: comp_unit_die (),
                                              attr_kind: DW_AT_language_name))
          {
            add_AT_unsigned (die: unit, attr_kind: DW_AT_language_name, unsigned_val: lname);
            add_AT_unsigned (die: unit, attr_kind: DW_AT_language_version,
                             unsigned_val: get_AT_unsigned (die: comp_unit_die (),
                                                  attr_kind: DW_AT_language_version));
          }

        /* Add the new unit's type DIE into the comdat type list.  */
        type_node = ggc_cleared_alloc<comdat_type_node> ();
        type_node->root_die = unit;
        type_node->next = comdat_type_list;
        comdat_type_list = type_node;

        /* Generate the type signature.  */
        generate_type_signature (die: c, type_node);

        /* Copy the declaration context, attributes, and children of the
           declaration into the new type unit DIE, then remove this DIE
           from the main CU (or replace it with a skeleton if necessary).  */
        replacement = remove_child_or_replace_with_skeleton (unit, child: c, prev);
        type_node->skeleton_die = replacement;

        /* Add the DIE to the new compunit.  */
        add_child_die (die: unit, child_die: c);

        /* Types can reference DWARF procedures for type size or data location
           expressions.  Calls in DWARF expressions cannot target procedures
           that are not in the same section.  So we must copy DWARF procedures
           along with this type and then rewrite references to them.  */
        hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
        copy_dwarf_procs_ref_in_dies (die: c, type_node, copied_dwarf_procs);

	/* Continue walking from the skeleton left in the main CU,
	   if one was created.  */
        if (replacement != NULL)
          c = replacement;
      }
    else if (c->die_tag == DW_TAG_namespace
             || c->die_tag == DW_TAG_class_type
             || c->die_tag == DW_TAG_structure_type
             || c->die_tag == DW_TAG_union_type)
      {
        /* Look for nested types that can be broken out.  */
        break_out_comdat_types (die: c);
      }
  } while (next != NULL);
}
| 8833 | |
| 8834 | /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations. |
| 8835 | Enter all the cloned children into the hash table decl_table. */ |
| 8836 | |
| 8837 | static dw_die_ref |
| 8838 | clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table) |
| 8839 | { |
| 8840 | dw_die_ref c; |
| 8841 | dw_die_ref clone; |
| 8842 | struct decl_table_entry *entry; |
| 8843 | decl_table_entry **slot; |
| 8844 | |
| 8845 | if (die->die_tag == DW_TAG_subprogram) |
| 8846 | clone = clone_as_declaration (die); |
| 8847 | else |
| 8848 | clone = clone_die (die); |
| 8849 | |
| 8850 | slot = decl_table->find_slot_with_hash (comparable: die, |
| 8851 | hash: htab_hash_pointer (die), insert: INSERT); |
| 8852 | |
| 8853 | /* Assert that DIE isn't in the hash table yet. If it would be there |
| 8854 | before, the ancestors would be necessarily there as well, therefore |
| 8855 | clone_tree_partial wouldn't be called. */ |
| 8856 | gcc_assert (*slot == HTAB_EMPTY_ENTRY); |
| 8857 | |
| 8858 | entry = XCNEW (struct decl_table_entry); |
| 8859 | entry->orig = die; |
| 8860 | entry->copy = clone; |
| 8861 | *slot = entry; |
| 8862 | |
| 8863 | if (die->die_tag != DW_TAG_subprogram) |
| 8864 | FOR_EACH_CHILD (die, c, |
| 8865 | add_child_die (clone, clone_tree_partial (c, decl_table))); |
| 8866 | |
| 8867 | return clone; |
| 8868 | } |
| 8869 | |
/* Walk the DIE and its children, looking for references to incomplete
   or trivial types that are unmarked (i.e., that are not in the current
   type_unit).  */

static void
copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
{
  dw_die_ref c;
  dw_attr_node *a;
  unsigned ix;

  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    {
      if (AT_class (a) == dw_val_class_die_ref)
        {
          dw_die_ref targ = AT_ref (a);
          decl_table_entry **slot;
          struct decl_table_entry *entry;

	  /* Marked DIEs are already in this unit, and comdat types are
	     referenced externally; neither needs a local copy.  */
          if (targ->die_mark != 0 || targ->comdat_type_p)
            continue;

          slot = decl_table->find_slot_with_hash (comparable: targ,
                                                  hash: htab_hash_pointer (targ),
                                                  insert: INSERT);

          if (*slot != HTAB_EMPTY_ENTRY)
            {
              /* TARG has already been copied, so we just need to
                 modify the reference to point to the copy.  */
              entry = *slot;
              a->dw_attr_val.v.val_die_ref.die = entry->copy;
            }
          else
            {
              dw_die_ref parent = unit;
              dw_die_ref copy = clone_die (die: targ);

              /* Record in DECL_TABLE that TARG has been copied.
                 Need to do this now, before the recursive call,
                 because DECL_TABLE may be expanded and SLOT
                 would no longer be a valid pointer.  */
              entry = XCNEW (struct decl_table_entry);
              entry->orig = targ;
              entry->copy = copy;
              *slot = entry;

              /* If TARG is not a declaration DIE, we need to copy its
                 children.  */
              if (!is_declaration_die (die: targ))
                {
                  FOR_EACH_CHILD (
                      targ, c,
                      add_child_die (copy,
                                     clone_tree_partial (c, decl_table)));
                }

              /* Make sure the cloned tree is marked as part of the
                 type unit.  */
              mark_dies (copy);

              /* If TARG has surrounding context, copy its ancestor tree
                 into the new type unit.  */
              if (targ->die_parent != NULL
                  && !is_unit_die (c: targ->die_parent))
                parent = copy_ancestor_tree (unit, die: targ->die_parent,
                                             decl_table);

              add_child_die (die: parent, child_die: copy);
              a->dw_attr_val.v.val_die_ref.die = copy;

              /* Make sure the newly-copied DIE is walked.  If it was
                 installed in a previously-added context, it won't
                 get visited otherwise.  */
              if (parent != unit)
                {
                  /* Find the highest point of the newly-added tree,
                     mark each node along the way, and walk from there.  */
                  parent->die_mark = 1;
                  while (parent->die_parent
                         && parent->die_parent->die_mark == 0)
                    {
                      parent = parent->die_parent;
                      parent->die_mark = 1;
                    }
                  copy_decls_walk (unit, die: parent, decl_table);
                }
            }
        }
    }

  FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
}
| 8963 | |
| 8964 | /* Collect skeleton dies in DIE created by break_out_comdat_types already |
| 8965 | and record them in DECL_TABLE. */ |
| 8966 | |
| 8967 | static void |
| 8968 | collect_skeleton_dies (dw_die_ref die, decl_hash_type *decl_table) |
| 8969 | { |
| 8970 | dw_die_ref c; |
| 8971 | |
| 8972 | if (dw_attr_node *a = get_AT (die, attr_kind: DW_AT_signature)) |
| 8973 | { |
| 8974 | dw_die_ref targ = AT_ref (a); |
| 8975 | gcc_assert (targ->die_mark == 0 && targ->comdat_type_p); |
| 8976 | decl_table_entry **slot |
| 8977 | = decl_table->find_slot_with_hash (comparable: targ, |
| 8978 | hash: htab_hash_pointer (targ), |
| 8979 | insert: INSERT); |
| 8980 | gcc_assert (*slot == HTAB_EMPTY_ENTRY); |
| 8981 | /* Record in DECL_TABLE that TARG has been already copied |
| 8982 | by remove_child_or_replace_with_skeleton. */ |
| 8983 | decl_table_entry *entry = XCNEW (struct decl_table_entry); |
| 8984 | entry->orig = targ; |
| 8985 | entry->copy = die; |
| 8986 | *slot = entry; |
| 8987 | } |
| 8988 | FOR_EACH_CHILD (die, c, collect_skeleton_dies (c, decl_table)); |
| 8989 | } |
| 8990 | |
| 8991 | /* Copy declarations for "unworthy" types into the new comdat section. |
| 8992 | Incomplete types, modified types, and certain other types aren't broken |
| 8993 | out into comdat sections of their own, so they don't have a signature, |
| 8994 | and we need to copy the declaration into the same section so that we |
| 8995 | don't have an external reference. */ |
| 8996 | |
| 8997 | static void |
| 8998 | copy_decls_for_unworthy_types (dw_die_ref unit) |
| 8999 | { |
| 9000 | mark_dies (unit); |
| 9001 | decl_hash_type decl_table (10); |
| 9002 | collect_skeleton_dies (die: unit, decl_table: &decl_table); |
| 9003 | copy_decls_walk (unit, die: unit, decl_table: &decl_table); |
| 9004 | unmark_dies (unit); |
| 9005 | } |
| 9006 | |
| 9007 | /* Traverse the DIE and add a sibling attribute if it may have the |
| 9008 | effect of speeding up access to siblings. To save some space, |
| 9009 | avoid generating sibling attributes for DIE's without children. */ |
| 9010 | |
| 9011 | static void |
| 9012 | add_sibling_attributes (dw_die_ref die) |
| 9013 | { |
| 9014 | dw_die_ref c; |
| 9015 | |
| 9016 | if (! die->die_child) |
| 9017 | return; |
| 9018 | |
| 9019 | if (die->die_parent && die != die->die_parent->die_child) |
| 9020 | add_AT_die_ref (die, attr_kind: DW_AT_sibling, targ_die: die->die_sib); |
| 9021 | |
| 9022 | FOR_EACH_CHILD (die, c, add_sibling_attributes (c)); |
| 9023 | } |
| 9024 | |
| 9025 | /* Output all location lists for the DIE and its children. */ |
| 9026 | |
| 9027 | static void |
| 9028 | output_location_lists (dw_die_ref die) |
| 9029 | { |
| 9030 | dw_die_ref c; |
| 9031 | dw_attr_node *a; |
| 9032 | unsigned ix; |
| 9033 | |
| 9034 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
| 9035 | if (AT_class (a) == dw_val_class_loc_list) |
| 9036 | output_loc_list (AT_loc_list (a)); |
| 9037 | |
| 9038 | FOR_EACH_CHILD (die, c, output_location_lists (c)); |
| 9039 | } |
| 9040 | |
/* During assign_location_list_indexes and output_loclists_offset the
   current index, after it the number of assigned indexes (i.e. how
   large the .debug_loclists* offset table should be).  */
static unsigned int loc_list_idx;
| 9045 | |
| 9046 | /* Output all location list offsets for the DIE and its children. */ |
| 9047 | |
| 9048 | static void |
| 9049 | output_loclists_offsets (dw_die_ref die) |
| 9050 | { |
| 9051 | dw_die_ref c; |
| 9052 | dw_attr_node *a; |
| 9053 | unsigned ix; |
| 9054 | |
| 9055 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
| 9056 | if (AT_class (a) == dw_val_class_loc_list) |
| 9057 | { |
| 9058 | dw_loc_list_ref l = AT_loc_list (a); |
| 9059 | if (l->offset_emitted) |
| 9060 | continue; |
| 9061 | dw2_asm_output_delta (dwarf_offset_size, l->ll_symbol, |
| 9062 | loc_section_label, NULL); |
| 9063 | gcc_assert (l->hash == loc_list_idx); |
| 9064 | loc_list_idx++; |
| 9065 | l->offset_emitted = true; |
| 9066 | } |
| 9067 | |
| 9068 | FOR_EACH_CHILD (die, c, output_loclists_offsets (c)); |
| 9069 | } |
| 9070 | |
| 9071 | /* Recursively set indexes of location lists. */ |
| 9072 | |
| 9073 | static void |
| 9074 | assign_location_list_indexes (dw_die_ref die) |
| 9075 | { |
| 9076 | dw_die_ref c; |
| 9077 | dw_attr_node *a; |
| 9078 | unsigned ix; |
| 9079 | |
| 9080 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
| 9081 | if (AT_class (a) == dw_val_class_loc_list) |
| 9082 | { |
| 9083 | dw_loc_list_ref list = AT_loc_list (a); |
| 9084 | if (!list->num_assigned) |
| 9085 | { |
| 9086 | list->num_assigned = true; |
| 9087 | list->hash = loc_list_idx++; |
| 9088 | } |
| 9089 | } |
| 9090 | |
| 9091 | FOR_EACH_CHILD (die, c, assign_location_list_indexes (c)); |
| 9092 | } |
| 9093 | |
/* We want to limit the number of external references, because they are
   larger than local references: a relocation takes multiple words, and
   even a sig8 reference is always eight bytes, whereas a local reference
   can be as small as one byte (though DW_FORM_ref is usually 4 in GCC).
   So if we encounter multiple external references to the same type DIE, we
   make a local typedef stub for it and redirect all references there.

   This is the element of the hash table for keeping track of these
   references.  */

struct external_ref
{
  /* The external type DIE being referenced (the hash key).  */
  dw_die_ref type;
  /* A local DIE standing in for TYPE, if one exists.  */
  dw_die_ref stub;
  /* Number of external references to TYPE seen so far.  */
  unsigned n_refs;
};
| 9110 | |
/* Hashtable helpers.  */

/* Hashes external_ref entries by the referenced type's symbol or
   signature rather than its address (see hash below).  */
struct external_ref_hasher : free_ptr_hash <external_ref>
{
  static inline hashval_t hash (const external_ref *);
  static inline bool equal (const external_ref *, const external_ref *);
};
| 9118 | |
/* Hash an external_ref by the identity of the type it references.  */

inline hashval_t
external_ref_hasher::hash (const external_ref *r)
{
  dw_die_ref die = r->type;
  hashval_t h = 0;

  /* We can't use the address of the DIE for hashing, because
     that will make the order of the stub DIEs non-deterministic.  */
  if (! die->comdat_type_p)
    /* We have a symbol; use it to compute a hash.  */
    h = htab_hash_string (die->die_id.die_symbol);
  else
    {
      /* We have a type signature; use a subset of the bits as the hash.
         The 8-byte signature is at least as large as hashval_t.  */
      comdat_type_node *type_node = die->die_id.die_type_node;
      memcpy (dest: &h, src: type_node->signature, n: sizeof (h));
    }
  return h;
}
| 9139 | |
/* Two entries match when they reference the same type DIE.  */

inline bool
external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
{
  return r1->type == r2->type;
}
| 9145 | |
/* Hash table of external_ref entries, keyed on the referenced type DIE.  */
typedef hash_table<external_ref_hasher> external_ref_hash_type;
| 9147 | |
| 9148 | /* Return a pointer to the external_ref for references to DIE. */ |
| 9149 | |
| 9150 | static struct external_ref * |
| 9151 | lookup_external_ref (external_ref_hash_type *map, dw_die_ref die) |
| 9152 | { |
| 9153 | struct external_ref ref, *ref_p; |
| 9154 | external_ref **slot; |
| 9155 | |
| 9156 | ref.type = die; |
| 9157 | slot = map->find_slot (value: &ref, insert: INSERT); |
| 9158 | if (*slot != HTAB_EMPTY_ENTRY) |
| 9159 | return *slot; |
| 9160 | |
| 9161 | ref_p = XCNEW (struct external_ref); |
| 9162 | ref_p->type = die; |
| 9163 | *slot = ref_p; |
| 9164 | return ref_p; |
| 9165 | } |
| 9166 | |
| 9167 | /* Subroutine of optimize_external_refs, below. |
| 9168 | |
| 9169 | If we see a type skeleton, record it as our stub. If we see external |
| 9170 | references, remember how many we've seen. */ |
| 9171 | |
static void
optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
{
  dw_die_ref c;
  dw_attr_node *a;
  unsigned ix;
  struct external_ref *ref_p;

  /* A type DIE carrying DW_AT_signature is a local skeleton standing in
     for a comdat type defined in another unit; record it so that local
     references to that type can be redirected to the skeleton.  */
  if (is_type_die (die)
      && (c = get_AT_ref (die, attr_kind: DW_AT_signature)))
    {
      /* This is a local skeleton; use it for local references.  */
      ref_p = lookup_external_ref (map, die: c);
      ref_p->stub = die;
    }

  /* Scan the DIE references, and remember any that refer to DIEs from
     other CUs (i.e. those which are not marked).  */
  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    if (AT_class (a) == dw_val_class_die_ref
	&& (c = AT_ref (a))->die_mark == 0
	&& is_type_die (die: c))
      {
	/* Count external references per type; dwarf2_build_local_stub
	   builds a stub only when a type is referenced more than once.  */
	ref_p = lookup_external_ref (map, die: c);
	ref_p->n_refs++;
      }

  /* Recurse over all of DIE's children.  */
  FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
}
| 9201 | |
| 9202 | /* htab_traverse callback function for optimize_external_refs, below. SLOT |
| 9203 | points to an external_ref, DATA is the CU we're processing. If we don't |
| 9204 | already have a local stub, and we have multiple refs, build a stub. */ |
| 9205 | |
int
dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
{
  struct external_ref *ref_p = *slot;

  /* Build a stub only when the type has no local skeleton already, is
     referenced more than once (so the smaller local refs pay for the
     stub), and non-strict DWARF extensions are allowed.  */
  if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
    {
      /* We have multiple references to this type, so build a small stub.
	 Both of these forms are a bit dodgy from the perspective of the
	 DWARF standard, since technically they should have names.  */
      dw_die_ref cu = data;
      dw_die_ref type = ref_p->type;
      dw_die_ref stub = NULL;

      if (type->comdat_type_p)
	{
	  /* If we refer to this type via sig8, use AT_signature.  */
	  stub = new_die (tag_value: type->die_tag, parent_die: cu, NULL_TREE);
	  add_AT_die_ref (die: stub, attr_kind: DW_AT_signature, targ_die: type);
	}
      else
	{
	  /* Otherwise, use a typedef with no name.  */
	  stub = new_die (tag_value: DW_TAG_typedef, parent_die: cu, NULL_TREE);
	  add_AT_die_ref (die: stub, attr_kind: DW_AT_type, targ_die: type);
	}

      /* Mark the stub so references to it count as local to this CU.  */
      stub->die_mark++;
      ref_p->stub = stub;
    }
  /* Nonzero return keeps the hash-table traversal going.  */
  return 1;
}
| 9238 | |
| 9239 | /* DIE is a unit; look through all the DIE references to see if there are |
| 9240 | any external references to types, and if so, create local stubs for |
| 9241 | them which will be applied in build_abbrev_table. This is useful because |
| 9242 | references to local DIEs are smaller. */ |
| 9243 | |
| 9244 | static external_ref_hash_type * |
| 9245 | optimize_external_refs (dw_die_ref die) |
| 9246 | { |
| 9247 | external_ref_hash_type *map = new external_ref_hash_type (10); |
| 9248 | optimize_external_refs_1 (die, map); |
| 9249 | map->traverse <dw_die_ref, dwarf2_build_local_stub> (argument: die); |
| 9250 | return map; |
| 9251 | } |
| 9252 | |
| 9253 | /* The following 3 variables are temporaries that are computed only during the |
| 9254 | build_abbrev_table call and used and released during the following |
| 9255 | optimize_abbrev_table call. */ |
| 9256 | |
| 9257 | /* First abbrev_id that can be optimized based on usage. */ |
| 9258 | static unsigned int abbrev_opt_start; |
| 9259 | |
| 9260 | /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with |
| 9261 | abbrev_id smaller than this, because they must be already sized |
| 9262 | during build_abbrev_table). */ |
| 9263 | static unsigned int abbrev_opt_base_type_end; |
| 9264 | |
| 9265 | /* Vector of usage counts during build_abbrev_table. Indexed by |
| 9266 | abbrev_id - abbrev_opt_start. */ |
| 9267 | static vec<unsigned int> abbrev_usage_count; |
| 9268 | |
| 9269 | /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */ |
| 9270 | static vec<dw_die_ref> sorted_abbrev_dies; |
| 9271 | |
| 9272 | /* The format of each DIE (and its attribute value pairs) is encoded in an |
| 9273 | abbreviation table. This routine builds the abbreviation table and assigns |
| 9274 | a unique abbreviation id for each abbreviation entry. The children of each |
| 9275 | die are visited recursively. */ |
| 9276 | |
static void
build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
{
  unsigned int abbrev_id = 0;
  dw_die_ref c;
  dw_attr_node *a;
  unsigned ix;
  dw_die_ref abbrev;

  /* Scan the DIE references, and replace any that refer to
     DIEs from other CUs (i.e. those which are not marked) with
     the local stubs we built in optimize_external_refs.  */
  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    if (AT_class (a) == dw_val_class_die_ref
	&& (c = AT_ref (a))->die_mark == 0)
      {
	struct external_ref *ref_p;
	/* An external reference must be identifiable either by symbol
	   name or by comdat type signature.  */
	gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);

	if (is_type_die (die: c)
	    && (ref_p = lookup_external_ref (map: extern_map, die: c))
	    && ref_p->stub && ref_p->stub != die)
	  {
	    /* DW_AT_signature must keep referring to the real type DIE,
	       never to a local stub.  */
	    gcc_assert (a->dw_attr != DW_AT_signature);
	    change_AT_die_ref (ref: a, new_die: ref_p->stub);
	  }
	else
	  /* We aren't changing this reference, so mark it external.  */
	  set_AT_ref_external (a, i: 1);
      }

  /* Search the existing abbreviation table for an entry matching DIE:
     same tag, same has-children flag, and an identical attribute/form
     list.  On a match the loop breaks with abbrev_id set; otherwise
     abbrev_id ends up one past the table.  */
  FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
    {
      dw_attr_node *die_a, *abbrev_a;
      unsigned ix;
      bool ok = true;

      /* Entry 0 is reserved; abbreviation code 0 terminates a sibling
	 chain in .debug_info.  */
      if (abbrev_id == 0)
	continue;
      if (abbrev->die_tag != die->die_tag)
	continue;
      if ((abbrev->die_child != NULL) != (die->die_child != NULL))
	continue;

      if (vec_safe_length (v: abbrev->die_attr) != vec_safe_length (v: die->die_attr))
	continue;

      FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
	{
	  abbrev_a = &(*abbrev->die_attr)[ix];
	  if ((abbrev_a->dw_attr != die_a->dw_attr)
	      || (value_format (abbrev_a) != value_format (die_a)))
	    {
	      ok = false;
	      break;
	    }
	}
      if (ok)
	break;
    }

  /* No existing entry matched: DIE itself becomes the new entry.  */
  if (abbrev_id >= vec_safe_length (v: abbrev_die_table))
    {
      vec_safe_push (v&: abbrev_die_table, obj: die);
      if (abbrev_opt_start)
	abbrev_usage_count.safe_push (obj: 0);
    }
  /* Record usage so optimize_abbrev_table can later renumber the
     optimizable abbreviations by frequency.  */
  if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
    {
      abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
      sorted_abbrev_dies.safe_push (obj: die);
    }

  die->die_abbrev = abbrev_id;
  FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
}
| 9353 | |
| 9354 | /* Callback function for sorted_abbrev_dies vector sorting. We sort |
| 9355 | by die_abbrev's usage count, from the most commonly used |
| 9356 | abbreviation to the least. */ |
| 9357 | |
| 9358 | static int |
| 9359 | die_abbrev_cmp (const void *p1, const void *p2) |
| 9360 | { |
| 9361 | dw_die_ref die1 = *(const dw_die_ref *) p1; |
| 9362 | dw_die_ref die2 = *(const dw_die_ref *) p2; |
| 9363 | |
| 9364 | gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start); |
| 9365 | gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start); |
| 9366 | |
| 9367 | if (die1->die_abbrev >= abbrev_opt_base_type_end |
| 9368 | && die2->die_abbrev >= abbrev_opt_base_type_end) |
| 9369 | { |
| 9370 | if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start] |
| 9371 | > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start]) |
| 9372 | return -1; |
| 9373 | if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start] |
| 9374 | < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start]) |
| 9375 | return 1; |
| 9376 | } |
| 9377 | |
| 9378 | /* Stabilize the sort. */ |
| 9379 | if (die1->die_abbrev < die2->die_abbrev) |
| 9380 | return -1; |
| 9381 | if (die1->die_abbrev > die2->die_abbrev) |
| 9382 | return 1; |
| 9383 | |
| 9384 | return 0; |
| 9385 | } |
| 9386 | |
| 9387 | /* Convert dw_val_class_const and dw_val_class_unsigned_const class attributes |
| 9388 | of DIEs in between sorted_abbrev_dies[first_id] and abbrev_dies[end_id - 1] |
| 9389 | into dw_val_class_const_implicit or |
| 9390 | dw_val_class_unsigned_const_implicit. */ |
| 9391 | |
static void
optimize_implicit_const (unsigned int first_id, unsigned int end,
			 vec<bool> &implicit_consts)
{
  /* It never makes sense if there is just one DIE using the abbreviation.  */
  if (end < first_id + 2)
    return;

  dw_attr_node *a;
  unsigned ix, i;
  /* All DIEs in sorted_abbrev_dies[first_id, end) share one abbreviation;
     inspect the attribute values of the first one.  */
  dw_die_ref die = sorted_abbrev_dies[first_id];
  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    /* implicit_consts[ix] is true iff attribute IX carries the same
       value in every DIE sharing this abbreviation (computed by
       optimize_abbrev_table).  */
    if (implicit_consts[ix])
      {
	enum dw_val_class new_class = dw_val_class_none;
	switch (AT_class (a))
	  {
	  case dw_val_class_unsigned_const:
	    /* DW_FORM_implicit_const is stored as sleb128 in
	       .debug_abbrev, so skip values that would read back
	       negative.  */
	    if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
	      continue;

	    /* The .debug_abbrev section will grow by
	       size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
	       in all the DIEs using that abbreviation.  */
	    if (constant_size (AT_unsigned (a)) * (end - first_id)
		<= (unsigned) size_of_sleb128 (AT_unsigned (a)))
	      continue;

	    new_class = dw_val_class_unsigned_const_implicit;
	    break;

	  case dw_val_class_const:
	    new_class = dw_val_class_const_implicit;
	    break;

	  case dw_val_class_file:
	    new_class = dw_val_class_file_implicit;
	    break;

	  default:
	    continue;
	  }
	/* Switch attribute IX to its implicit-const class in every DIE
	   using this abbreviation; the shared value will be emitted once
	   in .debug_abbrev instead of per-DIE in .debug_info.  */
	for (i = first_id; i < end; i++)
	  (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
	    = new_class;
      }
}
| 9439 | |
/* Attempt to optimize the abbreviation table: renumber by usage
   frequency all abbreviations with ids of abbrev_opt_start and above.  */
| 9442 | |
static void
optimize_abbrev_table (void)
{
  /* Renumbering only pays off when the uleb128 encoding of abbrev codes
     can actually shrink: always for DWARF 5 (implicit consts), or when
     some codes need more than one uleb128 byte (> 127 entries).  */
  if (abbrev_opt_start
      && vec_safe_length (v: abbrev_die_table) > abbrev_opt_start
      && (dwarf_version >= 5 || vec_safe_length (v: abbrev_die_table) > 127))
    {
      auto_vec<bool, 32> implicit_consts;
      sorted_abbrev_dies.qsort (die_abbrev_cmp);

      unsigned int abbrev_id = abbrev_opt_start - 1;
      unsigned int first_id = ~0U;
      unsigned int last_abbrev_id = 0;
      unsigned int i;
      dw_die_ref die;
      /* Base-type abbrevs (if any) were already sized and must keep
	 their ids; start renumbering after them.  */
      if (abbrev_opt_base_type_end > abbrev_opt_start)
	abbrev_id = abbrev_opt_base_type_end - 1;
      /* Reassign abbreviation ids from abbrev_opt_start above, so that
	 most commonly used abbreviations come first.  */
      FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
	{
	  dw_attr_node *a;
	  unsigned ix;

	  /* If calc_base_type_die_sizes has been called, the CU and
	     base types after it can't be optimized, because we've already
	     calculated their DIE offsets.  We've sorted them first.  */
	  if (die->die_abbrev < abbrev_opt_base_type_end)
	    continue;
	  if (die->die_abbrev != last_abbrev_id)
	    {
	      /* Start of a run of DIEs sharing a new abbreviation.  */
	      last_abbrev_id = die->die_abbrev;
	      /* Finish implicit-const analysis for the previous run.  */
	      if (dwarf_version >= 5 && first_id != ~0U)
		optimize_implicit_const (first_id, end: i, implicit_consts);
	      abbrev_id++;
	      (*abbrev_die_table)[abbrev_id] = die;
	      if (dwarf_version >= 5)
		{
		  first_id = i;
		  implicit_consts.truncate (size: 0);

		  /* Seed the per-attribute candidate flags: an attribute
		     can become implicit_const only if its class supports
		     it.  */
		  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
		    switch (AT_class (a))
		      {
		      case dw_val_class_const:
		      case dw_val_class_unsigned_const:
		      case dw_val_class_file:
			implicit_consts.safe_push (obj: true);
			break;
		      default:
			implicit_consts.safe_push (obj: false);
			break;
		      }
		}
	    }
	  else if (dwarf_version >= 5)
	    {
	      /* Another DIE in the current run: an attribute stays an
		 implicit-const candidate only while every DIE in the run
		 carries the identical value.  */
	      FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
		if (!implicit_consts[ix])
		  continue;
		else
		  {
		    dw_attr_node *other_a
		      = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
		    if (!dw_val_equal_p (a: &a->dw_attr_val,
					 b: &other_a->dw_attr_val))
		      implicit_consts[ix] = false;
		  }
	    }
	  die->die_abbrev = abbrev_id;
	}
      /* Every table slot must have been reassigned exactly once.  */
      gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
      /* Finish implicit-const analysis for the final run.  */
      if (dwarf_version >= 5 && first_id != ~0U)
	optimize_implicit_const (first_id, end: i, implicit_consts);
    }

  /* Release the temporaries set up by build_abbrev_table.  */
  abbrev_opt_start = 0;
  abbrev_opt_base_type_end = 0;
  abbrev_usage_count.release ();
  sorted_abbrev_dies.release ();
}
| 9524 | |
| 9525 | /* Return the power-of-two number of bytes necessary to represent VALUE. */ |
| 9526 | |
| 9527 | static int |
| 9528 | constant_size (unsigned HOST_WIDE_INT value) |
| 9529 | { |
| 9530 | int log; |
| 9531 | |
| 9532 | if (value == 0) |
| 9533 | log = 0; |
| 9534 | else |
| 9535 | log = floor_log2 (x: value); |
| 9536 | |
| 9537 | log = log / 8; |
| 9538 | log = 1 << (floor_log2 (x: log) + 1); |
| 9539 | |
| 9540 | return log; |
| 9541 | } |
| 9542 | |
| 9543 | /* Return the size of a DIE as it is represented in the |
| 9544 | .debug_info section. */ |
| 9545 | |
static unsigned long
size_of_die (dw_die_ref die)
{
  unsigned long size = 0;
  dw_attr_node *a;
  unsigned ix;
  enum dwarf_form form;

  /* The DIE starts with its uleb128 abbreviation code...  */
  size += size_of_uleb128 (die->die_abbrev);
  /* ...followed by the encoded value of each attribute, whose size is
     determined by the form value_format would choose for it.  */
  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    {
      switch (AT_class (a))
	{
	case dw_val_class_addr:
	  /* Split DWARF uses an index into the address table instead of
	     a relocated full address.  */
	  if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
	    {
	      gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
	      size += size_of_uleb128 (AT_index (a));
	    }
	  else
	    size += DWARF2_ADDR_SIZE;
	  break;
	case dw_val_class_offset:
	  size += dwarf_offset_size;
	  break;
	case dw_val_class_loc:
	  {
	    unsigned long lsize = size_of_locs (loc: AT_loc (a));

	    /* Block length.  */
	    if (dwarf_version >= 4)
	      size += size_of_uleb128 (lsize);
	    else
	      size += constant_size (value: lsize);
	    size += lsize;
	  }
	  break;
	case dw_val_class_loc_list:
	  /* DW_FORM_loclistx (uleb128 index) vs. a section offset.  */
	  if (dwarf_split_debug_info && dwarf_version >= 5)
	    {
	      gcc_assert (AT_loc_list (a)->num_assigned);
	      size += size_of_uleb128 (AT_loc_list (a)->hash);
	    }
	  else
	    size += dwarf_offset_size;
	  break;
	case dw_val_class_view_list:
	  size += dwarf_offset_size;
	  break;
	case dw_val_class_range_list:
	  /* DW_FORM_rnglistx (uleb128 index) vs. a section offset.  */
	  if (value_format (a) == DW_FORM_rnglistx)
	    {
	      gcc_assert (rnglist_idx);
	      dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
	      size += size_of_uleb128 (r->idx);
	    }
	  else
	    size += dwarf_offset_size;
	  break;
	case dw_val_class_const:
	  size += size_of_sleb128 (AT_int (a));
	  break;
	case dw_val_class_unsigned_const:
	  {
	    int csize = constant_size (value: AT_unsigned (a));
	    /* In DWARF3, DW_AT_data_member_location with data4/data8 is
	       a loclistptr, not a constant, so large constants must use
	       DW_FORM_udata there (mirrors value_format).  */
	    if (dwarf_version == 3
		&& a->dw_attr == DW_AT_data_member_location
		&& csize >= 4)
	      size += size_of_uleb128 (AT_unsigned (a));
	    else
	      size += csize;
	  }
	  break;
	case dw_val_class_symview:
	  /* Sized by the largest view number that may appear.  */
	  if (symview_upper_bound <= 0xff)
	    size += 1;
	  else if (symview_upper_bound <= 0xffff)
	    size += 2;
	  else if (symview_upper_bound <= 0xffffffff)
	    size += 4;
	  else
	    size += 8;
	  break;
	case dw_val_class_const_implicit:
	case dw_val_class_unsigned_const_implicit:
	case dw_val_class_file_implicit:
	  /* These occupy no size in the DIE, just an extra sleb128 in
	     .debug_abbrev.  */
	  break;
	case dw_val_class_const_double:
	  size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
	  if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
	    size++; /* block */
	  break;
	case dw_val_class_wide_int:
	  size += (get_full_len (op: *a->dw_attr_val.v.val_wide)
		   * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
	  if (get_full_len (op: *a->dw_attr_val.v.val_wide)
	      * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
	    size++; /* block */
	  break;
	case dw_val_class_vec:
	  size += constant_size (value: a->dw_attr_val.v.val_vec.length
				 * a->dw_attr_val.v.val_vec.elt_size)
		  + a->dw_attr_val.v.val_vec.length
		    * a->dw_attr_val.v.val_vec.elt_size; /* block */
	  break;
	case dw_val_class_flag:
	  if (dwarf_version >= 4)
	    /* Currently all add_AT_flag calls pass in 1 as last argument,
	       so DW_FORM_flag_present can be used.  If that ever changes,
	       we'll need to use DW_FORM_flag and have some optimization
	       in build_abbrev_table that will change those to
	       DW_FORM_flag_present if it is set to 1 in all DIEs using
	       the same abbrev entry.  */
	    gcc_assert (a->dw_attr_val.v.val_flag == 1);
	  else
	    size += 1;
	  break;
	case dw_val_class_die_ref:
	  if (AT_ref_external (a))
	    {
	      /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
		 we use DW_FORM_ref_addr.  In DWARF2, DW_FORM_ref_addr
		 is sized by target address length, whereas in DWARF3
		 it's always sized as an offset.  */
	      if (AT_ref (a)->comdat_type_p)
		size += DWARF_TYPE_SIGNATURE_SIZE;
	      else if (dwarf_version == 2)
		size += DWARF2_ADDR_SIZE;
	      else
		size += dwarf_offset_size;
	    }
	  else
	    size += dwarf_offset_size;
	  break;
	case dw_val_class_fde_ref:
	  size += dwarf_offset_size;
	  break;
	case dw_val_class_lbl_id:
	  /* Like dw_val_class_addr: indexed form in split DWARF.  */
	  if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
	    {
	      gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
	      size += size_of_uleb128 (AT_index (a));
	    }
	  else
	    size += DWARF2_ADDR_SIZE;
	  break;
	case dw_val_class_lineptr:
	case dw_val_class_macptr:
	case dw_val_class_loclistsptr:
	  size += dwarf_offset_size;
	  break;
	case dw_val_class_str:
	  /* Either an offset into a string section, an index into the
	     string offsets table, or the inline string itself.  */
	  form = AT_string_form (a);
	  if (form == DW_FORM_strp || form == DW_FORM_line_strp)
	    size += dwarf_offset_size;
	  else if (form == dwarf_FORM (form: DW_FORM_strx))
	    size += size_of_uleb128 (AT_index (a));
	  else
	    size += strlen (s: a->dw_attr_val.v.val_str->str) + 1;
	  break;
	case dw_val_class_file:
	  size += constant_size (value: maybe_emit_file (fd: a->dw_attr_val.v.val_file));
	  break;
	case dw_val_class_data8:
	  size += 8;
	  break;
	case dw_val_class_vms_delta:
	  size += dwarf_offset_size;
	  break;
	case dw_val_class_high_pc:
	  size += DWARF2_ADDR_SIZE;
	  break;
	case dw_val_class_discr_value:
	  size += size_of_discr_value (discr_value: &a->dw_attr_val.v.val_discr_value);
	  break;
	case dw_val_class_discr_list:
	  {
	    unsigned block_size = size_of_discr_list (discr_list: AT_discr_list (a));

	    /* This is a block, so we have the block length and then its
	       data.  */
	    size += constant_size (value: block_size) + block_size;
	  }
	  break;
	default:
	  gcc_unreachable ();
	}
    }

  return size;
}
| 9739 | |
| 9740 | /* Size the debugging information associated with a given DIE. Visits the |
| 9741 | DIE's children recursively. Updates the global variable next_die_offset, on |
| 9742 | each time through. Uses the current value of next_die_offset to update the |
| 9743 | die_offset field in each DIE. */ |
| 9744 | |
| 9745 | static void |
| 9746 | calc_die_sizes (dw_die_ref die) |
| 9747 | { |
| 9748 | dw_die_ref c; |
| 9749 | |
| 9750 | gcc_assert (die->die_offset == 0 |
| 9751 | || (unsigned long int) die->die_offset == next_die_offset); |
| 9752 | die->die_offset = next_die_offset; |
| 9753 | next_die_offset += size_of_die (die); |
| 9754 | |
| 9755 | FOR_EACH_CHILD (die, c, calc_die_sizes (c)); |
| 9756 | |
| 9757 | if (die->die_child != NULL) |
| 9758 | /* Count the null byte used to terminate sibling lists. */ |
| 9759 | next_die_offset += 1; |
| 9760 | } |
| 9761 | |
| 9762 | /* Size just the base type children at the start of the CU. |
| 9763 | This is needed because build_abbrev needs to size locs |
| 9764 | and sizing of type based stack ops needs to know die_offset |
| 9765 | values for the base types. */ |
| 9766 | |
static void
calc_base_type_die_sizes (void)
{
  /* Offsets start right after the compilation unit header, whose size
     depends on whether this is a skeleton CU (split DWARF).  */
  unsigned long die_offset = (dwarf_split_debug_info
			      ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
			      : DWARF_COMPILE_UNIT_HEADER_SIZE);
  unsigned int i;
  dw_die_ref base_type;
#if ENABLE_ASSERT_CHECKING
  dw_die_ref prev = comp_unit_die ()->die_child;
#endif

  die_offset += size_of_die (die: comp_unit_die ());
  for (i = 0; base_types.iterate (ix: i, ptr: &base_type); i++)
    {
#if ENABLE_ASSERT_CHECKING
      /* Base types must be the leading children of the CU, leafs,
	 not yet laid out, and already assigned an abbreviation.  */
      gcc_assert (base_type->die_offset == 0
		  && prev->die_sib == base_type
		  && base_type->die_child == NULL
		  && base_type->die_abbrev);
      prev = base_type;
#endif
      /* Remember the highest abbrev id used by a base type;
	 optimize_abbrev_table must not renumber at or below it, since
	 these DIEs' offsets are fixed from here on.  */
      if (abbrev_opt_start
	  && base_type->die_abbrev >= abbrev_opt_base_type_end)
	abbrev_opt_base_type_end = base_type->die_abbrev + 1;
      base_type->die_offset = die_offset;
      die_offset += size_of_die (die: base_type);
    }
}
| 9796 | |
| 9797 | /* Set the marks for a die and its children. We do this so |
| 9798 | that we know whether or not a reference needs to use FORM_ref_addr; only |
| 9799 | DIEs in the same CU will be marked. We used to clear out the offset |
| 9800 | and use that as the flag, but ran into ordering problems. */ |
| 9801 | |
| 9802 | static void |
| 9803 | mark_dies (dw_die_ref die) |
| 9804 | { |
| 9805 | dw_die_ref c; |
| 9806 | |
| 9807 | gcc_assert (!die->die_mark); |
| 9808 | |
| 9809 | die->die_mark = 1; |
| 9810 | FOR_EACH_CHILD (die, c, mark_dies (c)); |
| 9811 | } |
| 9812 | |
| 9813 | /* Clear the marks for a die and its children. */ |
| 9814 | |
| 9815 | static void |
| 9816 | unmark_dies (dw_die_ref die) |
| 9817 | { |
| 9818 | dw_die_ref c; |
| 9819 | |
| 9820 | if (! use_debug_types) |
| 9821 | gcc_assert (die->die_mark); |
| 9822 | |
| 9823 | die->die_mark = 0; |
| 9824 | FOR_EACH_CHILD (die, c, unmark_dies (c)); |
| 9825 | } |
| 9826 | |
| 9827 | /* Clear the marks for a die, its children and referred dies. */ |
| 9828 | |
| 9829 | static void |
| 9830 | unmark_all_dies (dw_die_ref die) |
| 9831 | { |
| 9832 | dw_die_ref c; |
| 9833 | dw_attr_node *a; |
| 9834 | unsigned ix; |
| 9835 | |
| 9836 | if (!die->die_mark) |
| 9837 | return; |
| 9838 | die->die_mark = 0; |
| 9839 | |
| 9840 | FOR_EACH_CHILD (die, c, unmark_all_dies (c)); |
| 9841 | |
| 9842 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
| 9843 | if (AT_class (a) == dw_val_class_die_ref) |
| 9844 | unmark_all_dies (die: AT_ref (a)); |
| 9845 | } |
| 9846 | |
/* Calculate whether the entry should appear in the final output file.
   It may come from a pruned type.  */
| 9849 | |
| 9850 | static bool |
| 9851 | include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p) |
| 9852 | { |
| 9853 | /* By limiting gnu pubnames to definitions only, gold can generate a |
| 9854 | gdb index without entries for declarations, which don't include |
| 9855 | enough information to be useful. */ |
| 9856 | if (debug_generate_pub_sections == 2 && is_declaration_die (die: p->die)) |
| 9857 | return false; |
| 9858 | |
| 9859 | if (table == pubname_table) |
| 9860 | { |
| 9861 | /* Enumerator names are part of the pubname table, but the |
| 9862 | parent DW_TAG_enumeration_type die may have been pruned. |
| 9863 | Don't output them if that is the case. */ |
| 9864 | if (p->die->die_tag == DW_TAG_enumerator && |
| 9865 | (p->die->die_parent == NULL |
| 9866 | || !p->die->die_parent->die_perennial_p)) |
| 9867 | return false; |
| 9868 | |
| 9869 | /* Everything else in the pubname table is included. */ |
| 9870 | return true; |
| 9871 | } |
| 9872 | |
| 9873 | /* The pubtypes table shouldn't include types that have been |
| 9874 | pruned. */ |
| 9875 | return (p->die->die_offset != 0 |
| 9876 | || !flag_eliminate_unused_debug_types); |
| 9877 | } |
| 9878 | |
| 9879 | /* Return the size of the .debug_pubnames or .debug_pubtypes table |
| 9880 | generated for the compilation unit. */ |
| 9881 | |
| 9882 | static unsigned long |
| 9883 | size_of_pubnames (vec<pubname_entry, va_gc> *names) |
| 9884 | { |
| 9885 | unsigned long size; |
| 9886 | unsigned i; |
| 9887 | pubname_entry *p; |
| 9888 | int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0; |
| 9889 | |
| 9890 | size = DWARF_PUBNAMES_HEADER_SIZE; |
| 9891 | FOR_EACH_VEC_ELT (*names, i, p) |
| 9892 | if (include_pubname_in_output (table: names, p)) |
| 9893 | size += strlen (s: p->name) + dwarf_offset_size + 1 + space_for_flags; |
| 9894 | |
| 9895 | size += dwarf_offset_size; |
| 9896 | return size; |
| 9897 | } |
| 9898 | |
| 9899 | /* Return the size of the information in the .debug_aranges section. */ |
| 9900 | |
static unsigned long
size_of_aranges (void)
{
  unsigned long size;

  size = DWARF_ARANGES_HEADER_SIZE;

  /* Count the address/length pair for this compilation unit.
     Each range contributes begin/end, hence a pair of addresses;
     the "+ 1" covers the open range up to the current end of the
     respective text section.  */
  if (switch_text_ranges)
    size += 2 * DWARF2_ADDR_SIZE
	    * (vec_safe_length (v: switch_text_ranges) / 2 + 1);
  if (switch_cold_ranges)
    size += 2 * DWARF2_ADDR_SIZE
	    * (vec_safe_length (v: switch_cold_ranges) / 2 + 1);
  if (have_multiple_function_sections)
    {
      unsigned fde_idx;
      dw_fde_ref fde;

      /* Functions placed in their own sections need their own
	 address/length pairs (and a second one when split into a
	 hot and a cold part).  */
      FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
	{
	  if (fde->ignored_debug)
	    continue;
	  if (!fde->in_std_section)
	    size += 2 * DWARF2_ADDR_SIZE;
	  if (fde->dw_fde_second_begin && !fde->second_in_std_section)
	    size += 2 * DWARF2_ADDR_SIZE;
	}
    }

  /* Count the two zero words used to terminated the address range table.  */
  size += 2 * DWARF2_ADDR_SIZE;
  return size;
}
| 9935 | |
| 9936 | /* Select the encoding of an attribute value. */ |
| 9937 | |
| 9938 | static enum dwarf_form |
| 9939 | value_format (dw_attr_node *a) |
| 9940 | { |
| 9941 | switch (AT_class (a)) |
| 9942 | { |
| 9943 | case dw_val_class_addr: |
| 9944 | /* Only very few attributes allow DW_FORM_addr. */ |
| 9945 | switch (a->dw_attr) |
| 9946 | { |
| 9947 | case DW_AT_low_pc: |
| 9948 | case DW_AT_high_pc: |
| 9949 | case DW_AT_entry_pc: |
| 9950 | case DW_AT_trampoline: |
| 9951 | return (AT_index (a) == NOT_INDEXED |
| 9952 | ? DW_FORM_addr : dwarf_FORM (form: DW_FORM_addrx)); |
| 9953 | default: |
| 9954 | break; |
| 9955 | } |
| 9956 | switch (DWARF2_ADDR_SIZE) |
| 9957 | { |
| 9958 | case 1: |
| 9959 | return DW_FORM_data1; |
| 9960 | case 2: |
| 9961 | return DW_FORM_data2; |
| 9962 | case 4: |
| 9963 | return DW_FORM_data4; |
| 9964 | case 8: |
| 9965 | return DW_FORM_data8; |
| 9966 | default: |
| 9967 | gcc_unreachable (); |
| 9968 | } |
| 9969 | case dw_val_class_loc_list: |
| 9970 | if (dwarf_split_debug_info |
| 9971 | && dwarf_version >= 5 |
| 9972 | && AT_loc_list (a)->num_assigned) |
| 9973 | return DW_FORM_loclistx; |
| 9974 | /* FALLTHRU */ |
| 9975 | case dw_val_class_view_list: |
| 9976 | case dw_val_class_range_list: |
| 9977 | /* For range lists in DWARF 5, use DW_FORM_rnglistx from .debug_info.dwo |
| 9978 | but in .debug_info use DW_FORM_sec_offset, which is shorter if we |
| 9979 | care about sizes of .debug* sections in shared libraries and |
| 9980 | executables and don't take into account relocations that affect just |
| 9981 | relocatable objects - for DW_FORM_rnglistx we'd have to emit offset |
| 9982 | table in the .debug_rnglists section. */ |
| 9983 | if (dwarf_split_debug_info |
| 9984 | && dwarf_version >= 5 |
| 9985 | && AT_class (a) == dw_val_class_range_list |
| 9986 | && rnglist_idx |
| 9987 | && a->dw_attr_val.val_entry != RELOCATED_OFFSET) |
| 9988 | return DW_FORM_rnglistx; |
| 9989 | if (dwarf_version >= 4) |
| 9990 | return DW_FORM_sec_offset; |
| 9991 | /* FALLTHRU */ |
| 9992 | case dw_val_class_vms_delta: |
| 9993 | case dw_val_class_offset: |
| 9994 | switch (dwarf_offset_size) |
| 9995 | { |
| 9996 | case 4: |
| 9997 | return DW_FORM_data4; |
| 9998 | case 8: |
| 9999 | return DW_FORM_data8; |
| 10000 | default: |
| 10001 | gcc_unreachable (); |
| 10002 | } |
| 10003 | case dw_val_class_loc: |
| 10004 | if (dwarf_version >= 4) |
| 10005 | return DW_FORM_exprloc; |
| 10006 | switch (constant_size (value: size_of_locs (loc: AT_loc (a)))) |
| 10007 | { |
| 10008 | case 1: |
| 10009 | return DW_FORM_block1; |
| 10010 | case 2: |
| 10011 | return DW_FORM_block2; |
| 10012 | case 4: |
| 10013 | return DW_FORM_block4; |
| 10014 | default: |
| 10015 | gcc_unreachable (); |
| 10016 | } |
| 10017 | case dw_val_class_const: |
| 10018 | return DW_FORM_sdata; |
| 10019 | case dw_val_class_unsigned_const: |
| 10020 | switch (constant_size (value: AT_unsigned (a))) |
| 10021 | { |
| 10022 | case 1: |
| 10023 | return DW_FORM_data1; |
| 10024 | case 2: |
| 10025 | return DW_FORM_data2; |
| 10026 | case 4: |
| 10027 | /* In DWARF3 DW_AT_data_member_location with |
| 10028 | DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not |
| 10029 | constant, so we need to use DW_FORM_udata if we need |
| 10030 | a large constant. */ |
| 10031 | if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location) |
| 10032 | return DW_FORM_udata; |
| 10033 | return DW_FORM_data4; |
| 10034 | case 8: |
| 10035 | if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location) |
| 10036 | return DW_FORM_udata; |
| 10037 | return DW_FORM_data8; |
| 10038 | default: |
| 10039 | gcc_unreachable (); |
| 10040 | } |
| 10041 | case dw_val_class_const_implicit: |
| 10042 | case dw_val_class_unsigned_const_implicit: |
| 10043 | case dw_val_class_file_implicit: |
| 10044 | return DW_FORM_implicit_const; |
| 10045 | case dw_val_class_const_double: |
| 10046 | switch (HOST_BITS_PER_WIDE_INT) |
| 10047 | { |
| 10048 | case 8: |
| 10049 | return DW_FORM_data2; |
| 10050 | case 16: |
| 10051 | return DW_FORM_data4; |
| 10052 | case 32: |
| 10053 | return DW_FORM_data8; |
| 10054 | case 64: |
| 10055 | if (dwarf_version >= 5) |
| 10056 | return DW_FORM_data16; |
| 10057 | /* FALLTHRU */ |
| 10058 | default: |
| 10059 | return DW_FORM_block1; |
| 10060 | } |
| 10061 | case dw_val_class_wide_int: |
| 10062 | switch (get_full_len (op: *a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT) |
| 10063 | { |
| 10064 | case 8: |
| 10065 | return DW_FORM_data1; |
| 10066 | case 16: |
| 10067 | return DW_FORM_data2; |
| 10068 | case 32: |
| 10069 | return DW_FORM_data4; |
| 10070 | case 64: |
| 10071 | return DW_FORM_data8; |
| 10072 | case 128: |
| 10073 | if (dwarf_version >= 5) |
| 10074 | return DW_FORM_data16; |
| 10075 | /* FALLTHRU */ |
| 10076 | default: |
| 10077 | return DW_FORM_block1; |
| 10078 | } |
| 10079 | case dw_val_class_symview: |
| 10080 | /* ??? We might use uleb128, but then we'd have to compute |
| 10081 | .debug_info offsets in the assembler. */ |
| 10082 | if (symview_upper_bound <= 0xff) |
| 10083 | return DW_FORM_data1; |
| 10084 | else if (symview_upper_bound <= 0xffff) |
| 10085 | return DW_FORM_data2; |
| 10086 | else if (symview_upper_bound <= 0xffffffff) |
| 10087 | return DW_FORM_data4; |
| 10088 | else |
| 10089 | return DW_FORM_data8; |
| 10090 | case dw_val_class_vec: |
| 10091 | switch (constant_size (value: a->dw_attr_val.v.val_vec.length |
| 10092 | * a->dw_attr_val.v.val_vec.elt_size)) |
| 10093 | { |
| 10094 | case 1: |
| 10095 | return DW_FORM_block1; |
| 10096 | case 2: |
| 10097 | return DW_FORM_block2; |
| 10098 | case 4: |
| 10099 | return DW_FORM_block4; |
| 10100 | default: |
| 10101 | gcc_unreachable (); |
| 10102 | } |
| 10103 | case dw_val_class_flag: |
| 10104 | if (dwarf_version >= 4) |
| 10105 | { |
| 10106 | /* Currently all add_AT_flag calls pass in 1 as last argument, |
| 10107 | so DW_FORM_flag_present can be used. If that ever changes, |
| 10108 | we'll need to use DW_FORM_flag and have some optimization |
| 10109 | in build_abbrev_table that will change those to |
| 10110 | DW_FORM_flag_present if it is set to 1 in all DIEs using |
| 10111 | the same abbrev entry. */ |
| 10112 | gcc_assert (a->dw_attr_val.v.val_flag == 1); |
| 10113 | return DW_FORM_flag_present; |
| 10114 | } |
| 10115 | return DW_FORM_flag; |
| 10116 | case dw_val_class_die_ref: |
| 10117 | if (AT_ref_external (a)) |
| 10118 | { |
| 10119 | if (AT_ref (a)->comdat_type_p) |
| 10120 | return DW_FORM_ref_sig8; |
| 10121 | else |
| 10122 | return DW_FORM_ref_addr; |
| 10123 | } |
| 10124 | else |
| 10125 | return DW_FORM_ref; |
| 10126 | case dw_val_class_fde_ref: |
| 10127 | return DW_FORM_data; |
| 10128 | case dw_val_class_lbl_id: |
| 10129 | return (AT_index (a) == NOT_INDEXED |
| 10130 | ? DW_FORM_addr : dwarf_FORM (form: DW_FORM_addrx)); |
| 10131 | case dw_val_class_lineptr: |
| 10132 | case dw_val_class_macptr: |
| 10133 | case dw_val_class_loclistsptr: |
| 10134 | return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data; |
| 10135 | case dw_val_class_str: |
| 10136 | return AT_string_form (a); |
| 10137 | case dw_val_class_file: |
| 10138 | switch (constant_size (value: maybe_emit_file (fd: a->dw_attr_val.v.val_file))) |
| 10139 | { |
| 10140 | case 1: |
| 10141 | return DW_FORM_data1; |
| 10142 | case 2: |
| 10143 | return DW_FORM_data2; |
| 10144 | case 4: |
| 10145 | return DW_FORM_data4; |
| 10146 | default: |
| 10147 | gcc_unreachable (); |
| 10148 | } |
| 10149 | |
| 10150 | case dw_val_class_data8: |
| 10151 | return DW_FORM_data8; |
| 10152 | |
| 10153 | case dw_val_class_high_pc: |
| 10154 | switch (DWARF2_ADDR_SIZE) |
| 10155 | { |
| 10156 | case 1: |
| 10157 | return DW_FORM_data1; |
| 10158 | case 2: |
| 10159 | return DW_FORM_data2; |
| 10160 | case 4: |
| 10161 | return DW_FORM_data4; |
| 10162 | case 8: |
| 10163 | return DW_FORM_data8; |
| 10164 | default: |
| 10165 | gcc_unreachable (); |
| 10166 | } |
| 10167 | |
| 10168 | case dw_val_class_discr_value: |
| 10169 | return (a->dw_attr_val.v.val_discr_value.pos |
| 10170 | ? DW_FORM_udata |
| 10171 | : DW_FORM_sdata); |
| 10172 | case dw_val_class_discr_list: |
| 10173 | switch (constant_size (value: size_of_discr_list (discr_list: AT_discr_list (a)))) |
| 10174 | { |
| 10175 | case 1: |
| 10176 | return DW_FORM_block1; |
| 10177 | case 2: |
| 10178 | return DW_FORM_block2; |
| 10179 | case 4: |
| 10180 | return DW_FORM_block4; |
| 10181 | default: |
| 10182 | gcc_unreachable (); |
| 10183 | } |
| 10184 | |
| 10185 | default: |
| 10186 | gcc_unreachable (); |
| 10187 | } |
| 10188 | } |
| 10189 | |
| 10190 | /* Output the encoding of an attribute value. */ |
| 10191 | |
| 10192 | static void |
| 10193 | output_value_format (dw_attr_node *a) |
| 10194 | { |
| 10195 | enum dwarf_form form = value_format (a); |
| 10196 | |
| 10197 | dw2_asm_output_data_uleb128 (form, "(%s)" , dwarf_form_name (form)); |
| 10198 | } |
| 10199 | |
| 10200 | /* Given a die and id, produce the appropriate abbreviations. */ |
| 10201 | |
| 10202 | static void |
| 10203 | output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev) |
| 10204 | { |
| 10205 | unsigned ix; |
| 10206 | dw_attr_node *a_attr; |
| 10207 | |
| 10208 | dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)" ); |
| 10209 | dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)" , |
| 10210 | dwarf_tag_name (tag: abbrev->die_tag)); |
| 10211 | |
| 10212 | if (abbrev->die_child != NULL) |
| 10213 | dw2_asm_output_data (1, DW_children_yes, "DW_children_yes" ); |
| 10214 | else |
| 10215 | dw2_asm_output_data (1, DW_children_no, "DW_children_no" ); |
| 10216 | |
| 10217 | for (ix = 0; vec_safe_iterate (v: abbrev->die_attr, ix, ptr: &a_attr); ix++) |
| 10218 | { |
| 10219 | dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)" , |
| 10220 | dwarf_attr_name (attr: a_attr->dw_attr)); |
| 10221 | output_value_format (a: a_attr); |
| 10222 | if (value_format (a: a_attr) == DW_FORM_implicit_const) |
| 10223 | { |
| 10224 | if (AT_class (a: a_attr) == dw_val_class_file_implicit) |
| 10225 | { |
| 10226 | int f = maybe_emit_file (fd: a_attr->dw_attr_val.v.val_file); |
| 10227 | const char *filename = a_attr->dw_attr_val.v.val_file->filename; |
| 10228 | dw2_asm_output_data_sleb128 (f, "(%s)" , filename); |
| 10229 | } |
| 10230 | else |
| 10231 | dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL); |
| 10232 | } |
| 10233 | } |
| 10234 | |
| 10235 | dw2_asm_output_data (1, 0, NULL); |
| 10236 | dw2_asm_output_data (1, 0, NULL); |
| 10237 | } |
| 10238 | |
| 10239 | |
| 10240 | /* Output the .debug_abbrev section which defines the DIE abbreviation |
| 10241 | table. */ |
| 10242 | |
| 10243 | static void |
| 10244 | output_abbrev_section (void) |
| 10245 | { |
| 10246 | unsigned int abbrev_id; |
| 10247 | dw_die_ref abbrev; |
| 10248 | |
| 10249 | FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev) |
| 10250 | if (abbrev_id != 0) |
| 10251 | output_die_abbrevs (abbrev_id, abbrev); |
| 10252 | |
| 10253 | /* Terminate the table. */ |
| 10254 | dw2_asm_output_data (1, 0, NULL); |
| 10255 | } |
| 10256 | |
| 10257 | /* Return a new location list, given the begin and end range, and the |
| 10258 | expression. */ |
| 10259 | |
| 10260 | static inline dw_loc_list_ref |
| 10261 | new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin, |
| 10262 | const char *end, var_loc_view vend, |
| 10263 | const char *section) |
| 10264 | { |
| 10265 | dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> (); |
| 10266 | |
| 10267 | retlist->begin = begin; |
| 10268 | retlist->begin_entry = NULL; |
| 10269 | retlist->end = end; |
| 10270 | retlist->end_entry = NULL; |
| 10271 | retlist->expr = expr; |
| 10272 | retlist->section = section; |
| 10273 | retlist->vbegin = vbegin; |
| 10274 | retlist->vend = vend; |
| 10275 | |
| 10276 | return retlist; |
| 10277 | } |
| 10278 | |
| 10279 | /* Return true iff there's any nonzero view number in the loc list. |
| 10280 | |
| 10281 | ??? When views are not enabled, we'll often extend a single range |
| 10282 | to the entire function, so that we emit a single location |
| 10283 | expression rather than a location list. With views, even with a |
| 10284 | single range, we'll output a list if start or end have a nonzero |
| 10285 | view. If we change this, we may want to stop splitting a single |
| 10286 | range in dw_loc_list just because of a nonzero view, even if it |
| 10287 | straddles across hot/cold partitions. */ |
| 10288 | |
| 10289 | static bool |
| 10290 | loc_list_has_views (dw_loc_list_ref list) |
| 10291 | { |
| 10292 | if (!debug_variable_location_views) |
| 10293 | return false; |
| 10294 | |
| 10295 | for (dw_loc_list_ref loc = list; |
| 10296 | loc != NULL; loc = loc->dw_loc_next) |
| 10297 | if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend)) |
| 10298 | return true; |
| 10299 | |
| 10300 | return false; |
| 10301 | } |
| 10302 | |
| 10303 | /* Generate a new internal symbol for this location list node, if it |
| 10304 | hasn't got one yet. */ |
| 10305 | |
| 10306 | static inline void |
| 10307 | gen_llsym (dw_loc_list_ref list) |
| 10308 | { |
| 10309 | gcc_assert (!list->ll_symbol); |
| 10310 | list->ll_symbol = gen_internal_sym (prefix: "LLST" ); |
| 10311 | |
| 10312 | if (!loc_list_has_views (list)) |
| 10313 | return; |
| 10314 | |
| 10315 | if (dwarf2out_locviews_in_attribute ()) |
| 10316 | { |
| 10317 | /* Use the same label_num for the view list. */ |
| 10318 | label_num--; |
| 10319 | list->vl_symbol = gen_internal_sym (prefix: "LVUS" ); |
| 10320 | } |
| 10321 | else |
| 10322 | list->vl_symbol = list->ll_symbol; |
| 10323 | } |
| 10324 | |
| 10325 | /* Generate a symbol for the list, but only if we really want to emit |
| 10326 | it as a list. */ |
| 10327 | |
| 10328 | static inline void |
| 10329 | maybe_gen_llsym (dw_loc_list_ref list) |
| 10330 | { |
| 10331 | if (!list || (!list->dw_loc_next && !loc_list_has_views (list))) |
| 10332 | return; |
| 10333 | |
| 10334 | gen_llsym (list); |
| 10335 | } |
| 10336 | |
| 10337 | /* Determine whether or not to skip loc_list entry CURR. If SIZEP is |
| 10338 | NULL, don't consider size of the location expression. If we're not |
| 10339 | to skip it, and SIZEP is non-null, store the size of CURR->expr's |
| 10340 | representation in *SIZEP. */ |
| 10341 | |
| 10342 | static bool |
| 10343 | skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL) |
| 10344 | { |
| 10345 | /* Don't output an entry that starts and ends at the same address. */ |
| 10346 | if (strcmp (s1: curr->begin, s2: curr->end) == 0 |
| 10347 | && curr->vbegin == curr->vend && !curr->force) |
| 10348 | return true; |
| 10349 | |
| 10350 | if (!sizep) |
| 10351 | return false; |
| 10352 | |
| 10353 | unsigned long size = size_of_locs (loc: curr->expr); |
| 10354 | |
| 10355 | /* If the expression is too large, drop it on the floor. We could |
| 10356 | perhaps put it into DW_TAG_dwarf_procedure and refer to that |
| 10357 | in the expression, but >= 64KB expressions for a single value |
| 10358 | in a single range are unlikely very useful. */ |
| 10359 | if (dwarf_version < 5 && size > 0xffff) |
| 10360 | return true; |
| 10361 | |
| 10362 | *sizep = size; |
| 10363 | |
| 10364 | return false; |
| 10365 | } |
| 10366 | |
| 10367 | /* Output a view pair loclist entry for CURR, if it requires one. */ |
| 10368 | |
| 10369 | static void |
| 10370 | dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr) |
| 10371 | { |
| 10372 | if (!dwarf2out_locviews_in_loclist ()) |
| 10373 | return; |
| 10374 | |
| 10375 | if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend)) |
| 10376 | return; |
| 10377 | |
| 10378 | #ifdef DW_LLE_view_pair |
| 10379 | dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair" ); |
| 10380 | |
| 10381 | if (dwarf2out_as_locview_support) |
| 10382 | { |
| 10383 | if (ZERO_VIEW_P (curr->vbegin)) |
| 10384 | dw2_asm_output_data_uleb128 (0, "Location view begin" ); |
| 10385 | else |
| 10386 | { |
| 10387 | char label[MAX_ARTIFICIAL_LABEL_BYTES]; |
| 10388 | ASM_GENERATE_INTERNAL_LABEL (label, "LVU" , curr->vbegin); |
| 10389 | dw2_asm_output_symname_uleb128 (label, "Location view begin" ); |
| 10390 | } |
| 10391 | |
| 10392 | if (ZERO_VIEW_P (curr->vend)) |
| 10393 | dw2_asm_output_data_uleb128 (0, "Location view end" ); |
| 10394 | else |
| 10395 | { |
| 10396 | char label[MAX_ARTIFICIAL_LABEL_BYTES]; |
| 10397 | ASM_GENERATE_INTERNAL_LABEL (label, "LVU" , curr->vend); |
| 10398 | dw2_asm_output_symname_uleb128 (label, "Location view end" ); |
| 10399 | } |
| 10400 | } |
| 10401 | else |
| 10402 | { |
| 10403 | dw2_asm_output_data_uleb128 (ZERO_VIEW_P (curr->vbegin) |
| 10404 | ? 0 : curr->vbegin, "Location view begin" ); |
| 10405 | dw2_asm_output_data_uleb128 (ZERO_VIEW_P (curr->vend) |
| 10406 | ? 0 : curr->vend, "Location view end" ); |
| 10407 | } |
| 10408 | #endif /* DW_LLE_view_pair */ |
| 10409 | |
| 10410 | return; |
| 10411 | } |
| 10412 | |
/* Output the location list given to us, choosing the entry encoding
   (DW_LLE_* for DWARF 5, GNU extensions or address pairs otherwise)
   based on dwarf_version, -gsplit-dwarf and assembler capabilities.
   Each list is emitted at most once.  */

static void
output_loc_list (dw_loc_list_ref list_head)
{
  /* Count of view-list entries and of location-list entries actually
     emitted; they are cross-checked by the assert at the end.  */
  int vcount = 0, lcount = 0;

  if (list_head->emitted)
    return;
  list_head->emitted = true;

  /* If views are emitted as a separate attribute, output the view
     list first under its own label, one begin/end view pair per
     non-skipped range.  */
  if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
    {
      ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);

      for (dw_loc_list_ref curr = list_head; curr != NULL;
	   curr = curr->dw_loc_next)
	{
	  unsigned long size;

	  /* Must mirror the skipping decision of the main loop below.  */
	  if (skip_loc_list_entry (curr, sizep: &size))
	    continue;

	  vcount++;

	  /* ?? dwarf_split_debug_info? */
	  if (dwarf2out_as_locview_support)
	    {
	      char label[MAX_ARTIFICIAL_LABEL_BYTES];

	      if (!ZERO_VIEW_P (curr->vbegin))
		{
		  ASM_GENERATE_INTERNAL_LABEL (label, "LVU" , curr->vbegin);
		  dw2_asm_output_symname_uleb128 (label,
						  "View list begin (%s)" ,
						  list_head->vl_symbol);
		}
	      else
		dw2_asm_output_data_uleb128 (0,
					     "View list begin (%s)" ,
					     list_head->vl_symbol);

	      if (!ZERO_VIEW_P (curr->vend))
		{
		  ASM_GENERATE_INTERNAL_LABEL (label, "LVU" , curr->vend);
		  dw2_asm_output_symname_uleb128 (label,
						  "View list end (%s)" ,
						  list_head->vl_symbol);
		}
	      else
		dw2_asm_output_data_uleb128 (0,
					     "View list end (%s)" ,
					     list_head->vl_symbol);
	    }
	  else
	    {
	      dw2_asm_output_data_uleb128 (ZERO_VIEW_P (curr->vbegin)
					   ? 0 : curr->vbegin,
					   "View list begin (%s)" ,
					   list_head->vl_symbol);
	      dw2_asm_output_data_uleb128 (ZERO_VIEW_P (curr->vend)
					   ? 0 : curr->vend,
					   "View list end (%s)" ,
					   list_head->vl_symbol);
	    }
	}
    }

  ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);

  /* Track the section and label of an emitted DW_LLE_base_address so
     consecutive entries in the same section can use offset pairs.  */
  const char *last_section = NULL;
  const char *base_label = NULL;

  /* Walk the location list, and output each range + expression.  */
  for (dw_loc_list_ref curr = list_head; curr != NULL;
       curr = curr->dw_loc_next)
    {
      unsigned long size;

      /* Skip this entry? If we skip it here, we must skip it in the
	 view list above as well. */
      if (skip_loc_list_entry (curr, sizep: &size))
	continue;

      lcount++;

      if (dwarf_version >= 5)
	{
	  if (dwarf_split_debug_info && HAVE_AS_LEB128)
	    {
	      dwarf2out_maybe_output_loclist_view_pair (curr);
	      /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has
		 uleb128 index into .debug_addr and uleb128 length. */
	      dw2_asm_output_data (1, DW_LLE_startx_length,
				   "DW_LLE_startx_length (%s)" ,
				   list_head->ll_symbol);
	      dw2_asm_output_data_uleb128 (curr->begin_entry->index,
					   "Location list range start index "
					   "(%s)" , curr->begin);
	      dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
					    "Location list length (%s)" ,
					    list_head->ll_symbol);
	    }
	  else if (dwarf_split_debug_info)
	    {
	      dwarf2out_maybe_output_loclist_view_pair (curr);
	      /* For -gsplit-dwarf without usable .uleb128 support, emit
		 DW_LLE_startx_endx, which has two uleb128 indexes into
		 .debug_addr. */
	      dw2_asm_output_data (1, DW_LLE_startx_endx,
				   "DW_LLE_startx_endx (%s)" ,
				   list_head->ll_symbol);
	      dw2_asm_output_data_uleb128 (curr->begin_entry->index,
					   "Location list range start index "
					   "(%s)" , curr->begin);
	      dw2_asm_output_data_uleb128 (curr->end_entry->index,
					   "Location list range end index "
					   "(%s)" , curr->end);
	    }
	  else if (!have_multiple_function_sections && HAVE_AS_LEB128)
	    {
	      dwarf2out_maybe_output_loclist_view_pair (curr);
	      /* If all code is in .text section, the base address is
		 already provided by the CU attributes.  Use
		 DW_LLE_offset_pair where both addresses are uleb128 encoded
		 offsets against that base. */
	      dw2_asm_output_data (1, DW_LLE_offset_pair,
				   "DW_LLE_offset_pair (%s)" ,
				   list_head->ll_symbol);
	      dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
					    "Location list begin address (%s)" ,
					    list_head->ll_symbol);
	      dw2_asm_output_delta_uleb128 (curr->end, curr->section,
					    "Location list end address (%s)" ,
					    list_head->ll_symbol);
	    }
	  else if (HAVE_AS_LEB128)
	    {
	      /* Otherwise, find out how many consecutive entries could share
		 the same base entry.  If just one, emit DW_LLE_start_length,
		 otherwise emit DW_LLE_base_address for the base address
		 followed by a series of DW_LLE_offset_pair. */
	      if (last_section == NULL || curr->section != last_section)
		{
		  /* Look ahead past empty (non-forced) ranges for the
		     next real entry.  */
		  dw_loc_list_ref curr2;
		  for (curr2 = curr->dw_loc_next; curr2 != NULL;
		       curr2 = curr2->dw_loc_next)
		    {
		      if (strcmp (s1: curr2->begin, s2: curr2->end) == 0
			  && !curr2->force)
			continue;
		      break;
		    }
		  if (curr2 == NULL || curr->section != curr2->section)
		    last_section = NULL;
		  else
		    {
		      last_section = curr->section;
		      base_label = curr->begin;
		      dw2_asm_output_data (1, DW_LLE_base_address,
					   "DW_LLE_base_address (%s)" ,
					   list_head->ll_symbol);
		      dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
					   "Base address (%s)" ,
					   list_head->ll_symbol);
		    }
		}
	      /* Only one entry with the same base address.  Use
		 DW_LLE_start_length with absolute address and uleb128
		 length. */
	      if (last_section == NULL)
		{
		  dwarf2out_maybe_output_loclist_view_pair (curr);
		  dw2_asm_output_data (1, DW_LLE_start_length,
				       "DW_LLE_start_length (%s)" ,
				       list_head->ll_symbol);
		  dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
				       "Location list begin address (%s)" ,
				       list_head->ll_symbol);
		  dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
						"Location list length "
						"(%s)" , list_head->ll_symbol);
		}
	      /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
		 DW_LLE_base_address. */
	      else
		{
		  dwarf2out_maybe_output_loclist_view_pair (curr);
		  dw2_asm_output_data (1, DW_LLE_offset_pair,
				       "DW_LLE_offset_pair (%s)" ,
				       list_head->ll_symbol);
		  dw2_asm_output_delta_uleb128 (curr->begin, base_label,
						"Location list begin address "
						"(%s)" , list_head->ll_symbol);
		  dw2_asm_output_delta_uleb128 (curr->end, base_label,
						"Location list end address "
						"(%s)" , list_head->ll_symbol);
		}
	    }
	  /* The assembler does not support .uleb128 directive.  Emit
	     DW_LLE_start_end with a pair of absolute addresses. */
	  else
	    {
	      dwarf2out_maybe_output_loclist_view_pair (curr);
	      dw2_asm_output_data (1, DW_LLE_start_end,
				   "DW_LLE_start_end (%s)" ,
				   list_head->ll_symbol);
	      dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
				   "Location list begin address (%s)" ,
				   list_head->ll_symbol);
	      dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
				   "Location list end address (%s)" ,
				   list_head->ll_symbol);
	    }
	}
      else if (dwarf_split_debug_info)
	{
	  /* For -gsplit-dwarf -gdwarf-{2,3,4} emit index into .debug_addr
	     and 4 byte length. */
	  dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
			       "Location list start/length entry (%s)" ,
			       list_head->ll_symbol);
	  dw2_asm_output_data_uleb128 (curr->begin_entry->index,
				       "Location list range start index (%s)" ,
				       curr->begin);
	  /* The length field is 4 bytes.  If we ever need to support
	     an 8-byte length, we can add a new DW_LLE code or fall back
	     to DW_LLE_GNU_start_end_entry. */
	  dw2_asm_output_delta (4, curr->end, curr->begin,
				"Location list range length (%s)" ,
				list_head->ll_symbol);
	}
      else if (!have_multiple_function_sections)
	{
	  /* Pair of relative addresses against start of text section. */
	  dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
				"Location list begin address (%s)" ,
				list_head->ll_symbol);
	  dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
				"Location list end address (%s)" ,
				list_head->ll_symbol);
	}
      else
	{
	  /* Pair of absolute addresses. */
	  dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
			       "Location list begin address (%s)" ,
			       list_head->ll_symbol);
	  dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
			       "Location list end address (%s)" ,
			       list_head->ll_symbol);
	}

      /* Output the block length for this list of location operations. */
      if (dwarf_version >= 5)
	dw2_asm_output_data_uleb128 (size, "Location expression size" );
      else
	{
	  /* Pre-DWARF 5 lists use a fixed 2-byte length field;
	     skip_loc_list_entry already dropped anything larger.  */
	  gcc_assert (size <= 0xffff);
	  dw2_asm_output_data (2, size, "Location expression size" );
	}

      output_loc_sequence (loc: curr->expr, for_eh_or_skip: -1);
    }

  /* And finally list termination. */
  if (dwarf_version >= 5)
    dw2_asm_output_data (1, DW_LLE_end_of_list,
			 "DW_LLE_end_of_list (%s)" , list_head->ll_symbol);
  else if (dwarf_split_debug_info)
    dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
			 "Location list terminator (%s)" ,
			 list_head->ll_symbol);
  else
    {
      dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
			   "Location list terminator begin (%s)" ,
			   list_head->ll_symbol);
      dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
			   "Location list terminator end (%s)" ,
			   list_head->ll_symbol);
    }

  /* If a view list was emitted, the two loops above must have agreed
     on which entries to skip.  */
  gcc_assert (!list_head->vl_symbol
	      || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
}
| 10699 | |
/* Output a range_list offset into the .debug_ranges or .debug_rnglists
   section.  Emit a relocated reference if val_entry is NULL, otherwise,
   emit an indirect reference.  */

static void
output_range_list_offset (dw_attr_node *a)
{
  const char *name = dwarf_attr_name (attr: a->dw_attr);

  if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
    {
      if (dwarf_version >= 5)
	{
	  /* DWARF 5: reference the per-range label recorded in the
	     ranges table.  */
	  dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
	  dw2_asm_output_offset (dwarf_offset_size, r->label,
				 debug_ranges_section, "%s" , name);
	}
      else
	{
	  /* Pre-DWARF 5: temporarily append "+0x<byte offset>" to the
	     section start label, emit the relocated reference, then
	     restore the label by putting the NUL back at P.  Each
	     range entry occupies a pair of addresses, hence the
	     2 * DWARF2_ADDR_SIZE scaling.  */
	  char *p = strchr (s: ranges_section_label, c: '\0');
	  sprintf (s: p, format: "+" HOST_WIDE_INT_PRINT_HEX,
		   a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
	  dw2_asm_output_offset (dwarf_offset_size, ranges_section_label,
				 debug_ranges_section, "%s" , name);
	  *p = '\0';
	}
    }
  else if (dwarf_version >= 5)
    {
      /* Indirect reference: emit the DW_FORM_rnglistx uleb128 index.  */
      dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
      gcc_assert (rnglist_idx);
      dw2_asm_output_data_uleb128 (r->idx, "%s" , name);
    }
  else
    /* Pre-DWARF 5 indirect reference: a plain byte offset from the
       start of .debug_ranges.  */
    dw2_asm_output_data (dwarf_offset_size,
			 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
			 "%s (offset from %s)" , name, ranges_section_label);
}
| 10738 | |
| 10739 | /* Output the offset into the debug_loc section. */ |
| 10740 | |
| 10741 | static void |
| 10742 | output_loc_list_offset (dw_attr_node *a) |
| 10743 | { |
| 10744 | char *sym = AT_loc_list (a)->ll_symbol; |
| 10745 | |
| 10746 | gcc_assert (sym); |
| 10747 | if (!dwarf_split_debug_info) |
| 10748 | dw2_asm_output_offset (dwarf_offset_size, sym, debug_loc_section, |
| 10749 | "%s" , dwarf_attr_name (attr: a->dw_attr)); |
| 10750 | else if (dwarf_version >= 5) |
| 10751 | { |
| 10752 | gcc_assert (AT_loc_list (a)->num_assigned); |
| 10753 | dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)" , |
| 10754 | dwarf_attr_name (attr: a->dw_attr), |
| 10755 | sym); |
| 10756 | } |
| 10757 | else |
| 10758 | dw2_asm_output_delta (dwarf_offset_size, sym, loc_section_label, |
| 10759 | "%s" , dwarf_attr_name (attr: a->dw_attr)); |
| 10760 | } |
| 10761 | |
| 10762 | /* Output the offset into the debug_loc section. */ |
| 10763 | |
| 10764 | static void |
| 10765 | output_view_list_offset (dw_attr_node *a) |
| 10766 | { |
| 10767 | char *sym = (*AT_loc_list_ptr (a))->vl_symbol; |
| 10768 | |
| 10769 | gcc_assert (sym); |
| 10770 | if (dwarf_split_debug_info) |
| 10771 | dw2_asm_output_delta (dwarf_offset_size, sym, loc_section_label, |
| 10772 | "%s" , dwarf_attr_name (attr: a->dw_attr)); |
| 10773 | else |
| 10774 | dw2_asm_output_offset (dwarf_offset_size, sym, debug_loc_section, |
| 10775 | "%s" , dwarf_attr_name (attr: a->dw_attr)); |
| 10776 | } |
| 10777 | |
| 10778 | /* Output an attribute's index or value appropriately. */ |
| 10779 | |
| 10780 | static void |
| 10781 | output_attr_index_or_value (dw_attr_node *a) |
| 10782 | { |
| 10783 | const char *name = dwarf_attr_name (attr: a->dw_attr); |
| 10784 | |
| 10785 | if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED) |
| 10786 | { |
| 10787 | dw2_asm_output_data_uleb128 (AT_index (a), "%s" , name); |
| 10788 | return; |
| 10789 | } |
| 10790 | switch (AT_class (a)) |
| 10791 | { |
| 10792 | case dw_val_class_addr: |
| 10793 | dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s" , name); |
| 10794 | break; |
| 10795 | case dw_val_class_high_pc: |
| 10796 | case dw_val_class_lbl_id: |
| 10797 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s" , name); |
| 10798 | break; |
| 10799 | default: |
| 10800 | gcc_unreachable (); |
| 10801 | } |
| 10802 | } |
| 10803 | |
| 10804 | /* Output a type signature. */ |
| 10805 | |
| 10806 | static inline void |
| 10807 | output_signature (const char *sig, const char *name) |
| 10808 | { |
| 10809 | int i; |
| 10810 | |
| 10811 | for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++) |
| 10812 | dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name); |
| 10813 | } |
| 10814 | |
| 10815 | /* Output a discriminant value. */ |
| 10816 | |
| 10817 | static inline void |
| 10818 | output_discr_value (dw_discr_value *discr_value, const char *name) |
| 10819 | { |
| 10820 | if (discr_value->pos) |
| 10821 | dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s" , name); |
| 10822 | else |
| 10823 | dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s" , name); |
| 10824 | } |
| 10825 | |
| 10826 | /* Output the DIE and its attributes. Called recursively to generate |
| 10827 | the definitions of each child DIE. */ |
| 10828 | |
| 10829 | static void |
| 10830 | output_die (dw_die_ref die) |
| 10831 | { |
| 10832 | dw_attr_node *a; |
| 10833 | dw_die_ref c; |
| 10834 | unsigned long size; |
| 10835 | unsigned ix; |
| 10836 | |
| 10837 | dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)" , |
| 10838 | (unsigned long)die->die_offset, |
| 10839 | dwarf_tag_name (tag: die->die_tag)); |
| 10840 | |
| 10841 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
| 10842 | { |
| 10843 | const char *name = dwarf_attr_name (attr: a->dw_attr); |
| 10844 | |
| 10845 | switch (AT_class (a)) |
| 10846 | { |
| 10847 | case dw_val_class_addr: |
| 10848 | output_attr_index_or_value (a); |
| 10849 | break; |
| 10850 | |
| 10851 | case dw_val_class_offset: |
| 10852 | dw2_asm_output_data (dwarf_offset_size, a->dw_attr_val.v.val_offset, |
| 10853 | "%s" , name); |
| 10854 | break; |
| 10855 | |
| 10856 | case dw_val_class_range_list: |
| 10857 | output_range_list_offset (a); |
| 10858 | break; |
| 10859 | |
| 10860 | case dw_val_class_loc: |
| 10861 | size = size_of_locs (loc: AT_loc (a)); |
| 10862 | |
| 10863 | /* Output the block length for this list of location operations. */ |
| 10864 | if (dwarf_version >= 4) |
| 10865 | dw2_asm_output_data_uleb128 (size, "%s" , name); |
| 10866 | else |
| 10867 | dw2_asm_output_data (constant_size (value: size), size, "%s" , name); |
| 10868 | |
| 10869 | output_loc_sequence (loc: AT_loc (a), for_eh_or_skip: -1); |
| 10870 | break; |
| 10871 | |
| 10872 | case dw_val_class_const: |
| 10873 | /* ??? It would be slightly more efficient to use a scheme like is |
| 10874 | used for unsigned constants below, but gdb 4.x does not sign |
| 10875 | extend. Gdb 5.x does sign extend. */ |
| 10876 | dw2_asm_output_data_sleb128 (AT_int (a), "%s" , name); |
| 10877 | break; |
| 10878 | |
| 10879 | case dw_val_class_unsigned_const: |
| 10880 | { |
| 10881 | int csize = constant_size (value: AT_unsigned (a)); |
| 10882 | if (dwarf_version == 3 |
| 10883 | && a->dw_attr == DW_AT_data_member_location |
| 10884 | && csize >= 4) |
| 10885 | dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s" , name); |
| 10886 | else |
| 10887 | dw2_asm_output_data (csize, AT_unsigned (a), "%s" , name); |
| 10888 | } |
| 10889 | break; |
| 10890 | |
| 10891 | case dw_val_class_symview: |
| 10892 | { |
| 10893 | int vsize; |
| 10894 | if (symview_upper_bound <= 0xff) |
| 10895 | vsize = 1; |
| 10896 | else if (symview_upper_bound <= 0xffff) |
| 10897 | vsize = 2; |
| 10898 | else if (symview_upper_bound <= 0xffffffff) |
| 10899 | vsize = 4; |
| 10900 | else |
| 10901 | vsize = 8; |
| 10902 | dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view, |
| 10903 | "%s" , name); |
| 10904 | } |
| 10905 | break; |
| 10906 | |
| 10907 | case dw_val_class_const_implicit: |
| 10908 | if (flag_debug_asm) |
| 10909 | fprintf (stream: asm_out_file, format: "\t\t\t%s %s (" |
| 10910 | HOST_WIDE_INT_PRINT_DEC ")\n" , |
| 10911 | ASM_COMMENT_START, name, AT_int (a)); |
| 10912 | break; |
| 10913 | |
| 10914 | case dw_val_class_unsigned_const_implicit: |
| 10915 | if (flag_debug_asm) |
| 10916 | fprintf (stream: asm_out_file, format: "\t\t\t%s %s (" |
| 10917 | HOST_WIDE_INT_PRINT_HEX ")\n" , |
| 10918 | ASM_COMMENT_START, name, AT_unsigned (a)); |
| 10919 | break; |
| 10920 | |
| 10921 | case dw_val_class_const_double: |
| 10922 | { |
| 10923 | unsigned HOST_WIDE_INT first, second; |
| 10924 | |
| 10925 | if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS) |
| 10926 | dw2_asm_output_data (1, |
| 10927 | HOST_BITS_PER_DOUBLE_INT |
| 10928 | / HOST_BITS_PER_CHAR, |
| 10929 | NULL); |
| 10930 | |
| 10931 | if (WORDS_BIG_ENDIAN) |
| 10932 | { |
| 10933 | first = a->dw_attr_val.v.val_double.high; |
| 10934 | second = a->dw_attr_val.v.val_double.low; |
| 10935 | } |
| 10936 | else |
| 10937 | { |
| 10938 | first = a->dw_attr_val.v.val_double.low; |
| 10939 | second = a->dw_attr_val.v.val_double.high; |
| 10940 | } |
| 10941 | |
| 10942 | dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR, |
| 10943 | first, "%s" , name); |
| 10944 | dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR, |
| 10945 | second, NULL); |
| 10946 | } |
| 10947 | break; |
| 10948 | |
| 10949 | case dw_val_class_wide_int: |
| 10950 | { |
| 10951 | int i; |
| 10952 | int len = get_full_len (op: *a->dw_attr_val.v.val_wide); |
| 10953 | int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR; |
| 10954 | if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS) |
| 10955 | dw2_asm_output_data (1, get_full_len (op: *a->dw_attr_val.v.val_wide) |
| 10956 | * l, NULL); |
| 10957 | |
| 10958 | if (WORDS_BIG_ENDIAN) |
| 10959 | for (i = len - 1; i >= 0; --i) |
| 10960 | { |
| 10961 | dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i), |
| 10962 | "%s" , name); |
| 10963 | name = "" ; |
| 10964 | } |
| 10965 | else |
| 10966 | for (i = 0; i < len; ++i) |
| 10967 | { |
| 10968 | dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i), |
| 10969 | "%s" , name); |
| 10970 | name = "" ; |
| 10971 | } |
| 10972 | } |
| 10973 | break; |
| 10974 | |
| 10975 | case dw_val_class_vec: |
| 10976 | { |
| 10977 | unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size; |
| 10978 | unsigned int len = a->dw_attr_val.v.val_vec.length; |
| 10979 | unsigned int i; |
| 10980 | unsigned char *p; |
| 10981 | |
| 10982 | dw2_asm_output_data (constant_size (value: len * elt_size), |
| 10983 | len * elt_size, "%s" , name); |
| 10984 | if (elt_size > sizeof (HOST_WIDE_INT)) |
| 10985 | { |
| 10986 | elt_size /= 2; |
| 10987 | len *= 2; |
| 10988 | } |
| 10989 | for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array; |
| 10990 | i < len; |
| 10991 | i++, p += elt_size) |
| 10992 | dw2_asm_output_data (elt_size, extract_int (p, elt_size), |
| 10993 | "fp or vector constant word %u" , i); |
| 10994 | break; |
| 10995 | } |
| 10996 | |
| 10997 | case dw_val_class_flag: |
| 10998 | if (dwarf_version >= 4) |
| 10999 | { |
| 11000 | /* Currently all add_AT_flag calls pass in 1 as last argument, |
| 11001 | so DW_FORM_flag_present can be used. If that ever changes, |
| 11002 | we'll need to use DW_FORM_flag and have some optimization |
| 11003 | in build_abbrev_table that will change those to |
| 11004 | DW_FORM_flag_present if it is set to 1 in all DIEs using |
| 11005 | the same abbrev entry. */ |
| 11006 | gcc_assert (AT_flag (a) == 1); |
| 11007 | if (flag_debug_asm) |
| 11008 | fprintf (stream: asm_out_file, format: "\t\t\t%s %s\n" , |
| 11009 | ASM_COMMENT_START, name); |
| 11010 | break; |
| 11011 | } |
| 11012 | dw2_asm_output_data (1, AT_flag (a), "%s" , name); |
| 11013 | break; |
| 11014 | |
| 11015 | case dw_val_class_loc_list: |
| 11016 | output_loc_list_offset (a); |
| 11017 | break; |
| 11018 | |
| 11019 | case dw_val_class_view_list: |
| 11020 | output_view_list_offset (a); |
| 11021 | break; |
| 11022 | |
| 11023 | case dw_val_class_die_ref: |
| 11024 | if (AT_ref_external (a)) |
| 11025 | { |
| 11026 | if (AT_ref (a)->comdat_type_p) |
| 11027 | { |
| 11028 | comdat_type_node *type_node |
| 11029 | = AT_ref (a)->die_id.die_type_node; |
| 11030 | |
| 11031 | gcc_assert (type_node); |
| 11032 | output_signature (sig: type_node->signature, name); |
| 11033 | } |
| 11034 | else |
| 11035 | { |
| 11036 | const char *sym = AT_ref (a)->die_id.die_symbol; |
| 11037 | int size; |
| 11038 | |
| 11039 | gcc_assert (sym); |
| 11040 | /* In DWARF2, DW_FORM_ref_addr is sized by target address |
| 11041 | length, whereas in DWARF3 it's always sized as an |
| 11042 | offset. */ |
| 11043 | if (dwarf_version == 2) |
| 11044 | size = DWARF2_ADDR_SIZE; |
| 11045 | else |
| 11046 | size = dwarf_offset_size; |
| 11047 | /* ??? We cannot unconditionally output die_offset if |
| 11048 | non-zero - others might create references to those |
| 11049 | DIEs via symbols. |
| 11050 | And we do not clear its DIE offset after outputting it |
| 11051 | (and the label refers to the actual DIEs, not the |
| 11052 | DWARF CU unit header which is when using label + offset |
| 11053 | would be the correct thing to do). |
| 11054 | ??? This is the reason for the with_offset flag. */ |
| 11055 | if (AT_ref (a)->with_offset) |
| 11056 | dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset, |
| 11057 | debug_info_section, "%s" , name); |
| 11058 | else |
| 11059 | dw2_asm_output_offset (size, sym, debug_info_section, "%s" , |
| 11060 | name); |
| 11061 | } |
| 11062 | } |
| 11063 | else |
| 11064 | { |
| 11065 | gcc_assert (AT_ref (a)->die_offset); |
| 11066 | dw2_asm_output_data (dwarf_offset_size, AT_ref (a)->die_offset, |
| 11067 | "%s" , name); |
| 11068 | } |
| 11069 | break; |
| 11070 | |
| 11071 | case dw_val_class_fde_ref: |
| 11072 | { |
| 11073 | char l1[MAX_ARTIFICIAL_LABEL_BYTES]; |
| 11074 | |
| 11075 | ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL, |
| 11076 | a->dw_attr_val.v.val_fde_index * 2); |
| 11077 | dw2_asm_output_offset (dwarf_offset_size, l1, debug_frame_section, |
| 11078 | "%s" , name); |
| 11079 | } |
| 11080 | break; |
| 11081 | |
| 11082 | case dw_val_class_vms_delta: |
| 11083 | #ifdef ASM_OUTPUT_DWARF_VMS_DELTA |
| 11084 | dw2_asm_output_vms_delta (dwarf_offset_size, |
| 11085 | AT_vms_delta2 (a), AT_vms_delta1 (a), |
| 11086 | "%s" , name); |
| 11087 | #else |
| 11088 | dw2_asm_output_delta (dwarf_offset_size, |
| 11089 | AT_vms_delta2 (a), AT_vms_delta1 (a), |
| 11090 | "%s" , name); |
| 11091 | #endif |
| 11092 | break; |
| 11093 | |
| 11094 | case dw_val_class_lbl_id: |
| 11095 | output_attr_index_or_value (a); |
| 11096 | break; |
| 11097 | |
| 11098 | case dw_val_class_lineptr: |
| 11099 | dw2_asm_output_offset (dwarf_offset_size, AT_lbl (a), |
| 11100 | debug_line_section, "%s" , name); |
| 11101 | break; |
| 11102 | |
| 11103 | case dw_val_class_macptr: |
| 11104 | dw2_asm_output_offset (dwarf_offset_size, AT_lbl (a), |
| 11105 | debug_macinfo_section, "%s" , name); |
| 11106 | break; |
| 11107 | |
| 11108 | case dw_val_class_loclistsptr: |
| 11109 | dw2_asm_output_offset (dwarf_offset_size, AT_lbl (a), |
| 11110 | debug_loc_section, "%s" , name); |
| 11111 | break; |
| 11112 | |
| 11113 | case dw_val_class_str: |
| 11114 | if (a->dw_attr_val.v.val_str->form == DW_FORM_strp) |
| 11115 | dw2_asm_output_offset (dwarf_offset_size, |
| 11116 | a->dw_attr_val.v.val_str->label, |
| 11117 | debug_str_section, |
| 11118 | "%s: \"%s\"" , name, AT_string (a)); |
| 11119 | else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp) |
| 11120 | dw2_asm_output_offset (dwarf_offset_size, |
| 11121 | a->dw_attr_val.v.val_str->label, |
| 11122 | debug_line_str_section, |
| 11123 | "%s: \"%s\"" , name, AT_string (a)); |
| 11124 | else if (a->dw_attr_val.v.val_str->form == dwarf_FORM (form: DW_FORM_strx)) |
| 11125 | dw2_asm_output_data_uleb128 (AT_index (a), |
| 11126 | "%s: \"%s\"" , name, AT_string (a)); |
| 11127 | else |
| 11128 | dw2_asm_output_nstring (AT_string (a), -1, "%s" , name); |
| 11129 | break; |
| 11130 | |
| 11131 | case dw_val_class_file: |
| 11132 | { |
| 11133 | int f = maybe_emit_file (fd: a->dw_attr_val.v.val_file); |
| 11134 | |
| 11135 | dw2_asm_output_data (constant_size (value: f), f, "%s (%s)" , name, |
| 11136 | a->dw_attr_val.v.val_file->filename); |
| 11137 | break; |
| 11138 | } |
| 11139 | |
| 11140 | case dw_val_class_file_implicit: |
| 11141 | if (flag_debug_asm) |
| 11142 | fprintf (stream: asm_out_file, format: "\t\t\t%s %s (%d, %s)\n" , |
| 11143 | ASM_COMMENT_START, name, |
| 11144 | maybe_emit_file (fd: a->dw_attr_val.v.val_file), |
| 11145 | a->dw_attr_val.v.val_file->filename); |
| 11146 | break; |
| 11147 | |
| 11148 | case dw_val_class_data8: |
| 11149 | { |
| 11150 | int i; |
| 11151 | |
| 11152 | for (i = 0; i < 8; i++) |
| 11153 | dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i], |
| 11154 | i == 0 ? "%s" : NULL, name); |
| 11155 | break; |
| 11156 | } |
| 11157 | |
| 11158 | case dw_val_class_high_pc: |
| 11159 | dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a), |
| 11160 | get_AT_low_pc (die), "DW_AT_high_pc" ); |
| 11161 | break; |
| 11162 | |
| 11163 | case dw_val_class_discr_value: |
| 11164 | output_discr_value (discr_value: &a->dw_attr_val.v.val_discr_value, name); |
| 11165 | break; |
| 11166 | |
| 11167 | case dw_val_class_discr_list: |
| 11168 | { |
| 11169 | dw_discr_list_ref list = AT_discr_list (a); |
| 11170 | const int size = size_of_discr_list (discr_list: list); |
| 11171 | |
| 11172 | /* This is a block, so output its length first. */ |
| 11173 | dw2_asm_output_data (constant_size (value: size), size, |
| 11174 | "%s: block size" , name); |
| 11175 | |
| 11176 | for (; list != NULL; list = list->dw_discr_next) |
| 11177 | { |
| 11178 | /* One byte for the discriminant value descriptor, and then as |
| 11179 | many LEB128 numbers as required. */ |
| 11180 | if (list->dw_discr_range) |
| 11181 | dw2_asm_output_data (1, DW_DSC_range, |
| 11182 | "%s: DW_DSC_range" , name); |
| 11183 | else |
| 11184 | dw2_asm_output_data (1, DW_DSC_label, |
| 11185 | "%s: DW_DSC_label" , name); |
| 11186 | |
| 11187 | output_discr_value (discr_value: &list->dw_discr_lower_bound, name); |
| 11188 | if (list->dw_discr_range) |
| 11189 | output_discr_value (discr_value: &list->dw_discr_upper_bound, name); |
| 11190 | } |
| 11191 | break; |
| 11192 | } |
| 11193 | |
| 11194 | default: |
| 11195 | gcc_unreachable (); |
| 11196 | } |
| 11197 | } |
| 11198 | |
| 11199 | FOR_EACH_CHILD (die, c, output_die (c)); |
| 11200 | |
| 11201 | /* Add null byte to terminate sibling list. */ |
| 11202 | if (die->die_child != NULL) |
| 11203 | dw2_asm_output_data (1, 0, "end of children of DIE %#lx" , |
| 11204 | (unsigned long) die->die_offset); |
| 11205 | } |
| 11206 | |
| 11207 | /* Output the dwarf version number. */ |
| 11208 | |
| 11209 | static void |
| 11210 | output_dwarf_version () |
| 11211 | { |
| 11212 | /* ??? For now, if -gdwarf-6 is specified, we output version 5 with |
| 11213 | views in loclist. That will change eventually. */ |
| 11214 | if (dwarf_version == 6) |
| 11215 | { |
| 11216 | static bool once; |
| 11217 | if (!once) |
| 11218 | { |
| 11219 | warning (0, "%<-gdwarf-6%> is output as version 5 with " |
| 11220 | "incompatibilities" ); |
| 11221 | once = true; |
| 11222 | } |
| 11223 | dw2_asm_output_data (2, 5, "DWARF version number" ); |
| 11224 | } |
| 11225 | else |
| 11226 | dw2_asm_output_data (2, dwarf_version, "DWARF version number" ); |
| 11227 | } |
| 11228 | |
/* Output the compilation-unit header that appears at the beginning of the
   .debug_info section, and precedes the DIE descriptions.
| 11231 | |
| 11232 | static void |
| 11233 | (enum dwarf_unit_type ut) |
| 11234 | { |
| 11235 | if (!XCOFF_DEBUGGING_INFO) |
| 11236 | { |
| 11237 | if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4) |
| 11238 | dw2_asm_output_data (4, 0xffffffff, |
| 11239 | "Initial length escape value indicating 64-bit DWARF extension" ); |
| 11240 | dw2_asm_output_data (dwarf_offset_size, |
| 11241 | next_die_offset - DWARF_INITIAL_LENGTH_SIZE, |
| 11242 | "Length of Compilation Unit Info" ); |
| 11243 | } |
| 11244 | |
| 11245 | output_dwarf_version (); |
| 11246 | if (dwarf_version >= 5) |
| 11247 | { |
| 11248 | const char *name; |
| 11249 | switch (ut) |
| 11250 | { |
| 11251 | case DW_UT_compile: name = "DW_UT_compile" ; break; |
| 11252 | case DW_UT_type: name = "DW_UT_type" ; break; |
| 11253 | case DW_UT_split_compile: name = "DW_UT_split_compile" ; break; |
| 11254 | case DW_UT_split_type: name = "DW_UT_split_type" ; break; |
| 11255 | default: gcc_unreachable (); |
| 11256 | } |
| 11257 | dw2_asm_output_data (1, ut, "%s" , name); |
| 11258 | dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)" ); |
| 11259 | } |
| 11260 | dw2_asm_output_offset (dwarf_offset_size, abbrev_section_label, |
| 11261 | debug_abbrev_section, |
| 11262 | "Offset Into Abbrev. Section" ); |
| 11263 | if (dwarf_version < 5) |
| 11264 | dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)" ); |
| 11265 | } |
| 11266 | |
| 11267 | /* Output the compilation unit DIE and its children. */ |
| 11268 | |
static void
output_comp_unit (dw_die_ref die, int output_if_empty,
		  const unsigned char *dwo_id)
{
  const char *secname, *oldsym;
  char *tmp;

  /* Unless we are outputting main CU, we may throw away empty ones.  */
  if (!output_if_empty && die->die_child == NULL)
    return;

  /* Even if there are no children of this DIE, we must output the information
     about the compilation unit.  Otherwise, on an empty translation unit, we
     will generate a present, but empty, .debug_info section.  IRIX 6.5 `nm'
     will then complain when examining the file.  First mark all the DIEs in
     this CU so we know which get local refs.  */
  mark_dies (die);

  external_ref_hash_type *extern_map = optimize_external_refs (die);

  /* For now, optimize only the main CU, in order to optimize the rest
     we'd need to see all of them earlier.  Leave the rest for post-linking
     tools like DWZ.  */
  if (die == comp_unit_die ())
    abbrev_opt_start = vec_safe_length (v: abbrev_die_table);

  build_abbrev_table (die, extern_map);

  optimize_abbrev_table ();

  delete extern_map;

  /* Initialize the beginning DIE offset - and calculate sizes/offsets.  */
  /* A split (DWO) unit has a larger header — see the DWO-id bytes emitted
     below — so the first DIE starts at a different offset.  */
  next_die_offset = (dwo_id
		     ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
		     : DWARF_COMPILE_UNIT_HEADER_SIZE);
  calc_die_sizes (die);

  oldsym = die->die_id.die_symbol;
  if (oldsym && die->comdat_type_p)
    {
      /* Emit a comdat unit into its own linkonce section, named after its
	 symbol, so the linker can discard duplicate copies.  */
      tmp = XALLOCAVEC (char, strlen (oldsym) + 24);

      sprintf (s: tmp, format: ".gnu.linkonce.wi.%s" , oldsym);
      secname = tmp;
      die->die_id.die_symbol = NULL;
      switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
    }
  else
    {
      switch_to_section (debug_info_section);
      ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
      info_section_emitted = true;
    }

  /* For LTO cross unit DIE refs we want a symbol on the start of the
     debuginfo section, not on the CU DIE.  */
  if ((flag_generate_lto || flag_generate_offload) && oldsym)
    {
      /* ??? No way to get visibility assembled without a decl.  */
      tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			      get_identifier (oldsym), char_type_node);
      TREE_PUBLIC (decl) = true;
      TREE_STATIC (decl) = true;
      DECL_ARTIFICIAL (decl) = true;
      DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
      DECL_VISIBILITY_SPECIFIED (decl) = true;
      targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
#ifdef ASM_WEAKEN_LABEL
      /* We prefer a .weak because that handles duplicates from duplicate
	 archive members in a graceful way.  */
      ASM_WEAKEN_LABEL (asm_out_file, oldsym);
#else
      targetm.asm_out.globalize_label (asm_out_file, oldsym);
#endif
      ASM_OUTPUT_LABEL (asm_out_file, oldsym);
    }

  /* Output debugging information.  */
  output_compilation_unit_header (ut: dwo_id
				  ? DW_UT_split_compile : DW_UT_compile);
  if (dwarf_version >= 5)
    {
      /* DWARF 5 split-compile units carry an 8-byte DWO id in the header.  */
      if (dwo_id != NULL)
	for (int i = 0; i < 8; i++)
	  dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
    }
  output_die (die);

  /* Leave the marks on the main CU, so we can check them in
     output_pubnames.  */
  if (oldsym)
    {
      unmark_dies (die);
      die->die_id.die_symbol = oldsym;
    }
}
| 11366 | |
| 11367 | /* Whether to generate the DWARF accelerator tables in .debug_pubnames |
| 11368 | and .debug_pubtypes. This is configured per-target, but can be |
| 11369 | overridden by the -gpubnames or -gno-pubnames options. */ |
| 11370 | |
| 11371 | static inline bool |
| 11372 | want_pubnames (void) |
| 11373 | { |
| 11374 | if (debug_info_level <= DINFO_LEVEL_TERSE |
| 11375 | /* Names and types go to the early debug part only. */ |
| 11376 | || in_lto_p) |
| 11377 | return false; |
| 11378 | if (debug_generate_pub_sections != -1) |
| 11379 | return debug_generate_pub_sections; |
| 11380 | return targetm.want_debug_pub_sections; |
| 11381 | } |
| 11382 | |
| 11383 | /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */ |
| 11384 | |
| 11385 | static void |
| 11386 | add_AT_pubnames (dw_die_ref die) |
| 11387 | { |
| 11388 | if (want_pubnames ()) |
| 11389 | add_AT_flag (die, attr_kind: DW_AT_GNU_pubnames, flag: 1); |
| 11390 | } |
| 11391 | |
| 11392 | /* Add a string attribute value to a skeleton DIE. */ |
| 11393 | |
| 11394 | static inline void |
| 11395 | add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, |
| 11396 | const char *str) |
| 11397 | { |
| 11398 | dw_attr_node attr; |
| 11399 | struct indirect_string_node *node; |
| 11400 | |
| 11401 | if (! skeleton_debug_str_hash) |
| 11402 | skeleton_debug_str_hash |
| 11403 | = hash_table<indirect_string_hasher>::create_ggc (n: 10); |
| 11404 | |
| 11405 | node = find_AT_string_in_table (str, table: skeleton_debug_str_hash); |
| 11406 | find_string_form (node); |
| 11407 | if (node->form == dwarf_FORM (form: DW_FORM_strx)) |
| 11408 | node->form = DW_FORM_strp; |
| 11409 | |
| 11410 | attr.dw_attr = attr_kind; |
| 11411 | attr.dw_attr_val.val_class = dw_val_class_str; |
| 11412 | attr.dw_attr_val.val_entry = NULL; |
| 11413 | attr.dw_attr_val.v.val_str = node; |
| 11414 | add_dwarf_attr (die, attr: &attr); |
| 11415 | } |
| 11416 | |
| 11417 | /* Helper function to generate top-level dies for skeleton debug_info and |
| 11418 | debug_types. */ |
| 11419 | |
| 11420 | static void |
| 11421 | add_top_level_skeleton_die_attrs (dw_die_ref die) |
| 11422 | { |
| 11423 | const char *dwo_file_name = concat (aux_base_name, ".dwo" , NULL); |
| 11424 | const char *comp_dir = comp_dir_string (); |
| 11425 | |
| 11426 | add_skeleton_AT_string (die, attr_kind: dwarf_AT (at: DW_AT_dwo_name), str: dwo_file_name); |
| 11427 | if (comp_dir != NULL) |
| 11428 | add_skeleton_AT_string (die, attr_kind: DW_AT_comp_dir, str: comp_dir); |
| 11429 | add_AT_pubnames (die); |
| 11430 | if (addr_index_table != NULL && addr_index_table->size () > 0) |
| 11431 | add_AT_lineptr (die, attr_kind: dwarf_AT (at: DW_AT_addr_base), label: debug_addr_section_label); |
| 11432 | } |
| 11433 | |
| 11434 | /* Output skeleton debug sections that point to the dwo file. */ |
| 11435 | |
static void
output_skeleton_debug_sections (dw_die_ref comp_unit,
				const unsigned char *dwo_id)
{
  /* These attributes will be found in the full debug_info section.  */
  remove_AT (die: comp_unit, attr_kind: DW_AT_producer);
  remove_AT (die: comp_unit, attr_kind: DW_AT_language);
  remove_AT (die: comp_unit, attr_kind: DW_AT_language_name);
  remove_AT (die: comp_unit, attr_kind: DW_AT_language_version);

  switch_to_section (debug_skeleton_info_section);
  ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);

  /* Produce the skeleton compilation-unit header.  This one differs enough from
     a normal CU header that it's better not to call output_compilation_unit
     header.  */
  /* 64-bit DWARF: 0xffffffff escape word before the 8-byte length.  */
  if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
    dw2_asm_output_data (4, 0xffffffff,
			 "Initial length escape value indicating 64-bit "
			 "DWARF extension" );

  /* The length covers the rest of the header plus the skeleton CU DIE.  */
  dw2_asm_output_data (dwarf_offset_size,
		       DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
		       - DWARF_INITIAL_LENGTH_SIZE
		       + size_of_die (die: comp_unit),
		       "Length of Compilation Unit Info" );
  output_dwarf_version ();
  if (dwarf_version >= 5)
    {
      dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton" );
      dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)" );
    }
  dw2_asm_output_offset (dwarf_offset_size, debug_skeleton_abbrev_section_label,
			 debug_skeleton_abbrev_section,
			 "Offset Into Abbrev. Section" );
  /* Pre-v5 headers place the pointer size here; v5 already emitted it
     above and instead carries the 8-byte DWO id that matches this
     skeleton with its .dwo file.  */
  if (dwarf_version < 5)
    dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)" );
  else
    for (int i = 0; i < 8; i++)
      dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);

  /* The skeleton CU DIE uses a fixed abbrev number; its (one-entry)
     abbrev table is emitted just below.  */
  comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
  output_die (die: comp_unit);

  /* Build the skeleton debug_abbrev section.  */
  switch_to_section (debug_skeleton_abbrev_section);
  ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);

  output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, abbrev: comp_unit);

  dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev" );
}
| 11488 | |
| 11489 | /* Output a comdat type unit DIE and its children. */ |
| 11490 | |
static void
output_comdat_type_unit (comdat_type_node *node,
			 bool early_lto_debug ATTRIBUTE_UNUSED)
{
  const char *secname;
  char *tmp;
  int i;
#if defined (OBJECT_FORMAT_ELF)
  tree comdat_key;
#endif

  /* First mark all the DIEs in this CU so we know which get local refs.  */
  mark_dies (die: node->root_die);

  external_ref_hash_type *extern_map = optimize_external_refs (die: node->root_die);

  build_abbrev_table (die: node->root_die, extern_map);

  delete extern_map;
  extern_map = NULL;

  /* Initialize the beginning DIE offset - and calculate sizes/offsets.  */
  next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
  calc_die_sizes (die: node->root_die);

#if defined (OBJECT_FORMAT_ELF)
  /* DWARF 5 folded type units into .debug_info; earlier versions place
     them in .debug_types.  Split-debug and early-LTO output use the
     corresponding .dwo / .gnu.debuglto_ section variants.  */
  if (dwarf_version >= 5)
    {
      if (!dwarf_split_debug_info)
	secname = early_lto_debug ? DEBUG_LTO_INFO_SECTION : DEBUG_INFO_SECTION;
      else
	secname = (early_lto_debug
		   ? DEBUG_LTO_DWO_INFO_SECTION : DEBUG_DWO_INFO_SECTION);
    }
  else if (!dwarf_split_debug_info)
    secname = early_lto_debug ? ".gnu.debuglto_.debug_types" : ".debug_types" ;
  else
    secname = (early_lto_debug
	       ? ".gnu.debuglto_.debug_types.dwo" : ".debug_types.dwo" );

  /* The COMDAT group key is "wi."/"wt." followed by the type signature
     printed in hex, so identical type units from different translation
     units can be merged at link time (SECTION_LINKONCE).  */
  tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
  sprintf (s: tmp, dwarf_version >= 5 ? "wi." : "wt." );
  for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
    sprintf (s: tmp + 3 + i * 2, format: "%02x" , node->signature[i] & 0xff);
  comdat_key = get_identifier (tmp);
  targetm.asm_out.named_section (secname,
				 SECTION_DEBUG | SECTION_LINKONCE,
				 comdat_key);
#else
  /* Non-ELF: fall back to a .gnu.linkonce section named by the signature.  */
  tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
  sprintf (tmp, (dwarf_version >= 5
		 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt." ));
  for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
    sprintf (tmp + 17 + i * 2, "%02x" , node->signature[i] & 0xff);
  secname = tmp;
  switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
#endif

  /* Output debugging information.  */
  /* Header, then the signature and the unit-relative offset of the type
     DIE, then the DIE tree itself.  */
  output_compilation_unit_header (dwarf_split_debug_info
				  ? DW_UT_split_type : DW_UT_type);
  output_signature (sig: node->signature, name: "Type Signature" );
  dw2_asm_output_data (dwarf_offset_size, node->type_die->die_offset,
		       "Offset to Type DIE" );
  output_die (die: node->root_die);

  unmark_dies (die: node->root_die);
}
| 11559 | |
| 11560 | /* Return the DWARF2/3 pubname associated with a decl. */ |
| 11561 | |
| 11562 | static const char * |
| 11563 | dwarf2_name (tree decl, int scope) |
| 11564 | { |
| 11565 | if (DECL_NAMELESS (decl)) |
| 11566 | return NULL; |
| 11567 | return lang_hooks.dwarf_name (decl, scope ? 1 : 0); |
| 11568 | } |
| 11569 | |
| 11570 | /* Add a new entry to .debug_pubnames if appropriate. */ |
| 11571 | |
| 11572 | static void |
| 11573 | add_pubname_string (const char *str, dw_die_ref die) |
| 11574 | { |
| 11575 | pubname_entry e; |
| 11576 | |
| 11577 | e.die = die; |
| 11578 | e.name = xstrdup (str); |
| 11579 | vec_safe_push (v&: pubname_table, obj: e); |
| 11580 | } |
| 11581 | |
| 11582 | static void |
| 11583 | add_pubname (tree decl, dw_die_ref die) |
| 11584 | { |
| 11585 | if (!want_pubnames ()) |
| 11586 | return; |
| 11587 | |
| 11588 | /* Don't add items to the table when we expect that the consumer will have |
| 11589 | just read the enclosing die. For example, if the consumer is looking at a |
| 11590 | class_member, it will either be inside the class already, or will have just |
| 11591 | looked up the class to find the member. Either way, searching the class is |
| 11592 | faster than searching the index. */ |
| 11593 | if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent)) |
| 11594 | || is_cu_die (c: die->die_parent) || is_namespace_die (c: die->die_parent)) |
| 11595 | { |
| 11596 | const char *name = dwarf2_name (decl, scope: 1); |
| 11597 | |
| 11598 | if (name) |
| 11599 | add_pubname_string (str: name, die); |
| 11600 | } |
| 11601 | } |
| 11602 | |
| 11603 | /* Add an enumerator to the pubnames section. */ |
| 11604 | |
| 11605 | static void |
| 11606 | add_enumerator_pubname (const char *scope_name, dw_die_ref die) |
| 11607 | { |
| 11608 | pubname_entry e; |
| 11609 | |
| 11610 | gcc_assert (scope_name); |
| 11611 | e.name = concat (scope_name, get_AT_string (die, attr_kind: DW_AT_name), NULL); |
| 11612 | e.die = die; |
| 11613 | vec_safe_push (v&: pubname_table, obj: e); |
| 11614 | } |
| 11615 | |
| 11616 | /* Add a new entry to .debug_pubtypes if appropriate. */ |
| 11617 | |
| 11618 | static void |
| 11619 | add_pubtype (tree decl, dw_die_ref die) |
| 11620 | { |
| 11621 | pubname_entry e; |
| 11622 | |
| 11623 | if (!want_pubnames ()) |
| 11624 | return; |
| 11625 | |
| 11626 | if ((TREE_PUBLIC (decl) |
| 11627 | || is_cu_die (c: die->die_parent) || is_namespace_die (c: die->die_parent)) |
| 11628 | && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl))) |
| 11629 | { |
| 11630 | tree scope = NULL; |
| 11631 | const char *scope_name = "" ; |
| 11632 | const char *sep = is_cxx () ? "::" : "." ; |
| 11633 | const char *name; |
| 11634 | |
| 11635 | scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL; |
| 11636 | if (scope && TREE_CODE (scope) == NAMESPACE_DECL) |
| 11637 | { |
| 11638 | scope_name = lang_hooks.dwarf_name (scope, 1); |
| 11639 | if (scope_name != NULL && scope_name[0] != '\0') |
| 11640 | scope_name = concat (scope_name, sep, NULL); |
| 11641 | else |
| 11642 | scope_name = "" ; |
| 11643 | } |
| 11644 | |
| 11645 | if (TYPE_P (decl)) |
| 11646 | name = type_tag (decl); |
| 11647 | else |
| 11648 | name = lang_hooks.dwarf_name (decl, 1); |
| 11649 | |
| 11650 | /* If we don't have a name for the type, there's no point in adding |
| 11651 | it to the table. */ |
| 11652 | if (name != NULL && name[0] != '\0') |
| 11653 | { |
| 11654 | e.die = die; |
| 11655 | e.name = concat (scope_name, name, NULL); |
| 11656 | vec_safe_push (v&: pubtype_table, obj: e); |
| 11657 | } |
| 11658 | |
| 11659 | /* Although it might be more consistent to add the pubinfo for the |
| 11660 | enumerators as their dies are created, they should only be added if the |
| 11661 | enum type meets the criteria above. So rather than re-check the parent |
| 11662 | enum type whenever an enumerator die is created, just output them all |
| 11663 | here. This isn't protected by the name conditional because anonymous |
| 11664 | enums don't have names. */ |
| 11665 | if (die->die_tag == DW_TAG_enumeration_type) |
| 11666 | { |
| 11667 | dw_die_ref c; |
| 11668 | |
| 11669 | FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c)); |
| 11670 | } |
| 11671 | } |
| 11672 | } |
| 11673 | |
/* Output a single entry in the pubnames table: the referenced DIE's
   offset, an optional GDB-index flags byte (GNU pubnames extension),
   and the NUL-terminated symbol name.  */

static void
output_pubname (dw_offset die_offset, pubname_entry *entry)
{
  dw_die_ref die = entry->die;
  /* A symbol counts as static (non-global) unless its DIE carries the
     DW_AT_external attribute.  */
  int is_static = get_AT_flag (die, attr_kind: DW_AT_external) ? 0 : 1;

  dw2_asm_output_data (dwarf_offset_size, die_offset, "DIE offset" );

  /* debug_generate_pub_sections == 2 selects the GNU pubnames/pubtypes
     format, which appends a one-byte kind/visibility descriptor that
     gdb uses when building .gdb_index.  */
  if (debug_generate_pub_sections == 2)
    {
      /* This logic follows gdb's method for determining the value of the flag
	 byte.  */
      uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
      switch (die->die_tag)
	{
	case DW_TAG_typedef:
	case DW_TAG_base_type:
	case DW_TAG_subrange_type:
	  /* Plain type entries are always marked static.  */
	  GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
	  GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
	  break;
	case DW_TAG_enumerator:
	  GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
					  GDB_INDEX_SYMBOL_KIND_VARIABLE);
	  /* Enumerators have global visibility in C++ only.  */
	  if (!is_cxx ())
	    GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
	  break;
	case DW_TAG_subprogram:
	  GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
					  GDB_INDEX_SYMBOL_KIND_FUNCTION);
	  /* For Ada the flag byte's static bit is left clear regardless
	     of DW_AT_external.  */
	  if (!is_ada ())
	    GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
	  break;
	case DW_TAG_constant:
	  GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
					  GDB_INDEX_SYMBOL_KIND_VARIABLE);
	  GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
	  break;
	case DW_TAG_variable:
	  GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
					  GDB_INDEX_SYMBOL_KIND_VARIABLE);
	  GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
	  break;
	case DW_TAG_namespace:
	case DW_TAG_imported_declaration:
	  GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
	  break;
	case DW_TAG_class_type:
	case DW_TAG_interface_type:
	case DW_TAG_structure_type:
	case DW_TAG_union_type:
	case DW_TAG_enumeration_type:
	  GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
	  /* Aggregate type names are global only in C++.  */
	  if (!is_cxx ())
	    GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
	  break;
	default:
	  /* An unusual tag.  Leave the flag-byte empty.  */
	  break;
	}
      /* Only the high (non-CU-index) bits of FLAGS are emitted.  */
      dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
			   "GDB-index flags" );
    }

  dw2_asm_output_nstring (entry->name, -1, "external name" );
}
| 11742 | |
| 11743 | |
/* Output the public names table used to speed up access to externally
   visible names; or the public types table used to find type definitions.
   NAMES is either pubname_table or pubtype_table; the same layout is
   emitted for both.  */

static void
output_pubnames (vec<pubname_entry, va_gc> *names)
{
  unsigned i;
  unsigned long pubnames_length = size_of_pubnames (names);
  pubname_entry *pub;

  /* XCOFF supplies section lengths itself; everywhere else emit the
     DWARF initial-length field, with the 0xffffffff escape first when
     64-bit DWARF offsets are in use.  */
  if (!XCOFF_DEBUGGING_INFO)
    {
      if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
	dw2_asm_output_data (4, 0xffffffff,
	  "Initial length escape value indicating 64-bit DWARF extension" );
      dw2_asm_output_data (dwarf_offset_size, pubnames_length,
			   "Pub Info Length" );
    }

  /* Version number for pubnames/pubtypes is independent of dwarf version.  */
  dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version" );

  /* Point back at the compilation unit header this table describes:
     the skeleton CU under -gsplit-dwarf, the regular CU otherwise.  */
  if (dwarf_split_debug_info)
    dw2_asm_output_offset (dwarf_offset_size, debug_skeleton_info_section_label,
			   debug_skeleton_info_section,
			   "Offset of Compilation Unit Info" );
  else
    dw2_asm_output_offset (dwarf_offset_size, debug_info_section_label,
			   debug_info_section,
			   "Offset of Compilation Unit Info" );
  dw2_asm_output_data (dwarf_offset_size, next_die_offset,
		       "Compilation Unit Length" );

  FOR_EACH_VEC_ELT (*names, i, pub)
    {
      if (include_pubname_in_output (table: names, p: pub))
	{
	  dw_offset die_offset = pub->die->die_offset;

	  /* We shouldn't see pubnames for DIEs outside of the main CU.  */
	  if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
	    gcc_assert (pub->die->die_mark);

	  /* If we're putting types in their own .debug_types sections,
	     the .debug_pubtypes table will still point to the compile
	     unit (not the type unit), so we want to use the offset of
	     the skeleton DIE (if there is one).  */
	  if (pub->die->comdat_type_p && names == pubtype_table)
	    {
	      comdat_type_node *type_node = pub->die->die_id.die_type_node;

	      if (type_node != NULL)
		die_offset = (type_node->skeleton_die != NULL
			      ? type_node->skeleton_die->die_offset
			      : comp_unit_die ()->die_offset);
	    }

	  output_pubname (die_offset, entry: pub);
	}
    }

  /* A zero word terminates the table.  */
  dw2_asm_output_data (dwarf_offset_size, 0, NULL);
}
| 11807 | |
| 11808 | /* Output public names and types tables if necessary. */ |
| 11809 | |
| 11810 | static void |
| 11811 | output_pubtables (void) |
| 11812 | { |
| 11813 | if (!want_pubnames () || !info_section_emitted) |
| 11814 | return; |
| 11815 | |
| 11816 | switch_to_section (debug_pubnames_section); |
| 11817 | output_pubnames (names: pubname_table); |
| 11818 | /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2. |
| 11819 | It shouldn't hurt to emit it always, since pure DWARF2 consumers |
| 11820 | simply won't look for the section. */ |
| 11821 | switch_to_section (debug_pubtypes_section); |
| 11822 | output_pubnames (names: pubtype_table); |
| 11823 | } |
| 11824 | |
| 11825 | |
/* Output the information that goes into the .debug_aranges table.
   Namely, define the beginning and ending address range of the
   text section generated for this compilation unit.  The table is
   header + alignment padding + (address, length) pairs + a (0, 0)
   terminator pair.  */

static void
output_aranges (void)
{
  unsigned i;
  unsigned long aranges_length = size_of_aranges ();

  /* XCOFF supplies section lengths itself; elsewhere emit the DWARF
     initial-length field (64-bit escape first when needed).  */
  if (!XCOFF_DEBUGGING_INFO)
    {
      if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
	dw2_asm_output_data (4, 0xffffffff,
	  "Initial length escape value indicating 64-bit DWARF extension" );
      dw2_asm_output_data (dwarf_offset_size, aranges_length,
			   "Length of Address Ranges Info" );
    }

  /* Version number for aranges is still 2, even up to DWARF5.  */
  dw2_asm_output_data (2, 2, "DWARF aranges version" );
  /* Reference the CU this table belongs to (skeleton CU for split DWARF).  */
  if (dwarf_split_debug_info)
    dw2_asm_output_offset (dwarf_offset_size, debug_skeleton_info_section_label,
			   debug_skeleton_info_section,
			   "Offset of Compilation Unit Info" );
  else
    dw2_asm_output_offset (dwarf_offset_size, debug_info_section_label,
			   debug_info_section,
			   "Offset of Compilation Unit Info" );
  dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address" );
  dw2_asm_output_data (1, 0, "Size of Segment Descriptor" );

  /* We need to align to twice the pointer size here.  */
  if (DWARF_ARANGES_PAD_SIZE)
    {
      /* Pad using a 2 byte words so that padding is correct for any
	 pointer size.  */
      dw2_asm_output_data (2, 0, "Pad to %d byte boundary" ,
			   2 * DWARF2_ADDR_SIZE);
      for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
	dw2_asm_output_data (2, 0, NULL);
    }

  /* It is necessary not to output these entries if the sections were
     not used; if the sections were not used, the length will be 0 and
     the address may end up as 0 if the section is discarded by ld
     --gc-sections, leaving an invalid (0, 0) entry that can be
     confused with the terminator.  */
  if (switch_text_ranges)
    {
      /* switch_text_ranges holds alternating end/begin labels of the
	 out-of-line pieces; walk it pairing each begin with the next
	 end, starting from the text section start.  */
      const char *prev_loc = text_section_label;
      const char *loc;
      unsigned idx;

      FOR_EACH_VEC_ELT (*switch_text_ranges, idx, loc)
	if (prev_loc)
	  {
	    dw2_asm_output_addr (DWARF2_ADDR_SIZE, prev_loc, "Address" );
	    dw2_asm_output_delta (DWARF2_ADDR_SIZE, loc, prev_loc, "Length" );
	    prev_loc = NULL;
	  }
	else
	  prev_loc = loc;

      /* An unmatched begin label extends to the end of the text section.  */
      if (prev_loc)
	{
	  dw2_asm_output_addr (DWARF2_ADDR_SIZE, prev_loc, "Address" );
	  dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
				prev_loc, "Length" );
	}
    }

  /* Same pairing scheme for the cold text section.  */
  if (switch_cold_ranges)
    {
      const char *prev_loc = cold_text_section_label;
      const char *loc;
      unsigned idx;

      FOR_EACH_VEC_ELT (*switch_cold_ranges, idx, loc)
	if (prev_loc)
	  {
	    dw2_asm_output_addr (DWARF2_ADDR_SIZE, prev_loc, "Address" );
	    dw2_asm_output_delta (DWARF2_ADDR_SIZE, loc, prev_loc, "Length" );
	    prev_loc = NULL;
	  }
	else
	  prev_loc = loc;

      if (prev_loc)
	{
	  dw2_asm_output_addr (DWARF2_ADDR_SIZE, prev_loc, "Address" );
	  dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
				prev_loc, "Length" );
	}
    }

  /* Functions in their own sections contribute one range each (two if
     the function was split into hot and cold parts).  */
  if (have_multiple_function_sections)
    {
      unsigned fde_idx;
      dw_fde_ref fde;

      FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
	{
	  if (fde->ignored_debug)
	    continue;
	  if (!fde->in_std_section)
	    {
	      dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
				   "Address" );
	      dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
				    fde->dw_fde_begin, "Length" );
	    }
	  if (fde->dw_fde_second_begin && !fde->second_in_std_section)
	    {
	      dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
				   "Address" );
	      dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
				    fde->dw_fde_second_begin, "Length" );
	    }
	}
    }

  /* Output the terminator words.  */
  dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
  dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
}
| 11952 | |
| 11953 | /* Add a new entry to .debug_ranges. Return its index into |
| 11954 | ranges_table vector. */ |
| 11955 | |
| 11956 | static unsigned int |
| 11957 | add_ranges_num (int num, bool maybe_new_sec) |
| 11958 | { |
| 11959 | dw_ranges r = { NULL, .num: num, .idx: 0, .maybe_new_sec: maybe_new_sec, NULL, NULL }; |
| 11960 | vec_safe_push (v&: ranges_table, obj: r); |
| 11961 | return vec_safe_length (v: ranges_table) - 1; |
| 11962 | } |
| 11963 | |
| 11964 | /* Add a new entry to .debug_ranges corresponding to a block, or a |
| 11965 | range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if |
| 11966 | this entry might be in a different section from previous range. */ |
| 11967 | |
| 11968 | static unsigned int |
| 11969 | add_ranges (const_tree block, bool maybe_new_sec) |
| 11970 | { |
| 11971 | return add_ranges_num (num: block ? BLOCK_NUMBER (block) : 0, maybe_new_sec); |
| 11972 | } |
| 11973 | |
| 11974 | /* Note that (*rnglist_table)[offset] is either a head of a rnglist |
| 11975 | chain, or middle entry of a chain that will be directly referred to. */ |
| 11976 | |
| 11977 | static void |
| 11978 | note_rnglist_head (unsigned int offset) |
| 11979 | { |
| 11980 | if (dwarf_version < 5 || (*ranges_table)[offset].label) |
| 11981 | return; |
| 11982 | (*ranges_table)[offset].label = gen_internal_sym (prefix: "LLRL" ); |
| 11983 | } |
| 11984 | |
| 11985 | /* Add a new entry to .debug_ranges corresponding to a pair of labels. |
| 11986 | When using dwarf_split_debug_info, address attributes in dies destined |
| 11987 | for the final executable should be direct references--setting the |
| 11988 | parameter force_direct ensures this behavior. */ |
| 11989 | |
| 11990 | static void |
| 11991 | add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end, |
| 11992 | bool *added, bool force_direct) |
| 11993 | { |
| 11994 | unsigned int in_use = vec_safe_length (v: ranges_by_label); |
| 11995 | unsigned int offset; |
| 11996 | dw_ranges_by_label rbl = { .begin: begin, .end: end }; |
| 11997 | vec_safe_push (v&: ranges_by_label, obj: rbl); |
| 11998 | offset = add_ranges_num (num: -(int)in_use - 1, maybe_new_sec: true); |
| 11999 | if (!*added) |
| 12000 | { |
| 12001 | add_AT_range_list (die, attr_kind: DW_AT_ranges, offset, force_direct); |
| 12002 | *added = true; |
| 12003 | note_rnglist_head (offset); |
| 12004 | if (dwarf_split_debug_info && force_direct) |
| 12005 | (*ranges_table)[offset].idx = DW_RANGES_IDX_SKELETON; |
| 12006 | } |
| 12007 | } |
| 12008 | |
/* Emit .debug_ranges section (the pre-DWARF5 range list format):
   for each ranges_table entry, a (begin, end) address pair, with a
   (0, 0) pair terminating each list.  */

static void
output_ranges (void)
{
  unsigned i;
  /* The "Offset %#x" comment is attached only to the first pair of
     each list; FMT is reset to it after every terminator.  */
  static const char *const start_fmt = "Offset %#x" ;
  const char *fmt = start_fmt;
  dw_ranges *r;

  switch_to_section (debug_ranges_section);
  ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
  FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
    {
      int block_num = r->num;

      /* Positive block_num refers to a lexical block's begin/end labels.  */
      if (block_num > 0)
	{
	  char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
	  char elabel[MAX_ARTIFICIAL_LABEL_BYTES];

	  ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
	  ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);

	  /* If all code is in the text section, then the compilation
	     unit base address defaults to DW_AT_low_pc, which is the
	     base of the text section.  */
	  if (!have_multiple_function_sections)
	    {
	      dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
				    text_section_label,
				    fmt, i * 2 * DWARF2_ADDR_SIZE);
	      dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
				    text_section_label, NULL);
	    }

	  /* Otherwise, the compilation unit base address is zero,
	     which allows us to use absolute addresses, and not worry
	     about whether the target supports cross-section
	     arithmetic.  */
	  else
	    {
	      dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
				   fmt, i * 2 * DWARF2_ADDR_SIZE);
	      dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
	    }

	  fmt = NULL;
	}

      /* Negative block_num stands for an index into ranges_by_label.  */
      else if (block_num < 0)
	{
	  int lab_idx = - block_num - 1;

	  if (!have_multiple_function_sections)
	    {
	      gcc_unreachable ();
#if 0
	      /* If we ever use add_ranges_by_labels () for a single
		 function section, all we have to do is to take out
		 the #if 0 above.  */
	      dw2_asm_output_delta (DWARF2_ADDR_SIZE,
				    (*ranges_by_label)[lab_idx].begin,
				    text_section_label,
				    fmt, i * 2 * DWARF2_ADDR_SIZE);
	      dw2_asm_output_delta (DWARF2_ADDR_SIZE,
				    (*ranges_by_label)[lab_idx].end,
				    text_section_label, NULL);
#endif
	    }
	  else
	    {
	      dw2_asm_output_addr (DWARF2_ADDR_SIZE,
				   (*ranges_by_label)[lab_idx].begin,
				   fmt, i * 2 * DWARF2_ADDR_SIZE);
	      dw2_asm_output_addr (DWARF2_ADDR_SIZE,
				   (*ranges_by_label)[lab_idx].end,
				   NULL);
	    }
	}

      /* block_num == 0 is a list terminator: emit the (0, 0) pair and
	 re-arm the offset comment for the next list.  */
      else
	{
	  dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
	  dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
	  fmt = start_fmt;
	}
    }
}
| 12098 | |
/* Non-zero if .debug_line_str should be used for .debug_line section
   strings or strings that are likely shareable with those.  Requires
   target support for indirect strings and a mergeable .debug_str
   section, and is disabled for split DWARF (see FIXME below).  */
#define DWARF5_USE_DEBUG_LINE_STR \
  (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET		\
   && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0		\
   /* FIXME: there is no .debug_line_str.dwo section,		\
      for -gsplit-dwarf we should use DW_FORM_strx instead.  */	\
   && !dwarf_split_debug_info)
| 12107 | |
| 12108 | |
| 12109 | /* Returns TRUE if we are outputting DWARF5 and the assembler supports |
| 12110 | DWARF5 .debug_line tables using .debug_line_str or we generate |
| 12111 | it ourselves, except for split-dwarf which doesn't have a |
| 12112 | .debug_line_str. */ |
| 12113 | static bool |
| 12114 | asm_outputs_debug_line_str (void) |
| 12115 | { |
| 12116 | if (dwarf_version >= 5 |
| 12117 | && ! output_asm_line_debug_info () |
| 12118 | && DWARF5_USE_DEBUG_LINE_STR) |
| 12119 | return true; |
| 12120 | else |
| 12121 | { |
| 12122 | #if defined(HAVE_AS_GDWARF_5_DEBUG_FLAG) && defined(HAVE_AS_WORKING_DWARF_N_FLAG) |
| 12123 | return !dwarf_split_debug_info && dwarf_version >= 5; |
| 12124 | #else |
| 12125 | return false; |
| 12126 | #endif |
| 12127 | } |
| 12128 | } |
| 12129 | |
| 12130 | /* Return true if it is beneficial to use DW_RLE_base_address{,x}. |
| 12131 | I is index of the following range. */ |
| 12132 | |
| 12133 | static bool |
| 12134 | use_distinct_base_address_for_range (unsigned int i) |
| 12135 | { |
| 12136 | if (i >= vec_safe_length (v: ranges_table)) |
| 12137 | return false; |
| 12138 | |
| 12139 | dw_ranges *r2 = &(*ranges_table)[i]; |
| 12140 | /* Use DW_RLE_base_address{,x} if there is a next range in the |
| 12141 | range list and is guaranteed to be in the same section. */ |
| 12142 | return r2->num != 0 && r2->label == NULL && !r2->maybe_new_sec; |
| 12143 | } |
| 12144 | |
/* Assign .debug_rnglists indexes and unique indexes into the debug_addr
   section when needed.  Mirrors the decisions output_rnglists will make
   so that exactly the required .debug_addr entries get allocated.  */

static void
index_rnglists (void)
{
  unsigned i;
  dw_ranges *r;
  /* Tracks whether a DW_RLE_base_address{,x} is in effect, in lockstep
     with the BASE variable in output_rnglists.  */
  bool base = false;

  FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
    {
      /* List heads (except the skeleton's) get sequential rnglist
	 indexes for the DW_FORM_rnglistx offset table.  */
      if (r->label && r->idx != DW_RANGES_IDX_SKELETON)
	r->idx = rnglist_idx++;

      int block_num = r->num;
      /* Single-section output uses offset pairs relative to the text
	 section label and needs no .debug_addr entries (except that
	 without LEB128 support label-pair entries still do).  */
      if ((HAVE_AS_LEB128 || block_num < 0)
	  && !have_multiple_function_sections)
	continue;
      if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
	base = false;
      if (block_num > 0)
	{
	  char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
	  char elabel[MAX_ARTIFICIAL_LABEL_BYTES];

	  ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
	  ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);

	  if (HAVE_AS_LEB128)
	    {
	      if (!base && use_distinct_base_address_for_range (i: i + 1))
		{
		  /* This entry's begin label doubles as the base
		     address for the following offset pairs.  */
		  r->begin_entry = add_addr_table_entry (addr: xstrdup (blabel),
							 kind: ate_kind_label);
		  base = true;
		}
	      if (base)
		/* If we have a base, no need for further
		   begin_entry/end_entry, as DW_RLE_offset_pair will be
		   used.  */
		continue;
	      r->begin_entry
		= add_addr_table_entry (addr: xstrdup (blabel), kind: ate_kind_label);
	      /* No need for end_entry, DW_RLE_start{,x}_length will use
		 length as opposed to a pair of addresses.  */
	    }
	  else
	    {
	      /* Without LEB128 support, DW_RLE_startx_endx needs both
		 address-table entries.  */
	      r->begin_entry
		= add_addr_table_entry (addr: xstrdup (blabel), kind: ate_kind_label);
	      r->end_entry
		= add_addr_table_entry (addr: xstrdup (elabel), kind: ate_kind_label);
	    }
	}

      /* Negative block_num stands for an index into ranges_by_label.  */
      else if (block_num < 0)
	{
	  int lab_idx = - block_num - 1;
	  const char *blabel = (*ranges_by_label)[lab_idx].begin;
	  const char *elabel = (*ranges_by_label)[lab_idx].end;

	  r->begin_entry
	    = add_addr_table_entry (addr: xstrdup (blabel), kind: ate_kind_label);
	  if (!HAVE_AS_LEB128)
	    r->end_entry
	      = add_addr_table_entry (addr: xstrdup (elabel), kind: ate_kind_label);
	}
    }
}
| 12216 | |
/* Emit .debug_rnglists or (when DWO is true) .debug_rnglists.dwo section
   (the DWARF 5 range list format).  GENERATION disambiguates the internal
   labels when this runs more than once (LTO).  Returns true if some
   range lists were skipped because they belong in the other (.dwo vs.
   skeleton) section, i.e. another pass with !DWO is still needed.  */

static bool
output_rnglists (unsigned generation, bool dwo)
{
  unsigned i;
  dw_ranges *r;
  char l1[MAX_ARTIFICIAL_LABEL_BYTES];
  char l2[MAX_ARTIFICIAL_LABEL_BYTES];
  /* Holds the label of the currently active DW_RLE_base_address.  */
  char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];

  if (dwo)
    switch_to_section (debug_ranges_dwo_section);
  else
    {
      switch_to_section (debug_ranges_section);
      ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
    }
  /* There are up to 4 unique ranges labels per generation.
     See also init_sections_and_labels.  */
  ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
			       2 + 2 * dwo + generation * 6);
  ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
			       3 + 2 * dwo + generation * 6);
  if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
    dw2_asm_output_data (4, 0xffffffff,
			 "Initial length escape value indicating "
			 "64-bit DWARF extension" );
  /* Unit length is the distance between the two labels bracketing
     the header and list bodies.  */
  dw2_asm_output_delta (dwarf_offset_size, l2, l1,
			"Length of Range Lists" );
  ASM_OUTPUT_LABEL (asm_out_file, l1);
  output_dwarf_version ();
  dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size" );
  dw2_asm_output_data (1, 0, "Segment Size" );
  /* Emit the offset table only for -gsplit-dwarf.  If we don't care
     about relocation sizes and primarily care about the size of .debug*
     sections in linked shared libraries and executables, then
     the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
     into it are usually larger than just DW_FORM_sec_offset offsets
     into the .debug_rnglists section.  */
  dw2_asm_output_data (4, dwo ? rnglist_idx : 0,
		       "Offset Entry Count" );
  if (dwo)
    {
      /* The offset table: one offset per indexed list head, relative
	 to the base label, in rnglist_idx order.  */
      ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
      FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
	if (r->label && r->idx != DW_RANGES_IDX_SKELETON)
	  dw2_asm_output_delta (dwarf_offset_size, r->label,
				ranges_base_label, NULL);
    }

  const char *lab = "" ;
  const char *base = NULL;
  /* True while skipping over a list that belongs to the other section
     (skeleton vs. .dwo); skipping ends at the list terminator.  */
  bool skipping = false;
  bool ret = false;
  FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
    {
      int block_num = r->num;

      if (r->label)
	{
	  /* A label marks a new list head.  Under split DWARF each
	     head goes to exactly one of the two sections.  */
	  if (dwarf_split_debug_info
	      && (r->idx == DW_RANGES_IDX_SKELETON) == dwo)
	    {
	      ret = true;
	      skipping = true;
	      continue;
	    }
	  ASM_OUTPUT_LABEL (asm_out_file, r->label);
	  lab = r->label;
	}
      if (skipping)
	{
	  if (block_num == 0)
	    skipping = false;
	  continue;
	}
      /* A new list or a possible section change invalidates the
	 current base address.  */
      if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
	base = NULL;
      if (block_num > 0)
	{
	  char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
	  char elabel[MAX_ARTIFICIAL_LABEL_BYTES];

	  ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
	  ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);

	  if (HAVE_AS_LEB128)
	    {
	      /* If all code is in the text section, then the compilation
		 unit base address defaults to DW_AT_low_pc, which is the
		 base of the text section.  */
	      if (!have_multiple_function_sections)
		{
		  dw2_asm_output_data (1, DW_RLE_offset_pair,
				       "DW_RLE_offset_pair (%s)" , lab);
		  dw2_asm_output_delta_uleb128 (blabel, text_section_label,
						"Range begin address (%s)" , lab);
		  dw2_asm_output_delta_uleb128 (elabel, text_section_label,
						"Range end address (%s)" , lab);
		  continue;
		}
	      /* Establish a base address when the following entries can
		 share it via DW_RLE_offset_pair.  */
	      if (base == NULL && use_distinct_base_address_for_range (i: i + 1))
		{
		  if (dwarf_split_debug_info)
		    {
		      dw2_asm_output_data (1, DW_RLE_base_addressx,
					   "DW_RLE_base_addressx (%s)" , lab);
		      dw2_asm_output_data_uleb128 (r->begin_entry->index,
						   "Base address index (%s)" ,
						   blabel);
		    }
		  else
		    {
		      dw2_asm_output_data (1, DW_RLE_base_address,
					   "DW_RLE_base_address (%s)" , lab);
		      dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
					   "Base address (%s)" , lab);
		    }
		  strcpy (dest: basebuf, src: blabel);
		  base = basebuf;
		}
	      if (base)
		{
		  dw2_asm_output_data (1, DW_RLE_offset_pair,
				       "DW_RLE_offset_pair (%s)" , lab);
		  dw2_asm_output_delta_uleb128 (blabel, base,
						"Range begin address (%s)" , lab);
		  dw2_asm_output_delta_uleb128 (elabel, base,
						"Range end address (%s)" , lab);
		  continue;
		}
	      /* No usable base: emit a start+length entry.  */
	      if (dwarf_split_debug_info)
		{
		  dw2_asm_output_data (1, DW_RLE_startx_length,
				       "DW_RLE_startx_length (%s)" , lab);
		  dw2_asm_output_data_uleb128 (r->begin_entry->index,
					       "Range begin address index "
					       "(%s)" , blabel);
		}
	      else
		{
		  dw2_asm_output_data (1, DW_RLE_start_length,
				       "DW_RLE_start_length (%s)" , lab);
		  dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
				       "Range begin address (%s)" , lab);
		}
	      dw2_asm_output_delta_uleb128 (elabel, blabel,
					    "Range length (%s)" , lab);
	    }
	  /* Without LEB128 support, fall back to explicit start/end
	     pairs (indexed under split DWARF, absolute otherwise).  */
	  else if (dwarf_split_debug_info)
	    {
	      dw2_asm_output_data (1, DW_RLE_startx_endx,
				   "DW_RLE_startx_endx (%s)" , lab);
	      dw2_asm_output_data_uleb128 (r->begin_entry->index,
					   "Range begin address index "
					   "(%s)" , blabel);
	      dw2_asm_output_data_uleb128 (r->end_entry->index,
					   "Range end address index "
					   "(%s)" , elabel);
	    }
	  else
	    {
	      dw2_asm_output_data (1, DW_RLE_start_end,
				   "DW_RLE_start_end (%s)" , lab);
	      dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
				   "Range begin address (%s)" , lab);
	      dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
				   "Range end address (%s)" , lab);
	    }
	}

      /* Negative block_num stands for an index into ranges_by_label.  */
      else if (block_num < 0)
	{
	  int lab_idx = - block_num - 1;
	  const char *blabel = (*ranges_by_label)[lab_idx].begin;
	  const char *elabel = (*ranges_by_label)[lab_idx].end;

	  if (!have_multiple_function_sections)
	    gcc_unreachable ();
	  if (HAVE_AS_LEB128)
	    {
	      if (dwarf_split_debug_info)
		{
		  dw2_asm_output_data (1, DW_RLE_startx_length,
				       "DW_RLE_startx_length (%s)" , lab);
		  dw2_asm_output_data_uleb128 (r->begin_entry->index,
					       "Range begin address index "
					       "(%s)" , blabel);
		}
	      else
		{
		  dw2_asm_output_data (1, DW_RLE_start_length,
				       "DW_RLE_start_length (%s)" , lab);
		  dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
				       "Range begin address (%s)" , lab);
		}
	      dw2_asm_output_delta_uleb128 (elabel, blabel,
					    "Range length (%s)" , lab);
	    }
	  else if (dwarf_split_debug_info)
	    {
	      dw2_asm_output_data (1, DW_RLE_startx_endx,
				   "DW_RLE_startx_endx (%s)" , lab);
	      dw2_asm_output_data_uleb128 (r->begin_entry->index,
					   "Range begin address index "
					   "(%s)" , blabel);
	      dw2_asm_output_data_uleb128 (r->end_entry->index,
					   "Range end address index "
					   "(%s)" , elabel);
	    }
	  else
	    {
	      dw2_asm_output_data (1, DW_RLE_start_end,
				   "DW_RLE_start_end (%s)" , lab);
	      dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
				   "Range begin address (%s)" , lab);
	      dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
				   "Range end address (%s)" , lab);
	    }
	}
      /* block_num == 0: terminate the current list.  */
      else
	dw2_asm_output_data (1, DW_RLE_end_of_list,
			     "DW_RLE_end_of_list (%s)" , lab);
    }
  ASM_OUTPUT_LABEL (asm_out_file, l2);
  return ret;
}
| 12446 | |
/* Data structure containing information about input files.  Used when
   building the .debug_line file/directory tables; sorted by directory
   prefix via file_info_cmp.  */
struct file_info
{
  const char *path;	/* Complete file name.  */
  const char *fname;	/* File name part (points into PATH, past the
			   last directory separator).  */
  int length;		/* Length of entire string.  */
  struct dwarf_file_data * file_idx;	/* Index in input file table.  */
  int dir_idx;		/* Index in directory table.  */
};
| 12456 | |
/* Data structure containing information about directories with source
   files, used when emitting the .debug_line directory table.  */
struct dir_info
{
  const char *path;	/* Path including directory name.  */
  int length;		/* Path length.  */
  int prefix;		/* Index of directory entry which is a prefix.  */
  int count;		/* Number of files in this directory.  */
  int dir_idx;		/* Index of directory used as base.  */
};
| 12467 | |
/* Callback function for file_info comparison.  We sort by looking at
   the directories in the path.  qsort comparator: returns <0, 0 or >0.  */

static int
file_info_cmp (const void *p1, const void *p2)
{
  const struct file_info *const s1 = (const struct file_info *) p1;
  const struct file_info *const s2 = (const struct file_info *) p2;
  const unsigned char *cp1;
  const unsigned char *cp2;

  /* Take care of file names without directories.  We need to make sure that
     we return consistent values to qsort since some will get confused if
     we return the same value when identical operands are passed in opposite
     orders.  So if neither has a directory, return 0 and otherwise return
     1 or -1 depending on which one has the directory.  We want the one with
     the directory to sort after the one without, so all no directory files
     are at the start (normally only the compilation unit file).  */
  if ((s1->path == s1->fname || s2->path == s2->fname))
    return (s2->path == s2->fname) - (s1->path == s1->fname);

  cp1 = (const unsigned char *) s1->path;
  cp2 = (const unsigned char *) s2->path;

  while (1)
    {
      /* NOTE(review): the pre-increments mean byte 0 of each path is
	 never compared; ordering is still deterministic/consistent for
	 qsort since the boundary checks below decide those cases.  */
      ++cp1;
      ++cp2;
      /* Reached the end of the first path?  If so, handle like above,
	 but now we want longer directory prefixes before shorter ones.  */
      if ((cp1 == (const unsigned char *) s1->fname)
	  || (cp2 == (const unsigned char *) s2->fname))
	return ((cp1 == (const unsigned char *) s1->fname)
		- (cp2 == (const unsigned char *) s2->fname));

      /* Character of current path component the same?  */
      else if (*cp1 != *cp2)
	return *cp1 - *cp2;
    }
}
| 12508 | |
/* Accumulator state threaded through file_name_acquire while traversing
   the file table.  */

struct file_name_acquire_data
{
  struct file_info *files;	/* Output array being filled.  */
  int used_files;		/* Number of slots filled so far.  */
  int max_files;		/* Capacity of FILES.  */
};
| 12515 | |
/* Traversal function for the hash table.  Invoked once per
   dwarf_file_data slot; records every file that was assigned an
   emitted number into FNAD->files, splitting each path into its
   directory prefix and file-name part.  Always returns 1 so the
   traversal visits the entire table.  */

int
file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
{
  struct dwarf_file_data *d = *slot;
  struct file_info *fi;
  const char *f;

  gcc_assert (fnad->max_files >= d->emitted_number);

  /* A zero emitted_number means this file was never referenced from a
     line table, so it gets no entry.  */
  if (! d->emitted_number)
    return 1;

  gcc_assert (fnad->max_files != fnad->used_files);

  fi = fnad->files + fnad->used_files++;

  f = d->filename;

  /* Skip all leading "./".  */
  while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
    f += 2;

  /* Create a new array entry.  */
  fi->path = f;
  fi->length = strlen (s: f);
  fi->file_idx = d;

  /* Search for the file name part.  */
  f = strrchr (s: f, DIR_SEPARATOR);
#if defined (DIR_SEPARATOR_2)
  {
    const char *g = strrchr (fi->path, DIR_SEPARATOR_2);

    if (g != NULL)
      {
	/* Whichever separator occurs later in the path delimits the
	   file-name part.  */
	if (f == NULL || f < g)
	  f = g;
      }
  }
#endif

  /* FNAME points just past the last separator, or at the whole path
     when there is no directory component.  */
  fi->fname = f == NULL ? fi->path : f + 1;
  return 1;
}
| 12562 | |
/* Helper function for output_file_names.  Emit a FORM encoded
   string STR, with assembly comment start ENTRY_KIND and
   index IDX.  Only DW_FORM_string (string inlined in the table) and
   DW_FORM_line_strp (offset into .debug_line_str) are supported;
   any other form aborts.  */

static void
output_line_string (enum dwarf_form form, const char *str,
		    const char *entry_kind, unsigned int idx)
{
  switch (form)
    {
    case DW_FORM_string:
      /* NUL-terminated string emitted directly into the table.  */
      dw2_asm_output_nstring (str, -1, "%s: %#x" , entry_kind, idx);
      break;
    case DW_FORM_line_strp:
      /* Lazily create the .debug_line_str string table on first use.  */
      if (!debug_line_str_hash)
	debug_line_str_hash
	  = hash_table<indirect_string_hasher>::create_ggc (n: 10);

      /* Intern STR and emit an offset to its label in the
	 .debug_line_str section.  */
      struct indirect_string_node *node;
      node = find_AT_string_in_table (str, table: debug_line_str_hash);
      set_indirect_string (node);
      node->form = form;
      /* NOTE(review): the assembly comment prints 0 rather than IDX in
	 this branch; comment-only effect, but worth confirming intent.  */
      dw2_asm_output_offset (dwarf_offset_size, node->label,
			     debug_line_str_section, "%s: %#x: \"%s\"" ,
			     entry_kind, 0, node->str);
      break;
    default:
      gcc_unreachable ();
    }
}
| 12593 | |
/* Output the directory table and the file name table of the .debug_line
   header.  We try to minimize the total amount of memory needed by
   factoring common directory prefixes into the directory table.  A
   heuristic is used to avoid large slowdowns with many input files:
   rather than searching all prefix combinations, each directory is
   tested once against the savings of every directory it prefixes.  */

static void
output_file_names (void)
{
  struct file_name_acquire_data fnad;
  int numfiles;
  struct file_info *files;
  struct dir_info *dirs;
  int *saved;
  int *savehere;
  int *backmap;
  int ndirs;
  int idx_offset;
  int i;

  /* No file was ever assigned an emitted number: emit a minimal header
     describing only the compilation directory and primary file name
     (DWARF 5), or empty terminated tables (pre-DWARF 5).  */
  if (!last_emitted_file)
    {
      if (dwarf_version >= 5)
	{
	  const char *comp_dir = comp_dir_string ();
	  if (comp_dir == NULL)
	    comp_dir = "" ;
	  dw2_asm_output_data (1, 1, "Directory entry format count" );
	  enum dwarf_form str_form = DW_FORM_string;
	  if (DWARF5_USE_DEBUG_LINE_STR)
	    str_form = DW_FORM_line_strp;
	  dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path" );
	  dw2_asm_output_data_uleb128 (str_form, "%s" ,
				       get_DW_FORM_name (form: str_form));
	  dw2_asm_output_data_uleb128 (1, "Directories count" );
	  if (str_form == DW_FORM_string)
	    dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x" , 0);
	  else
	    output_line_string (form: str_form, str: comp_dir, entry_kind: "Directory Entry" , idx: 0);
	  const char *filename0 = get_AT_string (die: comp_unit_die (), attr_kind: DW_AT_name);
	  if (filename0 == NULL)
	    filename0 = "" ;
	  /* VMS additionally emits timestamp and size columns.  */
#ifdef VMS_DEBUGGING_INFO
	  dw2_asm_output_data (1, 4, "File name entry format count" );
#else
	  dw2_asm_output_data (1, 2, "File name entry format count" );
#endif
	  dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path" );
	  dw2_asm_output_data_uleb128 (str_form, "%s" ,
				       get_DW_FORM_name (form: str_form));
	  dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
				       "DW_LNCT_directory_index" );
	  dw2_asm_output_data_uleb128 (DW_FORM_data1, "%s" ,
				       get_DW_FORM_name (form: DW_FORM_data1));
#ifdef VMS_DEBUGGING_INFO
	  dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp" );
	  dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata" );
	  dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size" );
	  dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata" );
#endif
	  dw2_asm_output_data_uleb128 (1, "File names count" );

	  output_line_string (form: str_form, str: filename0, entry_kind: "File Entry" , idx: 0);
	  /* Directory index of the single file entry (byte zero).  */
	  dw2_asm_output_data (1, 0, NULL);
#ifdef VMS_DEBUGGING_INFO
	  dw2_asm_output_data_uleb128 (0, NULL);
	  dw2_asm_output_data_uleb128 (0, NULL);
#endif
	}
      else
	{
	  dw2_asm_output_data (1, 0, "End directory table" );
	  dw2_asm_output_data (1, 0, "End file name table" );
	}
      return;
    }

  /* emitted_number values run 1..N, so the last emitted file's number
     is the total file count.  */
  numfiles = last_emitted_file->emitted_number;

  /* Allocate the various arrays we need.  */
  files = XALLOCAVEC (struct file_info, numfiles);
  dirs = XALLOCAVEC (struct dir_info, numfiles);

  /* Collect every emitted file from the hash table, then sort so that
     files sharing directory prefixes become adjacent.  */
  fnad.files = files;
  fnad.used_files = 0;
  fnad.max_files = numfiles;
  file_table->traverse<file_name_acquire_data *, file_name_acquire> (argument: &fnad);
  gcc_assert (fnad.used_files == fnad.max_files);

  qsort (files, numfiles, sizeof (files[0]), file_info_cmp);

  /* Find all the different directories used.  */
  dirs[0].path = files[0].path;
  dirs[0].length = files[0].fname - files[0].path;
  dirs[0].prefix = -1;
  dirs[0].count = 1;
  dirs[0].dir_idx = 0;
  files[0].dir_idx = 0;
  ndirs = 1;

  for (i = 1; i < numfiles; i++)
    if (files[i].fname - files[i].path == dirs[ndirs - 1].length
	&& memcmp (s1: dirs[ndirs - 1].path, s2: files[i].path,
		   n: dirs[ndirs - 1].length) == 0)
      {
	/* Same directory as last entry.  */
	files[i].dir_idx = ndirs - 1;
	++dirs[ndirs - 1].count;
      }
    else
      {
	int j;

	/* This is a new directory.  */
	dirs[ndirs].path = files[i].path;
	dirs[ndirs].length = files[i].fname - files[i].path;
	dirs[ndirs].count = 1;
	dirs[ndirs].dir_idx = ndirs;
	files[i].dir_idx = ndirs;

	/* Search for a prefix.  Keep the longest existing directory that
	   is a proper prefix of this one.  */
	dirs[ndirs].prefix = -1;
	for (j = 0; j < ndirs; j++)
	  if (dirs[j].length < dirs[ndirs].length
	      && dirs[j].length > 1
	      && (dirs[ndirs].prefix == -1
		  || dirs[j].length > dirs[dirs[ndirs].prefix].length)
	      && memcmp (s1: dirs[j].path, s2: dirs[ndirs].path, n: dirs[j].length) == 0)
	    dirs[ndirs].prefix = j;

	++ndirs;
      }

  /* Now to the actual work.  We have to find a subset of the directories which
     allow expressing the file name using references to the directory table
     with the least amount of characters.  We do not do an exhaustive search
     where we would have to check out every combination of every single
     possible prefix.  Instead we use a heuristic which provides nearly optimal
     results in most cases and never is much off.  */
  saved = XALLOCAVEC (int, ndirs);
  savehere = XALLOCAVEC (int, ndirs);

  memset (s: saved, c: '\0', n: ndirs * sizeof (saved[0]));
  for (i = 0; i < ndirs; i++)
    {
      int j;
      int total;

      /* We can always save some space for the current directory.  But this
	 does not mean it will be enough to justify adding the directory.  */
      savehere[i] = dirs[i].length;
      total = (savehere[i] - saved[i]) * dirs[i].count;

      for (j = i + 1; j < ndirs; j++)
	{
	  savehere[j] = 0;
	  if (saved[j] < dirs[i].length)
	    {
	      /* Determine whether the dirs[i] path is a prefix of the
		 dirs[j] path.  Walk the prefix chain recorded above.  */
	      int k;

	      k = dirs[j].prefix;
	      while (k != -1 && k != (int) i)
		k = dirs[k].prefix;

	      if (k == (int) i)
		{
		  /* Yes it is.  We can possibly save some memory by
		     writing the filenames in dirs[j] relative to
		     dirs[i].  */
		  savehere[j] = dirs[i].length;
		  total += (savehere[j] - saved[j]) * dirs[j].count;
		}
	    }
	}

      /* Check whether we can save enough to justify adding the dirs[i]
	 directory.  The cost of adding it is its own path plus a
	 terminator.  */
      if (total > dirs[i].length + 1)
	{
	  /* It's worthwhile adding.  */
	  for (j = i; j < ndirs; j++)
	    if (savehere[j] > 0)
	      {
		/* Remember how much we saved for this directory so far.  */
		saved[j] = savehere[j];

		/* Remember the prefix directory.  */
		dirs[j].dir_idx = i;
	      }
	}
    }

  /* Emit the directory name table.  When dirs[0] is a real (non-empty)
     directory, entry indices are biased by one so index 0 can denote
     the compilation directory.  */
  idx_offset = dirs[0].length > 0 ? 1 : 0;
  enum dwarf_form str_form = DW_FORM_string;
  enum dwarf_form idx_form = DW_FORM_udata;
  if (dwarf_version >= 5)
    {
      const char *comp_dir = comp_dir_string ();
      if (comp_dir == NULL)
	comp_dir = "" ;
      dw2_asm_output_data (1, 1, "Directory entry format count" );
      if (DWARF5_USE_DEBUG_LINE_STR)
	str_form = DW_FORM_line_strp;
      dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path" );
      dw2_asm_output_data_uleb128 (str_form, "%s" ,
				   get_DW_FORM_name (form: str_form));
      dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count" );
      if (str_form == DW_FORM_string)
	{
	  dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x" , 0);
	  for (i = 1 - idx_offset; i < ndirs; i++)
	    dw2_asm_output_nstring (dirs[i].path,
				    dirs[i].length
				    - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
				    "Directory Entry: %#x" , i + idx_offset);
	}
      else
	{
	  output_line_string (form: str_form, str: comp_dir, entry_kind: "Directory Entry" , idx: 0);
	  for (i = 1 - idx_offset; i < ndirs; i++)
	    {
	      /* Copy the directory path (sans trailing separator where
		 appropriate) into GC memory so it can be interned into
		 .debug_line_str.  */
	      const char *str
		= ggc_alloc_string (contents: dirs[i].path,
				    length: dirs[i].length
				    - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
	      output_line_string (form: str_form, str, entry_kind: "Directory Entry" ,
				  idx: (unsigned) i + idx_offset);
	    }
	}
    }
  else
    {
      for (i = 1 - idx_offset; i < ndirs; i++)
	dw2_asm_output_nstring (dirs[i].path,
				dirs[i].length
				- !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
				"Directory Entry: %#x" , i + idx_offset);

      dw2_asm_output_data (1, 0, "End directory table" );
    }

  /* We have to emit them in the order of emitted_number since that's
     used in the debug info generation.  To do this efficiently we
     generate a back-mapping of the indices first.  */
  backmap = XALLOCAVEC (int, numfiles);
  for (i = 0; i < numfiles; i++)
    backmap[files[i].file_idx->emitted_number - 1] = i;

  if (dwarf_version >= 5)
    {
      const char *filename0 = get_AT_string (die: comp_unit_die (), attr_kind: DW_AT_name);
      if (filename0 == NULL)
	filename0 = "" ;
      /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
	 DW_FORM_data2.  Choose one based on the number of directories
	 and how much space would they occupy in each encoding.
	 If we have at most 256 directories, all indexes fit into
	 a single byte, so DW_FORM_data1 is most compact (if there
	 are at most 128 directories, DW_FORM_udata would be as
	 compact as that, but not shorter and slower to decode).  */
      if (ndirs + idx_offset <= 256)
	idx_form = DW_FORM_data1;
      /* If there are more than 65536 directories, we have to use
	 DW_FORM_udata, DW_FORM_data2 can't refer to them.
	 Otherwise, compute what space would occupy if all the indexes
	 used DW_FORM_udata - sum - and compare that to how large would
	 be DW_FORM_data2 encoding, and pick the more efficient one.  */
      else if (ndirs + idx_offset <= 65536)
	{
	  unsigned HOST_WIDE_INT sum = 1;
	  for (i = 0; i < numfiles; i++)
	    {
	      int file_idx = backmap[i];
	      int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
	      sum += size_of_uleb128 (dir_idx);
	    }
	  if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
	    idx_form = DW_FORM_data2;
	}
#ifdef VMS_DEBUGGING_INFO
      dw2_asm_output_data (1, 4, "File name entry format count" );
#else
      dw2_asm_output_data (1, 2, "File name entry format count" );
#endif
      dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path" );
      dw2_asm_output_data_uleb128 (str_form, "%s" ,
				   get_DW_FORM_name (form: str_form));
      dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
				   "DW_LNCT_directory_index" );
      dw2_asm_output_data_uleb128 (idx_form, "%s" ,
				   get_DW_FORM_name (form: idx_form));
#ifdef VMS_DEBUGGING_INFO
      dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp" );
      dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata" );
      dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size" );
      dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata" );
#endif
      dw2_asm_output_data_uleb128 (numfiles + 1, "File names count" );

      /* File entry 0 is the primary source file of the compilation.  */
      output_line_string (form: str_form, str: filename0, entry_kind: "File Entry" , idx: 0);

      /* Include directory index.  */
      if (idx_form != DW_FORM_udata)
	dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
			     0, NULL);
      else
	dw2_asm_output_data_uleb128 (0, NULL);

#ifdef VMS_DEBUGGING_INFO
      dw2_asm_output_data_uleb128 (0, NULL);
      dw2_asm_output_data_uleb128 (0, NULL);
#endif
    }

  /* Now write all the file names, in emitted_number order via BACKMAP.  */
  for (i = 0; i < numfiles; i++)
    {
      int file_idx = backmap[i];
      int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;

#ifdef VMS_DEBUGGING_INFO
#define MAX_VMS_VERSION_LEN 6 /* ";32768" */

      /* Setting these fields can lead to debugger miscomparisons,
	 but VMS Debug requires them to be set correctly.  */

      int ver;
      long long cdt;
      long siz;
      int maxfilelen = (strlen (files[file_idx].path)
			+ dirs[dir_idx].length
			+ MAX_VMS_VERSION_LEN + 1);
      char *filebuf = XALLOCAVEC (char, maxfilelen);

      vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
      snprintf (filebuf, maxfilelen, "%s;%d" ,
		files[file_idx].path + dirs[dir_idx].length, ver);

      output_line_string (str_form, filebuf, "File Entry" , (unsigned) i + 1);

      /* Include directory index.  */
      if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
	dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
			     dir_idx + idx_offset, NULL);
      else
	dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);

      /* Modification time.  */
      dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
							 &cdt, 0, 0, 0) == 0)
				   ? cdt : 0, NULL);

      /* File length in bytes.  */
      dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
							 0, &siz, 0, 0) == 0)
				   ? siz : 0, NULL);
#else
      /* The file name is emitted relative to its directory entry.  */
      output_line_string (form: str_form,
			  str: files[file_idx].path + dirs[dir_idx].length,
			  entry_kind: "File Entry" , idx: (unsigned) i + 1);

      /* Include directory index.  */
      if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
	dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
			     dir_idx + idx_offset, NULL);
      else
	dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);

      /* The DWARF 5 format used here has no timestamp/size columns.  */
      if (dwarf_version >= 5)
	continue;

      /* Modification time.  */
      dw2_asm_output_data_uleb128 (0, NULL);

      /* File length in bytes.  */
      dw2_asm_output_data_uleb128 (0, NULL);
#endif /* VMS_DEBUGGING_INFO */
    }

  /* Pre-DWARF 5 file tables are terminated by a zero byte.  */
  if (dwarf_version < 5)
    dw2_asm_output_data (1, 0, "End file name table" );
}
| 12977 | |
| 12978 | |
/* Output one line number table into the .debug_line section.  Walks
   TABLE's recorded entries and emits the corresponding line-number
   program opcodes, tracking the state machine's current line, is_stmt
   flag, and (when location views are enabled) the view counter.  The
   program is terminated with a set_address to TABLE's end label
   followed by DW_LNE_end_sequence.  */

static void
output_one_line_info_table (dw_line_info_table *table)
{
  char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
  unsigned int current_line = 1;
  bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
  dw_line_info_entry *ent, *prev_addr = NULL;
  size_t i;
  unsigned int view;

  /* The view counter restarts at zero at every set_address.  */
  view = 0;

  FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
    {
      switch (ent->opcode)
	{
	case LI_set_address:
	  /* ??? Unfortunately, we have little choice here currently, and
	     must always use the most general form.  GCC does not know the
	     address delta itself, so we can't use DW_LNS_advance_pc.  Many
	     ports do have length attributes which will give an upper bound
	     on the address range.  We could perhaps use length attributes
	     to determine when it is safe to use DW_LNS_fixed_advance_pc.  */
	  ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);

	  view = 0;

	  /* This can handle any delta.  This takes
	     4+DWARF2_ADDR_SIZE bytes.  */
	  dw2_asm_output_data (1, 0, "set address %s%s" , line_label,
			       debug_variable_location_views
			       ? ", reset view to 0" : "" );
	  dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
	  dw2_asm_output_data (1, DW_LNE_set_address, NULL);
	  dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);

	  prev_addr = ent;
	  break;

	case LI_adv_address:
	  {
	    /* Advance the PC by the label delta from the previous
	       address entry, bumping the view counter.  */
	    ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
	    char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
	    ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);

	    view++;

	    if (HAVE_AS_LEB128)
	      {
		/* Using DW_LNS_advance_pc with label delta is only valid if
		   Minimum Instruction Length in the header is 1, but that is
		   what we use on all targets.  */
		dw2_asm_output_data (1, DW_LNS_advance_pc,
				     "advance PC, increment view to %i" , view);
		dw2_asm_output_delta_uleb128 (line_label, prev_label,
					      "from %s to %s" , prev_label,
					      line_label);
	      }
	    else
	      {
		/* Without assembler LEB128 support fall back to the
		   fixed 2-byte operand form.  */
		dw2_asm_output_data (1, DW_LNS_fixed_advance_pc,
				     "fixed advance PC, increment view to %i" ,
				     view);
		dw2_asm_output_delta (2, line_label, prev_label,
				      "from %s to %s" , prev_label, line_label);
	      }

	    prev_addr = ent;
	    break;
	  }

	case LI_set_line:
	  if (ent->val == current_line)
	    {
	      /* We still need to start a new row, so output a copy insn.  */
	      dw2_asm_output_data (1, DW_LNS_copy,
				   "copy line %u" , current_line);
	    }
	  else
	    {
	      int line_offset = ent->val - current_line;
	      int line_delta = line_offset - DWARF_LINE_BASE;

	      current_line = ent->val;
	      if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
		{
		  /* This can handle deltas from -10 to 234, using the current
		     definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
		     This takes 1 byte.  */
		  dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
				       "line %u" , current_line);
		}
	      else
		{
		  /* This can handle any delta.  This takes at least 4 bytes,
		     depending on the value being encoded.  */
		  dw2_asm_output_data (1, DW_LNS_advance_line,
				       "advance to line %u" , current_line);
		  dw2_asm_output_data_sleb128 (line_offset, NULL);
		  dw2_asm_output_data (1, DW_LNS_copy, NULL);
		}
	    }
	  break;

	case LI_set_file:
	  dw2_asm_output_data (1, DW_LNS_set_file, "set file %u" , ent->val);
	  dw2_asm_output_data_uleb128 (ent->val, "%u" , ent->val);
	  break;

	case LI_set_column:
	  dw2_asm_output_data (1, DW_LNS_set_column, "column %u" , ent->val);
	  dw2_asm_output_data_uleb128 (ent->val, "%u" , ent->val);
	  break;

	case LI_negate_stmt:
	  current_is_stmt = !current_is_stmt;
	  dw2_asm_output_data (1, DW_LNS_negate_stmt,
			       "is_stmt %d" , current_is_stmt);
	  break;

	case LI_set_prologue_end:
	  dw2_asm_output_data (1, DW_LNS_set_prologue_end,
			       "set prologue end" );
	  break;

	case LI_set_epilogue_begin:
	  dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
			       "set epilogue begin" );
	  break;

	case LI_set_discriminator:
	  /* Extended opcode: length byte, DW_LNE_set_discriminator,
	     then the ULEB128 discriminator value.  */
	  dw2_asm_output_data (1, 0, "discriminator %u" , ent->val);
	  dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
	  dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
	  dw2_asm_output_data_uleb128 (ent->val, NULL);
	  break;
	}
    }

  /* Emit debug info for the address of the end of the table.  */
  dw2_asm_output_data (1, 0, "set address %s" , table->end_label);
  dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
  dw2_asm_output_data (1, DW_LNE_set_address, NULL);
  dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);

  /* Terminate the sequence.  */
  dw2_asm_output_data (1, 0, "end sequence" );
  dw2_asm_output_data_uleb128 (1, NULL);
  dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
}
| 13130 | |
/* Incremented each time output_line_info runs, so the begin/end/prolog
   labels it generates are unique per emitted line table.  */
static unsigned int output_line_info_generation;
| 13132 | |
/* Output the source line number correspondence information.  This
   information goes into the .debug_line section.  Emits the unit
   length, version header, standard-opcode table and file tables; when
   PROLOGUE_ONLY is set, stops after the prologue.  Otherwise also
   emits one line-number program per in-use table.  */

static void
output_line_info (bool prologue_only)
{
  /* l1/l2 delimit the whole unit; p1/p2 delimit the prologue.  All four
     are made unique by output_line_info_generation.  */
  char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
  char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
  bool saw_one = false;
  int opc;

  ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL,
			       output_line_info_generation);
  ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL,
			       output_line_info_generation);
  ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL,
			       output_line_info_generation);
  ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL,
			       output_line_info_generation++);

  /* The initial length field is omitted for XCOFF.  */
  if (!XCOFF_DEBUGGING_INFO)
    {
      if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
	dw2_asm_output_data (4, 0xffffffff,
	  "Initial length escape value indicating 64-bit DWARF extension" );
      dw2_asm_output_delta (dwarf_offset_size, l2, l1,
			    "Length of Source Line Info" );
    }

  ASM_OUTPUT_LABEL (asm_out_file, l1);

  output_dwarf_version ();
  /* DWARF 5 adds address size and segment selector size fields to the
     line table header.  */
  if (dwarf_version >= 5)
    {
      dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size" );
      dw2_asm_output_data (1, 0, "Segment Size" );
    }
  dw2_asm_output_delta (dwarf_offset_size, p2, p1, "Prolog Length" );
  ASM_OUTPUT_LABEL (asm_out_file, p1);

  /* Define the architecture-dependent minimum instruction length (in bytes).
     In this implementation of DWARF, this field is used for information
     purposes only.  Since GCC generates assembly language, we have no
     a priori knowledge of how many instruction bytes are generated for each
     source line, and therefore can use only the DW_LNE_set_address and
     DW_LNS_fixed_advance_pc line information commands.  Accordingly, we fix
     this as '1', which is "correct enough" for all architectures,
     and don't let the target override.  */
  dw2_asm_output_data (1, 1, "Minimum Instruction Length" );

  if (dwarf_version >= 4)
    dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
			 "Maximum Operations Per Instruction" );
  dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
		       "Default is_stmt_start flag" );
  dw2_asm_output_data (1, DWARF_LINE_BASE,
		       "Line Base Value (Special Opcodes)" );
  dw2_asm_output_data (1, DWARF_LINE_RANGE,
		       "Line Range Value (Special Opcodes)" );
  dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
		       "Special Opcode Base" );

  /* Emit the operand count of each standard opcode, as the header
     requires.  */
  for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
    {
      int n_op_args;
      switch (opc)
	{
	case DW_LNS_advance_pc:
	case DW_LNS_advance_line:
	case DW_LNS_set_file:
	case DW_LNS_set_column:
	case DW_LNS_fixed_advance_pc:
	case DW_LNS_set_isa:
	  n_op_args = 1;
	  break;
	default:
	  n_op_args = 0;
	  break;
	}

      dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args" ,
			   opc, n_op_args);
    }

  /* Write out the information about the files we use.  */
  output_file_names ();
  ASM_OUTPUT_LABEL (asm_out_file, p2);
  if (prologue_only)
    {
      /* Output the marker for the end of the line number info.  */
      ASM_OUTPUT_LABEL (asm_out_file, l2);
      return;
    }

  if (separate_line_info)
    {
      dw_line_info_table *table;
      size_t i;

      FOR_EACH_VEC_ELT (*separate_line_info, i, table)
	if (table->in_use)
	  {
	    output_one_line_info_table (table);
	    saw_one = true;
	  }
    }
  if (cold_text_section_line_info && cold_text_section_line_info->in_use)
    {
      output_one_line_info_table (table: cold_text_section_line_info);
      saw_one = true;
    }

  /* ??? Some Darwin linkers crash on a .debug_line section with no
     sequences.  Further, merely a DW_LNE_end_sequence entry is not
     sufficient -- the address column must also be initialized.
     Make sure to output at least one set_address/end_sequence pair,
     choosing .text since that section is always present.  */
  if (text_section_line_info->in_use || !saw_one)
    output_one_line_info_table (table: text_section_line_info);

  /* Output the marker for the end of the line number info.  */
  ASM_OUTPUT_LABEL (asm_out_file, l2);
}
| 13256 | |
| 13257 | /* Return true if DW_AT_endianity should be emitted according to REVERSE. */ |
| 13258 | |
| 13259 | static inline bool |
| 13260 | need_endianity_attribute_p (bool reverse) |
| 13261 | { |
| 13262 | return reverse && (dwarf_version >= 3 || !dwarf_strict); |
| 13263 | } |
| 13264 | |
/* Given a pointer to a tree node for some base type, return a pointer to
   a DIE that describes the given type.  REVERSE is true if the type is
   to be interpreted in the reverse storage order wrt the target order.

   This routine must only be called for GCC type nodes that correspond to
   Dwarf base (fundamental) types.  */

dw_die_ref
base_type_die (tree type, bool reverse)
{
  dw_die_ref base_type_result;
  enum dwarf_type encoding;
  bool fpt_used = false;	/* Set if TYPE turns out to be fixed-point.  */
  struct fixed_point_type_info fpt_info;
  tree type_bias = NULL_TREE;

  /* If this is a subtype that should not be emitted as a subrange type,
     use the base type.  See subrange_type_for_debug_p.  */
  if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
    type = TREE_TYPE (type);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
      /* Built-in Unicode character types get DW_ATE_UTF, a DWARF 4
	 addition, so only when the version (or -gno-strict-dwarf)
	 permits it.  */
      if ((dwarf_version >= 4 || !dwarf_strict)
	  && TYPE_NAME (type)
	  && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	  && DECL_IS_UNDECLARED_BUILTIN (TYPE_NAME (type))
	  && DECL_NAME (TYPE_NAME (type)))
	{
	  const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
	  if (strcmp (s1: name, s2: "char16_t") == 0
	      || strcmp (s1: name, s2: "char8_t") == 0
	      || strcmp (s1: name, s2: "char32_t") == 0)
	    {
	      encoding = DW_ATE_UTF;
	      break;
	    }
	}
      /* Some front ends represent fixed-point types as INTEGER_TYPEs;
	 query the language hook for scale-factor information.  The
	 *_fixed encodings are DWARF 3 additions.  */
      if ((dwarf_version >= 3 || !dwarf_strict)
	  && lang_hooks.types.get_fixed_point_type_info)
	{
	  memset (s: &fpt_info, c: 0, n: sizeof (fpt_info));
	  if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
	    {
	      fpt_used = true;
	      encoding = ((TYPE_UNSIGNED (type))
			  ? DW_ATE_unsigned_fixed
			  : DW_ATE_signed_fixed);
	      break;
	    }
	}
      if (TYPE_STRING_FLAG (type))
	{
	  /* Rust's 4-byte character type holds a Unicode scalar value.  */
	  if ((dwarf_version >= 4 || !dwarf_strict)
	      && is_rust ()
	      && int_size_in_bytes (type) == 4)
	    encoding = DW_ATE_UTF;
	  else if (TYPE_UNSIGNED (type))
	    encoding = DW_ATE_unsigned_char;
	  else
	    encoding = DW_ATE_signed_char;
	}
      else if (TYPE_UNSIGNED (type))
	encoding = DW_ATE_unsigned;
      else
	encoding = DW_ATE_signed;

      /* DW_AT_GNU_bias is a GNU extension, so only query the language
	 hook for a bias outside strict-DWARF mode.  */
      if (!dwarf_strict
	  && lang_hooks.types.get_type_bias)
	type_bias = lang_hooks.types.get_type_bias (type);
      break;

    case REAL_TYPE:
      if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
	{
	  /* DW_ATE_decimal_float is a DWARF 3 addition; use the
	     lo_user range when stuck with strict DWARF 2.  */
	  if (dwarf_version >= 3 || !dwarf_strict)
	    encoding = DW_ATE_decimal_float;
	  else
	    encoding = DW_ATE_lo_user;
	}
      else
	encoding = DW_ATE_float;
      break;

    case FIXED_POINT_TYPE:
      /* The *_fixed encodings are DWARF 3 additions as well.  */
      if (!(dwarf_version >= 3 || !dwarf_strict))
	encoding = DW_ATE_lo_user;
      else if (TYPE_UNSIGNED (type))
	encoding = DW_ATE_unsigned_fixed;
      else
	encoding = DW_ATE_signed_fixed;
      break;

      /* Dwarf2 doesn't know anything about complex ints, so use
	 a user defined type for it.  */
    case COMPLEX_TYPE:
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (type)))
	encoding = DW_ATE_complex_float;
      else
	encoding = DW_ATE_lo_user;
      break;

    case BOOLEAN_TYPE:
      /* GNU FORTRAN/Ada/C++ BOOLEAN type.  */
      encoding = DW_ATE_boolean;
      break;

    case BITINT_TYPE:
      /* C23 _BitInt(N).  */
      if (TYPE_UNSIGNED (type))
	encoding = DW_ATE_unsigned;
      else
	encoding = DW_ATE_signed;
      break;

    default:
      /* No other TREE_CODEs are Dwarf fundamental types.  */
      gcc_unreachable ();
    }

  base_type_result = new_die_raw (tag_value: DW_TAG_base_type);

  add_AT_unsigned (die: base_type_result, attr_kind: DW_AT_byte_size,
		   unsigned_val: int_size_in_bytes (type));
  add_AT_unsigned (die: base_type_result, attr_kind: DW_AT_encoding, unsigned_val: encoding);
  /* A _BitInt's byte size need not reflect its precision, so also emit
     the exact bit size.  */
  if (TREE_CODE (type) == BITINT_TYPE)
    add_AT_unsigned (die: base_type_result, attr_kind: DW_AT_bit_size, TYPE_PRECISION (type));

  /* REVERSE means the opposite of the target order, hence the inverted
     DW_END_* choice relative to BYTES_BIG_ENDIAN.  */
  if (need_endianity_attribute_p (reverse))
    add_AT_unsigned (die: base_type_result, attr_kind: DW_AT_endianity,
		     BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);

  add_alignment_attribute (base_type_result, type);

  if (fpt_used)
    {
      /* Describe how the fixed-point value is scaled.  */
      switch (fpt_info.scale_factor_kind)
	{
	case fixed_point_scale_factor_binary:
	  add_AT_int (die: base_type_result, attr_kind: DW_AT_binary_scale,
		      int_val: fpt_info.scale_factor.binary);
	  break;

	case fixed_point_scale_factor_decimal:
	  add_AT_int (die: base_type_result, attr_kind: DW_AT_decimal_scale,
		      int_val: fpt_info.scale_factor.decimal);
	  break;

	case fixed_point_scale_factor_arbitrary:
	  /* Arbitrary scale factors cannot be described in standard DWARF.  */
	  if (!dwarf_strict)
	    {
	      /* Describe the scale factor as a rational constant.  */
	      const dw_die_ref scale_factor
		= new_die (tag_value: DW_TAG_constant, parent_die: comp_unit_die (), t: type);

	      add_scalar_info (scale_factor, DW_AT_GNU_numerator,
			       fpt_info.scale_factor.arbitrary.numerator,
			       dw_scalar_form_constant, NULL);
	      add_scalar_info (scale_factor, DW_AT_GNU_denominator,
			       fpt_info.scale_factor.arbitrary.denominator,
			       dw_scalar_form_constant, NULL);

	      add_AT_die_ref (die: base_type_result, attr_kind: DW_AT_small, targ_die: scale_factor);
	    }
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Language-specific bias, emitted as a GNU extension (see above).  */
  if (type_bias)
    add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
		     dw_scalar_form_constant
		     | dw_scalar_form_exprloc
		     | dw_scalar_form_reference,
		     NULL);

  return base_type_result;
}
| 13447 | |
| 13448 | /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM |
| 13449 | named 'auto' in its type: return true for it, false otherwise. */ |
| 13450 | |
| 13451 | static inline bool |
| 13452 | is_cxx_auto (tree type) |
| 13453 | { |
| 13454 | if (is_cxx ()) |
| 13455 | { |
| 13456 | tree name = TYPE_IDENTIFIER (type); |
| 13457 | if (name == get_identifier ("auto" ) |
| 13458 | || name == get_identifier ("decltype(auto)" )) |
| 13459 | return true; |
| 13460 | } |
| 13461 | return false; |
| 13462 | } |
| 13463 | |
/* Given a pointer to an arbitrary ..._TYPE tree node, return true if the
   given input type is a Dwarf "fundamental" (base) type, and false
   otherwise.  */

static inline bool
is_base_type (tree type)
{
  switch (TREE_CODE (type))
    {
    /* These map directly to DW_TAG_base_type; see base_type_die.  */
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case BOOLEAN_TYPE:
    case BITINT_TYPE:
      return true;

    /* Everything else gets a more structured DWARF description.  */
    case VOID_TYPE:
    case OPAQUE_TYPE:
    case ARRAY_TYPE:
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ENUMERAL_TYPE:
    case FUNCTION_TYPE:
    case METHOD_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case NULLPTR_TYPE:
    case OFFSET_TYPE:
    case LANG_TYPE:
    case VECTOR_TYPE:
      return false;

    default:
      /* The C++ front end may use type codes beyond
	 LAST_AND_UNUSED_TREE_CODE; named types among those are not
	 base types.  Any other code is a caller error.  */
      if (is_cxx ()
	  && TREE_CODE (type) >= LAST_AND_UNUSED_TREE_CODE
	  && TYPE_P (type)
	  && TYPE_IDENTIFIER (type))
	return false;
      gcc_unreachable ();
    }
}
| 13506 | |
| 13507 | /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE |
| 13508 | node, return the size in bits for the type if it is a constant, or else |
| 13509 | return the alignment for the type if the type's size is not constant, or |
| 13510 | else return BITS_PER_WORD if the type actually turns out to be an |
| 13511 | ERROR_MARK node. */ |
| 13512 | |
| 13513 | static inline unsigned HOST_WIDE_INT |
| 13514 | simple_type_size_in_bits (const_tree type) |
| 13515 | { |
| 13516 | if (TREE_CODE (type) == ERROR_MARK) |
| 13517 | return BITS_PER_WORD; |
| 13518 | else if (TYPE_SIZE (type) == NULL_TREE) |
| 13519 | return 0; |
| 13520 | else if (tree_fits_uhwi_p (TYPE_SIZE (type))) |
| 13521 | return tree_to_uhwi (TYPE_SIZE (type)); |
| 13522 | else |
| 13523 | return TYPE_ALIGN (type); |
| 13524 | } |
| 13525 | |
| 13526 | /* Similarly, but return an offset_int instead of UHWI. */ |
| 13527 | |
| 13528 | static inline offset_int |
| 13529 | offset_int_type_size_in_bits (const_tree type) |
| 13530 | { |
| 13531 | if (TREE_CODE (type) == ERROR_MARK) |
| 13532 | return BITS_PER_WORD; |
| 13533 | else if (TYPE_SIZE (type) == NULL_TREE) |
| 13534 | return 0; |
| 13535 | else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST) |
| 13536 | return wi::to_offset (TYPE_SIZE (type)); |
| 13537 | else |
| 13538 | return TYPE_ALIGN (type); |
| 13539 | } |
| 13540 | |
/* Given a pointer to a tree node for a subrange type, return a pointer
   to a DIE that describes the given type.  LOW and HIGH are the bounds
   (either may be NULL), BIAS an optional representation bias, and
   CONTEXT_DIE the scope in which to create the DIE (defaulting to the
   compilation unit).  */

static dw_die_ref
subrange_type_die (tree type, tree low, tree high, tree bias,
		   dw_die_ref context_die)
{
  dw_die_ref subrange_die;
  const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);

  if (context_die == NULL)
    context_die = comp_unit_die ();

  subrange_die = new_die (tag_value: DW_TAG_subrange_type, parent_die: context_die, t: type);

  if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
    {
      /* The size of the subrange type and its base type do not match,
	 so we need to generate a size attribute for the subrange type.  */
      add_AT_unsigned (die: subrange_die, attr_kind: DW_AT_byte_size, unsigned_val: size_in_bytes);
    }

  add_alignment_attribute (subrange_die, type);

  /* Emit whichever bounds are known; either may be absent.  */
  if (low)
    add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
  if (high)
    add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
  /* DW_AT_GNU_bias is a GNU extension, so only emit it outside
     strict-DWARF mode.  */
  if (bias && !dwarf_strict)
    add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
		     dw_scalar_form_constant
		     | dw_scalar_form_exprloc
		     | dw_scalar_form_reference,
		     NULL);

  return subrange_die;
}
| 13578 | |
| 13579 | /* Returns the (const and/or volatile) cv_qualifiers associated with |
| 13580 | the decl node. This will normally be augmented with the |
| 13581 | cv_qualifiers of the underlying type in add_type_attribute. */ |
| 13582 | |
| 13583 | static int |
| 13584 | decl_quals (const_tree decl) |
| 13585 | { |
| 13586 | return ((TREE_READONLY (decl) |
| 13587 | /* The C++ front-end correctly marks reference-typed |
| 13588 | variables as readonly, but from a language (and debug |
| 13589 | info) standpoint they are not const-qualified. */ |
| 13590 | && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE |
| 13591 | ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED) |
| 13592 | | (TREE_THIS_VOLATILE (decl) |
| 13593 | ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED)); |
| 13594 | } |
| 13595 | |
| 13596 | /* Determine the TYPE whose qualifiers match the largest strict subset |
| 13597 | of the given TYPE_QUALS, and return its qualifiers. Ignore all |
| 13598 | qualifiers outside QUAL_MASK. */ |
| 13599 | |
| 13600 | static int |
| 13601 | get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask) |
| 13602 | { |
| 13603 | tree t; |
| 13604 | int best_rank = 0, best_qual = 0, max_rank; |
| 13605 | |
| 13606 | type_quals &= qual_mask; |
| 13607 | max_rank = popcount_hwi (x: type_quals) - 1; |
| 13608 | |
| 13609 | for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank; |
| 13610 | t = TYPE_NEXT_VARIANT (t)) |
| 13611 | { |
| 13612 | int q = TYPE_QUALS (t) & qual_mask; |
| 13613 | |
| 13614 | if ((q & type_quals) == q && q != type_quals |
| 13615 | && check_base_type (cand: t, base: type)) |
| 13616 | { |
| 13617 | int rank = popcount_hwi (x: q); |
| 13618 | |
| 13619 | if (rank > best_rank) |
| 13620 | { |
| 13621 | best_rank = rank; |
| 13622 | best_qual = q; |
| 13623 | } |
| 13624 | } |
| 13625 | } |
| 13626 | |
| 13627 | return best_qual; |
| 13628 | } |
| 13629 | |
/* Mapping from a cv-qualifier bit (Q) to the DWARF modifier tag (T)
   that models it.  */
struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
static const dwarf_qual_info_t dwarf_qual_info[] =
{
  { .q: TYPE_QUAL_CONST, .t: DW_TAG_const_type },
  { .q: TYPE_QUAL_VOLATILE, .t: DW_TAG_volatile_type },
  { .q: TYPE_QUAL_RESTRICT, .t: DW_TAG_restrict_type },
  { .q: TYPE_QUAL_ATOMIC, .t: DW_TAG_atomic_type }
};
/* Number of entries in the qualifier table above.  */
static const unsigned int dwarf_qual_info_size = ARRAY_SIZE (dwarf_qual_info);
| 13639 | |
/* If DIE is a qualified DIE of some base DIE with the same parent,
   return the base DIE, otherwise return NULL.  Set MASK to the
   qualifiers added compared to the returned DIE.  DEPTH bounds the
   recursion when peeling off multiple stacked qualifier DIEs.  */

static dw_die_ref
qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
{
  unsigned int i;
  /* Is DIE one of the qualifier tags listed in dwarf_qual_info?  */
  for (i = 0; i < dwarf_qual_info_size; i++)
    if (die->die_tag == dwarf_qual_info[i].t)
      break;
  if (i == dwarf_qual_info_size)
    return NULL;
  /* A pure qualifier DIE carries exactly one attribute: DW_AT_type.  */
  if (vec_safe_length (v: die->die_attr) != 1)
    return NULL;
  dw_die_ref type = get_AT_ref (die, attr_kind: DW_AT_type);
  /* The base DIE must live in the same scope as DIE.  */
  if (type == NULL || type->die_parent != die->die_parent)
    return NULL;
  *mask |= dwarf_qual_info[i].q;
  if (depth)
    {
      /* Recurse to strip further qualifier DIEs, accumulating their
	 bits into MASK; stop at the innermost unqualified DIE.  */
      dw_die_ref ret = qualified_die_p (die: type, mask, depth: depth - 1);
      if (ret)
	return ret;
    }
  return type;
}
| 13667 | |
/* If TYPE is long double or complex long double that
   should be emitted as artificial typedef to _Float128 or
   complex _Float128, return the type it should be emitted as.
   This is done in case the target already supports 16-byte
   composite floating point type (ibm_extended_format).
   Returns NULL_TREE when no substitution applies.  */

static tree
long_double_as_float128 (tree type)
{
  if (type != long_double_type_node
      && type != complex_long_double_type_node)
    return NULL_TREE;

  machine_mode mode, fmode;
  /* For the complex case look at the component mode.  */
  if (TREE_CODE (type) == COMPLEX_TYPE)
    mode = TYPE_MODE (TREE_TYPE (type));
  else
    mode = TYPE_MODE (type);
  /* Only substitute when long double's 16-byte mode is NOT composite
     but the target also provides a 16-byte composite float mode.  */
  if (known_eq (GET_MODE_SIZE (mode), 16) && !MODE_COMPOSITE_P (mode))
    FOR_EACH_MODE_IN_CLASS (fmode, MODE_FLOAT)
      if (known_eq (GET_MODE_SIZE (fmode), 16)
	  && MODE_COMPOSITE_P (fmode))
	{
	  if (type == long_double_type_node)
	    {
	      /* Use _Float128 only if it shares long double's mode.  */
	      if (float128_type_node
		  && (TYPE_MODE (float128_type_node)
		      == TYPE_MODE (type)))
		return float128_type_node;
	      return NULL_TREE;
	    }
	  /* Complex case: find the complex _FloatN/_FloatNx type with
	     the same mode as TYPE.  */
	  for (int i = 0; i < NUM_FLOATN_NX_TYPES; i++)
	    if (COMPLEX_FLOATN_NX_TYPE_NODE (i) != NULL_TREE
		&& (TYPE_MODE (COMPLEX_FLOATN_NX_TYPE_NODE (i))
		    == TYPE_MODE (type)))
	      return COMPLEX_FLOATN_NX_TYPE_NODE (i);
	}

  return NULL_TREE;
}
| 13708 | |
/* Hash function for struct annotation_node.  The hash value is computed when
   the annotation node is created (see hash_btf_tag) based on the name,
   value and chain of any further annotations on the same entity.  */

hashval_t
annotation_node_hasher::hash (struct annotation_node *node)
{
  /* Simply return the precomputed value.  */
  return node->hash;
}
| 13718 | |
| 13719 | /* Return whether two annotation nodes represent the same annotation and |
| 13720 | can therefore share a DIE. Beware of hash value collisions. */ |
| 13721 | |
| 13722 | bool |
| 13723 | annotation_node_hasher::equal (const struct annotation_node *node1, |
| 13724 | const struct annotation_node *node2) |
| 13725 | { |
| 13726 | return (node1->hash == node2->hash |
| 13727 | && (node1->name == node2->name |
| 13728 | || !strcmp (s1: node1->name, s2: node2->name)) |
| 13729 | && (node1->value == node2->value |
| 13730 | || !strcmp (s1: node1->value, s2: node2->value)) |
| 13731 | && node1->next == node2->next); |
| 13732 | } |
| 13733 | |
/* Return an appropriate entry in the btf tag hash table for a given btf tag
   ATTR (a TREE_LIST attribute node, or NULL_TREE).
   If a structurally equivalent tag (one with the same name, value, and
   subsequent chain of further tags) has already been processed, then the
   existing entry for that tag is returned and should be reused.
   Otherwise, a new entry is added to the hash table and returned.
   Returns NULL when ATTR is absent or not an attribute list.  */

static struct annotation_node *
hash_btf_tag (tree attr)
{
  if (attr == NULL_TREE || TREE_CODE (attr) != TREE_LIST)
    return NULL;

  /* Lazily create the interning table on first use.  */
  if (!btf_tag_htab)
    btf_tag_htab = hash_table<annotation_node_hasher>::create_ggc (n: 10);

  const char * name = IDENTIFIER_POINTER (get_attribute_name (attr));
  const char * value = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
  tree chain = lookup_attribute (attr_name: name, TREE_CHAIN (attr));

  /* Hash for one tag depends on hash of next tag in the chain, because
     the chain is part of structural equivalence.  Recurse on the tail
     first so its node (and hash) is available below.  */
  struct annotation_node *chain_node = hash_btf_tag (attr: chain);
  gcc_checking_assert (chain == NULL_TREE || chain_node != NULL);

  /* Skip any non-btf-tag attributes that might be in the chain.  */
  if (strcmp (s1: name, s2: "btf_type_tag") != 0 && strcmp (s1: name, s2: "btf_decl_tag") != 0)
    return chain_node;

  /* Hash for a given tag is determined by the name, value, and chain of
     further tags.  */
  inchash::hash h;
  h.merge_hash (other: htab_hash_string (name));
  h.merge_hash (other: htab_hash_string (value));
  h.merge_hash (other: chain_node ? chain_node->hash : 0);

  /* Build a stack-local key node to probe the table with.  */
  struct annotation_node node;
  node.name = name;
  node.value = value;
  node.hash = h.end ();
  node.next = chain_node;

  struct annotation_node **slot = btf_tag_htab->find_slot (value: &node, insert: INSERT);
  if (*slot == NULL)
    {
      /* Create new htab entry for this annotation.  */
      struct annotation_node *new_slot
	= ggc_cleared_alloc<struct annotation_node> ();
      new_slot->name = name;
      new_slot->value = value;
      new_slot->hash = node.hash;
      new_slot->next = chain_node;

      *slot = new_slot;
      return new_slot;
    }
  else
    {
      /* This node is already in the hash table.  */
      return *slot;
    }
}
| 13795 | |
/* Generate (or reuse) DW_TAG_GNU_annotation DIEs representing the btf_type_tag
   or btf_decl_tag user annotations in ATTR, and update DIE to refer to them
   via DW_AT_GNU_annotation.  If there are multiple type_tag or decl_tag
   annotations in ATTR, they are all processed recursively by this function to
   build a chain of annotation DIEs.
   A single chain of annotation DIEs can be shared among all occurrences of
   equivalent sets of attributes appearing on different types or declarations.
   Return the first annotation DIE in the created (or reused) chain.
   DIE may be NULL when building an inner link of the chain.  */

static dw_die_ref
gen_btf_tag_dies (tree attr, dw_die_ref die)
{
  if (attr == NULL_TREE)
    return die;

  const char * name = IDENTIFIER_POINTER (get_attribute_name (attr));
  const char * value = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));

  dw_die_ref tag_die, prev = NULL;

  /* Multiple annotations on the same item form a singly-linked list of
     annotation DIEs; generate recursively backward from the end so we can
     chain each created DIE to the next, which has already been created.  */
  tree rest = lookup_attribute (attr_name: name, TREE_CHAIN (attr));
  if (rest)
    prev = gen_btf_tag_dies (attr: rest, NULL);

  /* Calculate a hash value for the tag based on its structure, find the
     existing entry for it (if any) in the hash table, or create a new entry
     which can be reused by structurally-equivalent tags.  */
  struct annotation_node *entry = hash_btf_tag (attr);
  if (!entry)
    return die;

  /* If the node already has an associated DIE, reuse it.
     Otherwise, create the new annotation DIE, and associate it with
     the hash table entry for future reuse.  Any structurally-equivalent
     tag we process later will find and share the same DIE.  */
  if (entry->die)
    tag_die = entry->die;
  else
    {
      tag_die = new_die (tag_value: DW_TAG_GNU_annotation, parent_die: comp_unit_die (), NULL);
      add_name_attribute (tag_die, name);
      add_AT_string (die: tag_die, attr_kind: DW_AT_const_value, str: value);
      /* Link to the rest of the chain, if any.  */
      if (prev)
	add_AT_die_ref (die: tag_die, attr_kind: DW_AT_GNU_annotation, targ_die: prev);

      entry->die = tag_die;
    }

  if (die)
    {
      /* Add (or replace) AT_GNU_annotation referring to the annotation DIE.
	 Replacement may happen for example when 'die' is a global variable
	 which has been re-declared multiple times.  In any case, the set of
	 input attributes is the one that ought to be reflected.  For global
	 variable re-declarations which add additional decl tags, they will
	 have been accumulated in the variable's DECL_ATTRIBUTES for us.  */
      remove_AT (die, attr_kind: DW_AT_GNU_annotation);
      add_AT_die_ref (die, attr_kind: DW_AT_GNU_annotation, targ_die: tag_die);
    }

  return tag_die;
}
| 13861 | |
| 13862 | /* Generate (or reuse) annotation DIEs representing the type_tags on T, if |
| 13863 | any, and update DIE to refer to them as appropriate. */ |
| 13864 | |
| 13865 | static void |
| 13866 | maybe_gen_btf_type_tag_dies (tree t, dw_die_ref target) |
| 13867 | { |
| 13868 | if (t == NULL_TREE || !TYPE_P (t) || !target) |
| 13869 | return; |
| 13870 | |
| 13871 | tree attr = lookup_attribute (attr_name: "btf_type_tag" , TYPE_ATTRIBUTES (t)); |
| 13872 | if (attr == NULL_TREE) |
| 13873 | return; |
| 13874 | |
| 13875 | gen_btf_tag_dies (attr, die: target); |
| 13876 | } |
| 13877 | |
| 13878 | /* Generate (or reuse) annotation DIEs representing any decl_tags in ATTR that |
| 13879 | apply to TARGET. */ |
| 13880 | |
| 13881 | static void |
| 13882 | maybe_gen_btf_decl_tag_dies (tree t, dw_die_ref target) |
| 13883 | { |
| 13884 | if (t == NULL_TREE || !DECL_P (t) || !target) |
| 13885 | return; |
| 13886 | |
| 13887 | tree attr = lookup_attribute (attr_name: "btf_decl_tag" , DECL_ATTRIBUTES (t)); |
| 13888 | if (attr == NULL_TREE) |
| 13889 | return; |
| 13890 | |
| 13891 | gen_btf_tag_dies (attr, die: target); |
| 13892 | } |
| 13893 | |
| 13894 | /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging |
| 13895 | entry that chains the modifiers specified by CV_QUALS in front of the |
| 13896 | given type. Also handle any type attributes in TYPE_ATTRS which have |
| 13897 | a representation in DWARF. REVERSE is true if the type is to be interpreted |
| 13898 | in the reverse storage order wrt the target order. */ |
| 13899 | |
| 13900 | static dw_die_ref |
| 13901 | modified_type_die (tree type, int cv_quals, tree type_attrs, bool reverse, |
| 13902 | dw_die_ref context_die) |
| 13903 | { |
| 13904 | enum tree_code code = TREE_CODE (type); |
| 13905 | dw_die_ref mod_type_die; |
| 13906 | dw_die_ref sub_die = NULL; |
| 13907 | tree item_type = NULL; |
| 13908 | tree qualified_type; |
| 13909 | tree name, low, high; |
| 13910 | tree btf_tags; |
| 13911 | dw_die_ref mod_scope; |
| 13912 | struct array_descr_info info; |
| 13913 | /* Only these cv-qualifiers are currently handled. */ |
| 13914 | const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE |
| 13915 | | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC); |
| 13916 | /* DW_AT_endianity is specified only for base types in the standard. */ |
| 13917 | const bool reverse_type |
| 13918 | = need_endianity_attribute_p (reverse) |
| 13919 | && (is_base_type (type) |
| 13920 | || (TREE_CODE (type) == ENUMERAL_TYPE && !dwarf_strict)); |
| 13921 | |
| 13922 | if (code == ERROR_MARK) |
| 13923 | return NULL; |
| 13924 | |
| 13925 | if (lang_hooks.types.get_debug_type) |
| 13926 | { |
| 13927 | tree debug_type = lang_hooks.types.get_debug_type (type); |
| 13928 | |
| 13929 | if (debug_type != NULL_TREE && debug_type != type) |
| 13930 | return modified_type_die (type: debug_type, cv_quals, type_attrs, reverse, |
| 13931 | context_die); |
| 13932 | } |
| 13933 | |
| 13934 | cv_quals &= cv_qual_mask; |
| 13935 | |
| 13936 | /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type |
| 13937 | tag modifier (and not an attribute) old consumers won't be able |
| 13938 | to handle it. */ |
| 13939 | if (dwarf_version < 3) |
| 13940 | cv_quals &= ~TYPE_QUAL_RESTRICT; |
| 13941 | |
| 13942 | /* Likewise for DW_TAG_atomic_type for DWARFv5. */ |
| 13943 | if (dwarf_version < 5) |
| 13944 | cv_quals &= ~TYPE_QUAL_ATOMIC; |
| 13945 | |
| 13946 | /* See if we already have the appropriately qualified variant of |
| 13947 | this type. */ |
| 13948 | qualified_type = get_qualified_type (type, cv_quals); |
| 13949 | |
| 13950 | if (qualified_type == sizetype) |
| 13951 | { |
| 13952 | /* Try not to expose the internal sizetype type's name. */ |
| 13953 | if (TYPE_NAME (qualified_type) |
| 13954 | && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL) |
| 13955 | { |
| 13956 | tree t = TREE_TYPE (TYPE_NAME (qualified_type)); |
| 13957 | |
| 13958 | gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE |
| 13959 | && (TYPE_PRECISION (t) |
| 13960 | == TYPE_PRECISION (qualified_type)) |
| 13961 | && (TYPE_UNSIGNED (t) |
| 13962 | == TYPE_UNSIGNED (qualified_type))); |
| 13963 | qualified_type = t; |
| 13964 | } |
| 13965 | else if (qualified_type == sizetype |
| 13966 | && TREE_CODE (sizetype) == TREE_CODE (size_type_node) |
| 13967 | && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node) |
| 13968 | && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node)) |
| 13969 | qualified_type = size_type_node; |
| 13970 | if (type == sizetype) |
| 13971 | type = qualified_type; |
| 13972 | } |
| 13973 | |
| 13974 | /* If we do, then we can just use its DIE, if it exists. */ |
| 13975 | if (qualified_type) |
| 13976 | { |
| 13977 | mod_type_die = lookup_type_die (type: qualified_type); |
| 13978 | |
| 13979 | /* DW_AT_endianity doesn't come from a qualifier on the type, so it is |
| 13980 | dealt with specially: the DIE with the attribute, if it exists, is |
| 13981 | placed immediately after the regular DIE for the same type. */ |
| 13982 | if (mod_type_die |
| 13983 | && (!reverse_type |
| 13984 | || ((mod_type_die = mod_type_die->die_sib) != NULL |
| 13985 | && get_AT_unsigned (die: mod_type_die, attr_kind: DW_AT_endianity)))) |
| 13986 | return mod_type_die; |
| 13987 | } |
| 13988 | |
| 13989 | name = qualified_type ? TYPE_NAME (qualified_type) : NULL; |
| 13990 | |
| 13991 | /* Handle C typedef types. */ |
| 13992 | if (name |
| 13993 | && TREE_CODE (name) == TYPE_DECL |
| 13994 | && DECL_ORIGINAL_TYPE (name) |
| 13995 | && !DECL_ARTIFICIAL (name)) |
| 13996 | { |
| 13997 | tree dtype = TREE_TYPE (name); |
| 13998 | |
| 13999 | /* Skip the typedef for base types with DW_AT_endianity, no big deal. */ |
| 14000 | if (qualified_type == dtype && !reverse_type) |
| 14001 | { |
| 14002 | tree origin = decl_ultimate_origin (decl: name); |
| 14003 | |
| 14004 | /* Typedef variants that have an abstract origin don't get their own |
| 14005 | type DIE (see gen_typedef_die), so fall back on the ultimate |
| 14006 | abstract origin instead. */ |
| 14007 | if (origin != NULL && origin != name) |
| 14008 | return modified_type_die (TREE_TYPE (origin), cv_quals, type_attrs, |
| 14009 | reverse, context_die); |
| 14010 | |
| 14011 | /* For a named type, use the typedef. */ |
| 14012 | gen_type_die (qualified_type, context_die); |
| 14013 | return lookup_type_die (type: qualified_type); |
| 14014 | } |
| 14015 | else |
| 14016 | { |
| 14017 | int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype); |
| 14018 | dquals &= cv_qual_mask; |
| 14019 | if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED |
| 14020 | || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type)) |
| 14021 | { |
| 14022 | tree tags = lookup_attribute (attr_name: "btf_type_tag" , list: type_attrs); |
| 14023 | tree dtags = lookup_attribute (attr_name: "btf_type_tag" , |
| 14024 | TYPE_ATTRIBUTES (dtype)); |
| 14025 | if (tags && !attribute_list_equal (tags, dtags)) |
| 14026 | { |
| 14027 | /* Use of a typedef with additional btf_type_tags. |
| 14028 | Create a new typedef DIE to which we can attach the |
| 14029 | additional type_tag DIEs without disturbing other users of |
| 14030 | the underlying typedef. */ |
| 14031 | dw_die_ref mod_die |
| 14032 | = modified_type_die (type: dtype, cv_quals, NULL_TREE, reverse, |
| 14033 | context_die); |
| 14034 | |
| 14035 | mod_die = clone_die (die: mod_die); |
| 14036 | add_child_die (die: comp_unit_die (), child_die: mod_die); |
| 14037 | if (!lookup_type_die (type)) |
| 14038 | equate_type_number_to_die (type, type_die: mod_die); |
| 14039 | |
| 14040 | /* Now generate the type_tag DIEs only for the new |
| 14041 | type_tags appearing in the use of the typedef, and |
| 14042 | attach them to the cloned typedef DIE. */ |
| 14043 | gen_btf_tag_dies (attr: tags, die: mod_die); |
| 14044 | return mod_die; |
| 14045 | } |
| 14046 | /* cv-unqualified version of named type. Just use |
| 14047 | the unnamed type to which it refers. */ |
| 14048 | return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals, |
| 14049 | type_attrs, reverse, context_die); |
| 14050 | } |
| 14051 | /* Else cv-qualified version of named type; fall through. */ |
| 14052 | } |
| 14053 | } |
| 14054 | |
| 14055 | mod_scope = scope_die_for (type, context_die); |
| 14056 | |
| 14057 | if (cv_quals) |
| 14058 | { |
| 14059 | int sub_quals = 0, first_quals = 0; |
| 14060 | unsigned i; |
| 14061 | dw_die_ref first = NULL, last = NULL; |
| 14062 | |
| 14063 | /* Determine a lesser qualified type that most closely matches |
| 14064 | this one. Then generate DW_TAG_* entries for the remaining |
| 14065 | qualifiers. */ |
| 14066 | sub_quals = get_nearest_type_subqualifiers (type, type_quals: cv_quals, |
| 14067 | qual_mask: cv_qual_mask); |
| 14068 | if (sub_quals && use_debug_types) |
| 14069 | { |
| 14070 | bool needed = false; |
| 14071 | /* If emitting type units, make sure the order of qualifiers |
| 14072 | is canonical. Thus, start from unqualified type if |
| 14073 | an earlier qualifier is missing in sub_quals, but some later |
| 14074 | one is present there. */ |
| 14075 | for (i = 0; i < dwarf_qual_info_size; i++) |
| 14076 | if (dwarf_qual_info[i].q & cv_quals & ~sub_quals) |
| 14077 | needed = true; |
| 14078 | else if (needed && (dwarf_qual_info[i].q & cv_quals)) |
| 14079 | { |
| 14080 | sub_quals = 0; |
| 14081 | break; |
| 14082 | } |
| 14083 | } |
| 14084 | mod_type_die = modified_type_die (type, cv_quals: sub_quals, type_attrs, |
| 14085 | reverse, context_die); |
| 14086 | if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope) |
| 14087 | { |
| 14088 | /* As not all intermediate qualified DIEs have corresponding |
| 14089 | tree types, ensure that qualified DIEs in the same scope |
| 14090 | as their DW_AT_type are emitted after their DW_AT_type, |
| 14091 | only with other qualified DIEs for the same type possibly |
| 14092 | in between them. Determine the range of such qualified |
| 14093 | DIEs now (first being the base type, last being corresponding |
| 14094 | last qualified DIE for it). */ |
| 14095 | unsigned int count = 0; |
| 14096 | first = qualified_die_p (die: mod_type_die, mask: &first_quals, |
| 14097 | depth: dwarf_qual_info_size); |
| 14098 | if (first == NULL) |
| 14099 | first = mod_type_die; |
| 14100 | gcc_assert ((first_quals & ~sub_quals) == 0); |
| 14101 | for (count = 0, last = first; |
| 14102 | count < (1U << dwarf_qual_info_size); |
| 14103 | count++, last = last->die_sib) |
| 14104 | { |
| 14105 | int quals = 0; |
| 14106 | if (last == mod_scope->die_child) |
| 14107 | break; |
| 14108 | if (qualified_die_p (die: last->die_sib, mask: &quals, depth: dwarf_qual_info_size) |
| 14109 | != first) |
| 14110 | break; |
| 14111 | } |
| 14112 | } |
| 14113 | |
| 14114 | for (i = 0; i < dwarf_qual_info_size; i++) |
| 14115 | if (dwarf_qual_info[i].q & cv_quals & ~sub_quals) |
| 14116 | { |
| 14117 | dw_die_ref d; |
| 14118 | if (first && first != last) |
| 14119 | { |
| 14120 | for (d = first->die_sib; ; d = d->die_sib) |
| 14121 | { |
| 14122 | int quals = 0; |
| 14123 | qualified_die_p (die: d, mask: &quals, depth: dwarf_qual_info_size); |
| 14124 | if (quals == (first_quals | dwarf_qual_info[i].q)) |
| 14125 | break; |
| 14126 | if (d == last) |
| 14127 | { |
| 14128 | d = NULL; |
| 14129 | break; |
| 14130 | } |
| 14131 | } |
| 14132 | if (d) |
| 14133 | { |
| 14134 | mod_type_die = d; |
| 14135 | continue; |
| 14136 | } |
| 14137 | } |
| 14138 | if (first) |
| 14139 | { |
| 14140 | d = new_die_raw (tag_value: dwarf_qual_info[i].t); |
| 14141 | add_child_die_after (die: mod_scope, child_die: d, after_die: last); |
| 14142 | last = d; |
| 14143 | } |
| 14144 | else |
| 14145 | d = new_die (tag_value: dwarf_qual_info[i].t, parent_die: mod_scope, t: type); |
| 14146 | if (mod_type_die) |
| 14147 | add_AT_die_ref (die: d, attr_kind: DW_AT_type, targ_die: mod_type_die); |
| 14148 | mod_type_die = d; |
| 14149 | first_quals |= dwarf_qual_info[i].q; |
| 14150 | } |
| 14151 | } |
| 14152 | else if (type_attrs |
| 14153 | && (btf_tags = lookup_attribute (attr_name: "btf_type_tag" , list: type_attrs))) |
| 14154 | { |
| 14155 | /* First create a DIE for the type without any type_tag attribute. |
| 14156 | Then generate TAG_GNU_annotation DIEs for the type_tags. */ |
| 14157 | dw_die_ref mod_die = modified_type_die (type, cv_quals, NULL_TREE, |
| 14158 | reverse, context_die); |
| 14159 | gen_btf_tag_dies (attr: btf_tags, die: mod_die); |
| 14160 | return mod_die; |
| 14161 | } |
| 14162 | else if (code == POINTER_TYPE || code == REFERENCE_TYPE) |
| 14163 | { |
| 14164 | dwarf_tag tag = DW_TAG_pointer_type; |
| 14165 | if (code == REFERENCE_TYPE) |
| 14166 | { |
| 14167 | if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4) |
| 14168 | tag = DW_TAG_rvalue_reference_type; |
| 14169 | else |
| 14170 | tag = DW_TAG_reference_type; |
| 14171 | } |
| 14172 | mod_type_die = new_die (tag_value: tag, parent_die: mod_scope, t: type); |
| 14173 | |
| 14174 | add_AT_unsigned (die: mod_type_die, attr_kind: DW_AT_byte_size, |
| 14175 | unsigned_val: simple_type_size_in_bits (type) / BITS_PER_UNIT); |
| 14176 | add_alignment_attribute (mod_type_die, type); |
| 14177 | item_type = TREE_TYPE (type); |
| 14178 | |
| 14179 | addr_space_t as = TYPE_ADDR_SPACE (item_type); |
| 14180 | if (!ADDR_SPACE_GENERIC_P (as)) |
| 14181 | { |
| 14182 | int action = targetm.addr_space.debug (as); |
| 14183 | if (action >= 0) |
| 14184 | { |
| 14185 | /* Positive values indicate an address_class. */ |
| 14186 | add_AT_unsigned (die: mod_type_die, attr_kind: DW_AT_address_class, unsigned_val: action); |
| 14187 | } |
| 14188 | else |
| 14189 | { |
| 14190 | /* Negative values indicate an (inverted) segment base reg. */ |
| 14191 | dw_loc_descr_ref d |
| 14192 | = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED); |
| 14193 | add_AT_loc (die: mod_type_die, attr_kind: DW_AT_segment, loc: d); |
| 14194 | } |
| 14195 | } |
| 14196 | } |
| 14197 | else if (code == ARRAY_TYPE |
| 14198 | || (lang_hooks.types.get_array_descr_info |
| 14199 | && lang_hooks.types.get_array_descr_info (type, &info))) |
| 14200 | { |
| 14201 | gen_type_die (type, mod_scope); |
| 14202 | return lookup_type_die (type); |
| 14203 | } |
| 14204 | else if (code == INTEGER_TYPE |
| 14205 | && TREE_TYPE (type) != NULL_TREE |
| 14206 | && subrange_type_for_debug_p (type, &low, &high)) |
| 14207 | { |
| 14208 | tree bias = NULL_TREE; |
| 14209 | if (lang_hooks.types.get_type_bias) |
| 14210 | bias = lang_hooks.types.get_type_bias (type); |
| 14211 | mod_type_die = subrange_type_die (type, low, high, bias, context_die: mod_scope); |
| 14212 | item_type = TREE_TYPE (type); |
| 14213 | } |
| 14214 | else if (is_base_type (type)) |
| 14215 | { |
| 14216 | /* If a target supports long double as different floating point |
| 14217 | modes with the same 16-byte size, use normal DW_TAG_base_type |
| 14218 | only for the composite (ibm_extended_real_format) type and |
| 14219 | for the other for the time being emit instead a "_Float128" |
| 14220 | or "complex _Float128" DW_TAG_base_type and a "long double" |
| 14221 | or "complex long double" typedef to it. */ |
| 14222 | if (tree other_type = long_double_as_float128 (type)) |
| 14223 | { |
| 14224 | dw_die_ref other_die; |
| 14225 | if (TYPE_NAME (other_type)) |
| 14226 | { |
| 14227 | other_die |
| 14228 | = modified_type_die (type: other_type, cv_quals: TYPE_UNQUALIFIED, |
| 14229 | TYPE_ATTRIBUTES (other_type), |
| 14230 | reverse, context_die); |
| 14231 | } |
| 14232 | else |
| 14233 | { |
| 14234 | other_die = base_type_die (type, reverse); |
| 14235 | add_child_die (die: comp_unit_die (), child_die: other_die); |
| 14236 | add_name_attribute (other_die, |
| 14237 | TREE_CODE (type) == COMPLEX_TYPE |
| 14238 | ? "complex _Float128" : "_Float128" ); |
| 14239 | } |
| 14240 | mod_type_die = new_die_raw (tag_value: DW_TAG_typedef); |
| 14241 | add_AT_die_ref (die: mod_type_die, attr_kind: DW_AT_type, targ_die: other_die); |
| 14242 | } |
| 14243 | else |
| 14244 | mod_type_die = base_type_die (type, reverse); |
| 14245 | |
| 14246 | /* The DIE with DW_AT_endianity is placed right after the naked DIE. */ |
| 14247 | if (reverse_type) |
| 14248 | { |
| 14249 | dw_die_ref after_die = modified_type_die (type, cv_quals, type_attrs, |
| 14250 | reverse: false, context_die); |
| 14251 | add_child_die_after (die: mod_scope, child_die: mod_type_die, after_die); |
| 14252 | } |
| 14253 | else |
| 14254 | add_child_die (die: mod_scope, child_die: mod_type_die); |
| 14255 | |
| 14256 | add_pubtype (decl: type, die: mod_type_die); |
| 14257 | } |
| 14258 | else |
| 14259 | { |
| 14260 | /* The DIE with DW_AT_endianity is placed right after the naked DIE. */ |
| 14261 | if (reverse_type) |
| 14262 | { |
| 14263 | dw_die_ref after_die = modified_type_die (type, cv_quals, type_attrs, |
| 14264 | reverse: false, context_die); |
| 14265 | gen_type_die (type, context_die, true); |
| 14266 | gcc_assert (after_die->die_sib |
| 14267 | && get_AT_unsigned (after_die->die_sib, DW_AT_endianity)); |
| 14268 | return after_die->die_sib; |
| 14269 | } |
| 14270 | |
| 14271 | gen_type_die (type, context_die); |
| 14272 | |
| 14273 | /* We have to get the type_main_variant here (and pass that to the |
| 14274 | `lookup_type_die' routine) because the ..._TYPE node we have |
| 14275 | might simply be a *copy* of some original type node (where the |
| 14276 | copy was created to help us keep track of typedef names) and |
| 14277 | that copy might have a different TYPE_UID from the original |
| 14278 | ..._TYPE node. */ |
| 14279 | if (code == FUNCTION_TYPE || code == METHOD_TYPE) |
| 14280 | { |
| 14281 | /* For function/method types, can't just use type_main_variant here, |
| 14282 | because that can have different ref-qualifiers for C++, |
| 14283 | but try to canonicalize. */ |
| 14284 | tree main = TYPE_MAIN_VARIANT (type); |
| 14285 | for (tree t = main; t; t = TYPE_NEXT_VARIANT (t)) |
| 14286 | if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0 |
| 14287 | && check_base_type (cand: t, base: main) |
| 14288 | && check_lang_type (cand: t, base: type)) |
| 14289 | return lookup_type_die (type: t); |
| 14290 | return lookup_type_die (type); |
| 14291 | } |
| 14292 | /* Vectors have the debugging information in the type, |
| 14293 | not the main variant. */ |
| 14294 | else if (code == VECTOR_TYPE) |
| 14295 | return lookup_type_die (type); |
| 14296 | else |
| 14297 | return lookup_type_die (type: type_main_variant (type)); |
| 14298 | } |
| 14299 | |
| 14300 | /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those, |
| 14301 | don't output a DW_TAG_typedef, since there isn't one in the |
| 14302 | user's program; just attach a DW_AT_name to the type. |
| 14303 | Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type |
| 14304 | if the base type already has the same name. */ |
| 14305 | if (name |
| 14306 | && ((TREE_CODE (name) != TYPE_DECL |
| 14307 | && (qualified_type == TYPE_MAIN_VARIANT (type) |
| 14308 | || (cv_quals == TYPE_UNQUALIFIED))) |
| 14309 | || (TREE_CODE (name) == TYPE_DECL |
| 14310 | && DECL_NAME (name) |
| 14311 | && !DECL_NAMELESS (name) |
| 14312 | && (TREE_TYPE (name) == qualified_type |
| 14313 | || (lang_hooks.types.get_debug_type |
| 14314 | && (lang_hooks.types.get_debug_type (TREE_TYPE (name)) |
| 14315 | == qualified_type)))))) |
| 14316 | { |
| 14317 | if (TREE_CODE (name) == TYPE_DECL) |
| 14318 | /* Could just call add_name_and_src_coords_attributes here, |
| 14319 | but since this is a builtin type it doesn't have any |
| 14320 | useful source coordinates anyway. */ |
| 14321 | name = DECL_NAME (name); |
| 14322 | add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name)); |
| 14323 | } |
| 14324 | else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type) |
| 14325 | { |
| 14326 | if (TREE_CODE (type) == BITINT_TYPE) |
| 14327 | { |
| 14328 | char name_buf[sizeof ("unsigned _BitInt(2147483647)" )]; |
| 14329 | snprintf (s: name_buf, maxlen: sizeof (name_buf), |
| 14330 | format: "%s_BitInt(%d)" , TYPE_UNSIGNED (type) ? "unsigned " : "" , |
| 14331 | TYPE_PRECISION (type)); |
| 14332 | add_name_attribute (mod_type_die, name_buf); |
| 14333 | } |
| 14334 | else |
| 14335 | { |
| 14336 | /* This probably indicates a bug. */ |
| 14337 | name = TYPE_IDENTIFIER (type); |
| 14338 | add_name_attribute (mod_type_die, |
| 14339 | name |
| 14340 | ? IDENTIFIER_POINTER (name) : "__unknown__" ); |
| 14341 | } |
| 14342 | } |
| 14343 | |
| 14344 | if (qualified_type && !reverse_type) |
| 14345 | equate_type_number_to_die (type: qualified_type, type_die: mod_type_die); |
| 14346 | |
| 14347 | if (item_type) |
| 14348 | /* We must do this after the equate_type_number_to_die call, in case |
| 14349 | this is a recursive type. This ensures that the modified_type_die |
| 14350 | recursion will terminate even if the type is recursive. Recursive |
| 14351 | types are possible in Ada. */ |
| 14352 | sub_die = modified_type_die (type: item_type, |
| 14353 | TYPE_QUALS_NO_ADDR_SPACE (item_type), |
| 14354 | TYPE_ATTRIBUTES (item_type), |
| 14355 | reverse, context_die); |
| 14356 | |
| 14357 | if (sub_die != NULL) |
| 14358 | add_AT_die_ref (die: mod_type_die, attr_kind: DW_AT_type, targ_die: sub_die); |
| 14359 | |
| 14360 | add_gnat_descriptive_type_attribute (mod_type_die, type, context_die); |
| 14361 | if (TYPE_ARTIFICIAL (type)) |
| 14362 | add_AT_flag (die: mod_type_die, attr_kind: DW_AT_artificial, flag: 1); |
| 14363 | |
| 14364 | return mod_type_die; |
| 14365 | } |
| 14366 | |
| 14367 | /* Generate DIEs for the generic parameters of T. |
| 14368 | T must be either a generic type or a generic function. |
| 14369 | See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */ |
| 14370 | |
| 14371 | static void |
| 14372 | gen_generic_params_dies (tree t) |
| 14373 | { |
| 14374 | tree parms, args; |
| 14375 | int parms_num, i; |
| 14376 | dw_die_ref die = NULL; |
| 14377 | int non_default; |
| 14378 | |
| 14379 | if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t))) |
| 14380 | return; |
| 14381 | |
| 14382 | if (TYPE_P (t)) |
| 14383 | die = lookup_type_die (type: t); |
| 14384 | else if (DECL_P (t)) |
| 14385 | die = lookup_decl_die (decl: t); |
| 14386 | |
| 14387 | gcc_assert (die); |
| 14388 | |
| 14389 | parms = lang_hooks.get_innermost_generic_parms (t); |
| 14390 | if (!parms) |
| 14391 | /* T has no generic parameter. It means T is neither a generic type |
| 14392 | or function. End of story. */ |
| 14393 | return; |
| 14394 | |
| 14395 | parms_num = TREE_VEC_LENGTH (parms); |
| 14396 | args = lang_hooks.get_innermost_generic_args (t); |
| 14397 | if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST) |
| 14398 | non_default = int_cst_value (TREE_CHAIN (args)); |
| 14399 | else |
| 14400 | non_default = TREE_VEC_LENGTH (args); |
| 14401 | for (i = 0; i < parms_num; i++) |
| 14402 | { |
| 14403 | tree parm, arg, arg_pack_elems; |
| 14404 | dw_die_ref parm_die; |
| 14405 | |
| 14406 | parm = TREE_VEC_ELT (parms, i); |
| 14407 | arg = TREE_VEC_ELT (args, i); |
| 14408 | arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg); |
| 14409 | gcc_assert (parm && TREE_VALUE (parm) && arg); |
| 14410 | |
| 14411 | if (parm && TREE_VALUE (parm) && arg) |
| 14412 | { |
| 14413 | /* If PARM represents a template parameter pack, |
| 14414 | emit a DW_TAG_GNU_template_parameter_pack DIE, followed |
| 14415 | by DW_TAG_template_*_parameter DIEs for the argument |
| 14416 | pack elements of ARG. Note that ARG would then be |
| 14417 | an argument pack. */ |
| 14418 | if (arg_pack_elems) |
| 14419 | parm_die = template_parameter_pack_die (TREE_VALUE (parm), |
| 14420 | arg_pack_elems, |
| 14421 | die); |
| 14422 | else |
| 14423 | parm_die = generic_parameter_die (TREE_VALUE (parm), arg, |
| 14424 | true /* emit name */, die); |
| 14425 | if (i >= non_default) |
| 14426 | add_AT_flag (die: parm_die, attr_kind: DW_AT_default_value, flag: 1); |
| 14427 | } |
| 14428 | } |
| 14429 | } |
| 14430 | |
| 14431 | /* Create and return a DIE for PARM which should be |
| 14432 | the representation of a generic type parameter. |
| 14433 | For instance, in the C++ front end, PARM would be a template parameter. |
| 14434 | ARG is the argument to PARM. |
| 14435 | EMIT_NAME_P if tree, the DIE will have DW_AT_name attribute set to the |
| 14436 | name of the PARM. |
| 14437 | PARENT_DIE is the parent DIE which the new created DIE should be added to, |
| 14438 | as a child node. */ |
| 14439 | |
| 14440 | static dw_die_ref |
| 14441 | generic_parameter_die (tree parm, tree arg, |
| 14442 | bool emit_name_p, |
| 14443 | dw_die_ref parent_die) |
| 14444 | { |
| 14445 | dw_die_ref tmpl_die = NULL; |
| 14446 | const char *name = NULL; |
| 14447 | |
| 14448 | /* C++20 accepts class literals as template parameters, and var |
| 14449 | decls with initializers represent them. The VAR_DECLs would be |
| 14450 | rejected, but we can take the DECL_INITIAL constructor and |
| 14451 | attempt to expand it. */ |
| 14452 | if (arg && VAR_P (arg)) |
| 14453 | arg = DECL_INITIAL (arg); |
| 14454 | |
| 14455 | if (!parm || !DECL_NAME (parm) || !arg) |
| 14456 | return NULL; |
| 14457 | |
| 14458 | /* We support non-type generic parameters and arguments, |
| 14459 | type generic parameters and arguments, as well as |
| 14460 | generic generic parameters (a.k.a. template template parameters in C++) |
| 14461 | and arguments. */ |
| 14462 | if (TREE_CODE (parm) == PARM_DECL) |
| 14463 | /* PARM is a nontype generic parameter */ |
| 14464 | tmpl_die = new_die (tag_value: DW_TAG_template_value_param, parent_die, t: parm); |
| 14465 | else if (TREE_CODE (parm) == TYPE_DECL) |
| 14466 | /* PARM is a type generic parameter. */ |
| 14467 | tmpl_die = new_die (tag_value: DW_TAG_template_type_param, parent_die, t: parm); |
| 14468 | else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm)) |
| 14469 | /* PARM is a generic generic parameter. |
| 14470 | Its DIE is a GNU extension. It shall have a |
| 14471 | DW_AT_name attribute to represent the name of the template template |
| 14472 | parameter, and a DW_AT_GNU_template_name attribute to represent the |
| 14473 | name of the template template argument. */ |
| 14474 | tmpl_die = new_die (tag_value: DW_TAG_GNU_template_template_param, |
| 14475 | parent_die, t: parm); |
| 14476 | else |
| 14477 | gcc_unreachable (); |
| 14478 | |
| 14479 | if (tmpl_die) |
| 14480 | { |
| 14481 | tree tmpl_type; |
| 14482 | |
| 14483 | /* If PARM is a generic parameter pack, it means we are |
| 14484 | emitting debug info for a template argument pack element. |
| 14485 | In other terms, ARG is a template argument pack element. |
| 14486 | In that case, we don't emit any DW_AT_name attribute for |
| 14487 | the die. */ |
| 14488 | if (emit_name_p) |
| 14489 | { |
| 14490 | name = IDENTIFIER_POINTER (DECL_NAME (parm)); |
| 14491 | gcc_assert (name); |
| 14492 | add_AT_string (die: tmpl_die, attr_kind: DW_AT_name, str: name); |
| 14493 | } |
| 14494 | |
| 14495 | if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm)) |
| 14496 | { |
| 14497 | /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter |
| 14498 | TMPL_DIE should have a child DW_AT_type attribute that is set |
| 14499 | to the type of the argument to PARM, which is ARG. |
| 14500 | If PARM is a type generic parameter, TMPL_DIE should have a |
| 14501 | child DW_AT_type that is set to ARG. */ |
| 14502 | tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg); |
| 14503 | add_type_attribute (tmpl_die, tmpl_type, |
| 14504 | (TREE_THIS_VOLATILE (tmpl_type) |
| 14505 | ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED), |
| 14506 | false, parent_die); |
| 14507 | } |
| 14508 | else |
| 14509 | { |
| 14510 | /* So TMPL_DIE is a DIE representing a |
| 14511 | a generic generic template parameter, a.k.a template template |
| 14512 | parameter in C++ and arg is a template. */ |
| 14513 | |
| 14514 | /* The DW_AT_GNU_template_name attribute of the DIE must be set |
| 14515 | to the name of the argument. */ |
| 14516 | name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, scope: 1); |
| 14517 | if (name) |
| 14518 | add_AT_string (die: tmpl_die, attr_kind: DW_AT_GNU_template_name, str: name); |
| 14519 | } |
| 14520 | |
| 14521 | if (TREE_CODE (parm) == PARM_DECL) |
| 14522 | /* So PARM is a non-type generic parameter. |
| 14523 | DWARF3 5.6.8 says we must set a DW_AT_const_value child |
| 14524 | attribute of TMPL_DIE which value represents the value |
| 14525 | of ARG. |
| 14526 | We must be careful here: |
| 14527 | The value of ARG might reference some function decls. |
| 14528 | We might currently be emitting debug info for a generic |
| 14529 | type and types are emitted before function decls, we don't |
| 14530 | know if the function decls referenced by ARG will actually be |
| 14531 | emitted after cgraph computations. |
| 14532 | So must defer the generation of the DW_AT_const_value to |
| 14533 | after cgraph is ready. */ |
| 14534 | append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg); |
| 14535 | } |
| 14536 | |
| 14537 | return tmpl_die; |
| 14538 | } |
| 14539 | |
| 14540 | /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing. |
| 14541 | PARM_PACK must be a template parameter pack. The returned DIE |
| 14542 | will be child DIE of PARENT_DIE. */ |
| 14543 | |
| 14544 | static dw_die_ref |
| 14545 | template_parameter_pack_die (tree parm_pack, |
| 14546 | tree parm_pack_args, |
| 14547 | dw_die_ref parent_die) |
| 14548 | { |
| 14549 | dw_die_ref die; |
| 14550 | int j; |
| 14551 | |
| 14552 | gcc_assert (parent_die && parm_pack); |
| 14553 | |
| 14554 | die = new_die (tag_value: DW_TAG_GNU_template_parameter_pack, parent_die, t: parm_pack); |
| 14555 | add_name_and_src_coords_attributes (die, parm_pack); |
| 14556 | for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++) |
| 14557 | generic_parameter_die (parm: parm_pack, |
| 14558 | TREE_VEC_ELT (parm_pack_args, j), |
| 14559 | emit_name_p: false /* Don't emit DW_AT_name */, |
| 14560 | parent_die: die); |
| 14561 | return die; |
| 14562 | } |
| 14563 | |
| 14564 | /* Return the debugger register number described by a given RTL node. */ |
| 14565 | |
| 14566 | static unsigned int |
| 14567 | debugger_reg_number (const_rtx rtl) |
| 14568 | { |
| 14569 | unsigned regno = REGNO (rtl); |
| 14570 | |
| 14571 | gcc_assert (regno < FIRST_PSEUDO_REGISTER); |
| 14572 | |
| 14573 | #ifdef LEAF_REG_REMAP |
| 14574 | if (crtl->uses_only_leaf_regs) |
| 14575 | { |
| 14576 | int leaf_reg = LEAF_REG_REMAP (regno); |
| 14577 | if (leaf_reg != -1) |
| 14578 | regno = (unsigned) leaf_reg; |
| 14579 | } |
| 14580 | #endif |
| 14581 | |
| 14582 | regno = DEBUGGER_REGNO (regno); |
| 14583 | gcc_assert (regno != INVALID_REGNUM); |
| 14584 | return regno; |
| 14585 | } |
| 14586 | |
| 14587 | /* Optionally add a DW_OP_piece term to a location description expression. |
| 14588 | DW_OP_piece is only added if the location description expression already |
| 14589 | doesn't end with DW_OP_piece. */ |
| 14590 | |
| 14591 | static void |
| 14592 | add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size) |
| 14593 | { |
| 14594 | dw_loc_descr_ref loc; |
| 14595 | |
| 14596 | if (*list_head != NULL) |
| 14597 | { |
| 14598 | /* Find the end of the chain. */ |
| 14599 | for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next) |
| 14600 | ; |
| 14601 | |
| 14602 | if (loc->dw_loc_opc != DW_OP_piece) |
| 14603 | loc->dw_loc_next = new_loc_descr (op: DW_OP_piece, oprnd1: size, oprnd2: 0); |
| 14604 | } |
| 14605 | } |
| 14606 | |
| 14607 | /* Return a location descriptor that designates a machine register or |
| 14608 | zero if there is none. */ |
| 14609 | |
| 14610 | static dw_loc_descr_ref |
| 14611 | reg_loc_descriptor (rtx rtl, enum var_init_status initialized) |
| 14612 | { |
| 14613 | rtx regs; |
| 14614 | |
| 14615 | if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER) |
| 14616 | return 0; |
| 14617 | |
| 14618 | /* We only use "frame base" when we're sure we're talking about the |
| 14619 | post-prologue local stack frame. We do this by *not* running |
| 14620 | register elimination until this point, and recognizing the special |
| 14621 | argument pointer and soft frame pointer rtx's. |
| 14622 | Use DW_OP_fbreg offset DW_OP_stack_value in this case. */ |
| 14623 | if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx) |
| 14624 | && (ira_use_lra_p |
| 14625 | ? lra_eliminate_regs (rtl, VOIDmode, NULL_RTX) |
| 14626 | : eliminate_regs (rtl, VOIDmode, NULL_RTX)) != rtl) |
| 14627 | { |
| 14628 | dw_loc_descr_ref result = NULL; |
| 14629 | |
| 14630 | if (dwarf_version >= 4 || !dwarf_strict) |
| 14631 | { |
| 14632 | result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode, |
| 14633 | initialized); |
| 14634 | if (result) |
| 14635 | add_loc_descr (list_head: &result, |
| 14636 | descr: new_loc_descr (op: DW_OP_stack_value, oprnd1: 0, oprnd2: 0)); |
| 14637 | } |
| 14638 | return result; |
| 14639 | } |
| 14640 | |
| 14641 | regs = targetm.dwarf_register_span (rtl); |
| 14642 | |
| 14643 | if (REG_NREGS (rtl) > 1 || regs) |
| 14644 | return multiple_reg_loc_descriptor (rtl, regs, initialized); |
| 14645 | else |
| 14646 | { |
| 14647 | unsigned int debugger_regnum = debugger_reg_number (rtl); |
| 14648 | if (debugger_regnum == IGNORED_DWARF_REGNUM) |
| 14649 | return 0; |
| 14650 | return one_reg_loc_descriptor (debugger_regnum, initialized); |
| 14651 | } |
| 14652 | } |
| 14653 | |
| 14654 | /* Return a location descriptor that designates a machine register for |
| 14655 | a given hard register number. */ |
| 14656 | |
| 14657 | static dw_loc_descr_ref |
| 14658 | one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized) |
| 14659 | { |
| 14660 | dw_loc_descr_ref reg_loc_descr; |
| 14661 | |
| 14662 | if (regno <= 31) |
| 14663 | reg_loc_descr |
| 14664 | = new_loc_descr (op: (enum dwarf_location_atom) (DW_OP_reg0 + regno), oprnd1: 0, oprnd2: 0); |
| 14665 | else |
| 14666 | reg_loc_descr = new_loc_descr (op: DW_OP_regx, oprnd1: regno, oprnd2: 0); |
| 14667 | |
| 14668 | if (initialized == VAR_INIT_STATUS_UNINITIALIZED) |
| 14669 | add_loc_descr (list_head: ®_loc_descr, descr: new_loc_descr (op: DW_OP_GNU_uninit, oprnd1: 0, oprnd2: 0)); |
| 14670 | |
| 14671 | return reg_loc_descr; |
| 14672 | } |
| 14673 | |
/* Given an RTL of a register, return a location descriptor that
   designates a value that spans more than one register.  REGS is
   either NULL_RTX, in which case the value occupies REG_NREGS (RTL)
   consecutive hard registers, or a PARALLEL (as produced by
   targetm.dwarf_register_span) describing a non-contiguous register
   set.  INITIALIZED is the variable's initialization status.  Returns
   NULL when the per-register piece size is not a compile-time
   constant.  */

static dw_loc_descr_ref
multiple_reg_loc_descriptor (rtx rtl, rtx regs,
			     enum var_init_status initialized)
{
  int size, i;
  dw_loc_descr_ref loc_result = NULL;

  /* Simple, contiguous registers.  */
  if (regs == NULL_RTX)
    {
      unsigned reg = REGNO (rtl);
      int nregs;

#ifdef LEAF_REG_REMAP
      /* Undo the leaf-function register renumbering before mapping to
	 debugger register numbers.  */
      if (crtl->uses_only_leaf_regs)
	{
	  int leaf_reg = LEAF_REG_REMAP (reg);
	  if (leaf_reg != -1)
	    reg = (unsigned) leaf_reg;
	}
#endif

      /* Sanity check: the remapping above must agree with what
	 debugger_reg_number computes for RTL.  */
      gcc_assert ((unsigned) DEBUGGER_REGNO (reg) == debugger_reg_number (rtl));
      nregs = REG_NREGS (rtl);

      /* At present we only track constant-sized pieces.  */
      if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (const_value: &size))
	return NULL;
      /* Each register holds an equal share of the value.  */
      size /= nregs;

      /* Emit one register location plus a DW_OP_piece per hard
	 register, in ascending register order.  */
      loc_result = NULL;
      while (nregs--)
	{
	  dw_loc_descr_ref t;

	  t = one_reg_loc_descriptor (DEBUGGER_REGNO (reg),
				      initialized: VAR_INIT_STATUS_INITIALIZED);
	  add_loc_descr (list_head: &loc_result, descr: t);
	  add_loc_descr_op_piece (list_head: &loc_result, size);
	  ++reg;
	}
      return loc_result;
    }

  /* Now onto stupid register sets in non contiguous locations.  */

  gcc_assert (GET_CODE (regs) == PARALLEL);

  /* At present we only track constant-sized pieces.  */
  if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (const_value: &size))
    return NULL;
  loc_result = NULL;

  /* One register location plus a DW_OP_piece per PARALLEL element;
     every piece uses the size of the first element's mode.  */
  for (i = 0; i < XVECLEN (regs, 0); ++i)
    {
      dw_loc_descr_ref t;

      t = one_reg_loc_descriptor (regno: debugger_reg_number (XVECEXP (regs, 0, i)),
				  initialized: VAR_INIT_STATUS_INITIALIZED);
      add_loc_descr (list_head: &loc_result, descr: t);
      add_loc_descr_op_piece (list_head: &loc_result, size);
    }

  /* The uninitialized marker applies to the whole composite, so it is
     appended once at the end rather than per piece.  */
  if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
    add_loc_descr (list_head: &loc_result, descr: new_loc_descr (op: DW_OP_GNU_uninit, oprnd1: 0, oprnd2: 0));
  return loc_result;
}
| 14744 | |
| 14745 | static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT); |
| 14746 | |
| 14747 | /* Return a location descriptor that designates a constant i, |
| 14748 | as a compound operation from constant (i >> shift), constant shift |
| 14749 | and DW_OP_shl. */ |
| 14750 | |
| 14751 | static dw_loc_descr_ref |
| 14752 | int_shift_loc_descriptor (HOST_WIDE_INT i, int shift) |
| 14753 | { |
| 14754 | dw_loc_descr_ref ret = int_loc_descriptor (i >> shift); |
| 14755 | add_loc_descr (list_head: &ret, descr: int_loc_descriptor (shift)); |
| 14756 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_shl, oprnd1: 0, oprnd2: 0)); |
| 14757 | return ret; |
| 14758 | } |
| 14759 | |
| 14760 | /* Return a location descriptor that designates constant POLY_I. */ |
| 14761 | |
| 14762 | static dw_loc_descr_ref |
| 14763 | int_loc_descriptor (poly_int64 poly_i) |
| 14764 | { |
| 14765 | enum dwarf_location_atom op; |
| 14766 | |
| 14767 | HOST_WIDE_INT i; |
| 14768 | if (!poly_i.is_constant (const_value: &i)) |
| 14769 | { |
| 14770 | /* Create location descriptions for the non-constant part and |
| 14771 | add any constant offset at the end. */ |
| 14772 | dw_loc_descr_ref ret = NULL; |
| 14773 | HOST_WIDE_INT constant = poly_i.coeffs[0]; |
| 14774 | for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j) |
| 14775 | { |
| 14776 | HOST_WIDE_INT coeff = poly_i.coeffs[j]; |
| 14777 | if (coeff != 0) |
| 14778 | { |
| 14779 | dw_loc_descr_ref start = ret; |
| 14780 | unsigned int factor; |
| 14781 | int bias; |
| 14782 | unsigned int regno = targetm.dwarf_poly_indeterminate_value |
| 14783 | (j, &factor, &bias); |
| 14784 | |
| 14785 | /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value: |
| 14786 | add COEFF * (REGNO / FACTOR) now and subtract |
| 14787 | COEFF * BIAS from the final constant part. */ |
| 14788 | constant -= coeff * bias; |
| 14789 | add_loc_descr (list_head: &ret, descr: new_reg_loc_descr (reg: regno, offset: 0)); |
| 14790 | if (coeff % factor == 0) |
| 14791 | coeff /= factor; |
| 14792 | else |
| 14793 | { |
| 14794 | int amount = exact_log2 (x: factor); |
| 14795 | gcc_assert (amount >= 0); |
| 14796 | add_loc_descr (list_head: &ret, descr: int_loc_descriptor (poly_i: amount)); |
| 14797 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_shr, oprnd1: 0, oprnd2: 0)); |
| 14798 | } |
| 14799 | if (coeff != 1) |
| 14800 | { |
| 14801 | add_loc_descr (list_head: &ret, descr: int_loc_descriptor (poly_i: coeff)); |
| 14802 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_mul, oprnd1: 0, oprnd2: 0)); |
| 14803 | } |
| 14804 | if (start) |
| 14805 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_plus, oprnd1: 0, oprnd2: 0)); |
| 14806 | } |
| 14807 | } |
| 14808 | loc_descr_plus_const (list_head: &ret, poly_offset: constant); |
| 14809 | return ret; |
| 14810 | } |
| 14811 | |
| 14812 | /* Pick the smallest representation of a constant, rather than just |
| 14813 | defaulting to the LEB encoding. */ |
| 14814 | if (i >= 0) |
| 14815 | { |
| 14816 | int clz = clz_hwi (x: i); |
| 14817 | int ctz = ctz_hwi (x: i); |
| 14818 | if (i <= 31) |
| 14819 | op = (enum dwarf_location_atom) (DW_OP_lit0 + i); |
| 14820 | else if (i <= 0xff) |
| 14821 | op = DW_OP_const1u; |
| 14822 | else if (i <= 0xffff) |
| 14823 | op = DW_OP_const2u; |
| 14824 | else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5 |
| 14825 | && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT) |
| 14826 | /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and |
| 14827 | DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes, |
| 14828 | while DW_OP_const4u is 5 bytes. */ |
| 14829 | return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5); |
| 14830 | else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8 |
| 14831 | && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT) |
| 14832 | /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes, |
| 14833 | while DW_OP_const4u is 5 bytes. */ |
| 14834 | return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8); |
| 14835 | |
| 14836 | else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff |
| 14837 | && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i) |
| 14838 | <= 4) |
| 14839 | { |
| 14840 | /* As i >= 2**31, the double cast above will yield a negative number. |
| 14841 | Since wrapping is defined in DWARF expressions we can output big |
| 14842 | positive integers as small negative ones, regardless of the size |
| 14843 | of host wide ints. |
| 14844 | |
| 14845 | Here, since the evaluator will handle 32-bit values and since i >= |
| 14846 | 2**31, we know it's going to be interpreted as a negative literal: |
| 14847 | store it this way if we can do better than 5 bytes this way. */ |
| 14848 | return int_loc_descriptor (poly_i: (HOST_WIDE_INT) (int32_t) i); |
| 14849 | } |
| 14850 | else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff) |
| 14851 | op = DW_OP_const4u; |
| 14852 | |
| 14853 | /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at |
| 14854 | least 6 bytes: see if we can do better before falling back to it. */ |
| 14855 | else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8 |
| 14856 | && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT) |
| 14857 | /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */ |
| 14858 | return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8); |
| 14859 | else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16 |
| 14860 | && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31) |
| 14861 | >= HOST_BITS_PER_WIDE_INT) |
| 14862 | /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes, |
| 14863 | DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */ |
| 14864 | return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16); |
| 14865 | else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32 |
| 14866 | && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT |
| 14867 | && size_of_uleb128 (i) > 6) |
| 14868 | /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */ |
| 14869 | return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32); |
| 14870 | else |
| 14871 | op = DW_OP_constu; |
| 14872 | } |
| 14873 | else |
| 14874 | { |
| 14875 | if (i >= -0x80) |
| 14876 | op = DW_OP_const1s; |
| 14877 | else if (i >= -0x8000) |
| 14878 | op = DW_OP_const2s; |
| 14879 | else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000) |
| 14880 | { |
| 14881 | if (size_of_int_loc_descriptor (i) < 5) |
| 14882 | { |
| 14883 | dw_loc_descr_ref ret = int_loc_descriptor (poly_i: -i); |
| 14884 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_neg, oprnd1: 0, oprnd2: 0)); |
| 14885 | return ret; |
| 14886 | } |
| 14887 | op = DW_OP_const4s; |
| 14888 | } |
| 14889 | else |
| 14890 | { |
| 14891 | if (size_of_int_loc_descriptor (i) |
| 14892 | < (unsigned long) 1 + size_of_sleb128 (i)) |
| 14893 | { |
| 14894 | dw_loc_descr_ref ret = int_loc_descriptor (poly_i: -i); |
| 14895 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_neg, oprnd1: 0, oprnd2: 0)); |
| 14896 | return ret; |
| 14897 | } |
| 14898 | op = DW_OP_consts; |
| 14899 | } |
| 14900 | } |
| 14901 | |
| 14902 | return new_loc_descr (op, oprnd1: i, oprnd2: 0); |
| 14903 | } |
| 14904 | |
| 14905 | /* Likewise, for unsigned constants. */ |
| 14906 | |
| 14907 | static dw_loc_descr_ref |
| 14908 | uint_loc_descriptor (unsigned HOST_WIDE_INT i) |
| 14909 | { |
| 14910 | const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT); |
| 14911 | const unsigned HOST_WIDE_INT max_uint |
| 14912 | = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT); |
| 14913 | |
| 14914 | /* If possible, use the clever signed constants handling. */ |
| 14915 | if (i <= max_int) |
| 14916 | return int_loc_descriptor (poly_i: (HOST_WIDE_INT) i); |
| 14917 | |
| 14918 | /* Here, we are left with positive numbers that cannot be represented as |
| 14919 | HOST_WIDE_INT, i.e.: |
| 14920 | max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT) |
| 14921 | |
| 14922 | Using DW_OP_const4/8/./u operation to encode them consumes a lot of bytes |
| 14923 | whereas may be better to output a negative integer: thanks to integer |
| 14924 | wrapping, we know that: |
| 14925 | x = x - 2 ** DWARF2_ADDR_SIZE |
| 14926 | = x - 2 * (max (HOST_WIDE_INT) + 1) |
| 14927 | So numbers close to max (unsigned HOST_WIDE_INT) could be represented as |
| 14928 | small negative integers. Let's try that in cases it will clearly improve |
| 14929 | the encoding: there is no gain turning DW_OP_const4u into |
| 14930 | DW_OP_const4s. */ |
| 14931 | if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT |
| 14932 | && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000) |
| 14933 | || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000))) |
| 14934 | { |
| 14935 | const unsigned HOST_WIDE_INT first_shift = i - max_int - 1; |
| 14936 | |
| 14937 | /* Now, -1 < first_shift <= max (HOST_WIDE_INT) |
| 14938 | i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */ |
| 14939 | const HOST_WIDE_INT second_shift |
| 14940 | = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1; |
| 14941 | |
| 14942 | /* So we finally have: |
| 14943 | -max (HOST_WIDE_INT) - 1 <= second_shift <= -1. |
| 14944 | i.e. min (HOST_WIDE_INT) <= second_shift < 0. */ |
| 14945 | return int_loc_descriptor (poly_i: second_shift); |
| 14946 | } |
| 14947 | |
| 14948 | /* Last chance: fallback to a simple constant operation. */ |
| 14949 | return new_loc_descr |
| 14950 | (op: (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff) |
| 14951 | ? DW_OP_const4u |
| 14952 | : DW_OP_const8u, |
| 14953 | oprnd1: i, oprnd2: 0); |
| 14954 | } |
| 14955 | |
| 14956 | /* Generate and return a location description that computes the unsigned |
| 14957 | comparison of the two stack top entries (a OP b where b is the top-most |
| 14958 | entry and a is the second one). The KIND of comparison can be LT_EXPR, |
| 14959 | LE_EXPR, GT_EXPR or GE_EXPR. */ |
| 14960 | |
| 14961 | static dw_loc_descr_ref |
| 14962 | uint_comparison_loc_list (enum tree_code kind) |
| 14963 | { |
| 14964 | enum dwarf_location_atom op, flip_op; |
| 14965 | dw_loc_descr_ref ret, bra_node, jmp_node, tmp; |
| 14966 | |
| 14967 | switch (kind) |
| 14968 | { |
| 14969 | case LT_EXPR: |
| 14970 | op = DW_OP_lt; |
| 14971 | break; |
| 14972 | case LE_EXPR: |
| 14973 | op = DW_OP_le; |
| 14974 | break; |
| 14975 | case GT_EXPR: |
| 14976 | op = DW_OP_gt; |
| 14977 | break; |
| 14978 | case GE_EXPR: |
| 14979 | op = DW_OP_ge; |
| 14980 | break; |
| 14981 | default: |
| 14982 | gcc_unreachable (); |
| 14983 | } |
| 14984 | |
| 14985 | bra_node = new_loc_descr (op: DW_OP_bra, oprnd1: 0, oprnd2: 0); |
| 14986 | jmp_node = new_loc_descr (op: DW_OP_skip, oprnd1: 0, oprnd2: 0); |
| 14987 | |
| 14988 | /* Until DWARFv4, operations all work on signed integers. It is nevertheless |
| 14989 | possible to perform unsigned comparisons: we just have to distinguish |
| 14990 | three cases: |
| 14991 | |
| 14992 | 1. when a and b have the same sign (as signed integers); then we should |
| 14993 | return: a OP(signed) b; |
| 14994 | |
| 14995 | 2. when a is a negative signed integer while b is a positive one, then a |
| 14996 | is a greater unsigned integer than b; likewise when a and b's roles |
| 14997 | are flipped. |
| 14998 | |
| 14999 | So first, compare the sign of the two operands. */ |
| 15000 | ret = new_loc_descr (op: DW_OP_over, oprnd1: 0, oprnd2: 0); |
| 15001 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_over, oprnd1: 0, oprnd2: 0)); |
| 15002 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_xor, oprnd1: 0, oprnd2: 0)); |
| 15003 | /* If they have different signs (i.e. they have different sign bits), then |
| 15004 | the stack top value has now the sign bit set and thus it's smaller than |
| 15005 | zero. */ |
| 15006 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_lit0, oprnd1: 0, oprnd2: 0)); |
| 15007 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_lt, oprnd1: 0, oprnd2: 0)); |
| 15008 | add_loc_descr (list_head: &ret, descr: bra_node); |
| 15009 | |
| 15010 | /* We are in case 1. At this point, we know both operands have the same |
| 15011 | sign, to it's safe to use the built-in signed comparison. */ |
| 15012 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
| 15013 | add_loc_descr (list_head: &ret, descr: jmp_node); |
| 15014 | |
| 15015 | /* We are in case 2. Here, we know both operands do not have the same sign, |
| 15016 | so we have to flip the signed comparison. */ |
| 15017 | flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt; |
| 15018 | tmp = new_loc_descr (op: flip_op, oprnd1: 0, oprnd2: 0); |
| 15019 | bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc; |
| 15020 | bra_node->dw_loc_oprnd1.v.val_loc = tmp; |
| 15021 | add_loc_descr (list_head: &ret, descr: tmp); |
| 15022 | |
| 15023 | /* This dummy operation is necessary to make the two branches join. */ |
| 15024 | tmp = new_loc_descr (op: DW_OP_nop, oprnd1: 0, oprnd2: 0); |
| 15025 | jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc; |
| 15026 | jmp_node->dw_loc_oprnd1.v.val_loc = tmp; |
| 15027 | add_loc_descr (list_head: &ret, descr: tmp); |
| 15028 | |
| 15029 | return ret; |
| 15030 | } |
| 15031 | |
| 15032 | /* Likewise, but takes the location description lists (might be destructive on |
| 15033 | them). Return NULL if either is NULL or if concatenation fails. */ |
| 15034 | |
| 15035 | static dw_loc_list_ref |
| 15036 | loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right, |
| 15037 | enum tree_code kind) |
| 15038 | { |
| 15039 | if (left == NULL || right == NULL) |
| 15040 | return NULL; |
| 15041 | |
| 15042 | add_loc_list (ret: &left, list: right); |
| 15043 | if (left == NULL) |
| 15044 | return NULL; |
| 15045 | |
| 15046 | add_loc_descr_to_each (list: left, ref: uint_comparison_loc_list (kind)); |
| 15047 | return left; |
| 15048 | } |
| 15049 | |
| 15050 | /* Return size_of_locs (int_shift_loc_descriptor (i, shift)) |
| 15051 | without actually allocating it. */ |
| 15052 | |
| 15053 | static unsigned long |
| 15054 | size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift) |
| 15055 | { |
| 15056 | return size_of_int_loc_descriptor (i >> shift) |
| 15057 | + size_of_int_loc_descriptor (shift) |
| 15058 | + 1; |
| 15059 | } |
| 15060 | |
| 15061 | /* Return size_of_locs (int_loc_descriptor (i)) without |
| 15062 | actually allocating it. */ |
| 15063 | |
static unsigned long
size_of_int_loc_descriptor (HOST_WIDE_INT i)
{
  unsigned long s;

  /* NOTE: every branch below must mirror the encoding choice made by
     int_loc_descriptor for the same value, or the computed sizes will
     disagree with what is actually emitted.  */
  if (i >= 0)
    {
      int clz, ctz;
      /* DW_OP_lit0 .. DW_OP_lit31: a single opcode byte.  */
      if (i <= 31)
	return 1;
      /* DW_OP_const1u: opcode + 1 operand byte.  */
      else if (i <= 0xff)
	return 2;
      /* DW_OP_const2u: opcode + 2 operand bytes.  */
      else if (i <= 0xffff)
	return 3;
      clz = clz_hwi (x: i);
      ctz = ctz_hwi (x: i);
      /* Values with few significant bits in the middle can be built as
	 (I >> shift) DW_OP_shl, which int_loc_descriptor prefers when
	 it beats the direct constant encodings.  */
      if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
	  && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
	return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
						 - clz - 5);
      else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
	       && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
	return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
						 - clz - 8);
      /* On 32-bit address targets DWARF arithmetic wraps, so a value
	 >= 2**31 may be emitted as a small negative constant when that
	 is shorter than DW_OP_const4u (5 bytes).  */
      else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
	       && size_of_int_loc_descriptor (i: (HOST_WIDE_INT) (int32_t) i)
		  <= 4)
	return size_of_int_loc_descriptor (i: (HOST_WIDE_INT) (int32_t) i);
      /* DW_OP_const4u: opcode + 4 operand bytes.  */
      else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
	return 5;
      /* Past this point I >= 0x100000000, so DW_OP_constu would cost
	 1 + S >= 6 bytes; check the cheaper shift encodings first.  */
      s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
      if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
	  && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
	return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
						 - clz - 8);
      else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
	       && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
	return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
						 - clz - 16);
      else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
	       && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
	       && s > 6)
	return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
						 - clz - 32);
      else
	/* DW_OP_constu: opcode + ULEB128-encoded operand.  */
	return 1 + s;
    }
  else
    {
      /* DW_OP_const1s: opcode + 1 operand byte.  */
      if (i >= -0x80)
	return 2;
      /* DW_OP_const2s: opcode + 2 operand bytes.  */
      else if (i >= -0x8000)
	return 3;
      else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
	{
	  /* int_loc_descriptor may emit -I followed by DW_OP_neg when
	     that is shorter than DW_OP_const4s; the guard excludes the
	     value whose negation is itself (i.e. the most negative
	     HOST_WIDE_INT), which cannot be negated.  */
	  if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
	    {
	      s = size_of_int_loc_descriptor (i: -i) + 1;
	      if (s < 5)
		return s;
	    }
	  /* DW_OP_const4s: opcode + 4 operand bytes.  */
	  return 5;
	}
      else
	{
	  /* DW_OP_consts: opcode + SLEB128-encoded operand.  */
	  unsigned long r = 1 + size_of_sleb128 (i);
	  /* Again prefer -I DW_OP_neg when strictly shorter.  */
	  if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
	    {
	      s = size_of_int_loc_descriptor (i: -i) + 1;
	      if (s < r)
		return s;
	    }
	  return r;
	}
    }
}
| 15140 | |
| 15141 | /* Return loc description representing "address" of integer value. |
| 15142 | This can appear only as toplevel expression. */ |
| 15143 | |
| 15144 | static dw_loc_descr_ref |
| 15145 | address_of_int_loc_descriptor (int size, HOST_WIDE_INT i) |
| 15146 | { |
| 15147 | int litsize; |
| 15148 | dw_loc_descr_ref loc_result = NULL; |
| 15149 | |
| 15150 | if (!(dwarf_version >= 4 || !dwarf_strict)) |
| 15151 | return NULL; |
| 15152 | |
| 15153 | litsize = size_of_int_loc_descriptor (i); |
| 15154 | /* Determine if DW_OP_stack_value or DW_OP_implicit_value |
| 15155 | is more compact. For DW_OP_stack_value we need: |
| 15156 | litsize + 1 (DW_OP_stack_value) |
| 15157 | and for DW_OP_implicit_value: |
| 15158 | 1 (DW_OP_implicit_value) + 1 (length) + size. */ |
| 15159 | if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size) |
| 15160 | { |
| 15161 | loc_result = int_loc_descriptor (poly_i: i); |
| 15162 | add_loc_descr (list_head: &loc_result, |
| 15163 | descr: new_loc_descr (op: DW_OP_stack_value, oprnd1: 0, oprnd2: 0)); |
| 15164 | return loc_result; |
| 15165 | } |
| 15166 | |
| 15167 | loc_result = new_loc_descr (op: DW_OP_implicit_value, |
| 15168 | oprnd1: size, oprnd2: 0); |
| 15169 | loc_result->dw_loc_oprnd2.val_class = dw_val_class_const; |
| 15170 | loc_result->dw_loc_oprnd2.v.val_int = i; |
| 15171 | return loc_result; |
| 15172 | } |
| 15173 | |
| 15174 | /* Return a location descriptor that designates a base+offset location. */ |
| 15175 | |
static dw_loc_descr_ref
based_loc_descr (rtx reg, poly_int64 offset,
		 enum var_init_status initialized)
{
  unsigned int regno;
  dw_loc_descr_ref result;
  dw_fde_ref fde = cfun->fde;

  /* We only use "frame base" when we're sure we're talking about the
     post-prologue local stack frame.  We do this by *not* running
     register elimination until this point, and recognizing the special
     argument pointer and soft frame pointer rtx's.  */
  if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
    {
      rtx elim = (ira_use_lra_p
		  ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
		  : eliminate_regs (reg, VOIDmode, NULL_RTX))
;
      /* If elimination rewrote REG, the location is expressed relative
	 to the frame base (or the realigned stack) rather than to REG
	 itself.  */
      if (elim != reg)
	{
	  /* Allow hard frame pointer here even if frame pointer
	     isn't used since hard frame pointer is encoded with
	     DW_OP_fbreg which uses the DW_AT_frame_base attribute,
	     not hard frame pointer directly.  */
	  elim = strip_offset_and_add (x: elim, offset: &offset);
	  gcc_assert (elim == hard_frame_pointer_rtx
		      || elim == stack_pointer_rtx);

	  /* If drap register is used to align stack, use frame
	     pointer + offset to access stack variables.  If stack
	     is aligned without drap, use stack pointer + offset to
	     access stack variables.  */
	  if (crtl->stack_realign_tried
	      && reg == frame_pointer_rtx)
	    {
	      int base_reg
		= DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
				      ? HARD_FRAME_POINTER_REGNUM
				      : REGNO (elim));
	      return new_reg_loc_descr (reg: base_reg, offset);
	    }

	  gcc_assert (frame_pointer_fb_offset_valid);
	  offset += frame_pointer_fb_offset;
	  HOST_WIDE_INT const_offset;
	  /* A constant offset fits directly in DW_OP_fbreg; a
	     polynomial offset needs extra arithmetic appended.  */
	  if (offset.is_constant (const_value: &const_offset))
	    return new_loc_descr (op: DW_OP_fbreg, oprnd1: const_offset, oprnd2: 0);
	  else
	    {
	      dw_loc_descr_ref ret = new_loc_descr (op: DW_OP_fbreg, oprnd1: 0, oprnd2: 0);
	      loc_descr_plus_const (list_head: &ret, poly_offset: offset);
	      return ret;
	    }
	}
    }

  regno = REGNO (reg);
#ifdef LEAF_REG_REMAP
  /* On targets that remap leaf registers, translate REGNO for leaf
     functions before computing the DWARF register number.  */
  if (crtl->uses_only_leaf_regs)
    {
      int leaf_reg = LEAF_REG_REMAP (regno);
      if (leaf_reg != -1)
	regno = (unsigned) leaf_reg;
    }
#endif
  /* Map the hard register number onto its DWARF encoding.  */
  regno = DWARF_FRAME_REGNUM (regno);

  HOST_WIDE_INT const_offset;
  if (!optimize && fde
      && (fde->drap_reg == regno || fde->vdrap_reg == regno)
      && offset.is_constant (const_value: &const_offset))
    {
      /* Use cfa+offset to represent the location of arguments passed
	 on the stack when drap is used to align stack.
	 Only do this when not optimizing, for optimized code var-tracking
	 is supposed to track where the arguments live and the register
	 used as vdrap or drap in some spot might be used for something
	 else in other part of the routine.  */
      return new_loc_descr (op: DW_OP_fbreg, oprnd1: const_offset, oprnd2: 0);
    }

  result = new_reg_loc_descr (reg: regno, offset);

  /* Tag locations of variables known to be uninitialized.  */
  if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
    add_loc_descr (list_head: &result, descr: new_loc_descr (op: DW_OP_GNU_uninit, oprnd1: 0, oprnd2: 0));

  return result;
}
| 15264 | |
| 15265 | /* Return true if this RTL expression describes a base+offset calculation. */ |
| 15266 | |
| 15267 | static inline bool |
| 15268 | is_based_loc (const_rtx rtl) |
| 15269 | { |
| 15270 | return (GET_CODE (rtl) == PLUS |
| 15271 | && ((REG_P (XEXP (rtl, 0)) |
| 15272 | && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER |
| 15273 | && CONST_INT_P (XEXP (rtl, 1))))); |
| 15274 | } |
| 15275 | |
| 15276 | /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0) |
| 15277 | failed. */ |
| 15278 | |
| 15279 | static dw_loc_descr_ref |
| 15280 | tls_mem_loc_descriptor (rtx mem) |
| 15281 | { |
| 15282 | tree base; |
| 15283 | dw_loc_descr_ref loc_result; |
| 15284 | |
| 15285 | if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem)) |
| 15286 | return NULL; |
| 15287 | |
| 15288 | base = get_base_address (MEM_EXPR (mem)); |
| 15289 | if (base == NULL |
| 15290 | || !VAR_P (base) |
| 15291 | || !DECL_THREAD_LOCAL_P (base)) |
| 15292 | return NULL; |
| 15293 | |
| 15294 | loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL); |
| 15295 | if (loc_result == NULL) |
| 15296 | return NULL; |
| 15297 | |
| 15298 | if (maybe_ne (MEM_OFFSET (mem), b: 0)) |
| 15299 | loc_descr_plus_const (list_head: &loc_result, MEM_OFFSET (mem)); |
| 15300 | |
| 15301 | return loc_result; |
| 15302 | } |
| 15303 | |
| 15304 | /* Output debug info about reason why we failed to expand expression as dwarf |
| 15305 | expression. */ |
| 15306 | |
| 15307 | static void |
| 15308 | expansion_failed (tree expr, rtx rtl, char const *reason) |
| 15309 | { |
| 15310 | if (dump_file && (dump_flags & TDF_DETAILS)) |
| 15311 | { |
| 15312 | fprintf (stream: dump_file, format: "Failed to expand as dwarf: " ); |
| 15313 | if (expr) |
| 15314 | print_generic_expr (dump_file, expr, dump_flags); |
| 15315 | if (rtl) |
| 15316 | { |
| 15317 | fprintf (stream: dump_file, format: "\n" ); |
| 15318 | print_rtl (dump_file, rtl); |
| 15319 | } |
| 15320 | fprintf (stream: dump_file, format: "\nReason: %s\n" , reason); |
| 15321 | } |
| 15322 | } |
| 15323 | |
| 15324 | /* Helper function for const_ok_for_output. */ |
| 15325 | |
static bool
const_ok_for_output_1 (rtx rtl)
{
  /* Let the target veto first; only UNSPECs get a second look.  */
  if (targetm.const_not_ok_for_debug_p (rtl))
    {
      if (GET_CODE (rtl) != UNSPEC)
	{
	  expansion_failed (NULL_TREE, rtl,
			    reason: "Expression rejected for debug by the backend.\n");
	  return false;
	}

      /* If delegitimize_address couldn't do anything with the UNSPEC, and
	 the target hook doesn't explicitly allow it in debug info, assume
	 we can't express it in the debug info.  */
      /* Don't complain about TLS UNSPECs, those are just too hard to
	 delegitimize.  Note this could be a non-decl SYMBOL_REF such as
	 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
	 rather than DECL_THREAD_LOCAL_P is not just an optimization.  */
      if (flag_checking
	  && (XVECLEN (rtl, 0) == 0
	      || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
	      || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
	inform (current_function_decl
		? DECL_SOURCE_LOCATION (current_function_decl)
		: UNKNOWN_LOCATION,
#if NUM_UNSPEC_VALUES > 0
		"non-delegitimized UNSPEC %s (%d) found in variable location",
		((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
		 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
#else
		"non-delegitimized UNSPEC %d found in variable location",
#endif
		XINT (rtl, 1));
      expansion_failed (NULL_TREE, rtl,
			reason: "UNSPEC hasn't been delegitimized.\n");
      return false;
    }

  /* Polynomial constants have no single value to emit.  */
  if (CONST_POLY_INT_P (rtl))
    return false;

  /* FIXME: Refer to PR60655. It is possible for simplification
     of rtl expressions in var tracking to produce such expressions.
     We should really identify / validate expressions
     enclosed in CONST that can be handled by assemblers on various
     targets and only handle legitimate cases here.  */
  switch (GET_CODE (rtl))
    {
    case SYMBOL_REF:
      /* Fall through to the SYMBOL_REF-specific checks below.  */
      break;
    case NOT:
    case NEG:
      return false;
    case PLUS:
      {
	/* Make sure SYMBOL_REFs/UNSPECs are at most in one of the
	   operands.  */
	subrtx_var_iterator::array_type array;
	/* FIRST is true iff the first operand contains a symbolic
	   reference (SYMBOL_REF, label or UNSPEC).  */
	bool first = false;
	FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
	  if (SYMBOL_REF_P (*iter)
	      || LABEL_P (*iter)
	      || GET_CODE (*iter) == UNSPEC)
	    {
	      first = true;
	      break;
	    }
	if (!first)
	  return true;
	FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 1), ALL)
	  if (SYMBOL_REF_P (*iter)
	      || LABEL_P (*iter)
	      || GET_CODE (*iter) == UNSPEC)
	    return false;
	return true;
      }
    case MINUS:
      {
	/* Disallow negation of SYMBOL_REFs or UNSPECs when they
	   appear in the second operand of MINUS.  */
	subrtx_var_iterator::array_type array;
	FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 1), ALL)
	  if (SYMBOL_REF_P (*iter)
	      || LABEL_P (*iter)
	      || GET_CODE (*iter) == UNSPEC)
	    return false;
	return true;
      }
    default:
      return true;
    }

  /* Only a SYMBOL_REF reaches this point (every other case in the
     switch above returns).  */
  if (CONSTANT_POOL_ADDRESS_P (rtl))
    {
      bool marked;
      get_pool_constant_mark (rtl, &marked);
      /* If all references to this pool constant were optimized away,
	 it was not output and thus we can't represent it.  */
      if (!marked)
	{
	  expansion_failed (NULL_TREE, rtl,
			    reason: "Constant was removed from constant pool.\n");
	  return false;
	}
    }

  /* TLS symbols require dynamic resolution; a plain address relocation
     would be wrong.  */
  if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
    return false;

  /* Avoid references to external symbols in debug info, on several targets
     the linker might even refuse to link when linking a shared library,
     and in many other cases the relocations for .debug_info/.debug_loc are
     dropped, so the address becomes zero anyway.  Hidden symbols, guaranteed
     to be defined within the same shared library or executable are fine.  */
  if (SYMBOL_REF_EXTERNAL_P (rtl))
    {
      tree decl = SYMBOL_REF_DECL (rtl);

      if (decl == NULL || !targetm.binds_local_p (decl))
	{
	  expansion_failed (NULL_TREE, rtl,
			    reason: "Symbol not defined in current TU.\n");
	  return false;
	}
    }

  return true;
}
| 15455 | |
| 15456 | /* Return true if constant RTL can be emitted in DW_OP_addr or |
| 15457 | DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or |
| 15458 | non-marked constant pool SYMBOL_REFs can't be referenced in it. */ |
| 15459 | |
| 15460 | static bool |
| 15461 | const_ok_for_output (rtx rtl) |
| 15462 | { |
| 15463 | if (GET_CODE (rtl) == SYMBOL_REF) |
| 15464 | return const_ok_for_output_1 (rtl); |
| 15465 | |
| 15466 | if (GET_CODE (rtl) == CONST) |
| 15467 | { |
| 15468 | subrtx_var_iterator::array_type array; |
| 15469 | FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL) |
| 15470 | if (!const_ok_for_output_1 (rtl: *iter)) |
| 15471 | return false; |
| 15472 | return true; |
| 15473 | } |
| 15474 | |
| 15475 | return true; |
| 15476 | } |
| 15477 | |
| 15478 | /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP |
| 15479 | if possible, NULL otherwise. */ |
| 15480 | |
| 15481 | static dw_die_ref |
| 15482 | base_type_for_mode (machine_mode mode, bool unsignedp) |
| 15483 | { |
| 15484 | dw_die_ref type_die; |
| 15485 | tree type = lang_hooks.types.type_for_mode (mode, unsignedp); |
| 15486 | |
| 15487 | if (type == NULL) |
| 15488 | return NULL; |
| 15489 | switch (TREE_CODE (type)) |
| 15490 | { |
| 15491 | case INTEGER_TYPE: |
| 15492 | case REAL_TYPE: |
| 15493 | break; |
| 15494 | default: |
| 15495 | return NULL; |
| 15496 | } |
| 15497 | type_die = lookup_type_die (type); |
| 15498 | if (!type_die) |
| 15499 | type_die = modified_type_die (type, cv_quals: TYPE_UNQUALIFIED, NULL_TREE, |
| 15500 | reverse: false, context_die: comp_unit_die ()); |
| 15501 | if (type_die == NULL || type_die->die_tag != DW_TAG_base_type) |
| 15502 | return NULL; |
| 15503 | return type_die; |
| 15504 | } |
| 15505 | |
| 15506 | /* For OP descriptor assumed to be in unsigned MODE, convert it to a unsigned |
| 15507 | type matching MODE, or, if MODE is narrower than or as wide as |
| 15508 | DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not |
| 15509 | possible. */ |
| 15510 | |
| 15511 | static dw_loc_descr_ref |
| 15512 | convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op) |
| 15513 | { |
| 15514 | machine_mode outer_mode = mode; |
| 15515 | dw_die_ref type_die; |
| 15516 | dw_loc_descr_ref cvt; |
| 15517 | |
| 15518 | if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE) |
| 15519 | { |
| 15520 | add_loc_descr (list_head: &op, descr: new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0)); |
| 15521 | return op; |
| 15522 | } |
| 15523 | type_die = base_type_for_mode (mode: outer_mode, unsignedp: 1); |
| 15524 | if (type_die == NULL) |
| 15525 | return NULL; |
| 15526 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
| 15527 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
| 15528 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
| 15529 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
| 15530 | add_loc_descr (list_head: &op, descr: cvt); |
| 15531 | return op; |
| 15532 | } |
| 15533 | |
| 15534 | /* Return location descriptor for comparison OP with operands OP0 and OP1. */ |
| 15535 | |
| 15536 | static dw_loc_descr_ref |
| 15537 | compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0, |
| 15538 | dw_loc_descr_ref op1) |
| 15539 | { |
| 15540 | dw_loc_descr_ref ret = op0; |
| 15541 | add_loc_descr (list_head: &ret, descr: op1); |
| 15542 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
| 15543 | if (STORE_FLAG_VALUE != 1) |
| 15544 | { |
| 15545 | add_loc_descr (list_head: &ret, descr: int_loc_descriptor (STORE_FLAG_VALUE)); |
| 15546 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_mul, oprnd1: 0, oprnd2: 0)); |
| 15547 | } |
| 15548 | return ret; |
| 15549 | } |
| 15550 | |
| 15551 | /* Subroutine of scompare_loc_descriptor for the case in which we're |
| 15552 | comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE, |
| 15553 | and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */ |
| 15554 | |
| 15555 | static dw_loc_descr_ref |
| 15556 | scompare_loc_descriptor_wide (enum dwarf_location_atom op, |
| 15557 | scalar_int_mode op_mode, |
| 15558 | dw_loc_descr_ref op0, dw_loc_descr_ref op1) |
| 15559 | { |
| 15560 | dw_die_ref type_die = base_type_for_mode (mode: op_mode, unsignedp: 0); |
| 15561 | dw_loc_descr_ref cvt; |
| 15562 | |
| 15563 | if (type_die == NULL) |
| 15564 | return NULL; |
| 15565 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
| 15566 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
| 15567 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
| 15568 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
| 15569 | add_loc_descr (list_head: &op0, descr: cvt); |
| 15570 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
| 15571 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
| 15572 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
| 15573 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
| 15574 | add_loc_descr (list_head: &op1, descr: cvt); |
| 15575 | return compare_loc_descriptor (op, op0, op1); |
| 15576 | } |
| 15577 | |
| 15578 | /* Subroutine of scompare_loc_descriptor for the case in which we're |
| 15579 | comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE, |
| 15580 | and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */ |
| 15581 | |
static dw_loc_descr_ref
scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
				scalar_int_mode op_mode,
				dw_loc_descr_ref op0, dw_loc_descr_ref op1)
{
  /* Number of bits to shift an OP_MODE value left so its sign bit lands
     in the sign bit of a DWARF2_ADDR_SIZE-wide stack entry.  */
  int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (mode: op_mode)) * BITS_PER_UNIT;
  /* For eq/ne, if the operands are known to be zero-extended,
     there is no need to do the fancy shifting up. */
  if (op == DW_OP_eq || op == DW_OP_ne)
    {
      dw_loc_descr_ref last0, last1;
      /* Find the final location operation of each operand expression.  */
      for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
	;
      for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
	;
      /* deref_size zero extends, and for constants we can check
	 whether they are zero extended or not. */
      if (((last0->dw_loc_opc == DW_OP_deref_size
	    && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (mode: op_mode))
	   || (CONST_INT_P (XEXP (rtl, 0))
	       && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
	       == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
	  && ((last1->dw_loc_opc == DW_OP_deref_size
	       && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (mode: op_mode))
	      || (CONST_INT_P (XEXP (rtl, 1))
		  && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
		  == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
	return compare_loc_descriptor (op, op0, op1);

      /* EQ/NE comparison against constant in narrower type than
	 DWARF2_ADDR_SIZE can be performed either as
	 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
	 DW_OP_{eq,ne}
	 or
	 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
	 DW_OP_{eq,ne}.  Pick whatever is shorter. */
      if (CONST_INT_P (XEXP (rtl, 1))
	  && GET_MODE_BITSIZE (mode: op_mode) < HOST_BITS_PER_WIDE_INT
	  && (size_of_int_loc_descriptor (i: shift) + 1
	      + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
	      >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
	      + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
					    & GET_MODE_MASK (op_mode))))
	{
	  /* The mask-and-compare form is not longer; emit it.  */
	  add_loc_descr (list_head: &op0, descr: int_loc_descriptor (GET_MODE_MASK (op_mode)));
	  add_loc_descr (list_head: &op0, descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0));
	  op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
				    & GET_MODE_MASK (op_mode));
	  return compare_loc_descriptor (op, op0, op1);
	}
    }
  /* General case: shift both operands up so a full-width comparison
     gives the same result as a comparison in OP_MODE would.  */
  add_loc_descr (list_head: &op0, descr: int_loc_descriptor (poly_i: shift));
  add_loc_descr (list_head: &op0, descr: new_loc_descr (op: DW_OP_shl, oprnd1: 0, oprnd2: 0));
  if (CONST_INT_P (XEXP (rtl, 1)))
    op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
  else
    {
      add_loc_descr (list_head: &op1, descr: int_loc_descriptor (poly_i: shift));
      add_loc_descr (list_head: &op1, descr: new_loc_descr (op: DW_OP_shl, oprnd1: 0, oprnd2: 0));
    }
  return compare_loc_descriptor (op, op0, op1);
}
| 15644 | |
| 15645 | /* Return location descriptor for signed comparison OP RTL. */ |
| 15646 | |
| 15647 | static dw_loc_descr_ref |
| 15648 | scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl, |
| 15649 | machine_mode mem_mode) |
| 15650 | { |
| 15651 | machine_mode op_mode = GET_MODE (XEXP (rtl, 0)); |
| 15652 | dw_loc_descr_ref op0, op1; |
| 15653 | |
| 15654 | if (op_mode == VOIDmode) |
| 15655 | op_mode = GET_MODE (XEXP (rtl, 1)); |
| 15656 | if (op_mode == VOIDmode) |
| 15657 | return NULL; |
| 15658 | |
| 15659 | scalar_int_mode int_op_mode; |
| 15660 | if (dwarf_strict |
| 15661 | && dwarf_version < 5 |
| 15662 | && (!is_a <scalar_int_mode> (m: op_mode, result: &int_op_mode) |
| 15663 | || GET_MODE_SIZE (mode: int_op_mode) > DWARF2_ADDR_SIZE)) |
| 15664 | return NULL; |
| 15665 | |
| 15666 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode: op_mode, mem_mode, |
| 15667 | VAR_INIT_STATUS_INITIALIZED); |
| 15668 | op1 = mem_loc_descriptor (XEXP (rtl, 1), mode: op_mode, mem_mode, |
| 15669 | VAR_INIT_STATUS_INITIALIZED); |
| 15670 | |
| 15671 | if (op0 == NULL || op1 == NULL) |
| 15672 | return NULL; |
| 15673 | |
| 15674 | if (is_a <scalar_int_mode> (m: op_mode, result: &int_op_mode)) |
| 15675 | { |
| 15676 | if (GET_MODE_SIZE (mode: int_op_mode) < DWARF2_ADDR_SIZE) |
| 15677 | return scompare_loc_descriptor_narrow (op, rtl, op_mode: int_op_mode, op0, op1); |
| 15678 | |
| 15679 | if (GET_MODE_SIZE (mode: int_op_mode) > DWARF2_ADDR_SIZE) |
| 15680 | return scompare_loc_descriptor_wide (op, op_mode: int_op_mode, op0, op1); |
| 15681 | } |
| 15682 | return compare_loc_descriptor (op, op0, op1); |
| 15683 | } |
| 15684 | |
| 15685 | /* Return location descriptor for unsigned comparison OP RTL. */ |
| 15686 | |
static dw_loc_descr_ref
ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
			 machine_mode mem_mode)
{
  dw_loc_descr_ref op0, op1;

  /* Take the operand mode from whichever side has one; either side of
     the comparison may be a VOIDmode constant.  */
  machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
  if (test_op_mode == VOIDmode)
    test_op_mode = GET_MODE (XEXP (rtl, 1));

  scalar_int_mode op_mode;
  if (!is_a <scalar_int_mode> (m: test_op_mode, result: &op_mode))
    return NULL;

  /* Wider-than-address comparisons need typed operations, which strict
     pre-DWARF5 output does not have.  */
  if (dwarf_strict
      && dwarf_version < 5
      && GET_MODE_SIZE (mode: op_mode) > DWARF2_ADDR_SIZE)
    return NULL;

  op0 = mem_loc_descriptor (XEXP (rtl, 0), mode: op_mode, mem_mode,
			    VAR_INIT_STATUS_INITIALIZED);
  op1 = mem_loc_descriptor (XEXP (rtl, 1), mode: op_mode, mem_mode,
			    VAR_INIT_STATUS_INITIALIZED);

  if (op0 == NULL || op1 == NULL)
    return NULL;

  if (GET_MODE_SIZE (mode: op_mode) < DWARF2_ADDR_SIZE)
    {
      /* Narrow modes: zero-extend both operands by masking, unless they
	 are already known to be zero-extended.  */
      HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
      dw_loc_descr_ref last0, last1;
      /* Find the final location operation of each operand expression.  */
      for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
	;
      for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
	;
      if (CONST_INT_P (XEXP (rtl, 0)))
	op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
      /* deref_size zero extends, so no need to mask it again. */
      else if (last0->dw_loc_opc != DW_OP_deref_size
	       || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (mode: op_mode))
	{
	  add_loc_descr (list_head: &op0, descr: int_loc_descriptor (poly_i: mask));
	  add_loc_descr (list_head: &op0, descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0));
	}
      if (CONST_INT_P (XEXP (rtl, 1)))
	op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
      /* deref_size zero extends, so no need to mask it again. */
      else if (last1->dw_loc_opc != DW_OP_deref_size
	       || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (mode: op_mode))
	{
	  add_loc_descr (list_head: &op1, descr: int_loc_descriptor (poly_i: mask));
	  add_loc_descr (list_head: &op1, descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0));
	}
    }
  else if (GET_MODE_SIZE (mode: op_mode) == DWARF2_ADDR_SIZE)
    {
      /* Full-width modes: DWARF stack comparisons are signed, so add a
	 bias with just the sign bit set to both operands; flipping the
	 sign bit maps unsigned order onto signed order.  */
      HOST_WIDE_INT bias = 1;
      bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
      add_loc_descr (list_head: &op0, descr: new_loc_descr (op: DW_OP_plus_uconst, oprnd1: bias, oprnd2: 0));
      if (CONST_INT_P (XEXP (rtl, 1)))
	op1 = int_loc_descriptor (poly_i: (unsigned HOST_WIDE_INT) bias
			      + INTVAL (XEXP (rtl, 1)));
      else
	add_loc_descr (list_head: &op1, descr: new_loc_descr (op: DW_OP_plus_uconst,
					 oprnd1: bias, oprnd2: 0));
    }
  return compare_loc_descriptor (op, op0, op1);
}
| 15755 | |
| 15756 | /* Return location descriptor for {U,S}{MIN,MAX}. */ |
| 15757 | |
static dw_loc_descr_ref
minmax_loc_descriptor (rtx rtl, machine_mode mode,
		       machine_mode mem_mode)
{
  enum dwarf_location_atom op;
  dw_loc_descr_ref op0, op1, ret;
  dw_loc_descr_ref bra_node, drop_node;

  /* Non-integer or wider-than-address modes need typed operations,
     which strict pre-DWARF5 output does not have.  */
  scalar_int_mode int_mode;
  if (dwarf_strict
      && dwarf_version < 5
      && (!is_a <scalar_int_mode> (m: mode, result: &int_mode)
	  || GET_MODE_SIZE (mode: int_mode) > DWARF2_ADDR_SIZE))
    return NULL;

  op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
			    VAR_INIT_STATUS_INITIALIZED);
  op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
			    VAR_INIT_STATUS_INITIALIZED);

  if (op0 == NULL || op1 == NULL)
    return NULL;

  /* Arrange the stack as op0 op1 op0 op1 (dup after op0, then swap and
     over after op1), ready for the comparison emitted below.  */
  add_loc_descr (list_head: &op0, descr: new_loc_descr (op: DW_OP_dup, oprnd1: 0, oprnd2: 0));
  add_loc_descr (list_head: &op1, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0));
  add_loc_descr (list_head: &op1, descr: new_loc_descr (op: DW_OP_over, oprnd1: 0, oprnd2: 0));
  if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
    {
      /* Checked by the caller. */
      int_mode = as_a <scalar_int_mode> (m: mode);
      if (GET_MODE_SIZE (mode: int_mode) < DWARF2_ADDR_SIZE)
	{
	  /* Narrow unsigned: zero-extend both compared copies by
	     masking before the (signed) comparison.  */
	  HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
	  add_loc_descr (list_head: &op0, descr: int_loc_descriptor (poly_i: mask));
	  add_loc_descr (list_head: &op0, descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0));
	  add_loc_descr (list_head: &op1, descr: int_loc_descriptor (poly_i: mask));
	  add_loc_descr (list_head: &op1, descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0));
	}
      else if (GET_MODE_SIZE (mode: int_mode) == DWARF2_ADDR_SIZE)
	{
	  /* Full-width unsigned: flip the sign bit of both compared
	     copies, mapping unsigned order onto signed order.  */
	  HOST_WIDE_INT bias = 1;
	  bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
	  add_loc_descr (list_head: &op0, descr: new_loc_descr (op: DW_OP_plus_uconst, oprnd1: bias, oprnd2: 0));
	  add_loc_descr (list_head: &op1, descr: new_loc_descr (op: DW_OP_plus_uconst, oprnd1: bias, oprnd2: 0));
	}
    }
  else if (is_a <scalar_int_mode> (m: mode, result: &int_mode)
	   && GET_MODE_SIZE (mode: int_mode) < DWARF2_ADDR_SIZE)
    {
      /* Narrow signed: shift both compared copies up so the signed
	 full-width comparison matches a comparison in INT_MODE.  */
      int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (mode: int_mode)) * BITS_PER_UNIT;
      add_loc_descr (list_head: &op0, descr: int_loc_descriptor (poly_i: shift));
      add_loc_descr (list_head: &op0, descr: new_loc_descr (op: DW_OP_shl, oprnd1: 0, oprnd2: 0));
      add_loc_descr (list_head: &op1, descr: int_loc_descriptor (poly_i: shift));
      add_loc_descr (list_head: &op1, descr: new_loc_descr (op: DW_OP_shl, oprnd1: 0, oprnd2: 0));
    }
  else if (is_a <scalar_int_mode> (m: mode, result: &int_mode)
	   && GET_MODE_SIZE (mode: int_mode) > DWARF2_ADDR_SIZE)
    {
      /* Wide: convert both operands to the base type DIE for INT_MODE
	 so the comparison is done on typed stack entries.  */
      dw_die_ref type_die = base_type_for_mode (mode: int_mode, unsignedp: 0);
      dw_loc_descr_ref cvt;
      if (type_die == NULL)
	return NULL;
      cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0);
      cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
      cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
      cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
      add_loc_descr (list_head: &op0, descr: cvt);
      cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0);
      cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
      cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
      cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
      add_loc_descr (list_head: &op1, descr: cvt);
    }

  /* Compare the two copies; if the winner is already below the loser
     on the stack, branch straight to the drop, otherwise swap first.  */
  if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
    op = DW_OP_lt;
  else
    op = DW_OP_gt;
  ret = op0;
  add_loc_descr (list_head: &ret, descr: op1);
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0));
  bra_node = new_loc_descr (op: DW_OP_bra, oprnd1: 0, oprnd2: 0);
  add_loc_descr (list_head: &ret, descr: bra_node);
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0));
  drop_node = new_loc_descr (op: DW_OP_drop, oprnd1: 0, oprnd2: 0);
  add_loc_descr (list_head: &ret, descr: drop_node);
  bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
  bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
  /* Wide signed results were computed as typed values; convert the
     result back to an untyped INT_MODE value.  */
  if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
      && is_a <scalar_int_mode> (m: mode, result: &int_mode)
      && GET_MODE_SIZE (mode: int_mode) > DWARF2_ADDR_SIZE)
    ret = convert_descriptor_to_mode (mode: int_mode, op: ret);
  return ret;
}
| 15852 | |
| 15853 | /* Helper function for mem_loc_descriptor. Perform OP binary op, |
| 15854 | but after converting arguments to type_die, afterwards |
| 15855 | convert back to unsigned. */ |
| 15856 | |
| 15857 | static dw_loc_descr_ref |
| 15858 | typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die, |
| 15859 | scalar_int_mode mode, machine_mode mem_mode) |
| 15860 | { |
| 15861 | dw_loc_descr_ref cvt, op0, op1; |
| 15862 | |
| 15863 | if (type_die == NULL) |
| 15864 | return NULL; |
| 15865 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, |
| 15866 | VAR_INIT_STATUS_INITIALIZED); |
| 15867 | op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, |
| 15868 | VAR_INIT_STATUS_INITIALIZED); |
| 15869 | if (op0 == NULL || op1 == NULL) |
| 15870 | return NULL; |
| 15871 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
| 15872 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
| 15873 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
| 15874 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
| 15875 | add_loc_descr (list_head: &op0, descr: cvt); |
| 15876 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
| 15877 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
| 15878 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
| 15879 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
| 15880 | add_loc_descr (list_head: &op1, descr: cvt); |
| 15881 | add_loc_descr (list_head: &op0, descr: op1); |
| 15882 | add_loc_descr (list_head: &op0, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
| 15883 | return convert_descriptor_to_mode (mode, op: op0); |
| 15884 | } |
| 15885 | |
| 15886 | /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value, |
| 15887 | const0 is DW_OP_lit0 or corresponding typed constant, |
| 15888 | const1 is DW_OP_lit1 or corresponding typed constant |
| 15889 | and constMSB is constant with just the MSB bit set |
| 15890 | for the mode): |
| 15891 | DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4> |
| 15892 | L1: const0 DW_OP_swap |
| 15893 | L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl |
| 15894 | DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2> |
| 15895 | L3: DW_OP_drop |
| 15896 | L4: DW_OP_nop |
| 15897 | |
| 15898 | CTZ is similar: |
| 15899 | DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4> |
| 15900 | L1: const0 DW_OP_swap |
| 15901 | L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr |
| 15902 | DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2> |
| 15903 | L3: DW_OP_drop |
| 15904 | L4: DW_OP_nop |
| 15905 | |
| 15906 | FFS is similar: |
| 15907 | DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4> |
| 15908 | L1: const1 DW_OP_swap |
| 15909 | L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr |
| 15910 | DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2> |
| 15911 | L3: DW_OP_drop |
| 15912 | L4: DW_OP_nop */ |
| 15913 | |
static dw_loc_descr_ref
clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
		    machine_mode mem_mode)
{
  dw_loc_descr_ref op0, ret, tmp;
  HOST_WIDE_INT valv;
  dw_loc_descr_ref l1jump, l1label;
  dw_loc_descr_ref l2jump, l2label;
  dw_loc_descr_ref l3jump, l3label;
  dw_loc_descr_ref l4jump, l4label;
  rtx msb;

  /* Only handle the case where the operand mode matches MODE.  */
  if (GET_MODE (XEXP (rtl, 0)) != mode)
    return NULL;

  op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
			    VAR_INIT_STATUS_INITIALIZED);
  if (op0 == NULL)
    return NULL;
  ret = op0;
  /* VALV is the result for a zero input (constV in the comment
     above this function).  */
  if (GET_CODE (rtl) == CLZ)
    {
      if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
	valv = GET_MODE_BITSIZE (mode);
    }
  else if (GET_CODE (rtl) == FFS)
    valv = 0;
  else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
    valv = GET_MODE_BITSIZE (mode);
  /* Zero input: drop it, push VALV and skip to L4.  */
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_dup, oprnd1: 0, oprnd2: 0));
  l1jump = new_loc_descr (op: DW_OP_bra, oprnd1: 0, oprnd2: 0);
  add_loc_descr (list_head: &ret, descr: l1jump);
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_drop, oprnd1: 0, oprnd2: 0));
  tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
			    VAR_INIT_STATUS_INITIALIZED);
  if (tmp == NULL)
    return NULL;
  add_loc_descr (list_head: &ret, descr: tmp);
  l4jump = new_loc_descr (op: DW_OP_skip, oprnd1: 0, oprnd2: 0);
  add_loc_descr (list_head: &ret, descr: l4jump);
  /* L1: push the initial count (const1 for FFS, const0 otherwise)
     below the value being scanned.  */
  l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
				? const1_rtx : const0_rtx,
				mode, mem_mode,
				VAR_INIT_STATUS_INITIALIZED);
  if (l1label == NULL)
    return NULL;
  add_loc_descr (list_head: &ret, descr: l1label);
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0));
  /* L2: loop head; test the probe bit (MSB for CLZ, LSB for CTZ/FFS).  */
  l2label = new_loc_descr (op: DW_OP_dup, oprnd1: 0, oprnd2: 0);
  add_loc_descr (list_head: &ret, descr: l2label);
  if (GET_CODE (rtl) != CLZ)
    msb = const1_rtx;
  else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    msb = GEN_INT (HOST_WIDE_INT_1U
		   << (GET_MODE_BITSIZE (mode) - 1));
  else
    msb = immed_wide_int_const
	  (wi::set_bit_in_zero (bit: GET_MODE_PRECISION (mode) - 1,
				precision: GET_MODE_PRECISION (mode)), mode);
  /* NOTE(review): a negative CONST_INT MSB is emitted directly as an
     unsigned DW_OP_const*u here rather than going through
     mem_loc_descriptor — presumably to force an unsigned encoding of
     the sign-bit constant; confirm against mem_loc_descriptor's
     CONST_INT handling.  */
  if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
    tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
			 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
			 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), oprnd2: 0);
  else
    tmp = mem_loc_descriptor (msb, mode, mem_mode,
			      VAR_INIT_STATUS_INITIALIZED);
  if (tmp == NULL)
    return NULL;
  add_loc_descr (list_head: &ret, descr: tmp);
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0));
  /* Probe bit set: done, branch to L3.  */
  l3jump = new_loc_descr (op: DW_OP_bra, oprnd1: 0, oprnd2: 0);
  add_loc_descr (list_head: &ret, descr: l3jump);
  /* Otherwise shift the value one bit towards the probe bit, increment
     the count and loop back to L2.  */
  tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
			    VAR_INIT_STATUS_INITIALIZED);
  if (tmp == NULL)
    return NULL;
  add_loc_descr (list_head: &ret, descr: tmp);
  add_loc_descr (list_head: &ret, descr: new_loc_descr (GET_CODE (rtl) == CLZ
				     ? DW_OP_shl : DW_OP_shr, oprnd1: 0, oprnd2: 0));
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0));
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_plus_uconst, oprnd1: 1, oprnd2: 0));
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0));
  l2jump = new_loc_descr (op: DW_OP_skip, oprnd1: 0, oprnd2: 0);
  add_loc_descr (list_head: &ret, descr: l2jump);
  /* L3: drop the shifted value, leaving the count on the stack.  */
  l3label = new_loc_descr (op: DW_OP_drop, oprnd1: 0, oprnd2: 0);
  add_loc_descr (list_head: &ret, descr: l3label);
  /* L4: common exit.  */
  l4label = new_loc_descr (op: DW_OP_nop, oprnd1: 0, oprnd2: 0);
  add_loc_descr (list_head: &ret, descr: l4label);
  /* Resolve all four branches to their labels.  */
  l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
  l1jump->dw_loc_oprnd1.v.val_loc = l1label;
  l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
  l2jump->dw_loc_oprnd1.v.val_loc = l2label;
  l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
  l3jump->dw_loc_oprnd1.v.val_loc = l3label;
  l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
  l4jump->dw_loc_oprnd1.v.val_loc = l4label;
  return ret;
}
| 16012 | |
| 16013 | /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant, |
| 16014 | const1 is DW_OP_lit1 or corresponding typed constant): |
| 16015 | const0 DW_OP_swap |
| 16016 | L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and |
| 16017 | DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1> |
| 16018 | L2: DW_OP_drop |
| 16019 | |
| 16020 | PARITY is similar: |
| 16021 | L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and |
| 16022 | DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1> |
| 16023 | L2: DW_OP_drop */ |
| 16024 | |
| 16025 | static dw_loc_descr_ref |
| 16026 | popcount_loc_descriptor (rtx rtl, scalar_int_mode mode, |
| 16027 | machine_mode mem_mode) |
| 16028 | { |
| 16029 | dw_loc_descr_ref op0, ret, tmp; |
| 16030 | dw_loc_descr_ref l1jump, l1label; |
| 16031 | dw_loc_descr_ref l2jump, l2label; |
| 16032 | |
| 16033 | if (GET_MODE (XEXP (rtl, 0)) != mode) |
| 16034 | return NULL; |
| 16035 | |
| 16036 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, |
| 16037 | VAR_INIT_STATUS_INITIALIZED); |
| 16038 | if (op0 == NULL) |
| 16039 | return NULL; |
| 16040 | ret = op0; |
| 16041 | tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode, |
| 16042 | VAR_INIT_STATUS_INITIALIZED); |
| 16043 | if (tmp == NULL) |
| 16044 | return NULL; |
| 16045 | add_loc_descr (list_head: &ret, descr: tmp); |
| 16046 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0)); |
| 16047 | l1label = new_loc_descr (op: DW_OP_dup, oprnd1: 0, oprnd2: 0); |
| 16048 | add_loc_descr (list_head: &ret, descr: l1label); |
| 16049 | l2jump = new_loc_descr (op: DW_OP_bra, oprnd1: 0, oprnd2: 0); |
| 16050 | add_loc_descr (list_head: &ret, descr: l2jump); |
| 16051 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_dup, oprnd1: 0, oprnd2: 0)); |
| 16052 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_rot, oprnd1: 0, oprnd2: 0)); |
| 16053 | tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode, |
| 16054 | VAR_INIT_STATUS_INITIALIZED); |
| 16055 | if (tmp == NULL) |
| 16056 | return NULL; |
| 16057 | add_loc_descr (list_head: &ret, descr: tmp); |
| 16058 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0)); |
| 16059 | add_loc_descr (list_head: &ret, descr: new_loc_descr (GET_CODE (rtl) == POPCOUNT |
| 16060 | ? DW_OP_plus : DW_OP_xor, oprnd1: 0, oprnd2: 0)); |
| 16061 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0)); |
| 16062 | tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode, |
| 16063 | VAR_INIT_STATUS_INITIALIZED); |
| 16064 | add_loc_descr (list_head: &ret, descr: tmp); |
| 16065 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_shr, oprnd1: 0, oprnd2: 0)); |
| 16066 | l1jump = new_loc_descr (op: DW_OP_skip, oprnd1: 0, oprnd2: 0); |
| 16067 | add_loc_descr (list_head: &ret, descr: l1jump); |
| 16068 | l2label = new_loc_descr (op: DW_OP_drop, oprnd1: 0, oprnd2: 0); |
| 16069 | add_loc_descr (list_head: &ret, descr: l2label); |
| 16070 | l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc; |
| 16071 | l1jump->dw_loc_oprnd1.v.val_loc = l1label; |
| 16072 | l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc; |
| 16073 | l2jump->dw_loc_oprnd1.v.val_loc = l2label; |
| 16074 | return ret; |
| 16075 | } |
| 16076 | |
| 16077 | /* BSWAP (constS is initial shift count, either 56 or 24): |
| 16078 | constS const0 |
| 16079 | L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr |
| 16080 | const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or |
| 16081 | DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8 |
| 16082 | DW_OP_minus DW_OP_swap DW_OP_skip <L1> |
| 16083 | L2: DW_OP_drop DW_OP_swap DW_OP_drop */ |
| 16084 | |
| 16085 | static dw_loc_descr_ref |
| 16086 | bswap_loc_descriptor (rtx rtl, scalar_int_mode mode, |
| 16087 | machine_mode mem_mode) |
| 16088 | { |
| 16089 | dw_loc_descr_ref op0, ret, tmp; |
| 16090 | dw_loc_descr_ref l1jump, l1label; |
| 16091 | dw_loc_descr_ref l2jump, l2label; |
| 16092 | |
| 16093 | if (BITS_PER_UNIT != 8 |
| 16094 | || (GET_MODE_BITSIZE (mode) != 32 |
| 16095 | && GET_MODE_BITSIZE (mode) != 64)) |
| 16096 | return NULL; |
| 16097 | |
| 16098 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, |
| 16099 | VAR_INIT_STATUS_INITIALIZED); |
| 16100 | if (op0 == NULL) |
| 16101 | return NULL; |
| 16102 | |
| 16103 | ret = op0; |
| 16104 | tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8), |
| 16105 | mode, mem_mode, |
| 16106 | VAR_INIT_STATUS_INITIALIZED); |
| 16107 | if (tmp == NULL) |
| 16108 | return NULL; |
| 16109 | add_loc_descr (list_head: &ret, descr: tmp); |
| 16110 | tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode, |
| 16111 | VAR_INIT_STATUS_INITIALIZED); |
| 16112 | if (tmp == NULL) |
| 16113 | return NULL; |
| 16114 | add_loc_descr (list_head: &ret, descr: tmp); |
| 16115 | l1label = new_loc_descr (op: DW_OP_pick, oprnd1: 2, oprnd2: 0); |
| 16116 | add_loc_descr (list_head: &ret, descr: l1label); |
| 16117 | tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8), |
| 16118 | mode, mem_mode, |
| 16119 | VAR_INIT_STATUS_INITIALIZED); |
| 16120 | add_loc_descr (list_head: &ret, descr: tmp); |
| 16121 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_pick, oprnd1: 3, oprnd2: 0)); |
| 16122 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_minus, oprnd1: 0, oprnd2: 0)); |
| 16123 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_shr, oprnd1: 0, oprnd2: 0)); |
| 16124 | tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode, |
| 16125 | VAR_INIT_STATUS_INITIALIZED); |
| 16126 | if (tmp == NULL) |
| 16127 | return NULL; |
| 16128 | add_loc_descr (list_head: &ret, descr: tmp); |
| 16129 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0)); |
| 16130 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_pick, oprnd1: 2, oprnd2: 0)); |
| 16131 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_shl, oprnd1: 0, oprnd2: 0)); |
| 16132 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_or, oprnd1: 0, oprnd2: 0)); |
| 16133 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0)); |
| 16134 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_dup, oprnd1: 0, oprnd2: 0)); |
| 16135 | tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode, |
| 16136 | VAR_INIT_STATUS_INITIALIZED); |
| 16137 | add_loc_descr (list_head: &ret, descr: tmp); |
| 16138 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_eq, oprnd1: 0, oprnd2: 0)); |
| 16139 | l2jump = new_loc_descr (op: DW_OP_bra, oprnd1: 0, oprnd2: 0); |
| 16140 | add_loc_descr (list_head: &ret, descr: l2jump); |
| 16141 | tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode, |
| 16142 | VAR_INIT_STATUS_INITIALIZED); |
| 16143 | add_loc_descr (list_head: &ret, descr: tmp); |
| 16144 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_minus, oprnd1: 0, oprnd2: 0)); |
| 16145 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0)); |
| 16146 | l1jump = new_loc_descr (op: DW_OP_skip, oprnd1: 0, oprnd2: 0); |
| 16147 | add_loc_descr (list_head: &ret, descr: l1jump); |
| 16148 | l2label = new_loc_descr (op: DW_OP_drop, oprnd1: 0, oprnd2: 0); |
| 16149 | add_loc_descr (list_head: &ret, descr: l2label); |
| 16150 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0)); |
| 16151 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_drop, oprnd1: 0, oprnd2: 0)); |
| 16152 | l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc; |
| 16153 | l1jump->dw_loc_oprnd1.v.val_loc = l1label; |
| 16154 | l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc; |
| 16155 | l2jump->dw_loc_oprnd1.v.val_loc = l2label; |
| 16156 | return ret; |
| 16157 | } |
| 16158 | |
| 16159 | /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode): |
| 16160 | DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot |
| 16161 | [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg |
| 16162 | DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or |
| 16163 | |
| 16164 | ROTATERT is similar: |
| 16165 | DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE> |
| 16166 | DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot |
| 16167 | [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */ |
| 16168 | |
static dw_loc_descr_ref
rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
		       machine_mode mem_mode)
{
  rtx rtlop1 = XEXP (rtl, 1);
  dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
  int i;

  /* A narrower rotate count is zero-extended to MODE first.  */
  if (is_narrower_int_mode (GET_MODE (rtlop1), limit: mode))
    rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
  op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
			    VAR_INIT_STATUS_INITIALIZED);
  op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
			    VAR_INIT_STATUS_INITIALIZED);
  if (op0 == NULL || op1 == NULL)
    return NULL;
  /* For modes narrower than the DWARF stack width, prepare the two
     optional "constMASK DW_OP_and" sequences from the comment above,
     used to truncate intermediate results back to MODE.  */
  if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
    for (i = 0; i < 2; i++)
      {
	if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
	  mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
					mode, mem_mode,
					VAR_INIT_STATUS_INITIALIZED);
	else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
	  /* The full-word mask is emitted as an unsigned constant
	     directly; the signed path would not represent it.  */
	  mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
				   ? DW_OP_const4u
				   : HOST_BITS_PER_WIDE_INT == 64
				   ? DW_OP_const8u : DW_OP_constu,
				   GET_MODE_MASK (mode), oprnd2: 0);
	else
	  mask[i] = NULL;
	if (mask[i] == NULL)
	  return NULL;
	add_loc_descr (list_head: &mask[i], descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0));
      }
  /* Emit the sequence from the comment above: value and count are
     duplicated, one copy is shifted left, the other right, and the
     two halves are OR-ed together.  */
  ret = op0;
  add_loc_descr (list_head: &ret, descr: op1);
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_over, oprnd1: 0, oprnd2: 0));
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_over, oprnd1: 0, oprnd2: 0));
  if (GET_CODE (rtl) == ROTATERT)
    {
      /* For ROTATERT the left-shift count is BITSIZE - count.  */
      add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_neg, oprnd1: 0, oprnd2: 0));
      add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_plus_uconst,
				       oprnd1: GET_MODE_BITSIZE (mode), oprnd2: 0));
    }
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_shl, oprnd1: 0, oprnd2: 0));
  if (mask[0] != NULL)
    add_loc_descr (list_head: &ret, descr: mask[0]);
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_rot, oprnd1: 0, oprnd2: 0));
  if (mask[1] != NULL)
    {
      add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0));
      add_loc_descr (list_head: &ret, descr: mask[1]);
      add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0));
    }
  if (GET_CODE (rtl) == ROTATE)
    {
      /* For ROTATE the right-shift count is BITSIZE - count.  */
      add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_neg, oprnd1: 0, oprnd2: 0));
      add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_plus_uconst,
				       oprnd1: GET_MODE_BITSIZE (mode), oprnd2: 0));
    }
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_shr, oprnd1: 0, oprnd2: 0));
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_or, oprnd1: 0, oprnd2: 0));
  return ret;
}
| 16234 | |
| 16235 | /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref |
| 16236 | for DEBUG_PARAMETER_REF RTL. */ |
| 16237 | |
| 16238 | static dw_loc_descr_ref |
| 16239 | parameter_ref_descriptor (rtx rtl) |
| 16240 | { |
| 16241 | dw_loc_descr_ref ret; |
| 16242 | dw_die_ref ref; |
| 16243 | |
| 16244 | if (dwarf_strict) |
| 16245 | return NULL; |
| 16246 | gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL); |
| 16247 | /* With LTO during LTRANS we get the late DIE that refers to the early |
| 16248 | DIE, thus we add another indirection here. This seems to confuse |
| 16249 | gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */ |
| 16250 | ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl)); |
| 16251 | ret = new_loc_descr (op: DW_OP_GNU_parameter_ref, oprnd1: 0, oprnd2: 0); |
| 16252 | if (ref) |
| 16253 | { |
| 16254 | ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
| 16255 | ret->dw_loc_oprnd1.v.val_die_ref.die = ref; |
| 16256 | ret->dw_loc_oprnd1.v.val_die_ref.external = 0; |
| 16257 | } |
| 16258 | else |
| 16259 | { |
| 16260 | ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref; |
| 16261 | ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl); |
| 16262 | } |
| 16263 | return ret; |
| 16264 | } |
| 16265 | |
| 16266 | /* The following routine converts the RTL for a variable or parameter |
| 16267 | (resident in memory) into an equivalent Dwarf representation of a |
| 16268 | mechanism for getting the address of that same variable onto the top of a |
| 16269 | hypothetical "address evaluation" stack. |
| 16270 | |
| 16271 | When creating memory location descriptors, we are effectively transforming |
| 16272 | the RTL for a memory-resident object into its Dwarf postfix expression |
| 16273 | equivalent. This routine recursively descends an RTL tree, turning |
| 16274 | it into Dwarf postfix code as it goes. |
| 16275 | |
| 16276 | MODE is the mode that should be assumed for the rtl if it is VOIDmode. |
| 16277 | |
| 16278 | MEM_MODE is the mode of the memory reference, needed to handle some |
| 16279 | autoincrement addressing modes. |
| 16280 | |
| 16281 | Return 0 if we can't represent the location. */ |
| 16282 | |
| 16283 | dw_loc_descr_ref |
| 16284 | mem_loc_descriptor (rtx rtl, machine_mode mode, |
| 16285 | machine_mode mem_mode, |
| 16286 | enum var_init_status initialized) |
| 16287 | { |
| 16288 | dw_loc_descr_ref mem_loc_result = NULL; |
| 16289 | enum dwarf_location_atom op; |
| 16290 | dw_loc_descr_ref op0, op1; |
| 16291 | rtx inner = NULL_RTX; |
| 16292 | |
| 16293 | if (mode == VOIDmode) |
| 16294 | mode = GET_MODE (rtl); |
| 16295 | |
| 16296 | /* Note that for a dynamically sized array, the location we will generate a |
| 16297 | description of here will be the lowest numbered location which is |
| 16298 | actually within the array. That's *not* necessarily the same as the |
| 16299 | zeroth element of the array. */ |
| 16300 | |
| 16301 | rtl = targetm.delegitimize_address (rtl); |
| 16302 | |
| 16303 | if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode) |
| 16304 | return NULL; |
| 16305 | |
| 16306 | scalar_int_mode int_mode = BImode, inner_mode, op1_mode; |
| 16307 | switch (GET_CODE (rtl)) |
| 16308 | { |
| 16309 | case POST_INC: |
| 16310 | case POST_DEC: |
| 16311 | case POST_MODIFY: |
| 16312 | return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized); |
| 16313 | |
| 16314 | case SUBREG: |
| 16315 | /* The case of a subreg may arise when we have a local (register) |
| 16316 | variable or a formal (register) parameter which doesn't quite fill |
| 16317 | up an entire register. For now, just assume that it is |
| 16318 | legitimate to make the Dwarf info refer to the whole register which |
| 16319 | contains the given subreg. */ |
| 16320 | if (!subreg_lowpart_p (rtl)) |
| 16321 | break; |
| 16322 | inner = SUBREG_REG (rtl); |
| 16323 | /* FALLTHRU */ |
| 16324 | case TRUNCATE: |
| 16325 | if (inner == NULL_RTX) |
| 16326 | inner = XEXP (rtl, 0); |
| 16327 | if (is_a <scalar_int_mode> (m: mode, result: &int_mode) |
| 16328 | && is_a <scalar_int_mode> (GET_MODE (inner), result: &inner_mode) |
| 16329 | && (GET_MODE_SIZE (mode: int_mode) <= DWARF2_ADDR_SIZE |
| 16330 | #ifdef POINTERS_EXTEND_UNSIGNED |
| 16331 | || (int_mode == Pmode && mem_mode != VOIDmode) |
| 16332 | #endif |
| 16333 | ) |
| 16334 | && GET_MODE_SIZE (mode: inner_mode) <= DWARF2_ADDR_SIZE) |
| 16335 | { |
| 16336 | mem_loc_result = mem_loc_descriptor (rtl: inner, |
| 16337 | mode: inner_mode, |
| 16338 | mem_mode, initialized); |
| 16339 | break; |
| 16340 | } |
| 16341 | if (dwarf_strict && dwarf_version < 5) |
| 16342 | break; |
| 16343 | if (is_a <scalar_int_mode> (m: mode, result: &int_mode) |
| 16344 | && is_a <scalar_int_mode> (GET_MODE (inner), result: &inner_mode) |
| 16345 | ? GET_MODE_SIZE (mode: int_mode) <= GET_MODE_SIZE (mode: inner_mode) |
| 16346 | : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner)))) |
| 16347 | { |
| 16348 | dw_die_ref type_die; |
| 16349 | dw_loc_descr_ref cvt; |
| 16350 | |
| 16351 | mem_loc_result = mem_loc_descriptor (rtl: inner, |
| 16352 | GET_MODE (inner), |
| 16353 | mem_mode, initialized); |
| 16354 | if (mem_loc_result == NULL) |
| 16355 | break; |
| 16356 | type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode)); |
| 16357 | if (type_die == NULL) |
| 16358 | { |
| 16359 | mem_loc_result = NULL; |
| 16360 | break; |
| 16361 | } |
| 16362 | if (maybe_ne (a: GET_MODE_SIZE (mode), b: GET_MODE_SIZE (GET_MODE (inner)))) |
| 16363 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
| 16364 | else |
| 16365 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_reinterpret), oprnd1: 0, oprnd2: 0); |
| 16366 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
| 16367 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
| 16368 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
| 16369 | add_loc_descr (list_head: &mem_loc_result, descr: cvt); |
| 16370 | if (is_a <scalar_int_mode> (m: mode, result: &int_mode) |
| 16371 | && GET_MODE_SIZE (mode: int_mode) <= DWARF2_ADDR_SIZE) |
| 16372 | { |
| 16373 | /* Convert it to untyped afterwards. */ |
| 16374 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
| 16375 | add_loc_descr (list_head: &mem_loc_result, descr: cvt); |
| 16376 | } |
| 16377 | } |
| 16378 | break; |
| 16379 | |
| 16380 | case REG: |
| 16381 | if (!is_a <scalar_int_mode> (m: mode, result: &int_mode) |
| 16382 | || (GET_MODE_SIZE (mode: int_mode) > DWARF2_ADDR_SIZE |
| 16383 | && rtl != arg_pointer_rtx |
| 16384 | && rtl != frame_pointer_rtx |
| 16385 | #ifdef POINTERS_EXTEND_UNSIGNED |
| 16386 | && (int_mode != Pmode || mem_mode == VOIDmode) |
| 16387 | #endif |
| 16388 | )) |
| 16389 | { |
| 16390 | dw_die_ref type_die; |
| 16391 | unsigned int debugger_regnum; |
| 16392 | |
| 16393 | if (dwarf_strict && dwarf_version < 5) |
| 16394 | break; |
| 16395 | if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER) |
| 16396 | break; |
| 16397 | type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode)); |
| 16398 | if (type_die == NULL) |
| 16399 | break; |
| 16400 | |
| 16401 | debugger_regnum = debugger_reg_number (rtl); |
| 16402 | if (debugger_regnum == IGNORED_DWARF_REGNUM) |
| 16403 | break; |
| 16404 | mem_loc_result = new_loc_descr (op: dwarf_OP (op: DW_OP_regval_type), |
| 16405 | oprnd1: debugger_regnum, oprnd2: 0); |
| 16406 | mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref; |
| 16407 | mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die; |
| 16408 | mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0; |
| 16409 | break; |
| 16410 | } |
| 16411 | /* Whenever a register number forms a part of the description of the |
| 16412 | method for calculating the (dynamic) address of a memory resident |
| 16413 | object, DWARF rules require the register number be referred to as |
| 16414 | a "base register". This distinction is not based in any way upon |
| 16415 | what category of register the hardware believes the given register |
| 16416 | belongs to. This is strictly DWARF terminology we're dealing with |
| 16417 | here. Note that in cases where the location of a memory-resident |
| 16418 | data object could be expressed as: OP_ADD (OP_BASEREG (basereg), |
| 16419 | OP_CONST (0)) the actual DWARF location descriptor that we generate |
| 16420 | may just be OP_BASEREG (basereg). This may look deceptively like |
| 16421 | the object in question was allocated to a register (rather than in |
| 16422 | memory) so DWARF consumers need to be aware of the subtle |
| 16423 | distinction between OP_REG and OP_BASEREG. */ |
| 16424 | if (REGNO (rtl) < FIRST_PSEUDO_REGISTER) |
| 16425 | mem_loc_result = based_loc_descr (reg: rtl, offset: 0, initialized: VAR_INIT_STATUS_INITIALIZED); |
| 16426 | else if (stack_realign_drap |
| 16427 | && crtl->drap_reg |
| 16428 | && crtl->args.internal_arg_pointer == rtl |
| 16429 | && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER) |
| 16430 | { |
| 16431 | /* If RTL is internal_arg_pointer, which has been optimized |
| 16432 | out, use DRAP instead. */ |
| 16433 | mem_loc_result = based_loc_descr (crtl->drap_reg, offset: 0, |
| 16434 | initialized: VAR_INIT_STATUS_INITIALIZED); |
| 16435 | } |
| 16436 | break; |
| 16437 | |
| 16438 | case SIGN_EXTEND: |
| 16439 | case ZERO_EXTEND: |
| 16440 | if (!is_a <scalar_int_mode> (m: mode, result: &int_mode) |
| 16441 | || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), result: &inner_mode)) |
| 16442 | break; |
| 16443 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode: inner_mode, |
| 16444 | mem_mode, initialized: VAR_INIT_STATUS_INITIALIZED); |
| 16445 | if (op0 == 0) |
| 16446 | break; |
| 16447 | else if (GET_CODE (rtl) == ZERO_EXTEND |
| 16448 | && GET_MODE_SIZE (mode: int_mode) <= DWARF2_ADDR_SIZE |
| 16449 | && GET_MODE_BITSIZE (mode: inner_mode) < HOST_BITS_PER_WIDE_INT |
| 16450 | /* If DW_OP_const{1,2,4}u won't be used, it is shorter |
| 16451 | to expand zero extend as two shifts instead of |
| 16452 | masking. */ |
| 16453 | && GET_MODE_SIZE (mode: inner_mode) <= 4) |
| 16454 | { |
| 16455 | mem_loc_result = op0; |
| 16456 | add_loc_descr (list_head: &mem_loc_result, |
| 16457 | descr: int_loc_descriptor (GET_MODE_MASK (inner_mode))); |
| 16458 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0)); |
| 16459 | } |
| 16460 | else if (GET_MODE_SIZE (mode: int_mode) <= DWARF2_ADDR_SIZE) |
| 16461 | { |
| 16462 | int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (mode: inner_mode); |
| 16463 | shift *= BITS_PER_UNIT; |
| 16464 | if (GET_CODE (rtl) == SIGN_EXTEND) |
| 16465 | op = DW_OP_shra; |
| 16466 | else |
| 16467 | op = DW_OP_shr; |
| 16468 | mem_loc_result = op0; |
| 16469 | add_loc_descr (list_head: &mem_loc_result, descr: int_loc_descriptor (poly_i: shift)); |
| 16470 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_shl, oprnd1: 0, oprnd2: 0)); |
| 16471 | add_loc_descr (list_head: &mem_loc_result, descr: int_loc_descriptor (poly_i: shift)); |
| 16472 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
| 16473 | } |
| 16474 | else if (!dwarf_strict || dwarf_version >= 5) |
| 16475 | { |
| 16476 | dw_die_ref type_die1, type_die2; |
| 16477 | dw_loc_descr_ref cvt; |
| 16478 | |
| 16479 | type_die1 = base_type_for_mode (mode: inner_mode, |
| 16480 | GET_CODE (rtl) == ZERO_EXTEND); |
| 16481 | if (type_die1 == NULL) |
| 16482 | break; |
| 16483 | type_die2 = base_type_for_mode (mode: int_mode, unsignedp: 1); |
| 16484 | if (type_die2 == NULL) |
| 16485 | break; |
| 16486 | mem_loc_result = op0; |
| 16487 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
| 16488 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
| 16489 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1; |
| 16490 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
| 16491 | add_loc_descr (list_head: &mem_loc_result, descr: cvt); |
| 16492 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
| 16493 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
| 16494 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2; |
| 16495 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
| 16496 | add_loc_descr (list_head: &mem_loc_result, descr: cvt); |
| 16497 | } |
| 16498 | break; |
| 16499 | |
| 16500 | case MEM: |
| 16501 | { |
| 16502 | rtx new_rtl = avoid_constant_pool_reference (rtl); |
| 16503 | if (new_rtl != rtl) |
| 16504 | { |
| 16505 | mem_loc_result = mem_loc_descriptor (rtl: new_rtl, mode, mem_mode, |
| 16506 | initialized); |
| 16507 | if (mem_loc_result != NULL) |
| 16508 | return mem_loc_result; |
| 16509 | } |
| 16510 | } |
| 16511 | mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), |
| 16512 | mode: get_address_mode (mem: rtl), mem_mode: mode, |
| 16513 | initialized: VAR_INIT_STATUS_INITIALIZED); |
| 16514 | if (mem_loc_result == NULL) |
| 16515 | mem_loc_result = tls_mem_loc_descriptor (mem: rtl); |
| 16516 | if (mem_loc_result != NULL) |
| 16517 | { |
| 16518 | if (!is_a <scalar_int_mode> (m: mode, result: &int_mode) |
| 16519 | || GET_MODE_SIZE (mode: int_mode) > DWARF2_ADDR_SIZE) |
| 16520 | { |
| 16521 | dw_die_ref type_die; |
| 16522 | dw_loc_descr_ref deref; |
| 16523 | HOST_WIDE_INT size; |
| 16524 | |
| 16525 | if (dwarf_strict && dwarf_version < 5) |
| 16526 | return NULL; |
| 16527 | if (!GET_MODE_SIZE (mode).is_constant (const_value: &size)) |
| 16528 | return NULL; |
| 16529 | type_die |
| 16530 | = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode)); |
| 16531 | if (type_die == NULL) |
| 16532 | return NULL; |
| 16533 | deref = new_loc_descr (op: dwarf_OP (op: DW_OP_deref_type), oprnd1: size, oprnd2: 0); |
| 16534 | deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref; |
| 16535 | deref->dw_loc_oprnd2.v.val_die_ref.die = type_die; |
| 16536 | deref->dw_loc_oprnd2.v.val_die_ref.external = 0; |
| 16537 | add_loc_descr (list_head: &mem_loc_result, descr: deref); |
| 16538 | } |
| 16539 | else if (GET_MODE_SIZE (mode: int_mode) == DWARF2_ADDR_SIZE) |
| 16540 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_deref, oprnd1: 0, oprnd2: 0)); |
| 16541 | else |
| 16542 | add_loc_descr (list_head: &mem_loc_result, |
| 16543 | descr: new_loc_descr (op: DW_OP_deref_size, |
| 16544 | oprnd1: GET_MODE_SIZE (mode: int_mode), oprnd2: 0)); |
| 16545 | } |
| 16546 | break; |
| 16547 | |
| 16548 | case LO_SUM: |
| 16549 | return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized); |
| 16550 | |
| 16551 | case LABEL_REF: |
| 16552 | /* Some ports can transform a symbol ref into a label ref, because |
| 16553 | the symbol ref is too far away and has to be dumped into a constant |
| 16554 | pool. */ |
| 16555 | case CONST: |
| 16556 | case SYMBOL_REF: |
| 16557 | case UNSPEC: |
| 16558 | if (!is_a <scalar_int_mode> (m: mode, result: &int_mode) |
| 16559 | || (GET_MODE_SIZE (mode: int_mode) > DWARF2_ADDR_SIZE |
| 16560 | #ifdef POINTERS_EXTEND_UNSIGNED |
| 16561 | && (int_mode != Pmode || mem_mode == VOIDmode) |
| 16562 | #endif |
| 16563 | )) |
| 16564 | break; |
| 16565 | |
| 16566 | if (GET_CODE (rtl) == UNSPEC) |
| 16567 | { |
| 16568 | /* If delegitimize_address couldn't do anything with the UNSPEC, we |
| 16569 | can't express it in the debug info. This can happen e.g. with some |
| 16570 | TLS UNSPECs. Allow UNSPECs formerly from CONST that the backend |
| 16571 | approves. */ |
| 16572 | bool not_ok = false; |
| 16573 | subrtx_var_iterator::array_type array; |
| 16574 | FOR_EACH_SUBRTX_VAR (iter, array, rtl, ALL) |
| 16575 | if (*iter != rtl && !CONSTANT_P (*iter)) |
| 16576 | { |
| 16577 | not_ok = true; |
| 16578 | break; |
| 16579 | } |
| 16580 | |
| 16581 | if (not_ok) |
| 16582 | break; |
| 16583 | |
| 16584 | FOR_EACH_SUBRTX_VAR (iter, array, rtl, ALL) |
| 16585 | if (!const_ok_for_output_1 (rtl: *iter)) |
| 16586 | { |
| 16587 | not_ok = true; |
| 16588 | break; |
| 16589 | } |
| 16590 | |
| 16591 | if (not_ok) |
| 16592 | break; |
| 16593 | |
| 16594 | rtl = gen_rtx_CONST (GET_MODE (rtl), rtl); |
| 16595 | goto symref; |
| 16596 | } |
| 16597 | |
| 16598 | if (GET_CODE (rtl) == SYMBOL_REF |
| 16599 | && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE) |
| 16600 | { |
| 16601 | dw_loc_descr_ref temp; |
| 16602 | |
| 16603 | /* If this is not defined, we have no way to emit the data. */ |
| 16604 | if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel) |
| 16605 | break; |
| 16606 | |
| 16607 | temp = new_addr_loc_descr (addr: rtl, dtprel: dtprel_true); |
| 16608 | |
| 16609 | /* We check for DWARF 5 here because gdb did not implement |
| 16610 | DW_OP_form_tls_address until after 7.12. */ |
| 16611 | mem_loc_result = new_loc_descr (op: (dwarf_version >= 5 |
| 16612 | ? DW_OP_form_tls_address |
| 16613 | : DW_OP_GNU_push_tls_address), |
| 16614 | oprnd1: 0, oprnd2: 0); |
| 16615 | add_loc_descr (list_head: &mem_loc_result, descr: temp); |
| 16616 | |
| 16617 | break; |
| 16618 | } |
| 16619 | |
| 16620 | if (!const_ok_for_output (rtl)) |
| 16621 | { |
| 16622 | if (GET_CODE (rtl) == CONST) |
| 16623 | switch (GET_CODE (XEXP (rtl, 0))) |
| 16624 | { |
| 16625 | case NOT: |
| 16626 | op = DW_OP_not; |
| 16627 | goto try_const_unop; |
| 16628 | case NEG: |
| 16629 | op = DW_OP_neg; |
| 16630 | goto try_const_unop; |
| 16631 | try_const_unop: |
| 16632 | rtx arg; |
| 16633 | arg = XEXP (XEXP (rtl, 0), 0); |
| 16634 | if (!CONSTANT_P (arg)) |
| 16635 | arg = gen_rtx_CONST (int_mode, arg); |
| 16636 | op0 = mem_loc_descriptor (rtl: arg, mode: int_mode, mem_mode, |
| 16637 | initialized); |
| 16638 | if (op0) |
| 16639 | { |
| 16640 | mem_loc_result = op0; |
| 16641 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
| 16642 | } |
| 16643 | break; |
| 16644 | default: |
| 16645 | mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode: int_mode, |
| 16646 | mem_mode, initialized); |
| 16647 | break; |
| 16648 | } |
| 16649 | break; |
| 16650 | } |
| 16651 | |
| 16652 | symref: |
| 16653 | mem_loc_result = new_addr_loc_descr (addr: rtl, dtprel: dtprel_false); |
| 16654 | vec_safe_push (v&: used_rtx_array, obj: rtl); |
| 16655 | break; |
| 16656 | |
| 16657 | case CONCAT: |
| 16658 | case CONCATN: |
| 16659 | case VAR_LOCATION: |
| 16660 | case DEBUG_IMPLICIT_PTR: |
| 16661 | expansion_failed (NULL_TREE, rtl, |
| 16662 | reason: "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor" ); |
| 16663 | return 0; |
| 16664 | |
| 16665 | case ENTRY_VALUE: |
| 16666 | if (dwarf_strict && dwarf_version < 5) |
| 16667 | return NULL; |
| 16668 | if (REG_P (ENTRY_VALUE_EXP (rtl))) |
| 16669 | { |
| 16670 | if (!is_a <scalar_int_mode> (m: mode, result: &int_mode) |
| 16671 | || GET_MODE_SIZE (mode: int_mode) > DWARF2_ADDR_SIZE) |
| 16672 | op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode, |
| 16673 | VOIDmode, initialized: VAR_INIT_STATUS_INITIALIZED); |
| 16674 | else |
| 16675 | { |
| 16676 | unsigned int debugger_regnum = debugger_reg_number (ENTRY_VALUE_EXP (rtl)); |
| 16677 | if (debugger_regnum == IGNORED_DWARF_REGNUM) |
| 16678 | return NULL; |
| 16679 | op0 = one_reg_loc_descriptor (regno: debugger_regnum, |
| 16680 | initialized: VAR_INIT_STATUS_INITIALIZED); |
| 16681 | } |
| 16682 | } |
| 16683 | else if (MEM_P (ENTRY_VALUE_EXP (rtl)) |
| 16684 | && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0))) |
| 16685 | { |
| 16686 | op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode, |
| 16687 | VOIDmode, initialized: VAR_INIT_STATUS_INITIALIZED); |
| 16688 | if (op0 && op0->dw_loc_opc == DW_OP_fbreg) |
| 16689 | return NULL; |
| 16690 | } |
| 16691 | else |
| 16692 | gcc_unreachable (); |
| 16693 | if (op0 == NULL) |
| 16694 | return NULL; |
| 16695 | mem_loc_result = new_loc_descr (op: dwarf_OP (op: DW_OP_entry_value), oprnd1: 0, oprnd2: 0); |
| 16696 | mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc; |
| 16697 | mem_loc_result->dw_loc_oprnd1.v.val_loc = op0; |
| 16698 | break; |
| 16699 | |
| 16700 | case DEBUG_PARAMETER_REF: |
| 16701 | mem_loc_result = parameter_ref_descriptor (rtl); |
| 16702 | break; |
| 16703 | |
| 16704 | case PRE_MODIFY: |
| 16705 | /* Extract the PLUS expression nested inside and fall into |
| 16706 | PLUS code below. */ |
| 16707 | rtl = XEXP (rtl, 1); |
| 16708 | goto plus; |
| 16709 | |
| 16710 | case PRE_INC: |
| 16711 | case PRE_DEC: |
| 16712 | /* Turn these into a PLUS expression and fall into the PLUS code |
| 16713 | below. */ |
| 16714 | rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0), |
| 16715 | gen_int_mode (GET_CODE (rtl) == PRE_INC |
| 16716 | ? GET_MODE_UNIT_SIZE (mem_mode) |
| 16717 | : -GET_MODE_UNIT_SIZE (mem_mode), |
| 16718 | mode)); |
| 16719 | |
| 16720 | /* fall through */ |
| 16721 | |
| 16722 | case PLUS: |
| 16723 | plus: |
| 16724 | if (is_based_loc (rtl) |
| 16725 | && is_a <scalar_int_mode> (m: mode, result: &int_mode) |
| 16726 | && (GET_MODE_SIZE (mode: int_mode) <= DWARF2_ADDR_SIZE |
| 16727 | || XEXP (rtl, 0) == arg_pointer_rtx |
| 16728 | || XEXP (rtl, 0) == frame_pointer_rtx)) |
| 16729 | mem_loc_result = based_loc_descr (XEXP (rtl, 0), |
| 16730 | INTVAL (XEXP (rtl, 1)), |
| 16731 | initialized: VAR_INIT_STATUS_INITIALIZED); |
| 16732 | else |
| 16733 | { |
| 16734 | mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, |
| 16735 | initialized: VAR_INIT_STATUS_INITIALIZED); |
| 16736 | if (mem_loc_result == 0) |
| 16737 | break; |
| 16738 | |
| 16739 | if (CONST_INT_P (XEXP (rtl, 1)) |
| 16740 | && (GET_MODE_SIZE (mode: as_a <scalar_int_mode> (m: mode)) |
| 16741 | <= DWARF2_ADDR_SIZE)) |
| 16742 | loc_descr_plus_const (list_head: &mem_loc_result, INTVAL (XEXP (rtl, 1))); |
| 16743 | else |
| 16744 | { |
| 16745 | op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, |
| 16746 | initialized: VAR_INIT_STATUS_INITIALIZED); |
| 16747 | if (op1 == 0) |
| 16748 | return NULL; |
| 16749 | add_loc_descr (list_head: &mem_loc_result, descr: op1); |
| 16750 | add_loc_descr (list_head: &mem_loc_result, |
| 16751 | descr: new_loc_descr (op: DW_OP_plus, oprnd1: 0, oprnd2: 0)); |
| 16752 | } |
| 16753 | } |
| 16754 | break; |
| 16755 | |
| 16756 | /* If a pseudo-reg is optimized away, it is possible for it to |
| 16757 | be replaced with a MEM containing a multiply or shift. */ |
| 16758 | case MINUS: |
| 16759 | op = DW_OP_minus; |
| 16760 | goto do_binop; |
| 16761 | |
| 16762 | case MULT: |
| 16763 | op = DW_OP_mul; |
| 16764 | goto do_binop; |
| 16765 | |
| 16766 | case DIV: |
| 16767 | if ((!dwarf_strict || dwarf_version >= 5) |
| 16768 | && is_a <scalar_int_mode> (m: mode, result: &int_mode) |
| 16769 | && GET_MODE_SIZE (mode: int_mode) > DWARF2_ADDR_SIZE) |
| 16770 | { |
| 16771 | mem_loc_result = typed_binop (op: DW_OP_div, rtl, |
| 16772 | type_die: base_type_for_mode (mode, unsignedp: 0), |
| 16773 | mode: int_mode, mem_mode); |
| 16774 | break; |
| 16775 | } |
| 16776 | op = DW_OP_div; |
| 16777 | goto do_binop; |
| 16778 | |
| 16779 | case UMOD: |
| 16780 | op = DW_OP_mod; |
| 16781 | goto do_binop; |
| 16782 | |
| 16783 | case ASHIFT: |
| 16784 | op = DW_OP_shl; |
| 16785 | goto do_shift; |
| 16786 | |
| 16787 | case ASHIFTRT: |
| 16788 | op = DW_OP_shra; |
| 16789 | goto do_shift; |
| 16790 | |
| 16791 | case LSHIFTRT: |
| 16792 | op = DW_OP_shr; |
| 16793 | goto do_shift; |
| 16794 | |
| 16795 | do_shift: |
| 16796 | if (!is_a <scalar_int_mode> (m: mode, result: &int_mode)) |
| 16797 | break; |
| 16798 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode: int_mode, mem_mode, |
| 16799 | initialized: VAR_INIT_STATUS_INITIALIZED); |
| 16800 | { |
| 16801 | rtx rtlop1 = XEXP (rtl, 1); |
| 16802 | if (is_a <scalar_int_mode> (GET_MODE (rtlop1), result: &op1_mode) |
| 16803 | && GET_MODE_BITSIZE (mode: op1_mode) < GET_MODE_BITSIZE (mode: int_mode)) |
| 16804 | rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1); |
| 16805 | op1 = mem_loc_descriptor (rtl: rtlop1, mode: int_mode, mem_mode, |
| 16806 | initialized: VAR_INIT_STATUS_INITIALIZED); |
| 16807 | } |
| 16808 | |
| 16809 | if (op0 == 0 || op1 == 0) |
| 16810 | break; |
| 16811 | |
| 16812 | mem_loc_result = op0; |
| 16813 | add_loc_descr (list_head: &mem_loc_result, descr: op1); |
| 16814 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
| 16815 | break; |
| 16816 | |
| 16817 | case AND: |
| 16818 | op = DW_OP_and; |
| 16819 | goto do_binop; |
| 16820 | |
| 16821 | case IOR: |
| 16822 | op = DW_OP_or; |
| 16823 | goto do_binop; |
| 16824 | |
| 16825 | case XOR: |
| 16826 | op = DW_OP_xor; |
| 16827 | goto do_binop; |
| 16828 | |
| 16829 | do_binop: |
| 16830 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, |
| 16831 | initialized: VAR_INIT_STATUS_INITIALIZED); |
| 16832 | if (XEXP (rtl, 0) == XEXP (rtl, 1)) |
| 16833 | { |
| 16834 | if (op0 == 0) |
| 16835 | break; |
| 16836 | mem_loc_result = op0; |
| 16837 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_dup, oprnd1: 0, oprnd2: 0)); |
| 16838 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
| 16839 | break; |
| 16840 | } |
| 16841 | op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, |
| 16842 | initialized: VAR_INIT_STATUS_INITIALIZED); |
| 16843 | |
| 16844 | if (op0 == 0 || op1 == 0) |
| 16845 | break; |
| 16846 | |
| 16847 | mem_loc_result = op0; |
| 16848 | add_loc_descr (list_head: &mem_loc_result, descr: op1); |
| 16849 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
| 16850 | break; |
| 16851 | |
| 16852 | case MOD: |
| 16853 | if ((!dwarf_strict || dwarf_version >= 5) |
| 16854 | && is_a <scalar_int_mode> (m: mode, result: &int_mode) |
| 16855 | && GET_MODE_SIZE (mode: int_mode) > DWARF2_ADDR_SIZE) |
| 16856 | { |
| 16857 | mem_loc_result = typed_binop (op: DW_OP_mod, rtl, |
| 16858 | type_die: base_type_for_mode (mode, unsignedp: 0), |
| 16859 | mode: int_mode, mem_mode); |
| 16860 | break; |
| 16861 | } |
| 16862 | |
| 16863 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, |
| 16864 | initialized: VAR_INIT_STATUS_INITIALIZED); |
| 16865 | op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, |
| 16866 | initialized: VAR_INIT_STATUS_INITIALIZED); |
| 16867 | |
| 16868 | if (op0 == 0 || op1 == 0) |
| 16869 | break; |
| 16870 | |
| 16871 | mem_loc_result = op0; |
| 16872 | add_loc_descr (list_head: &mem_loc_result, descr: op1); |
| 16873 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_over, oprnd1: 0, oprnd2: 0)); |
| 16874 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_over, oprnd1: 0, oprnd2: 0)); |
| 16875 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_div, oprnd1: 0, oprnd2: 0)); |
| 16876 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_mul, oprnd1: 0, oprnd2: 0)); |
| 16877 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_minus, oprnd1: 0, oprnd2: 0)); |
| 16878 | break; |
| 16879 | |
| 16880 | case UDIV: |
| 16881 | if ((!dwarf_strict || dwarf_version >= 5) |
| 16882 | && is_a <scalar_int_mode> (m: mode, result: &int_mode)) |
| 16883 | { |
| 16884 | /* We can use a signed divide if the sign bit is not set. */ |
| 16885 | if (GET_MODE_SIZE (mode: int_mode) < DWARF2_ADDR_SIZE) |
| 16886 | { |
| 16887 | op = DW_OP_div; |
| 16888 | goto do_binop; |
| 16889 | } |
| 16890 | |
| 16891 | mem_loc_result = typed_binop (op: DW_OP_div, rtl, |
| 16892 | type_die: base_type_for_mode (mode: int_mode, unsignedp: 1), |
| 16893 | mode: int_mode, mem_mode); |
| 16894 | } |
| 16895 | break; |
| 16896 | |
| 16897 | case NOT: |
| 16898 | op = DW_OP_not; |
| 16899 | goto do_unop; |
| 16900 | |
| 16901 | case ABS: |
| 16902 | op = DW_OP_abs; |
| 16903 | goto do_unop; |
| 16904 | |
| 16905 | case NEG: |
| 16906 | op = DW_OP_neg; |
| 16907 | goto do_unop; |
| 16908 | |
| 16909 | do_unop: |
| 16910 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, |
| 16911 | initialized: VAR_INIT_STATUS_INITIALIZED); |
| 16912 | |
| 16913 | if (op0 == 0) |
| 16914 | break; |
| 16915 | |
| 16916 | mem_loc_result = op0; |
| 16917 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
| 16918 | break; |
| 16919 | |
| 16920 | case CONST_INT: |
| 16921 | if (!is_a <scalar_int_mode> (m: mode, result: &int_mode) |
| 16922 | || GET_MODE_SIZE (mode: int_mode) <= DWARF2_ADDR_SIZE |
| 16923 | #ifdef POINTERS_EXTEND_UNSIGNED |
| 16924 | || (int_mode == Pmode |
| 16925 | && mem_mode != VOIDmode |
| 16926 | && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl)) |
| 16927 | #endif |
| 16928 | ) |
| 16929 | { |
| 16930 | mem_loc_result = int_loc_descriptor (INTVAL (rtl)); |
| 16931 | break; |
| 16932 | } |
| 16933 | if ((!dwarf_strict || dwarf_version >= 5) |
| 16934 | && (GET_MODE_BITSIZE (mode: int_mode) == HOST_BITS_PER_WIDE_INT |
| 16935 | || GET_MODE_BITSIZE (mode: int_mode) == HOST_BITS_PER_DOUBLE_INT)) |
| 16936 | { |
| 16937 | dw_die_ref type_die = base_type_for_mode (mode: int_mode, unsignedp: 1); |
| 16938 | scalar_int_mode amode; |
| 16939 | if (type_die == NULL) |
| 16940 | return NULL; |
| 16941 | if (INTVAL (rtl) >= 0 |
| 16942 | && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, limit: 0) |
| 16943 | .exists (mode: &amode)) |
| 16944 | && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl) |
| 16945 | /* const DW_OP_convert <XXX> vs. |
| 16946 | DW_OP_const_type <XXX, 1, const>. */ |
| 16947 | && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1 |
| 16948 | < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (mode: int_mode)) |
| 16949 | { |
| 16950 | mem_loc_result = int_loc_descriptor (INTVAL (rtl)); |
| 16951 | op0 = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
| 16952 | op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
| 16953 | op0->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
| 16954 | op0->dw_loc_oprnd1.v.val_die_ref.external = 0; |
| 16955 | add_loc_descr (list_head: &mem_loc_result, descr: op0); |
| 16956 | return mem_loc_result; |
| 16957 | } |
| 16958 | mem_loc_result = new_loc_descr (op: dwarf_OP (op: DW_OP_const_type), oprnd1: 0, |
| 16959 | INTVAL (rtl)); |
| 16960 | mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
| 16961 | mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
| 16962 | mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0; |
| 16963 | if (GET_MODE_BITSIZE (mode: int_mode) == HOST_BITS_PER_WIDE_INT) |
| 16964 | mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const; |
| 16965 | else |
| 16966 | { |
| 16967 | mem_loc_result->dw_loc_oprnd2.val_class |
| 16968 | = dw_val_class_const_double; |
| 16969 | mem_loc_result->dw_loc_oprnd2.v.val_double |
| 16970 | = double_int::from_shwi (INTVAL (rtl)); |
| 16971 | } |
| 16972 | } |
| 16973 | break; |
| 16974 | |
| 16975 | case CONST_DOUBLE: |
| 16976 | if (!dwarf_strict || dwarf_version >= 5) |
| 16977 | { |
| 16978 | dw_die_ref type_die; |
| 16979 | |
| 16980 | /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a |
| 16981 | CONST_DOUBLE rtx could represent either a large integer |
| 16982 | or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0, |
| 16983 | the value is always a floating point constant. |
| 16984 | |
| 16985 | When it is an integer, a CONST_DOUBLE is used whenever |
| 16986 | the constant requires 2 HWIs to be adequately represented. |
| 16987 | We output CONST_DOUBLEs as blocks. */ |
| 16988 | if (mode == VOIDmode |
| 16989 | || (GET_MODE (rtl) == VOIDmode |
| 16990 | && maybe_ne (a: GET_MODE_BITSIZE (mode), |
| 16991 | HOST_BITS_PER_DOUBLE_INT))) |
| 16992 | break; |
| 16993 | type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode)); |
| 16994 | if (type_die == NULL) |
| 16995 | return NULL; |
| 16996 | mem_loc_result = new_loc_descr (op: dwarf_OP (op: DW_OP_const_type), oprnd1: 0, oprnd2: 0); |
| 16997 | mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
| 16998 | mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
| 16999 | mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0; |
| 17000 | #if TARGET_SUPPORTS_WIDE_INT == 0 |
| 17001 | if (!SCALAR_FLOAT_MODE_P (mode)) |
| 17002 | { |
| 17003 | mem_loc_result->dw_loc_oprnd2.val_class |
| 17004 | = dw_val_class_const_double; |
| 17005 | mem_loc_result->dw_loc_oprnd2.v.val_double |
| 17006 | = rtx_to_double_int (rtl); |
| 17007 | } |
| 17008 | else |
| 17009 | #endif |
| 17010 | { |
| 17011 | scalar_float_mode float_mode = as_a <scalar_float_mode> (m: mode); |
| 17012 | unsigned int length = GET_MODE_SIZE (mode: float_mode); |
| 17013 | unsigned char *array = ggc_vec_alloc<unsigned char> (c: length); |
| 17014 | unsigned int elt_size = insert_float (rtl, array); |
| 17015 | |
| 17016 | mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec; |
| 17017 | mem_loc_result->dw_loc_oprnd2.v.val_vec.length |
| 17018 | = length / elt_size; |
| 17019 | mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size; |
| 17020 | mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array; |
| 17021 | } |
| 17022 | } |
| 17023 | break; |
| 17024 | |
| 17025 | case CONST_WIDE_INT: |
| 17026 | if (!dwarf_strict || dwarf_version >= 5) |
| 17027 | { |
| 17028 | dw_die_ref type_die; |
| 17029 | |
| 17030 | type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode)); |
| 17031 | if (type_die == NULL) |
| 17032 | return NULL; |
| 17033 | mem_loc_result = new_loc_descr (op: dwarf_OP (op: DW_OP_const_type), oprnd1: 0, oprnd2: 0); |
| 17034 | mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
| 17035 | mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
| 17036 | mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0; |
| 17037 | mem_loc_result->dw_loc_oprnd2.val_class |
| 17038 | = dw_val_class_wide_int; |
| 17039 | mem_loc_result->dw_loc_oprnd2.v.val_wide |
| 17040 | = alloc_dw_wide_int (w: rtx_mode_t (rtl, mode)); |
| 17041 | } |
| 17042 | break; |
| 17043 | |
| 17044 | case CONST_POLY_INT: |
| 17045 | mem_loc_result = int_loc_descriptor (poly_i: rtx_to_poly_int64 (x: rtl)); |
| 17046 | break; |
| 17047 | |
| 17048 | case EQ: |
| 17049 | mem_loc_result = scompare_loc_descriptor (op: DW_OP_eq, rtl, mem_mode); |
| 17050 | break; |
| 17051 | |
| 17052 | case GE: |
| 17053 | mem_loc_result = scompare_loc_descriptor (op: DW_OP_ge, rtl, mem_mode); |
| 17054 | break; |
| 17055 | |
| 17056 | case GT: |
| 17057 | mem_loc_result = scompare_loc_descriptor (op: DW_OP_gt, rtl, mem_mode); |
| 17058 | break; |
| 17059 | |
| 17060 | case LE: |
| 17061 | mem_loc_result = scompare_loc_descriptor (op: DW_OP_le, rtl, mem_mode); |
| 17062 | break; |
| 17063 | |
| 17064 | case LT: |
| 17065 | mem_loc_result = scompare_loc_descriptor (op: DW_OP_lt, rtl, mem_mode); |
| 17066 | break; |
| 17067 | |
| 17068 | case NE: |
| 17069 | mem_loc_result = scompare_loc_descriptor (op: DW_OP_ne, rtl, mem_mode); |
| 17070 | break; |
| 17071 | |
| 17072 | case GEU: |
| 17073 | mem_loc_result = ucompare_loc_descriptor (op: DW_OP_ge, rtl, mem_mode); |
| 17074 | break; |
| 17075 | |
| 17076 | case GTU: |
| 17077 | mem_loc_result = ucompare_loc_descriptor (op: DW_OP_gt, rtl, mem_mode); |
| 17078 | break; |
| 17079 | |
| 17080 | case LEU: |
| 17081 | mem_loc_result = ucompare_loc_descriptor (op: DW_OP_le, rtl, mem_mode); |
| 17082 | break; |
| 17083 | |
| 17084 | case LTU: |
| 17085 | mem_loc_result = ucompare_loc_descriptor (op: DW_OP_lt, rtl, mem_mode); |
| 17086 | break; |
| 17087 | |
| 17088 | case UMIN: |
| 17089 | case UMAX: |
| 17090 | if (!SCALAR_INT_MODE_P (mode)) |
| 17091 | break; |
| 17092 | /* FALLTHRU */ |
| 17093 | case SMIN: |
| 17094 | case SMAX: |
| 17095 | mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode); |
| 17096 | break; |
| 17097 | |
| 17098 | case ZERO_EXTRACT: |
| 17099 | case SIGN_EXTRACT: |
| 17100 | if (CONST_INT_P (XEXP (rtl, 1)) |
| 17101 | && CONST_INT_P (XEXP (rtl, 2)) |
| 17102 | && is_a <scalar_int_mode> (m: mode, result: &int_mode) |
| 17103 | && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), result: &inner_mode) |
| 17104 | && GET_MODE_SIZE (mode: int_mode) <= DWARF2_ADDR_SIZE |
| 17105 | && GET_MODE_SIZE (mode: inner_mode) <= DWARF2_ADDR_SIZE |
| 17106 | && ((unsigned) INTVAL (XEXP (rtl, 1)) |
| 17107 | + (unsigned) INTVAL (XEXP (rtl, 2)) |
| 17108 | <= GET_MODE_BITSIZE (mode: int_mode))) |
| 17109 | { |
| 17110 | int shift, size; |
| 17111 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode: inner_mode, |
| 17112 | mem_mode, initialized: VAR_INIT_STATUS_INITIALIZED); |
| 17113 | if (op0 == 0) |
| 17114 | break; |
| 17115 | if (GET_CODE (rtl) == SIGN_EXTRACT) |
| 17116 | op = DW_OP_shra; |
| 17117 | else |
| 17118 | op = DW_OP_shr; |
| 17119 | mem_loc_result = op0; |
| 17120 | size = INTVAL (XEXP (rtl, 1)); |
| 17121 | shift = INTVAL (XEXP (rtl, 2)); |
| 17122 | if (BITS_BIG_ENDIAN) |
| 17123 | shift = GET_MODE_BITSIZE (mode: inner_mode) - shift - size; |
| 17124 | if (shift + size != (int) DWARF2_ADDR_SIZE) |
| 17125 | { |
| 17126 | add_loc_descr (list_head: &mem_loc_result, |
| 17127 | descr: int_loc_descriptor (DWARF2_ADDR_SIZE |
| 17128 | - shift - size)); |
| 17129 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_shl, oprnd1: 0, oprnd2: 0)); |
| 17130 | } |
| 17131 | if (size != (int) DWARF2_ADDR_SIZE) |
| 17132 | { |
| 17133 | add_loc_descr (list_head: &mem_loc_result, |
| 17134 | descr: int_loc_descriptor (DWARF2_ADDR_SIZE - size)); |
| 17135 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
| 17136 | } |
| 17137 | } |
| 17138 | break; |
| 17139 | |
| 17140 | case IF_THEN_ELSE: |
| 17141 | { |
| 17142 | dw_loc_descr_ref op2, bra_node, drop_node; |
| 17143 | op0 = mem_loc_descriptor (XEXP (rtl, 0), |
| 17144 | GET_MODE (XEXP (rtl, 0)) == VOIDmode |
| 17145 | ? word_mode : GET_MODE (XEXP (rtl, 0)), |
| 17146 | mem_mode, initialized: VAR_INIT_STATUS_INITIALIZED); |
| 17147 | op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, |
| 17148 | initialized: VAR_INIT_STATUS_INITIALIZED); |
| 17149 | op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode, |
| 17150 | initialized: VAR_INIT_STATUS_INITIALIZED); |
| 17151 | if (op0 == NULL || op1 == NULL || op2 == NULL) |
| 17152 | break; |
| 17153 | |
| 17154 | mem_loc_result = op1; |
| 17155 | add_loc_descr (list_head: &mem_loc_result, descr: op2); |
| 17156 | add_loc_descr (list_head: &mem_loc_result, descr: op0); |
| 17157 | bra_node = new_loc_descr (op: DW_OP_bra, oprnd1: 0, oprnd2: 0); |
| 17158 | add_loc_descr (list_head: &mem_loc_result, descr: bra_node); |
| 17159 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0)); |
| 17160 | drop_node = new_loc_descr (op: DW_OP_drop, oprnd1: 0, oprnd2: 0); |
| 17161 | add_loc_descr (list_head: &mem_loc_result, descr: drop_node); |
| 17162 | bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc; |
| 17163 | bra_node->dw_loc_oprnd1.v.val_loc = drop_node; |
| 17164 | } |
| 17165 | break; |
| 17166 | |
| 17167 | case FLOAT_EXTEND: |
| 17168 | case FLOAT_TRUNCATE: |
| 17169 | case FLOAT: |
| 17170 | case UNSIGNED_FLOAT: |
| 17171 | case FIX: |
| 17172 | case UNSIGNED_FIX: |
| 17173 | if (!dwarf_strict || dwarf_version >= 5) |
| 17174 | { |
| 17175 | dw_die_ref type_die; |
| 17176 | dw_loc_descr_ref cvt; |
| 17177 | |
| 17178 | op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)), |
| 17179 | mem_mode, initialized: VAR_INIT_STATUS_INITIALIZED); |
| 17180 | if (op0 == NULL) |
| 17181 | break; |
| 17182 | if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), result: &int_mode) |
| 17183 | && (GET_CODE (rtl) == FLOAT |
| 17184 | || GET_MODE_SIZE (mode: int_mode) <= DWARF2_ADDR_SIZE)) |
| 17185 | { |
| 17186 | type_die = base_type_for_mode (mode: int_mode, |
| 17187 | GET_CODE (rtl) == UNSIGNED_FLOAT); |
| 17188 | if (type_die == NULL) |
| 17189 | break; |
| 17190 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
| 17191 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
| 17192 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
| 17193 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
| 17194 | add_loc_descr (list_head: &op0, descr: cvt); |
| 17195 | } |
| 17196 | type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX); |
| 17197 | if (type_die == NULL) |
| 17198 | break; |
| 17199 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
| 17200 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
| 17201 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
| 17202 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
| 17203 | add_loc_descr (list_head: &op0, descr: cvt); |
| 17204 | if (is_a <scalar_int_mode> (m: mode, result: &int_mode) |
| 17205 | && (GET_CODE (rtl) == FIX |
| 17206 | || GET_MODE_SIZE (mode: int_mode) < DWARF2_ADDR_SIZE)) |
| 17207 | { |
| 17208 | op0 = convert_descriptor_to_mode (mode: int_mode, op: op0); |
| 17209 | if (op0 == NULL) |
| 17210 | break; |
| 17211 | } |
| 17212 | mem_loc_result = op0; |
| 17213 | } |
| 17214 | break; |
| 17215 | |
| 17216 | case CLZ: |
| 17217 | case CTZ: |
| 17218 | case FFS: |
| 17219 | if (is_a <scalar_int_mode> (m: mode, result: &int_mode)) |
| 17220 | mem_loc_result = clz_loc_descriptor (rtl, mode: int_mode, mem_mode); |
| 17221 | break; |
| 17222 | |
| 17223 | case POPCOUNT: |
| 17224 | case PARITY: |
| 17225 | if (is_a <scalar_int_mode> (m: mode, result: &int_mode)) |
| 17226 | mem_loc_result = popcount_loc_descriptor (rtl, mode: int_mode, mem_mode); |
| 17227 | break; |
| 17228 | |
| 17229 | case BSWAP: |
| 17230 | if (is_a <scalar_int_mode> (m: mode, result: &int_mode)) |
| 17231 | mem_loc_result = bswap_loc_descriptor (rtl, mode: int_mode, mem_mode); |
| 17232 | break; |
| 17233 | |
| 17234 | case ROTATE: |
| 17235 | case ROTATERT: |
| 17236 | if (is_a <scalar_int_mode> (m: mode, result: &int_mode)) |
| 17237 | mem_loc_result = rotate_loc_descriptor (rtl, mode: int_mode, mem_mode); |
| 17238 | break; |
| 17239 | |
| 17240 | case COMPARE: |
| 17241 | /* In theory, we could implement the above. */ |
| 17242 | /* DWARF cannot represent the unsigned compare operations |
| 17243 | natively. */ |
| 17244 | case SS_MULT: |
| 17245 | case US_MULT: |
| 17246 | case SS_DIV: |
| 17247 | case US_DIV: |
| 17248 | case SS_PLUS: |
| 17249 | case US_PLUS: |
| 17250 | case SS_MINUS: |
| 17251 | case US_MINUS: |
| 17252 | case SS_NEG: |
| 17253 | case US_NEG: |
| 17254 | case SS_ABS: |
| 17255 | case SS_ASHIFT: |
| 17256 | case US_ASHIFT: |
| 17257 | case SS_TRUNCATE: |
| 17258 | case US_TRUNCATE: |
| 17259 | case UNORDERED: |
| 17260 | case ORDERED: |
| 17261 | case UNEQ: |
| 17262 | case UNGE: |
| 17263 | case UNGT: |
| 17264 | case UNLE: |
| 17265 | case UNLT: |
| 17266 | case LTGT: |
| 17267 | case FRACT_CONVERT: |
| 17268 | case UNSIGNED_FRACT_CONVERT: |
| 17269 | case SAT_FRACT: |
| 17270 | case UNSIGNED_SAT_FRACT: |
| 17271 | case SQRT: |
| 17272 | case ASM_OPERANDS: |
| 17273 | case VEC_MERGE: |
| 17274 | case VEC_SELECT: |
| 17275 | case VEC_CONCAT: |
| 17276 | case VEC_DUPLICATE: |
| 17277 | case VEC_SERIES: |
| 17278 | case HIGH: |
| 17279 | case FMA: |
| 17280 | case STRICT_LOW_PART: |
| 17281 | case CONST_VECTOR: |
| 17282 | case CONST_FIXED: |
| 17283 | case CLRSB: |
| 17284 | case CLOBBER: |
| 17285 | case SMUL_HIGHPART: |
| 17286 | case UMUL_HIGHPART: |
| 17287 | case BITREVERSE: |
| 17288 | case COPYSIGN: |
| 17289 | break; |
| 17290 | |
| 17291 | case CONST_STRING: |
| 17292 | resolve_one_addr (&rtl); |
| 17293 | goto symref; |
| 17294 | |
| 17295 | /* RTL sequences inside PARALLEL record a series of DWARF operations for |
| 17296 | the expression. An UNSPEC rtx represents a raw DWARF operation, |
| 17297 | new_loc_descr is called for it to build the operation directly. |
| 17298 | Otherwise mem_loc_descriptor is called recursively. */ |
| 17299 | case PARALLEL: |
| 17300 | { |
| 17301 | int index = 0; |
| 17302 | dw_loc_descr_ref exp_result = NULL; |
| 17303 | |
| 17304 | for (; index < XVECLEN (rtl, 0); index++) |
| 17305 | { |
| 17306 | rtx elem = XVECEXP (rtl, 0, index); |
| 17307 | if (GET_CODE (elem) == UNSPEC) |
| 17308 | { |
| 17309 | /* Each DWARF operation UNSPEC contain two operands, if |
| 17310 | one operand is not used for the operation, const0_rtx is |
| 17311 | passed. */ |
| 17312 | gcc_assert (XVECLEN (elem, 0) == 2); |
| 17313 | |
| 17314 | HOST_WIDE_INT dw_op = XINT (elem, 1); |
| 17315 | HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0)); |
| 17316 | HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1)); |
| 17317 | exp_result |
| 17318 | = new_loc_descr (op: (enum dwarf_location_atom) dw_op, oprnd1, |
| 17319 | oprnd2); |
| 17320 | } |
| 17321 | else |
| 17322 | exp_result |
| 17323 | = mem_loc_descriptor (rtl: elem, mode, mem_mode, |
| 17324 | initialized: VAR_INIT_STATUS_INITIALIZED); |
| 17325 | |
| 17326 | if (!mem_loc_result) |
| 17327 | mem_loc_result = exp_result; |
| 17328 | else |
| 17329 | add_loc_descr (list_head: &mem_loc_result, descr: exp_result); |
| 17330 | } |
| 17331 | |
| 17332 | break; |
| 17333 | } |
| 17334 | |
| 17335 | default: |
| 17336 | if (flag_checking) |
| 17337 | { |
| 17338 | print_rtl (stderr, rtl); |
| 17339 | gcc_unreachable (); |
| 17340 | } |
| 17341 | break; |
| 17342 | } |
| 17343 | |
| 17344 | if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED) |
| 17345 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_GNU_uninit, oprnd1: 0, oprnd2: 0)); |
| 17346 | |
| 17347 | return mem_loc_result; |
| 17348 | } |
| 17349 | |
| 17350 | /* Return a descriptor that describes the concatenation of two locations. |
| 17351 | This is typically a complex variable. */ |
| 17352 | |
| 17353 | static dw_loc_descr_ref |
| 17354 | concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized) |
| 17355 | { |
| 17356 | /* At present we only track constant-sized pieces. */ |
| 17357 | unsigned int size0, size1; |
| 17358 | if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (const_value: &size0) |
| 17359 | || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (const_value: &size1)) |
| 17360 | return 0; |
| 17361 | |
| 17362 | dw_loc_descr_ref cc_loc_result = NULL; |
| 17363 | dw_loc_descr_ref x0_ref |
| 17364 | = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED); |
| 17365 | dw_loc_descr_ref x1_ref |
| 17366 | = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED); |
| 17367 | |
| 17368 | if (x0_ref == 0 || x1_ref == 0) |
| 17369 | return 0; |
| 17370 | |
| 17371 | cc_loc_result = x0_ref; |
| 17372 | add_loc_descr_op_piece (list_head: &cc_loc_result, size: size0); |
| 17373 | |
| 17374 | add_loc_descr (list_head: &cc_loc_result, descr: x1_ref); |
| 17375 | add_loc_descr_op_piece (list_head: &cc_loc_result, size: size1); |
| 17376 | |
| 17377 | if (initialized == VAR_INIT_STATUS_UNINITIALIZED) |
| 17378 | add_loc_descr (list_head: &cc_loc_result, descr: new_loc_descr (op: DW_OP_GNU_uninit, oprnd1: 0, oprnd2: 0)); |
| 17379 | |
| 17380 | return cc_loc_result; |
| 17381 | } |
| 17382 | |
| 17383 | /* Return a descriptor that describes the concatenation of N |
| 17384 | locations. */ |
| 17385 | |
| 17386 | static dw_loc_descr_ref |
| 17387 | concatn_loc_descriptor (rtx concatn, enum var_init_status initialized) |
| 17388 | { |
| 17389 | unsigned int i; |
| 17390 | dw_loc_descr_ref cc_loc_result = NULL; |
| 17391 | unsigned int n = XVECLEN (concatn, 0); |
| 17392 | unsigned int size; |
| 17393 | |
| 17394 | for (i = 0; i < n; ++i) |
| 17395 | { |
| 17396 | dw_loc_descr_ref ref; |
| 17397 | rtx x = XVECEXP (concatn, 0, i); |
| 17398 | |
| 17399 | /* At present we only track constant-sized pieces. */ |
| 17400 | if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (const_value: &size)) |
| 17401 | return NULL; |
| 17402 | |
| 17403 | ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED); |
| 17404 | if (ref == NULL) |
| 17405 | return NULL; |
| 17406 | |
| 17407 | add_loc_descr (list_head: &cc_loc_result, descr: ref); |
| 17408 | add_loc_descr_op_piece (list_head: &cc_loc_result, size); |
| 17409 | } |
| 17410 | |
| 17411 | if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED) |
| 17412 | add_loc_descr (list_head: &cc_loc_result, descr: new_loc_descr (op: DW_OP_GNU_uninit, oprnd1: 0, oprnd2: 0)); |
| 17413 | |
| 17414 | return cc_loc_result; |
| 17415 | } |
| 17416 | |
| 17417 | /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer |
| 17418 | for DEBUG_IMPLICIT_PTR RTL. */ |
| 17419 | |
| 17420 | static dw_loc_descr_ref |
| 17421 | implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset) |
| 17422 | { |
| 17423 | dw_loc_descr_ref ret; |
| 17424 | dw_die_ref ref; |
| 17425 | |
| 17426 | if (dwarf_strict && dwarf_version < 5) |
| 17427 | return NULL; |
| 17428 | gcc_assert (VAR_P (DEBUG_IMPLICIT_PTR_DECL (rtl)) |
| 17429 | || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL |
| 17430 | || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL); |
| 17431 | ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl)); |
| 17432 | ret = new_loc_descr (op: dwarf_OP (op: DW_OP_implicit_pointer), oprnd1: 0, oprnd2: offset); |
| 17433 | ret->dw_loc_oprnd2.val_class = dw_val_class_const; |
| 17434 | if (ref) |
| 17435 | { |
| 17436 | ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
| 17437 | ret->dw_loc_oprnd1.v.val_die_ref.die = ref; |
| 17438 | ret->dw_loc_oprnd1.v.val_die_ref.external = 0; |
| 17439 | } |
| 17440 | else |
| 17441 | { |
| 17442 | ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref; |
| 17443 | ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl); |
| 17444 | } |
| 17445 | return ret; |
| 17446 | } |
| 17447 | |
/* Output a proper Dwarf location descriptor for a variable or parameter
   which is either allocated in a register or in a memory location.  For a
   register, we just generate an OP_REG and the register number.  For a
   memory location we provide a Dwarf postfix expression describing how to
   generate the (dynamic) address of the object onto the address stack.

   MODE is mode of the decl if this loc_descriptor is going to be used in
   .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
   allowed, VOIDmode otherwise.

   If we don't know how to describe it, return 0.  */

static dw_loc_descr_ref
loc_descriptor (rtx rtl, machine_mode mode,
		enum var_init_status initialized)
{
  dw_loc_descr_ref loc_result = NULL;
  scalar_int_mode int_mode;

  switch (GET_CODE (rtl))
    {
    case SUBREG:
      /* The case of a subreg may arise when we have a local (register)
	 variable or a formal (register) parameter which doesn't quite fill
	 up an entire register.  For now, just assume that it is
	 legitimate to make the Dwarf info refer to the whole register which
	 contains the given subreg.  */
      if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
	loc_result = loc_descriptor (SUBREG_REG (rtl),
				     GET_MODE (SUBREG_REG (rtl)), initialized);
      else
	goto do_default;
      break;

    case REG:
      loc_result = reg_loc_descriptor (rtl, initialized);
      break;

    case MEM:
      /* Describe the address expression first; if that fails, fall back to
	 the TLS form, and finally try replacing a constant-pool reference
	 with the constant itself.  */
      loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode: get_address_mode (mem: rtl),
				       GET_MODE (rtl), initialized);
      if (loc_result == NULL)
	loc_result = tls_mem_loc_descriptor (mem: rtl);
      if (loc_result == NULL)
	{
	  rtx new_rtl = avoid_constant_pool_reference (rtl);
	  if (new_rtl != rtl)
	    loc_result = loc_descriptor (rtl: new_rtl, mode, initialized);
	}
      break;

    case CONCAT:
      loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
					  initialized);
      break;

    case CONCATN:
      loc_result = concatn_loc_descriptor (concatn: rtl, initialized);
      break;

    case VAR_LOCATION:
      /* Single part.  */
      if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
	{
	  rtx loc = PAT_VAR_LOCATION_LOC (rtl);
	  if (GET_CODE (loc) == EXPR_LIST)
	    loc = XEXP (loc, 0);
	  loc_result = loc_descriptor (rtl: loc, mode, initialized);
	  break;
	}

      /* Multiple parts: handle the inner PARALLEL below.  */
      rtl = XEXP (rtl, 1);
      /* FALLTHRU */

    case PARALLEL:
      {
	rtvec par_elems = XVEC (rtl, 0);
	int num_elem = GET_NUM_ELEM (par_elems);
	machine_mode mode;
	int i, size;

	/* Create the first one, so we have something to add to.  */
	loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
				     VOIDmode, initialized);
	if (loc_result == NULL)
	  return NULL;
	mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
	/* At present we only track constant-sized pieces.  */
	if (!GET_MODE_SIZE (mode).is_constant (const_value: &size))
	  return NULL;
	add_loc_descr_op_piece (list_head: &loc_result, size);
	/* Each further element becomes <loc> DW_OP_piece <size>; any
	   undescribable element invalidates the whole descriptor.  */
	for (i = 1; i < num_elem; i++)
	  {
	    dw_loc_descr_ref temp;

	    temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
				   VOIDmode, initialized);
	    if (temp == NULL)
	      return NULL;
	    add_loc_descr (list_head: &loc_result, descr: temp);
	    mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
	    /* At present we only track constant-sized pieces.  */
	    if (!GET_MODE_SIZE (mode).is_constant (const_value: &size))
	      return NULL;
	    add_loc_descr_op_piece (list_head: &loc_result, size);
	  }
      }
      break;

    case CONST_INT:
      /* A constant needs a known size, i.e. a non-void, non-BLK mode, to
	 be represented as a value.  */
      if (mode != VOIDmode && mode != BLKmode)
	{
	  int_mode = as_a <scalar_int_mode> (m: mode);
	  loc_result = address_of_int_loc_descriptor (size: GET_MODE_SIZE (mode: int_mode),
						      INTVAL (rtl));
	}
      break;

    case CONST_DOUBLE:
      if (mode == VOIDmode)
	mode = GET_MODE (rtl);

      /* DW_OP_implicit_value is a DWARF 4 addition; honor -gstrict-dwarf
	 for earlier versions.  */
      if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
	{
	  gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));

	  /* Note that a CONST_DOUBLE rtx could represent either an integer
	     or a floating-point constant.  A CONST_DOUBLE is used whenever
	     the constant requires more than one word in order to be
	     adequately represented.  We output CONST_DOUBLEs as blocks.  */
	  scalar_mode smode = as_a <scalar_mode> (m: mode);
	  loc_result = new_loc_descr (op: DW_OP_implicit_value,
				      oprnd1: GET_MODE_SIZE (mode: smode), oprnd2: 0);
#if TARGET_SUPPORTS_WIDE_INT == 0
	  if (!SCALAR_FLOAT_MODE_P (smode))
	    {
	      loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
	      loc_result->dw_loc_oprnd2.v.val_double
	        = rtx_to_double_int (rtl);
	    }
	  else
#endif
	    {
	      unsigned int length = GET_MODE_SIZE (mode: smode);
	      unsigned char *array = ggc_vec_alloc<unsigned char> (c: length);
	      unsigned int elt_size = insert_float (rtl, array);

	      loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
	      loc_result->dw_loc_oprnd2.v.val_vec.length = length / elt_size;
	      loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
	      loc_result->dw_loc_oprnd2.v.val_vec.array = array;
	    }
	}
      break;

    case CONST_WIDE_INT:
      if (mode == VOIDmode)
	mode = GET_MODE (rtl);

      if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
	{
	  int_mode = as_a <scalar_int_mode> (m: mode);
	  loc_result = new_loc_descr (op: DW_OP_implicit_value,
				      oprnd1: GET_MODE_SIZE (mode: int_mode), oprnd2: 0);
	  loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
	  loc_result->dw_loc_oprnd2.v.val_wide
	    = alloc_dw_wide_int (w: rtx_mode_t (rtl, int_mode));
	}
      break;

    case CONST_VECTOR:
      if (mode == VOIDmode)
	mode = GET_MODE (rtl);

      if (mode != VOIDmode
	  /* The combination of a length and byte elt_size doesn't extend
	     naturally to boolean vectors, where several elements are packed
	     into the same byte.  */
	  && GET_MODE_CLASS (mode) != MODE_VECTOR_BOOL
	  && (dwarf_version >= 4 || !dwarf_strict))
	{
	  /* Variable-length vectors can't be emitted as a fixed-size
	     block.  */
	  unsigned int length;
	  if (!CONST_VECTOR_NUNITS (rtl).is_constant (const_value: &length))
	    return NULL;

	  unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
	  unsigned char *array
	    = ggc_vec_alloc<unsigned char> (c: length * elt_size);
	  unsigned int i;
	  unsigned char *p;
	  machine_mode imode = GET_MODE_INNER (mode);

	  gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
	  /* Serialize the elements into ARRAY, element by element.  */
	  switch (GET_MODE_CLASS (mode))
	    {
	    case MODE_VECTOR_INT:
	      for (i = 0, p = array; i < length; i++, p += elt_size)
		{
		  rtx elt = CONST_VECTOR_ELT (rtl, i);
		  insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
		}
	      break;

	    case MODE_VECTOR_FLOAT:
	      for (i = 0, p = array; i < length; i++, p += elt_size)
		{
		  rtx elt = CONST_VECTOR_ELT (rtl, i);
		  insert_float (elt, p);
		}
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  loc_result = new_loc_descr (op: DW_OP_implicit_value,
				      oprnd1: length * elt_size, oprnd2: 0);
	  loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
	  loc_result->dw_loc_oprnd2.v.val_vec.length = length;
	  loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
	  loc_result->dw_loc_oprnd2.v.val_vec.array = array;
	}
      break;

    case CONST:
      /* A wrapped constant is described as the constant itself; anything
	 else falls through to the symbolic cases below.  */
      if (mode == VOIDmode
	  || CONST_SCALAR_INT_P (XEXP (rtl, 0))
	  || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
	  || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
	{
	  loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
	  break;
	}
      /* FALLTHROUGH */
    case SYMBOL_REF:
      if (!const_ok_for_output (rtl))
	break;
      /* FALLTHROUGH */
    case LABEL_REF:
      /* Emit DW_OP_addr followed by DW_OP_stack_value; needs DWARF 4 or
	 non-strict DWARF, and an address-sized scalar mode.  */
      if (is_a <scalar_int_mode> (m: mode, result: &int_mode)
	  && GET_MODE_SIZE (mode: int_mode) == DWARF2_ADDR_SIZE
	  && (dwarf_version >= 4 || !dwarf_strict))
	{
	  loc_result = new_addr_loc_descr (addr: rtl, dtprel: dtprel_false);
	  add_loc_descr (list_head: &loc_result, descr: new_loc_descr (op: DW_OP_stack_value, oprnd1: 0, oprnd2: 0));
	  vec_safe_push (v&: used_rtx_array, obj: rtl);
	}
      break;

    case DEBUG_IMPLICIT_PTR:
      loc_result = implicit_ptr_descriptor (rtl, offset: 0);
      break;

    case PLUS:
      /* (plus (debug_implicit_ptr) (const_int N)) is an implicit pointer
	 at offset N.  */
      if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
	  && CONST_INT_P (XEXP (rtl, 1)))
	{
	  loc_result
	    = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
	  break;
	}
      /* FALLTHRU */
    do_default:
    default:
      if ((is_a <scalar_int_mode> (m: mode, result: &int_mode)
	   && GET_MODE (rtl) == int_mode
	   && GET_MODE_SIZE (mode: int_mode) <= DWARF2_ADDR_SIZE
	   && dwarf_version >= 4)
	  || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
	{
	  /* Value expression.  */
	  loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
	  if (loc_result)
	    add_loc_descr (list_head: &loc_result,
			   descr: new_loc_descr (op: DW_OP_stack_value, oprnd1: 0, oprnd2: 0));
	}
      break;
    }

  return loc_result;
}
| 17729 | |
| 17730 | /* We need to figure out what section we should use as the base for the |
| 17731 | address ranges where a given location is valid. |
| 17732 | 1. If this particular DECL has a section associated with it, use that. |
| 17733 | 2. If this function has a section associated with it, use that. |
| 17734 | 3. Otherwise, use the text section. |
| 17735 | XXX: If you split a variable across multiple sections, we won't notice. */ |
| 17736 | |
| 17737 | static const char * |
| 17738 | secname_for_decl (const_tree decl) |
| 17739 | { |
| 17740 | const char *secname; |
| 17741 | |
| 17742 | if (VAR_OR_FUNCTION_DECL_P (decl) |
| 17743 | && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl)) |
| 17744 | && DECL_SECTION_NAME (decl)) |
| 17745 | secname = DECL_SECTION_NAME (decl); |
| 17746 | else if (current_function_decl && DECL_SECTION_NAME (current_function_decl)) |
| 17747 | { |
| 17748 | if (in_cold_section_p) |
| 17749 | { |
| 17750 | section *sec = current_function_section (); |
| 17751 | if (sec->common.flags & SECTION_NAMED) |
| 17752 | return sec->named.name; |
| 17753 | } |
| 17754 | secname = DECL_SECTION_NAME (current_function_decl); |
| 17755 | } |
| 17756 | else if (cfun && in_cold_section_p) |
| 17757 | secname = crtl->subsections.cold_section_label; |
| 17758 | else |
| 17759 | secname = text_section_label; |
| 17760 | |
| 17761 | return secname; |
| 17762 | } |
| 17763 | |
| 17764 | /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */ |
| 17765 | |
| 17766 | static bool |
| 17767 | decl_by_reference_p (tree decl) |
| 17768 | { |
| 17769 | return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL |
| 17770 | || VAR_P (decl)) |
| 17771 | && DECL_BY_REFERENCE (decl)); |
| 17772 | } |
| 17773 | |
/* Helper function for dw_loc_list.  Compute proper Dwarf location descriptor
   for VARLOC.

   WANT_ADDRESS selects what the resulting expression must compute:
   if it is 0 the value itself is wanted (an address result is
   dereferenced), if non-zero an address is wanted, and the special value
   2 additionally permits turning a value into an address-like result via
   DW_OP_stack_value (DWARF 4, or non-strict DWARF).  Returns NULL on
   failure.  */

static dw_loc_descr_ref
dw_loc_list_1 (tree loc, rtx varloc, int want_address,
	       enum var_init_status initialized)
{
  int have_address = 0;
  dw_loc_descr_ref descr;
  machine_mode mode;

  if (want_address != 2)
    {
      gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
      /* Single part.  */
      if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
	{
	  varloc = PAT_VAR_LOCATION_LOC (varloc);
	  /* An EXPR_LIST wraps the actual location; unwrap it.  */
	  if (GET_CODE (varloc) == EXPR_LIST)
	    varloc = XEXP (varloc, 0);
	  mode = GET_MODE (varloc);
	  if (MEM_P (varloc))
	    {
	      rtx addr = XEXP (varloc, 0);
	      descr = mem_loc_descriptor (rtl: addr, mode: get_address_mode (mem: varloc),
					  mem_mode: mode, initialized);
	      if (descr)
		have_address = 1;
	      else
		{
		  /* Describing the address failed; if the MEM is really
		     a constant-pool reference, try describing the
		     constant's value instead.  */
		  rtx x = avoid_constant_pool_reference (varloc);
		  if (x != varloc)
		    descr = mem_loc_descriptor (rtl: x, mode, VOIDmode,
						initialized);
		}
	    }
	  else
	    descr = mem_loc_descriptor (rtl: varloc, mode, VOIDmode, initialized);
	}
      else
	/* Multi-part (PARALLEL) locations are not handled here.  */
	return 0;
    }
  else
    {
      /* WANT_ADDRESS == 2: describe the whole object, possibly in
	 multiple pieces; use the decl's mode for the descriptor.  */
      if (GET_CODE (varloc) == VAR_LOCATION)
	mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
      else
	mode = DECL_MODE (loc);
      descr = loc_descriptor (rtl: varloc, mode, initialized);
      have_address = 1;
    }

  if (!descr)
    return 0;

  /* If we computed a value but an address-like result is acceptable,
     mark the expression result as a stack value (DWARF 4+ or lax).  */
  if (want_address == 2 && !have_address
      && (dwarf_version >= 4 || !dwarf_strict))
    {
      if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
	{
	  expansion_failed (expr: loc, NULL_RTX,
			    reason: "DWARF address size mismatch" );
	  return 0;
	}
      add_loc_descr (list_head: &descr, descr: new_loc_descr (op: DW_OP_stack_value, oprnd1: 0, oprnd2: 0));
      have_address = 1;
    }
  /* Show if we can't fill the request for an address.  */
  if (want_address && !have_address)
    {
      expansion_failed (expr: loc, NULL_RTX,
			reason: "Want address and only have value" );
      return 0;
    }

  /* If we've got an address and don't want one, dereference.  */
  if (!want_address && have_address)
    {
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
      enum dwarf_location_atom op;

      /* A dereference can fetch at most DWARF2_ADDR_SIZE bytes;
	 size == -1 means the size is not a compile-time constant.  */
      if (size > DWARF2_ADDR_SIZE || size == -1)
	{
	  expansion_failed (expr: loc, NULL_RTX,
			    reason: "DWARF address size mismatch" );
	  return 0;
	}
      else if (size == DWARF2_ADDR_SIZE)
	op = DW_OP_deref;
      else
	op = DW_OP_deref_size;

      add_loc_descr (list_head: &descr, descr: new_loc_descr (op, oprnd1: size, oprnd2: 0));
    }

  return descr;
}
| 17871 | |
| 17872 | /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL |
| 17873 | if it is not possible. */ |
| 17874 | |
| 17875 | static dw_loc_descr_ref |
| 17876 | new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset) |
| 17877 | { |
| 17878 | if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0) |
| 17879 | return new_loc_descr (op: DW_OP_piece, oprnd1: bitsize / BITS_PER_UNIT, oprnd2: 0); |
| 17880 | else if (dwarf_version >= 3 || !dwarf_strict) |
| 17881 | return new_loc_descr (op: DW_OP_bit_piece, oprnd1: bitsize, oprnd2: offset); |
| 17882 | else |
| 17883 | return NULL; |
| 17884 | } |
| 17885 | |
/* Helper function for dw_loc_list.  Compute proper Dwarf location descriptor
   for VAR_LOC_NOTE for variable DECL that has been optimized by SRA.

   LOC is a chain of pieces (see decl_piece_bitsize /
   decl_piece_varloc_ptr); each piece either has a known location or is
   missing, in which case it is represented as an empty DW_OP_*piece
   (i.e. "optimized out").  Returns NULL if the pieces cannot be stitched
   into a single consistent expression.  */

static dw_loc_descr_ref
dw_sra_loc_expr (tree decl, rtx loc)
{
  rtx p;
  unsigned HOST_WIDE_INT padsize = 0;
  dw_loc_descr_ref descr, *descr_tail;
  unsigned HOST_WIDE_INT decl_size;
  rtx varloc;
  enum var_init_status initialized;

  /* We need a constant total size in bits to validate the pieces.  */
  if (DECL_SIZE (decl) == NULL
      || !tree_fits_uhwi_p (DECL_SIZE (decl)))
    return NULL;

  decl_size = tree_to_uhwi (DECL_SIZE (decl));
  descr = NULL;
  descr_tail = &descr;

  for (p = loc; p; p = XEXP (p, 1))
    {
      unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (piece: p);
      rtx loc_note = *decl_piece_varloc_ptr (piece: p);
      dw_loc_descr_ref cur_descr;
      dw_loc_descr_ref *tail, last = NULL;
      unsigned HOST_WIDE_INT opsize = 0;

      /* A piece with no location contributes only padding.  */
      if (loc_note == NULL_RTX
	  || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
	{
	  padsize += bitsize;
	  continue;
	}
      initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
      varloc = NOTE_VAR_LOCATION (loc_note);
      cur_descr = dw_loc_list_1 (loc: decl, varloc, want_address: 2, initialized);
      if (cur_descr == NULL)
	{
	  padsize += bitsize;
	  continue;
	}

      /* Check that cur_descr either doesn't use
	 DW_OP_*piece operations, or their sum is equal
	 to bitsize.  Otherwise we can't embed it.  */
      for (tail = &cur_descr; *tail != NULL;
	   tail = &(*tail)->dw_loc_next)
	if ((*tail)->dw_loc_opc == DW_OP_piece)
	  {
	    opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
		      * BITS_PER_UNIT;
	    last = *tail;
	  }
	else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
	  {
	    opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
	    last = *tail;
	  }

      if (last != NULL && opsize != bitsize)
	{
	  padsize += bitsize;
	  /* Discard the current piece of the descriptor and release any
	     addr_table entries it uses.  */
	  remove_loc_list_addr_table_entries (descr: cur_descr);
	  continue;
	}

      /* If there is a hole, add DW_OP_*piece after empty DWARF
	 expression, which means that those bits are optimized out.  */
      if (padsize)
	{
	  /* More padding than the whole decl means inconsistent pieces;
	     give up on the entire expression.  */
	  if (padsize > decl_size)
	    {
	      remove_loc_list_addr_table_entries (descr: cur_descr);
	      goto discard_descr;
	    }
	  decl_size -= padsize;
	  *descr_tail = new_loc_descr_op_bit_piece (bitsize: padsize, offset: 0);
	  if (*descr_tail == NULL)
	    {
	      remove_loc_list_addr_table_entries (descr: cur_descr);
	      goto discard_descr;
	    }
	  descr_tail = &(*descr_tail)->dw_loc_next;
	  padsize = 0;
	}
      /* Splice the piece's expression onto the tail of the result.  */
      *descr_tail = cur_descr;
      descr_tail = tail;
      if (bitsize > decl_size)
	goto discard_descr;
      decl_size -= bitsize;
      /* If the piece expression had no DW_OP_*piece of its own, we must
	 append one, computing a possible big-endian bit offset first.  */
      if (last == NULL)
	{
	  HOST_WIDE_INT offset = 0;
	  if (GET_CODE (varloc) == VAR_LOCATION
	      && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
	    {
	      varloc = PAT_VAR_LOCATION_LOC (varloc);
	      if (GET_CODE (varloc) == EXPR_LIST)
		varloc = XEXP (varloc, 0);
	    }
	  /* Strip wrappers to find the underlying location RTX.  */
	  do
	    {
	      if (GET_CODE (varloc) == CONST
		  || GET_CODE (varloc) == SIGN_EXTEND
		  || GET_CODE (varloc) == ZERO_EXTEND)
		varloc = XEXP (varloc, 0);
	      else if (GET_CODE (varloc) == SUBREG)
		varloc = SUBREG_REG (varloc);
	      else
		break;
	    }
	  while (1);
	  /* DW_OP_bit_size offset should be zero for register
	     or implicit location descriptions and empty location
	     descriptions, but for memory addresses needs big endian
	     adjustment.  */
	  if (MEM_P (varloc))
	    {
	      unsigned HOST_WIDE_INT memsize;
	      if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (const_value: &memsize))
		goto discard_descr;
	      memsize *= BITS_PER_UNIT;
	      if (memsize != bitsize)
		{
		  if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
		      && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
		    goto discard_descr;
		  if (memsize < bitsize)
		    goto discard_descr;
		  if (BITS_BIG_ENDIAN)
		    offset = memsize - bitsize;
		}
	    }

	  *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
	  if (*descr_tail == NULL)
	    goto discard_descr;
	  descr_tail = &(*descr_tail)->dw_loc_next;
	}
    }

  /* If there were any non-empty expressions, add padding till the end of
     the decl.  */
  if (descr != NULL && decl_size != 0)
    {
      *descr_tail = new_loc_descr_op_bit_piece (bitsize: decl_size, offset: 0);
      if (*descr_tail == NULL)
	goto discard_descr;
    }
  return descr;

discard_descr:
  /* Discard the descriptor and release any addr_table entries it uses.  */
  remove_loc_list_addr_table_entries (descr);
  return NULL;
}
| 18046 | |
/* Return the dwarf representation of the location list LOC_LIST of
   DECL.  WANT_ADDRESS has the same meaning as in loc_list_from_tree
   function.

   Each node of LOC_LIST yields one range entry (or two, when the range
   straddles a hot/cold partition switch).  Returns NULL when no node
   could be translated.  */

static dw_loc_list_ref
dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
{
  const char *endname, *secname;
  var_loc_view endview;
  rtx varloc;
  enum var_init_status initialized;
  struct var_loc_node *node;
  dw_loc_descr_ref descr;
  char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
  dw_loc_list_ref list = NULL;
  dw_loc_list_ref *listp = &list;

  /* Now that we know what section we are using for a base,
     actually construct the list of locations.
     The first location information is what is passed to the
     function that creates the location list, and the remaining
     locations just get added on to that list.
     Note that we only know the start address for a location
     (IE location changes), so to build the range, we use
     the range [current location start, next location start].
     This means we have to special case the last node, and generate
     a range of [last location start, end of function label].  */

  /* With hot/cold partitioning, the base section depends on which
     partition the first ranges live in; temporarily adjust
     in_cold_section_p so secname_for_decl picks the right one.  */
  if (cfun && crtl->has_bb_partition)
    {
      bool save_in_cold_section_p = in_cold_section_p;
      in_cold_section_p = first_function_block_is_cold;
      if (loc_list->last_before_switch == NULL)
	in_cold_section_p = !in_cold_section_p;
      secname = secname_for_decl (decl);
      in_cold_section_p = save_in_cold_section_p;
    }
  else
    secname = secname_for_decl (decl);

  for (node = loc_list->first; node; node = node->next)
    {
      bool range_across_switch = false;
      if (GET_CODE (node->loc) == EXPR_LIST
	  || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
	{
	  if (GET_CODE (node->loc) == EXPR_LIST)
	    {
	      descr = NULL;
	      /* This requires DW_OP_{,bit_}piece, which is not usable
		 inside DWARF expressions.  */
	      if (want_address == 2)
		descr = dw_sra_loc_expr (decl, loc: node->loc);
	    }
	  else
	    {
	      initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
	      varloc = NOTE_VAR_LOCATION (node->loc);
	      descr = dw_loc_list_1 (loc: decl, varloc, want_address, initialized);
	    }
	  if (descr)
	    {
	      /* If section switch happens in between node->label
		 and node->next->label (or end of function) and
		 we can't emit it as a single entry list,
		 emit two ranges, first one ending at the end
		 of first partition and second one starting at the
		 beginning of second partition.  */
	      if (node == loc_list->last_before_switch
		  && (node != loc_list->first || loc_list->first->next
		      /* If we are to emit a view number, we will emit
			 a loclist rather than a single location
			 expression for the entire function (see
			 loc_list_has_views), so we have to split the
			 range that straddles across partitions.  */
		      || !ZERO_VIEW_P (node->view))
		  && current_function_decl)
		{
		  endname = cfun->fde->dw_fde_end;
		  endview = 0;
		  range_across_switch = true;
		}
	      /* The variable has a location between NODE->LABEL and
		 NODE->NEXT->LABEL.  */
	      else if (node->next)
		endname = node->next->label, endview = node->next->view;
	      /* If the variable has a location at the last label
		 it keeps its location until the end of function.  */
	      else if (!current_function_decl)
		endname = text_end_label, endview = 0;
	      else
		{
		  /* Inside a function: synthesize the function-end label.  */
		  ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
					       current_function_funcdef_no);
		  endname = ggc_strdup (label_id);
		  endview = 0;
		}

	      *listp = new_loc_list (expr: descr, begin: node->label, vbegin: node->view,
				     end: endname, vend: endview, section: secname);
	      /* Keep an empty-range first entry for PARM_DECLs: it still
		 documents the parameter's entry-point location.  */
	      if (TREE_CODE (decl) == PARM_DECL
		  && node == loc_list->first
		  && NOTE_P (node->loc)
		  && strcmp (s1: node->label, s2: endname) == 0)
		(*listp)->force = true;
	      listp = &(*listp)->dw_loc_next;
	    }
	}

      /* After the partition switch, subsequent ranges are based in the
	 other partition's section.  */
      if (cfun
	  && crtl->has_bb_partition
	  && node == loc_list->last_before_switch)
	{
	  bool save_in_cold_section_p = in_cold_section_p;
	  in_cold_section_p = !first_function_block_is_cold;
	  secname = secname_for_decl (decl);
	  in_cold_section_p = save_in_cold_section_p;
	}

      /* Emit the second half of a range split across the partition
	 switch, starting at the second partition's begin label.  */
      if (range_across_switch)
	{
	  if (GET_CODE (node->loc) == EXPR_LIST)
	    descr = dw_sra_loc_expr (decl, loc: node->loc);
	  else
	    {
	      initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
	      varloc = NOTE_VAR_LOCATION (node->loc);
	      descr = dw_loc_list_1 (loc: decl, varloc, want_address,
				     initialized);
	    }
	  gcc_assert (descr);
	  /* The variable has a location between NODE->LABEL and
	     NODE->NEXT->LABEL.  */
	  if (node->next)
	    endname = node->next->label, endview = node->next->view;
	  else
	    endname = cfun->fde->dw_fde_second_end, endview = 0;
	  *listp = new_loc_list (expr: descr, cfun->fde->dw_fde_second_begin, vbegin: 0,
				 end: endname, vend: endview, section: secname);
	  listp = &(*listp)->dw_loc_next;
	}
    }

  /* Try to avoid the overhead of a location list emitting a location
     expression instead, but only if we didn't have more than one
     location entry in the first place.  If some entries were not
     representable, we don't want to pretend a single entry that was
     applies to the entire scope in which the variable is
     available.  */
  if (list && loc_list->first->next)
    gen_llsym (list);
  else
    maybe_gen_llsym (list);

  return list;
}
| 18203 | |
| 18204 | /* Return true if the loc_list has only single element and thus |
| 18205 | can be represented as location description. */ |
| 18206 | |
| 18207 | static bool |
| 18208 | single_element_loc_list_p (dw_loc_list_ref list) |
| 18209 | { |
| 18210 | gcc_assert (!list->dw_loc_next || list->ll_symbol); |
| 18211 | return !list->ll_symbol; |
| 18212 | } |
| 18213 | |
| 18214 | /* Duplicate a single element of location list. */ |
| 18215 | |
| 18216 | static inline dw_loc_descr_ref |
| 18217 | copy_loc_descr (dw_loc_descr_ref ref) |
| 18218 | { |
| 18219 | dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> (); |
| 18220 | memcpy (dest: copy, src: ref, n: sizeof (dw_loc_descr_node)); |
| 18221 | return copy; |
| 18222 | } |
| 18223 | |
| 18224 | /* To each location in list LIST append loc descr REF. */ |
| 18225 | |
| 18226 | static void |
| 18227 | add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref) |
| 18228 | { |
| 18229 | dw_loc_descr_ref copy; |
| 18230 | add_loc_descr (list_head: &list->expr, descr: ref); |
| 18231 | list = list->dw_loc_next; |
| 18232 | while (list) |
| 18233 | { |
| 18234 | copy = copy_loc_descr (ref); |
| 18235 | add_loc_descr (list_head: &list->expr, descr: copy); |
| 18236 | while (copy->dw_loc_next) |
| 18237 | copy = copy->dw_loc_next = copy_loc_descr (ref: copy->dw_loc_next); |
| 18238 | list = list->dw_loc_next; |
| 18239 | } |
| 18240 | } |
| 18241 | |
/* To each location in list LIST prepend loc descr REF.  */

static void
prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
{
  dw_loc_descr_ref copy;
  /* Remember the original head of the first entry's expression: it marks
     where the prepended REF chain ends, so later copies know when to
     stop deep-copying and splice in their own original expression.  */
  dw_loc_descr_ref ref_end = list->expr;
  /* First entry: link REF in front of its existing expression.  */
  add_loc_descr (list_head: &ref, descr: list->expr);
  list->expr = ref;
  list = list->dw_loc_next;
  while (list)
    {
      dw_loc_descr_ref end = list->expr;
      /* Deep-copy the REF chain (now terminated by ref_end) and make the
	 copy the new head of this entry, re-attaching its original
	 expression at the end.  */
      list->expr = copy = copy_loc_descr (ref);
      while (copy->dw_loc_next != ref_end)
	copy = copy->dw_loc_next = copy_loc_descr (ref: copy->dw_loc_next);
      copy->dw_loc_next = end;
      list = list->dw_loc_next;
    }
}
| 18262 | |
| 18263 | /* Given two lists RET and LIST |
| 18264 | produce location list that is result of adding expression in LIST |
| 18265 | to expression in RET on each position in program. |
| 18266 | Might be destructive on both RET and LIST. |
| 18267 | |
| 18268 | TODO: We handle only simple cases of RET or LIST having at most one |
| 18269 | element. General case would involve sorting the lists in program order |
| 18270 | and merging them that will need some additional work. |
| 18271 | Adding that will improve quality of debug info especially for SRA-ed |
| 18272 | structures. */ |
| 18273 | |
| 18274 | static void |
| 18275 | add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list) |
| 18276 | { |
| 18277 | if (!list) |
| 18278 | return; |
| 18279 | if (!*ret) |
| 18280 | { |
| 18281 | *ret = list; |
| 18282 | return; |
| 18283 | } |
| 18284 | if (!list->dw_loc_next) |
| 18285 | { |
| 18286 | add_loc_descr_to_each (list: *ret, ref: list->expr); |
| 18287 | return; |
| 18288 | } |
| 18289 | if (!(*ret)->dw_loc_next) |
| 18290 | { |
| 18291 | prepend_loc_descr_to_each (list, ref: (*ret)->expr); |
| 18292 | *ret = list; |
| 18293 | return; |
| 18294 | } |
| 18295 | expansion_failed (NULL_TREE, NULL_RTX, |
| 18296 | reason: "Don't know how to merge two non-trivial" |
| 18297 | " location lists.\n" ); |
| 18298 | *ret = NULL; |
| 18299 | return; |
| 18300 | } |
| 18301 | |
| 18302 | /* LOC is constant expression. Try a luck, look it up in constant |
| 18303 | pool and return its loc_descr of its address. */ |
| 18304 | |
| 18305 | static dw_loc_descr_ref |
| 18306 | cst_pool_loc_descr (tree loc) |
| 18307 | { |
| 18308 | /* Get an RTL for this, if something has been emitted. */ |
| 18309 | rtx rtl = lookup_constant_def (loc); |
| 18310 | |
| 18311 | if (!rtl || !MEM_P (rtl)) |
| 18312 | { |
| 18313 | gcc_assert (!rtl); |
| 18314 | return 0; |
| 18315 | } |
| 18316 | gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF); |
| 18317 | |
| 18318 | /* TODO: We might get more coverage if we was actually delaying expansion |
| 18319 | of all expressions till end of compilation when constant pools are fully |
| 18320 | populated. */ |
| 18321 | if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0)))) |
| 18322 | { |
| 18323 | expansion_failed (expr: loc, NULL_RTX, |
| 18324 | reason: "CST value in contant pool but not marked." ); |
| 18325 | return 0; |
| 18326 | } |
| 18327 | return mem_loc_descriptor (XEXP (rtl, 0), mode: get_address_mode (mem: rtl), |
| 18328 | GET_MODE (rtl), initialized: VAR_INIT_STATUS_INITIALIZED); |
| 18329 | } |
| 18330 | |
| 18331 | /* Return dw_loc_list representing address of addr_expr LOC |
| 18332 | by looking for inner INDIRECT_REF expression and turning |
| 18333 | it into simple arithmetics. |
| 18334 | |
| 18335 | See loc_list_from_tree for the meaning of CONTEXT. */ |
| 18336 | |
| 18337 | static dw_loc_list_ref |
| 18338 | loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev, |
| 18339 | loc_descr_context *context) |
| 18340 | { |
| 18341 | tree obj, offset; |
| 18342 | poly_int64 bitsize, bitpos, bytepos; |
| 18343 | machine_mode mode; |
| 18344 | int unsignedp, reversep, volatilep = 0; |
| 18345 | dw_loc_list_ref list_ret = NULL, list_ret1 = NULL; |
| 18346 | |
| 18347 | obj = get_inner_reference (TREE_OPERAND (loc, 0), |
| 18348 | &bitsize, &bitpos, &offset, &mode, |
| 18349 | &unsignedp, &reversep, &volatilep); |
| 18350 | STRIP_NOPS (obj); |
| 18351 | if (!multiple_p (a: bitpos, BITS_PER_UNIT, multiple: &bytepos)) |
| 18352 | { |
| 18353 | expansion_failed (expr: loc, NULL_RTX, reason: "bitfield access" ); |
| 18354 | return 0; |
| 18355 | } |
| 18356 | if (!INDIRECT_REF_P (obj)) |
| 18357 | { |
| 18358 | expansion_failed (expr: obj, |
| 18359 | NULL_RTX, reason: "no indirect ref in inner refrence" ); |
| 18360 | return 0; |
| 18361 | } |
| 18362 | if (!offset && known_eq (bitpos, 0)) |
| 18363 | list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1, |
| 18364 | context); |
| 18365 | else if (toplev |
| 18366 | && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE |
| 18367 | && (dwarf_version >= 4 || !dwarf_strict)) |
| 18368 | { |
| 18369 | list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context); |
| 18370 | if (!list_ret) |
| 18371 | return 0; |
| 18372 | if (offset) |
| 18373 | { |
| 18374 | /* Variable offset. */ |
| 18375 | list_ret1 = loc_list_from_tree (offset, 0, context); |
| 18376 | if (list_ret1 == 0) |
| 18377 | return 0; |
| 18378 | add_loc_list (ret: &list_ret, list: list_ret1); |
| 18379 | if (!list_ret) |
| 18380 | return 0; |
| 18381 | add_loc_descr_to_each (list: list_ret, |
| 18382 | ref: new_loc_descr (op: DW_OP_plus, oprnd1: 0, oprnd2: 0)); |
| 18383 | } |
| 18384 | HOST_WIDE_INT value; |
| 18385 | if (bytepos.is_constant (const_value: &value) && value > 0) |
| 18386 | add_loc_descr_to_each (list: list_ret, |
| 18387 | ref: new_loc_descr (op: DW_OP_plus_uconst, oprnd1: value, oprnd2: 0)); |
| 18388 | else if (maybe_ne (a: bytepos, b: 0)) |
| 18389 | loc_list_plus_const (list_head: list_ret, offset: bytepos); |
| 18390 | add_loc_descr_to_each (list: list_ret, |
| 18391 | ref: new_loc_descr (op: DW_OP_stack_value, oprnd1: 0, oprnd2: 0)); |
| 18392 | } |
| 18393 | return list_ret; |
| 18394 | } |
| 18395 | |
| 18396 | /* Set LOC to the next operation that is not a DW_OP_nop operation. In the case |
| 18397 | all operations from LOC are nops, move to the last one. Insert in NOPS all |
| 18398 | operations that are skipped. */ |
| 18399 | |
| 18400 | static void |
| 18401 | loc_descr_to_next_no_nop (dw_loc_descr_ref &loc, |
| 18402 | hash_set<dw_loc_descr_ref> &nops) |
| 18403 | { |
| 18404 | while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop) |
| 18405 | { |
| 18406 | nops.add (k: loc); |
| 18407 | loc = loc->dw_loc_next; |
| 18408 | } |
| 18409 | } |
| 18410 | |
/* Helper for loc_descr_without_nops: free the location description operation
   LOC.  Used as a hash_set traversal callback; DATA is unused.  */

bool
free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
{
  /* Release the node to the GC allocator; returning true tells
     hash_set::traverse to keep going.  */
  ggc_free (loc);
  return true;
}
| 18420 | |
/* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
   finishes LOC.  The removed nodes are freed, so any link into LOC (including
   branch-target operands) is rewired past the removed nops first.  */

static void
loc_descr_without_nops (dw_loc_descr_ref &loc)
{
  /* A lone trailing nop is kept (it may be a branch target).  */
  if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
    return;

  /* Set of all DW_OP_nop operations we remove.  */
  hash_set<dw_loc_descr_ref> nops;

  /* First, strip all prefix NOP operations in order to keep the head of the
     operations list.  */
  loc_descr_to_next_no_nop (loc, nops);

  for (dw_loc_descr_ref cur = loc; cur != NULL;)
    {
      /* For control flow operations: strip "prefix" nops in destination
	 labels.  */
      if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
	loc_descr_to_next_no_nop (loc&: cur->dw_loc_oprnd1.v.val_loc, nops);
      if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
	loc_descr_to_next_no_nop (loc&: cur->dw_loc_oprnd2.v.val_loc, nops);

      /* Do the same for the operations that follow, then move to the next
	 iteration.  */
      if (cur->dw_loc_next != NULL)
	loc_descr_to_next_no_nop (loc&: cur->dw_loc_next, nops);
      cur = cur->dw_loc_next;
    }

  /* Only now that nothing points at them any more, free the nops.  */
  nops.traverse<void *, free_loc_descr> (NULL);
}
| 18455 | |
| 18456 | |
| 18457 | struct dwarf_procedure_info; |
| 18458 | |
/* Helper structure for location descriptions generation.  Carries the
   contextual state that loc_list_from_tree and friends thread through
   recursive expression translation.  */
struct loc_descr_context
{
  /* The type that is implicitly referenced by DW_OP_push_object_address, or
     NULL_TREE if DW_OP_push_object_address is invalid for this location
     description.  This is used when processing PLACEHOLDER_EXPR nodes.  */
  tree context_type;
  /* The ..._DECL node that should be translated as a
     DW_OP_push_object_address operation.  */
  tree base_decl;
  /* Information about the DWARF procedure we are currently generating.  NULL if
     we are not generating a DWARF procedure.  */
  struct dwarf_procedure_info *dpi;
  /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
     by consumer.  Used for DW_TAG_generic_subrange attributes.  */
  bool placeholder_arg;
  /* True if PLACEHOLDER_EXPR has been seen.  */
  bool placeholder_seen;
  /* True if strict preservation of signedness has been requested.  */
  bool strict_signedness;
};
| 18480 | |
| 18481 | /* DWARF procedures generation |
| 18482 | |
| 18483 | DWARF expressions (aka. location descriptions) are used to encode variable |
| 18484 | things such as sizes or offsets. Such computations can have redundant parts |
| 18485 | that can be factorized in order to reduce the size of the output debug |
| 18486 | information. This is the whole point of DWARF procedures. |
| 18487 | |
| 18488 | Thanks to stor-layout.cc, size and offset expressions in GENERIC trees are |
| 18489 | already factorized into functions ("size functions") in order to handle very |
| 18490 | big and complex types. Such functions are quite simple: they have integral |
| 18491 | arguments, they return an integral result and their body contains only a |
| 18492 | return statement with arithmetic expressions. This is the only kind of |
| 18493 | function we are interested in translating into DWARF procedures, here. |
| 18494 | |
| 18495 | DWARF expressions and DWARF procedure are executed using a stack, so we have |
| 18496 | to define some calling convention for them to interact. Let's say that: |
| 18497 | |
| 18498 | - Before calling a DWARF procedure, DWARF expressions must push on the stack |
| 18499 | all arguments in reverse order (right-to-left) so that when the DWARF |
| 18500 | procedure execution starts, the first argument is the top of the stack. |
| 18501 | |
| 18502 | - Then, when returning, the DWARF procedure must have consumed all arguments |
| 18503 | on the stack, must have pushed the result and touched nothing else. |
| 18504 | |
   - Each integral argument and the result are integral types that can be
     held in a single stack slot.
| 18507 | |
| 18508 | - We call "frame offset" the number of stack slots that are "under DWARF |
| 18509 | procedure control": it includes the arguments slots, the temporaries and |
| 18510 | the result slot. Thus, it is equal to the number of arguments when the |
| 18511 | procedure execution starts and must be equal to one (the result) when it |
| 18512 | returns. */ |
| 18513 | |
/* Helper structure used when generating operations for a DWARF procedure.
   See the "DWARF procedures generation" commentary above for the calling
   convention these fields support.  */
struct dwarf_procedure_info
{
  /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
     currently translated.  */
  tree fndecl;
  /* The number of arguments FNDECL takes.  */
  unsigned args_count;
};
| 18523 | |
| 18524 | /* Return a pointer to a newly created DIE node for a DWARF procedure. Add |
| 18525 | LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE, |
| 18526 | equate it to this DIE. */ |
| 18527 | |
| 18528 | static dw_die_ref |
| 18529 | new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl, |
| 18530 | dw_die_ref parent_die) |
| 18531 | { |
| 18532 | dw_die_ref dwarf_proc_die; |
| 18533 | |
| 18534 | if ((dwarf_version < 3 && dwarf_strict) |
| 18535 | || location == NULL) |
| 18536 | return NULL; |
| 18537 | |
| 18538 | dwarf_proc_die = new_die (tag_value: DW_TAG_dwarf_procedure, parent_die, t: fndecl); |
| 18539 | if (fndecl) |
| 18540 | equate_decl_number_to_die (decl: fndecl, decl_die: dwarf_proc_die); |
| 18541 | add_AT_loc (die: dwarf_proc_die, attr_kind: DW_AT_location, loc: location); |
| 18542 | return dwarf_proc_die; |
| 18543 | } |
| 18544 | |
| 18545 | /* Return whether TYPE is a supported type as a DWARF procedure argument |
| 18546 | type or return type (we handle only scalar types and pointer types that |
| 18547 | aren't wider than the DWARF expression evaluation stack). */ |
| 18548 | |
| 18549 | static bool |
| 18550 | is_handled_procedure_type (tree type) |
| 18551 | { |
| 18552 | return ((INTEGRAL_TYPE_P (type) |
| 18553 | || TREE_CODE (type) == OFFSET_TYPE |
| 18554 | || TREE_CODE (type) == POINTER_TYPE) |
| 18555 | && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE); |
| 18556 | } |
| 18557 | |
| 18558 | /* Helper for resolve_args_picking: do the same but stop when coming across |
| 18559 | visited nodes. For each node we visit, register in FRAME_OFFSETS the frame |
| 18560 | offset *before* evaluating the corresponding operation. */ |
| 18561 | |
static bool
resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
			struct dwarf_procedure_info *dpi,
			hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
{
  /* The "frame_offset" identifier is already used to name a macro... */
  unsigned frame_offset_ = initial_frame_offset;
  dw_loc_descr_ref l;

  /* Walk the operation list, tracking the DWARF expression stack depth at
     each node, and recurse at DW_OP_bra so both branch targets are
     visited.  */
  for (l = loc; l != NULL;)
    {
      bool existed;
      unsigned &l_frame_offset = frame_offsets.get_or_insert (k: l, existed: &existed);

      /* If we already met this node, there is nothing to compute anymore.  */
      if (existed)
	{
	  /* Make sure that the stack size is consistent wherever the execution
	     flow comes from.  */
	  gcc_assert ((unsigned) l_frame_offset == frame_offset_);
	  break;
	}
      l_frame_offset = frame_offset_;

      /* If needed, relocate the picking offset with respect to the frame
	 offset.  */
      if (l->dw_loc_frame_offset_rel)
	{
	  unsigned HOST_WIDE_INT off;
	  switch (l->dw_loc_opc)
	    {
	    case DW_OP_pick:
	      off = l->dw_loc_oprnd1.v.val_unsigned;
	      break;
	    case DW_OP_dup:
	      off = 0;
	      break;
	    case DW_OP_over:
	      off = 1;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  /* frame_offset_ is the size of the current stack frame, including
	     incoming arguments.  Besides, the arguments are pushed
	     right-to-left.  Thus, in order to access the Nth argument from
	     this operation node, the picking has to skip temporaries *plus*
	     one stack slot per argument (0 for the first one, 1 for the second
	     one, etc.).

	     The targeted argument number (N) is already set as the operand,
	     and the number of temporaries can be computed with:
	       frame_offsets_ - dpi->args_count */
	  off += frame_offset_ - dpi->args_count;

	  /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)...  */
	  if (off > 255)
	    return false;

	  /* Use the shortest encoding for the resolved offset: DW_OP_dup for
	     the top of stack, DW_OP_over for the slot below it, DW_OP_pick
	     otherwise.  */
	  if (off == 0)
	    {
	      l->dw_loc_opc = DW_OP_dup;
	      l->dw_loc_oprnd1.v.val_unsigned = 0;
	    }
	  else if (off == 1)
	    {
	      l->dw_loc_opc = DW_OP_over;
	      l->dw_loc_oprnd1.v.val_unsigned = 0;
	    }
	  else
	    {
	      l->dw_loc_opc = DW_OP_pick;
	      l->dw_loc_oprnd1.v.val_unsigned = off;
	    }
	}

      /* Update frame_offset according to the effect the current operation has
	 on the stack.  */
      switch (l->dw_loc_opc)
	{
	/* These operations leave the stack depth unchanged.  */
	case DW_OP_deref:
	case DW_OP_swap:
	case DW_OP_rot:
	case DW_OP_abs:
	case DW_OP_neg:
	case DW_OP_not:
	case DW_OP_plus_uconst:
	case DW_OP_skip:
	case DW_OP_reg0:
	case DW_OP_reg1:
	case DW_OP_reg2:
	case DW_OP_reg3:
	case DW_OP_reg4:
	case DW_OP_reg5:
	case DW_OP_reg6:
	case DW_OP_reg7:
	case DW_OP_reg8:
	case DW_OP_reg9:
	case DW_OP_reg10:
	case DW_OP_reg11:
	case DW_OP_reg12:
	case DW_OP_reg13:
	case DW_OP_reg14:
	case DW_OP_reg15:
	case DW_OP_reg16:
	case DW_OP_reg17:
	case DW_OP_reg18:
	case DW_OP_reg19:
	case DW_OP_reg20:
	case DW_OP_reg21:
	case DW_OP_reg22:
	case DW_OP_reg23:
	case DW_OP_reg24:
	case DW_OP_reg25:
	case DW_OP_reg26:
	case DW_OP_reg27:
	case DW_OP_reg28:
	case DW_OP_reg29:
	case DW_OP_reg30:
	case DW_OP_reg31:
	case DW_OP_bregx:
	case DW_OP_piece:
	case DW_OP_deref_size:
	case DW_OP_nop:
	case DW_OP_bit_piece:
	case DW_OP_implicit_value:
	case DW_OP_stack_value:
	case DW_OP_deref_type:
	case DW_OP_convert:
	case DW_OP_reinterpret:
	case DW_OP_GNU_deref_type:
	case DW_OP_GNU_convert:
	case DW_OP_GNU_reinterpret:
	  break;

	/* These operations push exactly one value.  */
	case DW_OP_addr:
	case DW_OP_const1u:
	case DW_OP_const1s:
	case DW_OP_const2u:
	case DW_OP_const2s:
	case DW_OP_const4u:
	case DW_OP_const4s:
	case DW_OP_const8u:
	case DW_OP_const8s:
	case DW_OP_constu:
	case DW_OP_consts:
	case DW_OP_dup:
	case DW_OP_over:
	case DW_OP_pick:
	case DW_OP_lit0:
	case DW_OP_lit1:
	case DW_OP_lit2:
	case DW_OP_lit3:
	case DW_OP_lit4:
	case DW_OP_lit5:
	case DW_OP_lit6:
	case DW_OP_lit7:
	case DW_OP_lit8:
	case DW_OP_lit9:
	case DW_OP_lit10:
	case DW_OP_lit11:
	case DW_OP_lit12:
	case DW_OP_lit13:
	case DW_OP_lit14:
	case DW_OP_lit15:
	case DW_OP_lit16:
	case DW_OP_lit17:
	case DW_OP_lit18:
	case DW_OP_lit19:
	case DW_OP_lit20:
	case DW_OP_lit21:
	case DW_OP_lit22:
	case DW_OP_lit23:
	case DW_OP_lit24:
	case DW_OP_lit25:
	case DW_OP_lit26:
	case DW_OP_lit27:
	case DW_OP_lit28:
	case DW_OP_lit29:
	case DW_OP_lit30:
	case DW_OP_lit31:
	case DW_OP_breg0:
	case DW_OP_breg1:
	case DW_OP_breg2:
	case DW_OP_breg3:
	case DW_OP_breg4:
	case DW_OP_breg5:
	case DW_OP_breg6:
	case DW_OP_breg7:
	case DW_OP_breg8:
	case DW_OP_breg9:
	case DW_OP_breg10:
	case DW_OP_breg11:
	case DW_OP_breg12:
	case DW_OP_breg13:
	case DW_OP_breg14:
	case DW_OP_breg15:
	case DW_OP_breg16:
	case DW_OP_breg17:
	case DW_OP_breg18:
	case DW_OP_breg19:
	case DW_OP_breg20:
	case DW_OP_breg21:
	case DW_OP_breg22:
	case DW_OP_breg23:
	case DW_OP_breg24:
	case DW_OP_breg25:
	case DW_OP_breg26:
	case DW_OP_breg27:
	case DW_OP_breg28:
	case DW_OP_breg29:
	case DW_OP_breg30:
	case DW_OP_breg31:
	case DW_OP_fbreg:
	case DW_OP_push_object_address:
	case DW_OP_call_frame_cfa:
	case DW_OP_GNU_variable_value:
	case DW_OP_GNU_addr_index:
	case DW_OP_GNU_const_index:
	  ++frame_offset_;
	  break;

	/* These operations pop one value net (e.g. binary operations pop two
	   and push one).  */
	case DW_OP_drop:
	case DW_OP_xderef:
	case DW_OP_and:
	case DW_OP_div:
	case DW_OP_minus:
	case DW_OP_mod:
	case DW_OP_mul:
	case DW_OP_or:
	case DW_OP_plus:
	case DW_OP_shl:
	case DW_OP_shr:
	case DW_OP_shra:
	case DW_OP_xor:
	case DW_OP_bra:
	case DW_OP_eq:
	case DW_OP_ge:
	case DW_OP_gt:
	case DW_OP_le:
	case DW_OP_lt:
	case DW_OP_ne:
	case DW_OP_regx:
	case DW_OP_xderef_size:
	  --frame_offset_;
	  break;

	case DW_OP_call2:
	case DW_OP_call4:
	case DW_OP_call_ref:
	  {
	    /* A call to another DWARF procedure changes the stack by that
	       procedure's net stack usage, recorded when it was created.  If
	       it is unknown, we cannot track the depth: give up.  */
	    dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
	    int *stack_usage = dwarf_proc_stack_usage_map->get (k: dwarf_proc);

	    if (stack_usage == NULL)
	      return false;
	    frame_offset_ += *stack_usage;
	    break;
	  }

	case DW_OP_implicit_pointer:
	case DW_OP_entry_value:
	case DW_OP_const_type:
	case DW_OP_regval_type:
	case DW_OP_form_tls_address:
	case DW_OP_GNU_push_tls_address:
	case DW_OP_GNU_uninit:
	case DW_OP_GNU_encoded_addr:
	case DW_OP_GNU_implicit_pointer:
	case DW_OP_GNU_entry_value:
	case DW_OP_GNU_const_type:
	case DW_OP_GNU_regval_type:
	case DW_OP_GNU_parameter_ref:
	  /* loc_list_from_tree will probably not output these operations for
	     size functions, so assume they will not appear here.  */
	  /* Fall through...  */

	default:
	  gcc_unreachable ();
	}

      /* Now, follow the control flow (except subroutine calls).  */
      switch (l->dw_loc_opc)
	{
	case DW_OP_bra:
	  /* Visit the fall-through successor recursively, then continue on
	     the branch target below.  */
	  if (!resolve_args_picking_1 (loc: l->dw_loc_next, initial_frame_offset: frame_offset_, dpi,
				       frame_offsets))
	    return false;
	  /* Fall through.  */

	case DW_OP_skip:
	  l = l->dw_loc_oprnd1.v.val_loc;
	  break;

	case DW_OP_stack_value:
	  /* DW_OP_stack_value terminates the expression.  */
	  return true;

	default:
	  l = l->dw_loc_next;
	  break;
	}
    }

  return true;
}
| 18867 | |
| 18868 | /* Make a DFS over operations reachable through LOC (i.e. follow branch |
| 18869 | operations) in order to resolve the operand of DW_OP_pick operations that |
| 18870 | target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame |
   offset *before* LOC is executed.  Return true if all relocations were
   successful, false otherwise.  */
| 18873 | |
| 18874 | static bool |
| 18875 | resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset, |
| 18876 | struct dwarf_procedure_info *dpi) |
| 18877 | { |
| 18878 | /* Associate to all visited operations the frame offset *before* evaluating |
| 18879 | this operation. */ |
| 18880 | hash_map<dw_loc_descr_ref, unsigned> frame_offsets; |
| 18881 | |
| 18882 | return |
| 18883 | resolve_args_picking_1 (loc, initial_frame_offset, dpi, frame_offsets); |
| 18884 | } |
| 18885 | |
| 18886 | /* Try to generate a DWARF procedure that computes the same result as FNDECL. |
| 18887 | Return NULL if it is not possible. */ |
| 18888 | |
static dw_die_ref
function_to_dwarf_procedure (tree fndecl)
{
  struct dwarf_procedure_info dpi;
  /* Context for the location description generation: no implicit object
     (context_type/base_decl are null), but PARM_DECL references must be
     translated into stack picks via DPI.  */
  struct loc_descr_context ctx = {
    NULL_TREE,	/* context_type */
    NULL_TREE,	/* base_decl */
    .dpi: &dpi,	/* dpi */
    .placeholder_arg: false,	/* placeholder_arg */
    .placeholder_seen: false,	/* placeholder_seen */
    .strict_signedness: true	/* strict_signedness */
  };
  dw_die_ref dwarf_proc_die;
  tree tree_body = DECL_SAVED_TREE (fndecl);
  dw_loc_descr_ref loc_body, epilogue;

  tree cursor;
  unsigned i;

  /* Do not generate multiple DWARF procedures for the same function
     declaration.  */
  dwarf_proc_die = lookup_decl_die (decl: fndecl);
  if (dwarf_proc_die != NULL)
    return dwarf_proc_die;

  /* DWARF procedures are available starting with the DWARFv3 standard.  */
  if (dwarf_version < 3 && dwarf_strict)
    return NULL;

  /* We handle only functions for which we still have a body, that return a
     supported type and that takes arguments with supported types.  Note that
     there is no point translating functions that return nothing.  */
  if (tree_body == NULL_TREE
      || DECL_RESULT (fndecl) == NULL_TREE
      || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
    return NULL;

  for (cursor = DECL_ARGUMENTS (fndecl);
       cursor != NULL_TREE;
       cursor = TREE_CHAIN (cursor))
    if (!is_handled_procedure_type (TREE_TYPE (cursor)))
      return NULL;

  /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)).  */
  if (TREE_CODE (tree_body) != RETURN_EXPR)
    return NULL;
  tree_body = TREE_OPERAND (tree_body, 0);
  if (TREE_CODE (tree_body) != MODIFY_EXPR
      || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
    return NULL;
  tree_body = TREE_OPERAND (tree_body, 1);

  /* Try to translate the body expression itself.  Note that this will probably
     cause an infinite recursion if its call graph has a cycle.  This is very
     unlikely for size functions, however, so don't bother with such things at
     the moment.  */
  dpi.fndecl = fndecl;
  dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
  loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
  if (!loc_body)
    return NULL;

  /* After evaluating all operands in "loc_body", we should still have on the
     stack all arguments plus the desired function result (top of the stack).
     Generate code in order to keep only the result in our stack frame: one
     DW_OP_swap/DW_OP_drop pair per argument moves the result below an
     argument and discards that argument.  */
  epilogue = NULL;
  for (i = 0; i < dpi.args_count; ++i)
    {
      dw_loc_descr_ref op_couple = new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0);
      op_couple->dw_loc_next = new_loc_descr (op: DW_OP_drop, oprnd1: 0, oprnd2: 0);
      op_couple->dw_loc_next->dw_loc_next = epilogue;
      epilogue = op_couple;
    }
  add_loc_descr (list_head: &loc_body, descr: epilogue);
  /* Resolve the frame-offset-relative picks now that the full expression is
     known; on failure (e.g. pick offset > 255) no procedure is emitted.  */
  if (!resolve_args_picking (loc: loc_body, initial_frame_offset: dpi.args_count, dpi: &dpi))
    return NULL;

  /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
     because they are considered useful.  Now there is an epilogue, they are
     not anymore, so give it another try.  */
  loc_descr_without_nops (loc&: loc_body);

  /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
     a DW_TAG_dwarf_procedure, so we may have a conflict, here.  It's unlikely,
     though, given that size functions do not come from source, so they should
     not have a dedicated DW_TAG_subprogram DIE.  */
  dwarf_proc_die
    = new_dwarf_proc_die (location: loc_body, fndecl,
			  parent_die: get_context_die (DECL_CONTEXT (fndecl)));

  /* The called DWARF procedure consumes one stack slot per argument and
     returns one stack slot.  */
  dwarf_proc_stack_usage_map->put (k: dwarf_proc_die, v: 1 - dpi.args_count);

  return dwarf_proc_die;
}
| 18985 | |
| 18986 | /* Helper function for loc_list_from_tree. Perform OP binary op, |
| 18987 | but after converting arguments to type_die, afterwards convert |
| 18988 | back to unsigned. */ |
| 18989 | |
| 18990 | static dw_loc_list_ref |
| 18991 | typed_binop_from_tree (enum dwarf_location_atom op, tree loc, |
| 18992 | dw_die_ref type_die, scalar_int_mode mode, |
| 18993 | struct loc_descr_context *context) |
| 18994 | { |
| 18995 | dw_loc_list_ref op0, op1; |
| 18996 | dw_loc_descr_ref cvt, binop; |
| 18997 | |
| 18998 | if (type_die == NULL) |
| 18999 | return NULL; |
| 19000 | |
| 19001 | op0 = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context); |
| 19002 | op1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context); |
| 19003 | if (op0 == NULL || op1 == NULL) |
| 19004 | return NULL; |
| 19005 | |
| 19006 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
| 19007 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
| 19008 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
| 19009 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
| 19010 | add_loc_descr_to_each (list: op0, ref: cvt); |
| 19011 | |
| 19012 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
| 19013 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
| 19014 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
| 19015 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
| 19016 | add_loc_descr_to_each (list: op1, ref: cvt); |
| 19017 | |
| 19018 | add_loc_list (ret: &op0, list: op1); |
| 19019 | if (op0 == NULL) |
| 19020 | return NULL; |
| 19021 | |
| 19022 | binop = new_loc_descr (op, oprnd1: 0, oprnd2: 0); |
| 19023 | convert_descriptor_to_mode (mode, op: binop); |
| 19024 | add_loc_descr_to_each (list: op0, ref: binop); |
| 19025 | |
| 19026 | return op0; |
| 19027 | } |
| 19028 | |
| 19029 | /* Generate Dwarf location list representing LOC. |
   If WANT_ADDRESS is false, the expression computing LOC itself is returned.
   If WANT_ADDRESS is 1, an expression computing the address of LOC is
   returned.
   If WANT_ADDRESS is 2, an expression computing an address usable in a
   location expression is returned (i.e. DW_OP_reg can be used
   to refer to register values).
| 19035 | |
| 19036 | CONTEXT provides information to customize the location descriptions |
| 19037 | generation. Its context_type field specifies what type is implicitly |
| 19038 | referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation |
| 19039 | will not be generated. |
| 19040 | |
| 19041 | Its DPI field determines whether we are generating a DWARF expression for a |
| 19042 | DWARF procedure, so PARM_DECL references are processed specifically. |
| 19043 | |
| 19044 | If CONTEXT is NULL, the behavior is the same as if context_type, base_decl |
| 19045 | and dpi fields were null. */ |
| 19046 | |
| 19047 | static dw_loc_list_ref |
| 19048 | loc_list_from_tree_1 (tree loc, int want_address, |
| 19049 | struct loc_descr_context *context) |
| 19050 | { |
| 19051 | dw_loc_descr_ref ret = NULL, ret1 = NULL; |
| 19052 | dw_loc_list_ref list_ret = NULL, list_ret1 = NULL; |
| 19053 | int have_address = 0; |
| 19054 | enum dwarf_location_atom op; |
| 19055 | |
| 19056 | /* ??? Most of the time we do not take proper care for sign/zero |
| 19057 | extending the values properly. Hopefully this won't be a real |
| 19058 | problem... */ |
| 19059 | |
| 19060 | if (context != NULL |
| 19061 | && context->base_decl == loc |
| 19062 | && want_address == 0) |
| 19063 | { |
| 19064 | if (dwarf_version >= 3 || !dwarf_strict) |
| 19065 | return new_loc_list (expr: new_loc_descr (op: DW_OP_push_object_address, oprnd1: 0, oprnd2: 0), |
| 19066 | NULL, vbegin: 0, NULL, vend: 0, NULL); |
| 19067 | else |
| 19068 | return NULL; |
| 19069 | } |
| 19070 | |
| 19071 | switch (TREE_CODE (loc)) |
| 19072 | { |
| 19073 | case ERROR_MARK: |
| 19074 | expansion_failed (expr: loc, NULL_RTX, reason: "ERROR_MARK" ); |
| 19075 | return 0; |
| 19076 | |
| 19077 | case PLACEHOLDER_EXPR: |
| 19078 | /* This case involves extracting fields from an object to determine the |
| 19079 | position of other fields. It is supposed to appear only as the first |
| 19080 | operand of COMPONENT_REF nodes and to reference precisely the type |
| 19081 | that the context allows or its enclosing type. */ |
| 19082 | if (context != NULL |
| 19083 | && (TREE_TYPE (loc) == context->context_type |
| 19084 | || TREE_TYPE (loc) == TYPE_CONTEXT (context->context_type)) |
| 19085 | && want_address >= 1) |
| 19086 | { |
| 19087 | if (dwarf_version >= 3 || !dwarf_strict) |
| 19088 | { |
| 19089 | ret = new_loc_descr (op: DW_OP_push_object_address, oprnd1: 0, oprnd2: 0); |
| 19090 | have_address = 1; |
| 19091 | break; |
| 19092 | } |
| 19093 | else |
| 19094 | return NULL; |
| 19095 | } |
| 19096 | /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for |
| 19097 | the single argument passed by consumer. */ |
| 19098 | else if (context != NULL |
| 19099 | && context->placeholder_arg |
| 19100 | && INTEGRAL_TYPE_P (TREE_TYPE (loc)) |
| 19101 | && want_address == 0) |
| 19102 | { |
| 19103 | ret = new_loc_descr (op: DW_OP_pick, oprnd1: 0, oprnd2: 0); |
| 19104 | ret->dw_loc_frame_offset_rel = 1; |
| 19105 | context->placeholder_seen = true; |
| 19106 | break; |
| 19107 | } |
| 19108 | else |
| 19109 | expansion_failed (expr: loc, NULL_RTX, |
| 19110 | reason: "PLACEHOLDER_EXPR for an unexpected type" ); |
| 19111 | break; |
| 19112 | |
| 19113 | case CALL_EXPR: |
| 19114 | { |
| 19115 | tree callee = get_callee_fndecl (loc); |
| 19116 | dw_die_ref dwarf_proc; |
| 19117 | |
| 19118 | if (callee |
| 19119 | && is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))) |
| 19120 | && (dwarf_proc = function_to_dwarf_procedure (fndecl: callee))) |
| 19121 | { |
| 19122 | /* DWARF procedures are used for size functions, which are built |
| 19123 | when size expressions contain conditional constructs, so we |
| 19124 | request strict preservation of signedness for comparisons. */ |
| 19125 | bool old_strict_signedness; |
| 19126 | if (context) |
| 19127 | { |
| 19128 | old_strict_signedness = context->strict_signedness; |
| 19129 | context->strict_signedness = true; |
| 19130 | } |
| 19131 | |
| 19132 | /* Evaluate arguments right-to-left so that the first argument |
| 19133 | will be the top-most one on the stack. */ |
| 19134 | for (int i = call_expr_nargs (loc) - 1; i >= 0; --i) |
| 19135 | { |
| 19136 | tree arg = CALL_EXPR_ARG (loc, i); |
| 19137 | ret1 = loc_descriptor_from_tree (arg, 0, context); |
| 19138 | if (!ret1) |
| 19139 | { |
| 19140 | expansion_failed (expr: arg, NULL_RTX, reason: "CALL_EXPR argument" ); |
| 19141 | return NULL; |
| 19142 | } |
| 19143 | add_loc_descr (list_head: &ret, descr: ret1); |
| 19144 | } |
| 19145 | |
| 19146 | ret1 = new_loc_descr (op: DW_OP_call4, oprnd1: 0, oprnd2: 0); |
| 19147 | ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
| 19148 | ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc; |
| 19149 | ret1->dw_loc_oprnd1.v.val_die_ref.external = 0; |
| 19150 | add_loc_descr (list_head: &ret, descr: ret1); |
| 19151 | if (context) |
| 19152 | context->strict_signedness = old_strict_signedness; |
| 19153 | } |
| 19154 | else |
| 19155 | expansion_failed (expr: loc, NULL_RTX, reason: "CALL_EXPR target" ); |
| 19156 | break; |
| 19157 | } |
| 19158 | |
| 19159 | case PREINCREMENT_EXPR: |
| 19160 | case PREDECREMENT_EXPR: |
| 19161 | case POSTINCREMENT_EXPR: |
| 19162 | case POSTDECREMENT_EXPR: |
| 19163 | expansion_failed (expr: loc, NULL_RTX, reason: "PRE/POST INCREMENT/DECREMENT" ); |
| 19164 | /* There are no opcodes for these operations. */ |
| 19165 | return 0; |
| 19166 | |
| 19167 | case ADDR_EXPR: |
| 19168 | /* If we already want an address, see if there is INDIRECT_REF inside |
| 19169 | e.g. for &this->field. */ |
| 19170 | if (want_address) |
| 19171 | { |
| 19172 | list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref |
| 19173 | (loc, toplev: want_address == 2, context); |
| 19174 | if (list_ret) |
| 19175 | have_address = 1; |
| 19176 | else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0)) |
| 19177 | && (ret = cst_pool_loc_descr (loc))) |
| 19178 | have_address = 1; |
| 19179 | } |
| 19180 | /* Otherwise, process the argument and look for the address. */ |
| 19181 | if (!list_ret && !ret) |
| 19182 | list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address: 1, context); |
| 19183 | else |
| 19184 | { |
| 19185 | if (want_address) |
| 19186 | expansion_failed (expr: loc, NULL_RTX, reason: "need address of ADDR_EXPR" ); |
| 19187 | return NULL; |
| 19188 | } |
| 19189 | break; |
| 19190 | |
| 19191 | case VAR_DECL: |
| 19192 | if (DECL_THREAD_LOCAL_P (loc)) |
| 19193 | { |
| 19194 | rtx rtl; |
| 19195 | enum dwarf_location_atom tls_op; |
| 19196 | enum dtprel_bool dtprel = dtprel_false; |
| 19197 | |
| 19198 | if (targetm.have_tls) |
| 19199 | { |
| 19200 | /* If this is not defined, we have no way to emit the |
| 19201 | data. */ |
| 19202 | if (!targetm.asm_out.output_dwarf_dtprel) |
| 19203 | return 0; |
| 19204 | |
| 19205 | /* The way DW_OP_GNU_push_tls_address is specified, we |
| 19206 | can only look up addresses of objects in the current |
| 19207 | module. We used DW_OP_addr as first op, but that's |
| 19208 | wrong, because DW_OP_addr is relocated by the debug |
| 19209 | info consumer, while DW_OP_GNU_push_tls_address |
| 19210 | operand shouldn't be. */ |
| 19211 | if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc)) |
| 19212 | return 0; |
| 19213 | dtprel = dtprel_true; |
| 19214 | /* We check for DWARF 5 here because gdb did not implement |
| 19215 | DW_OP_form_tls_address until after 7.12. */ |
| 19216 | tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address |
| 19217 | : DW_OP_GNU_push_tls_address); |
| 19218 | } |
| 19219 | else |
| 19220 | { |
| 19221 | if (!targetm.emutls.debug_form_tls_address |
| 19222 | || !(dwarf_version >= 3 || !dwarf_strict)) |
| 19223 | return 0; |
| 19224 | /* We stuffed the control variable into the DECL_VALUE_EXPR |
| 19225 | to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should |
| 19226 | no longer appear in gimple code. We used the control |
| 19227 | variable in specific so that we could pick it up here. */ |
| 19228 | loc = DECL_VALUE_EXPR (loc); |
| 19229 | tls_op = DW_OP_form_tls_address; |
| 19230 | } |
| 19231 | |
| 19232 | rtl = rtl_for_decl_location (loc); |
| 19233 | if (rtl == NULL_RTX) |
| 19234 | return 0; |
| 19235 | |
| 19236 | if (!MEM_P (rtl)) |
| 19237 | return 0; |
| 19238 | rtl = XEXP (rtl, 0); |
| 19239 | if (! CONSTANT_P (rtl)) |
| 19240 | return 0; |
| 19241 | |
| 19242 | ret = new_addr_loc_descr (addr: rtl, dtprel); |
| 19243 | ret1 = new_loc_descr (op: tls_op, oprnd1: 0, oprnd2: 0); |
| 19244 | add_loc_descr (list_head: &ret, descr: ret1); |
| 19245 | |
| 19246 | have_address = 1; |
| 19247 | break; |
| 19248 | } |
| 19249 | /* FALLTHRU */ |
| 19250 | |
| 19251 | case PARM_DECL: |
| 19252 | if (context != NULL && context->dpi != NULL |
| 19253 | && DECL_CONTEXT (loc) == context->dpi->fndecl) |
| 19254 | { |
| 19255 | /* We are generating code for a DWARF procedure and we want to access |
| 19256 | one of its arguments: find the appropriate argument offset and let |
| 19257 | the resolve_args_picking pass compute the offset that complies |
| 19258 | with the stack frame size. */ |
| 19259 | unsigned i = 0; |
| 19260 | tree cursor; |
| 19261 | |
| 19262 | for (cursor = DECL_ARGUMENTS (context->dpi->fndecl); |
| 19263 | cursor != NULL_TREE && cursor != loc; |
| 19264 | cursor = TREE_CHAIN (cursor), ++i) |
| 19265 | ; |
| 19266 | /* If we are translating a DWARF procedure, all referenced parameters |
| 19267 | must belong to the current function. */ |
| 19268 | gcc_assert (cursor != NULL_TREE); |
| 19269 | |
| 19270 | ret = new_loc_descr (op: DW_OP_pick, oprnd1: i, oprnd2: 0); |
| 19271 | ret->dw_loc_frame_offset_rel = 1; |
| 19272 | break; |
| 19273 | } |
| 19274 | /* FALLTHRU */ |
| 19275 | |
| 19276 | case RESULT_DECL: |
| 19277 | if (DECL_HAS_VALUE_EXPR_P (loc)) |
| 19278 | { |
| 19279 | tree value_expr = DECL_VALUE_EXPR (loc); |
| 19280 | |
| 19281 | /* Non-local frame structures are DECL_IGNORED_P variables so we need |
| 19282 | to wait until they get an RTX in order to reference them. */ |
| 19283 | if (early_dwarf |
| 19284 | && TREE_CODE (value_expr) == COMPONENT_REF |
| 19285 | && VAR_P (TREE_OPERAND (value_expr, 0)) |
| 19286 | && DECL_NONLOCAL_FRAME (TREE_OPERAND (value_expr, 0))) |
| 19287 | ; |
| 19288 | else |
| 19289 | return loc_list_from_tree_1 (loc: value_expr, want_address, context); |
| 19290 | } |
| 19291 | |
| 19292 | /* FALLTHRU */ |
| 19293 | |
| 19294 | case FUNCTION_DECL: |
| 19295 | { |
| 19296 | rtx rtl; |
| 19297 | var_loc_list *loc_list = lookup_decl_loc (decl: loc); |
| 19298 | |
| 19299 | if (loc_list && loc_list->first) |
| 19300 | { |
| 19301 | list_ret = dw_loc_list (loc_list, decl: loc, want_address); |
| 19302 | have_address = want_address != 0; |
| 19303 | break; |
| 19304 | } |
| 19305 | rtl = rtl_for_decl_location (loc); |
| 19306 | if (rtl == NULL_RTX) |
| 19307 | { |
| 19308 | if (TREE_CODE (loc) != FUNCTION_DECL |
| 19309 | && early_dwarf |
| 19310 | && want_address != 1 |
| 19311 | && ! DECL_IGNORED_P (loc) |
| 19312 | && (INTEGRAL_TYPE_P (TREE_TYPE (loc)) |
| 19313 | || POINTER_TYPE_P (TREE_TYPE (loc))) |
| 19314 | && TYPE_MODE (TREE_TYPE (loc)) != BLKmode |
| 19315 | && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc))) |
| 19316 | <= DWARF2_ADDR_SIZE)) |
| 19317 | { |
| 19318 | dw_die_ref ref = lookup_decl_die (decl: loc); |
| 19319 | if (ref) |
| 19320 | { |
| 19321 | ret = new_loc_descr (op: DW_OP_GNU_variable_value, oprnd1: 0, oprnd2: 0); |
| 19322 | ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
| 19323 | ret->dw_loc_oprnd1.v.val_die_ref.die = ref; |
| 19324 | ret->dw_loc_oprnd1.v.val_die_ref.external = 0; |
| 19325 | } |
| 19326 | else if (current_function_decl |
| 19327 | && DECL_CONTEXT (loc) == current_function_decl) |
| 19328 | { |
| 19329 | ret = new_loc_descr (op: DW_OP_GNU_variable_value, oprnd1: 0, oprnd2: 0); |
| 19330 | ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref; |
| 19331 | ret->dw_loc_oprnd1.v.val_decl_ref = loc; |
| 19332 | } |
| 19333 | break; |
| 19334 | } |
| 19335 | expansion_failed (expr: loc, NULL_RTX, reason: "DECL has no RTL" ); |
| 19336 | return 0; |
| 19337 | } |
| 19338 | else if (CONST_INT_P (rtl)) |
| 19339 | { |
| 19340 | HOST_WIDE_INT val = INTVAL (rtl); |
| 19341 | if (TYPE_UNSIGNED (TREE_TYPE (loc))) |
| 19342 | val &= GET_MODE_MASK (DECL_MODE (loc)); |
| 19343 | ret = int_loc_descriptor (poly_i: val); |
| 19344 | } |
| 19345 | else if (GET_CODE (rtl) == CONST_STRING) |
| 19346 | { |
| 19347 | expansion_failed (expr: loc, NULL_RTX, reason: "CONST_STRING" ); |
| 19348 | return 0; |
| 19349 | } |
| 19350 | else if (CONSTANT_P (rtl) && const_ok_for_output (rtl)) |
| 19351 | ret = new_addr_loc_descr (addr: rtl, dtprel: dtprel_false); |
| 19352 | else |
| 19353 | { |
| 19354 | machine_mode mode, mem_mode; |
| 19355 | |
| 19356 | /* Certain constructs can only be represented at top-level. */ |
| 19357 | if (want_address == 2) |
| 19358 | { |
| 19359 | ret = loc_descriptor (rtl, VOIDmode, |
| 19360 | initialized: VAR_INIT_STATUS_INITIALIZED); |
| 19361 | have_address = 1; |
| 19362 | } |
| 19363 | else |
| 19364 | { |
| 19365 | mode = GET_MODE (rtl); |
| 19366 | mem_mode = VOIDmode; |
| 19367 | if (MEM_P (rtl)) |
| 19368 | { |
| 19369 | mem_mode = mode; |
| 19370 | mode = get_address_mode (mem: rtl); |
| 19371 | rtl = XEXP (rtl, 0); |
| 19372 | have_address = 1; |
| 19373 | } |
| 19374 | ret = mem_loc_descriptor (rtl, mode, mem_mode, |
| 19375 | initialized: VAR_INIT_STATUS_INITIALIZED); |
| 19376 | } |
| 19377 | if (!ret) |
| 19378 | expansion_failed (expr: loc, rtl, |
| 19379 | reason: "failed to produce loc descriptor for rtl" ); |
| 19380 | } |
| 19381 | } |
| 19382 | break; |
| 19383 | |
| 19384 | case MEM_REF: |
| 19385 | if (!integer_zerop (TREE_OPERAND (loc, 1))) |
| 19386 | { |
| 19387 | have_address = 1; |
| 19388 | goto do_plus; |
| 19389 | } |
| 19390 | /* Fallthru. */ |
| 19391 | case INDIRECT_REF: |
| 19392 | list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address: 0, context); |
| 19393 | have_address = 1; |
| 19394 | break; |
| 19395 | |
| 19396 | case TARGET_MEM_REF: |
| 19397 | case SSA_NAME: |
| 19398 | case DEBUG_EXPR_DECL: |
| 19399 | return NULL; |
| 19400 | |
| 19401 | case COMPOUND_EXPR: |
| 19402 | return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address, |
| 19403 | context); |
| 19404 | |
| 19405 | CASE_CONVERT: |
| 19406 | case VIEW_CONVERT_EXPR: |
| 19407 | case SAVE_EXPR: |
| 19408 | case MODIFY_EXPR: |
| 19409 | case NON_LVALUE_EXPR: |
| 19410 | return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address, |
| 19411 | context); |
| 19412 | |
| 19413 | case COMPONENT_REF: |
| 19414 | case BIT_FIELD_REF: |
| 19415 | case ARRAY_REF: |
| 19416 | case ARRAY_RANGE_REF: |
| 19417 | case REALPART_EXPR: |
| 19418 | case IMAGPART_EXPR: |
| 19419 | { |
| 19420 | tree obj, offset; |
| 19421 | poly_int64 bitsize, bitpos, bytepos; |
| 19422 | machine_mode mode; |
| 19423 | int unsignedp, reversep, volatilep = 0; |
| 19424 | |
| 19425 | obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode, |
| 19426 | &unsignedp, &reversep, &volatilep); |
| 19427 | |
| 19428 | gcc_assert (obj != loc); |
| 19429 | |
| 19430 | list_ret = loc_list_from_tree_1 (loc: obj, |
| 19431 | want_address: want_address == 2 |
| 19432 | && known_eq (bitpos, 0) |
| 19433 | && !offset ? 2 : 1, |
| 19434 | context); |
| 19435 | /* TODO: We can extract value of the small expression via shifting even |
| 19436 | for nonzero bitpos. */ |
| 19437 | if (list_ret == 0) |
| 19438 | return 0; |
| 19439 | if (!multiple_p (a: bitpos, BITS_PER_UNIT, multiple: &bytepos)) |
| 19440 | { |
| 19441 | expansion_failed (expr: loc, NULL_RTX, reason: "bitfield access" ); |
| 19442 | return 0; |
| 19443 | } |
| 19444 | |
| 19445 | if (offset != NULL_TREE) |
| 19446 | { |
| 19447 | /* Variable offset. */ |
| 19448 | list_ret1 = loc_list_from_tree_1 (loc: offset, want_address: 0, context); |
| 19449 | if (list_ret1 == 0) |
| 19450 | return 0; |
| 19451 | add_loc_list (ret: &list_ret, list: list_ret1); |
| 19452 | if (!list_ret) |
| 19453 | return 0; |
| 19454 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op: DW_OP_plus, oprnd1: 0, oprnd2: 0)); |
| 19455 | } |
| 19456 | |
| 19457 | HOST_WIDE_INT value; |
| 19458 | if (bytepos.is_constant (const_value: &value) && value > 0) |
| 19459 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op: DW_OP_plus_uconst, |
| 19460 | oprnd1: value, oprnd2: 0)); |
| 19461 | else if (maybe_ne (a: bytepos, b: 0)) |
| 19462 | loc_list_plus_const (list_head: list_ret, offset: bytepos); |
| 19463 | |
| 19464 | have_address = 1; |
| 19465 | break; |
| 19466 | } |
| 19467 | |
| 19468 | case INTEGER_CST: |
| 19469 | if ((want_address || !tree_fits_shwi_p (loc)) |
| 19470 | && (ret = cst_pool_loc_descr (loc))) |
| 19471 | have_address = 1; |
| 19472 | else if (want_address == 2 |
| 19473 | && tree_fits_shwi_p (loc) |
| 19474 | && (ret = address_of_int_loc_descriptor |
| 19475 | (size: int_size_in_bytes (TREE_TYPE (loc)), |
| 19476 | i: tree_to_shwi (loc)))) |
| 19477 | have_address = 1; |
| 19478 | else if (tree_fits_shwi_p (loc)) |
| 19479 | ret = int_loc_descriptor (poly_i: tree_to_shwi (loc)); |
| 19480 | else if (tree_fits_uhwi_p (loc)) |
| 19481 | ret = uint_loc_descriptor (i: tree_to_uhwi (loc)); |
| 19482 | else |
| 19483 | { |
| 19484 | expansion_failed (expr: loc, NULL_RTX, |
| 19485 | reason: "Integer operand is not host integer" ); |
| 19486 | return 0; |
| 19487 | } |
| 19488 | break; |
| 19489 | |
| 19490 | case POLY_INT_CST: |
| 19491 | { |
| 19492 | if (want_address) |
| 19493 | { |
| 19494 | expansion_failed (expr: loc, NULL_RTX, |
| 19495 | reason: "constant address with a runtime component" ); |
| 19496 | return 0; |
| 19497 | } |
| 19498 | poly_int64 value; |
| 19499 | if (!poly_int_tree_p (t: loc, value: &value)) |
| 19500 | { |
| 19501 | expansion_failed (expr: loc, NULL_RTX, reason: "constant too big" ); |
| 19502 | return 0; |
| 19503 | } |
| 19504 | ret = int_loc_descriptor (poly_i: value); |
| 19505 | } |
| 19506 | break; |
| 19507 | |
| 19508 | case CONSTRUCTOR: |
| 19509 | case REAL_CST: |
| 19510 | case STRING_CST: |
| 19511 | case COMPLEX_CST: |
| 19512 | if ((ret = cst_pool_loc_descr (loc))) |
| 19513 | have_address = 1; |
| 19514 | else if (TREE_CODE (loc) == CONSTRUCTOR) |
| 19515 | { |
| 19516 | tree type = TREE_TYPE (loc); |
| 19517 | unsigned HOST_WIDE_INT size = int_size_in_bytes (type); |
| 19518 | unsigned HOST_WIDE_INT offset = 0; |
| 19519 | unsigned HOST_WIDE_INT cnt; |
| 19520 | constructor_elt *ce; |
| 19521 | |
| 19522 | if (TREE_CODE (type) == RECORD_TYPE) |
| 19523 | { |
| 19524 | /* This is very limited, but it's enough to output |
| 19525 | pointers to member functions, as long as the |
| 19526 | referenced function is defined in the current |
| 19527 | translation unit. */ |
| 19528 | FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce) |
| 19529 | { |
| 19530 | tree val = ce->value; |
| 19531 | |
| 19532 | tree field = ce->index; |
| 19533 | |
| 19534 | if (val) |
| 19535 | STRIP_NOPS (val); |
| 19536 | |
| 19537 | if (!field || DECL_BIT_FIELD (field)) |
| 19538 | { |
| 19539 | expansion_failed (expr: loc, NULL_RTX, |
| 19540 | reason: "bitfield in record type constructor" ); |
| 19541 | size = offset = (unsigned HOST_WIDE_INT)-1; |
| 19542 | ret = NULL; |
| 19543 | break; |
| 19544 | } |
| 19545 | |
| 19546 | HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field)); |
| 19547 | unsigned HOST_WIDE_INT pos = int_byte_position (field); |
| 19548 | gcc_assert (pos + fieldsize <= size); |
| 19549 | if (pos < offset) |
| 19550 | { |
| 19551 | expansion_failed (expr: loc, NULL_RTX, |
| 19552 | reason: "out-of-order fields in record constructor" ); |
| 19553 | size = offset = (unsigned HOST_WIDE_INT)-1; |
| 19554 | ret = NULL; |
| 19555 | break; |
| 19556 | } |
| 19557 | if (pos > offset) |
| 19558 | { |
| 19559 | ret1 = new_loc_descr (op: DW_OP_piece, oprnd1: pos - offset, oprnd2: 0); |
| 19560 | add_loc_descr (list_head: &ret, descr: ret1); |
| 19561 | offset = pos; |
| 19562 | } |
| 19563 | if (val && fieldsize != 0) |
| 19564 | { |
| 19565 | ret1 = loc_descriptor_from_tree (val, want_address, context); |
| 19566 | if (!ret1) |
| 19567 | { |
| 19568 | expansion_failed (expr: loc, NULL_RTX, |
| 19569 | reason: "unsupported expression in field" ); |
| 19570 | size = offset = (unsigned HOST_WIDE_INT)-1; |
| 19571 | ret = NULL; |
| 19572 | break; |
| 19573 | } |
| 19574 | add_loc_descr (list_head: &ret, descr: ret1); |
| 19575 | } |
| 19576 | if (fieldsize) |
| 19577 | { |
| 19578 | ret1 = new_loc_descr (op: DW_OP_piece, oprnd1: fieldsize, oprnd2: 0); |
| 19579 | add_loc_descr (list_head: &ret, descr: ret1); |
| 19580 | offset = pos + fieldsize; |
| 19581 | } |
| 19582 | } |
| 19583 | |
| 19584 | if (offset != size) |
| 19585 | { |
| 19586 | ret1 = new_loc_descr (op: DW_OP_piece, oprnd1: size - offset, oprnd2: 0); |
| 19587 | add_loc_descr (list_head: &ret, descr: ret1); |
| 19588 | offset = size; |
| 19589 | } |
| 19590 | |
| 19591 | have_address = !!want_address; |
| 19592 | } |
| 19593 | else |
| 19594 | expansion_failed (expr: loc, NULL_RTX, |
| 19595 | reason: "constructor of non-record type" ); |
| 19596 | } |
| 19597 | else |
| 19598 | /* We can construct small constants here using int_loc_descriptor. */ |
| 19599 | expansion_failed (expr: loc, NULL_RTX, |
| 19600 | reason: "constructor or constant not in constant pool" ); |
| 19601 | break; |
| 19602 | |
| 19603 | case TRUTH_AND_EXPR: |
| 19604 | case TRUTH_ANDIF_EXPR: |
| 19605 | case BIT_AND_EXPR: |
| 19606 | op = DW_OP_and; |
| 19607 | goto do_binop; |
| 19608 | |
| 19609 | case TRUTH_XOR_EXPR: |
| 19610 | case BIT_XOR_EXPR: |
| 19611 | op = DW_OP_xor; |
| 19612 | goto do_binop; |
| 19613 | |
| 19614 | case TRUTH_OR_EXPR: |
| 19615 | case TRUTH_ORIF_EXPR: |
| 19616 | case BIT_IOR_EXPR: |
| 19617 | op = DW_OP_or; |
| 19618 | goto do_binop; |
| 19619 | |
| 19620 | case EXACT_DIV_EXPR: |
| 19621 | case FLOOR_DIV_EXPR: |
| 19622 | case TRUNC_DIV_EXPR: |
| 19623 | /* Turn a divide by a power of 2 into a shift when possible. */ |
| 19624 | if (TYPE_UNSIGNED (TREE_TYPE (loc)) |
| 19625 | && tree_fits_uhwi_p (TREE_OPERAND (loc, 1))) |
| 19626 | { |
| 19627 | const int log2 = exact_log2 (x: tree_to_uhwi (TREE_OPERAND (loc, 1))); |
| 19628 | if (log2 > 0) |
| 19629 | { |
| 19630 | list_ret |
| 19631 | = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address: 0, context); |
| 19632 | if (list_ret == 0) |
| 19633 | return 0; |
| 19634 | |
| 19635 | add_loc_descr_to_each (list: list_ret, ref: uint_loc_descriptor (i: log2)); |
| 19636 | add_loc_descr_to_each (list: list_ret, |
| 19637 | ref: new_loc_descr (op: DW_OP_shr, oprnd1: 0, oprnd2: 0)); |
| 19638 | break; |
| 19639 | } |
| 19640 | } |
| 19641 | |
| 19642 | /* fall through */ |
| 19643 | |
| 19644 | case CEIL_DIV_EXPR: |
| 19645 | case ROUND_DIV_EXPR: |
| 19646 | if (TYPE_UNSIGNED (TREE_TYPE (loc))) |
| 19647 | { |
| 19648 | const enum machine_mode mode = TYPE_MODE (TREE_TYPE (loc)); |
| 19649 | scalar_int_mode int_mode; |
| 19650 | |
| 19651 | if ((dwarf_strict && dwarf_version < 5) |
| 19652 | || !is_a <scalar_int_mode> (m: mode, result: &int_mode)) |
| 19653 | return 0; |
| 19654 | |
| 19655 | /* We can use a signed divide if the sign bit is not set. */ |
| 19656 | if (GET_MODE_SIZE (mode: int_mode) < DWARF2_ADDR_SIZE) |
| 19657 | { |
| 19658 | op = DW_OP_div; |
| 19659 | goto do_binop; |
| 19660 | } |
| 19661 | |
| 19662 | list_ret = typed_binop_from_tree (op: DW_OP_div, loc, |
| 19663 | type_die: base_type_for_mode (mode: int_mode, unsignedp: 1), |
| 19664 | mode: int_mode, context); |
| 19665 | break; |
| 19666 | } |
| 19667 | op = DW_OP_div; |
| 19668 | goto do_binop; |
| 19669 | |
| 19670 | case MINUS_EXPR: |
| 19671 | op = DW_OP_minus; |
| 19672 | goto do_binop; |
| 19673 | |
| 19674 | case FLOOR_MOD_EXPR: |
| 19675 | case CEIL_MOD_EXPR: |
| 19676 | case ROUND_MOD_EXPR: |
| 19677 | case TRUNC_MOD_EXPR: |
| 19678 | if (TYPE_UNSIGNED (TREE_TYPE (loc))) |
| 19679 | { |
| 19680 | op = DW_OP_mod; |
| 19681 | goto do_binop; |
| 19682 | } |
| 19683 | list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address: 0, context); |
| 19684 | list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address: 0, context); |
| 19685 | if (list_ret == 0 || list_ret1 == 0) |
| 19686 | return 0; |
| 19687 | |
| 19688 | add_loc_list (ret: &list_ret, list: list_ret1); |
| 19689 | if (list_ret == 0) |
| 19690 | return 0; |
| 19691 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op: DW_OP_over, oprnd1: 0, oprnd2: 0)); |
| 19692 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op: DW_OP_over, oprnd1: 0, oprnd2: 0)); |
| 19693 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op: DW_OP_div, oprnd1: 0, oprnd2: 0)); |
| 19694 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op: DW_OP_mul, oprnd1: 0, oprnd2: 0)); |
| 19695 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op: DW_OP_minus, oprnd1: 0, oprnd2: 0)); |
| 19696 | break; |
| 19697 | |
| 19698 | case MULT_EXPR: |
| 19699 | op = DW_OP_mul; |
| 19700 | goto do_binop; |
| 19701 | |
| 19702 | case LSHIFT_EXPR: |
| 19703 | op = DW_OP_shl; |
| 19704 | goto do_binop; |
| 19705 | |
| 19706 | case RSHIFT_EXPR: |
| 19707 | op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra); |
| 19708 | goto do_binop; |
| 19709 | |
| 19710 | case POINTER_PLUS_EXPR: |
| 19711 | case PLUS_EXPR: |
| 19712 | do_plus: |
| 19713 | if (tree_fits_shwi_p (TREE_OPERAND (loc, 1))) |
| 19714 | { |
| 19715 | /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be |
| 19716 | smarter to encode their opposite. The DW_OP_plus_uconst operation |
| 19717 | takes 1 + X bytes, X being the size of the ULEB128 addend. On the |
| 19718 | other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y |
| 19719 | bytes, Y being the size of the operation that pushes the opposite |
| 19720 | of the addend. So let's choose the smallest representation. */ |
| 19721 | const tree tree_addend = TREE_OPERAND (loc, 1); |
| 19722 | offset_int wi_addend; |
| 19723 | HOST_WIDE_INT shwi_addend; |
| 19724 | dw_loc_descr_ref loc_naddend; |
| 19725 | |
| 19726 | list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address: 0, context); |
| 19727 | if (list_ret == 0) |
| 19728 | return 0; |
| 19729 | |
| 19730 | /* Try to get the literal to push. It is the opposite of the addend, |
| 19731 | so as we rely on wrapping during DWARF evaluation, first decode |
| 19732 | the literal as a "DWARF-sized" signed number. */ |
| 19733 | wi_addend = wi::to_offset (t: tree_addend); |
| 19734 | wi_addend = wi::sext (x: wi_addend, DWARF2_ADDR_SIZE * 8); |
| 19735 | shwi_addend = wi_addend.to_shwi (); |
| 19736 | loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT)) |
| 19737 | ? int_loc_descriptor (poly_i: -shwi_addend) |
| 19738 | : NULL; |
| 19739 | |
| 19740 | if (loc_naddend != NULL |
| 19741 | && ((unsigned) size_of_uleb128 (shwi_addend) |
| 19742 | > size_of_loc_descr (loc: loc_naddend))) |
| 19743 | { |
| 19744 | add_loc_descr_to_each (list: list_ret, ref: loc_naddend); |
| 19745 | add_loc_descr_to_each (list: list_ret, |
| 19746 | ref: new_loc_descr (op: DW_OP_minus, oprnd1: 0, oprnd2: 0)); |
| 19747 | } |
| 19748 | else |
| 19749 | { |
| 19750 | for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; ) |
| 19751 | { |
| 19752 | loc_naddend = loc_cur; |
| 19753 | loc_cur = loc_cur->dw_loc_next; |
| 19754 | ggc_free (loc_naddend); |
| 19755 | } |
| 19756 | loc_list_plus_const (list_head: list_ret, offset: wi_addend.to_shwi ()); |
| 19757 | } |
| 19758 | break; |
| 19759 | } |
| 19760 | |
| 19761 | op = DW_OP_plus; |
| 19762 | goto do_binop; |
| 19763 | |
| 19764 | case LE_EXPR: |
| 19765 | op = DW_OP_le; |
| 19766 | goto do_comp_binop; |
| 19767 | |
| 19768 | case GE_EXPR: |
| 19769 | op = DW_OP_ge; |
| 19770 | goto do_comp_binop; |
| 19771 | |
| 19772 | case LT_EXPR: |
| 19773 | op = DW_OP_lt; |
| 19774 | goto do_comp_binop; |
| 19775 | |
| 19776 | case GT_EXPR: |
| 19777 | op = DW_OP_gt; |
| 19778 | goto do_comp_binop; |
| 19779 | |
| 19780 | do_comp_binop: |
| 19781 | if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0)))) |
| 19782 | { |
| 19783 | const enum machine_mode mode |
| 19784 | = TYPE_MODE (TREE_TYPE (TREE_OPERAND (loc, 0))); |
| 19785 | scalar_int_mode int_mode; |
| 19786 | |
| 19787 | /* We can use a signed comparison if the sign bit is not set. */ |
| 19788 | if (is_a <scalar_int_mode> (m: mode, result: &int_mode) |
| 19789 | && GET_MODE_SIZE (mode: int_mode) < DWARF2_ADDR_SIZE) |
| 19790 | goto do_binop; |
| 19791 | |
| 19792 | list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context); |
| 19793 | list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context); |
| 19794 | list_ret = loc_list_from_uint_comparison (left: list_ret, right: list_ret1, |
| 19795 | TREE_CODE (loc)); |
| 19796 | break; |
| 19797 | } |
| 19798 | else |
| 19799 | goto do_binop; |
| 19800 | |
| 19801 | case EQ_EXPR: |
| 19802 | op = DW_OP_eq; |
| 19803 | goto do_binop; |
| 19804 | |
| 19805 | case NE_EXPR: |
| 19806 | op = DW_OP_ne; |
| 19807 | goto do_binop; |
| 19808 | |
| 19809 | do_binop: |
| 19810 | list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address: 0, context); |
| 19811 | list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address: 0, context); |
| 19812 | if (list_ret == 0 || list_ret1 == 0) |
| 19813 | return 0; |
| 19814 | |
| 19815 | add_loc_list (ret: &list_ret, list: list_ret1); |
| 19816 | if (list_ret == 0) |
| 19817 | return 0; |
| 19818 | |
| 19819 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
| 19820 | break; |
| 19821 | |
| 19822 | case TRUTH_NOT_EXPR: |
| 19823 | list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address: 0, context); |
| 19824 | if (list_ret == 0) |
| 19825 | return 0; |
| 19826 | |
| 19827 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op: DW_OP_lit0, oprnd1: 0, oprnd2: 0)); |
| 19828 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op: DW_OP_eq, oprnd1: 0, oprnd2: 0)); |
| 19829 | break; |
| 19830 | |
| 19831 | case BIT_NOT_EXPR: |
| 19832 | op = DW_OP_not; |
| 19833 | goto do_unop; |
| 19834 | |
| 19835 | case ABS_EXPR: |
| 19836 | op = DW_OP_abs; |
| 19837 | goto do_unop; |
| 19838 | |
| 19839 | case NEGATE_EXPR: |
| 19840 | op = DW_OP_neg; |
| 19841 | goto do_unop; |
| 19842 | |
| 19843 | do_unop: |
| 19844 | list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address: 0, context); |
| 19845 | if (list_ret == 0) |
| 19846 | return 0; |
| 19847 | |
| 19848 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
| 19849 | break; |
| 19850 | |
| 19851 | case MIN_EXPR: |
| 19852 | case MAX_EXPR: |
| 19853 | { |
| 19854 | const enum tree_code code = |
| 19855 | TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR; |
| 19856 | |
| 19857 | loc = build3 (COND_EXPR, TREE_TYPE (loc), |
| 19858 | build2 (code, integer_type_node, |
| 19859 | TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)), |
| 19860 | TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0)); |
| 19861 | } |
| 19862 | |
| 19863 | /* fall through */ |
| 19864 | |
| 19865 | case COND_EXPR: |
| 19866 | { |
| 19867 | dw_loc_descr_ref lhs |
| 19868 | = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context); |
| 19869 | dw_loc_list_ref rhs |
| 19870 | = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), want_address: 0, context); |
| 19871 | dw_loc_descr_ref bra_node, jump_node, tmp; |
| 19872 | |
| 19873 | /* DW_OP_bra is branch-on-nonzero so avoid doing useless work. */ |
| 19874 | if (TREE_CODE (TREE_OPERAND (loc, 0)) == NE_EXPR |
| 19875 | && integer_zerop (TREE_OPERAND (TREE_OPERAND (loc, 0), 1))) |
| 19876 | list_ret |
| 19877 | = loc_list_from_tree_1 (TREE_OPERAND (TREE_OPERAND (loc, 0), 0), |
| 19878 | want_address: 0, context); |
| 19879 | /* Likewise, swap the operands for a logically negated condition. */ |
| 19880 | else if (TREE_CODE (TREE_OPERAND (loc, 0)) == TRUTH_NOT_EXPR) |
| 19881 | { |
| 19882 | lhs = loc_descriptor_from_tree (TREE_OPERAND (loc, 2), 0, context); |
| 19883 | rhs = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address: 0, context); |
| 19884 | list_ret |
| 19885 | = loc_list_from_tree_1 (TREE_OPERAND (TREE_OPERAND (loc, 0), 0), |
| 19886 | want_address: 0, context); |
| 19887 | } |
| 19888 | else |
| 19889 | list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address: 0, context); |
| 19890 | if (list_ret == 0 || lhs == 0 || rhs == 0) |
| 19891 | return 0; |
| 19892 | |
| 19893 | bra_node = new_loc_descr (op: DW_OP_bra, oprnd1: 0, oprnd2: 0); |
| 19894 | add_loc_descr_to_each (list: list_ret, ref: bra_node); |
| 19895 | |
| 19896 | add_loc_list (ret: &list_ret, list: rhs); |
| 19897 | jump_node = new_loc_descr (op: DW_OP_skip, oprnd1: 0, oprnd2: 0); |
| 19898 | add_loc_descr_to_each (list: list_ret, ref: jump_node); |
| 19899 | |
| 19900 | add_loc_descr_to_each (list: list_ret, ref: lhs); |
| 19901 | bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc; |
| 19902 | bra_node->dw_loc_oprnd1.v.val_loc = lhs; |
| 19903 | |
| 19904 | /* ??? Need a node to point the skip at. Use a nop. */ |
| 19905 | tmp = new_loc_descr (op: DW_OP_nop, oprnd1: 0, oprnd2: 0); |
| 19906 | add_loc_descr_to_each (list: list_ret, ref: tmp); |
| 19907 | jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc; |
| 19908 | jump_node->dw_loc_oprnd1.v.val_loc = tmp; |
| 19909 | } |
| 19910 | break; |
| 19911 | |
| 19912 | case FIX_TRUNC_EXPR: |
| 19913 | return 0; |
| 19914 | |
| 19915 | case COMPOUND_LITERAL_EXPR: |
| 19916 | return loc_list_from_tree_1 (COMPOUND_LITERAL_EXPR_DECL (loc), |
| 19917 | want_address: 0, context); |
| 19918 | |
| 19919 | default: |
| 19920 | /* Leave front-end specific codes as simply unknown. This comes |
| 19921 | up, for instance, with the C STMT_EXPR. */ |
| 19922 | if ((unsigned int) TREE_CODE (loc) |
| 19923 | >= (unsigned int) LAST_AND_UNUSED_TREE_CODE) |
| 19924 | { |
| 19925 | expansion_failed (expr: loc, NULL_RTX, |
| 19926 | reason: "language specific tree node" ); |
| 19927 | return 0; |
| 19928 | } |
| 19929 | |
| 19930 | /* Otherwise this is a generic code; we should just lists all of |
| 19931 | these explicitly. We forgot one. */ |
| 19932 | if (flag_checking) |
| 19933 | gcc_unreachable (); |
| 19934 | |
| 19935 | /* In a release build, we want to degrade gracefully: better to |
| 19936 | generate incomplete debugging information than to crash. */ |
| 19937 | return NULL; |
| 19938 | } |
| 19939 | |
| 19940 | if (!ret && !list_ret) |
| 19941 | return 0; |
| 19942 | |
| 19943 | /* Implement wrap-around arithmetics for small integer types. */ |
| 19944 | if ((TREE_CODE (loc) == PLUS_EXPR |
| 19945 | || TREE_CODE (loc) == MINUS_EXPR |
| 19946 | || TREE_CODE (loc) == MULT_EXPR |
| 19947 | || TREE_CODE (loc) == NEGATE_EXPR |
| 19948 | || TREE_CODE (loc) == LSHIFT_EXPR) |
| 19949 | && INTEGRAL_TYPE_P (TREE_TYPE (loc)) |
| 19950 | && TYPE_OVERFLOW_WRAPS (TREE_TYPE (loc))) |
| 19951 | { |
| 19952 | const enum machine_mode mode = TYPE_MODE (TREE_TYPE (loc)); |
| 19953 | scalar_int_mode int_mode; |
| 19954 | |
| 19955 | if (is_a <scalar_int_mode> (m: mode, result: &int_mode) |
| 19956 | && GET_MODE_SIZE (mode: int_mode) < DWARF2_ADDR_SIZE) |
| 19957 | { |
| 19958 | const unsigned HOST_WIDE_INT mask |
| 19959 | = (HOST_WIDE_INT_1U << GET_MODE_BITSIZE (mode: int_mode)) - 1; |
| 19960 | add_loc_descr_to_each (list: list_ret, ref: uint_loc_descriptor (i: mask)); |
| 19961 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0)); |
| 19962 | } |
| 19963 | } |
| 19964 | |
| 19965 | if (want_address == 2 && !have_address |
| 19966 | && (dwarf_version >= 4 || !dwarf_strict)) |
| 19967 | { |
| 19968 | if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE) |
| 19969 | { |
| 19970 | expansion_failed (expr: loc, NULL_RTX, |
| 19971 | reason: "DWARF address size mismatch" ); |
| 19972 | return 0; |
| 19973 | } |
| 19974 | if (ret) |
| 19975 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_stack_value, oprnd1: 0, oprnd2: 0)); |
| 19976 | else |
| 19977 | add_loc_descr_to_each (list: list_ret, |
| 19978 | ref: new_loc_descr (op: DW_OP_stack_value, oprnd1: 0, oprnd2: 0)); |
| 19979 | have_address = 1; |
| 19980 | } |
| 19981 | /* Show if we can't fill the request for an address. */ |
| 19982 | if (want_address && !have_address) |
| 19983 | { |
| 19984 | expansion_failed (expr: loc, NULL_RTX, |
| 19985 | reason: "Want address and only have value" ); |
| 19986 | return 0; |
| 19987 | } |
| 19988 | |
| 19989 | gcc_assert (!ret || !list_ret); |
| 19990 | |
| 19991 | /* If we've got an address and don't want one, dereference. */ |
| 19992 | if (!want_address && have_address) |
| 19993 | { |
| 19994 | HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc)); |
| 19995 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (loc)); |
| 19996 | scalar_int_mode int_mode; |
| 19997 | dw_die_ref type_die; |
| 19998 | dw_loc_descr_ref deref; |
| 19999 | |
| 20000 | /* Bail out if the size is variable or greater than DWARF2_ADDR_SIZE. */ |
| 20001 | if (size < 0 || size > DWARF2_ADDR_SIZE) |
| 20002 | { |
| 20003 | expansion_failed (expr: loc, NULL_RTX, reason: "DWARF address size mismatch" ); |
| 20004 | return 0; |
| 20005 | } |
| 20006 | |
| 20007 | /* If it is equal to DWARF2_ADDR_SIZE, extension does not matter. */ |
| 20008 | else if (size == DWARF2_ADDR_SIZE) |
| 20009 | deref = new_loc_descr (op: DW_OP_deref, oprnd1: size, oprnd2: 0); |
| 20010 | |
| 20011 | /* If it is lower than DWARF2_ADDR_SIZE, DW_OP_deref_size will zero- |
| 20012 | extend the value, which is really OK for unsigned types only. */ |
| 20013 | else if (!(context && context->strict_signedness) |
| 20014 | || TYPE_UNSIGNED (TREE_TYPE (loc)) |
| 20015 | || (dwarf_strict && dwarf_version < 5) |
| 20016 | || !is_a <scalar_int_mode> (m: mode, result: &int_mode) |
| 20017 | || !(type_die = base_type_for_mode (mode, unsignedp: false))) |
| 20018 | deref = new_loc_descr (op: DW_OP_deref_size, oprnd1: size, oprnd2: 0); |
| 20019 | |
| 20020 | /* Use DW_OP_deref_type for signed integral types if possible, but |
| 20021 | convert back to the generic type to avoid type mismatches later. */ |
| 20022 | else |
| 20023 | { |
| 20024 | deref = new_loc_descr (op: dwarf_OP (op: DW_OP_deref_type), oprnd1: size, oprnd2: 0); |
| 20025 | deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref; |
| 20026 | deref->dw_loc_oprnd2.v.val_die_ref.die = type_die; |
| 20027 | deref->dw_loc_oprnd2.v.val_die_ref.external = 0; |
| 20028 | add_loc_descr (list_head: &deref, |
| 20029 | descr: new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0)); |
| 20030 | } |
| 20031 | |
| 20032 | /* Deal with bit-fields whose size is not a multiple of a byte. */ |
| 20033 | if (TREE_CODE (loc) == COMPONENT_REF |
| 20034 | && DECL_BIT_FIELD (TREE_OPERAND (loc, 1))) |
| 20035 | { |
| 20036 | const unsigned HOST_WIDE_INT bitsize |
| 20037 | = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (loc, 1))); |
| 20038 | if (bitsize < (unsigned HOST_WIDE_INT)size * BITS_PER_UNIT) |
| 20039 | { |
| 20040 | if (TYPE_UNSIGNED (TREE_TYPE (loc))) |
| 20041 | { |
| 20042 | if (BYTES_BIG_ENDIAN) |
| 20043 | { |
| 20044 | const unsigned HOST_WIDE_INT shift |
| 20045 | = size * BITS_PER_UNIT - bitsize; |
| 20046 | add_loc_descr (list_head: &deref, descr: uint_loc_descriptor (i: shift)); |
| 20047 | add_loc_descr (list_head: &deref, descr: new_loc_descr (op: DW_OP_shr, oprnd1: 0, oprnd2: 0)); |
| 20048 | } |
| 20049 | else |
| 20050 | { |
| 20051 | const unsigned HOST_WIDE_INT mask |
| 20052 | = (HOST_WIDE_INT_1U << bitsize) - 1; |
| 20053 | add_loc_descr (list_head: &deref, descr: uint_loc_descriptor (i: mask)); |
| 20054 | add_loc_descr (list_head: &deref, descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0)); |
| 20055 | } |
| 20056 | } |
| 20057 | else |
| 20058 | { |
| 20059 | const unsigned HOST_WIDE_INT shiftr |
| 20060 | = DWARF2_ADDR_SIZE * BITS_PER_UNIT - bitsize; |
| 20061 | const unsigned HOST_WIDE_INT shiftl |
| 20062 | = BYTES_BIG_ENDIAN |
| 20063 | ? (DWARF2_ADDR_SIZE - size) * BITS_PER_UNIT |
| 20064 | : shiftr; |
| 20065 | if (shiftl > 0) |
| 20066 | { |
| 20067 | add_loc_descr (list_head: &deref, descr: uint_loc_descriptor (i: shiftl)); |
| 20068 | add_loc_descr (list_head: &deref, descr: new_loc_descr (op: DW_OP_shl, oprnd1: 0, oprnd2: 0)); |
| 20069 | } |
| 20070 | add_loc_descr (list_head: &deref, descr: uint_loc_descriptor (i: shiftr)); |
| 20071 | add_loc_descr (list_head: &deref, descr: new_loc_descr (op: DW_OP_shra, oprnd1: 0, oprnd2: 0)); |
| 20072 | } |
| 20073 | } |
| 20074 | } |
| 20075 | |
| 20076 | if (ret) |
| 20077 | add_loc_descr (list_head: &ret, descr: deref); |
| 20078 | else |
| 20079 | add_loc_descr_to_each (list: list_ret, ref: deref); |
| 20080 | } |
| 20081 | |
| 20082 | if (ret) |
| 20083 | list_ret = new_loc_list (expr: ret, NULL, vbegin: 0, NULL, vend: 0, NULL); |
| 20084 | |
| 20085 | return list_ret; |
| 20086 | } |
| 20087 | |
| 20088 | /* Likewise, but strip useless DW_OP_nop operations in the resulting |
| 20089 | expressions. */ |
| 20090 | |
| 20091 | static dw_loc_list_ref |
| 20092 | loc_list_from_tree (tree loc, int want_address, |
| 20093 | struct loc_descr_context *context) |
| 20094 | { |
| 20095 | dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context); |
| 20096 | |
| 20097 | for (dw_loc_list_ref loc_cur = result; |
| 20098 | loc_cur != NULL; loc_cur = loc_cur->dw_loc_next) |
| 20099 | loc_descr_without_nops (loc&: loc_cur->expr); |
| 20100 | return result; |
| 20101 | } |
| 20102 | |
| 20103 | /* Same as above but return only single location expression. */ |
| 20104 | static dw_loc_descr_ref |
| 20105 | loc_descriptor_from_tree (tree loc, int want_address, |
| 20106 | struct loc_descr_context *context) |
| 20107 | { |
| 20108 | dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context); |
| 20109 | if (!ret) |
| 20110 | return NULL; |
| 20111 | if (ret->dw_loc_next) |
| 20112 | { |
| 20113 | expansion_failed (expr: loc, NULL_RTX, |
| 20114 | reason: "Location list where only loc descriptor needed" ); |
| 20115 | return NULL; |
| 20116 | } |
| 20117 | return ret->expr; |
| 20118 | } |
| 20119 | |
| 20120 | /* Given a pointer to what is assumed to be a FIELD_DECL node, return a |
| 20121 | pointer to the declared type for the relevant field variable, or return |
| 20122 | `integer_type_node' if the given node turns out to be an |
| 20123 | ERROR_MARK node. */ |
| 20124 | |
| 20125 | static inline tree |
| 20126 | field_type (const_tree decl) |
| 20127 | { |
| 20128 | tree type; |
| 20129 | |
| 20130 | if (TREE_CODE (decl) == ERROR_MARK) |
| 20131 | return integer_type_node; |
| 20132 | |
| 20133 | type = DECL_BIT_FIELD_TYPE (decl); |
| 20134 | if (type == NULL_TREE) |
| 20135 | type = TREE_TYPE (decl); |
| 20136 | |
| 20137 | return type; |
| 20138 | } |
| 20139 | |
| 20140 | /* Given a pointer to a tree node, return the alignment in bits for |
| 20141 | it, or else return BITS_PER_WORD if the node actually turns out to |
| 20142 | be an ERROR_MARK node. */ |
| 20143 | |
| 20144 | static inline unsigned |
| 20145 | simple_type_align_in_bits (const_tree type) |
| 20146 | { |
| 20147 | return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD; |
| 20148 | } |
| 20149 | |
| 20150 | static inline unsigned |
| 20151 | simple_decl_align_in_bits (const_tree decl) |
| 20152 | { |
| 20153 | return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD; |
| 20154 | } |
| 20155 | |
| 20156 | /* Return the result of rounding T up to ALIGN. */ |
| 20157 | |
| 20158 | static inline offset_int |
| 20159 | round_up_to_align (const offset_int &t, unsigned int align) |
| 20160 | { |
| 20161 | return wi::udiv_trunc (x: t + align - 1, y: align) * align; |
| 20162 | } |
| 20163 | |
/* Helper structure for RECORD_TYPE processing.  */
struct vlr_context
{
  /* Root RECORD_TYPE.  It is needed to generate data member location
     descriptions in variable-length records (VLR), but also to cope with
     variants, which are composed of nested structures multiplexed with
     QUAL_UNION_TYPE nodes.  Each time such a structure is passed to a
     function processing a FIELD_DECL, it is required to be non-null.  */
  tree struct_type;

  /* When generating a variant part in a RECORD_TYPE (i.e. a nested
     QUAL_UNION_TYPE), this holds an expression that computes the offset for
     this variant part as part of the root record (in storage units).  For
     regular records, it must be NULL_TREE.  */
  tree variant_part_offset;
};
| 20180 | |
| 20181 | /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest |
| 20182 | addressed byte of the "containing object" for the given FIELD_DECL. If |
| 20183 | possible, return a native constant through CST_OFFSET (in which case NULL is |
| 20184 | returned); otherwise return a DWARF expression that computes the offset. |
| 20185 | |
| 20186 | Set *CST_OFFSET to 0 and return NULL if we are unable to determine what |
| 20187 | that offset is, either because the argument turns out to be a pointer to an |
| 20188 | ERROR_MARK node, or because the offset expression is too complex for us. |
| 20189 | |
| 20190 | CTX is required: see the comment for VLR_CONTEXT. */ |
| 20191 | |
static dw_loc_descr_ref
field_byte_offset (const_tree decl, struct vlr_context *ctx,
                   HOST_WIDE_INT *cst_offset)
{
  tree tree_result;
  dw_loc_list_ref loc_result;

  /* Default to "no constant offset known" until proven otherwise.  */
  *cst_offset = 0;

  if (TREE_CODE (decl) == ERROR_MARK)
    return NULL;
  else
    gcc_assert (TREE_CODE (decl) == FIELD_DECL);

  /* We cannot handle variable bit offsets at the moment, so abort if it's the
     case.  */
  if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
    return NULL;

  /* We used to handle only constant offsets in all cases.  Now, we handle
     properly dynamic byte offsets only when PCC bitfield type doesn't
     matter.  */
  if (PCC_BITFIELD_TYPE_MATTERS
      && DECL_BIT_FIELD_TYPE (decl)
      && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
    {
      offset_int object_offset_in_bits;
      offset_int object_offset_in_bytes;
      offset_int bitpos_int;
      tree type;
      tree field_size_tree;
      offset_int deepest_bitpos;
      offset_int field_size_in_bits;
      unsigned int type_align_in_bits;
      unsigned int decl_align_in_bits;
      offset_int type_size_in_bits;

      bitpos_int = wi::to_offset (t: bit_position (decl));
      type = field_type (decl);
      type_size_in_bits = offset_int_type_size_in_bits (type);
      type_align_in_bits = simple_type_align_in_bits (type);

      field_size_tree = DECL_SIZE (decl);

      /* The size could be unspecified if there was an error, or for
         a flexible array member.  */
      if (!field_size_tree)
        field_size_tree = bitsize_zero_node;

      /* If the size of the field is not constant, use the type size.  */
      if (TREE_CODE (field_size_tree) == INTEGER_CST)
        field_size_in_bits = wi::to_offset (t: field_size_tree);
      else
        field_size_in_bits = type_size_in_bits;

      decl_align_in_bits = simple_decl_align_in_bits (decl);

      /* The GCC front-end doesn't make any attempt to keep track of the
         starting bit offset (relative to the start of the containing
         structure type) of the hypothetical "containing object" for a
         bit-field.  Thus, when computing the byte offset value for the
         start of the "containing object" of a bit-field, we must deduce
         this information on our own.  This can be rather tricky to do in
         some cases.  For example, handling the following structure type
         definition when compiling for an i386/i486 target (which only
         aligns long long's to 32-bit boundaries) can be very tricky:

                 struct S { int field1; long long field2:31; };

         Fortunately, there is a simple rule-of-thumb which can be used
         in such cases.  When compiling for an i386/i486, GCC will
         allocate 8 bytes for the structure shown above.  It decides to
         do this based upon one simple rule for bit-field allocation.
         GCC allocates each "containing object" for each bit-field at
         the first (i.e. lowest addressed) legitimate alignment boundary
         (based upon the required minimum alignment for the declared
         type of the field) which it can possibly use, subject to the
         condition that there is still enough available space remaining
         in the containing object (when allocated at the selected point)
         to fully accommodate all of the bits of the bit-field itself.

         This simple rule makes it obvious why GCC allocates 8 bytes for
         each object of the structure type shown above.  When looking
         for a place to allocate the "containing object" for `field2',
         the compiler simply tries to allocate a 64-bit "containing
         object" at each successive 32-bit boundary (starting at zero)
         until it finds a place to allocate that 64- bit field such that
         at least 31 contiguous (and previously unallocated) bits remain
         within that selected 64 bit field.  (As it turns out, for the
         example above, the compiler finds it is OK to allocate the
         "containing object" 64-bit field at bit-offset zero within the
         structure type.)

         Here we attempt to work backwards from the limited set of facts
         we're given, and we try to deduce from those facts, where GCC
         must have believed that the containing object started (within
         the structure type).  The value we deduce is then used (by the
         callers of this routine) to generate DW_AT_location and
         DW_AT_bit_offset attributes for fields (both bit-fields and, in
         the case of DW_AT_location, regular fields as well).  */

      /* Figure out the bit-distance from the start of the structure to
         the "deepest" bit of the bit-field.  */
      deepest_bitpos = bitpos_int + field_size_in_bits;

      /* This is the tricky part.  Use some fancy footwork to deduce
         where the lowest addressed bit of the containing object must
         be.  */
      object_offset_in_bits = deepest_bitpos - type_size_in_bits;

      /* Round up to type_align by default.  This works best for
         bitfields.  */
      object_offset_in_bits
        = round_up_to_align (t: object_offset_in_bits, align: type_align_in_bits);

      /* The containing object cannot start after the bit-field itself:
         if type alignment pushed it past BITPOS_INT, redo the computation
         with the declaration alignment instead.  */
      if (wi::gtu_p (x: object_offset_in_bits, y: bitpos_int))
        {
          object_offset_in_bits = deepest_bitpos - type_size_in_bits;

          /* Round up to decl_align instead.  */
          object_offset_in_bits
            = round_up_to_align (t: object_offset_in_bits, align: decl_align_in_bits);
        }

      /* Convert the container's starting bit offset into bytes.  */
      object_offset_in_bytes
        = wi::lrshift (x: object_offset_in_bits, LOG2_BITS_PER_UNIT);
      /* Outside of a variant part the byte offset is a plain constant
         which the caller can emit directly.  */
      if (ctx->variant_part_offset == NULL_TREE)
        {
          *cst_offset = object_offset_in_bytes.to_shwi ();
          return NULL;
        }
      tree_result = wide_int_to_tree (sizetype, cst: object_offset_in_bytes);
    }
  else
    tree_result = byte_position (decl);

  /* Inside a variant part, the field's position is relative to the variant:
     add the variant part's own offset within the root record.  */
  if (ctx->variant_part_offset != NULL_TREE)
    tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
			       ctx->variant_part_offset, tree_result);

  /* If the byte offset is a constant, it's simpler to handle a native
     constant rather than a DWARF expression.  */
  if (TREE_CODE (tree_result) == INTEGER_CST)
    {
      *cst_offset = wi::to_offset (t: tree_result).to_shwi ();
      return NULL;
    }

  /* Otherwise build a DWARF expression computing the offset at run time,
     evaluated in the context of the root record type.  */
  struct loc_descr_context loc_ctx = {
    .context_type: ctx->struct_type, /* context_type */
    NULL_TREE,	/* base_decl */
    NULL,	/* dpi */
    .placeholder_arg: false,	/* placeholder_arg */
    .placeholder_seen: false,	/* placeholder_seen */
    .strict_signedness: false	/* strict_signedness */
  };
  loc_result = loc_list_from_tree (loc: tree_result, want_address: 0, context: &loc_ctx);

  /* We want a DWARF expression: abort if we only have a location list with
     multiple elements.  */
  if (!loc_result || !single_element_loc_list_p (list: loc_result))
    return NULL;
  else
    return loc_result->expr;
}
| 20357 | |
| 20358 | /* The following routines define various Dwarf attributes and any data |
| 20359 | associated with them. */ |
| 20360 | |
| 20361 | /* Add a location description attribute value to a DIE. |
| 20362 | |
| 20363 | This emits location attributes suitable for whole variables and |
| 20364 | whole parameters. Note that the location attributes for struct fields are |
| 20365 | generated by the routine `data_member_location_attribute' below. */ |
| 20366 | |
| 20367 | static inline void |
| 20368 | add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind, |
| 20369 | dw_loc_list_ref descr) |
| 20370 | { |
| 20371 | bool check_no_locviews = true; |
| 20372 | if (descr == 0) |
| 20373 | return; |
| 20374 | if (single_element_loc_list_p (list: descr)) |
| 20375 | add_AT_loc (die, attr_kind, loc: descr->expr); |
| 20376 | else |
| 20377 | { |
| 20378 | add_AT_loc_list (die, attr_kind, loc_list: descr); |
| 20379 | gcc_assert (descr->ll_symbol); |
| 20380 | if (attr_kind == DW_AT_location && descr->vl_symbol |
| 20381 | && dwarf2out_locviews_in_attribute ()) |
| 20382 | { |
| 20383 | add_AT_view_list (die, attr_kind: DW_AT_GNU_locviews); |
| 20384 | check_no_locviews = false; |
| 20385 | } |
| 20386 | } |
| 20387 | |
| 20388 | if (check_no_locviews) |
| 20389 | gcc_assert (!get_AT (die, DW_AT_GNU_locviews)); |
| 20390 | } |
| 20391 | |
| 20392 | /* Add DW_AT_accessibility attribute to DIE if needed. */ |
| 20393 | |
| 20394 | static void |
| 20395 | add_accessibility_attribute (dw_die_ref die, tree decl) |
| 20396 | { |
| 20397 | /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type |
| 20398 | children, otherwise the default is DW_ACCESS_public. In DWARF2 |
| 20399 | the default has always been DW_ACCESS_public. */ |
| 20400 | if (TREE_PROTECTED (decl)) |
| 20401 | add_AT_unsigned (die, attr_kind: DW_AT_accessibility, unsigned_val: DW_ACCESS_protected); |
| 20402 | else if (TREE_PRIVATE (decl)) |
| 20403 | { |
| 20404 | if (dwarf_version == 2 |
| 20405 | || die->die_parent == NULL |
| 20406 | || die->die_parent->die_tag != DW_TAG_class_type) |
| 20407 | add_AT_unsigned (die, attr_kind: DW_AT_accessibility, unsigned_val: DW_ACCESS_private); |
| 20408 | } |
| 20409 | else if (dwarf_version > 2 |
| 20410 | && die->die_parent |
| 20411 | && die->die_parent->die_tag == DW_TAG_class_type) |
| 20412 | add_AT_unsigned (die, attr_kind: DW_AT_accessibility, unsigned_val: DW_ACCESS_public); |
| 20413 | } |
| 20414 | |
| 20415 | /* Attach the specialized form of location attribute used for data members of |
| 20416 | struct and union types. In the special case of a FIELD_DECL node which |
| 20417 | represents a bit-field, the "offset" part of this special location |
| 20418 | descriptor must indicate the distance in bytes from the lowest-addressed |
| 20419 | byte of the containing struct or union type to the lowest-addressed byte of |
| 20420 | the "containing object" for the bit-field. (See the `field_byte_offset' |
| 20421 | function above). |
| 20422 | |
| 20423 | For any given bit-field, the "containing object" is a hypothetical object |
| 20424 | (of some integral or enum type) within which the given bit-field lives. The |
| 20425 | type of this hypothetical "containing object" is always the same as the |
| 20426 | declared type of the individual bit-field itself (for GCC anyway... the |
| 20427 | DWARF spec doesn't actually mandate this). Note that it is the size (in |
| 20428 | bytes) of the hypothetical "containing object" which will be given in the |
| 20429 | DW_AT_byte_size attribute for this bit-field. (See the |
| 20430 | `byte_size_attribute' function below.) It is also used when calculating the |
| 20431 | value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute' |
| 20432 | function below.) |
| 20433 | |
| 20434 | CTX is required: see the comment for VLR_CONTEXT. */ |
| 20435 | |
static void
add_data_member_location_attribute (dw_die_ref die,
				    tree decl,
				    struct vlr_context *ctx)
{
  HOST_WIDE_INT offset;
  dw_loc_descr_ref loc_descr = 0;

  if (TREE_CODE (decl) == TREE_BINFO)
    {
      /* We're working on the TAG_inheritance for a base class.  */
      if (BINFO_VIRTUAL_P (decl) && is_cxx ())
	{
	  /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
	     aren't at a fixed offset from all (sub)objects of the same
	     type.  We need to extract the appropriate offset from our
	     vtable.  The following dwarf expression means

	       BaseAddr = ObAddr + *((*ObAddr) - Offset)

	     This is specific to the V3 ABI, of course.  */

	  dw_loc_descr_ref tmp;

	  /* Make a copy of the object address.  */
	  tmp = new_loc_descr (op: DW_OP_dup, oprnd1: 0, oprnd2: 0);
	  add_loc_descr (list_head: &loc_descr, descr: tmp);

	  /* Extract the vtable address.  */
	  tmp = new_loc_descr (op: DW_OP_deref, oprnd1: 0, oprnd2: 0);
	  add_loc_descr (list_head: &loc_descr, descr: tmp);

	  /* Calculate the address of the offset.  */
	  offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
	  /* The vbase offset slot sits below the vtable's address point,
	     so the stored field offset must be negative.  */
	  gcc_assert (offset < 0);

	  tmp = int_loc_descriptor (poly_i: -offset);
	  add_loc_descr (list_head: &loc_descr, descr: tmp);
	  tmp = new_loc_descr (op: DW_OP_minus, oprnd1: 0, oprnd2: 0);
	  add_loc_descr (list_head: &loc_descr, descr: tmp);

	  /* Extract the offset.  */
	  tmp = new_loc_descr (op: DW_OP_deref, oprnd1: 0, oprnd2: 0);
	  add_loc_descr (list_head: &loc_descr, descr: tmp);

	  /* Add it to the object address.  */
	  tmp = new_loc_descr (op: DW_OP_plus, oprnd1: 0, oprnd2: 0);
	  add_loc_descr (list_head: &loc_descr, descr: tmp);
	}
      else
	offset = tree_to_shwi (BINFO_OFFSET (decl));
    }
  else
    {
      /* Regular FIELD_DECL: either a constant byte offset (OFFSET) or a
	 DWARF expression (LOC_DESCR), but never both.  */
      loc_descr = field_byte_offset (decl, ctx, cst_offset: &offset);

      if (!loc_descr)
	;

      /* If loc_descr is available, then we know the offset is dynamic.  */
      else if (gnat_encodings == DWARF_GNAT_ENCODINGS_ALL)
	{
	  /* Legacy GNAT encodings cannot express dynamic offsets; fall
	     back to a zero constant offset.  */
	  loc_descr = NULL;
	  offset = 0;
	}

      /* Data member location evaluation starts with the base address on the
	 stack.  Compute the field offset and add it to this base address.  */
      else
	add_loc_descr (list_head: &loc_descr, descr: new_loc_descr (op: DW_OP_plus, oprnd1: 0, oprnd2: 0));
    }

  if (!loc_descr)
    {
      /* While DW_AT_data_bit_offset has been added already in DWARF4,
	 e.g. GDB only added support to it in November 2016.  For DWARF5
	 we need newer debug info consumers anyway.  We might change this
	 to dwarf_version >= 4 once most consumers caught up.  */
      if (dwarf_version >= 5
	  && TREE_CODE (decl) == FIELD_DECL
	  && DECL_BIT_FIELD_TYPE (decl)
	  && (ctx->variant_part_offset == NULL_TREE
	      || TREE_CODE (ctx->variant_part_offset) == INTEGER_CST))
	{
	  tree off = bit_position (decl);
	  if (ctx->variant_part_offset)
	    off = bit_from_pos (ctx->variant_part_offset, off);
	  if (tree_fits_uhwi_p (off) && get_AT (die, attr_kind: DW_AT_bit_size))
	    {
	      /* DW_AT_data_bit_offset supersedes the DWARF2-style
		 byte-size/bit-offset pair, so drop those attributes.  */
	      remove_AT (die, attr_kind: DW_AT_byte_size);
	      remove_AT (die, attr_kind: DW_AT_bit_offset);
	      add_AT_unsigned (die, attr_kind: DW_AT_data_bit_offset, unsigned_val: tree_to_uhwi (off));
	      return;
	    }
	}
      if (dwarf_version > 2)
	{
	  /* Don't need to output a location expression, just the constant.  */
	  if (offset < 0)
	    add_AT_int (die, attr_kind: DW_AT_data_member_location, int_val: offset);
	  else
	    add_AT_unsigned (die, attr_kind: DW_AT_data_member_location, unsigned_val: offset);
	  return;
	}
      else
	{
	  enum dwarf_location_atom op;

	  /* The DWARF2 standard says that we should assume that the structure
	     address is already on the stack, so we can specify a structure
	     field address by using DW_OP_plus_uconst.  */
	  op = DW_OP_plus_uconst;
	  loc_descr = new_loc_descr (op, oprnd1: offset, oprnd2: 0);
	}
    }

  add_AT_loc (die, attr_kind: DW_AT_data_member_location, loc: loc_descr);
}
| 20554 | |
| 20555 | /* Writes integer values to dw_vec_const array. */ |
| 20556 | |
| 20557 | static void |
| 20558 | insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest) |
| 20559 | { |
| 20560 | while (size != 0) |
| 20561 | { |
| 20562 | *dest++ = val & 0xff; |
| 20563 | val >>= 8; |
| 20564 | --size; |
| 20565 | } |
| 20566 | } |
| 20567 | |
| 20568 | /* Reads integers from dw_vec_const array. Inverse of insert_int. */ |
| 20569 | |
| 20570 | static HOST_WIDE_INT |
| 20571 | (const unsigned char *src, unsigned int size) |
| 20572 | { |
| 20573 | HOST_WIDE_INT val = 0; |
| 20574 | |
| 20575 | src += size; |
| 20576 | while (size != 0) |
| 20577 | { |
| 20578 | val <<= 8; |
| 20579 | val |= *--src & 0xff; |
| 20580 | --size; |
| 20581 | } |
| 20582 | return val; |
| 20583 | } |
| 20584 | |
| 20585 | /* Writes wide_int values to dw_vec_const array. */ |
| 20586 | |
| 20587 | static void |
| 20588 | insert_wide_int (const wide_int_ref &val, unsigned char *dest, int elt_size) |
| 20589 | { |
| 20590 | int i; |
| 20591 | |
| 20592 | if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT) |
| 20593 | { |
| 20594 | insert_int (val: (HOST_WIDE_INT) val.elt (i: 0), size: elt_size, dest); |
| 20595 | return; |
| 20596 | } |
| 20597 | |
| 20598 | /* We'd have to extend this code to support odd sizes. */ |
| 20599 | gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0); |
| 20600 | |
| 20601 | int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT); |
| 20602 | |
| 20603 | if (WORDS_BIG_ENDIAN) |
| 20604 | for (i = n - 1; i >= 0; i--) |
| 20605 | { |
| 20606 | insert_int (val: (HOST_WIDE_INT) val.elt (i), size: sizeof (HOST_WIDE_INT), dest); |
| 20607 | dest += sizeof (HOST_WIDE_INT); |
| 20608 | } |
| 20609 | else |
| 20610 | for (i = 0; i < n; i++) |
| 20611 | { |
| 20612 | insert_int (val: (HOST_WIDE_INT) val.elt (i), size: sizeof (HOST_WIDE_INT), dest); |
| 20613 | dest += sizeof (HOST_WIDE_INT); |
| 20614 | } |
| 20615 | } |
| 20616 | |
| 20617 | /* Writes floating point values to dw_vec_const array. */ |
| 20618 | |
| 20619 | static unsigned |
| 20620 | insert_float (const_rtx rtl, unsigned char *array) |
| 20621 | { |
| 20622 | long val[4]; |
| 20623 | int i; |
| 20624 | scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl)); |
| 20625 | |
| 20626 | real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode); |
| 20627 | |
| 20628 | /* real_to_target puts 32-bit pieces in each long. Pack them. */ |
| 20629 | if (GET_MODE_SIZE (mode) < 4) |
| 20630 | { |
| 20631 | gcc_assert (GET_MODE_SIZE (mode) == 2); |
| 20632 | insert_int (val: val[0], size: 2, dest: array); |
| 20633 | return 2; |
| 20634 | } |
| 20635 | |
| 20636 | for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++) |
| 20637 | { |
| 20638 | insert_int (val: val[i], size: 4, dest: array); |
| 20639 | array += 4; |
| 20640 | } |
| 20641 | return 4; |
| 20642 | } |
| 20643 | |
| 20644 | /* Attach a DW_AT_const_value attribute for a variable or a parameter which |
| 20645 | does not have a "location" either in memory or in a register. These |
| 20646 | things can arise in GNU C when a constant is passed as an actual parameter |
| 20647 | to an inlined function. They can also arise in C++ where declared |
| 20648 | constants do not necessarily get memory "homes". */ |
| 20649 | |
static bool
add_const_value_attribute (dw_die_ref die, machine_mode mode, rtx rtl)
{
  scalar_mode int_mode;

  switch (GET_CODE (rtl))
    {
    case CONST_INT:
      {
	HOST_WIDE_INT val = INTVAL (rtl);

	/* Choose the signed or unsigned attribute form so the value
	   round-trips without sign confusion.  */
	if (val < 0)
	  add_AT_int (die, attr_kind: DW_AT_const_value, int_val: val);
	else
	  add_AT_unsigned (die, attr_kind: DW_AT_const_value, unsigned_val: (unsigned HOST_WIDE_INT) val);
      }
      return true;

    case CONST_WIDE_INT:
      /* Only handled for integral modes whose precision is a whole number
	 of host wide ints; otherwise punt.  */
      if (is_int_mode (mode, int_mode: &int_mode)
	  && (GET_MODE_PRECISION (mode: int_mode)
	      & (HOST_BITS_PER_WIDE_INT - 1)) == 0)
	{
	  add_AT_wide (die, attr_kind: DW_AT_const_value, w: rtx_mode_t (rtl, int_mode));
	  return true;
	}
      return false;

    case CONST_DOUBLE:
      /* Note that a CONST_DOUBLE rtx could represent either an integer or a
	 floating-point constant.  A CONST_DOUBLE is used whenever the
	 constant requires more than one word in order to be adequately
	 represented.  */
      if (TARGET_SUPPORTS_WIDE_INT == 0
	  && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
	add_AT_double (die, attr_kind: DW_AT_const_value,
		       CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
      else
	{
	  /* Floating-point: emit the raw target bytes as a vec constant.  */
	  scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
	  unsigned int length = GET_MODE_SIZE (mode);
	  unsigned char *array = ggc_vec_alloc<unsigned char> (c: length);
	  unsigned int elt_size = insert_float (rtl, array);

	  add_AT_vec (die, attr_kind: DW_AT_const_value, length: length / elt_size, elt_size,
		      array);
	}
      return true;

    case CONST_VECTOR:
      {
	/* Punt on variable-length vectors.  */
	unsigned int length;
	if (!CONST_VECTOR_NUNITS (rtl).is_constant (const_value: &length))
	  return false;

	machine_mode mode = GET_MODE (rtl);
	/* The combination of a length and byte elt_size doesn't extend
	   naturally to boolean vectors, where several elements are packed
	   into the same byte.  */
	if (GET_MODE_CLASS (mode) == MODE_VECTOR_BOOL)
	  return false;

	unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
	unsigned char *array
	  = ggc_vec_alloc<unsigned char> (c: length * elt_size);
	unsigned int i;
	unsigned char *p;
	machine_mode imode = GET_MODE_INNER (mode);

	/* Serialize each element into the byte array.  */
	switch (GET_MODE_CLASS (mode))
	  {
	  case MODE_VECTOR_INT:
	    for (i = 0, p = array; i < length; i++, p += elt_size)
	      {
		rtx elt = CONST_VECTOR_ELT (rtl, i);
		insert_wide_int (val: rtx_mode_t (elt, imode), dest: p, elt_size);
	      }
	    break;

	  case MODE_VECTOR_FLOAT:
	    for (i = 0, p = array; i < length; i++, p += elt_size)
	      {
		rtx elt = CONST_VECTOR_ELT (rtl, i);
		insert_float (rtl: elt, array: p);
	      }
	    break;

	  default:
	    gcc_unreachable ();
	  }

	add_AT_vec (die, attr_kind: DW_AT_const_value, length, elt_size, array);
      }
      return true;

    case CONST_STRING:
      if (dwarf_version >= 4 || !dwarf_strict)
	{
	  dw_loc_descr_ref loc_result;
	  resolve_one_addr (&rtl);
	rtl_addr:
	  /* Emit the address as a DW_OP_addr/DW_OP_stack_value location
	     rather than a constant value.  Also the target of the goto
	     from the SYMBOL_REF/LABEL_REF cases below.  */
	  loc_result = new_addr_loc_descr (addr: rtl, dtprel: dtprel_false);
	  add_loc_descr (list_head: &loc_result, descr: new_loc_descr (op: DW_OP_stack_value, oprnd1: 0, oprnd2: 0));
	  add_AT_loc (die, attr_kind: DW_AT_location, loc: loc_result);
	  vec_safe_push (v&: used_rtx_array, obj: rtl);
	  return true;
	}
      return false;

    case CONST:
      if (CONSTANT_P (XEXP (rtl, 0)))
	return add_const_value_attribute (die, mode, XEXP (rtl, 0));
      /* FALLTHROUGH */
    case SYMBOL_REF:
      if (!const_ok_for_output (rtl))
	return false;
      /* FALLTHROUGH */
    case LABEL_REF:
      if (dwarf_version >= 4 || !dwarf_strict)
	goto rtl_addr;
      return false;

    case PLUS:
      /* In cases where an inlined instance of an inline function is passed
	 the address of an `auto' variable (which is local to the caller) we
	 can get a situation where the DECL_RTL of the artificial local
	 variable (for the inlining) which acts as a stand-in for the
	 corresponding formal parameter (of the inline function) will look
	 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)).  This is not
	 exactly a compile-time constant expression, but it isn't the address
	 of the (artificial) local variable either.  Rather, it represents the
	 *value* which the artificial local variable always has during its
	 lifetime.  We currently have no way to represent such quasi-constant
	 values in Dwarf, so for now we just punt and generate nothing.  */
      return false;

    case HIGH:
    case CONST_FIXED:
    case MINUS:
    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case CONST_POLY_INT:
      return false;

    case MEM:
      /* A read-only BLKmode MEM of a string constant can be emitted
	 directly as a string value.  */
      if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
	  && MEM_READONLY_P (rtl)
	  && GET_MODE (rtl) == BLKmode)
	{
	  add_AT_string (die, attr_kind: DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
	  return true;
	}
      return false;

    default:
      /* No other kinds of rtx should be possible here.  */
      gcc_unreachable ();
    }
}
| 20809 | |
/* Determine whether the evaluation of EXPR references any variables
   or functions which aren't otherwise used (and therefore may not be
   output).  Callback for walk_tree: returns the offending node (which
   stops the walk) or NULL_TREE to continue.  */
static tree
reference_to_unused (tree * tp, int * walk_subtrees,
		     void * data ATTRIBUTE_UNUSED)
{
  /* Only descend into expressions and constants.  */
  if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
    *walk_subtrees = 0;

  /* NOTE: the order of the tests below matters; in particular the
     !global_info_ready check must precede the varpool lookup.  */
  if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
      && ! TREE_ASM_WRITTEN (*tp))
    return *tp;
  /* ??? The C++ FE emits debug information for using decls, so
     putting gcc_unreachable here falls over.  See PR31899.  For now
     be conservative.  */
  else if (!symtab->global_info_ready && VAR_P (*tp))
    return *tp;
  else if (VAR_P (*tp))
    {
      /* A variable counts as emitted only if the varpool holds a
	 definition for it.  */
      varpool_node *node = varpool_node::get (decl: *tp);
      if (!node || !node->definition)
	return *tp;
    }
  else if (TREE_CODE (*tp) == FUNCTION_DECL
	   && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
    {
      /* The call graph machinery must have finished analyzing,
	 optimizing and gimplifying the CU by now.
	 So if *TP has no call graph node associated
	 to it, it means *TP will not be emitted.  */
      if (!symtab->global_info_ready || !cgraph_node::get (decl: *tp))
	return *tp;
    }
  else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
    return *tp;

  return NULL_TREE;
}
| 20849 | |
/* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
   for use in a later add_const_value_attribute call.  Returns NULL_RTX when
   the initializer cannot be represented as an immediate RTL constant.  */

static rtx
rtl_for_decl_init (tree init, tree type)
{
  rtx rtl = NULL_RTX;

  STRIP_NOPS (init);

  /* If a variable is initialized with a string constant without embedded
     zeros, build CONST_STRING.  */
  if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
    {
      tree enttype = TREE_TYPE (type);
      tree domain = TYPE_DOMAIN (type);
      scalar_int_mode mode;

      /* Only byte-element arrays whose domain exactly covers the string,
	 with a single terminating NUL, qualify.  */
      if (is_int_mode (TYPE_MODE (enttype), int_mode: &mode)
	  && GET_MODE_SIZE (mode) == 1
	  && domain
	  && TYPE_MAX_VALUE (domain)
	  && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
	  && integer_zerop (TYPE_MIN_VALUE (domain))
	  && compare_tree_int (TYPE_MAX_VALUE (domain),
			       TREE_STRING_LENGTH (init) - 1) == 0
	  && ((size_t) TREE_STRING_LENGTH (init)
	      == strlen (TREE_STRING_POINTER (init)) + 1))
	{
	  rtl = gen_rtx_CONST_STRING (VOIDmode,
				      ggc_strdup (TREE_STRING_POINTER (init)));
	  rtl = gen_rtx_MEM (BLKmode, rtl);
	  MEM_READONLY_P (rtl) = 1;
	}
    }
  /* Other aggregates, and complex values, could be represented using
     CONCAT: FIXME!
     If this changes, please adjust tree_add_const_value_attribute
     so that for early_dwarf it will for such initializers mangle referenced
     decls.  */
  else if (AGGREGATE_TYPE_P (type)
	   || (TREE_CODE (init) == VIEW_CONVERT_EXPR
	       && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
	   || TREE_CODE (type) == COMPLEX_TYPE)
    ;
  /* Vectors only work if their mode is supported by the target.
     FIXME: generic vectors ought to work too.  */
  else if (TREE_CODE (type) == VECTOR_TYPE
	   && !VECTOR_MODE_P (TYPE_MODE (type)))
    ;
  /* If the initializer is something that we know will expand into an
     immediate RTL constant, expand it now.  We must be careful not to
     reference variables which won't be output.  */
  else if (initializer_constant_valid_p (init, type)
	   && ! walk_tree (&init, reference_to_unused, NULL, NULL))
    {
      /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
	 possible.  */
      if (TREE_CODE (type) == VECTOR_TYPE)
	switch (TREE_CODE (init))
	  {
	  case VECTOR_CST:
	    break;
	  case CONSTRUCTOR:
	    if (TREE_CONSTANT (init))
	      {
		vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
		bool constant_p = true;
		tree value;
		unsigned HOST_WIDE_INT ix;

		/* Even when ctor is constant, it might contain non-*_CST
		   elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
		   belong into VECTOR_CST nodes.  */
		FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
		  if (!CONSTANT_CLASS_P (value))
		    {
		      constant_p = false;
		      break;
		    }

		if (constant_p)
		  {
		    init = build_vector_from_ctor (type, elts);
		    break;
		  }
	      }
	    /* FALLTHRU */

	  default:
	    return NULL;
	  }

      /* Large _BitInt BLKmode INTEGER_CSTs would yield a MEM.  */
      if (TREE_CODE (init) == INTEGER_CST
	  && TREE_CODE (TREE_TYPE (init)) == BITINT_TYPE
	  && TYPE_MODE (TREE_TYPE (init)) == BLKmode)
	{
	  if (tree_fits_shwi_p (init))
	    return GEN_INT (tree_to_shwi (init));
	  else
	    return NULL;
	}

      rtl = expand_expr (exp: init, NULL_RTX, VOIDmode, modifier: EXPAND_INITIALIZER);

      /* If expand_expr returns a MEM, it wasn't immediate.  */
      gcc_assert (!rtl || !MEM_P (rtl));
    }

  return rtl;
}
| 20962 | |
| 20963 | /* Generate RTL for the variable DECL to represent its location. */ |
| 20964 | |
static rtx
rtl_for_decl_location (tree decl)
{
  rtx rtl;

  /* Here we have to decide where we are going to say the parameter "lives"
     (as far as the debugger is concerned).  We only have a couple of
     choices.  GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.

     DECL_RTL normally indicates where the parameter lives during most of the
     activation of the function.  If optimization is enabled however, this
     could be either NULL or else a pseudo-reg.  Both of those cases indicate
     that the parameter doesn't really live anywhere (as far as the code
     generation parts of GCC are concerned) during most of the function's
     activation.  That will happen (for example) if the parameter is never
     referenced within the function.

     We could just generate a location descriptor here for all non-NULL
     non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
     a little nicer than that if we also consider DECL_INCOMING_RTL in cases
     where DECL_RTL is NULL or is a pseudo-reg.

     Note however that we can only get away with using DECL_INCOMING_RTL as
     a backup substitute for DECL_RTL in certain limited cases.  In cases
     where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
     we can be sure that the parameter was passed using the same type as it is
     declared to have within the function, and that its DECL_INCOMING_RTL
     points us to a place where a value of that type is passed.

     In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
     we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
     because in these cases DECL_INCOMING_RTL points us to a value of some
     type which is *different* from the type of the parameter itself.  Thus,
     if we tried to use DECL_INCOMING_RTL to generate a location attribute in
     such cases, the debugger would end up (for example) trying to fetch a
     `float' from a place which actually contains the first part of a
     `double'.  That would lead to really incorrect and confusing
     output at debug-time.

     So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
     in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl).  There
     are a couple of exceptions however.  On little-endian machines we can
     get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
     not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
     an integral type that is smaller than TREE_TYPE (decl). These cases arise
     when (on a little-endian machine) a non-prototyped function has a
     parameter declared to be of type `short' or `char'.  In such cases,
     TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
     be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
     passed `int' value.  If the debugger then uses that address to fetch
     a `short' or a `char' (on a little-endian machine) the result will be
     the correct data, so we allow for such exceptional cases below.

     Note that our goal here is to describe the place where the given formal
     parameter lives during most of the function's activation (i.e. between the
     end of the prologue and the start of the epilogue).  We'll do that as best
     as we can.  Note however that if the given formal parameter is modified
     sometime during the execution of the function, then a stack backtrace (at
     debug-time) will show the function as having been called with the *new*
     value rather than the value which was originally passed in.  This happens
     rarely enough that it is not a major problem, but it *is* a problem, and
     I'd like to fix it.

     A future version of dwarf2out.cc may generate two additional attributes for
     any given DW_TAG_formal_parameter DIE which will describe the "passed
     type" and the "passed location" for the given formal parameter in addition
     to the attributes we now generate to indicate the "declared type" and the
     "active location" for each parameter.  This additional set of attributes
     could be used by debuggers for stack backtraces.  Separately, note that
     sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
     This happens (for example) for inlined-instances of inline function formal
     parameters which are never referenced.  This really shouldn't be
     happening.  All PARM_DECL nodes should get valid non-NULL
     DECL_INCOMING_RTL values.  FIXME.  */

  /* Use DECL_RTL as the "location" unless we find something better.  */
  rtl = DECL_RTL_IF_SET (decl);

  /* When generating abstract instances, ignore everything except
     constants, symbols living in memory, and symbols living in
     fixed registers.  */
  if (! reload_completed)
    {
      if (rtl
	  && (CONSTANT_P (rtl)
	      || (MEM_P (rtl)
		  && CONSTANT_P (XEXP (rtl, 0)))
	      || (REG_P (rtl)
		  && VAR_P (decl)
		  && TREE_STATIC (decl))))
	{
	  rtl = targetm.delegitimize_address (rtl);
	  return rtl;
	}
      rtl = NULL_RTX;
    }
  /* After reload, parameters whose DECL_RTL is missing or still a
     pseudo may be describable via DECL_INCOMING_RTL instead.  */
  else if (TREE_CODE (decl) == PARM_DECL)
    {
      if (rtl == NULL_RTX
	  || is_pseudo_reg (rtl)
	  || (MEM_P (rtl)
	      && is_pseudo_reg (XEXP (rtl, 0))
	      && DECL_INCOMING_RTL (decl)
	      && MEM_P (DECL_INCOMING_RTL (decl))
	      && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
	{
	  tree declared_type = TREE_TYPE (decl);
	  tree passed_type = DECL_ARG_TYPE (decl);
	  machine_mode dmode = TYPE_MODE (declared_type);
	  machine_mode pmode = TYPE_MODE (passed_type);

	  /* This decl represents a formal parameter which was optimized out.
	     Note that DECL_INCOMING_RTL may be NULL in here, but we handle
	     all cases where (rtl == NULL_RTX) just below.  */
	  if (dmode == pmode)
	    rtl = DECL_INCOMING_RTL (decl);
	  /* The little-endian exception described in the big comment
	     above: a narrower integral declared type can reuse the
	     incoming RTL, with a byte adjustment on big-endian MEMs.  */
	  else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
		   && SCALAR_INT_MODE_P (dmode)
		   && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
		   && DECL_INCOMING_RTL (decl))
	    {
	      rtx inc = DECL_INCOMING_RTL (decl);
	      if (REG_P (inc))
		rtl = inc;
	      else if (MEM_P (inc))
		{
		  if (BYTES_BIG_ENDIAN)
		    rtl = adjust_address_nv (inc, dmode,
					     GET_MODE_SIZE (pmode)
					     - GET_MODE_SIZE (dmode));
		  else
		    rtl = inc;
		}
	    }
	}

      /* If the parm was passed in registers, but lives on the stack, then
	 make a big endian correction if the mode of the type of the
	 parameter is not the same as the mode of the rtl.  */
      /* ??? This is the same series of checks that are made in dbxout.cc before
	 we reach the big endian correction code there.  It isn't clear if all
	 of these checks are necessary here, but keeping them all is the safe
	 thing to do.  */
      else if (MEM_P (rtl)
	       && XEXP (rtl, 0) != const0_rtx
	       && ! CONSTANT_P (XEXP (rtl, 0))
	       /* Not passed in memory.  */
	       && !MEM_P (DECL_INCOMING_RTL (decl))
	       /* Not passed by invisible reference.  */
	       && (!REG_P (XEXP (rtl, 0))
		   || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
		   || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
#if !HARD_FRAME_POINTER_IS_ARG_POINTER
		   || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
#endif
		     )
	       /* Big endian correction check.  */
	       && BYTES_BIG_ENDIAN
	       && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
	       && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
			    UNITS_PER_WORD))
	{
	  machine_mode addr_mode = get_address_mode (mem: rtl);
	  poly_int64 offset = (UNITS_PER_WORD
			       - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));

	  rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
			     plus_constant (addr_mode, XEXP (rtl, 0), offset));
	}
    }
  /* A memory-resident variable whose RTL mode differs from the mode of
     its declared type.  */
  else if (VAR_P (decl)
	   && rtl
	   && MEM_P (rtl)
	   && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
    {
      machine_mode addr_mode = get_address_mode (mem: rtl);
      poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
					       GET_MODE (rtl));

      /* If a variable is declared "register" yet is smaller than
	 a register, then if we store the variable to memory, it
	 looks like we're storing a register-sized value, when in
	 fact we are not.  We need to adjust the offset of the
	 storage location to reflect the actual value's bytes,
	 else gdb will not be able to display it.  */
      if (maybe_ne (a: offset, b: 0))
	rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
			   plus_constant (addr_mode, XEXP (rtl, 0), offset));
    }

  /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
     and will have been substituted directly into all expressions that use it.
     C does not have such a concept, but C++ and other languages do.  */
  if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
    rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));

  if (rtl)
    rtl = targetm.delegitimize_address (rtl);

  /* If we don't look past the constant pool, we risk emitting a
     reference to a constant pool entry that isn't referenced from
     code, and thus is not emitted.  */
  if (rtl)
    rtl = avoid_constant_pool_reference (rtl);

  /* Try harder to get a rtl.  If this symbol ends up not being emitted
     in the current CU, resolve_addr will remove the expression referencing
     it.  */
  if (rtl == NULL_RTX
      && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
      && VAR_P (decl)
      && !DECL_EXTERNAL (decl)
      && TREE_STATIC (decl)
      && DECL_NAME (decl)
      && !DECL_HARD_REGISTER (decl)
      && DECL_MODE (decl) != VOIDmode)
    {
      rtl = make_decl_rtl_for_debug (decl);
      /* Only keep the result when it is a MEM of a SYMBOL_REF that
	 still refers back to DECL itself.  */
      if (!MEM_P (rtl)
	  || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
	  || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
	rtl = NULL_RTX;
    }

  return rtl;
}
| 21191 | |
| 21192 | /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is |
| 21193 | returned. If so, the decl for the COMMON block is returned, and the |
| 21194 | value is the offset into the common block for the symbol. */ |
| 21195 | |
static tree
fortran_common (tree decl, HOST_WIDE_INT *value)
{
  tree val_expr, cvar;
  machine_mode mode;
  poly_int64 bitsize, bitpos;
  tree offset;
  HOST_WIDE_INT cbitpos;
  int unsignedp, reversep, volatilep = 0;

  /* If the decl isn't a VAR_DECL, or if it isn't static, or if
     it does not have a value (the offset into the common area), or if it
     is thread local (as opposed to global) then it isn't common, and shouldn't
     be handled as such.  */
  if (!VAR_P (decl)
      || !TREE_STATIC (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !is_fortran ())
    return NULL_TREE;

  /* A COMMON member's value expression is a COMPONENT_REF into the
     variable representing the COMMON block.  */
  val_expr = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (val_expr) != COMPONENT_REF)
    return NULL_TREE;

  /* Decompose the reference into the underlying variable plus the
     position of DECL within it.  */
  cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
			      &unsignedp, &reversep, &volatilep);

  if (cvar == NULL_TREE
      || !VAR_P (cvar)
      || DECL_ARTIFICIAL (cvar)
      || !TREE_PUBLIC (cvar)
      /* We don't expect to have to cope with variable offsets,
	 since at present all static data must have a constant size.  */
      || !bitpos.is_constant (const_value: &cbitpos))
    return NULL_TREE;

  /* Combine the (optional) byte offset and the constant bit position
     into a single byte offset for the caller.  */
  *value = 0;
  if (offset != NULL)
    {
      if (!tree_fits_shwi_p (offset))
	return NULL_TREE;
      *value = tree_to_shwi (offset);
    }
  if (cbitpos != 0)
    *value += cbitpos / BITS_PER_UNIT;

  return cvar;
}
| 21244 | |
| 21245 | /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value |
| 21246 | data attribute for a variable or a parameter. We generate the |
| 21247 | DW_AT_const_value attribute only in those cases where the given variable |
| 21248 | or parameter does not have a true "location" either in memory or in a |
| 21249 | register. This can happen (for example) when a constant is passed as an |
| 21250 | actual argument in a call to an inline function. (It's possible that |
| 21251 | these things can crop up in other ways also.) Note that one type of |
| 21252 | constant value which can be passed into an inlined function is a constant |
| 21253 | pointer. This can happen for example if an actual argument in an inlined |
| 21254 | function call evaluates to a compile-time constant address. |
| 21255 | |
| 21256 | CACHE_P is true if it is worth caching the location list for DECL, |
| 21257 | so that future calls can reuse it rather than regenerate it from scratch. |
| 21258 | This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines, |
| 21259 | since we will need to refer to them each time the function is inlined. */ |
| 21260 | |
static bool
add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
{
  rtx rtl;
  dw_loc_list_ref list;
  var_loc_list *loc_list;
  cached_dw_loc_list *cache;

  /* No location information is emitted during the early-DWARF pass.  */
  if (early_dwarf)
    return false;

  if (TREE_CODE (decl) == ERROR_MARK)
    return false;

  /* Nothing to do if the DIE already carries a location or value.  */
  if (get_AT (die, attr_kind: DW_AT_location)
      || get_AT (die, attr_kind: DW_AT_const_value))
    return true;

  gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);

  /* Try to get some constant RTL for this decl, and use that as the value of
     the location.  */

  rtl = rtl_for_decl_location (decl);
  if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
      && add_const_value_attribute (die, DECL_MODE (decl), rtl))
    return true;

  /* See if we have single element location list that is equivalent to
     a constant value.  That way we are better to use add_const_value_attribute
     rather than expanding constant value equivalent.  */
  loc_list = lookup_decl_loc (decl);
  if (loc_list
      && loc_list->first
      && loc_list->first->next == NULL
      && NOTE_P (loc_list->first->loc)
      && NOTE_VAR_LOCATION (loc_list->first->loc)
      && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
    {
      struct var_loc_node *node;

      node = loc_list->first;
      rtl = NOTE_VAR_LOCATION_LOC (node->loc);
      if (GET_CODE (rtl) == EXPR_LIST)
	rtl = XEXP (rtl, 0);
      if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
	  && add_const_value_attribute (die, DECL_MODE (decl), rtl))
	return true;
    }
  /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
     list several times.  See if we've already cached the contents.  */
  list = NULL;
  if (loc_list == NULL || cached_dw_loc_list_table == NULL)
    cache_p = false;
  if (cache_p)
    {
      cache = cached_dw_loc_list_table->find_with_hash (comparable: decl, DECL_UID (decl));
      if (cache)
	list = cache->loc_list;
    }
  if (list == NULL)
    {
      list = loc_list_from_tree (loc: decl, want_address: decl_by_reference_p (decl) ? 0 : 2,
				 NULL);
      /* It is usually worth caching this result if the decl is from
	 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements.  */
      if (cache_p && list && list->dw_loc_next)
	{
	  cached_dw_loc_list **slot
	    = cached_dw_loc_list_table->find_slot_with_hash (comparable: decl,
							     DECL_UID (decl),
							     insert: INSERT);
	  cache = ggc_cleared_alloc<cached_dw_loc_list> ();
	  cache->decl_id = DECL_UID (decl);
	  cache->loc_list = list;
	  *slot = cache;
	}
    }
  /* Emit the computed location list (or single location expression).  */
  if (list)
    {
      add_AT_location_description (die, attr_kind: DW_AT_location, descr: list);
      return true;
    }
  /* None of that worked, so it must not really have a location;
     try adding a constant value attribute from the DECL_INITIAL.  */
  return tree_add_const_value_attribute_for_decl (die, decl);
}
| 21349 | |
| 21350 | /* Mangle referenced decls. */ |
| 21351 | static tree |
| 21352 | mangle_referenced_decls (tree *tp, int *walk_subtrees, void *) |
| 21353 | { |
| 21354 | if (! EXPR_P (*tp) |
| 21355 | && ! CONSTANT_CLASS_P (*tp) |
| 21356 | && TREE_CODE (*tp) != CONSTRUCTOR) |
| 21357 | *walk_subtrees = 0; |
| 21358 | |
| 21359 | if (VAR_OR_FUNCTION_DECL_P (*tp)) |
| 21360 | assign_assembler_name_if_needed (*tp); |
| 21361 | |
| 21362 | return NULL_TREE; |
| 21363 | } |
| 21364 | |
| 21365 | /* Attach a DW_AT_const_value attribute to DIE. The value of the |
| 21366 | attribute is the const value T. */ |
| 21367 | |
| 21368 | static bool |
| 21369 | tree_add_const_value_attribute (dw_die_ref die, tree t) |
| 21370 | { |
| 21371 | tree init; |
| 21372 | tree type = TREE_TYPE (t); |
| 21373 | |
| 21374 | if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node) |
| 21375 | return false; |
| 21376 | |
| 21377 | init = t; |
| 21378 | gcc_assert (!DECL_P (init)); |
| 21379 | |
| 21380 | if (TREE_CODE (init) == INTEGER_CST) |
| 21381 | { |
| 21382 | if (tree_fits_uhwi_p (init)) |
| 21383 | { |
| 21384 | add_AT_unsigned (die, attr_kind: DW_AT_const_value, unsigned_val: tree_to_uhwi (init)); |
| 21385 | return true; |
| 21386 | } |
| 21387 | if (tree_fits_shwi_p (init)) |
| 21388 | { |
| 21389 | add_AT_int (die, attr_kind: DW_AT_const_value, int_val: tree_to_shwi (init)); |
| 21390 | return true; |
| 21391 | } |
| 21392 | } |
| 21393 | if (!early_dwarf) |
| 21394 | { |
| 21395 | rtx rtl = rtl_for_decl_init (init, type); |
| 21396 | if (rtl) |
| 21397 | return add_const_value_attribute (die, TYPE_MODE (type), rtl); |
| 21398 | } |
| 21399 | else |
| 21400 | { |
| 21401 | /* For early_dwarf force mangling of all referenced symbols. */ |
| 21402 | tree initializer = init; |
| 21403 | STRIP_NOPS (initializer); |
| 21404 | if (initializer_constant_valid_p (initializer, type)) |
| 21405 | walk_tree (&initializer, mangle_referenced_decls, NULL, NULL); |
| 21406 | } |
| 21407 | /* If the host and target are sane, try harder. */ |
| 21408 | if (CHAR_BIT == 8 && BITS_PER_UNIT == 8 |
| 21409 | && initializer_constant_valid_p (init, type)) |
| 21410 | { |
| 21411 | HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init)); |
| 21412 | if (size > 0 && (int) size == size) |
| 21413 | { |
| 21414 | unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (c: size); |
| 21415 | |
| 21416 | if (native_encode_initializer (init, array, size) == size) |
| 21417 | { |
| 21418 | add_AT_vec (die, attr_kind: DW_AT_const_value, length: size, elt_size: 1, array); |
| 21419 | return true; |
| 21420 | } |
| 21421 | ggc_free (array); |
| 21422 | } |
| 21423 | } |
| 21424 | return false; |
| 21425 | } |
| 21426 | |
| 21427 | /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the |
| 21428 | attribute is the const value of T, where T is an integral constant |
| 21429 | variable with static storage duration |
| 21430 | (so it can't be a PARM_DECL or a RESULT_DECL). */ |
| 21431 | |
| 21432 | static bool |
| 21433 | tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl) |
| 21434 | { |
| 21435 | |
| 21436 | if (!decl |
| 21437 | || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL) |
| 21438 | || (VAR_P (decl) && !TREE_STATIC (decl))) |
| 21439 | return false; |
| 21440 | |
| 21441 | if (TREE_READONLY (decl) |
| 21442 | && ! TREE_THIS_VOLATILE (decl) |
| 21443 | && DECL_INITIAL (decl)) |
| 21444 | /* OK */; |
| 21445 | else |
| 21446 | return false; |
| 21447 | |
| 21448 | /* Don't add DW_AT_const_value if abstract origin already has one. */ |
| 21449 | if (get_AT (die: var_die, attr_kind: DW_AT_const_value)) |
| 21450 | return false; |
| 21451 | |
| 21452 | return tree_add_const_value_attribute (die: var_die, DECL_INITIAL (decl)); |
| 21453 | } |
| 21454 | |
| 21455 | /* Convert the CFI instructions for the current function into a |
| 21456 | location list. This is used for DW_AT_frame_base when we targeting |
| 21457 | a dwarf2 consumer that does not support the dwarf3 |
| 21458 | DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA |
| 21459 | expressions. */ |
| 21460 | |
static dw_loc_list_ref
convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
{
  int ix;
  dw_fde_ref fde;
  dw_loc_list_ref list, *list_tail;
  dw_cfi_ref cfi;
  dw_cfa_location last_cfa, next_cfa;
  const char *start_label, *last_label, *section;
  dw_cfa_location remember;

  fde = cfun->fde;
  gcc_assert (fde != NULL);

  section = secname_for_decl (decl: current_function_decl);
  list_tail = &list;
  list = NULL;

  memset (s: &next_cfa, c: 0, n: sizeof (next_cfa));

#ifdef CODEVIEW_DEBUGGING_INFO
  /* We can write simplified frame base information for CodeView, as we're
     not using it for rewinding.  */
  if (codeview_debuginfo_p ())
    {
      int dwreg = DEBUGGER_REGNO (cfun->machine->fs.cfa_reg->u.reg.regno);

      next_cfa.reg.set_by_dwreg (dwreg);
      next_cfa.offset = cfun->machine->fs.fp_valid
	? cfun->machine->fs.fp_offset : cfun->machine->fs.sp_offset;

      /* A single entry covering the whole function suffices.  */
      *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
				 fde->dw_fde_begin, 0,
				 fde->dw_fde_second_begin
				 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
				 section);
      maybe_gen_llsym (list);

      return list;
    }
#endif

  next_cfa.reg.set_by_dwreg (INVALID_REGNUM);
  remember = next_cfa;

  start_label = fde->dw_fde_begin;

  /* ??? Bald assumption that the CIE opcode list does not contain
     advance opcodes.  */
  FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
    lookup_cfa_1 (cfi, loc: &next_cfa, remember: &remember);

  last_cfa = next_cfa;
  last_label = start_label;

  if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
    {
      /* If the first partition contained no CFI adjustments, the
	 CIE opcodes apply to the whole first partition.  */
      *list_tail = new_loc_list (expr: build_cfa_loc (cfa: &last_cfa, offset),
				 begin: fde->dw_fde_begin, vbegin: 0, end: fde->dw_fde_end, vend: 0, section);
      list_tail =&(*list_tail)->dw_loc_next;
      start_label = last_label = fde->dw_fde_second_begin;
    }

  /* Walk the FDE's CFI instructions: advance opcodes mark program
     points; whenever the CFA has changed since the last emitted range,
     flush a list entry for the old CFA.  */
  FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
    {
      switch (cfi->dw_cfi_opc)
	{
	case DW_CFA_set_loc:
	case DW_CFA_advance_loc1:
	case DW_CFA_advance_loc2:
	case DW_CFA_advance_loc4:
	  if (!cfa_equal_p (&last_cfa, &next_cfa))
	    {
	      *list_tail = new_loc_list (expr: build_cfa_loc (cfa: &last_cfa, offset),
					 begin: start_label, vbegin: 0, end: last_label, vend: 0, section);

	      list_tail = &(*list_tail)->dw_loc_next;
	      last_cfa = next_cfa;
	      start_label = last_label;
	    }
	  last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
	  break;

	case DW_CFA_advance_loc:
	  /* The encoding is complex enough that we should never emit this.  */
	  gcc_unreachable ();

	default:
	  /* Non-advance opcodes update the tracked CFA state.  */
	  lookup_cfa_1 (cfi, loc: &next_cfa, remember: &remember);
	  break;
	}
      /* At the boundary between the two partitions of a split function,
	 close out the first partition's ranges and restart labels at
	 the beginning of the second partition.  */
      if (ix + 1 == fde->dw_fde_switch_cfi_index)
	{
	  if (!cfa_equal_p (&last_cfa, &next_cfa))
	    {
	      *list_tail = new_loc_list (expr: build_cfa_loc (cfa: &last_cfa, offset),
					 begin: start_label, vbegin: 0, end: last_label, vend: 0, section);

	      list_tail = &(*list_tail)->dw_loc_next;
	      last_cfa = next_cfa;
	      start_label = last_label;
	    }
	  *list_tail = new_loc_list (expr: build_cfa_loc (cfa: &last_cfa, offset),
				     begin: start_label, vbegin: 0, end: fde->dw_fde_end, vend: 0, section);
	  list_tail = &(*list_tail)->dw_loc_next;
	  start_label = last_label = fde->dw_fde_second_begin;
	}
    }

  /* Flush any pending range with the previous CFA.  */
  if (!cfa_equal_p (&last_cfa, &next_cfa))
    {
      *list_tail = new_loc_list (expr: build_cfa_loc (cfa: &last_cfa, offset),
				 begin: start_label, vbegin: 0, end: last_label, vend: 0, section);
      list_tail = &(*list_tail)->dw_loc_next;
      start_label = last_label;
    }

  /* The final entry covers through the end of the function.  */
  *list_tail = new_loc_list (expr: build_cfa_loc (cfa: &next_cfa, offset),
			     begin: start_label, vbegin: 0,
			     end: fde->dw_fde_second_begin
			     ? fde->dw_fde_second_end : fde->dw_fde_end, vend: 0,
			     section);

  maybe_gen_llsym (list);

  return list;
}
| 21590 | |
| 21591 | /* Compute a displacement from the "steady-state frame pointer" to the |
| 21592 | frame base (often the same as the CFA), and store it in |
| 21593 | frame_pointer_fb_offset. OFFSET is added to the displacement |
| 21594 | before the latter is negated. */ |
| 21595 | |
static void
compute_frame_pointer_to_fb_displacement (poly_int64 offset)
{
  rtx reg, elim;

  /* Pick the reference register and add the target's CFA offset:
     either the frame pointer or, failing that, the arg pointer.  */
#ifdef FRAME_POINTER_CFA_OFFSET
  reg = frame_pointer_rtx;
  offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  reg = arg_pointer_rtx;
  offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

  /* Eliminate the soft register to its hard-register-plus-offset form,
     folding any constant displacement into OFFSET.  */
  elim = (ira_use_lra_p
	  ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
	  : eliminate_regs (reg, VOIDmode, NULL_RTX));
  elim = strip_offset_and_add (x: elim, offset: &offset);

  frame_pointer_fb_offset = -offset;

  /* ??? AVR doesn't set up valid eliminations when there is no stack frame
     in which to eliminate.  This is because it's stack pointer isn't
     directly accessible as a register within the ISA.  To work around
     this, assume that while we cannot provide a proper value for
     frame_pointer_fb_offset, we won't need one either.  We can use
     hard frame pointer in debug info even if frame pointer isn't used
     since hard frame pointer in debug info is encoded with DW_OP_fbreg
     which uses the DW_AT_frame_base attribute, not hard frame pointer
     directly.  */
  frame_pointer_fb_offset_valid
    = (elim == hard_frame_pointer_rtx || elim == stack_pointer_rtx);
}
| 21628 | |
| 21629 | /* Generate a DW_AT_name attribute given some string value to be included as |
| 21630 | the value of the attribute. */ |
| 21631 | |
| 21632 | void |
| 21633 | add_name_attribute (dw_die_ref die, const char *name_string) |
| 21634 | { |
| 21635 | if (name_string != NULL && *name_string != 0) |
| 21636 | { |
| 21637 | if (demangle_name_func) |
| 21638 | name_string = (*demangle_name_func) (name_string); |
| 21639 | |
| 21640 | add_AT_string (die, attr_kind: DW_AT_name, str: name_string); |
| 21641 | } |
| 21642 | } |
| 21643 | |
| 21644 | /* Generate a DW_AT_name attribute given some string value representing a |
| 21645 | file or filepath to be included as value of the attribute. */ |
| 21646 | static void |
| 21647 | add_filename_attribute (dw_die_ref die, const char *name_string) |
| 21648 | { |
| 21649 | if (name_string != NULL && *name_string != 0) |
| 21650 | add_filepath_AT_string (die, attr_kind: DW_AT_name, str: name_string); |
| 21651 | } |
| 21652 | |
| 21653 | /* Generate a DW_AT_description attribute given some string value to be included |
| 21654 | as the value of the attribute. */ |
| 21655 | |
| 21656 | static void |
| 21657 | add_desc_attribute (dw_die_ref die, const char *name_string) |
| 21658 | { |
| 21659 | if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict)) |
| 21660 | return; |
| 21661 | |
| 21662 | if (name_string == NULL || *name_string == 0) |
| 21663 | return; |
| 21664 | |
| 21665 | if (demangle_name_func) |
| 21666 | name_string = (*demangle_name_func) (name_string); |
| 21667 | |
| 21668 | add_AT_string (die, attr_kind: DW_AT_description, str: name_string); |
| 21669 | } |
| 21670 | |
| 21671 | /* Generate a DW_AT_description attribute given some decl to be included |
| 21672 | as the value of the attribute. */ |
| 21673 | |
| 21674 | static void |
| 21675 | add_desc_attribute (dw_die_ref die, tree decl) |
| 21676 | { |
| 21677 | tree decl_name; |
| 21678 | |
| 21679 | if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict)) |
| 21680 | return; |
| 21681 | |
| 21682 | if (decl == NULL_TREE || !DECL_P (decl)) |
| 21683 | return; |
| 21684 | decl_name = DECL_NAME (decl); |
| 21685 | |
| 21686 | if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL) |
| 21687 | { |
| 21688 | const char *name = dwarf2_name (decl, scope: 0); |
| 21689 | add_desc_attribute (die, name_string: name ? name : IDENTIFIER_POINTER (decl_name)); |
| 21690 | } |
| 21691 | else |
| 21692 | { |
| 21693 | char *desc = print_generic_expr_to_str (decl); |
| 21694 | add_desc_attribute (die, name_string: desc); |
| 21695 | free (ptr: desc); |
| 21696 | } |
| 21697 | } |
| 21698 | |
| 21699 | /* Retrieve the descriptive type of TYPE, if any, make sure it has a |
| 21700 | DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE |
| 21701 | of TYPE accordingly. |
| 21702 | |
| 21703 | ??? This is a temporary measure until after we're able to generate |
| 21704 | regular DWARF for the complex Ada type system. */ |
| 21705 | |
| 21706 | static void |
| 21707 | add_gnat_descriptive_type_attribute (dw_die_ref die, tree type, |
| 21708 | dw_die_ref context_die) |
| 21709 | { |
| 21710 | tree dtype; |
| 21711 | dw_die_ref dtype_die; |
| 21712 | |
| 21713 | if (!lang_hooks.types.descriptive_type) |
| 21714 | return; |
| 21715 | |
| 21716 | dtype = lang_hooks.types.descriptive_type (type); |
| 21717 | if (!dtype) |
| 21718 | return; |
| 21719 | |
| 21720 | dtype_die = lookup_type_die (type: dtype); |
| 21721 | if (!dtype_die) |
| 21722 | { |
| 21723 | gen_type_die (dtype, context_die); |
| 21724 | dtype_die = lookup_type_die (type: dtype); |
| 21725 | gcc_assert (dtype_die); |
| 21726 | } |
| 21727 | |
| 21728 | add_AT_die_ref (die, attr_kind: DW_AT_GNAT_descriptive_type, targ_die: dtype_die); |
| 21729 | } |
| 21730 | |
| 21731 | /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */ |
| 21732 | |
static const char *
comp_dir_string (void)
{
  const char *wd;
  char *wd_plus_sep = NULL;
  /* The compilation directory cannot change within one run, so compute
     the string once and return the cached copy thereafter.  */
  static const char *cached_wd = NULL;

  if (cached_wd != NULL)
    return cached_wd;

  wd = get_src_pwd ();
  if (wd == NULL)
    return NULL;

  /* Some configurations want the directory to end with a separator so
     that appending a file name yields a valid path.  */
  if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
    {
      size_t wdlen = strlen (s: wd);
      wd_plus_sep = XNEWVEC (char, wdlen + 2);
      strcpy (dest: wd_plus_sep, src: wd);
      wd_plus_sep [wdlen] = DIR_SEPARATOR;
      wd_plus_sep [wdlen + 1] = 0;
      wd = wd_plus_sep;
    }

  /* Apply any -fdebug-prefix-map style remapping.  */
  cached_wd = remap_debug_filename (wd);

  /* remap_debug_filename can just pass through wd or return a new gc string.
     These two types can't be both stored in a GTY(())-tagged string, but since
     the cached value lives forever just copy it if needed.  */
  if (cached_wd != wd)
    {
      /* Remapping produced a fresh string, so the separator-augmented
	 buffer (if any) is no longer referenced and can be released.  */
      cached_wd = xstrdup (cached_wd);
      if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR && wd_plus_sep != NULL)
	free (ptr: wd_plus_sep);
    }

  return cached_wd;
}
| 21771 | |
| 21772 | /* Generate a DW_AT_comp_dir attribute for DIE. */ |
| 21773 | |
| 21774 | static void |
| 21775 | add_comp_dir_attribute (dw_die_ref die) |
| 21776 | { |
| 21777 | const char * wd = comp_dir_string (); |
| 21778 | if (wd != NULL) |
| 21779 | add_filepath_AT_string (die, attr_kind: DW_AT_comp_dir, str: wd); |
| 21780 | } |
| 21781 | |
| 21782 | /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a |
| 21783 | pointer computation, ...), output a representation for that bound according |
| 21784 | to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See |
| 21785 | loc_list_from_tree for the meaning of CONTEXT. */ |
| 21786 | |
static void
add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
		 int forms, struct loc_descr_context *context)
{
  dw_die_ref context_die, decl_die = NULL;
  dw_loc_list_ref list;
  bool strip_conversions = true;
  bool placeholder_seen = false;

  /* Strip (view-)conversions wrapped around VALUE: they do not change the
     scalar being described.  */
  while (strip_conversions)
    switch (TREE_CODE (value))
      {
      case ERROR_MARK:
      case SAVE_EXPR:
	/* Nothing useful can be output for these.  */
	return;

      CASE_CONVERT:
      case VIEW_CONVERT_EXPR:
	value = TREE_OPERAND (value, 0);
	break;

      default:
	strip_conversions = false;
	break;
      }

  /* If possible and permitted, output the attribute as a constant.  */
  if ((forms & dw_scalar_form_constant) != 0
      && TREE_CODE (value) == INTEGER_CST)
    {
      unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));

      /* If HOST_WIDE_INT is big enough then represent the bound as
	 a constant value.  We need to choose a form based on
	 whether the type is signed or unsigned.  We cannot just
	 call add_AT_unsigned if the value itself is positive
	 (add_AT_unsigned might add the unsigned value encoded as
	 DW_FORM_data[1248]).  Some DWARF consumers will lookup the
	 bounds type and then sign extend any unsigned values found
	 for signed types.  This is needed only for
	 DW_AT_{lower,upper}_bound, since for most other attributes,
	 consumers will treat DW_FORM_data[1248] as unsigned values,
	 regardless of the underlying type.  */
      if (prec <= HOST_BITS_PER_WIDE_INT
	  || tree_fits_uhwi_p (value))
	{
	  if (TYPE_UNSIGNED (TREE_TYPE (value)))
	    add_AT_unsigned (die, attr_kind: attr, TREE_INT_CST_LOW (value));
	  else
	    add_AT_int (die, attr_kind: attr, TREE_INT_CST_LOW (value));
	}
      else if (dwarf_version >= 5
	       && TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (value))) == 128)
	/* Otherwise represent the bound as an unsigned value with
	   the precision of its type.  The precision and signedness
	   of the type will be necessary to re-interpret it
	   unambiguously.  */
	add_AT_wide (die, attr_kind: attr, w: wi::to_wide (t: value));
      else
	{
	  /* Last resort for a wide constant: emit a location descriptor
	     that computes the value.  */
	  rtx v = immed_wide_int_const (wi::to_wide (t: value),
					TYPE_MODE (TREE_TYPE (value)));
	  dw_loc_descr_ref loc
	    = loc_descriptor (rtl: v, TYPE_MODE (TREE_TYPE (value)),
			      initialized: VAR_INIT_STATUS_INITIALIZED);
	  if (loc)
	    add_AT_loc (die, attr_kind: attr, loc);
	}
      return;
    }

  /* Otherwise, if it's possible and permitted too, output a reference to
     another DIE.  */
  if ((forms & dw_scalar_form_reference) != 0)
    {
      tree decl = NULL_TREE;

      /* Some type attributes reference an outer type.  For instance, the upper
	 bound of an array may reference an embedding record (this happens in
	 Ada).  */
      if (TREE_CODE (value) == COMPONENT_REF
	  && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
	  && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
	decl = TREE_OPERAND (value, 1);

      else if (VAR_P (value)
	       || TREE_CODE (value) == PARM_DECL
	       || TREE_CODE (value) == RESULT_DECL)
	decl = value;

      if (decl != NULL_TREE)
	{
	  decl_die = lookup_decl_die (decl);

	  /* ??? Can this happen, or should the variable have been bound
	     first?  Probably it can, since I imagine that we try to create
	     the types of parameters in the order in which they exist in
	     the list, and won't have created a forward reference to a
	     later parameter.  */
	  if (decl_die != NULL)
	    {
	      /* Only refer to the decl's DIE if it carries a location or
		 value a consumer could actually evaluate.  */
	      if (get_AT (die: decl_die, attr_kind: DW_AT_location)
		  || get_AT (die: decl_die, attr_kind: DW_AT_data_member_location)
		  || get_AT (die: decl_die, attr_kind: DW_AT_data_bit_offset)
		  || get_AT (die: decl_die, attr_kind: DW_AT_const_value))
		{
		  add_AT_die_ref (die, attr_kind: attr, targ_die: decl_die);
		  return;
		}
	    }
	}
    }

  /* Last chance: try to create a stack operation procedure to evaluate the
     value.  Do nothing if even that is not possible or permitted.  */
  if ((forms & dw_scalar_form_exprloc) == 0)
    return;

  list = loc_list_from_tree (loc: value, want_address: 2, context);
  /* Remember (and reset) whether a PLACEHOLDER_EXPR argument was used while
     building the list; such a list cannot be emitted as a reference.  */
  if (context && context->placeholder_arg)
    {
      placeholder_seen = context->placeholder_seen;
      context->placeholder_seen = false;
    }
  if (list == NULL || single_element_loc_list_p (list))
    {
      /* If this attribute is not a reference nor constant, it is
	 a DWARF expression rather than location description.  For that
	 loc_list_from_tree (value, 0, &context) is needed.  */
      dw_loc_list_ref list2 = loc_list_from_tree (loc: value, want_address: 0, context);
      if (list2 && single_element_loc_list_p (list: list2))
	{
	  if (placeholder_seen)
	    {
	      /* The expression expects one frame-offset argument; verify
		 the DW_OP_pick operations are consistent with that.  */
	      struct dwarf_procedure_info dpi;
	      dpi.fndecl = NULL_TREE;
	      dpi.args_count = 1;
	      if (!resolve_args_picking (loc: list2->expr, initial_frame_offset: 1, dpi: &dpi))
		return;
	    }
	  add_AT_loc (die, attr_kind: attr, loc: list2->expr);
	  return;
	}
    }

  /* If that failed to give a single element location list, fall back to
     outputting this as a reference... still if permitted.  */
  if (list == NULL
      || (forms & dw_scalar_form_reference) == 0
      || placeholder_seen)
    return;

  if (!decl_die)
    {
      /* Synthesize an artificial variable DIE to hold the location list,
	 parented in the current function (or the CU at file scope).  */
      if (current_function_decl == 0)
	context_die = comp_unit_die ();
      else
	context_die = lookup_decl_die (decl: current_function_decl);

      decl_die = new_die (tag_value: DW_TAG_variable, parent_die: context_die, t: value);
      add_AT_flag (die: decl_die, attr_kind: DW_AT_artificial, flag: 1);
      add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
			  context_die);
    }

  add_AT_location_description (die: decl_die, attr_kind: DW_AT_location, descr: list);
  add_AT_die_ref (die, attr_kind: attr, targ_die: decl_die);
}
| 21955 | |
| 21956 | /* Return the default for DW_AT_lower_bound, or -1 if there is not any |
| 21957 | default. */ |
| 21958 | |
| 21959 | static int |
| 21960 | lower_bound_default (void) |
| 21961 | { |
| 21962 | switch (get_AT_unsigned (die: comp_unit_die (), attr_kind: DW_AT_language)) |
| 21963 | { |
| 21964 | case DW_LANG_C: |
| 21965 | case DW_LANG_C89: |
| 21966 | case DW_LANG_C99: |
| 21967 | case DW_LANG_C11: |
| 21968 | case DW_LANG_C_plus_plus: |
| 21969 | case DW_LANG_C_plus_plus_11: |
| 21970 | case DW_LANG_C_plus_plus_14: |
| 21971 | case DW_LANG_ObjC: |
| 21972 | case DW_LANG_ObjC_plus_plus: |
| 21973 | return 0; |
| 21974 | case DW_LANG_Fortran77: |
| 21975 | case DW_LANG_Fortran90: |
| 21976 | case DW_LANG_Fortran95: |
| 21977 | case DW_LANG_Fortran03: |
| 21978 | case DW_LANG_Fortran08: |
| 21979 | return 1; |
| 21980 | case DW_LANG_UPC: |
| 21981 | case DW_LANG_D: |
| 21982 | case DW_LANG_Python: |
| 21983 | return dwarf_version >= 4 ? 0 : -1; |
| 21984 | case DW_LANG_Ada95: |
| 21985 | case DW_LANG_Ada83: |
| 21986 | case DW_LANG_Cobol74: |
| 21987 | case DW_LANG_Cobol85: |
| 21988 | case DW_LANG_Modula2: |
| 21989 | case DW_LANG_PLI: |
| 21990 | return dwarf_version >= 4 ? 1 : -1; |
| 21991 | default: |
| 21992 | return -1; |
| 21993 | } |
| 21994 | } |
| 21995 | |
| 21996 | /* Given a tree node describing an array bound (either lower or upper) output |
| 21997 | a representation for that bound. */ |
| 21998 | |
static void
add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
		tree bound, struct loc_descr_context *context)
{
  int dflt;

  /* Loop so that an arbitrary stack of conversions is peeled off before
     the bound itself is examined.  Every non-conversion case returns.  */
  while (1)
    switch (TREE_CODE (bound))
      {
      /* Strip all conversions.  */
      CASE_CONVERT:
      case VIEW_CONVERT_EXPR:
	bound = TREE_OPERAND (bound, 0);
	break;

      /* All fixed-bounds are represented by INTEGER_CST nodes.  Lower bounds
	 are even omitted when they are the default.  */
      case INTEGER_CST:
	/* If the value for this bound is the default one, we can even omit the
	   attribute.  */
	if (bound_attr == DW_AT_lower_bound
	    && tree_fits_shwi_p (bound)
	    && (dflt = lower_bound_default ()) != -1
	    && tree_to_shwi (bound) == dflt)
	  return;

	/* Non-default constants fall through and are emitted by
	   add_scalar_info like any other bound.  */
	/* FALLTHRU */

      default:
	/* Let GNAT encodings do the magic for self-referential bounds.  */
	if (is_ada ()
	    && gnat_encodings == DWARF_GNAT_ENCODINGS_ALL
	    && contains_placeholder_p (bound))
	  return;

	/* Accept any representation add_scalar_info can manage: constant,
	   DWARF expression, or reference to another DIE.  */
	add_scalar_info (die: subrange_die, attr: bound_attr, value: bound,
			 forms: dw_scalar_form_constant
			 | dw_scalar_form_exprloc
			 | dw_scalar_form_reference,
			 context);
	return;
      }
}
| 22042 | |
| 22043 | /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing |
| 22044 | possibly nested array subscripts in a flat sequence if COLLAPSE_P is true. |
| 22045 | |
| 22046 | This function reuses previously set type and bound information if |
| 22047 | available. */ |
| 22048 | |
static void
add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
{
  dw_die_ref child = type_die->die_child;
  struct array_descr_info info;
  int dimension_number;

  /* Ask the language hook for pre-computed per-dimension bounds and
     types, when it provides them.  */
  if (lang_hooks.types.get_array_descr_info)
    {
      memset (s: &info, c: 0, n: sizeof (info));
      if (lang_hooks.types.get_array_descr_info (type, &info))
	/* Fortran sometimes emits array types with no dimension.  */
	gcc_assert (info.ndimensions >= 0
		    && info.ndimensions
		       <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN);
    }
  else
    info.ndimensions = 0;

  /* Walk the (possibly nested) ARRAY_TYPEs; after the first dimension we
     only keep going when COLLAPSE_P asks for a flattened sequence.  */
  for (dimension_number = 0;
       TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
       type = TREE_TYPE (type), dimension_number++)
    {
      tree domain = TYPE_DOMAIN (type);

      /* A Fortran string type is an array; do not descend into it as if
	 it were an extra dimension.  */
      if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
	break;

      /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
	 and (in GNU C only) variable bounds.  Handle all three forms
	 here.  */

      /* Find and reuse a previously generated DW_TAG_subrange_type if
	 available.

	 For multi-dimensional arrays, as we iterate through the
	 various dimensions in the enclosing for loop above, we also
	 iterate through the DIE children and pick at each
	 DW_TAG_subrange_type previously generated (if available).
	 Each child DW_TAG_subrange_type DIE describes the range of
	 the current dimension.  At this point we should have as many
	 DW_TAG_subrange_type's as we have dimensions in the
	 array.  */
      dw_die_ref subrange_die = NULL;
      if (child)
	/* NB: the child list is circular; die_child points at the last
	   child, so advancing first visits the first child.  */
	while (1)
	  {
	    child = child->die_sib;
	    if (child->die_tag == DW_TAG_subrange_type)
	      subrange_die = child;
	    if (child == type_die->die_child)
	      {
		/* If we wrapped around, stop looking next time.  */
		child = NULL;
		break;
	      }
	    if (child->die_tag == DW_TAG_subrange_type)
	      break;
	  }
      if (!subrange_die)
	subrange_die = new_die (tag_value: DW_TAG_subrange_type, parent_die: type_die, NULL);

      if (domain)
	{
	  /* We have an array type with specified bounds.  */
	  tree lower = TYPE_MIN_VALUE (domain);
	  tree upper = TYPE_MAX_VALUE (domain);
	  tree index_type = TREE_TYPE (domain);

	  /* Prefer the bounds the language hook computed for this
	     dimension, when available.  */
	  if (dimension_number <= info.ndimensions - 1)
	    {
	      lower = info.dimen[dimension_number].lower_bound;
	      upper = info.dimen[dimension_number].upper_bound;
	      index_type = info.dimen[dimension_number].bounds_type;
	    }

	  /* Define the index type.  */
	  if (index_type && !get_AT (die: subrange_die, attr_kind: DW_AT_type))
	    add_type_attribute (subrange_die, index_type, TYPE_UNQUALIFIED,
				false, type_die);

	  /* ??? If upper is NULL, the array has unspecified length,
	     but it does have a lower bound.  This happens with Fortran
	       dimension arr(N:*)
	     Since the debugger is definitely going to need to know N
	     to produce useful results, go ahead and output the lower
	     bound solo, and hope the debugger can cope.  */

	  if (lower && !get_AT (die: subrange_die, attr_kind: DW_AT_lower_bound))
	    add_bound_info (subrange_die, bound_attr: DW_AT_lower_bound, bound: lower, NULL);

	  if (!get_AT (die: subrange_die, attr_kind: DW_AT_upper_bound)
	      && !get_AT (die: subrange_die, attr_kind: DW_AT_count))
	    {
	      if (upper)
		add_bound_info (subrange_die, bound_attr: DW_AT_upper_bound, bound: upper, NULL);
	      else if ((is_c () || is_cxx ()) && COMPLETE_TYPE_P (type))
		/* Zero-length array.  */
		add_bound_info (subrange_die, bound_attr: DW_AT_count,
				bound: build_int_cst (TREE_TYPE (lower), 0), NULL);
	    }
	}

      /* Otherwise we have an array type with an unspecified length.  The
	 DWARF-2 spec does not say how to handle this; let's just leave out the
	 bounds.  */
    }
}
| 22157 | |
| 22158 | /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */ |
| 22159 | |
static void
add_byte_size_attribute (dw_die_ref die, tree tree_node)
{
  dw_die_ref decl_die;
  HOST_WIDE_INT size;

  switch (TREE_CODE (tree_node))
    {
    case ERROR_MARK:
      size = 0;
      break;
    case ENUMERAL_TYPE:
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* When the size lives in its own VAR_DECL that already has a DIE,
	 emit the attribute as a reference to that DIE so consumers can
	 read the dynamic size from the variable.  */
      if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
	  && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
	{
	  add_AT_die_ref (die, attr_kind: DW_AT_byte_size, targ_die: decl_die);
	  return;
	}
      size = int_size_in_bytes (tree_node);
      break;
    case FIELD_DECL:
      /* For a data member of a struct or union, the DW_AT_byte_size is
	 generally given as the number of bytes normally allocated for an
	 object of the *declared* type of the member itself.  This is true
	 even for bit-fields.  */
      size = int_size_in_bytes (field_type (decl: tree_node));
      break;
    default:
      gcc_unreachable ();
    }

  /* Note that `size' might be -1 when we get to this point.  If it is, that
     indicates that the byte size of the entity in question is variable.  */
  if (size >= 0)
    add_AT_unsigned (die, attr_kind: DW_AT_byte_size, unsigned_val: size);

  /* Support for dynamically-sized objects was introduced in DWARF3.  */
  else if (TYPE_P (tree_node)
	   && (dwarf_version >= 3 || !dwarf_strict)
	   && gnat_encodings != DWARF_GNAT_ENCODINGS_ALL)
    {
      /* Describe the variable size as a DWARF expression evaluated in the
	 context of the type itself (so PLACEHOLDER_EXPRs can resolve).  */
      struct loc_descr_context ctx = {
	.context_type: const_cast<tree> (tree_node),	/* context_type */
	NULL_TREE,	/* base_decl */
	NULL,	/* dpi */
	.placeholder_arg: false,	/* placeholder_arg */
	.placeholder_seen: false,	/* placeholder_seen */
	.strict_signedness: false	/* strict_signedness */
      };

      tree tree_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (tree_node));
      add_scalar_info (die, attr: DW_AT_byte_size, value: tree_size,
		       forms: dw_scalar_form_constant
		       | dw_scalar_form_exprloc
		       | dw_scalar_form_reference,
		       context: &ctx);
    }
}
| 22221 | |
| 22222 | /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default |
| 22223 | alignment. */ |
| 22224 | |
| 22225 | static void |
| 22226 | add_alignment_attribute (dw_die_ref die, tree tree_node) |
| 22227 | { |
| 22228 | if (dwarf_version < 5 && dwarf_strict) |
| 22229 | return; |
| 22230 | |
| 22231 | unsigned align; |
| 22232 | |
| 22233 | if (DECL_P (tree_node)) |
| 22234 | { |
| 22235 | if (!DECL_USER_ALIGN (tree_node)) |
| 22236 | return; |
| 22237 | |
| 22238 | align = DECL_ALIGN_UNIT (tree_node); |
| 22239 | } |
| 22240 | else if (TYPE_P (tree_node)) |
| 22241 | { |
| 22242 | if (!TYPE_USER_ALIGN (tree_node)) |
| 22243 | return; |
| 22244 | |
| 22245 | align = TYPE_ALIGN_UNIT (tree_node); |
| 22246 | } |
| 22247 | else |
| 22248 | gcc_unreachable (); |
| 22249 | |
| 22250 | add_AT_unsigned (die, attr_kind: DW_AT_alignment, unsigned_val: align); |
| 22251 | } |
| 22252 | |
| 22253 | /* For a FIELD_DECL node which represents a bit-field, output an attribute |
| 22254 | which specifies the distance in bits from the highest order bit of the |
| 22255 | "containing object" for the bit-field to the highest order bit of the |
| 22256 | bit-field itself. |
| 22257 | |
| 22258 | For any given bit-field, the "containing object" is a hypothetical object |
| 22259 | (of some integral or enum type) within which the given bit-field lives. The |
| 22260 | type of this hypothetical "containing object" is always the same as the |
| 22261 | declared type of the individual bit-field itself. The determination of the |
| 22262 | exact location of the "containing object" for a bit-field is rather |
| 22263 | complicated. It's handled by the `field_byte_offset' function (above). |
| 22264 | |
| 22265 | Note that it is the size (in bytes) of the hypothetical "containing object" |
| 22266 | which will be given in the DW_AT_byte_size attribute for this bit-field. |
| 22267 | (See `byte_size_attribute' above). */ |
| 22268 | |
static inline void
add_bit_offset_attribute (dw_die_ref die, tree decl)
{
  HOST_WIDE_INT object_offset_in_bytes;
  tree original_type = DECL_BIT_FIELD_TYPE (decl);
  HOST_WIDE_INT bitpos_int;
  HOST_WIDE_INT highest_order_object_bit_offset;
  HOST_WIDE_INT highest_order_field_bit_offset;
  HOST_WIDE_INT bit_offset;

  /* The containing object is within the DECL_CONTEXT.  */
  struct vlr_context ctx = { DECL_CONTEXT (decl), NULL_TREE };

  field_byte_offset (decl, ctx: &ctx, cst_offset: &object_offset_in_bytes);

  /* Must be a field and a bit field.  */
  gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);

  /* We can't yet handle bit-fields whose offsets are variable, so if we
     encounter such things, just return without generating any attribute
     whatsoever.  Likewise for variable or too large size.  */
  if (! tree_fits_shwi_p (bit_position (decl))
      || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
    return;

  bitpos_int = int_bit_position (field: decl);

  /* Note that the bit offset is always the distance (in bits) from the
     highest-order bit of the "containing object" to the highest-order bit of
     the bit-field itself.  Since the "high-order end" of any object or field
     is different on big-endian and little-endian machines, the computation
     below must take account of these differences.  */
  highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
  highest_order_field_bit_offset = bitpos_int;

  if (! BYTES_BIG_ENDIAN)
    {
      /* On little-endian targets the high-order end sits past the low
	 end by the size of the field / containing type, respectively.  */
      highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
      highest_order_object_bit_offset +=
        simple_type_size_in_bits (type: original_type);
    }

  bit_offset
    = (! BYTES_BIG_ENDIAN
       ? highest_order_object_bit_offset - highest_order_field_bit_offset
       : highest_order_field_bit_offset - highest_order_object_bit_offset);

  /* Use the signed form only when needed, so positive offsets keep the
     compact unsigned encoding.  */
  if (bit_offset < 0)
    add_AT_int (die, attr_kind: DW_AT_bit_offset, int_val: bit_offset);
  else
    add_AT_unsigned (die, attr_kind: DW_AT_bit_offset, unsigned_val: (unsigned HOST_WIDE_INT) bit_offset);
}
| 22321 | |
| 22322 | /* For a FIELD_DECL node which represents a bit field, output an attribute |
| 22323 | which specifies the length in bits of the given field. */ |
| 22324 | |
| 22325 | static inline void |
| 22326 | add_bit_size_attribute (dw_die_ref die, tree decl) |
| 22327 | { |
| 22328 | /* Must be a field and a bit field. */ |
| 22329 | gcc_assert (TREE_CODE (decl) == FIELD_DECL |
| 22330 | && DECL_BIT_FIELD_TYPE (decl)); |
| 22331 | |
| 22332 | if (tree_fits_uhwi_p (DECL_SIZE (decl))) |
| 22333 | add_AT_unsigned (die, attr_kind: DW_AT_bit_size, unsigned_val: tree_to_uhwi (DECL_SIZE (decl))); |
| 22334 | } |
| 22335 | |
| 22336 | /* If the compiled language is ANSI C, then add a 'prototyped' |
| 22337 | attribute, if arg types are given for the parameters of a function. */ |
| 22338 | |
| 22339 | static inline void |
| 22340 | add_prototyped_attribute (dw_die_ref die, tree func_type) |
| 22341 | { |
| 22342 | switch (get_AT_unsigned (die: comp_unit_die (), attr_kind: DW_AT_language)) |
| 22343 | { |
| 22344 | case DW_LANG_C: |
| 22345 | case DW_LANG_C89: |
| 22346 | case DW_LANG_C99: |
| 22347 | case DW_LANG_C11: |
| 22348 | case DW_LANG_ObjC: |
| 22349 | if (prototype_p (func_type)) |
| 22350 | add_AT_flag (die, attr_kind: DW_AT_prototyped, flag: 1); |
| 22351 | break; |
| 22352 | default: |
| 22353 | break; |
| 22354 | } |
| 22355 | } |
| 22356 | |
| 22357 | /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found |
| 22358 | by looking in the type declaration, the object declaration equate table or |
| 22359 | the block mapping. */ |
| 22360 | |
static inline void
add_abstract_origin_attribute (dw_die_ref die, tree origin)
{
  dw_die_ref origin_die = NULL;

  /* For late LTO debug output we want to refer directly to the abstract
     DIE in the early debug rather to the possibly existing concrete
     instance and avoid creating that just for this purpose.  */
  sym_off_pair *desc;
  if (in_lto_p
      && external_die_map
      && (desc = external_die_map->get (k: origin)))
    {
      /* The early DIE lives in another object; reference it by symbol
	 plus offset.  */
      add_AT_external_die_ref (die, attr_kind: DW_AT_abstract_origin,
			       symbol: desc->sym, offset: desc->off);
      return;
    }

  /* ORIGIN may be a decl, a type or a lexical block; each has its own
     lookup table.  */
  if (DECL_P (origin))
    origin_die = lookup_decl_die (decl: origin);
  else if (TYPE_P (origin))
    origin_die = lookup_type_die (type: origin);
  else if (TREE_CODE (origin) == BLOCK)
    origin_die = lookup_block_die (block: origin);

  /* XXX: Functions that are never lowered don't always have correct block
     trees (in the case of java, they simply have no block tree, in some other
     languages).  For these functions, there is nothing we can really do to
     output correct debug info for inlined functions in all cases.  Rather
     than die, we'll just produce deficient debug info now, in that we will
     have variables without a proper abstract origin.  In the future, when all
     functions are lowered, we should re-add a gcc_assert (origin_die)
     here.  */

  if (origin_die)
    {
      dw_attr_node *a;
      /* Like above, if we already created a concrete instance DIE
	 do not use that for the abstract origin but the early DIE
	 if present.  */
      if (in_lto_p
	  && (a = get_AT (die: origin_die, attr_kind: DW_AT_abstract_origin)))
	origin_die = AT_ref (a);
      add_AT_die_ref (die, attr_kind: DW_AT_abstract_origin, targ_die: origin_die);
    }
}
| 22407 | |
| 22408 | /* We do not currently support the pure_virtual attribute. */ |
| 22409 | |
| 22410 | static inline void |
| 22411 | add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl) |
| 22412 | { |
| 22413 | if (DECL_VINDEX (func_decl)) |
| 22414 | { |
| 22415 | add_AT_unsigned (die, attr_kind: DW_AT_virtuality, unsigned_val: DW_VIRTUALITY_virtual); |
| 22416 | |
| 22417 | if (tree_fits_shwi_p (DECL_VINDEX (func_decl))) |
| 22418 | add_AT_loc (die, attr_kind: DW_AT_vtable_elem_location, |
| 22419 | loc: new_loc_descr (op: DW_OP_constu, |
| 22420 | oprnd1: tree_to_shwi (DECL_VINDEX (func_decl)), |
| 22421 | oprnd2: 0)); |
| 22422 | |
| 22423 | /* GNU extension: Record what type this method came from originally. */ |
| 22424 | if (debug_info_level > DINFO_LEVEL_TERSE |
| 22425 | && DECL_CONTEXT (func_decl)) |
| 22426 | add_AT_die_ref (die, attr_kind: DW_AT_containing_type, |
| 22427 | targ_die: lookup_type_die (DECL_CONTEXT (func_decl))); |
| 22428 | } |
| 22429 | } |
| 22430 | |
| 22431 | /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the |
| 22432 | given decl. This used to be a vendor extension until after DWARF 4 |
| 22433 | standardized it. */ |
| 22434 | |
| 22435 | static void |
| 22436 | add_linkage_attr (dw_die_ref die, tree decl) |
| 22437 | { |
| 22438 | const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)); |
| 22439 | |
| 22440 | /* Mimic what assemble_name_raw does with a leading '*'. */ |
| 22441 | if (name[0] == '*') |
| 22442 | name = &name[1]; |
| 22443 | |
| 22444 | if (dwarf_version >= 4) |
| 22445 | add_AT_string (die, attr_kind: DW_AT_linkage_name, str: name); |
| 22446 | else |
| 22447 | add_AT_string (die, attr_kind: DW_AT_MIPS_linkage_name, str: name); |
| 22448 | } |
| 22449 | |
| 22450 | /* Add source coordinate attributes for the given decl. */ |
| 22451 | |
| 22452 | static void |
| 22453 | add_src_coords_attributes (dw_die_ref die, tree decl) |
| 22454 | { |
| 22455 | expanded_location s; |
| 22456 | |
| 22457 | if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION) |
| 22458 | return; |
| 22459 | s = expand_location (DECL_SOURCE_LOCATION (decl)); |
| 22460 | add_AT_file (die, attr_kind: DW_AT_decl_file, fd: lookup_filename (s.file)); |
| 22461 | add_AT_unsigned (die, attr_kind: DW_AT_decl_line, unsigned_val: s.line); |
| 22462 | if (debug_column_info && s.column) |
| 22463 | add_AT_unsigned (die, attr_kind: DW_AT_decl_column, unsigned_val: s.column); |
| 22464 | } |
| 22465 | |
| 22466 | /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */ |
| 22467 | |
| 22468 | static void |
| 22469 | add_linkage_name_raw (dw_die_ref die, tree decl) |
| 22470 | { |
| 22471 | /* Defer until we have an assembler name set. */ |
| 22472 | if (!DECL_ASSEMBLER_NAME_SET_P (decl)) |
| 22473 | { |
| 22474 | limbo_die_node *asm_name; |
| 22475 | |
| 22476 | asm_name = ggc_cleared_alloc<limbo_die_node> (); |
| 22477 | asm_name->die = die; |
| 22478 | asm_name->created_for = decl; |
| 22479 | asm_name->next = deferred_asm_name; |
| 22480 | deferred_asm_name = asm_name; |
| 22481 | } |
| 22482 | else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)) |
| 22483 | add_linkage_attr (die, decl); |
| 22484 | } |
| 22485 | |
| 22486 | /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */ |
| 22487 | |
| 22488 | static void |
| 22489 | add_linkage_name (dw_die_ref die, tree decl) |
| 22490 | { |
| 22491 | if (debug_info_level > DINFO_LEVEL_NONE |
| 22492 | && VAR_OR_FUNCTION_DECL_P (decl) |
| 22493 | && TREE_PUBLIC (decl) |
| 22494 | && !(VAR_P (decl) && DECL_REGISTER (decl)) |
| 22495 | && die->die_tag != DW_TAG_member) |
| 22496 | add_linkage_name_raw (die, decl); |
| 22497 | } |
| 22498 | |
/* Add a DW_AT_name attribute and source coordinate attribute for the
   given decl, but only if it actually has a name.  NO_LINKAGE_NAME
   suppresses the linkage-name attribute even for public decls.  */

static void
add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
				    bool no_linkage_name)
{
  tree decl_name;

  decl_name = DECL_NAME (decl);
  if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
    {
      const char *name = dwarf2_name (decl, scope: 0);
      /* Fall back to a description when no printable name is available.  */
      if (name)
	add_name_attribute (die, name_string: name);
      else
	add_desc_attribute (die, decl);

      /* Compiler-generated decls get no source coordinates.  */
      if (! DECL_ARTIFICIAL (decl))
	add_src_coords_attributes (die, decl);

      if (!no_linkage_name)
	add_linkage_name (die, decl);
    }
  else
    add_desc_attribute (die, decl);

#ifdef VMS_DEBUGGING_INFO
  /* Get the function's name, as described by its RTL.  This may be different
     from the DECL_NAME name used in the source file.  */
  if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
    {
      add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
		   XEXP (DECL_RTL (decl), 0), false);
      vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
    }
#endif /* VMS_DEBUGGING_INFO */
}
| 22537 | |
/* Add VALUE as a DW_AT_discr_value attribute to DIE.  */

static void
add_discr_value (dw_die_ref die, dw_discr_value *value)
{
  dw_attr_node attr;

  attr.dw_attr = DW_AT_discr_value;
  attr.dw_attr_val.val_class = dw_val_class_discr_value;
  attr.dw_attr_val.val_entry = NULL;
  /* VALUE->pos selects which union member carries the discriminant:
     nonzero means the unsigned variant, zero the signed one.  */
  attr.dw_attr_val.v.val_discr_value.pos = value->pos;
  if (value->pos)
    attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
  else
    attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
  add_dwarf_attr (die, attr: &attr);
}
| 22555 | |
/* Add DISCR_LIST as a DW_AT_discr_list to DIE.  Used for variant parts
   whose discriminant matches a list/range of values.  */

static void
add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
{
  dw_attr_node attr;

  attr.dw_attr = DW_AT_discr_list;
  attr.dw_attr_val.val_class = dw_val_class_discr_list;
  attr.dw_attr_val.val_entry = NULL;
  attr.dw_attr_val.v.val_discr_list = discr_list;
  add_dwarf_attr (die, attr: &attr);
}
| 22569 | |
/* Accessor: return the discriminant list stored in a DW_AT_discr_list
   attribute node.  */

static inline dw_discr_list_ref
AT_discr_list (dw_attr_node *attr)
{
  return attr->dw_attr_val.v.val_discr_list;
}
| 22575 | |
#ifdef VMS_DEBUGGING_INFO
/* Output the debug main pointer die for VMS.  Creates a synthetic
   DW_TAG_subprogram DIE whose entry point is the current function's
   prologue-end label, and makes it the first child of the CU DIE.  */

void
dwarf2out_vms_debug_main_pointer (void)
{
  char label[MAX_ARTIFICIAL_LABEL_BYTES];
  dw_die_ref die;

  /* Allocate the VMS debug main subprogram die.  */
  die = new_die_raw (DW_TAG_subprogram);
  add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
  ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
			       current_function_funcdef_no);
  add_AT_lbl_id (die, DW_AT_entry_pc, label);

  /* Make it the first child of comp_unit_die ().  */
  die->die_parent = comp_unit_die ();
  if (comp_unit_die ()->die_child)
    {
      /* Children form a circular sibling list; splice DIE in right after
	 the last child so it becomes the list head.  */
      die->die_sib = comp_unit_die ()->die_child->die_sib;
      comp_unit_die ()->die_child->die_sib = die;
    }
  else
    {
      /* First (and only) child: a one-element circular list.  */
      die->die_sib = die;
      comp_unit_die ()->die_child = die;
    }
}
#endif /* VMS_DEBUGGING_INFO */
| 22606 | |
| 22607 | /* walk_tree helper function for uses_local_type, below. */ |
| 22608 | |
| 22609 | static tree |
| 22610 | uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) |
| 22611 | { |
| 22612 | if (!TYPE_P (*tp)) |
| 22613 | *walk_subtrees = 0; |
| 22614 | else |
| 22615 | { |
| 22616 | tree name = TYPE_NAME (*tp); |
| 22617 | if (name && DECL_P (name) && decl_function_context (name)) |
| 22618 | return *tp; |
| 22619 | } |
| 22620 | return NULL_TREE; |
| 22621 | } |
| 22622 | |
| 22623 | /* If TYPE involves a function-local type (including a local typedef to a |
| 22624 | non-local type), returns that type; otherwise returns NULL_TREE. */ |
| 22625 | |
| 22626 | static tree |
| 22627 | uses_local_type (tree type) |
| 22628 | { |
| 22629 | tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL); |
| 22630 | return used; |
| 22631 | } |
| 22632 | |
/* Return the DIE for the scope that immediately contains this type.
   Non-named types that do not involve a function-local type get global
   scope.  Named types nested in namespaces or other types get their
   containing scope.  All other types (i.e. function-local named types) get
   the current active scope.  */

static dw_die_ref
scope_die_for (tree t, dw_die_ref context_die)
{
  dw_die_ref scope_die = NULL;
  tree containing_scope;

  /* Non-types always go in the current scope.  */
  gcc_assert (TYPE_P (t));

  /* Use the scope of the typedef, rather than the scope of the type
     it refers to.  */
  if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
    containing_scope = DECL_CONTEXT (TYPE_NAME (t));
  else
    containing_scope = TYPE_CONTEXT (t);

  /* Use the containing namespace if there is one.  */
  if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
    {
      if (context_die == lookup_decl_die (decl: containing_scope))
	/* OK */;
      else if (debug_info_level > DINFO_LEVEL_TERSE)
	context_die = get_context_die (containing_scope);
      else
	/* In terse mode, pretend there is no namespace scope; the
	   SCOPE_FILE_SCOPE_P path below then applies.  */
	containing_scope = NULL_TREE;
    }

  /* Ignore function type "scopes" from the C frontend.  They mean that
     a tagged type is local to a parmlist of a function declarator, but
     that isn't useful to DWARF.  */
  if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
    containing_scope = NULL_TREE;

  if (SCOPE_FILE_SCOPE_P (containing_scope))
    {
      /* If T uses a local type keep it local as well, to avoid references
	 to function-local DIEs from outside the function.  */
      if (current_function_decl && uses_local_type (type: t))
	scope_die = context_die;
      else
	scope_die = comp_unit_die ();
    }
  else if (TYPE_P (containing_scope))
    {
      /* For types, we can just look up the appropriate DIE.  */
      if (debug_info_level > DINFO_LEVEL_TERSE)
	scope_die = get_context_die (containing_scope);
      else
	{
	  /* In terse mode the containing type may never have been
	     emitted; fall back to the CU scope in that case.  */
	  scope_die = lookup_type_die_strip_naming_typedef (type: containing_scope);
	  if (scope_die == NULL)
	    scope_die = comp_unit_die ();
	}
    }
  else
    /* Function-local named type: use the currently active scope.  */
    scope_die = context_die;

  return scope_die;
}
| 22698 | |
| 22699 | /* Returns true if CONTEXT_DIE is internal to a function. */ |
| 22700 | |
| 22701 | static inline bool |
| 22702 | local_scope_p (dw_die_ref context_die) |
| 22703 | { |
| 22704 | for (; context_die; context_die = context_die->die_parent) |
| 22705 | if (context_die->die_tag == DW_TAG_inlined_subroutine |
| 22706 | || context_die->die_tag == DW_TAG_subprogram) |
| 22707 | return true; |
| 22708 | |
| 22709 | return false; |
| 22710 | } |
| 22711 | |
| 22712 | /* Returns true if CONTEXT_DIE is a class. */ |
| 22713 | |
| 22714 | static inline bool |
| 22715 | class_scope_p (dw_die_ref context_die) |
| 22716 | { |
| 22717 | return (context_die |
| 22718 | && (context_die->die_tag == DW_TAG_structure_type |
| 22719 | || context_die->die_tag == DW_TAG_class_type |
| 22720 | || context_die->die_tag == DW_TAG_interface_type |
| 22721 | || context_die->die_tag == DW_TAG_union_type)); |
| 22722 | } |
| 22723 | |
| 22724 | /* Returns true if CONTEXT_DIE is a class or namespace, for deciding |
| 22725 | whether or not to treat a DIE in this context as a declaration. */ |
| 22726 | |
| 22727 | static inline bool |
| 22728 | class_or_namespace_scope_p (dw_die_ref context_die) |
| 22729 | { |
| 22730 | return (class_scope_p (context_die) |
| 22731 | || (context_die && context_die->die_tag == DW_TAG_namespace)); |
| 22732 | } |
| 22733 | |
/* Many forms of DIEs require a "type description" attribute.  This
   routine locates the proper "type descriptor" die for the type given
   by 'type' plus any additional qualifiers given by 'cv_quals', and
   adds a DW_AT_type attribute below the given die.  REVERSE requests the
   reverse-storage-order variant of the type.  */

static void
add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
		    bool reverse, dw_die_ref context_die)
{
  enum tree_code code = TREE_CODE (type);
  dw_die_ref type_die = NULL;

  /* At -g1 no type information is emitted at all.  */
  if (debug_info_level <= DINFO_LEVEL_TERSE)
    return;

  /* ??? If this type is an unnamed subrange type of an integral, floating-point
     or fixed-point type, use the inner type.  This is because we have no
     support for unnamed types in base_type_die.  This can happen if this is
     an Ada subrange type.  Correct solution is emit a subrange type die.  */
  if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
      && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
    type = TREE_TYPE (type), code = TREE_CODE (type);

  if (code == ERROR_MARK
      /* Handle a special case.  For functions whose return type is void, we
	 generate *no* type attribute.  (Note that no object may have type
	 `void', so this only applies to function return types).  */
      || code == VOID_TYPE)
    return;

  type_die = modified_type_die (type,
				cv_quals: cv_quals | TYPE_QUALS (type),
				TYPE_ATTRIBUTES (type),
				reverse,
				context_die);

  /* Only emit the attribute when a type DIE was actually produced.  */
  if (type_die != NULL)
    add_AT_die_ref (die: object_die, attr_kind: DW_AT_type, targ_die: type_die);
}
| 22773 | |
| 22774 | /* Given an object die, add the calling convention attribute for the |
| 22775 | function call type. */ |
| 22776 | static void |
| 22777 | add_calling_convention_attribute (dw_die_ref subr_die, tree decl) |
| 22778 | { |
| 22779 | enum dwarf_calling_convention value = DW_CC_normal; |
| 22780 | |
| 22781 | value = ((enum dwarf_calling_convention) |
| 22782 | targetm.dwarf_calling_convention (TREE_TYPE (decl))); |
| 22783 | |
| 22784 | if (is_fortran () |
| 22785 | && id_equal (DECL_ASSEMBLER_NAME (decl), str: "MAIN__" )) |
| 22786 | { |
| 22787 | /* DWARF 2 doesn't provide a way to identify a program's source-level |
| 22788 | entry point. DW_AT_calling_convention attributes are only meant |
| 22789 | to describe functions' calling conventions. However, lacking a |
| 22790 | better way to signal the Fortran main program, we used this for |
| 22791 | a long time, following existing custom. Now, DWARF 4 has |
| 22792 | DW_AT_main_subprogram, which we add below, but some tools still |
| 22793 | rely on the old way, which we thus keep. */ |
| 22794 | value = DW_CC_program; |
| 22795 | |
| 22796 | if (dwarf_version >= 4 || !dwarf_strict) |
| 22797 | add_AT_flag (die: subr_die, attr_kind: DW_AT_main_subprogram, flag: 1); |
| 22798 | } |
| 22799 | |
| 22800 | /* Only add the attribute if the backend requests it, and |
| 22801 | is not DW_CC_normal. */ |
| 22802 | if (value && (value != DW_CC_normal)) |
| 22803 | add_AT_unsigned (die: subr_die, attr_kind: DW_AT_calling_convention, unsigned_val: value); |
| 22804 | } |
| 22805 | |
/* Given a tree pointer to a struct, class, union, or enum type node, return
   a pointer to the (string) tag name for the given type, or zero if the type
   was declared without a tag.  */

static const char *
type_tag (const_tree type)
{
  const char *name = 0;

  if (TYPE_NAME (type) != 0)
    {
      tree t = 0;

      /* Find the IDENTIFIER_NODE for the type name.  Note that at most one
	 of T (set here) and NAME (set in the TYPE_DECL branch below) ends
	 up non-null; the fallback at the end covers the T case.  */
      if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
	  && !TYPE_NAMELESS (type))
	t = TYPE_NAME (type);

      /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
	 a TYPE_DECL node, regardless of whether or not a `typedef' was
	 involved.  */
      else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	       && ! DECL_IGNORED_P (TYPE_NAME (type)))
	{
	  /* We want to be extra verbose.  Don't call dwarf_name if
	     DECL_NAME isn't set.  The default hook for decl_printable_name
	     doesn't like that, and in this context it's correct to return
	     0, instead of "<anonymous>" or the like.  */
	  if (DECL_NAME (TYPE_NAME (type))
	      && !DECL_NAMELESS (TYPE_NAME (type)))
	    name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
	}

      /* Now get the name as a string, or invent one.  */
      if (!name && t != 0)
	name = IDENTIFIER_POINTER (t);
    }

  /* Treat an empty string the same as no tag at all.  */
  return (name == 0 || *name == '\0') ? 0 : name;
}
| 22846 | |
| 22847 | /* Return the type associated with a data member, make a special check |
| 22848 | for bit field types. */ |
| 22849 | |
| 22850 | static inline tree |
| 22851 | member_declared_type (const_tree member) |
| 22852 | { |
| 22853 | return (DECL_BIT_FIELD_TYPE (member) |
| 22854 | ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member)); |
| 22855 | } |
| 22856 | |
/* Get the decl's label, as described by its RTL.  This may be different
   from the DECL_NAME name used in the source file.  */

#if 0
/* Currently unused; kept for reference.  Extracts the SYMBOL_REF name
   from DECL's MEM rtx.  */
static const char *
decl_start_label (tree decl)
{
  rtx x;
  const char *fnname;

  x = DECL_RTL (decl);
  gcc_assert (MEM_P (x));

  x = XEXP (x, 0);
  gcc_assert (GET_CODE (x) == SYMBOL_REF);

  fnname = XSTR (x, 0);
  return fnname;
}
#endif
| 22877 | |
| 22878 | /* For variable-length arrays that have been previously generated, but |
| 22879 | may be incomplete due to missing subscript info, fill the subscript |
| 22880 | info. Return TRUE if this is one of those cases. */ |
| 22881 | |
| 22882 | static bool |
| 22883 | fill_variable_array_bounds (tree type) |
| 22884 | { |
| 22885 | if (TREE_ASM_WRITTEN (type) |
| 22886 | && TREE_CODE (type) == ARRAY_TYPE |
| 22887 | && variably_modified_type_p (type, NULL)) |
| 22888 | { |
| 22889 | dw_die_ref array_die = lookup_type_die (type); |
| 22890 | if (!array_die) |
| 22891 | return false; |
| 22892 | add_subscript_info (type_die: array_die, type, collapse_p: !is_ada ()); |
| 22893 | return true; |
| 22894 | } |
| 22895 | return false; |
| 22896 | } |
| 22897 | |
/* These routines generate the internal representation of the DIE's for
   the compilation unit.  Debugging information is collected by walking
   the declaration trees passed in from dwarf2out_decl().  */

static void
gen_array_type_die (tree type, dw_die_ref context_die)
{
  dw_die_ref array_die;

  /* GNU compilers represent multidimensional array types as sequences of one
     dimensional array types whose element types are themselves array types.
     We sometimes squish that down to a single array_type DIE with multiple
     subscripts in the Dwarf debugging info.  The draft Dwarf specification
     says that we are allowed to do this kind of compression in C, because
     there is no difference between an array of arrays and a multidimensional
     array.  We don't do this for Ada to remain as close as possible to the
     actual representation, which is especially important against the language
     flexibility wrt arrays of variable size.  */

  bool collapse_nested_arrays = !is_ada ();

  /* A previously emitted VLA may only need its subscript info filled in;
     nothing else to do in that case.  */
  if (fill_variable_array_bounds (type))
    return;

  dw_die_ref scope_die = scope_die_for (t: type, context_die);
  tree element_type;

  /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
     DW_TAG_string_type doesn't have DW_AT_type attribute).  */
  if (TREE_CODE (type) == ARRAY_TYPE
      && TYPE_STRING_FLAG (type)
      && is_fortran ()
      && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
    {
      HOST_WIDE_INT size;

      array_die = new_die (tag_value: DW_TAG_string_type, parent_die: scope_die, t: type);
      add_name_attribute (die: array_die, name_string: type_tag (type));
      equate_type_number_to_die (type, type_die: array_die);
      size = int_size_in_bytes (type);
      /* Constant-length string: a plain byte-size attribute suffices.  */
      if (size >= 0)
	add_AT_unsigned (die: array_die, attr_kind: DW_AT_byte_size, unsigned_val: size);
      /* ??? We can't annotate types late, but for LTO we may not
	 generate a location early either (gfortran.dg/save_6.f90).  */
      else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
	       && TYPE_DOMAIN (type) != NULL_TREE
	       && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
	{
	  /* Variable-length string: describe the length via the decl that
	     holds it (SZDECL), possibly through an INDIRECT_REF.  */
	  tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  tree rszdecl = szdecl;

	  size = int_size_in_bytes (TREE_TYPE (szdecl));
	  if (!DECL_P (szdecl))
	    {
	      if (INDIRECT_REF_P (szdecl)
		  && DECL_P (TREE_OPERAND (szdecl, 0)))
		{
		  rszdecl = TREE_OPERAND (szdecl, 0);
		  if (int_size_in_bytes (TREE_TYPE (rszdecl))
		      != DWARF2_ADDR_SIZE)
		    size = 0;
		}
	      else
		size = 0;
	    }
	  if (size > 0)
	    {
	      dw_loc_list_ref loc
		= loc_list_from_tree (loc: rszdecl, want_address: szdecl == rszdecl ? 2 : 0,
				      NULL);
	      if (loc)
		{
		  add_AT_location_description (die: array_die, attr_kind: DW_AT_string_length,
					       descr: loc);
		  /* Record the size of the length object when it is not the
		     default address size (DW_AT_string_length_byte_size is
		     the DWARF 5 spelling).  */
		  if (size != DWARF2_ADDR_SIZE)
		    add_AT_unsigned (die: array_die, dwarf_version >= 5
					  ? DW_AT_string_length_byte_size
					  : DW_AT_byte_size, unsigned_val: size);
		}
	    }
	}
      return;
    }

  array_die = new_die (tag_value: DW_TAG_array_type, parent_die: scope_die, t: type);
  add_name_attribute (die: array_die, name_string: type_tag (type));
  equate_type_number_to_die (type, type_die: array_die);

  if (VECTOR_TYPE_P (type))
    add_AT_flag (die: array_die, attr_kind: DW_AT_GNU_vector, flag: 1);

  /* For Fortran multidimensional arrays use DW_ORD_col_major ordering.  */
  if (is_fortran ()
      && TREE_CODE (type) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
      && !TYPE_STRING_FLAG (TREE_TYPE (type)))
    add_AT_unsigned (die: array_die, attr_kind: DW_AT_ordering, unsigned_val: DW_ORD_col_major);

#if 0
  /* We default the array ordering.  Debuggers will probably do the right
     things even if DW_AT_ordering is not present.  It's not even an issue
     until we start to get into multidimensional arrays anyway.  If a debugger
     is ever caught doing the Wrong Thing for multi-dimensional arrays,
     then we'll have to put the DW_AT_ordering attribute back in.  (But if
     and when we find out that we need to put these in, we will only do so
     for multidimensional arrays.  */
  add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
#endif

  if (VECTOR_TYPE_P (type))
    {
      /* For VECTOR_TYPEs we use an array DIE with appropriate bounds.  */
      dw_die_ref subrange_die = new_die (tag_value: DW_TAG_subrange_type, parent_die: array_die, NULL);
      int lb = lower_bound_default ();
      if (lb == -1)
	lb = 0;
      add_bound_info (subrange_die, bound_attr: DW_AT_lower_bound, size_int (lb), NULL);
      add_bound_info (subrange_die, bound_attr: DW_AT_upper_bound,
		      size_int (lb + TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
    }
  else
    add_subscript_info (type_die: array_die, type, collapse_p: collapse_nested_arrays);

  /* Add representation of the type of the elements of this array type and
     emit the corresponding DIE if we haven't done it already.  */
  element_type = TREE_TYPE (type);
  if (collapse_nested_arrays)
    while (TREE_CODE (element_type) == ARRAY_TYPE)
      {
	if (TYPE_STRING_FLAG (element_type) && is_fortran ())
	  break;
	element_type = TREE_TYPE (element_type);
      }

  add_type_attribute (object_die: array_die, type: element_type, cv_quals: TYPE_UNQUALIFIED,
		      TREE_CODE (type) == ARRAY_TYPE
		      && TYPE_REVERSE_STORAGE_ORDER (type),
		      context_die);

  /* Add bit stride information to boolean vectors of single bits so that
     elements can be correctly read and displayed by a debugger.  */
  if (VECTOR_BOOLEAN_TYPE_P (type))
    {
      enum machine_mode tmode = TYPE_MODE_RAW (type);
      if (GET_MODE_CLASS (tmode) == MODE_VECTOR_BOOL)
	{
	  /* Calculate bit-size of element based on mode.  */
	  poly_uint16 bit_size = exact_div (a: GET_MODE_BITSIZE (mode: tmode),
					    b: GET_MODE_NUNITS (mode: tmode));
	  /* Set bit stride in the array type DIE.  */
	  add_AT_unsigned (die: array_die, attr_kind: DW_AT_bit_stride, unsigned_val: bit_size.coeffs[0]);
	  /* Find DIE corresponding to the element type so that we could
	     add DW_AT_bit_size to it.  */
	  dw_die_ref elem_die = get_AT_ref (die: array_die, attr_kind: DW_AT_type);
	  /* Avoid adding DW_AT_bit_size twice.  */
	  if (get_AT (die: elem_die, attr_kind: DW_AT_bit_size) == NULL)
	    add_AT_unsigned (die: elem_die, attr_kind: DW_AT_bit_size,
			     TYPE_PRECISION (element_type));
	}
    }

  add_gnat_descriptive_type_attribute (die: array_die, type, context_die);
  if (TYPE_ARTIFICIAL (type))
    add_AT_flag (die: array_die, attr_kind: DW_AT_artificial, flag: 1);

  if (get_AT (die: array_die, attr_kind: DW_AT_name))
    add_pubtype (decl: type, die: array_die);

  add_alignment_attribute (die: array_die, tree_node: type);
  maybe_gen_btf_type_tag_dies (t: type, target: array_die);
}
| 23069 | |
/* This routine generates DIE for array with hidden descriptor, details
   are filled into *info by a langhook.  */

static void
gen_descr_array_type_die (tree type, struct array_descr_info *info,
			  dw_die_ref context_die)
{
  const dw_die_ref scope_die = scope_die_for (t: type, context_die);
  const dw_die_ref array_die = new_die (tag_value: DW_TAG_array_type, parent_die: scope_die, t: type);
  struct loc_descr_context context = {
    .context_type: type, /* context_type */
    .base_decl: info->base_decl, /* base_decl */
    NULL, /* dpi */
    .placeholder_arg: false, /* placeholder_arg */
    .placeholder_seen: false, /* placeholder_seen */
    .strict_signedness: false /* strict_signedness */
  };
  enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
  int dim;

  add_name_attribute (die: array_die, name_string: type_tag (type));
  equate_type_number_to_die (type, type_die: array_die);

  /* Ordering only matters for multidimensional arrays.  */
  if (info->ndimensions > 1)
    switch (info->ordering)
      {
      case array_descr_ordering_row_major:
	add_AT_unsigned (die: array_die, attr_kind: DW_AT_ordering, unsigned_val: DW_ORD_row_major);
	break;
      case array_descr_ordering_column_major:
	add_AT_unsigned (die: array_die, attr_kind: DW_AT_ordering, unsigned_val: DW_ORD_col_major);
	break;
      default:
	break;
      }

  /* These descriptor attributes are DWARF 3 additions.  */
  if (dwarf_version >= 3 || !dwarf_strict)
    {
      if (info->data_location)
	add_scalar_info (die: array_die, attr: DW_AT_data_location, value: info->data_location,
			 forms: dw_scalar_form_exprloc, context: &context);
      if (info->associated)
	add_scalar_info (die: array_die, attr: DW_AT_associated, value: info->associated,
			 forms: dw_scalar_form_constant
			 | dw_scalar_form_exprloc
			 | dw_scalar_form_reference, context: &context);
      if (info->allocated)
	add_scalar_info (die: array_die, attr: DW_AT_allocated, value: info->allocated,
			 forms: dw_scalar_form_constant
			 | dw_scalar_form_exprloc
			 | dw_scalar_form_reference, context: &context);
      if (info->stride)
	{
	  const enum dwarf_attribute attr
	    = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
	  const int forms
	    = (info->stride_in_bits)
	      ? dw_scalar_form_constant
	      : (dw_scalar_form_constant
		 | dw_scalar_form_exprloc
		 | dw_scalar_form_reference);

	  add_scalar_info (die: array_die, attr, value: info->stride, forms, context: &context);
	}
    }
  if (dwarf_version >= 5)
    {
      /* Assumed-rank arrays: DW_AT_rank plus DW_TAG_generic_subrange
	 children, with placeholder expressions for the dimension.  */
      if (info->rank)
	{
	  add_scalar_info (die: array_die, attr: DW_AT_rank, value: info->rank,
			   forms: dw_scalar_form_constant
			   | dw_scalar_form_exprloc, context: &context);
	  subrange_tag = DW_TAG_generic_subrange;
	  context.placeholder_arg = true;
	}
    }

  add_gnat_descriptive_type_attribute (die: array_die, type, context_die);

  /* One subrange child per dimension, carrying bounds and stride.  */
  for (dim = 0; dim < info->ndimensions; dim++)
    {
      dw_die_ref subrange_die = new_die (tag_value: subrange_tag, parent_die: array_die, NULL);

      if (info->dimen[dim].bounds_type)
	add_type_attribute (object_die: subrange_die,
			    type: info->dimen[dim].bounds_type, cv_quals: TYPE_UNQUALIFIED,
			    reverse: false, context_die);
      if (info->dimen[dim].lower_bound)
	add_bound_info (subrange_die, bound_attr: DW_AT_lower_bound,
			bound: info->dimen[dim].lower_bound, context: &context);
      if (info->dimen[dim].upper_bound)
	add_bound_info (subrange_die, bound_attr: DW_AT_upper_bound,
			bound: info->dimen[dim].upper_bound, context: &context);
      if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
	add_scalar_info (die: subrange_die, attr: DW_AT_byte_stride,
			 value: info->dimen[dim].stride,
			 forms: dw_scalar_form_constant
			 | dw_scalar_form_exprloc
			 | dw_scalar_form_reference,
			 context: &context);
    }

  gen_type_die (info->element_type, context_die);
  add_type_attribute (object_die: array_die, type: info->element_type, cv_quals: TYPE_UNQUALIFIED,
		      TREE_CODE (type) == ARRAY_TYPE
		      && TYPE_REVERSE_STORAGE_ORDER (type),
		      context_die);

  if (get_AT (die: array_die, attr_kind: DW_AT_name))
    add_pubtype (decl: type, die: array_die);

  add_alignment_attribute (die: array_die, tree_node: type);
}
| 23183 | |
#if 0
/* Generate a DW_TAG_entry_point DIE for DECL.  Currently disabled;
   kept for reference.  */
static void
gen_entry_point_die (tree decl, dw_die_ref context_die)
{
  tree origin = decl_ultimate_origin (decl);
  dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);

  if (origin != NULL)
    add_abstract_origin_attribute (decl_die, origin);
  else
    {
      add_name_and_src_coords_attributes (decl_die, decl);
      add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
			  TYPE_UNQUALIFIED, false, context_die);
    }

  if (DECL_ABSTRACT_P (decl))
    equate_decl_number_to_die (decl, decl_die);
  else
    add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
}
#endif
| 23206 | |
| 23207 | /* Walk through the list of incomplete types again, trying once more to |
| 23208 | emit full debugging info for them. */ |
| 23209 | |
| 23210 | static void |
| 23211 | retry_incomplete_types (void) |
| 23212 | { |
| 23213 | set_early_dwarf s; |
| 23214 | int i; |
| 23215 | |
| 23216 | for (i = vec_safe_length (v: incomplete_types) - 1; i >= 0; i--) |
| 23217 | if (should_emit_struct_debug (type: (*incomplete_types)[i], usage: DINFO_USAGE_DIR_USE)) |
| 23218 | gen_type_die ((*incomplete_types)[i], comp_unit_die ()); |
| 23219 | vec_safe_truncate (v: incomplete_types, size: 0); |
| 23220 | } |
| 23221 | |
| 23222 | /* Determine what tag to use for a record type. */ |
| 23223 | |
| 23224 | static enum dwarf_tag |
| 23225 | record_type_tag (tree type) |
| 23226 | { |
| 23227 | if (! lang_hooks.types.classify_record) |
| 23228 | return DW_TAG_structure_type; |
| 23229 | |
| 23230 | switch (lang_hooks.types.classify_record (type)) |
| 23231 | { |
| 23232 | case RECORD_IS_STRUCT: |
| 23233 | return DW_TAG_structure_type; |
| 23234 | |
| 23235 | case RECORD_IS_CLASS: |
| 23236 | return DW_TAG_class_type; |
| 23237 | |
| 23238 | case RECORD_IS_INTERFACE: |
| 23239 | if (dwarf_version >= 3 || !dwarf_strict) |
| 23240 | return DW_TAG_interface_type; |
| 23241 | return DW_TAG_structure_type; |
| 23242 | |
| 23243 | default: |
| 23244 | gcc_unreachable (); |
| 23245 | } |
| 23246 | } |
| 23247 | |
| 23248 | /* Generate a DIE to represent an enumeration type. Note that these DIEs |
| 23249 | include all of the information about the enumeration values also. Each |
| 23250 | enumerated type name/value is listed as a child of the enumerated type |
| 23251 | DIE. REVERSE is true if the type is to be interpreted in the reverse |
| 23252 | storage order wrt the target order. */ |
| 23253 | |
static dw_die_ref
gen_enumeration_type_die (tree type, dw_die_ref context_die, bool reverse)
{
  dw_die_ref type_die = lookup_type_die (type);
  /* Non-NULL when we are amending a DIE created on an earlier call.  */
  dw_die_ref orig_type_die = type_die;

  if (type_die == NULL || reverse)
    {
      dw_die_ref scope_die = scope_die_for (t: type, context_die);

      /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
      if (reverse)
	{
	  gcc_assert (type_die);
	  dw_die_ref after_die = type_die;
	  type_die = new_die_raw (tag_value: DW_TAG_enumeration_type);
	  add_child_die_after (die: scope_die, child_die: type_die, after_die);
	}
      else
	{
	  type_die = new_die (tag_value: DW_TAG_enumeration_type, parent_die: scope_die, t: type);
	  equate_type_number_to_die (type, type_die);
	}
      add_name_attribute (die: type_die, name_string: type_tag (type));
      /* DW_AT_enum_class (scoped enums) is a DWARF 4 addition.  */
      if ((dwarf_version >= 4 || !dwarf_strict)
	  && ENUM_IS_SCOPED (type))
	add_AT_flag (die: type_die, attr_kind: DW_AT_enum_class, flag: 1);
      if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
	add_AT_flag (die: type_die, attr_kind: DW_AT_declaration, flag: 1);
      if (!dwarf_strict)
	add_AT_unsigned (die: type_die, attr_kind: DW_AT_encoding,
			 TYPE_UNSIGNED (type)
			 ? DW_ATE_unsigned
			 : DW_ATE_signed);
      if (reverse)
	add_AT_unsigned (die: type_die, attr_kind: DW_AT_endianity,
			 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
    }
  else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
    /* Existing DIE for an incomplete/opaque enum: nothing to add.  */
    return type_die;
  else
    remove_AT (die: type_die, attr_kind: DW_AT_declaration);

  /* Handle a GNU C/C++ extension, i.e. incomplete enum types.  If the
     given enum type is incomplete, do not generate the DW_AT_byte_size
     attribute or the DW_AT_element_list attribute.  */
  if (TYPE_SIZE (type))
    {
      tree link;

      if (!ENUM_IS_OPAQUE (type))
	TREE_ASM_WRITTEN (type) = 1;
      /* When amending an existing DIE, only add attributes it lacks.  */
      if (!orig_type_die || !get_AT (die: type_die, attr_kind: DW_AT_byte_size))
	add_byte_size_attribute (die: type_die, tree_node: type);
      if (!orig_type_die || !get_AT (die: type_die, attr_kind: DW_AT_alignment))
	add_alignment_attribute (die: type_die, tree_node: type);
      /* The underlying base type reference is a DWARF 3 addition.  */
      if ((dwarf_version >= 3 || !dwarf_strict)
	  && (!orig_type_die || !get_AT (die: type_die, attr_kind: DW_AT_type)))
	{
	  tree underlying = lang_hooks.types.enum_underlying_base_type (type);
	  add_type_attribute (object_die: type_die, type: underlying, cv_quals: TYPE_UNQUALIFIED, reverse: false,
			      context_die);
	}
      if (TYPE_STUB_DECL (type) != NULL_TREE)
	{
	  if (!orig_type_die || !get_AT (die: type_die, attr_kind: DW_AT_decl_file))
	    add_src_coords_attributes (die: type_die, TYPE_STUB_DECL (type));
	  if (!orig_type_die || !get_AT (die: type_die, attr_kind: DW_AT_accessibility))
	    add_accessibility_attribute (die: type_die, TYPE_STUB_DECL (type));
	}

      /* If the first reference to this type was as the return type of an
	 inline function, then it may not have a parent.  Fix this now.  */
      if (type_die->die_parent == NULL)
	add_child_die (die: scope_die_for (t: type, context_die), child_die: type_die);

      /* One DW_TAG_enumerator child per enumeration value.  */
      for (link = TYPE_VALUES (type);
	   link != NULL; link = TREE_CHAIN (link))
	{
	  dw_die_ref enum_die = new_die (tag_value: DW_TAG_enumerator, parent_die: type_die, t: link);
	  tree value = TREE_VALUE (link);

	  if (DECL_P (value))
	    equate_decl_number_to_die (decl: value, decl_die: enum_die);

	  gcc_assert (!ENUM_IS_OPAQUE (type));
	  add_name_attribute (die: enum_die,
			      IDENTIFIER_POINTER (TREE_PURPOSE (link)));

	  if (TREE_CODE (value) == CONST_DECL)
	    value = DECL_INITIAL (value);

	  if (simple_type_size_in_bits (TREE_TYPE (value))
	      <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
	    {
	      /* For constant forms created by add_AT_unsigned DWARF
		 consumers (GDB, elfutils, etc.) always zero extend
		 the value.  Only when the actual value is negative
		 do we need to use add_AT_int to generate a constant
		 form that can represent negative values.  */
	      HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
	      if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
		add_AT_unsigned (die: enum_die, attr_kind: DW_AT_const_value,
				 unsigned_val: (unsigned HOST_WIDE_INT) val);
	      else
		add_AT_int (die: enum_die, attr_kind: DW_AT_const_value, int_val: val);
	    }
	  else
	    /* Enumeration constants may be wider than HOST_WIDE_INT.  Handle
	       that here.  TODO: This should be re-worked to use correct
	       signed/unsigned double tags for all cases.  */
	    add_AT_wide (die: enum_die, attr_kind: DW_AT_const_value, w: wi::to_wide (t: value));
	}

      add_gnat_descriptive_type_attribute (die: type_die, type, context_die);
      if (TYPE_ARTIFICIAL (type)
	  && (!orig_type_die || !get_AT (die: type_die, attr_kind: DW_AT_artificial)))
	add_AT_flag (die: type_die, attr_kind: DW_AT_artificial, flag: 1);
    }
  else
    /* Incomplete enum: emit a declaration-only DIE.  */
    add_AT_flag (die: type_die, attr_kind: DW_AT_declaration, flag: 1);

  add_pubtype (decl: type, die: type_die);

  return type_die;
}
| 23380 | |
| 23381 | /* Generate a DIE to represent either a real live formal parameter decl or to |
| 23382 | represent just the type of some formal parameter position in some function |
| 23383 | type. |
| 23384 | |
| 23385 | Note that this routine is a bit unusual because its argument may be a |
| 23386 | ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which |
| 23387 | represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE |
| 23388 | node. If it's the former then this function is being called to output a |
| 23389 | DIE to represent a formal parameter object (or some inlining thereof). If |
| 23390 | it's the latter, then this function is only being called to output a |
| 23391 | DW_TAG_formal_parameter DIE to stand as a placeholder for some formal |
| 23392 | argument type of some subprogram type. |
| 23393 | If EMIT_NAME_P is true, name and source coordinate attributes |
| 23394 | are emitted. */ |
| 23395 | |
static dw_die_ref
gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
			  dw_die_ref context_die)
{
  /* NODE may be NULL when only the abstract ORIGIN is available.  */
  tree node_or_origin = node ? node : origin;
  tree ultimate_origin;
  dw_die_ref parm_die = NULL;

  if (DECL_P (node_or_origin))
    {
      parm_die = lookup_decl_die (decl: node);

      /* If the contexts differ, we may not be talking about the same
	 thing.
	 ??? When in LTO the DIE parent is the "abstract" copy and the
	 context_die is the specification "copy".  */
      if (parm_die
	  && parm_die->die_parent != context_die
	  && (parm_die->die_parent->die_tag != DW_TAG_GNU_formal_parameter_pack
	      || parm_die->die_parent->die_parent != context_die)
	  && !in_lto_p)
	{
	  gcc_assert (!DECL_ABSTRACT_P (node));
	  /* This can happen when creating a concrete instance, in
	     which case we need to create a new DIE that will get
	     annotated with DW_AT_abstract_origin.  */
	  parm_die = NULL;
	}

      if (parm_die && parm_die->die_parent == NULL)
	{
	  /* Check that parm_die already has the right attributes that
	     we would have added below.  If any attributes are
	     missing, fall through to add them.  */
	  if (! DECL_ABSTRACT_P (node_or_origin)
	      && !get_AT (die: parm_die, attr_kind: DW_AT_location)
	      && !get_AT (die: parm_die, attr_kind: DW_AT_const_value))
	    /* We are missing location info, and are about to add it.  */
	    ;
	  else
	    {
	      /* The cached DIE is complete; just attach and return it.  */
	      add_child_die (die: context_die, child_die: parm_die);
	      maybe_gen_btf_decl_tag_dies (t: node_or_origin, target: parm_die);
	      return parm_die;
	    }
	}
    }

  /* If we have a previously generated DIE, use it, unless this is a
     concrete instance (origin != NULL), in which case we need a new
     DIE with a corresponding DW_AT_abstract_origin.  */
  bool reusing_die;
  if (parm_die && origin == NULL)
    reusing_die = true;
  else
    {
      parm_die = new_die (tag_value: DW_TAG_formal_parameter, parent_die: context_die, t: node);
      reusing_die = false;
    }

  switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
    {
    case tcc_declaration:
      ultimate_origin = decl_ultimate_origin (decl: node_or_origin);
      if (node || ultimate_origin)
	origin = ultimate_origin;

      /* A reused DIE already carries its static attributes; only the
	 location still needs updating.  */
      if (reusing_die)
	goto add_location;

      if (origin != NULL)
	add_abstract_origin_attribute (die: parm_die, origin);
      else if (emit_name_p)
	add_name_and_src_coords_attributes (die: parm_die, decl: node);
      /* Emit a type even for concrete instances when the type is
	 variably modified (it can differ per instantiation).  */
      if (origin == NULL
	  || (! DECL_ABSTRACT_P (node_or_origin)
	      && variably_modified_type_p (TREE_TYPE (node_or_origin),
					   decl_function_context
					   (node_or_origin))))
	{
	  tree type = TREE_TYPE (node_or_origin);
	  if (decl_by_reference_p (decl: node_or_origin))
	    add_type_attribute (object_die: parm_die, TREE_TYPE (type),
				cv_quals: TYPE_UNQUALIFIED,
				reverse: false, context_die);
	  else
	    add_type_attribute (object_die: parm_die, type,
				cv_quals: decl_quals (decl: node_or_origin),
				reverse: false, context_die);
	}
      if (origin == NULL && DECL_ARTIFICIAL (node))
	add_AT_flag (die: parm_die, attr_kind: DW_AT_artificial, flag: 1);
    add_location:
      if (node && node != origin)
	equate_decl_number_to_die (decl: node, decl_die: parm_die);
      /* Abstract instances get no location.  */
      if (! DECL_ABSTRACT_P (node_or_origin))
	add_location_or_const_value_attribute (die: parm_die, decl: node_or_origin,
					       cache_p: node == NULL);

      break;

    case tcc_type:
      /* We were called with some kind of a ..._TYPE node.  */
      add_type_attribute (object_die: parm_die, type: node_or_origin, cv_quals: TYPE_UNQUALIFIED, reverse: false,
			  context_die);
      break;

    default:
      gcc_unreachable ();
    }

  maybe_gen_btf_decl_tag_dies (t: node_or_origin, target: parm_die);

  return parm_die;
}
| 23511 | |
| 23512 | /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate |
| 23513 | children DW_TAG_formal_parameter DIEs representing the arguments of the |
| 23514 | parameter pack. |
| 23515 | |
| 23516 | PARM_PACK must be a function parameter pack. |
| 23517 | PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN |
| 23518 | must point to the subsequent arguments of the function PACK_ARG belongs to. |
| 23519 | SUBR_DIE is the DIE of the function PACK_ARG belongs to. |
| 23520 | If NEXT_ARG is non NULL, *NEXT_ARG is set to the function argument |
| 23521 | following the last one for which a DIE was generated. */ |
| 23522 | |
| 23523 | static dw_die_ref |
| 23524 | gen_formal_parameter_pack_die (tree parm_pack, |
| 23525 | tree pack_arg, |
| 23526 | dw_die_ref subr_die, |
| 23527 | tree *next_arg) |
| 23528 | { |
| 23529 | tree arg; |
| 23530 | dw_die_ref parm_pack_die; |
| 23531 | |
| 23532 | gcc_assert (parm_pack |
| 23533 | && lang_hooks.function_parameter_pack_p (parm_pack) |
| 23534 | && subr_die); |
| 23535 | |
| 23536 | parm_pack_die = new_die (tag_value: DW_TAG_GNU_formal_parameter_pack, parent_die: subr_die, t: parm_pack); |
| 23537 | add_name_and_src_coords_attributes (die: parm_pack_die, decl: parm_pack); |
| 23538 | |
| 23539 | for (arg = pack_arg; arg; arg = DECL_CHAIN (arg)) |
| 23540 | { |
| 23541 | if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg, |
| 23542 | parm_pack)) |
| 23543 | break; |
| 23544 | gen_formal_parameter_die (node: arg, NULL, |
| 23545 | emit_name_p: false /* Don't emit name attribute. */, |
| 23546 | context_die: parm_pack_die); |
| 23547 | } |
| 23548 | if (next_arg) |
| 23549 | *next_arg = arg; |
| 23550 | return parm_pack_die; |
| 23551 | } |
| 23552 | |
| 23553 | /* Generate a special type of DIE used as a stand-in for a trailing ellipsis |
| 23554 | at the end of an (ANSI prototyped) formal parameters list. */ |
| 23555 | |
static void
gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
{
  /* The DIE needs no attributes; its mere presence marks the ellipsis.  */
  new_die (tag_value: DW_TAG_unspecified_parameters, parent_die: context_die, t: decl_or_type);
}
| 23561 | |
| 23562 | /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a |
| 23563 | DW_TAG_unspecified_parameters DIE) to represent the types of the formal |
| 23564 | parameters as specified in some function type specification (except for |
| 23565 | those which appear as part of a function *definition*). */ |
| 23566 | |
static void
gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
{
  tree link;
  tree formal_type = NULL;
  tree first_parm_type;
  tree arg;

  /* Accept either a FUNCTION_DECL or a function/method type; for a decl,
     walk its DECL_ARGUMENTS in parallel with the type's argument list.  */
  if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
    {
      arg = DECL_ARGUMENTS (function_or_method_type);
      function_or_method_type = TREE_TYPE (function_or_method_type);
    }
  else
    arg = NULL_TREE;

  first_parm_type = TYPE_ARG_TYPES (function_or_method_type);

  /* Make our first pass over the list of formal parameter types and output a
     DW_TAG_formal_parameter DIE for each one.  */
  for (link = first_parm_type; link; )
    {
      dw_die_ref parm_die;

      formal_type = TREE_VALUE (link);
      if (formal_type == void_type_node)
	break;

      /* Output a (nameless) DIE to represent the formal parameter itself.  */
      parm_die = gen_formal_parameter_die (node: formal_type, NULL,
					   emit_name_p: true /* Emit name attribute.  */,
					   context_die);
      /* The first parameter of a METHOD_TYPE is the implicit object
	 pointer: mark it artificial and, from DWARF 3 on, record it as
	 the method's DW_AT_object_pointer.  */
      if (TREE_CODE (function_or_method_type) == METHOD_TYPE
	  && link == first_parm_type)
	{
	  add_AT_flag (die: parm_die, attr_kind: DW_AT_artificial, flag: 1);
	  if (dwarf_version >= 3 || !dwarf_strict)
	    add_AT_die_ref (die: context_die, attr_kind: DW_AT_object_pointer, targ_die: parm_die);
	}
      else if (arg && DECL_ARTIFICIAL (arg))
	add_AT_flag (die: parm_die, attr_kind: DW_AT_artificial, flag: 1);

      link = TREE_CHAIN (link);
      if (arg)
	arg = DECL_CHAIN (arg);
    }

  /* If this function type has an ellipsis, add a
     DW_TAG_unspecified_parameters DIE to the end of the parameter list.  */
  if (formal_type != void_type_node)
    gen_unspecified_parameters_die (decl_or_type: function_or_method_type, context_die);

  /* Make our second (and final) pass over the list of formal parameter types
     and output DIEs to represent those types (as necessary).  */
  for (link = TYPE_ARG_TYPES (function_or_method_type);
       link && TREE_VALUE (link);
       link = TREE_CHAIN (link))
    gen_type_die (TREE_VALUE (link), context_die);
}
| 23626 | |
| 23627 | /* We want to generate the DIE for TYPE so that we can generate the |
| 23628 | die for MEMBER, which has been defined; we will need to refer back |
| 23629 | to the member declaration nested within TYPE. If we're trying to |
| 23630 | generate minimal debug info for TYPE, processing TYPE won't do the |
| 23631 | trick; we need to attach the member declaration by hand. */ |
| 23632 | |
static void
gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
{
  gen_type_die (type, context_die);

  /* If we're trying to avoid duplicate debug info, we may not have
     emitted the member decl for this function.  Emit it now.  */
  if (TYPE_STUB_DECL (type)
      && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
      && ! lookup_decl_die (decl: member))
    {
      dw_die_ref type_die;
      gcc_assert (!decl_ultimate_origin (member));

      type_die = lookup_type_die_strip_naming_typedef (type);
      if (TREE_CODE (member) == FUNCTION_DECL)
	gen_subprogram_die (member, type_die);
      else if (TREE_CODE (member) == FIELD_DECL)
	{
	  /* Ignore the nameless fields that are used to skip bits but handle
	     C++ anonymous unions and structs.  */
	  if (DECL_NAME (member) != NULL_TREE
	      || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
	      || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
	    {
	      struct vlr_context vlr_ctx = {
		DECL_CONTEXT (member), /* struct_type */
		NULL_TREE /* variant_part_offset */
	      };
	      /* Make sure the field's type has a DIE before the field.  */
	      gen_type_die (member_declared_type (member), type_die);
	      gen_field_die (member, &vlr_ctx, type_die);
	    }
	}
      else
	gen_variable_die (member, NULL_TREE, type_die);
    }
}
| 23670 | |
| 23671 | /* Forward declare these functions, because they are mutually recursive |
| 23672 | with their set_block_* pairing functions. */ |
| 23673 | static void set_decl_origin_self (tree); |
| 23674 | |
| 23675 | /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the |
| 23676 | given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so |
| 23677 | that it points to the node itself, thus indicating that the node is its |
| 23678 | own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for |
| 23679 | the given node is NULL, recursively descend the decl/block tree which |
| 23680 | it is the root of, and for each other ..._DECL or BLOCK node contained |
| 23681 | therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also |
| 23682 | still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN |
| 23683 | values to point to themselves. */ |
| 23684 | |
| 23685 | static void |
| 23686 | set_block_origin_self (tree stmt) |
| 23687 | { |
| 23688 | if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE) |
| 23689 | { |
| 23690 | BLOCK_ABSTRACT_ORIGIN (stmt) = stmt; |
| 23691 | |
| 23692 | { |
| 23693 | tree local_decl; |
| 23694 | |
| 23695 | for (local_decl = BLOCK_VARS (stmt); |
| 23696 | local_decl != NULL_TREE; |
| 23697 | local_decl = DECL_CHAIN (local_decl)) |
| 23698 | /* Do not recurse on nested functions since the inlining status |
| 23699 | of parent and child can be different as per the DWARF spec. */ |
| 23700 | if (TREE_CODE (local_decl) != FUNCTION_DECL |
| 23701 | && !DECL_EXTERNAL (local_decl)) |
| 23702 | set_decl_origin_self (local_decl); |
| 23703 | } |
| 23704 | |
| 23705 | { |
| 23706 | tree subblock; |
| 23707 | |
| 23708 | for (subblock = BLOCK_SUBBLOCKS (stmt); |
| 23709 | subblock != NULL_TREE; |
| 23710 | subblock = BLOCK_CHAIN (subblock)) |
| 23711 | set_block_origin_self (subblock); /* Recurse. */ |
| 23712 | } |
| 23713 | } |
| 23714 | } |
| 23715 | |
/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
   the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
   node so that it points to the node itself, thus indicating that the
   node represents its own (abstract) origin.  Additionally, if the
   DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
   the decl/block tree of which the given node is the root, and for
   each other ..._DECL or BLOCK node contained therein whose
   DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
   set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
   point to themselves.  */
| 23726 | |
| 23727 | static void |
| 23728 | set_decl_origin_self (tree decl) |
| 23729 | { |
| 23730 | if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE) |
| 23731 | { |
| 23732 | DECL_ABSTRACT_ORIGIN (decl) = decl; |
| 23733 | if (TREE_CODE (decl) == FUNCTION_DECL) |
| 23734 | { |
| 23735 | tree arg; |
| 23736 | |
| 23737 | for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg)) |
| 23738 | DECL_ABSTRACT_ORIGIN (arg) = arg; |
| 23739 | if (DECL_INITIAL (decl) != NULL_TREE |
| 23740 | && DECL_INITIAL (decl) != error_mark_node) |
| 23741 | set_block_origin_self (DECL_INITIAL (decl)); |
| 23742 | } |
| 23743 | } |
| 23744 | } |
| 23745 | |
| 23746 | /* Mark the early DIE for DECL as the abstract instance. */ |
| 23747 | |
static void
dwarf2out_abstract_function (tree decl)
{
  dw_die_ref old_die;

  /* Make sure we have the actual abstract inline, not a clone.  */
  decl = DECL_ORIGIN (decl);

  if (DECL_IGNORED_P (decl))
    return;

#ifdef CODEVIEW_DEBUGGING_INFO
  if (codeview_debuginfo_p ())
    codeview_abstract_function (decl);
#endif

  /* In LTO we're all set.  We already created abstract instances
     early and we want to avoid creating a concrete instance of that
     if we don't output it.  */
  if (in_lto_p)
    return;

  old_die = lookup_decl_die (decl);
  gcc_assert (old_die != NULL);
  /* DW_AT_inline doubles as the "already processed" marker.  */
  if (get_AT (die: old_die, attr_kind: DW_AT_inline))
    /* We've already generated the abstract instance.  */
    return;

  /* Go ahead and put DW_AT_inline on the DIE.  The value distinguishes
     declared-inline from not, crossed with whether the function was
     actually inlined anywhere.  */
  if (DECL_DECLARED_INLINE_P (decl))
    {
      if (cgraph_function_possibly_inlined_p (decl))
	add_AT_unsigned (die: old_die, attr_kind: DW_AT_inline, unsigned_val: DW_INL_declared_inlined);
      else
	add_AT_unsigned (die: old_die, attr_kind: DW_AT_inline, unsigned_val: DW_INL_declared_not_inlined);
    }
  else
    {
      if (cgraph_function_possibly_inlined_p (decl))
	add_AT_unsigned (die: old_die, attr_kind: DW_AT_inline, unsigned_val: DW_INL_inlined);
      else
	add_AT_unsigned (die: old_die, attr_kind: DW_AT_inline, unsigned_val: DW_INL_not_inlined);
    }

  if (DECL_DECLARED_INLINE_P (decl)
      && lookup_attribute (attr_name: "artificial" , DECL_ATTRIBUTES (decl)))
    add_AT_flag (die: old_die, attr_kind: DW_AT_artificial, flag: 1);

  /* Mark DECL (and everything beneath it) as its own abstract origin.  */
  set_decl_origin_self (decl);
}
| 23798 | |
| 23799 | /* Helper function of premark_used_types() which gets called through |
| 23800 | htab_traverse. |
| 23801 | |
| 23802 | Marks the DIE of a given type in *SLOT as perennial, so it never gets |
| 23803 | marked as unused by prune_unused_types. */ |
| 23804 | |
| 23805 | bool |
| 23806 | (tree const &type, void *) |
| 23807 | { |
| 23808 | dw_die_ref die; |
| 23809 | |
| 23810 | die = lookup_type_die (type); |
| 23811 | if (die != NULL) |
| 23812 | die->die_perennial_p = 1; |
| 23813 | return true; |
| 23814 | } |
| 23815 | |
| 23816 | /* Helper function of premark_types_used_by_global_vars which gets called |
| 23817 | through htab_traverse. |
| 23818 | |
| 23819 | Marks the DIE of a given type in *SLOT as perennial, so it never gets |
| 23820 | marked as unused by prune_unused_types. The DIE of the type is marked |
| 23821 | only if the global variable using the type will actually be emitted. */ |
| 23822 | |
| 23823 | int |
| 23824 | (types_used_by_vars_entry **slot, |
| 23825 | void *) |
| 23826 | { |
| 23827 | struct types_used_by_vars_entry *entry; |
| 23828 | dw_die_ref die; |
| 23829 | |
| 23830 | entry = (struct types_used_by_vars_entry *) *slot; |
| 23831 | gcc_assert (entry->type != NULL |
| 23832 | && entry->var_decl != NULL); |
| 23833 | die = lookup_type_die (type: entry->type); |
| 23834 | if (die) |
| 23835 | { |
| 23836 | /* Ask cgraph if the global variable really is to be emitted. |
| 23837 | If yes, then we'll keep the DIE of ENTRY->TYPE. */ |
| 23838 | varpool_node *node = varpool_node::get (decl: entry->var_decl); |
| 23839 | if (node && node->definition) |
| 23840 | { |
| 23841 | die->die_perennial_p = 1; |
| 23842 | /* Keep the parent DIEs as well. */ |
| 23843 | while ((die = die->die_parent) && die->die_perennial_p == 0) |
| 23844 | die->die_perennial_p = 1; |
| 23845 | } |
| 23846 | } |
| 23847 | return 1; |
| 23848 | } |
| 23849 | |
| 23850 | /* Mark all members of used_types_hash as perennial. */ |
| 23851 | |
| 23852 | static void |
| 23853 | (struct function *fun) |
| 23854 | { |
| 23855 | if (fun && fun->used_types_hash) |
| 23856 | fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL); |
| 23857 | } |
| 23858 | |
| 23859 | /* Mark all members of types_used_by_vars_entry as perennial. */ |
| 23860 | |
| 23861 | static void |
| 23862 | (void) |
| 23863 | { |
| 23864 | if (types_used_by_vars_hash) |
| 23865 | types_used_by_vars_hash |
| 23866 | ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL); |
| 23867 | } |
| 23868 | |
| 23869 | /* Mark all variables used by the symtab as perennial. */ |
| 23870 | |
| 23871 | static void |
| 23872 | (void) |
| 23873 | { |
| 23874 | /* Mark DIEs in the symtab as used. */ |
| 23875 | varpool_node *var; |
| 23876 | FOR_EACH_VARIABLE (var) |
| 23877 | { |
| 23878 | dw_die_ref die = lookup_decl_die (decl: var->decl); |
| 23879 | if (die) |
| 23880 | { |
| 23881 | die->die_perennial_p = 1; |
| 23882 | if (tree attr = lookup_attribute (attr_name: "structured bindings" , |
| 23883 | DECL_ATTRIBUTES (var->decl))) |
| 23884 | for (tree d = TREE_VALUE (attr); d; d = TREE_CHAIN (d)) |
| 23885 | { |
| 23886 | die = lookup_decl_die (TREE_VALUE (d)); |
| 23887 | if (die) |
| 23888 | die->die_perennial_p = 1; |
| 23889 | } |
| 23890 | } |
| 23891 | } |
| 23892 | } |
| 23893 | |
| 23894 | /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE |
| 23895 | for CA_LOC call arg loc node. */ |
| 23896 | |
static dw_die_ref
gen_call_site_die (tree decl, dw_die_ref subr_die,
		   struct call_arg_loc_node *ca_loc)
{
  dw_die_ref stmt_die = NULL, die;
  tree block = ca_loc->block;

  /* Nest the call site DIE inside the innermost enclosing lexical block
     that already has a DIE; fall back to the subprogram DIE.  */
  while (block
	 && block != DECL_INITIAL (decl)
	 && TREE_CODE (block) == BLOCK)
    {
      stmt_die = lookup_block_die (block);
      if (stmt_die)
	break;
      block = BLOCK_SUPERCONTEXT (block);
    }
  if (stmt_die == NULL)
    stmt_die = subr_die;
  die = new_die (tag_value: dwarf_TAG (tag: DW_TAG_call_site), parent_die: stmt_die, NULL_TREE);
  /* Record the address the call returns to.  */
  add_AT_lbl_id (die, attr_kind: dwarf_AT (at: DW_AT_call_return_pc),
		 lbl_id: ca_loc->label,
		 offset: targetm.calls.call_offset_return_label (ca_loc->call_insn));
  if (ca_loc->tail_call_p)
    add_AT_flag (die, attr_kind: dwarf_AT (at: DW_AT_call_tail_call), flag: 1);
  if (ca_loc->symbol_ref)
    {
      /* For a direct call, reference the callee's DIE when available,
	 otherwise record the callee's address.  */
      dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
      if (tdie)
	add_AT_die_ref (die, attr_kind: dwarf_AT (at: DW_AT_call_origin), targ_die: tdie);
      else
	add_AT_addr (die, attr_kind: dwarf_AT (at: DW_AT_call_origin), addr: ca_loc->symbol_ref,
		     force_direct: false);
    }
  return die;
}
| 23932 | |
| 23933 | /* Generate a DIE to represent a declared function (either file-scope or |
| 23934 | block-local). */ |
| 23935 | |
| 23936 | static void |
| 23937 | gen_subprogram_die (tree decl, dw_die_ref context_die) |
| 23938 | { |
| 23939 | tree origin = decl_ultimate_origin (decl); |
| 23940 | dw_die_ref subr_die; |
| 23941 | dw_die_ref old_die = lookup_decl_die (decl); |
| 23942 | bool old_die_had_no_children = false; |
| 23943 | |
| 23944 | /* This function gets called multiple times for different stages of |
| 23945 | the debug process. For example, for func() in this code: |
| 23946 | |
| 23947 | namespace S |
| 23948 | { |
| 23949 | void func() { ... } |
| 23950 | } |
| 23951 | |
| 23952 | ...we get called 4 times. Twice in early debug and twice in |
| 23953 | late debug: |
| 23954 | |
| 23955 | Early debug |
| 23956 | ----------- |
| 23957 | |
| 23958 | 1. Once while generating func() within the namespace. This is |
| 23959 | the declaration. The declaration bit below is set, as the |
| 23960 | context is the namespace. |
| 23961 | |
| 23962 | A new DIE will be generated with DW_AT_declaration set. |
| 23963 | |
| 23964 | 2. Once for func() itself. This is the specification. The |
| 23965 | declaration bit below is clear as the context is the CU. |
| 23966 | |
| 23967 | We will use the cached DIE from (1) to create a new DIE with |
| 23968 | DW_AT_specification pointing to the declaration in (1). |
| 23969 | |
| 23970 | Late debug via rest_of_handle_final() |
| 23971 | ------------------------------------- |
| 23972 | |
| 23973 | 3. Once generating func() within the namespace. This is also the |
| 23974 | declaration, as in (1), but this time we will early exit below |
| 23975 | as we have a cached DIE and a declaration needs no additional |
| 23976 | annotations (no locations), as the source declaration line |
| 23977 | info is enough. |
| 23978 | |
| 23979 | 4. Once for func() itself. As in (2), this is the specification, |
| 23980 | but this time we will re-use the cached DIE, and just annotate |
| 23981 | it with the location information that should now be available. |
| 23982 | |
| 23983 | For something without namespaces, but with abstract instances, we |
     are also called multiple times:
| 23985 | |
| 23986 | class Base |
| 23987 | { |
| 23988 | public: |
| 23989 | Base (); // constructor declaration (1) |
| 23990 | }; |
| 23991 | |
| 23992 | Base::Base () { } // constructor specification (2) |
| 23993 | |
| 23994 | Early debug |
| 23995 | ----------- |
| 23996 | |
| 23997 | 1. Once for the Base() constructor by virtue of it being a |
| 23998 | member of the Base class. This is done via |
| 23999 | rest_of_type_compilation. |
| 24000 | |
| 24001 | This is a declaration, so a new DIE will be created with |
| 24002 | DW_AT_declaration. |
| 24003 | |
| 24004 | 2. Once for the Base() constructor definition, but this time |
| 24005 | while generating the abstract instance of the base |
| 24006 | constructor (__base_ctor) which is being generated via early |
| 24007 | debug of reachable functions. |
| 24008 | |
| 24009 | Even though we have a cached version of the declaration (1), |
| 24010 | we will create a DW_AT_specification of the declaration DIE |
| 24011 | in (1). |
| 24012 | |
| 24013 | 3. Once for the __base_ctor itself, but this time, we generate |
	a DW_AT_abstract_origin version of the DW_AT_specification in
| 24015 | (2). |
| 24016 | |
| 24017 | Late debug via rest_of_handle_final |
| 24018 | ----------------------------------- |
| 24019 | |
| 24020 | 4. One final time for the __base_ctor (which will have a cached |
	DIE with DW_AT_abstract_origin created in (3)).  This time,
| 24022 | we will just annotate the location information now |
| 24023 | available. |
| 24024 | */ |
| 24025 | int declaration = (current_function_decl != decl |
| 24026 | || (!DECL_INITIAL (decl) && !origin) |
| 24027 | || class_or_namespace_scope_p (context_die)); |
| 24028 | |
| 24029 | /* A declaration that has been previously dumped needs no |
| 24030 | additional information. */ |
| 24031 | if (old_die && declaration) |
| 24032 | return; |
| 24033 | |
| 24034 | if (in_lto_p && old_die && old_die->die_child == NULL) |
| 24035 | old_die_had_no_children = true; |
| 24036 | |
| 24037 | /* Now that the C++ front end lazily declares artificial member fns, we |
| 24038 | might need to retrofit the declaration into its class. */ |
| 24039 | if (!declaration && !origin && !old_die |
| 24040 | && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl)) |
| 24041 | && !class_or_namespace_scope_p (context_die) |
| 24042 | && debug_info_level > DINFO_LEVEL_TERSE) |
| 24043 | old_die = force_decl_die (decl); |
| 24044 | |
| 24045 | /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */ |
| 24046 | if (origin != NULL) |
| 24047 | { |
| 24048 | gcc_assert (!declaration || local_scope_p (context_die)); |
| 24049 | |
| 24050 | /* Fixup die_parent for the abstract instance of a nested |
| 24051 | inline function. */ |
| 24052 | if (old_die && old_die->die_parent == NULL) |
| 24053 | add_child_die (die: context_die, child_die: old_die); |
| 24054 | |
| 24055 | if (old_die && get_AT_ref (die: old_die, attr_kind: DW_AT_abstract_origin)) |
| 24056 | { |
| 24057 | /* If we have a DW_AT_abstract_origin we have a working |
| 24058 | cached version. */ |
| 24059 | subr_die = old_die; |
| 24060 | } |
| 24061 | else |
| 24062 | { |
| 24063 | subr_die = new_die (tag_value: DW_TAG_subprogram, parent_die: context_die, t: decl); |
| 24064 | add_abstract_origin_attribute (die: subr_die, origin); |
| 24065 | /* This is where the actual code for a cloned function is. |
| 24066 | Let's emit linkage name attribute for it. This helps |
| 24067 | debuggers to e.g, set breakpoints into |
| 24068 | constructors/destructors when the user asks "break |
| 24069 | K::K". */ |
| 24070 | add_linkage_name (die: subr_die, decl); |
| 24071 | } |
| 24072 | } |
| 24073 | /* A cached copy, possibly from early dwarf generation. Reuse as |
| 24074 | much as possible. */ |
| 24075 | else if (old_die) |
| 24076 | { |
| 24077 | if (!get_AT_flag (die: old_die, attr_kind: DW_AT_declaration) |
| 24078 | /* We can have a normal definition following an inline one in the |
| 24079 | case of redefinition of GNU C extern inlines. |
| 24080 | It seems reasonable to use AT_specification in this case. */ |
| 24081 | && !get_AT (die: old_die, attr_kind: DW_AT_inline)) |
| 24082 | { |
| 24083 | /* Detect and ignore this case, where we are trying to output |
| 24084 | something we have already output. */ |
| 24085 | if (get_AT (die: old_die, attr_kind: DW_AT_low_pc) |
| 24086 | || get_AT (die: old_die, attr_kind: DW_AT_ranges)) |
| 24087 | return; |
| 24088 | |
| 24089 | /* If we have no location information, this must be a |
| 24090 | partially generated DIE from early dwarf generation. |
| 24091 | Fall through and generate it. */ |
| 24092 | } |
| 24093 | |
| 24094 | /* If the definition comes from the same place as the declaration, |
| 24095 | maybe use the old DIE. We always want the DIE for this function |
| 24096 | that has the *_pc attributes to be under comp_unit_die so the |
| 24097 | debugger can find it. We also need to do this for abstract |
| 24098 | instances of inlines, since the spec requires the out-of-line copy |
| 24099 | to have the same parent. For local class methods, this doesn't |
| 24100 | apply; we just use the old DIE. */ |
| 24101 | expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl)); |
| 24102 | struct dwarf_file_data * file_index = lookup_filename (s.file); |
| 24103 | if (((is_unit_die (c: old_die->die_parent) |
| 24104 | /* This condition fixes the inconsistency/ICE with the |
| 24105 | following Fortran test (or some derivative thereof) while |
| 24106 | building libgfortran: |
| 24107 | |
| 24108 | module some_m |
| 24109 | contains |
| 24110 | logical function funky (FLAG) |
| 24111 | funky = .true. |
| 24112 | end function |
| 24113 | end module |
| 24114 | */ |
| 24115 | || (old_die->die_parent |
| 24116 | && old_die->die_parent->die_tag == DW_TAG_module) |
| 24117 | || local_scope_p (context_die: old_die->die_parent) |
| 24118 | || context_die == NULL) |
| 24119 | && (DECL_ARTIFICIAL (decl) |
| 24120 | || (get_AT_file (die: old_die, attr_kind: DW_AT_decl_file) == file_index |
| 24121 | && (get_AT_unsigned (die: old_die, attr_kind: DW_AT_decl_line) |
| 24122 | == (unsigned) s.line) |
| 24123 | && (!debug_column_info |
| 24124 | || s.column == 0 |
| 24125 | || (get_AT_unsigned (die: old_die, attr_kind: DW_AT_decl_column) |
| 24126 | == (unsigned) s.column))))) |
| 24127 | /* With LTO if there's an abstract instance for |
| 24128 | the old DIE, this is a concrete instance and |
| 24129 | thus re-use the DIE. */ |
| 24130 | || get_AT (die: old_die, attr_kind: DW_AT_abstract_origin)) |
| 24131 | { |
| 24132 | subr_die = old_die; |
| 24133 | |
| 24134 | /* Clear out the declaration attribute, but leave the |
| 24135 | parameters so they can be augmented with location |
| 24136 | information later. Unless this was a declaration, in |
| 24137 | which case, wipe out the nameless parameters and recreate |
| 24138 | them further down. */ |
| 24139 | if (remove_AT (die: subr_die, attr_kind: DW_AT_declaration)) |
| 24140 | { |
| 24141 | |
| 24142 | remove_AT (die: subr_die, attr_kind: DW_AT_object_pointer); |
| 24143 | remove_child_TAG (die: subr_die, tag: DW_TAG_formal_parameter); |
| 24144 | } |
| 24145 | } |
| 24146 | /* Make a specification pointing to the previously built |
| 24147 | declaration. */ |
| 24148 | else |
| 24149 | { |
| 24150 | subr_die = new_die (tag_value: DW_TAG_subprogram, parent_die: context_die, t: decl); |
| 24151 | add_AT_specification (die: subr_die, targ_die: old_die); |
| 24152 | add_pubname (decl, die: subr_die); |
| 24153 | if (get_AT_file (die: old_die, attr_kind: DW_AT_decl_file) != file_index) |
| 24154 | add_AT_file (die: subr_die, attr_kind: DW_AT_decl_file, fd: file_index); |
| 24155 | if (get_AT_unsigned (die: old_die, attr_kind: DW_AT_decl_line) != (unsigned) s.line) |
| 24156 | add_AT_unsigned (die: subr_die, attr_kind: DW_AT_decl_line, unsigned_val: s.line); |
| 24157 | if (debug_column_info |
| 24158 | && s.column |
| 24159 | && (get_AT_unsigned (die: old_die, attr_kind: DW_AT_decl_column) |
| 24160 | != (unsigned) s.column)) |
| 24161 | add_AT_unsigned (die: subr_die, attr_kind: DW_AT_decl_column, unsigned_val: s.column); |
| 24162 | |
| 24163 | /* If the prototype had an 'auto' or 'decltype(auto)' in |
| 24164 | the return type, emit the real type on the definition die. */ |
| 24165 | if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE) |
| 24166 | { |
| 24167 | dw_die_ref die = get_AT_ref (die: old_die, attr_kind: DW_AT_type); |
| 24168 | while (die |
| 24169 | && (die->die_tag == DW_TAG_reference_type |
| 24170 | || die->die_tag == DW_TAG_rvalue_reference_type |
| 24171 | || die->die_tag == DW_TAG_pointer_type |
| 24172 | || die->die_tag == DW_TAG_const_type |
| 24173 | || die->die_tag == DW_TAG_volatile_type |
| 24174 | || die->die_tag == DW_TAG_restrict_type |
| 24175 | || die->die_tag == DW_TAG_array_type |
| 24176 | || die->die_tag == DW_TAG_ptr_to_member_type |
| 24177 | || die->die_tag == DW_TAG_subroutine_type)) |
| 24178 | die = get_AT_ref (die, attr_kind: DW_AT_type); |
| 24179 | if (die == auto_die || die == decltype_auto_die) |
| 24180 | add_type_attribute (object_die: subr_die, TREE_TYPE (TREE_TYPE (decl)), |
| 24181 | cv_quals: TYPE_UNQUALIFIED, reverse: false, context_die); |
| 24182 | } |
| 24183 | |
| 24184 | /* When we process the method declaration, we haven't seen |
| 24185 | the out-of-class defaulted definition yet, so we have to |
| 24186 | recheck now. */ |
| 24187 | if ((dwarf_version >= 5 || ! dwarf_strict) |
| 24188 | && !get_AT (die: subr_die, attr_kind: DW_AT_defaulted)) |
| 24189 | { |
| 24190 | int defaulted |
| 24191 | = lang_hooks.decls.decl_dwarf_attribute (decl, |
| 24192 | DW_AT_defaulted); |
| 24193 | if (defaulted != -1) |
| 24194 | { |
| 24195 | /* Other values must have been handled before. */ |
| 24196 | gcc_assert (defaulted == DW_DEFAULTED_out_of_class); |
| 24197 | add_AT_unsigned (die: subr_die, attr_kind: DW_AT_defaulted, unsigned_val: defaulted); |
| 24198 | } |
| 24199 | } |
| 24200 | } |
| 24201 | } |
| 24202 | /* Create a fresh DIE for anything else. */ |
| 24203 | else |
| 24204 | { |
| 24205 | subr_die = new_die (tag_value: DW_TAG_subprogram, parent_die: context_die, t: decl); |
| 24206 | |
| 24207 | if (TREE_PUBLIC (decl)) |
| 24208 | add_AT_flag (die: subr_die, attr_kind: DW_AT_external, flag: 1); |
| 24209 | |
| 24210 | add_name_and_src_coords_attributes (die: subr_die, decl); |
| 24211 | add_pubname (decl, die: subr_die); |
| 24212 | if (debug_info_level > DINFO_LEVEL_TERSE) |
| 24213 | { |
| 24214 | add_prototyped_attribute (die: subr_die, TREE_TYPE (decl)); |
| 24215 | add_type_attribute (object_die: subr_die, TREE_TYPE (TREE_TYPE (decl)), |
| 24216 | cv_quals: TYPE_UNQUALIFIED, reverse: false, context_die); |
| 24217 | } |
| 24218 | |
| 24219 | add_pure_or_virtual_attribute (die: subr_die, func_decl: decl); |
| 24220 | if (DECL_ARTIFICIAL (decl)) |
| 24221 | add_AT_flag (die: subr_die, attr_kind: DW_AT_artificial, flag: 1); |
| 24222 | |
| 24223 | if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict)) |
| 24224 | add_AT_flag (die: subr_die, attr_kind: DW_AT_noreturn, flag: 1); |
| 24225 | |
| 24226 | add_alignment_attribute (die: subr_die, tree_node: decl); |
| 24227 | |
| 24228 | add_accessibility_attribute (die: subr_die, decl); |
| 24229 | } |
| 24230 | |
| 24231 | /* Unless we have an existing non-declaration DIE, equate the new |
| 24232 | DIE. */ |
| 24233 | if (!old_die || is_declaration_die (die: old_die)) |
| 24234 | equate_decl_number_to_die (decl, decl_die: subr_die); |
| 24235 | |
| 24236 | if (declaration) |
| 24237 | { |
| 24238 | if (!old_die || !get_AT (die: old_die, attr_kind: DW_AT_inline)) |
| 24239 | { |
| 24240 | add_AT_flag (die: subr_die, attr_kind: DW_AT_declaration, flag: 1); |
| 24241 | |
| 24242 | /* If this is an explicit function declaration then generate |
| 24243 | a DW_AT_explicit attribute. */ |
| 24244 | if ((dwarf_version >= 3 || !dwarf_strict) |
| 24245 | && lang_hooks.decls.decl_dwarf_attribute (decl, |
| 24246 | DW_AT_explicit) == 1) |
| 24247 | add_AT_flag (die: subr_die, attr_kind: DW_AT_explicit, flag: 1); |
| 24248 | |
| 24249 | /* If this is a C++11 deleted special function member then generate |
| 24250 | a DW_AT_deleted attribute. */ |
| 24251 | if ((dwarf_version >= 5 || !dwarf_strict) |
| 24252 | && lang_hooks.decls.decl_dwarf_attribute (decl, |
| 24253 | DW_AT_deleted) == 1) |
| 24254 | add_AT_flag (die: subr_die, attr_kind: DW_AT_deleted, flag: 1); |
| 24255 | |
| 24256 | /* If this is a C++11 defaulted special function member then |
| 24257 | generate a DW_AT_defaulted attribute. */ |
| 24258 | if (dwarf_version >= 5 || !dwarf_strict) |
| 24259 | { |
| 24260 | int defaulted |
| 24261 | = lang_hooks.decls.decl_dwarf_attribute (decl, |
| 24262 | DW_AT_defaulted); |
| 24263 | if (defaulted != -1) |
| 24264 | add_AT_unsigned (die: subr_die, attr_kind: DW_AT_defaulted, unsigned_val: defaulted); |
| 24265 | } |
| 24266 | |
| 24267 | /* If this is a C++11 non-static member function with & ref-qualifier |
| 24268 | then generate a DW_AT_reference attribute. */ |
| 24269 | if ((dwarf_version >= 5 || !dwarf_strict) |
| 24270 | && lang_hooks.decls.decl_dwarf_attribute (decl, |
| 24271 | DW_AT_reference) == 1) |
| 24272 | add_AT_flag (die: subr_die, attr_kind: DW_AT_reference, flag: 1); |
| 24273 | |
| 24274 | /* If this is a C++11 non-static member function with && |
	 ref-qualifier then generate a DW_AT_rvalue_reference attribute.  */
| 24276 | if ((dwarf_version >= 5 || !dwarf_strict) |
| 24277 | && lang_hooks.decls.decl_dwarf_attribute (decl, |
| 24278 | DW_AT_rvalue_reference) |
| 24279 | == 1) |
| 24280 | add_AT_flag (die: subr_die, attr_kind: DW_AT_rvalue_reference, flag: 1); |
| 24281 | } |
| 24282 | } |
| 24283 | /* For non DECL_EXTERNALs, if range information is available, fill |
| 24284 | the DIE with it. */ |
| 24285 | else if (!DECL_EXTERNAL (decl) && !early_dwarf) |
| 24286 | { |
| 24287 | HOST_WIDE_INT cfa_fb_offset; |
| 24288 | |
| 24289 | struct function *fun = DECL_STRUCT_FUNCTION (decl); |
| 24290 | |
| 24291 | if (!crtl->has_bb_partition) |
| 24292 | { |
| 24293 | dw_fde_ref fde = fun->fde; |
| 24294 | if (fde->dw_fde_begin) |
| 24295 | { |
| 24296 | /* We have already generated the labels. */ |
| 24297 | add_AT_low_high_pc (die: subr_die, lbl_low: fde->dw_fde_begin, |
| 24298 | lbl_high: fde->dw_fde_end, force_direct: false); |
| 24299 | } |
| 24300 | else |
| 24301 | { |
| 24302 | /* Create start/end labels and add the range. */ |
| 24303 | char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES]; |
| 24304 | char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES]; |
| 24305 | ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL, |
| 24306 | current_function_funcdef_no); |
| 24307 | ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL, |
| 24308 | current_function_funcdef_no); |
| 24309 | add_AT_low_high_pc (die: subr_die, lbl_low: label_id_low, lbl_high: label_id_high, |
| 24310 | force_direct: false); |
| 24311 | } |
| 24312 | |
| 24313 | #if VMS_DEBUGGING_INFO |
| 24314 | /* HP OpenVMS Industry Standard 64: DWARF Extensions |
| 24315 | Section 2.3 Prologue and Epilogue Attributes: |
| 24316 | When a breakpoint is set on entry to a function, it is generally |
| 24317 | desirable for execution to be suspended, not on the very first |
| 24318 | instruction of the function, but rather at a point after the |
| 24319 | function's frame has been set up, after any language defined local |
| 24320 | declaration processing has been completed, and before execution of |
| 24321 | the first statement of the function begins. Debuggers generally |
| 24322 | cannot properly determine where this point is. Similarly for a |
| 24323 | breakpoint set on exit from a function. The prologue and epilogue |
| 24324 | attributes allow a compiler to communicate the location(s) to use. */ |
| 24325 | |
| 24326 | { |
| 24327 | if (fde->dw_fde_vms_end_prologue) |
| 24328 | add_AT_vms_delta (subr_die, DW_AT_HP_prologue, |
| 24329 | fde->dw_fde_begin, fde->dw_fde_vms_end_prologue); |
| 24330 | |
| 24331 | if (fde->dw_fde_vms_begin_epilogue) |
| 24332 | add_AT_vms_delta (subr_die, DW_AT_HP_epilogue, |
| 24333 | fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue); |
| 24334 | } |
| 24335 | #endif |
| 24336 | |
| 24337 | } |
| 24338 | else |
| 24339 | { |
| 24340 | /* Generate pubnames entries for the split function code ranges. */ |
| 24341 | dw_fde_ref fde = fun->fde; |
| 24342 | |
| 24343 | if (fde->dw_fde_second_begin) |
| 24344 | { |
| 24345 | if (dwarf_version >= 3 || !dwarf_strict) |
| 24346 | { |
| 24347 | /* We should use ranges for non-contiguous code section |
| 24348 | addresses. Use the actual code range for the initial |
| 24349 | section, since the HOT/COLD labels might precede an |
| 24350 | alignment offset. */ |
| 24351 | bool range_list_added = false; |
| 24352 | add_ranges_by_labels (die: subr_die, begin: fde->dw_fde_begin, |
| 24353 | end: fde->dw_fde_end, added: &range_list_added, |
| 24354 | force_direct: false); |
| 24355 | add_ranges_by_labels (die: subr_die, begin: fde->dw_fde_second_begin, |
| 24356 | end: fde->dw_fde_second_end, |
| 24357 | added: &range_list_added, force_direct: false); |
| 24358 | if (range_list_added) |
| 24359 | add_ranges (NULL); |
| 24360 | } |
| 24361 | else |
| 24362 | { |
| 24363 | /* There is no real support in DW2 for this .. so we make |
| 24364 | a work-around. First, emit the pub name for the segment |
| 24365 | containing the function label. Then make and emit a |
| 24366 | simplified subprogram DIE for the second segment with the |
| 24367 | name pre-fixed by __hot/cold_sect_of_. We use the same |
| 24368 | linkage name for the second die so that gdb will find both |
| 24369 | sections when given "b foo". */ |
| 24370 | const char *name = NULL; |
| 24371 | tree decl_name = DECL_NAME (decl); |
| 24372 | dw_die_ref seg_die; |
| 24373 | |
| 24374 | /* Do the 'primary' section. */ |
| 24375 | add_AT_low_high_pc (die: subr_die, lbl_low: fde->dw_fde_begin, |
| 24376 | lbl_high: fde->dw_fde_end, force_direct: false); |
| 24377 | |
| 24378 | /* Build a minimal DIE for the secondary section. */ |
| 24379 | seg_die = new_die (tag_value: DW_TAG_subprogram, |
| 24380 | parent_die: subr_die->die_parent, t: decl); |
| 24381 | |
| 24382 | if (TREE_PUBLIC (decl)) |
| 24383 | add_AT_flag (die: seg_die, attr_kind: DW_AT_external, flag: 1); |
| 24384 | |
| 24385 | if (decl_name != NULL |
| 24386 | && IDENTIFIER_POINTER (decl_name) != NULL) |
| 24387 | { |
| 24388 | name = dwarf2_name (decl, scope: 1); |
| 24389 | if (! DECL_ARTIFICIAL (decl)) |
| 24390 | add_src_coords_attributes (die: seg_die, decl); |
| 24391 | |
| 24392 | add_linkage_name (die: seg_die, decl); |
| 24393 | } |
| 24394 | gcc_assert (name != NULL); |
| 24395 | add_pure_or_virtual_attribute (die: seg_die, func_decl: decl); |
| 24396 | if (DECL_ARTIFICIAL (decl)) |
| 24397 | add_AT_flag (die: seg_die, attr_kind: DW_AT_artificial, flag: 1); |
| 24398 | |
| 24399 | name = concat ("__second_sect_of_" , name, NULL); |
| 24400 | add_AT_low_high_pc (die: seg_die, lbl_low: fde->dw_fde_second_begin, |
| 24401 | lbl_high: fde->dw_fde_second_end, force_direct: false); |
| 24402 | add_name_attribute (die: seg_die, name_string: name); |
| 24403 | if (want_pubnames ()) |
| 24404 | add_pubname_string (str: name, die: seg_die); |
| 24405 | } |
| 24406 | } |
| 24407 | else |
| 24408 | add_AT_low_high_pc (die: subr_die, lbl_low: fde->dw_fde_begin, lbl_high: fde->dw_fde_end, |
| 24409 | force_direct: false); |
| 24410 | } |
| 24411 | |
| 24412 | cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl); |
| 24413 | |
| 24414 | /* We define the "frame base" as the function's CFA. This is more |
| 24415 | convenient for several reasons: (1) It's stable across the prologue |
| 24416 | and epilogue, which makes it better than just a frame pointer, |
| 24417 | (2) With dwarf3, there exists a one-byte encoding that allows us |
| 24418 | to reference the .debug_frame data by proxy, but failing that, |
| 24419 | (3) We can at least reuse the code inspection and interpretation |
| 24420 | code that determines the CFA position at various points in the |
| 24421 | function. */ |
| 24422 | if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2) |
| 24423 | { |
| 24424 | dw_loc_descr_ref op = new_loc_descr (op: DW_OP_call_frame_cfa, oprnd1: 0, oprnd2: 0); |
| 24425 | add_AT_loc (die: subr_die, attr_kind: DW_AT_frame_base, loc: op); |
| 24426 | } |
| 24427 | else |
| 24428 | { |
| 24429 | dw_loc_list_ref list = convert_cfa_to_fb_loc_list (offset: cfa_fb_offset); |
| 24430 | if (list->dw_loc_next) |
| 24431 | add_AT_loc_list (die: subr_die, attr_kind: DW_AT_frame_base, loc_list: list); |
| 24432 | else |
| 24433 | add_AT_loc (die: subr_die, attr_kind: DW_AT_frame_base, loc: list->expr); |
| 24434 | } |
| 24435 | |
| 24436 | /* Compute a displacement from the "steady-state frame pointer" to |
| 24437 | the CFA. The former is what all stack slots and argument slots |
| 24438 | will reference in the rtl; the latter is what we've told the |
| 24439 | debugger about. We'll need to adjust all frame_base references |
| 24440 | by this displacement. */ |
| 24441 | compute_frame_pointer_to_fb_displacement (offset: cfa_fb_offset); |
| 24442 | |
| 24443 | if (fun->static_chain_decl) |
| 24444 | { |
| 24445 | /* DWARF requires here a location expression that computes the |
| 24446 | address of the enclosing subprogram's frame base. The machinery |
| 24447 | in tree-nested.cc is supposed to store this specific address in the |
| 24448 | last field of the FRAME record. */ |
| 24449 | const tree frame_type |
| 24450 | = TREE_TYPE (TREE_TYPE (fun->static_chain_decl)); |
| 24451 | const tree fb_decl = tree_last (TYPE_FIELDS (frame_type)); |
| 24452 | |
| 24453 | tree fb_expr |
| 24454 | = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl); |
| 24455 | fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl), |
| 24456 | fb_expr, fb_decl, NULL_TREE); |
| 24457 | |
| 24458 | add_AT_location_description (die: subr_die, attr_kind: DW_AT_static_link, |
| 24459 | descr: loc_list_from_tree (loc: fb_expr, want_address: 0, NULL)); |
| 24460 | } |
| 24461 | |
| 24462 | resolve_variable_values (); |
| 24463 | } |
| 24464 | |
| 24465 | /* Generate child dies for template parameters. */ |
| 24466 | if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE) |
| 24467 | gen_generic_params_dies (t: decl); |
| 24468 | |
| 24469 | /* Now output descriptions of the arguments for this function. This gets |
| 24470 | (unnecessarily?) complex because of the fact that the DECL_ARGUMENT list |
| 24471 | for a FUNCTION_DECL doesn't indicate cases where there was a trailing |
| 24472 | `...' at the end of the formal parameter list. In order to find out if |
| 24473 | there was a trailing ellipsis or not, we must instead look at the type |
| 24474 | associated with the FUNCTION_DECL. This will be a node of type |
| 24475 | FUNCTION_TYPE. If the chain of type nodes hanging off of this |
| 24476 | FUNCTION_TYPE node ends with a void_type_node then there should *not* be |
| 24477 | an ellipsis at the end. */ |
| 24478 | |
| 24479 | /* In the case where we are describing a mere function declaration, all we |
| 24480 | need to do here (and all we *can* do here) is to describe the *types* of |
| 24481 | its formal parameters. */ |
| 24482 | if (debug_info_level <= DINFO_LEVEL_TERSE) |
| 24483 | ; |
| 24484 | else if (declaration) |
| 24485 | gen_formal_types_die (function_or_method_type: decl, context_die: subr_die); |
| 24486 | else |
| 24487 | { |
| 24488 | /* Generate DIEs to represent all known formal parameters. */ |
| 24489 | tree parm = DECL_ARGUMENTS (decl); |
| 24490 | tree generic_decl = early_dwarf |
| 24491 | ? lang_hooks.decls.get_generic_function_decl (decl) : NULL; |
| 24492 | tree generic_decl_parm = generic_decl |
| 24493 | ? DECL_ARGUMENTS (generic_decl) |
| 24494 | : NULL; |
| 24495 | |
| 24496 | /* Now we want to walk the list of parameters of the function and |
| 24497 | emit their relevant DIEs. |
| 24498 | |
| 24499 | We consider the case of DECL being an instance of a generic function |
| 24500 | as well as it being a normal function. |
| 24501 | |
| 24502 | If DECL is an instance of a generic function we walk the |
| 24503 | parameters of the generic function declaration _and_ the parameters of |
| 24504 | DECL itself. This is useful because we want to emit specific DIEs for |
| 24505 | function parameter packs and those are declared as part of the |
| 24506 | generic function declaration. In that particular case, |
| 24507 | the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE. |
| 24508 | That DIE has children DIEs representing the set of arguments |
| 24509 | of the pack. Note that the set of pack arguments can be empty. |
| 24510 | In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any |
| 24511 | children DIE. |
| 24512 | |
| 24513 | Otherwise, we just consider the parameters of DECL. */ |
| 24514 | while (generic_decl_parm || parm) |
| 24515 | { |
| 24516 | if (generic_decl_parm |
| 24517 | && lang_hooks.function_parameter_pack_p (generic_decl_parm)) |
| 24518 | gen_formal_parameter_pack_die (parm_pack: generic_decl_parm, |
| 24519 | pack_arg: parm, subr_die, |
| 24520 | next_arg: &parm); |
| 24521 | else if (parm) |
| 24522 | { |
| 24523 | dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die); |
| 24524 | |
| 24525 | if (early_dwarf |
| 24526 | && parm == DECL_ARGUMENTS (decl) |
| 24527 | && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE |
| 24528 | && parm_die |
| 24529 | && (dwarf_version >= 3 || !dwarf_strict)) |
| 24530 | add_AT_die_ref (die: subr_die, attr_kind: DW_AT_object_pointer, targ_die: parm_die); |
| 24531 | |
| 24532 | parm = DECL_CHAIN (parm); |
| 24533 | } |
| 24534 | |
| 24535 | if (generic_decl_parm) |
| 24536 | generic_decl_parm = DECL_CHAIN (generic_decl_parm); |
| 24537 | } |
| 24538 | |
| 24539 | /* Decide whether we need an unspecified_parameters DIE at the end. |
| 24540 | There are 2 more cases to do this for: 1) the ansi ... declaration - |
| 24541 | this is detectable when the end of the arg list is not a |
| 24542 | void_type_node 2) an unprototyped function declaration (not a |
| 24543 | definition). This just means that we have no info about the |
| 24544 | parameters at all. */ |
| 24545 | if (early_dwarf) |
| 24546 | { |
| 24547 | if (prototype_p (TREE_TYPE (decl))) |
| 24548 | { |
| 24549 | /* This is the prototyped case, check for.... */ |
| 24550 | if (stdarg_p (TREE_TYPE (decl))) |
| 24551 | gen_unspecified_parameters_die (decl_or_type: decl, context_die: subr_die); |
| 24552 | } |
| 24553 | else if (DECL_INITIAL (decl) == NULL_TREE) |
| 24554 | gen_unspecified_parameters_die (decl_or_type: decl, context_die: subr_die); |
| 24555 | } |
| 24556 | else if ((subr_die != old_die || old_die_had_no_children) |
| 24557 | && prototype_p (TREE_TYPE (decl)) |
| 24558 | && stdarg_p (TREE_TYPE (decl))) |
| 24559 | gen_unspecified_parameters_die (decl_or_type: decl, context_die: subr_die); |
| 24560 | } |
| 24561 | |
| 24562 | if (subr_die != old_die) |
| 24563 | /* Add the calling convention attribute if requested. */ |
| 24564 | add_calling_convention_attribute (subr_die, decl); |
| 24565 | |
| 24566 | /* Output Dwarf info for all of the stuff within the body of the function |
| 24567 | (if it has one - it may be just a declaration). |
| 24568 | |
| 24569 | OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent |
| 24570 | a function. This BLOCK actually represents the outermost binding contour |
| 24571 | for the function, i.e. the contour in which the function's formal |
| 24572 | parameters and labels get declared. Curiously, it appears that the front |
| 24573 | end doesn't actually put the PARM_DECL nodes for the current function onto |
| 24574 | the BLOCK_VARS list for this outer scope, but are strung off of the |
| 24575 | DECL_ARGUMENTS list for the function instead. |
| 24576 | |
| 24577 | The BLOCK_VARS list for the `outer_scope' does provide us with a list of |
| 24578 | the LABEL_DECL nodes for the function however, and we output DWARF info |
| 24579 | for those in decls_for_scope. Just within the `outer_scope' there will be |
| 24580 | a BLOCK node representing the function's outermost pair of curly braces, |
| 24581 | and any blocks used for the base and member initializers of a C++ |
| 24582 | constructor function. */ |
| 24583 | tree outer_scope = DECL_INITIAL (decl); |
| 24584 | if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK) |
| 24585 | { |
| 24586 | int call_site_note_count = 0; |
| 24587 | int tail_call_site_note_count = 0; |
| 24588 | |
| 24589 | /* Emit a DW_TAG_variable DIE for a named return value. */ |
| 24590 | if (DECL_NAME (DECL_RESULT (decl))) |
| 24591 | gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die); |
| 24592 | |
| 24593 | /* The first time through decls_for_scope we will generate the |
| 24594 | DIEs for the locals. The second time, we fill in the |
| 24595 | location info. */ |
| 24596 | decls_for_scope (outer_scope, subr_die); |
| 24597 | |
| 24598 | if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5)) |
| 24599 | { |
| 24600 | struct call_arg_loc_node *ca_loc; |
| 24601 | for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next) |
| 24602 | { |
| 24603 | dw_die_ref die = NULL; |
| 24604 | rtx tloc = NULL_RTX, tlocc = NULL_RTX; |
| 24605 | rtx call_arg_loc_note |
| 24606 | = find_reg_note (ca_loc->call_insn, |
| 24607 | REG_CALL_ARG_LOCATION, NULL_RTX); |
| 24608 | rtx arg, next_arg; |
| 24609 | tree arg_decl = NULL_TREE; |
| 24610 | |
| 24611 | for (arg = (call_arg_loc_note != NULL_RTX |
| 24612 | ? XEXP (call_arg_loc_note, 0) |
| 24613 | : NULL_RTX); |
| 24614 | arg; arg = next_arg) |
| 24615 | { |
| 24616 | dw_loc_descr_ref reg, val; |
| 24617 | machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1)); |
| 24618 | dw_die_ref cdie, tdie = NULL; |
| 24619 | |
| 24620 | next_arg = XEXP (arg, 1); |
| 24621 | if (REG_P (XEXP (XEXP (arg, 0), 0)) |
| 24622 | && next_arg |
| 24623 | && MEM_P (XEXP (XEXP (next_arg, 0), 0)) |
| 24624 | && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)) |
| 24625 | && REGNO (XEXP (XEXP (arg, 0), 0)) |
| 24626 | == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))) |
| 24627 | next_arg = XEXP (next_arg, 1); |
| 24628 | if (mode == VOIDmode) |
| 24629 | { |
| 24630 | mode = GET_MODE (XEXP (XEXP (arg, 0), 0)); |
| 24631 | if (mode == VOIDmode) |
| 24632 | mode = GET_MODE (XEXP (arg, 0)); |
| 24633 | } |
| 24634 | if (mode == VOIDmode || mode == BLKmode) |
| 24635 | continue; |
| 24636 | /* Get dynamic information about call target only if we |
| 24637 | have no static information: we cannot generate both |
| 24638 | DW_AT_call_origin and DW_AT_call_target |
| 24639 | attributes. */ |
| 24640 | if (ca_loc->symbol_ref == NULL_RTX) |
| 24641 | { |
| 24642 | if (XEXP (XEXP (arg, 0), 0) == pc_rtx) |
| 24643 | { |
| 24644 | tloc = XEXP (XEXP (arg, 0), 1); |
| 24645 | continue; |
| 24646 | } |
| 24647 | else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER |
| 24648 | && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx) |
| 24649 | { |
| 24650 | tlocc = XEXP (XEXP (arg, 0), 1); |
| 24651 | continue; |
| 24652 | } |
| 24653 | } |
| 24654 | reg = NULL; |
| 24655 | if (REG_P (XEXP (XEXP (arg, 0), 0))) |
| 24656 | reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0), |
| 24657 | initialized: VAR_INIT_STATUS_INITIALIZED); |
| 24658 | else if (MEM_P (XEXP (XEXP (arg, 0), 0))) |
| 24659 | { |
| 24660 | rtx mem = XEXP (XEXP (arg, 0), 0); |
| 24661 | reg = mem_loc_descriptor (XEXP (mem, 0), |
| 24662 | mode: get_address_mode (mem), |
| 24663 | GET_MODE (mem), |
| 24664 | initialized: VAR_INIT_STATUS_INITIALIZED); |
| 24665 | } |
| 24666 | else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) |
| 24667 | == DEBUG_PARAMETER_REF) |
| 24668 | { |
| 24669 | tree tdecl |
| 24670 | = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0)); |
| 24671 | tdie = lookup_decl_die (decl: tdecl); |
| 24672 | if (tdie == NULL) |
| 24673 | continue; |
| 24674 | arg_decl = tdecl; |
| 24675 | } |
| 24676 | else |
| 24677 | continue; |
| 24678 | if (reg == NULL |
| 24679 | && GET_CODE (XEXP (XEXP (arg, 0), 0)) |
| 24680 | != DEBUG_PARAMETER_REF) |
| 24681 | continue; |
| 24682 | val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode, |
| 24683 | VOIDmode, |
| 24684 | initialized: VAR_INIT_STATUS_INITIALIZED); |
| 24685 | if (val == NULL) |
| 24686 | continue; |
| 24687 | if (die == NULL) |
| 24688 | die = gen_call_site_die (decl, subr_die, ca_loc); |
| 24689 | cdie = new_die (tag_value: dwarf_TAG (tag: DW_TAG_call_site_parameter), parent_die: die, |
| 24690 | NULL_TREE); |
| 24691 | add_desc_attribute (die: cdie, decl: arg_decl); |
| 24692 | if (reg != NULL) |
| 24693 | add_AT_loc (die: cdie, attr_kind: DW_AT_location, loc: reg); |
| 24694 | else if (tdie != NULL) |
| 24695 | add_AT_die_ref (die: cdie, attr_kind: dwarf_AT (at: DW_AT_call_parameter), |
| 24696 | targ_die: tdie); |
| 24697 | add_AT_loc (die: cdie, attr_kind: dwarf_AT (at: DW_AT_call_value), loc: val); |
| 24698 | if (next_arg != XEXP (arg, 1)) |
| 24699 | { |
| 24700 | mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1)); |
| 24701 | if (mode == VOIDmode) |
| 24702 | mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0)); |
| 24703 | val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1), |
| 24704 | 0), 1), |
| 24705 | mode, VOIDmode, |
| 24706 | initialized: VAR_INIT_STATUS_INITIALIZED); |
| 24707 | if (val != NULL) |
| 24708 | add_AT_loc (die: cdie, attr_kind: dwarf_AT (at: DW_AT_call_data_value), |
| 24709 | loc: val); |
| 24710 | } |
| 24711 | } |
| 24712 | if (die == NULL |
| 24713 | && (ca_loc->symbol_ref || tloc)) |
| 24714 | die = gen_call_site_die (decl, subr_die, ca_loc); |
| 24715 | if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX)) |
| 24716 | { |
| 24717 | dw_loc_descr_ref tval = NULL; |
| 24718 | |
| 24719 | if (tloc != NULL_RTX) |
| 24720 | tval = mem_loc_descriptor (rtl: tloc, |
| 24721 | GET_MODE (tloc) == VOIDmode |
| 24722 | ? Pmode : GET_MODE (tloc), |
| 24723 | VOIDmode, |
| 24724 | initialized: VAR_INIT_STATUS_INITIALIZED); |
| 24725 | if (tval) |
| 24726 | add_AT_loc (die, attr_kind: dwarf_AT (at: DW_AT_call_target), loc: tval); |
| 24727 | else if (tlocc != NULL_RTX) |
| 24728 | { |
| 24729 | tval = mem_loc_descriptor (rtl: tlocc, |
| 24730 | GET_MODE (tlocc) == VOIDmode |
| 24731 | ? Pmode : GET_MODE (tlocc), |
| 24732 | VOIDmode, |
| 24733 | initialized: VAR_INIT_STATUS_INITIALIZED); |
| 24734 | if (tval) |
| 24735 | add_AT_loc (die, |
| 24736 | attr_kind: dwarf_AT (at: DW_AT_call_target_clobbered), |
| 24737 | loc: tval); |
| 24738 | } |
| 24739 | } |
| 24740 | if (die != NULL) |
| 24741 | { |
| 24742 | call_site_note_count++; |
| 24743 | if (ca_loc->tail_call_p) |
| 24744 | tail_call_site_note_count++; |
| 24745 | } |
| 24746 | } |
| 24747 | } |
| 24748 | call_arg_locations = NULL; |
| 24749 | call_arg_loc_last = NULL; |
| 24750 | if (tail_call_site_count >= 0 |
| 24751 | && tail_call_site_count == tail_call_site_note_count |
| 24752 | && (!dwarf_strict || dwarf_version >= 5)) |
| 24753 | { |
| 24754 | if (call_site_count >= 0 |
| 24755 | && call_site_count == call_site_note_count) |
| 24756 | add_AT_flag (die: subr_die, attr_kind: dwarf_AT (at: DW_AT_call_all_calls), flag: 1); |
| 24757 | else |
| 24758 | add_AT_flag (die: subr_die, attr_kind: dwarf_AT (at: DW_AT_call_all_tail_calls), flag: 1); |
| 24759 | } |
| 24760 | call_site_count = -1; |
| 24761 | tail_call_site_count = -1; |
| 24762 | } |
| 24763 | |
| 24764 | /* Mark used types after we have created DIEs for the functions scopes. */ |
| 24765 | premark_used_types (DECL_STRUCT_FUNCTION (decl)); |
| 24766 | } |
| 24767 | |
| 24768 | /* Returns a hash value for X (which really is a die_struct). */ |
| 24769 | |
| 24770 | hashval_t |
| 24771 | block_die_hasher::hash (die_struct *d) |
| 24772 | { |
| 24773 | return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent); |
| 24774 | } |
| 24775 | |
| 24776 | /* Return true if decl_id and die_parent of die_struct X is the same |
| 24777 | as decl_id and die_parent of die_struct Y. */ |
| 24778 | |
| 24779 | bool |
| 24780 | block_die_hasher::equal (die_struct *x, die_struct *y) |
| 24781 | { |
| 24782 | return x->decl_id == y->decl_id && x->die_parent == y->die_parent; |
| 24783 | } |
| 24784 | |
/* Hold information about markers for inlined entry points.  */
struct GTY ((for_user)) inline_entry_data
{
  /* The block that's the inlined_function_outer_scope for an inlined
     function.  */
  tree block;

  /* The label at the inlined entry point.  */
  const char *label_pfx;
  /* Number paired with LABEL_PFX to form the full internal label.  */
  unsigned int label_num;

  /* The view number to be used as the inlined entry point.  */
  var_loc_view view;
};
| 24799 | |
/* Hasher for inline_entry_data; entries are keyed and compared by
   their BLOCK tree node, hence compare_type is tree.  */
struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
{
  typedef tree compare_type;
  static inline hashval_t hash (const inline_entry_data *);
  static inline bool equal (const inline_entry_data *, const_tree);
};
| 24806 | |
| 24807 | /* Hash table routines for inline_entry_data. */ |
| 24808 | |
| 24809 | inline hashval_t |
| 24810 | inline_entry_data_hasher::hash (const inline_entry_data *data) |
| 24811 | { |
| 24812 | return htab_hash_pointer (data->block); |
| 24813 | } |
| 24814 | |
| 24815 | inline bool |
| 24816 | inline_entry_data_hasher::equal (const inline_entry_data *data, |
| 24817 | const_tree block) |
| 24818 | { |
| 24819 | return data->block == block; |
| 24820 | } |
| 24821 | |
/* Inlined entry points pending DIE creation in this compilation unit.
   Keyed by the inlined function's outermost scope BLOCK; slots are
   cleared as the corresponding DIEs are generated.  */

static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
| 24825 | |
| 24826 | |
| 24827 | /* Return TRUE if DECL, which may have been previously generated as |
| 24828 | OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is |
| 24829 | true if decl (or its origin) is either an extern declaration or a |
| 24830 | class/namespace scoped declaration. |
| 24831 | |
| 24832 | The declare_in_namespace support causes us to get two DIEs for one |
| 24833 | variable, both of which are declarations. We want to avoid |
| 24834 | considering one to be a specification, so we must test for |
| 24835 | DECLARATION and DW_AT_declaration. */ |
| 24836 | static inline bool |
| 24837 | decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration) |
| 24838 | { |
| 24839 | return (old_die && TREE_STATIC (decl) && !declaration |
| 24840 | && get_AT_flag (die: old_die, attr_kind: DW_AT_declaration) == 1); |
| 24841 | } |
| 24842 | |
| 24843 | /* Return true if DECL is a local static. */ |
| 24844 | |
| 24845 | static inline bool |
| 24846 | local_function_static (tree decl) |
| 24847 | { |
| 24848 | gcc_assert (VAR_P (decl)); |
| 24849 | return TREE_STATIC (decl) |
| 24850 | && DECL_CONTEXT (decl) |
| 24851 | && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL; |
| 24852 | } |
| 24853 | |
/* Return true iff DECL overrides (presumably completes) the type of
   OLD_DIE within CONTEXT_DIE.  */

static bool
override_type_for_decl_p (tree decl, dw_die_ref old_die,
			  dw_die_ref context_die)
{
  tree type = TREE_TYPE (decl);
  int cv_quals;

  /* For a by-reference DECL, the interesting type is the referenced
     type, considered unqualified.  */
  if (decl_by_reference_p (decl))
    {
      type = TREE_TYPE (type);
      cv_quals = TYPE_UNQUALIFIED;
    }
  else
    cv_quals = decl_quals (decl);

  /* Look up (or create) the DIE for DECL's type as it would be emitted
     in CONTEXT_DIE.  */
  dw_die_ref type_die
    = modified_type_die (type,
			 cv_quals: cv_quals | TYPE_QUALS (type),
			 TYPE_ATTRIBUTES (type),
			 reverse: false,
			 context_die);

  dw_die_ref old_type_die = get_AT_ref (die: old_die, attr_kind: DW_AT_type);

  /* DECL overrides iff its type DIE differs from the one OLD_DIE
     already references.  */
  return type_die != old_type_die;
}
| 24883 | |
/* Generate a DIE to represent a declared data object.
   Either DECL or ORIGIN must be non-null.  */

static void
gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
{
  HOST_WIDE_INT off = 0;
  tree com_decl;
  tree decl_or_origin = decl ? decl : origin;
  tree ultimate_origin;
  dw_die_ref var_die;
  dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
  bool declaration = (DECL_EXTERNAL (decl_or_origin)
		      || class_or_namespace_scope_p (context_die));
  bool specialization_p = false;
  bool no_linkage_name = false;

  /* While C++ inline static data members have definitions inside of the
     class, force the first DIE to be a declaration, then let gen_member_die
     reparent it to the class context and call gen_variable_die again
     to create the outside of the class DIE for the definition.  */
  if (!declaration
      && old_die == NULL
      && decl
      && DECL_CONTEXT (decl)
      && TYPE_P (DECL_CONTEXT (decl))
      && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
    {
      declaration = true;
      if (dwarf_version < 5)
	no_linkage_name = true;
    }

  /* Prefer the ultimate abstract origin, if any, as ORIGIN.  */
  ultimate_origin = decl_ultimate_origin (decl: decl_or_origin);
  if (decl || ultimate_origin)
    origin = ultimate_origin;
  com_decl = fortran_common (decl: decl_or_origin, value: &off);

  /* Symbol in common gets emitted as a child of the common block, in the form
     of a data member.  */
  if (com_decl)
    {
      dw_die_ref com_die;
      dw_loc_list_ref loc = NULL;
      die_node com_die_arg;

      var_die = lookup_decl_die (decl: decl_or_origin);
      if (var_die)
	{
	  /* Late pass: an early-created DIE only needs its location
	     filled in (adjusted by OFF within the common block).  */
	  if (! early_dwarf && get_AT (die: var_die, attr_kind: DW_AT_location) == NULL)
	    {
	      loc = loc_list_from_tree (loc: com_decl, want_address: off ? 1 : 2, NULL);
	      if (loc)
		{
		  if (off)
		    {
		      /* Optimize the common case.  */
		      if (single_element_loc_list_p (list: loc)
			  && loc->expr->dw_loc_opc == DW_OP_addr
			  && loc->expr->dw_loc_next == NULL
			  && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
			     == SYMBOL_REF)
			{
			  rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
			  loc->expr->dw_loc_oprnd1.v.val_addr
			    = plus_constant (GET_MODE (x), x , off);
			}
		      else
			loc_list_plus_const (list_head: loc, offset: off);
		    }
		  add_AT_location_description (die: var_die, attr_kind: DW_AT_location, descr: loc);
		  remove_AT (die: var_die, attr_kind: DW_AT_declaration);
		}
	    }
	  return;
	}

      if (common_block_die_table == NULL)
	common_block_die_table = hash_table<block_die_hasher>::create_ggc (n: 10);

      /* Look up (or create) the DW_TAG_common_block DIE that will
	 parent this variable's DIE.  */
      com_die_arg.decl_id = DECL_UID (com_decl);
      com_die_arg.die_parent = context_die;
      com_die = common_block_die_table->find (value: &com_die_arg);
      if (! early_dwarf)
	loc = loc_list_from_tree (loc: com_decl, want_address: 2, NULL);
      if (com_die == NULL)
	{
	  const char *cnam
	    = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
	  die_node **slot;

	  com_die = new_die (tag_value: DW_TAG_common_block, parent_die: context_die, t: decl);
	  add_name_and_src_coords_attributes (die: com_die, decl: com_decl);
	  if (loc)
	    {
	      add_AT_location_description (die: com_die, attr_kind: DW_AT_location, descr: loc);
	      /* Avoid sharing the same loc descriptor between
		 DW_TAG_common_block and DW_TAG_variable.  */
	      loc = loc_list_from_tree (loc: com_decl, want_address: 2, NULL);
	    }
	  else if (DECL_EXTERNAL (decl_or_origin))
	    add_AT_flag (die: com_die, attr_kind: DW_AT_declaration, flag: 1);
	  if (want_pubnames ())
	    add_pubname_string (str: cnam, die: com_die); /* ??? needed? */
	  com_die->decl_id = DECL_UID (com_decl);
	  slot = common_block_die_table->find_slot (value: com_die, insert: INSERT);
	  *slot = com_die;
	}
      else if (get_AT (die: com_die, attr_kind: DW_AT_location) == NULL && loc)
	{
	  add_AT_location_description (die: com_die, attr_kind: DW_AT_location, descr: loc);
	  loc = loc_list_from_tree (loc: com_decl, want_address: 2, NULL);
	  remove_AT (die: com_die, attr_kind: DW_AT_declaration);
	}
      /* Emit the variable itself as a data-member-like child of the
	 common block DIE.  */
      var_die = new_die (tag_value: DW_TAG_variable, parent_die: com_die, t: decl);
      add_name_and_src_coords_attributes (die: var_die, decl: decl_or_origin);
      add_type_attribute (object_die: var_die, TREE_TYPE (decl_or_origin),
			  cv_quals: decl_quals (decl: decl_or_origin), reverse: false,
			  context_die);
      add_alignment_attribute (die: var_die, tree_node: decl);
      add_AT_flag (die: var_die, attr_kind: DW_AT_external, flag: 1);
      if (loc)
	{
	  if (off)
	    {
	      /* Optimize the common case.  */
	      if (single_element_loc_list_p (list: loc)
		  && loc->expr->dw_loc_opc == DW_OP_addr
		  && loc->expr->dw_loc_next == NULL
		  && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
		{
		  rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
		  loc->expr->dw_loc_oprnd1.v.val_addr
		    = plus_constant (GET_MODE (x), x, off);
		}
	      else
		loc_list_plus_const (list_head: loc, offset: off);
	    }
	  add_AT_location_description (die: var_die, attr_kind: DW_AT_location, descr: loc);
	}
      else if (DECL_EXTERNAL (decl_or_origin))
	add_AT_flag (die: var_die, attr_kind: DW_AT_declaration, flag: 1);
      if (decl)
	equate_decl_number_to_die (decl, decl_die: var_die);
      return;
    }

  /* A DIE for DECL already exists; decide whether to reuse it, make a
     specification for it, or fall through and build a new DIE.  */
  if (old_die)
    {
      if (declaration)
	{
	  /* A declaration that has been previously dumped, needs no
	     further annotations, since it doesn't need location on
	     the second pass.  */
	  return;
	}
      else if (decl_will_get_specification_p (old_die, decl, declaration)
	       && !get_AT (die: old_die, attr_kind: DW_AT_specification))
	{
	  /* Fall-thru so we can make a new variable die along with a
	     DW_AT_specification.  */
	}
      else if (origin && old_die->die_parent != context_die)
	{
	  /* If we will be creating an inlined instance, we need a
	     new DIE that will get annotated with
	     DW_AT_abstract_origin.  */
	  gcc_assert (!DECL_ABSTRACT_P (decl));
	}
      else
	{
	  /* If a DIE was dumped early, it still needs location info.
	     Skip to where we fill the location bits.  */
	  var_die = old_die;

	  /* ??? In LTRANS we cannot annotate early created variably
	     modified type DIEs without copying them and adjusting all
	     references to them.  Thus we dumped them again.  Also add a
	     reference to them but beware of -g0 compile and -g link
	     in which case the reference will be already present.  */
	  tree type = TREE_TYPE (decl_or_origin);
	  if (in_lto_p
	      && ! get_AT (die: var_die, attr_kind: DW_AT_type)
	      && variably_modified_type_p
		   (type, decl_function_context (decl_or_origin)))
	    {
	      if (decl_by_reference_p (decl: decl_or_origin))
		add_type_attribute (object_die: var_die, TREE_TYPE (type),
				    cv_quals: TYPE_UNQUALIFIED, reverse: false, context_die);
	      else
		add_type_attribute (object_die: var_die, type, cv_quals: decl_quals (decl: decl_or_origin),
				    reverse: false, context_die);
	    }

	  goto gen_variable_die_location;
	}
    }

  /* For static data members, the declaration in the class is supposed
     to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
     also in DWARF2; the specification should still be DW_TAG_variable
     referencing the DW_TAG_member DIE.  */
  if (declaration && class_scope_p (context_die) && dwarf_version < 5)
    var_die = new_die (tag_value: DW_TAG_member, parent_die: context_die, t: decl);
  else
    var_die = new_die (tag_value: DW_TAG_variable, parent_die: context_die, t: decl);

  if (origin != NULL)
    add_abstract_origin_attribute (die: var_die, origin);

  /* Loop unrolling can create multiple blocks that refer to the same
     static variable, so we must test for the DW_AT_declaration flag.

     ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
     copy decls and set the DECL_ABSTRACT_P flag on them instead of
     sharing them.

     ??? Duplicated blocks have been rewritten to use .debug_ranges.  */
  else if (decl_will_get_specification_p (old_die, decl, declaration))
    {
      /* This is a definition of a C++ class level static.  */
      add_AT_specification (die: var_die, targ_die: old_die);
      specialization_p = true;
      /* Only re-emit source coordinates that differ from the earlier
	 declaration DIE.  */
      if (DECL_NAME (decl))
	{
	  expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
	  struct dwarf_file_data * file_index = lookup_filename (s.file);

	  if (get_AT_file (die: old_die, attr_kind: DW_AT_decl_file) != file_index)
	    add_AT_file (die: var_die, attr_kind: DW_AT_decl_file, fd: file_index);

	  if (get_AT_unsigned (die: old_die, attr_kind: DW_AT_decl_line) != (unsigned) s.line)
	    add_AT_unsigned (die: var_die, attr_kind: DW_AT_decl_line, unsigned_val: s.line);

	  if (debug_column_info
	      && s.column
	      && (get_AT_unsigned (die: old_die, attr_kind: DW_AT_decl_column)
		  != (unsigned) s.column))
	    add_AT_unsigned (die: var_die, attr_kind: DW_AT_decl_column, unsigned_val: s.column);

	  if (old_die->die_tag == DW_TAG_member)
	    add_linkage_name (die: var_die, decl);
	}
    }
  else
    add_name_and_src_coords_attributes (die: var_die, decl, no_linkage_name);

  if ((origin == NULL && !specialization_p)
      || (origin != NULL
	  && !DECL_ABSTRACT_P (decl_or_origin)
	  && variably_modified_type_p (TREE_TYPE (decl_or_origin),
				       decl_function_context
							(decl_or_origin)))
      || (old_die && specialization_p
	  && override_type_for_decl_p (decl: decl_or_origin, old_die, context_die)))
    {
      tree type = TREE_TYPE (decl_or_origin);

      if (decl_by_reference_p (decl: decl_or_origin))
	add_type_attribute (object_die: var_die, TREE_TYPE (type), cv_quals: TYPE_UNQUALIFIED, reverse: false,
			    context_die);
      else
	add_type_attribute (object_die: var_die, type, cv_quals: decl_quals (decl: decl_or_origin), reverse: false,
			    context_die);
    }

  if (origin == NULL && !specialization_p)
    {
      if (TREE_PUBLIC (decl))
	add_AT_flag (die: var_die, attr_kind: DW_AT_external, flag: 1);

      if (DECL_ARTIFICIAL (decl))
	add_AT_flag (die: var_die, attr_kind: DW_AT_artificial, flag: 1);

      add_alignment_attribute (die: var_die, tree_node: decl);

      add_accessibility_attribute (die: var_die, decl);
    }

  if (declaration)
    add_AT_flag (die: var_die, attr_kind: DW_AT_declaration, flag: 1);

  if (decl && (DECL_ABSTRACT_P (decl)
	       || !old_die || is_declaration_die (die: old_die)))
    equate_decl_number_to_die (decl, decl_die: var_die);

  /* Attach location information (or a constant value) to VAR_DIE.  */
 gen_variable_die_location:
  if (! declaration
      && (! DECL_ABSTRACT_P (decl_or_origin)
	  /* Local static vars are shared between all clones/inlines,
	     so emit DW_AT_location on the abstract DIE if DECL_RTL is
	     already set.  */
	  || (VAR_P (decl_or_origin)
	      && TREE_STATIC (decl_or_origin)
	      && DECL_RTL_SET_P (decl_or_origin))))
    {
      if (early_dwarf)
	{
	  add_pubname (decl: decl_or_origin, die: var_die);
	  /* For global register variables, emit DW_AT_location if possible
	     already during early_dwarf, as late_global_decl won't be usually
	     called.  */
	  if (DECL_HARD_REGISTER (decl_or_origin)
	      && TREE_STATIC (decl_or_origin)
	      && !decl_by_reference_p (decl: decl_or_origin)
	      && !get_AT (die: var_die, attr_kind: DW_AT_location)
	      && !get_AT (die: var_die, attr_kind: DW_AT_const_value)
	      && DECL_RTL_SET_P (decl_or_origin)
	      && REG_P (DECL_RTL (decl_or_origin)))
	    {
	      dw_loc_descr_ref descr
		= reg_loc_descriptor (DECL_RTL (decl_or_origin),
				      initialized: VAR_INIT_STATUS_INITIALIZED);
	      if (descr)
		add_AT_loc (die: var_die, attr_kind: DW_AT_location, loc: descr);
	    }
	}
      else
	add_location_or_const_value_attribute (die: var_die, decl: decl_or_origin,
					       cache_p: decl == NULL);
    }
  else
    tree_add_const_value_attribute_for_decl (var_die, decl: decl_or_origin);

  if ((dwarf_version >= 4 || !dwarf_strict)
      && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
						DW_AT_const_expr) == 1
      && !get_AT (die: var_die, attr_kind: DW_AT_const_expr)
      && !specialization_p)
    add_AT_flag (die: var_die, attr_kind: DW_AT_const_expr, flag: 1);

  if (!dwarf_strict)
    {
      int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
						       DW_AT_inline);
      if (inl != -1
	  && !get_AT (die: var_die, attr_kind: DW_AT_inline)
	  && !specialization_p)
	add_AT_unsigned (die: var_die, attr_kind: DW_AT_inline, unsigned_val: inl);
    }
}
| 25225 | |
/* Generate a DIE to represent a named constant.  */

static void
gen_const_die (tree decl, dw_die_ref context_die)
{
  dw_die_ref const_die;
  tree type = TREE_TYPE (decl);

  /* At most one DIE per constant: if one already exists, done.  */
  const_die = lookup_decl_die (decl);
  if (const_die)
    return;

  const_die = new_die (tag_value: DW_TAG_constant, parent_die: context_die, t: decl);
  equate_decl_number_to_die (decl, decl_die: const_die);
  add_name_and_src_coords_attributes (die: const_die, decl);
  /* The object is a constant, so emit its type const-qualified.  */
  add_type_attribute (object_die: const_die, type, cv_quals: TYPE_QUAL_CONST, reverse: false, context_die);
  if (TREE_PUBLIC (decl))
    add_AT_flag (die: const_die, attr_kind: DW_AT_external, flag: 1);
  if (DECL_ARTIFICIAL (decl))
    add_AT_flag (die: const_die, attr_kind: DW_AT_artificial, flag: 1);
  tree_add_const_value_attribute_for_decl (var_die: const_die, decl);
}
| 25248 | |
/* Generate a DIE to represent a label identifier.  */

static void
gen_label_die (tree decl, dw_die_ref context_die)
{
  tree origin = decl_ultimate_origin (decl);
  dw_die_ref lbl_die = lookup_decl_die (decl);
  rtx insn;
  char label[MAX_ARTIFICIAL_LABEL_BYTES];

  if (!lbl_die)
    {
      lbl_die = new_die (tag_value: DW_TAG_label, parent_die: context_die, t: decl);
      equate_decl_number_to_die (decl, decl_die: lbl_die);

      /* An inlined instance points back at its abstract origin instead
	 of carrying its own name and source coordinates.  */
      if (origin != NULL)
	add_abstract_origin_attribute (die: lbl_die, origin);
      else
	add_name_and_src_coords_attributes (die: lbl_die, decl);
    }

  /* Abstract instances get no address; only concrete labels (handled
     during the late pass) get a DW_AT_low_pc.  */
  if (DECL_ABSTRACT_P (decl))
    equate_decl_number_to_die (decl, decl_die: lbl_die);
  else if (! early_dwarf)
    {
      insn = DECL_RTL_IF_SET (decl);

      /* Deleted labels are programmer specified labels which have been
	 eliminated because of various optimizations.  We still emit them
	 here so that it is possible to put breakpoints on them.  */
      if (insn
	  && (LABEL_P (insn)
	      || ((NOTE_P (insn)
		   && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
	{
	  /* When optimization is enabled (via -O) some parts of the compiler
	     (e.g. jump.cc and cse.cc) may try to delete CODE_LABEL insns which
	     represent source-level labels which were explicitly declared by
	     the user.  This really shouldn't be happening though, so catch
	     it if it ever does happen.  */
	  gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());

	  ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
	  add_AT_lbl_id (die: lbl_die, attr_kind: DW_AT_low_pc, lbl_id: label);
	}
      else if (insn
	       && NOTE_P (insn)
	       && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
	       && CODE_LABEL_NUMBER (insn) != -1)
	{
	  /* Deleted debug labels use the "LDL" prefix instead.  */
	  ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
	  add_AT_lbl_id (die: lbl_die, attr_kind: DW_AT_low_pc, lbl_id: label);
	}
    }
}
| 25304 | |
/* A helper function for gen_inlined_subroutine_die.  Add source coordinate
   attributes to the DIE for a block STMT, to describe where the inlined
   function was called from.  This is similar to add_src_coords_attributes.  */

static inline void
add_call_src_coords_attributes (tree stmt, dw_die_ref die)
{
  /* We can end up with BUILTINS_LOCATION here.  */
  if (RESERVED_LOCATION_P (BLOCK_SOURCE_LOCATION (stmt)))
    return;

  location_t locus = BLOCK_SOURCE_LOCATION (stmt);
  expanded_location s = expand_location (locus);

  /* DW_AT_call_file/line/column are emitted for DWARF 3 and later, or
     for earlier versions when not in strict mode.  */
  if (dwarf_version >= 3 || !dwarf_strict)
    {
      add_AT_file (die, attr_kind: DW_AT_call_file, fd: lookup_filename (s.file));
      add_AT_unsigned (die, attr_kind: DW_AT_call_line, unsigned_val: s.line);
      if (debug_column_info && s.column)
	add_AT_unsigned (die, attr_kind: DW_AT_call_column, unsigned_val: s.column);
      unsigned discr = get_discriminator_from_loc (locus);
      if (discr != 0)
	add_AT_unsigned (die, attr_kind: DW_AT_GNU_discriminator, unsigned_val: discr);
    }
}
| 25330 | |
| 25331 | |
| 25332 | /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die. |
| 25333 | Add low_pc and high_pc attributes to the DIE for a block STMT. */ |
| 25334 | |
| 25335 | static inline void |
| 25336 | add_high_low_attributes (tree stmt, dw_die_ref die) |
| 25337 | { |
| 25338 | char label[MAX_ARTIFICIAL_LABEL_BYTES]; |
| 25339 | |
| 25340 | if (inline_entry_data **iedp |
| 25341 | = !inline_entry_data_table ? NULL |
| 25342 | : inline_entry_data_table->find_slot_with_hash (comparable: stmt, |
| 25343 | hash: htab_hash_pointer (stmt), |
| 25344 | insert: NO_INSERT)) |
| 25345 | { |
| 25346 | inline_entry_data *ied = *iedp; |
| 25347 | gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS); |
| 25348 | gcc_assert (debug_inline_points); |
| 25349 | gcc_assert (inlined_function_outer_scope_p (stmt)); |
| 25350 | |
| 25351 | ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num); |
| 25352 | add_AT_lbl_id (die, attr_kind: DW_AT_entry_pc, lbl_id: label); |
| 25353 | |
| 25354 | if (debug_variable_location_views && !ZERO_VIEW_P (ied->view) |
| 25355 | && !dwarf_strict) |
| 25356 | { |
| 25357 | if (!output_asm_line_debug_info ()) |
| 25358 | add_AT_unsigned (die, attr_kind: DW_AT_GNU_entry_view, unsigned_val: ied->view); |
| 25359 | else |
| 25360 | { |
| 25361 | ASM_GENERATE_INTERNAL_LABEL (label, "LVU" , ied->view); |
| 25362 | /* FIXME: this will resolve to a small number. Could we |
| 25363 | possibly emit smaller data? Ideally we'd emit a |
| 25364 | uleb128, but that would make the size of DIEs |
| 25365 | impossible for the compiler to compute, since it's |
| 25366 | the assembler that computes the value of the view |
| 25367 | label in this case. Ideally, we'd have a single form |
| 25368 | encompassing both the address and the view, and |
| 25369 | indirecting them through a table might make things |
| 25370 | easier, but even that would be more wasteful, |
| 25371 | space-wise, than what we have now. */ |
| 25372 | add_AT_symview (die, attr_kind: DW_AT_GNU_entry_view, view_label: label); |
| 25373 | } |
| 25374 | } |
| 25375 | |
| 25376 | inline_entry_data_table->clear_slot (slot: iedp); |
| 25377 | } |
| 25378 | |
| 25379 | if (BLOCK_FRAGMENT_CHAIN (stmt) |
| 25380 | && (dwarf_version >= 3 || !dwarf_strict)) |
| 25381 | { |
| 25382 | tree chain, superblock = NULL_TREE; |
| 25383 | dw_die_ref pdie; |
| 25384 | dw_attr_node *attr = NULL; |
| 25385 | |
| 25386 | if (!debug_inline_points && inlined_function_outer_scope_p (block: stmt)) |
| 25387 | { |
| 25388 | ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL, |
| 25389 | BLOCK_NUMBER (stmt)); |
| 25390 | add_AT_lbl_id (die, attr_kind: DW_AT_entry_pc, lbl_id: label); |
| 25391 | } |
| 25392 | |
| 25393 | /* Optimize duplicate .debug_ranges lists or even tails of |
| 25394 | lists. If this BLOCK has same ranges as its supercontext, |
| 25395 | lookup DW_AT_ranges attribute in the supercontext (and |
| 25396 | recursively so), verify that the ranges_table contains the |
| 25397 | right values and use it instead of adding a new .debug_range. */ |
| 25398 | for (chain = stmt, pdie = die; |
| 25399 | BLOCK_SAME_RANGE (chain); |
| 25400 | chain = BLOCK_SUPERCONTEXT (chain)) |
| 25401 | { |
| 25402 | dw_attr_node *new_attr; |
| 25403 | |
| 25404 | pdie = pdie->die_parent; |
| 25405 | if (pdie == NULL) |
| 25406 | break; |
| 25407 | if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE) |
| 25408 | break; |
| 25409 | new_attr = get_AT (die: pdie, attr_kind: DW_AT_ranges); |
| 25410 | if (new_attr == NULL |
| 25411 | || new_attr->dw_attr_val.val_class != dw_val_class_range_list) |
| 25412 | break; |
| 25413 | attr = new_attr; |
| 25414 | superblock = BLOCK_SUPERCONTEXT (chain); |
| 25415 | } |
| 25416 | if (attr != NULL |
| 25417 | && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num |
| 25418 | == (int)BLOCK_NUMBER (superblock)) |
| 25419 | && BLOCK_FRAGMENT_CHAIN (superblock)) |
| 25420 | { |
| 25421 | unsigned long off = attr->dw_attr_val.v.val_offset; |
| 25422 | unsigned long supercnt = 0, thiscnt = 0; |
| 25423 | for (chain = BLOCK_FRAGMENT_CHAIN (superblock); |
| 25424 | chain; chain = BLOCK_FRAGMENT_CHAIN (chain)) |
| 25425 | { |
| 25426 | ++supercnt; |
| 25427 | gcc_checking_assert ((*ranges_table)[off + supercnt].num |
| 25428 | == (int)BLOCK_NUMBER (chain)); |
| 25429 | } |
| 25430 | gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0); |
| 25431 | for (chain = BLOCK_FRAGMENT_CHAIN (stmt); |
| 25432 | chain; chain = BLOCK_FRAGMENT_CHAIN (chain)) |
| 25433 | ++thiscnt; |
| 25434 | gcc_assert (supercnt >= thiscnt); |
| 25435 | add_AT_range_list (die, attr_kind: DW_AT_ranges, offset: off + supercnt - thiscnt, |
| 25436 | force_direct: false); |
| 25437 | note_rnglist_head (offset: off + supercnt - thiscnt); |
| 25438 | return; |
| 25439 | } |
| 25440 | |
| 25441 | unsigned int offset = add_ranges (block: stmt, maybe_new_sec: true); |
| 25442 | add_AT_range_list (die, attr_kind: DW_AT_ranges, offset, force_direct: false); |
| 25443 | note_rnglist_head (offset); |
| 25444 | |
| 25445 | bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt); |
| 25446 | chain = BLOCK_FRAGMENT_CHAIN (stmt); |
| 25447 | do |
| 25448 | { |
| 25449 | add_ranges (block: chain, maybe_new_sec: prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain)); |
| 25450 | prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain); |
| 25451 | chain = BLOCK_FRAGMENT_CHAIN (chain); |
| 25452 | } |
| 25453 | while (chain); |
| 25454 | add_ranges (NULL); |
| 25455 | } |
| 25456 | else |
| 25457 | { |
| 25458 | char label_high[MAX_ARTIFICIAL_LABEL_BYTES]; |
| 25459 | ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL, |
| 25460 | BLOCK_NUMBER (stmt)); |
| 25461 | ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL, |
| 25462 | BLOCK_NUMBER (stmt)); |
| 25463 | add_AT_low_high_pc (die, lbl_low: label, lbl_high: label_high, force_direct: false); |
| 25464 | } |
| 25465 | } |
| 25466 | |
/* Generate a DIE for a lexical block.  STMT is the BLOCK tree node and
   CONTEXT_DIE the DIE of the enclosing scope.  Creates (or reuses) a
   DW_TAG_lexical_block DIE, attaches abstract-origin and PC-range
   attributes as appropriate, then generates DIEs for the declarations
   in the block's scope.  */

static void
gen_lexical_block_die (tree stmt, dw_die_ref context_die)
{
  /* OLD_DIE is the DIE already created for this BLOCK by an earlier
     pass, if any; STMT_DIE is the DIE we will attach attributes and
     children to.  */
  dw_die_ref old_die = lookup_block_die (block: stmt);
  dw_die_ref stmt_die = NULL;
  if (!old_die)
    {
      stmt_die = new_die (tag_value: DW_TAG_lexical_block, parent_die: context_die, t: stmt);
      equate_block_to_die (block: stmt, die: stmt_die);
    }

  if (BLOCK_ABSTRACT_ORIGIN (stmt))
    {
      /* If this is an inlined or concrete instance, create a new lexical
	 die for anything below to attach DW_AT_abstract_origin to.  */
      if (old_die)
	stmt_die = new_die (tag_value: DW_TAG_lexical_block, parent_die: context_die, t: stmt);

      tree origin = block_ultimate_origin (stmt);
      /* Avoid pointing the DIE at itself: skip the attribute when the
	 ultimate origin is this very block and no earlier DIE exists.  */
      if (origin != NULL_TREE && (origin != stmt || old_die))
	add_abstract_origin_attribute (die: stmt_die, origin);

      /* The earlier DIE, if any, now serves as the abstract instance;
	 do not reuse it for the concrete one below.  */
      old_die = NULL;
    }

  if (old_die)
    stmt_die = old_die;

  /* A non abstract block whose blocks have already been reordered
     should have the instruction range for this block.  If so, set the
     high/low attributes.  */
  if (!early_dwarf && TREE_ASM_WRITTEN (stmt))
    {
      gcc_assert (stmt_die);
      add_high_low_attributes (stmt, die: stmt_die);
    }

  decls_for_scope (stmt, stmt_die);
}
| 25508 | |
/* Generate a DIE for an inlined subprogram.  STMT is the outermost
   BLOCK of the inlined body; CONTEXT_DIE is the DIE of the scope the
   inlined call appears in.  Emits a DW_TAG_inlined_subroutine DIE with
   abstract origin, PC ranges (when assembly has been written) and call
   source coordinates, then recurses into the block's declarations.  */

static void
gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
{
  /* DECL is the abstract function this BLOCK was inlined from.  */
  tree decl = block_ultimate_origin (stmt);

  /* Make sure any inlined functions are known to be inlineable.  */
  gcc_checking_assert (DECL_ABSTRACT_P (decl)
		       || cgraph_function_possibly_inlined_p (decl));

  dw_die_ref subr_die = new_die (tag_value: DW_TAG_inlined_subroutine, parent_die: context_die, t: stmt);

  /* Record the block -> DIE mapping so call-site and inline-entry
     handling can find this DIE later.  */
  if (call_arg_locations || debug_inline_points)
    equate_block_to_die (block: stmt, die: subr_die);
  add_abstract_origin_attribute (die: subr_die, origin: decl);
  if (TREE_ASM_WRITTEN (stmt))
    add_high_low_attributes (stmt, die: subr_die);
  add_call_src_coords_attributes (stmt, die: subr_die);

  /* The inliner creates an extra BLOCK for the parameter setup,
     we want to merge that with the actual outermost BLOCK of the
     inlined function to avoid duplicate locals in consumers.
     Do that by doing the recursion to subblocks on the single subblock
     of STMT.  */
  bool unwrap_one = false;
  tree sub = BLOCK_SUBBLOCKS (stmt);
  if (sub)
    {
      tree origin = block_ultimate_origin (sub);
      if (origin
	  && TREE_CODE (origin) == BLOCK
	  && BLOCK_SUPERCONTEXT (origin) == decl)
	unwrap_one = true;
      /* Only merge when every following sibling of SUB is one of SUB's
	 fragments; otherwise keep the wrapper block intact.  */
      for (tree next = BLOCK_CHAIN (sub); unwrap_one && next;
	   next = BLOCK_CHAIN (next))
	if (BLOCK_FRAGMENT_ORIGIN (next) != sub)
	  unwrap_one = false;
    }
  decls_for_scope (stmt, subr_die, !unwrap_one);
  if (unwrap_one)
    {
      decls_for_scope (sub, subr_die);
      for (sub = BLOCK_CHAIN (sub); sub; sub = BLOCK_CHAIN (sub))
	gen_block_die (sub, subr_die);
    }
}
| 25556 | |
/* Generate a DIE for a field in a record, or structure.  CTX is required: see
   the comment for VLR_CONTEXT.  Emits a DW_TAG_member DIE with name, type,
   layout (bit field and data-member-location) and accessibility
   information under CONTEXT_DIE, and records the decl -> DIE mapping.
   Does nothing when the field's type is erroneous.  */

static void
gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
{
  dw_die_ref decl_die;

  if (TREE_TYPE (decl) == error_mark_node)
    return;

  decl_die = new_die (tag_value: DW_TAG_member, parent_die: context_die, t: decl);
  add_name_and_src_coords_attributes (die: decl_die, decl);
  add_type_attribute (object_die: decl_die, type: member_declared_type (member: decl), cv_quals: decl_quals (decl),
		      TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
		      context_die);

  /* Bit fields additionally need byte size, bit size and bit offset.  */
  if (DECL_BIT_FIELD_TYPE (decl))
    {
      add_byte_size_attribute (die: decl_die, tree_node: decl);
      add_bit_size_attribute (die: decl_die, decl);
      add_bit_offset_attribute (die: decl_die, decl);
    }

  add_alignment_attribute (die: decl_die, tree_node: decl);

  /* Union members all live at offset zero, so no location is needed.  */
  if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
    add_data_member_location_attribute (die: decl_die, decl, ctx);

  if (DECL_ARTIFICIAL (decl))
    add_AT_flag (die: decl_die, attr_kind: DW_AT_artificial, flag: 1);

  add_accessibility_attribute (die: decl_die, decl);

  /* Add DW_AT_export_symbols to anonymous unions or structs.  */
  if ((dwarf_version >= 5 || !dwarf_strict) && DECL_NAME (decl) == NULL_TREE)
    if (tree type = member_declared_type (member: decl))
      if (lang_hooks.types.type_dwarf_attribute (TYPE_MAIN_VARIANT (type),
						 DW_AT_export_symbols) != -1)
	{
	  dw_die_ref type_die = lookup_type_die (TYPE_MAIN_VARIANT (type));
	  if (type_die && get_AT (die: type_die, attr_kind: DW_AT_export_symbols) == NULL)
	    add_AT_flag (die: type_die, attr_kind: DW_AT_export_symbols, flag: 1);
	}

  /* Equate decl number to die, so that we can look up this decl later on.  */
  equate_decl_number_to_die (decl, decl_die);
}
| 25605 | |
/* Generate a DIE for a pointer to a member type.  TYPE can be an
   OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
   pointer to member function.  The DIE is created under the scope DIE
   appropriate for TYPE; nothing is done if the DIE already exists.  */

static void
gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
{
  /* Only generate the DIE once per type.  */
  if (lookup_type_die (type))
    return;

  dw_die_ref ptr_die = new_die (tag_value: DW_TAG_ptr_to_member_type,
				parent_die: scope_die_for (t: type, context_die), t: type);

  equate_type_number_to_die (type, type_die: ptr_die);
  add_AT_die_ref (die: ptr_die, attr_kind: DW_AT_containing_type,
		  targ_die: lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
  add_type_attribute (object_die: ptr_die, TREE_TYPE (type), cv_quals: TYPE_UNQUALIFIED, reverse: false,
		      context_die);
  add_alignment_attribute (die: ptr_die, tree_node: type);

  /* For pointers to data members, describe how to find the member: add
     the pointer's offset value to the containing object's address
     (a bare DW_OP_plus in the DW_AT_use_location expression).  */
  if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
      && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
    {
      dw_loc_descr_ref op = new_loc_descr (op: DW_OP_plus, oprnd1: 0, oprnd2: 0);
      add_AT_loc (die: ptr_die, attr_kind: DW_AT_use_location, loc: op);
    }
}
| 25633 | |
/* Cached string for the DW_AT_producer attribute; set up elsewhere
   (not in this chunk) and read by gen_compile_unit_die.  */
static char *producer_string;
| 25635 | |
| 25636 | /* Given a C and/or C++ language/version string return the "highest". |
| 25637 | C++ is assumed to be "higher" than C in this case. Used for merging |
| 25638 | LTO translation unit languages. */ |
| 25639 | static const char * |
| 25640 | highest_c_language (const char *lang1, const char *lang2) |
| 25641 | { |
| 25642 | if (strcmp (s1: "GNU C++26" , s2: lang1) == 0 || strcmp (s1: "GNU C++26" , s2: lang2) == 0) |
| 25643 | return "GNU C++26" ; |
| 25644 | if (strcmp (s1: "GNU C++23" , s2: lang1) == 0 || strcmp (s1: "GNU C++23" , s2: lang2) == 0) |
| 25645 | return "GNU C++23" ; |
| 25646 | if (strcmp (s1: "GNU C++20" , s2: lang1) == 0 || strcmp (s1: "GNU C++20" , s2: lang2) == 0) |
| 25647 | return "GNU C++20" ; |
| 25648 | if (strcmp (s1: "GNU C++17" , s2: lang1) == 0 || strcmp (s1: "GNU C++17" , s2: lang2) == 0) |
| 25649 | return "GNU C++17" ; |
| 25650 | if (strcmp (s1: "GNU C++14" , s2: lang1) == 0 || strcmp (s1: "GNU C++14" , s2: lang2) == 0) |
| 25651 | return "GNU C++14" ; |
| 25652 | if (strcmp (s1: "GNU C++11" , s2: lang1) == 0 || strcmp (s1: "GNU C++11" , s2: lang2) == 0) |
| 25653 | return "GNU C++11" ; |
| 25654 | if (strcmp (s1: "GNU C++98" , s2: lang1) == 0 || strcmp (s1: "GNU C++98" , s2: lang2) == 0) |
| 25655 | return "GNU C++98" ; |
| 25656 | |
| 25657 | if (strcmp (s1: "GNU C2Y" , s2: lang1) == 0 || strcmp (s1: "GNU C2Y" , s2: lang2) == 0) |
| 25658 | return "GNU C2Y" ; |
| 25659 | if (strcmp (s1: "GNU C23" , s2: lang1) == 0 || strcmp (s1: "GNU C23" , s2: lang2) == 0) |
| 25660 | return "GNU C23" ; |
| 25661 | if (strcmp (s1: "GNU C17" , s2: lang1) == 0 || strcmp (s1: "GNU C17" , s2: lang2) == 0) |
| 25662 | return "GNU C17" ; |
| 25663 | if (strcmp (s1: "GNU C11" , s2: lang1) == 0 || strcmp (s1: "GNU C11" , s2: lang2) == 0) |
| 25664 | return "GNU C11" ; |
| 25665 | if (strcmp (s1: "GNU C99" , s2: lang1) == 0 || strcmp (s1: "GNU C99" , s2: lang2) == 0) |
| 25666 | return "GNU C99" ; |
| 25667 | if (strcmp (s1: "GNU C89" , s2: lang1) == 0 || strcmp (s1: "GNU C89" , s2: lang2) == 0) |
| 25668 | return "GNU C89" ; |
| 25669 | |
| 25670 | gcc_unreachable (); |
| 25671 | } |
| 25672 | |
| 25673 | |
/* Generate the DIE for the compilation unit.  FILENAME is the primary
   source file name, or NULL when unknown; for real file names the
   DW_AT_name and (except for "<built-in>"-style names) DW_AT_comp_dir
   attributes are emitted.  Language attributes are derived from
   lang_hooks.name.  Returns the new DW_TAG_compile_unit DIE.  */

static dw_die_ref
gen_compile_unit_die (const char *filename)
{
  dw_die_ref die;
  const char *language_string = lang_hooks.name;
  /* LANGUAGE is the classic DW_AT_language code; LNAME/LVERSION, when
     nonzero, form the newer DW_AT_language_name/_version pair.  */
  int language, lname, lversion;

  die = new_die (tag_value: DW_TAG_compile_unit, NULL, NULL);

  if (filename)
    {
      add_filename_attribute (die, name_string: filename);
      /* Don't add cwd for <built-in>.  */
      if (filename[0] != '<')
	add_comp_dir_attribute (die);
    }

  add_AT_string (die, attr_kind: DW_AT_producer, str: producer_string ? producer_string : "");

  /* If our producer is LTO try to figure out a common language to use
     from the global list of translation units.  */
  if (strcmp (s1: language_string, s2: "GNU GIMPLE") == 0)
    {
      unsigned i;
      tree t;
      const char *common_lang = NULL;

      FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
	{
	  if (!TRANSLATION_UNIT_LANGUAGE (t))
	    continue;
	  if (!common_lang)
	    common_lang = TRANSLATION_UNIT_LANGUAGE (t);
	  else if (strcmp (s1: common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
	    ;
	  else if (startswith (str: common_lang, prefix: "GNU C")
		   && startswith (TRANSLATION_UNIT_LANGUAGE (t), prefix: "GNU C"))
	    /* Mixing C and C++ is ok, use C++ in that case.  */
	    common_lang = highest_c_language (lang1: common_lang,
					      TRANSLATION_UNIT_LANGUAGE (t));
	  else
	    {
	      /* Fall back to C.  */
	      common_lang = NULL;
	      break;
	    }
	}

      if (common_lang)
	language_string = common_lang;
    }

  /* Translate LANGUAGE_STRING into DWARF language codes.  Newer codes
     are only used when the requested DWARF version (and strictness)
     permits; otherwise fall back to an older related language code.  */
  language = DW_LANG_C;
  lname = 0;
  lversion = 0;
  if (startswith (str: language_string, prefix: "GNU C")
      && ISDIGIT (language_string[5]))
    {
      language = DW_LANG_C89;
      if (dwarf_version >= 3 || !dwarf_strict)
	{
	  if (strcmp (s1: language_string, s2: "GNU C89") != 0)
	    language = DW_LANG_C99;

	  if (dwarf_version >= 5 /* || !dwarf_strict */)
	    {
	      if (strcmp (s1: language_string, s2: "GNU C11") == 0)
		language = DW_LANG_C11;
	      else if (strcmp (s1: language_string, s2: "GNU C17") == 0)
		{
		  language = DW_LANG_C11;
		  lname = DW_LNAME_C;
		  lversion = 201710;
		}
	      else if (strcmp (s1: language_string, s2: "GNU C23") == 0)
		{
		  language = DW_LANG_C11;
		  lname = DW_LNAME_C;
		  lversion = 202311;
		}
	      else if (strcmp (s1: language_string, s2: "GNU C2Y") == 0)
		{
		  language = DW_LANG_C11;
		  lname = DW_LNAME_C;
		  lversion = 202500;
		}
	    }
	}
    }
  else if (startswith (str: language_string, prefix: "GNU C++"))
    {
      language = DW_LANG_C_plus_plus;
      if (dwarf_version >= 5 /* || !dwarf_strict */)
	{
	  if (strcmp (s1: language_string, s2: "GNU C++11") == 0)
	    language = DW_LANG_C_plus_plus_11;
	  else if (strcmp (s1: language_string, s2: "GNU C++14") == 0)
	    language = DW_LANG_C_plus_plus_14;
	  else if (strcmp (s1: language_string, s2: "GNU C++17") == 0)
	    {
	      language = DW_LANG_C_plus_plus_14;
	      lname = DW_LNAME_C_plus_plus;
	      lversion = 201703;
	    }
	  else if (strcmp (s1: language_string, s2: "GNU C++20") == 0)
	    {
	      language = DW_LANG_C_plus_plus_14;
	      lname = DW_LNAME_C_plus_plus;
	      lversion = 202002;
	    }
	  else if (strcmp (s1: language_string, s2: "GNU C++23") == 0)
	    {
	      language = DW_LANG_C_plus_plus_14;
	      lname = DW_LNAME_C_plus_plus;
	      lversion = 202302;
	    }
	  else if (strcmp (s1: language_string, s2: "GNU C++26") == 0)
	    {
	      language = DW_LANG_C_plus_plus_14;
	      lname = DW_LNAME_C_plus_plus;
	      lversion = 202400;
	    }
	}
    }
  else if (strcmp (s1: language_string, s2: "GNU F77") == 0)
    language = DW_LANG_Fortran77;
  else if (strcmp (s1: language_string, s2: "GCC COBOL") == 0)
    language = DW_LANG_Cobol85;
  else if (strcmp (s1: language_string, s2: "GNU Modula-2") == 0)
    language = DW_LANG_Modula2;
  else if (dwarf_version >= 3 || !dwarf_strict)
    {
      if (strcmp (s1: language_string, s2: "GNU Ada") == 0)
	language = DW_LANG_Ada95;
      else if (startswith (str: language_string, prefix: "GNU Fortran"))
	{
	  language = DW_LANG_Fortran95;
	  if (dwarf_version >= 5 /* || !dwarf_strict */)
	    {
	      if (strcmp (s1: language_string, s2: "GNU Fortran2003") == 0)
		language = DW_LANG_Fortran03;
	      else if (strcmp (s1: language_string, s2: "GNU Fortran2008") == 0)
		language = DW_LANG_Fortran08;
	    }
	}
      else if (strcmp (s1: language_string, s2: "GNU Objective-C") == 0)
	language = DW_LANG_ObjC;
      else if (strcmp (s1: language_string, s2: "GNU Objective-C++") == 0)
	language = DW_LANG_ObjC_plus_plus;
      else if (strcmp (s1: language_string, s2: "GNU D") == 0)
	language = DW_LANG_D;
      else if (dwarf_version >= 5 || !dwarf_strict)
	{
	  if (strcmp (s1: language_string, s2: "GNU Go") == 0)
	    language = DW_LANG_Go;
	  else if (strcmp (s1: language_string, s2: "GNU Rust") == 0)
	    language = DW_LANG_Rust;
	  else if (strcmp (s1: language_string, s2: "GNU Algol 68") == 0)
	    {
	      language = DW_LANG_Algol68;
	      lname = DW_LNAME_Algol68;
	      lversion = 1978;  /* Not a typo.  The revised language of the
				   Revised Report.  */
	    }
	}
    }
  /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works.  */
  else if (startswith (str: language_string, prefix: "GNU Fortran"))
    language = DW_LANG_Fortran90;
  /* Likewise for Ada.  */
  else if (strcmp (s1: language_string, s2: "GNU Ada") == 0)
    language = DW_LANG_Ada83;

  add_AT_unsigned (die, attr_kind: DW_AT_language, unsigned_val: language);
  /* Additionally emit the language name/version pair when known and the
     DWARF version/strictness allows the extension attributes.  */
  if (lname && dwarf_version >= 5 && !dwarf_strict)
    {
      add_AT_unsigned (die, attr_kind: DW_AT_language_name, unsigned_val: lname);
      add_AT_unsigned (die, attr_kind: DW_AT_language_version, unsigned_val: lversion);
    }

  switch (language)
    {
    case DW_LANG_Fortran77:
    case DW_LANG_Fortran90:
    case DW_LANG_Fortran95:
    case DW_LANG_Fortran03:
    case DW_LANG_Fortran08:
      /* Fortran has case insensitive identifiers and the front-end
	 lowercases everything.  */
      add_AT_unsigned (die, attr_kind: DW_AT_identifier_case, unsigned_val: DW_ID_down_case);
      break;
    case DW_LANG_Cobol85:
      add_AT_unsigned (die, attr_kind: DW_AT_identifier_case, unsigned_val: DW_ID_case_insensitive);
      break;
    default:
      /* The default DW_ID_case_sensitive doesn't need to be specified.  */
      break;
    }
  return die;
}
| 25876 | |
/* Generate the DIE for a base class.  BINFO describes TYPE's
   inheritance from the base class; ACCESS is the access specifier node
   (access_public_node, access_protected_node or other) and CONTEXT_DIE
   is the DIE of the derived class.  */

static void
gen_inheritance_die (tree binfo, tree access, tree type,
		     dw_die_ref context_die)
{
  dw_die_ref die = new_die (tag_value: DW_TAG_inheritance, parent_die: context_die, t: binfo);
  struct vlr_context ctx = { .struct_type: type, NULL };

  add_type_attribute (object_die: die, BINFO_TYPE (binfo), cv_quals: TYPE_UNQUALIFIED, reverse: false,
		      context_die);
  add_data_member_location_attribute (die, decl: binfo, ctx: &ctx);

  if (BINFO_VIRTUAL_P (binfo))
    add_AT_unsigned (die, attr_kind: DW_AT_virtuality, unsigned_val: DW_VIRTUALITY_virtual);

  /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
     children, otherwise the default is DW_ACCESS_public.  In DWARF2
     the default has always been DW_ACCESS_private.  */
  if (access == access_public_node)
    {
      /* Only emit the attribute where "public" is not already the
	 default for this DWARF version and parent tag.  */
      if (dwarf_version == 2
	  || context_die->die_tag == DW_TAG_class_type)
	add_AT_unsigned (die, attr_kind: DW_AT_accessibility, unsigned_val: DW_ACCESS_public);
    }
  else if (access == access_protected_node)
    add_AT_unsigned (die, attr_kind: DW_AT_accessibility, unsigned_val: DW_ACCESS_protected);
  else if (dwarf_version > 2
	   && context_die->die_tag != DW_TAG_class_type)
    /* Private: only needed where the default would be public.  */
    add_AT_unsigned (die, attr_kind: DW_AT_accessibility, unsigned_val: DW_ACCESS_private);
}
| 25908 | |
| 25909 | /* Return whether DECL is a FIELD_DECL that represents the variant part of a |
| 25910 | structure. */ |
| 25911 | |
| 25912 | static bool |
| 25913 | is_variant_part (tree decl) |
| 25914 | { |
| 25915 | return (TREE_CODE (decl) == FIELD_DECL |
| 25916 | && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE); |
| 25917 | } |
| 25918 | |
| 25919 | /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is, |
| 25920 | return the FIELD_DECL. Return NULL_TREE otherwise. */ |
| 25921 | |
| 25922 | static tree |
| 25923 | analyze_discr_in_predicate (tree operand, tree struct_type) |
| 25924 | { |
| 25925 | while (CONVERT_EXPR_P (operand)) |
| 25926 | operand = TREE_OPERAND (operand, 0); |
| 25927 | |
| 25928 | /* Match field access to members of struct_type only. */ |
| 25929 | if (TREE_CODE (operand) == COMPONENT_REF |
| 25930 | && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR |
| 25931 | && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type |
| 25932 | && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL) |
| 25933 | return TREE_OPERAND (operand, 1); |
| 25934 | else |
| 25935 | return NULL_TREE; |
| 25936 | } |
| 25937 | |
/* Check that SRC is a constant integer that can be represented as a native
   integer constant (either signed or unsigned).  If so, store it into DEST and
   return true.  Return false otherwise.  */

static bool
get_discr_value (tree src, dw_discr_value *dest)
{
  tree discr_type = TREE_TYPE (src);

  /* Let the language substitute the type meant for debug information
     (e.g. a debugger-visible variant of the codegen type).  */
  if (lang_hooks.types.get_debug_type)
    {
      tree debug_type = lang_hooks.types.get_debug_type (discr_type);
      if (debug_type != NULL)
	discr_type = debug_type;
    }

  /* Only integral constants can be encoded.  */
  if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
    return false;

  /* Signedness can vary between the original type and the debug type.  This
     can happen for character types in Ada for instance: the character type
     used for code generation can be signed, to be compatible with the C one,
     but from a debugger point of view, it must be unsigned.  */
  bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
  bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);

  if (is_orig_unsigned != is_debug_unsigned)
    src = fold_convert (discr_type, src);

  /* Give up on values that do not fit a host-wide integer of the debug
     type's signedness.  */
  if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
    return false;

  /* DEST->pos records which union member (uval vs. sval) is valid.  */
  dest->pos = is_debug_unsigned;
  if (is_debug_unsigned)
    dest->v.uval = tree_to_uhwi (src);
  else
    dest->v.sval = tree_to_shwi (src);

  return true;
}
| 25978 | |
| 25979 | /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a |
| 25980 | FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful, |
| 25981 | store NULL_TREE in DISCR_DECL. Otherwise: |
| 25982 | |
| 25983 | - store the discriminant field in STRUCT_TYPE that controls the variant |
| 25984 | part to *DISCR_DECL |
| 25985 | |
| 25986 | - put in *DISCR_LISTS_P an array where for each variant, the item |
| 25987 | represents the corresponding matching list of discriminant values. |
| 25988 | |
| 25989 | - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of |
| 25990 | the above array. |
| 25991 | |
| 25992 | Note that when the array is allocated (i.e. when the analysis is |
| 25993 | successful), it is up to the caller to free the array. */ |
| 25994 | |
| 25995 | static void |
| 25996 | analyze_variants_discr (tree variant_part_decl, |
| 25997 | tree struct_type, |
| 25998 | tree *discr_decl, |
| 25999 | dw_discr_list_ref **discr_lists_p, |
| 26000 | unsigned *discr_lists_length) |
| 26001 | { |
| 26002 | tree variant_part_type = TREE_TYPE (variant_part_decl); |
| 26003 | tree variant; |
| 26004 | dw_discr_list_ref *discr_lists; |
| 26005 | unsigned i; |
| 26006 | |
| 26007 | /* Compute how many variants there are in this variant part. */ |
| 26008 | *discr_lists_length = 0; |
| 26009 | for (variant = TYPE_FIELDS (variant_part_type); |
| 26010 | variant != NULL_TREE; |
| 26011 | variant = DECL_CHAIN (variant)) |
| 26012 | ++*discr_lists_length; |
| 26013 | |
| 26014 | *discr_decl = NULL_TREE; |
| 26015 | *discr_lists_p |
| 26016 | = (dw_discr_list_ref *) xcalloc (*discr_lists_length, |
| 26017 | sizeof (**discr_lists_p)); |
| 26018 | discr_lists = *discr_lists_p; |
| 26019 | |
| 26020 | /* And then analyze all variants to extract discriminant information for all |
| 26021 | of them. This analysis is conservative: as soon as we detect something we |
| 26022 | do not support, abort everything and pretend we found nothing. */ |
| 26023 | for (variant = TYPE_FIELDS (variant_part_type), i = 0; |
| 26024 | variant != NULL_TREE; |
| 26025 | variant = DECL_CHAIN (variant), ++i) |
| 26026 | { |
| 26027 | tree match_expr = DECL_QUALIFIER (variant); |
| 26028 | |
| 26029 | /* Now, try to analyze the predicate and deduce a discriminant for |
| 26030 | it. */ |
| 26031 | if (match_expr == boolean_true_node) |
| 26032 | /* Typically happens for the default variant: it matches all cases that |
| 26033 | previous variants rejected. Don't output any matching value for |
| 26034 | this one. */ |
| 26035 | continue; |
| 26036 | |
| 26037 | /* The following loop tries to iterate over each discriminant |
| 26038 | possibility: single values or ranges. */ |
| 26039 | while (match_expr != NULL_TREE) |
| 26040 | { |
| 26041 | tree next_round_match_expr; |
| 26042 | tree candidate_discr = NULL_TREE; |
| 26043 | dw_discr_list_ref new_node = NULL; |
| 26044 | |
| 26045 | /* Possibilities are matched one after the other by nested |
| 26046 | TRUTH_ORIF_EXPR expressions. Process the current possibility and |
| 26047 | continue with the rest at next iteration. */ |
| 26048 | if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR) |
| 26049 | { |
| 26050 | next_round_match_expr = TREE_OPERAND (match_expr, 0); |
| 26051 | match_expr = TREE_OPERAND (match_expr, 1); |
| 26052 | } |
| 26053 | else |
| 26054 | next_round_match_expr = NULL_TREE; |
| 26055 | |
| 26056 | if (match_expr == boolean_false_node) |
| 26057 | /* This sub-expression matches nothing: just wait for the next |
| 26058 | one. */ |
| 26059 | ; |
| 26060 | |
| 26061 | else if (TREE_CODE (match_expr) == EQ_EXPR) |
| 26062 | { |
| 26063 | /* We are matching: <discr_field> == <integer_cst> |
| 26064 | This sub-expression matches a single value. */ |
| 26065 | tree integer_cst = TREE_OPERAND (match_expr, 1); |
| 26066 | |
| 26067 | candidate_discr |
| 26068 | = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0), |
| 26069 | struct_type); |
| 26070 | |
| 26071 | new_node = ggc_cleared_alloc<dw_discr_list_node> (); |
| 26072 | if (!get_discr_value (src: integer_cst, |
| 26073 | dest: &new_node->dw_discr_lower_bound)) |
| 26074 | goto abort; |
| 26075 | new_node->dw_discr_range = false; |
| 26076 | } |
| 26077 | |
| 26078 | else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR) |
| 26079 | { |
| 26080 | /* We are matching: |
| 26081 | <discr_field> > <integer_cst> |
| 26082 | && <discr_field> < <integer_cst>. |
| 26083 | This sub-expression matches the range of values between the |
| 26084 | two matched integer constants. Note that comparisons can be |
| 26085 | inclusive or exclusive. */ |
| 26086 | tree candidate_discr_1, candidate_discr_2; |
| 26087 | tree lower_cst, upper_cst; |
| 26088 | bool lower_cst_included, upper_cst_included; |
| 26089 | tree lower_op = TREE_OPERAND (match_expr, 0); |
| 26090 | tree upper_op = TREE_OPERAND (match_expr, 1); |
| 26091 | |
| 26092 | /* When the comparison is exclusive, the integer constant is not |
| 26093 | the discriminant range bound we are looking for: we will have |
| 26094 | to increment or decrement it. */ |
| 26095 | if (TREE_CODE (lower_op) == GE_EXPR) |
| 26096 | lower_cst_included = true; |
| 26097 | else if (TREE_CODE (lower_op) == GT_EXPR) |
| 26098 | lower_cst_included = false; |
| 26099 | else |
| 26100 | goto abort; |
| 26101 | |
| 26102 | if (TREE_CODE (upper_op) == LE_EXPR) |
| 26103 | upper_cst_included = true; |
| 26104 | else if (TREE_CODE (upper_op) == LT_EXPR) |
| 26105 | upper_cst_included = false; |
| 26106 | else |
| 26107 | goto abort; |
| 26108 | |
| 26109 | /* Extract the discriminant from the first operand and check it |
| 26110 | is consistent with the same analysis in the second |
| 26111 | operand. */ |
| 26112 | candidate_discr_1 |
| 26113 | = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0), |
| 26114 | struct_type); |
| 26115 | candidate_discr_2 |
| 26116 | = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0), |
| 26117 | struct_type); |
| 26118 | if (candidate_discr_1 == candidate_discr_2) |
| 26119 | candidate_discr = candidate_discr_1; |
| 26120 | else |
| 26121 | goto abort; |
| 26122 | |
| 26123 | /* Extract bounds from both. */ |
| 26124 | new_node = ggc_cleared_alloc<dw_discr_list_node> (); |
| 26125 | lower_cst = TREE_OPERAND (lower_op, 1); |
| 26126 | upper_cst = TREE_OPERAND (upper_op, 1); |
| 26127 | |
| 26128 | if (!lower_cst_included) |
| 26129 | lower_cst |
| 26130 | = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst, |
| 26131 | build_int_cst (TREE_TYPE (lower_cst), 1)); |
| 26132 | if (!upper_cst_included) |
| 26133 | upper_cst |
| 26134 | = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst, |
| 26135 | build_int_cst (TREE_TYPE (upper_cst), 1)); |
| 26136 | |
| 26137 | if (!get_discr_value (src: lower_cst, |
| 26138 | dest: &new_node->dw_discr_lower_bound) |
| 26139 | || !get_discr_value (src: upper_cst, |
| 26140 | dest: &new_node->dw_discr_upper_bound)) |
| 26141 | goto abort; |
| 26142 | |
| 26143 | new_node->dw_discr_range = true; |
| 26144 | } |
| 26145 | |
| 26146 | else if ((candidate_discr |
| 26147 | = analyze_discr_in_predicate (operand: match_expr, struct_type)) |
| 26148 | && (TREE_TYPE (candidate_discr) == boolean_type_node |
| 26149 | || TREE_TYPE (TREE_TYPE (candidate_discr)) |
| 26150 | == boolean_type_node)) |
| 26151 | { |
| 26152 | /* We are matching: <discr_field> for a boolean discriminant. |
| 26153 | This sub-expression matches boolean_true_node. */ |
| 26154 | new_node = ggc_cleared_alloc<dw_discr_list_node> (); |
| 26155 | if (!get_discr_value (boolean_true_node, |
| 26156 | dest: &new_node->dw_discr_lower_bound)) |
| 26157 | goto abort; |
| 26158 | new_node->dw_discr_range = false; |
| 26159 | } |
| 26160 | |
| 26161 | else |
| 26162 | /* Unsupported sub-expression: we cannot determine the set of |
| 26163 | matching discriminant values. Abort everything. */ |
| 26164 | goto abort; |
| 26165 | |
| 26166 | /* If the discriminant info is not consistent with what we saw so |
| 26167 | far, consider the analysis failed and abort everything. */ |
| 26168 | if (candidate_discr == NULL_TREE |
| 26169 | || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl)) |
| 26170 | goto abort; |
| 26171 | else |
| 26172 | *discr_decl = candidate_discr; |
| 26173 | |
| 26174 | if (new_node != NULL) |
| 26175 | { |
| 26176 | new_node->dw_discr_next = discr_lists[i]; |
| 26177 | discr_lists[i] = new_node; |
| 26178 | } |
| 26179 | match_expr = next_round_match_expr; |
| 26180 | } |
| 26181 | } |
| 26182 | |
| 26183 | /* If we reach this point, we could match everything we were interested |
| 26184 | in. */ |
| 26185 | return; |
| 26186 | |
| 26187 | abort: |
| 26188 | /* Clean all data structure and return no result. */ |
| 26189 | free (ptr: *discr_lists_p); |
| 26190 | *discr_lists_p = NULL; |
| 26191 | *discr_decl = NULL_TREE; |
| 26192 | } |
| 26193 | |
| 26194 | /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part |
| 26195 | of STRUCT_TYPE, a record type. This new DIE is emitted as the next child |
| 26196 | under CONTEXT_DIE. |
| 26197 | |
| 26198 | Variant parts are supposed to be implemented as a FIELD_DECL whose type is a |
| 26199 | QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for |
| 26200 | this type, which are record types, represent the available variants and each |
| 26201 | has a DECL_QUALIFIER attribute. The discriminant and the discriminant |
| 26202 | values are inferred from these attributes. |
| 26203 | |
| 26204 | In trees, the offsets for the fields inside these sub-records are relative |
| 26205 | to the variant part itself, whereas the corresponding DIEs should have |
| 26206 | offset attributes that are relative to the embedding record base address. |
| 26207 | This is why the caller must provide a VARIANT_PART_OFFSET expression: it |
| 26208 | must be an expression that computes the offset of the variant part to |
| 26209 | describe in DWARF. */ |
| 26210 | |
static void
gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
		  dw_die_ref context_die)
{
  const tree variant_part_type = TREE_TYPE (variant_part_decl);
  tree variant_part_offset = vlr_ctx->variant_part_offset;

  /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
     NULL_TREE if there is no such field.  */
  tree discr_decl = NULL_TREE;
  dw_discr_list_ref *discr_lists;
  unsigned discr_lists_length = 0;
  unsigned i;

  dw_die_ref dwarf_proc_die = NULL;
  dw_die_ref variant_part_die
    = new_die (tag_value: DW_TAG_variant_part, parent_die: context_die, t: variant_part_type);

  equate_decl_number_to_die (decl: variant_part_decl, decl_die: variant_part_die);

  /* Recover the discriminant field and, for each variant, the set of
     discriminant values it matches, inferred from the DECL_QUALIFIER
     predicates on the QUAL_UNION_TYPE members.  */
  analyze_variants_discr (variant_part_decl, struct_type: vlr_ctx->struct_type,
			  discr_decl: &discr_decl, discr_lists_p: &discr_lists, discr_lists_length: &discr_lists_length);

  if (discr_decl != NULL_TREE)
    {
      dw_die_ref discr_die = lookup_decl_die (decl: discr_decl);

      if (discr_die)
	add_AT_die_ref (die: variant_part_die, attr_kind: DW_AT_discr, targ_die: discr_die);
      else
	/* We have no DIE for the discriminant, so just discard all
	   discriminant information in the output.  */
	discr_decl = NULL_TREE;
    }

  /* If the offset for this variant part is more complex than a constant,
     create a DWARF procedure for it so that we will not have to generate
     DWARF expressions for it for each member.  */
  if (TREE_CODE (variant_part_offset) != INTEGER_CST
      && (dwarf_version >= 3 || !dwarf_strict))
    {
      struct loc_descr_context ctx = {
	.context_type: vlr_ctx->struct_type, /* context_type */
	NULL_TREE,		   /* base_decl */
	NULL,			   /* dpi */
	.placeholder_arg: false,   /* placeholder_arg */
	.placeholder_seen: false,  /* placeholder_seen */
	.strict_signedness: false  /* strict_signedness */
      };
      const tree dwarf_proc_fndecl
	= build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
		      build_function_type (TREE_TYPE (variant_part_offset),
					   NULL_TREE));
      const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
      const dw_loc_descr_ref dwarf_proc_body
	= loc_descriptor_from_tree (loc: variant_part_offset, want_address: 0, context: &ctx);

      dwarf_proc_die = new_dwarf_proc_die (location: dwarf_proc_body,
					   fndecl: dwarf_proc_fndecl, parent_die: context_die);
      /* If creating the DWARF procedure succeeded, members will reference
	 the offset via a call to it; otherwise fall back to expanding the
	 original offset expression for each member.  */
      if (dwarf_proc_die != NULL)
	variant_part_offset = dwarf_proc_call;
    }

  /* Output DIEs for all variants.  */
  i = 0;
  for (tree variant = TYPE_FIELDS (variant_part_type);
       variant != NULL_TREE;
       variant = DECL_CHAIN (variant), ++i)
    {
      tree variant_type = TREE_TYPE (variant);
      dw_die_ref variant_die;

      /* All variants (i.e. members of a variant part) are supposed to be
	 encoded as structures.  Sub-variant parts are QUAL_UNION_TYPE fields
	 under these records.  */
      gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);

      variant_die = new_die (tag_value: DW_TAG_variant, parent_die: variant_part_die, t: variant_type);
      equate_decl_number_to_die (decl: variant, decl_die: variant_die);

      /* Output discriminant values this variant matches, if any.  */
      if (discr_decl == NULL || discr_lists[i] == NULL)
	/* Either we have no discriminant information at all, or this
	   variant matches no specific value: it is probably the default
	   variant.  As the standard says, don't output any discriminant
	   value/list attribute in that case.  */
	;
      else if (discr_lists[i]->dw_discr_next == NULL
	       && !discr_lists[i]->dw_discr_range)
	/* If there is only one accepted value, don't bother outputting a
	   list.  */
	add_discr_value (die: variant_die, value: &discr_lists[i]->dw_discr_lower_bound);
      else
	add_discr_list (die: variant_die, discr_list: discr_lists[i]);

      for (tree member = TYPE_FIELDS (variant_type);
	   member != NULL_TREE;
	   member = DECL_CHAIN (member))
	{
	  struct vlr_context vlr_sub_ctx = {
	    .struct_type: vlr_ctx->struct_type, /* struct_type */
	    NULL		       /* variant_part_offset */
	  };
	  if (is_variant_part (decl: member))
	    {
	      /* All offsets for fields inside variant parts are relative to
		 the top-level embedding RECORD_TYPE's base address.  On the
		 other hand, offsets in GCC's types are relative to the
		 nested-most variant part.  So we have to sum offsets each time
		 we recurse.  */

	      vlr_sub_ctx.variant_part_offset
		= fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
			       variant_part_offset, byte_position (member));
	      gen_variant_part (variant_part_decl: member, vlr_ctx: &vlr_sub_ctx, context_die: variant_die);
	    }
	  else
	    {
	      /* Regular members inherit this variant part's offset as-is.  */
	      vlr_sub_ctx.variant_part_offset = variant_part_offset;
	      gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
	    }
	}
    }

  free (ptr: discr_lists);
}
| 26336 | |
/* Generate DIEs for the members of TYPE: its base classes (early DWARF
   only) and each of its fields, including variant parts and inline
   static data members.  */
| 26338 | |
static void
gen_member_die (tree type, dw_die_ref context_die)
{
  tree member;
  tree binfo = TYPE_BINFO (type);

  gcc_assert (TYPE_MAIN_VARIANT (type) == type);

  /* If this is not an incomplete type, output descriptions of each of its
     members.  Note that as we output the DIEs necessary to represent the
     members of this record or union type, we will also be trying to output
     DIEs to represent the *types* of those members.  However the `type'
     function (above) will specifically avoid generating type DIEs for member
     types *within* the list of member DIEs for this (containing) type except
     for those types (of members) which are explicitly marked as also being
     members of this (containing) type themselves.  The g++ front end can
     force any given type to be treated as a member of some other (containing)
     type by setting the TYPE_CONTEXT of the given (member) type to point to
     the TREE node representing the appropriate (containing) type.  */

  /* First output info about the base classes.  */
  if (binfo && early_dwarf)
    {
      vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
      int i;
      tree base;

      /* BINFO_BASE_ACCESSES may be null; in that case every base is
	 treated as public.  */
      for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
	gen_inheritance_die (binfo: base,
			     access: (accesses ? (*accesses)[i] : access_public_node),
			     type,
			     context_die);
    }

  /* Now output info about the members.  */
  for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
    {
      /* Ignore clones.  */
      if (DECL_ABSTRACT_ORIGIN (member))
	continue;

      struct vlr_context vlr_ctx = { .struct_type: type, NULL_TREE };
      /* Inline static data members are recognized by the language hook
	 answering DW_AT_inline for a static VAR_P member.  */
      bool static_inline_p
	= (VAR_P (member)
	   && TREE_STATIC (member)
	   && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
	       != -1));

      /* If we thought we were generating minimal debug info for TYPE
	 and then changed our minds, some of the member declarations
	 may have already been defined.  Don't define them again, but
	 do put them in the right order.  */

      if (dw_die_ref child = lookup_decl_die (decl: member))
	{
	  /* Handle inline static data members, which only have in-class
	     declarations.  */
	  bool splice = true;

	  dw_die_ref ref = NULL;
	  if (child->die_tag == DW_TAG_variable
	      && child->die_parent == comp_unit_die ())
	    {
	      ref = get_AT_ref (die: child, attr_kind: DW_AT_specification);

	      /* For C++17 inline static data members followed by redundant
		 out of class redeclaration, we might get here with
		 child being the DIE created for the out of class
		 redeclaration and with its DW_AT_specification being
		 the DIE created for in-class definition.  We want to
		 reparent the latter, and don't want to create another
		 DIE with DW_AT_specification in that case, because
		 we already have one.  */
	      if (ref
		  && static_inline_p
		  && ref->die_tag == DW_TAG_variable
		  && ref->die_parent == comp_unit_die ()
		  && get_AT (die: ref, attr_kind: DW_AT_specification) == NULL)
		{
		  child = ref;
		  ref = NULL;
		  static_inline_p = false;
		}

	      /* A CU-level variable DIE with no specification is the
		 in-class definition itself: move it under this type
		 instead of splicing a fresh declaration in.  */
	      if (!ref)
		{
		  reparent_child (child, new_parent: context_die);
		  if (dwarf_version < 5)
		    child->die_tag = DW_TAG_member;
		  splice = false;
		}
	    }
	  else if (child->die_tag == DW_TAG_enumerator)
	    /* Enumerators remain under their enumeration even if
	       their names are introduced in the enclosing scope.  */
	    splice = false;

	  if (splice)
	    splice_child_die (parent: context_die, child);
	}

      /* Do not generate DWARF for variant parts if we are generating the
	 corresponding GNAT encodings: DIEs generated for the two schemes
	 would conflict in our mappings.  */
      else if (is_variant_part (decl: member)
	       && gnat_encodings != DWARF_GNAT_ENCODINGS_ALL)
	{
	  vlr_ctx.variant_part_offset = byte_position (member);
	  gen_variant_part (variant_part_decl: member, vlr_ctx: &vlr_ctx, context_die);
	}
      else
	{
	  vlr_ctx.variant_part_offset = NULL_TREE;
	  gen_decl_die (member, NULL, &vlr_ctx, context_die);
	}

      /* For C++ inline static data members emit immediately a DW_TAG_variable
	 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
	 DW_AT_specification.  */
      if (static_inline_p)
	{
	  /* Temporarily clear DECL_EXTERNAL so the member is emitted as a
	     definition rather than a mere declaration.  */
	  int old_extern = DECL_EXTERNAL (member);
	  DECL_EXTERNAL (member) = 0;
	  gen_decl_die (member, NULL, NULL, comp_unit_die ());
	  DECL_EXTERNAL (member) = old_extern;
	}
    }
}
| 26467 | |
| 26468 | /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG |
| 26469 | is set, we pretend that the type was never defined, so we only get the |
| 26470 | member DIEs needed by later specification DIEs. */ |
| 26471 | |
static void
gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
			      enum debug_info_usage usage)
{
  if (TREE_ASM_WRITTEN (type))
    {
      /* Fill in the bound of variable-length fields in late dwarf if
	 still incomplete.  */
      if (!early_dwarf && variably_modified_type_p (type, NULL))
	for (tree member = TYPE_FIELDS (type);
	     member;
	     member = DECL_CHAIN (member))
	  fill_variable_array_bounds (TREE_TYPE (member));
      return;
    }

  dw_die_ref type_die = lookup_type_die (type);
  dw_die_ref scope_die = 0;
  bool nested = false;
  /* The type is complete when it has a size and its stub decl (if any)
     does not ask us to suppress debug info for it.  */
  bool complete = (TYPE_SIZE (type)
		   && (! TYPE_STUB_DECL (type)
		       || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
  bool ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
  complete = complete && should_emit_struct_debug (type, usage);

  if (type_die && ! complete)
    return;

  if (TYPE_CONTEXT (type) != NULL_TREE
      && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
	  || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
    nested = true;

  scope_die = scope_die_for (t: type, context_die);

  /* Generate child dies for template parameters.  */
  if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
    schedule_generic_params_dies_gen (t: type);

  if (! type_die || (nested && is_cu_die (c: scope_die)))
    /* First occurrence of type or toplevel definition of nested class.  */
    {
      dw_die_ref old_die = type_die;

      type_die = new_die (TREE_CODE (type) == RECORD_TYPE
			  ? record_type_tag (type) : DW_TAG_union_type,
			  parent_die: scope_die, t: type);
      equate_type_number_to_die (type, type_die);
      if (old_die)
	/* The new DIE completes the earlier declaration DIE.  */
	add_AT_specification (die: type_die, targ_die: old_die);
      else
	add_name_attribute (die: type_die, name_string: type_tag (type));
    }
  else
    remove_AT (die: type_die, attr_kind: DW_AT_declaration);

  /* If this type has been completed, then give it a byte_size attribute and
     then give a list of members.  */
  if (complete && !ns_decl)
    {
      /* Prevent infinite recursion in cases where the type of some member of
	 this type is expressed in terms of this type itself.  */
      TREE_ASM_WRITTEN (type) = 1;
      add_byte_size_attribute (die: type_die, tree_node: type);
      add_alignment_attribute (die: type_die, tree_node: type);
      if (TYPE_STUB_DECL (type) != NULL_TREE)
	{
	  add_src_coords_attributes (die: type_die, TYPE_STUB_DECL (type));
	  add_accessibility_attribute (die: type_die, TYPE_STUB_DECL (type));
	}

      /* If the first reference to this type was as the return type of an
	 inline function, then it may not have a parent.  Fix this now.  */
      if (type_die->die_parent == NULL)
	add_child_die (die: scope_die, child_die: type_die);

      gen_member_die (type, context_die: type_die);

      add_gnat_descriptive_type_attribute (die: type_die, type, context_die);
      if (TYPE_ARTIFICIAL (type))
	add_AT_flag (die: type_die, attr_kind: DW_AT_artificial, flag: 1);

      /* GNU extension: Record what type our vtable lives in.  */
      if (TYPE_VFIELD (type))
	{
	  tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));

	  gen_type_die (vtype, context_die);
	  add_AT_die_ref (die: type_die, attr_kind: DW_AT_containing_type,
			  targ_die: lookup_type_die (type: vtype));
	}
    }
  else
    {
      add_AT_flag (die: type_die, attr_kind: DW_AT_declaration, flag: 1);

      /* Remember incomplete types so their DIEs can be retro-fitted when
	 the type is completed later.  We don't need to do this for
	 function-local types.  */
      if (TYPE_STUB_DECL (type)
	  && ! decl_function_context (TYPE_STUB_DECL (type)))
	vec_safe_push (v&: incomplete_types, obj: type);
    }

  if (get_AT (die: type_die, attr_kind: DW_AT_name))
    add_pubtype (decl: type, die: type_die);
}
| 26577 | |
| 26578 | /* Generate a DIE for a subroutine _type_. */ |
| 26579 | |
| 26580 | static void |
| 26581 | gen_subroutine_type_die (tree type, dw_die_ref context_die) |
| 26582 | { |
| 26583 | tree return_type = TREE_TYPE (type); |
| 26584 | dw_die_ref subr_die |
| 26585 | = new_die (tag_value: DW_TAG_subroutine_type, |
| 26586 | parent_die: scope_die_for (t: type, context_die), t: type); |
| 26587 | |
| 26588 | equate_type_number_to_die (type, type_die: subr_die); |
| 26589 | add_prototyped_attribute (die: subr_die, func_type: type); |
| 26590 | add_type_attribute (object_die: subr_die, type: return_type, cv_quals: TYPE_UNQUALIFIED, reverse: false, |
| 26591 | context_die); |
| 26592 | add_alignment_attribute (die: subr_die, tree_node: type); |
| 26593 | gen_formal_types_die (function_or_method_type: type, context_die: subr_die); |
| 26594 | |
| 26595 | if (get_AT (die: subr_die, attr_kind: DW_AT_name)) |
| 26596 | add_pubtype (decl: type, die: subr_die); |
| 26597 | if ((dwarf_version >= 5 || !dwarf_strict) |
| 26598 | && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1) |
| 26599 | add_AT_flag (die: subr_die, attr_kind: DW_AT_reference, flag: 1); |
| 26600 | if ((dwarf_version >= 5 || !dwarf_strict) |
| 26601 | && lang_hooks.types.type_dwarf_attribute (type, |
| 26602 | DW_AT_rvalue_reference) != -1) |
| 26603 | add_AT_flag (die: subr_die, attr_kind: DW_AT_rvalue_reference, flag: 1); |
| 26604 | } |
| 26605 | |
| 26606 | /* Generate a DIE for a type definition. */ |
| 26607 | |
static void
gen_typedef_die (tree decl, dw_die_ref context_die)
{
  dw_die_ref type_die;
  tree type;

  if (TREE_ASM_WRITTEN (decl))
    {
      /* Already emitted; in late dwarf, still complete any variable
	 array bounds that were unknown in early dwarf.  */
      if (DECL_ORIGINAL_TYPE (decl))
	fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
      return;
    }

  /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
     checks in process_scope_var and modified_type_die), this should be called
     only for original types.  */
  gcc_assert (decl_ultimate_origin (decl) == NULL
	      || decl_ultimate_origin (decl) == decl);

  TREE_ASM_WRITTEN (decl) = 1;
  type_die = new_die (tag_value: DW_TAG_typedef, parent_die: context_die, t: decl);

  add_name_and_src_coords_attributes (die: type_die, decl);
  if (DECL_ORIGINAL_TYPE (decl))
    {
      /* DECL is a regular typedef: its DW_AT_type should point at the
	 original type, not the typedef variant.  */
      type = DECL_ORIGINAL_TYPE (decl);
      if (type == error_mark_node)
	return;

      gcc_assert (type != TREE_TYPE (decl));
      /* Make lookups of the typedef variant type find this DIE.  */
      equate_type_number_to_die (TREE_TYPE (decl), type_die);
    }
  else
    {
      type = TREE_TYPE (decl);
      if (type == error_mark_node)
	return;

      if (is_naming_typedef_decl (TYPE_NAME (type)))
	{
	  /* Here, we are in the case of decl being a typedef naming
	     an anonymous type, e.g:
		 typedef struct {...} foo;
	     In that case TREE_TYPE (decl) is not a typedef variant
	     type and TYPE_NAME of the anonymous type is set to the
	     TYPE_DECL of the typedef.  This construct is emitted by
	     the C++ FE.

	     TYPE is the anonymous struct named by the typedef
	     DECL.  As we need the DW_AT_type attribute of the
	     DW_TAG_typedef to point to the DIE of TYPE, let's
	     generate that DIE right away.  add_type_attribute
	     called below will then pick (via lookup_type_die) that
	     anonymous struct DIE.  */
	  if (!TREE_ASM_WRITTEN (type))
	    gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);

	  /* This is a GNU Extension.  We are adding a
	     DW_AT_linkage_name attribute to the DIE of the
	     anonymous struct TYPE.  The value of that attribute
	     is the name of the typedef decl naming the anonymous
	     struct.  This greatly eases the work of consumers of
	     this debug info.  */
	  add_linkage_name_raw (die: lookup_type_die (type), decl);
	}
    }

  add_type_attribute (object_die: type_die, type, cv_quals: decl_quals (decl), reverse: false,
		      context_die);

  if (is_naming_typedef_decl (decl))
    /* We want that all subsequent calls to lookup_type_die with
       TYPE in argument yield the DW_TAG_typedef we have just
       created.  */
    equate_type_number_to_die (type, type_die);

  add_alignment_attribute (die: type_die, TREE_TYPE (decl));

  add_accessibility_attribute (die: type_die, decl);

  /* Abstract typedefs (e.g. in abstract function instances) get their
     decl mapped to this DIE so concrete instances can refer back to it.  */
  if (DECL_ABSTRACT_P (decl))
    equate_decl_number_to_die (decl, decl_die: type_die);

  if (get_AT (die: type_die, attr_kind: DW_AT_name))
    add_pubtype (decl, die: type_die);
}
| 26694 | |
| 26695 | /* Generate a DIE for a struct, class, enum or union type. */ |
| 26696 | |
static void
gen_tagged_type_die (tree type,
		     dw_die_ref context_die,
		     enum debug_info_usage usage,
		     bool reverse)
{
  if (type == NULL_TREE
      || !is_tagged_type (type))
    return;

  if (TREE_ASM_WRITTEN (type))
    ;
  /* If this is a nested type whose containing class hasn't been written
     out yet, writing it out will cover this one, too.  This does not apply
     to instantiations of member class templates; they need to be added to
     the containing class as they are generated.  FIXME: This hurts the
     idea of combining type decls from multiple TUs, since we can't predict
     what set of template instantiations we'll get.  */
  else if (TYPE_CONTEXT (type)
	   && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
	   && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
    {
      gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);

      if (TREE_ASM_WRITTEN (type))
	return;

      /* If that failed, attach ourselves to the stub.  */
      context_die = lookup_type_die (TYPE_CONTEXT (type));
    }
  else if (TYPE_CONTEXT (type) != NULL_TREE
	   && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
    {
      /* If this type is local to a function that hasn't been written
	 out yet, use a NULL context for now; it will be fixed up in
	 decls_for_scope.  */
      context_die = lookup_decl_die (TYPE_CONTEXT (type));
      /* A declaration DIE doesn't count; nested types need to go in the
	 specification.  */
      if (context_die && is_declaration_die (die: context_die))
	context_die = NULL;
    }
  else
    context_die = declare_in_namespace (type, context_die);

  if (TREE_CODE (type) == ENUMERAL_TYPE)
    {
      /* This might have been written out by the call to
	 declare_in_namespace.  */
      if (!TREE_ASM_WRITTEN (type) || reverse)
	gen_enumeration_type_die (type, context_die, reverse);
    }
  else
    gen_struct_or_union_type_die (type, context_die, usage);

  /* Emit any BTF type-tag annotation DIEs attached to this type.  */
  dw_die_ref die = lookup_type_die (type);
  if (die)
    maybe_gen_btf_type_tag_dies (t: type, target: die);

  /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
     it up if it is ever completed.  gen_*_type_die will set it for us
     when appropriate.  */
}
| 26760 | |
| 26761 | /* Generate a type description DIE. */ |
| 26762 | |
| 26763 | static void |
| 26764 | gen_type_die_with_usage (tree type, dw_die_ref context_die, |
| 26765 | enum debug_info_usage usage, bool reverse) |
| 26766 | { |
| 26767 | struct array_descr_info info; |
| 26768 | |
| 26769 | if (type == NULL_TREE || type == error_mark_node) |
| 26770 | return; |
| 26771 | |
| 26772 | if (flag_checking && type) |
| 26773 | verify_type (t: type); |
| 26774 | |
| 26775 | if (TYPE_NAME (type) != NULL_TREE |
| 26776 | && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL |
| 26777 | && is_redundant_typedef (TYPE_NAME (type)) |
| 26778 | && DECL_ORIGINAL_TYPE (TYPE_NAME (type))) |
| 26779 | /* The DECL of this type is a typedef we don't want to emit debug |
| 26780 | info for but we want debug info for its underlying typedef. |
| 26781 | This can happen for e.g, the injected-class-name of a C++ |
| 26782 | type. */ |
| 26783 | type = DECL_ORIGINAL_TYPE (TYPE_NAME (type)); |
| 26784 | |
| 26785 | /* If TYPE is a typedef type variant, let's generate debug info |
| 26786 | for the parent typedef which TYPE is a type of. */ |
| 26787 | if (typedef_variant_p (type)) |
| 26788 | { |
| 26789 | tree name = TYPE_NAME (type); |
| 26790 | if (TREE_ASM_WRITTEN (name)) |
| 26791 | return; |
| 26792 | |
| 26793 | tree origin = decl_ultimate_origin (decl: name); |
| 26794 | if (origin != NULL && origin != name) |
| 26795 | { |
| 26796 | gen_decl_die (origin, NULL, NULL, context_die); |
| 26797 | return; |
| 26798 | } |
| 26799 | |
| 26800 | /* Prevent broken recursion; we can't hand off to the same type. */ |
| 26801 | gcc_assert (DECL_ORIGINAL_TYPE (name) != type); |
| 26802 | |
| 26803 | /* Give typedefs the right scope. */ |
| 26804 | context_die = scope_die_for (t: type, context_die); |
| 26805 | |
| 26806 | gen_decl_die (name, NULL, NULL, context_die); |
| 26807 | return; |
| 26808 | } |
| 26809 | |
| 26810 | /* If type is an anonymous tagged type named by a typedef, let's |
| 26811 | generate debug info for the typedef. */ |
| 26812 | if (is_naming_typedef_decl (TYPE_NAME (type))) |
| 26813 | { |
| 26814 | /* Give typedefs the right scope. */ |
| 26815 | context_die = scope_die_for (t: type, context_die); |
| 26816 | |
| 26817 | gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die); |
| 26818 | return; |
| 26819 | } |
| 26820 | |
| 26821 | if (lang_hooks.types.get_debug_type) |
| 26822 | { |
| 26823 | tree debug_type = lang_hooks.types.get_debug_type (type); |
| 26824 | |
| 26825 | if (debug_type != NULL_TREE && debug_type != type) |
| 26826 | { |
| 26827 | gen_type_die_with_usage (type: debug_type, context_die, usage, reverse); |
| 26828 | return; |
| 26829 | } |
| 26830 | } |
| 26831 | |
| 26832 | /* We are going to output a DIE to represent the unqualified version |
| 26833 | of this type (i.e. without any const or volatile qualifiers) so |
| 26834 | get the main variant (i.e. the unqualified version) of this type |
| 26835 | now. (Vectors and arrays are special because the debugging info is in the |
| 26836 | cloned type itself. Similarly function/method types can contain extra |
| 26837 | ref-qualification). */ |
| 26838 | if (FUNC_OR_METHOD_TYPE_P (type)) |
| 26839 | { |
| 26840 | /* For function/method types, can't use type_main_variant here, |
| 26841 | because that can have different ref-qualifiers for C++, |
| 26842 | but try to canonicalize. */ |
| 26843 | tree main = TYPE_MAIN_VARIANT (type); |
| 26844 | for (tree t = main; t; t = TYPE_NEXT_VARIANT (t)) |
| 26845 | if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0 |
| 26846 | && check_base_type (cand: t, base: main) |
| 26847 | && check_lang_type (cand: t, base: type)) |
| 26848 | { |
| 26849 | type = t; |
| 26850 | break; |
| 26851 | } |
| 26852 | } |
| 26853 | else if (TREE_CODE (type) != VECTOR_TYPE |
| 26854 | && TREE_CODE (type) != ARRAY_TYPE) |
| 26855 | type = type_main_variant (type); |
| 26856 | |
| 26857 | /* If this is an array type with hidden descriptor, handle it first. */ |
| 26858 | if (!TREE_ASM_WRITTEN (type) |
| 26859 | && lang_hooks.types.get_array_descr_info) |
| 26860 | { |
| 26861 | memset (s: &info, c: 0, n: sizeof (info)); |
| 26862 | if (lang_hooks.types.get_array_descr_info (type, &info)) |
| 26863 | { |
| 26864 | /* Fortran sometimes emits array types with no dimension. */ |
| 26865 | gcc_assert (info.ndimensions >= 0 |
| 26866 | && (info.ndimensions |
| 26867 | <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN)); |
| 26868 | gen_descr_array_type_die (type, info: &info, context_die); |
| 26869 | TREE_ASM_WRITTEN (type) = 1; |
| 26870 | return; |
| 26871 | } |
| 26872 | } |
| 26873 | |
| 26874 | if (TREE_ASM_WRITTEN (type) && !reverse) |
| 26875 | { |
| 26876 | /* Variable-length types may be incomplete even if |
| 26877 | TREE_ASM_WRITTEN. For such types, fall through to |
| 26878 | gen_array_type_die() and possibly fill in |
| 26879 | DW_AT_{upper,lower}_bound attributes. */ |
| 26880 | if ((TREE_CODE (type) != ARRAY_TYPE |
| 26881 | && TREE_CODE (type) != RECORD_TYPE |
| 26882 | && TREE_CODE (type) != UNION_TYPE |
| 26883 | && TREE_CODE (type) != QUAL_UNION_TYPE) |
| 26884 | || !variably_modified_type_p (type, NULL)) |
| 26885 | return; |
| 26886 | } |
| 26887 | |
| 26888 | switch (TREE_CODE (type)) |
| 26889 | { |
| 26890 | case ERROR_MARK: |
| 26891 | break; |
| 26892 | |
| 26893 | case POINTER_TYPE: |
| 26894 | case REFERENCE_TYPE: |
| 26895 | /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This |
| 26896 | ensures that the gen_type_die recursion will terminate even if the |
| 26897 | type is recursive. Recursive types are possible in Ada. */ |
| 26898 | /* ??? We could perhaps do this for all types before the switch |
| 26899 | statement. */ |
| 26900 | TREE_ASM_WRITTEN (type) = 1; |
| 26901 | |
| 26902 | /* For these types, all that is required is that we output a DIE (or a |
| 26903 | set of DIEs) to represent the "basis" type. */ |
| 26904 | gen_type_die_with_usage (TREE_TYPE (type), context_die, |
| 26905 | usage: DINFO_USAGE_IND_USE); |
| 26906 | break; |
| 26907 | |
| 26908 | case OFFSET_TYPE: |
| 26909 | /* This code is used for C++ pointer-to-data-member types. |
| 26910 | Output a description of the relevant class type. */ |
| 26911 | gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die, |
| 26912 | usage: DINFO_USAGE_IND_USE); |
| 26913 | |
| 26914 | /* Output a description of the type of the object pointed to. */ |
| 26915 | gen_type_die_with_usage (TREE_TYPE (type), context_die, |
| 26916 | usage: DINFO_USAGE_IND_USE); |
| 26917 | |
| 26918 | /* Now output a DIE to represent this pointer-to-data-member type |
| 26919 | itself. */ |
| 26920 | gen_ptr_to_mbr_type_die (type, context_die); |
| 26921 | break; |
| 26922 | |
| 26923 | case FUNCTION_TYPE: |
| 26924 | /* Force out return type (in case it wasn't forced out already). */ |
| 26925 | gen_type_die_with_usage (TREE_TYPE (type), context_die, |
| 26926 | usage: DINFO_USAGE_DIR_USE); |
| 26927 | gen_subroutine_type_die (type, context_die); |
| 26928 | break; |
| 26929 | |
| 26930 | case METHOD_TYPE: |
| 26931 | /* Force out return type (in case it wasn't forced out already). */ |
| 26932 | gen_type_die_with_usage (TREE_TYPE (type), context_die, |
| 26933 | usage: DINFO_USAGE_DIR_USE); |
| 26934 | gen_subroutine_type_die (type, context_die); |
| 26935 | break; |
| 26936 | |
| 26937 | case ARRAY_TYPE: |
| 26938 | case VECTOR_TYPE: |
| 26939 | gen_array_type_die (type, context_die); |
| 26940 | break; |
| 26941 | |
| 26942 | case ENUMERAL_TYPE: |
| 26943 | case RECORD_TYPE: |
| 26944 | case UNION_TYPE: |
| 26945 | case QUAL_UNION_TYPE: |
| 26946 | gen_tagged_type_die (type, context_die, usage, reverse); |
| 26947 | return; |
| 26948 | |
| 26949 | case VOID_TYPE: |
| 26950 | case OPAQUE_TYPE: |
| 26951 | case INTEGER_TYPE: |
| 26952 | case REAL_TYPE: |
| 26953 | case FIXED_POINT_TYPE: |
| 26954 | case COMPLEX_TYPE: |
| 26955 | case BOOLEAN_TYPE: |
| 26956 | case BITINT_TYPE: |
| 26957 | /* No DIEs needed for fundamental types. */ |
| 26958 | break; |
| 26959 | |
| 26960 | case NULLPTR_TYPE: |
| 26961 | case LANG_TYPE: |
| 26962 | unspecified_type: |
| 26963 | /* Just use DW_TAG_unspecified_type. */ |
| 26964 | { |
| 26965 | dw_die_ref type_die = lookup_type_die (type); |
| 26966 | if (type_die == NULL) |
| 26967 | { |
| 26968 | tree name = TYPE_IDENTIFIER (type); |
| 26969 | type_die = new_die (tag_value: DW_TAG_unspecified_type, parent_die: comp_unit_die (), |
| 26970 | t: type); |
| 26971 | add_name_attribute (die: type_die, IDENTIFIER_POINTER (name)); |
| 26972 | equate_type_number_to_die (type, type_die); |
| 26973 | } |
| 26974 | } |
| 26975 | break; |
| 26976 | |
| 26977 | default: |
| 26978 | if (is_cxx_auto (type)) |
| 26979 | { |
| 26980 | tree name = TYPE_IDENTIFIER (type); |
| 26981 | dw_die_ref *die = (name == get_identifier ("auto" ) |
| 26982 | ? &auto_die : &decltype_auto_die); |
| 26983 | if (!*die) |
| 26984 | { |
| 26985 | *die = new_die (tag_value: DW_TAG_unspecified_type, |
| 26986 | parent_die: comp_unit_die (), NULL_TREE); |
| 26987 | add_name_attribute (die: *die, IDENTIFIER_POINTER (name)); |
| 26988 | } |
| 26989 | equate_type_number_to_die (type, type_die: *die); |
| 26990 | break; |
| 26991 | } |
| 26992 | if (is_cxx () |
| 26993 | && TREE_CODE (type) >= LAST_AND_UNUSED_TREE_CODE |
| 26994 | && TYPE_P (type) |
| 26995 | && TYPE_IDENTIFIER (type)) |
| 26996 | goto unspecified_type; |
| 26997 | gcc_unreachable (); |
| 26998 | } |
| 26999 | |
| 27000 | TREE_ASM_WRITTEN (type) = 1; |
| 27001 | } |
| 27002 | |
| 27003 | static void |
| 27004 | gen_type_die (tree type, dw_die_ref context_die, bool reverse) |
| 27005 | { |
| 27006 | if (type != error_mark_node) |
| 27007 | { |
| 27008 | gen_type_die_with_usage (type, context_die, usage: DINFO_USAGE_DIR_USE, reverse); |
| 27009 | if (flag_checking) |
| 27010 | { |
| 27011 | dw_die_ref die = lookup_type_die (type); |
| 27012 | if (die) |
| 27013 | check_die (die); |
| 27014 | } |
| 27015 | } |
| 27016 | } |
| 27017 | |
/* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
   things which are local to the given block.  STMT is the BLOCK tree node;
   CONTEXT_DIE is the DIE that receives the generated children.  */

static void
gen_block_die (tree stmt, dw_die_ref context_die)
{
  /* Nonzero if this block needs a DIE of its own (a lexical block or an
     inlined subroutine DIE) rather than just DIEs for its contents.  */
  int must_output_die = 0;
  bool inlined_func;

  /* Ignore blocks that are NULL.  */
  if (stmt == NULL_TREE)
    return;

  inlined_func = inlined_function_outer_scope_p (block: stmt);

  /* If the block is one fragment of a non-contiguous block, do not
     process the variables, since they will have been done by the
     origin block.  Do process subblocks.  */
  if (BLOCK_FRAGMENT_ORIGIN (stmt))
    {
      tree sub;

      for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
	gen_block_die (stmt: sub, context_die);

      return;
    }

  /* Determine if we need to output any Dwarf DIEs at all to represent this
     block.  */
  if (inlined_func)
    /* The outer scopes for inlinings *must* always be represented.  We
       generate DW_TAG_inlined_subroutine DIEs for them.  (See below.)  */
    must_output_die = 1;
  else if (lookup_block_die (block: stmt))
    /* If we already have a DIE then it was filled early.  Meanwhile
       we might have pruned all BLOCK_VARS as optimized out but we
       still want to generate high/low PC attributes so output it.  */
    must_output_die = 1;
  else if (TREE_USED (stmt)
	   || TREE_ASM_WRITTEN (stmt))
    {
      /* Determine if this block directly contains any "significant"
	 local declarations which we will need to output DIEs for.  */
      if (debug_info_level > DINFO_LEVEL_TERSE)
	{
	  /* We are not in terse mode so any local declaration that
	     is not ignored for debug purposes counts as being a
	     "significant" one.  */
	  if (BLOCK_NUM_NONLOCALIZED_VARS (stmt))
	    must_output_die = 1;
	  else
	    for (tree var = BLOCK_VARS (stmt); var; var = DECL_CHAIN (var))
	      if (!DECL_IGNORED_P (var))
		{
		  must_output_die = 1;
		  break;
		}
	}
      else if (!dwarf2out_ignore_block (stmt))
	must_output_die = 1;
    }

  /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
     DIE for any block which contains no significant local declarations at
     all.  Rather, in such cases we just call `decls_for_scope' so that any
     needed Dwarf info for any sub-blocks will get properly generated.  Note
     that in terse mode, our definition of what constitutes a "significant"
     local declaration gets restricted to include only inlined function
     instances and local (nested) function definitions.  */
  if (must_output_die)
    {
      if (inlined_func)
	gen_inlined_subroutine_die (stmt, context_die);
      else
	gen_lexical_block_die (stmt, context_die);
    }
  else
    decls_for_scope (stmt, context_die);
}
| 27098 | |
/* Process variable DECL (or variable with origin ORIGIN) within
   block STMT and add it to CONTEXT_DIE.  DECL may be NULL, in which
   case ORIGIN is used instead (this happens for nonlocalized
   variables that only exist through their abstract origin).  */
static void
process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
{
  dw_die_ref die;
  tree decl_or_origin = decl ? decl : origin;

  /* Look up any DIE that was already created for the decl (or, for a
     stub TYPE_DECL, for its type) in an earlier pass.  */
  if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
    die = lookup_decl_die (decl: decl_or_origin);
  else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
    {
      if (TYPE_DECL_IS_STUB (decl_or_origin))
	die = lookup_type_die (TREE_TYPE (decl_or_origin));
      else
	die = lookup_decl_die (decl: decl_or_origin);
      /* Avoid re-creating the DIE late if it was optimized as unused early.  */
      if (! die && ! early_dwarf)
	return;
    }
  else
    die = NULL;

  /* Avoid creating DIEs for local typedefs and concrete static variables that
     will only be pruned later.  */
  if ((origin || decl_ultimate_origin (decl))
      && (TREE_CODE (decl_or_origin) == TYPE_DECL
	  || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
    {
      origin = decl_ultimate_origin (decl: decl_or_origin);
      /* Instead of emitting a fresh DIE, map the concrete variable to
	 the DIE of its ultimate abstract origin, if one exists.  */
      if (decl && VAR_P (decl) && die != NULL)
	{
	  die = lookup_decl_die (decl: origin);
	  if (die != NULL)
	    equate_decl_number_to_die (decl, decl_die: die);
	}
      return;
    }

  /* A DIE created early without a parent belongs under this scope.  */
  if (die != NULL && die->die_parent == NULL)
    add_child_die (die: context_die, child_die: die);

  if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
    {
      /* Imported modules/declarations are only emitted during the
	 early-dwarf pass.  */
      if (early_dwarf)
	dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
					     stmt, context_die);
    }
  else
    {
      if (decl && DECL_P (decl))
	{
	  die = lookup_decl_die (decl);

	  /* Early created DIEs do not have a parent as the decls refer
	     to the function as DECL_CONTEXT rather than the BLOCK.  */
	  if (die && die->die_parent == NULL)
	    {
	      gcc_assert (in_lto_p);
	      add_child_die (die: context_die, child_die: die);
	    }
	}

      gen_decl_die (decl, origin, NULL, context_die);
    }
}
| 27165 | |
/* Generate all of the decls declared within a given scope and (recursively)
   all of its sub-blocks.  STMT is the BLOCK node; CONTEXT_DIE receives the
   generated DIEs; RECURSE controls whether sub-blocks are processed too.  */

static void
decls_for_scope (tree stmt, dw_die_ref context_die, bool recurse)
{
  tree decl;
  unsigned int i;
  tree subblocks;

  /* Ignore NULL blocks.  */
  if (stmt == NULL_TREE)
    return;

  /* Output the DIEs to represent all of the data objects and typedefs
     declared directly within this block but not within any nested
     sub-blocks.  Also, nested function and tag DIEs have been
     generated with a parent of NULL; fix that up now.  We don't
     have to do this if we're at -g1.  */
  if (debug_info_level > DINFO_LEVEL_TERSE)
    {
      for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
	process_scope_var (stmt, decl, NULL_TREE, context_die);
      /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
	 origin - avoid doing this twice as we have no good way to see
	 if we've done it once already.  */
      if (! early_dwarf)
	for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
	  {
	    decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
	    if (decl == current_function_decl)
	      /* Ignore declarations of the current function, while they
		 are declarations, gen_subprogram_die would treat them
		 as definitions again, because they are equal to
		 current_function_decl and endlessly recurse.  */;
	    else if (TREE_CODE (decl) == FUNCTION_DECL)
	      process_scope_var (stmt, decl, NULL_TREE, context_die);
	    else
	      process_scope_var (stmt, NULL_TREE, origin: decl, context_die);
	  }
    }

  /* Even if we're at -g1, we need to process the subblocks in order to get
     inlined call information.  */

  /* Output the DIEs to represent all sub-blocks (and the items declared
     therein) of this block.  */
  if (recurse)
    for (subblocks = BLOCK_SUBBLOCKS (stmt);
	 subblocks != NULL;
	 subblocks = BLOCK_CHAIN (subblocks))
      gen_block_die (stmt: subblocks, context_die);
}
| 27219 | |
| 27220 | /* Is this a typedef we can avoid emitting? */ |
| 27221 | |
| 27222 | static bool |
| 27223 | is_redundant_typedef (const_tree decl) |
| 27224 | { |
| 27225 | if (TYPE_DECL_IS_STUB (decl)) |
| 27226 | return true; |
| 27227 | |
| 27228 | if (DECL_ARTIFICIAL (decl) |
| 27229 | && DECL_CONTEXT (decl) |
| 27230 | && is_tagged_type (DECL_CONTEXT (decl)) |
| 27231 | && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL |
| 27232 | && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl)))) |
| 27233 | /* Also ignore the artificial member typedef for the class name. */ |
| 27234 | return true; |
| 27235 | |
| 27236 | return false; |
| 27237 | } |
| 27238 | |
| 27239 | /* Return TRUE if TYPE is a typedef that names a type for linkage |
   purposes.  This kind of typedef is produced by the C++ FE for
| 27241 | constructs like: |
| 27242 | |
| 27243 | typedef struct {...} foo; |
| 27244 | |
| 27245 | In that case, there is no typedef variant type produced for foo. |
| 27246 | Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous |
| 27247 | struct type. */ |
| 27248 | |
| 27249 | static bool |
| 27250 | is_naming_typedef_decl (const_tree decl) |
| 27251 | { |
| 27252 | if (decl == NULL_TREE |
| 27253 | || TREE_CODE (decl) != TYPE_DECL |
| 27254 | || DECL_NAMELESS (decl) |
| 27255 | || !is_tagged_type (TREE_TYPE (decl)) |
| 27256 | || DECL_IS_UNDECLARED_BUILTIN (decl) |
| 27257 | || is_redundant_typedef (decl) |
| 27258 | /* It looks like Ada produces TYPE_DECLs that are very similar |
| 27259 | to C++ naming typedefs but that have different |
| 27260 | semantics. Let's be specific to c++ for now. */ |
| 27261 | || !is_cxx (decl)) |
| 27262 | return false; |
| 27263 | |
| 27264 | return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE |
| 27265 | && TYPE_NAME (TREE_TYPE (decl)) == decl |
| 27266 | && (TYPE_STUB_DECL (TREE_TYPE (decl)) |
| 27267 | != TYPE_NAME (TREE_TYPE (decl)))); |
| 27268 | } |
| 27269 | |
| 27270 | /* Looks up the DIE for a context. */ |
| 27271 | |
| 27272 | static inline dw_die_ref |
| 27273 | lookup_context_die (tree context) |
| 27274 | { |
| 27275 | if (context) |
| 27276 | { |
| 27277 | /* Find die that represents this context. */ |
| 27278 | if (TYPE_P (context)) |
| 27279 | { |
| 27280 | context = TYPE_MAIN_VARIANT (context); |
| 27281 | dw_die_ref ctx = lookup_type_die (type: context); |
| 27282 | if (!ctx) |
| 27283 | return NULL; |
| 27284 | return strip_naming_typedef (type: context, type_die: ctx); |
| 27285 | } |
| 27286 | else |
| 27287 | return lookup_decl_die (decl: context); |
| 27288 | } |
| 27289 | return comp_unit_die (); |
| 27290 | } |
| 27291 | |
| 27292 | /* Returns the DIE for a context. */ |
| 27293 | |
| 27294 | static inline dw_die_ref |
| 27295 | get_context_die (tree context) |
| 27296 | { |
| 27297 | if (context) |
| 27298 | { |
| 27299 | /* Find die that represents this context. */ |
| 27300 | if (TYPE_P (context)) |
| 27301 | { |
| 27302 | context = TYPE_MAIN_VARIANT (context); |
| 27303 | return strip_naming_typedef (type: context, type_die: force_type_die (context)); |
| 27304 | } |
| 27305 | else |
| 27306 | return force_decl_die (context); |
| 27307 | } |
| 27308 | return comp_unit_die (); |
| 27309 | } |
| 27310 | |
/* Returns the DIE for DECL, generating it (along with the DIEs for
   any enclosing contexts) if it does not exist yet.  A DIE will
   always be returned.  */

static dw_die_ref
force_decl_die (tree decl)
{
  dw_die_ref decl_die;
  unsigned saved_external_flag;
  tree save_fn = NULL_TREE;
  decl_die = lookup_decl_die (decl);
  if (!decl_die)
    {
      dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));

      /* Emitting the context above may already have created DECL's
	 DIE as a side effect; check again before generating one.  */
      decl_die = lookup_decl_die (decl);
      if (decl_die)
	return decl_die;

      switch (TREE_CODE (decl))
	{
	case FUNCTION_DECL:
	  /* Clear current_function_decl, so that gen_subprogram_die thinks
	     that this is a declaration.  At this point, we just want to force
	     declaration die.  */
	  save_fn = current_function_decl;
	  current_function_decl = NULL_TREE;
	  gen_subprogram_die (decl, context_die);
	  current_function_decl = save_fn;
	  break;

	case VAR_DECL:
	  /* Set external flag to force declaration die.  Restore it after
	     gen_decl_die() call.  */
	  saved_external_flag = DECL_EXTERNAL (decl);
	  DECL_EXTERNAL (decl) = 1;
	  gen_decl_die (decl, NULL, NULL, context_die);
	  DECL_EXTERNAL (decl) = saved_external_flag;
	  break;

	case NAMESPACE_DECL:
	  if (dwarf_version >= 3 || !dwarf_strict)
	    dwarf2out_decl (decl);
	  else
	    /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace.  */
	    decl_die = comp_unit_die ();
	  break;

	case CONST_DECL:
	  /* Enumerators shouldn't need force_decl_die.  */
	  gcc_assert (DECL_CONTEXT (decl) == NULL_TREE
		      || TREE_CODE (DECL_CONTEXT (decl)) != ENUMERAL_TYPE);
	  gen_decl_die (decl, NULL, NULL, context_die);
	  break;

	case TRANSLATION_UNIT_DECL:
	  decl_die = comp_unit_die ();
	  break;

	default:
	  gcc_unreachable ();
	}

      /* We should be able to find the DIE now.  */
      if (!decl_die)
	decl_die = lookup_decl_die (decl);
      gcc_assert (decl_die);
    }

  return decl_die;
}
| 27380 | |
| 27381 | /* Returns the DIE for TYPE, that must not be a base type. A DIE is |
| 27382 | always returned. */ |
| 27383 | |
| 27384 | static dw_die_ref |
| 27385 | force_type_die (tree type) |
| 27386 | { |
| 27387 | dw_die_ref type_die; |
| 27388 | |
| 27389 | type_die = lookup_type_die (type); |
| 27390 | if (!type_die) |
| 27391 | { |
| 27392 | dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type)); |
| 27393 | |
| 27394 | type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type), |
| 27395 | TYPE_ATTRIBUTES (type), |
| 27396 | reverse: false, context_die); |
| 27397 | gcc_assert (type_die); |
| 27398 | } |
| 27399 | return type_die; |
| 27400 | } |
| 27401 | |
| 27402 | /* Force out any required namespaces to be able to output DECL, |
| 27403 | and return the new context_die for it, if it's changed. */ |
| 27404 | |
| 27405 | static dw_die_ref |
| 27406 | setup_namespace_context (tree thing, dw_die_ref context_die) |
| 27407 | { |
| 27408 | tree context = (DECL_P (thing) |
| 27409 | ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing)); |
| 27410 | if (context && TREE_CODE (context) == NAMESPACE_DECL) |
| 27411 | /* Force out the namespace. */ |
| 27412 | context_die = force_decl_die (decl: context); |
| 27413 | |
| 27414 | return context_die; |
| 27415 | } |
| 27416 | |
/* Emit a declaration DIE for THING (which is either a DECL or a tagged
   type) within its namespace, if appropriate.

   For compatibility with older debuggers, namespace DIEs only contain
   declarations; all definitions are emitted at CU scope, with
   DW_AT_specification pointing to the declaration (like with class
   members).  Returns the DIE under which the caller should emit the
   definition: normally CONTEXT_DIE, but the namespace/module DIE
   itself for Fortran and D.  */

static dw_die_ref
declare_in_namespace (tree thing, dw_die_ref context_die)
{
  dw_die_ref ns_context;

  /* In terse mode no declaration DIEs are wanted at all.  */
  if (debug_info_level <= DINFO_LEVEL_TERSE)
    return context_die;

  /* External declarations in the local scope only need to be emitted
     once, not once in the namespace and once in the scope.

     This avoids declaring the `extern' below in the
     namespace DIE as well as in the innermost scope:

       namespace S
       {
	 int i=5;
	 int foo()
	 {
	   int i=8;
	   extern int i;
	   return i;
	 }
       }
  */
  if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
    return context_die;

  /* If this decl is from an inlined function, then don't try to emit it in its
     namespace, as we will get confused.  It would have already been emitted
     when the abstract instance of the inline function was emitted anyways.  */
  if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
    return context_die;

  ns_context = setup_namespace_context (thing, context_die);

  if (ns_context != context_die)
    {
      /* Fortran and D emit the definition directly inside the module
	 DIE rather than at CU scope.  */
      if (is_fortran () || is_dlang ())
	return ns_context;
      if (DECL_P (thing))
	gen_decl_die (thing, NULL, NULL, ns_context);
      else
	gen_type_die (type: thing, context_die: ns_context);
    }
  return context_die;
}
| 27472 | |
/* Generate a DIE for a namespace or namespace alias DECL, adding it
   under CONTEXT_DIE (or under the enclosing namespace's DIE when one
   is forced out).  */

static void
gen_namespace_die (tree decl, dw_die_ref context_die)
{
  dw_die_ref namespace_die;

  /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
     they are an alias of.  */
  if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
    {
      /* Output a real namespace or module.  */
      context_die = setup_namespace_context (thing: decl, context_die: comp_unit_die ());
      /* Fortran, D and Ada group declarations into modules rather than
	 C++-style namespaces.  */
      namespace_die = new_die (tag_value: is_fortran () || is_dlang () || is_ada ()
			       ? DW_TAG_module : DW_TAG_namespace,
			       parent_die: context_die, t: decl);
      /* For Fortran modules defined in different CU don't add src coords.  */
      if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
	{
	  const char *name = dwarf2_name (decl, scope: 0);
	  if (name)
	    add_name_attribute (die: namespace_die, name_string: name);
	}
      else
	add_name_and_src_coords_attributes (die: namespace_die, decl);
      if (DECL_EXTERNAL (decl))
	add_AT_flag (die: namespace_die, attr_kind: DW_AT_declaration, flag: 1);
      equate_decl_number_to_die (decl, decl_die: namespace_die);
    }
  else
    {
      /* Output a namespace alias.  */

      /* Force out the namespace we are an alias of, if necessary.  */
      dw_die_ref origin_die
	= force_decl_die (DECL_ABSTRACT_ORIGIN (decl));

      if (DECL_FILE_SCOPE_P (decl)
	  || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
	context_die = setup_namespace_context (thing: decl, context_die: comp_unit_die ());
      /* Now create the namespace alias DIE.  */
      namespace_die = new_die (tag_value: DW_TAG_imported_declaration, parent_die: context_die, t: decl);
      add_name_and_src_coords_attributes (die: namespace_die, decl);
      add_AT_die_ref (die: namespace_die, attr_kind: DW_AT_import, targ_die: origin_die);
      equate_decl_number_to_die (decl, decl_die: namespace_die);
    }
  /* DW_AT_export_symbols (e.g. for inline namespaces) is DWARF 5.  */
  if ((dwarf_version >= 5 || !dwarf_strict)
      && lang_hooks.decls.decl_dwarf_attribute (decl,
						DW_AT_export_symbols) == 1)
    add_AT_flag (die: namespace_die, attr_kind: DW_AT_export_symbols, flag: 1);

  /* Bypass dwarf2_name's check for DECL_NAMELESS.  */
  if (want_pubnames ())
    add_pubname_string (str: lang_hooks.dwarf_name (decl, 1), die: namespace_die);
}
| 27528 | |
| 27529 | /* Generate Dwarf debug information for a decl described by DECL. |
| 27530 | The return value is currently only meaningful for PARM_DECLs, |
| 27531 | for all other decls it returns NULL. |
| 27532 | |
| 27533 | If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT. |
| 27534 | It can be NULL otherwise. */ |
| 27535 | |
| 27536 | static dw_die_ref |
| 27537 | gen_decl_die (tree decl, tree origin, struct vlr_context *ctx, |
| 27538 | dw_die_ref context_die) |
| 27539 | { |
| 27540 | tree decl_or_origin = decl ? decl : origin; |
| 27541 | tree class_origin = NULL, ultimate_origin; |
| 27542 | |
| 27543 | if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin)) |
| 27544 | return NULL; |
| 27545 | |
| 27546 | switch (TREE_CODE (decl_or_origin)) |
| 27547 | { |
| 27548 | case ERROR_MARK: |
| 27549 | break; |
| 27550 | |
| 27551 | case CONST_DECL: |
| 27552 | if (!is_fortran () && !is_ada () && !is_dlang ()) |
| 27553 | { |
| 27554 | /* The individual enumerators of an enum type get output when we output |
| 27555 | the Dwarf representation of the relevant enum type itself. */ |
| 27556 | break; |
| 27557 | } |
| 27558 | |
| 27559 | /* Emit its type. */ |
| 27560 | gen_type_die (TREE_TYPE (decl), context_die); |
| 27561 | |
| 27562 | /* And its containing namespace. */ |
| 27563 | context_die = declare_in_namespace (thing: decl, context_die); |
| 27564 | |
| 27565 | gen_const_die (decl, context_die); |
| 27566 | break; |
| 27567 | |
| 27568 | case FUNCTION_DECL: |
| 27569 | #if 0 |
| 27570 | /* FIXME */ |
| 27571 | /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN |
| 27572 | on local redeclarations of global functions. That seems broken. */ |
| 27573 | if (current_function_decl != decl) |
| 27574 | /* This is only a declaration. */; |
| 27575 | #endif |
| 27576 | |
| 27577 | /* We should have abstract copies already and should not generate |
| 27578 | stray type DIEs in late LTO dumping. */ |
| 27579 | if (! early_dwarf) |
| 27580 | ; |
| 27581 | |
| 27582 | /* If we're emitting a clone, emit info for the abstract instance. */ |
| 27583 | else if (origin || DECL_ORIGIN (decl) != decl) |
| 27584 | dwarf2out_abstract_function (decl: origin |
| 27585 | ? DECL_ORIGIN (origin) |
| 27586 | : DECL_ABSTRACT_ORIGIN (decl)); |
| 27587 | |
| 27588 | /* If we're emitting a possibly inlined function emit it as |
| 27589 | abstract instance. */ |
| 27590 | else if (cgraph_function_possibly_inlined_p (decl) |
| 27591 | && ! DECL_ABSTRACT_P (decl) |
| 27592 | && ! class_or_namespace_scope_p (context_die) |
| 27593 | /* dwarf2out_abstract_function won't emit a die if this is just |
| 27594 | a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in |
| 27595 | that case, because that works only if we have a die. */ |
| 27596 | && DECL_INITIAL (decl) != NULL_TREE) |
| 27597 | dwarf2out_abstract_function (decl); |
| 27598 | |
| 27599 | /* Otherwise we're emitting the primary DIE for this decl. */ |
| 27600 | else if (debug_info_level > DINFO_LEVEL_TERSE) |
| 27601 | { |
| 27602 | /* Before we describe the FUNCTION_DECL itself, make sure that we |
| 27603 | have its containing type. */ |
| 27604 | if (!origin) |
| 27605 | origin = decl_class_context (decl); |
| 27606 | if (origin != NULL_TREE) |
| 27607 | gen_type_die (type: origin, context_die); |
| 27608 | |
| 27609 | /* And its return type. */ |
| 27610 | gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die); |
| 27611 | |
| 27612 | /* And its virtual context. */ |
| 27613 | if (DECL_VINDEX (decl) != NULL_TREE) |
| 27614 | gen_type_die (DECL_CONTEXT (decl), context_die); |
| 27615 | |
| 27616 | /* Make sure we have a member DIE for decl. */ |
| 27617 | if (origin != NULL_TREE) |
| 27618 | gen_type_die_for_member (type: origin, member: decl, context_die); |
| 27619 | |
| 27620 | /* And its containing namespace. */ |
| 27621 | context_die = declare_in_namespace (thing: decl, context_die); |
| 27622 | } |
| 27623 | |
| 27624 | /* Now output a DIE to represent the function itself. */ |
| 27625 | if (decl) |
| 27626 | gen_subprogram_die (decl, context_die); |
| 27627 | break; |
| 27628 | |
| 27629 | case TYPE_DECL: |
| 27630 | /* If we are in terse mode, don't generate any DIEs to represent any |
| 27631 | actual typedefs. */ |
| 27632 | if (debug_info_level <= DINFO_LEVEL_TERSE) |
| 27633 | break; |
| 27634 | |
| 27635 | /* In the special case of a TYPE_DECL node representing the declaration |
| 27636 | of some type tag, if the given TYPE_DECL is marked as having been |
| 27637 | instantiated from some other (original) TYPE_DECL node (e.g. one which |
| 27638 | was generated within the original definition of an inline function) we |
| 27639 | used to generate a special (abbreviated) DW_TAG_structure_type, |
| 27640 | DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing |
| 27641 | should be actually referencing those DIEs, as variable DIEs with that |
| 27642 | type would be emitted already in the abstract origin, so it was always |
| 27643 | removed during unused type pruning. Don't add anything in this |
| 27644 | case. */ |
| 27645 | if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE) |
| 27646 | break; |
| 27647 | |
| 27648 | if (is_redundant_typedef (decl)) |
| 27649 | gen_type_die (TREE_TYPE (decl), context_die); |
| 27650 | else |
| 27651 | /* Output a DIE to represent the typedef itself. */ |
| 27652 | gen_typedef_die (decl, context_die); |
| 27653 | break; |
| 27654 | |
| 27655 | case LABEL_DECL: |
| 27656 | if (debug_info_level >= DINFO_LEVEL_NORMAL) |
| 27657 | gen_label_die (decl, context_die); |
| 27658 | break; |
| 27659 | |
| 27660 | case VAR_DECL: |
| 27661 | case RESULT_DECL: |
| 27662 | /* If we are in terse mode, don't generate any DIEs to represent any |
| 27663 | variable declarations or definitions unless it is external. */ |
| 27664 | if (debug_info_level < DINFO_LEVEL_TERSE |
| 27665 | || (debug_info_level == DINFO_LEVEL_TERSE |
| 27666 | && !TREE_PUBLIC (decl_or_origin))) |
| 27667 | break; |
| 27668 | |
| 27669 | if (debug_info_level > DINFO_LEVEL_TERSE) |
| 27670 | { |
| 27671 | /* Avoid generating stray type DIEs during late dwarf dumping. |
| 27672 | All types have been dumped early. */ |
| 27673 | if (early_dwarf |
| 27674 | /* ??? But in LTRANS we cannot annotate early created variably |
| 27675 | modified type DIEs without copying them and adjusting all |
| 27676 | references to them. Dump them again as happens for inlining |
| 27677 | which copies both the decl and the types. */ |
| 27678 | /* ??? And even non-LTO needs to re-visit type DIEs to fill |
| 27679 | in VLA bound information for example. */ |
| 27680 | || (decl && variably_modified_type_p (TREE_TYPE (decl), |
| 27681 | current_function_decl))) |
| 27682 | { |
| 27683 | /* Output any DIEs that are needed to specify the type of this data |
| 27684 | object. */ |
| 27685 | if (decl_by_reference_p (decl: decl_or_origin)) |
| 27686 | gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die); |
| 27687 | else |
| 27688 | gen_type_die (TREE_TYPE (decl_or_origin), context_die); |
| 27689 | } |
| 27690 | |
| 27691 | if (early_dwarf) |
| 27692 | { |
| 27693 | /* And its containing type. */ |
| 27694 | class_origin = decl_class_context (decl: decl_or_origin); |
| 27695 | if (class_origin != NULL_TREE) |
| 27696 | gen_type_die_for_member (type: class_origin, member: decl_or_origin, context_die); |
| 27697 | |
| 27698 | /* And its containing namespace. */ |
| 27699 | context_die = declare_in_namespace (thing: decl_or_origin, context_die); |
| 27700 | } |
| 27701 | } |
| 27702 | |
| 27703 | /* Now output the DIE to represent the data object itself. This gets |
| 27704 | complicated because of the possibility that the VAR_DECL really |
| 27705 | represents an inlined instance of a formal parameter for an inline |
| 27706 | function. */ |
| 27707 | ultimate_origin = decl_ultimate_origin (decl: decl_or_origin); |
| 27708 | if (ultimate_origin != NULL_TREE |
| 27709 | && TREE_CODE (ultimate_origin) == PARM_DECL) |
| 27710 | gen_formal_parameter_die (node: decl, origin, |
| 27711 | emit_name_p: true /* Emit name attribute. */, |
| 27712 | context_die); |
| 27713 | else |
| 27714 | gen_variable_die (decl, origin, context_die); |
| 27715 | break; |
| 27716 | |
| 27717 | case FIELD_DECL: |
| 27718 | gcc_assert (ctx != NULL && ctx->struct_type != NULL); |
| 27719 | /* Ignore the nameless fields that are used to skip bits but handle C++ |
| 27720 | anonymous unions and structs. */ |
| 27721 | if (DECL_NAME (decl) != NULL_TREE |
| 27722 | || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE |
| 27723 | || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE) |
| 27724 | { |
| 27725 | gen_type_die (type: member_declared_type (member: decl), context_die); |
| 27726 | gen_field_die (decl, ctx, context_die); |
| 27727 | } |
| 27728 | break; |
| 27729 | |
| 27730 | case PARM_DECL: |
| 27731 | /* Avoid generating stray type DIEs during late dwarf dumping. |
| 27732 | All types have been dumped early. */ |
| 27733 | if (early_dwarf |
| 27734 | /* ??? But in LTRANS we cannot annotate early created variably |
| 27735 | modified type DIEs without copying them and adjusting all |
| 27736 | references to them. Dump them again as happens for inlining |
| 27737 | which copies both the decl and the types. */ |
| 27738 | /* ??? And even non-LTO needs to re-visit type DIEs to fill |
| 27739 | in VLA bound information for example. */ |
| 27740 | || (decl && variably_modified_type_p (TREE_TYPE (decl), |
| 27741 | current_function_decl))) |
| 27742 | { |
| 27743 | if (DECL_BY_REFERENCE (decl_or_origin)) |
| 27744 | gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die); |
| 27745 | else |
| 27746 | gen_type_die (TREE_TYPE (decl_or_origin), context_die); |
| 27747 | } |
| 27748 | return gen_formal_parameter_die (node: decl, origin, |
| 27749 | emit_name_p: true /* Emit name attribute. */, |
| 27750 | context_die); |
| 27751 | |
| 27752 | case NAMESPACE_DECL: |
| 27753 | if (dwarf_version >= 3 || !dwarf_strict) |
| 27754 | gen_namespace_die (decl, context_die); |
| 27755 | break; |
| 27756 | |
| 27757 | case IMPORTED_DECL: |
| 27758 | dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl), |
| 27759 | DECL_CONTEXT (decl), context_die); |
| 27760 | break; |
| 27761 | |
| 27762 | case NAMELIST_DECL: |
| 27763 | gen_namelist_decl (DECL_NAME (decl), context_die, |
| 27764 | NAMELIST_DECL_ASSOCIATED_DECL (decl)); |
| 27765 | break; |
| 27766 | |
| 27767 | default: |
| 27768 | /* Probably some frontend-internal decl. Assume we don't care. */ |
| 27769 | gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES); |
| 27770 | break; |
| 27771 | } |
| 27772 | |
| 27773 | maybe_gen_btf_decl_tag_dies (t: decl_or_origin, |
| 27774 | target: lookup_decl_die (decl: decl_or_origin)); |
| 27775 | |
| 27776 | return NULL; |
| 27777 | } |
| 27778 | |
/* Output initial debug information for global DECL.  Called at the
   end of the parsing process.

   This is the initial debug generation process.  As such, the DIEs
   generated may be incomplete.  A later debug generation pass
   (dwarf2out_late_global_decl) will augment the information generated
   in this pass (e.g., with complete location info).  */

static void
dwarf2out_early_global_decl (tree decl)
{
  /* Scoped object: flag that we are in the early DWARF generation
     phase for the duration of this function.  */
  set_early_dwarf s;

  /* gen_decl_die() will set DECL_ABSTRACT because
     cgraph_function_possibly_inlined_p() returns true.  This is in
     turn will cause DW_AT_inline attributes to be set.

     This happens because at early dwarf generation, there is no
     cgraph information, causing cgraph_function_possibly_inlined_p()
     to return true.  Trick cgraph_function_possibly_inlined_p()
     while we generate dwarf early.  */
  bool save = symtab->global_info_ready;
  symtab->global_info_ready = true;

  /* We don't handle TYPE_DECLs.  If required, they'll be reached via
     other DECLs and they can point to template types or other things
     that dwarf2out can't handle when done via dwarf2out_decl.  */
  if (TREE_CODE (decl) != TYPE_DECL
      && TREE_CODE (decl) != PARM_DECL)
    {
      if (TREE_CODE (decl) == FUNCTION_DECL)
	{
	  /* The recursive calls below overwrite current_function_decl;
	     remember it so it can be restored at the end.  */
	  tree save_fndecl = current_function_decl;

	  /* For nested functions, make sure we have DIEs for the parents first
	     so that all nested DIEs are generated at the proper scope in the
	     first shot.  */
	  tree context = decl_function_context (decl);
	  if (context != NULL)
	    {
	      dw_die_ref context_die = lookup_decl_die (decl: context);
	      current_function_decl = context;

	      /* Avoid emitting DIEs multiple times, but still process CONTEXT
		 enough so that it lands in its own context.  This avoids type
		 pruning issues later on.  */
	      if (context_die == NULL || is_declaration_die (die: context_die))
		dwarf2out_early_global_decl (decl: context);
	    }

	  /* Emit an abstract origin of a function first.  This happens
	     with C++ constructor clones for example and makes
	     dwarf2out_abstract_function happy which requires the early
	     DIE of the abstract instance to be present.  */
	  tree origin = DECL_ABSTRACT_ORIGIN (decl);
	  dw_die_ref origin_die;
	  if (origin != NULL
	      /* Do not emit the DIE multiple times but make sure to
		 process it fully here in case we just saw a declaration.  */
	      && ((origin_die = lookup_decl_die (decl: origin)) == NULL
		  || is_declaration_die (die: origin_die)))
	    {
	      current_function_decl = origin;
	      dwarf2out_decl (origin);
	    }

	  /* Emit the DIE for decl but avoid doing that multiple times.  */
	  dw_die_ref old_die;
	  if ((old_die = lookup_decl_die (decl)) == NULL
	      || is_declaration_die (die: old_die))
	    {
	      current_function_decl = decl;
	      dwarf2out_decl (decl);
	    }

	  current_function_decl = save_fndecl;
	}
      else
	dwarf2out_decl (decl);
    }
  /* Undo the cgraph trickery from above.  */
  symtab->global_info_ready = save;
}
| 27861 | |
| 27862 | /* Return whether EXPR is an expression with the following pattern: |
| 27863 | INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */ |
| 27864 | |
| 27865 | static bool |
| 27866 | is_trivial_indirect_ref (tree expr) |
| 27867 | { |
| 27868 | if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF) |
| 27869 | return false; |
| 27870 | |
| 27871 | tree nop = TREE_OPERAND (expr, 0); |
| 27872 | if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR) |
| 27873 | return false; |
| 27874 | |
| 27875 | tree int_cst = TREE_OPERAND (nop, 0); |
| 27876 | return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST; |
| 27877 | } |
| 27878 | |
/* Output debug information for global decl DECL.  Called from
   toplev.cc after compilation proper has finished.  */

static void
dwarf2out_late_global_decl (tree decl)
{
  /* Fill-in any location information we were unable to determine
     on the first pass.  Only variables are handled here; other decls
     were completed during early debug generation.  */
  if (VAR_P (decl))
    {
      dw_die_ref die = lookup_decl_die (decl);

      /* We may have to generate full debug late for LTO in case debug
	 was not enabled at compile-time or the target doesn't support
	 the LTO early debug scheme.  */
      if (! die && in_lto_p
	  /* Function scope variables are emitted when emitting the
	     DIE for the function.  */
	  && ! local_function_static (decl))
	dwarf2out_decl (decl);
      else if (die)
	{
	  /* We get called via the symtab code invoking late_global_decl
	     for symbols that are optimized out.

	     Do not add locations for those, except if they have a
	     DECL_VALUE_EXPR, in which case they are relevant for debuggers.
	     Still don't add a location if the DECL_VALUE_EXPR is not a trivial
	     INDIRECT_REF expression, as this could generate relocations to
	     text symbols in LTO object files, which is invalid.  */
	  varpool_node *node = varpool_node::get (decl);
	  if ((! node || ! node->definition)
	      && ! (DECL_HAS_VALUE_EXPR_P (decl)
		    && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
	    /* Optimized out: at most emit a constant value, never a
	       location.  */
	    tree_add_const_value_attribute_for_decl (var_die: die, decl);
	  else
	    {
	      add_location_or_const_value_attribute (die, decl, cache_p: false);
	      /* For C++ structured bindings at namespace scope when processing
		 the underlying variable also add locations on the structured
		 bindings which refer to it (unless they are tuple-based, then
		 they are separate VAR_DECLs registered in varpool).  */
	      if (tree attr = lookup_attribute (attr_name: "structured bindings" ,
						DECL_ATTRIBUTES (decl)))
		for (tree d = TREE_VALUE (attr); d; d = TREE_CHAIN (d))
		  {
		    die = lookup_decl_die (TREE_VALUE (d));
		    if (die)
		      add_location_or_const_value_attribute (die,
							     TREE_VALUE (d),
							     cache_p: false);
		  }
	    }
	}
    }
}
| 27935 | |
| 27936 | /* Output debug information for type decl DECL. Called from toplev.cc |
| 27937 | and from language front ends (to record built-in types). */ |
| 27938 | static void |
| 27939 | dwarf2out_type_decl (tree decl, int local) |
| 27940 | { |
| 27941 | if (!local) |
| 27942 | { |
| 27943 | set_early_dwarf s; |
| 27944 | dwarf2out_decl (decl); |
| 27945 | } |
| 27946 | } |
| 27947 | |
/* Output debug information for imported module or decl DECL.
   NAME is non-NULL name in the lexical block if the decl has been renamed.
   LEXICAL_BLOCK is the lexical block (which TREE_CODE is a BLOCK)
   that DECL belongs to.
   LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK.  */
static void
dwarf2out_imported_module_or_decl_1 (tree decl,
				     tree name,
				     tree lexical_block,
				     dw_die_ref lexical_block_die)
{
  expanded_location xloc;
  dw_die_ref imported_die = NULL;
  dw_die_ref at_import_die;

  /* For an IMPORTED_DECL the source coordinates come from the import
     itself and the imported entity is its associated decl.  */
  if (TREE_CODE (decl) == IMPORTED_DECL)
    {
      xloc = expand_location (DECL_SOURCE_LOCATION (decl));
      decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
      gcc_assert (decl);
    }
  else
    xloc = expand_location (input_location);

  /* First obtain the DIE the DW_AT_import attribute will reference,
     forcing its creation where necessary.  */
  if (TREE_CODE (decl) == TYPE_DECL)
    {
      at_import_die = force_type_die (TREE_TYPE (decl));
      /* For namespace N { typedef void T; } using N::T; base_type_die
	 returns NULL, but DW_TAG_imported_declaration requires
	 the DW_AT_import tag.  Force creation of DW_TAG_typedef.  */
      if (!at_import_die)
	{
	  gcc_assert (TREE_CODE (decl) == TYPE_DECL);
	  gen_typedef_die (decl, context_die: get_context_die (DECL_CONTEXT (decl)));
	  at_import_die = lookup_type_die (TREE_TYPE (decl));
	  gcc_assert (at_import_die);
	}
    }
  else
    {
      at_import_die = lookup_decl_die (decl);
      if (!at_import_die)
	{
	  /* If we're trying to avoid duplicate debug info, we may not have
	     emitted the member decl for this field.  Emit it now.  */
	  if (TREE_CODE (decl) == FIELD_DECL)
	    {
	      tree type = DECL_CONTEXT (decl);

	      if (TYPE_CONTEXT (type)
		  && TYPE_P (TYPE_CONTEXT (type))
		  && !should_emit_struct_debug (TYPE_CONTEXT (type),
						usage: DINFO_USAGE_DIR_USE))
		return;
	      gen_type_die_for_member (type, member: decl,
				       context_die: get_context_die (TYPE_CONTEXT (type)));
	    }
	  if (TREE_CODE (decl) == CONST_DECL)
	    {
	      /* Individual enumerators of an enum type do not get output here
		 (see gen_decl_die), so we cannot call force_decl_die.  */
	      if (!is_fortran () && !is_ada () && !is_dlang ())
		return;
	    }
	  if (TREE_CODE (decl) == NAMELIST_DECL)
	    at_import_die = gen_namelist_decl (DECL_NAME (decl),
					       get_context_die (DECL_CONTEXT (decl)),
					       NULL_TREE);
	  else
	    at_import_die = force_decl_die (decl);
	}
    }

  /* Now create the import DIE itself: DW_TAG_imported_module for a
     namespace, DW_TAG_imported_declaration for everything else.  */
  if (TREE_CODE (decl) == NAMESPACE_DECL)
    {
      if (dwarf_version >= 3 || !dwarf_strict)
	imported_die = new_die (tag_value: DW_TAG_imported_module,
				parent_die: lexical_block_die,
				t: lexical_block);
      else
	/* DW_TAG_imported_module is a DWARF 3 addition; nothing we can
	   emit under strict DWARF 2.  */
	return;
    }
  else
    imported_die = new_die (tag_value: DW_TAG_imported_declaration,
			    parent_die: lexical_block_die,
			    t: lexical_block);

  /* Attach the source coordinates, the optional new NAME, and the
     reference to the imported entity.  */
  add_AT_file (die: imported_die, attr_kind: DW_AT_decl_file, fd: lookup_filename (xloc.file));
  add_AT_unsigned (die: imported_die, attr_kind: DW_AT_decl_line, unsigned_val: xloc.line);
  if (debug_column_info && xloc.column)
    add_AT_unsigned (die: imported_die, attr_kind: DW_AT_decl_column, unsigned_val: xloc.column);
  if (name)
    add_AT_string (die: imported_die, attr_kind: DW_AT_name,
		   IDENTIFIER_POINTER (name));
  add_AT_die_ref (die: imported_die, attr_kind: DW_AT_import, targ_die: at_import_die);
}
| 28044 | |
/* Output debug information for imported module or decl DECL.
   NAME is non-NULL name in context if the decl has been renamed.
   CHILD is true if decl is one of the renamed decls as part of
   importing whole module.
   IMPLICIT is set if this hook is called for an implicit import
   such as inline namespace.  */

static void
dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
				   bool child, bool implicit)
{
  /* dw_die_ref at_import_die; */
  dw_die_ref scope_die;

  /* Imports carry no debug info in terse mode.  */
  if (debug_info_level <= DINFO_LEVEL_TERSE)
    return;

  gcc_assert (decl);

  /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
     should be enough, for DWARF4 and older even if we emit as extension
     DW_AT_export_symbols add the implicit DW_TAG_imported_module anyway
     for the benefit of consumers unaware of DW_AT_export_symbols.  */
  if (implicit
      && dwarf_version >= 5
      && lang_hooks.decls.decl_dwarf_attribute (decl,
						DW_AT_export_symbols) == 1)
    return;

  /* Imports are part of the early debug info.  */
  set_early_dwarf s;

  /* To emit DW_TAG_imported_module or DW_TAG_imported_decl, we need two DIEs.
     We need decl DIE for reference and scope die.  First, get DIE for the decl
     itself.  */

  /* Get the scope die for decl context.  Use comp_unit_die for global module
     or decl.  If die is not found for non globals, force new die.  */
  if (context
      && TYPE_P (context)
      && !should_emit_struct_debug (type: context, usage: DINFO_USAGE_DIR_USE))
    return;

  scope_die = get_context_die (context);

  if (child)
    {
      /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
	 there is nothing we can do, here.  */
      if (dwarf_version < 3 && dwarf_strict)
	return;

      /* The renamed decl nests inside the module import emitted last
	 into SCOPE_DIE, not directly in the scope itself.  */
      gcc_assert (scope_die->die_child);
      gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
      gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
      scope_die = scope_die->die_child;
    }

  /* OK, now we have DIEs for decl as well as scope.  Emit imported die.  */
  dwarf2out_imported_module_or_decl_1 (decl, name, lexical_block: context, lexical_block_die: scope_die);
}
| 28105 | |
/* Output debug information for namelists.  NAME is the name of the
   namelist, SCOPE_DIE the DIE of the enclosing scope, and ITEM_DECLS a
   CONSTRUCTOR of the member decls, or NULL_TREE for a nondefining
   namelist.  Returns the new DW_TAG_namelist DIE, or NULL in terse
   mode.  */

static dw_die_ref
gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
{
  dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
  tree value;
  unsigned i;

  if (debug_info_level <= DINFO_LEVEL_TERSE)
    return NULL;

  gcc_assert (scope_die != NULL);
  nml_die = new_die (tag_value: DW_TAG_namelist, parent_die: scope_die, NULL);
  add_AT_string (die: nml_die, attr_kind: DW_AT_name, IDENTIFIER_POINTER (name));

  /* If there are no item_decls, we have a nondefining namelist, e.g.
     with USE association; hence, set DW_AT_declaration.  */
  if (item_decls == NULL_TREE)
    {
      add_AT_flag (die: nml_die, attr_kind: DW_AT_declaration, flag: 1);
      return nml_die;
    }

  /* Emit one DW_TAG_namelist_item child per member, referencing the
     member's own DIE (created on demand).  */
  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
    {
      nml_item_ref_die = lookup_decl_die (decl: value);
      if (!nml_item_ref_die)
	nml_item_ref_die = force_decl_die (decl: value);

      nml_item_die = new_die (tag_value: DW_TAG_namelist_item, parent_die: nml_die, NULL);
      add_AT_die_ref (die: nml_item_die, attr_kind: DW_AT_namelist_item, targ_die: nml_item_ref_die);
    }
  return nml_die;
}
| 28141 | |
| 28142 | |
/* Write the debugging output for DECL.  The main work is done by
   gen_decl_die; this routine chooses the context DIE and filters out
   decls for which no debug info should be emitted.  */

static void
dwarf2out_decl (tree decl)
{
  /* Default to the compilation unit scope; individual cases below may
     pick a narrower context or NULL (fixed up later).  */
  dw_die_ref context_die = comp_unit_die ();

  switch (TREE_CODE (decl))
    {
    case ERROR_MARK:
      return;

    case FUNCTION_DECL:
      /* If we're a nested function, initially use a parent of NULL; if we're
	 a plain function, this will be fixed up in decls_for_scope.  If
	 we're a method, it will be ignored, since we already have a DIE.
	 Avoid doing this late though since clones of class methods may
	 otherwise end up in limbo and create type DIEs late.  */
      if (early_dwarf
	  && decl_function_context (decl)
	  /* But if we're in terse mode, we don't care about scope.  */
	  && debug_info_level > DINFO_LEVEL_TERSE)
	context_die = NULL;
      break;

    case VAR_DECL:
      /* For local statics lookup proper context die.  */
      if (local_function_static (decl))
	context_die = lookup_decl_die (DECL_CONTEXT (decl));

      /* If we are in terse mode, don't generate any DIEs to represent any
	 variable declarations or definitions unless it is external.  */
      if (debug_info_level < DINFO_LEVEL_TERSE
	  || (debug_info_level == DINFO_LEVEL_TERSE
	      && !TREE_PUBLIC (decl)))
	return;
      break;

    case CONST_DECL:
      if (debug_info_level <= DINFO_LEVEL_TERSE)
	return;
      /* Only some languages represent constants as CONST_DECLs worth
	 emitting here.  */
      if (!is_fortran () && !is_ada () && !is_dlang ())
	return;
      /* Function-scope static constants go under the function's DIE.  */
      if (TREE_STATIC (decl) && decl_function_context (decl))
	context_die = lookup_decl_die (DECL_CONTEXT (decl));
      break;

    case NAMESPACE_DECL:
    case IMPORTED_DECL:
      if (debug_info_level <= DINFO_LEVEL_TERSE)
	return;
      /* Already emitted; nothing more to do.  */
      if (lookup_decl_die (decl) != NULL)
	return;
      break;

    case TYPE_DECL:
      /* Don't emit stubs for types unless they are needed by other DIEs.  */
      if (TYPE_DECL_SUPPRESS_DEBUG (decl))
	return;

      /* Don't bother trying to generate any DIEs to represent any of the
	 normal built-in types for the language we are compiling.  */
      if (DECL_IS_UNDECLARED_BUILTIN (decl))
	return;

      /* If we are in terse mode, don't generate any DIEs for types.  */
      if (debug_info_level <= DINFO_LEVEL_TERSE)
	return;

      /* If we're a function-scope tag, initially use a parent of NULL;
	 this will be fixed up in decls_for_scope.  */
      if (decl_function_context (decl))
	context_die = NULL;

      break;

    case NAMELIST_DECL:
      break;

    default:
      return;
    }

  gen_decl_die (decl, NULL, NULL, context_die);

  /* With checking enabled, verify the freshly built DIE tree.  */
  if (flag_checking)
    {
      dw_die_ref die = lookup_decl_die (decl);
      if (die)
	check_die (die);
    }
}
| 28235 | |
/* Write the debugging output for the function DECL, then discard the
   per-function bookkeeping accumulated while compiling its body.  */

static void
dwarf2out_function_decl (tree decl)
{
  dwarf2out_decl (decl);

  /* Reset the call-site and location state that only pertains to the
     function just emitted.  */
  call_arg_locations = NULL;
  call_arg_loc_last = NULL;
  call_site_count = -1;
  tail_call_site_count = -1;
  decl_loc_table->empty ();
  cached_dw_loc_list_table->empty ();
}
| 28249 | |
/* Output a marker (i.e. a label) for the beginning of the generated code for
   a lexical block numbered BLOCKNUM.  LINE and BLOCK are only used by the
   optional CodeView back end.  */

static void
dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
		       unsigned int blocknum,
		       tree block ATTRIBUTE_UNUSED)
{
#ifdef CODEVIEW_DEBUGGING_INFO
  /* Also record the block start for CodeView output when enabled.  */
  if (codeview_debuginfo_p ())
    codeview_begin_block (line, blocknum, block);
#endif

  switch_to_section (current_function_section ());
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
}
| 28266 | |
/* Output a marker (i.e. a label) for the end of the generated code for a
   lexical block numbered BLOCKNUM.  LINE is only used by the optional
   CodeView back end.  */

static void
dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
{
#ifdef CODEVIEW_DEBUGGING_INFO
  /* Also record the block end for CodeView output when enabled.  */
  if (codeview_debuginfo_p ())
    codeview_end_block (line, blocknum);
#endif

  switch_to_section (current_function_section ());
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
}
| 28281 | |
| 28282 | /* Returns true if it is appropriate not to emit any debugging |
| 28283 | information for BLOCK, because it doesn't contain any instructions. |
| 28284 | |
| 28285 | Don't allow this for blocks with nested functions or local classes |
| 28286 | as we would end up with orphans, and in the presence of scheduling |
| 28287 | we may end up calling them anyway. */ |
| 28288 | |
| 28289 | static bool |
| 28290 | dwarf2out_ignore_block (const_tree block) |
| 28291 | { |
| 28292 | tree decl; |
| 28293 | unsigned int i; |
| 28294 | |
| 28295 | for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl)) |
| 28296 | if (TREE_CODE (decl) == FUNCTION_DECL |
| 28297 | || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl))) |
| 28298 | return false; |
| 28299 | for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++) |
| 28300 | { |
| 28301 | decl = BLOCK_NONLOCALIZED_VAR (block, i); |
| 28302 | if (TREE_CODE (decl) == FUNCTION_DECL |
| 28303 | || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl))) |
| 28304 | return false; |
| 28305 | } |
| 28306 | |
| 28307 | return true; |
| 28308 | } |
| 28309 | |
| 28310 | /* Hash table routines for file_hash. */ |
| 28311 | |
| 28312 | bool |
| 28313 | dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2) |
| 28314 | { |
| 28315 | return filename_cmp (s1: p1->key, s2: p2) == 0; |
| 28316 | } |
| 28317 | |
/* Hash a file table entry P by its filename key.  */

hashval_t
dwarf_file_hasher::hash (dwarf_file_data *p)
{
  return htab_hash_string (p->key);
}
| 28323 | |
| 28324 | /* Lookup FILE_NAME (in the list of filenames that we know about here in |
| 28325 | dwarf2out.cc) and return its "index". The index of each (known) filename is |
| 28326 | just a unique number which is associated with only that one filename. We |
| 28327 | need such numbers for the sake of generating labels (in the .debug_sfnames |
| 28328 | section) and references to those files numbers (in the .debug_srcinfo |
| 28329 | and .debug_macinfo sections). If the filename given as an argument is not |
| 28330 | found in our current list, add it to the list and assign it the next |
| 28331 | available unique index number. */ |
| 28332 | |
| 28333 | static struct dwarf_file_data * |
| 28334 | lookup_filename (const char *file_name) |
| 28335 | { |
| 28336 | struct dwarf_file_data * created; |
| 28337 | |
| 28338 | if (!file_name) |
| 28339 | return NULL; |
| 28340 | |
| 28341 | if (!file_name[0]) |
| 28342 | file_name = "<stdin>" ; |
| 28343 | |
| 28344 | dwarf_file_data **slot |
| 28345 | = file_table->find_slot_with_hash (comparable: file_name, hash: htab_hash_string (file_name), |
| 28346 | insert: INSERT); |
| 28347 | if (*slot) |
| 28348 | return *slot; |
| 28349 | |
| 28350 | created = ggc_alloc<dwarf_file_data> (); |
| 28351 | created->key = file_name; |
| 28352 | created->filename = remap_debug_filename (file_name); |
| 28353 | created->emitted_number = 0; |
| 28354 | *slot = created; |
| 28355 | return created; |
| 28356 | } |
| 28357 | |
| 28358 | /* If the assembler will construct the file table, then translate the compiler |
| 28359 | internal file table number into the assembler file table number, and emit |
| 28360 | a .file directive if we haven't already emitted one yet. The file table |
| 28361 | numbers are different because we prune debug info for unused variables and |
| 28362 | types, which may include filenames. */ |
| 28363 | |
| 28364 | static int |
| 28365 | maybe_emit_file (struct dwarf_file_data * fd) |
| 28366 | { |
| 28367 | if (! fd->emitted_number) |
| 28368 | { |
| 28369 | if (last_emitted_file) |
| 28370 | fd->emitted_number = last_emitted_file->emitted_number + 1; |
| 28371 | else |
| 28372 | fd->emitted_number = 1; |
| 28373 | last_emitted_file = fd; |
| 28374 | |
| 28375 | if (output_asm_line_debug_info ()) |
| 28376 | { |
| 28377 | fprintf (stream: asm_out_file, format: "\t.file %u " , fd->emitted_number); |
| 28378 | output_quoted_string (asm_out_file, fd->filename); |
| 28379 | fputc (c: '\n', stream: asm_out_file); |
| 28380 | } |
| 28381 | } |
| 28382 | |
| 28383 | return fd->emitted_number; |
| 28384 | } |
| 28385 | |
| 28386 | /* Schedule generation of a DW_AT_const_value attribute to DIE. |
| 28387 | That generation should happen after function debug info has been |
| 28388 | generated. The value of the attribute is the constant value of ARG. */ |
| 28389 | |
| 28390 | static void |
| 28391 | append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg) |
| 28392 | { |
| 28393 | die_arg_entry entry; |
| 28394 | |
| 28395 | if (!die || !arg) |
| 28396 | return; |
| 28397 | |
| 28398 | gcc_assert (early_dwarf); |
| 28399 | |
| 28400 | if (!tmpl_value_parm_die_table) |
| 28401 | vec_alloc (v&: tmpl_value_parm_die_table, nelems: 32); |
| 28402 | |
| 28403 | entry.die = die; |
| 28404 | entry.arg = arg; |
| 28405 | vec_safe_push (v&: tmpl_value_parm_die_table, obj: entry); |
| 28406 | } |
| 28407 | |
| 28408 | /* Return TRUE if T is an instance of generic type, FALSE |
| 28409 | otherwise. */ |
| 28410 | |
| 28411 | static bool |
| 28412 | generic_type_p (tree t) |
| 28413 | { |
| 28414 | if (t == NULL_TREE || !TYPE_P (t)) |
| 28415 | return false; |
| 28416 | return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE; |
| 28417 | } |
| 28418 | |
| 28419 | /* Schedule the generation of the generic parameter dies for the |
| 28420 | instance of generic type T. The proper generation itself is later |
| 28421 | done by gen_scheduled_generic_parms_dies. */ |
| 28422 | |
| 28423 | static void |
| 28424 | schedule_generic_params_dies_gen (tree t) |
| 28425 | { |
| 28426 | if (!generic_type_p (t)) |
| 28427 | return; |
| 28428 | |
| 28429 | gcc_assert (early_dwarf); |
| 28430 | |
| 28431 | if (!generic_type_instances) |
| 28432 | vec_alloc (v&: generic_type_instances, nelems: 256); |
| 28433 | |
| 28434 | vec_safe_push (v&: generic_type_instances, obj: t); |
| 28435 | } |
| 28436 | |
| 28437 | /* Add a DW_AT_const_value attribute to DIEs that were scheduled |
| 28438 | by append_entry_to_tmpl_value_parm_die_table. This function must |
| 28439 | be called after function DIEs have been generated. */ |
| 28440 | |
static void
gen_remaining_tmpl_value_param_die_attribute (void)
{
  if (tmpl_value_parm_die_table)
    {
      unsigned i, j;
      die_arg_entry *e;

      /* We do this in two phases - first get the cases we can
	 handle during early-finish, preserving those we cannot
	 (containing symbolic constants where we don't yet know
	 whether we are going to output the referenced symbols).
	 For those we try again at late-finish.  */
      j = 0;
      FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
	{
	  /* Skip DIEs that have been pruned; for live ones first try to
	     attach a DW_AT_const_value.  */
	  if (!e->die->removed
	      && !tree_add_const_value_attribute (die: e->die, t: e->arg))
	    {
	      /* No constant value was possible.  At late-finish, and when
		 DWARF 5 (or non-strict DWARF) allows it, fall back to a
		 location expression instead.  */
	      dw_loc_descr_ref loc = NULL;
	      if (! early_dwarf
		  && (dwarf_version >= 5 || !dwarf_strict))
		loc = loc_descriptor_from_tree (loc: e->arg, want_address: 2, NULL);
	      if (loc)
		add_AT_loc (die: e->die, attr_kind: DW_AT_location, loc);
	      else
		/* Still unresolved: compact the entry back into the
		   vector (index J trails I) for a later retry.  */
		(*tmpl_value_parm_die_table)[j++] = *e;
	    }
	}
      /* Drop the entries that were handled above.  */
      tmpl_value_parm_die_table->truncate (size: j);
    }
}
| 28473 | |
| 28474 | /* Generate generic parameters DIEs for instances of generic types |
| 28475 | that have been previously scheduled by |
| 28476 | schedule_generic_params_dies_gen. This function must be called |
| 28477 | after all the types of the CU have been laid out. */ |
| 28478 | |
| 28479 | static void |
| 28480 | gen_scheduled_generic_parms_dies (void) |
| 28481 | { |
| 28482 | unsigned i; |
| 28483 | tree t; |
| 28484 | |
| 28485 | if (!generic_type_instances) |
| 28486 | return; |
| 28487 | |
| 28488 | FOR_EACH_VEC_ELT (*generic_type_instances, i, t) |
| 28489 | if (COMPLETE_TYPE_P (t)) |
| 28490 | gen_generic_params_dies (t); |
| 28491 | |
| 28492 | generic_type_instances = NULL; |
| 28493 | } |
| 28494 | |
| 28495 | |
| 28496 | /* Replace DW_AT_name for the decl with name. */ |
| 28497 | |
| 28498 | static void |
| 28499 | dwarf2out_set_name (tree decl, tree name) |
| 28500 | { |
| 28501 | dw_die_ref die; |
| 28502 | dw_attr_node *attr; |
| 28503 | const char *dname; |
| 28504 | |
| 28505 | die = TYPE_SYMTAB_DIE (decl); |
| 28506 | if (!die) |
| 28507 | return; |
| 28508 | |
| 28509 | dname = dwarf2_name (decl: name, scope: 0); |
| 28510 | if (!dname) |
| 28511 | return; |
| 28512 | |
| 28513 | attr = get_AT (die, attr_kind: DW_AT_name); |
| 28514 | if (attr) |
| 28515 | { |
| 28516 | struct indirect_string_node *node; |
| 28517 | |
| 28518 | node = find_AT_string (str: dname); |
| 28519 | /* replace the string. */ |
| 28520 | attr->dw_attr_val.v.val_str = node; |
| 28521 | } |
| 28522 | |
| 28523 | else |
| 28524 | add_name_attribute (die, name_string: dname); |
| 28525 | } |
| 28526 | |
/* True if before or during processing of the first function being emitted.  */
static bool in_first_function_p = true;
/* True if loc_note during dwarf2out_var_location call might still be
   before first real instruction at address equal to .Ltext0.  */
static bool maybe_at_text_label_p = true;
/* One above highest N where .LVLN label might be equal to .Ltext0 label.
   Consulted by dwarf2out_end_function to detect .debug_loc ranges that
   would degenerate into the 0 .. 0 list terminator.  */
static unsigned int first_loclabel_num_not_at_text_label;
| 28534 | |
| 28535 | /* Look ahead for a real insn. */ |
| 28536 | |
| 28537 | static rtx_insn * |
| 28538 | dwarf2out_next_real_insn (rtx_insn *loc_note) |
| 28539 | { |
| 28540 | rtx_insn *next_real = NEXT_INSN (insn: loc_note); |
| 28541 | |
| 28542 | while (next_real) |
| 28543 | if (INSN_P (next_real)) |
| 28544 | break; |
| 28545 | else |
| 28546 | next_real = NEXT_INSN (insn: next_real); |
| 28547 | |
| 28548 | return next_real; |
| 28549 | } |
| 28550 | |
| 28551 | /* Called by the final INSN scan whenever we see a var location. We |
| 28552 | use it to drop labels in the right places, and throw the location in |
| 28553 | our lookup table. */ |
| 28554 | |
static void
dwarf2out_var_location (rtx_insn *loc_note)
{
  char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
  struct var_loc_node *newloc;
  rtx_insn *next_real;
  rtx_insn *call_insn = NULL;
  static const char *last_label;
  static const char *last_postcall_label;
  static bool last_in_cold_section_p;
  static rtx_insn *expected_next_loc_note;
  tree decl;
  bool var_loc_p;
  var_loc_view view = 0;

  /* Despite the hook's name, LOC_NOTE may also be a CALL_INSN, in which
     case we record call-site information instead of a variable location.  */
  if (!NOTE_P (loc_note))
    {
      if (CALL_P (loc_note))
	{
	  maybe_reset_location_view (insn: loc_note, table: cur_line_info_table);
	  call_site_count++;
	  if (SIBLING_CALL_P (loc_note))
	    tail_call_site_count++;
	  /* A call carrying an argument-location note gets a label and a
	     call_arg_loc_node via the create_label path below.  */
	  if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
	    {
	      call_insn = loc_note;
	      loc_note = NULL;
	      var_loc_p = false;

	      next_real = dwarf2out_next_real_insn (loc_note: call_insn);
	      cached_next_real_insn = NULL;
	      goto create_label;
	    }
	  if (optimize == 0 && !flag_var_tracking)
	    {
	      /* When the var-tracking pass is not running, there is no note
		 for indirect calls whose target is compile-time known.  In this
		 case, process such calls specifically so that we generate call
		 sites for them anyway.  */
	      rtx x = PATTERN (insn: loc_note);
	      if (GET_CODE (x) == PARALLEL)
		x = XVECEXP (x, 0, 0);
	      if (GET_CODE (x) == SET)
		x = SET_SRC (x);
	      if (GET_CODE (x) == CALL)
		x = XEXP (x, 0);
	      /* Only handle calls that are NOT direct calls to a known
		 FUNCTION_DECL here (those already have notes).  */
	      if (!MEM_P (x)
		  || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
		  || !SYMBOL_REF_DECL (XEXP (x, 0))
		  || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
		      != FUNCTION_DECL))
		{
		  call_insn = loc_note;
		  loc_note = NULL;
		  var_loc_p = false;

		  next_real = dwarf2out_next_real_insn (loc_note: call_insn);
		  cached_next_real_insn = NULL;
		  goto create_label;
		}
	    }
	}
      else if (!debug_variable_location_views)
	gcc_unreachable ();
      else
	maybe_reset_location_view (insn: loc_note, table: cur_line_info_table);

      return;
    }

  var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
  if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
    return;

  /* Optimize processing a large consecutive sequence of location
     notes so we don't spend too much time in next_real_insn.  If the
     next insn is another location note, remember the next_real_insn
     calculation for next time.  */
  next_real = cached_next_real_insn;
  if (next_real)
    {
      /* The cache is only valid for the note it was computed for.  */
      if (expected_next_loc_note != loc_note)
	next_real = NULL;
    }

  if (! next_real)
    next_real = dwarf2out_next_real_insn (loc_note);

  if (next_real)
    {
      /* See whether another location note precedes the next real insn;
	 if so, the computed next_real can be cached for that note.  */
      rtx_insn *next_note = NEXT_INSN (insn: loc_note);
      while (next_note != next_real)
	{
	  if (! next_note->deleted ()
	      && NOTE_P (next_note)
	      && NOTE_KIND (next_note) == NOTE_INSN_VAR_LOCATION)
	    break;
	  next_note = NEXT_INSN (insn: next_note);
	}

      if (next_note == next_real)
	cached_next_real_insn = NULL;
      else
	{
	  expected_next_loc_note = next_note;
	  cached_next_real_insn = next_real;
	}
    }
  else
    cached_next_real_insn = NULL;

  /* If there are no instructions which would be affected by this note,
     don't do anything.  */
  if (var_loc_p
      && next_real == NULL_RTX
      && !NOTE_DURING_CALL_P (loc_note))
    return;

create_label:

  if (next_real == NULL_RTX)
    next_real = get_last_insn ();

  /* If there were any real insns between note we processed last time
     and this note (or if it is the first note), clear
     last_{,postcall_}label so that they are not reused this time.  */
  if (last_var_location_insn == NULL_RTX
      || last_var_location_insn != next_real
      || last_in_cold_section_p != in_cold_section_p)
    {
      last_label = NULL;
      last_postcall_label = NULL;
    }

  if (var_loc_p)
    {
      const char *label
	= NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
      view = cur_line_info_table->view;
      decl = NOTE_VAR_LOCATION_DECL (loc_note);
      newloc = add_var_loc_to_decl (decl, loc_note, label, view);
      /* add_var_loc_to_decl can decline to add a node (presumably when
	 the location adds no information); nothing more to do then.  */
      if (newloc == NULL)
	return;
    }
  else
    {
      decl = NULL_TREE;
      newloc = NULL;
    }

  /* If there were no real insns between note we processed last time
     and this note, use the label we emitted last time.  Otherwise
     create a new label and emit it.  */
  if (last_label == NULL)
    {
      ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
      ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
      loclabel_num++;
      last_label = ggc_strdup (loclabel);
      /* See if loclabel might be equal to .Ltext0.  If yes,
	 bump first_loclabel_num_not_at_text_label.  */
      if (!have_multiple_function_sections
	  && in_first_function_p
	  && maybe_at_text_label_p)
	{
	  static rtx_insn *last_start;
	  rtx_insn *insn;
	  /* Walk backwards to the last point already checked, looking
	     for any insn that must have emitted code.  */
	  for (insn = loc_note; insn; insn = previous_insn (insn))
	    if (insn == last_start)
	      break;
	    else if (!NONDEBUG_INSN_P (insn))
	      continue;
	    else
	      {
		rtx body = PATTERN (insn);
		if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
		  continue;
		/* Inline asm could occupy zero bytes.  */
		else if (GET_CODE (body) == ASM_INPUT
			 || asm_noperands (body) >= 0)
		  continue;
#ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h.  */
		else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
		  continue;
#endif
		else
		  {
		    /* Assume insn has non-zero length.  */
		    maybe_at_text_label_p = false;
		    break;
		  }
	      }
	  if (maybe_at_text_label_p)
	    {
	      last_start = loc_note;
	      first_loclabel_num_not_at_text_label = loclabel_num;
	    }
	}
    }

  /* Exactly one of LOC_NOTE and CALL_INSN is set at this point.  */
  gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
	      || (loc_note != NULL_RTX && call_insn == NULL_RTX))			;

  if (!var_loc_p)
    {
      /* Record a call site for CALL_INSN.  */
      struct call_arg_loc_node *ca_loc
	= ggc_cleared_alloc<call_arg_loc_node> ();
      rtx_insn *prev = call_insn;

      ca_loc->call_insn = call_insn;
      ca_loc->next = NULL;
      ca_loc->label = last_label;
      gcc_assert (prev
		  && (CALL_P (prev)
		      || (NONJUMP_INSN_P (prev)
			  && GET_CODE (PATTERN (prev)) == SEQUENCE
			  && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
      /* For a SEQUENCE (delay slot), the call is its first element.  */
      if (!CALL_P (prev))
	prev = as_a <rtx_sequence *> (p: PATTERN (insn: prev))->insn (index: 0);
      ca_loc->tail_call_p = SIBLING_CALL_P (prev);

      /* Look for a SYMBOL_REF in the "prev" instruction.  */
      rtx x = get_call_rtx_from (prev);
      if (x)
	{
	  /* Try to get the call symbol, if any.  */
	  if (MEM_P (XEXP (x, 0)))
	    x = XEXP (x, 0);
	  /* First, look for a memory access to a symbol_ref.  */
	  if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	      && SYMBOL_REF_DECL (XEXP (x, 0))
	      && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
	    ca_loc->symbol_ref = XEXP (x, 0);
	  /* Otherwise, look at a compile-time known user-level function
	     declaration.  */
	  else if (MEM_P (x)
		   && MEM_EXPR (x)
		   && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
	    ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
	}

      ca_loc->block = insn_scope (prev);
      /* Append to the singly-linked list of call-site records.  */
      if (call_arg_locations)
	call_arg_loc_last->next = ca_loc;
      else
	call_arg_locations = ca_loc;
      call_arg_loc_last = ca_loc;
    }
  else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
    {
      newloc->label = last_label;
      newloc->view = view;
    }
  else
    {
      /* Location during a call: use an "L-1" label so the range starts
	 within the call insn itself.  */
      if (!last_postcall_label)
	{
	  sprintf (s: loclabel, format: "%s-1", last_label);
	  last_postcall_label = ggc_strdup (loclabel);
	}
      newloc->label = last_postcall_label;
      /* ??? This view is at last_label, not last_label-1, but we
	 could only assume view at last_label-1 is zero if we could
	 assume calls always have length greater than one.  This is
	 probably true in general, though there might be a rare
	 exception to this rule, e.g. if a call insn is optimized out
	 by target magic.  Then, even the -1 in the label will be
	 wrong, which might invalidate the range.  Anyway, using view,
	 though technically possibly incorrect, will work as far as
	 ranges go: since L-1 is in the middle of the call insn,
	 (L-1).0 and (L-1).V shouldn't make any difference, and having
	 the loclist entry refer to the .loc entry might be useful, so
	 leave it like this.  */
      newloc->view = view;
    }

  if (var_loc_p && flag_debug_asm)
    {
      /* Emit a human-readable assembly comment describing the change.  */
      const char *name, *sep, *patstr;
      if (decl && DECL_NAME (decl))
	name = IDENTIFIER_POINTER (DECL_NAME (decl));
      else
	name = "";
      if (NOTE_VAR_LOCATION_LOC (loc_note))
	{
	  sep = " => ";
	  patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
	}
      else
	{
	  sep = " ";
	  patstr = "RESET";
	}
      fprintf (stream: asm_out_file, format: "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
	       name, sep, patstr);
    }

  /* Remember where we stopped for the next invocation.  */
  last_var_location_insn = next_real;
  last_in_cold_section_p = in_cold_section_p;
}
| 28855 | |
| 28856 | /* Check whether BLOCK, a lexical block, is nested within OUTER, or is |
| 28857 | OUTER itself. If BOTHWAYS, check not only that BLOCK can reach |
| 28858 | OUTER through BLOCK_SUPERCONTEXT links, but also that there is a |
| 28859 | path from OUTER to BLOCK through BLOCK_SUBBLOCKs and |
| 28860 | BLOCK_FRAGMENT_ORIGIN links. */ |
static bool
block_within_block_p (tree block, tree outer, bool bothways)
{
  if (block == outer)
    return true;

  /* Quickly check that OUTER is up BLOCK's supercontext chain.  */
  for (tree context = BLOCK_SUPERCONTEXT (block);
       context != outer;
       context = BLOCK_SUPERCONTEXT (context))
    if (!context || TREE_CODE (context) != BLOCK)
      return false;

  if (!bothways)
    return true;

  /* Now check that each block is actually referenced by its
     parent.  */
  for (tree context = BLOCK_SUPERCONTEXT (block); ;
       context = BLOCK_SUPERCONTEXT (context))
    {
      /* A fragment has no subblocks of its own; follow the chain from
	 the block it was split from instead.  */
      if (BLOCK_FRAGMENT_ORIGIN (context))
	{
	  gcc_assert (!BLOCK_SUBBLOCKS (context));
	  context = BLOCK_FRAGMENT_ORIGIN (context);
	}
      /* BLOCK must appear on its parent's subblock chain; running off
	 the end (NULL) means the downward link is missing.  */
      for (tree sub = BLOCK_SUBBLOCKS (context);
	   sub != block;
	   sub = BLOCK_CHAIN (sub))
	if (!sub)
	  return false;
      if (context == outer)
	return true;
      else
	/* Move up one level and verify that link on the next pass.  */
	block = context;
    }
}
| 28898 | |
| 28899 | /* Called during final while assembling the marker of the entry point |
| 28900 | for an inlined function. */ |
| 28901 | |
static void
dwarf2out_inline_entry (tree block)
{
  gcc_assert (debug_inline_points);

  /* If we can't represent it, don't bother.  */
  if (!(dwarf_version >= 3 || !dwarf_strict))
    return;

  gcc_assert (DECL_P (block_ultimate_origin (block)));

  /* Sanity check the block tree.  This would catch a case in which
     BLOCK got removed from the tree reachable from the outermost
     lexical block, but got retained in markers.  It would still link
     back to its parents, but some ancestor would be missing a link
     down the path to the sub BLOCK.  If the block got removed, its
     BLOCK_NUMBER will not be a usable value.  */
  if (flag_checking)
    gcc_assert (block_within_block_p (block,
				      DECL_INITIAL (current_function_decl),
				      true));

  gcc_assert (inlined_function_outer_scope_p (block));
  gcc_assert (!lookup_block_die (block));

  /* For a fragment, record the entry against the fragment origin.  */
  if (BLOCK_FRAGMENT_ORIGIN (block))
    block = BLOCK_FRAGMENT_ORIGIN (block);
  /* Can the entry point ever not be at the beginning of an
     unfragmented lexical block?  */
  else if (!(BLOCK_FRAGMENT_CHAIN (block)
	     || (cur_line_info_table
		 && !ZERO_VIEW_P (cur_line_info_table->view))))
    return;

  /* Lazily create the table mapping blocks to inline-entry data.  */
  if (!inline_entry_data_table)
    inline_entry_data_table
      = hash_table<inline_entry_data_hasher>::create_ggc (n: 10);


  inline_entry_data **iedp
    = inline_entry_data_table->find_slot_with_hash (comparable: block,
						    hash: htab_hash_pointer (block),
						    insert: INSERT);
  if (*iedp)
    /* ??? Ideally, we'd record all entry points for the same inlined
       function (some may have been duplicated by e.g. unrolling), but
       we have no way to represent that ATM.  */
    return;

  /* Record the block, the label that identifies the entry point, and
     the current view number (if a line table is active).  */
  inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
  ied->block = block;
  ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
  ied->label_num = BLOCK_NUMBER (block);
  if (cur_line_info_table)
    ied->view = cur_line_info_table->view;

  /* Emit the label marking the inline entry point.  */
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_INLINE_ENTRY_LABEL,
			  BLOCK_NUMBER (block));
}
| 28961 | |
| 28962 | /* Called from finalize_size_functions for size functions so that their body |
| 28963 | can be encoded in the debug info to describe the layout of variable-length |
| 28964 | structures. */ |
| 28965 | |
static void
dwarf2out_size_function (tree decl)
{
  /* Force early-dwarf mode for the duration of this scope (set_early_dwarf
     is a scope guard), then encode DECL as a DWARF procedure.  */
  set_early_dwarf s;
  function_to_dwarf_procedure (fndecl: decl);
}
| 28972 | |
| 28973 | /* Note in one location list that text section has changed. */ |
| 28974 | |
| 28975 | int |
| 28976 | var_location_switch_text_section_1 (var_loc_list **slot, void *) |
| 28977 | { |
| 28978 | var_loc_list *list = *slot; |
| 28979 | if (list->first) |
| 28980 | list->last_before_switch |
| 28981 | = list->last->next ? list->last->next : list->last; |
| 28982 | return 1; |
| 28983 | } |
| 28984 | |
| 28985 | /* Note in all location lists that text section has changed. */ |
| 28986 | |
| 28987 | static void |
| 28988 | var_location_switch_text_section (void) |
| 28989 | { |
| 28990 | if (decl_loc_table == NULL) |
| 28991 | return; |
| 28992 | |
| 28993 | decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL); |
| 28994 | } |
| 28995 | |
| 28996 | /* Create a new line number table. */ |
| 28997 | |
| 28998 | static dw_line_info_table * |
| 28999 | new_line_info_table (void) |
| 29000 | { |
| 29001 | dw_line_info_table *table; |
| 29002 | |
| 29003 | table = ggc_cleared_alloc<dw_line_info_table> (); |
| 29004 | table->file_num = 1; |
| 29005 | table->line_num = 1; |
| 29006 | table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START; |
| 29007 | FORCE_RESET_NEXT_VIEW (table->view); |
| 29008 | table->symviews_since_reset = 0; |
| 29009 | |
| 29010 | return table; |
| 29011 | } |
| 29012 | |
| 29013 | /* Lookup the "current" table into which we emit line info, so |
| 29014 | that we don't have to do it for every source line. */ |
| 29015 | |
static void
set_cur_line_info_table (section *sec)
{
  dw_line_info_table *table;

  if (sec == text_section)
    table = text_section_line_info;
  else if (sec == cold_text_section)
    {
      /* The cold-section table is created on first use and ends at the
	 shared cold_end_label.  */
      table = cold_text_section_line_info;
      if (!table)
	{
	  cold_text_section_line_info = table = new_line_info_table ();
	  table->end_label = cold_end_label;
	}
    }
  else
    {
      /* A function-specific section always gets a fresh table; pick the
	 end label from the hot/cold partition labels when partitioning,
	 otherwise generate a per-function end label.  */
      const char *end_label;

      if (crtl->has_bb_partition)
	{
	  if (in_cold_section_p)
	    end_label = crtl->subsections.cold_section_end_label;
	  else
	    end_label = crtl->subsections.hot_section_end_label;
	}
      else
	{
	  char label[MAX_ARTIFICIAL_LABEL_BYTES];
	  ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
				       current_function_funcdef_no);
	  end_label = ggc_strdup (label);
	}

      table = new_line_info_table ();
      table->end_label = end_label;

      vec_safe_push (v&: separate_line_info, obj: table);
    }

  /* When emitting .loc directives, carry over the previous table's
     is_stmt setting so it stays consistent across the switch.  */
  if (output_asm_line_debug_info ())
    table->is_stmt = (cur_line_info_table
		      ? cur_line_info_table->is_stmt
		      : DWARF_LINE_DEFAULT_IS_STMT_START);
  cur_line_info_table = table;
}
| 29063 | |
| 29064 | |
| 29065 | /* We need to reset the locations at the beginning of each |
| 29066 | function. We can't do this in the end_function hook, because the |
| 29067 | declarations that use the locations won't have been output when |
| 29068 | that hook is called. Also compute have_multiple_function_sections here. */ |
| 29069 | |
static void
dwarf2out_begin_function (tree fun)
{
  section *sec = function_section (fun);

  /* Any function placed outside the main text section means addresses
     can no longer be assumed relative to a single section.  */
  if (sec != text_section)
    have_multiple_function_sections = true;

  /* On the first partitioned function, create the cold text section and
     emit its start label, then switch back to the function's section.  */
  if (crtl->has_bb_partition && !cold_text_section)
    {
      gcc_assert (current_function_decl == fun);
      cold_text_section = unlikely_text_section ();
      switch_to_section (cold_text_section);
      ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
      switch_to_section (sec);
    }

  /* Reset per-function call-site counters.  */
  call_site_count = 0;
  tail_call_site_count = 0;

  set_cur_line_info_table (sec);
  FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
}
| 29093 | |
| 29094 | /* Helper function of dwarf2out_end_function, called only after emitting |
| 29095 | the very first function into assembly. Check if some .debug_loc range |
| 29096 | might end with a .LVL* label that could be equal to .Ltext0. |
| 29097 | In that case we must force using absolute addresses in .debug_loc ranges, |
| 29098 | because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for |
| 29099 | .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc |
| 29100 | list terminator. |
| 29101 | Set have_multiple_function_sections to true in that case and |
| 29102 | terminate htab traversal. */ |
| 29103 | |
| 29104 | int |
| 29105 | find_empty_loc_ranges_at_text_label (var_loc_list **slot, int) |
| 29106 | { |
| 29107 | var_loc_list *entry = *slot; |
| 29108 | struct var_loc_node *node; |
| 29109 | |
| 29110 | node = entry->first; |
| 29111 | if (node && node->next && node->next->label) |
| 29112 | { |
| 29113 | unsigned int i; |
| 29114 | const char *label = node->next->label; |
| 29115 | char loclabel[MAX_ARTIFICIAL_LABEL_BYTES]; |
| 29116 | |
| 29117 | for (i = 0; i < first_loclabel_num_not_at_text_label; i++) |
| 29118 | { |
| 29119 | ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL" , i); |
| 29120 | if (strcmp (s1: label, s2: loclabel) == 0) |
| 29121 | { |
| 29122 | have_multiple_function_sections = true; |
| 29123 | return 0; |
| 29124 | } |
| 29125 | } |
| 29126 | } |
| 29127 | return 1; |
| 29128 | } |
| 29129 | |
| 29130 | /* Hook called after emitting a function into assembly. |
| 29131 | This does something only for the very first function emitted. */ |
| 29132 | |
| 29133 | static void |
| 29134 | dwarf2out_end_function (unsigned int) |
| 29135 | { |
| 29136 | if (in_first_function_p |
| 29137 | && !have_multiple_function_sections |
| 29138 | && first_loclabel_num_not_at_text_label |
| 29139 | && decl_loc_table) |
| 29140 | decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (argument: 0); |
| 29141 | in_first_function_p = false; |
| 29142 | maybe_at_text_label_p = false; |
| 29143 | } |
| 29144 | |
| 29145 | /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let |
| 29146 | front-ends register a translation unit even before dwarf2out_init is |
| 29147 | called. */ |
| 29148 | static tree main_translation_unit = NULL_TREE; |
| 29149 | |
| 29150 | /* Hook called by front-ends after they built their main translation unit. |
| 29151 | Associate comp_unit_die to UNIT. */ |
| 29152 | |
| 29153 | static void |
| 29154 | dwarf2out_register_main_translation_unit (tree unit) |
| 29155 | { |
| 29156 | gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL |
| 29157 | && main_translation_unit == NULL_TREE); |
| 29158 | main_translation_unit = unit; |
| 29159 | /* If dwarf2out_init has not been called yet, it will perform the association |
| 29160 | itself looking at main_translation_unit. */ |
| 29161 | if (decl_die_table != NULL) |
| 29162 | equate_decl_number_to_die (decl: unit, decl_die: comp_unit_die ()); |
| 29163 | } |
| 29164 | |
| 29165 | /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */ |
| 29166 | |
| 29167 | static void |
| 29168 | push_dw_line_info_entry (dw_line_info_table *table, |
| 29169 | enum dw_line_info_opcode opcode, unsigned int val) |
| 29170 | { |
| 29171 | dw_line_info_entry e; |
| 29172 | e.opcode = opcode; |
| 29173 | e.val = val; |
| 29174 | vec_safe_push (v&: table->entries, obj: e); |
| 29175 | } |
| 29176 | |
| 29177 | /* Output a label to mark the beginning of a source code line entry |
| 29178 | and record information relating to this source line, in |
| 29179 | 'line_info_table' for later output of the .debug_line section. */ |
| 29180 | /* ??? The discriminator parameter ought to be unsigned. */ |
| 29181 | |
| 29182 | static void |
| 29183 | dwarf2out_source_line (unsigned int line, unsigned int column, |
| 29184 | const char *filename, |
| 29185 | int discriminator, bool is_stmt) |
| 29186 | { |
| 29187 | unsigned int file_num; |
| 29188 | dw_line_info_table *table; |
| 29189 | static var_loc_view lvugid; |
| 29190 | |
| 29191 | #ifdef CODEVIEW_DEBUGGING_INFO |
| 29192 | if (codeview_debuginfo_p ()) |
| 29193 | codeview_source_line (line, filename); |
| 29194 | #endif |
| 29195 | |
| 29196 | /* 'line_info_table' information gathering is not needed when the debug |
| 29197 | info level is set to the lowest value. Also, the current DWARF-based |
| 29198 | debug formats do not use this info. */ |
| 29199 | if (debug_info_level < DINFO_LEVEL_TERSE || !dwarf_debuginfo_p ()) |
| 29200 | return; |
| 29201 | |
| 29202 | table = cur_line_info_table; |
| 29203 | |
| 29204 | if (line == 0) |
| 29205 | { |
| 29206 | if (debug_variable_location_views |
| 29207 | && output_asm_line_debug_info () |
| 29208 | && table && !RESETTING_VIEW_P (table->view)) |
| 29209 | { |
| 29210 | /* If we're using the assembler to compute view numbers, we |
| 29211 | can't issue a .loc directive for line zero, so we can't |
| 29212 | get a view number at this point. We might attempt to |
| 29213 | compute it from the previous view, or equate it to a |
| 29214 | subsequent view (though it might not be there!), but |
| 29215 | since we're omitting the line number entry, we might as |
| 29216 | well omit the view number as well. That means pretending |
| 29217 | it's a view number zero, which might very well turn out |
| 29218 | to be correct. ??? Extend the assembler so that the |
| 29219 | compiler could emit e.g. ".locview .LVU#", to output a |
| 29220 | view without changing line number information. We'd then |
| 29221 | have to count it in symviews_since_reset; when it's omitted, |
| 29222 | it doesn't count. */ |
| 29223 | if (!zero_view_p) |
| 29224 | zero_view_p = BITMAP_GGC_ALLOC (); |
| 29225 | bitmap_set_bit (zero_view_p, table->view); |
| 29226 | if (flag_debug_asm) |
| 29227 | { |
| 29228 | char label[MAX_ARTIFICIAL_LABEL_BYTES]; |
| 29229 | ASM_GENERATE_INTERNAL_LABEL (label, "LVU" , table->view); |
| 29230 | fprintf (stream: asm_out_file, format: "\t%s line 0, omitted view " , |
| 29231 | ASM_COMMENT_START); |
| 29232 | assemble_name (asm_out_file, label); |
| 29233 | putc (c: '\n', stream: asm_out_file); |
| 29234 | } |
| 29235 | table->view = ++lvugid; |
| 29236 | } |
| 29237 | return; |
| 29238 | } |
| 29239 | |
| 29240 | /* The discriminator column was added in dwarf4. Simplify the below |
| 29241 | by simply removing it if we're not supposed to output it. */ |
| 29242 | if (dwarf_version < 4 && dwarf_strict) |
| 29243 | discriminator = 0; |
| 29244 | |
| 29245 | if (!debug_column_info) |
| 29246 | column = 0; |
| 29247 | |
| 29248 | file_num = maybe_emit_file (fd: lookup_filename (file_name: filename)); |
| 29249 | |
| 29250 | /* ??? TODO: Elide duplicate line number entries. Traditionally, |
| 29251 | the debugger has used the second (possibly duplicate) line number |
| 29252 | at the beginning of the function to mark the end of the prologue. |
| 29253 | We could eliminate any other duplicates within the function. For |
| 29254 | Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in |
| 29255 | that second line number entry. */ |
| 29256 | /* Recall that this end-of-prologue indication is *not* the same thing |
| 29257 | as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note, |
| 29258 | to which the hook corresponds, follows the last insn that was |
| 29259 | emitted by gen_prologue. What we need is to precede the first insn |
| 29260 | that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first |
| 29261 | insn that corresponds to something the user wrote. These may be |
| 29262 | very different locations once scheduling is enabled. */ |
| 29263 | |
| 29264 | if (0 && file_num == table->file_num |
| 29265 | && line == table->line_num |
| 29266 | && column == table->column_num |
| 29267 | && discriminator == table->discrim_num |
| 29268 | && is_stmt == table->is_stmt) |
| 29269 | return; |
| 29270 | |
| 29271 | switch_to_section (current_function_section ()); |
| 29272 | |
| 29273 | /* If requested, emit something human-readable. */ |
| 29274 | if (flag_debug_asm) |
| 29275 | { |
| 29276 | if (debug_column_info) |
| 29277 | fprintf (stream: asm_out_file, format: "\t%s %s:%d:%d\n" , ASM_COMMENT_START, |
| 29278 | filename, line, column); |
| 29279 | else |
| 29280 | fprintf (stream: asm_out_file, format: "\t%s %s:%d\n" , ASM_COMMENT_START, |
| 29281 | filename, line); |
| 29282 | } |
| 29283 | |
| 29284 | if (output_asm_line_debug_info ()) |
| 29285 | { |
| 29286 | /* Emit the .loc directive understood by GNU as. */ |
| 29287 | /* "\t.loc %u %u 0 is_stmt %u discriminator %u", |
| 29288 | file_num, line, is_stmt, discriminator */ |
| 29289 | fputs (s: "\t.loc " , stream: asm_out_file); |
| 29290 | fprint_ul (asm_out_file, file_num); |
| 29291 | putc (c: ' ', stream: asm_out_file); |
| 29292 | fprint_ul (asm_out_file, line); |
| 29293 | putc (c: ' ', stream: asm_out_file); |
| 29294 | fprint_ul (asm_out_file, column); |
| 29295 | |
| 29296 | if (is_stmt != table->is_stmt) |
| 29297 | { |
| 29298 | #if HAVE_GAS_LOC_STMT |
| 29299 | fputs (s: " is_stmt " , stream: asm_out_file); |
| 29300 | putc (c: is_stmt ? '1' : '0', stream: asm_out_file); |
| 29301 | #endif |
| 29302 | } |
| 29303 | if (SUPPORTS_DISCRIMINATOR && discriminator != 0) |
| 29304 | { |
| 29305 | gcc_assert (discriminator > 0); |
| 29306 | fputs (s: " discriminator " , stream: asm_out_file); |
| 29307 | fprint_ul (asm_out_file, (unsigned long) discriminator); |
| 29308 | } |
| 29309 | if (debug_variable_location_views) |
| 29310 | { |
| 29311 | if (!RESETTING_VIEW_P (table->view)) |
| 29312 | { |
| 29313 | table->symviews_since_reset++; |
| 29314 | if (table->symviews_since_reset > symview_upper_bound) |
| 29315 | symview_upper_bound = table->symviews_since_reset; |
| 29316 | /* When we're using the assembler to compute view |
| 29317 | numbers, we output symbolic labels after "view" in |
| 29318 | .loc directives, and the assembler will set them for |
| 29319 | us, so that we can refer to the view numbers in |
| 29320 | location lists. The only exceptions are when we know |
| 29321 | a view will be zero: "-0" is a forced reset, used |
| 29322 | e.g. in the beginning of functions, whereas "0" tells |
| 29323 | the assembler to check that there was a PC change |
| 29324 | since the previous view, in a way that implicitly |
| 29325 | resets the next view. */ |
| 29326 | fputs (s: " view " , stream: asm_out_file); |
| 29327 | char label[MAX_ARTIFICIAL_LABEL_BYTES]; |
| 29328 | ASM_GENERATE_INTERNAL_LABEL (label, "LVU" , table->view); |
| 29329 | assemble_name (asm_out_file, label); |
| 29330 | table->view = ++lvugid; |
| 29331 | } |
| 29332 | else |
| 29333 | { |
| 29334 | table->symviews_since_reset = 0; |
| 29335 | if (FORCE_RESETTING_VIEW_P (table->view)) |
| 29336 | fputs (s: " view -0" , stream: asm_out_file); |
| 29337 | else |
| 29338 | fputs (s: " view 0" , stream: asm_out_file); |
| 29339 | /* Mark the present view as a zero view. Earlier debug |
| 29340 | binds may have already added its id to loclists to be |
| 29341 | emitted later, so we can't reuse the id for something |
| 29342 | else. However, it's good to know whether a view is |
| 29343 | known to be zero, because then we may be able to |
| 29344 | optimize out locviews that are all zeros, so take |
| 29345 | note of it in zero_view_p. */ |
| 29346 | if (!zero_view_p) |
| 29347 | zero_view_p = BITMAP_GGC_ALLOC (); |
| 29348 | bitmap_set_bit (zero_view_p, lvugid); |
| 29349 | table->view = ++lvugid; |
| 29350 | } |
| 29351 | } |
| 29352 | putc (c: '\n', stream: asm_out_file); |
| 29353 | } |
| 29354 | else |
| 29355 | { |
| 29356 | unsigned int label_num = ++line_info_label_num; |
| 29357 | |
| 29358 | targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num); |
| 29359 | |
| 29360 | if (debug_variable_location_views && !RESETTING_VIEW_P (table->view)) |
| 29361 | push_dw_line_info_entry (table, opcode: LI_adv_address, val: label_num); |
| 29362 | else |
| 29363 | push_dw_line_info_entry (table, opcode: LI_set_address, val: label_num); |
| 29364 | if (debug_variable_location_views) |
| 29365 | { |
| 29366 | bool resetting = FORCE_RESETTING_VIEW_P (table->view); |
| 29367 | if (resetting) |
| 29368 | table->view = 0; |
| 29369 | |
| 29370 | if (flag_debug_asm) |
| 29371 | fprintf (stream: asm_out_file, format: "\t%s view %s%d\n" , |
| 29372 | ASM_COMMENT_START, |
| 29373 | resetting ? "-" : "" , |
| 29374 | table->view); |
| 29375 | |
| 29376 | table->view++; |
| 29377 | } |
| 29378 | if (file_num != table->file_num) |
| 29379 | push_dw_line_info_entry (table, opcode: LI_set_file, val: file_num); |
| 29380 | if (discriminator != table->discrim_num) |
| 29381 | push_dw_line_info_entry (table, opcode: LI_set_discriminator, val: discriminator); |
| 29382 | if (is_stmt != table->is_stmt) |
| 29383 | push_dw_line_info_entry (table, opcode: LI_negate_stmt, val: 0); |
| 29384 | push_dw_line_info_entry (table, opcode: LI_set_line, val: line); |
| 29385 | if (debug_column_info) |
| 29386 | push_dw_line_info_entry (table, opcode: LI_set_column, val: column); |
| 29387 | } |
| 29388 | |
| 29389 | table->file_num = file_num; |
| 29390 | table->line_num = line; |
| 29391 | table->column_num = column; |
| 29392 | table->discrim_num = discriminator; |
| 29393 | table->is_stmt = is_stmt; |
| 29394 | table->in_use = true; |
| 29395 | } |
| 29396 | |
| 29397 | /* Record a source file location for a DECL_IGNORED_P function. */ |
| 29398 | |
| 29399 | static void |
| 29400 | dwarf2out_set_ignored_loc (unsigned int line, unsigned int column, |
| 29401 | const char *filename) |
| 29402 | { |
| 29403 | dw_fde_ref fde = cfun->fde; |
| 29404 | |
| 29405 | fde->ignored_debug = false; |
| 29406 | set_cur_line_info_table (current_function_section ()); |
| 29407 | |
| 29408 | dwarf2out_source_line (line, column, filename, discriminator: 0, is_stmt: true); |
| 29409 | } |
| 29410 | |
| 29411 | /* Record the beginning of a new source file. */ |
| 29412 | |
| 29413 | static void |
| 29414 | dwarf2out_start_source_file (unsigned int lineno, const char *filename) |
| 29415 | { |
| 29416 | #ifdef CODEVIEW_DEBUGGING_INFO |
| 29417 | if (codeview_debuginfo_p ()) |
| 29418 | codeview_start_source_file (filename); |
| 29419 | #endif |
| 29420 | |
| 29421 | if (debug_info_level >= DINFO_LEVEL_VERBOSE) |
| 29422 | { |
| 29423 | macinfo_entry e; |
| 29424 | e.code = DW_MACINFO_start_file; |
| 29425 | e.lineno = lineno; |
| 29426 | e.info = ggc_strdup (filename); |
| 29427 | vec_safe_push (v&: macinfo_table, obj: e); |
| 29428 | } |
| 29429 | } |
| 29430 | |
| 29431 | /* Record the end of a source file. */ |
| 29432 | |
| 29433 | static void |
| 29434 | dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED) |
| 29435 | { |
| 29436 | if (debug_info_level >= DINFO_LEVEL_VERBOSE) |
| 29437 | { |
| 29438 | macinfo_entry e; |
| 29439 | e.code = DW_MACINFO_end_file; |
| 29440 | e.lineno = lineno; |
| 29441 | e.info = NULL; |
| 29442 | vec_safe_push (v&: macinfo_table, obj: e); |
| 29443 | } |
| 29444 | } |
| 29445 | |
| 29446 | /* Called from debug_define in toplev.cc. The `buffer' parameter contains |
| 29447 | the tail part of the directive line, i.e. the part which is past the |
| 29448 | initial whitespace, #, whitespace, directive-name, whitespace part. */ |
| 29449 | |
| 29450 | static void |
| 29451 | dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED, |
| 29452 | const char *buffer ATTRIBUTE_UNUSED) |
| 29453 | { |
| 29454 | if (debug_info_level >= DINFO_LEVEL_VERBOSE) |
| 29455 | { |
| 29456 | macinfo_entry e; |
| 29457 | /* Insert a dummy first entry to be able to optimize the whole |
| 29458 | predefined macro block using DW_MACRO_import. */ |
| 29459 | if (macinfo_table->is_empty () && lineno <= 1) |
| 29460 | { |
| 29461 | e.code = 0; |
| 29462 | e.lineno = 0; |
| 29463 | e.info = NULL; |
| 29464 | vec_safe_push (v&: macinfo_table, obj: e); |
| 29465 | } |
| 29466 | e.code = DW_MACINFO_define; |
| 29467 | e.lineno = lineno; |
| 29468 | e.info = ggc_strdup (buffer); |
| 29469 | vec_safe_push (v&: macinfo_table, obj: e); |
| 29470 | } |
| 29471 | } |
| 29472 | |
| 29473 | /* Called from debug_undef in toplev.cc. The `buffer' parameter contains |
| 29474 | the tail part of the directive line, i.e. the part which is past the |
| 29475 | initial whitespace, #, whitespace, directive-name, whitespace part. */ |
| 29476 | |
| 29477 | static void |
| 29478 | dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED, |
| 29479 | const char *buffer ATTRIBUTE_UNUSED) |
| 29480 | { |
| 29481 | if (debug_info_level >= DINFO_LEVEL_VERBOSE) |
| 29482 | { |
| 29483 | macinfo_entry e; |
| 29484 | /* Insert a dummy first entry to be able to optimize the whole |
| 29485 | predefined macro block using DW_MACRO_import. */ |
| 29486 | if (macinfo_table->is_empty () && lineno <= 1) |
| 29487 | { |
| 29488 | e.code = 0; |
| 29489 | e.lineno = 0; |
| 29490 | e.info = NULL; |
| 29491 | vec_safe_push (v&: macinfo_table, obj: e); |
| 29492 | } |
| 29493 | e.code = DW_MACINFO_undef; |
| 29494 | e.lineno = lineno; |
| 29495 | e.info = ggc_strdup (buffer); |
| 29496 | vec_safe_push (v&: macinfo_table, obj: e); |
| 29497 | } |
| 29498 | } |
| 29499 | |
/* Helpers to manipulate the hash table of macinfo entries, used to
   deduplicate DW_MACRO_import groups; entries hash and compare by
   their INFO string.  (The pointed-to entries live in macinfo_table,
   hence nofree.)  */

struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
{
  static inline hashval_t hash (const macinfo_entry *);
  static inline bool equal (const macinfo_entry *, const macinfo_entry *);
};
| 29507 | |
| 29508 | inline hashval_t |
| 29509 | macinfo_entry_hasher::hash (const macinfo_entry *entry) |
| 29510 | { |
| 29511 | return htab_hash_string (entry->info); |
| 29512 | } |
| 29513 | |
| 29514 | inline bool |
| 29515 | macinfo_entry_hasher::equal (const macinfo_entry *entry1, |
| 29516 | const macinfo_entry *entry2) |
| 29517 | { |
| 29518 | return !strcmp (s1: entry1->info, s2: entry2->info); |
| 29519 | } |
| 29520 | |
/* Hash table keyed by group-name strings, used to deduplicate
   DW_MACRO_import entries.  */
typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
| 29522 | |
/* Output a single .debug_macinfo entry REF.  Define/undef ops whose
   string is long enough may be rewritten in place to their indirect
   string forms (strp/strx), in which case this function recurses once
   with the updated code.  */

static void
output_macinfo_op (macinfo_entry *ref)
{
  int file_num;
  size_t len;
  struct indirect_string_node *node;
  char label[MAX_ARTIFICIAL_LABEL_BYTES];
  struct dwarf_file_data *fd;

  switch (ref->code)
    {
    case DW_MACINFO_start_file:
      fd = lookup_filename (file_name: ref->info);
      file_num = maybe_emit_file (fd);
      dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file" );
      dw2_asm_output_data_uleb128 (ref->lineno,
				   "Included from line number "
				   HOST_WIDE_INT_PRINT_UNSIGNED,
				   ref->lineno);
      dw2_asm_output_data_uleb128 (file_num, "file %s" , ref->info);
      break;
    case DW_MACINFO_end_file:
      dw2_asm_output_data (1, DW_MACINFO_end_file, "End file" );
      break;
    case DW_MACINFO_define:
    case DW_MACINFO_undef:
      len = strlen (s: ref->info) + 1;
      /* Switch to an indirect string form when the string is longer
	 than an inline offset would be, the target supports indirect
	 strings, and the string section can be merged by the linker.
	 Not done for strict pre-DWARF5.  */
      if ((!dwarf_strict || dwarf_version >= 5)
	  && len > (size_t) dwarf_offset_size
	  && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
	  && (debug_str_section->common.flags & SECTION_MERGE) != 0)
	{
	  if (dwarf_split_debug_info)
	    ref->code = ref->code == DW_MACINFO_define
			? DW_MACRO_define_strx : DW_MACRO_undef_strx;
	  else
	    ref->code = ref->code == DW_MACINFO_define
			? DW_MACRO_define_strp : DW_MACRO_undef_strp;
	  /* Re-dispatch with the rewritten code.  */
	  output_macinfo_op (ref);
	  return;
	}
      dw2_asm_output_data (1, ref->code,
			   ref->code == DW_MACINFO_define
			   ? "Define macro" : "Undefine macro" );
      dw2_asm_output_data_uleb128 (ref->lineno,
				   "At line number "
				   HOST_WIDE_INT_PRINT_UNSIGNED,
				   ref->lineno);
      dw2_asm_output_nstring (ref->info, -1, "The macro" );
      break;
    case DW_MACRO_define_strp:
      dw2_asm_output_data (1, ref->code, "Define macro strp" );
      goto do_DW_MACRO_define_strpx;
    case DW_MACRO_undef_strp:
      dw2_asm_output_data (1, ref->code, "Undefine macro strp" );
      goto do_DW_MACRO_define_strpx;
    case DW_MACRO_define_strx:
      dw2_asm_output_data (1, ref->code, "Define macro strx" );
      goto do_DW_MACRO_define_strpx;
    case DW_MACRO_undef_strx:
      dw2_asm_output_data (1, ref->code, "Undefine macro strx" );
      /* FALLTHRU */
    do_DW_MACRO_define_strpx:
      /* NB: dwarf2out_finish performs:
	   1. save_macinfo_strings
	   2. hash table traverse of index_string
	   3. output_macinfo -> output_macinfo_op
	   4. output_indirect_strings
	      -> hash table traverse of output_index_string

	 When output_macinfo_op is called, all index strings have been
	 added to hash table by save_macinfo_strings and we can't pass
	 INSERT to find_slot_with_hash which may expand hash table, even
	 if no insertion is needed, and change hash table traverse order
	 between index_string and output_index_string.  */
      node = find_AT_string (str: ref->info, insert: NO_INSERT);
      gcc_assert (node
		  && (node->form == DW_FORM_strp
		      || node->form == dwarf_FORM (DW_FORM_strx)));
      dw2_asm_output_data_uleb128 (ref->lineno,
				   "At line number "
				   HOST_WIDE_INT_PRINT_UNSIGNED,
				   ref->lineno);
      if (node->form == DW_FORM_strp)
	{
	  gcc_assert (ref->code == DW_MACRO_define_strp
		      || ref->code == DW_MACRO_undef_strp);
	  dw2_asm_output_offset (dwarf_offset_size, node->label,
				 debug_str_section, "The macro: \"%s\"" ,
				 ref->info);
	}
      else
	{
	  gcc_assert (ref->code == DW_MACRO_define_strx
		      || ref->code == DW_MACRO_undef_strx);
	  dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"" ,
				       ref->info);
	}
      break;
    case DW_MACRO_import:
      dw2_asm_output_data (1, ref->code, "Import" );
      ASM_GENERATE_INTERNAL_LABEL (label,
				   DEBUG_MACRO_SECTION_LABEL,
				   ref->lineno + macinfo_label_base);
      dw2_asm_output_offset (dwarf_offset_size, label, NULL, NULL);
      break;
    default:
      /* Unknown op: emit only an assembler comment so the output stays
	 assemblable.  */
      fprintf (stream: asm_out_file, format: "%s unrecognized macinfo code %lu\n" ,
	       ASM_COMMENT_START, (unsigned long) ref->code);
      break;
    }
}
| 29637 | |
/* Attempt to make a sequence of define/undef macinfo ops shareable with
   other compilation unit .debug_macinfo sections.  IDX is the first
   index of a define/undef, return the number of ops that should be
   emitted in a comdat .debug_macinfo section and emit
   a DW_MACRO_import entry referencing it.
   If the define/undef entry should be emitted normally, return 0.
   FILES is the stack of currently open DW_MACINFO_start_file entries;
   MACINFO_HTAB deduplicates already-emitted groups by name.  */

static unsigned
optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
			macinfo_hash_type **macinfo_htab)
{
  macinfo_entry *first, *second, *cur, *inc;
  char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
  unsigned char checksum[16];
  struct md5_ctx ctx;
  char *grp_name, *tail;
  const char *base;
  unsigned int i, count, encoded_filename_len, linebuf_len;
  macinfo_entry **slot;

  first = &(*macinfo_table)[idx];
  second = &(*macinfo_table)[idx + 1];

  /* Optimize only if there are at least two consecutive define/undef ops,
     and either all of them are before first DW_MACINFO_start_file
     with lineno {0,1} (i.e. predefined macro block), or all of them are
     in some included header file.  */
  if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
    return 0;
  if (vec_safe_is_empty (v: files))
    {
      if (first->lineno > 1 || second->lineno > 1)
	return 0;
    }
  else if (first->lineno == 0)
    return 0;

  /* Find the last define/undef entry that can be grouped together
     with first and at the same time compute md5 checksum of their
     codes, linenumbers and strings.  */
  md5_init_ctx (ctx: &ctx);
  for (i = idx; macinfo_table->iterate (ix: i, ptr: &cur); i++)
    if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
      break;
    else if (vec_safe_is_empty (v: files) && cur->lineno > 1)
      break;
    else
      {
	unsigned char code = cur->code;
	md5_process_bytes (buffer: &code, len: 1, ctx: &ctx);
	checksum_uleb128 (value: cur->lineno, ctx: &ctx);
	md5_process_bytes (buffer: cur->info, len: strlen (s: cur->info) + 1, ctx: &ctx);
      }
  md5_finish_ctx (ctx: &ctx, resbuf: checksum);
  count = i - idx;

  /* From the containing include filename (if any) pick up just
     usable characters from its basename.  */
  if (vec_safe_is_empty (v: files))
    base = "" ;
  else
    base = lbasename (files->last ().info);
  for (encoded_filename_len = 0, i = 0; base[i]; i++)
    if (ISIDNUM (base[i]) || base[i] == '.')
      encoded_filename_len++;
  /* Count . at the end.  */
  if (encoded_filename_len)
    encoded_filename_len++;

  sprintf (s: linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
  linebuf_len = strlen (s: linebuf);

  /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
  grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
			 + 16 * 2 + 1);
  memcpy (dest: grp_name, dwarf_offset_size == 4 ? "wm4." : "wm8." , n: 4);
  tail = grp_name + 4;
  if (encoded_filename_len)
    {
      for (i = 0; base[i]; i++)
	if (ISIDNUM (base[i]) || base[i] == '.')
	  *tail++ = base[i];
      *tail++ = '.';
    }
  memcpy (dest: tail, src: linebuf, n: linebuf_len);
  tail += linebuf_len;
  *tail++ = '.';
  for (i = 0; i < 16; i++)
    sprintf (s: tail + i * 2, format: "%02x" , checksum[i] & 0xff);

  /* Construct a macinfo_entry for DW_MACRO_import
     in the empty vector entry before the first define/undef.  */
  inc = &(*macinfo_table)[idx - 1];
  inc->code = DW_MACRO_import;
  inc->lineno = 0;
  inc->info = ggc_strdup (grp_name);
  if (!*macinfo_htab)
    *macinfo_htab = new macinfo_hash_type (10);
  /* Avoid emitting duplicates.  */
  slot = (*macinfo_htab)->find_slot (value: inc, insert: INSERT);
  if (*slot != NULL)
    {
      inc->code = 0;
      inc->info = NULL;
      /* If such an entry has been used before, just emit
	 a DW_MACRO_import op.  */
      inc = *slot;
      output_macinfo_op (ref: inc);
      /* And clear all macinfo_entry in the range to avoid emitting them
	 in the second pass.  */
      for (i = idx; macinfo_table->iterate (ix: i, ptr: &cur) && i < idx + count; i++)
	{
	  cur->code = 0;
	  cur->info = NULL;
	}
    }
  else
    {
      /* First use of this group: record its ordinal in lineno (used to
	 generate the comdat section's label in the second pass) and
	 emit the import op referencing it.  */
      *slot = inc;
      inc->lineno = (*macinfo_htab)->elements ();
      output_macinfo_op (ref: inc);
    }
  return count;
}
| 29762 | |
/* Save any strings needed by the macinfo table in the debug str
   table.  All strings must be collected into the table by the time
   index_string is called.  The conditions here must stay in sync with
   output_macinfo_op's decision to use indirect strings.  */

static void
save_macinfo_strings (void)
{
  unsigned len;
  unsigned i;
  macinfo_entry *ref;

  for (i = 0; macinfo_table && macinfo_table->iterate (ix: i, ptr: &ref); i++)
    {
      switch (ref->code)
	{
	  /* Match the logic in output_macinfo_op to decide on
	     indirect strings.  */
	  case DW_MACINFO_define:
	  case DW_MACINFO_undef:
	    len = strlen (s: ref->info) + 1;
	    if ((!dwarf_strict || dwarf_version >= 5)
		&& len > (unsigned) dwarf_offset_size
		&& !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
		&& (debug_str_section->common.flags & SECTION_MERGE) != 0)
	      set_indirect_string (find_AT_string (str: ref->info));
	    break;
	  case DW_MACINFO_start_file:
	    /* -gsplit-dwarf -g3 will also output filename as indirect
	       string.  */
	    if (!dwarf_split_debug_info)
	      break;
	    /* Fall through. */
	  case DW_MACRO_define_strp:
	  case DW_MACRO_undef_strp:
	  case DW_MACRO_define_strx:
	  case DW_MACRO_undef_strx:
	    set_indirect_string (find_AT_string (str: ref->info));
	    break;
	  default:
	    break;
	}
    }
}
| 29806 | |
/* Output macinfo section(s).  DEBUG_LINE_LABEL is the label of the
   .debug_line section the .debug_macro header references;
   EARLY_LTO_DEBUG selects SECTION_EXCLUDE for the comdat sections
   emitted for early LTO debug info.  */

static void
output_macinfo (const char *debug_line_label, bool early_lto_debug)
{
  unsigned i;
  unsigned long length = vec_safe_length (v: macinfo_table);
  macinfo_entry *ref;
  vec<macinfo_entry, va_gc> *files = NULL;
  macinfo_hash_type *macinfo_htab = NULL;
  char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];

  if (! length)
    return;

  /* output_macinfo* uses these interchangeably.  */
  gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
	      && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
	      && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
	      && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);

  /* AIX Assembler inserts the length, so adjust the reference to match the
     offset expected by debuggers.  */
  strcpy (dest: dl_section_ref, src: debug_line_label);
  if (XCOFF_DEBUGGING_INFO)
    strcat (dest: dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);

  /* For .debug_macro emit the section header.  */
  if (!dwarf_strict || dwarf_version >= 5)
    {
      dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
			   "DWARF macro version number" );
      if (dwarf_offset_size == 8)
	dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present" );
      else
	dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present" );
      dw2_asm_output_offset (dwarf_offset_size, debug_line_label,
			     debug_line_section, NULL);
    }

  /* In the first loop, it emits the primary .debug_macinfo section
     and after each emitted op the macinfo_entry is cleared.
     If a longer range of define/undef ops can be optimized using
     DW_MACRO_import, the DW_MACRO_import op is emitted and kept in
     the vector before the first define/undef in the range and the
     whole range of define/undef ops is not emitted and kept.  */
  for (i = 0; macinfo_table->iterate (ix: i, ptr: &ref); i++)
    {
      switch (ref->code)
	{
	case DW_MACINFO_start_file:
	  vec_safe_push (v&: files, obj: *ref);
	  break;
	case DW_MACINFO_end_file:
	  if (!vec_safe_is_empty (v: files))
	    files->pop ();
	  break;
	case DW_MACINFO_define:
	case DW_MACINFO_undef:
	  /* Only try DW_MACRO_import grouping when a dummy slot exists
	     right before this op (see the (*macinfo_table)[i - 1]
	     check) for the import entry to be stored into.  */
	  if ((!dwarf_strict || dwarf_version >= 5)
	      && !dwarf_split_debug_info
	      && HAVE_COMDAT_GROUP
	      && vec_safe_length (v: files) != 1
	      && i > 0
	      && i + 1 < length
	      && (*macinfo_table)[i - 1].code == 0)
	    {
	      unsigned count = optimize_macinfo_range (idx: i, files, macinfo_htab: &macinfo_htab);
	      if (count)
		{
		  i += count - 1;
		  continue;
		}
	    }
	  break;
	case 0:
	  /* A dummy entry may be inserted at the beginning to be able
	     to optimize the whole block of predefined macros.  */
	  if (i == 0)
	    continue;
	  /* FALLTHRU */
	default:
	  break;
	}
      output_macinfo_op (ref);
      ref->info = NULL;
      ref->code = 0;
    }

  if (!macinfo_htab)
    return;

  /* Save the number of transparent includes so we can adjust the
     label number for the fat LTO object DWARF.  */
  unsigned macinfo_label_base_adj = macinfo_htab->elements ();

  delete macinfo_htab;
  macinfo_htab = NULL;

  /* If any DW_MACRO_import were used, on those DW_MACRO_import entries
     terminate the current chain and switch to a new comdat .debug_macinfo
     section and emit the define/undef entries within it.  */
  for (i = 0; macinfo_table->iterate (ix: i, ptr: &ref); i++)
    switch (ref->code)
      {
      case 0:
	continue;
      case DW_MACRO_import:
	{
	  char label[MAX_ARTIFICIAL_LABEL_BYTES];
	  tree comdat_key = get_identifier (ref->info);
	  /* Terminate the previous .debug_macinfo section.  */
	  dw2_asm_output_data (1, 0, "End compilation unit" );
	  targetm.asm_out.named_section (debug_macinfo_section_name,
					 SECTION_DEBUG
					 | SECTION_LINKONCE
					 | (early_lto_debug
					    ? SECTION_EXCLUDE : 0),
					 comdat_key);
	  ASM_GENERATE_INTERNAL_LABEL (label,
				       DEBUG_MACRO_SECTION_LABEL,
				       ref->lineno + macinfo_label_base);
	  ASM_OUTPUT_LABEL (asm_out_file, label);
	  ref->code = 0;
	  ref->info = NULL;
	  dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
			       "DWARF macro version number" );
	  if (dwarf_offset_size == 8)
	    dw2_asm_output_data (1, 1, "Flags: 64-bit" );
	  else
	    dw2_asm_output_data (1, 0, "Flags: 32-bit" );
	}
	break;
      case DW_MACINFO_define:
      case DW_MACINFO_undef:
	output_macinfo_op (ref);
	ref->code = 0;
	ref->info = NULL;
	break;
      default:
	gcc_unreachable ();
      }

  macinfo_label_base += macinfo_label_base_adj;
}
| 29951 | |
/* As init_sections_and_labels may get called multiple times, have a
   generation count for labels.  The value is appended to the internal
   section labels generated below so labels from distinct calls do not
   collide.  */
static unsigned init_sections_and_labels_generation;
| 29955 | |
| 29956 | /* Initialize the various sections and labels for dwarf output and prefix |
| 29957 | them with PREFIX if non-NULL. Returns the generation (zero based |
| 29958 | number of times function was called). */ |
| 29959 | |
| 29960 | static unsigned |
| 29961 | init_sections_and_labels (bool early_lto_debug) |
| 29962 | { |
| 29963 | if (early_lto_debug) |
| 29964 | { |
| 29965 | if (!dwarf_split_debug_info) |
| 29966 | { |
| 29967 | debug_info_section = get_section (DEBUG_LTO_INFO_SECTION, |
| 29968 | SECTION_DEBUG | SECTION_EXCLUDE, |
| 29969 | NULL); |
| 29970 | debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION, |
| 29971 | SECTION_DEBUG | SECTION_EXCLUDE, |
| 29972 | NULL); |
| 29973 | debug_macinfo_section_name |
| 29974 | = ((dwarf_strict && dwarf_version < 5) |
| 29975 | ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION); |
| 29976 | debug_macinfo_section = get_section (debug_macinfo_section_name, |
| 29977 | SECTION_DEBUG |
| 29978 | | SECTION_EXCLUDE, NULL); |
| 29979 | } |
| 29980 | else |
| 29981 | { |
| 29982 | /* ??? Which of the following do we need early? */ |
| 29983 | debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION, |
| 29984 | SECTION_DEBUG | SECTION_EXCLUDE, |
| 29985 | NULL); |
| 29986 | debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION, |
| 29987 | SECTION_DEBUG | SECTION_EXCLUDE, |
| 29988 | NULL); |
| 29989 | debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION, |
| 29990 | SECTION_DEBUG |
| 29991 | | SECTION_EXCLUDE, NULL); |
| 29992 | debug_skeleton_abbrev_section |
| 29993 | = get_section (DEBUG_LTO_ABBREV_SECTION, |
| 29994 | SECTION_DEBUG | SECTION_EXCLUDE, NULL); |
| 29995 | ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label, |
| 29996 | DEBUG_SKELETON_ABBREV_SECTION_LABEL, |
| 29997 | init_sections_and_labels_generation); |
| 29998 | |
| 29999 | /* Somewhat confusing detail: The skeleton_[abbrev|info] sections |
| 30000 | stay in the main .o, but the skeleton_line goes into the split |
| 30001 | off dwo. */ |
| 30002 | debug_skeleton_line_section |
| 30003 | = get_section (DEBUG_LTO_LINE_SECTION, |
| 30004 | SECTION_DEBUG | SECTION_EXCLUDE, NULL); |
| 30005 | ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label, |
| 30006 | DEBUG_SKELETON_LINE_SECTION_LABEL, |
| 30007 | init_sections_and_labels_generation); |
| 30008 | debug_str_offsets_section |
| 30009 | = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION, |
| 30010 | SECTION_DEBUG | SECTION_EXCLUDE, |
| 30011 | NULL); |
| 30012 | ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label, |
| 30013 | DEBUG_SKELETON_INFO_SECTION_LABEL, |
| 30014 | init_sections_and_labels_generation); |
| 30015 | debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION, |
| 30016 | DEBUG_STR_DWO_SECTION_FLAGS, |
| 30017 | NULL); |
| 30018 | debug_macinfo_section_name |
| 30019 | = ((dwarf_strict && dwarf_version < 5) |
| 30020 | ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION); |
| 30021 | debug_macinfo_section = get_section (debug_macinfo_section_name, |
| 30022 | SECTION_DEBUG | SECTION_EXCLUDE, |
| 30023 | NULL); |
| 30024 | } |
| 30025 | /* For macro info and the file table we have to refer to a |
| 30026 | debug_line section. */ |
| 30027 | debug_line_section = get_section (DEBUG_LTO_LINE_SECTION, |
| 30028 | SECTION_DEBUG | SECTION_EXCLUDE, NULL); |
| 30029 | ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label, |
| 30030 | DEBUG_LINE_SECTION_LABEL, |
| 30031 | init_sections_and_labels_generation); |
| 30032 | |
| 30033 | debug_str_section = get_section (DEBUG_LTO_STR_SECTION, |
| 30034 | DEBUG_STR_SECTION_FLAGS |
| 30035 | | SECTION_EXCLUDE, NULL); |
| 30036 | if (!dwarf_split_debug_info) |
| 30037 | debug_line_str_section |
| 30038 | = get_section (DEBUG_LTO_LINE_STR_SECTION, |
| 30039 | DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL); |
| 30040 | } |
| 30041 | else |
| 30042 | { |
| 30043 | if (!dwarf_split_debug_info) |
| 30044 | { |
| 30045 | debug_info_section = get_section (DEBUG_INFO_SECTION, |
| 30046 | SECTION_DEBUG, NULL); |
| 30047 | debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION, |
| 30048 | SECTION_DEBUG, NULL); |
| 30049 | debug_loc_section = get_section (dwarf_version >= 5 |
| 30050 | ? DEBUG_LOCLISTS_SECTION |
| 30051 | : DEBUG_LOC_SECTION, |
| 30052 | SECTION_DEBUG, NULL); |
| 30053 | debug_macinfo_section_name |
| 30054 | = ((dwarf_strict && dwarf_version < 5) |
| 30055 | ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION); |
| 30056 | debug_macinfo_section = get_section (debug_macinfo_section_name, |
| 30057 | SECTION_DEBUG, NULL); |
| 30058 | } |
| 30059 | else |
| 30060 | { |
| 30061 | debug_info_section = get_section (DEBUG_DWO_INFO_SECTION, |
| 30062 | SECTION_DEBUG | SECTION_EXCLUDE, |
| 30063 | NULL); |
| 30064 | debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION, |
| 30065 | SECTION_DEBUG | SECTION_EXCLUDE, |
| 30066 | NULL); |
| 30067 | debug_addr_section = get_section (DEBUG_ADDR_SECTION, |
| 30068 | SECTION_DEBUG, NULL); |
| 30069 | debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION, |
| 30070 | SECTION_DEBUG, NULL); |
| 30071 | debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION, |
| 30072 | SECTION_DEBUG, NULL); |
| 30073 | ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label, |
| 30074 | DEBUG_SKELETON_ABBREV_SECTION_LABEL, |
| 30075 | init_sections_and_labels_generation); |
| 30076 | |
| 30077 | /* Somewhat confusing detail: The skeleton_[abbrev|info] sections |
| 30078 | stay in the main .o, but the skeleton_line goes into the |
| 30079 | split off dwo. */ |
| 30080 | debug_skeleton_line_section |
| 30081 | = get_section (DEBUG_DWO_LINE_SECTION, |
| 30082 | SECTION_DEBUG | SECTION_EXCLUDE, NULL); |
| 30083 | ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label, |
| 30084 | DEBUG_SKELETON_LINE_SECTION_LABEL, |
| 30085 | init_sections_and_labels_generation); |
| 30086 | debug_str_offsets_section |
| 30087 | = get_section (DEBUG_DWO_STR_OFFSETS_SECTION, |
| 30088 | SECTION_DEBUG | SECTION_EXCLUDE, NULL); |
| 30089 | ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label, |
| 30090 | DEBUG_SKELETON_INFO_SECTION_LABEL, |
| 30091 | init_sections_and_labels_generation); |
| 30092 | debug_loc_section = get_section (dwarf_version >= 5 |
| 30093 | ? DEBUG_DWO_LOCLISTS_SECTION |
| 30094 | : DEBUG_DWO_LOC_SECTION, |
| 30095 | SECTION_DEBUG | SECTION_EXCLUDE, |
| 30096 | NULL); |
| 30097 | debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION, |
| 30098 | DEBUG_STR_DWO_SECTION_FLAGS, |
| 30099 | NULL); |
| 30100 | debug_macinfo_section_name |
| 30101 | = ((dwarf_strict && dwarf_version < 5) |
| 30102 | ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION); |
| 30103 | debug_macinfo_section = get_section (debug_macinfo_section_name, |
| 30104 | SECTION_DEBUG | SECTION_EXCLUDE, |
| 30105 | NULL); |
| 30106 | if (dwarf_version >= 5) |
| 30107 | debug_ranges_dwo_section |
| 30108 | = get_section (DEBUG_DWO_RNGLISTS_SECTION, |
| 30109 | SECTION_DEBUG | SECTION_EXCLUDE, NULL); |
| 30110 | } |
| 30111 | debug_aranges_section = get_section (DEBUG_ARANGES_SECTION, |
| 30112 | SECTION_DEBUG, NULL); |
| 30113 | debug_line_section = get_section (DEBUG_LINE_SECTION, |
| 30114 | SECTION_DEBUG, NULL); |
| 30115 | debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION, |
| 30116 | SECTION_DEBUG, NULL); |
| 30117 | debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION, |
| 30118 | SECTION_DEBUG, NULL); |
| 30119 | debug_str_section = get_section (DEBUG_STR_SECTION, |
| 30120 | DEBUG_STR_SECTION_FLAGS, NULL); |
| 30121 | if ((!dwarf_split_debug_info && !output_asm_line_debug_info ()) |
| 30122 | || asm_outputs_debug_line_str ()) |
| 30123 | debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION, |
| 30124 | DEBUG_STR_SECTION_FLAGS, NULL); |
| 30125 | |
| 30126 | debug_ranges_section = get_section (dwarf_version >= 5 |
| 30127 | ? DEBUG_RNGLISTS_SECTION |
| 30128 | : DEBUG_RANGES_SECTION, |
| 30129 | SECTION_DEBUG, NULL); |
| 30130 | debug_frame_section = get_section (DEBUG_FRAME_SECTION, |
| 30131 | SECTION_DEBUG, NULL); |
| 30132 | } |
| 30133 | |
| 30134 | ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label, |
| 30135 | DEBUG_ABBREV_SECTION_LABEL, |
| 30136 | init_sections_and_labels_generation); |
| 30137 | ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label, |
| 30138 | DEBUG_INFO_SECTION_LABEL, |
| 30139 | init_sections_and_labels_generation); |
| 30140 | info_section_emitted = false; |
| 30141 | ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label, |
| 30142 | DEBUG_LINE_SECTION_LABEL, |
| 30143 | init_sections_and_labels_generation); |
| 30144 | /* There are up to 6 unique ranges labels per generation. |
| 30145 | See also output_rnglists. */ |
| 30146 | ASM_GENERATE_INTERNAL_LABEL (ranges_section_label, |
| 30147 | DEBUG_RANGES_SECTION_LABEL, |
| 30148 | init_sections_and_labels_generation * 6); |
| 30149 | if (dwarf_version >= 5 && dwarf_split_debug_info) |
| 30150 | ASM_GENERATE_INTERNAL_LABEL (ranges_base_label, |
| 30151 | DEBUG_RANGES_SECTION_LABEL, |
| 30152 | 1 + init_sections_and_labels_generation * 6); |
| 30153 | ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label, |
| 30154 | DEBUG_ADDR_SECTION_LABEL, |
| 30155 | init_sections_and_labels_generation); |
| 30156 | ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label, |
| 30157 | (dwarf_strict && dwarf_version < 5) |
| 30158 | ? DEBUG_MACINFO_SECTION_LABEL |
| 30159 | : DEBUG_MACRO_SECTION_LABEL, |
| 30160 | init_sections_and_labels_generation); |
| 30161 | ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL, |
| 30162 | init_sections_and_labels_generation); |
| 30163 | |
| 30164 | ++init_sections_and_labels_generation; |
| 30165 | return init_sections_and_labels_generation - 1; |
| 30166 | } |
| 30167 | |
/* Set up for Dwarf output at the start of compilation.  Allocates the
   lazily-filled tables (file table, DIE lookup tables, abbrev table,
   pubnames/pubtypes and friends) that the rest of this file populates
   while DIEs and line info are generated.  FILENAME is unused here;
   file table entries are created on demand via lookup_filename.  */

static void
dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
{
  /* Allocate the file_table. */
  file_table = hash_table<dwarf_file_hasher>::create_ggc (n: 50);

#ifndef DWARF2_LINENO_DEBUGGING_INFO
  /* Allocate the decl_die_table. */
  decl_die_table = hash_table<decl_die_hasher>::create_ggc (n: 10);

  /* Allocate the decl_loc_table. */
  decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (n: 10);

  /* Allocate the cached_dw_loc_list_table. */
  cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (n: 10);

  /* Allocate the initial hunk of the abbrev_die_table. */
  vec_alloc (v&: abbrev_die_table, nelems: 256);
  /* Zero-th entry is allocated, but unused.  Abbrev numbers in DWARF
     start at 1, so keeping slot 0 empty lets the abbrev code index the
     vector directly by abbrev number.  */
  abbrev_die_table->quick_push (NULL);

  /* Allocate the dwarf_proc_stack_usage_map. */
  dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;

  /* Allocate the pubtypes and pubnames vectors. */
  vec_alloc (v&: pubname_table, nelems: 32);
  vec_alloc (v&: pubtype_table, nelems: 32);

  vec_alloc (v&: incomplete_types, nelems: 64);

  vec_alloc (v&: used_rtx_array, nelems: 32);

  /* The macro table is only needed when macro information was
     requested (-g3 and above).  */
  if (debug_info_level >= DINFO_LEVEL_VERBOSE)
    vec_alloc (v&: macinfo_table, nelems: 64);
#endif

  /* If front-ends already registered a main translation unit but we were not
     ready to perform the association, do this now. */
  if (main_translation_unit != NULL_TREE)
    equate_decl_number_to_die (decl: main_translation_unit, decl_die: comp_unit_die ());
}
| 30211 | |
/* Called before compile () starts outputting functions, variables
   and toplevel asms into assembly.  Emits everything that must precede
   generated code: the .text section labels, the default line number
   table, an optional .cfi_sections directive, and the "file 0" / dummy
   .file directives the assembler needs for its line table.  Idempotent:
   returns immediately once the text line table exists. */

static void
dwarf2out_assembly_start (void)
{
  /* Already done by an earlier call.  */
  if (text_section_line_info)
    return;

#ifndef DWARF2_LINENO_DEBUGGING_INFO
  ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
  ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
  ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
			       COLD_TEXT_SECTION_LABEL, 0);
  ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);

  switch_to_section (text_section);
  ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
#endif

  /* Make sure the line number table for .text always exists. */
  text_section_line_info = new_line_info_table ();
  text_section_line_info->end_label = text_end_label;

#ifdef DWARF2_LINENO_DEBUGGING_INFO
  cur_line_info_table = text_section_line_info;
#endif

  /* When emitting frame info via .cfi_* directives and .eh_frame is
     not wanted, tell the assembler to produce .debug_frame instead.  */
  if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
      && dwarf2out_do_cfi_asm ()
      && !dwarf2out_do_eh_frame ())
    fprintf (stream: asm_out_file, format: "\t.cfi_sections\t.debug_frame\n" );

#if defined(HAVE_AS_GDWARF_5_DEBUG_FLAG) && defined(HAVE_AS_WORKING_DWARF_N_FLAG)
  if (output_asm_line_debug_info () && dwarf_version >= 5)
    {
      /* When gas outputs DWARF5 .debug_line[_str] then we have to
	 tell it the comp_dir and main file name for the zero entry
	 line table. */
      const char *comp_dir, *filename0;

      comp_dir = comp_dir_string ();
      if (comp_dir == NULL)
	comp_dir = "" ;

      filename0 = get_AT_string (die: comp_unit_die (), attr_kind: DW_AT_name);
      if (filename0 == NULL)
	filename0 = "" ;

      fprintf (stream: asm_out_file, format: "\t.file 0 " );
      output_quoted_string (asm_out_file, remap_debug_filename (comp_dir));
      fputc (c: ' ', stream: asm_out_file);
      output_quoted_string (asm_out_file, remap_debug_filename (filename0));
      fputc (c: '\n', stream: asm_out_file);
    }
  else
#endif
  /* Work around for PR101575: output a dummy .file directive. */
  if (!last_emitted_file && dwarf_debuginfo_p ()
      && debug_info_level >= DINFO_LEVEL_TERSE)
    {
      const char *filename0 = get_AT_string (die: comp_unit_die (), attr_kind: DW_AT_name);

      /* Fall back to a placeholder if the CU has no recorded name.  */
      if (filename0 == NULL)
	filename0 = "<dummy>" ;
      maybe_emit_file (fd: lookup_filename (file_name: filename0));
    }
}
| 30280 | |
| 30281 | /* A helper function for dwarf2out_finish called through |
| 30282 | htab_traverse. Assign a string its index. All strings must be |
| 30283 | collected into the table by the time index_string is called, |
| 30284 | because the indexing code relies on htab_traverse to traverse nodes |
| 30285 | in the same order for each run. */ |
| 30286 | |
| 30287 | int |
| 30288 | index_string (indirect_string_node **h, unsigned int *index) |
| 30289 | { |
| 30290 | indirect_string_node *node = *h; |
| 30291 | |
| 30292 | find_string_form (node); |
| 30293 | if (node->form == dwarf_FORM (form: DW_FORM_strx) && node->refcount > 0) |
| 30294 | { |
| 30295 | gcc_assert (node->index == NO_INDEX_ASSIGNED); |
| 30296 | node->index = *index; |
| 30297 | *index += 1; |
| 30298 | } |
| 30299 | return 1; |
| 30300 | } |
| 30301 | |
| 30302 | /* A helper function for output_indirect_strings called through |
| 30303 | htab_traverse. Output the offset to a string and update the |
| 30304 | current offset. */ |
| 30305 | |
| 30306 | int |
| 30307 | output_index_string_offset (indirect_string_node **h, unsigned int *offset) |
| 30308 | { |
| 30309 | indirect_string_node *node = *h; |
| 30310 | |
| 30311 | if (node->form == dwarf_FORM (form: DW_FORM_strx) && node->refcount > 0) |
| 30312 | { |
| 30313 | /* Assert that this node has been assigned an index. */ |
| 30314 | gcc_assert (node->index != NO_INDEX_ASSIGNED |
| 30315 | && node->index != NOT_INDEXED); |
| 30316 | dw2_asm_output_data (dwarf_offset_size, *offset, |
| 30317 | "indexed string 0x%x: %s" , node->index, node->str); |
| 30318 | *offset += strlen (s: node->str) + 1; |
| 30319 | } |
| 30320 | return 1; |
| 30321 | } |
| 30322 | |
| 30323 | /* A helper function for dwarf2out_finish called through |
| 30324 | htab_traverse. Output the indexed string. */ |
| 30325 | |
| 30326 | int |
| 30327 | output_index_string (indirect_string_node **h, unsigned int *cur_idx) |
| 30328 | { |
| 30329 | struct indirect_string_node *node = *h; |
| 30330 | |
| 30331 | if (node->form == dwarf_FORM (form: DW_FORM_strx) && node->refcount > 0) |
| 30332 | { |
| 30333 | /* Assert that the strings are output in the same order as their |
| 30334 | indexes were assigned. */ |
| 30335 | gcc_assert (*cur_idx == node->index); |
| 30336 | assemble_string (node->str, strlen (s: node->str) + 1); |
| 30337 | *cur_idx += 1; |
| 30338 | } |
| 30339 | return 1; |
| 30340 | } |
| 30341 | |
| 30342 | /* A helper function for output_indirect_strings. Counts the number |
| 30343 | of index strings offsets. Must match the logic of the functions |
| 30344 | output_index_string[_offsets] above. */ |
| 30345 | int |
| 30346 | count_index_strings (indirect_string_node **h, unsigned int *last_idx) |
| 30347 | { |
| 30348 | struct indirect_string_node *node = *h; |
| 30349 | |
| 30350 | if (node->form == dwarf_FORM (form: DW_FORM_strx) && node->refcount > 0) |
| 30351 | *last_idx += 1; |
| 30352 | return 1; |
| 30353 | } |
| 30354 | |
| 30355 | /* A helper function for dwarf2out_finish called through |
| 30356 | htab_traverse. Emit one queued .debug_str string. */ |
| 30357 | |
| 30358 | int |
| 30359 | output_indirect_string (indirect_string_node **h, enum dwarf_form form) |
| 30360 | { |
| 30361 | struct indirect_string_node *node = *h; |
| 30362 | |
| 30363 | node->form = find_string_form (node); |
| 30364 | if (node->form == form && node->refcount > 0) |
| 30365 | { |
| 30366 | ASM_OUTPUT_LABEL (asm_out_file, node->label); |
| 30367 | assemble_string (node->str, strlen (s: node->str) + 1); |
| 30368 | } |
| 30369 | |
| 30370 | return 1; |
| 30371 | } |
| 30372 | |
/* Output the indexed string table.  Without -gsplit-dwarf this simply
   emits all queued DW_FORM_strp strings into .debug_str.  With split
   debug info it also emits the skeleton unit's strings, the
   .debug_str_offsets[.dwo] table (with a DWARF 5 unit header when
   applicable), and the indexed strings themselves.  */

static void
output_indirect_strings (void)
{
  switch_to_section (debug_str_section);
  if (!dwarf_split_debug_info)
    debug_str_hash->traverse<enum dwarf_form,
			     output_indirect_string> (argument: DW_FORM_strp);
  else
    {
      unsigned int offset = 0;
      unsigned int cur_idx = 0;

      /* Strings referenced from the skeleton unit stay in the main
	 object's .debug_str.  */
      if (skeleton_debug_str_hash)
        skeleton_debug_str_hash->traverse<enum dwarf_form,
					  output_indirect_string> (argument: DW_FORM_strp);

      switch_to_section (debug_str_offsets_section);
      /* For DWARF5 the .debug_str_offsets[.dwo] section needs a unit
	 header.  Note that we don't need to generate a label to the
	 actual index table following the header here, because this is
	 for the split dwarf case only.  In an .dwo file there is only
	 one string offsets table (and one debug info section).  But
	 if we would start using string offset tables for the main (or
	 skeleton) unit, then we have to add a DW_AT_str_offsets_base
	 pointing to the actual index after the header.  Split dwarf
	 units will never have a string offsets base attribute.  When
	 a split unit is moved into a .dwp file the string offsets can
	 be found through the .debug_cu_index section table. */
      if (dwarf_version >= 5)
	{
	  unsigned int last_idx = 0;
	  unsigned long str_offsets_length;

	  /* Unit length: one offset per indexed string plus the 4
	     bytes of version and padding emitted below.  */
	  debug_str_hash->traverse_noresize
	    <unsigned int *, count_index_strings> (argument: &last_idx);
	  str_offsets_length = last_idx * dwarf_offset_size + 4;
	  if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
	    dw2_asm_output_data (4, 0xffffffff,
				 "Escape value for 64-bit DWARF extension" );
	  dw2_asm_output_data (dwarf_offset_size, str_offsets_length,
			       "Length of string offsets unit" );
	  dw2_asm_output_data (2, 5, "DWARF string offsets version" );
	  dw2_asm_output_data (2, 0, "Header zero padding" );
	}
      debug_str_hash->traverse_noresize
	<unsigned int *, output_index_string_offset> (argument: &offset);
      switch_to_section (debug_str_dwo_section);
      debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
	(argument: &cur_idx);
    }
}
| 30426 | |
/* Callback for htab_traverse to assign an index to an entry in the
   table, and to write that entry to the .debug_addr section.  */

int
output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
{
  addr_table_entry *entry = *slot;

  /* Unreferenced entries never got an index; skip them.  */
  if (entry->refcount == 0)
    {
      gcc_assert (entry->index == NO_INDEX_ASSIGNED
                  || entry->index == NOT_INDEXED);
      return 1;
    }

  /* Entries must be emitted in exactly index order.  */
  gcc_assert (entry->index == *cur_index);
  (*cur_index)++;

  switch (entry->kind)
    {
    case ate_kind_rtx:
      dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
                               "0x%x" , entry->index);
      break;
    case ate_kind_rtx_dtprel:
      /* TLS addresses go through a target hook that emits them as
	 offsets relative to the thread-local storage block.  */
      gcc_assert (targetm.asm_out.output_dwarf_dtprel);
      targetm.asm_out.output_dwarf_dtprel (asm_out_file,
                                           DWARF2_ADDR_SIZE,
                                           entry->addr.rtl);
      fputc (c: '\n', stream: asm_out_file);
      break;
    case ate_kind_label:
      dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
                           "0x%x" , entry->index);
      break;
    default:
      gcc_unreachable ();
    }
  return 1;
}
| 30467 | |
| 30468 | /* A helper function for dwarf2out_finish. Counts the number |
| 30469 | of indexed addresses. Must match the logic of the functions |
| 30470 | output_addr_table_entry above. */ |
| 30471 | int |
| 30472 | count_index_addrs (addr_table_entry **slot, unsigned int *last_idx) |
| 30473 | { |
| 30474 | addr_table_entry *entry = *slot; |
| 30475 | |
| 30476 | if (entry->refcount > 0) |
| 30477 | *last_idx += 1; |
| 30478 | return 1; |
| 30479 | } |
| 30480 | |
/* Produce the .debug_addr section: an optional DWARF 5 unit header,
   the section label, and then one slot per indexed address in the
   order the indexes were assigned.  Does nothing when no addresses
   were collected.  */

static void
output_addr_table (void)
{
  unsigned int index = 0;
  if (addr_index_table == NULL || addr_index_table->size () == 0)
    return;

  switch_to_section (debug_addr_section);
  /* GNU DebugFission https://gcc.gnu.org/wiki/DebugFission
     which GCC uses to implement -gsplit-dwarf as DWARF GNU extension
     before DWARF5, didn't have a header for .debug_addr units.
     DWARF5 specifies a small header when address tables are used. */
  if (dwarf_version >= 5)
    {
      unsigned int last_idx = 0;
      unsigned long addrs_length;

      /* Unit length: one address per indexed entry plus the 4 bytes
	 of version, address size and segment-descriptor size fields
	 emitted below.  */
      addr_index_table->traverse_noresize
	<unsigned int *, count_index_addrs> (argument: &last_idx);
      addrs_length = last_idx * DWARF2_ADDR_SIZE + 4;

      if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
	dw2_asm_output_data (4, 0xffffffff,
			     "Escape value for 64-bit DWARF extension" );
      dw2_asm_output_data (dwarf_offset_size, addrs_length,
			   "Length of Address Unit" );
      dw2_asm_output_data (2, 5, "DWARF addr version" );
      dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address" );
      dw2_asm_output_data (1, 0, "Size of Segment Descriptor" );
    }
  ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);

  addr_index_table
    ->traverse_noresize<unsigned int *, output_addr_table_entry> (argument: &index);
}
| 30518 | |
| 30519 | #if ENABLE_ASSERT_CHECKING |
| 30520 | /* Verify that all marks are clear. */ |
| 30521 | |
| 30522 | static void |
| 30523 | verify_marks_clear (dw_die_ref die) |
| 30524 | { |
| 30525 | dw_die_ref c; |
| 30526 | |
| 30527 | gcc_assert (! die->die_mark); |
| 30528 | FOR_EACH_CHILD (die, c, verify_marks_clear (c)); |
| 30529 | } |
| 30530 | #endif /* ENABLE_ASSERT_CHECKING */ |
| 30531 | |
| 30532 | /* Clear the marks for a die and its children. |
| 30533 | Be cool if the mark isn't set. */ |
| 30534 | |
| 30535 | static void |
| 30536 | prune_unmark_dies (dw_die_ref die) |
| 30537 | { |
| 30538 | dw_die_ref c; |
| 30539 | |
| 30540 | if (die->die_mark) |
| 30541 | die->die_mark = 0; |
| 30542 | FOR_EACH_CHILD (die, c, prune_unmark_dies (c)); |
| 30543 | } |
| 30544 | |
/* Given LOC that is referenced by a DIE we're marking as used, find all
   referenced DWARF procedures it references and mark them as used. */

static void
prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
{
  /* Walk the whole expression chain, marking DIEs referenced by
     operands of the operations that can carry DIE references.  */
  for (; loc != NULL; loc = loc->dw_loc_next)
    switch (loc->dw_loc_opc)
      {
      case DW_OP_implicit_pointer:
      case DW_OP_convert:
      case DW_OP_reinterpret:
      case DW_OP_GNU_implicit_pointer:
      case DW_OP_GNU_convert:
      case DW_OP_GNU_reinterpret:
	/* These may carry a DIE reference in operand 1, but are
	   allowed to carry other value classes too.  */
	if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
	  prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
	break;
      case DW_OP_GNU_variable_value:
	/* A decl reference may still be unresolved; try to resolve it
	   to its DIE now, rewriting the operand in place so the later
	   assert (and output) sees a DIE reference.  */
	if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
	  {
	    dw_die_ref ref
	      = lookup_decl_die (decl: loc->dw_loc_oprnd1.v.val_decl_ref);
	    if (ref == NULL)
	      break;
	    loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
	    loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
	    loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
	  }
	/* FALLTHRU */
      case DW_OP_call2:
      case DW_OP_call4:
      case DW_OP_call_ref:
      case DW_OP_const_type:
      case DW_OP_GNU_const_type:
      case DW_OP_GNU_parameter_ref:
	/* Operand 1 is always a DIE reference here.  */
	gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
	prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
	break;
      case DW_OP_regval_type:
      case DW_OP_deref_type:
      case DW_OP_GNU_regval_type:
      case DW_OP_GNU_deref_type:
	/* For these the type DIE sits in operand 2.  */
	gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
	prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
	break;
      case DW_OP_entry_value:
      case DW_OP_GNU_entry_value:
	/* Operand 1 is a nested location expression; recurse.  */
	gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
	prune_unused_types_walk_loc_descr (loc: loc->dw_loc_oprnd1.v.val_loc);
	break;
      default:
	break;
      }
}
| 30600 | |
/* Given DIE that we're marking as used, find any other dies
   it references as attributes and mark them as used. */

static void
prune_unused_types_walk_attribs (dw_die_ref die)
{
  dw_attr_node *a;
  unsigned ix;

  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    {
      switch (AT_class (a))
	{
	/* Make sure DWARF procedures referenced by location descriptions will
	   get emitted. */
	case dw_val_class_loc:
	  prune_unused_types_walk_loc_descr (loc: AT_loc (a));
	  break;
	case dw_val_class_loc_list:
	  /* Walk the expression of every element of the location list.  */
	  for (dw_loc_list_ref list = AT_loc_list (a);
	       list != NULL;
	       list = list->dw_loc_next)
	    prune_unused_types_walk_loc_descr (loc: list->expr);
	  break;

	case dw_val_class_view_list:
	  /* This points to a loc_list in another attribute, so it's
	     already covered. */
	  break;

	case dw_val_class_die_ref:
	  /* A reference to another DIE.
	     Make sure that it will get emitted.
	     If it was broken out into a comdat group, don't follow it. */
	  if (! AT_ref (a)->comdat_type_p
	      || a->dw_attr == DW_AT_specification)
	    prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
	  break;

	case dw_val_class_str:
	  /* Set the string's refcount to 0 so that prune_unused_types_mark
	     accounts properly for it. */
	  a->dw_attr_val.v.val_str->refcount = 0;
	  break;

	default:
	  break;
	}
    }
}
| 30651 | |
| 30652 | /* Mark the generic parameters and arguments children DIEs of DIE. */ |
| 30653 | |
| 30654 | static void |
| 30655 | prune_unused_types_mark_generic_parms_dies (dw_die_ref die) |
| 30656 | { |
| 30657 | dw_die_ref c; |
| 30658 | |
| 30659 | if (die == NULL || die->die_child == NULL) |
| 30660 | return; |
| 30661 | c = die->die_child; |
| 30662 | do |
| 30663 | { |
| 30664 | if (is_template_parameter (die: c)) |
| 30665 | prune_unused_types_mark (c, 1); |
| 30666 | c = c->die_sib; |
| 30667 | } while (c && c != die->die_child); |
| 30668 | } |
| 30669 | |
/* Mark DIE as being used.  If DOKIDS is true, then walk down
   to DIE's children.  Marks use a three-state protocol: 0 = unvisited,
   1 = marked but children not yet walked, 2 = marked and children
   walked, so revisits terminate early.  */

static void
prune_unused_types_mark (dw_die_ref die, int dokids)
{
  dw_die_ref c;

  if (die->die_mark == 0)
    {
      /* We haven't done this node yet.  Mark it as used. */
      die->die_mark = 1;
      /* If this is the DIE of a generic type instantiation,
	 mark the children DIEs that describe its generic parms and
	 args. */
      prune_unused_types_mark_generic_parms_dies (die);

      /* We also have to mark its parents as used.
	 (But we don't want to mark our parent's kids due to this,
	 unless it is a class.) */
      if (die->die_parent)
	prune_unused_types_mark (die: die->die_parent,
				 dokids: class_scope_p (context_die: die->die_parent));

      /* Mark any referenced nodes. */
      prune_unused_types_walk_attribs (die);

      /* If this node is a specification,
	 also mark the definition, if it exists. */
      if (get_AT_flag (die, attr_kind: DW_AT_declaration) && die->die_definition)
	prune_unused_types_mark (die: die->die_definition, dokids: 1);
    }

  if (dokids && die->die_mark != 2)
    {
      /* We need to walk the children, but haven't done so yet.
	 Remember that we've walked the kids. */
      die->die_mark = 2;

      /* If this is an array type, we need to make sure our
	 kids get marked, even if they're types.  If we're
	 breaking out types into comdat sections, do this
	 for all type definitions. */
      if (die->die_tag == DW_TAG_array_type
	  || (use_debug_types
	      && is_type_die (die) && ! is_declaration_die (die)))
	FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
      else
	FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
    }
}
| 30721 | |
| 30722 | /* For local classes, look if any static member functions were emitted |
| 30723 | and if so, mark them. */ |
| 30724 | |
| 30725 | static void |
| 30726 | prune_unused_types_walk_local_classes (dw_die_ref die) |
| 30727 | { |
| 30728 | dw_die_ref c; |
| 30729 | |
| 30730 | if (die->die_mark == 2) |
| 30731 | return; |
| 30732 | |
| 30733 | switch (die->die_tag) |
| 30734 | { |
| 30735 | case DW_TAG_structure_type: |
| 30736 | case DW_TAG_union_type: |
| 30737 | case DW_TAG_class_type: |
| 30738 | case DW_TAG_interface_type: |
| 30739 | break; |
| 30740 | |
| 30741 | case DW_TAG_subprogram: |
| 30742 | if (!get_AT_flag (die, attr_kind: DW_AT_declaration) |
| 30743 | || die->die_definition != NULL) |
| 30744 | prune_unused_types_mark (die, dokids: 1); |
| 30745 | return; |
| 30746 | |
| 30747 | default: |
| 30748 | return; |
| 30749 | } |
| 30750 | |
| 30751 | /* Mark children. */ |
| 30752 | FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c)); |
| 30753 | } |
| 30754 | |
| 30755 | /* Walk the tree DIE and mark types that we actually use. */ |
| 30756 | |
| 30757 | static void |
prune_unused_types_walk (dw_die_ref die)
{
  dw_die_ref c;

  /* Don't do anything if this node is already marked and
     children have been marked as well.  */
  if (die->die_mark == 2)
    return;

  switch (die->die_tag)
    {
    case DW_TAG_structure_type:
    case DW_TAG_union_type:
    case DW_TAG_class_type:
    case DW_TAG_interface_type:
      if (die->die_perennial_p)
	break;

      /* Check whether this aggregate is nested inside a subprogram,
	 i.e. whether it is a local class.  */
      for (c = die->die_parent; c; c = c->die_parent)
	if (c->die_tag == DW_TAG_subprogram)
	  break;

      /* Finding used static member functions inside of classes
	 is needed just for local classes, because for other classes
	 static member function DIEs with DW_AT_specification
	 are emitted outside of the DW_TAG_*_type.  If we ever change
	 it, we'd need to call this even for non-local classes.  */
      if (c)
	prune_unused_types_walk_local_classes (die);

      /* It's a type node --- don't mark it.  */
      return;

    case DW_TAG_const_type:
    case DW_TAG_packed_type:
    case DW_TAG_pointer_type:
    case DW_TAG_reference_type:
    case DW_TAG_rvalue_reference_type:
    case DW_TAG_volatile_type:
    case DW_TAG_restrict_type:
    case DW_TAG_shared_type:
    case DW_TAG_atomic_type:
    case DW_TAG_immutable_type:
    case DW_TAG_typedef:
    case DW_TAG_array_type:
    case DW_TAG_coarray_type:
    case DW_TAG_friend:
    case DW_TAG_enumeration_type:
    case DW_TAG_subroutine_type:
    case DW_TAG_string_type:
    case DW_TAG_set_type:
    case DW_TAG_subrange_type:
    case DW_TAG_ptr_to_member_type:
    case DW_TAG_file_type:
    case DW_TAG_unspecified_type:
    case DW_TAG_dynamic_type:
      /* Type nodes are useful only when other DIEs reference them --- don't
	 mark them.  */
      /* FALLTHROUGH */

    case DW_TAG_dwarf_procedure:
      /* Likewise for DWARF procedures.  */

      /* Perennial nodes are marked unconditionally: fall through to
	 the marking code below instead of returning.  */
      if (die->die_perennial_p)
	break;

      return;

    case DW_TAG_variable:
      if (flag_debug_only_used_symbols)
	{
	  if (die->die_perennial_p)
	    break;

	  /* For static data members, the declaration in the class is supposed
	     to have DW_TAG_member tag in DWARF{3,4} but DW_TAG_variable in
	     DWARF5.  DW_TAG_member will be marked, so mark even such
	     DW_TAG_variables in DWARF5, as long as it has DW_AT_const_value
	     attribute.  */
	  if (dwarf_version >= 5
	      && class_scope_p (context_die: die->die_parent)
	      && get_AT (die, attr_kind: DW_AT_const_value))
	    break;

	  /* premark_used_variables marks external variables --- don't mark
	     them here.  But function-local externals are always considered
	     used.  */
	  if (get_AT (die, attr_kind: DW_AT_external))
	    {
	      for (c = die->die_parent; c; c = c->die_parent)
		if (c->die_tag == DW_TAG_subprogram)
		  break;
	      if (!c)
		return;
	    }
	}
      /* FALLTHROUGH */

    default:
      /* Mark everything else.  */
      break;
    }

  /* Mark protocol: die_mark == 1 means this DIE and everything it
     references through its attributes are marked; die_mark == 2 means
     the children have been walked too, allowing the early return at
     the top of this function on a revisit.  */
  if (die->die_mark == 0)
    {
      die->die_mark = 1;

      /* Now, mark any dies referenced from here.  */
      prune_unused_types_walk_attribs (die);
    }

  die->die_mark = 2;

  /* Mark children.  */
  FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
}
| 30874 | |
/* Increment the string counts on strings referred to from DIE's
   attributes.  */

static void
prune_unused_types_update_strings (dw_die_ref die)
{
  dw_attr_node *a;
  unsigned ix;

  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    if (AT_class (a) == dw_val_class_str)
      {
	struct indirect_string_node *s = a->dw_attr_val.v.val_str;
	s->refcount++;
	/* Avoid unnecessarily putting strings that are used less than
	   twice in the hash table.  */
	/* NOTE: with SECTION_MERGE the threshold is 1 (every string goes
	   in on first use); otherwise a string is entered only once its
	   refcount reaches 2, i.e. it is actually shared.  The == test
	   ensures each string is inserted exactly once.  */
	if (s->form != DW_FORM_line_strp
	    && (s->refcount
		== ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2)))
	  {
	    indirect_string_node **slot
	      = debug_str_hash->find_slot_with_hash (comparable: s->str,
						     hash: htab_hash_string (s->str),
						     insert: INSERT);
	    /* The table was emptied before pruning, so no duplicates.  */
	    gcc_assert (*slot == NULL);
	    *slot = s;
	  }
      }
}
| 30904 | |
| 30905 | /* Mark DIE and its children as removed. */ |
| 30906 | |
| 30907 | static void |
| 30908 | mark_removed (dw_die_ref die) |
| 30909 | { |
| 30910 | dw_die_ref c; |
| 30911 | die->removed = true; |
| 30912 | FOR_EACH_CHILD (die, c, mark_removed (c)); |
| 30913 | } |
| 30914 | |
/* Remove from the tree DIE any dies that aren't marked.  */

static void
prune_unused_types_prune (dw_die_ref die)
{
  dw_die_ref c;

  gcc_assert (die->die_mark);
  prune_unused_types_update_strings (die);

  if (! die->die_child)
    return;

  /* DIE's children form a circular singly-linked list: die->die_child
     points at the LAST child, whose die_sib points back at the first.
     Walk it, splicing out runs of unmarked children.  */
  c = die->die_child;
  do {
    dw_die_ref prev = c, next;
    /* Advance past any unmarked dies after PREV, detaching each.  */
    for (c = c->die_sib; ! c->die_mark; c = next)
      if (c == die->die_child)
	{
	  /* No marked children between 'prev' and the end of the list.  */
	  if (prev == c)
	    /* No marked children at all.  */
	    die->die_child = NULL;
	  else
	    {
	      prev->die_sib = c->die_sib;
	      die->die_child = prev;
	    }
	  c->die_sib = NULL;
	  mark_removed (die: c);
	  return;
	}
      else
	{
	  next = c->die_sib;
	  c->die_sib = NULL;
	  mark_removed (die: c);
	}

    /* Link PREV to the next surviving (marked) child and recurse
       into that child's own subtree.  */
    if (c != prev->die_sib)
      prev->die_sib = c;
    prune_unused_types_prune (die: c);
  } while (c != die->die_child);
}
| 30959 | |
/* Remove dies representing declarations that we never use.  */

static void
prune_unused_types (void)
{
  unsigned int i;
  limbo_die_node *node;
  comdat_type_node *ctnode;
  pubname_entry *pub;
  dw_die_ref base_type;

#if ENABLE_ASSERT_CHECKING
  /* All the marks should already be clear.  */
  verify_marks_clear (die: comp_unit_die ());
  for (node = limbo_die_list; node; node = node->next)
    verify_marks_clear (die: node->die);
  for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
    verify_marks_clear (die: ctnode->root_die);
#endif /* ENABLE_ASSERT_CHECKING */

  /* Mark types that are used in global variables.  */
  premark_types_used_by_global_vars ();

  /* Mark variables used in the symtab.  */
  if (flag_debug_only_used_symbols)
    premark_used_variables ();

  /* Set the mark on nodes that are actually used.  */
  prune_unused_types_walk (die: comp_unit_die ());
  for (node = limbo_die_list; node; node = node->next)
    prune_unused_types_walk (die: node->die);
  for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
    {
      prune_unused_types_walk (die: ctnode->root_die);
      prune_unused_types_mark (die: ctnode->type_die, dokids: 1);
    }

  /* Also set the mark on nodes referenced from the pubname_table.  Enumerators
     are unusual in that they are pubnames that are the children of pubtypes.
     They should only be marked via their parent DW_TAG_enumeration_type die,
     not as roots in themselves.  */
  FOR_EACH_VEC_ELT (*pubname_table, i, pub)
    if (pub->die->die_tag != DW_TAG_enumerator)
      prune_unused_types_mark (die: pub->die, dokids: 1);
  for (i = 0; base_types.iterate (ix: i, ptr: &base_type); i++)
    prune_unused_types_mark (die: base_type, dokids: 1);

  /* Also set the mark on nodes that could be referenced by
     DW_TAG_call_site DW_AT_call_origin (i.e. direct call callees) or
     by DW_TAG_inlined_subroutine origins.  */
  cgraph_node *cnode;
  FOR_EACH_FUNCTION (cnode)
    if (cnode->referred_to_p (include_self: false))
      {
	dw_die_ref die = lookup_decl_die (decl: cnode->decl);
	if (die == NULL || die->die_mark)
	  continue;
	/* Mark only if the function has at least one caller other
	   than itself.  */
	for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
	  if (e->caller != cnode)
	    {
	      prune_unused_types_mark (die, dokids: 1);
	      break;
	    }
      }

  /* Empty the string hash tables; prune_unused_types_prune repopulates
     them (via prune_unused_types_update_strings) with only the strings
     referenced from surviving DIEs.  */
  if (debug_str_hash)
    debug_str_hash->empty ();
  if (skeleton_debug_str_hash)
    skeleton_debug_str_hash->empty ();
  prune_unused_types_prune (die: comp_unit_die ());
  for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
    {
      node = *pnode;
      if (!node->die->die_mark)
	/* Unlink wholly unmarked limbo dies from the list.  */
	*pnode = node->next;
      else
	{
	  prune_unused_types_prune (die: node->die);
	  pnode = &node->next;
	}
    }
  for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
    prune_unused_types_prune (die: ctnode->root_die);

  /* Leave the marks clear.  */
  prune_unmark_dies (die: comp_unit_die ());
  for (node = limbo_die_list; node; node = node->next)
    prune_unmark_dies (die: node->die);
  for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
    prune_unmark_dies (die: ctnode->root_die);
}
| 31051 | |
/* Helpers to manipulate hash table of comdat type units.  */

struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
{
  /* Hash a comdat type node by (a prefix of) its type signature.  */
  static inline hashval_t hash (const comdat_type_node *);
  /* Compare two comdat type nodes for full-signature equality.  */
  static inline bool equal (const comdat_type_node *, const comdat_type_node *);
};
| 31059 | |
| 31060 | inline hashval_t |
| 31061 | comdat_type_hasher::hash (const comdat_type_node *type_node) |
| 31062 | { |
| 31063 | hashval_t h; |
| 31064 | memcpy (dest: &h, src: type_node->signature, n: sizeof (h)); |
| 31065 | return h; |
| 31066 | } |
| 31067 | |
| 31068 | inline bool |
| 31069 | comdat_type_hasher::equal (const comdat_type_node *type_node_1, |
| 31070 | const comdat_type_node *type_node_2) |
| 31071 | { |
| 31072 | return (! memcmp (s1: type_node_1->signature, s2: type_node_2->signature, |
| 31073 | DWARF_TYPE_SIGNATURE_SIZE)); |
| 31074 | } |
| 31075 | |
/* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
   to the location it would have been added, should we know its
   DECL_ASSEMBLER_NAME when we added other attributes.  This will
   probably improve compactness of debug info, removing equivalent
   abbrevs, and hide any differences caused by deferring the
   computation of the assembler name, triggered by e.g. PCH.  */

static inline void
move_linkage_attr (dw_die_ref die)
{
  /* The linkage-name attribute was just appended, so it is the last
     element of DIE's attribute vector.  */
  unsigned ix = vec_safe_length (v: die->die_attr);
  dw_attr_node linkage = (*die->die_attr)[ix - 1];

  gcc_assert (linkage.dw_attr == DW_AT_linkage_name
	      || linkage.dw_attr == DW_AT_MIPS_linkage_name);

  /* Scan backwards for the attribute the linkage name would normally
     directly follow (decl line/column or name).  */
  while (--ix > 0)
    {
      dw_attr_node *prev = &(*die->die_attr)[ix - 1];

      if (prev->dw_attr == DW_AT_decl_line
	  || prev->dw_attr == DW_AT_decl_column
	  || prev->dw_attr == DW_AT_name)
	break;
    }

  /* Unless the linkage name already sits at its natural position,
     pop it off the tail and re-insert it at index IX.  */
  if (ix != vec_safe_length (v: die->die_attr) - 1)
    {
      die->die_attr->pop ();
      die->die_attr->quick_insert (ix, obj: linkage);
    }
}
| 31108 | |
/* Helper function for resolve_addr, mark DW_TAG_base_type nodes
   referenced from typed stack ops and count how often they are used.  */

static void
mark_base_types (dw_loc_descr_ref loc)
{
  dw_die_ref base_type = NULL;

  for (; loc; loc = loc->dw_loc_next)
    {
      switch (loc->dw_loc_opc)
	{
	case DW_OP_regval_type:
	case DW_OP_deref_type:
	case DW_OP_GNU_regval_type:
	case DW_OP_GNU_deref_type:
	  /* These opcodes carry their base type die in operand 2.  */
	  base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
	  break;
	case DW_OP_convert:
	case DW_OP_reinterpret:
	case DW_OP_GNU_convert:
	case DW_OP_GNU_reinterpret:
	  /* A conversion whose operand is already an unsigned constant
	     carries no die reference to mark.  */
	  if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
	    continue;
	  /* FALLTHRU */
	case DW_OP_const_type:
	case DW_OP_GNU_const_type:
	  base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
	  break;
	case DW_OP_entry_value:
	case DW_OP_GNU_entry_value:
	  /* Recurse into the nested expression in operand 1.  */
	  mark_base_types (loc: loc->dw_loc_oprnd1.v.val_loc);
	  continue;
	default:
	  continue;
	}
      gcc_assert (base_type->die_parent == comp_unit_die ());
      /* die_mark doubles as the use counter; a base type seen for the
	 first time is also pushed onto the base_types vector.  */
      if (base_type->die_mark)
	base_type->die_mark++;
      else
	{
	  base_types.safe_push (obj: base_type);
	  base_type->die_mark = 1;
	}
    }
}
| 31155 | |
/* Stripped-down variant of resolve_addr, mark DW_TAG_base_type nodes
   referenced from typed stack ops and count how often they are used.  */

static void
mark_base_types (dw_die_ref die)
{
  dw_die_ref c;
  dw_attr_node *a;
  dw_loc_list_ref *curr;
  unsigned ix;

  /* Scan every location attribute of DIE for typed stack operations.  */
  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    switch (AT_class (a))
      {
      case dw_val_class_loc_list:
	/* Walk each expression in the location list.  */
	curr = AT_loc_list_ptr (a);
	while (*curr)
	  {
	    mark_base_types (loc: (*curr)->expr);
	    curr = &(*curr)->dw_loc_next;
	  }
	break;

      case dw_val_class_loc:
	mark_base_types (loc: AT_loc (a));
	break;

      default:
	break;
      }

  /* Recurse over the whole DIE subtree.  */
  FOR_EACH_CHILD (die, c, mark_base_types (c));
}
| 31189 | |
| 31190 | /* Comparison function for sorting marked base types. */ |
| 31191 | |
| 31192 | static int |
| 31193 | base_type_cmp (const void *x, const void *y) |
| 31194 | { |
| 31195 | dw_die_ref dx = *(const dw_die_ref *) x; |
| 31196 | dw_die_ref dy = *(const dw_die_ref *) y; |
| 31197 | unsigned int byte_size1, byte_size2; |
| 31198 | unsigned int encoding1, encoding2; |
| 31199 | unsigned int align1, align2; |
| 31200 | if (dx->die_mark > dy->die_mark) |
| 31201 | return -1; |
| 31202 | if (dx->die_mark < dy->die_mark) |
| 31203 | return 1; |
| 31204 | byte_size1 = get_AT_unsigned (die: dx, attr_kind: DW_AT_byte_size); |
| 31205 | byte_size2 = get_AT_unsigned (die: dy, attr_kind: DW_AT_byte_size); |
| 31206 | if (byte_size1 < byte_size2) |
| 31207 | return 1; |
| 31208 | if (byte_size1 > byte_size2) |
| 31209 | return -1; |
| 31210 | encoding1 = get_AT_unsigned (die: dx, attr_kind: DW_AT_encoding); |
| 31211 | encoding2 = get_AT_unsigned (die: dy, attr_kind: DW_AT_encoding); |
| 31212 | if (encoding1 < encoding2) |
| 31213 | return 1; |
| 31214 | if (encoding1 > encoding2) |
| 31215 | return -1; |
| 31216 | align1 = get_AT_unsigned (die: dx, attr_kind: DW_AT_alignment); |
| 31217 | align2 = get_AT_unsigned (die: dy, attr_kind: DW_AT_alignment); |
| 31218 | if (align1 < align2) |
| 31219 | return 1; |
| 31220 | if (align1 > align2) |
| 31221 | return -1; |
| 31222 | return 0; |
| 31223 | } |
| 31224 | |
/* Move base types marked by mark_base_types as early as possible
   in the CU, sorted by decreasing usage count both to make the
   uleb128 references as small as possible and to make sure they
   will have die_offset already computed by calc_die_sizes when
   sizes of typed stack loc ops is computed.  */

static void
move_marked_base_types (void)
{
  unsigned int i;
  dw_die_ref base_type, die, c;

  if (base_types.is_empty ())
    return;

  /* Sort by decreasing usage count, they will be added again in that
     order later on.  */
  base_types.qsort (base_type_cmp);
  die = comp_unit_die ();
  /* The CU's children form a circular list with die->die_child pointing
     at the last child.  First detach every marked base type (die_mark
     is the nonzero use count set by mark_base_types).  */
  c = die->die_child;
  do
    {
      dw_die_ref prev = c;
      c = c->die_sib;
      while (c->die_mark)
	{
	  remove_child_with_prev (child: c, prev);
	  /* As base types got marked, there must be at least
	     one node other than DW_TAG_base_type.  */
	  gcc_assert (die->die_child != NULL);
	  c = prev->die_sib;
	}
    }
  while (c != die->die_child);
  gcc_assert (die->die_child);
  /* Now splice the sorted base types back in between the last child
     and the old first child, making them the earliest children of the
     CU, and clear the marks again.  */
  c = die->die_child;
  for (i = 0; base_types.iterate (ix: i, ptr: &base_type); i++)
    {
      base_type->die_mark = 0;
      base_type->die_sib = c->die_sib;
      c->die_sib = base_type;
      c = base_type;
    }
}
| 31269 | |
/* Helper function for resolve_addr, attempt to resolve
   one CONST_STRING, return true if successful.  Similarly verify that
   SYMBOL_REFs refer to variables emitted in the current CU.  */

static bool
resolve_one_addr (rtx *addr)
{
  rtx rtl = *addr;

  if (GET_CODE (rtl) == CONST_STRING)
    {
      /* Rebuild a STRING_CST matching the CONST_STRING and look up the
	 constant pool entry emitted for it, if any.  */
      size_t len = strlen (XSTR (rtl, 0)) + 1;
      tree t = build_string (len, XSTR (rtl, 0));
      tree tlen = size_int (len - 1);
      TREE_TYPE (t)
	= build_array_type (char_type_node, build_index_type (tlen));
      rtl = lookup_constant_def (t);
      if (!rtl || !MEM_P (rtl))
	return false;
      rtl = XEXP (rtl, 0);
      if (GET_CODE (rtl) == SYMBOL_REF
	  && SYMBOL_REF_DECL (rtl)
	  && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
	/* The pool entry was never written out, so its address would
	   not resolve.  */
	return false;
      vec_safe_push (v&: used_rtx_array, obj: rtl);
      *addr = rtl;
      return true;
    }

  if (GET_CODE (rtl) == SYMBOL_REF
      && SYMBOL_REF_DECL (rtl))
    {
      if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
	{
	  /* For constant pool addresses check whether the initializer
	     was written out instead of the decl itself.  */
	  if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
	    return false;
	}
      else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
	return false;
    }

  if (GET_CODE (rtl) == CONST)
    {
      /* Recursively validate every sub-rtx of the CONST expression.  */
      subrtx_ptr_iterator::array_type array;
      FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
	if (!resolve_one_addr (addr: *iter))
	  return false;
    }

  return true;
}
| 31321 | |
/* For STRING_CST, return SYMBOL_REF of its constant pool entry,
   if possible, and create DW_TAG_dwarf_procedure that can be referenced
   from DW_OP_implicit_pointer if the string hasn't been seen yet.  */

static rtx
string_cst_pool_decl (tree t)
{
  rtx rtl = output_constant_def (t, 1);
  unsigned char *array;
  dw_loc_descr_ref l;
  tree decl;
  size_t len;
  dw_die_ref ref;

  if (!rtl || !MEM_P (rtl))
    return NULL_RTX;
  rtl = XEXP (rtl, 0);
  if (GET_CODE (rtl) != SYMBOL_REF
      || SYMBOL_REF_DECL (rtl) == NULL_TREE)
    return NULL_RTX;

  decl = SYMBOL_REF_DECL (rtl);
  if (!lookup_decl_die (decl))
    {
      /* First time we see this string: create a DW_TAG_dwarf_procedure
	 whose DW_AT_location is a DW_OP_implicit_value holding the
	 string bytes, and associate it with the pool decl so later
	 lookups find it.  */
      len = TREE_STRING_LENGTH (t);
      vec_safe_push (v&: used_rtx_array, obj: rtl);
      ref = new_die (tag_value: DW_TAG_dwarf_procedure, parent_die: comp_unit_die (), t: decl);
      array = ggc_vec_alloc<unsigned char> (c: len);
      memcpy (dest: array, TREE_STRING_POINTER (t), n: len);
      l = new_loc_descr (op: DW_OP_implicit_value, oprnd1: len, oprnd2: 0);
      l->dw_loc_oprnd2.val_class = dw_val_class_vec;
      l->dw_loc_oprnd2.v.val_vec.length = len;
      l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
      l->dw_loc_oprnd2.v.val_vec.array = array;
      add_AT_loc (die: ref, attr_kind: DW_AT_location, loc: l);
      equate_decl_number_to_die (decl, decl_die: ref);
    }
  return rtl;
}
| 31361 | |
/* Helper function of resolve_addr_in_expr.  LOC is
   a DW_OP_addr followed by DW_OP_stack_value, either at the start
   of exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
   resolved.  Replace it (both DW_OP_addr and DW_OP_stack_value)
   with DW_OP_implicit_pointer if possible
   and return true, if unsuccessful, return false.  */

static bool
optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
{
  rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
  HOST_WIDE_INT offset = 0;
  dw_die_ref ref = NULL;
  tree decl;

  /* Peel a constant byte offset off a (const (plus sym const_int))
     form; it becomes the implicit pointer's offset operand below.  */
  if (GET_CODE (rtl) == CONST
      && GET_CODE (XEXP (rtl, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
    {
      offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
      rtl = XEXP (XEXP (rtl, 0), 0);
    }
  if (GET_CODE (rtl) == CONST_STRING)
    {
      /* Convert the CONST_STRING into a constant-pool SYMBOL_REF (with
	 an associated DW_TAG_dwarf_procedure we can point into).  */
      size_t len = strlen (XSTR (rtl, 0)) + 1;
      tree t = build_string (len, XSTR (rtl, 0));
      tree tlen = size_int (len - 1);

      TREE_TYPE (t)
	= build_array_type (char_type_node, build_index_type (tlen));
      rtl = string_cst_pool_decl (t);
      if (!rtl)
	return false;
    }
  if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
    {
      decl = SYMBOL_REF_DECL (rtl);
      if (VAR_P (decl) && !DECL_EXTERNAL (decl))
	{
	  ref = lookup_decl_die (decl);
	  if (ref && (get_AT (die: ref, attr_kind: DW_AT_location)
		      || get_AT (die: ref, attr_kind: DW_AT_const_value)))
	    {
	      /* Rewrite LOC in place into DW_OP_implicit_pointer
		 referencing REF with OFFSET, and unlink the following
		 DW_OP_stack_value from the expression.  */
	      loc->dw_loc_opc = dwarf_OP (op: DW_OP_implicit_pointer);
	      loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
	      loc->dw_loc_oprnd1.val_entry = NULL;
	      loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
	      loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
	      loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
	      loc->dw_loc_oprnd2.v.val_int = offset;
	      return true;
	    }
	}
    }
  return false;
}
| 31418 | |
/* Helper function for resolve_addr, handle one location
   expression, return false if at least one CONST_STRING or SYMBOL_REF in
   the location list couldn't be resolved.  */

static bool
resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
{
  dw_loc_descr_ref keep = NULL;
  for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
    switch (loc->dw_loc_opc)
      {
      case DW_OP_addr:
	if (!resolve_one_addr (addr: &loc->dw_loc_oprnd1.v.val_addr))
	  {
	    /* Unresolvable address: as a fallback, try turning a
	       DW_OP_addr DW_OP_stack_value pair at the start of the
	       expression or of a piece into DW_OP_implicit_pointer.  */
	    if ((prev == NULL
		 || prev->dw_loc_opc == DW_OP_piece
		 || prev->dw_loc_opc == DW_OP_bit_piece)
		&& loc->dw_loc_next
		&& loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
		&& (!dwarf_strict || dwarf_version >= 5)
		&& optimize_one_addr_into_implicit_ptr (loc))
	      break;
	    return false;
	  }
	break;
      case DW_OP_GNU_addr_index:
      case DW_OP_addrx:
      case DW_OP_GNU_const_index:
      case DW_OP_constx:
	if ((loc->dw_loc_opc == DW_OP_GNU_addr_index
	     || loc->dw_loc_opc == DW_OP_addrx)
	    || ((loc->dw_loc_opc == DW_OP_GNU_const_index
		 || loc->dw_loc_opc == DW_OP_constx)
		&& loc->dw_loc_dtprel))
	  {
	    /* Resolve the rtx behind the address table entry and swap
	       in a new entry for the resolved rtx.  */
	    rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
	    if (!resolve_one_addr (addr: &rtl))
	      return false;
	    remove_addr_table_entry (entry: loc->dw_loc_oprnd1.val_entry);
	    loc->dw_loc_oprnd1.val_entry
	      = add_addr_table_entry (addr: rtl, kind: loc->dw_loc_dtprel
				      ? ate_kind_rtx_dtprel : ate_kind_rtx);
	  }
	break;
      case DW_OP_const4u:
      case DW_OP_const8u:
	/* Only DTP-relative constants carry an address to resolve.  */
	if (loc->dw_loc_dtprel
	    && !resolve_one_addr (addr: &loc->dw_loc_oprnd1.v.val_addr))
	  return false;
	break;
      case DW_OP_plus_uconst:
	/* Shrink DW_OP_plus_uconst into DW_OP_constX DW_OP_plus when
	   the latter encoding is strictly smaller.  */
	if (size_of_loc_descr (loc)
	    > size_of_int_loc_descriptor (i: loc->dw_loc_oprnd1.v.val_unsigned)
	      + 1
	    && loc->dw_loc_oprnd1.v.val_unsigned > 0)
	  {
	    dw_loc_descr_ref repl
	      = int_loc_descriptor (poly_i: loc->dw_loc_oprnd1.v.val_unsigned);
	    add_loc_descr (list_head: &repl, descr: new_loc_descr (op: DW_OP_plus, oprnd1: 0, oprnd2: 0));
	    add_loc_descr (list_head: &repl, descr: loc->dw_loc_next);
	    *loc = *repl;
	  }
	break;
      case DW_OP_implicit_value:
	if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
	    && !resolve_one_addr (addr: &loc->dw_loc_oprnd2.v.val_addr))
	  return false;
	break;
      case DW_OP_implicit_pointer:
      case DW_OP_GNU_implicit_pointer:
      case DW_OP_GNU_parameter_ref:
      case DW_OP_GNU_variable_value:
	if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
	  {
	    /* Replace the decl reference with the corresponding die
	       reference; fail if no die was created for the decl.  */
	    dw_die_ref ref
	      = lookup_decl_die (decl: loc->dw_loc_oprnd1.v.val_decl_ref);
	    if (ref == NULL)
	      return false;
	    loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
	    loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
	    loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
	  }
	if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
	  {
	    if (prev == NULL
		&& loc->dw_loc_next == NULL
		&& AT_class (a) == dw_val_class_loc)
	      switch (a->dw_attr)
		{
		/* Following attributes allow both exprloc and reference,
		   so if the whole expression is DW_OP_GNU_variable_value
		   alone we could transform it into reference.  */
		case DW_AT_byte_size:
		case DW_AT_bit_size:
		case DW_AT_lower_bound:
		case DW_AT_upper_bound:
		case DW_AT_bit_stride:
		case DW_AT_count:
		case DW_AT_allocated:
		case DW_AT_associated:
		case DW_AT_byte_stride:
		  a->dw_attr_val.val_class = dw_val_class_die_ref;
		  a->dw_attr_val.val_entry = NULL;
		  a->dw_attr_val.v.val_die_ref.die
		    = loc->dw_loc_oprnd1.v.val_die_ref.die;
		  a->dw_attr_val.v.val_die_ref.external = 0;
		  return true;
		default:
		  break;
		}
	    /* DW_OP_GNU_variable_value is not acceptable in strict
	       DWARF mode.  */
	    if (dwarf_strict)
	      return false;
	  }
	break;
      case DW_OP_const_type:
      case DW_OP_regval_type:
      case DW_OP_deref_type:
      case DW_OP_convert:
      case DW_OP_reinterpret:
      case DW_OP_GNU_const_type:
      case DW_OP_GNU_regval_type:
      case DW_OP_GNU_deref_type:
      case DW_OP_GNU_convert:
      case DW_OP_GNU_reinterpret:
	/* Peephole: a following DW_OP_convert can be dropped when it
	   converts between base types of equal byte size that are both
	   integral (or have identical encodings), by retargeting this
	   op's base type reference to the conversion's target type.  */
	while (loc->dw_loc_next
	       && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
		   || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
	  {
	    dw_die_ref base1, base2;
	    unsigned enc1, enc2, size1, size2;
	    if (loc->dw_loc_opc == DW_OP_regval_type
		|| loc->dw_loc_opc == DW_OP_deref_type
		|| loc->dw_loc_opc == DW_OP_GNU_regval_type
		|| loc->dw_loc_opc == DW_OP_GNU_deref_type)
	      base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
	    else if (loc->dw_loc_oprnd1.val_class
		     == dw_val_class_unsigned_const)
	      break;
	    else
	      base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
	    if (loc->dw_loc_next->dw_loc_oprnd1.val_class
		== dw_val_class_unsigned_const)
	      break;
	    base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
	    gcc_assert (base1->die_tag == DW_TAG_base_type
			&& base2->die_tag == DW_TAG_base_type);
	    enc1 = get_AT_unsigned (die: base1, attr_kind: DW_AT_encoding);
	    enc2 = get_AT_unsigned (die: base2, attr_kind: DW_AT_encoding);
	    size1 = get_AT_unsigned (die: base1, attr_kind: DW_AT_byte_size);
	    size2 = get_AT_unsigned (die: base2, attr_kind: DW_AT_byte_size);
	    if (size1 == size2
		&& (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
		     && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
		     && loc != keep)
		    || enc1 == enc2))
	      {
		/* Optimize away next DW_OP_convert after
		   adjusting LOC's base type die reference.  */
		if (loc->dw_loc_opc == DW_OP_regval_type
		    || loc->dw_loc_opc == DW_OP_deref_type
		    || loc->dw_loc_opc == DW_OP_GNU_regval_type
		    || loc->dw_loc_opc == DW_OP_GNU_deref_type)
		  loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
		else
		  loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
		loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
		continue;
	      }
	    /* Don't change integer DW_OP_convert after e.g. floating
	       point typed stack entry.  */
	    else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
	      keep = loc->dw_loc_next;
	    break;
	  }
	break;
      default:
	break;
      }
  return true;
}
| 31599 | |
/* Helper function of resolve_addr.  DIE had DW_AT_location of
   DW_OP_addr alone, which referred to DECL in DW_OP_addr's operand
   and DW_OP_addr couldn't be resolved.  resolve_addr has already
   removed the DW_AT_location attribute.  This function attempts to
   add a new DW_AT_location attribute with DW_OP_implicit_pointer
   to it or DW_AT_const_value attribute, if possible.  */

static void
optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
{
  /* Only handle static variables whose DIE this actually is and that
     have a usable (non-decl) initializer to work from.  */
  if (!VAR_P (decl)
      || lookup_decl_die (decl) != die
      || DECL_EXTERNAL (decl)
      || !TREE_STATIC (decl)
      || DECL_INITIAL (decl) == NULL_TREE
      || DECL_P (DECL_INITIAL (decl))
      || get_AT (die, attr_kind: DW_AT_const_value))
    return;

  tree init = DECL_INITIAL (decl);
  HOST_WIDE_INT offset = 0;
  /* For variables that have been optimized away and thus
     don't have a memory location, see if we can emit
     DW_AT_const_value instead.  */
  if (tree_add_const_value_attribute (die, t: init))
    return;
  /* DW_OP_implicit_pointer below requires non-strict DWARF or
     version 5 or later.  */
  if (dwarf_strict && dwarf_version < 5)
    return;
  /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
     and ADDR_EXPR refers to a decl that has DW_AT_location or
     DW_AT_const_value (but isn't addressable, otherwise
     resolving the original DW_OP_addr wouldn't fail), see if
     we can add DW_OP_implicit_pointer.  */
  STRIP_NOPS (init);
  if (TREE_CODE (init) == POINTER_PLUS_EXPR
      && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
    {
      /* Peel a constant byte offset off for the implicit pointer.  */
      offset = tree_to_shwi (TREE_OPERAND (init, 1));
      init = TREE_OPERAND (init, 0);
      STRIP_NOPS (init);
    }
  if (TREE_CODE (init) != ADDR_EXPR)
    return;
  if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
       && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
      || (VAR_P (TREE_OPERAND (init, 0))
	  && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
	  && TREE_OPERAND (init, 0) != decl))
    {
      dw_die_ref ref;
      dw_loc_descr_ref l;

      if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
	{
	  /* String targets go through the constant pool; fetch (or
	     create) the DW_TAG_dwarf_procedure holding the bytes.  */
	  rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
	  if (!rtl)
	    return;
	  decl = SYMBOL_REF_DECL (rtl);
	}
      else
	decl = TREE_OPERAND (init, 0);
      ref = lookup_decl_die (decl);
      if (ref == NULL
	  || (!get_AT (die: ref, attr_kind: DW_AT_location)
	      && !get_AT (die: ref, attr_kind: DW_AT_const_value)))
	return;
      /* Emit DW_AT_location as DW_OP_implicit_pointer REF OFFSET.  */
      l = new_loc_descr (op: dwarf_OP (op: DW_OP_implicit_pointer), oprnd1: 0, oprnd2: offset);
      l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
      l->dw_loc_oprnd1.v.val_die_ref.die = ref;
      l->dw_loc_oprnd1.v.val_die_ref.external = 0;
      add_AT_loc (die, attr_kind: DW_AT_location, loc: l);
    }
}
| 31673 | |
| 31674 | /* Return NULL if l is a DWARF expression, or first op that is not |
| 31675 | valid DWARF expression. */ |
| 31676 | |
| 31677 | static dw_loc_descr_ref |
| 31678 | non_dwarf_expression (dw_loc_descr_ref l) |
| 31679 | { |
| 31680 | while (l) |
| 31681 | { |
| 31682 | if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31) |
| 31683 | return l; |
| 31684 | switch (l->dw_loc_opc) |
| 31685 | { |
| 31686 | case DW_OP_regx: |
| 31687 | case DW_OP_implicit_value: |
| 31688 | case DW_OP_stack_value: |
| 31689 | case DW_OP_implicit_pointer: |
| 31690 | case DW_OP_GNU_implicit_pointer: |
| 31691 | case DW_OP_GNU_parameter_ref: |
| 31692 | case DW_OP_piece: |
| 31693 | case DW_OP_bit_piece: |
| 31694 | return l; |
| 31695 | default: |
| 31696 | break; |
| 31697 | } |
| 31698 | l = l->dw_loc_next; |
| 31699 | } |
| 31700 | return NULL; |
| 31701 | } |
| 31702 | |
/* Return adjusted copy of EXPR:
   If it is empty DWARF expression, return it.
   If it is valid non-empty DWARF expression,
   return copy of EXPR with DW_OP_deref appended to it.
   If it is DWARF expression followed by DW_OP_reg{N,x}, return
   copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
   If it is DWARF expression followed by DW_OP_stack_value, return
   copy of the DWARF expression without anything appended.
   Otherwise, return NULL.  */

static dw_loc_descr_ref
copy_deref_exprloc (dw_loc_descr_ref expr)
{
  /* TAIL is the op (if any) appended after the copied prefix.  */
  dw_loc_descr_ref tail = NULL;

  if (expr == NULL)
    return NULL;

  /* L is the first non-expression op in EXPR, or NULL if EXPR is a
     pure DWARF expression.  */
  dw_loc_descr_ref l = non_dwarf_expression (l: expr);
  /* Anything following the non-expression op makes EXPR a composite
     (or otherwise unhandled) description; give up.  */
  if (l && l->dw_loc_next)
    return NULL;

  if (l)
    {
      /* Terminated by DW_OP_regN: the value lives in a register, so
	 read it via DW_OP_bregN <0>.  */
      if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
	tail = new_loc_descr (op: (enum dwarf_location_atom)
			      (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
			      oprnd1: 0, oprnd2: 0);
      else
	switch (l->dw_loc_opc)
	  {
	  case DW_OP_regx:
	    tail = new_loc_descr (op: DW_OP_bregx,
				  oprnd1: l->dw_loc_oprnd1.v.val_unsigned, oprnd2: 0);
	    break;
	  case DW_OP_stack_value:
	    /* The value is already on the expression stack; append
	       nothing.  */
	    break;
	  default:
	    return NULL;
	  }
    }
  else
    /* Plain memory location: append a dereference.  */
    tail = new_loc_descr (op: DW_OP_deref, oprnd1: 0, oprnd2: 0);

  /* Deep-copy the ops of EXPR up to (but not including) L, then attach
     TAIL at the end.  */
  dw_loc_descr_ref ret = NULL, *p = &ret;
  while (expr != l)
    {
      *p = new_loc_descr (op: expr->dw_loc_opc, oprnd1: 0, oprnd2: 0);
      (*p)->dw_loc_oprnd1.val_class = expr->dw_loc_oprnd1.val_class;
      (*p)->dw_loc_oprnd1.val_entry = expr->dw_loc_oprnd1.val_entry;
      (*p)->dw_loc_oprnd1.v = expr->dw_loc_oprnd1.v;
      (*p)->dw_loc_oprnd2.val_class = expr->dw_loc_oprnd2.val_class;
      (*p)->dw_loc_oprnd2.val_entry = expr->dw_loc_oprnd2.val_entry;
      (*p)->dw_loc_oprnd2.v = expr->dw_loc_oprnd2.v;
      p = &(*p)->dw_loc_next;
      expr = expr->dw_loc_next;
    }
  *p = tail;
  return ret;
}
| 31763 | |
/* For DW_AT_string_length attribute with DW_OP_GNU_variable_value
   reference to a variable or argument, adjust it if needed and return:
   -1 if the DW_AT_string_length attribute and DW_AT_{string_length_,}byte_size
      attribute if present should be removed
   0 keep the attribute perhaps with minor modifications, no need to rescan
   1 if the attribute has been successfully adjusted.  */

static int
optimize_string_length (dw_attr_node *a)
{
  dw_loc_descr_ref l = AT_loc (a), lv;
  dw_die_ref die;
  /* If the DW_OP_GNU_variable_value operand is still a decl reference,
     switch it to a reference to the decl's DIE; if there is no DIE,
     the attribute cannot be emitted at all.  */
  if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
    {
      tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
      die = lookup_decl_die (decl);
      if (die)
	{
	  l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
	  l->dw_loc_oprnd1.v.val_die_ref.die = die;
	  l->dw_loc_oprnd1.v.val_die_ref.external = 0;
	}
      else
	return -1;
    }
  else
    die = l->dw_loc_oprnd1.v.val_die_ref.die;

  /* DWARF5 allows reference class, so we can then reference the DIE.
     Only do this for DW_OP_GNU_variable_value DW_OP_stack_value.  */
  if (l->dw_loc_next != NULL && dwarf_version >= 5)
    {
      a->dw_attr_val.val_class = dw_val_class_die_ref;
      a->dw_attr_val.val_entry = NULL;
      a->dw_attr_val.v.val_die_ref.die = die;
      a->dw_attr_val.v.val_die_ref.external = 0;
      return 0;
    }

  /* Otherwise, inspect the referenced DIE's DW_AT_location to decide
     how (or whether) the reference can be lowered.  */
  dw_attr_node *av = get_AT (die, attr_kind: DW_AT_location);
  dw_loc_list_ref d;
  bool non_dwarf_expr = false;

  if (av == NULL)
    return dwarf_strict ? -1 : 0;
  /* Record whether any expression in the referenced location contains
     non-expression ops (registers, pieces, ...), which DW_OP_call4
     cannot handle directly.  */
  switch (AT_class (a: av))
    {
    case dw_val_class_loc_list:
      for (d = AT_loc_list (a: av); d != NULL; d = d->dw_loc_next)
	if (d->expr && non_dwarf_expression (l: d->expr))
	  non_dwarf_expr = true;
      break;
    case dw_val_class_view_list:
      gcc_unreachable ();
    case dw_val_class_loc:
      lv = AT_loc (a: av);
      if (lv == NULL)
	return dwarf_strict ? -1 : 0;
      if (non_dwarf_expression (l: lv))
	non_dwarf_expr = true;
      break;
    default:
      return dwarf_strict ? -1 : 0;
    }

  /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
     into DW_OP_call4 or DW_OP_GNU_variable_value into
     DW_OP_call4 DW_OP_deref, do so.  */
  if (!non_dwarf_expr
      && (l->dw_loc_next != NULL || AT_class (a: av) == dw_val_class_loc))
    {
      l->dw_loc_opc = DW_OP_call4;
      if (l->dw_loc_next)
	l->dw_loc_next = NULL;
      else
	l->dw_loc_next = new_loc_descr (op: DW_OP_deref, oprnd1: 0, oprnd2: 0);
      return 0;
    }

  /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
     copy over the DW_AT_location attribute from die to a.  */
  if (l->dw_loc_next != NULL)
    {
      a->dw_attr_val.val_class = av->dw_attr_val.val_class;
      a->dw_attr_val.val_entry = av->dw_attr_val.val_entry;
      a->dw_attr_val.v = av->dw_attr_val.v;
      return 1;
    }

  /* Last resort: build a dereferenced copy of the referenced location
     (list) via copy_deref_exprloc.  */
  dw_loc_list_ref list, *p;
  switch (AT_class (a: av))
    {
    case dw_val_class_loc_list:
      p = &list;
      list = NULL;
      for (d = AT_loc_list (a: av); d != NULL; d = d->dw_loc_next)
	{
	  lv = copy_deref_exprloc (expr: d->expr);
	  if (lv)
	    {
	      *p = new_loc_list (expr: lv, begin: d->begin, vbegin: d->vbegin, end: d->end, vend: d->vend, section: d->section);
	      p = &(*p)->dw_loc_next;
	    }
	  else if (!dwarf_strict && d->expr)
	    /* A range we couldn't convert; keep the attribute as-is
	       rather than emit an incomplete list.  */
	    return 0;
	}
      if (list == NULL)
	return dwarf_strict ? -1 : 0;
      a->dw_attr_val.val_class = dw_val_class_loc_list;
      gen_llsym (list);
      *AT_loc_list_ptr (a) = list;
      return 1;
    case dw_val_class_loc:
      lv = copy_deref_exprloc (expr: AT_loc (a: av));
      if (lv == NULL)
	return dwarf_strict ? -1 : 0;
      a->dw_attr_val.v.val_loc = lv;
      return 1;
    default:
      gcc_unreachable ();
    }
}
| 31886 | |
/* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
   an address in .rodata section if the string literal is emitted there,
   or remove the containing location list or replace DW_AT_const_value
   with DW_AT_location and empty location expression, if it isn't found
   in .rodata.  Similarly for SYMBOL_REFs, keep only those that refer
   to something that has been emitted in the current CU.
   Recurses over DIE and all of its children.  */

static void
resolve_addr (dw_die_ref die)
{
  dw_die_ref c;
  dw_attr_node *a;
  dw_loc_list_ref *curr, *start, loc;
  unsigned ix;
  bool remove_AT_byte_size = false;

  /* NB: removing an attribute inside the loop is compensated for by
     decrementing IX so the element shifted into this slot is visited.  */
  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    switch (AT_class (a))
      {
      case dw_val_class_loc_list:
	start = curr = AT_loc_list_ptr (a);
	loc = *curr;
	gcc_assert (loc);
	/* The same list can be referenced more than once.  See if we have
	   already recorded the result from a previous pass.  */
	if (loc->replaced)
	  *curr = loc->dw_loc_next;
	else if (!loc->resolved_addr)
	  {
	    /* As things stand, we do not expect or allow one die to
	       reference a suffix of another die's location list chain.
	       References must be identical or completely separate.
	       There is therefore no need to cache the result of this
	       pass on any list other than the first; doing so
	       would lead to unnecessary writes.  */
	    while (*curr)
	      {
		gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
		if (!resolve_addr_in_expr (a, loc: (*curr)->expr))
		  {
		    /* Unresolvable range: unlink it, but preserve its
		       labels on the successor so references to the list
		       stay valid.  */
		    dw_loc_list_ref next = (*curr)->dw_loc_next;
		    dw_loc_descr_ref l = (*curr)->expr;

		    if (next && (*curr)->ll_symbol)
		      {
			gcc_assert (!next->ll_symbol);
			next->ll_symbol = (*curr)->ll_symbol;
			next->vl_symbol = (*curr)->vl_symbol;
		      }
		    if (dwarf_split_debug_info)
		      remove_loc_list_addr_table_entries (descr: l);
		    *curr = next;
		  }
		else
		  {
		    mark_base_types (loc: (*curr)->expr);
		    curr = &(*curr)->dw_loc_next;
		  }
	      }
	    /* Cache the outcome on the head node for later references
	       to the same list.  */
	    if (loc == *start)
	      loc->resolved_addr = 1;
	    else
	      {
		loc->replaced = 1;
		loc->dw_loc_next = *start;
	      }
	  }
	/* If every range was removed, drop the attribute entirely.  */
	if (!*start)
	  {
	    remove_AT (die, attr_kind: a->dw_attr);
	    ix--;
	  }
	break;
      case dw_val_class_view_list:
	{
	  gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
	  gcc_checking_assert (dwarf2out_locviews_in_attribute ());
	  dw_val_node *llnode
	    = view_list_to_loc_list_val_node (val: &a->dw_attr_val);
	  /* If we no longer have a loclist, or it no longer needs
	     views, drop this attribute.  */
	  if (!llnode || !llnode->v.val_loc_list->vl_symbol)
	    {
	      remove_AT (die, attr_kind: a->dw_attr);
	      ix--;
	    }
	  break;
	}
      case dw_val_class_loc:
	{
	  dw_loc_descr_ref l = AT_loc (a);
	  /* DW_OP_GNU_variable_value DW_OP_stack_value or
	     DW_OP_GNU_variable_value in DW_AT_string_length can be converted
	     into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
	     DWARF4 unlike DW_OP_GNU_variable_value.  Or for DWARF5
	     DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
	     with DW_FORM_ref referencing the same DIE as
	     DW_OP_GNU_variable_value used to reference.  */
	  if (a->dw_attr == DW_AT_string_length
	      && l
	      && l->dw_loc_opc == DW_OP_GNU_variable_value
	      && (l->dw_loc_next == NULL
		  || (l->dw_loc_next->dw_loc_next == NULL
		      && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
	    {
	      switch (optimize_string_length (a))
		{
		case -1:
		  remove_AT (die, attr_kind: a->dw_attr);
		  ix--;
		  /* If we drop DW_AT_string_length, we need to drop also
		     DW_AT_{string_length_,}byte_size.  */
		  remove_AT_byte_size = true;
		  continue;
		default:
		  break;
		case 1:
		  /* Even if we keep the optimized DW_AT_string_length,
		     it might have changed AT_class, so process it again.  */
		  ix--;
		  continue;
		}
	    }
	  /* For -gdwarf-2 don't attempt to optimize
	     DW_AT_data_member_location containing
	     DW_OP_plus_uconst - older consumers might
	     rely on it being that op instead of a more complex,
	     but shorter, location description.  */
	  if ((dwarf_version > 2
	       || a->dw_attr != DW_AT_data_member_location
	       || l == NULL
	       || l->dw_loc_opc != DW_OP_plus_uconst
	       || l->dw_loc_next != NULL)
	      && !resolve_addr_in_expr (a, loc: l))
	    {
	      if (dwarf_split_debug_info)
		remove_loc_list_addr_table_entries (descr: l);
	      /* A lone unresolvable DW_OP_addr of a known decl in
		 DW_AT_location may still be salvageable as an implicit
		 pointer or constant value.  */
	      if (l != NULL
		  && l->dw_loc_next == NULL
		  && l->dw_loc_opc == DW_OP_addr
		  && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
		  && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
		  && a->dw_attr == DW_AT_location)
		{
		  tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
		  remove_AT (die, attr_kind: a->dw_attr);
		  ix--;
		  optimize_location_into_implicit_ptr (die, decl);
		  break;
		}
	      if (a->dw_attr == DW_AT_string_length)
		/* If we drop DW_AT_string_length, we need to drop also
		   DW_AT_{string_length_,}byte_size.  */
		remove_AT_byte_size = true;
	      remove_AT (die, attr_kind: a->dw_attr);
	      ix--;
	    }
	  else
	    mark_base_types (loc: l);
	}
	break;
      case dw_val_class_addr:
	if (a->dw_attr == DW_AT_const_value
	    && !resolve_one_addr (addr: &a->dw_attr_val.v.val_addr))
	  {
	    if (AT_index (a) != NOT_INDEXED)
	      remove_addr_table_entry (entry: a->dw_attr_val.val_entry);
	    remove_AT (die, attr_kind: a->dw_attr);
	    ix--;
	  }
	/* Call-site origins stored as addresses are turned into DIE
	   references (creating a declaration stub if needed), or
	   dropped when no DIE can be found or made.  */
	if ((die->die_tag == DW_TAG_call_site
	     && a->dw_attr == DW_AT_call_origin)
	    || (die->die_tag == DW_TAG_GNU_call_site
		&& a->dw_attr == DW_AT_abstract_origin))
	  {
	    tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
	    dw_die_ref tdie = lookup_decl_die (decl: tdecl);
	    dw_die_ref cdie;
	    if (tdie == NULL
		&& DECL_EXTERNAL (tdecl)
		&& DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
		&& (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
	      {
		dw_die_ref pdie = cdie;
		/* Make sure we don't add these DIEs into type units.
		   We could emit skeleton DIEs for context (namespaces,
		   outer structs/classes) and a skeleton DIE for the
		   innermost context with DW_AT_signature pointing to the
		   type unit.  See PR78835.  */
		while (pdie && pdie->die_tag != DW_TAG_type_unit)
		  pdie = pdie->die_parent;
		if (pdie == NULL)
		  {
		    /* Creating a full DIE for tdecl is overly expensive and
		       at this point even wrong when in the LTO phase
		       as it can end up generating new type DIEs we didn't
		       output and thus optimize_external_refs will crash.  */
		    tdie = new_die (tag_value: DW_TAG_subprogram, parent_die: cdie, NULL_TREE);
		    add_AT_flag (die: tdie, attr_kind: DW_AT_external, flag: 1);
		    add_AT_flag (die: tdie, attr_kind: DW_AT_declaration, flag: 1);
		    add_linkage_attr (die: tdie, decl: tdecl);
		    add_name_and_src_coords_attributes (die: tdie, decl: tdecl, no_linkage_name: true);
		    equate_decl_number_to_die (decl: tdecl, decl_die: tdie);
		  }
	      }
	    if (tdie)
	      {
		a->dw_attr_val.val_class = dw_val_class_die_ref;
		a->dw_attr_val.v.val_die_ref.die = tdie;
		a->dw_attr_val.v.val_die_ref.external = 0;
	      }
	    else
	      {
		if (AT_index (a) != NOT_INDEXED)
		  remove_addr_table_entry (entry: a->dw_attr_val.val_entry);
		remove_AT (die, attr_kind: a->dw_attr);
		ix--;
	      }
	  }
	break;
      default:
	break;
      }

  /* Deferred removal: DW_AT_string_length was dropped above, so the
     companion size attribute must go too (its name differs by DWARF
     version).  */
  if (remove_AT_byte_size)
    remove_AT (die, dwarf_version >= 5
		    ? DW_AT_string_length_byte_size
		    : DW_AT_byte_size);

  FOR_EACH_CHILD (die, c, resolve_addr (c));
}
| 32118 | |
| 32119 | /* Helper routines for optimize_location_lists. |
| 32120 | This pass tries to share identical local lists in .debug_loc |
| 32121 | section. */ |
| 32122 | |
/* Iteratively hash operands of LOC opcode into HSTATE.
   Only the operands are hashed here; the opcode itself is added by
   the caller (hash_locs).  */

static void
hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
{
  dw_val_ref val1 = &loc->dw_loc_oprnd1;
  dw_val_ref val2 = &loc->dw_loc_oprnd2;

  switch (loc->dw_loc_opc)
    {
    case DW_OP_const4u:
    case DW_OP_const8u:
      /* DTP-relative constants hold an address, hash them as such.  */
      if (loc->dw_loc_dtprel)
	goto hash_addr;
      /* FALLTHRU */
    case DW_OP_const1u:
    case DW_OP_const1s:
    case DW_OP_const2u:
    case DW_OP_const2s:
    case DW_OP_const4s:
    case DW_OP_const8s:
    case DW_OP_constu:
    case DW_OP_consts:
    case DW_OP_pick:
    case DW_OP_plus_uconst:
    case DW_OP_breg0:
    case DW_OP_breg1:
    case DW_OP_breg2:
    case DW_OP_breg3:
    case DW_OP_breg4:
    case DW_OP_breg5:
    case DW_OP_breg6:
    case DW_OP_breg7:
    case DW_OP_breg8:
    case DW_OP_breg9:
    case DW_OP_breg10:
    case DW_OP_breg11:
    case DW_OP_breg12:
    case DW_OP_breg13:
    case DW_OP_breg14:
    case DW_OP_breg15:
    case DW_OP_breg16:
    case DW_OP_breg17:
    case DW_OP_breg18:
    case DW_OP_breg19:
    case DW_OP_breg20:
    case DW_OP_breg21:
    case DW_OP_breg22:
    case DW_OP_breg23:
    case DW_OP_breg24:
    case DW_OP_breg25:
    case DW_OP_breg26:
    case DW_OP_breg27:
    case DW_OP_breg28:
    case DW_OP_breg29:
    case DW_OP_breg30:
    case DW_OP_breg31:
    case DW_OP_regx:
    case DW_OP_fbreg:
    case DW_OP_piece:
    case DW_OP_deref_size:
    case DW_OP_xderef_size:
      /* Single integer operand.  */
      hstate.add_object (obj&: val1->v.val_int);
      break;
    case DW_OP_skip:
    case DW_OP_bra:
      {
	int offset;

	/* Hash the relative branch offset (target address minus the
	   address just past this 3-byte op), not absolute addresses,
	   so identical expressions at different offsets hash alike.  */
	gcc_assert (val1->val_class == dw_val_class_loc);
	offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
	hstate.add_object (obj&: offset);
      }
      break;
    case DW_OP_implicit_value:
      /* Hash the size, then the value according to its class.  */
      hstate.add_object (obj&: val1->v.val_unsigned);
      switch (val2->val_class)
	{
	case dw_val_class_const:
	  hstate.add_object (obj&: val2->v.val_int);
	  break;
	case dw_val_class_vec:
	  {
	    unsigned int elt_size = val2->v.val_vec.elt_size;
	    unsigned int len = val2->v.val_vec.length;

	    hstate.add_int (v: elt_size);
	    hstate.add_int (v: len);
	    hstate.add (data: val2->v.val_vec.array, len: len * elt_size);
	  }
	  break;
	case dw_val_class_const_double:
	  hstate.add_object (obj&: val2->v.val_double.low);
	  hstate.add_object (obj&: val2->v.val_double.high);
	  break;
	case dw_val_class_wide_int:
	  hstate.add (data: val2->v.val_wide->get_val (),
		      len: get_full_len (op: *val2->v.val_wide)
		      * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
	  break;
	case dw_val_class_addr:
	  inchash::add_rtx (val2->v.val_addr, hstate);
	  break;
	default:
	  gcc_unreachable ();
	}
      break;
    case DW_OP_bregx:
    case DW_OP_bit_piece:
      /* Two integer operands.  */
      hstate.add_object (obj&: val1->v.val_int);
      hstate.add_object (obj&: val2->v.val_int);
      break;
    case DW_OP_addr:
    hash_addr:
      if (loc->dw_loc_dtprel)
	{
	  /* Tag byte mixed in so DTP-relative addresses hash
	     differently from plain ones.  */
	  unsigned char dtprel = 0xd1;
	  hstate.add_object (obj&: dtprel);
	}
      inchash::add_rtx (val1->v.val_addr, hstate);
      break;
    case DW_OP_GNU_addr_index:
    case DW_OP_addrx:
    case DW_OP_GNU_const_index:
    case DW_OP_constx:
      {
	if (loc->dw_loc_dtprel)
	  {
	    /* Same DTP-relative tag byte as for DW_OP_addr above.  */
	    unsigned char dtprel = 0xd1;
	    hstate.add_object (obj&: dtprel);
	  }
	/* Hash the rtl behind the address-table entry, not the index,
	   which can differ between otherwise-identical expressions.  */
	inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
      }
      break;
    case DW_OP_implicit_pointer:
    case DW_OP_GNU_implicit_pointer:
      /* Only the offset is hashed; the DIE reference operand is not
	 stable at this point.  */
      hstate.add_int (v: val2->v.val_int);
      break;
    case DW_OP_entry_value:
    case DW_OP_GNU_entry_value:
      hstate.add_object (obj&: val1->v.val_loc);
      break;
    case DW_OP_regval_type:
    case DW_OP_deref_type:
    case DW_OP_GNU_regval_type:
    case DW_OP_GNU_deref_type:
      {
	/* Hash the base type by its size and encoding rather than by
	   DIE identity.  */
	unsigned int byte_size
	  = get_AT_unsigned (die: val2->v.val_die_ref.die, attr_kind: DW_AT_byte_size);
	unsigned int encoding
	  = get_AT_unsigned (die: val2->v.val_die_ref.die, attr_kind: DW_AT_encoding);
	hstate.add_object (obj&: val1->v.val_int);
	hstate.add_object (obj&: byte_size);
	hstate.add_object (obj&: encoding);
      }
      break;
    case DW_OP_convert:
    case DW_OP_reinterpret:
    case DW_OP_GNU_convert:
    case DW_OP_GNU_reinterpret:
      if (val1->val_class == dw_val_class_unsigned_const)
	{
	  hstate.add_object (obj&: val1->v.val_unsigned);
	  break;
	}
      /* FALLTHRU */
    case DW_OP_const_type:
    case DW_OP_GNU_const_type:
      {
	/* Hash the base type by size and encoding, plus (for const_type)
	   the constant operand by its value class.  */
	unsigned int byte_size
	  = get_AT_unsigned (die: val1->v.val_die_ref.die, attr_kind: DW_AT_byte_size);
	unsigned int encoding
	  = get_AT_unsigned (die: val1->v.val_die_ref.die, attr_kind: DW_AT_encoding);
	hstate.add_object (obj&: byte_size);
	hstate.add_object (obj&: encoding);
	if (loc->dw_loc_opc != DW_OP_const_type
	    && loc->dw_loc_opc != DW_OP_GNU_const_type)
	  break;
	hstate.add_object (obj&: val2->val_class);
	switch (val2->val_class)
	  {
	  case dw_val_class_const:
	    hstate.add_object (obj&: val2->v.val_int);
	    break;
	  case dw_val_class_vec:
	    {
	      unsigned int elt_size = val2->v.val_vec.elt_size;
	      unsigned int len = val2->v.val_vec.length;

	      hstate.add_object (obj&: elt_size);
	      hstate.add_object (obj&: len);
	      hstate.add (data: val2->v.val_vec.array, len: len * elt_size);
	    }
	    break;
	  case dw_val_class_const_double:
	    hstate.add_object (obj&: val2->v.val_double.low);
	    hstate.add_object (obj&: val2->v.val_double.high);
	    break;
	  case dw_val_class_wide_int:
	    hstate.add (data: val2->v.val_wide->get_val (),
			len: get_full_len (op: *val2->v.val_wide)
			* HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
	    break;
	  default:
	    gcc_unreachable ();
	  }
      }
      break;

    default:
      /* Other codes have no operands.  */
      break;
    }
}
| 32337 | |
| 32338 | /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */ |
| 32339 | |
| 32340 | static inline void |
| 32341 | hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate) |
| 32342 | { |
| 32343 | dw_loc_descr_ref l; |
| 32344 | bool sizes_computed = false; |
| 32345 | /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */ |
| 32346 | size_of_locs (loc); |
| 32347 | |
| 32348 | for (l = loc; l != NULL; l = l->dw_loc_next) |
| 32349 | { |
| 32350 | enum dwarf_location_atom opc = l->dw_loc_opc; |
| 32351 | hstate.add_object (obj&: opc); |
| 32352 | if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed) |
| 32353 | { |
| 32354 | size_of_locs (loc); |
| 32355 | sizes_computed = true; |
| 32356 | } |
| 32357 | hash_loc_operands (loc: l, hstate); |
| 32358 | } |
| 32359 | } |
| 32360 | |
/* Compute hash of the whole location list LIST_HEAD and cache it in
   LIST_HEAD->hash, for sharing identical lists in .debug_loc.  */

static inline void
hash_loc_list (dw_loc_list_ref list_head)
{
  dw_loc_list_ref curr = list_head;
  inchash::hash hstate;

  for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
    {
      /* Hash the begin/end range labels including their terminating
	 NUL, so "a" followed by "bc" differs from "ab" then "c".  */
      hstate.add (data: curr->begin, len: strlen (s: curr->begin) + 1);
      hstate.add (data: curr->end, len: strlen (s: curr->end) + 1);
      /* Location view numbers participate in identity as well.  */
      hstate.add_object (obj&: curr->vbegin);
      hstate.add_object (obj&: curr->vend);
      if (curr->section)
	hstate.add (data: curr->section, len: strlen (s: curr->section) + 1);
      hash_locs (loc: curr->expr, hstate);
    }
  /* Only the head node carries the cached hash.  */
  list_head->hash = hstate.end ();
}
| 32381 | |
| 32382 | /* Return true if X and Y opcodes have the same operands. */ |
| 32383 | |
| 32384 | static inline bool |
| 32385 | compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y) |
| 32386 | { |
| 32387 | dw_val_ref valx1 = &x->dw_loc_oprnd1; |
| 32388 | dw_val_ref valx2 = &x->dw_loc_oprnd2; |
| 32389 | dw_val_ref valy1 = &y->dw_loc_oprnd1; |
| 32390 | dw_val_ref valy2 = &y->dw_loc_oprnd2; |
| 32391 | |
| 32392 | switch (x->dw_loc_opc) |
| 32393 | { |
| 32394 | case DW_OP_const4u: |
| 32395 | case DW_OP_const8u: |
| 32396 | if (x->dw_loc_dtprel) |
| 32397 | goto hash_addr; |
| 32398 | /* FALLTHRU */ |
| 32399 | case DW_OP_const1u: |
| 32400 | case DW_OP_const1s: |
| 32401 | case DW_OP_const2u: |
| 32402 | case DW_OP_const2s: |
| 32403 | case DW_OP_const4s: |
| 32404 | case DW_OP_const8s: |
| 32405 | case DW_OP_constu: |
| 32406 | case DW_OP_consts: |
| 32407 | case DW_OP_pick: |
| 32408 | case DW_OP_plus_uconst: |
| 32409 | case DW_OP_breg0: |
| 32410 | case DW_OP_breg1: |
| 32411 | case DW_OP_breg2: |
| 32412 | case DW_OP_breg3: |
| 32413 | case DW_OP_breg4: |
| 32414 | case DW_OP_breg5: |
| 32415 | case DW_OP_breg6: |
| 32416 | case DW_OP_breg7: |
| 32417 | case DW_OP_breg8: |
| 32418 | case DW_OP_breg9: |
| 32419 | case DW_OP_breg10: |
| 32420 | case DW_OP_breg11: |
| 32421 | case DW_OP_breg12: |
| 32422 | case DW_OP_breg13: |
| 32423 | case DW_OP_breg14: |
| 32424 | case DW_OP_breg15: |
| 32425 | case DW_OP_breg16: |
| 32426 | case DW_OP_breg17: |
| 32427 | case DW_OP_breg18: |
| 32428 | case DW_OP_breg19: |
| 32429 | case DW_OP_breg20: |
| 32430 | case DW_OP_breg21: |
| 32431 | case DW_OP_breg22: |
| 32432 | case DW_OP_breg23: |
| 32433 | case DW_OP_breg24: |
| 32434 | case DW_OP_breg25: |
| 32435 | case DW_OP_breg26: |
| 32436 | case DW_OP_breg27: |
| 32437 | case DW_OP_breg28: |
| 32438 | case DW_OP_breg29: |
| 32439 | case DW_OP_breg30: |
| 32440 | case DW_OP_breg31: |
| 32441 | case DW_OP_regx: |
| 32442 | case DW_OP_fbreg: |
| 32443 | case DW_OP_piece: |
| 32444 | case DW_OP_deref_size: |
| 32445 | case DW_OP_xderef_size: |
| 32446 | return valx1->v.val_int == valy1->v.val_int; |
| 32447 | case DW_OP_skip: |
| 32448 | case DW_OP_bra: |
| 32449 | /* If splitting debug info, the use of DW_OP_GNU_addr_index |
| 32450 | can cause irrelevant differences in dw_loc_addr. */ |
| 32451 | gcc_assert (valx1->val_class == dw_val_class_loc |
| 32452 | && valy1->val_class == dw_val_class_loc |
| 32453 | && (dwarf_split_debug_info |
| 32454 | || x->dw_loc_addr == y->dw_loc_addr)); |
| 32455 | return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr; |
| 32456 | case DW_OP_implicit_value: |
| 32457 | if (valx1->v.val_unsigned != valy1->v.val_unsigned |
| 32458 | || valx2->val_class != valy2->val_class) |
| 32459 | return false; |
| 32460 | switch (valx2->val_class) |
| 32461 | { |
| 32462 | case dw_val_class_const: |
| 32463 | return valx2->v.val_int == valy2->v.val_int; |
| 32464 | case dw_val_class_vec: |
| 32465 | return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size |
| 32466 | && valx2->v.val_vec.length == valy2->v.val_vec.length |
| 32467 | && memcmp (s1: valx2->v.val_vec.array, s2: valy2->v.val_vec.array, |
| 32468 | n: valx2->v.val_vec.elt_size |
| 32469 | * valx2->v.val_vec.length) == 0; |
| 32470 | case dw_val_class_const_double: |
| 32471 | return valx2->v.val_double.low == valy2->v.val_double.low |
| 32472 | && valx2->v.val_double.high == valy2->v.val_double.high; |
| 32473 | case dw_val_class_wide_int: |
| 32474 | return *valx2->v.val_wide == *valy2->v.val_wide; |
| 32475 | case dw_val_class_addr: |
| 32476 | return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr); |
| 32477 | default: |
| 32478 | gcc_unreachable (); |
| 32479 | } |
| 32480 | case DW_OP_bregx: |
| 32481 | case DW_OP_bit_piece: |
| 32482 | return valx1->v.val_int == valy1->v.val_int |
| 32483 | && valx2->v.val_int == valy2->v.val_int; |
| 32484 | case DW_OP_addr: |
| 32485 | hash_addr: |
| 32486 | return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr); |
| 32487 | case DW_OP_GNU_addr_index: |
| 32488 | case DW_OP_addrx: |
| 32489 | case DW_OP_GNU_const_index: |
| 32490 | case DW_OP_constx: |
| 32491 | { |
| 32492 | rtx ax1 = valx1->val_entry->addr.rtl; |
| 32493 | rtx ay1 = valy1->val_entry->addr.rtl; |
| 32494 | return rtx_equal_p (ax1, ay1); |
| 32495 | } |
| 32496 | case DW_OP_implicit_pointer: |
| 32497 | case DW_OP_GNU_implicit_pointer: |
| 32498 | return valx1->val_class == dw_val_class_die_ref |
| 32499 | && valx1->val_class == valy1->val_class |
| 32500 | && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die |
| 32501 | && valx2->v.val_int == valy2->v.val_int; |
| 32502 | case DW_OP_entry_value: |
| 32503 | case DW_OP_GNU_entry_value: |
| 32504 | return compare_loc_operands (x: valx1->v.val_loc, y: valy1->v.val_loc); |
| 32505 | case DW_OP_const_type: |
| 32506 | case DW_OP_GNU_const_type: |
| 32507 | if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die |
| 32508 | || valx2->val_class != valy2->val_class) |
| 32509 | return false; |
| 32510 | switch (valx2->val_class) |
| 32511 | { |
| 32512 | case dw_val_class_const: |
| 32513 | return valx2->v.val_int == valy2->v.val_int; |
| 32514 | case dw_val_class_vec: |
| 32515 | return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size |
| 32516 | && valx2->v.val_vec.length == valy2->v.val_vec.length |
| 32517 | && memcmp (s1: valx2->v.val_vec.array, s2: valy2->v.val_vec.array, |
| 32518 | n: valx2->v.val_vec.elt_size |
| 32519 | * valx2->v.val_vec.length) == 0; |
| 32520 | case dw_val_class_const_double: |
| 32521 | return valx2->v.val_double.low == valy2->v.val_double.low |
| 32522 | && valx2->v.val_double.high == valy2->v.val_double.high; |
| 32523 | case dw_val_class_wide_int: |
| 32524 | return *valx2->v.val_wide == *valy2->v.val_wide; |
| 32525 | default: |
| 32526 | gcc_unreachable (); |
| 32527 | } |
| 32528 | case DW_OP_regval_type: |
| 32529 | case DW_OP_deref_type: |
| 32530 | case DW_OP_GNU_regval_type: |
| 32531 | case DW_OP_GNU_deref_type: |
| 32532 | return valx1->v.val_int == valy1->v.val_int |
| 32533 | && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die; |
| 32534 | case DW_OP_convert: |
| 32535 | case DW_OP_reinterpret: |
| 32536 | case DW_OP_GNU_convert: |
| 32537 | case DW_OP_GNU_reinterpret: |
| 32538 | if (valx1->val_class != valy1->val_class) |
| 32539 | return false; |
| 32540 | if (valx1->val_class == dw_val_class_unsigned_const) |
| 32541 | return valx1->v.val_unsigned == valy1->v.val_unsigned; |
| 32542 | return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die; |
| 32543 | case DW_OP_GNU_parameter_ref: |
| 32544 | return valx1->val_class == dw_val_class_die_ref |
| 32545 | && valx1->val_class == valy1->val_class |
| 32546 | && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die; |
| 32547 | default: |
| 32548 | /* Other codes have no operands. */ |
| 32549 | return true; |
| 32550 | } |
| 32551 | } |
| 32552 | |
| 32553 | /* Return true if DWARF location expressions X and Y are the same. */ |
| 32554 | |
| 32555 | static inline bool |
| 32556 | compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y) |
| 32557 | { |
| 32558 | for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next) |
| 32559 | if (x->dw_loc_opc != y->dw_loc_opc |
| 32560 | || x->dw_loc_dtprel != y->dw_loc_dtprel |
| 32561 | || !compare_loc_operands (x, y)) |
| 32562 | break; |
| 32563 | return x == NULL && y == NULL; |
| 32564 | } |
| 32565 | |
/* Hashtable helpers.  */

/* Hasher traits for interning location lists: hashing returns the
   precomputed hash field, equality is a deep structural comparison.
   The table does not own the entries (nofree_ptr_hash).  */

struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
{
  static inline hashval_t hash (const dw_loc_list_struct *);
  static inline bool equal (const dw_loc_list_struct *,
			    const dw_loc_list_struct *);
};
| 32574 | |
/* Return precomputed hash of location list X.  */

inline hashval_t
loc_list_hasher::hash (const dw_loc_list_struct *x)
{
  /* The hash is cached on the list head (hash_loc_list is called
     before entries are inserted) — never recompute it here.  */
  return x->hash;
}
| 32582 | |
| 32583 | /* Return true if location lists A and B are the same. */ |
| 32584 | |
| 32585 | inline bool |
| 32586 | loc_list_hasher::equal (const dw_loc_list_struct *a, |
| 32587 | const dw_loc_list_struct *b) |
| 32588 | { |
| 32589 | if (a == b) |
| 32590 | return true; |
| 32591 | if (a->hash != b->hash) |
| 32592 | return false; |
| 32593 | for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next) |
| 32594 | if (strcmp (s1: a->begin, s2: b->begin) != 0 |
| 32595 | || strcmp (s1: a->end, s2: b->end) != 0 |
| 32596 | || (a->section == NULL) != (b->section == NULL) |
| 32597 | || (a->section && strcmp (s1: a->section, s2: b->section) != 0) |
| 32598 | || a->vbegin != b->vbegin || a->vend != b->vend |
| 32599 | || !compare_locs (x: a->expr, y: b->expr)) |
| 32600 | break; |
| 32601 | return a == NULL && b == NULL; |
| 32602 | } |
| 32603 | |
/* Hash table used to intern and share identical location lists.  */
typedef hash_table<loc_list_hasher> loc_list_hash_type;
| 32605 | |
| 32606 | |
| 32607 | /* Recursively optimize location lists referenced from DIE |
| 32608 | children and share them whenever possible. */ |
| 32609 | |
| 32610 | static void |
| 32611 | optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab) |
| 32612 | { |
| 32613 | dw_die_ref c; |
| 32614 | dw_attr_node *a; |
| 32615 | unsigned ix; |
| 32616 | dw_loc_list_struct **slot; |
| 32617 | bool drop_locviews = false; |
| 32618 | bool has_locviews = false; |
| 32619 | |
| 32620 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
| 32621 | if (AT_class (a) == dw_val_class_loc_list) |
| 32622 | { |
| 32623 | dw_loc_list_ref list = AT_loc_list (a); |
| 32624 | /* TODO: perform some optimizations here, before hashing |
| 32625 | it and storing into the hash table. */ |
| 32626 | hash_loc_list (list_head: list); |
| 32627 | slot = htab->find_slot_with_hash (comparable: list, hash: list->hash, insert: INSERT); |
| 32628 | if (*slot == NULL) |
| 32629 | { |
| 32630 | *slot = list; |
| 32631 | if (loc_list_has_views (list)) |
| 32632 | gcc_assert (list->vl_symbol); |
| 32633 | else if (list->vl_symbol) |
| 32634 | { |
| 32635 | drop_locviews = true; |
| 32636 | list->vl_symbol = NULL; |
| 32637 | } |
| 32638 | } |
| 32639 | else |
| 32640 | { |
| 32641 | if (list->vl_symbol && !(*slot)->vl_symbol) |
| 32642 | drop_locviews = true; |
| 32643 | a->dw_attr_val.v.val_loc_list = *slot; |
| 32644 | } |
| 32645 | } |
| 32646 | else if (AT_class (a) == dw_val_class_view_list) |
| 32647 | { |
| 32648 | gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews); |
| 32649 | has_locviews = true; |
| 32650 | } |
| 32651 | |
| 32652 | |
| 32653 | if (drop_locviews && has_locviews) |
| 32654 | remove_AT (die, attr_kind: DW_AT_GNU_locviews); |
| 32655 | |
| 32656 | FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab)); |
| 32657 | } |
| 32658 | |
| 32659 | |
| 32660 | /* Recursively assign each location list a unique index into the debug_addr |
| 32661 | section. */ |
| 32662 | |
| 32663 | static void |
| 32664 | index_location_lists (dw_die_ref die) |
| 32665 | { |
| 32666 | dw_die_ref c; |
| 32667 | dw_attr_node *a; |
| 32668 | unsigned ix; |
| 32669 | |
| 32670 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
| 32671 | if (AT_class (a) == dw_val_class_loc_list) |
| 32672 | { |
| 32673 | dw_loc_list_ref list = AT_loc_list (a); |
| 32674 | dw_loc_list_ref curr; |
| 32675 | for (curr = list; curr != NULL; curr = curr->dw_loc_next) |
| 32676 | { |
| 32677 | /* Don't index an entry that has already been indexed |
| 32678 | or won't be output. Make sure skip_loc_list_entry doesn't |
| 32679 | call size_of_locs, because that might cause circular dependency, |
| 32680 | index_location_lists requiring address table indexes to be |
| 32681 | computed, but adding new indexes through add_addr_table_entry |
| 32682 | and address table index computation requiring no new additions |
| 32683 | to the hash table. In the rare case of DWARF[234] >= 64KB |
| 32684 | location expression, we'll just waste unused address table entry |
| 32685 | for it. */ |
| 32686 | if (curr->begin_entry != NULL || skip_loc_list_entry (curr)) |
| 32687 | continue; |
| 32688 | |
| 32689 | curr->begin_entry |
| 32690 | = add_addr_table_entry (addr: xstrdup (curr->begin), kind: ate_kind_label); |
| 32691 | if (dwarf_version >= 5 && !HAVE_AS_LEB128) |
| 32692 | curr->end_entry |
| 32693 | = add_addr_table_entry (addr: xstrdup (curr->end), kind: ate_kind_label); |
| 32694 | } |
| 32695 | } |
| 32696 | |
| 32697 | FOR_EACH_CHILD (die, c, index_location_lists (c)); |
| 32698 | } |
| 32699 | |
| 32700 | /* Optimize location lists referenced from DIE |
| 32701 | children and share them whenever possible. */ |
| 32702 | |
| 32703 | static void |
| 32704 | optimize_location_lists (dw_die_ref die) |
| 32705 | { |
| 32706 | loc_list_hash_type htab (500); |
| 32707 | optimize_location_lists_1 (die, htab: &htab); |
| 32708 | } |
| 32709 | |
| 32710 | /* Traverse the limbo die list, and add parent/child links. The only |
| 32711 | dies without parents that should be here are concrete instances of |
| 32712 | inline functions, and the comp_unit_die. We can ignore the comp_unit_die. |
| 32713 | For concrete instances, we can get the parent die from the abstract |
| 32714 | instance. */ |
| 32715 | |
| 32716 | static void |
| 32717 | flush_limbo_die_list (void) |
| 32718 | { |
| 32719 | limbo_die_node *node; |
| 32720 | |
| 32721 | /* get_context_die calls force_decl_die, which can put new DIEs on the |
| 32722 | limbo list in LTO mode when nested functions are put in a different |
| 32723 | partition than that of their parent function. */ |
| 32724 | while ((node = limbo_die_list)) |
| 32725 | { |
| 32726 | dw_die_ref die = node->die; |
| 32727 | limbo_die_list = node->next; |
| 32728 | |
| 32729 | if (die->die_parent == NULL) |
| 32730 | { |
| 32731 | dw_die_ref origin = get_AT_ref (die, attr_kind: DW_AT_abstract_origin); |
| 32732 | |
| 32733 | if (origin && origin->die_parent) |
| 32734 | add_child_die (die: origin->die_parent, child_die: die); |
| 32735 | else if (is_cu_die (c: die)) |
| 32736 | ; |
| 32737 | else if (seen_error ()) |
| 32738 | /* It's OK to be confused by errors in the input. */ |
| 32739 | add_child_die (die: comp_unit_die (), child_die: die); |
| 32740 | else |
| 32741 | { |
| 32742 | /* In certain situations, the lexical block containing a |
| 32743 | nested function can be optimized away, which results |
| 32744 | in the nested function die being orphaned. Likewise |
| 32745 | with the return type of that nested function. Force |
| 32746 | this to be a child of the containing function. |
| 32747 | |
| 32748 | It may happen that even the containing function got fully |
| 32749 | inlined and optimized out. In that case we are lost and |
| 32750 | assign the empty child. This should not be big issue as |
| 32751 | the function is likely unreachable too. */ |
| 32752 | gcc_assert (node->created_for); |
| 32753 | |
| 32754 | if (DECL_P (node->created_for)) |
| 32755 | origin = get_context_die (DECL_CONTEXT (node->created_for)); |
| 32756 | else if (TYPE_P (node->created_for)) |
| 32757 | origin = scope_die_for (t: node->created_for, context_die: comp_unit_die ()); |
| 32758 | else |
| 32759 | origin = comp_unit_die (); |
| 32760 | |
| 32761 | add_child_die (die: origin, child_die: die); |
| 32762 | } |
| 32763 | } |
| 32764 | } |
| 32765 | } |
| 32766 | |
| 32767 | /* Reset DIEs so we can output them again. */ |
| 32768 | |
| 32769 | static void |
| 32770 | reset_dies (dw_die_ref die) |
| 32771 | { |
| 32772 | dw_die_ref c; |
| 32773 | |
| 32774 | /* Remove stuff we re-generate. */ |
| 32775 | die->die_mark = 0; |
| 32776 | die->die_offset = 0; |
| 32777 | die->die_abbrev = 0; |
| 32778 | remove_AT (die, attr_kind: DW_AT_sibling); |
| 32779 | |
| 32780 | FOR_EACH_CHILD (die, c, reset_dies (c)); |
| 32781 | } |
| 32782 | |
| 32783 | /* reset_indirect_string removed the references coming from DW_AT_name |
| 32784 | and DW_AT_comp_dir attributes on compilation unit DIEs. Readd them as |
| 32785 | .debug_line_str strings again. */ |
| 32786 | |
| 32787 | static void |
| 32788 | adjust_name_comp_dir (dw_die_ref die) |
| 32789 | { |
| 32790 | for (int i = 0; i < 2; i++) |
| 32791 | { |
| 32792 | dwarf_attribute attr_kind = i ? DW_AT_comp_dir : DW_AT_name; |
| 32793 | dw_attr_node *a = get_AT (die, attr_kind); |
| 32794 | if (a == NULL || a->dw_attr_val.val_class != dw_val_class_str) |
| 32795 | continue; |
| 32796 | |
| 32797 | if (!debug_line_str_hash) |
| 32798 | debug_line_str_hash |
| 32799 | = hash_table<indirect_string_hasher>::create_ggc (n: 10); |
| 32800 | |
| 32801 | struct indirect_string_node *node |
| 32802 | = find_AT_string_in_table (str: a->dw_attr_val.v.val_str->str, |
| 32803 | table: debug_line_str_hash); |
| 32804 | set_indirect_string (node); |
| 32805 | node->form = DW_FORM_line_strp; |
| 32806 | a->dw_attr_val.v.val_str = node; |
| 32807 | } |
| 32808 | } |
| 32809 | |
| 32810 | /* Output stuff that dwarf requires at the end of every file, |
| 32811 | and generate the DWARF-2 debugging info. */ |
| 32812 | |
| 32813 | static void |
| 32814 | dwarf2out_finish (const char *filename) |
| 32815 | { |
| 32816 | comdat_type_node *ctnode; |
| 32817 | dw_die_ref main_comp_unit_die; |
| 32818 | unsigned char checksum[16]; |
| 32819 | char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES]; |
| 32820 | |
| 32821 | /* Generate CTF/BTF debug info. */ |
| 32822 | if ((ctf_debug_info_level > CTFINFO_LEVEL_NONE |
| 32823 | || btf_debuginfo_p ()) && lang_GNU_C ()) |
| 32824 | ctf_debug_finish (); |
| 32825 | |
| 32826 | #ifdef CODEVIEW_DEBUGGING_INFO |
| 32827 | if (codeview_debuginfo_p ()) |
| 32828 | codeview_debug_finish (); |
| 32829 | #endif |
| 32830 | |
| 32831 | /* Skip emitting DWARF if not required. */ |
| 32832 | if (!dwarf_debuginfo_p ()) |
| 32833 | return; |
| 32834 | |
| 32835 | /* Flush out any latecomers to the limbo party. */ |
| 32836 | flush_limbo_die_list (); |
| 32837 | |
| 32838 | if (btf_tag_htab) |
| 32839 | btf_tag_htab->empty (); |
| 32840 | |
| 32841 | if (inline_entry_data_table) |
| 32842 | gcc_assert (inline_entry_data_table->is_empty ()); |
| 32843 | |
| 32844 | if (flag_checking) |
| 32845 | { |
| 32846 | verify_die (die: comp_unit_die ()); |
| 32847 | for (limbo_die_node *node = cu_die_list; node; node = node->next) |
| 32848 | verify_die (die: node->die); |
| 32849 | } |
| 32850 | |
| 32851 | /* We shouldn't have any symbols with delayed asm names for |
| 32852 | DIEs generated after early finish. */ |
| 32853 | gcc_assert (deferred_asm_name == NULL); |
| 32854 | |
| 32855 | gen_remaining_tmpl_value_param_die_attribute (); |
| 32856 | |
| 32857 | if (flag_generate_lto || flag_generate_offload) |
| 32858 | { |
| 32859 | gcc_assert (flag_fat_lto_objects || flag_generate_offload); |
| 32860 | |
| 32861 | /* Prune stuff so that dwarf2out_finish runs successfully |
| 32862 | for the fat part of the object. */ |
| 32863 | reset_dies (die: comp_unit_die ()); |
| 32864 | for (limbo_die_node *node = cu_die_list; node; node = node->next) |
| 32865 | reset_dies (die: node->die); |
| 32866 | for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next) |
| 32867 | { |
| 32868 | /* Remove the pointer to the line table. */ |
| 32869 | remove_AT (die: ctnode->root_die, attr_kind: DW_AT_stmt_list); |
| 32870 | if (debug_info_level >= DINFO_LEVEL_TERSE) |
| 32871 | reset_dies (die: ctnode->root_die); |
| 32872 | } |
| 32873 | |
| 32874 | /* Reset die CU symbol so we don't output it twice. */ |
| 32875 | comp_unit_die ()->die_id.die_symbol = NULL; |
| 32876 | |
| 32877 | /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */ |
| 32878 | remove_AT (die: comp_unit_die (), attr_kind: DW_AT_stmt_list); |
| 32879 | if (have_macinfo) |
| 32880 | remove_AT (die: comp_unit_die (), DEBUG_MACRO_ATTRIBUTE); |
| 32881 | |
| 32882 | /* Remove indirect string decisions. */ |
| 32883 | debug_str_hash->traverse<void *, reset_indirect_string> (NULL); |
| 32884 | if (debug_line_str_hash) |
| 32885 | { |
| 32886 | debug_line_str_hash->traverse<void *, reset_indirect_string> (NULL); |
| 32887 | debug_line_str_hash = NULL; |
| 32888 | if (asm_outputs_debug_line_str ()) |
| 32889 | { |
| 32890 | adjust_name_comp_dir (die: comp_unit_die ()); |
| 32891 | for (limbo_die_node *node = cu_die_list; node; node = node->next) |
| 32892 | adjust_name_comp_dir (die: node->die); |
| 32893 | } |
| 32894 | } |
| 32895 | } |
| 32896 | |
| 32897 | #if ENABLE_ASSERT_CHECKING |
| 32898 | { |
| 32899 | dw_die_ref die = comp_unit_die (), c; |
| 32900 | FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark)); |
| 32901 | } |
| 32902 | #endif |
| 32903 | base_types.truncate (size: 0); |
| 32904 | for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next) |
| 32905 | resolve_addr (die: ctnode->root_die); |
| 32906 | resolve_addr (die: comp_unit_die ()); |
| 32907 | move_marked_base_types (); |
| 32908 | |
| 32909 | if (dump_file) |
| 32910 | { |
| 32911 | fprintf (stream: dump_file, format: "DWARF for %s\n" , filename); |
| 32912 | print_die (die: comp_unit_die (), outfile: dump_file); |
| 32913 | } |
| 32914 | |
| 32915 | /* Initialize sections and labels used for actual assembler output. */ |
| 32916 | unsigned generation = init_sections_and_labels (early_lto_debug: false); |
| 32917 | |
| 32918 | /* Traverse the DIE's and add sibling attributes to those DIE's that |
| 32919 | have children. */ |
| 32920 | add_sibling_attributes (die: comp_unit_die ()); |
| 32921 | limbo_die_node *node; |
| 32922 | for (node = cu_die_list; node; node = node->next) |
| 32923 | add_sibling_attributes (die: node->die); |
| 32924 | for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next) |
| 32925 | add_sibling_attributes (die: ctnode->root_die); |
| 32926 | |
| 32927 | /* When splitting DWARF info, we put some attributes in the |
| 32928 | skeleton compile_unit DIE that remains in the .o, while |
| 32929 | most attributes go in the DWO compile_unit_die. */ |
| 32930 | if (dwarf_split_debug_info) |
| 32931 | { |
| 32932 | limbo_die_node *cu; |
| 32933 | main_comp_unit_die = gen_compile_unit_die (NULL); |
| 32934 | if (dwarf_version >= 5) |
| 32935 | main_comp_unit_die->die_tag = DW_TAG_skeleton_unit; |
| 32936 | cu = limbo_die_list; |
| 32937 | gcc_assert (cu->die == main_comp_unit_die); |
| 32938 | limbo_die_list = limbo_die_list->next; |
| 32939 | cu->next = cu_die_list; |
| 32940 | cu_die_list = cu; |
| 32941 | } |
| 32942 | else |
| 32943 | main_comp_unit_die = comp_unit_die (); |
| 32944 | |
| 32945 | /* Output a terminator label for the .text section. */ |
| 32946 | switch_to_section (text_section); |
| 32947 | targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0); |
| 32948 | if (cold_text_section) |
| 32949 | { |
| 32950 | switch_to_section (cold_text_section); |
| 32951 | targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0); |
| 32952 | } |
| 32953 | |
| 32954 | /* We can only use the low/high_pc attributes if all of the code was |
| 32955 | in .text. */ |
| 32956 | if ((!have_multiple_function_sections |
| 32957 | && vec_safe_length (v: switch_text_ranges) < 2) |
| 32958 | || (dwarf_version < 3 && dwarf_strict)) |
| 32959 | { |
| 32960 | const char *end_label = text_end_label; |
| 32961 | if (vec_safe_length (v: switch_text_ranges) == 1) |
| 32962 | end_label = (*switch_text_ranges)[0]; |
| 32963 | /* Don't add if the CU has no associated code. */ |
| 32964 | if (switch_text_ranges) |
| 32965 | add_AT_low_high_pc (die: main_comp_unit_die, lbl_low: text_section_label, |
| 32966 | lbl_high: end_label, force_direct: true); |
| 32967 | } |
| 32968 | else |
| 32969 | { |
| 32970 | unsigned fde_idx; |
| 32971 | dw_fde_ref fde; |
| 32972 | bool range_list_added = false; |
| 32973 | if (switch_text_ranges) |
| 32974 | { |
| 32975 | const char *prev_loc = text_section_label; |
| 32976 | const char *loc; |
| 32977 | unsigned idx; |
| 32978 | |
| 32979 | FOR_EACH_VEC_ELT (*switch_text_ranges, idx, loc) |
| 32980 | if (prev_loc) |
| 32981 | { |
| 32982 | add_ranges_by_labels (die: main_comp_unit_die, begin: prev_loc, |
| 32983 | end: loc, added: &range_list_added, force_direct: true); |
| 32984 | prev_loc = NULL; |
| 32985 | } |
| 32986 | else |
| 32987 | prev_loc = loc; |
| 32988 | |
| 32989 | if (prev_loc) |
| 32990 | add_ranges_by_labels (die: main_comp_unit_die, begin: prev_loc, |
| 32991 | end: text_end_label, added: &range_list_added, force_direct: true); |
| 32992 | } |
| 32993 | |
| 32994 | if (switch_cold_ranges) |
| 32995 | { |
| 32996 | const char *prev_loc = cold_text_section_label; |
| 32997 | const char *loc; |
| 32998 | unsigned idx; |
| 32999 | |
| 33000 | FOR_EACH_VEC_ELT (*switch_cold_ranges, idx, loc) |
| 33001 | if (prev_loc) |
| 33002 | { |
| 33003 | add_ranges_by_labels (die: main_comp_unit_die, begin: prev_loc, |
| 33004 | end: loc, added: &range_list_added, force_direct: true); |
| 33005 | prev_loc = NULL; |
| 33006 | } |
| 33007 | else |
| 33008 | prev_loc = loc; |
| 33009 | |
| 33010 | if (prev_loc) |
| 33011 | add_ranges_by_labels (die: main_comp_unit_die, begin: prev_loc, |
| 33012 | end: cold_end_label, added: &range_list_added, force_direct: true); |
| 33013 | } |
| 33014 | |
| 33015 | FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde) |
| 33016 | { |
| 33017 | if (fde->ignored_debug) |
| 33018 | continue; |
| 33019 | if (!fde->in_std_section) |
| 33020 | add_ranges_by_labels (die: main_comp_unit_die, begin: fde->dw_fde_begin, |
| 33021 | end: fde->dw_fde_end, added: &range_list_added, |
| 33022 | force_direct: true); |
| 33023 | if (fde->dw_fde_second_begin && !fde->second_in_std_section) |
| 33024 | add_ranges_by_labels (die: main_comp_unit_die, begin: fde->dw_fde_second_begin, |
| 33025 | end: fde->dw_fde_second_end, added: &range_list_added, |
| 33026 | force_direct: true); |
| 33027 | } |
| 33028 | |
| 33029 | if (range_list_added) |
| 33030 | { |
| 33031 | /* We need to give .debug_loc and .debug_ranges an appropriate |
| 33032 | "base address". Use zero so that these addresses become |
| 33033 | absolute. Historically, we've emitted the unexpected |
| 33034 | DW_AT_entry_pc instead of DW_AT_low_pc for this purpose. |
| 33035 | Emit both to give time for other tools to adapt. */ |
| 33036 | add_AT_addr (die: main_comp_unit_die, attr_kind: DW_AT_low_pc, const0_rtx, force_direct: true); |
| 33037 | if (! dwarf_strict && dwarf_version < 4) |
| 33038 | add_AT_addr (die: main_comp_unit_die, attr_kind: DW_AT_entry_pc, const0_rtx, force_direct: true); |
| 33039 | |
| 33040 | add_ranges (NULL); |
| 33041 | have_multiple_function_sections = true; |
| 33042 | } |
| 33043 | } |
| 33044 | |
| 33045 | /* AIX Assembler inserts the length, so adjust the reference to match the |
| 33046 | offset expected by debuggers. */ |
| 33047 | strcpy (dest: dl_section_ref, src: debug_line_section_label); |
| 33048 | if (XCOFF_DEBUGGING_INFO) |
| 33049 | strcat (dest: dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR); |
| 33050 | |
| 33051 | if (debug_info_level >= DINFO_LEVEL_TERSE) |
| 33052 | add_AT_lineptr (die: main_comp_unit_die, attr_kind: DW_AT_stmt_list, |
| 33053 | label: dl_section_ref); |
| 33054 | |
| 33055 | if (have_macinfo) |
| 33056 | add_AT_macptr (die: comp_unit_die (), DEBUG_MACRO_ATTRIBUTE, |
| 33057 | label: macinfo_section_label); |
| 33058 | |
| 33059 | if (dwarf_split_debug_info) |
| 33060 | { |
| 33061 | if (have_location_lists) |
| 33062 | { |
| 33063 | /* Since we generate the loclists in the split DWARF .dwo |
| 33064 | file itself, we don't need to generate a loclists_base |
| 33065 | attribute for the split compile unit DIE. That attribute |
| 33066 | (and using relocatable sec_offset FORMs) isn't allowed |
| 33067 | for a split compile unit. Only if the .debug_loclists |
| 33068 | section was in the main file, would we need to generate a |
| 33069 | loclists_base attribute here (for the full or skeleton |
| 33070 | unit DIE). */ |
| 33071 | |
| 33072 | /* optimize_location_lists calculates the size of the lists, |
| 33073 | so index them first, and assign indices to the entries. |
| 33074 | Although optimize_location_lists will remove entries from |
| 33075 | the table, it only does so for duplicates, and therefore |
| 33076 | only reduces ref_counts to 1. */ |
| 33077 | index_location_lists (die: comp_unit_die ()); |
| 33078 | } |
| 33079 | |
| 33080 | if (dwarf_version >= 5 && !vec_safe_is_empty (v: ranges_table)) |
| 33081 | index_rnglists (); |
| 33082 | |
| 33083 | if (addr_index_table != NULL) |
| 33084 | { |
| 33085 | unsigned int index = 0; |
| 33086 | addr_index_table |
| 33087 | ->traverse_noresize<unsigned int *, index_addr_table_entry> |
| 33088 | (argument: &index); |
| 33089 | } |
| 33090 | } |
| 33091 | |
| 33092 | loc_list_idx = 0; |
| 33093 | if (have_location_lists) |
| 33094 | { |
| 33095 | optimize_location_lists (die: comp_unit_die ()); |
| 33096 | /* And finally assign indexes to the entries for -gsplit-dwarf. */ |
| 33097 | if (dwarf_version >= 5 && dwarf_split_debug_info) |
| 33098 | assign_location_list_indexes (die: comp_unit_die ()); |
| 33099 | } |
| 33100 | |
| 33101 | save_macinfo_strings (); |
| 33102 | |
| 33103 | if (dwarf_split_debug_info) |
| 33104 | { |
| 33105 | unsigned int index = 0; |
| 33106 | |
| 33107 | /* Add attributes common to skeleton compile_units and |
| 33108 | type_units. Because these attributes include strings, it |
| 33109 | must be done before freezing the string table. Top-level |
| 33110 | skeleton die attrs are added when the skeleton type unit is |
| 33111 | created, so ensure it is created by this point. */ |
| 33112 | add_top_level_skeleton_die_attrs (die: main_comp_unit_die); |
| 33113 | debug_str_hash->traverse_noresize<unsigned int *, index_string> (argument: &index); |
| 33114 | } |
| 33115 | |
| 33116 | /* Output all of the compilation units. We put the main one last so that |
| 33117 | the offsets are available to output_pubnames. */ |
| 33118 | for (node = cu_die_list; node; node = node->next) |
| 33119 | output_comp_unit (die: node->die, output_if_empty: 0, NULL); |
| 33120 | |
| 33121 | hash_table<comdat_type_hasher> comdat_type_table (100); |
| 33122 | for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next) |
| 33123 | { |
| 33124 | comdat_type_node **slot = comdat_type_table.find_slot (value: ctnode, insert: INSERT); |
| 33125 | |
| 33126 | /* Don't output duplicate types. */ |
| 33127 | if (*slot != HTAB_EMPTY_ENTRY) |
| 33128 | continue; |
| 33129 | |
| 33130 | /* Add a pointer to the line table for the main compilation unit |
| 33131 | so that the debugger can make sense of DW_AT_decl_file |
| 33132 | attributes. */ |
| 33133 | if (debug_info_level >= DINFO_LEVEL_TERSE) |
| 33134 | add_AT_lineptr (die: ctnode->root_die, attr_kind: DW_AT_stmt_list, |
| 33135 | label: (!dwarf_split_debug_info |
| 33136 | ? dl_section_ref |
| 33137 | : debug_skeleton_line_section_label)); |
| 33138 | |
| 33139 | output_comdat_type_unit (node: ctnode, early_lto_debug: false); |
| 33140 | *slot = ctnode; |
| 33141 | } |
| 33142 | |
| 33143 | if (dwarf_split_debug_info) |
| 33144 | { |
| 33145 | int mark; |
| 33146 | struct md5_ctx ctx; |
| 33147 | |
| 33148 | /* Compute a checksum of the comp_unit to use as the dwo_id. */ |
| 33149 | md5_init_ctx (ctx: &ctx); |
| 33150 | mark = 0; |
| 33151 | die_checksum (die: comp_unit_die (), ctx: &ctx, mark: &mark); |
| 33152 | unmark_all_dies (die: comp_unit_die ()); |
| 33153 | md5_finish_ctx (ctx: &ctx, resbuf: checksum); |
| 33154 | |
| 33155 | if (dwarf_version < 5) |
| 33156 | { |
| 33157 | /* Use the first 8 bytes of the checksum as the dwo_id, |
| 33158 | and add it to both comp-unit DIEs. */ |
| 33159 | add_AT_data8 (die: main_comp_unit_die, attr_kind: DW_AT_GNU_dwo_id, data8: checksum); |
| 33160 | add_AT_data8 (die: comp_unit_die (), attr_kind: DW_AT_GNU_dwo_id, data8: checksum); |
| 33161 | } |
| 33162 | |
| 33163 | /* Add the base offset of the ranges table to the skeleton |
| 33164 | comp-unit DIE. */ |
| 33165 | if (!vec_safe_is_empty (v: ranges_table)) |
| 33166 | { |
| 33167 | if (dwarf_version < 5) |
| 33168 | add_AT_lineptr (die: main_comp_unit_die, attr_kind: DW_AT_GNU_ranges_base, |
| 33169 | label: ranges_section_label); |
| 33170 | } |
| 33171 | |
| 33172 | output_addr_table (); |
| 33173 | } |
| 33174 | |
| 33175 | /* Output the main compilation unit if non-empty or if .debug_macinfo |
| 33176 | or .debug_macro will be emitted. */ |
| 33177 | output_comp_unit (die: comp_unit_die (), have_macinfo, |
| 33178 | dwarf_split_debug_info ? checksum : NULL); |
| 33179 | |
| 33180 | if (dwarf_split_debug_info && info_section_emitted) |
| 33181 | output_skeleton_debug_sections (comp_unit: main_comp_unit_die, dwo_id: checksum); |
| 33182 | |
| 33183 | /* Output the abbreviation table. */ |
| 33184 | if (vec_safe_length (v: abbrev_die_table) != 1) |
| 33185 | { |
| 33186 | switch_to_section (debug_abbrev_section); |
| 33187 | ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label); |
| 33188 | output_abbrev_section (); |
| 33189 | } |
| 33190 | |
| 33191 | /* Output location list section if necessary. */ |
| 33192 | if (have_location_lists) |
| 33193 | { |
| 33194 | char l1[MAX_ARTIFICIAL_LABEL_BYTES]; |
| 33195 | char l2[MAX_ARTIFICIAL_LABEL_BYTES]; |
| 33196 | /* Output the location lists info. */ |
| 33197 | switch_to_section (debug_loc_section); |
| 33198 | if (dwarf_version >= 5) |
| 33199 | { |
| 33200 | ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 2); |
| 33201 | ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 3); |
| 33202 | if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4) |
| 33203 | dw2_asm_output_data (4, 0xffffffff, |
| 33204 | "Initial length escape value indicating " |
| 33205 | "64-bit DWARF extension" ); |
| 33206 | dw2_asm_output_delta (dwarf_offset_size, l2, l1, |
| 33207 | "Length of Location Lists" ); |
| 33208 | ASM_OUTPUT_LABEL (asm_out_file, l1); |
| 33209 | output_dwarf_version (); |
| 33210 | dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size" ); |
| 33211 | dw2_asm_output_data (1, 0, "Segment Size" ); |
| 33212 | dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0, |
| 33213 | "Offset Entry Count" ); |
| 33214 | } |
| 33215 | ASM_OUTPUT_LABEL (asm_out_file, loc_section_label); |
| 33216 | if (dwarf_version >= 5 && dwarf_split_debug_info) |
| 33217 | { |
| 33218 | unsigned int save_loc_list_idx = loc_list_idx; |
| 33219 | loc_list_idx = 0; |
| 33220 | output_loclists_offsets (die: comp_unit_die ()); |
| 33221 | gcc_assert (save_loc_list_idx == loc_list_idx); |
| 33222 | } |
| 33223 | output_location_lists (die: comp_unit_die ()); |
| 33224 | if (dwarf_version >= 5) |
| 33225 | ASM_OUTPUT_LABEL (asm_out_file, l2); |
| 33226 | } |
| 33227 | |
| 33228 | output_pubtables (); |
| 33229 | |
| 33230 | /* Output the address range information if a CU (.debug_info section) |
| 33231 | was emitted. We output an empty table even if we had no functions |
| 33232 | to put in it. This because the consumer has no way to tell the |
| 33233 | difference between an empty table that we omitted and failure to |
| 33234 | generate a table that would have contained data. */ |
| 33235 | if (info_section_emitted) |
| 33236 | { |
| 33237 | switch_to_section (debug_aranges_section); |
| 33238 | output_aranges (); |
| 33239 | } |
| 33240 | |
| 33241 | /* Output ranges section if necessary. */ |
| 33242 | if (!vec_safe_is_empty (v: ranges_table)) |
| 33243 | { |
| 33244 | if (dwarf_version >= 5) |
| 33245 | { |
| 33246 | if (dwarf_split_debug_info) |
| 33247 | { |
| 33248 | /* We don't know right now whether there are any |
| 33249 | ranges for .debug_rnglists and any for .debug_rnglists.dwo. |
| 33250 | Depending on into which of those two belongs the first |
| 33251 | ranges_table entry, emit that section first and that |
| 33252 | output_rnglists call will return true if the other kind of |
| 33253 | ranges needs to be emitted as well. */ |
| 33254 | bool dwo = (*ranges_table)[0].idx != DW_RANGES_IDX_SKELETON; |
| 33255 | if (output_rnglists (generation, dwo)) |
| 33256 | output_rnglists (generation, dwo: !dwo); |
| 33257 | } |
| 33258 | else |
| 33259 | output_rnglists (generation, dwo: false); |
| 33260 | } |
| 33261 | else |
| 33262 | output_ranges (); |
| 33263 | } |
| 33264 | |
| 33265 | /* Have to end the macro section. */ |
| 33266 | if (have_macinfo) |
| 33267 | { |
| 33268 | switch_to_section (debug_macinfo_section); |
| 33269 | ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label); |
| 33270 | output_macinfo (debug_line_label: !dwarf_split_debug_info ? debug_line_section_label |
| 33271 | : debug_skeleton_line_section_label, early_lto_debug: false); |
| 33272 | dw2_asm_output_data (1, 0, "End compilation unit" ); |
| 33273 | } |
| 33274 | |
| 33275 | /* Output the source line correspondence table. We must do this |
| 33276 | even if there is no line information. Otherwise, on an empty |
| 33277 | translation unit, we will generate a present, but empty, |
| 33278 | .debug_info section. IRIX 6.5 `nm' will then complain when |
| 33279 | examining the file. This is done late so that any filenames |
| 33280 | used by the debug_info section are marked as 'used'. */ |
| 33281 | switch_to_section (debug_line_section); |
| 33282 | ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label); |
| 33283 | if (! output_asm_line_debug_info ()) |
| 33284 | output_line_info (prologue_only: false); |
| 33285 | |
| 33286 | if (dwarf_split_debug_info && info_section_emitted) |
| 33287 | { |
| 33288 | switch_to_section (debug_skeleton_line_section); |
| 33289 | ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label); |
| 33290 | output_line_info (prologue_only: true); |
| 33291 | } |
| 33292 | |
| 33293 | /* If we emitted any indirect strings, output the string table too. */ |
| 33294 | if (debug_str_hash || skeleton_debug_str_hash) |
| 33295 | output_indirect_strings (); |
| 33296 | if (debug_line_str_hash) |
| 33297 | { |
| 33298 | switch_to_section (debug_line_str_section); |
| 33299 | const enum dwarf_form form = DW_FORM_line_strp; |
| 33300 | debug_line_str_hash->traverse<enum dwarf_form, |
| 33301 | output_indirect_string> (argument: form); |
| 33302 | } |
| 33303 | |
| 33304 | /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */ |
| 33305 | symview_upper_bound = 0; |
| 33306 | if (zero_view_p) |
| 33307 | bitmap_clear (zero_view_p); |
| 33308 | } |
| 33309 | |
| 33310 | /* Returns a hash value for X (which really is a variable_value_struct). */ |
| 33311 | |
| 33312 | inline hashval_t |
| 33313 | variable_value_hasher::hash (variable_value_struct *x) |
| 33314 | { |
| 33315 | return (hashval_t) x->decl_id; |
| 33316 | } |
| 33317 | |
| 33318 | /* Return true if decl_id of variable_value_struct X is the same as |
| 33319 | UID of decl Y. */ |
| 33320 | |
| 33321 | inline bool |
| 33322 | variable_value_hasher::equal (variable_value_struct *x, tree y) |
| 33323 | { |
| 33324 | return x->decl_id == DECL_UID (y); |
| 33325 | } |
| 33326 | |
| 33327 | /* Helper function for resolve_variable_value, handle |
| 33328 | DW_OP_GNU_variable_value in one location expression. |
| 33329 | Return true if exprloc has been changed into loclist. */ |
| 33330 | |
static bool
resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
{
  dw_loc_descr_ref next;
  /* Walk the expression keeping a trailing PREV pointer so the current
     node can be replaced or spliced out in place.  NEXT caches the
     successor because the current node may be freed or overwritten.  */
  for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
    {
      next = loc->dw_loc_next;
      /* Only DW_OP_GNU_variable_value opcodes that still carry an
	 unresolved decl reference need work.  */
      if (loc->dw_loc_opc != DW_OP_GNU_variable_value
	  || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
	continue;

      tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
      /* Only handle temporaries belonging to the current function; other
	 functions' decls are resolved when those functions are output.  */
      if (DECL_CONTEXT (decl) != current_function_decl)
	continue;

      /* If the decl already has a DIE, just retarget the operand to a
	 DIE reference and we are done with this node.  */
      dw_die_ref ref = lookup_decl_die (decl);
      if (ref)
	{
	  loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
	  loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
	  loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
	  continue;
	}
      /* Otherwise compute a location (list) describing the decl's value.  */
      dw_loc_list_ref l = loc_list_from_tree (loc: decl, want_address: 0, NULL);
      if (l == NULL)
	continue;
      if (l->dw_loc_next)
	{
	  /* The value needs a full location list; that substitution is
	     only possible for attributes currently using exprloc class.  */
	  if (AT_class (a) != dw_val_class_loc)
	    continue;
	  switch (a->dw_attr)
	    {
	    /* Following attributes allow both exprloc and loclist
	       classes, so we can change them into a loclist.  */
	    case DW_AT_location:
	    case DW_AT_string_length:
	    case DW_AT_return_addr:
	    case DW_AT_data_member_location:
	    case DW_AT_frame_base:
	    case DW_AT_segment:
	    case DW_AT_static_link:
	    case DW_AT_use_location:
	    case DW_AT_vtable_elem_location:
	      /* Re-attach the surrounding expression pieces to every
		 entry of the new list, then swap the attribute over.  */
	      if (prev)
		{
		  prev->dw_loc_next = NULL;
		  prepend_loc_descr_to_each (list: l, ref: AT_loc (a));
		}
	      if (next)
		add_loc_descr_to_each (list: l, ref: next);
	      a->dw_attr_val.val_class = dw_val_class_loc_list;
	      a->dw_attr_val.val_entry = NULL;
	      a->dw_attr_val.v.val_loc_list = l;
	      have_location_lists = true;
	      return true;
	    /* Following attributes allow both exprloc and reference,
	       so if the whole expression is DW_OP_GNU_variable_value alone
	       we could transform it into reference.  */
	    case DW_AT_byte_size:
	    case DW_AT_bit_size:
	    case DW_AT_lower_bound:
	    case DW_AT_upper_bound:
	    case DW_AT_bit_stride:
	    case DW_AT_count:
	    case DW_AT_allocated:
	    case DW_AT_associated:
	    case DW_AT_byte_stride:
	      if (prev == NULL && next == NULL)
		break;
	      /* FALLTHRU */
	    default:
	      /* In strict DWARF neither rewrite is permitted here.  */
	      if (dwarf_strict)
		continue;
	      break;
	    }
	  /* Create DW_TAG_variable that we can refer to.  */
	  gen_decl_die (decl, NULL_TREE, NULL,
			context_die: lookup_decl_die (decl: current_function_decl));
	  ref = lookup_decl_die (decl);
	  if (ref)
	    {
	      loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
	      loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
	      loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
	    }
	  continue;
	}
      /* Single expression: splice it into the chain in place of the
	 DW_OP_GNU_variable_value node.  */
      if (prev)
	{
	  prev->dw_loc_next = l->expr;
	  add_loc_descr (list_head: &prev->dw_loc_next, descr: next);
	  free_loc_descr (loc, NULL);
	  next = prev->dw_loc_next;
	}
      else
	{
	  /* No predecessor: overwrite the node itself with the head of
	     the computed expression.  */
	  memcpy (dest: loc, src: l->expr, n: sizeof (dw_loc_descr_node));
	  add_loc_descr (list_head: &loc, descr: next);
	  next = loc;
	}
      /* Resume scanning from the freshly spliced-in nodes.  */
      loc = prev;
    }
  return false;
}
| 33435 | |
| 33436 | /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */ |
| 33437 | |
static void
resolve_variable_value (dw_die_ref die)
{
  dw_attr_node *a;
  dw_loc_list_ref loc;
  unsigned ix;

  /* Scan every attribute of DIE that can hold a location expression.  */
  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    switch (AT_class (a))
      {
      case dw_val_class_loc:
	/* If the exprloc was converted into a location list by the
	   helper, fall through and process the new list too.  */
	if (!resolve_variable_value_in_expr (a, loc: AT_loc (a)))
	  break;
	/* FALLTHRU */
      case dw_val_class_loc_list:
	loc = AT_loc_list (a);
	gcc_assert (loc);
	for (; loc; loc = loc->dw_loc_next)
	  resolve_variable_value_in_expr (a, loc: loc->expr);
	break;
      default:
	break;
      }
}
| 33462 | |
| 33463 | /* Attempt to optimize DW_OP_GNU_variable_value referring to |
| 33464 | temporaries in the current function. */ |
| 33465 | |
| 33466 | static void |
| 33467 | resolve_variable_values (void) |
| 33468 | { |
| 33469 | if (!variable_value_hash || !current_function_decl) |
| 33470 | return; |
| 33471 | |
| 33472 | struct variable_value_struct *node |
| 33473 | = variable_value_hash->find_with_hash (comparable: current_function_decl, |
| 33474 | DECL_UID (current_function_decl)); |
| 33475 | |
| 33476 | if (node == NULL) |
| 33477 | return; |
| 33478 | |
| 33479 | unsigned int i; |
| 33480 | dw_die_ref die; |
| 33481 | FOR_EACH_VEC_SAFE_ELT (node->dies, i, die) |
| 33482 | resolve_variable_value (die); |
| 33483 | } |
| 33484 | |
| 33485 | /* Helper function for note_variable_value, handle one location |
| 33486 | expression. */ |
| 33487 | |
static void
note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
{
  /* Scan the whole expression for unresolved DW_OP_GNU_variable_value
     operands still pointing at a decl rather than a DIE.  */
  for (; loc; loc = loc->dw_loc_next)
    if (loc->dw_loc_opc == DW_OP_GNU_variable_value
	&& loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
      {
	tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
	dw_die_ref ref = lookup_decl_die (decl);
	if (! ref && (flag_generate_lto || flag_generate_offload))
	  {
	    /* ??? This is somewhat a hack because we do not create DIEs
	       for variables not in BLOCK trees early but when generating
	       early LTO output we need the dw_val_class_decl_ref to be
	       fully resolved.  For fat LTO objects we'd also like to
	       undo this after LTO dwarf output.  */
	    gcc_assert (DECL_CONTEXT (decl));
	    dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
	    gcc_assert (ctx != NULL);
	    gen_decl_die (decl, NULL_TREE, NULL, context_die: ctx);
	    ref = lookup_decl_die (decl);
	    gcc_assert (ref != NULL);
	  }
	/* If the decl now has a DIE, resolve the operand immediately.  */
	if (ref)
	  {
	    loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
	    loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
	    loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
	    continue;
	  }
	/* Otherwise, for a function-local variable whose enclosing
	   function already has a DIE, remember DIE so that
	   resolve_variable_values can retry once that function's body
	   is expanded.  */
	if (VAR_P (decl)
	    && DECL_CONTEXT (decl)
	    && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
	    && lookup_decl_die (DECL_CONTEXT (decl)))
	  {
	    if (!variable_value_hash)
	      variable_value_hash
		= hash_table<variable_value_hasher>::create_ggc (n: 10);

	    tree fndecl = DECL_CONTEXT (decl);
	    struct variable_value_struct *node;
	    struct variable_value_struct **slot
	      = variable_value_hash->find_slot_with_hash (comparable: fndecl,
							  DECL_UID (fndecl),
							  insert: INSERT);
	    /* Create the per-function entry on first use.  */
	    if (*slot == NULL)
	      {
		node = ggc_cleared_alloc<variable_value_struct> ();
		node->decl_id = DECL_UID (fndecl);
		*slot = node;
	      }
	    else
	      node = *slot;

	    vec_safe_push (v&: node->dies, obj: die);
	  }
      }
}
| 33546 | |
| 33547 | /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still |
| 33548 | with dw_val_class_decl_ref operand. */ |
| 33549 | |
static void
note_variable_value (dw_die_ref die)
{
  dw_die_ref c;
  dw_attr_node *a;
  dw_loc_list_ref loc;
  unsigned ix;

  /* Inspect every location-holding attribute of DIE.  */
  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    switch (AT_class (a))
      {
      case dw_val_class_loc_list:
	loc = AT_loc_list (a);
	gcc_assert (loc);
	/* Location lists can be shared between attributes/DIEs; the
	   noted_variable_value flag on the head entry makes sure each
	   list is processed only once.  */
	if (!loc->noted_variable_value)
	  {
	    loc->noted_variable_value = 1;
	    for (; loc; loc = loc->dw_loc_next)
	      note_variable_value_in_expr (die, loc: loc->expr);
	  }
	break;
      case dw_val_class_loc:
	note_variable_value_in_expr (die, loc: AT_loc (a));
	break;
      default:
	break;
      }

  /* Mark children.  */
  FOR_EACH_CHILD (die, c, note_variable_value (c));
}
| 33581 | |
| 33582 | /* Process DWARF dies for CTF generation. */ |
| 33583 | |
| 33584 | static void |
| 33585 | ctf_debug_do_cu (dw_die_ref die) |
| 33586 | { |
| 33587 | dw_die_ref c; |
| 33588 | |
| 33589 | if (!ctf_do_die (die)) |
| 33590 | return; |
| 33591 | |
| 33592 | FOR_EACH_CHILD (die, c, ctf_do_die (c)); |
| 33593 | } |
| 33594 | |
| 33595 | /* Perform any cleanups needed after the early debug generation pass |
| 33596 | has run. */ |
| 33597 | |
static void
dwarf2out_early_finish (const char *filename)
{
  comdat_type_node *ctnode;
  /* RAII guard: everything below runs with early_dwarf set.  */
  set_early_dwarf s;
  char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];

  /* PCH might result in DW_AT_producer string being restored from the
     header compilation, so always fill it with empty string initially
     and overwrite only here.  */
  dw_attr_node *producer = get_AT (die: comp_unit_die (), attr_kind: DW_AT_producer);

  if (dwarf_record_gcc_switches)
    producer_string = gen_producer_string (language_string: lang_hooks.name,
					   options: save_decoded_options,
					   options_count: save_decoded_options_count);
  else
    producer_string = concat (lang_hooks.name, " " , version_string, NULL);

  /* Drop the reference to the placeholder string and install the real
     producer string in the CU DIE.  */
  producer->dw_attr_val.v.val_str->refcount--;
  producer->dw_attr_val.v.val_str = find_AT_string (str: producer_string);

  /* Add the name for the main input file now.  We delayed this from
     dwarf2out_init to avoid complications with PCH.  */
  add_filename_attribute (die: comp_unit_die (), name_string: remap_debug_filename (filename));
  add_comp_dir_attribute (die: comp_unit_die ());

  /* With LTO early dwarf was really finished at compile-time, so make
     sure to adjust the phase after annotating the LTRANS CU DIE.  */
  if (in_lto_p)
    {
      early_dwarf_finished = true;
      if (dump_file)
	{
	  fprintf (stream: dump_file, format: "LTO EARLY DWARF for %s\n" , filename);
	  print_die (die: comp_unit_die (), outfile: dump_file);
	}
      return;
    }

  /* Walk through the list of incomplete types again, trying once more to
     emit full debugging info for them.  */
  retry_incomplete_types ();

  gen_scheduled_generic_parms_dies ();
  gen_remaining_tmpl_value_param_die_attribute ();

  /* The point here is to flush out the limbo list so that it is empty
     and we don't need to stream it for LTO.  */
  flush_limbo_die_list ();

  /* Add DW_AT_linkage_name for all deferred DIEs.  */
  for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
    {
      tree decl = node->created_for;
      if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
	  /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
	     ended up in deferred_asm_name before we knew it was
	     constant and never written to disk.  */
	  && DECL_ASSEMBLER_NAME (decl))
	{
	  add_linkage_attr (die: node->die, decl);
	  move_linkage_attr (die: node->die);
	}
    }
  deferred_asm_name = NULL;

  if (flag_eliminate_unused_debug_types)
    prune_unused_types ();

  /* Generate separate COMDAT sections for type DIEs.  */
  if (use_debug_types)
    {
      break_out_comdat_types (die: comp_unit_die ());

      /* Each new type_unit DIE was added to the limbo die list when created.
	 Since these have all been added to comdat_type_list, clear the
	 limbo die list.  */
      limbo_die_list = NULL;

      /* For each new comdat type unit, copy declarations for incomplete
	 types to make the new unit self-contained (i.e., no direct
	 references to the main compile unit).  */
      for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
	copy_decls_for_unworthy_types (unit: ctnode->root_die);
      copy_decls_for_unworthy_types (unit: comp_unit_die ());

      /* In the process of copying declarations from one unit to another,
	 we may have left some declarations behind that are no longer
	 referenced.  Prune them.  */
      prune_unused_types ();
    }

  /* Traverse the DIE's and note DIEs with DW_OP_GNU_variable_value still
     with dw_val_class_decl_ref operand.  */
  note_variable_value (die: comp_unit_die ());
  for (limbo_die_node *node = cu_die_list; node; node = node->next)
    note_variable_value (die: node->die);
  for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
    note_variable_value (die: ctnode->root_die);
  for (limbo_die_node *node = limbo_die_list; node; node = node->next)
    note_variable_value (die: node->die);

  /* The AT_pubnames attribute needs to go in all skeleton dies, including
     both the main_cu and all skeleton TUs.  Making this call unconditional
     would end up either adding a second copy of the AT_pubnames attribute, or
     requiring a special case in add_top_level_skeleton_die_attrs.  */
  if (!dwarf_split_debug_info)
    add_AT_pubnames (die: comp_unit_die ());

  /* The early debug phase is now finished.  */
  early_dwarf_finished = true;
  if (dump_file)
    {
      fprintf (stream: dump_file, format: "EARLY DWARF for %s\n" , filename);
      print_die (die: comp_unit_die (), outfile: dump_file);
    }

  /* Generate CTF/BTF debug info.  */
  if ((ctf_debug_info_level > CTFINFO_LEVEL_NONE
       || btf_debuginfo_p ()) && lang_GNU_C ())
    {
      ctf_debug_init ();
      ctf_debug_do_cu (die: comp_unit_die ());
      for (limbo_die_node *node = limbo_die_list; node; node = node->next)
	ctf_debug_do_cu (die: node->die);

      ctf_debug_early_finish (filename);
    }

#ifdef CODEVIEW_DEBUGGING_INFO
  if (codeview_debuginfo_p ())
    codeview_debug_early_finish (comp_unit_die ());
#endif

  /* Do not generate DWARF assembler now when not producing LTO bytecode.  */
  if ((!flag_generate_lto && !flag_generate_offload)
      /* FIXME: Disable debug info generation for (PE-)COFF targets since the
	 copy_lto_debug_sections operation of the simple object support in
	 libiberty is not implemented for them yet.  */
      || TARGET_PECOFF || TARGET_COFF)
    return;

  /* Now as we are going to output for LTO initialize sections and labels
     to the LTO variants.  We don't need a random-seed postfix as other
     LTO sections as linking the LTO debug sections into one in a partial
     link is fine.  */
  init_sections_and_labels (early_lto_debug: true);

  /* The output below is modeled after dwarf2out_finish with all
     location related output removed and some LTO specific changes.
     Some refactoring might make both smaller and easier to match up.  */

  base_types.truncate (size: 0);
  for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
    mark_base_types (die: ctnode->root_die);
  mark_base_types (die: comp_unit_die ());
  move_marked_base_types ();

  /* Traverse the DIE's and add sibling attributes to those DIE's
     that have children.  */
  add_sibling_attributes (die: comp_unit_die ());
  for (limbo_die_node *node = limbo_die_list; node; node = node->next)
    add_sibling_attributes (die: node->die);
  for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
    add_sibling_attributes (die: ctnode->root_die);

  /* AIX Assembler inserts the length, so adjust the reference to match the
     offset expected by debuggers.  */
  strcpy (dest: dl_section_ref, src: debug_line_section_label);
  if (XCOFF_DEBUGGING_INFO)
    strcat (dest: dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);

  if (debug_info_level >= DINFO_LEVEL_TERSE)
    add_AT_lineptr (die: comp_unit_die (), attr_kind: DW_AT_stmt_list, label: dl_section_ref);

  if (have_macinfo)
    add_AT_macptr (die: comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
		   label: macinfo_section_label);

  save_macinfo_strings ();

  if (dwarf_split_debug_info)
    {
      /* Assign string-table indices for the split-DWARF string section.  */
      unsigned int index = 0;
      debug_str_hash->traverse_noresize<unsigned int *, index_string> (argument: &index);
    }

  /* Output all of the compilation units.  We put the main one last so that
     the offsets are available to output_pubnames.  */
  for (limbo_die_node *node = limbo_die_list; node; node = node->next)
    output_comp_unit (die: node->die, output_if_empty: 0, NULL);

  hash_table<comdat_type_hasher> comdat_type_table (100);
  for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
    {
      comdat_type_node **slot = comdat_type_table.find_slot (value: ctnode, insert: INSERT);

      /* Don't output duplicate types.  */
      if (*slot != HTAB_EMPTY_ENTRY)
	continue;

      /* Add a pointer to the line table for the main compilation unit
	 so that the debugger can make sense of DW_AT_decl_file
	 attributes.  */
      if (debug_info_level >= DINFO_LEVEL_TERSE)
	add_AT_lineptr (die: ctnode->root_die, attr_kind: DW_AT_stmt_list,
			label: (!dwarf_split_debug_info
			       ? debug_line_section_label
			       : debug_skeleton_line_section_label));

      output_comdat_type_unit (node: ctnode, early_lto_debug: true);
      *slot = ctnode;
    }

  /* Stick a unique symbol to the main debuginfo section.  */
  compute_comp_unit_symbol (unit_die: comp_unit_die ());

  /* Output the main compilation unit.  We always need it if only for
     the CU symbol.  */
  output_comp_unit (die: comp_unit_die (), output_if_empty: true, NULL);

  /* Output the abbreviation table.  */
  if (vec_safe_length (v: abbrev_die_table) != 1)
    {
      switch_to_section (debug_abbrev_section);
      ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
      output_abbrev_section ();
    }

  /* Have to end the macro section.  */
  if (have_macinfo)
    {
      /* We have to save macinfo state if we need to output it again
	 for the FAT part of the object.  */
      vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
      if (flag_fat_lto_objects)
	macinfo_table = macinfo_table->copy ();

      switch_to_section (debug_macinfo_section);
      ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
      output_macinfo (debug_line_label: debug_line_section_label, early_lto_debug: true);
      dw2_asm_output_data (1, 0, "End compilation unit" );

      if (flag_fat_lto_objects)
	{
	  vec_free (v&: macinfo_table);
	  macinfo_table = saved_macinfo_table;
	}
    }

  /* Emit a skeleton debug_line section.  */
  switch_to_section (debug_line_section);
  ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
  output_line_info (prologue_only: true);

  /* If we emitted any indirect strings, output the string table too.  */
  if (debug_str_hash || skeleton_debug_str_hash)
    output_indirect_strings ();
  if (debug_line_str_hash)
    {
      switch_to_section (debug_line_str_section);
      const enum dwarf_form form = DW_FORM_line_strp;
      debug_line_str_hash->traverse<enum dwarf_form,
				    output_indirect_string> (argument: form);
    }

  /* Switch back to the text section.  */
  switch_to_section (text_section);
}
| 33868 | |
| 33869 | /* Reset all state within dwarf2out.cc so that we can rerun the compiler |
| 33870 | within the same process. For use by toplev::finalize. */ |
| 33871 | |
void
dwarf2out_cc_finalize (void)
{
  /* Variable-location and insn tracking state.  */
  last_var_location_insn = NULL;
  cached_next_real_insn = NULL;
  used_rtx_array = NULL;
  incomplete_types = NULL;
  /* Cached section pointers; they are re-created on the next run.  */
  debug_info_section = NULL;
  debug_skeleton_info_section = NULL;
  debug_abbrev_section = NULL;
  debug_skeleton_abbrev_section = NULL;
  debug_aranges_section = NULL;
  debug_addr_section = NULL;
  debug_macinfo_section = NULL;
  debug_line_section = NULL;
  debug_skeleton_line_section = NULL;
  debug_loc_section = NULL;
  debug_pubnames_section = NULL;
  debug_pubtypes_section = NULL;
  debug_str_section = NULL;
  debug_line_str_section = NULL;
  debug_str_dwo_section = NULL;
  debug_str_offsets_section = NULL;
  debug_ranges_section = NULL;
  debug_ranges_dwo_section = NULL;
  debug_frame_section = NULL;
  /* Frame/unwind and string-table state.  */
  fde_vec = NULL;
  debug_str_hash = NULL;
  debug_line_str_hash = NULL;
  skeleton_debug_str_hash = NULL;
  dw2_string_counter = 0;
  have_multiple_function_sections = false;
  in_text_section_p = false;
  cold_text_section = NULL;
  last_text_label = NULL;
  last_cold_label = NULL;
  switch_text_ranges = NULL;
  switch_cold_ranges = NULL;
  current_unit_personality = NULL;
  btf_tag_htab = NULL;

  /* Early-debug phase flags.  */
  early_dwarf = false;
  early_dwarf_finished = false;

  /* DIE tables and CU/TU bookkeeping.  */
  next_die_offset = 0;
  single_comp_unit_die = NULL;
  comdat_type_list = NULL;
  limbo_die_list = NULL;
  file_table = NULL;
  decl_die_table = NULL;
  common_block_die_table = NULL;
  decl_loc_table = NULL;
  call_arg_locations = NULL;
  call_arg_loc_last = NULL;
  call_site_count = -1;
  tail_call_site_count = -1;
  cached_dw_loc_list_table = NULL;
  abbrev_die_table = NULL;
  /* This map is heap-allocated, so it must be freed, not just dropped.  */
  delete dwarf_proc_stack_usage_map;
  dwarf_proc_stack_usage_map = NULL;
  /* Line-table, macro, ranges and label-counter state.  */
  line_info_label_num = 0;
  cur_line_info_table = NULL;
  text_section_line_info = NULL;
  cold_text_section_line_info = NULL;
  separate_line_info = NULL;
  info_section_emitted = false;
  pubname_table = NULL;
  pubtype_table = NULL;
  macinfo_table = NULL;
  ranges_table = NULL;
  ranges_by_label = NULL;
  rnglist_idx = 0;
  have_location_lists = false;
  loclabel_num = 0;
  poc_label_num = 0;
  last_emitted_file = NULL;
  label_num = 0;
  tmpl_value_parm_die_table = NULL;
  generic_type_instances = NULL;
  frame_pointer_fb_offset = 0;
  frame_pointer_fb_offset_valid = false;
  base_types.release ();
  /* producer_string was allocated with xmalloc/concat; free it.  */
  XDELETEVEC (producer_string);
  producer_string = NULL;
  output_line_info_generation = 0;
  init_sections_and_labels_generation = 0;
}
| 33959 | |
| 33960 | #include "gt-dwarf2out.h" |
| 33961 | |