| 1 | /* Top-level LTO routines. |
| 2 | Copyright (C) 2009-2025 Free Software Foundation, Inc. |
| 3 | Contributed by CodeSourcery, Inc. |
| 4 | |
| 5 | This file is part of GCC. |
| 6 | |
| 7 | GCC is free software; you can redistribute it and/or modify it under |
| 8 | the terms of the GNU General Public License as published by the Free |
| 9 | Software Foundation; either version 3, or (at your option) any later |
| 10 | version. |
| 11 | |
| 12 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
| 13 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
| 14 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
| 15 | for more details. |
| 16 | |
| 17 | You should have received a copy of the GNU General Public License |
| 18 | along with GCC; see the file COPYING3. If not see |
| 19 | <http://www.gnu.org/licenses/>. */ |
| 20 | |
| 21 | #include "config.h" |
| 22 | #include "system.h" |
| 23 | #include "coretypes.h" |
| 24 | #include "tm.h" |
| 25 | #include "function.h" |
| 26 | #include "bitmap.h" |
| 27 | #include "basic-block.h" |
| 28 | #include "tree.h" |
| 29 | #include "gimple.h" |
| 30 | #include "cfghooks.h" |
| 31 | #include "alloc-pool.h" |
| 32 | #include "tree-pass.h" |
| 33 | #include "tree-streamer.h" |
| 34 | #include "cgraph.h" |
| 35 | #include "opts.h" |
| 36 | #include "toplev.h" |
| 37 | #include "stor-layout.h" |
| 38 | #include "symbol-summary.h" |
| 39 | #include "tree-vrp.h" |
| 40 | #include "sreal.h" |
| 41 | #include "ipa-cp.h" |
| 42 | #include "ipa-prop.h" |
| 43 | #include "common.h" |
| 44 | #include "debug.h" |
| 45 | #include "lto.h" |
| 46 | #include "lto-section-names.h" |
| 47 | #include "splay-tree.h" |
| 48 | #include "lto-partition.h" |
| 49 | #include "context.h" |
| 50 | #include "pass_manager.h" |
| 51 | #include "ipa-fnsummary.h" |
| 52 | #include "ipa-utils.h" |
| 53 | #include "gomp-constants.h" |
| 54 | #include "lto-symtab.h" |
| 55 | #include "stringpool.h" |
| 56 | #include "fold-const.h" |
| 57 | #include "attribs.h" |
| 58 | #include "builtins.h" |
| 59 | #include "lto-common.h" |
| 60 | #include "tree-pretty-print.h" |
| 61 | #include "print-tree.h" |
| 62 | |
| 63 | /* True when no new types are going to be streamed from the global stream. */ |
| 64 | |
| 65 | static bool type_streaming_finished = false; |
| 66 | |
| 67 | GTY(()) tree first_personality_decl; |
| 68 | |
| 69 | /* Returns a hash code for P. */ |
| 70 | |
| 71 | static hashval_t |
| 72 | hash_name (const void *p) |
| 73 | { |
| 74 | const struct lto_section_slot *ds = (const struct lto_section_slot *) p; |
| 75 | return (hashval_t) htab_hash_string (ds->name); |
| 76 | } |
| 77 | |
| 78 | |
| 79 | /* Returns nonzero if P1 and P2 are equal. */ |
| 80 | |
| 81 | static int |
| 82 | eq_name (const void *p1, const void *p2) |
| 83 | { |
| 84 | const struct lto_section_slot *s1 |
| 85 | = (const struct lto_section_slot *) p1; |
| 86 | const struct lto_section_slot *s2 |
| 87 | = (const struct lto_section_slot *) p2; |
| 88 | |
| 89 | return strcmp (s1->name, s2->name) == 0; |
| 90 | } |
| 91 | |
| 92 | /* Free lto_section_slot. */ |
| 93 | |
| 94 | static void |
| 95 | free_with_string (void *arg) |
| 96 | { |
| 97 | struct lto_section_slot *s = (struct lto_section_slot *)arg; |
| 98 | |
| 99 | free (CONST_CAST (char *, s->name)); |
| 100 | free (arg); |
| 101 | } |
| 102 | |
| 103 | /* Create section hash table. */ |
| 104 | |
| 105 | htab_t |
| 106 | lto_obj_create_section_hash_table (void) |
| 107 | { |
| 108 | return htab_create (37, hash_name, eq_name, free_with_string); |
| 109 | } |
| 110 | |
| 111 | /* Delete an allocated integer KEY in the splay tree. */ |
| 112 | |
| 113 | static void |
| 114 | lto_splay_tree_delete_id (splay_tree_key key) |
| 115 | { |
| 116 | free ((void *) key); |
| 117 | } |
| 118 | |
| 119 | /* Compare splay tree node ids A and B. */ |
| 120 | |
| 121 | static int |
| 122 | lto_splay_tree_compare_ids (splay_tree_key a, splay_tree_key b) |
| 123 | { |
| 124 | unsigned HOST_WIDE_INT ai; |
| 125 | unsigned HOST_WIDE_INT bi; |
| 126 | |
| 127 | ai = *(unsigned HOST_WIDE_INT *) a; |
| 128 | bi = *(unsigned HOST_WIDE_INT *) b; |
| 129 | |
| 130 | if (ai < bi) |
| 131 | return -1; |
| 132 | else if (ai > bi) |
| 133 | return 1; |
| 134 | return 0; |
| 135 | } |
| 136 | |
| 137 | /* Look up splay tree node by ID in splay tree T. */ |
| 138 | |
| 139 | static splay_tree_node |
| 140 | lto_splay_tree_lookup (splay_tree t, unsigned HOST_WIDE_INT id) |
| 141 | { |
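| | /* The comparison callback dereferences the keys, so passing the address of the local ID is sufficient for a lookup. */ |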
| 142 | return splay_tree_lookup (t, (splay_tree_key) &id); |
| 143 | } |
| 144 | |
| 145 | /* Check if KEY has ID. */ |
| 146 | |
| 147 | static bool |
| 148 | lto_splay_tree_id_equal_p (splay_tree_key key, unsigned HOST_WIDE_INT id) |
| 149 | { |
| 150 | return *(unsigned HOST_WIDE_INT *) key == id; |
| 151 | } |
| 152 | |
| 153 | /* Insert a splay tree node into tree T with ID as key and FILE_DATA as value. |
| 154 | The ID is allocated separately because we need HOST_WIDE_INTs which may |
| 155 | be wider than a splay_tree_key. */ |
| 156 | |
| 157 | static void |
| 158 | lto_splay_tree_insert (splay_tree t, unsigned HOST_WIDE_INT id, |
| 159 | struct lto_file_decl_data *file_data) |
| 160 | { |
| 161 | unsigned HOST_WIDE_INT *idp = XCNEW (unsigned HOST_WIDE_INT); |
| 162 | *idp = id; |
| 163 | splay_tree_insert (t, (splay_tree_key) idp, (splay_tree_value) file_data); |
| 164 | } |
| 165 | |
| 166 | /* Create a splay tree. */ |
| 167 | |
| 168 | static splay_tree |
| 169 | lto_splay_tree_new (void) |
| 170 | { |
| 171 | return splay_tree_new (lto_splay_tree_compare_ids, |
| 172 | lto_splay_tree_delete_id, |
| 173 | NULL); |
| 174 | } |
| 175 | |
| 176 | /* Decode the content of memory pointed to by DATA in the in decl |
| 177 | state object STATE. DATA_IN points to a data_in structure for |
| 178 | decoding. Return the address after the decoded object in the |
| 179 | input. */ |
| 180 | |
| 181 | static const uint32_t * |
| 182 | lto_read_in_decl_state (class data_in *data_in, const uint32_t *data, |
| 183 | struct lto_in_decl_state *state) |
| 184 | { |
| 185 | uint32_t ix; |
| 186 | tree decl; |
| 187 | uint32_t i, j; |
| 188 | |
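| | /* The first word packs the compression flag into bit 0 and the reader-cache index of the function decl into the remaining bits. */ |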
| 189 | ix = *data++; |
| 190 | state->compressed = ix & 1; |
| 191 | ix /= 2; |
| 192 | decl = streamer_tree_cache_get_tree (data_in->reader_cache, ix); |
| 193 | if (!VAR_OR_FUNCTION_DECL_P (decl)) |
| 194 | { |
| 195 | gcc_assert (decl == void_type_node); |
| 196 | decl = NULL_TREE; |
| 197 | } |
| 198 | state->fn_decl = decl; |
| 199 | |
| 200 | for (i = 0; i < LTO_N_DECL_STREAMS; i++) |
| 201 | { |
| 202 | uint32_t size = *data++; |
| 203 | vec<tree, va_gc> *decls = NULL; |
| 204 | vec_alloc (decls, size); |
| 205 | |
| 206 | for (j = 0; j < size; j++) |
| 207 | vec_safe_push (decls, |
| 208 | streamer_tree_cache_get_tree (data_in->reader_cache, |
| 209 | data[j])); |
| 210 | |
| 211 | state->streams[i] = decls; |
| 212 | data += size; |
| 213 | } |
| 214 | |
| 215 | return data; |
| 216 | } |
| 217 | |
| 218 | |
| 219 | /* Global canonical type table. */ |
| 220 | static htab_t gimple_canonical_types; |
| 221 | static hash_map<const_tree, hashval_t> *canonical_type_hash_cache; |
| 222 | static unsigned long num_canonical_type_hash_entries; |
| 223 | static unsigned long num_canonical_type_hash_queries; |
| 224 | |
| 225 | /* Types postponed for registration to the canonical type table. |
| 226 | During streaming we postpone all TYPE_CXX_ODR_P types so we can later |
| 227 | decide whether there is a conflict with a non-ODR type or not. */ |
| 228 | static GTY(()) vec<tree, va_gc> *types_to_register = NULL; |
| 229 | |
| 230 | static void iterative_hash_canonical_type (tree type, inchash::hash &hstate); |
| 231 | static hashval_t gimple_canonical_type_hash (const void *p); |
| 232 | static hashval_t gimple_register_canonical_type_1 (tree t, hashval_t hash); |
| 233 | |
| 234 | /* Return a hash value for gimple type TYPE. |
| 235 | |
| 236 | The hash value returned is equal for types considered compatible |
| 237 | by gimple_canonical_types_compatible_p. */ |
| 238 | |
| 239 | static hashval_t |
| 240 | hash_canonical_type (tree type) |
| 241 | { |
| 242 | inchash::hash hstate; |
| 243 | enum tree_code code; |
| 244 | |
| 245 | /* We compute alias sets only for types that need them. |
| 246 | Be sure we do not recurse to something else, as we cannot hash incomplete |
| 247 | types in a way that they would have the same hash value as compatible |
| 248 | complete types. */ |
| 249 | gcc_checking_assert (type_with_alias_set_p (type)); |
| 250 | |
| 251 | /* Combine a few common features of types so that types are grouped into |
| 252 | smaller sets; when searching for existing matching types to merge, |
| 253 | only existing types having the same features as the new type will be |
| 254 | checked. */ |
| 255 | code = tree_code_for_canonical_type_merging (TREE_CODE (type)); |
| 256 | hstate.add_int (code); |
| 257 | if (!RECORD_OR_UNION_TYPE_P (type)) |
| 258 | hstate.add_int (TYPE_MODE (type)); |
| 259 | |
| 260 | /* Incorporate common features of numerical types. */ |
| 261 | if (INTEGRAL_TYPE_P (type) |
| 262 | || SCALAR_FLOAT_TYPE_P (type) |
| 263 | || FIXED_POINT_TYPE_P (type) |
| 264 | || TREE_CODE (type) == OFFSET_TYPE |
| 265 | || POINTER_TYPE_P (type)) |
| 266 | { |
| 267 | hstate.add_int (TYPE_PRECISION (type)); |
| 268 | if (!type_with_interoperable_signedness (type)) |
| 269 | hstate.add_int (TYPE_UNSIGNED (type)); |
| 270 | } |
| 271 | |
| 272 | if (VECTOR_TYPE_P (type)) |
| 273 | { |
| 274 | hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type)); |
| 275 | hstate.add_int (TYPE_UNSIGNED (type)); |
| 276 | } |
| 277 | |
| 278 | if (TREE_CODE (type) == COMPLEX_TYPE) |
| 279 | hstate.add_int (TYPE_UNSIGNED (type)); |
| 280 | |
| 281 | /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be |
| 282 | interoperable with "signed char". Unless all frontends are revisited to |
| 283 | agree on these types, we must ignore the flag completely. */ |
| 284 | |
| 285 | /* The Fortran standard defines the C_PTR type, which is compatible with every |
| 286 | C pointer. For this reason we need to glob all pointers into one. |
| 287 | Still, pointers in different address spaces are not compatible. */ |
| 288 | if (POINTER_TYPE_P (type)) |
| 289 | hstate.add_int (TYPE_ADDR_SPACE (TREE_TYPE (type))); |
| 290 | |
| 291 | /* For array types hash the domain bounds and the string flag. */ |
| 292 | if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type)) |
| 293 | { |
| 294 | hstate.add_int (TYPE_STRING_FLAG (type)); |
| 295 | /* OMP lowering can introduce error_mark_node in place of |
| 296 | random local decls in types. */ |
| 297 | if (TYPE_MIN_VALUE (TYPE_DOMAIN (type)) != error_mark_node) |
| 298 | inchash::add_expr (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), hstate); |
| 299 | if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node) |
| 300 | inchash::add_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), hstate); |
| 301 | } |
| 302 | |
| 303 | /* Recurse for aggregates with a single element type. */ |
| 304 | if (TREE_CODE (type) == ARRAY_TYPE |
| 305 | || TREE_CODE (type) == COMPLEX_TYPE |
| 306 | || TREE_CODE (type) == VECTOR_TYPE) |
| 307 | iterative_hash_canonical_type (TREE_TYPE (type), hstate); |
| 308 | |
| 309 | /* Incorporate function return and argument types. */ |
| 310 | if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE) |
| 311 | { |
| 312 | unsigned na; |
| 313 | tree p; |
| 314 | |
| 315 | iterative_hash_canonical_type (TREE_TYPE (type), hstate); |
| 316 | |
| 317 | for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p)) |
| 318 | { |
| 319 | iterative_hash_canonical_type (TREE_VALUE (p), hstate); |
| 320 | na++; |
| 321 | } |
| 322 | |
| 323 | hstate.add_int (na); |
| 324 | } |
| 325 | |
| 326 | if (RECORD_OR_UNION_TYPE_P (type)) |
| 327 | { |
| 328 | unsigned nf; |
| 329 | tree f; |
| 330 | |
| 331 | for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f)) |
| 332 | if (TREE_CODE (f) == FIELD_DECL |
| 333 | && (! DECL_SIZE (f) |
| 334 | || ! integer_zerop (DECL_SIZE (f)))) |
| 335 | { |
| 336 | tree t = TREE_TYPE (f); |
| 337 | if (!TREE_CHAIN (f) |
| 338 | && TREE_CODE (t) == ARRAY_TYPE) |
| 339 | t = TREE_TYPE (t); |
| 340 | iterative_hash_canonical_type (t, hstate); |
| 341 | nf++; |
| 342 | } |
| 343 | |
| 344 | hstate.add_int (nf); |
| 345 | } |
| 346 | |
| 347 | return hstate.end (); |
| 348 | } |
| 349 | |
| 350 | /* Compute a hash value for gimple type TYPE and combine it into HSTATE. */ |
| 351 | |
| 352 | static void |
| 353 | iterative_hash_canonical_type (tree type, inchash::hash &hstate) |
| 354 | { |
| 355 | hashval_t v; |
| 356 | |
| 357 | /* All type variants have the same TYPE_CANONICAL. */ |
| 358 | type = TYPE_MAIN_VARIANT (type); |
| 359 | |
| 360 | if (!canonical_type_used_p (type)) |
| 361 | v = hash_canonical_type (type); |
| 362 | /* An already processed type. */ |
| 363 | else if (TYPE_CANONICAL (type)) |
| 364 | { |
| 365 | type = TYPE_CANONICAL (type); |
| 366 | v = gimple_canonical_type_hash (type); |
| 367 | } |
| 368 | else |
| 369 | { |
| 370 | /* Canonical types should not be able to form SCCs by design, this |
| 371 | recursion is just because we do not register canonical types in |
| 372 | optimal order. To avoid quadratic behavior also register the |
| 373 | type here. */ |
| 374 | v = hash_canonical_type (type); |
| 375 | v = gimple_register_canonical_type_1 (type, v); |
| 376 | } |
| 377 | hstate.merge_hash (v); |
| 378 | } |
| 379 | |
| 380 | /* Returns the hash for a canonical type P. */ |
| 381 | |
| 382 | static hashval_t |
| 383 | gimple_canonical_type_hash (const void *p) |
| 384 | { |
| 385 | num_canonical_type_hash_queries++; |
| 386 | hashval_t *slot = canonical_type_hash_cache->get ((const_tree) p); |
| 387 | gcc_assert (slot != NULL); |
| 388 | return *slot; |
| 389 | } |
| 390 | |
| 391 | |
| 392 | |
| 393 | /* Returns nonzero if P1 and P2 are equal. */ |
| 394 | |
| 395 | static int |
| 396 | gimple_canonical_type_eq (const void *p1, const void *p2) |
| 397 | { |
| 398 | const_tree t1 = (const_tree) p1; |
| 399 | const_tree t2 = (const_tree) p2; |
| 400 | return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1), |
| 401 | CONST_CAST_TREE (t2)); |
| 402 | } |
| 403 | |
| 404 | /* Main worker for gimple_register_canonical_type. */ |
| 405 | |
| 406 | static hashval_t |
| 407 | gimple_register_canonical_type_1 (tree t, hashval_t hash) |
| 408 | { |
| 409 | void **slot; |
| 410 | |
| 411 | gcc_checking_assert (TYPE_P (t) && !TYPE_CANONICAL (t) |
| 412 | && type_with_alias_set_p (t) |
| 413 | && canonical_type_used_p (t)); |
| 414 | |
| 415 | /* ODR types for which there is no ODR violation and for which we did not |
| 416 | record a structurally equivalent non-ODR type can be treated as unique |
| 417 | by their name. |
| 418 | |
| 419 | The hash passed to gimple_register_canonical_type_1 is a structural hash |
| 420 | that we can use to look up a structurally equivalent non-ODR type. |
| 421 | In case we decide to treat the type as a unique ODR type, we recompute the |
| 422 | hash based on its name and let the TBAA machinery know about our decision. */ |
| 423 | if (RECORD_OR_UNION_TYPE_P (t) && odr_type_p (t) |
| 424 | && TYPE_CXX_ODR_P (t) && !odr_type_violation_reported_p (t)) |
| 425 | { |
| 426 | /* Anonymous namespace types never conflict with non-C++ types. */ |
| 427 | if (type_with_linkage_p (t) && type_in_anonymous_namespace_p (t)) |
| 428 | slot = NULL; |
| 429 | else |
| 430 | { |
| 431 | /* Here we rely on the fact that all non-ODR types were inserted into |
| 432 | the canonical type hash and thus we can safely detect conflicts between |
| 433 | ODR types and interoperable non-ODR types. */ |
| 434 | gcc_checking_assert (type_streaming_finished |
| 435 | && TYPE_MAIN_VARIANT (t) == t); |
| 436 | slot = htab_find_slot_with_hash (gimple_canonical_types, t, hash, |
| 437 | NO_INSERT); |
| 438 | } |
| 439 | if (slot && !TYPE_CXX_ODR_P (*(tree *)slot)) |
| 440 | { |
| 441 | tree nonodr = *(tree *)slot; |
| 442 | gcc_checking_assert (!flag_ltrans); |
| 443 | if (symtab->dump_file) |
| 444 | { |
| 445 | fprintf (symtab->dump_file, |
| 446 | "ODR and non-ODR type conflict: "); |
| 447 | print_generic_expr (symtab->dump_file, t); |
| 448 | fprintf (symtab->dump_file, " and "); |
| 449 | print_generic_expr (symtab->dump_file, nonodr); |
| 450 | fprintf (symtab->dump_file, " mangled:%s\n", |
| 451 | IDENTIFIER_POINTER |
| 452 | (DECL_ASSEMBLER_NAME (TYPE_NAME (t)))); |
| 453 | } |
| 454 | /* Set canonical for T and all other ODR equivalent duplicates |
| 455 | including incomplete structures. */ |
| 456 | set_type_canonical_for_odr_type (t, nonodr); |
| 457 | } |
| 458 | else |
| 459 | { |
| 460 | tree prevail = prevailing_odr_type (t); |
| 461 | |
| 462 | if (symtab->dump_file) |
| 463 | { |
| 464 | fprintf (symtab->dump_file, |
| 465 | "New canonical ODR type: "); |
| 466 | print_generic_expr (symtab->dump_file, t); |
| 467 | fprintf (symtab->dump_file, " mangled:%s\n", |
| 468 | IDENTIFIER_POINTER |
| 469 | (DECL_ASSEMBLER_NAME (TYPE_NAME (t)))); |
| 470 | } |
| 471 | /* Set canonical for T and all other ODR equivalent duplicates |
| 472 | including incomplete structures. */ |
| 473 | set_type_canonical_for_odr_type (t, prevail); |
| 474 | enable_odr_based_tbaa (t); |
| 475 | if (!type_in_anonymous_namespace_p (t)) |
| 476 | hash = htab_hash_string (IDENTIFIER_POINTER |
| 477 | (DECL_ASSEMBLER_NAME |
| 478 | (TYPE_NAME (t)))); |
| 479 | else |
| 480 | hash = TYPE_UID (t); |
| 481 | |
| 482 | /* All variants of t now have TYPE_CANONICAL set to prevail. |
| 483 | Update canonical type hash cache accordingly. */ |
| 484 | num_canonical_type_hash_entries++; |
| 485 | bool existed_p = canonical_type_hash_cache->put (prevail, hash); |
| 486 | gcc_checking_assert (!existed_p); |
| 487 | } |
| 488 | return hash; |
| 489 | } |
| 490 | |
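| | /* Non-ODR path: look up a structurally equivalent type and either reuse its canonical type or register T as a new canonical type, caching the hash either way. */ |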
| 491 | slot = htab_find_slot_with_hash (gimple_canonical_types, t, hash, INSERT); |
| 492 | if (*slot) |
| 493 | { |
| 494 | tree new_type = (tree)(*slot); |
| 495 | gcc_checking_assert (new_type != t); |
| 496 | TYPE_CANONICAL (t) = new_type; |
| 497 | } |
| 498 | else |
| 499 | { |
| 500 | TYPE_CANONICAL (t) = t; |
| 501 | *slot = (void *) t; |
| 502 | /* Cache the just computed hash value. */ |
| 503 | num_canonical_type_hash_entries++; |
| 504 | bool existed_p = canonical_type_hash_cache->put (t, hash); |
| 505 | gcc_assert (!existed_p); |
| 506 | } |
| 507 | return hash; |
| 508 | } |
| 509 | |
| 510 | /* Register type T in the global type table gimple_types and set |
| 511 | TYPE_CANONICAL of T accordingly. |
| 512 | This is used by LTO to merge structurally equivalent types for |
| 513 | type-based aliasing purposes across different TUs and languages. |
| 514 | |
| 515 | ??? This merging does not exactly match how the tree.cc middle-end |
| 516 | functions will assign TYPE_CANONICAL when new types are created |
| 517 | during optimization (which at least happens for pointer and array |
| 518 | types). */ |
| 519 | |
| 520 | static void |
| 521 | gimple_register_canonical_type (tree t) |
| 522 | { |
| 523 | if (TYPE_CANONICAL (t) || !type_with_alias_set_p (t) |
| 524 | || !canonical_type_used_p (t)) |
| 525 | return; |
| 526 | |
| 527 | /* Canonical types are the same among all complete variants. */ |
| 528 | if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (t))) |
| 529 | TYPE_CANONICAL (t) = TYPE_CANONICAL (TYPE_MAIN_VARIANT (t)); |
| 530 | else |
| 531 | { |
| 532 | hashval_t h = hash_canonical_type (TYPE_MAIN_VARIANT (t)); |
| 533 | gimple_register_canonical_type_1 (TYPE_MAIN_VARIANT (t), h); |
| 534 | TYPE_CANONICAL (t) = TYPE_CANONICAL (TYPE_MAIN_VARIANT (t)); |
| 535 | } |
| 536 | } |
| 537 | |
| 538 | /* Re-compute TYPE_CANONICAL for NODE and related types. */ |
| 539 | |
| 540 | static void |
| 541 | lto_register_canonical_types (tree node, bool first_p) |
| 542 | { |
| 543 | if (!node |
| 544 | || !TYPE_P (node)) |
| 545 | return; |
| 546 | |
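| | /* When FIRST_P, only clear TYPE_CANONICAL here and for the component types visited below; canonical types are registered only when !FIRST_P. */ |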
| 547 | if (first_p) |
| 548 | TYPE_CANONICAL (node) = NULL_TREE; |
| 549 | |
| 550 | if (POINTER_TYPE_P (node) |
| 551 | || TREE_CODE (node) == COMPLEX_TYPE |
| 552 | || TREE_CODE (node) == ARRAY_TYPE) |
| 553 | lto_register_canonical_types (TREE_TYPE (node), first_p); |
| 554 | |
| 555 | if (!first_p) |
| 556 | gimple_register_canonical_type (node); |
| 557 | } |
| 558 | |
| 559 | /* Finish canonical type calculation: after all units have been streamed in |
| 560 | we can check whether a given ODR type structurally conflicts with a non-ODR |
| 561 | type. If it does, we set its canonical type according to the canonical |
| 562 | type hash; otherwise we use the type name. */ |
| 563 | |
| 564 | static void |
| 565 | lto_register_canonical_types_for_odr_types () |
| 566 | { |
| 567 | tree t; |
| 568 | unsigned int i; |
| 569 | |
| 570 | if (!types_to_register) |
| 571 | return; |
| 572 | |
| 573 | type_streaming_finished = true; |
| 574 | |
| 575 | /* Verify that no type derived from an ODR type was |
| 576 | inserted into the hash table. */ |
| 577 | if (flag_checking) |
| 578 | FOR_EACH_VEC_ELT (*types_to_register, i, t) |
| 579 | gcc_assert (!TYPE_CANONICAL (t)); |
| 580 | |
| 581 | /* Register all remaining types. */ |
| 582 | FOR_EACH_VEC_ELT (*types_to_register, i, t) |
| 583 | { |
| 584 | /* For pre-streamed types like va-arg it is possible that the main variant |
| 585 | is !CXX_ODR_P while the variant (which is streamed) is. |
| 586 | Copy CXX_ODR_P to make the type verifier happy. This is safe because |
| 587 | in canonical type calculation we only consider main variants. |
| 588 | However, we cannot change this flag before streaming is finished |
| 589 | so as not to affect tree merging. */ |
| 590 | TYPE_CXX_ODR_P (t) = TYPE_CXX_ODR_P (TYPE_MAIN_VARIANT (t)); |
| 591 | if (!TYPE_CANONICAL (t)) |
| 592 | gimple_register_canonical_type (t); |
| 593 | } |
| 594 | } |
| 595 | |
| 596 | |
| 597 | /* Remember trees that contain references to declarations. */ |
| 598 | vec <tree, va_gc> *tree_with_vars; |
| 599 | |
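| | /* CHECK_VAR returns true from the enclosing mentions_vars_p_* routine when TT is a variable or function decl that may need fixup after symbol merging; CHECK_NO_VAR asserts that no such decl can appear in the field. */ |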
| 600 | #define CHECK_VAR(tt) \ |
| 601 | do \ |
| 602 | { \ |
| 603 | if ((tt) && VAR_OR_FUNCTION_DECL_P (tt) \ |
| 604 | && (TREE_PUBLIC (tt) || DECL_EXTERNAL (tt))) \ |
| 605 | return true; \ |
| 606 | } while (0) |
| 607 | |
| 608 | #define CHECK_NO_VAR(tt) \ |
| 609 | gcc_checking_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt)) |
| 610 | |
| 611 | /* Check presence of pointers to decls in fields of a tree_typed T. */ |
| 612 | |
| 613 | static inline bool |
| 614 | mentions_vars_p_typed (tree t) |
| 615 | { |
| 616 | CHECK_NO_VAR (TREE_TYPE (t)); |
| 617 | return false; |
| 618 | } |
| 619 | |
| 620 | /* Check presence of pointers to decls in fields of a tree_common T. */ |
| 621 | |
| 622 | static inline bool |
| 623 | mentions_vars_p_common (tree t) |
| 624 | { |
| 625 | if (mentions_vars_p_typed (t)) |
| 626 | return true; |
| 627 | CHECK_NO_VAR (TREE_CHAIN (t)); |
| 628 | return false; |
| 629 | } |
| 630 | |
| 631 | /* Check presence of pointers to decls in fields of a decl_minimal T. */ |
| 632 | |
| 633 | static inline bool |
| 634 | mentions_vars_p_decl_minimal (tree t) |
| 635 | { |
| 636 | if (mentions_vars_p_common (t)) |
| 637 | return true; |
| 638 | CHECK_NO_VAR (DECL_NAME (t)); |
| 639 | CHECK_VAR (DECL_CONTEXT (t)); |
| 640 | return false; |
| 641 | } |
| 642 | |
| 643 | /* Check presence of pointers to decls in fields of a decl_common T. */ |
| 644 | |
| 645 | static inline bool |
| 646 | mentions_vars_p_decl_common (tree t) |
| 647 | { |
| 648 | if (mentions_vars_p_decl_minimal (t)) |
| 649 | return true; |
| 650 | CHECK_VAR (DECL_SIZE (t)); |
| 651 | CHECK_VAR (DECL_SIZE_UNIT (t)); |
| 652 | CHECK_VAR (DECL_INITIAL (t)); |
| 653 | CHECK_NO_VAR (DECL_ATTRIBUTES (t)); |
| 654 | CHECK_VAR (DECL_ABSTRACT_ORIGIN (t)); |
| 655 | return false; |
| 656 | } |
| 657 | |
| 658 | /* Check presence of pointers to decls in fields of a decl_with_vis T. */ |
| 659 | |
| 660 | static inline bool |
| 661 | mentions_vars_p_decl_with_vis (tree t) |
| 662 | { |
| 663 | if (mentions_vars_p_decl_common (t)) |
| 664 | return true; |
| 665 | |
| 666 | /* Accessor macro has side-effects, use field-name here. */ |
| 667 | CHECK_NO_VAR (DECL_ASSEMBLER_NAME_RAW (t)); |
| 668 | return false; |
| 669 | } |
| 670 | |
| 671 | /* Check presence of pointers to decls in fields of a decl_non_common T. */ |
| 672 | |
| 673 | static inline bool |
| 674 | mentions_vars_p_decl_non_common (tree t) |
| 675 | { |
| 676 | if (mentions_vars_p_decl_with_vis (t)) |
| 677 | return true; |
| 678 | CHECK_NO_VAR (DECL_RESULT_FLD (t)); |
| 679 | return false; |
| 680 | } |
| 681 | |
| 682 | /* Check presence of pointers to decls in fields of a FUNCTION_DECL T. */ |
| 683 | |
| 684 | static bool |
| 685 | mentions_vars_p_function (tree t) |
| 686 | { |
| 687 | if (mentions_vars_p_decl_non_common (t)) |
| 688 | return true; |
| 689 | CHECK_NO_VAR (DECL_ARGUMENTS (t)); |
| 690 | CHECK_NO_VAR (DECL_VINDEX (t)); |
| 691 | CHECK_VAR (DECL_FUNCTION_PERSONALITY (t)); |
| 692 | return false; |
| 693 | } |
| 694 | |
| 695 | /* Check presence of pointers to decls in fields of a field_decl T. */ |
| 696 | |
| 697 | static bool |
| 698 | mentions_vars_p_field_decl (tree t) |
| 699 | { |
| 700 | if (mentions_vars_p_decl_common (t)) |
| 701 | return true; |
| 702 | CHECK_VAR (DECL_FIELD_OFFSET (t)); |
| 703 | CHECK_NO_VAR (DECL_BIT_FIELD_TYPE (t)); |
| 704 | CHECK_NO_VAR (DECL_QUALIFIER (t)); |
| 705 | CHECK_NO_VAR (DECL_FIELD_BIT_OFFSET (t)); |
| 706 | CHECK_NO_VAR (DECL_FCONTEXT (t)); |
| 707 | return false; |
| 708 | } |
| 709 | |
| 710 | /* Check presence of pointers to decls in fields of a type T. */ |
| 711 | |
| 712 | static bool |
| 713 | mentions_vars_p_type (tree t) |
| 714 | { |
| 715 | if (mentions_vars_p_common (t)) |
| 716 | return true; |
| 717 | CHECK_NO_VAR (TYPE_CACHED_VALUES (t)); |
| 718 | CHECK_VAR (TYPE_SIZE (t)); |
| 719 | CHECK_VAR (TYPE_SIZE_UNIT (t)); |
| 720 | CHECK_NO_VAR (TYPE_ATTRIBUTES (t)); |
| 721 | CHECK_NO_VAR (TYPE_NAME (t)); |
| 722 | |
| 723 | CHECK_VAR (TYPE_MIN_VALUE_RAW (t)); |
| 724 | CHECK_VAR (TYPE_MAX_VALUE_RAW (t)); |
| 725 | |
| 726 | /* Accessor is for derived node types only. */ |
| 727 | CHECK_NO_VAR (TYPE_LANG_SLOT_1 (t)); |
| 728 | |
| 729 | CHECK_VAR (TYPE_CONTEXT (t)); |
| 730 | CHECK_NO_VAR (TYPE_CANONICAL (t)); |
| 731 | CHECK_NO_VAR (TYPE_MAIN_VARIANT (t)); |
| 732 | CHECK_NO_VAR (TYPE_NEXT_VARIANT (t)); |
| 733 | return false; |
| 734 | } |
| 735 | |
| 736 | /* Check presence of pointers to decls in fields of a BINFO T. */ |
| 737 | |
| 738 | static bool |
| 739 | mentions_vars_p_binfo (tree t) |
| 740 | { |
| 741 | unsigned HOST_WIDE_INT i, n; |
| 742 | |
| 743 | if (mentions_vars_p_common (t)) |
| 744 | return true; |
| 745 | CHECK_VAR (BINFO_VTABLE (t)); |
| 746 | CHECK_NO_VAR (BINFO_OFFSET (t)); |
| 747 | CHECK_NO_VAR (BINFO_VIRTUALS (t)); |
| 748 | CHECK_NO_VAR (BINFO_VPTR_FIELD (t)); |
| 749 | n = vec_safe_length (BINFO_BASE_ACCESSES (t)); |
| 750 | for (i = 0; i < n; i++) |
| 751 | CHECK_NO_VAR (BINFO_BASE_ACCESS (t, i)); |
| 752 | /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX |
| 753 | and BINFO_VPTR_INDEX; these are used by C++ FE only. */ |
| 754 | n = BINFO_N_BASE_BINFOS (t); |
| 755 | for (i = 0; i < n; i++) |
| 756 | CHECK_NO_VAR (BINFO_BASE_BINFO (t, i)); |
| 757 | return false; |
| 758 | } |
| 759 | |
| 760 | /* Check presence of pointers to decls in fields of a CONSTRUCTOR T. */ |
| 761 | |
| 762 | static bool |
| 763 | mentions_vars_p_constructor (tree t) |
| 764 | { |
| 765 | unsigned HOST_WIDE_INT idx; |
| 766 | constructor_elt *ce; |
| 767 | |
| 768 | if (mentions_vars_p_typed (t)) |
| 769 | return true; |
| 770 | |
| 771 | for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++) |
| 772 | { |
| 773 | CHECK_NO_VAR (ce->index); |
| 774 | CHECK_VAR (ce->value); |
| 775 | } |
| 776 | return false; |
| 777 | } |
| 778 | |
| 779 | /* Check presence of pointers to decls in fields of an expression tree T. */ |
| 780 | |
| 781 | static bool |
| 782 | mentions_vars_p_expr (tree t) |
| 783 | { |
| 784 | int i; |
| 785 | if (mentions_vars_p_typed (t)) |
| 786 | return true; |
| 787 | for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i) |
| 788 | CHECK_VAR (TREE_OPERAND (t, i)); |
| 789 | return false; |
| 790 | } |
| 791 | |
| 792 | /* Check presence of pointers to decls in fields of an OMP_CLAUSE T. */ |
| 793 | |
| 794 | static bool |
| 795 | mentions_vars_p_omp_clause (tree t) |
| 796 | { |
| 797 | int i; |
| 798 | if (mentions_vars_p_common (t)) |
| 799 | return true; |
| 800 | for (i = omp_clause_num_ops[OMP_CLAUSE_CODE (t)] - 1; i >= 0; --i) |
| 801 | CHECK_VAR (OMP_CLAUSE_OPERAND (t, i)); |
| 802 | return false; |
| 803 | } |
| 804 | |
| 805 | /* Check presence of pointers to decls that need later fixup in T. */ |
| 806 | |
| 807 | static bool |
| 808 | mentions_vars_p (tree t) |
| 809 | { |
| 810 | switch (TREE_CODE (t)) |
| 811 | { |
| 812 | case IDENTIFIER_NODE: |
| 813 | break; |
| 814 | |
| 815 | case TREE_LIST: |
| 816 | CHECK_VAR (TREE_VALUE (t)); |
| 817 | CHECK_VAR (TREE_PURPOSE (t)); |
| 818 | CHECK_NO_VAR (TREE_CHAIN (t)); |
| 819 | break; |
| 820 | |
| 821 | case FIELD_DECL: |
| 822 | return mentions_vars_p_field_decl (t); |
| 823 | |
| 824 | case LABEL_DECL: |
| 825 | case CONST_DECL: |
| 826 | case PARM_DECL: |
| 827 | case RESULT_DECL: |
| 828 | case IMPORTED_DECL: |
| 829 | case NAMESPACE_DECL: |
| 830 | case NAMELIST_DECL: |
| 831 | return mentions_vars_p_decl_common (t); |
| 832 | |
| 833 | case VAR_DECL: |
| 834 | return mentions_vars_p_decl_with_vis (t); |
| 835 | |
| 836 | case TYPE_DECL: |
| 837 | return mentions_vars_p_decl_non_common (t); |
| 838 | |
| 839 | case FUNCTION_DECL: |
| 840 | return mentions_vars_p_function (t); |
| 841 | |
| 842 | case TREE_BINFO: |
| 843 | return mentions_vars_p_binfo (t); |
| 844 | |
| 845 | case PLACEHOLDER_EXPR: |
| 846 | return mentions_vars_p_common (t); |
| 847 | |
| 848 | case BLOCK: |
| 849 | case TRANSLATION_UNIT_DECL: |
| 850 | case OPTIMIZATION_NODE: |
| 851 | case TARGET_OPTION_NODE: |
| 852 | break; |
| 853 | |
| 854 | case CONSTRUCTOR: |
| 855 | return mentions_vars_p_constructor (t); |
| 856 | |
| 857 | case OMP_CLAUSE: |
| 858 | return mentions_vars_p_omp_clause (t); |
| 859 | |
| 860 | default: |
| 861 | if (TYPE_P (t)) |
| 862 | { |
| 863 | if (mentions_vars_p_type (t)) |
| 864 | return true; |
| 865 | } |
| 866 | else if (EXPR_P (t)) |
| 867 | { |
| 868 | if (mentions_vars_p_expr (t)) |
| 869 | return true; |
| 870 | } |
| 871 | else if (CONSTANT_CLASS_P (t)) |
| 872 | CHECK_NO_VAR (TREE_TYPE (t)); |
| 873 | else |
| 874 | gcc_unreachable (); |
| 875 | } |
| 876 | return false; |
| 877 | } |
| 878 | |
| 879 | |
| 880 | /* Return the resolution for the decl with index INDEX from DATA_IN. */ |
| 881 | |
| 882 | static enum ld_plugin_symbol_resolution |
| 883 | get_resolution (class data_in *data_in, unsigned index) |
| 884 | { |
| 885 | if (data_in->globals_resolution.exists ()) |
| 886 | { |
| 887 | ld_plugin_symbol_resolution_t ret; |
| 888 | /* We can have references to not-emitted functions in |
| 889 | DECL_FUNCTION_PERSONALITY at least. So we can, and indeed have to, |
| 890 | return LDPR_UNKNOWN in some cases. */ |
| 891 | if (data_in->globals_resolution.length () <= index) |
| 892 | return LDPR_UNKNOWN; |
| 893 | ret = data_in->globals_resolution[index]; |
| 894 | return ret; |
| 895 | } |
| 896 | else |
| 897 | /* Delay resolution finding until decl merging. */ |
| 898 | return LDPR_UNKNOWN; |
| 899 | } |
| 900 | |
| 901 | /* We need to record resolutions until the symbol table is read. */ |
| 902 | static void |
| 903 | register_resolution (struct lto_file_decl_data *file_data, tree decl, |
| 904 | enum ld_plugin_symbol_resolution resolution) |
| 905 | { |
| 906 | bool existed; |
| 907 | if (resolution == LDPR_UNKNOWN) |
| 908 | return; |
| 909 | if (!file_data->resolution_map) |
| 910 | file_data->resolution_map |
| 911 | = new hash_map<tree, ld_plugin_symbol_resolution>; |
| 912 | ld_plugin_symbol_resolution_t &res |
| 913 | = file_data->resolution_map->get_or_insert (decl, &existed); |
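| | /* A prevailing resolution always overrides a previously recorded one; otherwise the first recorded resolution wins. */ |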
| 914 | if (!existed |
| 915 | || resolution == LDPR_PREVAILING_DEF_IRONLY |
| 916 | || resolution == LDPR_PREVAILING_DEF |
| 917 | || resolution == LDPR_PREVAILING_DEF_IRONLY_EXP) |
| 918 | res = resolution; |
| 919 | } |
| 920 | |
| 921 | /* Register DECL with the global symbol table and change its |
| 922 | name if necessary to avoid name clashes for static globals across |
| 923 | different files. */ |
| 924 | |
| 925 | static void |
| 926 | lto_register_var_decl_in_symtab (class data_in *data_in, tree decl, |
| 927 | unsigned ix) |
| 928 | { |
| 929 | tree context; |
| 930 | |
| 931 | /* Variable has file scope, not local. */ |
| 932 | if (!TREE_PUBLIC (decl) |
| 933 | && !((context = decl_function_context (decl)) |
| 934 | && auto_var_in_fn_p (decl, context))) |
| 935 | rest_of_decl_compilation (decl, 1, 0); |
| 936 | |
| 937 | /* If this variable has already been declared, queue the |
| 938 | declaration for merging. */ |
| 939 | if (TREE_PUBLIC (decl)) |
| 940 | register_resolution (data_in->file_data, |
| 941 | decl, get_resolution (data_in, ix)); |
| 942 | } |
| 943 | |
| 944 | |
| 945 | /* Register DECL with the global symbol table and change its |
| 946 | name if necessary to avoid name clashes for static globals across |
| 947 | different files. DATA_IN contains descriptors and tables for the |
| 948 | file being read. */ |
| 949 | |
| 950 | static void |
| 951 | lto_register_function_decl_in_symtab (class data_in *data_in, tree decl, |
| 952 | unsigned ix) |
| 953 | { |
| 954 | /* If this variable has already been declared, queue the |
| 955 | declaration for merging. */ |
| 956 | if (TREE_PUBLIC (decl) && !DECL_ABSTRACT_P (decl)) |
| 957 | register_resolution (data_in->file_data, |
| 958 | decl, get_resolution (data_in, ix)); |
| 959 | } |
| 960 | |
| 961 | /* Check if T is a decl that needs to register its resolution info. */ |
| 962 | |
| 963 | static void |
| 964 | lto_maybe_register_decl (class data_in *data_in, tree t, unsigned ix) |
| 965 | { |
| 966 | if (VAR_P (t)) |
| 967 | lto_register_var_decl_in_symtab (data_in, t, ix); |
| 968 | else if (TREE_CODE (t) == FUNCTION_DECL |
| 969 | && !fndecl_built_in_p (t)) |
| 970 | lto_register_function_decl_in_symtab (data_in, t, ix); |
| 971 | } |
| 972 | |
| 973 | |
| 974 | /* For the type T re-materialize it in the type variant list and |
| 975 | the pointer/reference-to chains. */ |
| 976 | |
| 977 | static void |
| 978 | lto_fixup_prevailing_type (tree t) |
| 979 | { |
| 980 | /* The following re-creates proper variant lists while fixing up |
| 981 | the variant leaders. We do not stream TYPE_NEXT_VARIANT so the |
| 982 | variant list state before fixup is broken. */ |
| 983 | |
| 984 | /* If we are not our own variant leader link us into our new leaders |
| 985 | variant list. */ |
| 986 | if (TYPE_MAIN_VARIANT (t) != t) |
| 987 | { |
| 988 | tree mv = TYPE_MAIN_VARIANT (t); |
| 989 | TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv); |
| 990 | TYPE_NEXT_VARIANT (mv) = t; |
| 991 | } |
| 992 | else if (!TYPE_ATTRIBUTES (t)) |
| 993 | { |
| 994 | /* The following reconstructs the pointer chains |
| 995 | of the new pointed-to type if we are a main variant. We do |
| 996 | not stream those so they are broken before fixup. |
| 997 | Don't add it if despite being main variant it has |
| 998 | attributes (then it was created with build_distinct_type_copy). |
| 999 | Similarly don't add TYPE_REF_IS_RVALUE REFERENCE_TYPEs. |
| 1000 | Don't add it if there is something in the chain already. */ |
| 1001 | if (TREE_CODE (t) == POINTER_TYPE) |
| 1002 | { |
| 1003 | TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (TREE_TYPE (t)); |
| 1004 | TYPE_POINTER_TO (TREE_TYPE (t)) = t; |
| 1005 | } |
| 1006 | else if (TREE_CODE (t) == REFERENCE_TYPE && !TYPE_REF_IS_RVALUE (t)) |
| 1007 | { |
| 1008 | TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (TREE_TYPE (t)); |
| 1009 | TYPE_REFERENCE_TO (TREE_TYPE (t)) = t; |
| 1010 | } |
| 1011 | } |
| 1012 | } |
| 1013 | |
| 1014 | |
| 1015 | /* We keep prevailing tree SCCs in a hashtable with manual collision |
| 1016 | handling (in case all hashes compare the same) and keep the colliding |
| 1017 | entries in the tree_scc->next chain. */ |
| 1018 | |
| 1019 | struct tree_scc |
| 1020 | { |
| 1021 | tree_scc *next; |
| 1022 | /* Hash of the whole SCC. */ |
| 1023 | hashval_t hash; |
| 1024 | /* Number of trees in the SCC. */ |
| 1025 | unsigned len; |
| 1026 | /* Number of possible entries into the SCC (tree nodes [0..entry_len-1] |
| 1027 | which share the same individual tree hash). */ |
| 1028 | unsigned entry_len; |
| 1029 | /* The members of the SCC. |
| 1030 | We only need to remember the first entry node candidate for prevailing |
| 1031 | SCCs (but of course have access to all entries for SCCs we are |
| 1032 | processing). |
| 1033 | ??? For prevailing SCCs we really only need hash and the first |
| 1034 | entry candidate, but that's too awkward to implement. */ |
| 1035 | tree entries[1]; |
| 1036 | }; |
| 1037 | |
| 1038 | struct tree_scc_hasher : nofree_ptr_hash <tree_scc> |
| 1039 | { |
| 1040 | static inline hashval_t hash (const tree_scc *); |
| 1041 | static inline bool equal (const tree_scc *, const tree_scc *); |
| 1042 | }; |
| 1043 | |
| 1044 | hashval_t |
| 1045 | tree_scc_hasher::hash (const tree_scc *scc) |
| 1046 | { |
| 1047 | return scc->hash; |
| 1048 | } |
| 1049 | |
| 1050 | bool |
| 1051 | tree_scc_hasher::equal (const tree_scc *scc1, const tree_scc *scc2) |
| 1052 | { |
| 1053 | if (scc1->hash != scc2->hash |
| 1054 | || scc1->len != scc2->len |
| 1055 | || scc1->entry_len != scc2->entry_len) |
| 1056 | return false; |
| 1057 | return true; |
| 1058 | } |
| 1059 | |
| 1060 | static hash_table<tree_scc_hasher> *tree_scc_hash; |
| 1061 | static struct obstack tree_scc_hash_obstack; |
| 1062 | |
| 1063 | static unsigned long num_merged_types; |
| 1064 | static unsigned long num_prevailing_types; |
| 1065 | static unsigned long num_type_scc_trees; |
| 1066 | static unsigned long total_scc_size; |
| 1067 | static unsigned long num_sccs_read; |
| 1068 | static unsigned long num_unshared_trees_read; |
| 1069 | static unsigned long total_scc_size_merged; |
| 1070 | static unsigned long num_sccs_merged; |
| 1071 | static unsigned long num_scc_compares; |
| 1072 | static unsigned long num_scc_compare_collisions; |
| 1073 | |
| 1074 | |
| 1075 | /* Compare the two entries T1 and T2 of two SCCs that are possibly equal, |
| 1076 | recursing through in-SCC tree edges. Returns true if the SCCs entered |
| 1077 | through T1 and T2 are equal and fills in *MAP with the pairs of |
| 1078 | SCC entries we visited, starting with (*MAP)[0] = T1 and (*MAP)[1] = T2. */ |
| 1079 | |
| 1080 | static bool |
| 1081 | compare_tree_sccs_1 (tree t1, tree t2, tree **map) |
| 1082 | { |
| 1083 | enum tree_code code; |
| 1084 | |
| 1085 | /* Mark already visited nodes. */ |
| 1086 | TREE_ASM_WRITTEN (t2) = 1; |
| 1087 | |
| 1088 | /* Push the pair onto map. */ |
| 1089 | (*map)[0] = t1; |
| 1090 | (*map)[1] = t2; |
| 1091 | *map = *map + 2; |
| 1092 | |
| 1093 | /* Compare value-fields. */ |
| 1094 | #define compare_values(X) \ |
| 1095 | do { \ |
| 1096 | if (X(t1) != X(t2)) \ |
| 1097 | return false; \ |
| 1098 | } while (0) |
| 1099 | |
| 1100 | compare_values (TREE_CODE); |
| 1101 | code = TREE_CODE (t1); |
| 1102 | |
| 1103 | /* If we end up comparing translation unit decls we either forgot to mark |
| 1104 | some SCC as local or we compare too much. */ |
| 1105 | gcc_checking_assert (code != TRANSLATION_UNIT_DECL); |
| 1106 | |
| 1107 | if (!TYPE_P (t1)) |
| 1108 | { |
| 1109 | compare_values (TREE_SIDE_EFFECTS); |
| 1110 | compare_values (TREE_CONSTANT); |
| 1111 | compare_values (TREE_READONLY); |
| 1112 | compare_values (TREE_PUBLIC); |
| 1113 | } |
| 1114 | compare_values (TREE_ADDRESSABLE); |
| 1115 | compare_values (TREE_THIS_VOLATILE); |
| 1116 | if (DECL_P (t1)) |
| 1117 | compare_values (DECL_UNSIGNED); |
| 1118 | else if (TYPE_P (t1)) |
| 1119 | compare_values (TYPE_UNSIGNED); |
| 1120 | if (TYPE_P (t1)) |
| 1121 | compare_values (TYPE_ARTIFICIAL); |
| 1122 | else |
| 1123 | compare_values (TREE_NO_WARNING); |
| 1124 | compare_values (TREE_NOTHROW); |
| 1125 | compare_values (TREE_STATIC); |
| 1126 | if (code != TREE_BINFO) |
| 1127 | compare_values (TREE_PRIVATE); |
| 1128 | compare_values (TREE_PROTECTED); |
| 1129 | compare_values (TREE_DEPRECATED); |
| 1130 | if (TYPE_P (t1)) |
| 1131 | { |
| 1132 | if (AGGREGATE_TYPE_P (t1)) |
| 1133 | compare_values (TYPE_REVERSE_STORAGE_ORDER); |
| 1134 | else |
| 1135 | compare_values (TYPE_SATURATING); |
| 1136 | compare_values (TYPE_ADDR_SPACE); |
| 1137 | } |
| 1138 | else if (code == SSA_NAME) |
| 1139 | compare_values (SSA_NAME_IS_DEFAULT_DEF); |
| 1140 | |
| 1141 | if (CODE_CONTAINS_STRUCT (code, TS_INT_CST)) |
| 1142 | { |
| 1143 | if (wi::to_wide (t1) != wi::to_wide (t2)) |
| 1144 | return false; |
| 1145 | } |
| 1146 | |
| 1147 | if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST)) |
| 1148 | { |
| 1149 | /* ??? No suitable compare routine available. */ |
| 1150 | REAL_VALUE_TYPE r1 = TREE_REAL_CST (t1); |
| 1151 | REAL_VALUE_TYPE r2 = TREE_REAL_CST (t2); |
| 1152 | if (r1.cl != r2.cl |
| 1153 | || r1.decimal != r2.decimal |
| 1154 | || r1.sign != r2.sign |
| 1155 | || r1.signalling != r2.signalling |
| 1156 | || r1.canonical != r2.canonical |
| 1157 | || r1.uexp != r2.uexp) |
| 1158 | return false; |
| 1159 | for (unsigned i = 0; i < SIGSZ; ++i) |
| 1160 | if (r1.sig[i] != r2.sig[i]) |
| 1161 | return false; |
| 1162 | } |
| 1163 | |
| 1164 | if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST)) |
| 1165 | if (!fixed_compare (EQ_EXPR, |
| 1166 | TREE_FIXED_CST_PTR (t1), TREE_FIXED_CST_PTR (t2))) |
| 1167 | return false; |
| 1168 | |
| 1169 | if (CODE_CONTAINS_STRUCT (code, TS_VECTOR)) |
| 1170 | { |
| 1171 | compare_values (VECTOR_CST_LOG2_NPATTERNS); |
| 1172 | compare_values (VECTOR_CST_NELTS_PER_PATTERN); |
| 1173 | } |
| 1174 | |
| 1175 | if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON)) |
| 1176 | { |
| 1177 | compare_values (DECL_MODE); |
| 1178 | compare_values (DECL_NONLOCAL); |
| 1179 | compare_values (DECL_VIRTUAL_P); |
| 1180 | compare_values (DECL_IGNORED_P); |
| 1181 | compare_values (DECL_ABSTRACT_P); |
| 1182 | compare_values (DECL_ARTIFICIAL); |
| 1183 | compare_values (DECL_USER_ALIGN); |
| 1184 | compare_values (DECL_PRESERVE_P); |
| 1185 | compare_values (DECL_EXTERNAL); |
| 1186 | compare_values (DECL_NOT_GIMPLE_REG_P); |
| 1187 | compare_values (DECL_ALIGN); |
| 1188 | if (code == LABEL_DECL) |
| 1189 | { |
| 1190 | compare_values (EH_LANDING_PAD_NR); |
| 1191 | compare_values (LABEL_DECL_UID); |
| 1192 | } |
| 1193 | else if (code == FIELD_DECL) |
| 1194 | { |
| 1195 | compare_values (DECL_PACKED); |
| 1196 | compare_values (DECL_NONADDRESSABLE_P); |
| 1197 | compare_values (DECL_PADDING_P); |
| 1198 | compare_values (DECL_FIELD_ABI_IGNORED); |
| 1199 | compare_values (DECL_FIELD_CXX_ZERO_WIDTH_BIT_FIELD); |
| 1200 | compare_values (DECL_OFFSET_ALIGN); |
| 1201 | compare_values (DECL_NOT_FLEXARRAY); |
| 1202 | } |
| 1203 | else if (code == VAR_DECL) |
| 1204 | { |
| 1205 | compare_values (DECL_HAS_DEBUG_EXPR_P); |
| 1206 | compare_values (DECL_NONLOCAL_FRAME); |
| 1207 | } |
| 1208 | if (code == RESULT_DECL |
| 1209 | || code == PARM_DECL |
| 1210 | || code == VAR_DECL) |
| 1211 | { |
| 1212 | compare_values (DECL_BY_REFERENCE); |
| 1213 | if (code == VAR_DECL |
| 1214 | || code == PARM_DECL) |
| 1215 | compare_values (DECL_HAS_VALUE_EXPR_P); |
| 1216 | } |
| 1217 | } |
| 1218 | |
| 1219 | if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL)) |
| 1220 | compare_values (DECL_REGISTER); |
| 1221 | |
| 1222 | if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS)) |
| 1223 | { |
| 1224 | compare_values (DECL_COMMON); |
| 1225 | compare_values (DECL_DLLIMPORT_P); |
| 1226 | compare_values (DECL_WEAK); |
| 1227 | compare_values (DECL_SEEN_IN_BIND_EXPR_P); |
| 1228 | compare_values (DECL_COMDAT); |
| 1229 | compare_values (DECL_VISIBILITY); |
| 1230 | compare_values (DECL_VISIBILITY_SPECIFIED); |
| 1231 | if (code == VAR_DECL) |
| 1232 | { |
| 1233 | compare_values (DECL_HARD_REGISTER); |
| 1234 | /* DECL_IN_TEXT_SECTION is set during final asm output only. */ |
| 1235 | compare_values (DECL_IN_CONSTANT_POOL); |
| 1236 | } |
| 1237 | } |
| 1238 | |
| 1239 | if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL)) |
| 1240 | { |
| 1241 | compare_values (DECL_BUILT_IN_CLASS); |
| 1242 | compare_values (DECL_STATIC_CONSTRUCTOR); |
| 1243 | compare_values (DECL_STATIC_DESTRUCTOR); |
| 1244 | compare_values (DECL_UNINLINABLE); |
| 1245 | compare_values (DECL_POSSIBLY_INLINED); |
| 1246 | compare_values (DECL_IS_NOVOPS); |
| 1247 | compare_values (DECL_IS_RETURNS_TWICE); |
| 1248 | compare_values (DECL_IS_MALLOC); |
| 1249 | compare_values (FUNCTION_DECL_DECL_TYPE); |
| 1250 | compare_values (DECL_DECLARED_INLINE_P); |
| 1251 | compare_values (DECL_STATIC_CHAIN); |
| 1252 | compare_values (DECL_NO_INLINE_WARNING_P); |
| 1253 | compare_values (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT); |
| 1254 | compare_values (DECL_NO_LIMIT_STACK); |
| 1255 | compare_values (DECL_DISREGARD_INLINE_LIMITS); |
| 1256 | compare_values (DECL_PURE_P); |
| 1257 | compare_values (DECL_LOOPING_CONST_OR_PURE_P); |
| 1258 | compare_values (DECL_IS_REPLACEABLE_OPERATOR); |
| 1259 | compare_values (DECL_FINAL_P); |
| 1260 | compare_values (DECL_CXX_CONSTRUCTOR_P); |
| 1261 | compare_values (DECL_CXX_DESTRUCTOR_P); |
| 1262 | if (DECL_BUILT_IN_CLASS (t1) != NOT_BUILT_IN) |
| 1263 | compare_values (DECL_UNCHECKED_FUNCTION_CODE); |
| 1264 | } |
| 1265 | |
| 1266 | if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON)) |
| 1267 | { |
| 1268 | compare_values (TYPE_MODE); |
| 1269 | compare_values (TYPE_NEEDS_CONSTRUCTING); |
| 1270 | if (RECORD_OR_UNION_TYPE_P (t1)) |
| 1271 | { |
| 1272 | compare_values (TYPE_TRANSPARENT_AGGR); |
| 1273 | compare_values (TYPE_FINAL_P); |
| 1274 | compare_values (TYPE_CXX_ODR_P); |
| 1275 | } |
| 1276 | else if (code == ARRAY_TYPE) |
| 1277 | compare_values (TYPE_NONALIASED_COMPONENT); |
| 1278 | if (code == ARRAY_TYPE || code == INTEGER_TYPE) |
| 1279 | compare_values (TYPE_STRING_FLAG); |
| 1280 | if (AGGREGATE_TYPE_P (t1)) |
| 1281 | compare_values (TYPE_TYPELESS_STORAGE); |
| 1282 | compare_values (TYPE_EMPTY_P); |
| 1283 | if (FUNC_OR_METHOD_TYPE_P (t1)) |
| 1284 | compare_values (TYPE_NO_NAMED_ARGS_STDARG_P); |
| 1285 | if (RECORD_OR_UNION_TYPE_P (t1)) |
| 1286 | compare_values (TYPE_INCLUDES_FLEXARRAY); |
| 1287 | compare_values (TYPE_PACKED); |
| 1288 | compare_values (TYPE_RESTRICT); |
| 1289 | compare_values (TYPE_USER_ALIGN); |
| 1290 | compare_values (TYPE_READONLY); |
| 1291 | compare_values (TYPE_PRECISION_RAW); |
| 1292 | compare_values (TYPE_ALIGN); |
| 1293 | /* Do not compare TYPE_ALIAS_SET. Doing so introduces ordering issues |
| 1294 | with calls to get_alias_set which may initialize it for streamed-in |
| 1295 | types. */ |
| 1296 | } |
| 1297 | |
| 1298 | /* We don't want to compare locations, so there is nothing to compare |
| 1299 | for TS_EXP. */ |
| 1300 | |
| 1301 | /* BLOCKs are function local and we don't merge anything there, so |
| 1302 | simply refuse to merge. */ |
| 1303 | if (CODE_CONTAINS_STRUCT (code, TS_BLOCK)) |
| 1304 | return false; |
| 1305 | |
| 1306 | if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL)) |
| 1307 | if (strcmp (TRANSLATION_UNIT_LANGUAGE (t1), |
| 1308 | TRANSLATION_UNIT_LANGUAGE (t2)) != 0) |
| 1309 | return false; |
| 1310 | |
| 1311 | if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)) |
| 1312 | if (!cl_target_option_eq (TREE_TARGET_OPTION (t1), TREE_TARGET_OPTION (t2))) |
| 1313 | return false; |
| 1314 | |
| 1315 | if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION)) |
| 1316 | if (!cl_optimization_option_eq (TREE_OPTIMIZATION (t1), |
| 1317 | TREE_OPTIMIZATION (t2))) |
| 1318 | return false; |
| 1319 | |
| 1320 | if (CODE_CONTAINS_STRUCT (code, TS_BINFO)) |
| 1321 | if (vec_safe_length (BINFO_BASE_ACCESSES (t1)) |
| 1322 | != vec_safe_length (BINFO_BASE_ACCESSES (t2))) |
| 1323 | return false; |
| 1324 | |
| 1325 | if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR)) |
| 1326 | { |
| 1327 | compare_values (CLOBBER_KIND); |
| 1328 | compare_values (CONSTRUCTOR_NELTS); |
| 1329 | } |
| 1330 | |
| 1331 | if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER)) |
| 1332 | if (IDENTIFIER_LENGTH (t1) != IDENTIFIER_LENGTH (t2) |
| 1333 | || memcmp (IDENTIFIER_POINTER (t1), IDENTIFIER_POINTER (t2), |
| 1334 | IDENTIFIER_LENGTH (t1)) != 0) |
| 1335 | return false; |
| 1336 | |
| 1337 | if (CODE_CONTAINS_STRUCT (code, TS_STRING)) |
| 1338 | if (TREE_STRING_LENGTH (t1) != TREE_STRING_LENGTH (t2) |
| 1339 | || memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2), |
| 1340 | TREE_STRING_LENGTH (t1)) != 0) |
| 1341 | return false; |
| 1342 | |
| 1343 | if (code == RAW_DATA_CST) |
| 1344 | if (RAW_DATA_LENGTH (t1) != RAW_DATA_LENGTH (t2) |
| 1345 | || memcmp (RAW_DATA_POINTER (t1), RAW_DATA_POINTER (t2), |
| 1346 | RAW_DATA_LENGTH (t1)) != 0) |
| 1347 | return false; |
| 1348 | |
| 1349 | if (code == OMP_CLAUSE) |
| 1350 | { |
| 1351 | compare_values (OMP_CLAUSE_CODE); |
| 1352 | switch (OMP_CLAUSE_CODE (t1)) |
| 1353 | { |
| 1354 | case OMP_CLAUSE_DEFAULT: |
| 1355 | compare_values (OMP_CLAUSE_DEFAULT_KIND); |
| 1356 | break; |
| 1357 | case OMP_CLAUSE_SCHEDULE: |
| 1358 | compare_values (OMP_CLAUSE_SCHEDULE_KIND); |
| 1359 | break; |
| 1360 | case OMP_CLAUSE_DEPEND: |
| 1361 | compare_values (OMP_CLAUSE_DEPEND_KIND); |
| 1362 | break; |
| 1363 | case OMP_CLAUSE_MAP: |
| 1364 | compare_values (OMP_CLAUSE_MAP_KIND); |
| 1365 | break; |
| 1366 | case OMP_CLAUSE_PROC_BIND: |
| 1367 | compare_values (OMP_CLAUSE_PROC_BIND_KIND); |
| 1368 | break; |
| 1369 | case OMP_CLAUSE_REDUCTION: |
| 1370 | compare_values (OMP_CLAUSE_REDUCTION_CODE); |
| 1371 | compare_values (OMP_CLAUSE_REDUCTION_GIMPLE_INIT); |
| 1372 | compare_values (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE); |
| 1373 | break; |
| 1374 | default: |
| 1375 | break; |
| 1376 | } |
| 1377 | } |
| 1378 | |
| 1379 | #undef compare_values |
| 1380 | |
| 1381 | |
| 1382 | /* Compare pointer fields. */ |
| 1383 | |
| 1384 | /* Recurse. Search & Replaced from DFS_write_tree_body. |
| 1385 | Folding the early checks into the compare_tree_edges recursion |
| 1386 | macro makes debugging way quicker as you are able to break on |
| 1387 | compare_tree_sccs_1 and simply finish until a call returns false |
| 1388 | to spot the SCC members with the difference. */ |
| 1389 | #define compare_tree_edges(E1, E2) \ |
| 1390 | do { \ |
| 1391 | tree t1_ = (E1), t2_ = (E2); \ |
| 1392 | if (t1_ != t2_ \ |
| 1393 | && (!t1_ || !t2_ \ |
| 1394 | || !TREE_VISITED (t2_) \ |
| 1395 | || (!TREE_ASM_WRITTEN (t2_) \ |
| 1396 | && !compare_tree_sccs_1 (t1_, t2_, map)))) \ |
| 1397 | return false; \ |
| 1398 | /* Only non-NULL trees outside of the SCC may compare equal. */ \ |
| 1399 | gcc_checking_assert (t1_ != t2_ || (!t2_ || !TREE_VISITED (t2_))); \ |
| 1400 | } while (0) |
| 1401 | |
| 1402 | if (CODE_CONTAINS_STRUCT (code, TS_TYPED)) |
| 1403 | { |
| 1404 | if (code != IDENTIFIER_NODE) |
| 1405 | compare_tree_edges (TREE_TYPE (t1), TREE_TYPE (t2)); |
| 1406 | } |
| 1407 | |
| 1408 | if (CODE_CONTAINS_STRUCT (code, TS_VECTOR)) |
| 1409 | { |
| 1410 | /* Note that the number of elements for EXPR has already been emitted |
| 1411 | in EXPR's header (see streamer_write_tree_header). */ |
| 1412 | unsigned int count = vector_cst_encoded_nelts (t1); |
| 1413 | for (unsigned int i = 0; i < count; ++i) |
| 1414 | compare_tree_edges (VECTOR_CST_ENCODED_ELT (t1, i), |
| 1415 | VECTOR_CST_ENCODED_ELT (t2, i)); |
| 1416 | } |
| 1417 | |
| 1418 | if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX)) |
| 1419 | { |
| 1420 | compare_tree_edges (TREE_REALPART (t1), TREE_REALPART (t2)); |
| 1421 | compare_tree_edges (TREE_IMAGPART (t1), TREE_IMAGPART (t2)); |
| 1422 | } |
| 1423 | |
| 1424 | if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL)) |
| 1425 | { |
| 1426 | compare_tree_edges (DECL_NAME (t1), DECL_NAME (t2)); |
| 1427 | /* ??? Global decls from different TUs have non-matching |
| 1428 | TRANSLATION_UNIT_DECLs. Only consider a small set of |
| 1429 | decls equivalent, we should not end up merging others. */ |
| 1430 | if ((code == TYPE_DECL |
| 1431 | || code == NAMESPACE_DECL |
| 1432 | || code == IMPORTED_DECL |
| 1433 | || code == CONST_DECL |
| 1434 | || (VAR_OR_FUNCTION_DECL_P (t1) |
| 1435 | && (TREE_PUBLIC (t1) || DECL_EXTERNAL (t1)))) |
| 1436 | && DECL_FILE_SCOPE_P (t1) && DECL_FILE_SCOPE_P (t2)) |
| 1437 | ; |
| 1438 | else |
| 1439 | compare_tree_edges (DECL_CONTEXT (t1), DECL_CONTEXT (t2)); |
| 1440 | } |
| 1441 | |
| 1442 | if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON)) |
| 1443 | { |
| 1444 | compare_tree_edges (DECL_SIZE (t1), DECL_SIZE (t2)); |
| 1445 | compare_tree_edges (DECL_SIZE_UNIT (t1), DECL_SIZE_UNIT (t2)); |
| 1446 | compare_tree_edges (DECL_ATTRIBUTES (t1), DECL_ATTRIBUTES (t2)); |
| 1447 | compare_tree_edges (DECL_ABSTRACT_ORIGIN (t1), DECL_ABSTRACT_ORIGIN (t2)); |
| 1448 | if ((code == VAR_DECL |
| 1449 | || code == PARM_DECL) |
| 1450 | && DECL_HAS_VALUE_EXPR_P (t1)) |
| 1451 | compare_tree_edges (DECL_VALUE_EXPR (t1), DECL_VALUE_EXPR (t2)); |
| 1452 | if (code == VAR_DECL |
| 1453 | && DECL_HAS_DEBUG_EXPR_P (t1)) |
| 1454 | compare_tree_edges (DECL_DEBUG_EXPR (t1), DECL_DEBUG_EXPR (t2)); |
| 1455 | /* LTO specific edges. */ |
| 1456 | if (code != FUNCTION_DECL |
| 1457 | && code != TRANSLATION_UNIT_DECL) |
| 1458 | compare_tree_edges (DECL_INITIAL (t1), DECL_INITIAL (t2)); |
| 1459 | } |
| 1460 | |
| 1461 | if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON)) |
| 1462 | { |
| 1463 | if (code == FUNCTION_DECL) |
| 1464 | { |
| 1465 | tree a1, a2; |
| 1466 | for (a1 = DECL_ARGUMENTS (t1), a2 = DECL_ARGUMENTS (t2); |
| 1467 | a1 || a2; |
| 1468 | a1 = TREE_CHAIN (a1), a2 = TREE_CHAIN (a2)) |
| 1469 | compare_tree_edges (a1, a2); |
| 1470 | compare_tree_edges (DECL_RESULT (t1), DECL_RESULT (t2)); |
| 1471 | } |
| 1472 | else if (code == TYPE_DECL) |
| 1473 | compare_tree_edges (DECL_ORIGINAL_TYPE (t1), DECL_ORIGINAL_TYPE (t2)); |
| 1474 | } |
| 1475 | |
| 1476 | if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS)) |
| 1477 | { |
| 1478 | /* Make sure we don't inadvertently set the assembler name. */ |
| 1479 | if (DECL_ASSEMBLER_NAME_SET_P (t1)) |
| 1480 | compare_tree_edges (DECL_ASSEMBLER_NAME (t1), |
| 1481 | DECL_ASSEMBLER_NAME (t2)); |
| 1482 | } |
| 1483 | |
| 1484 | if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL)) |
| 1485 | { |
| 1486 | compare_tree_edges (DECL_FIELD_OFFSET (t1), DECL_FIELD_OFFSET (t2)); |
| 1487 | compare_tree_edges (DECL_BIT_FIELD_TYPE (t1), DECL_BIT_FIELD_TYPE (t2)); |
| 1488 | compare_tree_edges (DECL_BIT_FIELD_REPRESENTATIVE (t1), |
| 1489 | DECL_BIT_FIELD_REPRESENTATIVE (t2)); |
| 1490 | compare_tree_edges (DECL_FIELD_BIT_OFFSET (t1), |
| 1491 | DECL_FIELD_BIT_OFFSET (t2)); |
| 1492 | compare_tree_edges (DECL_FCONTEXT (t1), DECL_FCONTEXT (t2)); |
| 1493 | } |
| 1494 | |
| 1495 | if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL)) |
| 1496 | { |
| 1497 | compare_tree_edges (DECL_FUNCTION_PERSONALITY (t1), |
| 1498 | DECL_FUNCTION_PERSONALITY (t2)); |
| 1499 | compare_tree_edges (DECL_VINDEX (t1), DECL_VINDEX (t2)); |
| 1500 | compare_tree_edges (DECL_FUNCTION_SPECIFIC_TARGET (t1), |
| 1501 | DECL_FUNCTION_SPECIFIC_TARGET (t2)); |
| 1502 | compare_tree_edges (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t1), |
| 1503 | DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t2)); |
| 1504 | } |
| 1505 | |
| 1506 | if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON)) |
| 1507 | { |
| 1508 | compare_tree_edges (TYPE_SIZE (t1), TYPE_SIZE (t2)); |
| 1509 | compare_tree_edges (TYPE_SIZE_UNIT (t1), TYPE_SIZE_UNIT (t2)); |
| 1510 | compare_tree_edges (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)); |
| 1511 | compare_tree_edges (TYPE_NAME (t1), TYPE_NAME (t2)); |
| 1512 | /* Do not compare TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be |
| 1513 | reconstructed during fixup. */ |
| 1514 | /* Do not compare TYPE_NEXT_VARIANT, we reconstruct the variant lists |
| 1515 | during fixup. */ |
| 1516 | compare_tree_edges (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2)); |
| 1517 | /* ??? Global types from different TUs have non-matching |
| 1518 | TRANSLATION_UNIT_DECLs. Still merge them if they are otherwise |
| 1519 | equal. */ |
| 1520 | if (TYPE_FILE_SCOPE_P (t1) && TYPE_FILE_SCOPE_P (t2)) |
| 1521 | ; |
| 1522 | else |
| 1523 | compare_tree_edges (TYPE_CONTEXT (t1), TYPE_CONTEXT (t2)); |
| 1524 | /* TYPE_CANONICAL is re-computed during type merging, so do not |
| 1525 | compare it here. */ |
| 1526 | compare_tree_edges (TYPE_STUB_DECL (t1), TYPE_STUB_DECL (t2)); |
| 1527 | } |
| 1528 | |
| 1529 | if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON)) |
| 1530 | { |
| 1531 | if (code == ARRAY_TYPE) |
| 1532 | compare_tree_edges (TYPE_DOMAIN (t1), TYPE_DOMAIN (t2)); |
| 1533 | else if (RECORD_OR_UNION_TYPE_P (t1)) |
| 1534 | { |
| 1535 | tree f1, f2; |
| 1536 | for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2); |
| 1537 | f1 || f2; |
| 1538 | f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2)) |
| 1539 | compare_tree_edges (f1, f2); |
| 1540 | } |
| 1541 | else if (code == FUNCTION_TYPE |
| 1542 | || code == METHOD_TYPE) |
| 1543 | compare_tree_edges (TYPE_ARG_TYPES (t1), TYPE_ARG_TYPES (t2)); |
| 1544 | |
| 1545 | if (!POINTER_TYPE_P (t1)) |
| 1546 | compare_tree_edges (TYPE_MIN_VALUE_RAW (t1), TYPE_MIN_VALUE_RAW (t2)); |
| 1547 | compare_tree_edges (TYPE_MAX_VALUE_RAW (t1), TYPE_MAX_VALUE_RAW (t2)); |
| 1548 | } |
| 1549 | |
| 1550 | if (CODE_CONTAINS_STRUCT (code, TS_LIST)) |
| 1551 | { |
| 1552 | compare_tree_edges (TREE_PURPOSE (t1), TREE_PURPOSE (t2)); |
| 1553 | compare_tree_edges (TREE_VALUE (t1), TREE_VALUE (t2)); |
| 1554 | compare_tree_edges (TREE_CHAIN (t1), TREE_CHAIN (t2)); |
| 1555 | } |
| 1556 | |
| 1557 | if (CODE_CONTAINS_STRUCT (code, TS_VEC)) |
| 1558 | for (int i = 0; i < TREE_VEC_LENGTH (t1); i++) |
| 1559 | compare_tree_edges (TREE_VEC_ELT (t1, i), TREE_VEC_ELT (t2, i)); |
| 1560 | |
| 1561 | if (CODE_CONTAINS_STRUCT (code, TS_EXP)) |
| 1562 | { |
| 1563 | for (int i = 0; i < TREE_OPERAND_LENGTH (t1); i++) |
| 1564 | compare_tree_edges (TREE_OPERAND (t1, i), |
| 1565 | TREE_OPERAND (t2, i)); |
| 1566 | |
| 1567 | /* BLOCKs are function local and we don't merge anything there. */ |
| 1568 | if (TREE_BLOCK (t1) || TREE_BLOCK (t2)) |
| 1569 | return false; |
| 1570 | } |
| 1571 | |
| 1572 | if (CODE_CONTAINS_STRUCT (code, TS_BINFO)) |
| 1573 | { |
| 1574 | unsigned i; |
| 1575 | tree t; |
| 1576 | /* Lengths have already been compared above. */ |
| 1577 | FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t1), i, t) |
| 1578 | compare_tree_edges (t, BINFO_BASE_BINFO (t2, i)); |
| 1579 | FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t1), i, t) |
| 1580 | compare_tree_edges (t, BINFO_BASE_ACCESS (t2, i)); |
| 1581 | compare_tree_edges (BINFO_OFFSET (t1), BINFO_OFFSET (t2)); |
| 1582 | compare_tree_edges (BINFO_VTABLE (t1), BINFO_VTABLE (t2)); |
| 1583 | compare_tree_edges (BINFO_VPTR_FIELD (t1), BINFO_VPTR_FIELD (t2)); |
| 1584 | /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX |
| 1585 | and BINFO_VPTR_INDEX; these are used by C++ FE only. */ |
| 1586 | } |
| 1587 | |
| 1588 | if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR)) |
| 1589 | { |
| 1590 | unsigned i; |
| 1591 | tree index, value; |
| 1592 | /* Lengths have already been compared above. */ |
| 1593 | FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t1), i, index, value) |
| 1594 | { |
| 1595 | compare_tree_edges (index, CONSTRUCTOR_ELT (t2, i)->index); |
| 1596 | compare_tree_edges (value, CONSTRUCTOR_ELT (t2, i)->value); |
| 1597 | } |
| 1598 | } |
| 1599 | |
| 1600 | if (code == OMP_CLAUSE) |
| 1601 | { |
| 1602 | int i; |
| 1603 | |
| 1604 | for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t1)]; i++) |
| 1605 | compare_tree_edges (OMP_CLAUSE_OPERAND (t1, i), |
| 1606 | OMP_CLAUSE_OPERAND (t2, i)); |
| 1607 | compare_tree_edges (OMP_CLAUSE_CHAIN (t1), OMP_CLAUSE_CHAIN (t2)); |
| 1608 | } |
| 1609 | |
| 1610 | #undef compare_tree_edges |
| 1611 | |
| 1612 | return true; |
| 1613 | } |
| 1614 | |
| 1615 | /* Compare the tree scc SCC to the prevailing candidate PSCC, filling |
| 1616 | out MAP if they are equal. */ |
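| | /* On success MAP receives, for every member of the SCC, the pair of |
| | corresponding trees (the prevailing tree followed by the streamed-in |
| | tree), as filled in by compare_tree_sccs_1. */ |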
| 1617 | |
| 1618 | static bool |
| 1619 | compare_tree_sccs (tree_scc *pscc, tree_scc *scc, |
| 1620 | tree *map) |
| 1621 | { |
| 1622 | /* Assume SCC entry hashes are sorted by their cardinality, which |
| 1623 | means we can simply take the first n-tuple of equal hashes |
| 1624 | (recorded as entry_len) and do n SCC entry candidate |
| 1625 | comparisons. */ |
| 1626 | for (unsigned i = 0; i < pscc->entry_len; ++i) |
| 1627 | { |
| 1628 | tree *mapp = map; |
| 1629 | num_scc_compare_collisions++; |
| 1630 | if (compare_tree_sccs_1 (t1: pscc->entries[0], t2: scc->entries[i], map: &mapp)) |
| 1631 | { |
| 1632 | /* Equal - no need to reset TREE_VISITED or TREE_ASM_WRITTEN |
| 1633 | on the scc as all trees will be freed. */ |
| 1634 | return true; |
| 1635 | } |
| 1636 | /* Reset TREE_ASM_WRITTEN on scc for the next compare or in case |
| 1637 | the SCC prevails. */ |
| 1638 | for (unsigned j = 0; j < scc->len; ++j) |
| 1639 | TREE_ASM_WRITTEN (scc->entries[j]) = 0; |
| 1640 | } |
| 1641 | |
| 1642 | return false; |
| 1643 | } |
| 1644 | |
| 1645 | /* qsort comparison function for a map of pointer pairs, ordering the |
| 1646 | pairs by their second pointer. */ |
| 1647 | |
| 1648 | static int |
| 1649 | cmp_tree (const void *p1_, const void *p2_) |
| 1650 | { |
| 1651 | tree *p1 = (tree *)(const_cast<void *>(p1_)); |
| 1652 | tree *p2 = (tree *)(const_cast<void *>(p2_)); |
| 1653 | if (p1[1] == p2[1]) |
| 1654 | return 0; |
| 1655 | return ((uintptr_t)p1[1] < (uintptr_t)p2[1]) ? -1 : 1; |
| 1656 | } |
| 1657 | |
| 1658 | /* T prevails at reader-cache index FROM in DATA_IN. If a debug reference was |
| 1659 | queued for the tree read at FROM, register it for T unless T already has one. */ |
| 1660 | |
| 1661 | static void |
| 1662 | process_dref (class data_in *data_in, tree t, unsigned from) |
| 1663 | { |
| 1664 | struct streamer_tree_cache_d *cache = data_in->reader_cache; |
| 1665 | /* If we got a debug reference queued, see if the prevailing |
| 1666 | tree has a debug reference and if not, register the one |
| 1667 | for the tree we are about to throw away. */ |
| 1668 | if (dref_queue.length () == 1) |
| 1669 | { |
| 1670 | dref_entry e = dref_queue.pop (); |
| 1671 | gcc_assert (e.decl |
| 1672 | == streamer_tree_cache_get_tree (cache, from)); |
| 1673 | const char *sym; |
| 1674 | unsigned HOST_WIDE_INT off; |
| 1675 | if (!debug_hooks->die_ref_for_decl (t, &sym, &off)) |
| 1676 | debug_hooks->register_external_die (t, e.sym, e.off); |
| 1677 | } |
| 1678 | } |
| 1679 | |
| 1680 | /* Try to unify the SCC with nodes FROM to FROM + LEN in CACHE and |
| 1681 | hash value SCC_HASH with an already recorded SCC. Return true if |
| 1682 | that was successful, otherwise return false. */ |
| 1683 | |
| 1684 | static bool |
| 1685 | unify_scc (class data_in *data_in, unsigned from, |
| 1686 | unsigned len, unsigned scc_entry_len, hashval_t scc_hash) |
| 1687 | { |
| 1688 | bool unified_p = false; |
| 1689 | struct streamer_tree_cache_d *cache = data_in->reader_cache; |
| 1690 | tree_scc *scc |
| 1691 | = (tree_scc *) alloca (sizeof (tree_scc) + (len - 1) * sizeof (tree)); |
| 1692 | scc->next = NULL; |
| 1693 | scc->hash = scc_hash; |
| 1694 | scc->len = len; |
| 1695 | scc->entry_len = scc_entry_len; |
| 1696 | for (unsigned i = 0; i < len; ++i) |
| 1697 | { |
| 1698 | tree t = streamer_tree_cache_get_tree (cache, ix: from + i); |
| 1699 | scc->entries[i] = t; |
| 1700 | /* These kinds of trees are streamed as unshared and should never appear here. */ |
| 1701 | gcc_checking_assert |
| 1702 | (!(TREE_CODE (t) == TRANSLATION_UNIT_DECL |
| 1703 | || (VAR_OR_FUNCTION_DECL_P (t) |
| 1704 | && !(TREE_PUBLIC (t) || DECL_EXTERNAL (t))) |
| 1705 | || TREE_CODE (t) == LABEL_DECL |
| 1706 | || (TREE_CODE (t) == NAMESPACE_DECL && !DECL_NAME (t)) |
| 1707 | || (TYPE_P (t) |
| 1708 | && type_with_linkage_p (TYPE_MAIN_VARIANT (t)) |
| 1709 | && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t))))); |
| 1710 | } |
| 1711 | |
| 1712 | /* Look for the list of candidate SCCs to compare against. */ |
| 1713 | tree_scc **slot; |
| 1714 | slot = tree_scc_hash->find_slot_with_hash (comparable: scc, hash: scc_hash, insert: INSERT); |
| 1715 | if (*slot) |
| 1716 | { |
| 1717 | /* Try unifying against each candidate. */ |
| 1718 | num_scc_compares++; |
| 1719 | |
| 1720 | /* Set TREE_VISITED on the scc so we can easily identify tree nodes |
| 1721 | outside of the scc when following tree edges. Make sure |
| 1722 | that TREE_ASM_WRITTEN is unset so we can use it as 2nd bit |
| 1723 | to track whether we visited the SCC member during the compare. |
| 1724 | We cannot use TREE_VISITED on the pscc members as the extended |
| 1725 | scc and pscc can overlap. */ |
| 1726 | for (unsigned i = 0; i < scc->len; ++i) |
| 1727 | { |
| 1728 | TREE_VISITED (scc->entries[i]) = 1; |
| 1729 | gcc_checking_assert (!TREE_ASM_WRITTEN (scc->entries[i])); |
| 1730 | } |
| 1731 | |
| 1732 | tree *map = XALLOCAVEC (tree, 2 * len); |
| 1733 | for (tree_scc *pscc = *slot; pscc; pscc = pscc->next) |
| 1734 | { |
| 1735 | if (!compare_tree_sccs (pscc, scc, map)) |
| 1736 | continue; |
| 1737 | |
| 1738 | /* Found an equal SCC. */ |
| 1739 | unified_p = true; |
| 1740 | num_scc_compare_collisions--; |
| 1741 | num_sccs_merged++; |
| 1742 | total_scc_size_merged += len; |
| 1743 | |
| 1744 | if (flag_checking) |
| 1745 | for (unsigned i = 0; i < len; ++i) |
| 1746 | { |
| 1747 | tree t = map[2*i+1]; |
| 1748 | enum tree_code code = TREE_CODE (t); |
| 1749 | /* IDENTIFIER_NODEs should be singletons and are merged by the |
| 1750 | streamer. TRANSLATION_UNIT_DECLs should be singletons, too, and we |
| 1751 | must not merge them in any way. */ |
| 1752 | gcc_assert (code != TRANSLATION_UNIT_DECL |
| 1753 | && code != IDENTIFIER_NODE); |
| 1754 | } |
| 1755 | |
| 1756 | /* Fixup the streamer cache with the prevailing nodes according |
| 1757 | to the tree node mapping computed by compare_tree_sccs. */ |
| 1758 | if (len == 1) |
| 1759 | { |
| 1760 | process_dref (data_in, t: pscc->entries[0], from); |
| 1761 | lto_maybe_register_decl (data_in, t: pscc->entries[0], ix: from); |
| 1762 | streamer_tree_cache_replace_tree (cache, pscc->entries[0], from); |
| 1763 | } |
| 1764 | else |
| 1765 | { |
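| | /* MAP holds (prevailing tree, streamed-in tree) pairs from |
| | compare_tree_sccs, while MAP2 pairs each cache index with the |
| | streamed-in tree stored there. Sorting both by their second element |
| | lines up every prevailing tree with the cache slot it has to replace. */ |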
| 1766 | tree *map2 = XALLOCAVEC (tree, 2 * len); |
| 1767 | for (unsigned i = 0; i < len; ++i) |
| 1768 | { |
| 1769 | map2[i*2] = (tree)(uintptr_t)(from + i); |
| 1770 | map2[i*2+1] = scc->entries[i]; |
| 1771 | } |
| 1772 | qsort (map2, len, 2 * sizeof (tree), cmp_tree); |
| 1773 | qsort (map, len, 2 * sizeof (tree), cmp_tree); |
| 1774 | for (unsigned i = 0; i < len; ++i) |
| 1775 | { |
| 1776 | lto_maybe_register_decl (data_in, t: map[2*i], |
| 1777 | ix: (uintptr_t)map2[2*i]); |
| 1778 | streamer_tree_cache_replace_tree (cache, map[2*i], |
| 1779 | (uintptr_t)map2[2*i]); |
| 1780 | } |
| 1781 | } |
| 1782 | |
| 1783 | /* Free the tree nodes from the read SCC. */ |
| 1784 | data_in->location_cache.revert_location_cache (); |
| 1785 | for (unsigned i = 0; i < len; ++i) |
| 1786 | { |
| 1787 | if (TYPE_P (scc->entries[i])) |
| 1788 | num_merged_types++; |
| 1789 | free_node (scc->entries[i]); |
| 1790 | } |
| 1791 | |
| 1792 | /* Drop DIE references. |
| 1793 | ??? Do as in the size-one SCC case which involves sorting |
| 1794 | the queue. */ |
| 1795 | dref_queue.truncate (size: 0); |
| 1796 | |
| 1797 | break; |
| 1798 | } |
| 1799 | |
| 1800 | /* Reset TREE_VISITED if we didn't unify the SCC with another. */ |
| 1801 | if (!unified_p) |
| 1802 | for (unsigned i = 0; i < scc->len; ++i) |
| 1803 | TREE_VISITED (scc->entries[i]) = 0; |
| 1804 | } |
| 1805 | |
| 1806 | /* If we didn't unify it with any candidate, duplicate the relevant |
| 1807 | pieces to permanent storage and link it into the chain. */ |
| 1808 | if (!unified_p) |
| 1809 | { |
| 1810 | tree_scc *pscc |
| 1811 | = XOBNEWVAR (&tree_scc_hash_obstack, tree_scc, sizeof (tree_scc)); |
| 1812 | memcpy (dest: pscc, src: scc, n: sizeof (tree_scc)); |
| 1813 | pscc->next = (*slot); |
| 1814 | *slot = pscc; |
| 1815 | } |
| 1816 | return unified_p; |
| 1817 | } |
| 1818 | |
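| | /* Hash traits for the map from tree code to frequency that is filled in |
| | process_new_tree below when dumping type statistics. */ |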
| 1819 | typedef int_hash<unsigned, 0, UINT_MAX> code_id_hash; |
| 1820 | |
| 1821 | /* Do the registration necessary once a new tree is fully streamed in (including |
| 1822 | all trees it refers to). */ |
| 1823 | |
| 1824 | static void |
| 1825 | process_new_tree (tree t, hash_map <code_id_hash, unsigned> *hm, |
| 1826 | unsigned index, unsigned *total, class data_in *data_in) |
| 1827 | { |
| 1828 | /* Reconstruct the type variant and pointer-to/reference-to |
| 1829 | chains. */ |
| 1830 | if (TYPE_P (t)) |
| 1831 | { |
| 1832 | /* Map the tree types to their frequencies. */ |
| 1833 | if (flag_lto_dump_type_stats) |
| 1834 | { |
| 1835 | unsigned key = (unsigned) TREE_CODE (t); |
| 1836 | unsigned *countp = hm->get (k: key); |
| 1837 | hm->put (k: key, v: countp ? (*countp) + 1 : 1); |
| 1838 | (*total)++; |
| 1839 | } |
| 1840 | |
| 1841 | num_prevailing_types++; |
| 1842 | lto_fixup_prevailing_type (t); |
| 1843 | |
| 1844 | /* Compute the canonical type of all non-ODR types. |
| 1845 | Delay ODR types until the end of the merging process - the canonical |
| 1846 | type for those can be computed using the (unique) name, but |
| 1847 | we want to do that only if units in other languages do not |
| 1848 | contain a structurally equivalent type. |
| 1849 | |
| 1850 | Because SCC components are streamed in random (hash) order |
| 1851 | we may have encountered the type before while registering |
| 1852 | the type canonical of a derived type in the same SCC. */ |
| 1853 | if (!TYPE_CANONICAL (t)) |
| 1854 | { |
| 1855 | if (!RECORD_OR_UNION_TYPE_P (t) |
| 1856 | || !TYPE_CXX_ODR_P (t)) |
| 1857 | gimple_register_canonical_type (t); |
| 1858 | else if (COMPLETE_TYPE_P (t)) |
| 1859 | vec_safe_push (v&: types_to_register, obj: t); |
| 1860 | } |
| 1861 | if (TYPE_MAIN_VARIANT (t) == t && odr_type_p (t)) |
| 1862 | register_odr_type (t); |
| 1863 | } |
| 1864 | /* Link shared INTEGER_CSTs into the TYPE_CACHED_VALUEs of their |
| 1865 | type, which is also a member of this SCC. */ |
| 1866 | if (TREE_CODE (t) == INTEGER_CST |
| 1867 | && !TREE_OVERFLOW (t)) |
| 1868 | cache_integer_cst (t); |
| 1869 | if (!flag_ltrans) |
| 1870 | { |
| 1871 | lto_maybe_register_decl (data_in, t, ix: index); |
| 1872 | /* Scan the tree for references to global functions or |
| 1873 | variables and record those for later fixup. */ |
| 1874 | if (mentions_vars_p (t)) |
| 1875 | vec_safe_push (v&: tree_with_vars, obj: t); |
| 1876 | } |
| 1877 | } |
| 1878 | |
| 1879 | /* Read all the symbols from buffer DATA, using descriptors in DECL_DATA. |
| 1880 | RESOLUTIONS is the set of symbols picked by the linker (read from the |
| 1881 | resolution file when the linker plugin is being used). */ |
| 1882 | |
| 1883 | static void |
| 1884 | lto_read_decls (struct lto_file_decl_data *decl_data, const void *data, |
| 1885 | vec<ld_plugin_symbol_resolution_t> resolutions) |
| 1886 | { |
| 1887 | const struct lto_decl_header *header = (const struct lto_decl_header *) data; |
| 1888 | const int decl_offset = sizeof (struct lto_decl_header); |
| 1889 | const int main_offset = decl_offset + header->decl_state_size; |
| 1890 | const int string_offset = main_offset + header->main_size; |
| 1891 | class data_in *data_in; |
| 1892 | unsigned int i; |
| 1893 | const uint32_t *data_ptr, *data_end; |
| 1894 | uint32_t num_decl_states; |
| 1895 | |
| 1896 | lto_input_block ib_main ((const char *) data + main_offset, |
| 1897 | header->main_size, decl_data); |
| 1898 | |
| 1899 | data_in = lto_data_in_create (decl_data, (const char *) data + string_offset, |
| 1900 | header->string_size, resolutions); |
| 1901 | |
| 1902 | /* We do not uniquify the pre-loaded cache entries; those are middle-end |
| 1903 | internal types that should not be merged. */ |
| 1904 | |
| 1905 | hash_map <code_id_hash, unsigned> hm; |
| 1906 | unsigned total = 0; |
| 1907 | |
| 1908 | /* Read the global declarations and types. */ |
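| | /* Each record in the main stream is either an SCC of trees (LTO_tree_scc), |
| | a block of unshared trees (LTO_trees), or a single tree; only |
| | LTO_tree_scc records are candidates for merging with SCCs read earlier |
| | (and only outside of LTRANS). */ |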
| 1909 | while (ib_main.p < ib_main.len) |
| 1910 | { |
| 1911 | tree t; |
| 1912 | unsigned from = data_in->reader_cache->nodes.length (); |
| 1913 | /* Read and uniquify SCCs as in the input stream. */ |
| 1914 | enum LTO_tags tag = streamer_read_record_start (ib: &ib_main); |
| 1915 | if (tag == LTO_tree_scc || tag == LTO_trees) |
| 1916 | { |
| 1917 | unsigned len_; |
| 1918 | unsigned scc_entry_len; |
| 1919 | |
| 1920 | /* Because we stream in SCC order we know that all unshared trees |
| 1921 | are now fully streamed. Process them. */ |
| 1922 | hashval_t scc_hash = lto_input_scc (&ib_main, data_in, &len_, |
| 1923 | &scc_entry_len, |
| 1924 | tag == LTO_tree_scc); |
| 1925 | unsigned len = data_in->reader_cache->nodes.length () - from; |
| 1926 | gcc_assert (len == len_); |
| 1927 | |
| 1928 | if (tag == LTO_tree_scc) |
| 1929 | { |
| 1930 | total_scc_size += len; |
| 1931 | num_sccs_read++; |
| 1932 | } |
| 1933 | else |
| 1934 | num_unshared_trees_read += len; |
| 1935 | |
| 1936 | /* We have the special case of size-1 SCCs that are pre-merged |
| 1937 | by means of identifier and string sharing for example. |
| 1938 | ??? Maybe we should avoid streaming those as SCCs. */ |
| 1939 | tree first = streamer_tree_cache_get_tree (cache: data_in->reader_cache, |
| 1940 | ix: from); |
| 1941 | /* Identifiers and integer constants are shared specially; they should never |
| 1942 | go through the tree merging path. */ |
| 1943 | gcc_checking_assert ((TREE_CODE (first) != IDENTIFIER_NODE |
| 1944 | && (TREE_CODE (first) != INTEGER_CST |
| 1945 | || TREE_OVERFLOW (first))) |
| 1946 | || len != 1); |
| 1947 | |
| 1948 | /* Try to unify the SCC with already existing ones. */ |
| 1949 | if (!flag_ltrans && tag != LTO_trees |
| 1950 | && unify_scc (data_in, from, |
| 1951 | len, scc_entry_len, scc_hash)) |
| 1952 | continue; |
| 1953 | |
| 1954 | /* Tree merging failed, mark entries in location cache as |
| 1955 | permanent. */ |
| 1956 | data_in->location_cache.accept_location_cache (); |
| 1957 | |
| 1958 | bool seen_type = false; |
| 1959 | for (unsigned i = 0; i < len; ++i) |
| 1960 | { |
| 1961 | tree t = streamer_tree_cache_get_tree (cache: data_in->reader_cache, |
| 1962 | ix: from + i); |
| 1963 | process_new_tree (t, hm: &hm, index: from + i, total: &total, data_in); |
| 1964 | if (TYPE_P (t)) |
| 1965 | seen_type = true; |
| 1966 | } |
| 1967 | |
| 1968 | /* Register DECLs with the debuginfo machinery. */ |
| 1969 | while (!dref_queue.is_empty ()) |
| 1970 | { |
| 1971 | dref_entry e = dref_queue.pop (); |
| 1972 | debug_hooks->register_external_die (e.decl, e.sym, e.off); |
| 1973 | } |
| 1974 | |
| 1975 | if (seen_type) |
| 1976 | num_type_scc_trees += len; |
| 1977 | } |
| 1978 | else |
| 1979 | { |
| 1980 | t = lto_input_tree_1 (&ib_main, data_in, tag, hash: 0); |
| 1981 | gcc_assert (data_in->reader_cache->nodes.length () == from + 1); |
| 1982 | num_unshared_trees_read++; |
| 1983 | data_in->location_cache.accept_location_cache (); |
| 1984 | process_dref (data_in, t, from); |
| 1985 | if (TREE_CODE (t) == IDENTIFIER_NODE |
| 1986 | || (TREE_CODE (t) == INTEGER_CST |
| 1987 | && !TREE_OVERFLOW (t))) |
| 1988 | ; |
| 1989 | else |
| 1990 | { |
| 1991 | lto_maybe_register_decl (data_in, t, ix: from); |
| 1992 | process_new_tree (t, hm: &hm, index: from, total: &total, data_in); |
| 1993 | } |
| 1994 | } |
| 1995 | } |
| 1996 | |
| 1997 | /* Dump type statistics. */ |
| 1998 | if (flag_lto_dump_type_stats) |
| 1999 | { |
| 2000 | fprintf (stdout, format: " Type Frequency Percentage\n\n" ); |
| 2001 | for (hash_map<code_id_hash, unsigned>::iterator itr = hm.begin (); |
| 2002 | itr != hm.end (); |
| 2003 | ++itr) |
| 2004 | { |
| 2005 | std::pair<unsigned, unsigned> p = *itr; |
| 2006 | enum tree_code code = (enum tree_code) p.first; |
| 2007 | fprintf (stdout, format: "%14s %6d %12.2f\n" , get_tree_code_name (code), |
| 2008 | p.second, float (p.second)/total*100); |
| 2009 | } |
| 2010 | } |
| 2011 | |
| 2012 | data_in->location_cache.apply_location_cache (); |
| 2013 | |
| 2014 | /* Read in lto_in_decl_state objects. */ |
| 2015 | data_ptr = (const uint32_t *) ((const char*) data + decl_offset); |
| 2016 | data_end |
| 2017 | = (const uint32_t *) ((const char*) data_ptr + header->decl_state_size); |
| 2018 | num_decl_states = *data_ptr++; |
| 2019 | |
| 2020 | gcc_assert (num_decl_states > 0); |
| 2021 | decl_data->global_decl_state = lto_new_in_decl_state (); |
| 2022 | data_ptr = lto_read_in_decl_state (data_in, data: data_ptr, |
| 2023 | state: decl_data->global_decl_state); |
| 2024 | |
| 2025 | /* Read in per-function decl states and enter them in hash table. */ |
| 2026 | decl_data->function_decl_states |
| 2027 | = hash_table<decl_state_hasher>::create_ggc (n: 37); |
| 2028 | |
| 2029 | for (i = 1; i < num_decl_states; i++) |
| 2030 | { |
| 2031 | struct lto_in_decl_state *state = lto_new_in_decl_state (); |
| 2032 | |
| 2033 | data_ptr = lto_read_in_decl_state (data_in, data: data_ptr, state); |
| 2034 | lto_in_decl_state **slot |
| 2035 | = decl_data->function_decl_states->find_slot (value: state, insert: INSERT); |
| 2036 | gcc_assert (*slot == NULL); |
| 2037 | *slot = state; |
| 2038 | } |
| 2039 | |
| 2040 | if (data_ptr != data_end) |
| 2041 | internal_error ("bytecode stream: garbage at the end of symbols section" ); |
| 2042 | |
| 2043 | /* Set the current decl state to be the global state. */ |
| 2044 | decl_data->current_decl_state = decl_data->global_decl_state; |
| 2045 | |
| 2046 | lto_data_in_delete (data_in); |
| 2047 | } |
| 2048 | |
| 2049 | /* Custom hex parser used instead of strtoll, which is not portable. */ |
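| | /* For example, the digit string "1a2F" parses to 0x1a2f; there is no "0x" |
| | prefix, sign or overflow handling, and any other character is a fatal |
| | error. */ |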
| 2050 | |
| 2051 | static int64_t |
| 2052 | lto_parse_hex (const char *p) |
| 2053 | { |
| 2054 | int64_t ret = 0; |
| 2055 | |
| 2056 | for (; *p != '\0'; ++p) |
| 2057 | { |
| 2058 | char c = *p; |
| 2059 | unsigned char part; |
| 2060 | ret <<= 4; |
| 2061 | if (c >= '0' && c <= '9') |
| 2062 | part = c - '0'; |
| 2063 | else if (c >= 'a' && c <= 'f') |
| 2064 | part = c - 'a' + 10; |
| 2065 | else if (c >= 'A' && c <= 'F') |
| 2066 | part = c - 'A' + 10; |
| 2067 | else |
| 2068 | internal_error ("could not parse hex number" ); |
| 2069 | ret |= part; |
| 2070 | } |
| 2071 | |
| 2072 | return ret; |
| 2073 | } |
| 2074 | |
| 2075 | /* Read the resolution entries for FILE from the RESOLUTION stream and record |
| 2076 | them in the matching file data found in FILE_IDS. */ |
| 2077 | |
| 2078 | static void |
| 2079 | lto_resolution_read (splay_tree file_ids, FILE *resolution, lto_file *file) |
| 2080 | { |
| 2081 | /* We require that objects in the resolution file are in the same |
| 2082 | order as the lto1 command line. */ |
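| | /* Roughly, the per-object record consumed below looks like |
| | <object name>[@0x<offset>] |
| | <number of symbols> |
| | <index> <sub id> <resolution> ... (one line per symbol) |
| | which simply mirrors the fscanf calls that follow. */ |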
| 2083 | unsigned int name_len; |
| 2084 | char *obj_name; |
| 2085 | unsigned int num_symbols; |
| 2086 | unsigned int i; |
| 2087 | struct lto_file_decl_data *file_data; |
| 2088 | splay_tree_node nd = NULL; |
| 2089 | |
| 2090 | if (!resolution) |
| 2091 | return; |
| 2092 | |
| 2093 | name_len = strlen (s: file->filename); |
| 2094 | obj_name = XNEWVEC (char, name_len + 1); |
| 2095 | fscanf (stream: resolution, format: " " ); /* Read white space. */ |
| 2096 | |
| 2097 | fread (ptr: obj_name, size: sizeof (char), n: name_len, stream: resolution); |
| 2098 | obj_name[name_len] = '\0'; |
| 2099 | if (filename_cmp (s1: obj_name, s2: file->filename) != 0) |
| 2100 | internal_error ("unexpected file name %s in linker resolution file. " |
| 2101 | "Expected %s" , obj_name, file->filename); |
| 2102 | if (file->offset != 0) |
| 2103 | { |
| 2104 | int t; |
| 2105 | char offset_p[17]; |
| 2106 | int64_t offset; |
| 2107 | t = fscanf (stream: resolution, format: "@0x%16s" , offset_p); |
| 2108 | if (t != 1) |
| 2109 | internal_error ("could not parse file offset" ); |
| 2110 | offset = lto_parse_hex (p: offset_p); |
| 2111 | if (offset != file->offset) |
| 2112 | internal_error ("unexpected offset" ); |
| 2113 | } |
| 2114 | |
| 2115 | free (ptr: obj_name); |
| 2116 | |
| 2117 | fscanf (stream: resolution, format: "%u" , &num_symbols); |
| 2118 | |
| 2119 | for (i = 0; i < num_symbols; i++) |
| 2120 | { |
| 2121 | int t; |
| 2122 | unsigned index; |
| 2123 | unsigned HOST_WIDE_INT id; |
| 2124 | char r_str[27]; |
| 2125 | enum ld_plugin_symbol_resolution r = (enum ld_plugin_symbol_resolution) 0; |
| 2126 | unsigned int j; |
| 2127 | unsigned int lto_resolution_str_len = ARRAY_SIZE (lto_resolution_str); |
| 2128 | res_pair rp; |
| 2129 | |
| 2130 | t = fscanf (stream: resolution, format: "%u " HOST_WIDE_INT_PRINT_HEX_PURE |
| 2131 | " %26s %*[^\n]\n" , &index, &id, r_str); |
| 2132 | if (t != 3) |
| 2133 | internal_error ("invalid line in the resolution file" ); |
| 2134 | |
| 2135 | for (j = 0; j < lto_resolution_str_len; j++) |
| 2136 | { |
| 2137 | if (strcmp (s1: lto_resolution_str[j], s2: r_str) == 0) |
| 2138 | { |
| 2139 | r = (enum ld_plugin_symbol_resolution) j; |
| 2140 | /* Incremental linking together with -fwhole-program may seem |
| 2141 | somewhat contradictory (as the point of incremental linking |
| 2142 | is to allow re-linking with more symbols later), but it is |
| 2143 | used to build the LTO kernel. We want to hide all symbols that |
| 2144 | are not explicitly marked as exported and thus turn |
| 2145 | LDPR_PREVAILING_DEF_IRONLY_EXP |
| 2146 | into LDPR_PREVAILING_DEF_IRONLY. */ |
| 2147 | if (flag_whole_program |
| 2148 | && flag_incremental_link == INCREMENTAL_LINK_NOLTO |
| 2149 | && r == LDPR_PREVAILING_DEF_IRONLY_EXP) |
| 2150 | r = LDPR_PREVAILING_DEF_IRONLY; |
| 2151 | break; |
| 2152 | } |
| 2153 | } |
| 2154 | if (j == lto_resolution_str_len) |
| 2155 | internal_error ("invalid resolution in the resolution file" ); |
| 2156 | |
| 2157 | if (!(nd && lto_splay_tree_id_equal_p (key: nd->key, id))) |
| 2158 | { |
| 2159 | nd = lto_splay_tree_lookup (t: file_ids, id); |
| 2160 | if (nd == NULL) |
| 2161 | internal_error ("resolution sub id %wx not in object file" , id); |
| 2162 | } |
| 2163 | |
| 2164 | file_data = (struct lto_file_decl_data *)nd->value; |
| 2165 | /* The indexes are very sparse. To save memory, save them in a compact |
| 2166 | format that is only unpacked later when the subfile is processed. */ |
| 2167 | rp.res = r; |
| 2168 | rp.index = index; |
| 2169 | file_data->respairs.safe_push (obj: rp); |
| 2170 | if (file_data->max_index < index) |
| 2171 | file_data->max_index = index; |
| 2172 | } |
| 2173 | } |
| 2174 | |
| 2175 | /* List of file_decl_datas. */ |
| 2176 | struct file_data_list |
| 2177 | { |
| 2178 | struct lto_file_decl_data *first, *last; |
| 2179 | }; |
| 2180 | |
| 2181 | /* Is NAME the name of an id'ed LTO section? If so, store the id in *ID. */ |
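| | /* For instance, a name of the form <section_name_prefix><section>.<hex id> |
| | yields that hex id, while a name lacking the trailing ".<hex id>" suffix |
| | is rejected (except in LTRANS mode, where the id is simply 0). */ |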
| 2182 | |
| 2183 | static int |
| 2184 | lto_section_with_id (const char *name, unsigned HOST_WIDE_INT *id) |
| 2185 | { |
| 2186 | const char *s; |
| 2187 | |
| 2188 | if (strncmp (s1: name, s2: section_name_prefix, n: strlen (s: section_name_prefix))) |
| 2189 | return 0; |
| 2190 | |
| 2191 | if (flag_ltrans) |
| 2192 | { |
| 2193 | *id = 0; |
| 2194 | return 1; |
| 2195 | } |
| 2196 | |
| 2197 | s = strrchr (s: name, c: '.'); |
| 2198 | if (!s) |
| 2199 | return 0; |
| 2200 | /* If the section is not suffixed with an ID, return 0. */ |
| 2201 | if ((size_t)(s - name) == strlen (s: section_name_prefix)) |
| 2202 | return 0; |
| 2203 | return sscanf (s: s, format: "." HOST_WIDE_INT_PRINT_HEX_PURE, id) == 1; |
| 2204 | } |
| 2205 | |
| 2206 | /* Create the file_data for each sub-file id and copy the section LS into it. */ |
| 2207 | |
| 2208 | static int |
| 2209 | create_subid_section_table (struct lto_section_slot *ls, splay_tree file_ids, |
| 2210 | struct file_data_list *list) |
| 2211 | { |
| 2212 | struct lto_section_slot s_slot, *new_slot; |
| 2213 | unsigned HOST_WIDE_INT id; |
| 2214 | splay_tree_node nd; |
| 2215 | void **hash_slot; |
| 2216 | char *new_name; |
| 2217 | struct lto_file_decl_data *file_data; |
| 2218 | |
| 2219 | if (!lto_section_with_id (name: ls->name, id: &id)) |
| 2220 | return 1; |
| 2221 | |
| 2222 | /* Find hash table of sub module id. */ |
| 2223 | nd = lto_splay_tree_lookup (t: file_ids, id); |
| 2224 | if (nd != NULL) |
| 2225 | { |
| 2226 | file_data = (struct lto_file_decl_data *)nd->value; |
| 2227 | } |
| 2228 | else |
| 2229 | { |
| 2230 | file_data = ggc_alloc<lto_file_decl_data> (); |
| 2231 | memset(s: file_data, c: 0, n: sizeof (struct lto_file_decl_data)); |
| 2232 | file_data->id = id; |
| 2233 | file_data->section_hash_table = lto_obj_create_section_hash_table (); |
| 2234 | lto_splay_tree_insert (t: file_ids, id, file_data); |
| 2235 | |
| 2236 | /* Maintain list in linker order. */ |
| 2237 | if (!list->first) |
| 2238 | list->first = file_data; |
| 2239 | if (list->last) |
| 2240 | list->last->next = file_data; |
| 2241 | |
| 2242 | list->last = file_data; |
| 2243 | } |
| 2244 | |
| 2245 | /* Copy section into sub module hash table. */ |
| 2246 | new_name = XDUPVEC (char, ls->name, strlen (ls->name) + 1); |
| 2247 | s_slot.name = new_name; |
| 2248 | hash_slot = htab_find_slot (file_data->section_hash_table, &s_slot, INSERT); |
| 2249 | gcc_assert (*hash_slot == NULL); |
| 2250 | |
| 2251 | new_slot = XDUP (struct lto_section_slot, ls); |
| 2252 | new_slot->name = new_name; |
| 2253 | *hash_slot = new_slot; |
| 2254 | return 1; |
| 2255 | } |
| 2256 | |
| 2257 | /* Read declarations and other initializations for a FILE_DATA. */ |
| 2258 | |
| 2259 | static void |
| 2260 | lto_file_finalize (struct lto_file_decl_data *file_data, lto_file *file, |
| 2261 | int order) |
| 2262 | { |
| 2263 | const char *data; |
| 2264 | size_t len; |
| 2265 | vec<ld_plugin_symbol_resolution_t> |
| 2266 | resolutions = vNULL; |
| 2267 | int i; |
| 2268 | res_pair *rp; |
| 2269 | |
| 2270 | /* Create a vector for fast access to resolutions. We do this lazily |
| 2271 | to save memory. */ |
| 2272 | resolutions.safe_grow_cleared (len: file_data->max_index + 1, exact: true); |
| 2273 | for (i = 0; file_data->respairs.iterate (ix: i, ptr: &rp); i++) |
| 2274 | resolutions[rp->index] = rp->res; |
| 2275 | file_data->respairs.release (); |
| 2276 | |
| 2277 | file_data->renaming_hash_table = lto_create_renaming_table (); |
| 2278 | file_data->file_name = file->filename; |
| 2279 | file_data->order = order; |
| 2280 | |
| 2281 | /* Read and verify LTO section. */ |
| 2282 | data = lto_get_summary_section_data (file_data, LTO_section_lto, &len); |
| 2283 | if (data == NULL) |
| 2284 | { |
| 2285 | fatal_error (input_location, "bytecode stream in file %qs generated " |
| 2286 | "with GCC compiler older than 10.0" , file_data->file_name); |
| 2287 | return; |
| 2288 | } |
| 2289 | |
| 2290 | memcpy (dest: &file_data->lto_section_header, src: data, n: sizeof (lto_section)); |
| 2291 | lto_check_version (file_data->lto_section_header.major_version, |
| 2292 | file_data->lto_section_header.minor_version, |
| 2293 | file_data->file_name); |
| 2294 | |
| 2295 | #ifdef ACCEL_COMPILER |
| 2296 | lto_input_mode_table (file_data); |
| 2297 | #else |
| 2298 | file_data->mode_table = NULL; |
| 2299 | file_data->mode_bits = ceil_log2 (x: MAX_MACHINE_MODE); |
| 2300 | #endif |
| 2301 | |
| 2302 | data = lto_get_summary_section_data (file_data, LTO_section_decls, &len); |
| 2303 | if (data == NULL) |
| 2304 | { |
| 2305 | internal_error ("cannot read %<LTO_section_decls%> from %s" , |
| 2306 | file_data->file_name); |
| 2307 | return; |
| 2308 | } |
| 2309 | /* Frees resolutions. */ |
| 2310 | lto_read_decls (decl_data: file_data, data, resolutions); |
| 2311 | lto_free_section_data (file_data, LTO_section_decls, NULL, data, len); |
| 2312 | } |
| 2313 | |
| 2314 | /* Finalize FILE_DATA in FILE and increase COUNT. */ |
| 2315 | |
| 2316 | static int |
| 2317 | lto_create_files_from_ids (lto_file *file, struct lto_file_decl_data *file_data, |
| 2318 | int *count, int order) |
| 2319 | { |
| 2320 | lto_file_finalize (file_data, file, order); |
| 2321 | if (symtab->dump_file) |
| 2322 | fprintf (stream: symtab->dump_file, |
| 2323 | format: "Creating file %s with sub id " HOST_WIDE_INT_PRINT_HEX "\n" , |
| 2324 | file_data->file_name, file_data->id); |
| 2325 | (*count)++; |
| 2326 | return 0; |
| 2327 | } |
| 2328 | |
| 2329 | /* Generate a TREE representation for all types and external decl |
| 2330 | entities in FILE. |
| 2331 | |
| 2332 | Read all of the globals out of the file. Then read the cgraph |
| 2333 | and process the .o index into the cgraph nodes so that it can open |
| 2334 | the .o file to load the functions and ipa information. */ |
| 2335 | |
| 2336 | static struct lto_file_decl_data * |
| 2337 | lto_file_read (lto_file *file, FILE *resolution_file, int *count) |
| 2338 | { |
| 2339 | struct lto_file_decl_data *file_data = NULL; |
| 2340 | splay_tree file_ids; |
| 2341 | htab_t section_hash_table; |
| 2342 | struct lto_section_slot *section; |
| 2343 | struct file_data_list file_list; |
| 2344 | struct lto_section_list section_list; |
| 2345 | |
| 2346 | memset (s: §ion_list, c: 0, n: sizeof (struct lto_section_list)); |
| 2347 | section_hash_table = lto_obj_build_section_table (file, list: §ion_list); |
| 2348 | |
| 2349 | /* Dump the details of LTO objects. */ |
| 2350 | if (flag_lto_dump_objects) |
| 2351 | { |
| 2352 | int i=0; |
| 2353 | fprintf (stdout, format: "\n LTO Object Name: %s\n" , file->filename); |
| 2354 | fprintf (stdout, format: "\nNo. Offset Size Section Name\n\n" ); |
| 2355 | for (section = section_list.first; section != NULL; section = section->next) |
| 2356 | fprintf (stdout, format: "%2d %8" PRId64 " %8" PRIu64 " %s\n" , |
| 2357 | ++i, (int64_t) section->start, (uint64_t) section->len, |
| 2358 | section->name); |
| 2359 | } |
| 2360 | |
| 2361 | /* Find all sub modules in the object and put their sections into new hash |
| 2362 | tables in a splay tree. */ |
| 2363 | file_ids = lto_splay_tree_new (); |
| 2364 | memset (s: &file_list, c: 0, n: sizeof (struct file_data_list)); |
| 2365 | for (section = section_list.first; section != NULL; section = section->next) |
| 2366 | create_subid_section_table (ls: section, file_ids, list: &file_list); |
| 2367 | |
| 2368 | /* Add resolutions to file ids. */ |
| 2369 | lto_resolution_read (file_ids, resolution: resolution_file, file); |
| 2370 | |
| 2371 | /* Finalize each lto file for each submodule in the merged object. */ |
| 2372 | int order = 0; |
| 2373 | for (file_data = file_list.first; file_data != NULL; |
| 2374 | file_data = file_data->next) |
| 2375 | lto_create_files_from_ids (file, file_data, count, order: order++); |
| 2376 | |
| 2377 | splay_tree_delete (file_ids); |
| 2378 | htab_delete (section_hash_table); |
| 2379 | |
| 2380 | return file_list.first; |
| 2381 | } |
| 2382 | |
| 2383 | #if HAVE_MMAP_FILE && HAVE_SYSCONF && defined _SC_PAGE_SIZE |
| 2384 | #define LTO_MMAP_IO 1 |
| 2385 | #endif |
| 2386 | |
| 2387 | #if LTO_MMAP_IO |
| 2388 | /* Page size of machine is used for mmap and munmap calls. */ |
| 2389 | static size_t page_mask; |
| 2390 | #endif |
| 2391 | |
| 2392 | /* Get the section data of length LEN from the file backing FILE_DATA, |
| 2393 | starting at OFFSET. The data segment must be freed by the caller when |
| 2394 | the caller is finished. Returns NULL if all was not well. */ |
| 2395 | |
| 2396 | static char * |
| 2397 | lto_read_section_data (struct lto_file_decl_data *file_data, |
| 2398 | intptr_t offset, size_t len) |
| 2399 | { |
| 2400 | char *result; |
| 2401 | static int fd = -1; |
| 2402 | static char *fd_name; |
| 2403 | #if LTO_MMAP_IO |
| 2404 | intptr_t computed_len; |
| 2405 | intptr_t computed_offset; |
| 2406 | intptr_t diff; |
| 2407 | #endif |
| 2408 | |
| 2409 | /* Keep a single-entry file-descriptor cache. The last file we |
| 2410 | touched will get closed at exit. |
| 2411 | ??? Eventually we want to add a more sophisticated larger cache |
| 2412 | or rather fix function body streaming to not stream them in |
| 2413 | practically random order. */ |
| 2414 | if (fd != -1 |
| 2415 | && filename_cmp (s1: fd_name, s2: file_data->file_name) != 0) |
| 2416 | { |
| 2417 | free (ptr: fd_name); |
| 2418 | close (fd: fd); |
| 2419 | fd = -1; |
| 2420 | } |
| 2421 | if (fd == -1) |
| 2422 | { |
| 2423 | fd = open (file: file_data->file_name, O_RDONLY|O_BINARY); |
| 2424 | if (fd == -1) |
| 2425 | { |
| 2426 | fatal_error (input_location, "Cannot open %s" , file_data->file_name); |
| 2427 | return NULL; |
| 2428 | } |
| 2429 | fd_name = xstrdup (file_data->file_name); |
| 2430 | } |
| 2431 | |
| 2432 | #if LTO_MMAP_IO |
| 2433 | if (!page_mask) |
| 2434 | { |
| 2435 | size_t page_size = sysconf (_SC_PAGE_SIZE); |
| 2436 | page_mask = ~(page_size - 1); |
| 2437 | } |
| 2438 | |
| 2439 | computed_offset = offset & page_mask; |
| 2440 | diff = offset - computed_offset; |
| 2441 | computed_len = len + diff; |
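| | /* E.g. with a 4096-byte page, offset 0x1234 and len 0x100 give |
| | computed_offset 0x1000, diff 0x234 and computed_len 0x334, so the |
| | mapping starts on a page boundary and still covers the whole section. */ |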
| 2442 | |
| 2443 | result = (char *) mmap (NULL, len: computed_len, PROT_READ, MAP_PRIVATE, |
| 2444 | fd: fd, offset: computed_offset); |
| 2445 | if (result == MAP_FAILED) |
| 2446 | { |
| 2447 | fatal_error (input_location, "Cannot map %s" , file_data->file_name); |
| 2448 | return NULL; |
| 2449 | } |
| 2450 | |
| 2451 | return result + diff; |
| 2452 | #else |
| 2453 | result = (char *) xmalloc (len); |
| 2454 | if (lseek (fd, offset, SEEK_SET) != offset |
| 2455 | || read (fd, result, len) != (ssize_t) len) |
| 2456 | { |
| 2457 | free (result); |
| 2458 | fatal_error (input_location, "Cannot read %s" , file_data->file_name); |
| 2459 | result = NULL; |
| 2460 | } |
| 2461 | #ifdef __MINGW32__ |
| 2462 | /* Native Windows doesn't support delayed unlink of an opened file, so |
| 2463 | we close the file here again. This produces higher I/O load, but at least |
| 2464 | it prevents dangling file handles from blocking the unlink. */ |
| 2465 | free (fd_name); |
| 2466 | fd_name = NULL; |
| 2467 | close (fd); |
| 2468 | fd = -1; |
| 2469 | #endif |
| 2470 | return result; |
| 2471 | #endif |
| 2472 | } |
| 2473 | |
| 2474 | |
| 2475 | /* Get the section data from FILE_DATA of SECTION_TYPE with NAME. |
| 2476 | NAME will be NULL unless the section type is for a function |
| 2477 | body. */ |
| 2478 | |
| 2479 | static const char * |
| 2480 | get_section_data (struct lto_file_decl_data *file_data, |
| 2481 | enum lto_section_type section_type, |
| 2482 | const char *name, int order, |
| 2483 | size_t *len) |
| 2484 | { |
| 2485 | htab_t section_hash_table = file_data->section_hash_table; |
| 2486 | struct lto_section_slot *f_slot; |
| 2487 | struct lto_section_slot s_slot; |
| 2488 | const char *section_name = lto_get_section_name (section_type, name, |
| 2489 | order, file_data); |
| 2490 | char *data = NULL; |
| 2491 | |
| 2492 | *len = 0; |
| 2493 | s_slot.name = section_name; |
| 2494 | f_slot = (struct lto_section_slot *) htab_find (section_hash_table, &s_slot); |
| 2495 | if (f_slot) |
| 2496 | { |
| 2497 | data = lto_read_section_data (file_data, offset: f_slot->start, len: f_slot->len); |
| 2498 | *len = f_slot->len; |
| 2499 | } |
| 2500 | |
| 2501 | free (CONST_CAST (char *, section_name)); |
| 2502 | return data; |
| 2503 | } |
| 2504 | |
| 2505 | |
| 2506 | /* Free the section data from FILE_DATA of SECTION_TYPE with NAME that |
| 2507 | starts at OFFSET and has LEN bytes. */ |
| 2508 | |
| 2509 | static void |
| 2510 | free_section_data (struct lto_file_decl_data *file_data ATTRIBUTE_UNUSED, |
| 2511 | enum lto_section_type section_type ATTRIBUTE_UNUSED, |
| 2512 | const char *name ATTRIBUTE_UNUSED, |
| 2513 | const char *offset, size_t len ATTRIBUTE_UNUSED) |
| 2514 | { |
| 2515 | #if LTO_MMAP_IO |
| 2516 | intptr_t computed_len; |
| 2517 | intptr_t computed_offset; |
| 2518 | intptr_t diff; |
| 2519 | #endif |
| 2520 | |
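| | /* In the mmap case OFFSET is the pointer returned by get_section_data; |
| | masking it with page_mask recovers the page-aligned mapping address and |
| | DIFF the in-page remainder, mirroring the math in lto_read_section_data. */ |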
| 2521 | #if LTO_MMAP_IO |
| 2522 | computed_offset = ((intptr_t) offset) & page_mask; |
| 2523 | diff = (intptr_t) offset - computed_offset; |
| 2524 | computed_len = len + diff; |
| 2525 | |
| 2526 | munmap (addr: (caddr_t) computed_offset, len: computed_len); |
| 2527 | #else |
| 2528 | free (CONST_CAST(char *, offset)); |
| 2529 | #endif |
| 2530 | } |
| 2531 | |
| 2532 | static lto_file *current_lto_file; |
| 2533 | |
| 2534 | /* If TT is a variable or function decl replace it with its |
| 2535 | prevailing variant. */ |
| 2536 | #define LTO_SET_PREVAIL(tt) \ |
| 2537 | do {\ |
| 2538 | if ((tt) && VAR_OR_FUNCTION_DECL_P (tt) \ |
| 2539 | && (TREE_PUBLIC (tt) || DECL_EXTERNAL (tt))) \ |
| 2540 | { \ |
| 2541 | tt = lto_symtab_prevailing_decl (tt); \ |
| 2542 | fixed = true; \ |
| 2543 | } \ |
| 2544 | } while (0) |
| 2545 | |
| 2546 | /* Ensure that TT isn't a replaceable var or function decl. */ |
| 2547 | #define LTO_NO_PREVAIL(tt) \ |
| 2548 | gcc_checking_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt)) |
| 2549 | |
| 2550 | /* Given a tree T replace all fields referring to variables or functions |
| 2551 | with their prevailing variant. */ |
| 2552 | static void |
| 2553 | lto_fixup_prevailing_decls (tree t) |
| 2554 | { |
| 2555 | enum tree_code code = TREE_CODE (t); |
| 2556 | bool fixed = false; |
| 2557 | |
| 2558 | gcc_checking_assert (code != TREE_BINFO); |
| 2559 | LTO_NO_PREVAIL (TREE_TYPE (t)); |
| 2560 | if (CODE_CONTAINS_STRUCT (code, TS_COMMON) |
| 2561 | /* lto_symtab_prevailing_decl uses TREE_CHAIN to link to the prevailing decl; |
| 2562 | in case T is a prevailed declaration we would ICE here. */ |
| 2563 | && !VAR_OR_FUNCTION_DECL_P (t)) |
| 2564 | LTO_NO_PREVAIL (TREE_CHAIN (t)); |
| 2565 | if (DECL_P (t)) |
| 2566 | { |
| 2567 | LTO_NO_PREVAIL (DECL_NAME (t)); |
| 2568 | LTO_SET_PREVAIL (DECL_CONTEXT (t)); |
| 2569 | if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON)) |
| 2570 | { |
| 2571 | LTO_SET_PREVAIL (DECL_SIZE (t)); |
| 2572 | LTO_SET_PREVAIL (DECL_SIZE_UNIT (t)); |
| 2573 | LTO_SET_PREVAIL (DECL_INITIAL (t)); |
| 2574 | LTO_NO_PREVAIL (DECL_ATTRIBUTES (t)); |
| 2575 | LTO_SET_PREVAIL (DECL_ABSTRACT_ORIGIN (t)); |
| 2576 | } |
| 2577 | if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS)) |
| 2578 | { |
| 2579 | LTO_NO_PREVAIL (DECL_ASSEMBLER_NAME_RAW (t)); |
| 2580 | } |
| 2581 | if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON)) |
| 2582 | { |
| 2583 | LTO_NO_PREVAIL (DECL_RESULT_FLD (t)); |
| 2584 | } |
| 2585 | if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL)) |
| 2586 | { |
| 2587 | LTO_NO_PREVAIL (DECL_ARGUMENTS (t)); |
| 2588 | LTO_SET_PREVAIL (DECL_FUNCTION_PERSONALITY (t)); |
| 2589 | LTO_NO_PREVAIL (DECL_VINDEX (t)); |
| 2590 | } |
| 2591 | if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL)) |
| 2592 | { |
| 2593 | LTO_SET_PREVAIL (DECL_FIELD_OFFSET (t)); |
| 2594 | LTO_NO_PREVAIL (DECL_BIT_FIELD_TYPE (t)); |
| 2595 | LTO_NO_PREVAIL (DECL_QUALIFIER (t)); |
| 2596 | LTO_NO_PREVAIL (DECL_FIELD_BIT_OFFSET (t)); |
| 2597 | LTO_NO_PREVAIL (DECL_FCONTEXT (t)); |
| 2598 | } |
| 2599 | } |
| 2600 | else if (TYPE_P (t)) |
| 2601 | { |
| 2602 | LTO_NO_PREVAIL (TYPE_CACHED_VALUES (t)); |
| 2603 | LTO_SET_PREVAIL (TYPE_SIZE (t)); |
| 2604 | LTO_SET_PREVAIL (TYPE_SIZE_UNIT (t)); |
| 2605 | LTO_NO_PREVAIL (TYPE_ATTRIBUTES (t)); |
| 2606 | LTO_NO_PREVAIL (TYPE_NAME (t)); |
| 2607 | |
| 2608 | LTO_SET_PREVAIL (TYPE_MIN_VALUE_RAW (t)); |
| 2609 | LTO_SET_PREVAIL (TYPE_MAX_VALUE_RAW (t)); |
| 2610 | LTO_NO_PREVAIL (TYPE_LANG_SLOT_1 (t)); |
| 2611 | |
| 2612 | LTO_SET_PREVAIL (TYPE_CONTEXT (t)); |
| 2613 | |
| 2614 | LTO_NO_PREVAIL (TYPE_CANONICAL (t)); |
| 2615 | LTO_NO_PREVAIL (TYPE_MAIN_VARIANT (t)); |
| 2616 | LTO_NO_PREVAIL (TYPE_NEXT_VARIANT (t)); |
| 2617 | } |
| 2618 | else if (EXPR_P (t)) |
| 2619 | { |
| 2620 | int i; |
| 2621 | for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i) |
| 2622 | LTO_SET_PREVAIL (TREE_OPERAND (t, i)); |
| 2623 | } |
| 2624 | else if (TREE_CODE (t) == CONSTRUCTOR) |
| 2625 | { |
| 2626 | unsigned i; |
| 2627 | tree val; |
| 2628 | FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val) |
| 2629 | LTO_SET_PREVAIL (val); |
| 2630 | } |
| 2631 | else |
| 2632 | { |
| 2633 | switch (code) |
| 2634 | { |
| 2635 | case TREE_LIST: |
| 2636 | LTO_SET_PREVAIL (TREE_VALUE (t)); |
| 2637 | LTO_SET_PREVAIL (TREE_PURPOSE (t)); |
| 2638 | break; |
| 2639 | default: |
| 2640 | gcc_unreachable (); |
| 2641 | } |
| 2642 | } |
| 2643 | /* If we fixed nothing, then we missed something seen by |
| 2644 | mentions_vars_p. */ |
| 2645 | gcc_checking_assert (fixed); |
| 2646 | } |
| 2647 | #undef LTO_SET_PREVAIL |
| 2648 | #undef LTO_NO_PREVAIL |
| 2649 | |
| 2650 | /* Helper function of lto_fixup_decls. Walks the var and fn streams in STATE, |
| 2651 | replaces var and function decls with the corresponding prevailing def. */ |
| 2652 | |
| 2653 | static void |
| 2654 | lto_fixup_state (struct lto_in_decl_state *state) |
| 2655 | { |
| 2656 | unsigned i, si; |
| 2657 | |
| 2658 | /* Although we only want to replace FUNCTION_DECLs and VAR_DECLs, |
| 2659 | we still need to walk from all DECLs to find the reachable |
| 2660 | FUNCTION_DECLs and VAR_DECLs. */ |
| 2661 | for (si = 0; si < LTO_N_DECL_STREAMS; si++) |
| 2662 | { |
| 2663 | vec<tree, va_gc> *trees = state->streams[si]; |
| 2664 | for (i = 0; i < vec_safe_length (v: trees); i++) |
| 2665 | { |
| 2666 | tree t = (*trees)[i]; |
| 2667 | if (flag_checking && TYPE_P (t)) |
| 2668 | verify_type (t); |
| 2669 | if (VAR_OR_FUNCTION_DECL_P (t) |
| 2670 | && (TREE_PUBLIC (t) || DECL_EXTERNAL (t))) |
| 2671 | (*trees)[i] = lto_symtab_prevailing_decl (decl: t); |
| 2672 | } |
| 2673 | } |
| 2674 | } |
| 2675 | |
| 2676 | /* Fix the decls from all FILES. Replaces each decl with the corresponding |
| 2677 | prevailing one. */ |
| 2678 | |
| 2679 | static void |
| 2680 | lto_fixup_decls (struct lto_file_decl_data **files) |
| 2681 | { |
| 2682 | unsigned int i; |
| 2683 | tree t; |
| 2684 | |
| 2685 | if (tree_with_vars) |
| 2686 | FOR_EACH_VEC_ELT ((*tree_with_vars), i, t) |
| 2687 | lto_fixup_prevailing_decls (t); |
| 2688 | |
| 2689 | for (i = 0; files[i]; i++) |
| 2690 | { |
| 2691 | struct lto_file_decl_data *file = files[i]; |
| 2692 | struct lto_in_decl_state *state = file->global_decl_state; |
| 2693 | lto_fixup_state (state); |
| 2694 | |
| 2695 | hash_table<decl_state_hasher>::iterator iter; |
| 2696 | lto_in_decl_state *elt; |
| 2697 | FOR_EACH_HASH_TABLE_ELEMENT (*file->function_decl_states, elt, |
| 2698 | lto_in_decl_state *, iter) |
| 2699 | lto_fixup_state (state: elt); |
| 2700 | } |
| 2701 | } |
| 2702 | |
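| | /* NULL-terminated vector of file decl data for all (flattened) input files; |
| | its length is tracked by lto_stats.num_input_files. */ |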
| 2703 | static GTY((length ("lto_stats.num_input_files + 1" ))) struct lto_file_decl_data **all_file_decl_data; |
| 2704 | |
| 2705 | /* Turn the file data for sub-files into a single array, so that they look |
| 2706 | like separate files to further passes. */ |
| 2707 | |
| 2708 | static void |
| 2709 | lto_flatten_files (struct lto_file_decl_data **orig, int count, |
| 2710 | int last_file_ix) |
| 2711 | { |
| 2712 | struct lto_file_decl_data *n, *next; |
| 2713 | int i, k; |
| 2714 | |
| 2715 | lto_stats.num_input_files = count; |
| 2716 | all_file_decl_data |
| 2717 | = ggc_cleared_vec_alloc<lto_file_decl_data_ptr> (c: count + 1); |
| 2718 | /* Set the hooks so that all of the ipa passes can read in their data. */ |
| 2719 | lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data); |
| 2720 | for (i = 0, k = 0; i < last_file_ix; i++) |
| 2721 | { |
| 2722 | for (n = orig[i]; n != NULL; n = next) |
| 2723 | { |
| 2724 | all_file_decl_data[k++] = n; |
| 2725 | next = n->next; |
| 2726 | n->next = NULL; |
| 2727 | } |
| 2728 | } |
| 2729 | all_file_decl_data[k] = NULL; |
| 2730 | gcc_assert (k == count); |
| 2731 | } |
| 2732 | |
| 2733 | /* Input file data before flattening (i.e. splitting them into subfiles to |
| 2734 | support incremental linking). */ |
| 2735 | static int real_file_count; |
| 2736 | static GTY((length ("real_file_count + 1" ))) struct lto_file_decl_data **real_file_decl_data; |
| 2737 | |
| 2738 | /* Read all the symbols from the input files FNAMES. NFILES is the |
| 2739 | number of files requested in the command line. Instantiate a |
| 2740 | global call graph by aggregating all the sub-graphs found in each |
| 2741 | file. */ |
| 2742 | |
| 2743 | void |
| 2744 | read_cgraph_and_symbols (unsigned nfiles, const char **fnames) |
| 2745 | { |
| 2746 | unsigned int i, last_file_ix; |
| 2747 | FILE *resolution; |
| 2748 | unsigned resolution_objects = 0; |
| 2749 | int count = 0; |
| 2750 | struct lto_file_decl_data **decl_data; |
| 2751 | symtab_node *snode; |
| 2752 | |
| 2753 | symtab->initialize (); |
| 2754 | |
| 2755 | timevar_push (tv: TV_IPA_LTO_DECL_IN); |
| 2756 | |
| 2757 | #ifdef ACCEL_COMPILER |
| 2758 | section_name_prefix = OFFLOAD_SECTION_NAME_PREFIX; |
| 2759 | lto_stream_offload_p = true; |
| 2760 | #endif |
| 2761 | |
| 2762 | real_file_decl_data |
| 2763 | = decl_data = ggc_cleared_vec_alloc<lto_file_decl_data_ptr> (c: nfiles + 1); |
| 2764 | real_file_count = nfiles; |
| 2765 | |
| 2766 | /* Read the resolution file. */ |
| 2767 | resolution = NULL; |
| 2768 | if (resolution_file_name) |
| 2769 | { |
| 2770 | int t; |
| 2771 | |
| 2772 | resolution = fopen (filename: resolution_file_name, modes: "r" ); |
| 2773 | if (resolution == NULL) |
| 2774 | fatal_error (input_location, |
| 2775 | "could not open symbol resolution file: %m" ); |
| 2776 | |
| 2777 | t = fscanf (stream: resolution, format: "%u" , &resolution_objects); |
| 2778 | gcc_assert (t == 1); |
| 2779 | } |
| 2780 | symtab->state = LTO_STREAMING; |
| 2781 | |
| 2782 | canonical_type_hash_cache = new hash_map<const_tree, hashval_t> (251); |
| 2783 | gimple_canonical_types = htab_create (16381, gimple_canonical_type_hash, |
| 2784 | gimple_canonical_type_eq, NULL); |
| 2785 | gcc_obstack_init (&tree_scc_hash_obstack); |
| 2786 | tree_scc_hash = new hash_table<tree_scc_hasher> (4096); |
| 2787 | |
| 2788 | /* Register the common node types with the canonical type machinery so |
| 2789 | we properly share alias-sets across languages and TUs. Do not |
| 2790 | expose the common nodes as type merge targets - those that should be |
| 2791 | are already exposed by pre-loading the LTO streamer caches. |
| 2792 | Do two passes - first clear TYPE_CANONICAL and then re-compute it. */ |
| 2793 | for (i = 0; i < itk_none; ++i) |
| 2794 | lto_register_canonical_types (node: integer_types[i], first_p: true); |
| 2795 | for (i = 0; i < stk_type_kind_last; ++i) |
| 2796 | lto_register_canonical_types (node: sizetype_tab[i], first_p: true); |
| 2797 | for (i = 0; i < TI_MAX; ++i) |
| 2798 | lto_register_canonical_types (node: global_trees[i], first_p: true); |
| 2799 | for (i = 0; i < itk_none; ++i) |
| 2800 | lto_register_canonical_types (node: integer_types[i], first_p: false); |
| 2801 | for (i = 0; i < stk_type_kind_last; ++i) |
| 2802 | lto_register_canonical_types (node: sizetype_tab[i], first_p: false); |
| 2803 | for (i = 0; i < TI_MAX; ++i) |
| 2804 | lto_register_canonical_types (node: global_trees[i], first_p: false); |
| 2805 | |
| 2806 | if (!quiet_flag) |
| 2807 | fprintf (stderr, format: "Reading object files:" ); |
| 2808 | |
| 2809 | /* Read all of the object files specified on the command line. */ |
| 2810 | for (i = 0, last_file_ix = 0; i < nfiles; ++i) |
| 2811 | { |
| 2812 | struct lto_file_decl_data *file_data = NULL; |
| 2813 | if (!quiet_flag) |
| 2814 | { |
| 2815 | fprintf (stderr, format: " %s" , fnames[i]); |
| 2816 | fflush (stderr); |
| 2817 | } |
| 2818 | |
| 2819 | current_lto_file = lto_obj_file_open (filename: fnames[i], writable: false); |
| 2820 | if (!current_lto_file) |
| 2821 | break; |
| 2822 | |
| 2823 | file_data = lto_file_read (file: current_lto_file, resolution_file: resolution, count: &count); |
| 2824 | if (!file_data) |
| 2825 | { |
| 2826 | lto_obj_file_close (file: current_lto_file); |
| 2827 | free (ptr: current_lto_file); |
| 2828 | current_lto_file = NULL; |
| 2829 | break; |
| 2830 | } |
| 2831 | |
| 2832 | decl_data[last_file_ix++] = file_data; |
| 2833 | |
| 2834 | lto_obj_file_close (file: current_lto_file); |
| 2835 | free (ptr: current_lto_file); |
| 2836 | current_lto_file = NULL; |
| 2837 | } |
| 2838 | |
| 2839 | lto_flatten_files (orig: decl_data, count, last_file_ix); |
| 2840 | lto_stats.num_input_files = count; |
| 2841 | ggc_free(decl_data); |
| 2842 | real_file_decl_data = NULL; |
| 2843 | |
| 2844 | lto_register_canonical_types_for_odr_types (); |
| 2845 | |
| 2846 | if (resolution_file_name) |
| 2847 | { |
| 2848 | /* True, since the plugin splits the archives. */ |
| 2849 | gcc_assert (resolution_objects == nfiles); |
| 2850 | fclose (stream: resolution); |
| 2851 | } |
| 2852 | |
| 2853 | /* Show the LTO report before launching LTRANS. */ |
| 2854 | if (flag_lto_report || (flag_wpa && flag_lto_report_wpa)) |
| 2855 | print_lto_report_1 (); |
| 2856 | |
| 2857 | /* Free gimple type merging datastructures. */ |
| 2858 | delete tree_scc_hash; |
| 2859 | tree_scc_hash = NULL; |
| 2860 | obstack_free (&tree_scc_hash_obstack, NULL); |
| 2861 | htab_delete (gimple_canonical_types); |
| 2862 | gimple_canonical_types = NULL; |
| 2863 | delete canonical_type_hash_cache; |
| 2864 | canonical_type_hash_cache = NULL; |
| 2865 | |
| 2866 | /* At this stage we know that the majority of GGC memory is reachable. |
| 2867 | Growing the limits prevents unnecessary invocations of GGC. */ |
| 2868 | ggc_grow (); |
| 2869 | report_heap_memory_use (); |
| 2870 | |
| 2871 | /* Set the hooks so that all of the ipa passes can read in their data. */ |
| 2872 | lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data); |
| 2873 | |
| 2874 | timevar_pop (tv: TV_IPA_LTO_DECL_IN); |
| 2875 | |
| 2876 | if (!quiet_flag) |
| 2877 | fprintf (stderr, format: "\nReading the symbol table:" ); |
| 2878 | |
| 2879 | timevar_push (tv: TV_IPA_LTO_CGRAPH_IO); |
| 2880 | /* Read the symtab. */ |
| 2881 | input_symtab (); |
| 2882 | |
| 2883 | input_offload_tables (!flag_ltrans); |
| 2884 | |
| 2885 | /* Store resolutions into the symbol table. */ |
| 2886 | |
| 2887 | FOR_EACH_SYMBOL (snode) |
| 2888 | if (snode->externally_visible && snode->real_symbol_p () |
| 2889 | && snode->lto_file_data && snode->lto_file_data->resolution_map |
| 2890 | && !(TREE_CODE (snode->decl) == FUNCTION_DECL |
| 2891 | && fndecl_built_in_p (node: snode->decl)) |
| 2892 | && !(VAR_P (snode->decl) && DECL_HARD_REGISTER (snode->decl))) |
| 2893 | { |
| 2894 | ld_plugin_symbol_resolution_t *res; |
| 2895 | |
| 2896 | res = snode->lto_file_data->resolution_map->get (k: snode->decl); |
| 2897 | if (!res || *res == LDPR_UNKNOWN) |
| 2898 | { |
| 2899 | if (snode->output_to_lto_symbol_table_p ()) |
| 2900 | fatal_error (input_location, "missing resolution data for %s" , |
| 2901 | IDENTIFIER_POINTER |
| 2902 | (DECL_ASSEMBLER_NAME (snode->decl))); |
| 2903 | } |
| 2904 | /* Symbol versions are always used externally, but the linker does not |
| 2905 | report that correctly. |
| 2906 | This is binutils PR25924. */ |
| 2907 | else if (snode->symver && *res == LDPR_PREVAILING_DEF_IRONLY) |
| 2908 | snode->resolution = LDPR_PREVAILING_DEF_IRONLY_EXP; |
| 2909 | else |
| 2910 | snode->resolution = *res; |
| 2911 | } |
| 2912 | for (i = 0; all_file_decl_data[i]; i++) |
| 2913 | if (all_file_decl_data[i]->resolution_map) |
| 2914 | { |
| 2915 | delete all_file_decl_data[i]->resolution_map; |
| 2916 | all_file_decl_data[i]->resolution_map = NULL; |
| 2917 | } |
| 2918 | |
| 2919 | timevar_pop (tv: TV_IPA_LTO_CGRAPH_IO); |
| 2920 | |
| 2921 | if (!quiet_flag) |
| 2922 | fprintf (stderr, format: "\nMerging declarations:" ); |
| 2923 | |
| 2924 | timevar_push (tv: TV_IPA_LTO_DECL_MERGE); |
| 2925 | /* Merge global decls. In LTRANS mode we read the merged cgraph, so we do not |
| 2926 | need to care about resolving symbols again; we only need to replace |
| 2927 | duplicated declarations read from the callgraph and from function |
| 2928 | sections. */ |
| 2929 | if (!flag_ltrans) |
| 2930 | { |
| 2931 | lto_symtab_merge_decls (); |
| 2932 | |
| 2933 | /* If there were errors during symbol merging bail out, we have no |
| 2934 | good way to recover here. */ |
| 2935 | if (seen_error ()) |
| 2936 | fatal_error (input_location, |
| 2937 | "errors during merging of translation units" ); |
| 2938 | |
| 2939 | /* Fixup all decls. */ |
| 2940 | lto_fixup_decls (files: all_file_decl_data); |
| 2941 | } |
| 2942 | if (tree_with_vars) |
| 2943 | ggc_free (tree_with_vars); |
| 2944 | tree_with_vars = NULL; |
| 2945 | /* During WPA we want to prevent ggc collecting by default. Grow the limits |
| 2946 | until after the IPA summaries are streamed in. Basically all IPA memory |
| 2947 | is explicitly managed by ggc_free and ggc collect is not useful. |
| 2948 | The exceptions are the merged declarations. */ |
| 2949 | ggc_grow (); |
| 2950 | report_heap_memory_use (); |
| 2951 | |
| 2952 | timevar_pop (tv: TV_IPA_LTO_DECL_MERGE); |
| 2953 | /* Each pass will set the appropriate timer. */ |
| 2954 | |
| 2955 | if (!quiet_flag) |
| 2956 | fprintf (stderr, format: "\nReading summaries:" ); |
| 2957 | |
| 2958 | /* Read the IPA summary data. */ |
| 2959 | if (flag_ltrans) |
| 2960 | ipa_read_optimization_summaries (); |
| 2961 | else |
| 2962 | ipa_read_summaries (); |
| 2963 | |
| 2964 | ggc_grow (); |
| 2965 | |
| 2966 | for (i = 0; all_file_decl_data[i]; i++) |
| 2967 | { |
| 2968 | gcc_assert (all_file_decl_data[i]->symtab_node_encoder); |
| 2969 | lto_symtab_encoder_delete (all_file_decl_data[i]->symtab_node_encoder); |
| 2970 | all_file_decl_data[i]->symtab_node_encoder = NULL; |
| 2971 | lto_in_decl_state *global_decl_state |
| 2972 | = all_file_decl_data[i]->global_decl_state; |
| 2973 | lto_free_function_in_decl_state (global_decl_state); |
| 2974 | all_file_decl_data[i]->global_decl_state = NULL; |
| 2975 | all_file_decl_data[i]->current_decl_state = NULL; |
| 2976 | } |
| 2977 | |
| 2978 | if (!flag_ltrans) |
| 2979 | { |
| 2980 | /* Finally merge the cgraph according to the decl merging decisions. */ |
| 2981 | timevar_push (TV_IPA_LTO_CGRAPH_MERGE);
| 2982 | |
| 2983 | if (!quiet_flag) |
| 2984 | fprintf (stderr, "\nMerging symbols:");
| 2985 | |
| 2986 | gcc_assert (!dump_file); |
| 2987 | dump_file = dump_begin (lto_link_dump_id, NULL); |
| 2988 | |
| 2989 | if (dump_file) |
| 2990 | { |
| 2991 | fprintf (dump_file, "Before merging:\n");
| 2992 | symtab->dump (dump_file);
| 2993 | } |
| 2994 | lto_symtab_merge_symbols (); |
| 2995 | /* Removal of unreachable symbols is needed to make verify_symtab pass;
| 2996 | we still have duplicated comdat groups containing local statics.
| 2997 | We could also just remove them while merging.  */
| 2998 | symtab->remove_unreachable_nodes (dump_file);
| 2999 | ggc_collect (); |
| 3000 | report_heap_memory_use (); |
| 3001 | |
| 3002 | if (dump_file) |
| 3003 | dump_end (lto_link_dump_id, dump_file); |
| 3004 | dump_file = NULL; |
| 3005 | timevar_pop (TV_IPA_LTO_CGRAPH_MERGE);
| 3006 | } |
| 3007 | symtab->state = IPA_SSA; |
| 3008 | /* All node removals happening here are useless, because
| 3009 | WPA should not stream them.  Still always perform remove_unreachable_nodes
| 3010 | because we may reshape the clone tree, get rid of dead masters of inline
| 3011 | clones, and remove symbol entries for read-only variables we keep around
| 3012 | only to be able to constant fold them.  */
| 3013 | if (flag_ltrans) |
| 3014 | { |
| 3015 | if (symtab->dump_file) |
| 3016 | symtab->dump (symtab->dump_file);
| 3017 | symtab->remove_unreachable_nodes (symtab->dump_file);
| 3018 | } |
| 3019 | |
| 3020 | /* Indicate that the cgraph is built and ready. */ |
| 3021 | symtab->function_flags_ready = true; |
| 3022 | |
| 3023 | ggc_free (all_file_decl_data); |
| 3024 | all_file_decl_data = NULL; |
| 3025 | } |
| 3026 | |
| 3027 | |
| 3028 | |
| 3029 | /* Show various memory usage statistics related to LTO. */ |
| 3030 | void |
| 3031 | print_lto_report_1 (void) |
| 3032 | { |
| 3033 | const char *pfx = (flag_lto) ? "LTO" : (flag_wpa) ? "WPA" : "LTRANS";
| 3034 | fprintf (stderr, "%s statistics\n", pfx);
| 3035 | |
| 3036 | fprintf (stderr, "[%s] read %lu unshared trees\n",
| 3037 | pfx, num_unshared_trees_read);
| 3038 | fprintf (stderr, "[%s] read %lu mergeable SCCs of average size %f\n",
| 3039 | pfx, num_sccs_read, total_scc_size / (double)num_sccs_read);
| 3040 | fprintf (stderr, "[%s] %lu tree bodies read in total\n", pfx,
| 3041 | total_scc_size + num_unshared_trees_read);
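/* The detailed tree and type merging statistics below are only meaningful
(and only printed) when the tree SCC table was used, i.e. during WPA.  */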
| 3042 | if (flag_wpa && tree_scc_hash && num_sccs_read) |
| 3043 | { |
| 3044 | fprintf (stderr, "[%s] tree SCC table: size " HOST_SIZE_T_PRINT_DEC ", "
| 3045 | HOST_SIZE_T_PRINT_DEC " elements, collision ratio: %f\n", pfx,
| 3046 | (fmt_size_t) tree_scc_hash->size (), |
| 3047 | (fmt_size_t) tree_scc_hash->elements (), |
| 3048 | tree_scc_hash->collisions ()); |
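/* Walk all table entries to find the longest chain of SCCs sharing the
same hash value.  */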
| 3049 | hash_table<tree_scc_hasher>::iterator hiter; |
| 3050 | tree_scc *scc, *max_scc = NULL; |
| 3051 | unsigned max_length = 0; |
| 3052 | FOR_EACH_HASH_TABLE_ELEMENT (*tree_scc_hash, scc, tree_scc *, hiter)
| 3053 | { |
| 3054 | unsigned length = 0; |
| 3055 | tree_scc *s = scc; |
| 3056 | for (; s; s = s->next) |
| 3057 | length++; |
| 3058 | if (length > max_length) |
| 3059 | { |
| 3060 | max_length = length; |
| 3061 | max_scc = scc; |
| 3062 | } |
| 3063 | } |
| 3064 | fprintf (stderr, "[%s] tree SCC max chain length %u (size %u)\n",
| 3065 | pfx, max_length, max_scc->len);
| 3066 | fprintf (stderr, "[%s] Compared %lu SCCs, %lu collisions (%f)\n", pfx,
| 3067 | num_scc_compares, num_scc_compare_collisions,
| 3068 | num_scc_compare_collisions / (double) num_scc_compares);
| 3069 | fprintf (stderr, "[%s] Merged %lu SCCs\n", pfx, num_sccs_merged);
| 3070 | fprintf (stderr, "[%s] Merged %lu tree bodies\n", pfx,
| 3071 | total_scc_size_merged);
| 3072 | fprintf (stderr, "[%s] Merged %lu types\n", pfx, num_merged_types);
| 3073 | fprintf (stderr, "[%s] %lu types prevailed (%lu associated trees)\n",
| 3074 | pfx, num_prevailing_types, num_type_scc_trees);
| 3075 | fprintf (stderr, "[%s] GIMPLE canonical type table: size "
| 3076 | HOST_SIZE_T_PRINT_DEC ", " HOST_SIZE_T_PRINT_DEC
| 3077 | " elements, %d searches, %d collisions (ratio: %f)\n", pfx,
| 3078 | (fmt_size_t) htab_size (gimple_canonical_types), |
| 3079 | (fmt_size_t) htab_elements (gimple_canonical_types), |
| 3080 | gimple_canonical_types->searches, |
| 3081 | gimple_canonical_types->collisions, |
| 3082 | htab_collisions (gimple_canonical_types)); |
| 3083 | fprintf (stderr, "[%s] GIMPLE canonical type pointer-map: "
| 3084 | "%lu elements, %ld searches\n", pfx,
| 3085 | num_canonical_type_hash_entries, |
| 3086 | num_canonical_type_hash_queries); |
| 3087 | } |
| 3088 | |
| 3089 | print_lto_report (pfx); |
| 3090 | } |
| 3091 | |
| 3092 | GTY(()) tree lto_eh_personality_decl; |
| 3093 | |
| 3094 | /* Return the LTO personality function decl. */ |
| 3095 | |
| 3096 | tree |
| 3097 | lto_eh_personality (void) |
| 3098 | { |
| 3099 | if (!lto_eh_personality_decl) |
| 3100 | { |
| 3101 | /* Use the first personality DECL for our personality if we don't |
| 3102 | support multiple ones. This ensures that we don't artificially |
| 3103 | create the need for them in a single-language program. */ |
| 3104 | if (first_personality_decl && !dwarf2out_do_cfi_asm ()) |
| 3105 | lto_eh_personality_decl = first_personality_decl; |
| 3106 | else |
| 3107 | lto_eh_personality_decl = lhd_gcc_personality (); |
| 3108 | } |
| 3109 | |
| 3110 | return lto_eh_personality_decl; |
| 3111 | } |
| 3112 | |
| 3113 | /* Set the process name based on the LTO mode. */ |
| 3114 | |
| 3115 | static void |
| 3116 | lto_process_name (void) |
| 3117 | { |
| 3118 | if (flag_lto) |
| 3119 | setproctitle (flag_incremental_link == INCREMENTAL_LINK_LTO |
| 3120 | ? "lto1-inclink" : "lto1-lto" ); |
| 3121 | if (flag_wpa) |
| 3122 | setproctitle ("lto1-wpa" ); |
| 3123 | if (flag_ltrans) |
| 3124 | setproctitle ("lto1-ltrans" ); |
| 3125 | } |
| 3126 | |
| 3127 | |
| 3128 | /* Initialize the LTO front end. */ |
| 3129 | |
| 3130 | void |
| 3131 | lto_fe_init (void) |
| 3132 | { |
| 3133 | lto_process_name (); |
| 3134 | lto_streamer_hooks_init (); |
| 3135 | lto_reader_init (); |
| 3136 | lto_set_in_hooks (NULL, get_section_data, free_section_data); |
| 3137 | memset (&lto_stats, 0, sizeof (lto_stats));
| 3138 | bitmap_obstack_initialize (NULL); |
| 3139 | gimple_register_cfg_hooks (); |
| 3140 | } |
| 3141 | |
| 3142 | #include "gt-lto-lto-common.h" |
| 3143 | |