1/* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2023 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7This file is part of GCC.
8
9GCC is free software; you can redistribute it and/or modify it under
10the terms of the GNU General Public License as published by the Free
11Software Foundation; either version 3, or (at your option) any later
12version.
13
14GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15WARRANTY; without even the implied warranty of MERCHANTABILITY or
16FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17for more details.
18
19You should have received a copy of the GNU General Public License
20along with GCC; see the file COPYING3. If not see
21<http://www.gnu.org/licenses/>. */
22
23#include "config.h"
24#include "system.h"
25#include "coretypes.h"
26#include "backend.h"
27#include "target.h"
28#include "rtl.h"
29#include "tree.h"
30#include "gimple.h"
31#include "tree-pass.h"
32#include "ssa.h"
33#include "gimple-streamer.h"
34#include "alias.h"
35#include "stor-layout.h"
36#include "gimple-iterator.h"
37#include "except.h"
38#include "lto-symtab.h"
39#include "cgraph.h"
40#include "cfgloop.h"
41#include "builtins.h"
42#include "gomp-constants.h"
43#include "debug.h"
44#include "omp-offload.h"
45#include "print-tree.h"
46#include "tree-dfa.h"
47#include "file-prefix-map.h" /* remap_debug_filename() */
48#include "output.h"
49#include "ipa-utils.h"
50#include "toplev.h"
51
52
53static void lto_write_tree (struct output_block*, tree, bool);
54
/* Clear the line info stored in OB.  */
56
57static void
58clear_line_info (struct output_block *ob)
59{
60 ob->current_file = NULL;
61 ob->current_line = 0;
62 ob->current_col = 0;
63 ob->current_sysp = false;
64 ob->reset_locus = true;
65 ob->emit_pwd = true;
66 /* Initialize to something that will never appear as block,
67 so that the first location with block in a function etc.
68 always streams a change_block bit and the first block. */
69 ob->current_block = void_node;
70 ob->current_discr = UINT_MAX;
71}
72
73
74/* Create the output block and return it. SECTION_TYPE is
75 LTO_section_function_body or LTO_static_initializer. */
76
77struct output_block *
78create_output_block (enum lto_section_type section_type)
79{
80 struct output_block *ob = XCNEW (struct output_block);
81 if (streamer_dump_file)
    fprintf (streamer_dump_file, "Creating output block for %s\n",
	     lto_section_name[section_type]);
84
85 ob->section_type = section_type;
86 ob->decl_state = lto_get_out_decl_state ();
  /* Only the global decl stream in non-WPA mode will ever be considered
     by tree merging.  */
  if (!flag_wpa && section_type == LTO_section_decls)
    ob->local_trees = new hash_set <tree>;
91 ob->main_stream = XCNEW (struct lto_output_stream);
92 ob->string_stream = XCNEW (struct lto_output_stream);
93 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
94
95 if (section_type == LTO_section_function_body)
96 ob->cfg_stream = XCNEW (struct lto_output_stream);
97
98 clear_line_info (ob);
99
100 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
101 gcc_obstack_init (&ob->obstack);
102
103 return ob;
104}
105
106
107/* Destroy the output block OB. */
108
109void
110destroy_output_block (struct output_block *ob)
111{
112 enum lto_section_type section_type = ob->section_type;
113
114 delete ob->string_hash_table;
115 ob->string_hash_table = NULL;
116 delete ob->local_trees;
117
  free (ob->main_stream);
  free (ob->string_stream);
  if (section_type == LTO_section_function_body)
    free (ob->cfg_stream);

  streamer_tree_cache_delete (ob->writer_cache);
  obstack_free (&ob->obstack, NULL);

  free (ob);
127}
128
129
130/* Wrapper around variably_modified_type_p avoiding type modification
131 during WPA streaming. */
132
133static bool
134lto_variably_modified_type_p (tree type)
135{
136 return (in_lto_p
137 ? TYPE_LANG_FLAG_0 (TYPE_MAIN_VARIANT (type))
138 : variably_modified_type_p (type, NULL_TREE));
139}
140
141
/* Return true if tree node T is written to various tables.  For these
   nodes, we sometimes want to write their physical representation
   (via lto_output_tree), and sometimes we need to emit an index
   reference into a table (via lto_output_tree_ref).  */
146
147static bool
148tree_is_indexable (tree t)
149{
150 /* Parameters and return values of functions of variably modified types
151 must go to global stream, because they may be used in the type
152 definition. */
153 if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
154 && DECL_CONTEXT (t))
155 return lto_variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)));
  /* IMPORTED_DECL is put into BLOCK and thus it can never be shared.
     We should no longer need to stream it.  */
158 else if (TREE_CODE (t) == IMPORTED_DECL)
159 gcc_unreachable ();
160 else if (TREE_CODE (t) == LABEL_DECL)
161 return FORCED_LABEL (t) || DECL_NONLOCAL (t);
162 else if (((VAR_P (t) && !TREE_STATIC (t))
163 || TREE_CODE (t) == TYPE_DECL
164 || TREE_CODE (t) == CONST_DECL
165 || TREE_CODE (t) == NAMELIST_DECL)
166 && decl_function_context (t))
167 return false;
168 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
169 return false;
170 /* Variably modified types need to be streamed alongside function
171 bodies because they can refer to local entities. Together with
172 them we have to localize their members as well.
173 ??? In theory that includes non-FIELD_DECLs as well. */
174 else if (TYPE_P (t)
	   && lto_variably_modified_type_p (t))
176 return false;
177 else if (TREE_CODE (t) == FIELD_DECL
178 && lto_variably_modified_type_p (DECL_CONTEXT (t)))
179 return false;
180 else
181 return (IS_TYPE_OR_DECL_P (t) || TREE_CODE (t) == SSA_NAME);
182}
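
/* For example, a FORCED_LABEL or a non-local LABEL_DECL is indexable and
   therefore emitted as a reference into the global decl stream, while an
   automatic (non-static) VAR_DECL with a function context, or a variably
   modified type, is streamed inline with the function body instead.  */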
183
184
185/* Output info about new location into bitpack BP.
186 After outputting bitpack, lto_output_location_data has
187 to be done to output actual data. */
188
189static void
190lto_output_location_1 (struct output_block *ob, struct bitpack_d *bp,
191 location_t orig_loc, bool block_p)
192{
193 location_t loc = LOCATION_LOCUS (orig_loc);
194
195 if (loc >= RESERVED_LOCATION_COUNT)
196 {
197 expanded_location xloc = expand_location (loc);
198 unsigned discr = get_discriminator_from_loc (orig_loc);
199
200 if (ob->reset_locus)
201 {
202 if (xloc.file == NULL)
203 ob->current_file = "";
204 if (xloc.line == 0)
205 ob->current_line = 1;
206 if (xloc.column == 0)
207 ob->current_col = 1;
208 ob->reset_locus = false;
209 }
210
      /* As RESERVED_LOCATION_COUNT is 2, we can use the spare value of
	 3 to signal a file change without wasting additional bits.
	 If RESERVED_LOCATION_COUNT changes, reconsider this.  */
214 gcc_checking_assert (RESERVED_LOCATION_COUNT == 2);
      bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT + 1,
			    RESERVED_LOCATION_COUNT
			    + (ob->current_file != xloc.file));

      bp_pack_value (bp, ob->current_line != xloc.line, 1);
      bp_pack_value (bp, ob->current_col != xloc.column, 1);
      bp_pack_value (bp, ob->current_discr != discr, 1);
222
223 if (ob->current_file != xloc.file)
224 {
225 bool stream_pwd = false;
226 const char *remapped = remap_debug_filename (xloc.file);
227 if (ob->emit_pwd && remapped && !IS_ABSOLUTE_PATH (remapped))
228 {
229 stream_pwd = true;
230 ob->emit_pwd = false;
231 }
	  bp_pack_value (bp, stream_pwd, 1);
	  if (stream_pwd)
	    bp_pack_string (ob, bp, get_src_pwd (), true);
	  bp_pack_string (ob, bp, remapped, true);
	  bp_pack_value (bp, xloc.sysp, 1);
237 }
238 ob->current_file = xloc.file;
239 ob->current_sysp = xloc.sysp;
240
241 if (ob->current_line != xloc.line)
242 bp_pack_var_len_unsigned (bp, xloc.line);
243 ob->current_line = xloc.line;
244
245 if (ob->current_col != xloc.column)
246 bp_pack_var_len_unsigned (bp, xloc.column);
247 ob->current_col = xloc.column;
248
249 if (ob->current_discr != discr)
250 bp_pack_var_len_unsigned (bp, discr);
251 ob->current_discr = discr;
252 }
253 else
    bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT + 1, loc);
255
256 if (block_p)
257 {
258 tree block = LOCATION_BLOCK (orig_loc);
      bp_pack_value (bp, ob->current_block != block, 1);
260 streamer_write_bitpack (bp);
261 if (ob->current_block != block)
262 lto_output_tree (ob, block, true, true);
263 ob->current_block = block;
264 }
265}
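
/* As an illustration of the encoding above: a location in the same file
   as the previously streamed one packs the value RESERVED_LOCATION_COUNT
   (2) into the range and streams no file name, while a file change is
   signalled by the spare value 3 followed by the (possibly remapped) file
   name.  The three single-bit flags then record whether the line, column
   and discriminator changed; only the changed ones are streamed afterwards
   as variable-length integers.  */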
266
267/* Output info about new location into bitpack BP.
268 After outputting bitpack, lto_output_location_data has
269 to be done to output actual data. */
270
271void
272lto_output_location (struct output_block *ob, struct bitpack_d *bp,
273 location_t loc)
274{
  lto_output_location_1 (ob, bp, loc, false);
276}
277
278/* Output info about new location into bitpack BP.
279 After outputting bitpack, lto_output_location_data has
280 to be done to output actual data. Like lto_output_location, but
281 additionally output LOCATION_BLOCK info too and write the BP bitpack. */
282
283void
284lto_output_location_and_block (struct output_block *ob, struct bitpack_d *bp,
285 location_t loc)
286{
  lto_output_location_1 (ob, bp, loc, true);
288}
289
290
/* Look up T in ENCODER.  If T is not found, create a new entry for it in
   ENCODER with the next available index, and record T there.
   Return the index.  */
295
296
297static unsigned
298lto_get_index (struct lto_tree_ref_encoder *encoder, tree t)
299{
300 bool existed_p;
301
302 unsigned int &index
    = encoder->tree_hash_table->get_or_insert (t, &existed_p);
304 if (!existed_p)
305 {
306 index = encoder->trees.length ();
307 if (streamer_dump_file)
308 {
309 print_node_brief (streamer_dump_file, " Encoding indexable ",
310 t, 4);
	  fprintf (streamer_dump_file, " as %i \n", index);
312 }
      encoder->trees.safe_push (t);
314 }
315
316 return index;
317}
318
319
320/* If EXPR is an indexable tree node, output a reference to it to
321 output block OB. Otherwise, output the physical representation of
322 EXPR to OB. */
323
324static void
325lto_indexable_tree_ref (struct output_block *ob, tree expr,
326 enum LTO_tags *tag, unsigned *index)
327{
328 gcc_checking_assert (tree_is_indexable (expr));
329
330 if (TREE_CODE (expr) == SSA_NAME)
331 {
332 *tag = LTO_ssa_name_ref;
333 *index = SSA_NAME_VERSION (expr);
334 }
335 else
336 {
337 *tag = LTO_global_stream_ref;
      *index = lto_get_index (&ob->decl_state->streams[LTO_DECL_STREAM], expr);
339 }
340}
341
342
343/* Output a static or extern var DECL to OBS. */
344
345void
346lto_output_var_decl_ref (struct lto_out_decl_state *decl_state,
347 struct lto_output_stream * obs, tree decl)
348{
349 gcc_checking_assert (VAR_P (decl));
350 streamer_write_uhwi_stream
    (obs, lto_get_index (&decl_state->streams[LTO_DECL_STREAM],
			 decl));
353}
354
355
/* Output a function DECL to OBS.  */
357
358void
359lto_output_fn_decl_ref (struct lto_out_decl_state *decl_state,
360 struct lto_output_stream * obs, tree decl)
361{
362 gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL);
363 streamer_write_uhwi_stream
    (obs, lto_get_index (&decl_state->streams[LTO_DECL_STREAM], decl));
365}
366
367/* Return true if EXPR is a tree node that can be written to disk. */
368
369static inline bool
370lto_is_streamable (tree expr)
371{
372 enum tree_code code = TREE_CODE (expr);
373
374 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
375 name version in lto_output_tree_ref (see output_ssa_names). */
  return !is_lang_specific (expr)
377 && code != SSA_NAME
378 && code != LANG_TYPE
379 && code != MODIFY_EXPR
380 && code != INIT_EXPR
381 && code != TARGET_EXPR
382 && code != BIND_EXPR
383 && code != WITH_CLEANUP_EXPR
384 && code != STATEMENT_LIST
385 && (code == CASE_LABEL_EXPR
386 || code == DECL_EXPR
387 || TREE_CODE_CLASS (code) != tcc_statement);
388}
389
/* Very rough estimate of the streaming size of the initializer.  If we
   ignored the presence of strings, we could simply count the number of
   non-indexable tree nodes and the number of references to indexable
   nodes.  Strings however may be very large and we do not want to dump
   them into the global stream.

   Count the size of the initializer until the budget in DATA goes
   negative.  */
396
397static tree
398subtract_estimated_size (tree *tp, int *ws, void *data)
399{
400 long *sum = (long *)data;
  if (tree_is_indexable (*tp))
402 {
403 /* Indexable tree is one reference to global stream.
404 Guess it may be about 4 bytes. */
405 *sum -= 4;
406 *ws = 0;
407 }
408 /* String table entry + base of tree node needs to be streamed. */
409 if (TREE_CODE (*tp) == STRING_CST)
410 *sum -= TREE_STRING_LENGTH (*tp) + 8;
411 else
412 {
413 /* Identifiers are also variable length but should not appear
414 naked in constructor. */
415 gcc_checking_assert (TREE_CODE (*tp) != IDENTIFIER_NODE);
      /* We make no real attempt to work out the size of a pickled tree, as
	 it is very variable.  Just make it bigger than a reference.  */
418 *sum -= 16;
419 }
420 if (*sum < 0)
421 return *tp;
422 return NULL_TREE;
423}
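
/* A rough worked example against the 30-byte budget used by
   get_symbol_initial_value below: two non-indexable nodes already account
   for 2 * 16 = 32 estimated bytes, a reference to an indexable decl costs
   only 4, and a bare STRING_CST is rejected once its length exceeds 22
   (23 + 8 > 30).  These are guesses about the streamed size, not exact
   numbers.  */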
424
425
426/* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
427
428static tree
429get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
430{
431 gcc_checking_assert (DECL_P (expr)
432 && TREE_CODE (expr) != FUNCTION_DECL
433 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
434
435 /* Handle DECL_INITIAL for symbols. */
436 tree initial = DECL_INITIAL (expr);
437 if (VAR_P (expr)
438 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
439 && !DECL_IN_CONSTANT_POOL (expr)
440 && initial)
441 {
442 varpool_node *vnode;
443 /* Extra section needs about 30 bytes; do not produce it for simple
444 scalar values. */
      if (!(vnode = varpool_node::get (expr))
446 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
447 initial = error_mark_node;
448 if (initial != error_mark_node)
449 {
450 long max_size = 30;
451 if (walk_tree (&initial, subtract_estimated_size, (void *)&max_size,
452 NULL))
453 initial = error_mark_node;
454 }
455 }
456
457 return initial;
458}
459
460
461/* Output reference to tree T to the stream.
462 Assume that T is already in encoder cache.
463 This is used to stream tree bodies where we know the DFS walk arranged
464 everything to cache. Must be matched with stream_read_tree_ref. */
465
466void
467stream_write_tree_ref (struct output_block *ob, tree t)
468{
469 if (!t)
470 streamer_write_zero (ob);
471 else
472 {
473 unsigned int ix;
474 bool existed_p = streamer_tree_cache_lookup (ob->writer_cache, t, &ix);
475 if (existed_p)
476 streamer_write_hwi (ob, ix + 1);
477 else
478 {
479 enum LTO_tags tag;
480 unsigned ix;
481 int id = 0;
482
	  lto_indexable_tree_ref (ob, t, &tag, &ix);
484 if (tag == LTO_ssa_name_ref)
485 id = 1;
486 else
487 gcc_checking_assert (tag == LTO_global_stream_ref);
488 streamer_write_hwi (ob, -(int)(ix * 2 + id + 1));
489 }
490 if (streamer_debugging)
491 streamer_write_uhwi (ob, TREE_CODE (t));
492 }
493}
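
/* The encoding used above is thus: 0 stands for NULL_TREE, a positive
   value V refers to slot V - 1 of the writer cache, and a negative value
   V encodes U = -V - 1, where the low bit of U selects between the global
   decl stream (0) and an SSA name version (1) and U >> 1 is the index in
   that space.  stream_read_tree_ref has to decode it the same way.  */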
494
495
496
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  */
501
502static void
503lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
504{
505 if (streamer_dump_file)
506 {
507 print_node_brief (streamer_dump_file, " Streaming body of ",
508 expr, 4);
      fprintf (streamer_dump_file, " to %s\n",
510 lto_section_name[ob->section_type]);
511 }
512
513 /* Pack all the non-pointer fields in EXPR into a bitpack and write
514 the resulting bitpack. */
515 streamer_write_tree_bitfields (ob, expr);
516
517 /* Write all the pointer fields in EXPR. */
518 streamer_write_tree_body (ob, expr);
519
520 /* Write any LTO-specific data to OB. */
521 if (DECL_P (expr)
522 && TREE_CODE (expr) != FUNCTION_DECL
523 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
524 {
525 /* Handle DECL_INITIAL for symbols. */
526 tree initial = get_symbol_initial_value
	(ob->decl_state->symtab_node_encoder, expr);
528 stream_write_tree (ob, initial, ref_p);
529 }
530
531 /* Stream references to early generated DIEs. Keep in sync with the
532 trees handled in dwarf2out_die_ref_for_decl. */
533 if ((DECL_P (expr)
534 && TREE_CODE (expr) != FIELD_DECL
535 && TREE_CODE (expr) != DEBUG_EXPR_DECL
536 && TREE_CODE (expr) != TYPE_DECL)
537 || TREE_CODE (expr) == BLOCK)
538 {
539 const char *sym;
540 unsigned HOST_WIDE_INT off;
541 if (debug_info_level > DINFO_LEVEL_NONE
542 && debug_hooks->die_ref_for_decl (expr, &sym, &off))
543 {
544 streamer_write_string (ob, ob->main_stream, sym, true);
545 streamer_write_uhwi (ob, off);
546 }
547 else
548 streamer_write_string (ob, ob->main_stream, NULL, true);
549 }
550}
551
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  */
556
557static void
558lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
559{
560 if (!lto_is_streamable (expr))
561 internal_error ("tree code %qs is not supported in LTO streams",
562 get_tree_code_name (TREE_CODE (expr)));
563
564 /* Write the header, containing everything needed to materialize
565 EXPR on the reading side. */
566 streamer_write_tree_header (ob, expr);
567
568 lto_write_tree_1 (ob, expr, ref_p);
569}
570
/* Emit the physical representation of tree node EXPR to output block OB.
   If THIS_REF_P is true, the leaves of EXPR are emitted as references via
   lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.  */
574
575static void
576lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
577 bool ref_p, bool this_ref_p)
578{
579 unsigned ix;
580
581 gcc_checking_assert (expr != NULL_TREE
582 && !(this_ref_p && tree_is_indexable (expr)));
583
584 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
585 expr, hash, &ix);
586 gcc_assert (!exists_p);
587 if (TREE_CODE (expr) == INTEGER_CST
588 && !TREE_OVERFLOW (expr))
589 {
590 /* Shared INTEGER_CST nodes are special because they need their
591 original type to be materialized by the reader (to implement
592 TYPE_CACHED_VALUES). */
593 streamer_write_integer_cst (ob, expr);
594 }
595 else
596 {
597 /* This is the first time we see EXPR, write its fields
598 to OB. */
599 lto_write_tree (ob, expr, ref_p);
600 }
601}
602
603class DFS
604{
605public:
606 DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
607 bool single_p);
608 ~DFS ();
609
610 struct scc_entry
611 {
612 tree t;
613 hashval_t hash;
614 };
615 auto_vec<scc_entry,32> sccstack;
616
617private:
618 struct sccs
619 {
620 unsigned int dfsnum;
621 unsigned int low;
622 };
623 struct worklist
624 {
625 tree expr;
626 sccs *from_state;
627 sccs *cstate;
628 bool ref_p;
629 bool this_ref_p;
630 };
631 /* Maximum index of scc stack containing a local tree. */
632 int max_local_entry;
633
634 static int scc_entry_compare (const void *, const void *);
635
636 void DFS_write_tree_body (struct output_block *ob,
637 tree expr, sccs *expr_state, bool ref_p);
638
639 void DFS_write_tree (struct output_block *ob, sccs *from_state,
640 tree expr, bool ref_p, bool this_ref_p);
641
642 hashval_t
643 hash_scc (struct output_block *ob, unsigned first, unsigned size,
644 bool ref_p, bool this_ref_p);
645
646 hash_map<tree, sccs *> sccstate;
647 auto_vec<worklist, 32> worklist_vec;
648 struct obstack sccstate_obstack;
649};
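
/* The walker above is essentially an iterative variant of Tarjan's SCC
   algorithm: sccstate records the DFS number and low-link of every visited
   tree, worklist_vec replaces the recursion stack, and a strongly connected
   component is popped off sccstack whenever a node's low-link equals its
   own DFS number.  */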
650
/* Return true if T cannot be merged with a structurally identical tree in
   another translation unit.  During stream-out this information is
   propagated to all trees referring to T, and they are then streamed
   without the additional information needed by the tree merging in
   lto-common.cc (in particular, SCC hash codes are not streamed).

   TRANSLATION_UNIT_DECL is handled specially since references to it do
   not make other trees local as well.  */
659
660static bool
661local_tree_p (tree t)
662{
663 switch (TREE_CODE (t))
664 {
665 case LABEL_DECL:
666 return true;
667 case NAMESPACE_DECL:
668 return !DECL_NAME (t);
669 case VAR_DECL:
670 case FUNCTION_DECL:
671 return !TREE_PUBLIC (t) && !DECL_EXTERNAL (t);
672 case RECORD_TYPE:
673 case UNION_TYPE:
674 case ENUMERAL_TYPE:
675 /* Anonymous namespace types are local.
676 Only work hard for main variants;
677 variant types will inherit locality. */
678 return TYPE_MAIN_VARIANT (t) == t
679 && odr_type_p (t) && type_with_linkage_p (t)
680 && type_in_anonymous_namespace_p (t);
681 default:
682 return false;
683 }
684}
685
686/* Emit the physical representation of tree node EXPR to output block OB,
687 using depth-first search on the subgraph. If THIS_REF_P is true, the
688 leaves of EXPR are emitted as references via lto_output_tree_ref.
689 REF_P is used for streaming siblings of EXPR. If SINGLE_P is true,
690 this is for a rewalk of a single leaf SCC. */
691
692DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
693 bool single_p)
694{
695 unsigned int next_dfs_num = 1;
696
697 max_local_entry = -1;
698 gcc_obstack_init (&sccstate_obstack);
699 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
700 while (!worklist_vec.is_empty ())
701 {
702 worklist &w = worklist_vec.last ();
703 expr = w.expr;
704 sccs *from_state = w.from_state;
705 sccs *cstate = w.cstate;
706 ref_p = w.ref_p;
707 this_ref_p = w.this_ref_p;
708 if (cstate == NULL)
709 {
	  sccs **slot = &sccstate.get_or_insert (expr);
711 cstate = *slot;
712 if (cstate)
713 {
714 gcc_checking_assert (from_state);
715 if (cstate->dfsnum < from_state->dfsnum)
716 from_state->low = MIN (cstate->dfsnum, from_state->low);
717 worklist_vec.pop ();
718 continue;
719 }
720
	  scc_entry e = { expr, 0 };
	  /* Not yet visited.  DFS recurse and push it onto the stack.  */
	  *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
	  if (ob->local_trees && local_tree_p (expr))
	    max_local_entry = sccstack.length ();
	  sccstack.safe_push (e);
727 cstate->dfsnum = next_dfs_num++;
728 cstate->low = cstate->dfsnum;
729 w.cstate = cstate;
730
731 if (TREE_CODE (expr) == INTEGER_CST
732 && !TREE_OVERFLOW (expr))
	    DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
734 else
735 {
	      DFS_write_tree_body (ob, expr, cstate, ref_p);
737
738 /* Walk any LTO-specific edges. */
739 if (DECL_P (expr)
740 && TREE_CODE (expr) != FUNCTION_DECL
741 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
742 {
743 /* Handle DECL_INITIAL for symbols. */
		  tree initial
		    = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
						expr);
		  DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
748 }
749 }
750 continue;
751 }
752
753 /* See if we found an SCC. */
754 if (cstate->low == cstate->dfsnum)
755 {
756 unsigned first, size;
757 tree x;
758
759 /* If we are re-walking a single leaf SCC just pop it,
760 let earlier worklist item access the sccstack. */
761 if (single_p)
762 {
763 worklist_vec.pop ();
764 continue;
765 }
766
767 /* Pop the SCC and compute its size. */
768 first = sccstack.length ();
769 do
770 {
771 x = sccstack[--first].t;
772 }
773 while (x != expr);
774 size = sccstack.length () - first;
775
776 /* No need to compute hashes for LTRANS units, we don't perform
777 any merging there. */
778 hashval_t scc_hash = 0;
779 unsigned scc_entry_len = 0;
780 bool local_to_unit = !ob->local_trees
781 || max_local_entry >= (int)first;
782
783 /* Remember that trees are local so info gets propagated to other
784 SCCs. */
785 if (local_to_unit && ob->local_trees)
786 {
787 for (unsigned i = 0; i < size; ++i)
	      ob->local_trees->add (sccstack[first + i].t);
789 }
790
	  /* As a special case do not stream TRANSLATION_UNIT_DECL as a shared
	     tree.  We cannot mark it local because references to it do not
	     make other trees local (all global decls refer to it via
	     CONTEXT).  */
795 if (size == 1
796 && TREE_CODE (sccstack[first].t) == TRANSLATION_UNIT_DECL)
797 local_to_unit = true;
798
799 if (!local_to_unit)
800 {
801 scc_hash = hash_scc (ob, first, size, ref_p, this_ref_p);
802
803 /* Put the entries with the least number of collisions first. */
804 unsigned entry_start = 0;
805 scc_entry_len = size + 1;
806 for (unsigned i = 0; i < size;)
807 {
808 unsigned from = i;
809 for (i = i + 1; i < size
810 && (sccstack[first + i].hash
811 == sccstack[first + from].hash); ++i)
812 ;
813 if (i - from < scc_entry_len)
814 {
815 scc_entry_len = i - from;
816 entry_start = from;
817 }
818 }
819 for (unsigned i = 0; i < scc_entry_len; ++i)
	      std::swap (sccstack[first + i],
			 sccstack[first + entry_start + i]);
822
823 /* We already sorted SCC deterministically in hash_scc. */
824
	      /* Check that we have only one SCC.
		 Naturally we may have conflicts if the hash function is not
		 strong enough.  Let's see how far this gets.  */
828 gcc_checking_assert (scc_entry_len == 1);
829 }
830
831 worklist_vec.pop ();
832
833 unsigned int prev_size = ob->main_stream->total_size;
834
835 /* Only global decl sections are considered by tree merging. */
836 if (ob->section_type != LTO_section_decls)
837 {
	      /* If this is the original tree we stream and it forms an SCC
		 by itself, then we do not need to stream the SCC at all.  */
840 if (worklist_vec.is_empty () && first == 0 && size == 1)
841 return;
842 if (streamer_dump_file)
843 {
	      fprintf (streamer_dump_file,
		       " Start of LTO_trees of size %i\n", size);
	    }
	  streamer_write_record_start (ob, LTO_trees);
848 streamer_write_uhwi (ob, size);
849 }
850 /* Write LTO_tree_scc if tree merging is going to be performed. */
851 else if (!local_to_unit
		   /* These are special since sharing is not done by the tree
		      merging machinery.  We cannot special-case them earlier
		      because we still need to compute the hash for further
		      sharing of trees referring to them.  */
856 && (size != 1
857 || (TREE_CODE (sccstack[first].t) != IDENTIFIER_NODE
858 && (TREE_CODE (sccstack[first].t) != INTEGER_CST
859 || TREE_OVERFLOW (sccstack[first].t)))))
860
861 {
862 gcc_checking_assert (ob->section_type == LTO_section_decls);
863 if (streamer_dump_file)
864 {
	      fprintf (streamer_dump_file,
		       " Start of LTO_tree_scc of size %i\n", size);
	    }
	  streamer_write_record_start (ob, LTO_tree_scc);
	  /* In the vast majority of cases scc_entry_len is 1 and size is a
	     small integer.  Use the extra bit of size to stream info about
	     the exceptions.  */
872 streamer_write_uhwi (ob, size * 2 + (scc_entry_len != 1));
873 if (scc_entry_len != 1)
874 streamer_write_uhwi (ob, scc_entry_len);
875 streamer_write_uhwi (ob, scc_hash);
876 }
877 /* Non-trivial SCCs must be packed to trees blocks so forward
878 references work correctly. */
879 else if (size != 1)
880 {
881 if (streamer_dump_file)
882 {
	      fprintf (streamer_dump_file,
		       " Start of LTO_trees of size %i\n", size);
	    }
	  streamer_write_record_start (ob, LTO_trees);
887 streamer_write_uhwi (ob, size);
888 }
889 else if (streamer_dump_file)
890 {
	  fprintf (streamer_dump_file, " Streaming single tree\n");
892 }
893
894 /* Write size-1 SCCs without wrapping them inside SCC bundles.
895 All INTEGER_CSTs need to be handled this way as we need
896 their type to materialize them. Also builtins are handled
897 this way. */
898 if (size == 1)
	lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
900 else
901 {
902
903 /* Write all headers and populate the streamer cache. */
904 for (unsigned i = 0; i < size; ++i)
905 {
906 hashval_t hash = sccstack[first+i].hash;
907 tree t = sccstack[first+i].t;
908 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
909 t, hash, NULL);
910 gcc_assert (!exists_p);
911
	      if (!lto_is_streamable (t))
913 internal_error ("tree code %qs is not supported "
914 "in LTO streams",
915 get_tree_code_name (TREE_CODE (t)));
916
917 /* Write the header, containing everything needed to
918 materialize EXPR on the reading side. */
919 streamer_write_tree_header (ob, t);
920 }
921
922 /* Write the bitpacks and tree references. */
923 for (unsigned i = 0; i < size; ++i)
	    lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
925 }
926 if (streamer_dump_file)
	fprintf (streamer_dump_file, " %u bytes\n",
928 ob->main_stream->total_size - prev_size);
929
930 /* Finally truncate the vector. */
      sccstack.truncate (first);
932 if ((int)first <= max_local_entry)
933 max_local_entry = first - 1;
934
935 if (from_state)
936 from_state->low = MIN (from_state->low, cstate->low);
937 continue;
938 }
939
940 gcc_checking_assert (from_state);
941 from_state->low = MIN (from_state->low, cstate->low);
942 if (cstate->dfsnum < from_state->dfsnum)
943 from_state->low = MIN (cstate->dfsnum, from_state->low);
944 worklist_vec.pop ();
945 }
946}
947
948DFS::~DFS ()
949{
950 obstack_free (&sccstate_obstack, NULL);
951}
952
953/* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
954 DFS recurse for all tree edges originating from it. */
955
956void
957DFS::DFS_write_tree_body (struct output_block *ob,
958 tree expr, sccs *expr_state, bool ref_p)
959{
960#define DFS_follow_tree_edge(DEST) \
961 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
962
963 enum tree_code code;
964
965 code = TREE_CODE (expr);
966
967 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
968 {
969 if (TREE_CODE (expr) != IDENTIFIER_NODE)
970 DFS_follow_tree_edge (TREE_TYPE (expr));
971 }
972
973 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
974 {
      unsigned int count = vector_cst_encoded_nelts (expr);
976 for (unsigned int i = 0; i < count; ++i)
977 DFS_follow_tree_edge (VECTOR_CST_ENCODED_ELT (expr, i));
978 }
979
980 if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
981 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
982 DFS_follow_tree_edge (POLY_INT_CST_COEFF (expr, i));
983
984 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
985 {
986 DFS_follow_tree_edge (TREE_REALPART (expr));
987 DFS_follow_tree_edge (TREE_IMAGPART (expr));
988 }
989
990 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
991 {
992 /* Drop names that were created for anonymous entities. */
993 if (DECL_NAME (expr)
994 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
995 && IDENTIFIER_ANON_P (DECL_NAME (expr)))
996 ;
997 else
998 DFS_follow_tree_edge (DECL_NAME (expr));
999 if (TREE_CODE (expr) != TRANSLATION_UNIT_DECL
1000 && ! DECL_CONTEXT (expr))
1001 DFS_follow_tree_edge ((*all_translation_units)[0]);
1002 else
1003 DFS_follow_tree_edge (DECL_CONTEXT (expr));
1004 }
1005
1006 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1007 {
1008 DFS_follow_tree_edge (DECL_SIZE (expr));
1009 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
1010
1011 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
1012 special handling in LTO, it must be handled by streamer hooks. */
1013
1014 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
1015
1016 /* We use DECL_ABSTRACT_ORIGIN == error_mark_node to mark
1017 declarations which should be eliminated by decl merging. Be sure none
1018 leaks to this point. */
1019 gcc_assert (DECL_ABSTRACT_ORIGIN (expr) != error_mark_node);
1020 DFS_follow_tree_edge (DECL_ABSTRACT_ORIGIN (expr));
1021
1022 if ((VAR_P (expr)
1023 || TREE_CODE (expr) == PARM_DECL)
1024 && DECL_HAS_VALUE_EXPR_P (expr))
1025 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
1026 if (VAR_P (expr)
1027 && DECL_HAS_DEBUG_EXPR_P (expr))
1028 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
1029 }
1030
1031 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1032 {
1033 /* Make sure we don't inadvertently set the assembler name. */
1034 if (DECL_ASSEMBLER_NAME_SET_P (expr))
1035 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
1036 }
1037
1038 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1039 {
1040 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
1041 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
1042 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
1043 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
1044 gcc_checking_assert (!DECL_FCONTEXT (expr));
1045 }
1046
1047 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1048 {
1049 gcc_checking_assert (DECL_VINDEX (expr) == NULL);
1050 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
1051 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
1052 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
1053 }
1054
1055 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1056 {
1057 DFS_follow_tree_edge (TYPE_SIZE (expr));
1058 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
1059 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
1060 DFS_follow_tree_edge (TYPE_NAME (expr));
1061 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
1062 reconstructed during fixup. */
1063 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
1064 during fixup. */
1065 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
1066 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
1067 /* TYPE_CANONICAL is re-computed during type merging, so no need
1068 to follow it here. */
1069 /* Do not stream TYPE_STUB_DECL; it is not needed by LTO but currently
1070 it cannot be freed by free_lang_data without triggering ICEs in
1071 langhooks. */
1072 }
1073
1074 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1075 {
1076 if (TREE_CODE (expr) == ARRAY_TYPE)
1077 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
1078 else if (RECORD_OR_UNION_TYPE_P (expr))
1079 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
1080 DFS_follow_tree_edge (t);
1081 else if (FUNC_OR_METHOD_TYPE_P (expr))
1082 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
1083
1084 if (!POINTER_TYPE_P (expr))
1085 DFS_follow_tree_edge (TYPE_MIN_VALUE_RAW (expr));
1086 DFS_follow_tree_edge (TYPE_MAX_VALUE_RAW (expr));
1087 }
1088
1089 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1090 {
1091 DFS_follow_tree_edge (TREE_PURPOSE (expr));
1092 DFS_follow_tree_edge (TREE_VALUE (expr));
1093 DFS_follow_tree_edge (TREE_CHAIN (expr));
1094 }
1095
1096 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1097 {
1098 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
1099 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
1100 }
1101
1102 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1103 {
1104 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
1105 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
1106 DFS_follow_tree_edge (TREE_BLOCK (expr));
1107 }
1108
1109 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
1110 {
1111 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
1112 {
1113 /* We would have to stream externals in the block chain as
1114 non-references but we should have dropped them in
1115 free-lang-data. */
1116 gcc_assert (!VAR_OR_FUNCTION_DECL_P (t) || !DECL_EXTERNAL (t));
1117 DFS_follow_tree_edge (t);
1118 }
1119
1120 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
1121 DFS_follow_tree_edge (BLOCK_ABSTRACT_ORIGIN (expr));
1122
1123 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
1124 information for early inlined BLOCKs so drop it on the floor instead
1125 of ICEing in dwarf2out.cc. */
1126
1127 /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
1128 streaming time. */
1129
1130 /* Do not output BLOCK_SUBBLOCKS. Instead on streaming-in this
1131 list is re-constructed from BLOCK_SUPERCONTEXT. */
1132 }
1133
1134 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1135 {
1136 unsigned i;
1137 tree t;
1138
1139 /* Note that the number of BINFO slots has already been emitted in
1140 EXPR's header (see streamer_write_tree_header) because this length
1141 is needed to build the empty BINFO node on the reader side. */
1142 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
1143 DFS_follow_tree_edge (t);
1144 DFS_follow_tree_edge (BINFO_OFFSET (expr));
1145 DFS_follow_tree_edge (BINFO_VTABLE (expr));
1146
1147 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX,
1148 BINFO_BASE_ACCESSES and BINFO_VPTR_INDEX; these are used
1149 by C++ FE only. */
1150 }
1151
1152 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1153 {
1154 unsigned i;
1155 tree index, value;
1156
1157 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
1158 {
1159 DFS_follow_tree_edge (index);
1160 DFS_follow_tree_edge (value);
1161 }
1162 }
1163
1164 if (code == OMP_CLAUSE)
1165 {
1166 int i;
1167 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
1168 DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
1169 DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
1170 }
1171
1172#undef DFS_follow_tree_edge
1173}
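
/* The set of edges followed above has to stay in sync with what
   streamer_write_tree_body later streams for the same node; a tree that is
   streamed but was never visited here shows up as the !in_dfs_walk
   assertion in lto_output_tree.  */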
1174
/* Return a hash value for the tree T.
   CACHE holds hash values of trees outside the current SCC.  MAP, if
   non-NULL, may hold hash values of trees inside the current SCC.  */
1178
1179static hashval_t
1180hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
1181{
1182 inchash::hash hstate;
1183
1184#define visit(SIBLING) \
1185 do { \
1186 unsigned ix; \
1187 if (!SIBLING) \
1188 hstate.add_int (0); \
1189 else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
1190 hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
1191 else if (map) \
1192 hstate.add_int (*map->get (SIBLING)); \
1193 else \
1194 hstate.add_int (1); \
1195 } while (0)
1196
1197 /* Hash TS_BASE. */
1198 enum tree_code code = TREE_CODE (t);
  hstate.add_int (code);
1200 if (!TYPE_P (t))
1201 {
1202 hstate.add_flag (TREE_SIDE_EFFECTS (t));
1203 hstate.add_flag (TREE_CONSTANT (t));
1204 hstate.add_flag (TREE_READONLY (t));
1205 hstate.add_flag (TREE_PUBLIC (t));
1206 }
1207 hstate.add_flag (TREE_ADDRESSABLE (t));
1208 hstate.add_flag (TREE_THIS_VOLATILE (t));
1209 if (DECL_P (t))
1210 hstate.add_flag (DECL_UNSIGNED (t));
1211 else if (TYPE_P (t))
1212 hstate.add_flag (TYPE_UNSIGNED (t));
1213 if (TYPE_P (t))
1214 hstate.add_flag (TYPE_ARTIFICIAL (t));
1215 else
1216 hstate.add_flag (TREE_NO_WARNING (t));
1217 hstate.add_flag (TREE_NOTHROW (t));
1218 hstate.add_flag (TREE_STATIC (t));
1219 hstate.add_flag (TREE_PROTECTED (t));
1220 hstate.add_flag (TREE_DEPRECATED (t));
1221 if (code != TREE_BINFO)
1222 hstate.add_flag (TREE_PRIVATE (t));
1223 if (TYPE_P (t))
1224 {
1225 hstate.add_flag (AGGREGATE_TYPE_P (t)
1226 ? TYPE_REVERSE_STORAGE_ORDER (t) : TYPE_SATURATING (t));
1227 hstate.add_flag (TYPE_ADDR_SPACE (t));
1228 }
1229 else if (code == SSA_NAME)
1230 hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
1231 hstate.commit_flag ();
1232
1233 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
    hstate.add_wide_int (wi::to_widest (t));
1235
1236 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1237 {
1238 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
      hstate.add_flag (r.cl);
      hstate.add_flag (r.sign);
      hstate.add_flag (r.signalling);
      hstate.add_flag (r.canonical);
      hstate.commit_flag ();
      hstate.add_int (r.uexp);
      hstate.add (r.sig, sizeof (r.sig));
1246 }
1247
1248 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1249 {
1250 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
      hstate.add_int (f.mode);
      hstate.add_int (f.data.low);
      hstate.add_int (f.data.high);
1254 }
1255
1256 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1257 {
1258 hstate.add_hwi (DECL_MODE (t));
1259 hstate.add_flag (DECL_NONLOCAL (t));
1260 hstate.add_flag (DECL_VIRTUAL_P (t));
1261 hstate.add_flag (DECL_IGNORED_P (t));
1262 hstate.add_flag (DECL_ABSTRACT_P (t));
1263 hstate.add_flag (DECL_ARTIFICIAL (t));
1264 hstate.add_flag (DECL_USER_ALIGN (t));
1265 hstate.add_flag (DECL_PRESERVE_P (t));
1266 hstate.add_flag (DECL_EXTERNAL (t));
1267 hstate.add_flag (DECL_NOT_GIMPLE_REG_P (t));
1268 hstate.commit_flag ();
1269 hstate.add_int (DECL_ALIGN (t));
1270 if (code == LABEL_DECL)
1271 {
1272 hstate.add_int (EH_LANDING_PAD_NR (t));
1273 hstate.add_int (LABEL_DECL_UID (t));
1274 }
1275 else if (code == FIELD_DECL)
1276 {
1277 hstate.add_flag (DECL_PACKED (t));
1278 hstate.add_flag (DECL_NONADDRESSABLE_P (t));
1279 hstate.add_flag (DECL_PADDING_P (t));
1280 if (DECL_BIT_FIELD (t))
1281 hstate.add_flag (DECL_FIELD_CXX_ZERO_WIDTH_BIT_FIELD (t));
1282 else
1283 hstate.add_flag (DECL_FIELD_ABI_IGNORED (t));
1284 hstate.add_int (DECL_OFFSET_ALIGN (t));
1285 }
1286 else if (code == VAR_DECL)
1287 {
1288 hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
1289 hstate.add_flag (DECL_NONLOCAL_FRAME (t));
1290 }
1291 if (code == RESULT_DECL
1292 || code == PARM_DECL
1293 || code == VAR_DECL)
1294 {
1295 hstate.add_flag (DECL_BY_REFERENCE (t));
1296 if (code == VAR_DECL
1297 || code == PARM_DECL)
1298 hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
1299 }
1300 hstate.commit_flag ();
1301 }
1302
1303 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1304 hstate.add_int (DECL_REGISTER (t));
1305
1306 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1307 {
1308 hstate.add_flag (DECL_COMMON (t));
1309 hstate.add_flag (DECL_DLLIMPORT_P (t));
1310 hstate.add_flag (DECL_WEAK (t));
1311 hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
1312 hstate.add_flag (DECL_COMDAT (t));
1313 hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
1314 hstate.add_int (DECL_VISIBILITY (t));
1315 if (code == VAR_DECL)
1316 {
1317 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1318 hstate.add_flag (DECL_HARD_REGISTER (t));
1319 hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
1320 }
1321 if (TREE_CODE (t) == FUNCTION_DECL)
1322 {
1323 hstate.add_flag (DECL_FINAL_P (t));
1324 hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
1325 hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
1326 }
1327 hstate.commit_flag ();
1328 }
1329
1330 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1331 {
1332 hstate.add_int (DECL_BUILT_IN_CLASS (t));
1333 hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
1334 hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
1335 hstate.add_flag (FUNCTION_DECL_DECL_TYPE (t));
1336 hstate.add_flag (DECL_UNINLINABLE (t));
1337 hstate.add_flag (DECL_POSSIBLY_INLINED (t));
1338 hstate.add_flag (DECL_IS_NOVOPS (t));
1339 hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
1340 hstate.add_flag (DECL_IS_MALLOC (t));
1341 hstate.add_flag (DECL_DECLARED_INLINE_P (t));
1342 hstate.add_flag (DECL_STATIC_CHAIN (t));
1343 hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
1344 hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
1345 hstate.add_flag (DECL_NO_LIMIT_STACK (t));
1346 hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
1347 hstate.add_flag (DECL_PURE_P (t));
1348 hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
1349 hstate.commit_flag ();
1350 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
1351 hstate.add_int (DECL_UNCHECKED_FUNCTION_CODE (t));
1352 }
1353
1354 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1355 {
1356 hstate.add_hwi (TYPE_MODE (t));
      /* TYPE_NO_FORCE_BLK is private to stor-layout and needs
	 no streaming.  */
1359 hstate.add_flag (TYPE_PACKED (t));
1360 hstate.add_flag (TYPE_RESTRICT (t));
1361 hstate.add_flag (TYPE_USER_ALIGN (t));
1362 hstate.add_flag (TYPE_READONLY (t));
1363 if (RECORD_OR_UNION_TYPE_P (t))
1364 {
1365 hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
1366 hstate.add_flag (TYPE_FINAL_P (t));
1367 hstate.add_flag (TYPE_CXX_ODR_P (t));
1368 }
1369 else if (code == ARRAY_TYPE)
1370 hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
1371 if (code == ARRAY_TYPE || code == INTEGER_TYPE)
1372 hstate.add_flag (TYPE_STRING_FLAG (t));
1373 if (AGGREGATE_TYPE_P (t))
1374 hstate.add_flag (TYPE_TYPELESS_STORAGE (t));
1375 hstate.commit_flag ();
1376 hstate.add_int (TYPE_PRECISION_RAW (t));
1377 hstate.add_int (TYPE_ALIGN (t));
1378 hstate.add_int (TYPE_EMPTY_P (t));
1379 }
1380
1381 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1382 hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
		strlen (TRANSLATION_UNIT_LANGUAGE (t)));
1384
1385 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
1386 /* We don't stream these when passing things to a different target. */
1387 && !lto_stream_offload_p)
    hstate.add_hwi (cl_target_option_hash (TREE_TARGET_OPTION (t)));
1389
1390 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
    hstate.add_hwi (cl_optimization_hash (TREE_OPTIMIZATION (t)));
1392
1393 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1394 hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
1395
1396 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1397 hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
1398
1399 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1400 {
1401 if (code != IDENTIFIER_NODE)
1402 visit (TREE_TYPE (t));
1403 }
1404
1405 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1406 {
1407 unsigned int count = vector_cst_encoded_nelts (t);
1408 for (unsigned int i = 0; i < count; ++i)
1409 visit (VECTOR_CST_ENCODED_ELT (t, i));
1410 }
1411
1412 if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
1413 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1414 visit (POLY_INT_CST_COEFF (t, i));
1415
1416 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1417 {
1418 visit (TREE_REALPART (t));
1419 visit (TREE_IMAGPART (t));
1420 }
1421
1422 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1423 {
1424 /* Drop names that were created for anonymous entities. */
1425 if (DECL_NAME (t)
1426 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
1427 && IDENTIFIER_ANON_P (DECL_NAME (t)))
1428 ;
1429 else
1430 visit (DECL_NAME (t));
1431 if (DECL_FILE_SCOPE_P (t))
1432 ;
1433 else
1434 visit (DECL_CONTEXT (t));
1435 }
1436
1437 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1438 {
1439 visit (DECL_SIZE (t));
1440 visit (DECL_SIZE_UNIT (t));
1441 visit (DECL_ATTRIBUTES (t));
1442 if ((code == VAR_DECL
1443 || code == PARM_DECL)
1444 && DECL_HAS_VALUE_EXPR_P (t))
1445 visit (DECL_VALUE_EXPR (t));
1446 if (code == VAR_DECL
1447 && DECL_HAS_DEBUG_EXPR_P (t))
1448 visit (DECL_DEBUG_EXPR (t));
1449 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
1450 be able to call get_symbol_initial_value. */
1451 }
1452
1453 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1454 {
1455 if (DECL_ASSEMBLER_NAME_SET_P (t))
1456 visit (DECL_ASSEMBLER_NAME (t));
1457 }
1458
1459 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1460 {
1461 visit (DECL_FIELD_OFFSET (t));
1462 visit (DECL_BIT_FIELD_TYPE (t));
1463 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1464 visit (DECL_FIELD_BIT_OFFSET (t));
1465 }
1466
1467 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1468 {
1469 visit (DECL_FUNCTION_PERSONALITY (t));
1470 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
1471 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1472 }
1473
1474 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1475 {
1476 visit (TYPE_SIZE (t));
1477 visit (TYPE_SIZE_UNIT (t));
1478 visit (TYPE_ATTRIBUTES (t));
1479 visit (TYPE_NAME (t));
1480 visit (TYPE_MAIN_VARIANT (t));
1481 if (TYPE_FILE_SCOPE_P (t))
1482 ;
1483 else
1484 visit (TYPE_CONTEXT (t));
1485 }
1486
1487 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1488 {
1489 if (code == ARRAY_TYPE)
1490 visit (TYPE_DOMAIN (t));
1491 else if (RECORD_OR_UNION_TYPE_P (t))
1492 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1493 visit (f);
1494 else if (code == FUNCTION_TYPE
1495 || code == METHOD_TYPE)
1496 visit (TYPE_ARG_TYPES (t));
1497 if (!POINTER_TYPE_P (t))
1498 visit (TYPE_MIN_VALUE_RAW (t));
1499 visit (TYPE_MAX_VALUE_RAW (t));
1500 }
1501
1502 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1503 {
1504 visit (TREE_PURPOSE (t));
1505 visit (TREE_VALUE (t));
1506 visit (TREE_CHAIN (t));
1507 }
1508
1509 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1510 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1511 visit (TREE_VEC_ELT (t, i));
1512
1513 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1514 {
1515 hstate.add_hwi (TREE_OPERAND_LENGTH (t));
1516 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1517 visit (TREE_OPERAND (t, i));
1518 }
1519
1520 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1521 {
1522 unsigned i;
1523 tree b;
1524 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1525 visit (b);
1526 visit (BINFO_OFFSET (t));
1527 visit (BINFO_VTABLE (t));
1528 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1529 BINFO_BASE_ACCESSES and BINFO_VPTR_INDEX; these are used
1530 by C++ FE only. */
1531 }
1532
1533 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1534 {
1535 unsigned i;
1536 tree index, value;
1537 hstate.add_hwi (CONSTRUCTOR_NELTS (t));
1538 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1539 {
1540 visit (index);
1541 visit (value);
1542 }
1543 }
1544
1545 if (code == OMP_CLAUSE)
1546 {
1547 int i;
1548 HOST_WIDE_INT val;
1549
1550 hstate.add_hwi (OMP_CLAUSE_CODE (t));
1551 switch (OMP_CLAUSE_CODE (t))
1552 {
1553 case OMP_CLAUSE_DEFAULT:
1554 val = OMP_CLAUSE_DEFAULT_KIND (t);
1555 break;
1556 case OMP_CLAUSE_SCHEDULE:
1557 val = OMP_CLAUSE_SCHEDULE_KIND (t);
1558 break;
1559 case OMP_CLAUSE_DEPEND:
1560 val = OMP_CLAUSE_DEPEND_KIND (t);
1561 break;
1562 case OMP_CLAUSE_DOACROSS:
1563 val = OMP_CLAUSE_DOACROSS_KIND (t);
1564 break;
1565 case OMP_CLAUSE_MAP:
1566 val = OMP_CLAUSE_MAP_KIND (t);
1567 break;
1568 case OMP_CLAUSE_PROC_BIND:
1569 val = OMP_CLAUSE_PROC_BIND_KIND (t);
1570 break;
1571 case OMP_CLAUSE_REDUCTION:
1572 case OMP_CLAUSE_TASK_REDUCTION:
1573 case OMP_CLAUSE_IN_REDUCTION:
1574 val = OMP_CLAUSE_REDUCTION_CODE (t);
1575 break;
1576 default:
1577 val = 0;
1578 break;
1579 }
      hstate.add_hwi (val);
1581 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1582 visit (OMP_CLAUSE_OPERAND (t, i));
1583 visit (OMP_CLAUSE_CHAIN (t));
1584 }
1585
1586 return hstate.end ();
1587
1588#undef visit
1589}
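
/* The fields hashed above are meant to mirror what is streamed and what
   the tree merging code in lto-common.cc compares: structurally equal
   trees from different translation units need to receive equal hash values
   here for their SCCs to be merged at WPA time.  */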
1590
1591/* Compare two SCC entries by their hash value for qsorting them. */
1592
1593int
1594DFS::scc_entry_compare (const void *p1_, const void *p2_)
1595{
1596 const scc_entry *p1 = (const scc_entry *) p1_;
1597 const scc_entry *p2 = (const scc_entry *) p2_;
1598 if (p1->hash < p2->hash)
1599 return -1;
1600 else if (p1->hash > p2->hash)
1601 return 1;
1602 return 0;
1603}
1604
1605/* Return a hash value for the SCC on the SCC stack from FIRST with SIZE.
1606 THIS_REF_P and REF_P are as passed to lto_output_tree for FIRST. */
1607
1608hashval_t
1609DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size,
1610 bool ref_p, bool this_ref_p)
1611{
1612 unsigned int last_classes = 0, iterations = 0;
1613
1614 /* Compute hash values for the SCC members. */
1615 for (unsigned i = 0; i < size; ++i)
1616 sccstack[first+i].hash
      = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);
1618
1619 if (size == 1)
1620 return sccstack[first].hash;
1621
1622 /* We aim to get unique hash for every tree within SCC and compute hash value
1623 of the whole SCC by combining all values together in a stable (entry-point
1624 independent) order. This guarantees that the same SCC regions within
1625 different translation units will get the same hash values and therefore
1626 will be merged at WPA time.
1627
1628 Often the hashes are already unique. In that case we compute the SCC hash
1629 by combining individual hash values in an increasing order.
1630
1631 If there are duplicates, we seek at least one tree with unique hash (and
1632 pick one with minimal hash and this property). Then we obtain a stable
1633 order by DFS walk starting from this unique tree and then use the index
1634 within this order to make individual hash values unique.
1635
1636 If there is no tree with unique hash, we iteratively propagate the hash
1637 values across the internal edges of SCC. This usually quickly leads
1638 to unique hashes. Consider, for example, an SCC containing two pointers
1639 that are identical except for the types they point to and assume that
1640 these types are also part of the SCC. The propagation will add the
1641 points-to type information into their hash values. */
1642 do
1643 {
1644 /* Sort the SCC so we can easily check for uniqueness. */
1645 qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1646
1647 unsigned int classes = 1;
1648 int firstunique = -1;
1649
1650 /* Find the tree with lowest unique hash (if it exists) and compute
1651 the number of equivalence classes. */
1652 if (sccstack[first].hash != sccstack[first+1].hash)
1653 firstunique = 0;
1654 for (unsigned i = 1; i < size; ++i)
1655 if (sccstack[first+i-1].hash != sccstack[first+i].hash)
1656 {
1657 classes++;
1658 if (firstunique == -1
1659 && (i == size - 1
1660 || sccstack[first+i+1].hash != sccstack[first+i].hash))
1661 firstunique = i;
1662 }
1663
1664 /* If we found a tree with unique hash, stop the iteration. */
1665 if (firstunique != -1
1666 /* Also terminate if we run out of iterations or if the number of
1667 equivalence classes is no longer increasing.
1668 For example a cyclic list of trees that are all equivalent will
1669 never have unique entry point; we however do not build such SCCs
1670 in our IL. */
1671 || classes <= last_classes || iterations > 16)
1672 {
1673 hashval_t scc_hash;
1674
1675 /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
1676 starting from FIRSTUNIQUE to obtain a stable order. */
1677 if (classes != size && firstunique != -1)
1678 {
1679 hash_map <tree, hashval_t> map(size*2);
1680
1681 /* Store hash values into a map, so we can associate them with
1682 the reordered SCC. */
1683 for (unsigned i = 0; i < size; ++i)
		map.put (sccstack[first+i].t, sccstack[first+i].hash);
1685
1686 DFS again (ob, sccstack[first+firstunique].t, ref_p, this_ref_p,
1687 true);
1688 gcc_assert (again.sccstack.length () == size);
1689
	      memcpy (sccstack.address () + first,
		      again.sccstack.address (),
		      sizeof (scc_entry) * size);
1693
1694 /* Update hash values of individual members by hashing in the
1695 index within the stable order. This ensures uniqueness.
1696 Also compute the SCC hash by mixing in all hash values in
1697 the stable order we obtained. */
	      sccstack[first].hash = *map.get (sccstack[first].t);
1699 scc_hash = sccstack[first].hash;
1700 for (unsigned i = 1; i < size; ++i)
1701 {
1702 sccstack[first+i].hash
		    = iterative_hash_hashval_t (i,
						*map.get (sccstack[first+i].t));
		  scc_hash
		    = iterative_hash_hashval_t (scc_hash,
						sccstack[first+i].hash);
1708 }
1709 }
1710 /* If we got a unique hash value for each tree, then sort already
1711 ensured entry-point independent order. Only compute the final
1712 SCC hash.
1713
1714 If we failed to find the unique entry point, we go by the same
1715 route. We will eventually introduce unwanted hash conflicts. */
1716 else
1717 {
1718 scc_hash = sccstack[first].hash;
1719 for (unsigned i = 1; i < size; ++i)
1720 scc_hash
		  = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);
1722
	      /* We cannot guarantee 100% that hash conflicts will never make
		 it impossible to find a unique entry point.  This however
		 should be an extremely rare case.  ICE for now so possible
		 issues are found and evaluated.  */
1727 gcc_checking_assert (classes == size);
1728 }
1729
1730 /* To avoid conflicts across SCCs, iteratively hash the whole SCC
1731 hash into the hash of each element. */
1732 for (unsigned i = 0; i < size; ++i)
1733 sccstack[first+i].hash
	    = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
1735 return scc_hash;
1736 }
1737
1738 last_classes = classes;
1739 iterations++;
1740
1741 /* We failed to identify the entry point; propagate hash values across
1742 the edges. */
1743 hash_map <tree, hashval_t> map(size*2);
1744
1745 for (unsigned i = 0; i < size; ++i)
	map.put (sccstack[first+i].t, sccstack[first+i].hash);
1747
1748 for (unsigned i = 0; i < size; i++)
1749 sccstack[first+i].hash
	  = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
1751 }
1752 while (true);
1753}
1754
1755/* DFS walk EXPR and stream SCCs of tree bodies if they are not
1756 already in the streamer cache. Main routine called for
1757 each visit of EXPR. */
1758
1759void
1760DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
1761 tree expr, bool ref_p, bool this_ref_p)
1762{
1763 /* Handle special cases. */
1764 if (expr == NULL_TREE)
1765 return;
1766
1767 /* Do not DFS walk into indexable trees. */
  if (this_ref_p && tree_is_indexable (expr))
1769 return;
1770
1771 /* Check if we already streamed EXPR. */
1772 if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
1773 {
1774 /* A reference to a local tree makes the entry local as well. We always
1775 process the top-of-stack entry, so set max to the number of stack entries - 1. */
1776 if (ob->local_trees
1777 && ob->local_trees->contains (k: expr))
1778 max_local_entry = sccstack.length () - 1;
1779 return;
1780 }
1781
1782 worklist w;
1783 w.expr = expr;
1784 w.from_state = from_state;
1785 w.cstate = NULL;
1786 w.ref_p = ref_p;
1787 w.this_ref_p = this_ref_p;
1788 worklist_vec.safe_push (obj: w);
1789}
1790
1791
1792/* Emit the physical representation of tree node EXPR to output block OB.
1793 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
1794 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1795
1796void
1797lto_output_tree (struct output_block *ob, tree expr,
1798 bool ref_p, bool this_ref_p)
1799{
1800 unsigned ix;
1801 bool existed_p;
1802 unsigned int size = ob->main_stream->total_size;
1803 /* Set while the DFS walk started below is in progress; used to catch
1804 unwanted recursive streaming and to adjust dump output. */
1805 static bool in_dfs_walk;
1806
1807 if (expr == NULL_TREE)
1808 {
1809 streamer_write_record_start (ob, tag: LTO_null);
1810 return;
1811 }
1812
1813 if (this_ref_p && tree_is_indexable (t: expr))
1814 {
1815 enum LTO_tags tag;
1816 unsigned ix;
1817
1818 lto_indexable_tree_ref (ob, expr, tag: &tag, index: &ix);
1819 streamer_write_record_start (ob, tag);
1820 streamer_write_uhwi (ob, ix);
1821 return;
1822 }
1823
1824 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1825 if (existed_p)
1826 {
1827 if (streamer_dump_file)
1828 {
1829 if (in_dfs_walk) /* Indent deeper while inside the DFS walk. */
1830 print_node_brief (streamer_dump_file, "    Streaming ref to ",
1831 expr, 4);
1832 else
1833 print_node_brief (streamer_dump_file, " Streaming ref to ",
1834 expr, 4);
1835 fprintf (stream: streamer_dump_file, format: "\n");
1836 }
1837 /* If a node has already been streamed out, make sure that
1838 we don't write it more than once. Otherwise, the reader
1839 will instantiate two different nodes for the same object. */
1840 streamer_write_record_start (ob, tag: LTO_tree_pickle_reference);
1841 streamer_write_uhwi (ob, ix);
1842 if (streamer_debugging)
1843 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1844 lto_tree_code_to_tag (TREE_CODE (expr)));
1845 lto_stats.num_pickle_refs_output++;
1846 }
1847 else
1848 {
1849 /* Protect against recursion, which would mean a disconnect between
1850 the tree edges we walk in the DFS walk and the edges
1851 we stream out. */
1852 gcc_assert (!in_dfs_walk);
1853
1854 if (streamer_dump_file)
1855 {
1856 print_node_brief (streamer_dump_file, " Streaming tree ",
1857 expr, 4);
1858 fprintf (stream: streamer_dump_file, format: "\n");
1859 }
1860
1861 /* Start the DFS walk; it streams every SCC of trees reachable from
1862 EXPR that is not already in the writer cache. */
1863 
1864 in_dfs_walk = true;
1865 DFS (ob, expr, ref_p, this_ref_p, false);
1866
1867 /* Finally append a reference to the tree we were writing. */
1868 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1869
1870 /* The DFS walk above may have skipped streaming EXPR itself to let
1871 us inline it here. */
1872 if (!existed_p)
1873 lto_output_tree_1 (ob, expr, hash: 0, ref_p, this_ref_p);
1874 else if (this_ref_p)
1875 {
1876 if (streamer_dump_file)
1877 {
1878 print_node_brief (streamer_dump_file,
1879 " Streaming final ref to ",
1880 expr, 4);
1881 fprintf (stream: streamer_dump_file, format: "\n");
1882 }
1883 streamer_write_record_start (ob, tag: LTO_tree_pickle_reference);
1884 streamer_write_uhwi (ob, ix);
1885 if (streamer_debugging)
1886 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1887 lto_tree_code_to_tag (TREE_CODE (expr)));
1888 }
1889 in_dfs_walk = false;
1890 lto_stats.num_pickle_refs_output++;
1891 }
1892 if (streamer_dump_file && !in_dfs_walk)
1893 fprintf (stream: streamer_dump_file, format: " %u bytes\n",
1894 ob->main_stream->total_size - size);
1895}
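
/* Editor's note (not part of the original source): for a single call to
   lto_output_tree the main stream therefore starts with one of

     LTO_null                                   -- EXPR was NULL_TREE
     <per-kind reference tag> <uhwi index>      -- indexable tree, by reference
     LTO_tree_pickle_reference <uhwi ix> [tag]  -- already in the writer cache
     <records produced by the DFS walk> ...     -- first time EXPR is seen

   where the trailing [tag] is only emitted when streamer_debugging is
   enabled, as a consistency check for the reader.  */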
1896
1897
1898/* Output to OB a list of try/catch handlers starting with FIRST. */
1899
1900static void
1901output_eh_try_list (struct output_block *ob, eh_catch first)
1902{
1903 eh_catch n;
1904
1905 for (n = first; n; n = n->next_catch)
1906 {
1907 streamer_write_record_start (ob, tag: LTO_eh_catch);
1908 stream_write_tree (ob, n->type_list, true);
1909 stream_write_tree (ob, n->filter_list, true);
1910 stream_write_tree (ob, n->label, true);
1911 }
1912
1913 streamer_write_record_start (ob, tag: LTO_null);
1914}
1915
1916
1917/* Output EH region R to OB. R may be NULL, in which case only an
1918 LTO_null marker is emitted. Region and landing pad indices, rather
1919 than pointers, are streamed so the reader can rebuild the tree. */
1920
1921static void
1922output_eh_region (struct output_block *ob, eh_region r)
1923{
1924 enum LTO_tags tag;
1925
1926 if (r == NULL)
1927 {
1928 streamer_write_record_start (ob, tag: LTO_null);
1929 return;
1930 }
1931
1932 if (r->type == ERT_CLEANUP)
1933 tag = LTO_ert_cleanup;
1934 else if (r->type == ERT_TRY)
1935 tag = LTO_ert_try;
1936 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1937 tag = LTO_ert_allowed_exceptions;
1938 else if (r->type == ERT_MUST_NOT_THROW)
1939 tag = LTO_ert_must_not_throw;
1940 else
1941 gcc_unreachable ();
1942
1943 streamer_write_record_start (ob, tag);
1944 streamer_write_hwi (ob, r->index);
1945
1946 if (r->outer)
1947 streamer_write_hwi (ob, r->outer->index);
1948 else
1949 streamer_write_zero (ob);
1950
1951 if (r->inner)
1952 streamer_write_hwi (ob, r->inner->index);
1953 else
1954 streamer_write_zero (ob);
1955
1956 if (r->next_peer)
1957 streamer_write_hwi (ob, r->next_peer->index);
1958 else
1959 streamer_write_zero (ob);
1960
1961 if (r->type == ERT_TRY)
1962 {
1963 output_eh_try_list (ob, first: r->u.eh_try.first_catch);
1964 }
1965 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1966 {
1967 stream_write_tree (ob, r->u.allowed.type_list, true);
1968 stream_write_tree (ob, r->u.allowed.label, true);
1969 streamer_write_uhwi (ob, r->u.allowed.filter);
1970 }
1971 else if (r->type == ERT_MUST_NOT_THROW)
1972 {
1973 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1974 bitpack_d bp = bitpack_create (s: ob->main_stream);
1975 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1976 streamer_write_bitpack (bp: &bp);
1977 }
1978
1979 if (r->landing_pads)
1980 streamer_write_hwi (ob, r->landing_pads->index);
1981 else
1982 streamer_write_zero (ob);
1983}
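
/* Editor's sketch (not part of the original source) of the record emitted
   above for a non-NULL region:

     <LTO_ert_* tag> <index> <outer index|0> <inner index|0> <next_peer index|0>
     try:                <catch list, LTO_null terminated>
     allowed_exceptions: <type_list> <label> <filter>
     must_not_throw:     <failure_decl> <bitpacked failure location>
     <landing pad index|0>

   Cleanup regions carry no extra payload.  */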
1984
1985
1986/* Output landing pad LP to OB. */
1987
1988static void
1989output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1990{
1991 if (lp == NULL)
1992 {
1993 streamer_write_record_start (ob, tag: LTO_null);
1994 return;
1995 }
1996
1997 streamer_write_record_start (ob, tag: LTO_eh_landing_pad);
1998 streamer_write_hwi (ob, lp->index);
1999 if (lp->next_lp)
2000 streamer_write_hwi (ob, lp->next_lp->index);
2001 else
2002 streamer_write_zero (ob);
2003
2004 if (lp->region)
2005 streamer_write_hwi (ob, lp->region->index);
2006 else
2007 streamer_write_zero (ob);
2008
2009 stream_write_tree (ob, lp->post_landing_pad, true);
2010}
2011
2012
2013/* Output the existing eh_table to OB. */
2014
2015static void
2016output_eh_regions (struct output_block *ob, struct function *fn)
2017{
2018 if (fn->eh && fn->eh->region_tree)
2019 {
2020 unsigned i;
2021 eh_region eh;
2022 eh_landing_pad lp;
2023 tree ttype;
2024
2025 streamer_write_record_start (ob, tag: LTO_eh_table);
2026
2027 /* Emit the index of the root of the EH region tree. */
2028 streamer_write_hwi (ob, fn->eh->region_tree->index);
2029
2030 /* Emit all the EH regions in the region array. */
2031 streamer_write_hwi (ob, vec_safe_length (v: fn->eh->region_array));
2032 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
2033 output_eh_region (ob, r: eh);
2034
2035 /* Emit all landing pads. */
2036 streamer_write_hwi (ob, vec_safe_length (v: fn->eh->lp_array));
2037 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
2038 output_eh_lp (ob, lp);
2039
2040 /* Emit all the runtime type data. */
2041 streamer_write_hwi (ob, vec_safe_length (v: fn->eh->ttype_data));
2042 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
2043 stream_write_tree (ob, ttype, true);
2044
2045 /* Emit the table of action chains. */
2046 if (targetm.arm_eabi_unwinder)
2047 {
2048 tree t;
2049 streamer_write_hwi (ob, vec_safe_length (v: fn->eh->ehspec_data.arm_eabi));
2050 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
2051 stream_write_tree (ob, t, true);
2052 }
2053 else
2054 {
2055 uchar c;
2056 streamer_write_hwi (ob, vec_safe_length (v: fn->eh->ehspec_data.other));
2057 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
2058 streamer_write_char_stream (obs: ob->main_stream, c);
2059 }
2060 }
2061
2062 /* The LTO_null either terminates the record or indicates that there
2063 are no eh_records at all. */
2064 streamer_write_record_start (ob, tag: LTO_null);
2065}
2066
2067
2068/* Output all of the active ssa names to OB. */
2069
2070static void
2071output_ssa_names (struct output_block *ob, struct function *fn)
2072{
2073 unsigned int i, len;
2074
2075 len = vec_safe_length (SSANAMES (fn));
2076 streamer_write_uhwi (ob, len);
2077
2078 for (i = 1; i < len; i++)
2079 {
2080 tree ptr = (*SSANAMES (fn))[i];
2081
2082 if (ptr == NULL_TREE
2083 || SSA_NAME_IN_FREE_LIST (ptr)
2084 || virtual_operand_p (op: ptr)
2085 /* Simply skip unreleased SSA names. */
2086 || (! SSA_NAME_IS_DEFAULT_DEF (ptr)
2087 && (! SSA_NAME_DEF_STMT (ptr)
2088 || ! gimple_bb (SSA_NAME_DEF_STMT (ptr)))))
2089 continue;
2090
2091 streamer_write_uhwi (ob, i);
2092 streamer_write_char_stream (obs: ob->main_stream,
2093 SSA_NAME_IS_DEFAULT_DEF (ptr));
2094 if (SSA_NAME_VAR (ptr))
2095 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
2096 else
2097 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
2098 stream_write_tree (ob, TREE_TYPE (ptr), true);
2099 }
2100
2101 streamer_write_zero (ob);
2102}
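
/* Editor's sketch (not part of the original source) of the SSA name records
   written above:

     <uhwi len> { <uhwi index> <char is_default_def> <SSA_NAME_VAR or type> }* <0>

   Names in the free list, virtual operands and names without a statement in
   the CFG are simply skipped; the terminating zero cannot be confused with a
   real entry because slot 0 of the SSA name vector is never used.  */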
2103
2104
2105
2106/* Output the cfg. */
2107
2108static void
2109output_cfg (struct output_block *ob, struct function *fn)
2110{
2111 struct lto_output_stream *tmp_stream = ob->main_stream;
2112 basic_block bb;
2113
2114 ob->main_stream = ob->cfg_stream;
2115
2116 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
2117 profile_status_for_fn (fn));
2118
2119 /* Output the number of the highest basic block. */
2120 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
2121
2122 FOR_ALL_BB_FN (bb, fn)
2123 {
2124 edge_iterator ei;
2125 edge e;
2126
2127 streamer_write_hwi (ob, bb->index);
2128
2129 /* Output the successors and the edge flags. */
2130 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
2131 FOR_EACH_EDGE (e, ei, bb->succs)
2132 {
2133 bitpack_d bp = bitpack_create (s: ob->main_stream);
2134 bp_pack_var_len_unsigned (&bp, e->dest->index);
2135 bp_pack_var_len_unsigned (&bp, e->flags);
2136 stream_output_location_and_block (ob, &bp, e->goto_locus);
2137 e->probability.stream_out (ob);
2138 }
2139 }
2140
2141 streamer_write_hwi (ob, -1);
2142
2143 bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
2144 while (bb->next_bb)
2145 {
2146 streamer_write_hwi (ob, bb->next_bb->index);
2147 bb = bb->next_bb;
2148 }
2149
2150 streamer_write_hwi (ob, -1);
2151
2152 /* Output the number of loops. */
2153 streamer_write_uhwi (ob, number_of_loops (fn));
2154
2155 /* Output each loop, skipping the tree root which has number zero. */
2156 for (unsigned i = 1; i < number_of_loops (fn); ++i)
2157 {
2158 class loop *loop = get_loop (fn, num: i);
2159
2160 /* Write the index of the loop header. That's enough to rebuild
2161 the loop tree on the reader side. Stream -1 for an unused
2162 loop entry. */
2163 if (!loop)
2164 {
2165 streamer_write_hwi (ob, -1);
2166 continue;
2167 }
2168 else
2169 streamer_write_hwi (ob, loop->header->index);
2170
2171 /* Write everything copy_loop_info copies. */
2172 streamer_write_enum (ob->main_stream,
2173 loop_estimation, EST_LAST, loop->estimate_state);
2174 streamer_write_hwi (ob, loop->any_upper_bound);
2175 if (loop->any_upper_bound)
2176 {
2177 widest_int w = widest_int::from (x: loop->nb_iterations_upper_bound,
2178 sgn: SIGNED);
2179 streamer_write_widest_int (ob, w);
2180 }
2181 streamer_write_hwi (ob, loop->any_likely_upper_bound);
2182 if (loop->any_likely_upper_bound)
2183 {
2184 widest_int w
2185 = widest_int::from (x: loop->nb_iterations_likely_upper_bound,
2186 sgn: SIGNED);
2187 streamer_write_widest_int (ob, w);
2188 }
2189 streamer_write_hwi (ob, loop->any_estimate);
2190 if (loop->any_estimate)
2191 {
2192 widest_int w = widest_int::from (x: loop->nb_iterations_estimate,
2193 sgn: SIGNED);
2194 streamer_write_widest_int (ob, w);
2195 }
2196
2197 /* Write OMP SIMD related info. */
2198 streamer_write_hwi (ob, loop->safelen);
2199 streamer_write_hwi (ob, loop->unroll);
2200 streamer_write_hwi (ob, loop->owned_clique);
2201 streamer_write_hwi (ob, loop->dont_vectorize);
2202 streamer_write_hwi (ob, loop->force_vectorize);
2203 streamer_write_hwi (ob, loop->finite_p);
2204 stream_write_tree (ob, loop->simduid, true);
2205 }
2206
2207 ob->main_stream = tmp_stream;
2208}
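
/* Editor's sketch (not part of the original source) of the cfg_stream layout
   produced above:

     <profile status> <highest basic block number>
     { <bb index> <succ count>
       { bitpack (dest index, flags, goto_locus) <edge probability> }* }*  <-1>
     { <bb index in layout order> }*  <-1>
     <number of loops> { <loop header bb index | -1> <copy_loop_info data> }*

   The block chain is streamed separately from the per-block edge records so
   that the reader can restore the original basic block layout order.  */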
2209
2210
2211/* Create the header in the file using OB. If the section type is for
2212 a function, FN is the decl for that function. */
2213
2214void
2215produce_asm (struct output_block *ob, tree fn)
2216{
2217 enum lto_section_type section_type = ob->section_type;
2218 struct lto_function_header header;
2219 char *section_name;
2220
2221 if (section_type == LTO_section_function_body)
2222 {
2223 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
2224 section_name = lto_get_section_name (section_type, name,
2225 symtab_node::get (decl: fn)->order,
2226 NULL);
2227 }
2228 else
2229 section_name = lto_get_section_name (section_type, NULL, 0, NULL);
2230
2231 lto_begin_section (section_name, !flag_wpa);
2232 free (ptr: section_name);
2233
2234 /* The entire header stream is computed here. */
2235 memset (s: &header, c: 0, n: sizeof (struct lto_function_header));
2236
2237 if (section_type == LTO_section_function_body)
2238 header.cfg_size = ob->cfg_stream->total_size;
2239 header.main_size = ob->main_stream->total_size;
2240 header.string_size = ob->string_stream->total_size;
2241 lto_write_data (&header, sizeof header);
2242
2243 /* Put all of the gimple and the string table out to the asm file as a
2244 block of text. */
2245 if (section_type == LTO_section_function_body)
2246 lto_write_stream (ob->cfg_stream);
2247 lto_write_stream (ob->main_stream);
2248 lto_write_stream (ob->string_stream);
2249
2250 lto_end_section ();
2251}
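
/* Editor's sketch (not part of the original source) of a section written by
   produce_asm:

     struct lto_function_header   (cfg_size, main_size, string_size, ...)
     [cfg stream]                 (function body sections only)
     [main stream]
     [string stream]

   The sizes recorded in the header are what lets the reader split the raw
   section data back into its component streams.  */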
2252
2253
2254/* Output the base body of struct function FN using output block OB. */
2255
2256static void
2257output_struct_function_base (struct output_block *ob, struct function *fn)
2258{
2259 struct bitpack_d bp;
2260 unsigned i;
2261 tree t;
2262
2263 /* Output the static chain and non-local goto save area. */
2264 stream_write_tree (ob, fn->static_chain_decl, true);
2265 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
2266
2267 /* Output all the local variables in the function. */
2268 streamer_write_hwi (ob, vec_safe_length (v: fn->local_decls));
2269 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
2270 stream_write_tree (ob, t, true);
2271
2272 /* Output current IL state of the function. */
2273 streamer_write_uhwi (ob, fn->curr_properties);
2274
2275 /* Write all the attributes for FN. */
2276 bp = bitpack_create (s: ob->main_stream);
2277 bp_pack_value (bp: &bp, val: fn->is_thunk, nbits: 1);
2278 bp_pack_value (bp: &bp, val: fn->has_local_explicit_reg_vars, nbits: 1);
2279 bp_pack_value (bp: &bp, val: fn->returns_pcc_struct, nbits: 1);
2280 bp_pack_value (bp: &bp, val: fn->returns_struct, nbits: 1);
2281 bp_pack_value (bp: &bp, val: fn->can_throw_non_call_exceptions, nbits: 1);
2282 bp_pack_value (bp: &bp, val: fn->can_delete_dead_exceptions, nbits: 1);
2283 bp_pack_value (bp: &bp, val: fn->always_inline_functions_inlined, nbits: 1);
2284 bp_pack_value (bp: &bp, val: fn->after_inlining, nbits: 1);
2285 bp_pack_value (bp: &bp, val: fn->stdarg, nbits: 1);
2286 bp_pack_value (bp: &bp, val: fn->has_nonlocal_label, nbits: 1);
2287 bp_pack_value (bp: &bp, val: fn->has_forced_label_in_static, nbits: 1);
2288 bp_pack_value (bp: &bp, val: fn->calls_alloca, nbits: 1);
2289 bp_pack_value (bp: &bp, val: fn->calls_setjmp, nbits: 1);
2290 bp_pack_value (bp: &bp, val: fn->calls_eh_return, nbits: 1);
2291 bp_pack_value (bp: &bp, val: fn->has_force_vectorize_loops, nbits: 1);
2292 bp_pack_value (bp: &bp, val: fn->has_simduid_loops, nbits: 1);
2293 bp_pack_value (bp: &bp, val: fn->assume_function, nbits: 1);
2294 bp_pack_value (bp: &bp, val: fn->va_list_fpr_size, nbits: 8);
2295 bp_pack_value (bp: &bp, val: fn->va_list_gpr_size, nbits: 8);
2296 bp_pack_value (bp: &bp, val: fn->last_clique, nbits: sizeof (short) * 8);
2297
2298 /* Output the function start and end loci. */
2299 stream_output_location (ob, &bp, fn->function_start_locus);
2300 stream_output_location (ob, &bp, fn->function_end_locus);
2301
2302 /* Save the instance discriminator if present. */
2303 int *instance_number_p = NULL;
2304 if (decl_to_instance_map)
2305 instance_number_p = decl_to_instance_map->get (k: fn->decl);
2306 bp_pack_value (bp: &bp, val: !!instance_number_p, nbits: 1);
2307 if (instance_number_p)
2308 bp_pack_value (bp: &bp, val: *instance_number_p, nbits: sizeof (int) * CHAR_BIT);
2309
2310 streamer_write_bitpack (bp: &bp);
2311}
2312
2313
2314/* Collect all leaf BLOCKs beyond ROOT into LEAFS. */
2315
2316static void
2317collect_block_tree_leafs (tree root, vec<tree> &leafs)
2318{
2319 for (root = BLOCK_SUBBLOCKS (root); root; root = BLOCK_CHAIN (root))
2320 if (! BLOCK_SUBBLOCKS (root))
2321 leafs.safe_push (obj: root);
2322 else
2323 collect_block_tree_leafs (root, leafs);
2324}
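
/* Editor's illustration (not part of the original source): for a block tree

       ROOT
        +-- B1
        |    +-- B3
        +-- B2

   collect_block_tree_leafs pushes B3 and then B2 (the leaves) into LEAFS,
   while ROOT and B1 are reached on the reader side through the streamed
   BLOCK_SUPERCONTEXT links of those leaves.  */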
2325
2326/* This performs function body modifications that are needed for streaming
2327 to work. */
2328
2329void
2330lto_prepare_function_for_streaming (struct cgraph_node *node)
2331{
2332 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2333 basic_block bb;
2334
2335 if (number_of_loops (fn))
2336 {
2337 push_cfun (new_cfun: fn);
2338 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
2339 loop_optimizer_finalize ();
2340 pop_cfun ();
2341 }
2342 /* We will renumber the statements. The code that does this uses
2343 the same ordering that we use for serializing them so we can use
2344 the same code on the other end and not have to write out the
2345 statement numbers. We do not assign UIDs to PHIs here because
2346 virtual PHIs get re-computed on-the-fly which would make numbers
2347 inconsistent. */
2348 set_gimple_stmt_max_uid (fn, maxid: 0);
2349 FOR_ALL_BB_FN (bb, fn)
2350 {
2351 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (i: gsi);
2352 gsi_next (i: &gsi))
2353 {
2354 gphi *stmt = gsi.phi ();
2355
2356 /* Virtual PHIs are not going to be streamed. */
2357 if (!virtual_operand_p (op: gimple_phi_result (gs: stmt)))
2358 gimple_set_uid (g: stmt, uid: inc_gimple_stmt_max_uid (fn));
2359 }
2360 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (i: gsi);
2361 gsi_next (i: &gsi))
2362 {
2363 gimple *stmt = gsi_stmt (i: gsi);
2364 gimple_set_uid (g: stmt, uid: inc_gimple_stmt_max_uid (fn));
2365 }
2366 }
2367 /* To avoid keeping duplicate gimple IDs in the statements, renumber
2368 virtual phis now. */
2369 FOR_ALL_BB_FN (bb, fn)
2370 {
2371 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (i: gsi);
2372 gsi_next (i: &gsi))
2373 {
2374 gphi *stmt = gsi.phi ();
2375 if (virtual_operand_p (op: gimple_phi_result (gs: stmt)))
2376 gimple_set_uid (g: stmt, uid: inc_gimple_stmt_max_uid (fn));
2377 }
2378 }
2379
2380}
2381
2382/* Emit the chain of tree nodes starting at T. OB is the output block
2383 to write to. REF_P is true if chain elements should be emitted
2384 as references. */
2385
2386static void
2387streamer_write_chain (struct output_block *ob, tree t, bool ref_p)
2388{
2389 while (t)
2390 {
2391 /* We avoid outputting external vars or functions by reference
2392 to the global decls section as we do not want to have them
2393 enter decl merging. We should not need to do this anymore because
2394 free_lang_data removes them from block scopes. */
2395 gcc_assert (!VAR_OR_FUNCTION_DECL_P (t) || !DECL_EXTERNAL (t));
2396 stream_write_tree (ob, t, ref_p);
2397
2398 t = TREE_CHAIN (t);
2399 }
2400
2401 /* Write a sentinel to terminate the chain. */
2402 stream_write_tree (ob, NULL_TREE, ref_p);
2403}
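
/* Editor's note (not part of the original source): because only the
   NULL_TREE sentinel marks the end of a chain, the chain length is never
   streamed; the reader keeps reading and re-chaining trees until it sees
   the sentinel, which is therefore written even for an empty chain.  */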
2404
2405/* Output the body of function NODE->DECL. */
2406
2407static void
2408output_function (struct cgraph_node *node)
2409{
2410 tree function;
2411 struct function *fn;
2412 basic_block bb;
2413 struct output_block *ob;
2414
2415 if (streamer_dump_file)
2416 fprintf (stream: streamer_dump_file, format: "\nStreaming body of %s\n",
2417 node->dump_name ());
2418
2419 function = node->decl;
2420 fn = DECL_STRUCT_FUNCTION (function);
2421 ob = create_output_block (section_type: LTO_section_function_body);
2422
2423 ob->symbol = node;
2424
2425 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
2426
2427 /* Make string 0 be a NULL string. */
2428 streamer_write_char_stream (obs: ob->string_stream, c: 0);
2429
2430 streamer_write_record_start (ob, tag: LTO_function);
2431
2432 /* Output decls for the function result and the arguments. */
2433 stream_write_tree (ob, DECL_RESULT (function), true);
2434 streamer_write_chain (ob, DECL_ARGUMENTS (function), ref_p: true);
2435
2436 /* Output debug args if available. */
2437 vec<tree, va_gc> **debugargs = decl_debug_args_lookup (function);
2438 if (! debugargs)
2439 streamer_write_uhwi (ob, 0);
2440 else
2441 {
2442 streamer_write_uhwi (ob, (*debugargs)->length ());
2443 for (unsigned i = 0; i < (*debugargs)->length (); ++i)
2444 stream_write_tree (ob, (**debugargs)[i], true);
2445 }
2446
2447 /* Output DECL_INITIAL for the function, which contains the tree of
2448 lexical scopes. */
2449 stream_write_tree (ob, DECL_INITIAL (function), true);
2450 /* As we do not recurse into BLOCK_SUBBLOCKS but only into BLOCK_SUPERCONTEXT,
2451 collect the block tree leaves and stream those. */
2452 auto_vec<tree> block_tree_leafs;
2453 if (DECL_INITIAL (function) && DECL_INITIAL (function) != error_mark_node)
2454 collect_block_tree_leafs (DECL_INITIAL (function), leafs&: block_tree_leafs);
2455 streamer_write_uhwi (ob, block_tree_leafs.length ());
2456 for (unsigned i = 0; i < block_tree_leafs.length (); ++i)
2457 stream_write_tree (ob, block_tree_leafs[i], true);
2458
2459 /* We also stream abstract functions; for those we stream only the
2460 parts needed for debug info. */
2461 if (gimple_has_body_p (function))
2462 {
2463 streamer_write_uhwi (ob, 1);
2464 output_struct_function_base (ob, fn);
2465
2466 output_cfg (ob, fn);
2467
2468 /* Output all the SSA names used in the function. */
2469 output_ssa_names (ob, fn);
2470
2471 /* Output any exception handling regions. */
2472 output_eh_regions (ob, fn);
2473
2474 /* Output the code for the function. */
2475 FOR_ALL_BB_FN (bb, fn)
2476 output_bb (ob, bb, fn);
2477
2478 /* The terminator for this function. */
2479 streamer_write_record_start (ob, tag: LTO_null);
2480 }
2481 else
2482 streamer_write_uhwi (ob, 0);
2483
2484 /* Create a section to hold the pickled output of this function. */
2485 produce_asm (ob, fn: function);
2486
2487 destroy_output_block (ob);
2488 if (streamer_dump_file)
2489 fprintf (stream: streamer_dump_file, format: "Finished streaming %s\n",
2490 node->dump_name ());
2491}
2492
2493/* Output the constructor (DECL_INITIAL) of variable NODE->DECL. */
2494
2495static void
2496output_constructor (struct varpool_node *node)
2497{
2498 tree var = node->decl;
2499 struct output_block *ob;
2500
2501 if (streamer_dump_file)
2502 fprintf (stream: streamer_dump_file, format: "\nStreaming constructor of %s\n",
2503 node->dump_name ());
2504
2505 timevar_push (tv: TV_IPA_LTO_CTORS_OUT);
2506 ob = create_output_block (section_type: LTO_section_function_body);
2507
2508 ob->symbol = node;
2509
2510 /* Make string 0 be a NULL string. */
2511 streamer_write_char_stream (obs: ob->string_stream, c: 0);
2512
2513 /* Output DECL_INITIAL for the variable, which is its static
2514 initializer. */
2515 stream_write_tree (ob, DECL_INITIAL (var), true);
2516
2517 /* Create a section to hold the pickled output of this variable. */
2518 produce_asm (ob, fn: var);
2519
2520 destroy_output_block (ob);
2521 if (streamer_dump_file)
2522 fprintf (stream: streamer_dump_file, format: "Finished streaming %s\n",
2523 node->dump_name ());
2524 timevar_pop (tv: TV_IPA_LTO_CTORS_OUT);
2525}
2526
2527
2528/* Emit toplevel asms. */
2529
2530void
2531lto_output_toplevel_asms (void)
2532{
2533 struct output_block *ob;
2534 struct asm_node *can;
2535 char *section_name;
2536 struct lto_simple_header_with_strings header;
2537
2538 if (!symtab->first_asm_symbol ())
2539 return;
2540
2541 ob = create_output_block (section_type: LTO_section_asm);
2542
2543 /* Make string 0 be a NULL string. */
2544 streamer_write_char_stream (obs: ob->string_stream, c: 0);
2545
2546 for (can = symtab->first_asm_symbol (); can; can = can->next)
2547 {
2548 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2549 streamer_write_hwi (ob, can->order);
2550 }
2551
2552 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2553
2554 section_name = lto_get_section_name (LTO_section_asm, NULL, 0, NULL);
2555 lto_begin_section (section_name, !flag_wpa);
2556 free (ptr: section_name);
2557
2558 /* The entire header stream is computed here. */
2559 memset (s: &header, c: 0, n: sizeof (header));
2560
2561 header.main_size = ob->main_stream->total_size;
2562 header.string_size = ob->string_stream->total_size;
2563 lto_write_data (&header, sizeof header);
2564
2565 /* Put all of the gimple and the string table out to the asm file as a
2566 block of text. */
2567 lto_write_stream (ob->main_stream);
2568 lto_write_stream (ob->string_stream);
2569
2570 lto_end_section ();
2571
2572 destroy_output_block (ob);
2573}
2574
2575
2576/* Copy the function body or variable constructor of NODE without deserializing. */
2577
2578static void
2579copy_function_or_variable (struct symtab_node *node)
2580{
2581 tree function = node->decl;
2582 struct lto_file_decl_data *file_data = node->lto_file_data;
2583 const char *data;
2584 size_t len;
2585 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2586 char *section_name =
2587 lto_get_section_name (LTO_section_function_body, name, node->order, NULL);
2588 size_t i, j;
2589 struct lto_in_decl_state *in_state;
2590 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2591
2592 if (streamer_dump_file)
2593 fprintf (stream: streamer_dump_file, format: "Copying section for %s\n", name);
2594 lto_begin_section (section_name, false);
2595 free (ptr: section_name);
2596
2597 /* We may have renamed the declaration, e.g., a static function. */
2598 name = lto_get_decl_name_mapping (file_data, name);
2599
2600 data = lto_get_raw_section_data (file_data, LTO_section_function_body,
2601 name, node->order - file_data->order_base,
2602 &len);
2603 gcc_assert (data);
2604
2605 /* Do a bitwise copy of the function body. */
2606 lto_write_raw_data (data, len);
2607
2608 /* Copy decls. */
2609 in_state =
2610 lto_get_function_in_decl_state (node->lto_file_data, function);
2611 gcc_assert (in_state);
2612 out_state->compressed = in_state->compressed;
2613
2614 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2615 {
2616 size_t n = vec_safe_length (v: in_state->streams[i]);
2617 vec<tree, va_gc> *trees = in_state->streams[i];
2618 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2619
2620 /* The out state must have the same indices as the in state.
2621 So just copy the vector. All the encoders in the in state
2622 must be empty when we reach here. */
2623 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2624 encoder->trees.reserve_exact (nelems: n);
2625 for (j = 0; j < n; j++)
2626 encoder->trees.safe_push (obj: (*trees)[j]);
2627 }
2628
2629 lto_free_raw_section_data (file_data, LTO_section_function_body, name,
2630 data, len);
2631 lto_end_section ();
2632}
2633
2634/* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2635
2636static tree
2637wrap_refs (tree *tp, int *ws, void *)
2638{
2639 tree t = *tp;
2640 if (handled_component_p (t)
2641 && VAR_P (TREE_OPERAND (t, 0))
2642 && TREE_PUBLIC (TREE_OPERAND (t, 0)))
2643 {
2644 tree decl = TREE_OPERAND (t, 0);
2645 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2646 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2647 build1 (ADDR_EXPR, ptrtype, decl),
2648 build_int_cst (ptrtype, 0));
2649 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2650 *ws = 0;
2651 }
2652 else if (TREE_CODE (t) == CONSTRUCTOR)
2653 ;
2654 else if (!EXPR_P (t))
2655 *ws = 0;
2656 return NULL_TREE;
2657}
2658
2659/* Remove functions that are no longer used from offload_funcs, and mark the
2660 remaining ones with DECL_PRESERVE_P. */
2661
2662static void
2663prune_offload_funcs (void)
2664{
2665 if (!offload_funcs)
2666 return;
2667
2668 unsigned ix, ix2;
2669 tree *elem_ptr;
2670 VEC_ORDERED_REMOVE_IF (*offload_funcs, ix, ix2, elem_ptr,
2671 cgraph_node::get (*elem_ptr) == NULL);
2672
2673 tree fn_decl;
2674 FOR_EACH_VEC_ELT (*offload_funcs, ix, fn_decl)
2675 DECL_PRESERVE_P (fn_decl) = 1;
2676}
2677
2678/* Produce LTO section that contains global information
2679 about LTO bytecode. */
2680
2681static void
2682produce_lto_section ()
2683{
2684 /* Stream LTO meta section. */
2685 output_block *ob = create_output_block (section_type: LTO_section_lto);
2686
2687 char * section_name = lto_get_section_name (LTO_section_lto, NULL, 0, NULL);
2688 lto_begin_section (section_name, false);
2689 free (ptr: section_name);
2690
2691#ifdef HAVE_ZSTD_H
2692 lto_compression compression = ZSTD;
2693#else
2694 lto_compression compression = ZLIB;
2695#endif
2696
2697 bool slim_object = flag_generate_lto && !flag_fat_lto_objects;
2698 lto_section s
2699 = { LTO_major_version, LTO_minor_version, .slim_object: slim_object, ._padding: 0, .flags: 0 };
2700 s.set_compression (compression);
2701 lto_write_data (&s, sizeof s);
2702 lto_end_section ();
2703 destroy_output_block (ob);
2704}
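
/* Editor's note (not part of the original source): the record written above
   is just the lto_section structure: LTO major/minor version, a slim-object
   flag and, via set_compression, whether the other sections are compressed
   with ZSTD or ZLIB.  Readers check this section before parsing any other
   LTO bytecode.  */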
2705
2706/* Compare symbols to get them sorted by input file (to optimize streaming). */
2707
2708static int
2709cmp_symbol_files (const void *pn1, const void *pn2, void *id_map_)
2710{
2711 const symtab_node *n1 = *(const symtab_node * const *)pn1;
2712 const symtab_node *n2 = *(const symtab_node * const *)pn2;
2713 hash_map<lto_file_decl_data *, int> *id_map
2714 = (hash_map<lto_file_decl_data *, int> *)id_map_;
2715
2716 int file_order1 = n1->lto_file_data ? n1->lto_file_data->order : -1;
2717 int file_order2 = n2->lto_file_data ? n2->lto_file_data->order : -1;
2718
2719 /* Order files the same way as they appeared on the command line to
2720 reduce seeking while copying sections. */
2721 if (file_order1 != file_order2)
2722 return file_order1 - file_order2;
2723
2724 /* Order within static library. */
2725 if (n1->lto_file_data && n1->lto_file_data->id != n2->lto_file_data->id)
2726 return *id_map->get (k: n1->lto_file_data) - *id_map->get (k: n2->lto_file_data);
2727
2728 /* And finally order by the definition order. */
2729 return n1->order - n2->order;
2730}
2731
2732/* Main entry point from the pass manager. */
2733
2734void
2735lto_output (void)
2736{
2737 struct lto_out_decl_state *decl_state;
2738 bitmap output = NULL;
2739 bitmap_obstack output_obstack;
2740 unsigned int i, n_nodes;
2741 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2742 auto_vec<symtab_node *> symbols_to_copy;
2743
2744 prune_offload_funcs ();
2745
2746 if (flag_checking)
2747 {
2748 bitmap_obstack_initialize (&output_obstack);
2749 output = BITMAP_ALLOC (obstack: &output_obstack);
2750 }
2751
2752 /* Initialize the streamer. */
2753 lto_streamer_init ();
2754
2755 produce_lto_section ();
2756
2757 n_nodes = lto_symtab_encoder_size (encoder);
2758 /* Prepare vector of functions to output and then sort it to optimize
2759 section copying. */
2760 for (i = 0; i < n_nodes; i++)
2761 {
2762 symtab_node *snode = lto_symtab_encoder_deref (encoder, ref: i);
2763 if (snode->alias)
2764 continue;
2765 if (cgraph_node *node = dyn_cast <cgraph_node *> (p: snode))
2766 {
2767 if (lto_symtab_encoder_encode_body_p (encoder, node)
2768 && !node->clone_of)
2769 symbols_to_copy.safe_push (obj: node);
2770 }
2771 else if (varpool_node *node = dyn_cast <varpool_node *> (p: snode))
2772 {
2773 /* Wrap symbol references inside the ctor in a type
2774 preserving MEM_REF. */
2775 tree ctor = DECL_INITIAL (node->decl);
2776 if (ctor && !in_lto_p)
2777 walk_tree (&ctor, wrap_refs, NULL, NULL);
2778 if (get_symbol_initial_value (encoder, expr: node->decl) == error_mark_node
2779 && lto_symtab_encoder_encode_initializer_p (encoder, node))
2780 symbols_to_copy.safe_push (obj: node);
2781 }
2782 }
2783 /* Map each file id to the order it first appears in symbols_to_copy,
2784 since we want to sort symbols with the same id next to each other but
2785 must avoid making the overall order depend on the actual hash value. */
2786 int order = 0;
2787 hash_map<lto_file_decl_data *, int> id_map;
2788 for (i = 0; i < symbols_to_copy.length (); ++i)
2789 {
2790 symtab_node *snode = symbols_to_copy[i];
2791 if (snode->lto_file_data)
2792 {
2793 bool existed_p = false;
2794 int &ord = id_map.get_or_insert (k: snode->lto_file_data, existed: &existed_p);
2795 if (!existed_p)
2796 ord = order++;
2797 }
2798 }
2799 symbols_to_copy.sort (cmp: cmp_symbol_files, data: (void *)&id_map);
2800 for (i = 0; i < symbols_to_copy.length (); i++)
2801 {
2802 symtab_node *snode = symbols_to_copy[i];
2803 cgraph_node *cnode;
2804 varpool_node *vnode;
2805
2806 if (flag_checking)
2807 gcc_assert (bitmap_set_bit (output, DECL_UID (snode->decl)));
2808
2809 decl_state = lto_new_out_decl_state ();
2810 lto_push_out_decl_state (decl_state);
2811
2812 if ((cnode = dyn_cast <cgraph_node *> (p: snode))
2813 && (gimple_has_body_p (cnode->decl)
2814 || (!flag_wpa
2815 && flag_incremental_link != INCREMENTAL_LINK_LTO)
2816 /* Thunks have no body but they may be synthesized
2817 at WPA time. */
2818 || DECL_ARGUMENTS (cnode->decl)
2819 || cnode->declare_variant_alt))
2820 output_function (node: cnode);
2821 else if ((vnode = dyn_cast <varpool_node *> (p: snode))
2822 && (DECL_INITIAL (vnode->decl) != error_mark_node
2823 || (!flag_wpa
2824 && flag_incremental_link != INCREMENTAL_LINK_LTO)))
2825 output_constructor (node: vnode);
2826 else
2827 copy_function_or_variable (node: snode);
2828 gcc_assert (lto_get_out_decl_state () == decl_state);
2829 lto_pop_out_decl_state ();
2830 lto_record_function_out_decl_state (snode->decl, decl_state);
2831 }
2832
2833 /* Emit the callgraph after emitting function bodies. This needs to
2834 be done now to make sure that all the statements in every function
2835 have been renumbered so that edges can be associated with call
2836 statements using the statement UIDs. */
2837 output_symtab ();
2838
2839 output_offload_tables ();
2840
2841 if (flag_checking)
2842 {
2843 BITMAP_FREE (output);
2844 bitmap_obstack_release (&output_obstack);
2845 }
2846}
2847
2848/* Write each node encoded by ENCODER to OB, as well as those reachable
2849 from it and required for correct representation of its semantics.
2850 Each node in ENCODER must be a global declaration or a type. A node
2851 is written only once, even if it appears multiple times in the
2852 vector. Certain transitively-reachable nodes, such as those
2853 representing expressions, may be duplicated, but such nodes
2854 must not appear in ENCODER itself. */
2855
2856static void
2857write_global_stream (struct output_block *ob,
2858 struct lto_tree_ref_encoder *encoder)
2859{
2860 tree t;
2861 size_t index;
2862 const size_t size = lto_tree_ref_encoder_size (encoder);
2863
2864 for (index = 0; index < size; index++)
2865 {
2866 t = lto_tree_ref_encoder_get_tree (encoder, idx: index);
2867 if (streamer_dump_file)
2868 {
2869 fprintf (stream: streamer_dump_file, format: " %i:", (int)index);
2870 print_node_brief (streamer_dump_file, "", t, 4);
2871 fprintf (stream: streamer_dump_file, format: "\n");
2872 }
2873 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2874 stream_write_tree (ob, t, false);
2875 }
2876}
2877
2878
2879/* Write a sequence of indices into the globals vector corresponding
2880 to the trees in ENCODER. These are used by the reader to map the
2881 indices used to refer to global entities within function bodies to
2882 their referents. */
2883
2884static void
2885write_global_references (struct output_block *ob,
2886 struct lto_tree_ref_encoder *encoder)
2887{
2888 tree t;
2889 uint32_t index;
2890 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2891
2892 /* Write size and slot indexes as 32-bit unsigned numbers. */
2893 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2894 data[0] = size;
2895
2896 for (index = 0; index < size; index++)
2897 {
2898 unsigned slot_num;
2899
2900 t = lto_tree_ref_encoder_get_tree (encoder, idx: index);
2901 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2902 gcc_assert (slot_num != (unsigned)-1);
2903 data[index + 1] = slot_num;
2904 }
2905
2906 lto_write_data (data, sizeof (int32_t) * (size + 1));
2907 free (ptr: data);
2908}
2909
2910
2911/* Write all the streams in an lto_out_decl_state STATE using
2912 output block OB. */
2913
2914void
2915lto_output_decl_state_streams (struct output_block *ob,
2916 struct lto_out_decl_state *state)
2917{
2918 int i;
2919
2920 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2921 write_global_stream (ob, encoder: &state->streams[i]);
2922}
2923
2924
2925/* Write all the references in an lto_out_decl_state STATE using
2926 output block OB. */
2927
2928void
2929lto_output_decl_state_refs (struct output_block *ob,
2930 struct lto_out_decl_state *state)
2931{
2932 unsigned i;
2933 unsigned ref;
2934 tree decl;
2935
2936 /* Write a reference to the FUNCTION_DECL. If there is no function,
2937 write a reference to void_type_node instead. */
2938 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2939 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2940 gcc_assert (ref != (unsigned)-1);
2941 ref = ref * 2 + (state->compressed ? 1 : 0);
2942 lto_write_data (&ref, sizeof (uint32_t));
2943
2944 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2945 write_global_references (ob, encoder: &state->streams[i]);
2946}
2947
2948
2949/* Return the written size of STATE. */
2950
2951static size_t
2952lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2953{
2954 int i;
2955 size_t size;
2956
2957 size = sizeof (int32_t); /* fn_ref. */
2958 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2959 {
2960 size += sizeof (int32_t); /* vector size. */
2961 size += (lto_tree_ref_encoder_size (encoder: &state->streams[i])
2962 * sizeof (int32_t));
2963 }
2964 return size;
2965}
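
/* Editor's sketch (not part of the original source) of one serialized decl
   state, matching the size computed above and the data written by
   lto_output_decl_state_refs / write_global_references:

     <uint32: fn slot * 2 + compressed bit>
     for each of the LTO_N_DECL_STREAMS streams:
       <uint32: size> followed by size <uint32: slot number> entries

   Every field is a fixed 32-bit word, which is why the total written size
   can be computed without actually serializing anything.  */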
2966
2967
2968/* Write symbol T into the symbol table section. CACHE is used to look up
2969 T's slot number; SEEN specifies symbols we have written so far. */
2970
2971static void
2972write_symbol (struct streamer_tree_cache_d *cache,
2973 tree t, hash_set<const char *> *seen, bool alias)
2974{
2975 const char *name;
2976 enum gcc_plugin_symbol_kind kind;
2977 enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
2978 unsigned slot_num;
2979 uint64_t size;
2980 const char *comdat;
2981 unsigned char c;
2982
2983 gcc_assert (VAR_OR_FUNCTION_DECL_P (t));
2984
2985 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2986
2987 /* This behaves like assemble_name_raw in varasm.cc, performing the
2988 same name manipulations that ASM_OUTPUT_LABELREF does. */
2989 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2990
2991 if (seen->add (k: name))
2992 return;
2993
2994 streamer_tree_cache_lookup (cache, t, &slot_num);
2995 gcc_assert (slot_num != (unsigned)-1);
2996
2997 if (DECL_EXTERNAL (t))
2998 {
2999 if (DECL_WEAK (t))
3000 kind = GCCPK_WEAKUNDEF;
3001 else
3002 kind = GCCPK_UNDEF;
3003 }
3004 else
3005 {
3006 if (DECL_WEAK (t))
3007 kind = GCCPK_WEAKDEF;
3008 else if (DECL_COMMON (t))
3009 kind = GCCPK_COMMON;
3010 else
3011 kind = GCCPK_DEF;
3012
3013 /* When something is defined, it should have a node attached. */
3014 gcc_assert (alias || !VAR_P (t) || varpool_node::get (t)->definition);
3015 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
3016 || (cgraph_node::get (t)
3017 && cgraph_node::get (t)->definition));
3018 }
3019
3020 /* Imitate what default_elf_asm_output_external does.
3021 When a symbol is external, we need to output it with DEFAULT visibility
3022 when compiling with -fvisibility=default, but with HIDDEN visibility
3023 when the symbol has attribute (visibility("hidden")) specified.
3024 targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED and gets this
3025 right. */
3026
3027 if (DECL_EXTERNAL (t)
3028 && !targetm.binds_local_p (t))
3029 visibility = GCCPV_DEFAULT;
3030 else
3031 switch (DECL_VISIBILITY (t))
3032 {
3033 case VISIBILITY_DEFAULT:
3034 visibility = GCCPV_DEFAULT;
3035 break;
3036 case VISIBILITY_PROTECTED:
3037 visibility = GCCPV_PROTECTED;
3038 break;
3039 case VISIBILITY_HIDDEN:
3040 visibility = GCCPV_HIDDEN;
3041 break;
3042 case VISIBILITY_INTERNAL:
3043 visibility = GCCPV_INTERNAL;
3044 break;
3045 }
3046
3047 if (kind == GCCPK_COMMON
3048 && DECL_SIZE_UNIT (t)
3049 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
3050 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
3051 else
3052 size = 0;
3053
3054 if (DECL_ONE_ONLY (t))
3055 comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
3056 else
3057 comdat = "";
3058
3059 lto_write_data (name, strlen (s: name) + 1);
3060 lto_write_data (comdat, strlen (s: comdat) + 1);
3061 c = (unsigned char) kind;
3062 lto_write_data (&c, 1);
3063 c = (unsigned char) visibility;
3064 lto_write_data (&c, 1);
3065 lto_write_data (&size, 8);
3066 lto_write_data (&slot_num, 4);
3067}
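
/* Editor's sketch (not part of the original source) of one symbol table
   entry as written above, in the format consumed by the linker plugin:

     <name, NUL terminated> <comdat group, NUL terminated>
     <kind, 1 byte> <visibility, 1 byte> <size, 8 bytes> <slot number, 4 bytes>

   write_symbol_extension_info below adds two more bytes per symbol (symbol
   type and section flags) in the separate symtab extension section.  */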
3068
3069/* Write extension information for symbols (symbol type, section flags). */
3070
3071static void
3072write_symbol_extension_info (tree t)
3073{
3074 unsigned char c;
3075 c = (unsigned char) (TREE_CODE (t) == VAR_DECL
3076 ? GCCST_VARIABLE : GCCST_FUNCTION);
3077 lto_write_data (&c, 1);
3078 unsigned char section_kind = 0;
3079 if (VAR_P (t))
3080 {
3081 section *s = get_variable_section (t, false);
3082 if (s->common.flags & SECTION_BSS)
3083 section_kind |= GCCSSK_BSS;
3084 }
3085 lto_write_data (&section_kind, 1);
3086}
3087
3088/* Write an IL symbol table to OB and return the number of symbols
3089 streamed into it. */
3090
3091static unsigned int
3092produce_symtab (struct output_block *ob)
3093{
3094 unsigned int streamed_symbols = 0;
3095 struct streamer_tree_cache_d *cache = ob->writer_cache;
3096 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, 0, NULL);
3097 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
3098 lto_symtab_encoder_iterator lsei;
3099
3100 lto_begin_section (section_name, false);
3101 free (ptr: section_name);
3102
3103 hash_set<const char *> seen;
3104
3105 /* Write the symbol table.
3106 First write everything defined and then all declarations.
3107 This is necessary to handle cases where we have duplicated symbols. */
3108 for (lsei = lsei_start (encoder);
3109 !lsei_end_p (lsei); lsei_next (lsei: &lsei))
3110 {
3111 symtab_node *node = lsei_node (lsei);
3112
3113 if (DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
3114 continue;
3115 write_symbol (cache, t: node->decl, seen: &seen, alias: false);
3116 ++streamed_symbols;
3117 }
3118 for (lsei = lsei_start (encoder);
3119 !lsei_end_p (lsei); lsei_next (lsei: &lsei))
3120 {
3121 symtab_node *node = lsei_node (lsei);
3122
3123 if (!DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
3124 continue;
3125 write_symbol (cache, t: node->decl, seen: &seen, alias: false);
3126 ++streamed_symbols;
3127 }
3128
3129 lto_end_section ();
3130
3131 return streamed_symbols;
3132}
3133
3134/* Symtab extension version. */
3135#define LTO_SYMTAB_EXTENSION_VERSION 1
3136
3137/* Write an IL symbol table extension to OB. PREVIOUS_STREAMED_SYMBOLS
3138 is the number of symbols produce_symtab wrote, used as a consistency check. */
3139
3140static void
3141produce_symtab_extension (struct output_block *ob,
3142 unsigned int previous_streamed_symbols)
3143{
3144 unsigned int streamed_symbols = 0;
3145 char *section_name = lto_get_section_name (LTO_section_symtab_extension,
3146 NULL, 0, NULL);
3147 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
3148 lto_symtab_encoder_iterator lsei;
3149
3150 lto_begin_section (section_name, false);
3151 free (ptr: section_name);
3152
3153 unsigned char version = LTO_SYMTAB_EXTENSION_VERSION;
3154 lto_write_data (&version, 1);
3155
3156 /* Write the symbol table.
3157 First write everything defined and then all declarations.
3158 This is necessary to handle cases where we have duplicated symbols. */
3159 for (lsei = lsei_start (encoder);
3160 !lsei_end_p (lsei); lsei_next (lsei: &lsei))
3161 {
3162 symtab_node *node = lsei_node (lsei);
3163
3164 if (DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
3165 continue;
3166 write_symbol_extension_info (t: node->decl);
3167 ++streamed_symbols;
3168 }
3169 for (lsei = lsei_start (encoder);
3170 !lsei_end_p (lsei); lsei_next (lsei: &lsei))
3171 {
3172 symtab_node *node = lsei_node (lsei);
3173
3174 if (!DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
3175 continue;
3176 write_symbol_extension_info (t: node->decl);
3177 ++streamed_symbols;
3178 }
3179
3180 gcc_assert (previous_streamed_symbols == streamed_symbols);
3181 lto_end_section ();
3182}
3183
3184
3185/* Init the streamer_mode_table for output, where we collect info on what
3186 machine_mode values have been streamed. */
3187void
3188lto_output_init_mode_table (void)
3189{
3190 memset (s: streamer_mode_table, c: '\0', n: MAX_MACHINE_MODE);
3191}
3192
3193
3194/* Write the mode table. */
3195static void
3196lto_write_mode_table (void)
3197{
3198 struct output_block *ob;
3199 ob = create_output_block (section_type: LTO_section_mode_table);
3200 bitpack_d bp = bitpack_create (s: ob->main_stream);
3201
3202 /* Ensure that for GET_MODE_INNER (m) != m we also have
3203 the inner mode marked. */
3204 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
3205 if (streamer_mode_table[i])
3206 {
3207 machine_mode m = (machine_mode) i;
3208 machine_mode inner_m = GET_MODE_INNER (m);
3209 if (inner_m != m)
3210 streamer_mode_table[(int) inner_m] = 1;
3211 }
3212
3213 /* Pack the mode_bits value within 5 bits (up to 31) in the beginning. */
3214 unsigned mode_bits = ceil_log2 (x: MAX_MACHINE_MODE);
3215 bp_pack_value (bp: &bp, val: mode_bits, nbits: 5);
3216
3217 /* First stream modes that have GET_MODE_INNER (m) == m,
3218 so that we can refer to them afterwards. */
3219 for (int pass = 0; pass < 2; pass++)
3220 for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
3221 if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
3222 {
3223 machine_mode m = (machine_mode) i;
3224 if ((GET_MODE_INNER (m) == m) ^ (pass == 0))
3225 continue;
3226 bp_pack_value (bp: &bp, val: m, nbits: mode_bits);
3227 bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
3228 bp_pack_poly_value (bp: &bp, val: GET_MODE_SIZE (mode: m), nbits: 16);
3229 bp_pack_poly_value (bp: &bp, val: GET_MODE_PRECISION (mode: m), nbits: 16);
3230 bp_pack_value (bp: &bp, GET_MODE_INNER (m), nbits: mode_bits);
3231 bp_pack_poly_value (bp: &bp, val: GET_MODE_NUNITS (mode: m), nbits: 16);
3232 switch (GET_MODE_CLASS (m))
3233 {
3234 case MODE_FRACT:
3235 case MODE_UFRACT:
3236 case MODE_ACCUM:
3237 case MODE_UACCUM:
3238 bp_pack_value (bp: &bp, GET_MODE_IBIT (m), nbits: 8);
3239 bp_pack_value (bp: &bp, GET_MODE_FBIT (m), nbits: 8);
3240 break;
3241 case MODE_FLOAT:
3242 case MODE_DECIMAL_FLOAT:
3243 bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
3244 break;
3245 default:
3246 break;
3247 }
3248 bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
3249 }
3250 bp_pack_value (bp: &bp, VOIDmode, nbits: mode_bits);
3251
3252 streamer_write_bitpack (bp: &bp);
3253
3254 char *section_name
3255 = lto_get_section_name (LTO_section_mode_table, NULL, 0, NULL);
3256 lto_begin_section (section_name, !flag_wpa);
3257 free (ptr: section_name);
3258
3259 /* The entire header stream is computed here. */
3260 struct lto_simple_header_with_strings header;
3261 memset (s: &header, c: 0, n: sizeof (header));
3262
3263 header.main_size = ob->main_stream->total_size;
3264 header.string_size = ob->string_stream->total_size;
3265 lto_write_data (&header, sizeof header);
3266
3267 /* Put all of the gimple and the string table out to the asm file as a
3268 block of text. */
3269 lto_write_stream (ob->main_stream);
3270 lto_write_stream (ob->string_stream);
3271
3272 lto_end_section ();
3273 destroy_output_block (ob);
3274}
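
/* Editor's sketch (not part of the original source) of one mode record in
   the bitpack written above:

     <mode> <class> <size> <precision> <inner mode> <nunits>
     [ibit/fbit for fixed-point classes, or the real format name for
      floating-point classes] <mode name>

   Modes with GET_MODE_INNER (m) == m are streamed in the first pass so that
   vector modes streamed in the second pass only refer to inner modes the
   reader has already seen; a VOIDmode entry terminates the table.  */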
3275
3276
3277/* This pass is run after all of the functions are serialized and all
3278 of the IPA passes have written their serialized forms. This pass
3279 causes the vector of all of the global decls and types used from
3280 this file to be written into a section that can then be read back in
3281 to recover these on the other side. */
3282
3283void
3284produce_asm_for_decls (void)
3285{
3286 struct lto_out_decl_state *out_state;
3287 struct lto_out_decl_state *fn_out_state;
3288 struct lto_decl_header header;
3289 char *section_name;
3290 struct output_block *ob;
3291 unsigned idx, num_fns;
3292 size_t decl_state_size;
3293 int32_t num_decl_states;
3294
3295 ob = create_output_block (section_type: LTO_section_decls);
3296
3297 memset (s: &header, c: 0, n: sizeof (struct lto_decl_header));
3298
3299 section_name = lto_get_section_name (LTO_section_decls, NULL, 0, NULL);
3300 lto_begin_section (section_name, !flag_wpa);
3301 free (ptr: section_name);
3302
3303 /* Make string 0 be a NULL string. */
3304 streamer_write_char_stream (obs: ob->string_stream, c: 0);
3305
3306 gcc_assert (!alias_pairs);
3307
3308 /* Get rid of the global decl state hash tables to save some memory. */
3309 out_state = lto_get_out_decl_state ();
3310 for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
3311 if (out_state->streams[i].tree_hash_table)
3312 {
3313 delete out_state->streams[i].tree_hash_table;
3314 out_state->streams[i].tree_hash_table = NULL;
3315 }
3316
3317 /* Write the global symbols. */
3318 if (streamer_dump_file)
3319 fprintf (stream: streamer_dump_file, format: "Outputting global stream\n");
3320 lto_output_decl_state_streams (ob, state: out_state);
3321 num_fns = lto_function_decl_states.length ();
3322 for (idx = 0; idx < num_fns; idx++)
3323 {
3324 fn_out_state =
3325 lto_function_decl_states[idx];
3326 if (streamer_dump_file)
3327 fprintf (stream: streamer_dump_file, format: "Outputting stream for %s\n",
3328 IDENTIFIER_POINTER
3329 (DECL_ASSEMBLER_NAME (fn_out_state->fn_decl)));
3330 lto_output_decl_state_streams (ob, state: fn_out_state);
3331 }
3332
3333 /* Currently not used. This field would allow us to preallocate
3334 the globals vector, so that it need not be resized as it is extended. */
3335 header.num_nodes = -1;
3336
3337 /* Compute the total size of all decl out states. */
3338 decl_state_size = sizeof (int32_t);
3339 decl_state_size += lto_out_decl_state_written_size (state: out_state);
3340 for (idx = 0; idx < num_fns; idx++)
3341 {
3342 fn_out_state =
3343 lto_function_decl_states[idx];
3344 decl_state_size += lto_out_decl_state_written_size (state: fn_out_state);
3345 }
3346 header.decl_state_size = decl_state_size;
3347
3348 header.main_size = ob->main_stream->total_size;
3349 header.string_size = ob->string_stream->total_size;
3350
3351 lto_write_data (&header, sizeof header);
3352
3353 /* Write the main out-decl state, followed by out-decl states of
3354 functions. */
3355 num_decl_states = num_fns + 1;
3356 lto_write_data (&num_decl_states, sizeof (num_decl_states));
3357 lto_output_decl_state_refs (ob, state: out_state);
3358 for (idx = 0; idx < num_fns; idx++)
3359 {
3360 fn_out_state = lto_function_decl_states[idx];
3361 lto_output_decl_state_refs (ob, state: fn_out_state);
3362 }
3363
3364 lto_write_stream (ob->main_stream);
3365 lto_write_stream (ob->string_stream);
3366
3367 lto_end_section ();
3368
3369 /* Write the symbol table. It is used by the linker to determine
3370 dependencies, and thus we can skip it for WPA. */
3371 if (!flag_wpa)
3372 {
3373 unsigned int streamed_symbols = produce_symtab (ob);
3374 produce_symtab_extension (ob, previous_streamed_symbols: streamed_symbols);
3375 }
3376
3377 /* Write command line opts. */
3378 lto_write_options ();
3379
3380 /* Deallocate memory and clean up. */
3381 for (idx = 0; idx < num_fns; idx++)
3382 {
3383 fn_out_state =
3384 lto_function_decl_states[idx];
3385 lto_delete_out_decl_state (fn_out_state);
3386 }
3387 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
3388 lto_function_decl_states.release ();
3389 destroy_output_block (ob);
3390 if (lto_stream_offload_p)
3391 lto_write_mode_table ();
3392}
3393
