1/* Read the GIMPLE representation from a file stream.
2
3 Copyright (C) 2009-2023 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7This file is part of GCC.
8
9GCC is free software; you can redistribute it and/or modify it under
10the terms of the GNU General Public License as published by the Free
11Software Foundation; either version 3, or (at your option) any later
12version.
13
14GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15WARRANTY; without even the implied warranty of MERCHANTABILITY or
16FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17for more details.
18
19You should have received a copy of the GNU General Public License
20along with GCC; see the file COPYING3. If not see
21<http://www.gnu.org/licenses/>. */
22
23#include "config.h"
24#include "system.h"
25#include "coretypes.h"
26#include "backend.h"
27#include "target.h"
28#include "rtl.h"
29#include "tree.h"
30#include "gimple.h"
31#include "cfghooks.h"
32#include "tree-pass.h"
33#include "ssa.h"
34#include "gimple-streamer.h"
35#include "toplev.h"
36#include "gimple-iterator.h"
37#include "tree-cfg.h"
38#include "tree-into-ssa.h"
39#include "tree-dfa.h"
40#include "tree-ssa.h"
41#include "except.h"
42#include "cgraph.h"
43#include "cfgloop.h"
44#include "debug.h"
45#include "alloc-pool.h"
47
48/* Allocator used to hold string slot entries for line map streaming. */
49static struct object_allocator<struct string_slot> *string_slot_allocator;
50
51/* The table to hold the file names. */
52static hash_table<string_slot_hasher> *file_name_hash_table;
53
/* This obstack holds file names used in locators.  Line map data structures
   point into it, so it needs to stay allocated for as long as the line maps
   exist.  */
59static struct obstack file_name_obstack;
60
/* Map a pair of nul-terminated strings, where the first one can be
   compared by pointer but the second cannot, to another string.  */
63struct string_pair_map
64{
65 const char *str1;
66 const char *str2;
67 const char *str3;
68 hashval_t hash;
69 bool prefix;
70};
71
72/* Allocator used to hold string pair map entries for line map streaming. */
73static struct object_allocator<struct string_pair_map>
74 *string_pair_map_allocator;
75
76struct string_pair_map_hasher : nofree_ptr_hash <string_pair_map>
77{
78 static inline hashval_t hash (const string_pair_map *);
79 static inline bool equal (const string_pair_map *, const string_pair_map *);
80};
81
82inline hashval_t
83string_pair_map_hasher::hash (const string_pair_map *spm)
84{
85 return spm->hash;
86}
87
88inline bool
89string_pair_map_hasher::equal (const string_pair_map *spm1,
90 const string_pair_map *spm2)
91{
92 return (spm1->hash == spm2->hash
93 && spm1->str1 == spm2->str1
94 && spm1->prefix == spm2->prefix
          && strcmp (spm1->str2, spm2->str2) == 0);
96}
97
/* The table that maps pairs of pathnames to a resulting pathname.  It is
   used both to map the pair of get_src_pwd () and a recorded source
   working directory to the relative path prefix leading from the current
   working directory to the recorded one, and to map such a relative
   prefix together with a relative file name to their concatenation.  */
104static hash_table<string_pair_map_hasher> *path_name_pair_hash_table;
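
/* Illustrative contents of the two entry kinds (hypothetical values):

     prefix == true    str1 = get_src_pwd ()         e.g. "/tmp/foo/bar"
                       str2 = recorded source pwd    e.g. "/tmp/baz/qux"
                       str3 = relative prefix        e.g. "../../baz/qux/"
     prefix == false   str1 = relative prefix        e.g. "../../baz/qux/"
                       str2 = relative file name     e.g. "a.c"
                       str3 = their concatenation    e.g. "../../baz/qux/a.c"  */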
105
106
/* Check that tag ACTUAL has one of the given values.  NTAGS is the
   number of valid tag values to check.  */
109
110void
111lto_tag_check_set (enum LTO_tags actual, int ntags, ...)
112{
113 va_list ap;
114 int i;
115
116 va_start (ap, ntags);
117 for (i = 0; i < ntags; i++)
118 if ((unsigned) actual == va_arg (ap, unsigned))
119 {
120 va_end (ap);
121 return;
122 }
123
124 va_end (ap);
125 internal_error ("bytecode stream: unexpected tag %s", lto_tag_name (actual));
126}
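
/* A typical use, sketched for illustration only:

     enum LTO_tags tag = streamer_read_record_start (ib);
     lto_tag_check_set (tag, 2, LTO_function, LTO_eh_table);

   accepts either of the two tags and reports a bytecode stream error for
   anything else.  */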
127
128
/* Read LENGTH bytes from input block IB into ADDR.  */
130
131void
132lto_input_data_block (class lto_input_block *ib, void *addr, size_t length)
133{
134 size_t i;
135 unsigned char *const buffer = (unsigned char *) addr;
136
137 for (i = 0; i < length; i++)
138 buffer[i] = streamer_read_uchar (ib);
139}
140
141/* Compute the relative path to get to DATA_WD (absolute directory name)
142 from CWD (another absolute directory name). E.g. for
143 DATA_WD of "/tmp/foo/bar" and CWD of "/tmp/baz/qux" return
144 "../../foo/bar". Returned string should be freed by the caller.
145 Return NULL if absolute file name needs to be used. */
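
/* For instance (an illustrative sketch, not part of the build):

     char *p = relative_path_prefix ("/tmp/foo/bar", "/tmp/baz/qux");
     // p is "../../foo/bar"; the caller frees it.
     free (p);
     p = relative_path_prefix ("/tmp/foo", "/tmp/foo");
     // Identical directories yield ".".
     free (p);  */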
146
147static char *
148relative_path_prefix (const char *data_wd, const char *cwd)
149{
150 const char *d = data_wd;
151 const char *c = cwd;
152#ifdef HAVE_DOS_BASED_FILE_SYSTEM
153 if (d[1] == ':')
154 {
155 if (!IS_DIR_SEPARATOR (d[2]))
156 return NULL;
157 if (c[0] == d[0] && c[1] == ':' && IS_DIR_SEPARATOR (c[2]))
158 {
159 c += 3;
160 d += 3;
161 }
162 else
163 return NULL;
164 }
165 else if (c[1] == ':')
166 return NULL;
167#endif
168 do
169 {
170 while (IS_DIR_SEPARATOR (*d))
171 d++;
172 while (IS_DIR_SEPARATOR (*c))
173 c++;
174 size_t i;
175 for (i = 0; c[i] && !IS_DIR_SEPARATOR (c[i]) && c[i] == d[i]; i++)
176 ;
177 if ((c[i] == '\0' || IS_DIR_SEPARATOR (c[i]))
178 && (d[i] == '\0' || IS_DIR_SEPARATOR (d[i])))
179 {
180 c += i;
181 d += i;
182 if (*c == '\0' || *d == '\0')
183 break;
184 }
185 else
186 break;
187 }
188 while (1);
189 size_t num_up = 0;
190 do
191 {
192 while (IS_DIR_SEPARATOR (*c))
193 c++;
194 if (*c == '\0')
195 break;
196 num_up++;
197 while (*c && !IS_DIR_SEPARATOR (*c))
198 c++;
199 }
200 while (1);
201 while (IS_DIR_SEPARATOR (*d))
202 d++;
  size_t len = strlen (d);
204 if (len == 0 && num_up == 0)
205 return xstrdup (".");
206 char *ret = XNEWVEC (char, num_up * 3 + len + 1);
207 char *p = ret;
208 for (; num_up; num_up--)
209 {
      const char dir_up[3] = { '.', '.', DIR_SEPARATOR };
      memcpy (p, dir_up, 3);
      p += 3;
    }
  memcpy (p, d, len + 1);
215 return ret;
216}
217
218/* Look up DATA_WD in hash table of relative prefixes. If found,
219 return relative path from CWD to DATA_WD from the hash table,
220 otherwise create it. */
221
222static const char *
223canon_relative_path_prefix (const char *data_wd, const char *cwd)
224{
225 if (!IS_ABSOLUTE_PATH (data_wd) || !IS_ABSOLUTE_PATH (cwd))
226 return NULL;
227
228 if (!path_name_pair_hash_table)
229 {
230 path_name_pair_hash_table
231 = new hash_table<string_pair_map_hasher> (37);
232 string_pair_map_allocator
233 = new object_allocator <struct string_pair_map>
234 ("line map string pair map hash");
235 }
236
237 inchash::hash h;
  h.add_ptr (cwd);
  h.merge_hash (htab_hash_string (data_wd));
  h.add_int (true);
241
242 string_pair_map s_slot;
243 s_slot.str1 = cwd;
244 s_slot.str2 = data_wd;
245 s_slot.str3 = NULL;
246 s_slot.hash = h.end ();
247 s_slot.prefix = true;
248
249 string_pair_map **slot
    = path_name_pair_hash_table->find_slot (&s_slot, INSERT);
251 if (*slot == NULL)
252 {
253 /* Compute relative path from cwd directory to data_wd directory.
254 E.g. if cwd is /tmp/foo/bar and data_wd is /tmp/baz/qux ,
255 it will return ../../baz/qux . */
256 char *relative_path = relative_path_prefix (data_wd, cwd);
257 const char *relative = relative_path ? relative_path : data_wd;
      size_t relative_len = strlen (relative);
259 gcc_assert (relative_len);
260
      size_t data_wd_len = strlen (data_wd);
262 bool add_separator = false;
263 if (!IS_DIR_SEPARATOR (relative[relative_len - 1]))
264 add_separator = true;
265
266 size_t len = relative_len + 1 + data_wd_len + 1 + add_separator;
267
268 char *saved_string = XOBNEWVEC (&file_name_obstack, char, len);
269 struct string_pair_map *new_slot
270 = string_pair_map_allocator->allocate ();
      memcpy (saved_string, data_wd, data_wd_len + 1);
      memcpy (saved_string + data_wd_len + 1, relative, relative_len);
273 if (add_separator)
274 saved_string[len - 2] = DIR_SEPARATOR;
275 saved_string[len - 1] = '\0';
276 new_slot->str1 = cwd;
277 new_slot->str2 = saved_string;
278 new_slot->str3 = saved_string + data_wd_len + 1;
279 if (relative_len == 1 && relative[0] == '.')
280 new_slot->str3 = NULL;
281 new_slot->hash = s_slot.hash;
282 new_slot->prefix = true;
283 *slot = new_slot;
      free (relative_path);
285 return new_slot->str3;
286 }
287 else
288 {
289 string_pair_map *old_slot = *slot;
290 return old_slot->str3;
291 }
292}
293
294/* Look up the pair of RELATIVE_PREFIX and STRING strings in a hash table.
295 If found, return the concatenation of those from the hash table,
296 otherwise concatenate them. */
297
298static const char *
299canon_relative_file_name (const char *relative_prefix, const char *string)
300{
301 inchash::hash h;
  h.add_ptr (relative_prefix);
  h.merge_hash (htab_hash_string (string));
304
305 string_pair_map s_slot;
306 s_slot.str1 = relative_prefix;
307 s_slot.str2 = string;
308 s_slot.str3 = NULL;
309 s_slot.hash = h.end ();
310 s_slot.prefix = false;
311
312 string_pair_map **slot
    = path_name_pair_hash_table->find_slot (&s_slot, INSERT);
314 if (*slot == NULL)
315 {
      size_t relative_prefix_len = strlen (relative_prefix);
      size_t string_len = strlen (string);
318 size_t len = relative_prefix_len + string_len + 1;
319
320 char *saved_string = XOBNEWVEC (&file_name_obstack, char, len);
321 struct string_pair_map *new_slot
322 = string_pair_map_allocator->allocate ();
      memcpy (saved_string, relative_prefix, relative_prefix_len);
      memcpy (saved_string + relative_prefix_len, string, string_len + 1);
325 new_slot->str1 = relative_prefix;
326 new_slot->str2 = saved_string + relative_prefix_len;
327 new_slot->str3 = saved_string;
328 new_slot->hash = s_slot.hash;
329 new_slot->prefix = false;
330 *slot = new_slot;
331 return new_slot->str3;
332 }
333 else
334 {
335 string_pair_map *old_slot = *slot;
336 return old_slot->str3;
337 }
338}
339
340/* Lookup STRING in file_name_hash_table. If found, return the existing
341 string, otherwise insert STRING as the canonical version.
342 If STRING is a relative pathname and RELATIVE_PREFIX is non-NULL, use
343 canon_relative_file_name instead. */
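
/* For example (illustrative values): canon_file_name ("../../baz/qux/", "a.c")
   returns the interned "../../baz/qux/a.c" via canon_relative_file_name,
   while canon_file_name (NULL, "/abs/dir/a.c") interns and returns
   "/abs/dir/a.c" itself.  */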
344
345static const char *
346canon_file_name (const char *relative_prefix, const char *string)
347{
348 if (relative_prefix && !IS_ABSOLUTE_PATH (string))
349 return canon_relative_file_name (relative_prefix, string);
350
351 string_slot **slot;
352 struct string_slot s_slot;
  size_t len = strlen (string);
354
355 s_slot.s = string;
356 s_slot.len = len;
357
  slot = file_name_hash_table->find_slot (&s_slot, INSERT);
359 if (*slot == NULL)
360 {
361 char *saved_string;
362 struct string_slot *new_slot;
363
364 saved_string = XOBNEWVEC (&file_name_obstack, char, len + 1);
365 new_slot = string_slot_allocator->allocate ();
      memcpy (saved_string, string, len + 1);
367 new_slot->s = saved_string;
368 new_slot->len = len;
369 *slot = new_slot;
370 return saved_string;
371 }
372 else
373 {
374 struct string_slot *old_slot = *slot;
375 return old_slot->s;
376 }
377}
378
379/* Pointer to currently alive instance of lto_location_cache. */
380
381lto_location_cache *lto_location_cache::current_cache;
382
383/* Sort locations in source order. Start with file from last application. */
384
385int
386lto_location_cache::cmp_loc (const void *pa, const void *pb)
387{
388 const cached_location *a = ((const cached_location *)pa);
389 const cached_location *b = ((const cached_location *)pb);
390 const char *current_file = current_cache->current_file;
391 int current_line = current_cache->current_line;
392
393 if (a->file == current_file && b->file != current_file)
394 return -1;
395 if (a->file != current_file && b->file == current_file)
396 return 1;
397 if (a->file == current_file && b->file == current_file)
398 {
399 if (a->line == current_line && b->line != current_line)
400 return -1;
401 if (a->line != current_line && b->line == current_line)
402 return 1;
403 }
404 if (a->file != b->file)
    return strcmp (a->file, b->file);
406 if (a->sysp != b->sysp)
407 return a->sysp ? 1 : -1;
408 if (a->line != b->line)
409 return a->line - b->line;
410 if (a->col != b->col)
411 return a->col - b->col;
412 if (a->discr != b->discr)
413 return a->discr - b->discr;
414 if ((a->block == NULL_TREE) != (b->block == NULL_TREE))
415 return a->block ? 1 : -1;
416 if (a->block)
417 {
418 if (BLOCK_NUMBER (a->block) < BLOCK_NUMBER (b->block))
419 return -1;
420 if (BLOCK_NUMBER (a->block) > BLOCK_NUMBER (b->block))
421 return 1;
422 }
423 return 0;
424}
425
426/* Apply all changes in location cache. Add locations into linemap and patch
427 trees. */
428
429bool
430lto_location_cache::apply_location_cache ()
431{
432 static const char *prev_file;
433 if (!loc_cache.length ())
434 return false;
435 if (loc_cache.length () > 1)
436 loc_cache.qsort (cmp_loc);
437
438 for (unsigned int i = 0; i < loc_cache.length (); i++)
439 {
440 struct cached_location loc = loc_cache[i];
441
442 if (current_file != loc.file)
443 linemap_add (line_table, prev_file ? LC_RENAME : LC_ENTER,
444 sysp: loc.sysp, to_file: loc.file, to_line: loc.line);
445 else if (current_line != loc.line)
446 {
447 int max = loc.col;
448
449 for (unsigned int j = i + 1; j < loc_cache.length (); j++)
450 if (loc.file != loc_cache[j].file
451 || loc.line != loc_cache[j].line)
452 break;
453 else if (max < loc_cache[j].col)
454 max = loc_cache[j].col;
455 linemap_line_start (set: line_table, to_line: loc.line, max_column_hint: max + 1);
456 }
457 gcc_assert (*loc.loc == BUILTINS_LOCATION + 1);
458 if (current_file != loc.file
459 || current_line != loc.line
460 || current_col != loc.col)
461 {
462 current_loc = linemap_position_for_column (line_table, loc.col);
463 if (loc.block)
464 current_loc = set_block (loc: current_loc, block: loc.block);
465 if (loc.discr)
466 current_loc = location_with_discriminator (current_loc, loc.discr);
467 }
468 else if (current_block != loc.block)
469 {
470 if (loc.block)
471 current_loc = set_block (loc: current_loc, block: loc.block);
472 else
473 current_loc = LOCATION_LOCUS (current_loc);
474 if (loc.discr)
475 current_loc = location_with_discriminator (current_loc, loc.discr);
476 }
477 else if (current_discr != loc.discr)
478 current_loc = location_with_discriminator (current_loc, loc.discr);
479 *loc.loc = current_loc;
480 current_line = loc.line;
481 prev_file = current_file = loc.file;
482 current_col = loc.col;
483 current_block = loc.block;
484 current_discr = loc.discr;
485 }
486 loc_cache.truncate (size: 0);
487 accepted_length = 0;
488 return true;
489}
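
/* Sketch of the intended calling protocol (illustrative only; the flag below
   is hypothetical, the real callers live in the LTO front end):

     ... stream in a candidate tree, queueing its location slots here ...
     if (candidate_was_merged_with_prevailing_tree)
       data_in->location_cache.revert_location_cache ();
     else
       data_in->location_cache.accept_location_cache ();
     ...
     data_in->location_cache.apply_location_cache ();  // resolve *LOC slots
 */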
490
491/* Tree merging did not succeed; mark all changes in the cache as accepted. */
492
493void
494lto_location_cache::accept_location_cache ()
495{
496 gcc_assert (current_cache == this);
497 accepted_length = loc_cache.length ();
498}
499
500/* Tree merging did succeed; throw away recent changes. */
501
502void
503lto_location_cache::revert_location_cache ()
504{
505 loc_cache.truncate (size: accepted_length);
506}
507
/* Read a location bitpack from bit pack BP and either update *LOC directly
   or add it to the location cache.  If IB is non-NULL, stream in a block
   afterwards.
   It is necessary to call apply_location_cache to get *LOC updated.  */
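
/* Sketch of the bitpack layout as decoded below:

     kind            : int in [0, RESERVED_LOCATION_COUNT + 1]; values below
                       RESERVED_LOCATION_COUNT are reserved locations
                       (optionally followed by a block when IB is non-NULL),
                       RESERVED_LOCATION_COUNT means "same file",
                       RESERVED_LOCATION_COUNT + 1 means the file changed
     line_change     : 1 bit
     column_change   : 1 bit
     discr_change    : 1 bit
     [file change]   : pwd_change bit, optional pwd string, file string,
                       sysp bit
     [line change]   : variable-length line number
     [column change] : variable-length column number
     [discr change]  : variable-length discriminator
     [IB non-NULL]   : block_change bit, optional block tree  */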
512
513void
514lto_location_cache::input_location_and_block (location_t *loc,
515 struct bitpack_d *bp,
516 class lto_input_block *ib,
517 class data_in *data_in)
518{
519 static const char *stream_file;
520 static int stream_line;
521 static int stream_col;
522 static bool stream_sysp;
523 static tree stream_block;
524 static unsigned stream_discr;
525 static const char *stream_relative_path_prefix;
526
527 gcc_assert (current_cache == this);
528
529 *loc = bp_unpack_int_in_range (bp, purpose: "location", min: 0,
530 max: RESERVED_LOCATION_COUNT + 1);
531
532 if (*loc < RESERVED_LOCATION_COUNT)
533 {
534 if (ib)
535 {
536 bool block_change = bp_unpack_value (bp, nbits: 1);
537 if (block_change)
538 stream_block = stream_read_tree (ib, data_in);
539 if (stream_block)
540 *loc = set_block (loc: *loc, block: stream_block);
541 }
542 return;
543 }
544
545 bool file_change = (*loc == RESERVED_LOCATION_COUNT + 1);
546 /* Keep value RESERVED_LOCATION_COUNT in *loc as linemap lookups will
547 ICE on it. */
548 *loc = RESERVED_LOCATION_COUNT;
549 bool line_change = bp_unpack_value (bp, nbits: 1);
550 bool column_change = bp_unpack_value (bp, nbits: 1);
551 bool discr_change = bp_unpack_value (bp, nbits: 1);
552
553 if (file_change)
554 {
555 bool pwd_change = bp_unpack_value (bp, nbits: 1);
556 if (pwd_change)
557 {
558 const char *pwd = bp_unpack_string (data_in, bp);
559 const char *src_pwd = get_src_pwd ();
560 if (strcmp (s1: pwd, s2: src_pwd) == 0)
561 stream_relative_path_prefix = NULL;
562 else
563 stream_relative_path_prefix
564 = canon_relative_path_prefix (data_wd: pwd, cwd: src_pwd);
565 }
566 stream_file = canon_file_name (relative_prefix: stream_relative_path_prefix,
567 string: bp_unpack_string (data_in, bp));
568 stream_sysp = bp_unpack_value (bp, nbits: 1);
569 }
570
571 if (line_change)
572 stream_line = bp_unpack_var_len_unsigned (bp);
573
574 if (column_change)
575 stream_col = bp_unpack_var_len_unsigned (bp);
576
577 if (discr_change)
578 stream_discr = bp_unpack_var_len_unsigned (bp);
579
580 tree block = NULL_TREE;
581 if (ib)
582 {
583 bool block_change = bp_unpack_value (bp, nbits: 1);
584 if (block_change)
585 stream_block = stream_read_tree (ib, data_in);
586 block = stream_block;
587 }
588
589 /* This optimization saves location cache operations during gimple
590 streaming. */
591
592 if (current_file == stream_file
593 && current_line == stream_line
594 && current_col == stream_col
595 && current_sysp == stream_sysp
596 && current_discr == stream_discr)
597 {
598 if (current_block == block)
599 *loc = current_loc;
600 else if (block)
601 *loc = set_block (loc: current_loc, block);
602 else
603 *loc = LOCATION_LOCUS (current_loc);
604 return;
605 }
606
607 struct cached_location entry
608 = {.file: stream_file, .loc: loc, .line: stream_line, .col: stream_col, .sysp: stream_sysp, .block: block, .discr: stream_discr};
609 loc_cache.safe_push (obj: entry);
610}
611
/* Read a location bitpack from bit pack BP and either update *LOC directly
   or add it to the location cache.
   It is necessary to call apply_location_cache to get *LOC updated.  */
615
616void
617lto_location_cache::input_location (location_t *loc, struct bitpack_d *bp,
618 class data_in *data_in)
619{
620 return input_location_and_block (loc, bp, NULL, data_in);
621}
622
/* Read a location bitpack from bit pack BP and either update *LOC directly
   or add it to the location cache of DATA_IN.
   It is necessary to call apply_location_cache to get *LOC updated.  */
626
627void
628lto_input_location (location_t *loc, struct bitpack_d *bp,
629 class data_in *data_in)
630{
631 data_in->location_cache.input_location (loc, bp, data_in);
632}
633
/* Read a reference to a tree node from DATA_IN using input block IB.
   TAG is the expected node that should be found in IB.  If TAG belongs
   to one of the indexable trees, expect to read a reference index to
   be looked up in one of the symbol tables; otherwise read the physical
   representation of the tree using stream_read_tree.  FN is the
   function scope for the read tree.  */
640
641tree
642lto_input_tree_ref (class lto_input_block *ib, class data_in *data_in,
643 struct function *fn, enum LTO_tags tag)
644{
645 unsigned HOST_WIDE_INT ix_u;
646 tree result = NULL_TREE;
647
648 if (tag == LTO_ssa_name_ref)
649 {
650 ix_u = streamer_read_uhwi (ib);
651 result = (*SSANAMES (fn))[ix_u];
652 }
653 else
654 {
655 gcc_checking_assert (tag == LTO_global_stream_ref);
656 ix_u = streamer_read_uhwi (ib);
657 result = (*data_in->file_data->current_decl_state
658 ->streams[LTO_DECL_STREAM])[ix_u];
659 }
660
661 gcc_assert (result);
662
663 return result;
664}
665
/* Read a VAR_DECL reference from IB for FILE_DATA.  */
667
668tree
669lto_input_var_decl_ref (lto_input_block *ib, lto_file_decl_data *file_data)
670{
671 unsigned int ix_u = streamer_read_uhwi (ib);
672 tree result = (*file_data->current_decl_state
673 ->streams[LTO_DECL_STREAM])[ix_u];
674 gcc_assert (VAR_P (result));
675 return result;
676}
677
/* Read a FUNCTION_DECL reference from IB for FILE_DATA.  */
679
680tree
681lto_input_fn_decl_ref (lto_input_block *ib, lto_file_decl_data *file_data)
682{
683 unsigned int ix_u = streamer_read_uhwi (ib);
684 tree result = (*file_data->current_decl_state
685 ->streams[LTO_DECL_STREAM])[ix_u];
686 gcc_assert (TREE_CODE (result) == FUNCTION_DECL);
687 return result;
688}
689
690
/* Read and return a doubly-linked list of catch handlers from input
   block IB, using descriptors in DATA_IN.  */
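
/* The list is streamed as a sequence of LTO_eh_catch records, each carrying
   the type_list, filter_list and label trees of one handler, and is
   terminated by an LTO_null tag.  */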
693
694static struct eh_catch_d *
695lto_input_eh_catch_list (class lto_input_block *ib, class data_in *data_in,
696 eh_catch *last_p)
697{
698 eh_catch first;
699 enum LTO_tags tag;
700
701 *last_p = first = NULL;
702 tag = streamer_read_record_start (ib);
703 while (tag)
704 {
705 tree list;
706 eh_catch n;
707
708 lto_tag_check_range (actual: tag, tag1: LTO_eh_catch, tag2: LTO_eh_catch);
709
710 /* Read the catch node. */
711 n = ggc_cleared_alloc<eh_catch_d> ();
712 n->type_list = stream_read_tree (ib, data_in);
713 n->filter_list = stream_read_tree (ib, data_in);
714 n->label = stream_read_tree (ib, data_in);
715
716 /* Register all the types in N->FILTER_LIST. */
717 for (list = n->filter_list; list; list = TREE_CHAIN (list))
718 add_type_for_runtime (TREE_VALUE (list));
719
720 /* Chain N to the end of the list. */
721 if (*last_p)
722 (*last_p)->next_catch = n;
723 n->prev_catch = *last_p;
724 *last_p = n;
725
726 /* Set the head of the list the first time through the loop. */
727 if (first == NULL)
728 first = n;
729
730 tag = streamer_read_record_start (ib);
731 }
732
733 return first;
734}
735
736
737/* Read and return EH region IX from input block IB, using descriptors
738 in DATA_IN. */
739
740static eh_region
741input_eh_region (class lto_input_block *ib, class data_in *data_in, int ix)
742{
743 enum LTO_tags tag;
744 eh_region r;
745
746 /* Read the region header. */
747 tag = streamer_read_record_start (ib);
748 if (tag == LTO_null)
749 return NULL;
750
751 r = ggc_cleared_alloc<eh_region_d> ();
752 r->index = streamer_read_hwi (ib);
753
754 gcc_assert (r->index == ix);
755
756 /* Read all the region pointers as region numbers. We'll fix up
757 the pointers once the whole array has been read. */
758 r->outer = (eh_region) (intptr_t) streamer_read_hwi (ib);
759 r->inner = (eh_region) (intptr_t) streamer_read_hwi (ib);
760 r->next_peer = (eh_region) (intptr_t) streamer_read_hwi (ib);
761
762 switch (tag)
763 {
764 case LTO_ert_cleanup:
765 r->type = ERT_CLEANUP;
766 break;
767
768 case LTO_ert_try:
769 {
770 struct eh_catch_d *last_catch;
771 r->type = ERT_TRY;
772 r->u.eh_try.first_catch = lto_input_eh_catch_list (ib, data_in,
773 last_p: &last_catch);
774 r->u.eh_try.last_catch = last_catch;
775 break;
776 }
777
778 case LTO_ert_allowed_exceptions:
779 {
780 tree l;
781
782 r->type = ERT_ALLOWED_EXCEPTIONS;
783 r->u.allowed.type_list = stream_read_tree (ib, data_in);
784 r->u.allowed.label = stream_read_tree (ib, data_in);
785 r->u.allowed.filter = streamer_read_uhwi (ib);
786
787 for (l = r->u.allowed.type_list; l ; l = TREE_CHAIN (l))
788 add_type_for_runtime (TREE_VALUE (l));
789 }
790 break;
791
792 case LTO_ert_must_not_throw:
793 {
794 r->type = ERT_MUST_NOT_THROW;
795 r->u.must_not_throw.failure_decl = stream_read_tree (ib, data_in);
796 bitpack_d bp = streamer_read_bitpack (ib);
797 stream_input_location (&r->u.must_not_throw.failure_loc,
798 &bp, data_in);
799 }
800 break;
801
802 default:
803 gcc_unreachable ();
804 }
805
806 r->landing_pads = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);
807
808 return r;
809}
810
811
812/* Read and return EH landing pad IX from input block IB, using descriptors
813 in DATA_IN. */
814
815static eh_landing_pad
816input_eh_lp (class lto_input_block *ib, class data_in *data_in, int ix)
817{
818 enum LTO_tags tag;
819 eh_landing_pad lp;
820
821 /* Read the landing pad header. */
822 tag = streamer_read_record_start (ib);
823 if (tag == LTO_null)
824 return NULL;
825
826 lto_tag_check_range (actual: tag, tag1: LTO_eh_landing_pad, tag2: LTO_eh_landing_pad);
827
828 lp = ggc_cleared_alloc<eh_landing_pad_d> ();
829 lp->index = streamer_read_hwi (ib);
830 gcc_assert (lp->index == ix);
831 lp->next_lp = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);
832 lp->region = (eh_region) (intptr_t) streamer_read_hwi (ib);
833 lp->post_landing_pad = stream_read_tree (ib, data_in);
834
835 return lp;
836}
837
838
839/* After reading the EH regions, pointers to peer and children regions
840 are region numbers. This converts all these region numbers into
841 real pointers into the rematerialized regions for FN. ROOT_REGION
842 is the region number for the root EH region in FN. */
843
844static void
845fixup_eh_region_pointers (struct function *fn, HOST_WIDE_INT root_region)
846{
847 unsigned i;
848 vec<eh_region, va_gc> *eh_array = fn->eh->region_array;
849 vec<eh_landing_pad, va_gc> *lp_array = fn->eh->lp_array;
850 eh_region r;
851 eh_landing_pad lp;
852
853 gcc_assert (eh_array && lp_array);
854
855 gcc_assert (root_region >= 0);
856 fn->eh->region_tree = (*eh_array)[root_region];
857
858#define FIXUP_EH_REGION(r) (r) = (*eh_array)[(HOST_WIDE_INT) (intptr_t) (r)]
859#define FIXUP_EH_LP(p) (p) = (*lp_array)[(HOST_WIDE_INT) (intptr_t) (p)]
860
861 /* Convert all the index numbers stored in pointer fields into
862 pointers to the corresponding slots in the EH region array. */
863 FOR_EACH_VEC_ELT (*eh_array, i, r)
864 {
865 /* The array may contain NULL regions. */
866 if (r == NULL)
867 continue;
868
869 gcc_assert (i == (unsigned) r->index);
870 FIXUP_EH_REGION (r->outer);
871 FIXUP_EH_REGION (r->inner);
872 FIXUP_EH_REGION (r->next_peer);
873 FIXUP_EH_LP (r->landing_pads);
874 }
875
876 /* Convert all the index numbers stored in pointer fields into
877 pointers to the corresponding slots in the EH landing pad array. */
878 FOR_EACH_VEC_ELT (*lp_array, i, lp)
879 {
880 /* The array may contain NULL landing pads. */
881 if (lp == NULL)
882 continue;
883
884 gcc_assert (i == (unsigned) lp->index);
885 FIXUP_EH_LP (lp->next_lp);
886 FIXUP_EH_REGION (lp->region);
887 }
888
889#undef FIXUP_EH_REGION
890#undef FIXUP_EH_LP
891}
892
893
894/* Initialize EH support. */
895
896void
897lto_init_eh (void)
898{
899 static bool eh_initialized_p = false;
900
901 if (eh_initialized_p)
902 return;
903
904 /* Contrary to most other FEs, we only initialize EH support when at
905 least one of the files in the set contains exception regions in
906 it. Since this happens much later than the call to init_eh in
907 lang_dependent_init, we have to set flag_exceptions and call
908 init_eh again to initialize the EH tables. */
909 flag_exceptions = 1;
910 init_eh ();
911
912 eh_initialized_p = true;
913}
914
915
916/* Read the exception table for FN from IB using the data descriptors
917 in DATA_IN. */
918
919static void
920input_eh_regions (class lto_input_block *ib, class data_in *data_in,
921 struct function *fn)
922{
923 HOST_WIDE_INT i, root_region, len;
924 enum LTO_tags tag;
925
926 tag = streamer_read_record_start (ib);
927 if (tag == LTO_null)
928 return;
929
930 lto_tag_check_range (actual: tag, tag1: LTO_eh_table, tag2: LTO_eh_table);
931
932 gcc_assert (fn->eh);
933
934 root_region = streamer_read_hwi (ib);
935 gcc_assert (root_region == (int) root_region);
936
937 /* Read the EH region array. */
938 len = streamer_read_hwi (ib);
939 gcc_assert (len == (int) len);
940 if (len > 0)
941 {
942 vec_safe_grow_cleared (v&: fn->eh->region_array, len, exact: true);
943 for (i = 0; i < len; i++)
944 {
945 eh_region r = input_eh_region (ib, data_in, ix: i);
946 (*fn->eh->region_array)[i] = r;
947 }
948 }
949
950 /* Read the landing pads. */
951 len = streamer_read_hwi (ib);
952 gcc_assert (len == (int) len);
953 if (len > 0)
954 {
955 vec_safe_grow_cleared (v&: fn->eh->lp_array, len, exact: true);
956 for (i = 0; i < len; i++)
957 {
958 eh_landing_pad lp = input_eh_lp (ib, data_in, ix: i);
959 (*fn->eh->lp_array)[i] = lp;
960 }
961 }
962
963 /* Read the runtime type data. */
964 len = streamer_read_hwi (ib);
965 gcc_assert (len == (int) len);
966 if (len > 0)
967 {
968 vec_safe_grow_cleared (v&: fn->eh->ttype_data, len, exact: true);
969 for (i = 0; i < len; i++)
970 {
971 tree ttype = stream_read_tree (ib, data_in);
972 (*fn->eh->ttype_data)[i] = ttype;
973 }
974 }
975
976 /* Read the table of action chains. */
977 len = streamer_read_hwi (ib);
978 gcc_assert (len == (int) len);
979 if (len > 0)
980 {
981 if (targetm.arm_eabi_unwinder)
982 {
983 vec_safe_grow_cleared (v&: fn->eh->ehspec_data.arm_eabi, len, exact: true);
984 for (i = 0; i < len; i++)
985 {
986 tree t = stream_read_tree (ib, data_in);
987 (*fn->eh->ehspec_data.arm_eabi)[i] = t;
988 }
989 }
990 else
991 {
992 vec_safe_grow_cleared (v&: fn->eh->ehspec_data.other, len, exact: true);
993 for (i = 0; i < len; i++)
994 {
995 uchar c = streamer_read_uchar (ib);
996 (*fn->eh->ehspec_data.other)[i] = c;
997 }
998 }
999 }
1000
1001 /* Reconstruct the EH region tree by fixing up the peer/children
1002 pointers. */
1003 fixup_eh_region_pointers (fn, root_region);
1004
1005 tag = streamer_read_record_start (ib);
1006 lto_tag_check_range (actual: tag, tag1: LTO_null, tag2: LTO_null);
1007}
1008
1009
1010/* Make a new basic block with index INDEX in function FN. */
1011
1012static basic_block
1013make_new_block (struct function *fn, unsigned int index)
1014{
1015 basic_block bb = alloc_block ();
1016 bb->index = index;
1017 SET_BASIC_BLOCK_FOR_FN (fn, index, bb);
1018 n_basic_blocks_for_fn (fn)++;
1019 return bb;
1020}
1021
1022
1023/* Read the CFG for function FN from input block IB. */
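
/* The CFG is streamed roughly as follows (matching the reads below):
     - the profile status enum and the basic block count;
     - per-block records: block index (a -1 index terminates the list),
       edge count, and for each edge a bitpack with the destination index
       and flags, followed by the goto locus and the edge probability;
     - the block chain as a -1 terminated list of block indices;
     - the loop tree: the number of loops, then for each loop its header
       block index (-1 for a removed loop) and the copy_loop_info data.  */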
1024
1025static void
1026input_cfg (class lto_input_block *ib, class data_in *data_in,
1027 struct function *fn)
1028{
1029 unsigned int bb_count;
1030 basic_block p_bb;
1031 unsigned int i;
1032 int index;
1033 bool full_profile = false;
1034
1035 init_empty_tree_cfg_for_function (fn);
1036
1037 profile_status_for_fn (fn) = streamer_read_enum (ib, profile_status_d,
1038 PROFILE_LAST);
1039
1040 bb_count = streamer_read_uhwi (ib);
1041
1042 last_basic_block_for_fn (fn) = bb_count;
1043 if (bb_count > basic_block_info_for_fn (fn)->length ())
1044 vec_safe_grow_cleared (basic_block_info_for_fn (fn), len: bb_count, exact: true);
1045
1046 if (bb_count > label_to_block_map_for_fn (fn)->length ())
1047 vec_safe_grow_cleared (label_to_block_map_for_fn (fn), len: bb_count, exact: true);
1048
1049 index = streamer_read_hwi (ib);
1050 while (index != -1)
1051 {
1052 basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
1053 unsigned int edge_count;
1054
1055 if (bb == NULL)
1056 bb = make_new_block (fn, index);
1057
1058 edge_count = streamer_read_uhwi (ib);
1059
1060 /* Connect up the CFG. */
1061 for (i = 0; i < edge_count; i++)
1062 {
1063 bitpack_d bp = streamer_read_bitpack (ib);
1064 unsigned int dest_index = bp_unpack_var_len_unsigned (&bp);
1065 unsigned int edge_flags = bp_unpack_var_len_unsigned (&bp);
1066 basic_block dest = BASIC_BLOCK_FOR_FN (fn, dest_index);
1067
1068 if (dest == NULL)
1069 dest = make_new_block (fn, index: dest_index);
1070
1071 edge e = make_edge (bb, dest, edge_flags);
1072 data_in->location_cache.input_location_and_block (loc: &e->goto_locus,
1073 bp: &bp, ib, data_in);
1074 e->probability = profile_probability::stream_in (ib);
1075 if (!e->probability.initialized_p ())
1076 full_profile = false;
1077
1078 }
1079
1080 index = streamer_read_hwi (ib);
1081 }
1082
1083 p_bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
1084 index = streamer_read_hwi (ib);
1085 while (index != -1)
1086 {
1087 basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
1088 bb->prev_bb = p_bb;
1089 p_bb->next_bb = bb;
1090 p_bb = bb;
1091 index = streamer_read_hwi (ib);
1092 }
1093
1094 /* ??? The cfgloop interface is tied to cfun. */
1095 gcc_assert (cfun == fn);
1096
1097 /* Input the loop tree. */
1098 unsigned n_loops = streamer_read_uhwi (ib);
1099 if (n_loops == 0)
1100 return;
1101
1102 struct loops *loops = ggc_cleared_alloc<struct loops> ();
1103 init_loops_structure (fn, loops, n_loops);
1104 set_loops_for_fn (fn, loops);
1105
1106 /* Input each loop and associate it with its loop header so
1107 flow_loops_find can rebuild the loop tree. */
1108 for (unsigned i = 1; i < n_loops; ++i)
1109 {
1110 int header_index = streamer_read_hwi (ib);
1111 if (header_index == -1)
1112 {
1113 loops->larray->quick_push (NULL);
1114 continue;
1115 }
1116
1117 class loop *loop = alloc_loop ();
1118 loop->header = BASIC_BLOCK_FOR_FN (fn, header_index);
1119 loop->header->loop_father = loop;
1120
1121 /* Read everything copy_loop_info copies. */
1122 loop->estimate_state = streamer_read_enum (ib, loop_estimation, EST_LAST);
1123 loop->any_upper_bound = streamer_read_hwi (ib);
1124 if (loop->any_upper_bound)
1125 loop->nb_iterations_upper_bound
1126 = bound_wide_int::from (x: streamer_read_widest_int (ib), sgn: SIGNED);
1127 loop->any_likely_upper_bound = streamer_read_hwi (ib);
1128 if (loop->any_likely_upper_bound)
1129 loop->nb_iterations_likely_upper_bound
1130 = bound_wide_int::from (x: streamer_read_widest_int (ib), sgn: SIGNED);
1131 loop->any_estimate = streamer_read_hwi (ib);
1132 if (loop->any_estimate)
1133 loop->nb_iterations_estimate
1134 = bound_wide_int::from (x: streamer_read_widest_int (ib), sgn: SIGNED);
1135
1136 /* Read OMP SIMD related info. */
1137 loop->safelen = streamer_read_hwi (ib);
1138 loop->unroll = streamer_read_hwi (ib);
1139 loop->owned_clique = streamer_read_hwi (ib);
1140 loop->dont_vectorize = streamer_read_hwi (ib);
1141 loop->force_vectorize = streamer_read_hwi (ib);
1142 loop->finite_p = streamer_read_hwi (ib);
1143 loop->simduid = stream_read_tree (ib, data_in);
1144
1145 place_new_loop (fn, loop);
1146
      /* flow_loops_find doesn't like loops that are not in the tree, so
         hook them all up as siblings of the tree root temporarily.  */
1149 flow_loop_tree_node_add (loops->tree_root, loop);
1150 }
1151
1152 /* Rebuild the loop tree. */
1153 flow_loops_find (loops);
1154 cfun->cfg->full_profile = full_profile;
1155}
1156
1157
1158/* Read the SSA names array for function FN from DATA_IN using input
1159 block IB. */
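
/* The array is streamed as its length followed by (index, default-def byte,
   SSA_NAME_VAR tree) entries; an index of zero terminates the list and
   freed name slots are simply skipped.  */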
1160
1161static void
1162input_ssa_names (class lto_input_block *ib, class data_in *data_in,
1163 struct function *fn)
1164{
1165 unsigned int i, size;
1166
1167 size = streamer_read_uhwi (ib);
1168 init_tree_ssa (fn, size);
1169 cfun->gimple_df->in_ssa_p = true;
1170 init_ssa_operands (fn);
1171
1172 i = streamer_read_uhwi (ib);
1173 while (i)
1174 {
1175 tree ssa_name, name;
1176 bool is_default_def;
1177
1178 /* Skip over the elements that had been freed. */
1179 while (SSANAMES (fn)->length () < i)
1180 SSANAMES (fn)->quick_push (NULL_TREE);
1181
1182 is_default_def = (streamer_read_uchar (ib) != 0);
1183 name = stream_read_tree (ib, data_in);
1184 ssa_name = make_ssa_name_fn (fn, name, NULL);
1185
1186 if (is_default_def)
1187 {
1188 set_ssa_default_def (cfun, SSA_NAME_VAR (ssa_name), ssa_name);
1189 SSA_NAME_DEF_STMT (ssa_name) = gimple_build_nop ();
1190 }
1191
1192 i = streamer_read_uhwi (ib);
1193 }
1194}
1195
1196
1197/* Go through all NODE edges and fixup call_stmt pointers
1198 so they point to STMTS. */
1199
1200static void
1201fixup_call_stmt_edges_1 (struct cgraph_node *node, gimple **stmts,
1202 struct function *fn)
1203{
1204#define STMT_UID_NOT_IN_RANGE(uid) \
1205 (gimple_stmt_max_uid (fn) < uid || uid == 0)
1206
1207 struct cgraph_edge *cedge;
1208 struct ipa_ref *ref = NULL;
1209 unsigned int i;
1210
1211 for (cedge = node->callees; cedge; cedge = cedge->next_callee)
1212 {
1213 if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
1214 fatal_error (input_location,
1215 "Cgraph edge statement index out of range");
1216 cedge->call_stmt = as_a <gcall *> (p: stmts[cedge->lto_stmt_uid - 1]);
1217 cedge->lto_stmt_uid = 0;
1218 if (!cedge->call_stmt)
1219 fatal_error (input_location,
1220 "Cgraph edge statement index not found");
1221 }
1222 for (cedge = node->indirect_calls; cedge; cedge = cedge->next_callee)
1223 {
1224 if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
1225 fatal_error (input_location,
1226 "Cgraph edge statement index out of range");
1227 cedge->call_stmt = as_a <gcall *> (p: stmts[cedge->lto_stmt_uid - 1]);
1228 cedge->lto_stmt_uid = 0;
1229 if (!cedge->call_stmt)
1230 fatal_error (input_location, "Cgraph edge statement index not found");
1231 }
1232 for (i = 0; node->iterate_reference (i, ref); i++)
1233 if (ref->lto_stmt_uid)
1234 {
1235 if (STMT_UID_NOT_IN_RANGE (ref->lto_stmt_uid))
1236 fatal_error (input_location,
1237 "Reference statement index out of range");
1238 ref->stmt = stmts[ref->lto_stmt_uid - 1];
1239 ref->lto_stmt_uid = 0;
1240 if (!ref->stmt)
1241 fatal_error (input_location, "Reference statement index not found");
1242 }
1243}
1244
1245
1246/* Fixup call_stmt pointers in NODE and all clones. */
1247
1248static void
1249fixup_call_stmt_edges (struct cgraph_node *orig, gimple **stmts)
1250{
1251 struct cgraph_node *node;
1252 struct function *fn;
1253
1254 while (orig->clone_of)
1255 orig = orig->clone_of;
1256 fn = DECL_STRUCT_FUNCTION (orig->decl);
1257
1258 if (!orig->thunk)
1259 fixup_call_stmt_edges_1 (node: orig, stmts, fn);
1260 if (orig->clones)
1261 for (node = orig->clones; node != orig;)
1262 {
1263 if (!node->thunk)
1264 fixup_call_stmt_edges_1 (node, stmts, fn);
1265 if (node->clones)
1266 node = node->clones;
1267 else if (node->next_sibling_clone)
1268 node = node->next_sibling_clone;
1269 else
1270 {
1271 while (node != orig && !node->next_sibling_clone)
1272 node = node->clone_of;
1273 if (node != orig)
1274 node = node->next_sibling_clone;
1275 }
1276 }
1277}
1278
1279
1280/* Input the base body of struct function FN from DATA_IN
1281 using input block IB. */
1282
1283static void
1284input_struct_function_base (struct function *fn, class data_in *data_in,
1285 class lto_input_block *ib)
1286{
1287 struct bitpack_d bp;
1288 int len;
1289
1290 /* Read the static chain and non-local goto save area. */
1291 fn->static_chain_decl = stream_read_tree (ib, data_in);
1292 fn->nonlocal_goto_save_area = stream_read_tree (ib, data_in);
1293
1294 /* Read all the local symbols. */
1295 len = streamer_read_hwi (ib);
1296 if (len > 0)
1297 {
1298 int i;
1299 vec_safe_grow_cleared (v&: fn->local_decls, len, exact: true);
1300 for (i = 0; i < len; i++)
1301 {
1302 tree t = stream_read_tree (ib, data_in);
1303 (*fn->local_decls)[i] = t;
1304 }
1305 }
1306
1307 /* Input the current IL state of the function. */
1308 fn->curr_properties = streamer_read_uhwi (ib);
1309
1310 /* Read all the attributes for FN. */
1311 bp = streamer_read_bitpack (ib);
1312 fn->is_thunk = bp_unpack_value (bp: &bp, nbits: 1);
1313 fn->has_local_explicit_reg_vars = bp_unpack_value (bp: &bp, nbits: 1);
1314 fn->returns_pcc_struct = bp_unpack_value (bp: &bp, nbits: 1);
1315 fn->returns_struct = bp_unpack_value (bp: &bp, nbits: 1);
1316 fn->can_throw_non_call_exceptions = bp_unpack_value (bp: &bp, nbits: 1);
1317 fn->can_delete_dead_exceptions = bp_unpack_value (bp: &bp, nbits: 1);
1318 fn->always_inline_functions_inlined = bp_unpack_value (bp: &bp, nbits: 1);
1319 fn->after_inlining = bp_unpack_value (bp: &bp, nbits: 1);
1320 fn->stdarg = bp_unpack_value (bp: &bp, nbits: 1);
1321 fn->has_nonlocal_label = bp_unpack_value (bp: &bp, nbits: 1);
1322 fn->has_forced_label_in_static = bp_unpack_value (bp: &bp, nbits: 1);
1323 fn->calls_alloca = bp_unpack_value (bp: &bp, nbits: 1);
1324 fn->calls_setjmp = bp_unpack_value (bp: &bp, nbits: 1);
1325 fn->calls_eh_return = bp_unpack_value (bp: &bp, nbits: 1);
1326 fn->has_force_vectorize_loops = bp_unpack_value (bp: &bp, nbits: 1);
1327 fn->has_simduid_loops = bp_unpack_value (bp: &bp, nbits: 1);
1328 fn->assume_function = bp_unpack_value (bp: &bp, nbits: 1);
1329 fn->va_list_fpr_size = bp_unpack_value (bp: &bp, nbits: 8);
1330 fn->va_list_gpr_size = bp_unpack_value (bp: &bp, nbits: 8);
1331 fn->last_clique = bp_unpack_value (bp: &bp, nbits: sizeof (short) * 8);
1332
1333 /* Input the function start and end loci. */
1334 stream_input_location (&fn->function_start_locus, &bp, data_in);
1335 stream_input_location (&fn->function_end_locus, &bp, data_in);
1336
1337 /* Restore the instance discriminators if present. */
1338 int instance_number = bp_unpack_value (bp: &bp, nbits: 1);
1339 if (instance_number)
1340 {
1341 instance_number = bp_unpack_value (bp: &bp, nbits: sizeof (int) * CHAR_BIT);
1342 maybe_create_decl_to_instance_map ()->put (k: fn->decl, v: instance_number);
1343 }
1344}
1345
1346/* Read a chain of tree nodes from input block IB. DATA_IN contains
1347 tables and descriptors for the file being read. */
1348
1349static tree
1350streamer_read_chain (class lto_input_block *ib, class data_in *data_in)
1351{
1352 tree first, prev, curr;
1353
1354 /* The chain is written as NULL terminated list of trees. */
1355 first = prev = NULL_TREE;
1356 do
1357 {
1358 curr = stream_read_tree (ib, data_in);
1359 if (prev)
1360 TREE_CHAIN (prev) = curr;
1361 else
1362 first = curr;
1363
1364 prev = curr;
1365 }
1366 while (curr);
1367
1368 return first;
1369}
1370
1371/* Read the body of function FN_DECL from DATA_IN using input block IB. */
1372
1373static void
1374input_function (tree fn_decl, class data_in *data_in,
1375 class lto_input_block *ib, class lto_input_block *ib_cfg,
1376 cgraph_node *node)
1377{
1378 struct function *fn;
1379 enum LTO_tags tag;
1380 gimple **stmts;
1381 basic_block bb;
1382
1383 tag = streamer_read_record_start (ib);
1384 lto_tag_check (actual: tag, expected: LTO_function);
1385
1386 /* Read decls for parameters and args. */
1387 DECL_RESULT (fn_decl) = stream_read_tree (ib, data_in);
1388 DECL_ARGUMENTS (fn_decl) = streamer_read_chain (ib, data_in);
1389
1390 /* Read debug args if available. */
1391 unsigned n_debugargs = streamer_read_uhwi (ib);
1392 if (n_debugargs)
1393 {
1394 vec<tree, va_gc> **debugargs = decl_debug_args_insert (fn_decl);
1395 vec_safe_grow (v&: *debugargs, len: n_debugargs, exact: true);
1396 for (unsigned i = 0; i < n_debugargs; ++i)
1397 (**debugargs)[i] = stream_read_tree (ib, data_in);
1398 }
1399
1400 /* Read the tree of lexical scopes for the function. */
1401 DECL_INITIAL (fn_decl) = stream_read_tree (ib, data_in);
1402 unsigned block_leaf_count = streamer_read_uhwi (ib);
1403 while (block_leaf_count--)
1404 stream_read_tree (ib, data_in);
1405
1406 if (!streamer_read_uhwi (ib))
1407 return;
1408
1409 push_struct_function (fndecl: fn_decl);
1410 fn = DECL_STRUCT_FUNCTION (fn_decl);
1411
1412 gimple_register_cfg_hooks ();
1413
1414 input_struct_function_base (fn, data_in, ib);
1415 input_cfg (ib: ib_cfg, data_in, fn);
1416
1417 /* Read all the SSA names. */
1418 input_ssa_names (ib, data_in, fn);
1419
1420 /* Read the exception handling regions in the function. */
1421 input_eh_regions (ib, data_in, fn);
1422
1423 gcc_assert (DECL_INITIAL (fn_decl));
1424 DECL_SAVED_TREE (fn_decl) = NULL_TREE;
1425
1426 /* Read all the basic blocks. */
1427 tag = streamer_read_record_start (ib);
1428 while (tag)
1429 {
1430 input_bb (ib, tag, data_in, fn,
1431 node->count_materialization_scale);
1432 tag = streamer_read_record_start (ib);
1433 }
1434
1435 /* Finalize gimple_location/gimple_block of stmts and phis. */
1436 data_in->location_cache.apply_location_cache ();
1437
1438 /* Fix up the call statements that are mentioned in the callgraph
1439 edges. */
1440 set_gimple_stmt_max_uid (cfun, maxid: 0);
1441 FOR_ALL_BB_FN (bb, cfun)
1442 {
1443 gimple_stmt_iterator gsi;
1444 for (gsi = gsi_start_phis (bb); !gsi_end_p (i: gsi); gsi_next (i: &gsi))
1445 {
1446 gimple *stmt = gsi_stmt (i: gsi);
1447 gimple_set_uid (g: stmt, uid: inc_gimple_stmt_max_uid (cfun));
1448 }
1449 for (gsi = gsi_start_bb (bb); !gsi_end_p (i: gsi); gsi_next (i: &gsi))
1450 {
1451 gimple *stmt = gsi_stmt (i: gsi);
1452 gimple_set_uid (g: stmt, uid: inc_gimple_stmt_max_uid (cfun));
1453 }
1454 }
1455 stmts = (gimple **) xcalloc (gimple_stmt_max_uid (fn), sizeof (gimple *));
1456 FOR_ALL_BB_FN (bb, cfun)
1457 {
1458 gimple_stmt_iterator bsi = gsi_start_phis (bb);
1459 while (!gsi_end_p (i: bsi))
1460 {
1461 gimple *stmt = gsi_stmt (i: bsi);
1462 gsi_next (i: &bsi);
1463 stmts[gimple_uid (g: stmt)] = stmt;
1464 }
1465 bsi = gsi_start_bb (bb);
1466 while (!gsi_end_p (i: bsi))
1467 {
1468 gimple *stmt = gsi_stmt (i: bsi);
1469 bool remove = false;
1470 /* If we're recompiling LTO objects with debug stmts but
1471 we're not supposed to have debug stmts, remove them now.
1472 We can't remove them earlier because this would cause uid
1473 mismatches in fixups, but we can do it at this point, as
1474 long as debug stmts don't require fixups.
1475 Similarly remove all IFN_*SAN_* internal calls */
1476 if (!flag_wpa)
1477 {
1478 if (is_gimple_debug (gs: stmt)
1479 && (gimple_debug_nonbind_marker_p (s: stmt)
1480 ? !MAY_HAVE_DEBUG_MARKER_STMTS
1481 : !MAY_HAVE_DEBUG_BIND_STMTS))
1482 remove = true;
1483 /* In case the linemap overflows locations can be dropped
1484 to zero. Thus do not keep nonsensical inline entry markers
1485 we'd later ICE on. */
1486 tree block;
1487 if (gimple_debug_inline_entry_p (s: stmt)
1488 && (((block = gimple_block (g: stmt))
1489 && !inlined_function_outer_scope_p (block))
1490 || !debug_inline_points))
1491 remove = true;
1492 if (is_gimple_call (gs: stmt)
1493 && gimple_call_internal_p (gs: stmt))
1494 {
1495 bool replace = false;
1496 switch (gimple_call_internal_fn (gs: stmt))
1497 {
1498 case IFN_UBSAN_NULL:
1499 if ((flag_sanitize
1500 & (SANITIZE_NULL | SANITIZE_ALIGNMENT)) == 0)
1501 replace = true;
1502 break;
1503 case IFN_UBSAN_BOUNDS:
1504 if ((flag_sanitize & SANITIZE_BOUNDS) == 0)
1505 replace = true;
1506 break;
1507 case IFN_UBSAN_VPTR:
1508 if ((flag_sanitize & SANITIZE_VPTR) == 0)
1509 replace = true;
1510 break;
1511 case IFN_UBSAN_OBJECT_SIZE:
1512 if ((flag_sanitize & SANITIZE_OBJECT_SIZE) == 0)
1513 replace = true;
1514 break;
1515 case IFN_UBSAN_PTR:
1516 if ((flag_sanitize & SANITIZE_POINTER_OVERFLOW) == 0)
1517 replace = true;
1518 break;
1519 case IFN_ASAN_MARK:
1520 if ((flag_sanitize & SANITIZE_ADDRESS) == 0)
1521 replace = true;
1522 break;
1523 case IFN_TSAN_FUNC_EXIT:
1524 if ((flag_sanitize & SANITIZE_THREAD) == 0)
1525 replace = true;
1526 break;
1527 default:
1528 break;
1529 }
1530 if (replace)
1531 {
1532 gimple_call_set_internal_fn (call_stmt: as_a <gcall *> (p: stmt),
1533 fn: IFN_NOP);
1534 update_stmt (s: stmt);
1535 }
1536 }
1537 }
1538 if (remove)
1539 {
1540 gimple_stmt_iterator gsi = bsi;
1541 gsi_next (i: &bsi);
1542 unlink_stmt_vdef (stmt);
1543 release_defs (stmt);
1544 gsi_remove (&gsi, true);
1545 }
1546 else
1547 {
1548 gsi_next (i: &bsi);
1549 stmts[gimple_uid (g: stmt)] = stmt;
1550
1551 /* Remember that the input function has begin stmt
1552 markers, so that we know to expect them when emitting
1553 debug info. */
1554 if (!cfun->debug_nonbind_markers
1555 && gimple_debug_nonbind_marker_p (s: stmt))
1556 cfun->debug_nonbind_markers = true;
1557 }
1558 }
1559 }
1560
1561 /* Set the gimple body to the statement sequence in the entry
1562 basic block. FIXME lto, this is fairly hacky. The existence
1563 of a gimple body is used by the cgraph routines, but we should
1564 really use the presence of the CFG. */
1565 {
1566 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
1567 gimple_set_body (fn_decl, bb_seq (bb: ei_edge (i: ei)->dest));
1568 }
1569
1570 update_max_bb_count ();
1571 fixup_call_stmt_edges (orig: node, stmts);
1572 execute_all_ipa_stmt_fixups (node, stmts);
1573
1574 free_dominance_info (CDI_DOMINATORS);
1575 free_dominance_info (CDI_POST_DOMINATORS);
  free (stmts);
1577 pop_cfun ();
1578}
1579
/* Read the initializer for variable VAR from DATA_IN using input block IB.  */
1581
1582static void
1583input_constructor (tree var, class data_in *data_in,
1584 class lto_input_block *ib)
1585{
1586 DECL_INITIAL (var) = stream_read_tree (ib, data_in);
1587}
1588
1589
/* Read the body from DATA for function or variable NODE and fill it in.
   FILE_DATA holds the global decls and types.  SECTION_TYPE is either
   LTO_section_function_body or LTO_section_static_initializer.  If
   SECTION_TYPE is LTO_section_function_body, NODE must be the cgraph node
   for that function.  */
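
/* The section layout, with offsets as computed below, is

     lto_function_header | [CFG stream] | main stream | string table

   where the CFG stream is only present for function bodies; variable
   initializers go straight from the header to the main stream.  */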
1595
1596static void
1597lto_read_body_or_constructor (struct lto_file_decl_data *file_data, struct symtab_node *node,
1598 const char *data, enum lto_section_type section_type)
1599{
1600 const struct lto_function_header *header;
1601 class data_in *data_in;
1602 int cfg_offset;
1603 int main_offset;
1604 int string_offset;
1605 tree fn_decl = node->decl;
1606
1607 header = (const struct lto_function_header *) data;
1608 if (TREE_CODE (node->decl) == FUNCTION_DECL)
1609 {
1610 cfg_offset = sizeof (struct lto_function_header);
1611 main_offset = cfg_offset + header->cfg_size;
1612 string_offset = main_offset + header->main_size;
1613 }
1614 else
1615 {
1616 main_offset = sizeof (struct lto_function_header);
1617 string_offset = main_offset + header->main_size;
1618 }
1619
1620 data_in = lto_data_in_create (file_data, data + string_offset,
1621 header->string_size, vNULL);
1622
1623 if (section_type == LTO_section_function_body)
1624 {
1625 struct lto_in_decl_state *decl_state;
1626 unsigned from;
1627
1628 gcc_checking_assert (node);
1629
1630 /* Use the function's decl state. */
1631 decl_state = lto_get_function_in_decl_state (file_data, fn_decl);
1632 gcc_assert (decl_state);
1633 file_data->current_decl_state = decl_state;
1634
1635
1636 /* Set up the struct function. */
1637 from = data_in->reader_cache->nodes.length ();
1638 lto_input_block ib_main (data + main_offset, header->main_size,
1639 file_data);
1640 if (TREE_CODE (node->decl) == FUNCTION_DECL)
1641 {
1642 lto_input_block ib_cfg (data + cfg_offset, header->cfg_size,
1643 file_data);
1644 input_function (fn_decl, data_in, ib: &ib_main, ib_cfg: &ib_cfg,
1645 node: dyn_cast <cgraph_node *>(p: node));
1646 }
1647 else
1648 input_constructor (var: fn_decl, data_in, ib: &ib_main);
1649 data_in->location_cache.apply_location_cache ();
1650 /* And fixup types we streamed locally. */
1651 {
1652 struct streamer_tree_cache_d *cache = data_in->reader_cache;
1653 unsigned len = cache->nodes.length ();
1654 unsigned i;
1655 for (i = len; i-- > from;)
1656 {
1657 tree t = streamer_tree_cache_get_tree (cache, ix: i);
1658 if (t == NULL_TREE)
1659 continue;
1660
1661 if (TYPE_P (t))
1662 {
1663 gcc_assert (TYPE_STRUCTURAL_EQUALITY_P (t));
1664 if (type_with_alias_set_p (t)
1665 && canonical_type_used_p (t))
1666 TYPE_CANONICAL (t) = TYPE_MAIN_VARIANT (t);
1667 if (TYPE_MAIN_VARIANT (t) != t)
1668 {
1669 gcc_assert (TYPE_NEXT_VARIANT (t) == NULL_TREE);
1670 TYPE_NEXT_VARIANT (t)
1671 = TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t));
1672 TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t)) = t;
1673 }
1674 }
1675 }
1676 }
1677
1678 /* Restore decl state */
1679 file_data->current_decl_state = file_data->global_decl_state;
1680 }
1681
1682 lto_data_in_delete (data_in);
1683}
1684
1685
1686/* Read the body of NODE using DATA. FILE_DATA holds the global
1687 decls and types. */
1688
1689void
1690lto_input_function_body (struct lto_file_decl_data *file_data,
1691 struct cgraph_node *node, const char *data)
1692{
1693 lto_read_body_or_constructor (file_data, node, data, section_type: LTO_section_function_body);
1694}
1695
/* Read the initializer of variable NODE using DATA.  FILE_DATA holds the
   global decls and types.  */
1698
1699void
1700lto_input_variable_constructor (struct lto_file_decl_data *file_data,
1701 struct varpool_node *node, const char *data)
1702{
1703 lto_read_body_or_constructor (file_data, node, data, section_type: LTO_section_function_body);
1704}
1705
1706
/* Queue of accumulated decl -> DIE mappings.  Like locations, these are
   only applied to prevailing tree nodes during tree merging.  */
1709vec<dref_entry> dref_queue;
1710
1711/* Read the physical representation of a tree node EXPR from
1712 input block IB using the per-file context in DATA_IN. */
1713
1714static void
1715lto_read_tree_1 (class lto_input_block *ib, class data_in *data_in, tree expr)
1716{
1717 /* Read all the bitfield values in EXPR. Note that for LTO, we
1718 only write language-independent bitfields, so no more unpacking is
1719 needed. */
1720 streamer_read_tree_bitfields (ib, data_in, expr);
1721
1722 /* Read all the pointer fields in EXPR. */
1723 streamer_read_tree_body (ib, data_in, expr);
1724
  /* Read any LTO-specific data not read by the tree streamer.  Do not use
     stream_read_tree here since that flushes the dref_queue in the middle
     of SCC reading.  */
1728 if (DECL_P (expr)
1729 && TREE_CODE (expr) != FUNCTION_DECL
1730 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
1731 DECL_INITIAL (expr)
1732 = lto_input_tree_1 (ib, data_in, streamer_read_record_start (ib), hash: 0);
1733
1734 /* Stream references to early generated DIEs. Keep in sync with the
1735 trees handled in dwarf2out_register_external_die. */
1736 if ((DECL_P (expr)
1737 && TREE_CODE (expr) != FIELD_DECL
1738 && TREE_CODE (expr) != DEBUG_EXPR_DECL
1739 && TREE_CODE (expr) != TYPE_DECL)
1740 || TREE_CODE (expr) == BLOCK)
1741 {
1742 const char *str = streamer_read_string (data_in, ib);
1743 if (str)
1744 {
1745 unsigned HOST_WIDE_INT off = streamer_read_uhwi (ib);
1746 dref_entry e = { .decl: expr, .sym: str, .off: off };
1747 dref_queue.safe_push (obj: e);
1748 }
1749 }
1750}
1751
1752/* Read the physical representation of a tree node with tag TAG from
1753 input block IB using the per-file context in DATA_IN. */
1754
1755static tree
1756lto_read_tree (class lto_input_block *ib, class data_in *data_in,
1757 enum LTO_tags tag, hashval_t hash)
1758{
1759 /* Instantiate a new tree node. */
1760 tree result = streamer_alloc_tree (ib, data_in, tag);
1761
1762 /* Enter RESULT in the reader cache. This will make RESULT
1763 available so that circular references in the rest of the tree
1764 structure can be resolved in subsequent calls to stream_read_tree. */
1765 streamer_tree_cache_append (data_in->reader_cache, result, hash);
1766
1767 lto_read_tree_1 (ib, data_in, expr: result);
1768
1769 return result;
1770}
1771
1772
1773/* Populate the reader cache with trees materialized from the SCC
1774 following in the IB, DATA_IN stream.
1775 If SHARED_SCC is true we input LTO_tree_scc. */
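
/* As decoded below, an LTO_tree_scc record starts with a uhwi N: if N is odd
   an explicit entry length follows (otherwise it is 1), N / 2 is the number
   of trees, and the SCC hash comes next.  An LTO_trees record streams just
   the tree count.  */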
1776
1777hashval_t
1778lto_input_scc (class lto_input_block *ib, class data_in *data_in,
1779 unsigned *len, unsigned *entry_len, bool shared_scc)
1780{
1781 unsigned size = streamer_read_uhwi (ib);
1782 hashval_t scc_hash = 0;
1783 unsigned scc_entry_len = 1;
1784
1785 if (shared_scc)
1786 {
1787 if (size & 1)
1788 scc_entry_len = streamer_read_uhwi (ib);
1789 size /= 2;
1790 scc_hash = streamer_read_uhwi (ib);
1791 }
1792
1793 if (size == 1)
1794 {
1795 enum LTO_tags tag = streamer_read_record_start (ib);
1796 lto_input_tree_1 (ib, data_in, tag, hash: scc_hash);
1797 }
1798 else
1799 {
1800 unsigned int first = data_in->reader_cache->nodes.length ();
1801 tree result;
1802
      /* Materialize SIZE trees by reading their headers.  */
1804 for (unsigned i = 0; i < size; ++i)
1805 {
1806 enum LTO_tags tag = streamer_read_record_start (ib);
1807 if (tag == LTO_null
1808 || tag == LTO_global_stream_ref
1809 || tag == LTO_tree_pickle_reference
1810 || tag == LTO_integer_cst
1811 || tag == LTO_tree_scc
1812 || tag == LTO_trees)
1813 gcc_unreachable ();
1814
1815 result = streamer_alloc_tree (ib, data_in, tag);
1816 streamer_tree_cache_append (data_in->reader_cache, result, 0);
1817 }
1818
1819 /* Read the tree bitpacks and references. */
1820 for (unsigned i = 0; i < size; ++i)
1821 {
1822 result = streamer_tree_cache_get_tree (cache: data_in->reader_cache,
1823 ix: first + i);
1824 lto_read_tree_1 (ib, data_in, expr: result);
1825 }
1826 }
1827
1828 *len = size;
1829 *entry_len = scc_entry_len;
1830 return scc_hash;
1831}
1832
1833/* Read reference to tree from IB and DATA_IN.
1834 This is used for streaming tree bodies where we know that
1835 the tree is already in cache or is indexable and
1836 must be matched with stream_write_tree_ref. */
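
/* The reference is a single signed HWI IX:
     IX == 0   NULL_TREE
     IX > 0    entry IX - 1 in the reader cache
     IX < 0    let J = -IX - 1; if J is odd, SSA name J / 2 of cfun,
               otherwise entry J / 2 of the current decl stream.  */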
1837
1838tree
1839stream_read_tree_ref (lto_input_block *ib, data_in *data_in)
1840{
1841 int ix = streamer_read_hwi (ib);
1842 if (!ix)
1843 return NULL_TREE;
1844 if (ix > 0)
1845 return streamer_tree_cache_get_tree (cache: data_in->reader_cache, ix: ix - 1);
1846
  ix = -ix - 1;
  int id = ix & 1;
  ix /= 2;

  tree ret;
  if (!id)
    ret = (*data_in->file_data->current_decl_state
           ->streams[LTO_DECL_STREAM])[ix];
  else
    ret = (*SSANAMES (cfun))[ix];
  return ret;
}

/* Read a tree from input block IB using the per-file context in
   DATA_IN. This context is used, for example, to resolve references
   to previously read nodes. */

tree
lto_input_tree_1 (class lto_input_block *ib, class data_in *data_in,
                  enum LTO_tags tag, hashval_t hash)
{
  tree result;

  gcc_assert ((unsigned) tag < (unsigned) LTO_NUM_TAGS);

  if (tag == LTO_null)
    result = NULL_TREE;
  else if (tag == LTO_global_stream_ref || tag == LTO_ssa_name_ref)
    {
      /* If TAG is a reference to an indexable tree, the next value
         in IB is the index into the table where we expect to find
         that tree. */
      result = lto_input_tree_ref (ib, data_in, cfun, tag);
    }
  else if (tag == LTO_tree_pickle_reference)
    {
      /* If TAG is a reference to a previously read tree, look it up in
         the reader cache. */
      result = streamer_get_pickled_tree (ib, data_in);
    }
  else if (tag == LTO_integer_cst)
    {
      /* For shared integer constants in singletons we can use the
         existing tree integer constant merging code. */
      tree type = stream_read_tree_ref (ib, data_in);
      unsigned HOST_WIDE_INT len = streamer_read_uhwi (ib);
      unsigned HOST_WIDE_INT i;
      HOST_WIDE_INT abuf[WIDE_INT_MAX_INL_ELTS], *a = abuf;

      if (UNLIKELY (len > WIDE_INT_MAX_INL_ELTS))
        a = XALLOCAVEC (HOST_WIDE_INT, len);
      for (i = 0; i < len; i++)
        a[i] = streamer_read_hwi (ib);
      gcc_assert (TYPE_PRECISION (type) <= WIDE_INT_MAX_PRECISION);
      result
        = wide_int_to_tree (type,
                            wide_int::from_array (a, len,
                                                  TYPE_PRECISION (type)));
      streamer_tree_cache_append (data_in->reader_cache, result, hash);
    }
  else if (tag == LTO_tree_scc || tag == LTO_trees)
    gcc_unreachable ();
  else
    {
      /* Otherwise, materialize a new node from IB. */
      result = lto_read_tree (ib, data_in, tag, hash);
    }

  return result;
}

tree
lto_input_tree (class lto_input_block *ib, class data_in *data_in)
{
  enum LTO_tags tag;

  /* Input pickled trees needed to stream in the reference. */
  while ((tag = streamer_read_record_start (ib)) == LTO_trees)
    {
      unsigned len, entry_len;
      lto_input_scc (ib, data_in, &len, &entry_len, false);

      /* Register DECLs with the debuginfo machinery. */
      while (!dref_queue.is_empty ())
        {
          dref_entry e = dref_queue.pop ();
          debug_hooks->register_external_die (e.decl, e.sym, e.off);
        }
    }
  tree t = lto_input_tree_1 (ib, data_in, tag, 0);

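  /* The tree read above may itself have queued a DECL for debug-info
     registration; register it now.  At most one entry can be pending
     here, which the assert below verifies.  */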
  if (!dref_queue.is_empty ())
    {
      dref_entry e = dref_queue.pop ();
      debug_hooks->register_external_die (e.decl, e.sym, e.off);
      gcc_checking_assert (dref_queue.is_empty ());
    }
  return t;
}


/* Input toplevel asms. */

void
lto_input_toplevel_asms (struct lto_file_decl_data *file_data, int order_base)
{
  size_t len;
  const char *data
    = lto_get_summary_section_data (file_data, LTO_section_asm, &len);
  const struct lto_simple_header_with_strings *header
    = (const struct lto_simple_header_with_strings *) data;
  int string_offset;
  class data_in *data_in;
  tree str;

  if (! data)
    return;

  string_offset = sizeof (*header) + header->main_size;

  lto_input_block ib (data + sizeof (*header), header->main_size,
                      file_data);

  data_in = lto_data_in_create (file_data, data + string_offset,
                                header->string_size, vNULL);

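  /* Each asm string is followed by its symbol order, which is biased by
     ORDER_BASE; keep symtab->order above every order read.  */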
  while ((str = streamer_read_string_cst (data_in, &ib)))
    {
      asm_node *node = symtab->finalize_toplevel_asm (str);
      node->order = streamer_read_hwi (&ib) + order_base;
      if (node->order >= symtab->order)
        symtab->order = node->order + 1;
    }

  lto_data_in_delete (data_in);

  lto_free_section_data (file_data, LTO_section_asm, NULL, data, len);
}


/* Input mode table. */

void
lto_input_mode_table (struct lto_file_decl_data *file_data)
{
  size_t len;
  const char *data
    = lto_get_summary_section_data (file_data, LTO_section_mode_table, &len);
  if (! data)
    internal_error ("cannot read LTO mode table from %s",
                    file_data->file_name);

  const struct lto_simple_header_with_strings *header
    = (const struct lto_simple_header_with_strings *) data;
  int string_offset;
  class data_in *data_in;
  string_offset = sizeof (*header) + header->main_size;

  lto_input_block ib (data + sizeof (*header), header->main_size, NULL);
  data_in = lto_data_in_create (file_data, data + string_offset,
                                header->string_size, vNULL);
  bitpack_d bp = streamer_read_bitpack (&ib);

  unsigned mode_bits = bp_unpack_value (&bp, 5);
  unsigned char *table = ggc_cleared_vec_alloc<unsigned char> (1 << mode_bits);

  file_data->mode_table = table;
  file_data->mode_bits = mode_bits;

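  /* TABLE translates mode numbers as used in the stream into this
     target's machine modes; VOIDmode and BLKmode always map to
     themselves.  */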
  table[VOIDmode] = VOIDmode;
  table[BLKmode] = BLKmode;
  unsigned int m;
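  /* Read mode descriptions until the VOIDmode terminator, looking for an
     equivalent mode on this target for each of them.  */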
  while ((m = bp_unpack_value (&bp, mode_bits)) != VOIDmode)
    {
      enum mode_class mclass
        = bp_unpack_enum (&bp, mode_class, MAX_MODE_CLASS);
      poly_uint16 size = bp_unpack_poly_value (&bp, 16);
      poly_uint16 prec = bp_unpack_poly_value (&bp, 16);
      machine_mode inner = (machine_mode) bp_unpack_value (&bp, mode_bits);
      poly_uint16 nunits = bp_unpack_poly_value (&bp, 16);
      unsigned int ibit = 0, fbit = 0;
      unsigned int real_fmt_len = 0;
      const char *real_fmt_name = NULL;
      switch (mclass)
        {
        case MODE_FRACT:
        case MODE_UFRACT:
        case MODE_ACCUM:
        case MODE_UACCUM:
          ibit = bp_unpack_value (&bp, 8);
          fbit = bp_unpack_value (&bp, 8);
          break;
        case MODE_FLOAT:
        case MODE_DECIMAL_FLOAT:
          real_fmt_name = bp_unpack_indexed_string (data_in, &bp,
                                                    &real_fmt_len);
          break;
        default:
          break;
        }
      /* First search just the GET_CLASS_NARROWEST_MODE to wider modes;
         if not found, fall back to all modes. */
      int pass;
      for (pass = 0; pass < 2; pass++)
        for (machine_mode mr = pass ? VOIDmode
                                    : GET_CLASS_NARROWEST_MODE (mclass);
             pass ? mr < MAX_MACHINE_MODE : mr != VOIDmode;
             pass ? mr = (machine_mode) (mr + 1)
                  : mr = GET_MODE_WIDER_MODE (mr).else_void ())
          if (GET_MODE_CLASS (mr) != mclass
              || maybe_ne (GET_MODE_SIZE (mr), size)
              || maybe_ne (GET_MODE_PRECISION (mr), prec)
              || (inner == m
                  ? GET_MODE_INNER (mr) != mr
                  : GET_MODE_INNER (mr) != table[(int) inner])
              || GET_MODE_IBIT (mr) != ibit
              || GET_MODE_FBIT (mr) != fbit
              || maybe_ne (GET_MODE_NUNITS (mr), nunits))
            continue;
          else if ((mclass == MODE_FLOAT || mclass == MODE_DECIMAL_FLOAT)
                   && strcmp (REAL_MODE_FORMAT (mr)->name, real_fmt_name) != 0)
            continue;
          else
            {
              table[m] = mr;
              pass = 2;
              break;
            }
      unsigned int mname_len;
      const char *mname = bp_unpack_indexed_string (data_in, &bp, &mname_len);
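      /* PASS is still 2 only if both search passes failed to find a
         matching mode; a successful match leaves it at 3 after the final
         increment of the outer loop.  */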
      if (pass == 2)
        {
          switch (mclass)
            {
            case MODE_VECTOR_BOOL:
            case MODE_VECTOR_INT:
            case MODE_VECTOR_FLOAT:
            case MODE_VECTOR_FRACT:
            case MODE_VECTOR_UFRACT:
            case MODE_VECTOR_ACCUM:
            case MODE_VECTOR_UACCUM:
              /* For unsupported vector modes just use BLKmode,
                 if the scalar mode is supported. */
              if (table[(int) inner] != VOIDmode)
                {
                  table[m] = BLKmode;
                  break;
                }
              /* FALLTHRU */
            default:
              /* This is only used for offloading-target compilations and
                 is a user-facing error. Give a better error message for
                 the common modes; see also mode-classes.def. */
              if (mclass == MODE_FLOAT)
                fatal_error (UNKNOWN_LOCATION,
                             "%s - %u-bit-precision floating-point numbers "
                             "unsupported (mode %qs)", TARGET_MACHINE,
                             prec.to_constant (), mname);
              else if (mclass == MODE_DECIMAL_FLOAT)
                fatal_error (UNKNOWN_LOCATION,
                             "%s - %u-bit-precision decimal floating-point "
                             "numbers unsupported (mode %qs)", TARGET_MACHINE,
                             prec.to_constant (), mname);
              else if (mclass == MODE_COMPLEX_FLOAT)
                fatal_error (UNKNOWN_LOCATION,
                             "%s - %u-bit-precision complex floating-point "
                             "numbers unsupported (mode %qs)", TARGET_MACHINE,
                             prec.to_constant (), mname);
              else if (mclass == MODE_INT)
                fatal_error (UNKNOWN_LOCATION,
                             "%s - %u-bit integer numbers unsupported (mode "
                             "%qs)", TARGET_MACHINE, prec.to_constant (),
                             mname);
              else
                fatal_error (UNKNOWN_LOCATION, "%s - unsupported mode %qs",
                             TARGET_MACHINE, mname);
              break;
            }
        }
    }
  lto_data_in_delete (data_in);

  lto_free_section_data (file_data, LTO_section_mode_table, NULL, data, len);
}


/* Initialization for the LTO reader. */

void
lto_reader_init (void)
{
  lto_streamer_init ();
  file_name_hash_table
    = new hash_table<string_slot_hasher> (37);
  string_slot_allocator = new object_allocator <struct string_slot>
    ("line map file name hash");
  gcc_obstack_init (&file_name_obstack);
}

/* Free hash table used to stream in location file names. */

void
lto_free_file_name_hash (void)
{
  delete file_name_hash_table;
  file_name_hash_table = NULL;
  delete string_slot_allocator;
  string_slot_allocator = NULL;
  delete path_name_pair_hash_table;
  path_name_pair_hash_table = NULL;
  delete string_pair_map_allocator;
  string_pair_map_allocator = NULL;
  /* file_name_obstack must stay allocated since it is referred to by the
     line map table. */
}


/* Create a new data_in object for FILE_DATA. STRINGS is the string
   table to use with LEN strings. RESOLUTIONS is the vector of linker
   resolutions (NULL if not using a linker plugin). */

class data_in *
lto_data_in_create (struct lto_file_decl_data *file_data, const char *strings,
                    unsigned len,
                    vec<ld_plugin_symbol_resolution_t> resolutions)
{
  class data_in *data_in = new (class data_in);
  data_in->file_data = file_data;
  data_in->strings = strings;
  data_in->strings_len = len;
  data_in->globals_resolution = resolutions;
  data_in->reader_cache = streamer_tree_cache_create (false, false, true);
  return data_in;
}


/* Remove DATA_IN. */

void
lto_data_in_delete (class data_in *data_in)
{
  data_in->globals_resolution.release ();
  streamer_tree_cache_delete (data_in->reader_cache);
  delete data_in;
}