1/* Miscellaneous utilities for tree streaming. Things that are used
2 in both input and output are here.
3
4 Copyright (C) 2011-2023 Free Software Foundation, Inc.
5 Contributed by Diego Novillo <dnovillo@google.com>
6
7This file is part of GCC.
8
9GCC is free software; you can redistribute it and/or modify it under
10the terms of the GNU General Public License as published by the Free
11Software Foundation; either version 3, or (at your option) any later
12version.
13
14GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15WARRANTY; without even the implied warranty of MERCHANTABILITY or
16FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17for more details.
18
19You should have received a copy of the GNU General Public License
20along with GCC; see the file COPYING3. If not see
21<http://www.gnu.org/licenses/>. */
22
23#include "config.h"
24#include "system.h"
25#include "coretypes.h"
26#include "backend.h"
27#include "tree.h"
28#include "gimple.h"
29#include "tree-streamer.h"
30#include "cgraph.h"
31
/* Table indexed by machine_mode, used for two different purposes.
   During streaming out, a non-zero value is recorded for every mode
   that was streamed.
   During streaming in, it translates the on-disk mode: for normal LTO
   it is the identity mapping, while for ACCEL_COMPILER it depends on
   the mode_table content.  */
38unsigned char streamer_mode_table[MAX_MACHINE_MODE];
39
40/* Check that all the TS_* structures handled by the streamer_write_* and
41 streamer_read_* routines are exactly ALL the structures defined in
42 treestruct.def. */
43
44void
45streamer_check_handled_ts_structures (void)
46{
47 bool handled_p[LAST_TS_ENUM];
48 unsigned i;
49
50 memset (s: &handled_p, c: 0, n: sizeof (handled_p));
51
52 /* These are the TS_* structures that are either handled or
53 explicitly ignored by the streamer routines. */
54 handled_p[TS_BASE] = true;
55 handled_p[TS_TYPED] = true;
56 handled_p[TS_COMMON] = true;
57 handled_p[TS_INT_CST] = true;
58 handled_p[TS_POLY_INT_CST] = true;
59 handled_p[TS_REAL_CST] = true;
60 handled_p[TS_FIXED_CST] = true;
61 handled_p[TS_VECTOR] = true;
62 handled_p[TS_STRING] = true;
63 handled_p[TS_COMPLEX] = true;
64 handled_p[TS_IDENTIFIER] = true;
65 handled_p[TS_DECL_MINIMAL] = true;
66 handled_p[TS_DECL_COMMON] = true;
67 handled_p[TS_DECL_WRTL] = true;
68 handled_p[TS_DECL_NON_COMMON] = true;
69 handled_p[TS_DECL_WITH_VIS] = true;
70 handled_p[TS_FIELD_DECL] = true;
71 handled_p[TS_VAR_DECL] = true;
72 handled_p[TS_PARM_DECL] = true;
73 handled_p[TS_LABEL_DECL] = true;
74 handled_p[TS_RESULT_DECL] = true;
75 handled_p[TS_CONST_DECL] = true;
76 handled_p[TS_TYPE_DECL] = true;
77 handled_p[TS_FUNCTION_DECL] = true;
78 handled_p[TS_TYPE_COMMON] = true;
79 handled_p[TS_TYPE_WITH_LANG_SPECIFIC] = true;
80 handled_p[TS_TYPE_NON_COMMON] = true;
81 handled_p[TS_LIST] = true;
82 handled_p[TS_VEC] = true;
83 handled_p[TS_EXP] = true;
84 handled_p[TS_SSA_NAME] = true;
85 handled_p[TS_BLOCK] = true;
86 handled_p[TS_BINFO] = true;
87 handled_p[TS_STATEMENT_LIST] = true;
88 handled_p[TS_CONSTRUCTOR] = true;
89 handled_p[TS_OMP_CLAUSE] = true;
90 handled_p[TS_OPTIMIZATION] = true;
91 handled_p[TS_TARGET_OPTION] = true;
92 handled_p[TS_TRANSLATION_UNIT_DECL] = true;
93
94 /* Anything not marked above will trigger the following assertion.
95 If this assertion triggers, it means that there is a new TS_*
96 structure that should be handled by the streamer. */
97 for (i = 0; i < LAST_TS_ENUM; i++)
98 gcc_assert (handled_p[i]);
99}
100
101
102/* Helper for streamer_tree_cache_insert_1. Add T to CACHE->NODES at
103 slot IX. */
104
105static void
106streamer_tree_cache_add_to_node_array (struct streamer_tree_cache_d *cache,
107 unsigned ix, tree t, hashval_t hash)
108{
109 /* We're either replacing an old element or appending consecutively. */
110 if (cache->nodes.exists ())
111 {
112 if (cache->nodes.length () == ix)
113 cache->nodes.safe_push (obj: t);
114 else
115 cache->nodes[ix] = t;
116 }
117 if (cache->hashes.exists ())
118 {
119 if (cache->hashes.length () == ix)
120 cache->hashes.safe_push (obj: hash);
121 else
122 cache->hashes[ix] = hash;
123 }
124}
125
126
127/* Helper for streamer_tree_cache_insert and streamer_tree_cache_insert_at.
128 CACHE, T, and IX_P are as in streamer_tree_cache_insert.
129
130 If INSERT_AT_NEXT_SLOT_P is true, T is inserted at the next available
131 slot in the cache. Otherwise, T is inserted at the position indicated
132 in *IX_P.
133
134 If T already existed in CACHE, return true. Otherwise,
135 return false. */
136
137static bool
138streamer_tree_cache_insert_1 (struct streamer_tree_cache_d *cache,
139 tree t, hashval_t hash, unsigned *ix_p,
140 bool insert_at_next_slot_p)
141{
142 bool existed_p;
143
144 gcc_assert (t);
145
146 unsigned int &ix = cache->node_map->get_or_insert (k: t, existed: &existed_p);
147 if (!existed_p)
148 {
149 /* Determine the next slot to use in the cache. */
150 if (insert_at_next_slot_p)
151 ix = cache->next_idx++;
152 else
153 ix = *ix_p;
154
155 streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
156 }
157 else
158 {
159 if (!insert_at_next_slot_p && ix != *ix_p)
160 {
161 /* If the caller wants to insert T at a specific slot
162 location, and ENTRY->TO does not match *IX_P, add T to
163 the requested location slot. */
164 ix = *ix_p;
165 streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
166 }
167 }
168
169 if (ix_p)
170 *ix_p = ix;
171
172 return existed_p;
173}
174
175
176/* Insert tree node T in CACHE. If T already existed in the cache
177 return true. Otherwise, return false.
178
179 If IX_P is non-null, update it with the index into the cache where
180 T has been stored. */
181
182bool
183streamer_tree_cache_insert (struct streamer_tree_cache_d *cache, tree t,
184 hashval_t hash, unsigned *ix_p)
185{
186 return streamer_tree_cache_insert_1 (cache, t, hash, ix_p, insert_at_next_slot_p: true);
187}
188
189
190/* Replace the tree node with T in CACHE at slot IX. */
191
192void
193streamer_tree_cache_replace_tree (struct streamer_tree_cache_d *cache,
194 tree t, unsigned ix)
195{
196 hashval_t hash = 0;
197 if (cache->hashes.exists ())
198 hash = streamer_tree_cache_get_hash (cache, ix);
199 if (!cache->node_map)
200 streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
201 else
202 streamer_tree_cache_insert_1 (cache, t, hash, ix_p: &ix, insert_at_next_slot_p: false);
203}
204
205
206/* Appends tree node T to CACHE, even if T already existed in it. */
207
208void
209streamer_tree_cache_append (struct streamer_tree_cache_d *cache,
210 tree t, hashval_t hash)
211{
212 unsigned ix = cache->next_idx++;
213 if (!cache->node_map)
214 streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
215 else
216 streamer_tree_cache_insert_1 (cache, t, hash, ix_p: &ix, insert_at_next_slot_p: false);
217}
218
219/* Return true if tree node T exists in CACHE, otherwise false. If IX_P is
220 not NULL, write to *IX_P the index into the cache where T is stored
221 ((unsigned)-1 if T is not found). */
222
223bool
224streamer_tree_cache_lookup (struct streamer_tree_cache_d *cache, tree t,
225 unsigned *ix_p)
226{
227 unsigned *slot;
228 bool retval;
229 unsigned ix;
230
231 gcc_assert (t);
232
233 slot = cache->node_map->get (k: t);
234 if (slot == NULL)
235 {
236 retval = false;
237 ix = -1;
238 }
239 else
240 {
241 retval = true;
242 ix = *slot;
243 }
244
245 if (ix_p)
246 *ix_p = ix;
247
248 return retval;
249}
250
251
252/* Verify that NODE is in CACHE. */
253
254static void
255verify_common_node_recorded (struct streamer_tree_cache_d *cache, tree node)
256{
257 /* Restrict this to flag_checking only because in general violating it is
258 harmless plus we never know what happens on all targets/frontend/flag(!)
259 combinations. */
260 if (!flag_checking)
261 return;
262
263 if (cache->node_map)
264 gcc_assert (streamer_tree_cache_lookup (cache, node, NULL));
265 else
266 {
267 bool found = false;
268 gcc_assert (cache->nodes.exists ());
269 /* Linear search... */
270 for (unsigned i = 0; !found && i < cache->nodes.length (); ++i)
271 if (cache->nodes[i] == node)
272 found = true;
273 gcc_assert (found);
274 }
275}
276
277
278/* Record NODE in CACHE. */
279
280static void
281record_common_node (struct streamer_tree_cache_d *cache, tree node)
282{
283 /* If we recursively end up at nodes we do not want to preload simply don't.
284 ??? We'd want to verify that this doesn't happen, or alternatively
285 do not recurse at all. */
286 if (node == char_type_node)
287 return;
288
289 gcc_checking_assert (node != boolean_type_node
290 && node != boolean_true_node
291 && node != boolean_false_node);
292
293 /* We have to make sure to fill exactly the same number of
294 elements for all frontends. That can include NULL trees.
295 As our hash table can't deal with zero entries we'll simply stream
296 a random other tree. A NULL tree never will be looked up so it
297 doesn't matter which tree we replace it with, just to be sure
298 use error_mark_node. */
299 if (!node)
300 node = error_mark_node;
301
302 /* This hash needs to be equal for all frontend and lto1 invocations. So
303 just use the position in the cache as hash value.
304 Small integers are used by hash_tree to record positions within scc
305 hash. Values are not in same range. */
306 streamer_tree_cache_append (cache, t: node, hash: cache->next_idx + 0xc001);
307
308 switch (TREE_CODE (node))
309 {
310 case ERROR_MARK:
311 case FIELD_DECL:
312 case FIXED_POINT_TYPE:
313 case IDENTIFIER_NODE:
314 case INTEGER_CST:
315 case INTEGER_TYPE:
316 case REAL_TYPE:
317 case TREE_LIST:
318 case VOID_CST:
319 case VOID_TYPE:
320 case OPAQUE_TYPE:
321 /* No recursive trees. */
322 break;
323 case ARRAY_TYPE:
324 case POINTER_TYPE:
325 case REFERENCE_TYPE:
326 record_common_node (cache, TREE_TYPE (node));
327 break;
328 case COMPLEX_TYPE:
329 /* Verify that a complex type's component type (node_type) has been
330 handled already (and we thus don't need to recurse here). */
331 verify_common_node_recorded (cache, TREE_TYPE (node));
332 break;
333 case RECORD_TYPE:
334 /* The FIELD_DECLs of structures should be shared, so that every
335 COMPONENT_REF uses the same tree node when referencing a field.
336 Pointer equality between FIELD_DECLs is used by the alias
337 machinery to compute overlapping component references (see
338 nonoverlapping_component_refs_p and
339 nonoverlapping_component_refs_of_decl_p). */
340 for (tree f = TYPE_FIELDS (node); f; f = TREE_CHAIN (f))
341 record_common_node (cache, node: f);
342 break;
343 default:
344 /* Unexpected tree code. */
345 gcc_unreachable ();
346 }
347}
348
349
350/* Preload common nodes into CACHE and make sure they are merged
351 properly according to the gimple type table. */
352
353static void
354preload_common_nodes (struct streamer_tree_cache_d *cache)
355{
356 unsigned i;
357
358 for (i = 0; i < itk_none; i++)
359 /* Skip itk_char. char_type_node is dependent on -f[un]signed-char. */
360 if (i != itk_char)
361 record_common_node (cache, node: integer_types[i]);
362
363 for (i = 0; i < stk_type_kind_last; i++)
364 record_common_node (cache, node: sizetype_tab[i]);
365
366 for (i = 0; i < TI_MAX; i++)
367 /* Skip boolean type and constants, they are frontend dependent. */
368 if (i != TI_BOOLEAN_TYPE
369 && i != TI_BOOLEAN_FALSE
370 && i != TI_BOOLEAN_TRUE
371 /* MAIN_IDENTIFIER is not always initialized by Fortran FE. */
372 && i != TI_MAIN_IDENTIFIER
373 /* PID_TYPE is initialized only by C family front-ends. */
374 && i != TI_PID_TYPE
375 /* Skip optimization and target option nodes; they depend on flags. */
376 && i != TI_OPTIMIZATION_DEFAULT
377 && i != TI_OPTIMIZATION_CURRENT
378 && i != TI_TARGET_OPTION_DEFAULT
379 && i != TI_TARGET_OPTION_CURRENT
380 && i != TI_CURRENT_TARGET_PRAGMA
381 && i != TI_CURRENT_OPTIMIZE_PRAGMA
382 /* SCEV state shouldn't reach the IL. */
383 && i != TI_CHREC_DONT_KNOW
384 && i != TI_CHREC_KNOWN
385 /* Skip va_list* related nodes if offloading. For native LTO
386 we want them to be merged for the stdarg pass, for offloading
387 they might not be identical between host and offloading target. */
388 && (!lto_stream_offload_p
389 || (i != TI_VA_LIST_TYPE
390 && i != TI_VA_LIST_GPR_COUNTER_FIELD
391 && i != TI_VA_LIST_FPR_COUNTER_FIELD)))
392 record_common_node (cache, node: global_trees[i]);
393}
394
395
396/* Create a cache of pickled nodes. */
397
398struct streamer_tree_cache_d *
399streamer_tree_cache_create (bool with_hashes, bool with_map, bool with_vec)
400{
401 struct streamer_tree_cache_d *cache;
402
403 cache = XCNEW (struct streamer_tree_cache_d);
404
405 if (with_map)
406 cache->node_map = new hash_map<tree, unsigned> (251);
407 cache->next_idx = 0;
408 if (with_vec)
409 cache->nodes.create (nelems: 165);
410 if (with_hashes)
411 cache->hashes.create (nelems: 165);
412
413 /* Load all the well-known tree nodes that are always created by
414 the compiler on startup. This prevents writing them out
415 unnecessarily. */
416 preload_common_nodes (cache);
417
418 return cache;
419}
420
421
422/* Delete the streamer cache C. */
423
424void
425streamer_tree_cache_delete (struct streamer_tree_cache_d *c)
426{
427 if (c == NULL)
428 return;
429
430 delete c->node_map;
431 c->node_map = NULL;
432 c->nodes.release ();
433 c->hashes.release ();
434 free (ptr: c);
435}
436

source code of gcc/tree-streamer.cc