/* LTO partitioning logic routines.
   Copyright (C) 2009-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "function.h"
#include "basic-block.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "stringpool.h"
#include "cgraph.h"
#include "lto-streamer.h"
#include "symbol-summary.h"
#include "tree-vrp.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"
#include "lto-partition.h"
#include "sreal.h"

vec<ltrans_partition> ltrans_partitions;

static void add_symbol_to_partition (ltrans_partition part, symtab_node *node);


/* Helper for qsort; compare partitions and return one with smaller order.  */

static int
cmp_partitions_order (const void *a, const void *b)
{
  const struct ltrans_partition_def *pa
     = *(struct ltrans_partition_def *const *)a;
  const struct ltrans_partition_def *pb
     = *(struct ltrans_partition_def *const *)b;
  int ordera = -1, orderb = -1;

  if (lto_symtab_encoder_size (pa->encoder))
    ordera = lto_symtab_encoder_deref (pa->encoder, 0)->order;
  if (lto_symtab_encoder_size (pb->encoder))
    orderb = lto_symtab_encoder_deref (pb->encoder, 0)->order;
  return orderb - ordera;
}

/* Create new partition with name NAME.  */

static ltrans_partition
new_partition (const char *name)
{
  ltrans_partition part = XCNEW (struct ltrans_partition_def);
  part->encoder = lto_symtab_encoder_new (false);
  part->name = name;
  part->insns = 0;
  part->symbols = 0;
  ltrans_partitions.safe_push (part);
  return part;
}

/* Free memory used by ltrans data structures.  */

void
free_ltrans_partitions (void)
{
  unsigned int idx;
  ltrans_partition part;
  for (idx = 0; ltrans_partitions.iterate (idx, &part); idx++)
    {
      if (part->initializers_visited)
	delete part->initializers_visited;
      /* Symtab encoder is freed after streaming.  */
      free (part);
    }
  ltrans_partitions.release ();
}

/* Return true if symbol is already in some partition.  */

static inline bool
symbol_partitioned_p (symtab_node *node)
{
  return node->aux;
}

/* Add references into the partition.  */
static void
add_references_to_partition (ltrans_partition part, symtab_node *node)
{
  int i;
  struct ipa_ref *ref = NULL;

  /* Add all duplicated references to the partition.  */
  for (i = 0; node->iterate_reference (i, ref); i++)
    if (ref->referred->get_partitioning_class () == SYMBOL_DUPLICATE)
      add_symbol_to_partition (part, ref->referred);
    /* References to a readonly variable may be constant folded into its
       value.  Recursively look into the initializers of the constant
       variable and add references, too.  */
    else if (is_a <varpool_node *> (ref->referred)
	     && (dyn_cast <varpool_node *> (ref->referred)
		 ->ctor_useable_for_folding_p ())
	     && !lto_symtab_encoder_in_partition_p (part->encoder, ref->referred))
      {
	if (!part->initializers_visited)
	  part->initializers_visited = new hash_set<symtab_node *>;
	if (!part->initializers_visited->add (ref->referred))
	  add_references_to_partition (part, ref->referred);
      }
}

/* Helper function for add_symbol_to_partition doing the actual dirty work
   of adding NODE to PART.  */

static bool
add_symbol_to_partition_1 (ltrans_partition part, symtab_node *node)
{
  enum symbol_partitioning_class c = node->get_partitioning_class ();
  struct ipa_ref *ref;
  symtab_node *node1;

  /* If NODE is already there, we have nothing to do.  */
  if (lto_symtab_encoder_in_partition_p (part->encoder, node))
    return true;

  /* Non-duplicated aliases or thunks of a duplicated symbol need to be
     output just once.

     Be lax about comdats; they may or may not be duplicated and we may
     end up needing to duplicate a keyed comdat because it has an unkeyed
     alias.  */
  if (c == SYMBOL_PARTITION && !DECL_COMDAT (node->decl)
      && symbol_partitioned_p (node))
    return false;

  /* Be sure that we never try to duplicate partitioned symbol
     or add external symbol.  */
  gcc_assert (c != SYMBOL_EXTERNAL
	      && (c == SYMBOL_DUPLICATE || !symbol_partitioned_p (node)));

  part->symbols++;

  lto_set_symtab_encoder_in_partition (part->encoder, node);

  if (symbol_partitioned_p (node))
    {
      node->in_other_partition = 1;
      if (dump_file)
	fprintf (dump_file,
		 "Symbol node %s now used in multiple partitions\n",
		 node->dump_name ());
    }
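  /* Use AUX as a reference count of the partitions containing NODE;
     symbol_partitioned_p tests it and undo_partition decrements it.  */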
  node->aux = (void *)((size_t)node->aux + 1);

  if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
    {
      struct cgraph_edge *e;
      if (!node->alias && c == SYMBOL_PARTITION)
	part->insns += ipa_size_summaries->get (cnode)->size;

      /* Add all inline clones and callees that are duplicated.  */
      for (e = cnode->callees; e; e = e->next_callee)
	if (!e->inline_failed)
	  add_symbol_to_partition_1 (part, e->callee);
	else if (e->callee->get_partitioning_class () == SYMBOL_DUPLICATE)
	  add_symbol_to_partition (part, e->callee);

      /* Add all thunks associated with the function.  */
      for (e = cnode->callers; e; e = e->next_caller)
	if (e->caller->thunk && !e->caller->inlined_to)
	  add_symbol_to_partition_1 (part, e->caller);
    }

  add_references_to_partition (part, node);

  /* Add all aliases associated with the symbol.  */

  FOR_EACH_ALIAS (node, ref)
    if (!ref->referring->transparent_alias)
      add_symbol_to_partition_1 (part, ref->referring);
    else
      {
	struct ipa_ref *ref2;
	/* We do not need to add transparent aliases if they are not used.
	   However we must add aliases of transparent aliases if they exist.  */
	FOR_EACH_ALIAS (ref->referring, ref2)
	  {
	    /* Nested transparent aliases are not permitted.  */
	    gcc_checking_assert (!ref2->referring->transparent_alias);
	    add_symbol_to_partition_1 (part, ref2->referring);
	  }
      }

  /* Ensure that all members of a SAME_COMDAT_GROUP list are always added
     as a group.  */
  if (node->same_comdat_group)
    for (node1 = node->same_comdat_group;
	 node1 != node; node1 = node1->same_comdat_group)
      if (!node->alias)
	{
	  bool added = add_symbol_to_partition_1 (part, node1);
	  gcc_assert (added);
	}
  return true;
}

/* If symbol NODE is really part of another symbol's definition (i.e. it is
   an internal label, thunk, alias or so), return the outer symbol.
   When add_symbol_to_partition_1 is called on the outer symbol it must
   eventually add NODE, too.  */
static symtab_node *
contained_in_symbol (symtab_node *node)
{
  /* There is no need to consider transparent aliases to be part of the
     definition: they are only useful inside the partition they are output in
     and thus we will always see an explicit reference to them.  */
  if (node->transparent_alias)
    return node;
  if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
    {
      cnode = cnode->function_symbol ();
      if (cnode->inlined_to)
	cnode = cnode->inlined_to;
      return cnode;
    }
  else if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
    return vnode->ultimate_alias_target ();
  return node;
}

/* Add symbol NODE to partition.  When the definition of NODE is part
   of another symbol's definition, add the other symbol, too.  */

static void
add_symbol_to_partition (ltrans_partition part, symtab_node *node)
{
  symtab_node *node1;

  /* Verify that we do not try to duplicate something that cannot be.  */
  gcc_checking_assert (node->get_partitioning_class () == SYMBOL_DUPLICATE
		       || !symbol_partitioned_p (node));

  while ((node1 = contained_in_symbol (node)) != node)
    node = node1;

  /* If we have a duplicated symbol contained in something we cannot
     duplicate, we are very badly screwed.  The other way is possible,
     so we do not assert this in add_symbol_to_partition_1.

     Be lax about comdats; they may or may not be duplicated and we may
     end up needing to duplicate a keyed comdat because it has an unkeyed
     alias.  */

  gcc_assert (node->get_partitioning_class () == SYMBOL_DUPLICATE
	      || DECL_COMDAT (node->decl)
	      || !symbol_partitioned_p (node));

  add_symbol_to_partition_1 (part, node);
}

/* Undo all additions until the number of symtab nodes in PARTITION is
   N_NODES.  */

static void
undo_partition (ltrans_partition partition, unsigned int n_nodes)
{
  while (lto_symtab_encoder_size (partition->encoder) > (int)n_nodes)
    {
      symtab_node *node = lto_symtab_encoder_deref (partition->encoder,
						    n_nodes);
      partition->symbols--;
      cgraph_node *cnode;

      /* After UNDO we no longer know what was visited.  */
      if (partition->initializers_visited)
	delete partition->initializers_visited;
      partition->initializers_visited = NULL;

      if (!node->alias && (cnode = dyn_cast <cgraph_node *> (node))
	  && node->get_partitioning_class () == SYMBOL_PARTITION)
	partition->insns -= ipa_size_summaries->get (cnode)->size;
      lto_symtab_encoder_delete_node (partition->encoder, node);
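      /* Drop the partition reference count taken in
	 add_symbol_to_partition_1.  */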
      node->aux = (void *)((size_t)node->aux - 1);
    }
}

/* Group cgraph nodes by input files.  This is used mainly for testing
   right now.  */

void
lto_1_to_1_map (void)
{
  symtab_node *node;
  struct lto_file_decl_data *file_data;
  hash_map<lto_file_decl_data *, ltrans_partition> pmap;
  ltrans_partition partition;
  int npartitions = 0;

  FOR_EACH_SYMBOL (node)
    {
      if (node->get_partitioning_class () != SYMBOL_PARTITION
	  || symbol_partitioned_p (node))
	continue;

      file_data = node->lto_file_data;

      if (file_data)
	{
	  ltrans_partition *slot = &pmap.get_or_insert (file_data);
	  if (*slot)
	    partition = *slot;
	  else
	    {
	      partition = new_partition (file_data->file_name);
	      *slot = partition;
	      npartitions++;
	    }
	}
      else if (!file_data && ltrans_partitions.length ())
	partition = ltrans_partitions[0];
      else
	{
	  partition = new_partition ("");
	  npartitions++;
	}

      add_symbol_to_partition (partition, node);
    }

  /* If the cgraph is empty, create one cgraph node set so that there is still
     an output file for any variables that need to be exported in a DSO.  */
  if (!npartitions)
    new_partition ("empty");

  /* Order partitions by order of symbols because they are linked into the
     binary that way.  */
  ltrans_partitions.qsort (cmp_partitions_order);
}

/* Maximal partitioning.  Put every new symbol into a new partition if
   possible.  */

void
lto_max_map (void)
{
  symtab_node *node;
  ltrans_partition partition;
  int npartitions = 0;

  FOR_EACH_SYMBOL (node)
    {
      if (node->get_partitioning_class () != SYMBOL_PARTITION
	  || symbol_partitioned_p (node))
	continue;
      partition = new_partition (node->asm_name ());
      add_symbol_to_partition (partition, node);
      npartitions++;
    }
  if (!npartitions)
    new_partition ("empty");
}

/* Helper function for qsort; sort nodes by order.  */
static int
node_cmp (const void *pa, const void *pb)
{
  const symtab_node *a = *static_cast<const symtab_node * const *> (pa);
  const symtab_node *b = *static_cast<const symtab_node * const *> (pb);
  return b->order - a->order;
}

/* Add all symtab nodes from NEXT_NODES to PARTITION in order.  */

static void
add_sorted_nodes (vec<symtab_node *> &next_nodes, ltrans_partition partition)
{
  unsigned i;
  symtab_node *node;

  next_nodes.qsort (node_cmp);
  FOR_EACH_VEC_ELT (next_nodes, i, node)
    if (!symbol_partitioned_p (node))
      add_symbol_to_partition (partition, node);
}

/* Return true if we should account the reference from N1 to N2 in the cost
   of the partition boundary.  */

bool
account_reference_p (symtab_node *n1, symtab_node *n2)
{
  if (cgraph_node *cnode = dyn_cast <cgraph_node *> (n1))
    n1 = cnode;
  /* Do not account references from aliases - they are never split across
     partitions.  */
  if (n1->alias)
    return false;
  /* Do not account recursion - the code below will handle it incorrectly
     otherwise.  Do not account references to external symbols: they will
     never become local.  Finally do not account references to duplicated
     symbols: they will always be local.  */
  if (n1 == n2
      || !n2->definition
      || n2->get_partitioning_class () != SYMBOL_PARTITION)
    return false;
  /* If the referring node is an external symbol, do not account it to the
     boundary cost.  Those are added into units only to enable possible
     constant folding and devirtualization.

     Here we do not know if it will ever be added to some partition
     (this is decided by compute_ltrans_boundary) and second, it is not
     that likely that constant folding will actually use the reference.  */
  if (contained_in_symbol (n1)
	->get_partitioning_class () == SYMBOL_EXTERNAL)
    return false;
  return true;
}


/* Group cgraph nodes into equally-sized partitions.

   The partitioning algorithm is simple: nodes are taken in predefined order.
   The order corresponds to the order we want functions to have in the final
   output.  In the future this will be given by function reordering pass, but
   at the moment we use the topological order, which is a good approximation.

   The goal is to partition this linear order into intervals (partitions) so
   that all the partitions have approximately the same size and the number of
   callgraph or IPA reference edges crossing boundaries is minimal.

   This is a lot faster (O(n) in size of callgraph) than algorithms doing
   priority-based graph clustering that are generally O(n^2) and, since
   WHOPR is designed to make things go well across partitions, it leads
   to good results.

   We compute the expected size of a partition as:

     max (total_size / lto_partitions, min_partition_size)

   We use dynamic expected size of partition so small programs are partitioned
   into enough partitions to allow use of multiple CPUs, while large programs
   are not partitioned too much.  Creating too many partitions significantly
   increases the streaming overhead.

   In the future, we would like to bound the maximal size of partitions so as
   to prevent the LTRANS stage from consuming too much memory.  At the moment,
   however, the WPA stage is the most memory intensive for large benchmarks,
   since too many types and declarations are read into memory.

   The function implements a simple greedy algorithm.  Nodes are being added
   to the current partition until after 3/4 of the expected partition size is
   reached.  Past this threshold, we keep track of boundary size (number of
   edges going to other partitions) and continue adding functions until after
   the current partition has grown to twice the expected partition size.  Then
   the process is undone to the point where the minimal ratio of boundary size
   and in-partition calls was reached.  */

void
lto_balanced_map (int n_lto_partitions, int max_partition_size)
{
  int n_varpool_nodes = 0, varpool_pos = 0, best_varpool_pos = 0;
  int best_noreorder_pos = 0;
  auto_vec <cgraph_node *> order (symtab->cgraph_count);
  auto_vec<cgraph_node *> noreorder;
  auto_vec<varpool_node *> varpool_order;
  struct cgraph_node *node;
  int64_t original_total_size, total_size = 0;
  int64_t partition_size;
  ltrans_partition partition;
  int last_visited_node = 0;
  varpool_node *vnode;
  int64_t cost = 0, internal = 0;
  unsigned int best_n_nodes = 0, best_i = 0;
  int64_t best_cost = -1, best_internal = 0, best_size = 0;
  int npartitions;
  int current_order = -1;
  int noreorder_pos = 0;

  FOR_EACH_VARIABLE (vnode)
    gcc_assert (!vnode->aux);

  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->get_partitioning_class () == SYMBOL_PARTITION)
      {
	if (node->no_reorder)
	  noreorder.safe_push (node);
	else
	  order.safe_push (node);
	if (!node->alias)
	  total_size += ipa_size_summaries->get (node)->size;
      }

  original_total_size = total_size;

  /* Streaming works best when the source units do not cross partition
     boundaries much.  This is because importing a function from a source
     unit tends to import a lot of global trees defined there.  We should
     get better about minimizing the function boundary, but until then
     things work smoother if we order in source order.  */
  order.qsort (tp_first_run_node_cmp);
  noreorder.qsort (node_cmp);

  if (dump_file)
    {
      for (unsigned i = 0; i < order.length (); i++)
	fprintf (dump_file, "Balanced map symbol order:%s:%u\n",
		 order[i]->dump_name (), order[i]->tp_first_run);
      for (unsigned i = 0; i < noreorder.length (); i++)
	fprintf (dump_file, "Balanced map symbol no_reorder:%s:%u\n",
		 noreorder[i]->dump_name (), noreorder[i]->tp_first_run);
    }

  /* Collect all variables that should not be reordered.  */
  FOR_EACH_VARIABLE (vnode)
    if (vnode->get_partitioning_class () == SYMBOL_PARTITION
	&& vnode->no_reorder)
      varpool_order.safe_push (vnode);
  n_varpool_nodes = varpool_order.length ();
  varpool_order.qsort (node_cmp);

  /* Compute partition size and create the first partition.  */
  if (param_min_partition_size > max_partition_size)
    fatal_error (input_location, "min partition size cannot be greater "
		 "than max partition size");

  partition_size = total_size / n_lto_partitions;
  if (partition_size < param_min_partition_size)
    partition_size = param_min_partition_size;
  npartitions = 1;
  partition = new_partition ("");
  if (dump_file)
    fprintf (dump_file, "Total unit size: %" PRId64 ", partition size: %" PRId64 "\n",
	     total_size, partition_size);

  auto_vec<symtab_node *> next_nodes;

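  /* Main greedy loop: process functions in ORDER, growing the current
     partition while tracking the boundary cost and the weight of edges that
     stay internal to it.  */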
  for (unsigned i = 0; i < order.length (); i++)
    {
      if (symbol_partitioned_p (order[i]))
	continue;

      current_order = order[i]->order;

      /* Output noreorder and varpool in program order first.  */
      next_nodes.truncate (0);
      while (varpool_pos < n_varpool_nodes
	     && varpool_order[varpool_pos]->order < current_order)
	next_nodes.safe_push (varpool_order[varpool_pos++]);
      while (noreorder_pos < (int)noreorder.length ()
	     && noreorder[noreorder_pos]->order < current_order)
	next_nodes.safe_push (noreorder[noreorder_pos++]);
      add_sorted_nodes (next_nodes, partition);

      if (!symbol_partitioned_p (order[i]))
	add_symbol_to_partition (partition, order[i]);


      /* Once we added a new node to the partition, we also want to add
	 all referenced variables unless they were already added into some
	 earlier partition.
	 add_symbol_to_partition adds possibly multiple nodes and
	 variables that are needed to satisfy needs of ORDER[i].
	 We remember the last visited cgraph and varpool node from the last
	 iteration of the outer loop, which allows us to process every new
	 addition.

	 At the same time we compute the size of the boundary into COST.
	 Every callgraph or IPA reference edge leaving the partition
	 contributes to COST.  Every edge inside the partition was earlier
	 computed as one leaving it and thus we need to subtract it from
	 COST.  */
      while (last_visited_node < lto_symtab_encoder_size (partition->encoder))
	{
	  int j;
	  struct ipa_ref *ref = NULL;
	  symtab_node *snode = lto_symtab_encoder_deref (partition->encoder,
							 last_visited_node);

	  if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
	    {
	      struct cgraph_edge *edge;


	      last_visited_node++;

	      gcc_assert (node->definition || node->weakref
			  || node->declare_variant_alt);

	      /* Compute boundary cost of callgraph edges.  */
	      for (edge = node->callees; edge; edge = edge->next_callee)
		/* Inline edges will always end up local.  */
		if (edge->inline_failed
		    && account_reference_p (node, edge->callee))
		  {
		    int edge_cost = edge->frequency ();
		    int index;

		    if (!edge_cost)
		      edge_cost = 1;
		    gcc_assert (edge_cost > 0);
		    index = lto_symtab_encoder_lookup (partition->encoder,
						       edge->callee);
		    if (index != LCC_NOT_FOUND
			&& index < last_visited_node - 1)
		      cost -= edge_cost, internal += edge_cost;
		    else
		      cost += edge_cost;
		  }
	      for (edge = node->callers; edge; edge = edge->next_caller)
		if (edge->inline_failed
		    && account_reference_p (edge->caller, node))
		  {
		    int edge_cost = edge->frequency ();
		    int index;

		    gcc_assert (edge->caller->definition);
		    if (!edge_cost)
		      edge_cost = 1;
		    gcc_assert (edge_cost > 0);
		    index = lto_symtab_encoder_lookup (partition->encoder,
						       edge->caller);
		    if (index != LCC_NOT_FOUND
			&& index < last_visited_node - 1)
		      cost -= edge_cost, internal += edge_cost;
		    else
		      cost += edge_cost;
		  }
	    }
	  else
	    last_visited_node++;

	  /* Compute boundary cost of IPA REF edges and at the same time look
	     into variables referenced from the current partition and try to
	     add them.  */
	  for (j = 0; snode->iterate_reference (j, ref); j++)
	    if (!account_reference_p (snode, ref->referred))
	      ;
	    else if (is_a <varpool_node *> (ref->referred))
	      {
		int index;

		vnode = dyn_cast <varpool_node *> (ref->referred);
		if (!symbol_partitioned_p (vnode)
		    && !vnode->no_reorder
		    && vnode->get_partitioning_class () == SYMBOL_PARTITION)
		  add_symbol_to_partition (partition, vnode);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   vnode);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--, internal++;
		else
		  cost++;
	      }
	    else
	      {
		int index;

		node = dyn_cast <cgraph_node *> (ref->referred);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   node);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--, internal++;
		else
		  cost++;
	      }
	  for (j = 0; snode->iterate_referring (j, ref); j++)
	    if (!account_reference_p (ref->referring, snode))
	      ;
	    else if (is_a <varpool_node *> (ref->referring))
	      {
		int index;

		vnode = dyn_cast <varpool_node *> (ref->referring);
		gcc_assert (vnode->definition);
		/* It is better to couple variables with their users,
		   because it allows them to be removed.  Coupling
		   with objects they refer to only helps to reduce
		   the number of symbols promoted to hidden.  */
		if (!symbol_partitioned_p (vnode)
		    && !vnode->no_reorder
		    && !vnode->can_remove_if_no_refs_p ()
		    && vnode->get_partitioning_class () == SYMBOL_PARTITION)
		  add_symbol_to_partition (partition, vnode);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   vnode);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--, internal++;
		else
		  cost++;
	      }
	    else
	      {
		int index;

		node = dyn_cast <cgraph_node *> (ref->referring);
		gcc_assert (node->definition || node->declare_variant_alt);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   node);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--, internal++;
		else
		  cost++;
	      }
	}

      gcc_assert (cost >= 0 && internal >= 0);

      /* If the partition is large enough, start looking for the smallest
	 boundary cost.  If the partition still seems too small (less than
	 7/8 of the target weight), accept any cost.  If the partition has
	 the right size, optimize for the highest internal/cost ratio.
	 Later we stop building the partition once its size reaches 9/8 of
	 the target weight.  */
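      /* The ratio comparison below is done by cross-multiplying in sreal,
	 which avoids division and int64_t overflow.  */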
      if (partition->insns < partition_size * 7 / 8
	  || best_cost == -1
	  || (!cost
	      || ((sreal)best_internal * (sreal) cost
		  < ((sreal) internal * (sreal)best_cost))))
	{
	  best_cost = cost;
	  best_internal = internal;
	  best_size = partition->insns;
	  best_i = i;
	  best_n_nodes = lto_symtab_encoder_size (partition->encoder);
	  best_varpool_pos = varpool_pos;
	  best_noreorder_pos = noreorder_pos;
	}
      if (dump_file)
	fprintf (dump_file, "Step %i: added %s, size %i, "
		 "cost %" PRId64 "/%" PRId64 " "
		 "best %" PRId64 "/%" PRId64", step %i\n", i,
		 order[i]->dump_name (),
		 partition->insns, cost, internal,
		 best_cost, best_internal, best_i);
      /* Partition is too large, unwind to the step where the best cost was
	 reached and start a new partition.  */
      if (partition->insns > 9 * partition_size / 8
	  || partition->insns > max_partition_size)
	{
	  if (best_i != i)
	    {
	      if (dump_file)
		fprintf (dump_file, "Unwinding %i insertions to step %i\n",
			 i - best_i, best_i);
	      undo_partition (partition, best_n_nodes);
	      varpool_pos = best_varpool_pos;
	      noreorder_pos = best_noreorder_pos;
	    }
	  gcc_assert (best_size == partition->insns);
	  i = best_i;
	  if (dump_file)
	    fprintf (dump_file,
		     "Partition insns: %i (want %" PRId64 ")\n",
		     partition->insns, partition_size);
	  /* When we are finished, avoid creating an empty partition.  */
	  while (i < order.length () - 1 && symbol_partitioned_p (order[i + 1]))
	    i++;
	  if (i == order.length () - 1)
	    break;
	  total_size -= partition->insns;
	  partition = new_partition ("");
	  last_visited_node = 0;
	  cost = 0;

	  if (dump_file)
	    fprintf (dump_file, "New partition\n");
	  best_n_nodes = 0;
	  best_cost = -1;

	  /* Since the size of partitions is just approximate, update the
	     size after we finish the current one.  */
	  if (npartitions < n_lto_partitions)
	    partition_size = total_size / (n_lto_partitions - npartitions);
	  else
	    /* Watch for overflow.  */
	    partition_size = INT_MAX / 16;

	  if (dump_file)
	    fprintf (dump_file,
		     "Total size: %" PRId64 " partition_size: %" PRId64 "\n",
		     total_size, partition_size);
	  if (partition_size < param_min_partition_size)
	    partition_size = param_min_partition_size;
	  npartitions ++;
	}
    }

  next_nodes.truncate (0);

  /* Variables that are not reachable from the code go into the last
     partition.  */
  FOR_EACH_VARIABLE (vnode)
    if (vnode->get_partitioning_class () == SYMBOL_PARTITION
	&& !symbol_partitioned_p (vnode))
      next_nodes.safe_push (vnode);

  /* Output remaining ordered symbols.  */
  while (varpool_pos < n_varpool_nodes)
    next_nodes.safe_push (varpool_order[varpool_pos++]);
  while (noreorder_pos < (int)noreorder.length ())
    next_nodes.safe_push (noreorder[noreorder_pos++]);
  /* For a single partition the boundary cost should be 0 unless we added
     final symbols here (these are not accounted for) or we have an
     accounting bug.  */
  gcc_assert (next_nodes.length () || npartitions != 1 || !best_cost || best_cost == -1);
  add_sorted_nodes (next_nodes, partition);

  if (dump_file)
    {
      fprintf (dump_file, "\nPartition sizes:\n");
      unsigned partitions = ltrans_partitions.length ();

      for (unsigned i = 0; i < partitions ; i++)
	{
	  ltrans_partition p = ltrans_partitions[i];
	  fprintf (dump_file, "partition %d contains %d (%2.2f%%)"
		   " symbols and %d (%2.2f%%) insns\n", i, p->symbols,
		   100.0 * p->symbols / order.length (), p->insns,
		   100.0 * p->insns / original_total_size);
	}

      fprintf (dump_file, "\n");
    }
}

/* Return true if we must not change the name of the NODE.  The name as
   extracted from the corresponding decl should be passed in NAME.  */

static bool
must_not_rename (symtab_node *node, const char *name)
{
  /* Our renaming machinery does not handle more than one change of assembler
     name.  We should not need more than one anyway.  */
  if (node->lto_file_data
      && lto_get_decl_name_mapping (node->lto_file_data, name) != name)
    {
      if (dump_file)
	fprintf (dump_file,
		 "Not privatizing symbol name: %s.  It was privatized already.\n",
		 name);
      return true;
    }
  /* Avoid mangling of already mangled clones.
     ??? should have a flag whether a symbol has a 'private' name already,
     since we produce some symbols like that i.e. for global constructors
     that are not really clones.
     ??? it is what unique_name means.  We only need to set it when doing
     private symbols.  */
  if (node->unique_name)
    {
      if (dump_file)
	fprintf (dump_file,
		 "Not privatizing symbol name: %s.  Has unique name.\n",
		 name);
      return true;
    }
  return false;
}

/* If we are an offload compiler, we may have to rewrite symbols to be
   valid on this target.  Return either PTR or a modified version of it.  */

static const char *
maybe_rewrite_identifier (const char *ptr)
{
#if defined ACCEL_COMPILER && (defined NO_DOT_IN_LABEL || defined NO_DOLLAR_IN_LABEL)
#ifndef NO_DOT_IN_LABEL
  char valid = '.';
  const char reject[] = "$";
#elif !defined NO_DOLLAR_IN_LABEL
  char valid = '$';
  const char reject[] = ".";
#else
  char valid = '_';
  const char reject[] = ".$";
#endif

  char *copy = NULL;
  const char *match = ptr;
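  /* Replace each rejected character with VALID, lazily copying PTR the first
     time a change is needed.  */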
  for (;;)
    {
      size_t off = strcspn (match, reject);
      if (match[off] == '\0')
	break;
      if (copy == NULL)
	{
	  copy = xstrdup (ptr);
	  match = copy;
	}
      copy[off] = valid;
    }
  if (copy)
    {
      match = IDENTIFIER_POINTER (get_identifier (copy));
      free (copy);
    }
  return match;
#else
  return ptr;
#endif
}

/* Ensure that the symbol in NODE is valid for the target, and if not,
   rewrite it.  */

static void
validize_symbol_for_target (symtab_node *node)
{
  tree decl = node->decl;
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));

  if (must_not_rename (node, name))
    return;

  const char *name2 = maybe_rewrite_identifier (name);
  if (name2 != name)
    {
      symtab->change_decl_assembler_name (decl, get_identifier (name2));
      if (node->lto_file_data)
	lto_record_renamed_decl (node->lto_file_data, name, name2);
    }
}

/* Maps symbol names to unique lto clone counters.  */
static hash_map<const char *, unsigned> *lto_clone_numbers;

/* Helper for privatize_symbol_name.  Mangle NODE symbol name
   represented by DECL.  */

static bool
privatize_symbol_name_1 (symtab_node *node, tree decl)
{
  const char *name0 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));

  if (must_not_rename (node, name0))
    return false;

  const char *name = maybe_rewrite_identifier (name0);
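  /* Number clones of the same original name so that each privatized symbol
     gets a distinct suffix.  */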
  unsigned &clone_number = lto_clone_numbers->get_or_insert (name);
  symtab->change_decl_assembler_name (decl,
				      clone_function_name (
					  name, "lto_priv", clone_number));
  clone_number++;

  if (node->lto_file_data)
    lto_record_renamed_decl (node->lto_file_data, name0,
			     IDENTIFIER_POINTER
			     (DECL_ASSEMBLER_NAME (decl)));

  if (dump_file)
    fprintf (dump_file,
	     "Privatizing symbol name: %s -> %s\n",
	     name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));

  return true;
}

/* Mangle NODE symbol name into a local name.
   This is necessary to do
   1) if two or more static vars of the same assembler name
      are merged into a single ltrans unit.
   2) if a previously static var was promoted hidden to avoid possible
      conflict with symbols defined out of the LTO world.  */

static bool
privatize_symbol_name (symtab_node *node)
{
  if (!privatize_symbol_name_1 (node, node->decl))
    return false;

  return true;
}
/* Promote symbol NODE to be public (with hidden visibility).  */

static void
promote_symbol (symtab_node *node)
{
  /* We already promoted ... */
  if (DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
      && DECL_VISIBILITY_SPECIFIED (node->decl)
      && TREE_PUBLIC (node->decl))
    {
      validize_symbol_for_target (node);
      return;
    }

  gcc_checking_assert (!TREE_PUBLIC (node->decl)
		       && !DECL_EXTERNAL (node->decl));
  /* Be sure that the newly public symbol does not conflict with anything
     already defined by the non-LTO part.  */
  privatize_symbol_name (node);
  TREE_PUBLIC (node->decl) = 1;
  /* After privatization the node should not conflict with any other symbol,
     so it is prevailing.  This is important to keep binds_to_current_def_p
     working across partitions.  */
  node->resolution = LDPR_PREVAILING_DEF_IRONLY;
  node->semantic_interposition = false;
  DECL_VISIBILITY (node->decl) = VISIBILITY_HIDDEN;
  DECL_VISIBILITY_SPECIFIED (node->decl) = true;
  if (dump_file)
    fprintf (dump_file,
	     "Promoting as hidden: %s (%s)\n", node->dump_name (),
	     IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));

  /* Promoting a symbol also promotes all transparent aliases with the
     exception of weakrefs, where the visibility flags are always wrong and
     set to !PUBLIC.  */
  ipa_ref *ref;
  for (unsigned i = 0; node->iterate_direct_aliases (i, ref); i++)
    {
      struct symtab_node *alias = ref->referring;
      if (alias->transparent_alias && !alias->weakref)
	{
	  TREE_PUBLIC (alias->decl) = 1;
	  DECL_VISIBILITY (alias->decl) = VISIBILITY_HIDDEN;
	  DECL_VISIBILITY_SPECIFIED (alias->decl) = true;
	  if (dump_file)
	    fprintf (dump_file,
		     "Promoting alias as hidden: %s\n",
		     IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
	}
      gcc_assert (!alias->weakref || TREE_PUBLIC (alias->decl));
    }
}

/* Return true if NODE needs a named section even if it won't land in
   the partition symbol table.

   FIXME: we should really not use named sections for master clones.  */

static bool
may_need_named_section_p (lto_symtab_encoder_t encoder, symtab_node *node)
{
  struct cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
  /* We do not need to handle variables since we never clone them.  */
  if (!cnode)
    return false;
  /* Only master clones will have bodies streamed.  */
  if (cnode->clone_of)
    return false;
  if (node->real_symbol_p ())
    return false;
  return (!encoder
	  || (lto_symtab_encoder_lookup (encoder, node) != LCC_NOT_FOUND
	      && lto_symtab_encoder_encode_body_p (encoder,
						   cnode)));
}

/* If NODE represents a static variable, see if there are other variables
   of the same name in partition ENCODER (or in the whole compilation unit if
   ENCODER is NULL) and if so, mangle the statics.  Always mangle all
   conflicting statics, so we reduce the chances of silently miscompiling
   asm statements referring to them by symbol name.  */

static void
rename_statics (lto_symtab_encoder_t encoder, symtab_node *node)
{
  tree decl = node->decl;
  symtab_node *s;
  tree name = DECL_ASSEMBLER_NAME (decl);

  /* See if this is a static symbol.  */
  if (((node->externally_visible && !node->weakref)
       /* FIXME: externally_visible is somewhat illogically not set for
	  external symbols (i.e. those not defined).  Remove this test
	  once this is fixed.  */
       || DECL_EXTERNAL (node->decl)
       || !node->real_symbol_p ())
      && !may_need_named_section_p (encoder, node))
    return;

  /* Now walk symbols sharing the same name and see if there are any
     conflicts.  (All types of symbols count here, since we cannot have a
     static of the same name as an external or public symbol.)  */
  for (s = symtab_node::get_for_asmname (name);
       s; s = s->next_sharing_asm_name)
    if ((s->real_symbol_p () || may_need_named_section_p (encoder, s))
	&& s->decl != node->decl
	&& (!encoder
	    || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
      break;

  /* OK, no conflict, so we have nothing to do.  */
  if (!s)
    return;

  if (dump_file)
    fprintf (dump_file,
	     "Renaming statics with asm name: %s\n", node->dump_name ());

  /* Assign every symbol in the set that shares the same ASM name a unique
     mangled name.  */
  for (s = symtab_node::get_for_asmname (name); s;)
    if ((!s->externally_visible || s->weakref)
	/* Transparent aliases having the same name as their target are
	   renamed when their target gets a new name.  Transparent aliases
	   that use a separate assembler name require the name to be
	   unique.  */
	&& (!s->transparent_alias || !s->definition || s->weakref
	    || !symbol_table::assembler_names_equal_p
		 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (s->decl)),
		  IDENTIFIER_POINTER
		    (DECL_ASSEMBLER_NAME (s->get_alias_target()->decl))))
	&& ((s->real_symbol_p ()
	     && !DECL_EXTERNAL (s->decl)
	     && !TREE_PUBLIC (s->decl))
	    || may_need_named_section_p (encoder, s))
	&& (!encoder
	    || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
      {
	if (privatize_symbol_name (s))
	  /* Re-start from the beginning since we do not know how many
	     symbols changed a name.  */
	  s = symtab_node::get_for_asmname (name);
	else
	  s = s->next_sharing_asm_name;
      }
    else
      s = s->next_sharing_asm_name;
}

/* Find out all static decls that need to be promoted to global because
   of cross-file sharing.  This function must be run in the WPA mode after
   all inlinees are added.  */

void
lto_promote_cross_file_statics (void)
{
  unsigned i, n_sets;

  gcc_assert (flag_wpa);

  lto_stream_offload_p = false;
  select_what_to_stream ();

  /* First compute boundaries.  */
  n_sets = ltrans_partitions.length ();
  for (i = 0; i < n_sets; i++)
    {
      ltrans_partition part
	= ltrans_partitions[i];
      part->encoder = compute_ltrans_boundary (part->encoder);
    }

  lto_clone_numbers = new hash_map<const char *, unsigned>;

  /* Look at boundaries and promote symbols as needed.  */
  for (i = 0; i < n_sets; i++)
    {
      lto_symtab_encoder_iterator lsei;
      lto_symtab_encoder_t encoder = ltrans_partitions[i]->encoder;

      for (lsei = lsei_start (encoder); !lsei_end_p (lsei);
	   lsei_next (&lsei))
	{
	  symtab_node *node = lsei_node (lsei);

	  /* If symbol is static, rename it if its assembler name
	     clashes with anything else in this unit.  */
	  rename_statics (encoder, node);

	  /* No need to promote if symbol already is externally visible ... */
	  if (node->externally_visible
	      /* ... or if it is part of current partition ... */
	      || lto_symtab_encoder_in_partition_p (encoder, node)
	      /* ... or if we do not partition it.  This means that it will
		 appear in every partition referencing it.  */
	      || node->get_partitioning_class () != SYMBOL_PARTITION)
	    {
	      validize_symbol_for_target (node);
	      continue;
	    }

	  promote_symbol (node);
	}
    }
  delete lto_clone_numbers;
}

/* Rename statics in the whole unit in the case that
   we do -flto-partition=none.  */

void
lto_promote_statics_nonwpa (void)
{
  symtab_node *node;

  lto_clone_numbers = new hash_map<const char *, unsigned>;
  FOR_EACH_SYMBOL (node)
    {
      rename_statics (NULL, node);
      validize_symbol_for_target (node);
    }
  delete lto_clone_numbers;
}
