/* Interprocedural analyses.
   Copyright (C) 2005-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-fnsummary.h"
#include "gimple-pretty-print.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
#include "tree-cfgcleanup.h"
#include "options.h"
#include "symtab-clones.h"
#include "attr-fnspec.h"
#include "gimple-range.h"
#include "value-range-storage.h"

/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;

function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;

/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;
/* Traits for a hash table for reusing ranges.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <ipa_vr *>
{
  typedef ipa_vr *value_type;
  typedef const vrange *compare_type;
  static hashval_t
  hash (const ipa_vr *p)
  {
    // This never gets called, except in the verification code, as
    // ipa_get_value_range () calculates the hash itself.  This
    // function is mostly here for completeness' sake.
    Value_Range vr;
    p->get_vrange (vr);
    inchash::hash hstate;
    add_vrange (vr, hstate);
    return hstate.end ();
  }
  static bool
  equal (const ipa_vr *a, const vrange *b)
  {
    return a->equal_p (*b);
  }
  static const bool empty_zero_p = true;
  static void
  mark_empty (ipa_vr *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const ipa_vr *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const ipa_vr *p)
  {
    return p == reinterpret_cast<const ipa_vr *> (1);
  }
  static void
  mark_deleted (ipa_vr *&p)
  {
    p = reinterpret_cast<ipa_vr *> (1);
  }
};

/* Hash table to avoid repeated allocations of equal ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;

/* Holders of ipa cgraph hooks:  */
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");

ipa_vr::ipa_vr ()
  : m_storage (NULL),
    m_type (NULL)
{
}

ipa_vr::ipa_vr (const vrange &r)
  : m_storage (ggc_alloc_vrange_storage (r)),
    m_type (r.type ())
{
}

bool
ipa_vr::equal_p (const vrange &r) const
{
  gcc_checking_assert (!r.undefined_p ());
  return (types_compatible_p (m_type, r.type ()) && m_storage->equal_p (r));
}

void
ipa_vr::get_vrange (Value_Range &r) const
{
  r.set_type (m_type);
  m_storage->get_vrange (r, m_type);
}

void
ipa_vr::set_unknown ()
{
  if (m_storage)
    ggc_free (m_storage);

  m_storage = NULL;
}

void
ipa_vr::streamer_read (lto_input_block *ib, data_in *data_in)
{
  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool known = bp_unpack_value (&bp, 1);
  if (known)
    {
      Value_Range vr;
      streamer_read_value_range (ib, data_in, vr);
      if (!m_storage || !m_storage->fits_p (vr))
	{
	  if (m_storage)
	    ggc_free (m_storage);
	  m_storage = ggc_alloc_vrange_storage (vr);
	}
      m_storage->set_vrange (vr);
      m_type = vr.type ();
    }
  else
    {
      m_storage = NULL;
      m_type = NULL;
    }
}

void
ipa_vr::streamer_write (output_block *ob) const
{
  struct bitpack_d bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, !!m_storage, 1);
  streamer_write_bitpack (&bp);
  if (m_storage)
    {
      Value_Range vr (m_type);
      m_storage->get_vrange (vr, m_type);
      streamer_write_vrange (ob, vr);
    }
}
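
/* Note on the stream layout used by the two methods above: a single "known"
   bit is packed first, and only when that bit is set is it followed by the
   encoding produced by streamer_write_vrange, which streamer_read mirrors
   when reconstructing the range.  */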

void
ipa_vr::dump (FILE *out) const
{
  if (known_p ())
    {
      Value_Range vr (m_type);
      m_storage->get_vrange (vr, m_type);
      vr.dump (out);
    }
  else
    fprintf (out, "NO RANGE");
}

// These stubs are because we use an ipa_vr in a hash_traits and
// hash-traits.h defines an extern of gt_ggc_mx (T &) instead of
// picking up the gt_ggc_mx (T *) version.
void
gt_pch_nx (ipa_vr *&x)
{
  return gt_pch_nx ((ipa_vr *) x);
}

void
gt_ggc_mx (ipa_vr *&x)
{
  return gt_ggc_mx ((ipa_vr *) x);
}


/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}
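
/* For illustration: a function declared with, e.g.,
   __attribute__ ((optimize (0))) carries function-specific optimization
   options with optimize disabled, so the check above makes IPA-CP skip
   analyzing it.  */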

/* Return index of the formal whose tree is PTREE in the function whose formal
   parameters are described by DESCRIPTORS.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
			    tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
      descriptors[param_num].move_cost = cost;
      /* Watch overflow, move_cost is a bitfield.  */
      gcc_checking_assert (cost == descriptors[param_num].move_cost);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump the Ith formal parameter of the function corresponding to INFO to FILE.
   Note there is no setter function as the descriptor array is built just once
   using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}

/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  ipa_node_params *info = ipa_node_params_sum->get_create (node);

  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count, true);
      return true;
    }
  else
    return false;
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  ipa_node_params *info = ipa_node_params_sum->get_create (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  ipa_edge_args *args = ipa_edge_args_sum->get (cs);
  int count = ipa_get_cs_argument_count (args);

  for (int i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (args, i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.pass_through.refdesc_decremented)
	    fprintf (f, ", refdesc_decremented");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.ancestor.keep_null)
	    fprintf (f, ", keep_null");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, " Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)
	    {
	      fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      fprintf (f, "type: ");
	      print_generic_expr (f, item->type);
	      fprintf (f, ", ");
	      if (item->jftype == IPA_JF_PASS_THROUGH)
		fprintf (f, "PASS THROUGH: %d,",
			 item->value.pass_through.formal_id);
	      else if (item->jftype == IPA_JF_LOAD_AGG)
		{
		  fprintf (f, "LOAD AGG: %d",
			   item->value.pass_through.formal_id);
		  fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC ", by %s],",
			   item->value.load_agg.offset,
			   item->value.load_agg.by_ref ? "reference"
						       : "value");
		}

	      if (item->jftype == IPA_JF_PASS_THROUGH
		  || item->jftype == IPA_JF_LOAD_AGG)
		{
		  fprintf (f, " op %s",
			   get_tree_code_name (item->value.pass_through.operation));
		  if (item->value.pass_through.operation != NOP_EXPR)
		    {
		      fprintf (f, " ");
		      print_generic_expr (f, item->value.pass_through.operand);
		    }
		}
	      else if (item->jftype == IPA_JF_CONST)
		{
		  fprintf (f, "CONST: ");
		  print_generic_expr (f, item->value.constant);
		}
	      else if (item->jftype == IPA_JF_UNKNOWN)
		fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->type)));
	      fprintf (f, "\n");
	    }
	}

      class ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (args, i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, " Context: ");
	  ctx->dump (f);
	}

      if (jump_func->m_vr)
	{
	  jump_func->m_vr->dump (f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, " Unknown VR\n");
    }
}


/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      fprintf (f, " callsite %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, " no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      class cgraph_indirect_call_info *ii;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, " no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be an unknown jump function, one that conveys no information
   about the call argument.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
}

/* Set DST to be a copy of another jump function SRC (to be used by the jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL
	  || (VAR_P (TREE_OPERAND (constant, 0))
	      && TREE_STATIC (TREE_OPERAND (constant, 0)))))
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.refdesc_decremented = false;
}

/* Set JFUNC to be a unary pass through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.refdesc_decremented = false;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.refdesc_decremented = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved, bool keep_null)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.keep_null = keep_null;
}

/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no other way to call a constructor of an ancestor sub-object.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
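
/* To illustrate the assumptions above on a sketch (hypothetical types, not
   taken from any testcase): given

     struct A { virtual void f (); };
     struct B : A { B (); virtual void f (); };

   the gimplified B::B () first calls A::A () on the A sub-object (section 1
   above), then stores the address of B's vtable into this->_vptr (section 2),
   and only then executes the user-written constructor body (section 3).  */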

/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type of the instance
   may change between the beginning of the function and the moment CALL is
   invoked.

   Generally functions are not allowed to change the dynamic type of such
   instances, but they may call destructors.  We assume that methods cannot
   destroy the THIS pointer.  Also, as a special case, constructors and
   destructors may change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     those would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
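
/* For example (a hypothetical snippet, only to illustrate the rule above):
   in

     void A::foo () { bar (this); }

   the dynamic type of *this cannot change before the call to bar unless
   A::foo is itself a constructor or destructor, or unless the call site was
   produced by inlining a ctor/dtor, which the BLOCK walk above detects.  */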

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed
   (before callsite CALL) by looking for assignments to its virtual table
   pointer.  If it may have, return true.  ARG is the object itself (not a
   pointer to it, unless dereferenced).  BASE is the base of the memory access
   as returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
				       tree base, tree comp_type, gcall *call,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  if (fbi->aa_walk_budget == 0)
    return false;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  int walked
    = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
			  &tci, NULL, NULL, fbi->aa_walk_budget);
  if (walked >= 0)
    fbi->aa_walk_budget -= walked;
  else
    fbi->aa_walk_budget = 0;

  if (walked >= 0 && !tci.type_maybe_changed)
    return false;

  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it may have, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
		    tree comp_type, gcall *call,
		    HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
						call, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
			gcall *call)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
						call, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Find the nearest valid AA status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count, true);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have gathered
   so far but which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->parm_modified || fbi->aa_walk_budget == 0)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration
   that is known not to have been modified, return the index of the parameter
   in ipa_node_params.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor, va_gc> *descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->ref_modified || fbi->aa_walk_budget == 0)
    return false;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified || fbi->aa_walk_budget == 0)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      fbi->aa_walk_budget = 0;
      modified = true;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified before
   the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will return true
   even if it cannot prove the value has not been modified, in that case it
   will store false to *GUARANTEED_UNMODIFIED, otherwise it will store true
   there.

   FBI holds gathered information about the function and DESCRIPTORS describe
   its formal parameters; STMT is the load statement.  If the function returns
   true, *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   offset within the aggregate and whether it is a load from a value passed by
   reference respectively.

   Return false if the offset divided by BITS_PER_UNIT would not fit into an
   unsigned int.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, poly_int64 *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size;
  bool reverse;
  tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);

  if (!base
      || (*offset_p / BITS_PER_UNIT) > UINT_MAX)
    return false;

  /* We cannot propagate across volatile loads.  */
  if (TREE_THIS_VOLATILE (op))
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
	 */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   or from an aggregate passed as the parameter either by value or reference,
   return the index of the parameter in ipa_node_params.  Otherwise return -1.

   FBI holds gathered information about the function.  INFO describes
   parameters of the function, STMT is the assignment statement.  If it is a
   memory load from an aggregate, *OFFSET_P is filled with offset within the
   aggregate, and *BY_REF_P specifies whether the aggregate is passed by
   reference.  */

static int
load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
				   class ipa_node_params *info,
				   gimple *stmt,
				   HOST_WIDE_INT *offset_p,
				   bool *by_ref_p)
{
  int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
  poly_int64 size;

  /* Load value from a parameter declaration.  */
  if (index >= 0)
    {
      *offset_p = -1;
      return index;
    }

  if (!gimple_assign_load_p (stmt))
    return -1;

  tree rhs = gimple_assign_rhs1 (stmt);

  /* Skip memory reference containing VIEW_CONVERT_EXPR.  */
  for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0))
    if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
      return -1;

  /* Skip memory reference containing bit-field.  */
  if (TREE_CODE (rhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (rhs))
    return -1;

  if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
			       offset_p, &size, by_ref_p))
    return -1;

  gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),
			 size));
  if (!*by_ref_p)
    {
      tree param_type = ipa_get_type (info, index);

      if (!param_type || !AGGREGATE_TYPE_P (param_type))
	return -1;
    }
  else if (TREE_THIS_VOLATILE (rhs))
    return -1;

  return index;
}

/* Walk pointer adjustments from OP (such as POINTER_PLUS and ADDR_EXPR)
   to find the original pointer.  Initialize RET to the pointer which results
   from the walk.
   If the offset is known, return true and initialize OFFSET_RET.  */

bool
unadjusted_ptr_and_unit_offset (tree op, tree *ret, poly_int64 *offset_ret)
{
  poly_int64 offset = 0;
  bool offset_known = true;
  int i;

  for (i = 0; i < param_ipa_jump_function_lookups; i++)
    {
      if (TREE_CODE (op) == ADDR_EXPR)
	{
	  poly_int64 extra_offset = 0;
	  tree base = get_addr_base_and_unit_offset (TREE_OPERAND (op, 0),
						     &extra_offset);
	  if (!base)
	    {
	      base = get_base_address (TREE_OPERAND (op, 0));
	      if (TREE_CODE (base) != MEM_REF)
		break;
	      offset_known = false;
	    }
	  else
	    {
	      if (TREE_CODE (base) != MEM_REF)
		break;
	      offset += extra_offset;
	    }
	  op = TREE_OPERAND (base, 0);
	  if (mem_ref_offset (base).to_shwi (&extra_offset))
	    offset += extra_offset;
	  else
	    offset_known = false;
	}
      else if (TREE_CODE (op) == SSA_NAME
	       && !SSA_NAME_IS_DEFAULT_DEF (op))
	{
	  gimple *pstmt = SSA_NAME_DEF_STMT (op);

	  if (gimple_assign_single_p (pstmt))
	    op = gimple_assign_rhs1 (pstmt);
	  else if (is_gimple_assign (pstmt)
		   && gimple_assign_rhs_code (pstmt) == POINTER_PLUS_EXPR)
	    {
	      poly_int64 extra_offset = 0;
	      if (ptrdiff_tree_p (gimple_assign_rhs2 (pstmt),
				  &extra_offset))
		offset += extra_offset;
	      else
		offset_known = false;
	      op = gimple_assign_rhs1 (pstmt);
	    }
	  else
	    break;
	}
      else
	break;
    }
  *ret = op;
  *offset_ret = offset;
  return offset_known;
}
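
/* As a sketch of the walk above (the SSA names and offsets are made up): for

     q_2 = p_1 + 4;
     r_3 = q_2 + 8;

   starting from r_3 the loop peels the two POINTER_PLUS_EXPRs, stores p_1
   into *RET and accumulates a unit offset of 12 into *OFFSET_RET.  */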

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

   foo (int a)
   {
     int a.0;

     a.0_2 = a;
     bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

   foo (int a)
   {
     int D.2064;

     D.2064_4 = a.1(D) + 4;
     bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

   foo (int a, int z)
   {
     int a.0;
     int D.2064;

     a.0_3 = a;
     D.2064_4 = a.0_3 + 4;
     foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

   B::foo() (struct B * const this)
   {
     struct A * D.1845;

     D.1845_2 = &this_1(D)->D.1748;
     A::bar (D.1845_2);

   INFO is the structure describing individual parameters as accessed in
   different stages of IPA optimizations.  FBI contains the information that
   is only needed during intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  class ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	default:;
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  offset_int mem_offset;
  if (!base
      || TREE_CODE (base) != MEM_REF
      || !mem_ref_offset (base).is_constant (&mem_offset))
    return;
  offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa),
			 false);
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);

  offset_int mem_offset;
  if (!expr
      || TREE_CODE (expr) != MEM_REF
      || !mem_ref_offset (expr).is_constant (&mem_offset))
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}


/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

   if (obj_2(D) != 0B)
     goto <bb 3>;
   else
     goto <bb 4>;

   <bb 3>:
   iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
   # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
   D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
   return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    class ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  gcond *cond = safe_dyn_cast <gcond *> (*gsi_last_bb (cond_bb));
  if (!cond
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm),
		       true);
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */
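
/* For reference, the layout being matched corresponds to a sketch like the
   following (the names are illustrative only):

     struct pmf_like { void (S::*method) (); ptrdiff_t delta; };

   i.e. a pointer to a method followed by an integral adjustment field.  */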

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement, and this statement is stored in
   *RHS_STMT.  Otherwise return RHS as it is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
      *rhs_stmt = def_stmt;
    }
  return rhs;
}

/* Simple linked list, describing contents of an aggregate before call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;

  /* Type of the described part of the aggregate.  */
  tree type;

  /* Known constant value or jump function data describing contents.  */
  struct ipa_load_agg_data value;

  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Add an aggregate content item into a linked list of
   ipa_known_agg_contents_list structures, in which all elements
   are sorted in ascending order of offset.  */

static inline void
add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
			  struct ipa_known_agg_contents_list *item)
{
  struct ipa_known_agg_contents_list *list = *plist;

  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
	break;

      plist = &list->next;
    }

  item->next = list;
  *plist = item;
}

/* Check whether a given aggregate content is clobbered by some element in
   a linked list of ipa_known_agg_contents_list.  */

static inline bool
clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
				struct ipa_known_agg_contents_list *item)
{
  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
	return list->offset < item->offset + item->size;

      if (list->offset + list->size > item->offset)
	return true;
    }

  return false;
}
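
/* A worked example of the overlap test above (made-up offsets and sizes, in
   bits): with LIST containing entries at {offset 0, size 32} and {offset 64,
   size 32}, an ITEM at {offset 32, size 32} is not clobbered, whereas an ITEM
   at {offset 48, size 32} is, because the entry at offset 64 starts before
   bit 48 + 32 = 80.  */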

/* Build aggregate jump function from LIST, assuming there are exactly
   VALUE_COUNT entries there and that offset of the passed argument
   is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int value_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_safe_reserve (jfunc->agg.items, value_count, true);
  for (; list; list = list->next)
    {
      struct ipa_agg_jf_item item;
      tree operand = list->value.pass_through.operand;

      if (list->value.pass_through.formal_id >= 0)
	{
	  /* Content value is derived from some formal parameter.  */
	  if (list->value.offset >= 0)
	    item.jftype = IPA_JF_LOAD_AGG;
	  else
	    item.jftype = IPA_JF_PASS_THROUGH;

	  item.value.load_agg = list->value;
	  if (operand)
	    item.value.pass_through.operand
	      = unshare_expr_without_location (operand);
	}
      else if (operand)
	{
	  /* Content value is known constant.  */
	  item.jftype = IPA_JF_CONST;
	  item.value.constant = unshare_expr_without_location (operand);
	}
      else
	continue;

      item.type = list->type;
      gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size);

      item.offset = list->offset - arg_offset;
      gcc_assert ((item.offset % BITS_PER_UNIT) == 0);

      jfunc->agg.items->quick_push (item);
    }
}

/* Given an assignment statement STMT, try to collect information into
   AGG_VALUE that will be used to construct a jump function for the RHS of the
   assignment, from which the content value of an aggregate part comes.

   Besides constant and simple pass-through jump functions, also try to
   identify whether it matches the following pattern that can be described by
   a load-value-from-aggregate jump function, which is a derivative of the
   simple pass-through jump function.

     foo (int *p)
     {
       ...

       *(q_5 + 4) = *(p_3(D) + 28) op 1;
       bar (q_5);
     }

   Here the ipa_load_agg_data structure is informative enough to describe
   constant, simple pass-through and load-value-from-aggregate.  If the value
   is constant, it is kept in the field OPERAND, and the field FORMAL_ID is
   set to -1.  For simple pass-through and load-value-from-aggregate, the
   field FORMAL_ID specifies the related formal parameter index, and the field
   OFFSET can be used to distinguish them: -1 means simple pass-through,
   otherwise load-value-from-aggregate.  */
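
/* To summarize the encoding described above:

     FORMAL_ID   OFFSET   meaning
        -1         -      constant, kept in OPERAND
       >= 0       -1      simple pass-through of that formal parameter
       >= 0      >= 0     load from that parameter at the given offset  */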

static void
analyze_agg_content_value (struct ipa_func_body_info *fbi,
                           struct ipa_load_agg_data *agg_value,
                           gimple *stmt)
{
  tree lhs = gimple_assign_lhs (stmt);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  enum tree_code code;
  int index = -1;

  /* Initialize jump function data for the aggregate part.  */
  memset (agg_value, 0, sizeof (*agg_value));
  agg_value->pass_through.operation = NOP_EXPR;
  agg_value->pass_through.formal_id = -1;
  agg_value->offset = -1;

  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs)) /* TODO: Support aggregate type.  */
      || TREE_THIS_VOLATILE (lhs)
      || TREE_CODE (lhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (lhs))
    return;

  /* Skip SSA copies.  */
  while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
    {
      if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
        break;

      stmt = SSA_NAME_DEF_STMT (rhs1);
      if (!is_gimple_assign (stmt))
        break;

      rhs1 = gimple_assign_rhs1 (stmt);
    }

  if (gphi *phi = dyn_cast<gphi *> (stmt))
    {
      /* Also handle a special case like the following (a is a formal
         parameter):

           _12 = *a_11(D).dim[0].stride;
           ...
           # iftmp.22_9 = PHI <_12(2), 1(3)>
           ...
           parm.6.dim[0].stride = iftmp.22_9;
           ...
           __x_MOD_foo (&parm.6, b_31(D));

         The aggregate function describing parm.6.dim[0].stride is encoded as
         a PASS-THROUGH jump function with ASSERT_EXPR operation with operand
         1 (the constant from the PHI node).  */

      if (gimple_phi_num_args (phi) != 2)
        return;
      tree arg0 = gimple_phi_arg_def (phi, 0);
      tree arg1 = gimple_phi_arg_def (phi, 1);
      tree operand;

      if (is_gimple_ip_invariant (arg1))
        {
          operand = arg1;
          rhs1 = arg0;
        }
      else if (is_gimple_ip_invariant (arg0))
        {
          operand = arg0;
          rhs1 = arg1;
        }
      else
        return;

      rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
      if (!is_gimple_assign (stmt))
        return;

      code = ASSERT_EXPR;
      agg_value->pass_through.operand = operand;
    }
  else if (is_gimple_assign (stmt))
    {
      code = gimple_assign_rhs_code (stmt);
      switch (gimple_assign_rhs_class (stmt))
        {
        case GIMPLE_SINGLE_RHS:
          if (is_gimple_ip_invariant (rhs1))
            {
              agg_value->pass_through.operand = rhs1;
              return;
            }
          code = NOP_EXPR;
          break;

        case GIMPLE_UNARY_RHS:
          /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
             (TRUTH_NOT_EXPR is an example), and GIMPLE_BINARY_RHS does not
             imply tcc_binary; this subtlety is somewhat misleading.

             Since tcc_unary is widely used in IPA-CP code to check an
             operation with one operand, here we only allow tcc_unary
             operations to avoid possible problems.  Then we can use
             (opclass == tcc_unary) or not to distinguish between unary
             and binary.  */
          if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code))
            return;

          rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
          break;

        case GIMPLE_BINARY_RHS:
          {
            gimple *rhs1_stmt = stmt;
            gimple *rhs2_stmt = stmt;
            tree rhs2 = gimple_assign_rhs2 (stmt);

            rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
            rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);

            if (is_gimple_ip_invariant (rhs2))
              {
                agg_value->pass_through.operand = rhs2;
                stmt = rhs1_stmt;
              }
            else if (is_gimple_ip_invariant (rhs1))
              {
                if (TREE_CODE_CLASS (code) == tcc_comparison)
                  code = swap_tree_comparison (code);
                else if (!commutative_tree_code (code))
                  return;

                agg_value->pass_through.operand = rhs1;
                stmt = rhs2_stmt;
                rhs1 = rhs2;
              }
            else
              return;

            if (TREE_CODE_CLASS (code) != tcc_comparison
                && !useless_type_conversion_p (TREE_TYPE (lhs),
                                               TREE_TYPE (rhs1)))
              return;
          }
          break;

        default:
          return;
        }
    }
  else
    return;

  if (TREE_CODE (rhs1) != SSA_NAME)
    index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
                                               &agg_value->offset,
                                               &agg_value->by_ref);
  else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
    index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1));

  if (index >= 0)
    {
      if (agg_value->offset >= 0)
        agg_value->type = TREE_TYPE (rhs1);
      agg_value->pass_through.formal_id = index;
      agg_value->pass_through.operation = code;
    }
  else
    agg_value->pass_through.operand = NULL_TREE;
}

/* If STMT is a memory store to the object whose address is BASE, extract
   information (offset, size, and value) into CONTENT, and return true;
   otherwise conservatively assume the whole object is modified with unknown
   content, and return false.  CHECK_REF means that the access to the object
   is expected to be in the form of a MEM_REF expression.  */
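
/* E.g. (a hypothetical store): for "MEM[(struct S *)p_1].f = 5" with BASE
   being p_1 and CHECK_REF true, CONTENT receives the bit position and size
   of field f, its type, and a constant value produced by
   analyze_agg_content_value.  */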

static bool
extract_mem_content (struct ipa_func_body_info *fbi,
                     gimple *stmt, tree base, bool check_ref,
                     struct ipa_known_agg_contents_list *content)
{
  HOST_WIDE_INT lhs_offset, lhs_size;
  bool reverse;

  if (!is_gimple_assign (stmt))
    return false;

  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
                                               &reverse);
  if (!lhs_base)
    return false;

  if (check_ref)
    {
      if (TREE_CODE (lhs_base) != MEM_REF
          || TREE_OPERAND (lhs_base, 0) != base
          || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
        return false;
    }
  else if (lhs_base != base)
    return false;

  content->offset = lhs_offset;
  content->size = lhs_size;
  content->type = TREE_TYPE (lhs);
  content->next = NULL;

  analyze_agg_content_value (fbi, &content->value, stmt);
  return true;
}

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled with constants or values that are derived from the caller's
   formal parameters in the way described by some kinds of jump functions.  FBI
   is the context of the caller function for interprocedural analysis.  ARG can
   either be an aggregate expression or a pointer to an aggregate.  ARG_TYPE is
   the type of the aggregate, JFUNC is the jump function for the aggregate.  */
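
/* A typical situation this detects (hypothetical source, with n being a
   formal parameter of the caller):

     struct S s;
     s.a = 1;
     s.b = n;
     bar (&s);

   Both stores become items of the aggregate jump function of the call to
   bar, the first one as a constant, the second one as a simple
   pass-through.  */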

static void
determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
                                 gcall *call, tree arg,
                                 tree arg_type,
                                 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
  bitmap visited = NULL;
  int item_count = 0, value_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;
  int max_agg_items = opt_for_fn (fbi->node->decl, param_ipa_max_agg_items);

  if (max_agg_items == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
        {
          tree type_size;
          if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
              || !POINTER_TYPE_P (TREE_TYPE (arg)))
            return;
          check_ref = true;
          arg_base = arg;
          arg_offset = 0;
          type_size = TYPE_SIZE (TREE_TYPE (arg_type));
          arg_size = tree_to_uhwi (type_size);
          ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
        }
      else if (TREE_CODE (arg) == ADDR_EXPR)
        {
          bool reverse;

          arg = TREE_OPERAND (arg, 0);
          arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
                                                  &arg_size, &reverse);
          if (!arg_base)
            return;
          if (DECL_P (arg_base))
            {
              check_ref = false;
              ao_ref_init (&r, arg_base);
            }
          else
            return;
        }
      else
        return;
    }
  else
    {
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
                                              &arg_size, &reverse);
      if (!arg_base)
        return;

      ao_ref_init (&r, arg);
    }

  /* Second stage traverses the virtual SSA web backwards starting from the
     call statement, looking only at individual dominating virtual operands
     (those whose definitions dominate the call); as long as it can tell that
     the content of the aggregate is affected by the definition of a virtual
     operand, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing that.  */

  for (tree dom_vuse = gimple_vuse (call);
       dom_vuse && fbi->aa_walk_budget > 0;)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);

      if (gimple_code (stmt) == GIMPLE_PHI)
        {
          dom_vuse = get_continuation_for_phi (stmt, &r, true,
                                               fbi->aa_walk_budget,
                                               &visited, false, NULL, NULL);
          continue;
        }

      fbi->aa_walk_budget--;
      if (stmt_may_clobber_ref_p_1 (stmt, &r))
        {
          struct ipa_known_agg_contents_list *content
            = XALLOCA (struct ipa_known_agg_contents_list);

          if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
            break;

          /* Now we get a dominating virtual operand, and need to check
             whether its value is clobbered by any other dominating one.  */
          if ((content->value.pass_through.formal_id >= 0
               || content->value.pass_through.operand)
              && !clobber_by_agg_contents_list_p (all_list, content)
              /* Since IPA-CP stores results with unsigned int offsets, we can
                 discard those which would not fit now before we stream them
                 to WPA.  */
              && (content->offset + content->size - arg_offset
                  <= (HOST_WIDE_INT) UINT_MAX * BITS_PER_UNIT))
            {
              struct ipa_known_agg_contents_list *copy
                = XALLOCA (struct ipa_known_agg_contents_list);

              /* Add to the list consisting only of dominating virtual
                 operands, whose definitions can finally reach the call.  */
              add_to_agg_contents_list (&list, (*copy = *content, copy));

              if (++value_count == max_agg_items)
                break;
            }

          /* Add to the list consisting of all dominating virtual operands.  */
          add_to_agg_contents_list (&all_list, content);

          if (++item_count == 2 * max_agg_items)
            break;
        }
      dom_vuse = gimple_vuse (stmt);
    }

  if (visited)
    BITMAP_FREE (visited);

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any meaningful items to begin with.  */

  if (value_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
    }
}


/* Return the Ith param type of callee associated with call graph
   edge E.  */
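
/* E.g. (a hypothetical callee): for "int f (int a, double b)", I equal to 1
   yields double, taken from TYPE_ARG_TYPES of the declared type, or, if the
   type list is incomplete, from the callee's DECL_ARGUMENTS.  */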

tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
               ? TREE_TYPE (e->callee->decl)
               : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
        break;
      t = TREE_CHAIN (t);
    }
  if (t && t != void_list_node)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
        return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}

/* Return a pointer to an ipa_vr just like TMP, but either find it in
   ipa_vr_hash_table or allocate it in GC memory.  */
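
/* In effect, equal ranges are shared: if two arguments are both known to be
   in, say, [0, 15], both jump functions end up pointing to the same
   GC-allocated ipa_vr, found through the hash computed by
   inchash::add_vrange.  */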

static ipa_vr *
ipa_get_value_range (const vrange &tmp)
{
  inchash::hash hstate;
  inchash::add_vrange (tmp, hstate);
  hashval_t hash = hstate.end ();
  ipa_vr **slot = ipa_vr_hash_table->find_slot_with_hash (&tmp, hash, INSERT);
  if (*slot)
    return *slot;

  ipa_vr *vr = new (ggc_alloc<ipa_vr> ()) ipa_vr (tmp);
  *slot = vr;
  return vr;
}

/* Assign to JF a pointer to a range just like TMP but either fetch a
   copy from ipa_vr_hash_table or allocate a new one in GC memory.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, const vrange &tmp)
{
  jf->m_vr = ipa_get_value_range (tmp);
}

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, const ipa_vr &vr)
{
  Value_Range tmp;
  vr.get_vrange (tmp);
  ipa_set_jfunc_vr (jf, tmp);
}

/* Compute jump functions for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
                                     struct cgraph_edge *cs)
{
  ipa_node_params *info = ipa_node_params_sum->get (cs->caller);
  ipa_edge_args *args = ipa_edge_args_sum->get_create (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num, true);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num, true);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
        {
          tree instance;
          class ipa_polymorphic_call_context context (cs->caller->decl,
                                                      arg, cs->call_stmt,
                                                      &instance);
          context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
                                    &fbi->aa_walk_budget);
          *ipa_get_ith_polymorhic_call_context (args, n) = context;
          if (!context.useless_p ())
            useful_context = true;
        }

      Value_Range vr (TREE_TYPE (arg));
      if (POINTER_TYPE_P (TREE_TYPE (arg)))
        {
          bool addr_nonzero = false;
          bool strict_overflow = false;

          if (TREE_CODE (arg) == SSA_NAME
              && param_type
              && get_range_query (cfun)->range_of_expr (vr, arg, cs->call_stmt)
              && vr.nonzero_p ())
            addr_nonzero = true;
          else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
            addr_nonzero = true;

          if (addr_nonzero)
            vr.set_nonzero (TREE_TYPE (arg));

          unsigned HOST_WIDE_INT bitpos;
          unsigned align, prec = TYPE_PRECISION (TREE_TYPE (arg));

          get_pointer_alignment_1 (arg, &align, &bitpos);

          if (align > BITS_PER_UNIT
              && opt_for_fn (cs->caller->decl, flag_ipa_bit_cp))
            {
              wide_int mask
                = wi::bit_and_not (wi::mask (prec, false, prec),
                                   wide_int::from (align / BITS_PER_UNIT - 1,
                                                   prec, UNSIGNED));
              wide_int value = wide_int::from (bitpos / BITS_PER_UNIT, prec,
                                               UNSIGNED);
              irange_bitmask bm (value, mask);
              if (!addr_nonzero)
                vr.set_varying (TREE_TYPE (arg));
              irange &r = as_a <irange> (vr);
              r.update_bitmask (bm);
              ipa_set_jfunc_vr (jfunc, vr);
            }
          else if (addr_nonzero)
            ipa_set_jfunc_vr (jfunc, vr);
          else
            gcc_assert (!jfunc->m_vr);
        }
      else
        {
          if (param_type
              && Value_Range::supports_type_p (TREE_TYPE (arg))
              && Value_Range::supports_type_p (param_type)
              && irange::supports_p (TREE_TYPE (arg))
              && irange::supports_p (param_type)
              && get_range_query (cfun)->range_of_expr (vr, arg, cs->call_stmt)
              && !vr.undefined_p ())
            {
              Value_Range resvr (vr);
              range_cast (resvr, param_type);
              if (!resvr.undefined_p () && !resvr.varying_p ())
                ipa_set_jfunc_vr (jfunc, resvr);
              else
                gcc_assert (!jfunc->m_vr);
            }
          else
            gcc_assert (!jfunc->m_vr);
        }

      if (is_gimple_ip_invariant (arg)
          || (VAR_P (arg)
              && is_global_var (arg)
              && TREE_READONLY (arg)))
        ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
               && TREE_CODE (arg) == PARM_DECL)
        {
          int index = ipa_get_param_decl_index (info, arg);

          gcc_assert (index >= 0);
          /* Aggregate passed by value, check for pass-through, otherwise we
             will attempt to fill in aggregate contents later in this
             for-cycle.  */
          if (parm_preserved_before_stmt_p (fbi, index, call, arg))
            {
              ipa_set_jf_simple_pass_through (jfunc, index, false);
              continue;
            }
        }
      else if (TREE_CODE (arg) == SSA_NAME)
        {
          if (SSA_NAME_IS_DEFAULT_DEF (arg))
            {
              int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
              if (index >= 0)
                {
                  bool agg_p;
                  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
                  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
                }
            }
          else
            {
              gimple *stmt = SSA_NAME_DEF_STMT (arg);
              if (is_gimple_assign (stmt))
                compute_complex_assign_jump_func (fbi, info, jfunc,
                                                  call, stmt, arg, param_type);
              else if (gimple_code (stmt) == GIMPLE_PHI)
                compute_complex_ancestor_jump_func (fbi, info, jfunc,
                                                    call,
                                                    as_a <gphi *> (stmt));
            }
        }

      /* If ARG is a pointer, we cannot use its type to determine the type of
         the aggregate that is passed (because type conversions are ignored in
         gimple).  Usually we can safely get the type from the function
         declaration, but in the case of K&R prototypes or variadic functions
         we can try our luck with the type of the pointer passed.
         TODO: Since we look for actual initialization of the memory object,
         we may do better to work out the type based on the memory stores we
         find.  */
      if (!param_type)
        param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
           || !ipa_get_jf_pass_through_agg_preserved (jfunc))
          && (jfunc->type != IPA_JF_ANCESTOR
              || !ipa_get_jf_ancestor_agg_preserved (jfunc))
          && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
              || POINTER_TYPE_P (param_type)))
        determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}

/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi,
                                   basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
        {
          callee = callee->ultimate_alias_target ();
          /* We do not need to bother analyzing calls to unknown functions
             unless they may become known during lto/whopr.  */
          if (!callee->definition && !flag_lto
              && !gimple_call_fnspec (cs->call_stmt).known_p ())
            continue;
        }
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}

/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */
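
/* For instance (hypothetical gimple, with F being such a parameter whose
   __pfn field is at offset 0), both of

     D.1_2 = MEM[(struct *)&f].__pfn;
     D.1_2 = MEM[(struct *)&f];

   return F when USE_DELTA is false, whereas a load of the __delta field
   matches only when USE_DELTA is true.  */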

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
                                    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
        return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset)
           ? rec : NULL_TREE;
}

/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}

/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.
   If POLYMORPHIC is true, record it as a destination of a polymorphic
   call.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
                     gcall *stmt, bool polymorphic)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  cs->indirect_info->guaranteed_unmodified = 0;
  ipa_node_params *info = ipa_node_params_sum->get (node);
  ipa_set_param_used_by_indirect_call (info, param_index, true);
  if (cs->indirect_info->polymorphic || polymorphic)
    ipa_set_param_used_by_polymorphic_call (info, param_index, true);
  return cs;
}

/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
         goto <bb 3>;
       else
         goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
                                tree target)
{
  class ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
        ipa_note_param_call (fbi->node, index, call, false);
      return;
    }

  int index;
  gimple *def = SSA_NAME_DEF_STMT (target);
  bool guaranteed_unmodified;
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg (fbi, info->descriptors, def,
                                 gimple_assign_rhs1 (def), &index, &offset,
                                 NULL, &by_ref, &guaranteed_unmodified))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
                                                    call, false);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple *d1 = SSA_NAME_DEF_STMT (n1);
  gimple *d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
        return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gcond *branch = safe_dyn_cast <gcond *> (*gsi_last_bb (bb));
  if (!branch)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
        return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
                                             (TARGET_PTRMEMFUNC_VBIT_LOCATION
                                              == ptrmemfunc_vbit_in_delta),
                                             NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
                                                    call, false);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
      cs->indirect_info->guaranteed_unmodified = 1;
    }

  return;
}

/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
                               gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  class ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
        return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
                                  call))
        return;
    }
  else
    {
      gimple *stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
        return;
      index = ipa_get_param_decl_index (info,
                                        SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
                              call, anc_offset))
        return;
    }

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
                                                call, true);
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}

/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
   containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
{
  tree target = gimple_call_fn (call);

  if (!target
      || (TREE_CODE (target) != SSA_NAME
          && !virtual_method_call_p (target)))
    return;

  struct cgraph_edge *cs = fbi->node->get_edge (call);
  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  if (cs && !cs->indirect_unknown_callee)
    return;

  if (cs->indirect_info->polymorphic && flag_devirtualize)
    {
      tree instance;
      tree target = gimple_call_fn (call);
      ipa_polymorphic_call_context context (current_function_decl,
                                            target, call, &instance);

      gcc_checking_assert (cs->indirect_info->otr_type
                           == obj_type_ref_class (target));
      gcc_checking_assert (cs->indirect_info->otr_token
                           == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));

      cs->indirect_info->vptr_changed
        = !context.get_dynamic_type (instance,
                                     OBJ_TYPE_REF_OBJECT (target),
                                     obj_type_ref_class (target), call,
                                     &fbi->aa_walk_budget);
      cs->indirect_info->context = context;
    }

  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}


/* Analyze the call statement STMT with respect to formal parameters
   (described in INFO) of caller given by FBI->NODE.  Currently it only
   checks whether formal parameters are called.  */

static void
ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
{
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
}

/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
   If OP is a parameter declaration, mark it as used in the info structure
   passed in DATA.  */

static bool
visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
{
  class ipa_node_params *info = (class ipa_node_params *) data;

  op = get_base_address (op);
  if (op
      && TREE_CODE (op) == PARM_DECL)
    {
      int index = ipa_get_param_decl_index (info, op);
      gcc_assert (index >= 0);
      ipa_set_param_used (info, index, true);
    }

  return false;
}

/* Scan the statements in BB and inspect the uses of formal parameters.  Store
   the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  FBI holds various data about
   the function being analyzed.  */

static void
ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
        continue;

      ipa_analyze_stmt_uses (fbi, stmt);
      walk_stmt_load_store_addr_ops (stmt, fbi->info,
                                     visit_ref_for_mod_analysis,
                                     visit_ref_for_mod_analysis,
                                     visit_ref_for_mod_analysis);
    }
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
                                   visit_ref_for_mod_analysis,
                                   visit_ref_for_mod_analysis,
                                   visit_ref_for_mod_analysis);
}

/* Return true if EXPR is a load from a dereference of SSA_NAME NAME.  */

static bool
load_from_dereferenced_name (tree expr, tree name)
{
  tree base = get_base_address (expr);
  return (TREE_CODE (base) == MEM_REF
          && TREE_OPERAND (base, 0) == name);
}

/* Calculate controlled uses of parameters of NODE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  ipa_node_params *info = ipa_node_params_sum->get (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int call_uses = 0;
      bool load_dereferenced = false;

      /* For SSA regs see if the parameter is used.  For non-SSA we compute
         the flag during modification analysis.  */
      if (is_gimple_reg (parm))
        {
          tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
                                       parm);
          if (ddef && !has_zero_uses (ddef))
            {
              imm_use_iterator imm_iter;
              gimple *stmt;

              ipa_set_param_used (info, i, true);
              FOR_EACH_IMM_USE_STMT (stmt, imm_iter, ddef)
                {
                  if (is_gimple_debug (stmt))
                    continue;

                  int all_stmt_uses = 0;
                  use_operand_p use_p;
                  FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
                    all_stmt_uses++;

                  if (is_gimple_call (stmt))
                    {
                      if (gimple_call_internal_p (stmt))
                        {
                          call_uses = IPA_UNDESCRIBED_USE;
                          break;
                        }
                      int recognized_stmt_uses;
                      if (gimple_call_fn (stmt) == ddef)
                        recognized_stmt_uses = 1;
                      else
                        recognized_stmt_uses = 0;
                      unsigned arg_count = gimple_call_num_args (stmt);
                      for (unsigned i = 0; i < arg_count; i++)
                        {
                          tree arg = gimple_call_arg (stmt, i);
                          if (arg == ddef)
                            recognized_stmt_uses++;
                          else if (load_from_dereferenced_name (arg, ddef))
                            {
                              load_dereferenced = true;
                              recognized_stmt_uses++;
                            }
                        }

                      if (recognized_stmt_uses != all_stmt_uses)
                        {
                          call_uses = IPA_UNDESCRIBED_USE;
                          break;
                        }
                      if (call_uses >= 0)
                        call_uses += all_stmt_uses;
                    }
                  else if (gimple_assign_single_p (stmt))
                    {
                      tree rhs = gimple_assign_rhs1 (stmt);
                      if (all_stmt_uses != 1
                          || !load_from_dereferenced_name (rhs, ddef))
                        {
                          call_uses = IPA_UNDESCRIBED_USE;
                          break;
                        }
                      load_dereferenced = true;
                    }
                  else
                    {
                      call_uses = IPA_UNDESCRIBED_USE;
                      break;
                    }
                }
            }
          else
            call_uses = 0;
        }
      else
        call_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, call_uses);
      ipa_set_param_load_dereferenced (info, i, load_dereferenced);
    }
}

/* Free stuff in BI.  */

static void
free_ipa_bb_info (struct ipa_bb_info *bi)
{
  bi->cg_edges.release ();
  bi->param_aa_statuses.release ();
}

/* Dominator walker driving the analysis.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct ipa_func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  edge before_dom_children (basic_block) final override;

private:
  struct ipa_func_body_info *m_fbi;
};

edge
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
  return NULL;
}

/* Release body info FBI.  */

void
ipa_release_body_info (struct ipa_func_body_info *fbi)
{
  int i;
  struct ipa_bb_info *bi;

  FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi->bb_infos.release ();
}

/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  class ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = ipa_node_params_sum->get_create (node);

  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  if (ipa_func_spec_opts_forbid_analysis_p (node)
      || (count_formal_params (node->decl)
          >= (1 << IPA_PROP_ARG_INDEX_LIMIT_BITS)))
    {
      gcc_assert (!ipa_get_param_count (info));
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = info;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);

  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  enable_ranger (cfun, false);
  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  disable_ranger (cfun);

  ipa_release_body_info (&fbi);
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}

/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.  */
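
/* For instance (hypothetical indices): if E's jump function for some argument
   is a simple pass-through of E->caller's parameter 2 and CS passes its own
   parameter 0 in that position, the updated jump function becomes a
   pass-through of parameter 0 of CS->caller; if CS passes a constant there,
   it becomes an IPA_JF_CONST instead.  */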

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
                                      struct cgraph_edge *e)
{
  ipa_edge_args *top = ipa_edge_args_sum->get (cs);
  ipa_edge_args *args = ipa_edge_args_sum->get (e);
  if (!args)
    return;
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
      class ipa_polymorphic_call_context *dst_ctx
        = ipa_get_ith_polymorhic_call_context (args, i);

      if (dst->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          FOR_EACH_VEC_ELT (*dst->agg.items, j, item)
            {
              int dst_fid;
              struct ipa_jump_func *src;

              if (item->jftype != IPA_JF_PASS_THROUGH
                  && item->jftype != IPA_JF_LOAD_AGG)
                continue;

              dst_fid = item->value.pass_through.formal_id;
              if (!top || dst_fid >= ipa_get_cs_argument_count (top))
                {
                  item->jftype = IPA_JF_UNKNOWN;
                  continue;
                }

              item->value.pass_through.formal_id = -1;
              src = ipa_get_ith_jump_func (top, dst_fid);
              if (src->type == IPA_JF_CONST)
                {
                  if (item->jftype == IPA_JF_PASS_THROUGH
                      && item->value.pass_through.operation == NOP_EXPR)
                    {
                      item->jftype = IPA_JF_CONST;
                      item->value.constant = src->value.constant.value;
                      continue;
                    }
                }
              else if (src->type == IPA_JF_PASS_THROUGH
                       && src->value.pass_through.operation == NOP_EXPR)
                {
                  if (item->jftype == IPA_JF_PASS_THROUGH
                      || !item->value.load_agg.by_ref
                      || src->value.pass_through.agg_preserved)
                    item->value.pass_through.formal_id
                      = src->value.pass_through.formal_id;
                }
              else if (src->type == IPA_JF_ANCESTOR)
                {
                  if (item->jftype == IPA_JF_PASS_THROUGH)
                    {
                      if (!src->value.ancestor.offset)
                        item->value.pass_through.formal_id
                          = src->value.ancestor.formal_id;
                    }
                  else if (src->value.ancestor.agg_preserved)
                    {
                      gcc_checking_assert (item->value.load_agg.by_ref);

                      item->value.pass_through.formal_id
                        = src->value.ancestor.formal_id;
                      item->value.load_agg.offset
                        += src->value.ancestor.offset;
                    }
                }

              if (item->value.pass_through.formal_id < 0)
                item->jftype = IPA_JF_UNKNOWN;
            }
        }

      if (!top)
        {
          ipa_set_jf_unknown (dst);
          continue;
        }

      if (dst->type == IPA_JF_ANCESTOR)
        {
          struct ipa_jump_func *src;
          int dst_fid = dst->value.ancestor.formal_id;
          class ipa_polymorphic_call_context *src_ctx
            = ipa_get_ith_polymorhic_call_context (top, dst_fid);

          /* Variable number of arguments can cause havoc if we try to access
             one that does not exist in the inlined edge.  So make sure we
             don't.  */
          if (dst_fid >= ipa_get_cs_argument_count (top))
            {
              ipa_set_jf_unknown (dst);
              continue;
            }

          src = ipa_get_ith_jump_func (top, dst_fid);

          if (src_ctx && !src_ctx->useless_p ())
            {
              class ipa_polymorphic_call_context ctx = *src_ctx;

              /* TODO: Make type preserved safe WRT contexts.  */
              if (!ipa_get_jf_ancestor_type_preserved (dst))
                ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
              ctx.offset_by (dst->value.ancestor.offset);
              if (!ctx.useless_p ())
                {
                  if (!dst_ctx)
                    {
                      vec_safe_grow_cleared (args->polymorphic_call_contexts,
                                             count, true);
                      dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
                    }

                  dst_ctx->combine_with (ctx);
                }
            }

          /* The parameter and the argument in an ancestor jump function must
             be of a pointer type, which means the access to the aggregate
             must be by-reference.  */
          gcc_assert (!src->agg.items || src->agg.by_ref);

          if (src->agg.items && dst->value.ancestor.agg_preserved)
            {
              struct ipa_agg_jf_item *item;
              int j;

              /* Currently we do not produce clobber aggregate jump functions,
                 replace with merging when we do.  */
              gcc_assert (!dst->agg.items);

              dst->agg.items = vec_safe_copy (src->agg.items);
              dst->agg.by_ref = src->agg.by_ref;
              FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
                item->offset -= dst->value.ancestor.offset;
            }

          if (src->type == IPA_JF_PASS_THROUGH
              && src->value.pass_through.operation == NOP_EXPR)
            {
              dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
              dst->value.ancestor.agg_preserved &=
                src->value.pass_through.agg_preserved;
            }
          else if (src->type == IPA_JF_ANCESTOR)
            {
              dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
              dst->value.ancestor.offset += src->value.ancestor.offset;
              dst->value.ancestor.agg_preserved &=
                src->value.ancestor.agg_preserved;
              dst->value.ancestor.keep_null |= src->value.ancestor.keep_null;
            }
          else
            ipa_set_jf_unknown (dst);
        }
      else if (dst->type == IPA_JF_PASS_THROUGH)
        {
          struct ipa_jump_func *src;
          /* We must check the range due to calls with a variable number of
             arguments, and we cannot combine jump functions with
             operations.  */
          if (dst->value.pass_through.operation == NOP_EXPR
              && (top && dst->value.pass_through.formal_id
                  < ipa_get_cs_argument_count (top)))
            {
              int dst_fid = dst->value.pass_through.formal_id;
              src = ipa_get_ith_jump_func (top, dst_fid);
              bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
              class ipa_polymorphic_call_context *src_ctx
                = ipa_get_ith_polymorhic_call_context (top, dst_fid);

              if (src_ctx && !src_ctx->useless_p ())
                {
                  class ipa_polymorphic_call_context ctx = *src_ctx;

                  /* TODO: Make type preserved safe WRT contexts.  */
                  if (!ipa_get_jf_pass_through_type_preserved (dst))
                    ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
                  if (!ctx.useless_p ())
                    {
                      if (!dst_ctx)
                        {
                          vec_safe_grow_cleared
                            (args->polymorphic_call_contexts, count, true);
                          dst_ctx
                            = ipa_get_ith_polymorhic_call_context (args, i);
                        }
                      dst_ctx->combine_with (ctx);
                    }
                }
              switch (src->type)
                {
                case IPA_JF_UNKNOWN:
                  ipa_set_jf_unknown (dst);
                  break;
                case IPA_JF_CONST:
                  {
                    bool rd
                      = ipa_get_jf_pass_through_refdesc_decremented (dst);
                    ipa_set_jf_cst_copy (dst, src);
                    if (rd)
                      ipa_zap_jf_refdesc (dst);
                  }
                  break;

                case IPA_JF_PASS_THROUGH:
                  {
                    int formal_id = ipa_get_jf_pass_through_formal_id (src);
                    enum tree_code operation;
                    operation = ipa_get_jf_pass_through_operation (src);

                    if (operation == NOP_EXPR)
                      {
                        bool agg_p;
                        agg_p = dst_agg_p
                          && ipa_get_jf_pass_through_agg_preserved (src);
                        ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
                      }
                    else if (TREE_CODE_CLASS (operation) == tcc_unary)
                      ipa_set_jf_unary_pass_through (dst, formal_id,
                                                     operation);
                    else
                      {
                        tree operand = ipa_get_jf_pass_through_operand (src);
                        ipa_set_jf_arith_pass_through (dst, formal_id, operand,
                                                       operation);
                      }
                    break;
                  }
                case IPA_JF_ANCESTOR:
                  {
                    bool agg_p;
                    agg_p = dst_agg_p
                      && ipa_get_jf_ancestor_agg_preserved (src);
                    ipa_set_ancestor_jf (dst,
                                         ipa_get_jf_ancestor_offset (src),
                                         ipa_get_jf_ancestor_formal_id (src),
                                         agg_p,
                                         ipa_get_jf_ancestor_keep_null (src));
                    break;
                  }
                default:
                  gcc_unreachable ();
                }

              if (src->agg.items
                  && (dst_agg_p || !src->agg.by_ref))
                {
                  /* Currently we do not produce clobber aggregate jump
                     functions, replace with merging when we do.  */
                  gcc_assert (!dst->agg.items);

                  dst->agg.by_ref = src->agg.by_ref;
                  dst->agg.items = vec_safe_copy (src->agg.items);
                }
            }
          else
            ipa_set_jf_unknown (dst);
        }
    }
}

/* If TARGET is an addr_expr of a function declaration, make it the
   (speculative) destination of an indirect edge IE and return the edge.
   Otherwise, return NULL.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
                                bool speculative)
{
  struct cgraph_node *callee;
  bool unreachable = false;

  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
        {
          /* Member pointer call that goes through a VMT lookup.  */
          if (ie->indirect_info->member_ptr
              /* Or if the target is not an invariant expression and we do not
                 know whether it will evaluate to a function at runtime.
                 This can happen when folding through &VAR, where &VAR
                 is IP invariant, but VAR itself is not.

                 TODO: Revisit this when GCC 5 is branched.  It seems that
                 member_ptr check is not needed and that we may try to fold
                 the expression and see if VAR is readonly.  */
              || !is_gimple_ip_invariant (target))
            {
              if (dump_enabled_p ())
                {
                  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
                                   "discovered direct call non-invariant %s\n",
                                   ie->caller->dump_name ());
                }
              return NULL;
            }

          if (dump_enabled_p ())
            {
              dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
                               "discovered direct call to non-function in %s, "
                               "making it __builtin_unreachable\n",
                               ie->caller->dump_name ());
            }

          target = builtin_decl_unreachable ();
          callee = cgraph_node::get_create (target);
          unreachable = true;
        }
      else
        callee = cgraph_node::get (target);
    }
  else
    callee = cgraph_node::get (target);

  /* Because may-edges are not explicitly represented and the vtable may be
     external, we may create the first reference to the object in the
     unit.  */
  if (!callee || callee->inlined_to)
    {

      /* We had better make sure we can refer to it.
         In the case of static functions we are out of luck, since we already
         removed its body.  In the case of public functions we may or may
         not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
          || !TREE_PUBLIC (target))
        {
          if (dump_file)
            fprintf (dump_file, "ipa-prop: Discovered call to a known target "
                     "(%s -> %s) but cannot refer to it.  Giving up.\n",
                     ie->caller->dump_name (),
                     ie->callee->dump_name ());
          return NULL;
        }
      callee = cgraph_node::get_create (target);
    }

  /* If the edge is already speculated.  */
  if (speculative && ie->speculative)
    {
      if (dump_file)
        {
          cgraph_edge *e2 = ie->speculative_call_for_target (callee);
          if (!e2)
            {
              if (dump_file)
                fprintf (dump_file, "ipa-prop: Discovered call to a "
                         "speculative target (%s -> %s) but the call is "
                         "already speculated to a different target.  "
                         "Giving up.\n",
                         ie->caller->dump_name (), callee->dump_name ());
            }
          else
            {
              if (dump_file)
                fprintf (dump_file,
                         "ipa-prop: Discovered call to a speculative target "
                         "(%s -> %s) that agrees with previous speculation.\n",
                         ie->caller->dump_name (), callee->dump_name ());
            }
        }
      return NULL;
    }

  if (!dbg_cnt (devirt))
    return NULL;

  ipa_check_create_node_params ();

  /* We cannot make edges to inline clones.  It is a bug that someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->inlined_to);

  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
               "(%s -> %s), for stmt ",
               ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
               speculative ? "speculative" : "known",
               ie->caller->dump_name (),
               callee->dump_name ());
      if (ie->call_stmt)
        print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
        fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  if (dump_enabled_p ())
    {
      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
                       "converting indirect call in %s to direct call to %s\n",
                       ie->caller->dump_name (), callee->dump_name ());
    }
  if (!speculative)
    {
      struct cgraph_edge *orig = ie;
      ie = cgraph_edge::make_direct (ie, callee);
      /* If we resolved a speculative edge the cost is already up to date
         for the direct call (adjusted by inline_edge_duplication_hook).  */
      if (ie == orig)
        {
          ipa_call_summary *es = ipa_call_summaries->get (ie);
          es->call_stmt_size -= (eni_size_weights.indirect_call_cost
                                 - eni_size_weights.call_cost);
          es->call_stmt_time -= (eni_time_weights.indirect_call_cost
                                 - eni_time_weights.call_cost);
        }
    }
  else
    {
      if (!callee->can_be_discarded_p ())
        {
          cgraph_node *alias;
          alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
          if (alias)
            callee = alias;
        }
      /* make_speculative will update ie's cost to direct call cost.  */
      ie = ie->make_speculative (callee, ie->count.apply_scale (8, 10));
    }

  return ie;
}

/* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
   CONSTRUCTOR and return it.  Return NULL if the search fails for some
   reason.  */
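
/* A hypothetical example: in the constructor of

     struct {int a; int b[4];} x = {1, {2, 3, 4, 5}};

   REQ_OFFSET 0 finds the constant 1, while the REQ_OFFSET corresponding to
   b[2] first matches the field b and then recurses into the inner
   constructor with the remaining offset, finding 4.  */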

static tree
find_constructor_constant_at_offset (tree constructor,
                                     HOST_WIDE_INT req_offset)
{
  tree type = TREE_TYPE (constructor);
  if (TREE_CODE (type) != ARRAY_TYPE
      && TREE_CODE (type) != RECORD_TYPE)
    return NULL;

  unsigned ix;
  tree index, val;
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
    {
      HOST_WIDE_INT elt_offset;
      if (TREE_CODE (type) == ARRAY_TYPE)
        {
          offset_int off;
          tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
          gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);

          if (index)
            {
              if (TREE_CODE (index) == RANGE_EXPR)
                off = wi::to_offset (TREE_OPERAND (index, 0));
              else
                off = wi::to_offset (index);
              if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
                {
                  tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
                  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
                  off = wi::sext (off - wi::to_offset (low_bound),
                                  TYPE_PRECISION (TREE_TYPE (index)));
                }
              off *= wi::to_offset (unit_size);
              /* ??? Handle more than just the first index of a
                 RANGE_EXPR.  */
            }
          else
            off = wi::to_offset (unit_size) * ix;

          off = wi::lshift (off, LOG2_BITS_PER_UNIT);
          if (!wi::fits_shwi_p (off) || wi::neg_p (off))
            continue;
          elt_offset = off.to_shwi ();
        }
      else if (TREE_CODE (type) == RECORD_TYPE)
        {
          gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
          if (DECL_BIT_FIELD (index))
            continue;
          elt_offset = int_bit_position (index);
        }
      else
        gcc_unreachable ();

      if (elt_offset > req_offset)
        return NULL;

      if (TREE_CODE (val) == CONSTRUCTOR)
        return find_constructor_constant_at_offset (val,
                                                    req_offset - elt_offset);

      if (elt_offset == req_offset
          && is_gimple_reg_type (TREE_TYPE (val))
          && is_gimple_ip_invariant (val))
        return val;
    }
  return NULL;
}
3622
3623/* Check whether SCALAR could be used to look up an aggregate interprocedural
3624 invariant from a static constructor and if so, return it. Otherwise return
3625 NULL. */
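/* For instance, if SCALAR is known to be `&cst_var' (with BY_REF set) where
   cst_var is a read-only global variable initialized by a constructor, a
   by-reference load at OFFSET can be answered directly from
   DECL_INITIAL (cst_var).  */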

tree
ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
{
  if (by_ref)
    {
      if (TREE_CODE (scalar) != ADDR_EXPR)
	return NULL;
      scalar = TREE_OPERAND (scalar, 0);
    }

  if (!VAR_P (scalar)
      || !is_global_var (scalar)
      || !TREE_READONLY (scalar)
      || !DECL_INITIAL (scalar)
      || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
    return NULL;

  return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
}

/* Retrieve value from AGG_JFUNC for the given OFFSET or return NULL if there
   is none.  BY_REF specifies whether the value has to be passed by reference
   or by value.  */

static tree
ipa_find_agg_cst_from_jfunc_items (struct ipa_agg_jump_function *agg_jfunc,
				   ipa_node_params *src_info,
				   cgraph_node *src_node,
				   HOST_WIDE_INT offset, bool by_ref)
{
  if (by_ref != agg_jfunc->by_ref)
    return NULL_TREE;

  for (const ipa_agg_jf_item &item : agg_jfunc->items)
    if (item.offset == offset)
      return ipa_agg_value_from_jfunc (src_info, src_node, &item);

  return NULL_TREE;
}

/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  Return true if the reference has been
   successfully found and removed.  */

static bool
remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  if (!origin)
    return false;
  to_del = origin->caller->find_reference (symbol, origin->call_stmt,
					   origin->lto_stmt_uid, IPA_REF_ADDR);
  if (!to_del)
    return false;

  to_del->remove_reference ();
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
	     origin->caller->dump_name (), symbol->dump_name ());
  return true;
}

/* If JFUNC has a reference description with refcount different from
   IPA_UNDESCRIBED_USE, return the reference description, otherwise return
   NULL.  JFUNC must be a constant jump function.  */

static struct ipa_cst_ref_desc *
jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
  if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
    return rdesc;
  else
    return NULL;
}

/* If the value of constant jump function JFUNC is an address of a function or
   variable declaration, return the associated symbol table node.  Otherwise
   return NULL.  */

static symtab_node *
symtab_node_for_jfunc (struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (jfunc->type == IPA_JF_CONST);
  tree cst = ipa_get_jf_constant (jfunc);
  if (TREE_CODE (cst) != ADDR_EXPR
      || (TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL
	  && TREE_CODE (TREE_OPERAND (cst, 0)) != VAR_DECL))
    return NULL;

  return symtab_node::get (TREE_OPERAND (cst, 0));
}


/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and if it hits zero, remove reference to SYMBOL from the caller of
   the edge specified in the rdesc.  Return false if either the symbol or the
   reference could not be found, otherwise return true.  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      symtab_node *symbol = symtab_node_for_jfunc (jfunc);
      if (!symbol)
	return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}

/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  TARGET_TYPE is
   the type of the parameter to which the result of JFUNC is passed.  If it can
   be determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT and NEW_ROOT_INFO are the node and its info that JFUNC lattices
   are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc, tree target_type,
				  struct cgraph_node *new_root,
				  class ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target = NULL_TREE;
  bool agg_contents = ie->indirect_info->agg_contents;
  tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
  if (agg_contents)
    {
      if (scalar)
	target = ipa_find_agg_cst_from_init (scalar, ie->indirect_info->offset,
					     ie->indirect_info->by_ref);
      if (!target && ie->indirect_info->guaranteed_unmodified)
	target = ipa_find_agg_cst_from_jfunc_items (&jfunc->agg, new_root_info,
						    new_root,
						    ie->indirect_info->offset,
						    ie->indirect_info->by_ref);
    }
  else
    target = scalar;
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !symtab_node_for_jfunc (jfunc)
			       || cs->callee == symtab_node_for_jfunc (jfunc)));
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}

/* Return the target to be used in cases of impossible devirtualization.  IE
   and target (the latter can be NULL) are dumped when dumping is enabled.  */
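/* The returned declaration is that of an unreachable builtin (see
   builtin_decl_unreachable); creating a cgraph node for it allows the
   indirect edge to be redirected to it like to any ordinary direct
   target.  */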

tree
ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
{
  if (dump_file)
    {
      if (target)
	fprintf (dump_file,
		 "Type inconsistent devirtualization: %s->%s\n",
		 ie->caller->dump_name (),
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
      else
	fprintf (dump_file,
		 "No devirtualization target in %s\n",
		 ie->caller->dump_name ());
    }
  tree new_target = builtin_decl_unreachable ();
  cgraph_node::get_create (new_target);
  return new_target;
}

/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  CTX describes the polymorphic context that the
   parameter the call is based on brings along with it.  NEW_ROOT and
   NEW_ROOT_INFO are the node and its info that JFUNC lattices are relative
   to.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   class ipa_polymorphic_call_context ctx,
				   struct cgraph_node *new_root,
				   class ipa_node_params *new_root_info)
{
  tree target = NULL;
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree t = NULL_TREE;
      if (jfunc->type == IPA_JF_CONST)
	t = ipa_find_agg_cst_from_init (ipa_get_jf_constant (jfunc),
					ie->indirect_info->offset, true);
      if (!t)
	t = ipa_find_agg_cst_from_jfunc_items (&jfunc->agg, new_root_info,
					       new_root,
					       ie->indirect_info->offset, true);
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  bool can_refer;
	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						 vtable, offset, &can_refer);
	  if (can_refer)
	    {
	      if (!t
		  || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE,
					BUILT_IN_UNREACHABLE_TRAP)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!ie->indirect_info->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		  else
		    target = NULL;
		}
	      else
		{
		  target = t;
		  speculative = ie->indirect_info->vptr_changed;
		}
	    }
	}
    }

  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *>targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
				      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     ctx, &final);
  if (final && targets.length () <= 1)
    {
      speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	   && !ie->speculative && ie->maybe_hot_p ())
    {
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
					    ie->indirect_info->otr_token,
					    ie->indirect_info->context);
      if (n)
	{
	  target = n->decl;
	  speculative = true;
	}
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
	    (ie, cgraph_node::get_create (target)))
	{
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}

/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff new edges were created.  */
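/* For example, if the argument feeding an indirect call in the inlined body
   was simply passed through from parameter 1 of the caller (a NOP_EXPR
   pass-through jump function with formal_id 1), the indirect call is
   re-described below in terms of parameter 1 of the new root, so that a
   constant later discovered for that parameter can still resolve the
   call.  */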

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  class ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct cgraph_node *new_root;
  class ipa_node_params *new_root_info, *inlined_node_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = ipa_edge_args_sum->get (cs);
  new_root = cs->caller->inlined_to
    ? cs->caller->inlined_to : cs->caller;
  new_root_info = ipa_node_params_sum->get (new_root);
  inlined_node_info = ipa_node_params_sum->get (cs->callee->function_symbol ());

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      class cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;

      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      auto_vec<cgraph_node *, 4> spec_targets;
      if (ie->speculative)
	for (cgraph_edge *direct = ie->first_speculative_call_target ();
	     direct;
	     direct = direct->next_speculative_call_target ())
	  spec_targets.safe_push (direct->callee);

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
							       new_root,
							       new_root_info);
	}
      else
	{
	  tree target_type = ipa_get_type (inlined_node_info, param_index);
	  new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							      target_type,
							      new_root,
							      new_root_info);
	}

      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
	  && spec_targets.contains (new_direct_edge->callee))
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  res = true;
	  if (!new_direct_edge->speculative)
	    continue;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  /* If a speculative edge was introduced, we still need to update
	     the call info of the indirect edge.  */
	  if (!new_direct_edge->speculative)
	    continue;
	}
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
		ici->vptr_changed = true;
	      ipa_set_param_used_by_indirect_call (new_root_info,
						   ici->param_index, true);
	      if (ici->polymorphic)
		ipa_set_param_used_by_polymorphic_call (new_root_info,
							ici->param_index, true);
	    }
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
		ici->vptr_changed = true;
	      ipa_set_param_used_by_indirect_call (new_root_info,
						   ici->param_index, true);
	      if (ici->polymorphic)
		ipa_set_param_used_by_polymorphic_call (new_root_info,
							ici->param_index, true);
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}

/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff new edges were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
				   struct cgraph_node *node,
				   vec<cgraph_edge *> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}

/* Combine two controlled uses counts as done during inlining.  */
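/* One use is consumed by the act of passing the argument to the now-inlined
   call itself, so when both counts are known, combining for instance 3
   controlled uses in one function with 2 in the other yields 3 + 2 - 1 = 4.
   IPA_UNDESCRIBED_USE in either count makes the result undescribed too.  */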

static int
combine_controlled_uses_counters (int c, int d)
{
  if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
    return IPA_UNDESCRIBED_USE;
  else
    return c + d - 1;
}

/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */
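/* If the combined count of a constant's reference description drops to zero,
   no described uses of its address remain and the corresponding IPA_REF_ADDR
   reference can be removed from the caller, as done below.  */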

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  ipa_edge_args *args = ipa_edge_args_sum->get (cs);
  if (!args)
    return;
  struct cgraph_node *new_root = cs->caller->inlined_to
    ? cs->caller->inlined_to : cs->caller;
  ipa_node_params *new_root_info = ipa_node_params_sum->get (new_root);
  ipa_node_params *old_root_info = ipa_node_params_sum->get (cs->callee);
  int count, i;

  if (!old_root_info)
    return;

  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH
	  && !ipa_get_jf_pass_through_refdesc_decremented (jf))
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  bool lderef = true;
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      lderef = (ipa_get_param_load_dereferenced (new_root_info, src_idx)
			|| ipa_get_param_load_dereferenced (old_root_info, i));
	      ipa_set_param_load_dereferenced (new_root_info, src_idx, lderef);
	    }

	  if (c == 0 && !lderef && new_root_info->ipcp_orig_node)
	    {
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_csts[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0,
						      IPA_REF_ADDR)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s to %s.\n",
			     new_root->dump_name (),
			     n->dump_name ());
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  tree cst = ipa_get_jf_constant (jf);
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount != IPA_UNDESCRIBED_USE
	      && ipa_get_param_load_dereferenced (old_root_info, i)
	      && TREE_CODE (cst) == ADDR_EXPR
	      && VAR_P (TREE_OPERAND (cst, 0)))
	    {
	      symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0));
	      new_root->create_reference (n, IPA_REF_LOAD, NULL);
	      if (dump_file)
		fprintf (dump_file, "ipa-prop: Address IPA constant will reach "
			 "a load so adding LOAD reference from %s to %s.\n",
			 new_root->dump_name (), n->dump_name ());
	    }
	  if (rdesc->refcount == 0)
	    {
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && ((TREE_CODE (TREE_OPERAND (cst, 0))
					== FUNCTION_DECL)
				       || VAR_P (TREE_OPERAND (cst, 0))));

	      symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  remove_described_reference (n, rdesc);
		  cgraph_node *clone = cs->caller;
		  while (clone->inlined_to
			 && clone->ipcp_clone
			 && clone != rdesc->cs->caller)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0, IPA_REF_ADDR);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s to %s.\n",
				     clone->dump_name (),
				     n->dump_name ());
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}

/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff new edges were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_sum);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
  ipa_node_params_sum->remove (cs->callee);

  ipa_edge_args *args = ipa_edge_args_sum->get (cs);
  if (args)
    {
      bool ok = true;
      if (args->jump_functions)
	{
	  struct ipa_jump_func *jf;
	  int i;
	  FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	    if (jf->type == IPA_JF_CONST
		&& ipa_get_jf_constant_rdesc (jf))
	      {
		ok = false;
		break;
	      }
	}
      if (ok)
	ipa_edge_args_sum->remove (cs);
    }
  if (ipcp_transformation_sum)
    ipcp_transformation_sum->remove (cs->callee);

  return changed;
}

/* Ensure that the array of edge argument infos is big enough to accommodate a
   structure for all edges, reallocating it if not.  Also allocate the
   associated hash tables if they do not already exist.  */

void
ipa_check_create_edge_args (void)
{
  if (!ipa_edge_args_sum)
    ipa_edge_args_sum
      = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
	 ipa_edge_args_sum_t (symtab, true));
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
}

/* Free all ipa_edge structures.  */

void
ipa_free_all_edge_args (void)
{
  if (!ipa_edge_args_sum)
    return;

  ggc_delete (ipa_edge_args_sum);
  ipa_edge_args_sum = NULL;
}

/* Free all ipa_node_params structures.  */

void
ipa_free_all_node_params (void)
{
  if (ipa_node_params_sum)
    ggc_delete (ipa_node_params_sum);
  ipa_node_params_sum = NULL;
}

/* Initialize IPA CP transformation summary and also allocate any necessary
   hash tables if they do not already exist.  */

void
ipcp_transformation_initialize (void)
{
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
  if (ipcp_transformation_sum == NULL)
    {
      ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
      ipcp_transformation_sum->disable_insertion_hook ();
    }
}

/* Release the IPA CP transformation summary.  */

void
ipcp_free_transformation_sum (void)
{
  if (!ipcp_transformation_sum)
    return;

  ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
  ggc_free (ipcp_transformation_sum);
  ipcp_transformation_sum = NULL;
}

/* Set the aggregate replacements of NODE to be AGGS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      vec<ipa_argagg_value, va_gc> *aggs)
{
  ipcp_transformation_initialize ();
  ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
  s->m_agg_values = aggs;
}

/* Hook that is called by cgraph.cc when an edge is removed.  Adjust reference
   count data structures accordingly.  */

void
ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
{
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  try_decrement_rdesc_refcount (jf);
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }
}

/* Method invoked when an edge is duplicated.  Copy ipa_edge_args and adjust
   reference count data structures accordingly.  */

void
ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
				ipa_edge_args *old_args, ipa_edge_args *new_args)
{
  unsigned int i;

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      /* Creation of a speculative edge.  If the source edge is the one
		 grabbing a reference, we must create a new (duplicate)
		 reference description.  Otherwise they refer to the same
		 description corresponding to a reference taken in a function
		 src->caller is inlined to.  In that case we just must
		 increment the refcount.  */
	      if (src_rdesc->cs == src)
		{
		  symtab_node *n = symtab_node_for_jfunc (src_jf);
		  gcc_checking_assert (n);
		  ipa_ref *ref
		    = src->caller->find_reference (n, src->call_stmt,
						   src->lto_stmt_uid,
						   IPA_REF_ADDR);
		  gcc_checking_assert (ref);
		  dst->caller->clone_reference (ref, ref->stmt);

		  ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
		  dst_rdesc->cs = dst;
		  dst_rdesc->refcount = src_rdesc->refcount;
		  dst_rdesc->next_duplicate = NULL;
		  dst_jf->value.constant.rdesc = dst_rdesc;
		}
	      else
		{
		  src_rdesc->refcount++;
		  dst_jf->value.constant.rdesc = src_rdesc;
		}
	    }
	  else if (src_rdesc->cs == src)
	    {
	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->inlined_to
		    ? dst_rdesc->cs->caller->inlined_to
		    : dst_rdesc->cs->caller;
		  if (dst->caller->inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
	       && src->caller == dst->caller)
	{
	  struct cgraph_node *inline_root = dst->caller->inlined_to
	    ? dst->caller->inlined_to : dst->caller;
	  ipa_node_params *root_info = ipa_node_params_sum->get (inline_root);
	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

	  int c = ipa_get_controlled_uses (root_info, idx);
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      c++;
	      ipa_set_controlled_uses (root_info, idx, c);
	    }
	}
    }
}

/* Analyze newly added function into callgraph.  */

static void
ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->has_gimple_body_p ())
    ipa_analyze_node (node);
}

/* Hook that is called by summary when a node is duplicated.  */

void
ipa_node_params_t::duplicate (cgraph_node *, cgraph_node *,
			      ipa_node_params *old_info,
			      ipa_node_params *new_info)
{
  new_info->descriptors = vec_safe_copy (old_info->descriptors);
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;
  new_info->known_csts = old_info->known_csts.copy ();
  new_info->known_contexts = old_info->known_contexts.copy ();

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;
  new_info->versionable = old_info->versionable;
}

/* Duplication of ipcp transformation summaries.  */

void
ipcp_transformation_t::duplicate (cgraph_node *, cgraph_node *dst,
				  ipcp_transformation *src_trans,
				  ipcp_transformation *dst_trans)
{
  /* Avoid redundant work of duplicating vectors we will never use.  */
  if (dst->inlined_to)
    return;
  dst_trans->m_agg_values = vec_safe_copy (src_trans->m_agg_values);
  dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
}

/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();

  function_insertion_hook_holder =
    symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}

/* Unregister our cgraph hooks if they are registered.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  if (function_insertion_hook_holder)
    symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}

/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  if (!optimize && !in_lto_p)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      ipcp_sources_pool.release ();
      ipcp_cst_values_pool.release ();
      ipcp_poly_ctx_values_pool.release ();
      ipcp_agg_lattice_pool.release ();
      ipa_unregister_cgraph_hooks ();
      ipa_refdesc_pool.release ();
    }
}

/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  ipcp_sources_pool.release ();
  ipcp_cst_values_pool.release ();
  ipcp_poly_ctx_values_pool.release ();
  ipcp_agg_lattice_pool.release ();
  ipa_refdesc_pool.release ();
}

/* Print ipa_tree_map data structures of function NODE to F.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  class ipa_node_params *info;

  if (!node->definition)
    return;
  info = ipa_node_params_sum->get (node);
  fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
  if (!info)
    {
      fprintf (f, " no params return\n");
      return;
    }
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, " ");
      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      if (ipa_is_param_used_by_ipa_predicates (info, i))
	fprintf (f, " used_by_ipa_predicates");
      if (ipa_is_param_used_by_indirect_call (info, i))
	fprintf (f, " used_by_indirect_call");
      if (ipa_is_param_used_by_polymorphic_call (info, i))
	fprintf (f, " used_by_polymorphic_call");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
	fprintf (f, " undescribed_use");
      else
	fprintf (f, " controlled_uses=%i %s", c,
		 ipa_get_param_load_dereferenced (info, i)
		 ? "(load_dereferenced)" : "");
      fprintf (f, "\n");
    }
}

/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}

/* Stream out jump function JUMP_FUNC to OB.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;
  int flag = 0;

  /* ADDR_EXPRs are very common IP invariants; save some streamer data
     as well as WPA memory by handling them specially.  */
  if (jump_func->type == IPA_JF_CONST
      && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
    flag = 1;
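  /* The type and the ADDR_EXPR flag share a single uhwi below: the least
     significant bit carries the flag, the remaining bits the jump function
     type.  ipa_read_jump_function undoes this as VAL & 1 and VAL / 2.  */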

  streamer_write_uhwi (ob, jump_func->type * 2 + flag);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (EXPR_LOCATION (jump_func->value.constant.value)
		  == UNKNOWN_LOCATION);
      stream_write_tree (ob,
			 flag
			 ? TREE_OPERAND (jump_func->value.constant.value, 0)
			 : jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  gcc_assert (!jump_func->value.pass_through.refdesc_decremented);
	  streamer_write_bitpack (&bp);
	}
      else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
	       == tcc_unary)
	streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
      else
	{
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      bp_pack_value (&bp, jump_func->value.ancestor.keep_null, 1);
      streamer_write_bitpack (&bp);
      break;
    default:
      fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
    }

  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      stream_write_tree (ob, item->type, true);
      streamer_write_uhwi (ob, item->offset);
      streamer_write_uhwi (ob, item->jftype);
      switch (item->jftype)
	{
	case IPA_JF_UNKNOWN:
	  break;
	case IPA_JF_CONST:
	  stream_write_tree (ob, item->value.constant, true);
	  break;
	case IPA_JF_PASS_THROUGH:
	case IPA_JF_LOAD_AGG:
	  streamer_write_uhwi (ob, item->value.pass_through.operation);
	  streamer_write_uhwi (ob, item->value.pass_through.formal_id);
	  if (TREE_CODE_CLASS (item->value.pass_through.operation)
	      != tcc_unary)
	    stream_write_tree (ob, item->value.pass_through.operand, true);
	  if (item->jftype == IPA_JF_LOAD_AGG)
	    {
	      stream_write_tree (ob, item->value.load_agg.type, true);
	      streamer_write_uhwi (ob, item->value.load_agg.offset);
	      bp = bitpack_create (ob->main_stream);
	      bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
	      streamer_write_bitpack (&bp);
	    }
	  break;
	default:
	  fatal_error (UNKNOWN_LOCATION,
		       "invalid jump function in LTO stream");
	}
    }

  bp = bitpack_create (ob->main_stream);
  if (jump_func->m_vr)
    jump_func->m_vr->streamer_write (ob);
  else
    {
      bp_pack_value (&bp, false, 1);
      streamer_write_bitpack (&bp);
    }
}

/* Read in jump function JUMP_FUNC from IB.  */

static void
ipa_read_jump_function (class lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			class data_in *data_in,
			bool prevails)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;
  int val = streamer_read_uhwi (ib);
  bool flag = val & 1;

  jftype = (enum jump_func_type) (val / 2);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      {
	tree t = stream_read_tree (ib, data_in);
	if (flag && prevails)
	  t = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);
	ipa_set_jf_constant (jump_func, t, cs);
      }
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else if (TREE_CODE_CLASS (operation) == tcc_unary)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
	}
      else
	{
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	bool keep_null = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved,
			     keep_null);
	break;
      }
    default:
      fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
    }

  count = streamer_read_uhwi (ib);
  if (prevails)
    {
      jump_func->agg.items = NULL;
      vec_safe_reserve (jump_func->agg.items, count, true);
    }
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.type = stream_read_tree (ib, data_in);
      item.offset = streamer_read_uhwi (ib);
      item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);

      switch (item.jftype)
	{
	case IPA_JF_UNKNOWN:
	  break;
	case IPA_JF_CONST:
	  item.value.constant = stream_read_tree (ib, data_in);
	  break;
	case IPA_JF_PASS_THROUGH:
	case IPA_JF_LOAD_AGG:
	  operation = (enum tree_code) streamer_read_uhwi (ib);
	  item.value.pass_through.operation = operation;
	  item.value.pass_through.formal_id = streamer_read_uhwi (ib);
	  if (TREE_CODE_CLASS (operation) == tcc_unary)
	    item.value.pass_through.operand = NULL_TREE;
	  else
	    item.value.pass_through.operand = stream_read_tree (ib, data_in);
	  if (item.jftype == IPA_JF_LOAD_AGG)
	    {
	      struct bitpack_d bp;
	      item.value.load_agg.type = stream_read_tree (ib, data_in);
	      item.value.load_agg.offset = streamer_read_uhwi (ib);
	      bp = streamer_read_bitpack (ib);
	      item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
	    }
	  break;
	default:
	  fatal_error (UNKNOWN_LOCATION,
		       "invalid jump function in LTO stream");
	}
      if (prevails)
	jump_func->agg.items->quick_push (item);
    }

  ipa_vr vr;
  vr.streamer_read (ib, data_in);
  if (vr.known_p ())
    {
      if (prevails)
	ipa_set_jfunc_vr (jump_func, vr);
    }
  else
    jump_func->m_vr = NULL;
}

/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */
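/* The fields are written in exactly the order in which
   ipa_read_indirect_edge_info below reads them back.  */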

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}

/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  */

static void
ipa_read_indirect_edge_info (class lto_input_block *ib,
			     class data_in *data_in,
			     struct cgraph_edge *cs,
			     class ipa_node_params *info)
{
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
  if (info && ii->param_index >= 0)
    {
      if (ii->polymorphic)
	ipa_set_param_used_by_polymorphic_call (info,
						ii->param_index, true);
      ipa_set_param_used_by_indirect_call (info,
					   ii->param_index, true);
    }
}

/* Stream out NODE info to OB.  */
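/* The layout (parameter count, move costs, a bitpack of per-parameter flags,
   controlled-use counts with parameter types, and finally the jump functions
   of individual call sites) is mirrored by ipa_read_node_info.  */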

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  ipa_node_params *info = ipa_node_params_sum->get (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    {
      /* TODO: We could just not stream the bit in the undescribed case.  */
      bool d = (ipa_get_controlled_uses (info, j) != IPA_UNDESCRIBED_USE)
	? ipa_get_param_load_dereferenced (info, j) : true;
      bp_pack_value (&bp, d, 1);
      bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
    }
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    {
      streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
      stream_write_tree (ob, ipa_get_type (info, j), true);
    }
  for (e = node->callees; e; e = e->next_callee)
    {
      ipa_edge_args *args = ipa_edge_args_sum->get (e);

      if (!args)
	{
	  streamer_write_uhwi (ob, 0);
	  continue;
	}

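      /* The argument count is doubled and its least significant bit records
	 whether polymorphic call contexts follow; ipa_read_edge_info decodes
	 this as COUNT & 1 and COUNT / 2.  */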
      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      ipa_edge_args *args = ipa_edge_args_sum->get (e);
      if (!args)
	streamer_write_uhwi (ob, 0);
      else
	{
	  streamer_write_uhwi (ob,
			       ipa_get_cs_argument_count (args) * 2
			       + (args->polymorphic_call_contexts != NULL));
	  for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	    {
	      ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	      if (args->polymorphic_call_contexts != NULL)
		ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	    }
	}
      ipa_write_indirect_edge_info (ob, e);
    }
}

/* Stream in edge E from IB.  */

static void
ipa_read_edge_info (class lto_input_block *ib,
		    class data_in *data_in,
		    struct cgraph_edge *e, bool prevails)
{
  int count = streamer_read_uhwi (ib);
  bool contexts_computed = count & 1;

  count /= 2;
  if (!count)
    return;
  if (prevails
      && (e->possibly_call_in_translation_unit_p ()
	  /* Also stream in jump functions to builtins in hope that they
	     will get fnspecs.  */
	  || fndecl_built_in_p (e->callee->decl, BUILT_IN_NORMAL)))
    {
      ipa_edge_args *args = ipa_edge_args_sum->get_create (e);
      vec_safe_grow_cleared (args->jump_functions, count, true);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count, true);
      for (int k = 0; k < count; k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in, prevails);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in
	      (ib, data_in);
	}
    }
  else
    {
      for (int k = 0; k < count; k++)
	{
	  struct ipa_jump_func dummy;
	  ipa_read_jump_function (ib, &dummy, e,
				  data_in, prevails);
	  if (contexts_computed)
	    {
	      class ipa_polymorphic_call_context ctx;
	      ctx.stream_in (ib, data_in);
	    }
	}
    }
}

/* Stream in NODE info from IB.  */

static void
ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
		    class data_in *data_in)
{
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;
  bool prevails = node->prevailing_p ();
  ipa_node_params *info
    = prevails ? ipa_node_params_sum->get_create (node) : NULL;

  int param_count = streamer_read_uhwi (ib);
  if (prevails)
    {
      ipa_alloc_node_params (node, param_count);
      for (k = 0; k < param_count; k++)
	(*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
      if (ipa_get_param_count (info) != 0)
	info->analysis_done = true;
      info->node_enqueued = false;
    }
  else
    for (k = 0; k < param_count; k++)
      streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  for (k = 0; k < param_count; k++)
    {
      bool load_dereferenced = bp_unpack_value (&bp, 1);
      bool used = bp_unpack_value (&bp, 1);

      if (prevails)
	{
	  ipa_set_param_load_dereferenced (info, k, load_dereferenced);
	  ipa_set_param_used (info, k, used);
	}
    }
  for (k = 0; k < param_count; k++)
    {
      int nuses = streamer_read_hwi (ib);
      tree type = stream_read_tree (ib, data_in);

      if (prevails)
	{
	  ipa_set_controlled_uses (info, k, nuses);
	  (*info->descriptors)[k].decl_or_type = type;
	}
    }
  for (e = node->callees; e; e = e->next_callee)
    ipa_read_edge_info (ib, data_in, e, prevails);
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      ipa_read_edge_info (ib, data_in, e, prevails);
      ipa_read_indirect_edge_info (ib, data_in, e, info);
    }
}

/* Write jump functions for all functions in the current partition.  */

void
ipa_prop_write_jump_functions (void)
{
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_sum || !ipa_edge_args_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      cgraph_node *node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && ipa_node_params_sum->get (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      cgraph_node *node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && ipa_node_params_sum->get (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}

/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  class data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}

/* Read ipcp jump functions.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data
	= lto_get_summary_section_data (file_data, LTO_section_jump_functions,
					&len);
      if (data)
	ipa_prop_read_section (file_data, data, len);
    }
}

/* Return true if the IPA-CP transformation summary TS is non-NULL and contains
   useful info.  */
static bool
useful_ipcp_transformation_info_p (ipcp_transformation *ts)
{
  if (!ts)
    return false;
  if (!vec_safe_is_empty (ts->m_agg_values)
      || !vec_safe_is_empty (ts->m_vr))
    return true;
  return false;
}

/* Write into OB the IPA-CP transformation summary TS describing NODE.  */

void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node,
				ipcp_transformation *ts)
{
  lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
  int node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, vec_safe_length (ts->m_agg_values));
  for (const ipa_argagg_value &av : ts->m_agg_values)
    {
      struct bitpack_d bp;

      stream_write_tree (ob, av.value, true);
      streamer_write_uhwi (ob, av.unit_offset);
      streamer_write_uhwi (ob, av.index);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av.by_ref, 1);
      bp_pack_value (&bp, av.killed, 1);
      streamer_write_bitpack (&bp);
    }

  streamer_write_uhwi (ob, vec_safe_length (ts->m_vr));
  for (const ipa_vr &parm_vr : ts->m_vr)
    parm_vr.streamer_write (ob);
}

/* Stream in the IPA-CP transformation summary (aggregate value replacements
   and known value ranges) for NODE from IB.  */
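/* Each aggregate entry is read as the value tree, the unit offset and the
   parameter index, followed by a bitpack holding the by_ref and killed bits,
   matching what write_ipcp_transformation_info emitted above.  */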
5312
5313static void
5314read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5315 data_in *data_in)
5316{
5317 unsigned int count, i;
5318 ipcp_transformation_initialize ();
5319 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5320
5321 count = streamer_read_uhwi (ib);
5322 if (count > 0)
5323 {
5324 vec_safe_grow_cleared (v&: ts->m_agg_values, len: count, exact: true);
5325 for (i = 0; i <count; i++)
5326 {
5327 ipa_argagg_value *av = &(*ts->m_agg_values)[i];;
5328
5329 av->value = stream_read_tree (ib, data_in);
5330 av->unit_offset = streamer_read_uhwi (ib);
5331 av->index = streamer_read_uhwi (ib);
5332
5333 bitpack_d bp = streamer_read_bitpack (ib);
5334 av->by_ref = bp_unpack_value (bp: &bp, nbits: 1);
5335 av->killed = bp_unpack_value (bp: &bp, nbits: 1);
5336 }
5337 }
5338
5339 count = streamer_read_uhwi (ib);
5340 if (count > 0)
5341 {
5342 vec_safe_grow_cleared (v&: ts->m_vr, len: count, exact: true);
5343 for (i = 0; i < count; i++)
5344 {
5345 ipa_vr *parm_vr;
5346 parm_vr = &(*ts->m_vr)[i];
5347 parm_vr->streamer_read (ib, data_in);
5348 }
5349 }
5350}
5351
5352/* Write all aggregate replacement for nodes in set. */
5353
5354void
5355ipcp_write_transformation_summaries (void)
5356{
5357 struct output_block *ob;
5358 unsigned int count = 0;
5359 lto_symtab_encoder_t encoder;
5360
5361 ob = create_output_block (LTO_section_ipcp_transform);
5362 encoder = ob->decl_state->symtab_node_encoder;
5363 ob->symbol = NULL;
5364
5365 for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
5366 {
5367 symtab_node *snode = lto_symtab_encoder_deref (encoder, ref: i);
5368 cgraph_node *cnode = dyn_cast <cgraph_node *> (p: snode);
5369 if (!cnode)
5370 continue;
5371 ipcp_transformation *ts = ipcp_get_transformation_summary (node: cnode);
5372 if (useful_ipcp_transformation_info_p (ts)
5373 && lto_symtab_encoder_encode_body_p (encoder, cnode))
5374 count++;
5375 }
5376
5377 streamer_write_uhwi (ob, count);
5378
5379 for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
5380 {
5381 symtab_node *snode = lto_symtab_encoder_deref (encoder, ref: i);
5382 cgraph_node *cnode = dyn_cast <cgraph_node *> (p: snode);
5383 if (!cnode)
5384 continue;
5385 ipcp_transformation *ts = ipcp_get_transformation_summary (node: cnode);
5386 if (useful_ipcp_transformation_info_p (ts)
5387 && lto_symtab_encoder_encode_body_p (encoder, cnode))
5388 write_ipcp_transformation_info (ob, node: cnode, ts);
5389 }
5390 streamer_write_char_stream (obs: ob->main_stream, c: 0);
5391 produce_asm (ob, NULL);
5392 destroy_output_block (ob);
5393}
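
/* Note that ipcp_write_transformation_summaries makes two otherwise identical
   passes over the encoder: the first one only counts the nodes with useful
   summaries so that the count can be streamed before the records, the second
   one emits the records themselves.  The two predicates must be kept in sync,
   otherwise the emitted count would not match the section contents.  */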

/* Read replacements section in file FILE_DATA of length LEN with data
   DATA.  */

static void
read_replacements_section (struct lto_file_decl_data *file_data,
                           const char *data,
                           size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  class data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
                           header->main_size, file_data);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
                                header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
                                                                index));
      read_ipcp_transformation_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
                         len);
  lto_data_in_delete (data_in);
}

/* Read IPA-CP transformation summaries (aggregate replacements and value
   ranges).  */

void
ipcp_read_transformation_summaries (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data
        = lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
                                        &len);
      if (data)
        read_replacements_section (file_data, data, len);
    }
}

/* Adjust the aggregate replacements in TS to reflect any parameter removals
   which might have already taken place.  If after adjustments there are no
   aggregate replacements left, TS->m_agg_values will be set to NULL;
   otherwise it may be shrunk.  */

static void
adjust_agg_replacement_values (cgraph_node *node, ipcp_transformation *ts)
{
  clone_info *cinfo = clone_info::get (node);
  if (!cinfo || !cinfo->param_adjustments)
    return;

  auto_vec<int, 16> new_indices;
  cinfo->param_adjustments->get_updated_indices (&new_indices);
  bool removed_item = false;
  unsigned dst_index = 0;
  unsigned count = ts->m_agg_values->length ();
  for (unsigned i = 0; i < count; i++)
    {
      ipa_argagg_value *v = &(*ts->m_agg_values)[i];
      gcc_checking_assert (v->index >= 0);

      int new_idx = -1;
      if ((unsigned) v->index < new_indices.length ())
        new_idx = new_indices[v->index];

      if (new_idx >= 0)
        {
          v->index = new_idx;
          if (removed_item)
            (*ts->m_agg_values)[dst_index] = *v;
          dst_index++;
        }
      else
        removed_item = true;
    }

  if (dst_index == 0)
    {
      ggc_free (ts->m_agg_values);
      ts->m_agg_values = NULL;
    }
  else if (removed_item)
    ts->m_agg_values->truncate (dst_index);

  return;
}
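
/* A purely illustrative example of the remapping above: if a clone of a
   function with three parameters had its middle parameter removed,
   get_updated_indices produces the vector {0, -1, 1}.  A replacement recorded
   for parameter 2 is then rewritten to refer to index 1, whereas a
   replacement for parameter 1 is dropped and the vector compacted.  */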

/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
                         vec<ipa_param_descriptor, va_gc> *descs,
                         ipcp_transformation *ts, bool *sc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_ts (ts), m_something_changed (sc) {}

  edge before_dom_children (basic_block) final override;
  bool cleanup_eh ()
  { return gimple_purge_all_dead_eh_edges (m_need_eh_cleanup); }

private:
  struct ipa_func_body_info *m_fbi;
  vec<ipa_param_descriptor, va_gc> *m_descriptors;
  ipcp_transformation *m_ts;
  bool *m_something_changed;
  auto_bitmap m_need_eh_cleanup;
};

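/* Scan statements of BB for loads from constant parts of aggregates passed
   to parameters, as discovered by IPA-CP and recorded in the transformation
   summary, and replace such loads with the known constant values.  */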
edge
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT bit_offset;
      poly_int64 size;
      int index;
      bool by_ref, vce;

      if (!gimple_assign_load_p (stmt))
        continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
        continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
        {
          /* V_C_E can do things like convert an array of integers to one
             bigger integer and similar things we do not handle below.  */
          if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
            {
              vce = true;
              break;
            }
          t = TREE_OPERAND (t, 0);
        }
      if (vce)
        continue;

      if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
                                   &bit_offset, &size, &by_ref))
        continue;
      unsigned unit_offset = bit_offset / BITS_PER_UNIT;
      ipa_argagg_value_list avl (m_ts);
      tree v = avl.get_value (index, unit_offset, by_ref);

      if (!v
          || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v))), size))
        continue;

      gcc_checking_assert (is_gimple_ip_invariant (v));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v)))
        {
          if (fold_convertible_p (TREE_TYPE (rhs), v))
            val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v);
          else if (TYPE_SIZE (TREE_TYPE (rhs))
                   == TYPE_SIZE (TREE_TYPE (v)))
            val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v);
          else
            {
              if (dump_file)
                {
                  fprintf (dump_file, "    const ");
                  print_generic_expr (dump_file, v);
                  fprintf (dump_file, " can't be converted to type of ");
                  print_generic_expr (dump_file, rhs);
                  fprintf (dump_file, "\n");
                }
              continue;
            }
        }
      else
        val = v;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Modifying stmt:\n  ");
          print_gimple_stmt (dump_file, stmt, 0);
        }
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "into:\n  ");
          print_gimple_stmt (dump_file, stmt, 0);
          fprintf (dump_file, "\n");
        }

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt))
        bitmap_set_bit (m_need_eh_cleanup, bb->index);
    }
  return NULL;
}

/* If IPA-CP discovered a constant in parameter PARM at BIT_OFFSET of a given
   BIT_SIZE - whether passed by reference or not is given by BY_REF - return
   that constant.  Otherwise return NULL_TREE.  It is supposed to be used only
   after clone materialization and transformation is done (because it asserts
   that killed constants have been pruned).  */

tree
ipcp_get_aggregate_const (struct function *func, tree parm, bool by_ref,
                          HOST_WIDE_INT bit_offset, HOST_WIDE_INT bit_size)
{
  cgraph_node *node = cgraph_node::get (func->decl);
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);

  if (!ts || !ts->m_agg_values)
    return NULL_TREE;

  int index = ts->get_param_index (func->decl, parm);
  if (index < 0)
    return NULL_TREE;

  ipa_argagg_value_list avl (ts);
  unsigned unit_offset = bit_offset / BITS_PER_UNIT;
  const ipa_argagg_value *av = avl.get_elt (index, unit_offset);
  if (!av || av->by_ref != by_ref)
    return NULL_TREE;
  gcc_assert (!av->killed);
  tree v = av->value;
  if (!v
      || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v))), bit_size))
    return NULL_TREE;

  return v;
}
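
/* A purely illustrative example: had IPA-CP recorded that the four bytes at
   unit offset 8 of the aggregate pointed to by PARM always contain the
   constant 42, a query with BY_REF true, BIT_OFFSET 64 and BIT_SIZE 32 would
   return the corresponding INTEGER_CST, while any other combination of
   offset, size or by-reference flag would return NULL_TREE.  */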

/* Return true if we have recorded VALUE and MASK about PARM.
   Set VALUE and MASK accordingly.  */

bool
ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask)
{
  cgraph_node *cnode = cgraph_node::get (current_function_decl);
  ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
  if (!ts
      || vec_safe_length (ts->m_vr) == 0
      || !irange::supports_p (TREE_TYPE (parm)))
    return false;

  int i = ts->get_param_index (current_function_decl, parm);
  if (i < 0)
    return false;
  clone_info *cinfo = clone_info::get (cnode);
  if (cinfo && cinfo->param_adjustments)
    {
      i = cinfo->param_adjustments->get_original_index (i);
      if (i < 0)
        return false;
    }

  vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  if (!vr[i].known_p ())
    return false;
  Value_Range tmp;
  vr[i].get_vrange (tmp);
  if (tmp.undefined_p () || tmp.varying_p ())
    return false;
  irange &r = as_a <irange> (tmp);
  irange_bitmask bm = r.get_bitmask ();
  *mask = widest_int::from (bm.mask (), TYPE_SIGN (TREE_TYPE (parm)));
  *value = wide_int_to_tree (TREE_TYPE (parm), bm.value ());
  return true;
}
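
/* In the CCP-style bitmask encoding used above, a set bit in MASK means the
   corresponding bit of the parameter is unknown, while a cleared bit means it
   is fixed to the corresponding bit of VALUE.  For example (illustrative
   only), MASK == ~0x3 together with VALUE == 0x1 says just that the two least
   significant bits are known to be 01.  */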

/* Update value range of formal parameters of NODE as described in TS.  */

static void
ipcp_update_vr (struct cgraph_node *node, ipcp_transformation *ts)
{
  if (vec_safe_is_empty (ts->m_vr))
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();
  if (!count)
    return;

  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  clone_info *cinfo = clone_info::get (node);
  if (cinfo && cinfo->param_adjustments)
    {
      cinfo->param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec <tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      int remapped_idx;
      if (need_remapping)
        {
          if (i >= new_indices.length ())
            continue;
          remapped_idx = new_indices[i];
          if (remapped_idx < 0)
            continue;
        }
      else
        remapped_idx = i;

      parm = parm_decls[remapped_idx];

      gcc_checking_assert (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      if (!ddef || !is_gimple_reg (parm))
        continue;

      if (vr[i].known_p ())
        {
          Value_Range tmp;
          vr[i].get_vrange (tmp);

          if (!tmp.undefined_p () && !tmp.varying_p ())
            {
              if (dump_file)
                {
                  fprintf (dump_file, "Setting value range of param %u "
                           "(now %i) ", i, remapped_idx);
                  tmp.dump (dump_file);
                  fprintf (dump_file, "]\n");
                }
              set_range_info (ddef, tmp);

              if (POINTER_TYPE_P (TREE_TYPE (parm))
                  && opt_for_fn (node->decl, flag_ipa_bit_cp))
                {
                  irange &r = as_a<irange> (tmp);
                  irange_bitmask bm = r.get_bitmask ();
                  unsigned tem = bm.mask ().to_uhwi ();
                  unsigned HOST_WIDE_INT bitpos = bm.value ().to_uhwi ();
                  unsigned align = tem & -tem;
                  unsigned misalign = bitpos & (align - 1);
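                  /* Illustrative example of the computation just above: MASK
                     == ...111000 (the low three bits are known) and VALUE ==
                     ...100 yield TEM & -TEM == 8, i.e. ALIGN == 8 and
                     MISALIGN == 4, so the pointer is known to be equal to 4
                     modulo 8.  */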

                  if (align > 1)
                    {
                      if (dump_file)
                        {
                          fprintf (dump_file,
                                   "Adjusting mask for param %u to ", i);
                          print_hex (bm.mask (), dump_file);
                          fprintf (dump_file, "\n");
                        }

                      if (dump_file)
                        fprintf (dump_file,
                                 "Adjusting align: %u, misalign: %u\n",
                                 align, misalign);

                      unsigned old_align, old_misalign;
                      struct ptr_info_def *pi = get_ptr_info (ddef);
                      bool old_known = get_ptr_info_alignment (pi, &old_align,
                                                               &old_misalign);

                      if (old_known && old_align > align)
                        {
                          if (dump_file)
                            {
                              fprintf (dump_file,
                                       "But alignment was already %u.\n",
                                       old_align);
                              if ((old_misalign & (align - 1)) != misalign)
                                fprintf (dump_file,
                                         "old_misalign (%u) and misalign "
                                         "(%u) mismatch\n",
                                         old_misalign, misalign);
                            }
                          continue;
                        }

                      if (dump_file
                          && old_known
                          && ((misalign & (old_align - 1)) != old_misalign))
                        fprintf (dump_file,
                                 "old_misalign (%u) and misalign (%u) "
                                 "mismatch\n",
                                 old_misalign, misalign);

                      set_ptr_info_alignment (pi, align, misalign);
                    }
                }
              else if (dump_file && INTEGRAL_TYPE_P (TREE_TYPE (parm)))
                {
                  irange &r = as_a<irange> (tmp);
                  irange_bitmask bm = r.get_bitmask ();
                  unsigned prec = TYPE_PRECISION (TREE_TYPE (parm));
                  if (wi::ne_p (bm.mask (), wi::shwi (-1, prec)))
                    {
                      fprintf (dump_file,
                               "Adjusting mask for param %u to ", i);
                      print_hex (bm.mask (), dump_file);
                      fprintf (dump_file, "\n");
                    }
                }
            }
        }
    }
}

/* IPCP transformation phase applying the aggregate value replacements and
   value ranges recorded for NODE to its function body.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  int param_count;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s\n",
             node->dump_name ());

  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (!ts
      || (vec_safe_is_empty (ts->m_agg_values)
          && vec_safe_is_empty (ts->m_vr)))
    return 0;

  ts->maybe_create_parm_idx_map (cfun->decl);
  ipcp_update_vr (node, ts);
  if (vec_safe_is_empty (ts->m_agg_values))
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;

  adjust_agg_replacement_values (node, ts);
  if (vec_safe_is_empty (ts->m_agg_values))
    {
      if (dump_file)
        fprintf (dump_file, "  All affected aggregate parameters were either "
                 "removed or converted into scalars, phase done.\n");
      return 0;
    }
  if (dump_file)
    {
      fprintf (dump_file, "     Aggregate replacements:");
      ipa_argagg_value_list avs (ts);
      avs.dump (dump_file);
    }

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
  fbi.param_count = param_count;
  fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);

  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  vec_safe_grow_cleared (descriptors, param_count, true);
  ipa_populate_param_decls (node, *descriptors);
  bool modified_mem_access = false;
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker walker (&fbi, descriptors, ts, &modified_mem_access);
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  free_dominance_info (CDI_DOMINATORS);
  bool cfg_changed = walker.cleanup_eh ();

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();

  ts->remove_argaggs_if ([](const ipa_argagg_value &v)
    {
      return v.killed;
    });

  vec_free (descriptors);
  if (cfg_changed)
    delete_unreachable_blocks_update_callgraph (node, false);

  return modified_mem_access ? TODO_update_ssa_only_virtuals : 0;
}


#include "gt-ipa-prop.h"
5909
