/* Support for thunks in symbol table.
   Copyright (C) 2003-2023 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "target.h"
#include "rtl.h"
#include "alloc-pool.h"
#include "cgraph.h"
#include "symbol-summary.h"
#include "symtab-thunks.h"
#include "lto-streamer.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "gimplify-me.h"
#include "varasm.h"
#include "output.h"
#include "cfg.h"
#include "cfghooks.h"
#include "gimple-ssa.h"
#include "gimple-fold.h"
#include "cfgloop.h"
#include "tree-into-ssa.h"
#include "tree-cfg.h"
#include "cfgcleanup.h"
#include "tree-pass.h"
#include "data-streamer.h"
#include "langhooks.h"

/* Used for vtable lookup in thunk adjusting.  */
static GTY (()) tree vtable_entry_type;
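/* A thunk recorded before thunk summaries are allocated: the thunk's cgraph
   node paired with a GC-allocated copy of its thunk_info.  */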
struct GTY (()) unprocessed_thunk
{
  cgraph_node *node;
  thunk_info *info;
};
/* To be PCH safe we store thunks into a vector before end of compilation
   unit.  */
static GTY (()) vec<unprocessed_thunk, va_gc> *thunks;

namespace {

/* Function summary for thunk_infos.  */
class GTY((user)) thunk_infos_t: public function_summary <thunk_info *>
{
public:
  thunk_infos_t (symbol_table *table, bool ggc):
    function_summary<thunk_info *> (table, ggc) { }

  /* Hook that is called by summary when a node is duplicated.  */
  void duplicate (cgraph_node *node,
                  cgraph_node *node2,
                  thunk_info *data,
                  thunk_info *data2) final override;
};

/* Duplication hook.  */
void
thunk_infos_t::duplicate (cgraph_node *, cgraph_node *,
                          thunk_info *src, thunk_info *dst)
{
  *dst = *src;
}

} /* anon namespace */

/* Return thunk_info possibly creating new one.  */
thunk_info *
thunk_info::get_create (cgraph_node *node)
{
  if (!symtab->m_thunks)
    {
      symtab->m_thunks
        = new (ggc_alloc_no_dtor <thunk_infos_t> ())
            thunk_infos_t (symtab, true);
      symtab->m_thunks->disable_insertion_hook ();
    }
  return symtab->m_thunks->get_create (node);
}

/* Stream out THIS to OB.  */
void
thunk_info::stream_out (lto_simple_output_block *ob)
{
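  /* Pack the flags into one non-zero word: bit 1 carries this_adjusting and
     bit 2 carries virtual_offset_p, matching the decoding in stream_in.  */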
  streamer_write_uhwi_stream
     (ob->main_stream,
      1 + (this_adjusting != 0) * 2
      + (virtual_offset_p != 0) * 4);
  streamer_write_uhwi_stream (ob->main_stream, fixed_offset);
  streamer_write_uhwi_stream (ob->main_stream, virtual_value);
  streamer_write_uhwi_stream (ob->main_stream, indirect_offset);
}

/* Stream in THIS from IB.  */
void
thunk_info::stream_in (class lto_input_block *ib)
{
  int type = streamer_read_uhwi (ib);
  fixed_offset = streamer_read_uhwi (ib);
  virtual_value = streamer_read_uhwi (ib);
  indirect_offset = streamer_read_uhwi (ib);

  this_adjusting = (type & 2);
  virtual_offset_p = (type & 4);
}

/* Dump THIS to F.  */
void
thunk_info::dump (FILE *f)
{
  if (alias)
    fprintf (f, " of %s (asm:%s)",
             lang_hooks.decl_printable_name (alias, 2),
             IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (alias)));
  fprintf (f, " fixed offset %i virtual value %i indirect_offset %i "
              "has virtual offset %i\n",
           (int)fixed_offset,
           (int)virtual_value,
           (int)indirect_offset,
           (int)virtual_offset_p);
}

/* Hash THIS.  */
hashval_t
thunk_info::hash ()
{
  inchash::hash hstate;
  hstate.add_hwi (fixed_offset);
  hstate.add_hwi (virtual_value);
  hstate.add_flag (this_adjusting);
  hstate.add_flag (virtual_offset_p);
  return hstate.end ();
}

/* Add unprocessed thunk.  */
void
thunk_info::register_early (cgraph_node *node)
{
  unprocessed_thunk entry = {node, new (ggc_alloc <thunk_info> ()) thunk_info};
  *entry.info = *this;
  vec_safe_push (thunks, entry);
}

/* Attach recorded thunks to cgraph_nodes.
   All this is done only to avoid need to stream summaries to PCH.  */
void
thunk_info::process_early_thunks ()
{
  unprocessed_thunk *e;
  unsigned int i;
  if (!thunks)
    return;

  FOR_EACH_VEC_ELT (*thunks, i, e)
    {
      *thunk_info::get_create (e->node) = *e->info;
    }
  vec_free (thunks);
  thunks = NULL;
}

/* Adjust PTR by the constant FIXED_OFFSET, by the vtable offset indicated by
   VIRTUAL_OFFSET, and by the indirect offset indicated by INDIRECT_OFFSET, if
   it is non-null.  THIS_ADJUSTING is nonzero for a this adjusting thunk and
   zero for a result adjusting thunk.  */
tree
thunk_adjust (gimple_stmt_iterator * bsi,
              tree ptr, bool this_adjusting,
              HOST_WIDE_INT fixed_offset, tree virtual_offset,
              HOST_WIDE_INT indirect_offset)
{
  gassign *stmt;
  tree ret;

  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign
               (ptr, fold_build_pointer_plus_hwi_loc (input_location,
                                                      ptr,
                                                      fixed_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  if (!vtable_entry_type && (virtual_offset || indirect_offset != 0))
    {
      tree vfunc_type = make_node (FUNCTION_TYPE);
      TREE_TYPE (vfunc_type) = integer_type_node;
      TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
      layout_type (vfunc_type);

      vtable_entry_type = build_pointer_type (vfunc_type);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;

      vtabletmp = create_tmp_reg
        (build_pointer_type
          (build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
                                  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
                                          ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
                                   "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
                                  build_simple_mem_ref (vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
                                  fold_build_pointer_plus_loc (input_location,
                                                               vtabletmp2,
                                                               virtual_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
                                   "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
                                  build_simple_mem_ref (vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Adjust the `this' pointer.  */
      ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
      ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
                                      GSI_CONTINUE_LINKING);
    }

  /* Likewise for an offset that is stored in the object that contains the
     vtable.  */
  if (indirect_offset != 0)
    {
      tree offset_ptr, offset_tree;

      /* Get the address of the offset.  */
      offset_ptr
        = create_tmp_reg (build_pointer_type
                          (build_pointer_type (vtable_entry_type)),
                          "offset_ptr");
      stmt = gimple_build_assign (offset_ptr,
                                  build1 (NOP_EXPR, TREE_TYPE (offset_ptr),
                                          ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      stmt = gimple_build_assign
             (offset_ptr,
              fold_build_pointer_plus_hwi_loc (input_location, offset_ptr,
                                               indirect_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      offset_tree = create_tmp_reg (TREE_TYPE (TREE_TYPE (offset_ptr)),
                                    "offset");
      stmt = gimple_build_assign (offset_tree,
                                  build_simple_mem_ref (offset_ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Adjust the `this' pointer.  */
      ptr = fold_build_pointer_plus_loc (input_location, ptr, offset_tree);
      ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
                                      GSI_CONTINUE_LINKING);
    }

  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      if (VAR_P (ptr))
        ptrtmp = ptr;
      else
        {
          ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
          stmt = gimple_build_assign (ptrtmp, ptr);
          gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
        }
      ptr = fold_build_pointer_plus_hwi_loc (input_location,
                                             ptrtmp, fixed_offset);
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}

/* Expand thunk NODE to gimple if possible.
   When FORCE_GIMPLE_THUNK is true, a gimple thunk is created and
   no assembler is produced.
   When OUTPUT_ASM_THUNKS is true, also produce assembler for
   thunks that are not lowered.  */
bool
expand_thunk (cgraph_node *node, bool output_asm_thunks,
              bool force_gimple_thunk)
{
  thunk_info *info = thunk_info::get (node);
  bool this_adjusting = info->this_adjusting;
  HOST_WIDE_INT fixed_offset = info->fixed_offset;
  HOST_WIDE_INT virtual_value = info->virtual_value;
  HOST_WIDE_INT indirect_offset = info->indirect_offset;
  tree virtual_offset = NULL;
  tree alias = node->callees->callee->decl;
  tree thunk_fndecl = node->decl;
  tree a;

  if (!force_gimple_thunk
      && this_adjusting
      && indirect_offset == 0
      && !DECL_EXTERNAL (alias)
      && !DECL_STATIC_CHAIN (alias)
      && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
                                              virtual_value, alias))
    {
      tree fn_block;
      tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));

      if (!output_asm_thunks)
        {
          node->analyzed = true;
          return false;
        }

      if (in_lto_p)
        node->get_untransformed_body ();
      a = DECL_ARGUMENTS (thunk_fndecl);

      current_function_decl = thunk_fndecl;

      /* Ensure thunks are emitted in their correct sections.  */
      resolve_unique_section (thunk_fndecl, 0,
                              flag_function_sections);

      DECL_RESULT (thunk_fndecl)
        = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
                      RESULT_DECL, 0, restype);
      DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;

      /* The back end expects DECL_INITIAL to contain a BLOCK, so we
         create one.  */
      fn_block = make_node (BLOCK);
      BLOCK_VARS (fn_block) = a;
      DECL_INITIAL (thunk_fndecl) = fn_block;
      BLOCK_SUPERCONTEXT (fn_block) = thunk_fndecl;
      allocate_struct_function (thunk_fndecl, false);
      init_function_start (thunk_fndecl);
      cfun->is_thunk = 1;
      insn_locations_init ();
      set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
      prologue_location = curr_insn_location ();

      targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
                                       fixed_offset, virtual_value, alias);

      insn_locations_finalize ();
      init_insn_lengths ();
      free_after_compilation (cfun);
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
      node->thunk = false;
      node->analyzed = false;
    }
  else if (stdarg_p (TREE_TYPE (thunk_fndecl)))
    {
      error ("generic thunk code fails for method %qD which uses %<...%>",
             thunk_fndecl);
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
      node->analyzed = true;
      return false;
    }
  else
    {
      tree restype;
      basic_block bb, then_bb, else_bb, return_bb;
      gimple_stmt_iterator bsi;
      int nargs = 0;
      tree arg;
      int i;
      tree resdecl;
      tree restmp = NULL;

      gcall *call;
      greturn *ret;
      bool alias_is_noreturn = TREE_THIS_VOLATILE (alias);

      /* We may be called from expand_thunk that releases body except for
         DECL_ARGUMENTS.  In this case force_gimple_thunk is true.  */
      if (in_lto_p && !force_gimple_thunk)
        node->get_untransformed_body ();

      /* We need to force DECL_IGNORED_P when the thunk is created
         after early debug was run.  */
      if (force_gimple_thunk)
        DECL_IGNORED_P (thunk_fndecl) = 1;

      a = DECL_ARGUMENTS (thunk_fndecl);

      current_function_decl = thunk_fndecl;

      /* Ensure thunks are emitted in their correct sections.  */
      resolve_unique_section (thunk_fndecl, 0,
                              flag_function_sections);

      bitmap_obstack_initialize (NULL);

      if (info->virtual_offset_p)
        virtual_offset = size_int (virtual_value);

      /* Build the return declaration for the function.  */
      restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
      if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
        {
          resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
          DECL_ARTIFICIAL (resdecl) = 1;
          DECL_IGNORED_P (resdecl) = 1;
          DECL_CONTEXT (resdecl) = thunk_fndecl;
          DECL_RESULT (thunk_fndecl) = resdecl;
        }
      else
        resdecl = DECL_RESULT (thunk_fndecl);

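      /* Base the CFG counts on the profile of the thunk node; if no profile
         is available, fall back to a guessed local count.  */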
      profile_count cfg_count = node->count;
      if (!cfg_count.initialized_p ())
        cfg_count = profile_count::from_gcov_type
                        (BB_FREQ_MAX).guessed_local ();

      bb = then_bb = else_bb = return_bb
        = init_lowered_empty_function (thunk_fndecl, true, cfg_count);

      bsi = gsi_start_bb (bb);

      /* Build call to the function being thunked.  */
      if (!VOID_TYPE_P (restype)
          && (!alias_is_noreturn
              || TREE_ADDRESSABLE (restype)
              || TREE_CODE (TYPE_SIZE_UNIT (restype)) != INTEGER_CST))
        {
          if (DECL_BY_REFERENCE (resdecl))
            {
              restmp = gimple_fold_indirect_ref (resdecl);
              if (!restmp)
                restmp = build2 (MEM_REF,
                                 TREE_TYPE (TREE_TYPE (resdecl)),
                                 resdecl,
                                 build_int_cst (TREE_TYPE (resdecl), 0));
            }
          else if (!is_gimple_reg_type (restype))
            {
              if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl)))
                {
                  restmp = resdecl;

                  if (VAR_P (restmp))
                    {
                      add_local_decl (cfun, restmp);
                      BLOCK_VARS (DECL_INITIAL (current_function_decl))
                        = restmp;
                    }
                }
              else
                restmp = create_tmp_var (restype, "retval");
            }
          else
            restmp = create_tmp_reg (restype, "retval");
        }

      for (arg = a; arg; arg = DECL_CHAIN (arg))
        nargs++;
      auto_vec<tree> vargs (nargs);
      i = 0;
      arg = a;
      if (this_adjusting)
        {
          vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
                                          virtual_offset, indirect_offset));
          arg = DECL_CHAIN (a);
          i = 1;
        }

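      /* Pass the remaining arguments through, copying any value that is not
         already a GIMPLE value into a temporary register first.  */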
      if (nargs)
        for (; i < nargs; i++, arg = DECL_CHAIN (arg))
          {
            tree tmp = arg;
            DECL_NOT_GIMPLE_REG_P (arg) = 0;
            if (!is_gimple_val (arg))
              {
                tmp = create_tmp_reg (TYPE_MAIN_VARIANT
                                      (TREE_TYPE (arg)), "arg");
                gimple *stmt = gimple_build_assign (tmp, arg);
                gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
              }
            vargs.quick_push (tmp);
          }
      call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
      node->callees->call_stmt = call;
      gimple_call_set_from_thunk (call, true);
      if (DECL_STATIC_CHAIN (alias))
        {
          tree p = DECL_STRUCT_FUNCTION (alias)->static_chain_decl;
          tree type = TREE_TYPE (p);
          tree decl = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
                                  PARM_DECL, create_tmp_var_name ("CHAIN"),
                                  type);
          DECL_ARTIFICIAL (decl) = 1;
          DECL_IGNORED_P (decl) = 1;
          TREE_USED (decl) = 1;
          DECL_CONTEXT (decl) = thunk_fndecl;
          DECL_ARG_TYPE (decl) = type;
          TREE_READONLY (decl) = 1;

          struct function *sf = DECL_STRUCT_FUNCTION (thunk_fndecl);
          sf->static_chain_decl = decl;

          gimple_call_set_chain (call, decl);
        }

      /* Return slot optimization is always possible and in fact required to
         return values with DECL_BY_REFERENCE.  */
      if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl))
          && (!is_gimple_reg_type (TREE_TYPE (resdecl))
              || DECL_BY_REFERENCE (resdecl)))
        gimple_call_set_return_slot_opt (call, true);

      if (restmp)
        {
          gimple_call_set_lhs (call, restmp);
          gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
                                                 TREE_TYPE (TREE_TYPE (alias))));
        }
      gsi_insert_after (&bsi, call, GSI_NEW_STMT);
      if (!alias_is_noreturn)
        {
          if (restmp && !this_adjusting
              && (fixed_offset || virtual_offset))
            {
              tree true_label = NULL_TREE;

              if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
                {
                  gimple *stmt;
                  edge e;
                  /* If the return type is a pointer, we need to
                     protect against NULL.  We know there will be an
                     adjustment, because that's why we're emitting a
                     thunk.  */
                  then_bb = create_basic_block (NULL, bb);
                  then_bb->count = cfg_count - cfg_count / 16;
                  return_bb = create_basic_block (NULL, then_bb);
                  return_bb->count = cfg_count;
                  else_bb = create_basic_block (NULL, else_bb);
                  else_bb->count = cfg_count / 16;
                  add_bb_to_loop (then_bb, bb->loop_father);
                  add_bb_to_loop (return_bb, bb->loop_father);
                  add_bb_to_loop (else_bb, bb->loop_father);
                  remove_edge (single_succ_edge (bb));
                  true_label = gimple_block_label (then_bb);
                  stmt = gimple_build_cond (NE_EXPR, restmp,
                                            build_zero_cst (TREE_TYPE (restmp)),
                                            NULL_TREE, NULL_TREE);
                  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
                  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
                  e->probability = profile_probability::guessed_always () / 16;
                  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
                  e->probability = profile_probability::guessed_always () / 16;
                  make_single_succ_edge (return_bb,
                                         EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
                  make_single_succ_edge (then_bb, return_bb, EDGE_FALLTHRU);
                  e = make_edge (else_bb, return_bb, EDGE_FALLTHRU);
                  e->probability = profile_probability::always ();
                  bsi = gsi_last_bb (then_bb);
                }

              restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
                                     fixed_offset, virtual_offset,
                                     indirect_offset);
              if (true_label)
                {
                  gimple *stmt;
                  bsi = gsi_last_bb (else_bb);
                  stmt = gimple_build_assign (restmp,
                                              build_zero_cst
                                                (TREE_TYPE (restmp)));
                  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
                  bsi = gsi_last_bb (return_bb);
                }
            }
          else
            {
              gimple_call_set_tail (call, true);
              cfun->tail_call_marked = true;
            }

          /* Build return value.  */
          if (!DECL_BY_REFERENCE (resdecl))
            ret = gimple_build_return (restmp);
          else
            ret = gimple_build_return (resdecl);

          gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
        }
      else
        {
          gimple_call_set_ctrl_altering (call, true);
          gimple_call_set_tail (call, true);
          cfun->tail_call_marked = true;
          remove_edge (single_succ_edge (bb));
        }

      cfun->gimple_df->in_ssa_p = true;
      update_max_bb_count ();
      profile_status_for_fn (cfun)
        = cfg_count.initialized_p () && cfg_count.ipa_p ()
          ? PROFILE_READ : PROFILE_GUESSED;
      /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks.  */
      TREE_ASM_WRITTEN (thunk_fndecl) = false;
      cfun->cfg->full_profile = true;
      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);
      checking_verify_flow_info ();
      free_dominance_info (CDI_DOMINATORS);

      /* Since we want to emit the thunk, we explicitly mark its name as
         referenced.  */
      node->thunk = false;
      node->lowered = true;
      bitmap_obstack_release (NULL);
    }
  current_function_decl = NULL;
  set_cfun (NULL);
  return true;
}

void
symtab_thunks_cc_finalize (void)
{
  vtable_entry_type = NULL;
}

#include "gt-symtab-thunks.h"