/* Tail call optimization on trees.
   Copyright (C) 2003-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "except.h"
#include "tree-eh.h"
#include "dbgcnt.h"
#include "cfgloop.h"
#include "common/common-target.h"
#include "ipa-utils.h"
#include "tree-ssa-live.h"

/* This file implements tail recursion elimination.  It is also used to
   analyze tail calls in general, passing the results to the rtl level
   where they are used for sibcall optimization.

   In addition to the standard tail recursion elimination, we handle the most
   trivial cases of making the call tail recursive by creating accumulators.
   For example, the following function

   int sum (int n)
   {
     if (n > 0)
       return n + sum (n - 1);
     else
       return 0;
   }

   is transformed into

   int sum (int n)
   {
     int acc = 0;

     while (n > 0)
       acc += n--;

     return acc;
   }

   To do this, we maintain two accumulators (a_acc and m_acc) indicating that
   when we reach a return x statement, we should return a_acc + x * m_acc
   instead.  They are initialized to 0 and 1, respectively,
   so the semantics of the function is obviously preserved.  If we are
   guaranteed that the value of an accumulator never changes, we
   omit that accumulator.

   There are three ways the function may exit.  The first one is
   handled in adjust_return_value, the other two in adjust_accumulator_values
   (the second case is actually a special case of the third one and we
   present it separately just for clarity):

   1) Just return x, where x is not in any of the remaining special shapes.
      We rewrite this to a gimple equivalent of return m_acc * x + a_acc.

   2) return f (...), where f is the current function, is rewritten in a
      classical tail-recursion elimination way, into assignment of arguments
      and jump to the start of the function.  Values of the accumulators
      are unchanged.

   3) return a + m * f (...), where a and m do not depend on the call to f.
      To preserve the semantics described before, we want this to be rewritten
      in such a way that we finally return

      a_acc + (a + m * f (...)) * m_acc = (a_acc + a * m_acc) + (m * m_acc) * f (...).

      I.e. we increase a_acc by a * m_acc, multiply m_acc by m and
      eliminate the tail call to f.  Special cases when the value is just
      added or just multiplied are obtained by setting a = 0 or m = 1.

   TODO -- it is possible to do similar tricks for other operations.  */
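
/* As an illustrative sketch (not from the original sources): for a function

     int f (int n)
     {
       if (n > 0)
         return 2 + 3 * f (n - 1);
       return 1;
     }

   case 3 applies with a = 2 and m = 3, so each eliminated recursive call
   updates the accumulators as a_acc += 2 * m_acc and m_acc *= 3, and the
   final return 1 is rewritten to return m_acc * 1 + a_acc.  */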

/* A structure that describes the tailcall.  */

struct tailcall
{
  /* The iterator pointing to the call statement.  */
  gimple_stmt_iterator call_gsi;

  /* True if it is a call to the current function.  */
  bool tail_recursion;

  /* The return value of the caller is mult * f + add, where f is the return
     value of the call.  */
  tree mult, add;

  /* Next tailcall in the chain.  */
  struct tailcall *next;
};

/* The variables holding the value of the multiplicative and additive
   accumulator.  */
static tree m_acc, a_acc;

/* Bitmap with a bit for each function parameter which is set to true if we
   have to copy the parameter for conversion of tail-recursive calls.  */

static bitmap tailr_arg_needs_copy;

/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).  */

static bool
suitable_for_tail_opt_p (void)
{
  if (cfun->stdarg)
    return false;

  return true;
}

/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).
   This test must pass in addition to suitable_for_tail_opt_p in order to make
   tail call discovery happen.  */

static bool
suitable_for_tail_call_opt_p (void)
{
  tree param;

  /* alloca (until we have stack slot life analysis) inhibits
     sibling call optimizations, but not tail recursion.  */
  if (cfun->calls_alloca)
    return false;

  /* If we are using sjlj exceptions, we may need to add a call to
     _Unwind_SjLj_Unregister at exit of the function.  Which means
     that we cannot do any sibcall transformations.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      && current_function_has_exception_handlers ())
    return false;

  /* Any function that calls setjmp might have longjmp called from
     any called function.  ??? We really should represent this
     properly in the CFG so that this needn't be special cased.  */
  if (cfun->calls_setjmp)
    return false;

  /* Various targets don't handle tail calls correctly in functions
     that call __builtin_eh_return.  */
  if (cfun->calls_eh_return)
    return false;

  /* ??? It is OK if the address of a function argument is taken in some
     cases, but not in all cases.  See PR15387 and PR19616.  Revisit for 4.1.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    if (TREE_ADDRESSABLE (param))
      return false;

  return true;
}

/* Checks whether the expression EXPR in stmt AT is independent of the
   statement pointed to by GSI (in the sense that we already know EXPR's value
   at GSI).  We use the fact that we are only called from the chain of
   basic blocks that have only a single successor.  Returns the expression
   containing the value of EXPR at GSI.  */

static tree
independent_of_stmt_p (tree expr, gimple *at, gimple_stmt_iterator gsi,
                       bitmap to_move)
{
  basic_block bb, call_bb, at_bb;
  edge e;
  edge_iterator ei;

  if (is_gimple_min_invariant (expr))
    return expr;

  if (TREE_CODE (expr) != SSA_NAME)
    return NULL_TREE;

  if (bitmap_bit_p (to_move, SSA_NAME_VERSION (expr)))
    return expr;

  /* Mark the blocks in the chain leading to the end.  */
  at_bb = gimple_bb (at);
  call_bb = gimple_bb (gsi_stmt (gsi));
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = &bb->aux;
  bb->aux = &bb->aux;

  while (1)
    {
      at = SSA_NAME_DEF_STMT (expr);
      bb = gimple_bb (at);

      /* The default definition or defined before the chain.  */
      if (!bb || !bb->aux)
        break;

      if (bb == call_bb)
        {
          for (; !gsi_end_p (gsi); gsi_next (&gsi))
            if (gsi_stmt (gsi) == at)
              break;

          if (!gsi_end_p (gsi))
            expr = NULL_TREE;
          break;
        }

      if (gimple_code (at) != GIMPLE_PHI)
        {
          expr = NULL_TREE;
          break;
        }

      FOR_EACH_EDGE (e, ei, bb->preds)
        if (e->src->aux)
          break;
      gcc_assert (e);

      expr = PHI_ARG_DEF_FROM_EDGE (at, e);
      if (TREE_CODE (expr) != SSA_NAME)
        {
          /* The value is a constant.  */
          break;
        }
    }

  /* Unmark the blocks.  */
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = NULL;
  bb->aux = NULL;

  return expr;
}

enum par { FAIL, OK, TRY_MOVE };

/* Simulates the effect of an assignment STMT on the return value of the tail
   recursive CALL passed in ASS_VAR.  M and A are the multiplicative and the
   additive factor for the real return value.  */
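
/* For instance (hypothetical SSA names, not from the original sources): if
   ASS_VAR is x_1, the result of the recursive call, then the assignment
   x_2 = x_1 + 3 yields *a = 3 and updates *ass_var to x_2, and a following
   x_3 = n_5 * x_2, with n_5 independent of the call, yields *m = n_5.  */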

static par
process_assignment (gassign *stmt,
                    gimple_stmt_iterator call, tree *m,
                    tree *a, tree *ass_var, bitmap to_move)
{
  tree op0, op1 = NULL_TREE, non_ass_var = NULL_TREE;
  tree dest = gimple_assign_lhs (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
  tree src_var = gimple_assign_rhs1 (stmt);

  /* See if this is a simple copy operation of an SSA name to the function
     result.  In that case we may have a simple tail call.  Ignore type
     conversions that can never produce extra code between the function
     call and the function return.  */
  if ((rhs_class == GIMPLE_SINGLE_RHS || gimple_assign_cast_p (stmt))
      && src_var == *ass_var)
    {
      /* Reject a tailcall if the type conversion might need
         additional code.  */
      if (gimple_assign_cast_p (stmt))
        {
          if (TYPE_MODE (TREE_TYPE (dest)) != TYPE_MODE (TREE_TYPE (src_var)))
            return FAIL;

          /* Even if the type modes are the same, if the precision of the
             type is smaller than mode's precision,
             reduce_to_bit_field_precision would generate additional code.  */
          if (INTEGRAL_TYPE_P (TREE_TYPE (dest))
              && !type_has_mode_precision_p (TREE_TYPE (dest)))
            return FAIL;
        }

      *ass_var = dest;
      return OK;
    }

  switch (rhs_class)
    {
    case GIMPLE_BINARY_RHS:
      op1 = gimple_assign_rhs2 (stmt);

      /* Fall through.  */

    case GIMPLE_UNARY_RHS:
      op0 = gimple_assign_rhs1 (stmt);
      break;

    default:
      return FAIL;
    }

  /* Accumulator optimizations will reverse the order of operations.
     We can only do that for floating-point types if we're assuming
     that addition and multiplication are associative.  */
  if (!flag_associative_math)
    if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
      return FAIL;

  if (rhs_class == GIMPLE_UNARY_RHS
      && op0 == *ass_var)
    ;
  else if (op0 == *ass_var
           && (non_ass_var = independent_of_stmt_p (op1, stmt, call,
                                                    to_move)))
    ;
  else if (*ass_var
           && op1 == *ass_var
           && (non_ass_var = independent_of_stmt_p (op0, stmt, call,
                                                    to_move)))
    ;
  else
    return TRY_MOVE;

  switch (code)
    {
    case PLUS_EXPR:
      *a = non_ass_var;
      *ass_var = dest;
      return OK;

    case POINTER_PLUS_EXPR:
      if (op0 != *ass_var)
        return FAIL;
      *a = non_ass_var;
      *ass_var = dest;
      return OK;

    case MULT_EXPR:
      *m = non_ass_var;
      *ass_var = dest;
      return OK;

    case NEGATE_EXPR:
      *m = build_minus_one_cst (TREE_TYPE (op0));
      *ass_var = dest;
      return OK;

    case MINUS_EXPR:
      if (*ass_var == op0)
        *a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
      else
        {
          *m = build_minus_one_cst (TREE_TYPE (non_ass_var));
          *a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
        }

      *ass_var = dest;
      return OK;

    default:
      return FAIL;
    }
}

/* Propagate VAR through phis on edge E.  */
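
/* For example (illustrative names only): if VAR is x_1, the call's lhs, and
   the destination block of E contains x_2 = PHI <x_1 (E), ...>, the value we
   track as the return value becomes x_2 in that block.  */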

static tree
propagate_through_phis (tree var, edge e)
{
  basic_block dest = e->dest;
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var)
        return PHI_RESULT (phi);
    }
  return var;
}

/* Argument for compute_live_vars/live_vars_at_stmt and what compute_live_vars
   returns.  Computed lazily, but just once for the function.  */
static live_vars_map *live_vars;
static vec<bitmap_head> live_vars_vec;

/* Finds tailcalls falling into basic block BB.  The list of found tailcalls is
   added to the start of RET.  */

static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var = NULL_TREE, ret_var, func, param;
  gimple *stmt;
  gcall *call = NULL;
  gimple_stmt_iterator gsi, agsi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  size_t idx;
  tree var;

  if (!single_succ_p (bb))
    return;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);

      /* Ignore labels, returns, nops, clobbers and debug stmts.  */
      if (gimple_code (stmt) == GIMPLE_LABEL
          || gimple_code (stmt) == GIMPLE_RETURN
          || gimple_code (stmt) == GIMPLE_NOP
          || gimple_code (stmt) == GIMPLE_PREDICT
          || gimple_clobber_p (stmt)
          || is_gimple_debug (stmt))
        continue;

      /* Check for a call.  */
      if (is_gimple_call (stmt))
        {
          call = as_a <gcall *> (stmt);
          ass_var = gimple_call_lhs (call);
          break;
        }

      /* Allow simple copies between local variables, even if they're
         aggregates.  */
      if (is_gimple_assign (stmt)
          && auto_var_in_fn_p (gimple_assign_lhs (stmt), cfun->decl)
          && auto_var_in_fn_p (gimple_assign_rhs1 (stmt), cfun->decl))
        continue;

      /* If the statement references memory or volatile operands, fail.  */
      if (gimple_references_memory_p (stmt)
          || gimple_has_volatile_ops (stmt))
        return;
    }

  if (gsi_end_p (gsi))
    {
      edge_iterator ei;
      /* Recurse to the predecessors.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
        find_tail_calls (e->src, ret);

      return;
    }

  /* If the LHS of our call is not just a simple register or local
     variable, we can't transform this into a tail or sibling call.
     This situation happens, in (e.g.) "*p = foo()" where foo returns a
     struct.  In this case we won't have a temporary here, but we need
     to carry out the side effect anyway, so tailcall is impossible.

     ??? In some situations (when the struct is returned in memory via
     invisible argument) we could deal with this, e.g. by passing 'p'
     itself as that argument to foo, but it's too early to do this here,
     and expand_call() will not handle it anyway.  If it ever can, then
     we need to revisit this here, to allow that situation.  */
  if (ass_var
      && !is_gimple_reg (ass_var)
      && !auto_var_in_fn_p (ass_var, cfun->decl))
    return;

  /* If the call might throw an exception that wouldn't propagate out of
     cfun, we can't transform to a tail or sibling call (82081).  */
  if (stmt_could_throw_p (cfun, stmt)
      && !stmt_can_throw_external (cfun, stmt))
    return;

  /* If the function returns a value, then at present, the tail call
     must return the same type of value.  There is conceptually a copy
     between the object returned by the tail call candidate and the
     object returned by CFUN itself.

     This means that if we have:

         lhs = f (&<retval>);    // f reads from <retval>
                                 // (lhs is usually also <retval>)

     there is a copy between the temporary object returned by f and lhs,
     meaning that any use of <retval> in f occurs before the assignment
     to lhs begins.  Thus the <retval> that is live on entry to the call
     to f is really an independent local variable V that happens to be
     stored in the RESULT_DECL rather than a local VAR_DECL.

     Turning this into a tail call would remove the copy and make the
     lifetimes of the return value and V overlap.  The same applies to
     tail recursion, since if f can read from <retval>, we have to assume
     that CFUN might already have written to <retval> before the call.

     The problem doesn't apply when <retval> is passed by value, but that
     isn't a case we handle anyway.  */
  tree result_decl = DECL_RESULT (cfun->decl);
  if (result_decl
      && may_be_aliased (result_decl)
      && ref_maybe_used_by_stmt_p (call, result_decl, false))
    return;

  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = gimple_call_fndecl (call);
  if (func
      && !fndecl_built_in_p (func)
      && recursive_call_p (current_function_decl, func))
    {
      tree arg;

      for (param = DECL_ARGUMENTS (current_function_decl), idx = 0;
           param && idx < gimple_call_num_args (call);
           param = DECL_CHAIN (param), idx++)
        {
          arg = gimple_call_arg (call, idx);
          if (param != arg)
            {
              /* Make sure there are no problems with copying.  The parameter
                 must have a copyable type and the two arguments must have
                 reasonably equivalent types.  The latter requirement could be
                 relaxed if we emitted a suitable type conversion statement.  */
              if (!is_gimple_reg_type (TREE_TYPE (param))
                  || !useless_type_conversion_p (TREE_TYPE (param),
                                                 TREE_TYPE (arg)))
                break;

              /* The parameter should be a real operand, so that the phi node
                 created for it at the start of the function has the meaning
                 of copying the value.  This test implies is_gimple_reg_type
                 from the previous condition, however this one could be
                 relaxed by being more careful with copying the new value
                 of the parameter (emitting appropriate GIMPLE_ASSIGN and
                 updating the virtual operands).  */
              if (!is_gimple_reg (param))
                break;
            }
        }
      if (idx == gimple_call_num_args (call) && !param)
        tail_recursion = true;
    }

  /* Compute live vars if not computed yet.  */
  if (live_vars == NULL)
    {
      unsigned int cnt = 0;
      FOR_EACH_LOCAL_DECL (cfun, idx, var)
        if (VAR_P (var)
            && auto_var_in_fn_p (var, cfun->decl)
            && may_be_aliased (var))
          {
            if (live_vars == NULL)
              live_vars = new live_vars_map;
            live_vars->put (DECL_UID (var), cnt++);
          }
      if (live_vars)
        live_vars_vec = compute_live_vars (cfun, live_vars);
    }

  /* Determine a bitmap of variables which are still in scope after the
     call.  */
  bitmap local_live_vars = NULL;
  if (live_vars)
    local_live_vars = live_vars_at_stmt (live_vars_vec, live_vars, call);

  /* Make sure the tail invocation of this function does not indirectly
     refer to local variables.  (Passing variables directly by value
     is OK.)  */
  FOR_EACH_LOCAL_DECL (cfun, idx, var)
    {
      if (TREE_CODE (var) != PARM_DECL
          && auto_var_in_fn_p (var, cfun->decl)
          && may_be_aliased (var)
          && (ref_maybe_used_by_stmt_p (call, var, false)
              || call_may_clobber_ref_p (call, var, false)))
        {
          if (!VAR_P (var))
            {
              if (local_live_vars)
                BITMAP_FREE (local_live_vars);
              return;
            }
          else
            {
              unsigned int *v = live_vars->get (DECL_UID (var));
              if (bitmap_bit_p (local_live_vars, *v))
                {
                  BITMAP_FREE (local_live_vars);
                  return;
                }
            }
        }
    }

  if (local_live_vars)
    BITMAP_FREE (local_live_vars);

  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after dce.  */
  m = NULL_TREE;
  a = NULL_TREE;
  auto_bitmap to_move_defs;
  auto_vec<gimple *> to_move_stmts;

  abb = bb;
  agsi = gsi;
  while (1)
    {
      tree tmp_a = NULL_TREE;
      tree tmp_m = NULL_TREE;
      gsi_next (&agsi);

      while (gsi_end_p (agsi))
        {
          ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
          abb = single_succ (abb);
          agsi = gsi_start_bb (abb);
        }

      stmt = gsi_stmt (agsi);
      if (gimple_code (stmt) == GIMPLE_RETURN)
        break;

      if (gimple_code (stmt) == GIMPLE_LABEL
          || gimple_code (stmt) == GIMPLE_NOP
          || gimple_code (stmt) == GIMPLE_PREDICT
          || gimple_clobber_p (stmt)
          || is_gimple_debug (stmt))
        continue;

      if (gimple_code (stmt) != GIMPLE_ASSIGN)
        return;

      /* This is a gimple assign.  */
      par ret = process_assignment (as_a <gassign *> (stmt), gsi,
                                    &tmp_m, &tmp_a, &ass_var, to_move_defs);
      if (ret == FAIL)
        return;
      else if (ret == TRY_MOVE)
        {
          if (! tail_recursion)
            return;
          /* Do not deal with checking dominance, the real fix is to
             do path isolation for the transform phase anyway, removing
             the need to compute the accumulators with new stmts.  */
          if (abb != bb)
            return;
          for (unsigned opno = 1; opno < gimple_num_ops (stmt); ++opno)
            {
              tree op = gimple_op (stmt, opno);
              if (independent_of_stmt_p (op, stmt, gsi, to_move_defs) != op)
                return;
            }
          bitmap_set_bit (to_move_defs,
                          SSA_NAME_VERSION (gimple_assign_lhs (stmt)));
          to_move_stmts.safe_push (stmt);
          continue;
        }

      if (tmp_a)
        {
          tree type = TREE_TYPE (tmp_a);
          if (a)
            a = fold_build2 (PLUS_EXPR, type, fold_convert (type, a), tmp_a);
          else
            a = tmp_a;
        }
      if (tmp_m)
        {
          tree type = TREE_TYPE (tmp_m);
          if (m)
            m = fold_build2 (MULT_EXPR, type, fold_convert (type, m), tmp_m);
          else
            m = tmp_m;

          if (a)
            a = fold_build2 (MULT_EXPR, type, fold_convert (type, a), tmp_m);
        }
    }

  /* See if this is a tail call we can handle.  */
  ret_var = gimple_return_retval (as_a <greturn *> (stmt));

  /* We may proceed if there either is no return value, the return value is
     identical to the call's return, or the return decl is an empty-type
     variable and the call's return was not assigned.  */
  if (ret_var
      && (ret_var != ass_var
          && !(is_empty_type (TREE_TYPE (ret_var)) && !ass_var)))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
    return;

  /* For pointers only allow additions.  */
  if (m && POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
    return;

  /* Move queued defs.  */
  if (tail_recursion)
    {
      unsigned i;
      FOR_EACH_VEC_ELT (to_move_stmts, i, stmt)
        {
          gimple_stmt_iterator mgsi = gsi_for_stmt (stmt);
          gsi_move_before (&mgsi, &gsi);
        }
      if (!tailr_arg_needs_copy)
        tailr_arg_needs_copy = BITMAP_ALLOC (NULL);
      for (param = DECL_ARGUMENTS (current_function_decl), idx = 0;
           param;
           param = DECL_CHAIN (param), idx++)
        {
          tree ddef, arg = gimple_call_arg (call, idx);
          if (is_gimple_reg (param)
              && (ddef = ssa_default_def (cfun, param))
              && (arg != ddef))
            bitmap_set_bit (tailr_arg_needs_copy, idx);
        }
    }

  nw = XNEW (struct tailcall);

  nw->call_gsi = gsi;

  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}

/* Helper to insert PHI_ARG into the phi of VAR in the destination of edge E.  */

static void
add_successor_phi_arg (edge e, tree var, tree phi_arg)
{
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    if (PHI_RESULT (gsi.phi ()) == var)
      break;

  gcc_assert (!gsi_end_p (gsi));
  add_phi_arg (gsi.phi (), phi_arg, e, UNKNOWN_LOCATION);
}

/* Creates a GIMPLE statement which computes the operation specified by
   CODE, ACC and OP1 into a new variable named LABEL and inserts the
   statement at the position specified by GSI.  Returns the
   tree node of the statement's result.  */

static tree
adjust_return_value_with_ops (enum tree_code code, const char *label,
                              tree acc, tree op1, gimple_stmt_iterator gsi)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree result = make_temp_ssa_name (ret_type, NULL, label);
  gassign *stmt;

  if (POINTER_TYPE_P (ret_type))
    {
      gcc_assert (code == PLUS_EXPR && TREE_TYPE (acc) == sizetype);
      code = POINTER_PLUS_EXPR;
    }
  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1))
      && code != POINTER_PLUS_EXPR)
    stmt = gimple_build_assign (result, code, acc, op1);
  else
    {
      tree tem;
      if (code == POINTER_PLUS_EXPR)
        tem = fold_build2 (code, TREE_TYPE (op1), op1, acc);
      else
        tem = fold_build2 (code, TREE_TYPE (op1),
                           fold_convert (TREE_TYPE (op1), acc), op1);
      tree rhs = fold_convert (ret_type, tem);
      rhs = force_gimple_operand_gsi (&gsi, rhs,
                                      false, NULL, true, GSI_SAME_STMT);
      stmt = gimple_build_assign (result, rhs);
    }

  gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
  return result;
}

/* Creates a new GIMPLE statement that adjusts the value of accumulator ACC by
   the computation specified by CODE and OP1 and inserts the statement
   at the position specified by GSI as a new statement.  Returns the new SSA
   name of the updated accumulator.  */

static tree
update_accumulator_with_ops (enum tree_code code, tree acc, tree op1,
                             gimple_stmt_iterator gsi)
{
  gassign *stmt;
  tree var = copy_ssa_name (acc);
  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign (var, code, acc, op1);
  else
    {
      tree rhs = fold_convert (TREE_TYPE (acc),
                               fold_build2 (code,
                                            TREE_TYPE (op1),
                                            fold_convert (TREE_TYPE (op1), acc),
                                            op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
                                      false, NULL, false, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (var, rhs);
    }
  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
  return var;
}

/* Adjust the accumulator values according to A and M after GSI, and update
   the phi nodes on edge BACK.  */
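
/* Illustrative sketch (hypothetical SSA names): for an eliminated call of the
   form return a + m * f (...), this emits statements around GSI computing
   acc_tmp = m_acc * a, a_acc_new = a_acc + acc_tmp and m_acc_new = m_acc * m,
   and feeds a_acc_new and m_acc_new into the accumulator phis through the
   back edge BACK.  */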

static void
adjust_accumulator_values (gimple_stmt_iterator gsi, tree m, tree a, edge back)
{
  tree var, a_acc_arg, m_acc_arg;

  if (m)
    m = force_gimple_operand_gsi (&gsi, m, true, NULL, true, GSI_SAME_STMT);
  if (a)
    a = force_gimple_operand_gsi (&gsi, a, true, NULL, true, GSI_SAME_STMT);

  a_acc_arg = a_acc;
  m_acc_arg = m_acc;
  if (a)
    {
      if (m_acc)
        {
          if (integer_onep (a))
            var = m_acc;
          else
            var = adjust_return_value_with_ops (MULT_EXPR, "acc_tmp", m_acc,
                                                a, gsi);
        }
      else
        var = a;

      a_acc_arg = update_accumulator_with_ops (PLUS_EXPR, a_acc, var, gsi);
    }

  if (m)
    m_acc_arg = update_accumulator_with_ops (MULT_EXPR, m_acc, m, gsi);

  if (a_acc)
    add_successor_phi_arg (back, a_acc, a_acc_arg);

  if (m_acc)
    add_successor_phi_arg (back, m_acc, m_acc_arg);
}

/* Adjust the value of the return at the end of BB according to the M and A
   accumulators.  */

static void
adjust_return_value (basic_block bb, tree m, tree a)
{
  tree retval;
  greturn *ret_stmt = as_a <greturn *> (gimple_seq_last_stmt (bb_seq (bb)));
  gimple_stmt_iterator gsi = gsi_last_bb (bb);

  gcc_assert (gimple_code (ret_stmt) == GIMPLE_RETURN);

  retval = gimple_return_retval (ret_stmt);
  if (!retval || retval == error_mark_node)
    return;

  if (m)
    retval = adjust_return_value_with_ops (MULT_EXPR, "mul_tmp", m_acc, retval,
                                           gsi);
  if (a)
    retval = adjust_return_value_with_ops (PLUS_EXPR, "acc_tmp", a_acc, retval,
                                           gsi);
  gimple_return_set_retval (ret_stmt, retval);
  update_stmt (ret_stmt);
}

/* Subtract COUNT from the basic block BB and its single outgoing edge.  */
static void
decrease_profile (basic_block bb, profile_count count)
{
  bb->count = bb->count - count;
  if (!single_succ_p (bb))
    {
      gcc_assert (!EDGE_COUNT (bb->succs));
      return;
    }
}

/* Eliminates the tail call described by T.  Allocates *NEW_LOOP if not
   already done and initializes it.  */

static void
eliminate_tail_call (struct tailcall *t, class loop *&new_loop)
{
  tree param, rslt;
  gimple *stmt, *call;
  tree arg;
  size_t idx;
  basic_block bb, first;
  edge e;
  gphi *phi;
  gphi_iterator gpi;
  gimple_stmt_iterator gsi;
  gimple *orig_stmt;

  stmt = orig_stmt = gsi_stmt (t->call_gsi);
  bb = gsi_bb (t->call_gsi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
               bb->index);
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gcc_assert (is_gimple_call (stmt));

  first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Remove the code after call_gsi that will become unreachable.  The
     possibly unreachable code in other blocks is removed later in
     cfg cleanup.  */
  gsi = t->call_gsi;
  gimple_stmt_iterator gsi2 = gsi_last_bb (gimple_bb (gsi_stmt (gsi)));
  while (gsi_stmt (gsi2) != gsi_stmt (gsi))
    {
      gimple *t = gsi_stmt (gsi2);
      /* Do not remove the return statement, so that redirect_edge_and_branch
         sees how the block ends.  */
      if (gimple_code (t) != GIMPLE_RETURN)
        {
          gimple_stmt_iterator gsi3 = gsi2;
          gsi_prev (&gsi2);
          gsi_remove (&gsi3, true);
          release_defs (t);
        }
      else
        gsi_prev (&gsi2);
    }

  /* The number of executions of the function has been reduced by the
     tailcall.  */
  e = single_succ_edge (gsi_bb (t->call_gsi));

  profile_count count = e->count ();

  /* When the profile is inconsistent and the recursion edge is more frequent
     than the number of executions of the function, scale it down, so we do
     not end up with 0 executions of the entry block.  */
  if (count >= ENTRY_BLOCK_PTR_FOR_FN (cfun)->count)
    count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (7, 8);
  decrease_profile (EXIT_BLOCK_PTR_FOR_FN (cfun), count);
  decrease_profile (ENTRY_BLOCK_PTR_FOR_FN (cfun), count);
  if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    decrease_profile (e->dest, count);

  /* Replace the call by a jump to the start of function.  */
  e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)),
                                first);
  gcc_assert (e);
  PENDING_STMT (e) = NULL;

  /* Add the new loop.  */
  if (!new_loop)
    {
      new_loop = alloc_loop ();
      new_loop->header = first;
      new_loop->finite_p = true;
    }
  else
    gcc_assert (new_loop->header == first);

  /* Add phi node entries for arguments.  The ordering of the phi nodes should
     be the same as the ordering of the arguments.  */
  for (param = DECL_ARGUMENTS (current_function_decl),
         idx = 0, gpi = gsi_start_phis (first);
       param;
       param = DECL_CHAIN (param), idx++)
    {
      if (!bitmap_bit_p (tailr_arg_needs_copy, idx))
        continue;

      arg = gimple_call_arg (stmt, idx);
      phi = gpi.phi ();
      gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));

      add_phi_arg (phi, arg, e, gimple_location (stmt));
      gsi_next (&gpi);
    }

  /* Update the values of accumulators.  */
  adjust_accumulator_values (t->call_gsi, t->mult, t->add, e);

  call = gsi_stmt (t->call_gsi);
  rslt = gimple_call_lhs (call);
  if (rslt != NULL_TREE && TREE_CODE (rslt) == SSA_NAME)
    {
      /* Result of the call will no longer be defined.  So adjust the
         SSA_NAME_DEF_STMT accordingly.  */
      SSA_NAME_DEF_STMT (rslt) = gimple_build_nop ();
    }

  gsi_remove (&t->call_gsi, true);
  release_defs (call);
}

/* Optimizes the tailcall described by T.  If OPT_TAILCALLS is true, also
   mark the tailcalls for the sibcall optimization.  */

static bool
optimize_tail_call (struct tailcall *t, bool opt_tailcalls,
                    class loop *&new_loop)
{
  if (t->tail_recursion)
    {
      eliminate_tail_call (t, new_loop);
      return true;
    }

  if (opt_tailcalls)
    {
      gcall *stmt = as_a <gcall *> (gsi_stmt (t->call_gsi));

      gimple_call_set_tail (stmt, true);
      cfun->tail_call_marked = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Found tail call ");
          print_gimple_stmt (dump_file, stmt, 0, dump_flags);
          fprintf (dump_file, " in bb %i\n", (gsi_bb (t->call_gsi))->index);
        }
    }

  return false;
}

/* Creates a tail-call accumulator of the same type as the return type of the
   current function.  LABEL is the name used in creating the temporary
   variable for the accumulator.  The accumulator will be inserted in the
   phis of a basic block BB with a single predecessor, with an initial value
   INIT converted to the current function return type.  */
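
/* Sketch of the result (illustrative): for the additive accumulator this
   creates something like add_acc_N = PHI <0 (entry edge)> in BB; the
   arguments coming in on the recursion back edges are added later by
   adjust_accumulator_values.  */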

static tree
create_tailcall_accumulator (const char *label, basic_block bb, tree init)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  if (POINTER_TYPE_P (ret_type))
    ret_type = sizetype;

  tree tmp = make_temp_ssa_name (ret_type, NULL, label);
  gphi *phi;

  phi = create_phi_node (tmp, bb);
  add_phi_arg (phi, init, single_pred_edge (bb),
               UNKNOWN_LOCATION);
  return PHI_RESULT (phi);
}

/* Optimizes tail calls in the function, turning the tail recursion
   into iteration.  */

static unsigned int
tree_optimize_tail_calls_1 (bool opt_tailcalls)
{
  edge e;
  bool phis_constructed = false;
  struct tailcall *tailcalls = NULL, *act, *next;
  bool changed = false;
  basic_block first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  tree param;
  edge_iterator ei;

  if (!suitable_for_tail_opt_p ())
    return 0;
  if (opt_tailcalls)
    opt_tailcalls = suitable_for_tail_call_opt_p ();

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    {
      /* Only traverse the normal exits, i.e. those that end with a return
         statement.  */
      if (safe_is_a <greturn *> (*gsi_last_bb (e->src)))
        find_tail_calls (e->src, &tailcalls);
    }

  if (live_vars)
    {
      destroy_live_vars (live_vars_vec);
      delete live_vars;
      live_vars = NULL;
    }

  /* Construct the phi nodes and accumulators if necessary.  */
  a_acc = m_acc = NULL_TREE;
  for (act = tailcalls; act; act = act->next)
    {
      if (!act->tail_recursion)
        continue;

      if (!phis_constructed)
        {
          /* Ensure that the block has only one predecessor and no existing
             PHI nodes; otherwise split the edge out of the entry block.  */
          if (!single_pred_p (first)
              || !gimple_seq_empty_p (phi_nodes (first)))
            first
              = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));

          /* Copy the args if needed.  */
          unsigned idx;
          for (param = DECL_ARGUMENTS (current_function_decl), idx = 0;
               param;
               param = DECL_CHAIN (param), idx++)
            if (bitmap_bit_p (tailr_arg_needs_copy, idx))
              {
                tree name = ssa_default_def (cfun, param);
                tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
                gphi *phi;

                set_ssa_default_def (cfun, param, new_name);
                phi = create_phi_node (name, first);
                add_phi_arg (phi, new_name, single_pred_edge (first),
                             EXPR_LOCATION (param));
              }
          phis_constructed = true;
        }
      tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
      if (POINTER_TYPE_P (ret_type))
        ret_type = sizetype;

      if (act->add && !a_acc)
        a_acc = create_tailcall_accumulator ("add_acc", first,
                                             build_zero_cst (ret_type));

      if (act->mult && !m_acc)
        m_acc = create_tailcall_accumulator ("mult_acc", first,
                                             build_one_cst (ret_type));
    }

  if (a_acc || m_acc)
    {
      /* When the tail call elimination using accumulators is performed,
         statements adding the accumulated value are inserted at all exits.
         This turns all other tail calls into non-tail ones.  */
      opt_tailcalls = false;
    }

  class loop *new_loop = NULL;
  for (; tailcalls; tailcalls = next)
    {
      next = tailcalls->next;
      changed |= optimize_tail_call (tailcalls, opt_tailcalls, new_loop);
      free (tailcalls);
    }
  if (new_loop)
    add_loop (new_loop, loops_for_fn (cfun)->tree_root);

  if (a_acc || m_acc)
    {
      /* Modify the remaining return statements.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
        {
          if (safe_is_a <greturn *> (*gsi_last_bb (e->src)))
            adjust_return_value (e->src, m_acc, a_acc);
        }
    }

  if (changed)
    free_dominance_info (CDI_DOMINATORS);

  /* Add phi nodes for the virtual operands defined in the function to the
     header of the loop created by tail recursion elimination.  Do so
     by triggering the SSA renamer.  */
  if (phis_constructed)
    mark_virtual_operands_for_renaming (cfun);

  if (tailr_arg_needs_copy)
    BITMAP_FREE (tailr_arg_needs_copy);

  if (changed)
    return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
  return 0;
}

static bool
gate_tail_calls (void)
{
  return flag_optimize_sibling_calls != 0 && dbg_cnt (tail_call);
}

static unsigned int
execute_tail_calls (void)
{
  return tree_optimize_tail_calls_1 (true);
}

namespace {

const pass_data pass_data_tail_recursion =
{
  GIMPLE_PASS, /* type */
  "tailr", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_tail_recursion : public gimple_opt_pass
{
public:
  pass_tail_recursion (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tail_recursion, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override
  {
    return new pass_tail_recursion (m_ctxt);
  }
  bool gate (function *) final override { return gate_tail_calls (); }
  unsigned int execute (function *) final override
  {
    return tree_optimize_tail_calls_1 (false);
  }

}; // class pass_tail_recursion

} // anon namespace

gimple_opt_pass *
make_pass_tail_recursion (gcc::context *ctxt)
{
  return new pass_tail_recursion (ctxt);
}

namespace {

const pass_data pass_data_tail_calls =
{
  GIMPLE_PASS, /* type */
  "tailc", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_tail_calls : public gimple_opt_pass
{
public:
  pass_tail_calls (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tail_calls, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override { return gate_tail_calls (); }
  unsigned int execute (function *) final override
  {
    return execute_tail_calls ();
  }

}; // class pass_tail_calls

} // anon namespace

gimple_opt_pass *
make_pass_tail_calls (gcc::context *ctxt)
{
  return new pass_tail_calls (ctxt);
}
