/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "internal-fn.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "output.h"
#include "langhooks.h"
#include "except.h"
#include "dbgcnt.h"
#include "rtl-iter.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "intl.h"
#include "hash-map.h"
#include "hash-traits.h"
#include "attribs.h"
#include "builtins.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "attr-fnspec.h"
#include "value-query.h"
#include "tree-pretty-print.h"
#include "tree-eh.h"

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
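
/* For example, on a target where PREFERRED_STACK_BOUNDARY is 128 and
   BITS_PER_UNIT is 8, STACK_BYTES evaluates to 16, so outgoing argument
   blocks are sized in multiples of 16 bytes.  */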

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* True if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is false, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  bool pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to TARGET_FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static unsigned int highest_outgoing_arg_in_use;

/* Assume that any stack location at this byte index is used,
   without checking the contents of stack_usage_map.  */
static unsigned HOST_WIDE_INT stack_usage_watermark = HOST_WIDE_INT_M1U;

/* A bitmap of virtual-incoming stack space.  A bit is set if the tail
   call argument for the corresponding stack location has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use its parent's incoming
   argument slots when they have already been overwritten with tail call
   arguments.  */
static sbitmap stored_args_map;

/* Assume that any virtual-incoming location at this byte index has been
   stored, without checking the contents of stored_args_map.  */
static unsigned HOST_WIDE_INT stored_args_watermark;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;

static void precompute_register_parameters (int, struct arg_data *, int *);
static bool store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static bool finalize_must_preallocate (bool, int, struct arg_data *,
                                       struct args_size *);
static void precompute_arguments (int, struct arg_data *);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
                                      int, bool *);
static int special_function_p (const_tree, int);
static bool check_sibcall_argument_overlap_1 (rtx);
static bool check_sibcall_argument_overlap (rtx_insn *, struct arg_data *,
                                            bool);
static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif

/* Return true if bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
   stack region might already be in use.  */

static bool
stack_region_maybe_used_p (poly_uint64 lower_bound, poly_uint64 upper_bound,
                           unsigned int reg_parm_stack_space)
{
  unsigned HOST_WIDE_INT const_lower, const_upper;
  const_lower = constant_lower_bound (lower_bound);
  if (!upper_bound.is_constant (&const_upper))
    const_upper = HOST_WIDE_INT_M1U;

  if (const_upper > stack_usage_watermark)
    return true;

  /* Don't worry about things in the fixed argument area;
     it has already been saved.  */
  const_lower = MAX (const_lower, reg_parm_stack_space);
  const_upper = MIN (const_upper, highest_outgoing_arg_in_use);
  for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
    if (stack_usage_map[i])
      return true;
  return false;
}

/* Record that bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
   stack region are now in use.  */

static void
mark_stack_region_used (poly_uint64 lower_bound, poly_uint64 upper_bound)
{
  unsigned HOST_WIDE_INT const_lower, const_upper;
  const_lower = constant_lower_bound (lower_bound);
  if (upper_bound.is_constant (&const_upper)
      && const_upper <= highest_outgoing_arg_in_use)
    for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
      stack_usage_map[i] = 1;
  else
    stack_usage_watermark = MIN (stack_usage_watermark, const_lower);
}
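
/* For example: when the upper bound of a region is not a compile-time
   constant (or lies beyond the tracked part of the map),
   mark_stack_region_used cannot set individual bytes of stack_usage_map,
   so it instead lowers stack_usage_watermark to the region's lower bound;
   from then on stack_region_maybe_used_p conservatively treats every
   region that ends above that watermark as in use.  */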
213
214/* Force FUNEXP into a form suitable for the address of a CALL,
215 and return that as an rtx. Also load the static chain register
216 if FNDECL is a nested function.
217
218 CALL_FUSAGE points to a variable holding the prospective
219 CALL_INSN_FUNCTION_USAGE information. */
220
221rtx
222prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value,
223 rtx *call_fusage, int reg_parm_seen, int flags)
224{
225 /* Make a valid memory address and copy constants through pseudo-regs,
226 but not for a constant address if -fno-function-cse. */
227 if (GET_CODE (funexp) != SYMBOL_REF)
228 {
229 /* If it's an indirect call by descriptor, generate code to perform
230 runtime identification of the pointer and load the descriptor. */
231 if ((flags & ECF_BY_DESCRIPTOR) && !flag_trampolines)
232 {
233 const int bit_val = targetm.calls.custom_function_descriptors;
234 rtx call_lab = gen_label_rtx ();
235
236 gcc_assert (fndecl_or_type && TYPE_P (fndecl_or_type));
237 fndecl_or_type
238 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
239 fndecl_or_type);
240 DECL_STATIC_CHAIN (fndecl_or_type) = 1;
241 rtx chain = targetm.calls.static_chain (fndecl_or_type, false);
242
243 if (GET_MODE (funexp) != Pmode)
244 funexp = convert_memory_address (Pmode, funexp);
245
246 /* Avoid long live ranges around function calls. */
247 funexp = copy_to_mode_reg (Pmode, funexp);
248
249 if (REG_P (chain))
250 emit_insn (gen_rtx_CLOBBER (VOIDmode, chain));
251
252 /* Emit the runtime identification pattern. */
253 rtx mask = gen_rtx_AND (Pmode, funexp, GEN_INT (bit_val));
254 emit_cmp_and_jump_insns (mask, const0_rtx, EQ, NULL_RTX, Pmode, 1,
255 call_lab);
256
257 /* Statically predict the branch to very likely taken. */
258 rtx_insn *insn = get_last_insn ();
259 if (JUMP_P (insn))
260 predict_insn_def (insn, PRED_BUILTIN_EXPECT, TAKEN);
261
262 /* Load the descriptor. */
263 rtx mem = gen_rtx_MEM (ptr_mode,
264 plus_constant (Pmode, funexp, - bit_val));
265 MEM_NOTRAP_P (mem) = 1;
266 mem = convert_memory_address (Pmode, mem);
267 emit_move_insn (chain, mem);
268
269 mem = gen_rtx_MEM (ptr_mode,
270 plus_constant (Pmode, funexp,
271 POINTER_SIZE / BITS_PER_UNIT
272 - bit_val));
273 MEM_NOTRAP_P (mem) = 1;
274 mem = convert_memory_address (Pmode, mem);
275 emit_move_insn (funexp, mem);
276
277 emit_label (call_lab);
278
279 if (REG_P (chain))
280 {
281 use_reg (fusage: call_fusage, reg: chain);
282 STATIC_CHAIN_REG_P (chain) = 1;
283 }
284
285 /* Make sure we're not going to be overwritten below. */
286 gcc_assert (!static_chain_value);
287 }
288
289 /* If we are using registers for parameters, force the
290 function address into a register now. */
291 funexp = ((reg_parm_seen
292 && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
293 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
294 : memory_address (FUNCTION_MODE, funexp));
295 }
296 else
297 {
298 /* funexp could be a SYMBOL_REF represents a function pointer which is
299 of ptr_mode. In this case, it should be converted into address mode
300 to be a valid address for memory rtx pattern. See PR 64971. */
301 if (GET_MODE (funexp) != Pmode)
302 funexp = convert_memory_address (Pmode, funexp);
303
304 if (!(flags & ECF_SIBCALL))
305 {
306 if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
307 funexp = force_reg (Pmode, funexp);
308 }
309 }
310
311 if (static_chain_value != 0
312 && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL
313 || DECL_STATIC_CHAIN (fndecl_or_type)))
314 {
315 rtx chain;
316
317 chain = targetm.calls.static_chain (fndecl_or_type, false);
318 static_chain_value = convert_memory_address (Pmode, static_chain_value);
319
320 emit_move_insn (chain, static_chain_value);
321 if (REG_P (chain))
322 {
323 use_reg (fusage: call_fusage, reg: chain);
324 STATIC_CHAIN_REG_P (chain) = 1;
325 }
326 }
327
328 return funexp;
329}
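
/* To make the descriptor logic above concrete, assume
   targetm.calls.custom_function_descriptors is 1: a "function pointer"
   with the low bit clear is an ordinary code address and branches to
   the call label directly, while one with the low bit set points one
   byte past the start of a two-pointer descriptor, so the static chain
   is loaded from FUNEXP - 1 and the real code address from
   FUNEXP + POINTER_SIZE / BITS_PER_UNIT - 1.  */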

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
   its own args.

   FUNTYPE is the data type of the function.  This is given to the hook
   TARGET_RETURN_POPS_ARGS to determine whether this function pops its
   own args.  We used to allow an identifier for library functions, but
   that doesn't work when the return type is an aggregate type and the
   calling convention says that the pointer to this aggregate is to be
   popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     targetm.calls.function_arg (&args_so_far,
                                 function_arg_info::end_marker ());
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
             tree funtype ATTRIBUTE_UNUSED,
             poly_int64 stack_size ATTRIBUTE_UNUSED,
             poly_int64 rounded_stack_size,
             poly_int64 struct_value_size ATTRIBUTE_UNUSED,
             rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
             int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
             cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
  rtx call, funmem, pat;
  bool already_popped = false;
  poly_int64 n_popped = 0;

  /* Sibling call patterns never pop arguments (no sibcall(_value)_pop
     patterns exist).  Any popping that the callee does on return will
     be from our caller's frame rather than ours.  */
  if (!(ecf_flags & ECF_SIBCALL))
    {
      n_popped += targetm.calls.return_pops_args (fndecl, funtype, stack_size);

#ifdef CALL_POPS_ARGS
      n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
#endif
    }

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

  funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
  if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
    {
      tree t = fndecl;

      /* Although a built-in FUNCTION_DECL and its non-__builtin
         counterpart compare equal and get a shared mem_attrs, they
         produce different dump output in compare-debug compilations,
         if an entry gets garbage collected in one compilation, then
         adds a different (but equivalent) entry, while the other
         doesn't run the garbage collector at the same spot and then
         shares the mem_attr with the equivalent entry.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
        {
          tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
          if (t2)
            t = t2;
        }

      set_mem_expr (funmem, t);
    }
  else if (fntree)
    set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));

  if (ecf_flags & ECF_SIBCALL)
    {
      if (valreg)
        pat = targetm.gen_sibcall_value (valreg, funmem,
                                         rounded_stack_size_rtx,
                                         next_arg_reg, NULL_RTX);
      else
        pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
                                   next_arg_reg,
                                   gen_int_mode (struct_value_size, Pmode));
    }
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
  else if (maybe_ne (n_popped, 0)
           || !(valreg
                ? targetm.have_call_value ()
                : targetm.have_call ()))
    {
      rtx n_pop = gen_int_mode (n_popped, Pmode);

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = targetm.gen_call_value_pop (valreg, funmem,
                                          rounded_stack_size_rtx,
                                          next_arg_reg, n_pop);
      else
        pat = targetm.gen_call_pop (funmem, rounded_stack_size_rtx,
                                    next_arg_reg, n_pop);

      already_popped = true;
    }
  else
    {
      if (valreg)
        pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx,
                                      next_arg_reg, NULL_RTX);
      else
        pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
                                gen_int_mode (struct_value_size, Pmode));
    }
  emit_insn (pat);

  /* Find the call we just emitted.  */
  rtx_call_insn *call_insn = last_call_insn ();

  /* Some targets create a fresh MEM instead of reusing the one provided
     above.  Set its MEM_EXPR.  */
  call = get_call_rtx_from (call_insn);
  if (call
      && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
      && MEM_EXPR (funmem) != NULL_TREE)
    set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_CONST)
    RTL_CONST_CALL_P (call_insn) = 1;

  /* If this is a pure call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_PURE)
    RTL_PURE_CALL_P (call_insn) = 1;

  /* If this is a looping const or pure call, then set the insn's
     corresponding bit.  */
  if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
    RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* Create a nothrow REG_EH_REGION note, if needed.  */
  make_reg_eh_region_note (call_insn, ecf_flags, 0);

  if (ecf_flags & ECF_NORETURN)
    add_reg_note (call_insn, REG_NORETURN, const0_rtx);

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      add_reg_note (call_insn, REG_SETJMP, const0_rtx);
      cfun->calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (maybe_ne (n_popped, 0))
    {
      if (!already_popped)
        CALL_INSN_FUNCTION_USAGE (call_insn)
          = gen_rtx_EXPR_LIST (VOIDmode,
                               gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
                               CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
      stack_pointer_delta -= n_popped;

      add_args_size_note (call_insn, stack_pointer_delta);

      /* If popping is needed, stack realignment must use DRAP.  */
      if (SUPPORTS_STACK_ALIGNMENT)
        crtl->need_drap = true;
    }
  /* For noreturn calls when not accumulating outgoing args force
     REG_ARGS_SIZE note to prevent crossjumping of calls with different
     args sizes.  */
  else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
    add_args_size_note (call_insn, stack_pointer_delta);

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
         we need an instruction to pop them sooner or later.
         Perhaps do it now; perhaps just record how much space to pop later.

         If returning from the subroutine does pop the args, indicate that the
         stack pointer will be changed.  */

      if (maybe_ne (rounded_stack_size, 0))
        {
          if (ecf_flags & ECF_NORETURN)
            /* Just pretend we did the pop.  */
            stack_pointer_delta -= rounded_stack_size;
          else if (flag_defer_pop && inhibit_defer_pop == 0
                   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
            pending_stack_adjust += rounded_stack_size;
          else
            adjust_stack (rounded_stack_size_rtx);
        }
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (maybe_ne (n_popped, 0))
    anti_adjust_stack (gen_int_mode (n_popped, Pmode));
}

/* Determine if the function identified by FNDECL is one with
   special properties we wish to know about.  Modify FLAGS accordingly.

   For example, if the function might return more than one time (setjmp), then
   set ECF_RETURNS_TWICE.

   Set ECF_MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (const_tree fndecl, int flags)
{
  tree name_decl = DECL_NAME (fndecl);

  if (maybe_special_function_p (fndecl)
      && IDENTIFIER_LENGTH (name_decl) <= 11)
    {
      const char *name = IDENTIFIER_POINTER (name_decl);
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
         makes no sense to pass it as a pointer-to-function to
         anything that does not understand its behavior.  */
      if (IDENTIFIER_LENGTH (name_decl) == 6
          && name[0] == 'a'
          && ! strcmp (name, "alloca"))
        flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _ or __.  */
      if (name[0] == '_')
        {
          if (name[1] == '_')
            tname += 2;
          else
            tname += 1;
        }

      /* ECF_RETURNS_TWICE is safe even for -ffreestanding.  */
      if (! strcmp (tname, "setjmp")
          || ! strcmp (tname, "sigsetjmp")
          || ! strcmp (name, "savectx")
          || ! strcmp (name, "vfork")
          || ! strcmp (name, "getcontext"))
        flags |= ECF_RETURNS_TWICE;
    }

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
    flags |= ECF_MAY_BE_ALLOCA;

  return flags;
}
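
/* For example, "setjmp", "_setjmp" and "__sigsetjmp" all receive
   ECF_RETURNS_TWICE here, since the underscore prefixes are stripped
   before the "setjmp" and "sigsetjmp" comparisons, whereas "vfork",
   "savectx" and "getcontext" are only matched against the unstripped
   name.  */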

/* Return fnspec for DECL.  */

static attr_fnspec
decl_fnspec (tree fndecl)
{
  tree attr;
  tree type = TREE_TYPE (fndecl);
  if (type)
    {
      attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
      if (attr)
        return TREE_VALUE (TREE_VALUE (attr));
    }
  if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    return builtin_fnspec (fndecl);
  return "";
}

/* Similar to special_function_p; return a set of ERF_ flags for the
   function FNDECL.  */

static int
decl_return_flags (tree fndecl)
{
  attr_fnspec fnspec = decl_fnspec (fndecl);

  unsigned int arg;
  if (fnspec.returns_arg (&arg))
    return ERF_RETURNS_ARG | arg;

  if (fnspec.returns_noalias_p ())
    return ERF_NOALIAS;
  return 0;
}
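
/* For instance, a "fn spec" string whose first character is '1' (the
   built-in fnspec for memcpy starts this way, memcpy being a function
   that returns its first argument) makes decl_return_flags return
   ERF_RETURNS_ARG | 0, i.e. the return value is known to be
   argument 0.  */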

/* Return true when FNDECL represents a call to setjmp.  */

bool
setjmp_call_p (const_tree fndecl)
{
  if (DECL_IS_RETURNS_TWICE (fndecl))
    return true;
  if (special_function_p (fndecl, 0) & ECF_RETURNS_TWICE)
    return true;

  return false;
}

/* Return true if STMT may be an alloca call.  */

bool
gimple_maybe_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
    return true;

  return false;
}

/* Return true if STMT is a builtin alloca call.  */

bool
gimple_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
        return gimple_call_num_args (stmt) > 0;
      default:
        break;
      }

  return false;
}

/* Return true when EXP contains a builtin alloca call.  */

bool
alloca_call_p (const_tree exp)
{
  tree fndecl;
  if (TREE_CODE (exp) == CALL_EXPR
      && (fndecl = get_callee_fndecl (exp))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
        return true;
      default:
        break;
      }

  return false;
}

/* Return TRUE if FNDECL is either a TM builtin or a TM cloned
   function.  Return FALSE otherwise.  */

static bool
is_tm_builtin (const_tree fndecl)
{
  if (fndecl == NULL)
    return false;

  if (decl_is_tm_clone (fndecl))
    return true;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_TM_COMMIT:
        case BUILT_IN_TM_COMMIT_EH:
        case BUILT_IN_TM_ABORT:
        case BUILT_IN_TM_IRREVOCABLE:
        case BUILT_IN_TM_GETTMCLONE_IRR:
        case BUILT_IN_TM_MEMCPY:
        case BUILT_IN_TM_MEMMOVE:
        case BUILT_IN_TM_MEMSET:
        CASE_BUILT_IN_TM_STORE (1):
        CASE_BUILT_IN_TM_STORE (2):
        CASE_BUILT_IN_TM_STORE (4):
        CASE_BUILT_IN_TM_STORE (8):
        CASE_BUILT_IN_TM_STORE (FLOAT):
        CASE_BUILT_IN_TM_STORE (DOUBLE):
        CASE_BUILT_IN_TM_STORE (LDOUBLE):
        CASE_BUILT_IN_TM_STORE (M64):
        CASE_BUILT_IN_TM_STORE (M128):
        CASE_BUILT_IN_TM_STORE (M256):
        CASE_BUILT_IN_TM_LOAD (1):
        CASE_BUILT_IN_TM_LOAD (2):
        CASE_BUILT_IN_TM_LOAD (4):
        CASE_BUILT_IN_TM_LOAD (8):
        CASE_BUILT_IN_TM_LOAD (FLOAT):
        CASE_BUILT_IN_TM_LOAD (DOUBLE):
        CASE_BUILT_IN_TM_LOAD (LDOUBLE):
        CASE_BUILT_IN_TM_LOAD (M64):
        CASE_BUILT_IN_TM_LOAD (M128):
        CASE_BUILT_IN_TM_LOAD (M256):
        case BUILT_IN_TM_LOG:
        case BUILT_IN_TM_LOG_1:
        case BUILT_IN_TM_LOG_2:
        case BUILT_IN_TM_LOG_4:
        case BUILT_IN_TM_LOG_8:
        case BUILT_IN_TM_LOG_FLOAT:
        case BUILT_IN_TM_LOG_DOUBLE:
        case BUILT_IN_TM_LOG_LDOUBLE:
        case BUILT_IN_TM_LOG_M64:
        case BUILT_IN_TM_LOG_M128:
        case BUILT_IN_TM_LOG_M256:
          return true;
        default:
          break;
        }
    }
  return false;
}

/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (const_tree exp)
{
  int flags = 0;

  if (DECL_P (exp))
    {
      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
        flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
        flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp))
        flags |= ECF_CONST;
      if (DECL_PURE_P (exp))
        flags |= ECF_PURE;
      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
        flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))
        flags |= ECF_NOVOPS;
      if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
        flags |= ECF_LEAF;
      if (lookup_attribute ("cold", DECL_ATTRIBUTES (exp)))
        flags |= ECF_COLD;

      if (TREE_NOTHROW (exp))
        flags |= ECF_NOTHROW;

      if (flag_tm)
        {
          if (is_tm_builtin (exp))
            flags |= ECF_TM_BUILTIN;
          else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
                   || lookup_attribute ("transaction_pure",
                                        TYPE_ATTRIBUTES (TREE_TYPE (exp))))
            flags |= ECF_TM_PURE;
        }

      if (lookup_attribute ("expected_throw", DECL_ATTRIBUTES (exp)))
        flags |= ECF_XTHROW;

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp))
    {
      if (TYPE_READONLY (exp))
        flags |= ECF_CONST;

      if (flag_tm
          && ((flags & ECF_CONST) != 0
              || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
        flags |= ECF_TM_PURE;
    }
  else
    gcc_unreachable ();

  if (TREE_THIS_VOLATILE (exp))
    {
      flags |= ECF_NORETURN;
      if (flags & (ECF_CONST|ECF_PURE))
        flags |= ECF_LOOPING_CONST_OR_PURE;
    }

  return flags;
}

/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (const_tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (CALL_EXPR_FN (t) == NULL_TREE)
    flags = internal_fn_flags (CALL_EXPR_IFN (t));
  else
    {
      tree type = TREE_TYPE (CALL_EXPR_FN (t));
      if (type && TREE_CODE (type) == POINTER_TYPE)
        flags = flags_from_decl_or_type (TREE_TYPE (type));
      else
        flags = 0;
      if (CALL_EXPR_BY_DESCRIPTOR (t))
        flags |= ECF_BY_DESCRIPTOR;
    }

  return flags;
}

/* Return true if ARG should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, function_arg_info arg)
{
  if (tree type = arg.type)
    {
      /* If this type contains non-trivial constructors, then it is
         forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
        return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || !poly_int_tree_p (TYPE_SIZE (type)))
        return true;

      /* If a record type should be passed the same as its first (and only)
         member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
        {
          arg.type = TREE_TYPE (first_field (type));
          arg.mode = TYPE_MODE (arg.type);
        }
    }

  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), arg);
}

/* Return true if TYPE should be passed by reference when passed to
   the "..." arguments of a function.  */

bool
pass_va_arg_by_reference (tree type)
{
  return pass_by_reference (NULL, function_arg_info (type, /*named=*/false));
}

/* Decide whether ARG, which occurs in the state described by CA,
   should be passed by reference.  Return true if so and update
   ARG accordingly.  */

bool
apply_pass_by_reference_rules (CUMULATIVE_ARGS *ca, function_arg_info &arg)
{
  if (pass_by_reference (ca, arg))
    {
      arg.type = build_pointer_type (arg.type);
      arg.mode = TYPE_MODE (arg.type);
      arg.pass_by_reference = true;
      return true;
    }
  return false;
}
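
/* For example, for a C++ class type with a non-trivial copy constructor
   the type is TREE_ADDRESSABLE, so pass_by_reference returns true and
   apply_pass_by_reference_rules rewrites ARG in place: ARG.type becomes
   the corresponding pointer type, ARG.mode its TYPE_MODE, and
   ARG.pass_by_reference is set.  */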

/* Return true if ARG, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, const function_arg_info &arg)
{
  if (arg.type && TREE_ADDRESSABLE (arg.type))
    return false;
  return targetm.calls.callee_copies (pack_cumulative_args (ca), arg);
}

/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
                                int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
        *reg_parm_seen = 1;

        if (args[i].value == 0)
          {
            push_temp_slots ();
            args[i].value = expand_normal (args[i].tree_value);
            preserve_temp_slots (args[i].value);
            pop_temp_slots ();
          }

        /* If we are to promote the function arg to a wider mode,
           do it now.  */

        machine_mode old_mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));

        /* Some ABIs require scalar floating point modes to be passed
           in a wider scalar integer mode.  We need to explicitly
           reinterpret to an integer mode of the correct precision
           before extending to the desired result.  */
        if (SCALAR_INT_MODE_P (args[i].mode)
            && SCALAR_FLOAT_MODE_P (old_mode)
            && known_gt (GET_MODE_SIZE (args[i].mode),
                         GET_MODE_SIZE (old_mode)))
          args[i].value = convert_float_to_wider_int (args[i].mode, old_mode,
                                                      args[i].value);
        else if (args[i].mode != old_mode)
          args[i].value = convert_modes (args[i].mode, old_mode,
                                         args[i].value, args[i].unsignedp);

        /* If the value is a non-legitimate constant, force it into a
           pseudo now.  TLS symbols sometimes need a call to resolve.  */
        if (CONSTANT_P (args[i].value)
            && (!targetm.legitimate_constant_p (args[i].mode, args[i].value)
                || targetm.precompute_tls_p (args[i].mode, args[i].value)))
          args[i].value = force_reg (args[i].mode, args[i].value);

        /* If we're going to have to load the value by parts, pull the
           parts into pseudos.  The part extraction process can involve
           non-trivial computation.  */
        if (GET_CODE (args[i].reg) == PARALLEL)
          {
            tree type = TREE_TYPE (args[i].tree_value);
            args[i].parallel_value
              = emit_group_load_into_temps (args[i].reg, args[i].value,
                                            type, int_size_in_bytes (type));
          }

        /* If the value is expensive, and we are inside an appropriately
           short loop, put the value into a pseudo and then put the pseudo
           into the hard reg.

           For small register classes, also do this if this call uses
           register parameters.  This is to avoid reload conflicts while
           loading the parameter registers.  */

        else if ((! (REG_P (args[i].value)
                     || (GET_CODE (args[i].value) == SUBREG
                         && REG_P (SUBREG_REG (args[i].value)))))
                 && args[i].mode != BLKmode
                 && (set_src_cost (args[i].value, args[i].mode,
                                   optimize_insn_for_speed_p ())
                     > COSTS_N_INSNS (1))
                 && ((*reg_parm_seen
                      && targetm.small_register_classes_for_mode_p (args[i].mode))
                     || optimize))
          args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}

#ifdef REG_PARM_STACK_SPACE

  /* The argument list is the property of the called routine and it
     may clobber it.  If the fixed area has been used for previous
     parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  unsigned int low;
  unsigned int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
  if (ARGS_GROW_DOWNWARD)
    high += 1;

  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0 || low >= stack_usage_watermark)
      {
        int num_to_save;
        machine_mode save_mode;
        int delta;
        rtx addr;
        rtx stack_area;
        rtx save_area;

        while (stack_usage_map[--high] == 0)
          ;

        *low_to_save = low;
        *high_to_save = high;

        num_to_save = high - low + 1;

        /* If we don't have the required alignment, we must do this
           in BLKmode.  */
        scalar_int_mode imode;
        if (int_mode_for_size (num_to_save * BITS_PER_UNIT, 1).exists (&imode)
            && (low & (MIN (GET_MODE_SIZE (imode),
                            BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)) == 0)
          save_mode = imode;
        else
          save_mode = BLKmode;

        if (ARGS_GROW_DOWNWARD)
          delta = -high;
        else
          delta = low;

        addr = plus_constant (Pmode, argblock, delta);
        stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));

        set_mem_align (stack_area, PARM_BOUNDARY);
        if (save_mode == BLKmode)
          {
            save_area = assign_stack_temp (BLKmode, num_to_save);
            emit_block_move (validize_mem (save_area), stack_area,
                             GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
          }
        else
          {
            save_area = gen_reg_rtx (save_mode);
            emit_move_insn (save_area, stack_area);
          }

        return save_area;
      }

  return NULL_RTX;
}

static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx addr, stack_area;

  if (ARGS_GROW_DOWNWARD)
    delta = -high_to_save;
  else
    delta = low_to_save;

  addr = plus_constant (Pmode, argblock, delta);
  stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
                     GEN_INT (high_to_save - low_to_save + 1),
                     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */

/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
        && GET_CODE (args[i].reg) != PARALLEL
        && args[i].mode == BLKmode
        && MEM_P (args[i].value)
        && (MEM_ALIGN (args[i].value)
            < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
        int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        int endian_correction = 0;

        if (args[i].partial)
          {
            gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
            args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
          }
        else
          {
            args[i].n_aligned_regs
              = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
          }

        args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

        /* Structures smaller than a word are normally aligned to the
           least significant byte.  On a BYTES_BIG_ENDIAN machine,
           this means we must skip the empty high order bytes when
           calculating the bit offset.  */
        if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
            && (BLOCK_REG_PADDING (args[i].mode,
                                   TREE_TYPE (args[i].tree_value), 1)
                == PAD_DOWNWARD)
#else
            && BYTES_BIG_ENDIAN
#endif
            )
          endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

        for (j = 0; j < args[i].n_aligned_regs; j++)
          {
            rtx reg = gen_reg_rtx (word_mode);
            rtx word = operand_subword_force (args[i].value, j, BLKmode);
            int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

            args[i].aligned_regs[j] = reg;
            word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
                                      word_mode, word_mode, false, NULL);

            /* There is no need to restrict this code to loading items
               in TYPE_ALIGN sized hunks.  The bitfield instructions can
               load up entire word sized registers efficiently.

               ??? This may not be needed anymore.
               We used to emit a clobber here but that doesn't let later
               passes optimize the instructions we emit.  By storing 0 into
               the register later passes know the first AND to zero out the
               bitfield being set in the register is unnecessary.  The store
               of 0 will be deleted as will at least the first AND.  */

            emit_move_insn (reg, const0_rtx);

            bytes -= bitsize / BITS_PER_UNIT;
            store_bit_field (reg, bitsize, endian_correction, 0, 0,
                             word_mode, word, false, false);
          }
      }
}
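
/* Worked example of the endian correction above: for a 3-byte structure
   on a 32-bit big-endian target, endian_correction is 32 - 3 * 8 = 8,
   so the 24 extracted bits are stored at bit offset 8 of the word-sized
   pseudo, skipping the empty high-order byte as the comment above
   describes.  */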

/* Issue an error if CALL_EXPR was flagged as requiring
   tail-call optimization.  */

void
maybe_complain_about_tail_call (tree call_expr, const char *reason)
{
  gcc_assert (TREE_CODE (call_expr) == CALL_EXPR);
  if (!CALL_EXPR_MUST_TAIL_CALL (call_expr))
    return;

  error_at (EXPR_LOCATION (call_expr), "cannot tail-call: %s", reason);
}

/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   CALL_EXPR EXP.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
   value, or null.

   FNDECL is the tree code for the target of this call (if known).

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ and FLAGS are pointers to integer flags which
   may be modified by this routine.

   MUST_PREALLOCATE is a pointer to bool which may be
   modified by this routine.

   MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
   argument that requires allocation of stack space.

   CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
   the thunked-to function.  */

static void
initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
                                 struct arg_data *args,
                                 struct args_size *args_size,
                                 int n_named_args ATTRIBUTE_UNUSED,
                                 tree exp, tree struct_value_addr_value,
                                 tree fndecl, tree fntype,
                                 cumulative_args_t args_so_far,
                                 int reg_parm_stack_space,
                                 rtx *old_stack_level,
                                 poly_int64 *old_pending_adj,
                                 bool *must_preallocate, int *ecf_flags,
                                 bool *may_tailcall, bool call_from_thunk_p)
{
  CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
  location_t loc = EXPR_LOCATION (exp);

  /* Count arg position in order args appear.  */
  int argpos;

  int i;

  args_size->constant = 0;
  args_size->var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the back.  */

  i = num_actuals - 1;
  {
    int j = i;
    call_expr_arg_iterator iter;
    tree arg;

    if (struct_value_addr_value)
      {
        args[j].tree_value = struct_value_addr_value;
        j--;
      }
    argpos = 0;
    FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
      {
        tree argtype = TREE_TYPE (arg);

        if (targetm.calls.split_complex_arg
            && argtype
            && TREE_CODE (argtype) == COMPLEX_TYPE
            && targetm.calls.split_complex_arg (argtype))
          {
            tree subtype = TREE_TYPE (argtype);
            args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
            j--;
            args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
          }
        else
          args[j].tree_value = arg;
        j--;
        argpos++;
      }
  }

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (argpos = 0; argpos < num_actuals; i--, argpos++)
    {
      tree type = TREE_TYPE (args[i].tree_value);
      int unsignedp;

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
        args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union or record, pass things the way
         we would pass the first field of the union or record.  We have
         already verified that the modes are the same.  */
      if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
        type = TREE_TYPE (first_field (type));

      /* Decide where to pass this arg.

         args[i].reg is nonzero if all or part is passed in registers.

         args[i].partial is nonzero if part but not all is passed in registers,
         and the exact value says how many bytes are passed in registers.

         args[i].pass_on_stack is true if the argument must at least be
         computed on the stack.  It may then be loaded back into registers
         if args[i].reg is nonzero.

         These decisions are driven by the FUNCTION_... macros and must agree
         with those made by function.cc.  */

      /* See if this argument should be passed by invisible reference.  */
      function_arg_info arg (type, argpos < n_named_args);
      if (pass_by_reference (args_so_far_pnt, arg))
        {
          const bool callee_copies
            = reference_callee_copied (args_so_far_pnt, arg);
          tree base;

          /* If we're compiling a thunk, pass directly the address of an object
             already in memory, instead of making a copy.  Likewise if we want
             to make the copy in the callee instead of the caller.  */
          if ((call_from_thunk_p || callee_copies)
              && TREE_CODE (args[i].tree_value) != WITH_SIZE_EXPR
              && ((base = get_base_address (args[i].tree_value)), true)
              && TREE_CODE (base) != SSA_NAME
              && (!DECL_P (base) || MEM_P (DECL_RTL (base))))
            {
              /* We may have turned the parameter value into an SSA name.
                 Go back to the original parameter so we can take the
                 address.  */
              if (TREE_CODE (args[i].tree_value) == SSA_NAME)
                {
                  gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value));
                  args[i].tree_value = SSA_NAME_VAR (args[i].tree_value);
                  gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL);
                }
              /* Argument setup code may have copied the value to register.  We
                 revert that optimization now because the tail call code must
                 use the original location.  */
              if (TREE_CODE (args[i].tree_value) == PARM_DECL
                  && !MEM_P (DECL_RTL (args[i].tree_value))
                  && DECL_INCOMING_RTL (args[i].tree_value)
                  && MEM_P (DECL_INCOMING_RTL (args[i].tree_value)))
                set_decl_rtl (args[i].tree_value,
                              DECL_INCOMING_RTL (args[i].tree_value));

              mark_addressable (args[i].tree_value);

              /* We can't use sibcalls if a callee-copied argument is
                 stored in the current function's frame.  */
              if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
                {
                  *may_tailcall = false;
                  maybe_complain_about_tail_call (exp,
                                                  "a callee-copied argument is"
                                                  " stored in the current"
                                                  " function's frame");
                }

              args[i].tree_value = build_fold_addr_expr_loc (loc,
                                                             args[i].tree_value);
              type = TREE_TYPE (args[i].tree_value);

              if (*ecf_flags & ECF_CONST)
                *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
            }
          else
            {
              /* We make a copy of the object and pass the address to the
                 function being called.  */
              rtx copy;

              if (!COMPLETE_TYPE_P (type)
                  || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
                  || (flag_stack_check == GENERIC_STACK_CHECK
                      && compare_tree_int (TYPE_SIZE_UNIT (type),
                                           STACK_CHECK_MAX_VAR_SIZE) > 0))
                {
                  /* This is a variable-sized object.  Make space on the stack
                     for it.  */
                  rtx size_rtx = expr_size (args[i].tree_value);

                  if (*old_stack_level == 0)
                    {
                      emit_stack_save (SAVE_BLOCK, old_stack_level);
                      *old_pending_adj = pending_stack_adjust;
                      pending_stack_adjust = 0;
                    }

                  /* We can pass TRUE as the 4th argument because we just
                     saved the stack pointer and will restore it right after
                     the call.  */
                  copy = allocate_dynamic_stack_space (size_rtx,
                                                       TYPE_ALIGN (type),
                                                       TYPE_ALIGN (type),
                                                       max_int_size_in_bytes
                                                       (type),
                                                       true);
                  copy = gen_rtx_MEM (BLKmode, copy);
                  set_mem_attributes (copy, type, 1);
                }
              else
                copy = assign_temp (type, 1, 0);

              store_expr (args[i].tree_value, copy, 0, false, false);

              /* Just change the const function to pure and then let
                 the next test clear the pure based on
                 callee_copies.  */
              if (*ecf_flags & ECF_CONST)
                {
                  *ecf_flags &= ~ECF_CONST;
                  *ecf_flags |= ECF_PURE;
                }

              if (!callee_copies && *ecf_flags & ECF_PURE)
                *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);

              args[i].tree_value
                = build_fold_addr_expr_loc (loc, make_tree (type, copy));
              type = TREE_TYPE (args[i].tree_value);
              *may_tailcall = false;
              maybe_complain_about_tail_call (exp,
                                              "argument must be passed"
                                              " by copying");
            }
          arg.pass_by_reference = true;
        }

      unsignedp = TYPE_UNSIGNED (type);
      arg.type = type;
      arg.mode
        = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
                                 fndecl ? TREE_TYPE (fndecl) : fntype, 0);

      args[i].unsignedp = unsignedp;
      args[i].mode = arg.mode;

      targetm.calls.warn_parameter_passing_abi (args_so_far, type);

      args[i].reg = targetm.calls.function_arg (args_so_far, arg);

      /* If this is a sibling call and the machine has register windows, the
         register window has to be unwound before calling the routine, so
         arguments have to go into the incoming registers.  */
      if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
        args[i].tail_call_reg
          = targetm.calls.function_incoming_arg (args_so_far, arg);
      else
        args[i].tail_call_reg = args[i].reg;

      if (args[i].reg)
        args[i].partial = targetm.calls.arg_partial_bytes (args_so_far, arg);

      args[i].pass_on_stack = targetm.calls.must_pass_in_stack (arg);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
         it means that we are to pass this arg in the register(s) designated
         by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
          && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
        args[i].pass_on_stack = true;

      /* If this is an addressable type, we must preallocate the stack
         since we must evaluate the object into its final location.

         If this is to be passed in both registers and the stack, it is simpler
         to preallocate.  */
      if (TREE_ADDRESSABLE (type)
          || (args[i].pass_on_stack && args[i].reg != 0))
        *must_preallocate = true;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
          || reg_parm_stack_space > 0
          || args[i].pass_on_stack)
        locate_and_pad_parm (arg.mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                             1,
#else
                             args[i].reg != 0,
#endif
                             reg_parm_stack_space,
                             args[i].pass_on_stack ? 0 : args[i].partial,
                             fndecl, args_size, &args[i].locate);
#ifdef BLOCK_REG_PADDING
      else
        /* The argument is passed entirely in registers.  See at which
           end it should be padded.  */
        args[i].locate.where_pad =
          BLOCK_REG_PADDING (arg.mode, type,
                             int_size_in_bytes (type) <= UNITS_PER_WORD);
#endif

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].locate.size.constant;
      if (args[i].locate.size.var)
        ADD_PARM_SIZE (*args_size, args[i].locate.size.var);

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
         have been used, etc.  */

      /* ??? Traditionally we've passed TYPE_MODE here, instead of the
         promoted_mode used for function_arg above.  However, the
         corresponding handling of incoming arguments in function.cc
         does pass the promoted mode.  */
      arg.mode = TYPE_MODE (type);
      targetm.calls.function_arg_advance (args_so_far, arg);
    }
}

/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static poly_int64
compute_argument_block_size (int reg_parm_stack_space,
                             struct args_size *args_size,
                             tree fndecl ATTRIBUTE_UNUSED,
                             tree fntype ATTRIBUTE_UNUSED,
                             int preferred_stack_boundary ATTRIBUTE_UNUSED)
{
  poly_int64 unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will be already aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
        {
          /* We don't handle this case yet.  To handle it correctly we have
             to add the delta, round and subtract the delta.
             Currently no machine description requires this support.  */
          gcc_assert (multiple_p (stack_pointer_delta,
                                  preferred_stack_boundary));
          args_size->var = round_up (args_size->var, preferred_stack_boundary);
        }

      if (reg_parm_stack_space > 0)
        {
          args_size->var
            = size_binop (MAX_EXPR, args_size->var,
                          ssize_int (reg_parm_stack_space));

          /* The area corresponding to register parameters is not to count in
             the size of the block we need.  So make the adjustment.  */
          if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
            args_size->var
              = size_binop (MINUS_EXPR, args_size->var,
                            ssize_int (reg_parm_stack_space));
        }
    }
  else
    {
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
        preferred_stack_boundary = 1;
      args_size->constant = (aligned_upper_bound (args_size->constant
                                                  + stack_pointer_delta,
                                                  preferred_stack_boundary)
                             - stack_pointer_delta);

      args_size->constant = upper_bound (args_size->constant,
                                         reg_parm_stack_space);

      if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
        args_size->constant -= reg_parm_stack_space;
    }
  return unadjusted_args_size;
}
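
/* A worked example of the constant-size path above, with no reserved
   register-parameter area: given 20 bytes of arguments, a
   stack_pointer_delta of 8 and a preferred boundary of 16 bytes,
   aligned_upper_bound (20 + 8, 16) yields 32, so args_size->constant
   becomes 32 - 8 = 24; pushing those 24 bytes leaves the stack pointer
   16-byte aligned.  */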
1653
/* Precompute parameters as needed for a function call.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  */

static void
precompute_arguments (int num_actuals, struct arg_data *args)
{
  int i;

  /* If this is a libcall, then precompute all arguments so that we do not
     get extraneous instructions emitted as part of the libcall sequence.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  (We have code to avoid
     such cases by saving the outgoing stack arguments, but it results in
     worse code.)  */
  if (!ACCUMULATE_OUTGOING_ARGS)
    return;

  for (i = 0; i < num_actuals; i++)
    {
      tree type;
      machine_mode mode;

      if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
	continue;

      /* If this is an addressable type, we cannot pre-evaluate it.  */
      type = TREE_TYPE (args[i].tree_value);
      gcc_assert (!TREE_ADDRESSABLE (type));

      args[i].initial_value = args[i].value
	= expand_normal (args[i].tree_value);

      mode = TYPE_MODE (type);
      if (mode != args[i].mode)
	{
	  int unsignedp = args[i].unsignedp;
	  args[i].value
	    = convert_modes (args[i].mode, mode,
			     args[i].value, args[i].unsignedp);

	  /* CSE will replace this only if it contains args[i].value
	     pseudo, so convert it down to the declared mode using
	     a SUBREG.  */
	  if (REG_P (args[i].value)
	      && GET_MODE_CLASS (args[i].mode) == MODE_INT
	      && promote_mode (type, mode, &unsignedp) != args[i].mode)
	    {
	      args[i].initial_value
		= gen_lowpart_SUBREG (mode, args[i].value);
	      SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
	      SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp);
	    }
	}
    }
}

/* Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  */

static bool
finalize_must_preallocate (bool must_preallocate, int num_actuals,
			   struct arg_data *args, struct args_size *args_size)
{
  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      bool partial_seen = false;
      poly_int64 copy_to_evaluate_size = 0;
      int i;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
	{
	  if (args[i].partial > 0 && ! args[i].pass_on_stack)
	    partial_seen = true;
	  else if (partial_seen && args[i].reg == 0)
	    must_preallocate = true;

	  if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
	      && (TREE_CODE (args[i].tree_value) == CALL_EXPR
		  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
		  || TREE_CODE (args[i].tree_value) == COND_EXPR
		  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
	    copy_to_evaluate_size
	      += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	}

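      /* For instance (illustrative numbers): if 24 of 40 argument bytes
	 come from BLKmode CALL_EXPR temporaries, then 24 * 2 >= 40 and we
	 choose to preallocate rather than push.  */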
      if (maybe_ne (args_size->constant, 0)
	  && maybe_ge (copy_to_evaluate_size * 2, args_size->constant))
	must_preallocate = true;
    }
  return must_preallocate;
}

/* If we preallocated stack space, compute the address of each argument
   and store it into the ARGS array.

   We need not ensure it is a valid memory address here; it will be
   validized when it is used.

   ARGBLOCK is an rtx for the address of the outgoing arguments.  */

static void
compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
{
  if (argblock)
    {
      rtx arg_reg = argblock;
      int i;
      poly_int64 arg_offset = 0;

      if (GET_CODE (argblock) == PLUS)
	{
	  arg_reg = XEXP (argblock, 0);
	  arg_offset = rtx_to_poly_int64 (XEXP (argblock, 1));
	}

      for (i = 0; i < num_actuals; i++)
	{
	  rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
	  rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
	  rtx addr;
	  unsigned int align, boundary;
	  poly_uint64 units_on_stack = 0;
	  machine_mode partial_mode = VOIDmode;

	  /* Skip this parm if it will not be passed on the stack.  */
	  if (! args[i].pass_on_stack
	      && args[i].reg != 0
	      && args[i].partial == 0)
	    continue;

	  if (TYPE_EMPTY_P (TREE_TYPE (args[i].tree_value)))
	    continue;

	  addr = simplify_gen_binary (PLUS, Pmode, arg_reg, offset);
	  addr = plus_constant (Pmode, addr, arg_offset);

	  if (args[i].partial != 0)
	    {
	      /* Only part of the parameter is being passed on the stack.
		 Generate a simple memory reference of the correct size.  */
	      units_on_stack = args[i].locate.size.constant;
	      poly_uint64 bits_on_stack = units_on_stack * BITS_PER_UNIT;
	      partial_mode = int_mode_for_size (bits_on_stack, 1).else_blk ();
	      args[i].stack = gen_rtx_MEM (partial_mode, addr);
	      set_mem_size (args[i].stack, units_on_stack);
	    }
	  else
	    {
	      args[i].stack = gen_rtx_MEM (args[i].mode, addr);
	      set_mem_attributes (args[i].stack,
				  TREE_TYPE (args[i].tree_value), 1);
	    }
	  align = BITS_PER_UNIT;
	  boundary = args[i].locate.boundary;
	  poly_int64 offset_val;
	  if (args[i].locate.where_pad != PAD_DOWNWARD)
	    align = boundary;
	  else if (poly_int_rtx_p (offset, &offset_val))
	    {
	      align = least_bit_hwi (boundary);
	      unsigned int offset_align
		= known_alignment (offset_val) * BITS_PER_UNIT;
	      if (offset_align != 0)
		align = MIN (align, offset_align);
	    }
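
	  /* E.g. (illustrative): a PAD_DOWNWARD argument on a 128-bit slot
	     boundary whose offset is only known to be a multiple of 4 bytes
	     ends up with a MEM alignment of 32 bits.  */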
	  set_mem_align (args[i].stack, align);

	  addr = simplify_gen_binary (PLUS, Pmode, arg_reg, slot_offset);
	  addr = plus_constant (Pmode, addr, arg_offset);

	  if (args[i].partial != 0)
	    {
	      /* Only part of the parameter is being passed on the stack.
		 Generate a simple memory reference of the correct size.  */
	      args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
	      set_mem_size (args[i].stack_slot, units_on_stack);
	    }
	  else
	    {
	      args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
	      set_mem_attributes (args[i].stack_slot,
				  TREE_TYPE (args[i].tree_value), 1);
	    }
	  set_mem_align (args[i].stack_slot, args[i].locate.boundary);

	  /* Function incoming arguments may overlap with sibling call
	     outgoing arguments and we cannot allow reordering of reads
	     from function arguments with stores to outgoing arguments
	     of sibling calls.  */
	  set_mem_alias_set (args[i].stack, 0);
	  set_mem_alias_set (args[i].stack_slot, 0);
	}
    }
}

/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
   in a call instruction.

   FNDECL is the tree node for the target function.  For an indirect call
   FNDECL will be NULL_TREE.

   ADDR is the operand 0 of CALL_EXPR for this call.  */

static rtx
rtx_for_function_call (tree fndecl, tree addr)
{
  rtx funexp;

  /* Get the function to call, in the form of RTL.  */
  if (fndecl)
    {
      if (!TREE_USED (fndecl) && fndecl != current_function_decl)
	TREE_USED (fndecl) = 1;

      /* Get a SYMBOL_REF rtx for the function address.  */
      funexp = XEXP (DECL_RTL (fndecl), 0);
    }
  else
    /* Generate an rtx (probably a pseudo-register) for the address.  */
    {
      push_temp_slots ();
      funexp = expand_normal (addr);
      pop_temp_slots ();	/* FUNEXP can't be BLKmode.  */
    }
  return funexp;
}

/* Return the static chain for this function, if any.  */

rtx
rtx_for_static_chain (const_tree fndecl_or_type, bool incoming_p)
{
  if (DECL_P (fndecl_or_type) && !DECL_STATIC_CHAIN (fndecl_or_type))
    return NULL;

  return targetm.calls.static_chain (fndecl_or_type, incoming_p);
}

/* Internal state for internal_arg_pointer_based_exp and its helpers.  */
static struct
{
  /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
     or NULL_RTX if none has been scanned yet.  */
  rtx_insn *scan_start;
  /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
     based on crtl->args.internal_arg_pointer.  The element is NULL_RTX if the
     pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
     with fixed offset, or PC if this is with variable or unknown offset.  */
  vec<rtx> cache;
} internal_arg_pointer_exp_state;
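
/* For example (illustrative): after scanning
     (set (reg 100) (plus (reg iap) (const_int 16)))
   the cache entry for pseudo 100 is (const_int 16), while a pseudo that
   is assigned more than once is pessimistically recorded as PC.  */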

static rtx internal_arg_pointer_based_exp (const_rtx, bool);

/* Helper function for internal_arg_pointer_based_exp.  Scan insns in
   the tail call sequence, starting with the first insn that hasn't been
   scanned yet, and note for each pseudo on the LHS whether it is based
   on crtl->args.internal_arg_pointer or not, and what offset from that
   pointer it has.  */

static void
internal_arg_pointer_based_exp_scan (void)
{
  rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start;

  if (scan_start == NULL_RTX)
    insn = get_insns ();
  else
    insn = NEXT_INSN (scan_start);

  while (insn)
    {
      rtx set = single_set (insn);
      if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
	{
	  rtx val = NULL_RTX;
	  unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
	  /* Punt on pseudos set multiple times.  */
	  if (idx < internal_arg_pointer_exp_state.cache.length ()
	      && (internal_arg_pointer_exp_state.cache[idx]
		  != NULL_RTX))
	    val = pc_rtx;
	  else
	    val = internal_arg_pointer_based_exp (SET_SRC (set), false);
	  if (val != NULL_RTX)
	    {
	      if (idx >= internal_arg_pointer_exp_state.cache.length ())
		internal_arg_pointer_exp_state.cache
		  .safe_grow_cleared (idx + 1, true);
	      internal_arg_pointer_exp_state.cache[idx] = val;
	    }
	}
      if (NEXT_INSN (insn) == NULL_RTX)
	scan_start = insn;
      insn = NEXT_INSN (insn);
    }

  internal_arg_pointer_exp_state.scan_start = scan_start;
}

/* Compute whether RTL is based on crtl->args.internal_arg_pointer.  Return
   NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
   it with fixed offset, or PC if this is with variable or unknown offset.
   TOPLEVEL is true if the function is invoked at the topmost level.  */

static rtx
internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel)
{
  if (CONSTANT_P (rtl))
    return NULL_RTX;

  if (rtl == crtl->args.internal_arg_pointer)
    return const0_rtx;

  if (REG_P (rtl) && HARD_REGISTER_P (rtl))
    return NULL_RTX;

  poly_int64 offset;
  if (GET_CODE (rtl) == PLUS && poly_int_rtx_p (XEXP (rtl, 1), &offset))
    {
      rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
      if (val == NULL_RTX || val == pc_rtx)
	return val;
      return plus_constant (Pmode, val, offset);
    }

  /* When called at the topmost level, scan pseudo assignments in between the
     last scanned instruction in the tail call sequence and the latest insn
     in that sequence.  */
  if (toplevel)
    internal_arg_pointer_based_exp_scan ();

  if (REG_P (rtl))
    {
      unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
      if (idx < internal_arg_pointer_exp_state.cache.length ())
	return internal_arg_pointer_exp_state.cache[idx];

      return NULL_RTX;
    }

  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, rtl, NONCONST)
    {
      const_rtx x = *iter;
      if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX)
	return pc_rtx;
      if (MEM_P (x))
	iter.skip_subrtxes ();
    }

  return NULL_RTX;
}

/* Return true if SIZE bytes starting from address ADDR might overlap an
   already-clobbered argument area.  This function is used to determine
   if we should give up a sibcall.  */

static bool
mem_might_overlap_already_clobbered_arg_p (rtx addr, poly_uint64 size)
{
  poly_int64 i;
  unsigned HOST_WIDE_INT start, end;
  rtx val;

  if (bitmap_empty_p (stored_args_map)
      && stored_args_watermark == HOST_WIDE_INT_M1U)
    return false;
  val = internal_arg_pointer_based_exp (addr, true);
  if (val == NULL_RTX)
    return false;
  else if (!poly_int_rtx_p (val, &i))
    return true;

  if (known_eq (size, 0U))
    return false;

  if (STACK_GROWS_DOWNWARD)
    i -= crtl->args.pretend_args_size;
  else
    i += crtl->args.pretend_args_size;

  if (ARGS_GROW_DOWNWARD)
    i = -i - size;

  /* We can ignore any references to the function's pretend args,
     which at this point would manifest as negative values of I.  */
  if (known_le (i, 0) && known_le (size, poly_uint64 (-i)))
    return false;
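
  /* For instance (illustrative): with 8 bytes of pretend args on a
     STACK_GROWS_DOWNWARD target, a 4-byte access at the internal arg
     pointer itself gives I == -8, and since 4 <= 8 the access is
     ignored.  */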

  start = maybe_lt (i, 0) ? 0 : constant_lower_bound (i);
  if (!(i + size).is_constant (&end))
    end = HOST_WIDE_INT_M1U;

  if (end > stored_args_watermark)
    return true;

  end = MIN (end, SBITMAP_SIZE (stored_args_map));
  for (unsigned HOST_WIDE_INT k = start; k < end; ++k)
    if (bitmap_bit_p (stored_args_map, k))
      return true;

  return false;
}

/* Do the register loads required for any wholly-register parms or any
   parms which are passed both on the stack and in a register.  Their
   expressions were already evaluated.

   Mark all register-parms as living through the call, putting these USE
   insns in the CALL_INSN_FUNCTION_USAGE field.

   When IS_SIBCALL, perform the check_sibcall_argument_overlap
   checking, setting *SIBCALL_FAILURE if appropriate.  */

static void
load_register_parameters (struct arg_data *args, int num_actuals,
			  rtx *call_fusage, int flags, int is_sibcall,
			  bool *sibcall_failure)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    {
      rtx reg = ((flags & ECF_SIBCALL)
		 ? args[i].tail_call_reg : args[i].reg);
      if (reg)
	{
	  int partial = args[i].partial;
	  int nregs;
	  poly_int64 size = 0;
	  HOST_WIDE_INT const_size = 0;
	  rtx_insn *before_arg = get_last_insn ();
	  tree tree_value = args[i].tree_value;
	  tree type = TREE_TYPE (tree_value);
	  if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
	    type = TREE_TYPE (first_field (type));
	  /* Set non-negative if we must move a word at a time, even if
	     just one word (e.g., partial == 4 && mode == DFmode).  Set
	     to -1 if we just use a normal move insn.  This value can be
	     zero if the argument is a zero size structure.  */
	  nregs = -1;
	  if (GET_CODE (reg) == PARALLEL)
	    ;
	  else if (partial)
	    {
	      gcc_assert (partial % UNITS_PER_WORD == 0);
	      nregs = partial / UNITS_PER_WORD;
	    }
	  else if (TYPE_MODE (type) == BLKmode)
	    {
	      /* Variable-sized parameters should be described by a
		 PARALLEL instead.  */
	      const_size = int_size_in_bytes (type);
	      gcc_assert (const_size >= 0);
	      nregs = (const_size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	      size = const_size;
	    }
	  else
	    size = GET_MODE_SIZE (args[i].mode);

	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */

	  if (GET_CODE (reg) == PARALLEL)
	    emit_group_move (reg, args[i].parallel_value);

	  /* If simple case, just do move.  If normal partial, store_one_arg
	     has already loaded the register for us.  In all other cases,
	     load the register(s) from memory.  */

	  else if (nregs == -1)
	    {
	      emit_move_insn (reg, args[i].value);
#ifdef BLOCK_REG_PADDING
	      /* Handle case where we have a value that needs shifting
		 up to the msb.  E.g. a QImode value and we're padding
		 upward on a BYTES_BIG_ENDIAN machine.  */
	      if (args[i].locate.where_pad
		  == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))
		{
		  gcc_checking_assert (ordered_p (size, UNITS_PER_WORD));
		  if (maybe_lt (size, UNITS_PER_WORD))
		    {
		      rtx x;
		      poly_int64 shift
			= (UNITS_PER_WORD - size) * BITS_PER_UNIT;

		      /* Assigning REG here rather than a temp makes
			 CALL_FUSAGE report the whole reg as used.
			 Strictly speaking, the call only uses SIZE
			 bytes at the msb end, but it doesn't seem worth
			 generating rtl to say that.  */
		      reg = gen_rtx_REG (word_mode, REGNO (reg));
		      x = expand_shift (LSHIFT_EXPR, word_mode,
					reg, shift, reg, 1);
		      if (x != reg)
			emit_move_insn (reg, x);
		    }
		}
#endif
	    }

	  /* If we have pre-computed the values to put in the registers in
	     the case of non-aligned structures, copy them in now.  */

	  else if (args[i].n_aligned_regs != 0)
	    for (j = 0; j < args[i].n_aligned_regs; j++)
	      emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
			      args[i].aligned_regs[j]);

	  /* If we need a single register and the source is a constant
	     VAR_DECL with a simple constructor, expand that constructor
	     via a pseudo rather than read from (possibly misaligned)
	     memory.  PR middle-end/95126.  */
	  else if (nregs == 1
		   && partial == 0
		   && !args[i].pass_on_stack
		   && VAR_P (tree_value)
		   && TREE_READONLY (tree_value)
		   && !TREE_SIDE_EFFECTS (tree_value)
		   && immediate_const_ctor_p (DECL_INITIAL (tree_value)))
	    {
	      rtx target = gen_reg_rtx (word_mode);
	      store_constructor (DECL_INITIAL (tree_value), target, 0,
				 int_expr_size (DECL_INITIAL (tree_value)),
				 false);
	      reg = gen_rtx_REG (word_mode, REGNO (reg));
	      emit_move_insn (reg, target);
	    }
	  else if (partial == 0 || args[i].pass_on_stack)
	    {
	      /* SIZE and CONST_SIZE are 0 for partial arguments and
		 the size of a BLKmode type otherwise.  */
	      gcc_checking_assert (known_eq (size, const_size));
	      rtx mem = validize_mem (copy_rtx (args[i].value));

	      /* Check for overlap with already clobbered argument area,
		 providing that this has non-zero size.  */
	      if (is_sibcall
		  && const_size != 0
		  && (mem_might_overlap_already_clobbered_arg_p
		      (XEXP (args[i].value, 0), const_size)))
		*sibcall_failure = true;

	      if (const_size % UNITS_PER_WORD == 0
		  || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
		move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
	      else
		{
		  if (nregs > 1)
		    move_block_to_reg (REGNO (reg), mem, nregs - 1,
				       args[i].mode);
		  rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
		  unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
		  unsigned int bitsize = const_size * BITS_PER_UNIT - bitoff;
		  rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
					     word_mode, word_mode, false,
					     NULL);
		  if (BYTES_BIG_ENDIAN)
		    x = expand_shift (LSHIFT_EXPR, word_mode, x,
				      BITS_PER_WORD - bitsize, dest, 1);
		  if (x != dest)
		    emit_move_insn (dest, x);
		}
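
	      /* Illustrative example: on a 32-bit big-endian target, a
		 6-byte BLKmode argument copies one full word, extracts the
		 remaining 16 bits at bit offset 32, and shifts them left
		 by 16 so they occupy the msb end of the second register.  */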

	      /* Handle a BLKmode that needs shifting.  */
	      if (nregs == 1 && const_size < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
		  && args[i].locate.where_pad == PAD_DOWNWARD
#else
		  && BYTES_BIG_ENDIAN
#endif
		  )
		{
		  rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
		  int shift = (UNITS_PER_WORD - const_size) * BITS_PER_UNIT;
		  enum tree_code dir = (BYTES_BIG_ENDIAN
					? RSHIFT_EXPR : LSHIFT_EXPR);
		  rtx x;

		  x = expand_shift (dir, word_mode, dest, shift, dest, 1);
		  if (x != dest)
		    emit_move_insn (dest, x);
		}
	    }

	  /* When a parameter is a block, and perhaps in other cases, it is
	     possible that it did a load from an argument slot that was
	     already clobbered.  */
	  if (is_sibcall
	      && check_sibcall_argument_overlap (before_arg, &args[i], false))
	    *sibcall_failure = true;

	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (GET_CODE (reg) == PARALLEL)
	    use_group_regs (call_fusage, reg);
	  else if (nregs == -1)
	    use_reg_mode (call_fusage, reg, TYPE_MODE (type));
	  else if (nregs > 0)
	    use_regs (call_fusage, REGNO (reg), nregs);
	}
    }
}

/* We need to pop PENDING_STACK_ADJUST bytes.  But, if the arguments
   wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
   bytes, then we would need to push some additional bytes to pad the
   arguments.  So, we try to compute an adjust to the stack pointer for an
   amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
   bytes.  Then, when the arguments are pushed the stack will be perfectly
   aligned.

   Return true if this optimization is possible, storing the adjustment
   in ADJUSTMENT_OUT and setting ARGS_SIZE->CONSTANT to the number of
   bytes that should be popped after the call.  */

static bool
combine_pending_stack_adjustment_and_call (poly_int64 *adjustment_out,
					   poly_int64 unadjusted_args_size,
					   struct args_size *args_size,
					   unsigned int preferred_unit_stack_boundary)
{
  /* The number of bytes to pop so that the stack will be
     under-aligned by UNADJUSTED_ARGS_SIZE bytes.  */
  poly_int64 adjustment;
  /* The alignment of the stack after the arguments are pushed, if we
     just pushed the arguments without adjusting the stack here.  */
  unsigned HOST_WIDE_INT unadjusted_alignment;

  if (!known_misalignment (stack_pointer_delta + unadjusted_args_size,
			   preferred_unit_stack_boundary,
			   &unadjusted_alignment))
    return false;

  /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
     as possible -- leaving just enough left to cancel out the
     UNADJUSTED_ALIGNMENT.  In other words, we want to ensure that the
     PENDING_STACK_ADJUST is non-negative, and congruent to
     -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY.  */

  /* Begin by trying to pop all the bytes.  */
  unsigned HOST_WIDE_INT tmp_misalignment;
  if (!known_misalignment (pending_stack_adjust,
			   preferred_unit_stack_boundary,
			   &tmp_misalignment))
    return false;
  unadjusted_alignment -= tmp_misalignment;
  adjustment = pending_stack_adjust;
  /* Push enough additional bytes that the stack will be aligned
     after the arguments are pushed.  */
  if (preferred_unit_stack_boundary > 1 && unadjusted_alignment)
    adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
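
  /* Worked example (illustrative numbers): with a 16-byte boundary, if
     stack_pointer_delta + unadjusted_args_size is misaligned by 8 and
     pending_stack_adjust is 20 (misaligned by 4), unadjusted_alignment
     becomes 8 - 4 = 4 and adjustment becomes 20 - (16 - 4) = 8, so we
     pop only 8 of the 20 pending bytes.  */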

  /* We need to know whether the adjusted argument size
     (UNADJUSTED_ARGS_SIZE - ADJUSTMENT) constitutes an allocation
     or a deallocation.  */
  if (!ordered_p (adjustment, unadjusted_args_size))
    return false;

  /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
     bytes after the call.  The right number is the entire
     PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
     by the arguments in the first place.  */
  args_size->constant
    = pending_stack_adjust - adjustment + unadjusted_args_size;

  *adjustment_out = adjustment;
  return true;
}

/* Scan expression X for dereferences of argument slots already clobbered
   by tail call arguments (as noted in the stored_args_map bitmap).
   Return true if X dereferences such an argument slot, false
   otherwise.  */

static bool
check_sibcall_argument_overlap_1 (rtx x)
{
  RTX_CODE code;
  int i, j;
  const char *fmt;

  if (x == NULL_RTX)
    return false;

  code = GET_CODE (x);

  /* We need not check the operands of the CALL expression itself.  */
  if (code == CALL)
    return false;

  if (code == MEM)
    return (mem_might_overlap_already_clobbered_arg_p
	    (XEXP (x, 0), GET_MODE_SIZE (GET_MODE (x))));

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	{
	  if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
	    return true;
	}
      else if (*fmt == 'E')
	{
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
	      return true;
	}
    }
  return false;
}

/* Scan the sequence after INSN for dereferences of argument slots already
   clobbered by tail call arguments (as noted in the stored_args_map
   bitmap).  If MARK_STORED_ARGS_MAP, afterwards add the stack slots of
   ARG to the stored_args_map bitmap (when ARG is a register,
   MARK_STORED_ARGS_MAP should be false).  Return true if the sequence
   after INSN dereferences such an argument slot, false otherwise.  */
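
/* For example (illustrative): an argument stored at slot offset 8 with
   size 4 marks bits 8..11 of stored_args_map; if its extent is not a
   compile-time constant, stored_args_watermark is lowered instead.  */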

static bool
check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg,
				bool mark_stored_args_map)
{
  poly_uint64 low, high;
  unsigned HOST_WIDE_INT const_low, const_high;

  if (insn == NULL_RTX)
    insn = get_insns ();
  else
    insn = NEXT_INSN (insn);

  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& check_sibcall_argument_overlap_1 (PATTERN (insn)))
      break;

  if (mark_stored_args_map)
    {
      if (ARGS_GROW_DOWNWARD)
	low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
      else
	low = arg->locate.slot_offset.constant;
      high = low + arg->locate.size.constant;

      const_low = constant_lower_bound (low);
      if (high.is_constant (&const_high))
	for (unsigned HOST_WIDE_INT i = const_low; i < const_high; ++i)
	  bitmap_set_bit (stored_args_map, i);
      else
	stored_args_watermark = MIN (stored_args_watermark, const_low);
    }
  return insn != NULL_RTX;
}

/* Given that a function returns a value of mode MODE at the most
   significant end of hard register VALUE, shift VALUE left or right
   as specified by LEFT_P.  Return true if some action was needed.  */

bool
shift_return_value (machine_mode mode, bool left_p, rtx value)
{
  gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
  machine_mode value_mode = GET_MODE (value);
  poly_int64 shift = GET_MODE_BITSIZE (value_mode) - GET_MODE_BITSIZE (mode);
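
  /* E.g. (illustrative): an HImode value held at the msb end of an
     SImode register needs a shift by 32 - 16 = 16 bits.  */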

  if (known_eq (shift, 0))
    return false;

  /* Use ashr rather than lshr for right shifts.  This is for the benefit
     of the MIPS port, which requires SImode values to be sign-extended
     when stored in 64-bit registers.  */
  if (!force_expand_binop (value_mode, left_p ? ashl_optab : ashr_optab,
			   value, gen_int_shift_amount (value_mode, shift),
			   value, 1, OPTAB_WIDEN))
    gcc_unreachable ();
  return true;
}

/* If X is a likely-spilled register value, copy it to a pseudo
   register and return that register.  Return X otherwise.  */

static rtx
avoid_likely_spilled_reg (rtx x)
{
  rtx new_rtx;

  if (REG_P (x)
      && HARD_REGISTER_P (x)
      && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
    {
      /* Make sure that we generate a REG rather than a CONCAT.
	 Moves into CONCATs can need nontrivial instructions,
	 and the whole point of this function is to avoid
	 using the hard register directly in such a situation.  */
      generating_concat_p = 0;
      new_rtx = gen_reg_rtx (GET_MODE (x));
      generating_concat_p = 1;
      emit_move_insn (new_rtx, x);
      return new_rtx;
    }
  return x;
}

/* Helper function for expand_call.
   Return false if EXP is not implementable as a sibling call.  */

static bool
can_implement_as_sibling_call_p (tree exp,
				 rtx structure_value_addr,
				 tree funtype,
				 tree fndecl,
				 int flags,
				 tree addr,
				 const args_size &args_size)
{
  if (!targetm.have_sibcall_epilogue ())
    {
      maybe_complain_about_tail_call
	(exp,
	 "machine description does not have"
	 " a sibcall_epilogue instruction pattern");
      return false;
    }

  /* Doing sibling call optimization needs some work, since
     structure_value_addr can be allocated on the stack.
     It does not seem worth the effort since few optimizable
     sibling calls will return a structure.  */
  if (structure_value_addr != NULL_RTX)
    {
      maybe_complain_about_tail_call (exp, "callee returns a structure");
      return false;
    }

  /* Check whether the target is able to optimize the call
     into a sibcall.  */
  if (!targetm.function_ok_for_sibcall (fndecl, exp))
    {
      maybe_complain_about_tail_call (exp,
				      "target is not able to optimize the"
				      " call into a sibling call");
      return false;
    }

  /* Functions that do not return exactly once may not be sibcall
     optimized.  */
  if (flags & ECF_RETURNS_TWICE)
    {
      maybe_complain_about_tail_call (exp, "callee returns twice");
      return false;
    }
  if (flags & ECF_NORETURN)
    {
      maybe_complain_about_tail_call (exp, "callee does not return");
      return false;
    }

  if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr))))
    {
      maybe_complain_about_tail_call (exp, "volatile function type");
      return false;
    }

  /* __sanitizer_cov_trace_pc is supposed to inspect its return address
     to identify the caller, and therefore should not be tailcalled.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SANITIZER_COV_TRACE_PC)
    {
      /* No need for maybe_complain_about_tail_call here:
	 the call is synthesized by the compiler.  */
      return false;
    }

  /* If the called function is nested in the current one, it might access
     some of the caller's arguments, but could clobber them beforehand if
     the argument areas are shared.  */
  if (fndecl && decl_function_context (fndecl) == current_function_decl)
    {
      maybe_complain_about_tail_call (exp, "nested function");
      return false;
    }

  /* If this function requires more stack slots than the current
     function, we cannot change it into a sibling call.
     crtl->args.pretend_args_size is not part of the
     stack allocated by our caller.  */
  if (maybe_gt (args_size.constant,
		crtl->args.size - crtl->args.pretend_args_size))
    {
      maybe_complain_about_tail_call (exp,
				      "callee required more stack slots"
				      " than the caller");
      return false;
    }

  /* If the callee pops its own arguments, then it must pop exactly
     the same number of arguments as the current function.  */
  if (maybe_ne (targetm.calls.return_pops_args (fndecl, funtype,
						args_size.constant),
		targetm.calls.return_pops_args (current_function_decl,
						TREE_TYPE
						(current_function_decl),
						crtl->args.size)))
    {
      maybe_complain_about_tail_call (exp,
				      "inconsistent number of"
				      " popped arguments");
      return false;
    }

  if (!lang_hooks.decls.ok_for_sibcall (fndecl))
    {
      maybe_complain_about_tail_call (exp, "frontend does not support"
				      " sibling call");
      return false;
    }

  /* All checks passed.  */
  return true;
}

/* Update stack alignment when the parameter is passed on the stack,
   since the outgoing parameter requires extra alignment on the calling
   function side.  */

static void
update_stack_alignment_for_call (struct locate_and_pad_arg_data *locate)
{
  if (crtl->stack_alignment_needed < locate->boundary)
    crtl->stack_alignment_needed = locate->boundary;
  if (crtl->preferred_stack_boundary < locate->boundary)
    crtl->preferred_stack_boundary = locate->boundary;
}

/* Generate all the code for a CALL_EXPR exp
   and return an rtx for its value.
   Store the value in TARGET (specified as an rtx) if convenient.
   If the value is stored in TARGET then TARGET is returned.
   If IGNORE is nonzero, then we ignore the value of the function call.  */

rtx
expand_call (tree exp, rtx target, int ignore)
{
  /* Nonzero if we are currently expanding a call.  */
  static int currently_expanding_call = 0;

  /* RTX for the function to be called.  */
  rtx funexp;
  /* Sequence of insns to perform a normal "call".  */
  rtx_insn *normal_call_insns = NULL;
  /* Sequence of insns to perform a tail "call".  */
  rtx_insn *tail_call_insns = NULL;
  /* Data type of the function.  */
  tree funtype;
  tree type_arg_types;
  tree rettype;
  /* Declaration of the function being called,
     or 0 if the function is computed (not known by name).  */
  tree fndecl = 0;
  /* The type of the function being called.  */
  tree fntype;
  bool try_tail_call = CALL_EXPR_TAILCALL (exp);
  bool must_tail_call = CALL_EXPR_MUST_TAIL_CALL (exp);
  int pass;

  /* Register in which non-BLKmode value will be returned,
     or 0 if no value or if value is BLKmode.  */
  rtx valreg;
  /* Address where we should return a BLKmode value;
     0 if value not BLKmode.  */
  rtx structure_value_addr = 0;
  /* Nonzero if that address is being passed by treating it as
     an extra, implicit first parameter.  Otherwise,
     it is passed by being copied directly into struct_value_rtx.  */
  int structure_value_addr_parm = 0;
  /* Holds the value of implicit argument for the struct value.  */
  tree structure_value_addr_value = NULL_TREE;
  /* Size of aggregate value wanted, or zero if none wanted
     or if we are using the non-reentrant PCC calling convention
     or expecting the value in registers.  */
  poly_int64 struct_value_size = 0;
  /* True if called function returns an aggregate in memory PCC style,
     by returning the address of where to find it.  */
  bool pcc_struct_value = false;
  rtx struct_value = 0;

  /* Number of actual parameters in this call, including struct value addr.  */
  int num_actuals;
  /* Number of named args.  Args after this are anonymous ones
     and they must all go on the stack.  */
  int n_named_args;
  /* Number of complex actual arguments that need to be split.  */
  int num_complex_actuals = 0;

  /* Vector of information about each argument.
     Arguments are numbered in the order they will be pushed,
     not the order they are written.  */
  struct arg_data *args;

  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  struct args_size adjusted_args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  poly_int64 unadjusted_args_size;
  /* Data on reg parms scanned so far.  */
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;
  /* Nonzero if a reg parm has been scanned.  */
  int reg_parm_seen;

  /* True if we must avoid push-insns in the args for this call.
     If stack space is allocated for register parameters, but not by the
     caller, then it is preallocated in the fixed part of the stack frame.
     So the entire argument block must then be preallocated (i.e., we
     ignore PUSH_ROUNDING in that case).  */
  bool must_preallocate = !targetm.calls.push_argument (0);

  /* Size of the stack reserved for parameter registers.  */
  int reg_parm_stack_space = 0;

  /* Address of space preallocated for stack parms
     (on machines that lack push insns), or 0 if space not preallocated.  */
  rtx argblock = 0;

  /* Mask of ECF_ and ERF_ flags.  */
  int flags = 0;
  int return_flags = 0;
#ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save, high_to_save;
  rtx save_area = 0;		/* Place that it is saved.  */
#endif

  unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;
  unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
  char *stack_usage_map_buf = NULL;

  poly_int64 old_stack_allocated;

  /* State variables to track stack modifications.  */
  rtx old_stack_level = 0;
  int old_stack_arg_under_construction = 0;
  poly_int64 old_pending_adj = 0;
  int old_inhibit_defer_pop = inhibit_defer_pop;

  /* Some stack pointer alterations we make are performed via
     allocate_dynamic_stack_space.  This modifies the stack_pointer_delta,
     which we then also need to save/restore along the way.  */
  poly_int64 old_stack_pointer_delta = 0;

  rtx call_fusage;
  tree addr = CALL_EXPR_FN (exp);
  int i;
  /* The alignment of the stack, in bits.  */
  unsigned HOST_WIDE_INT preferred_stack_boundary;
  /* The alignment of the stack, in bytes.  */
  unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
  /* The static chain value to use for this call.  */
  rtx static_chain_value;
  /* See if this is a "nothrow" function call.  */
  if (TREE_NOTHROW (exp))
    flags |= ECF_NOTHROW;

  /* See if we can find a DECL-node for the actual function, and get the
     function attributes (flags) from the function decl or type node.  */
  fndecl = get_callee_fndecl (exp);
  if (fndecl)
    {
      fntype = TREE_TYPE (fndecl);
      flags |= flags_from_decl_or_type (fndecl);
      return_flags |= decl_return_flags (fndecl);
    }
  else
    {
      fntype = TREE_TYPE (TREE_TYPE (addr));
      flags |= flags_from_decl_or_type (fntype);
      if (CALL_EXPR_BY_DESCRIPTOR (exp))
	flags |= ECF_BY_DESCRIPTOR;
    }
  rettype = TREE_TYPE (exp);

  struct_value = targetm.calls.struct_value_rtx (fntype, 0);

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (AGGREGATE_TYPE_P (rettype))
    warning (OPT_Waggregate_return, "function call has aggregate value");

  /* If the result of a non looping pure or const function call is
     ignored (or void), and none of its arguments are volatile, we can
     avoid expanding the call and just evaluate the arguments for
     side-effects.  */
  if ((flags & (ECF_CONST | ECF_PURE))
      && (!(flags & ECF_LOOPING_CONST_OR_PURE))
      && (flags & ECF_NOTHROW)
      && (ignore || target == const0_rtx
	  || TYPE_MODE (rettype) == VOIDmode))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	if (TREE_THIS_VOLATILE (arg))
	  {
	    volatilep = true;
	    break;
	  }

      if (! volatilep)
	{
	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}
    }

#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
#endif

  if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
      && reg_parm_stack_space > 0 && targetm.calls.push_argument (0))
    must_preallocate = true;

  /* Set up a place to return a structure.  */

  /* Cater to broken compilers.  */
  if (aggregate_value_p (exp, fntype))
    {
      /* This call returns a big structure.  */
      flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);

#ifdef PCC_STATIC_STRUCT_RETURN
      {
	pcc_struct_value = true;
      }
#else /* not PCC_STATIC_STRUCT_RETURN */
      {
	if (!poly_int_tree_p (TYPE_SIZE_UNIT (rettype), &struct_value_size))
	  struct_value_size = -1;

	/* Even if it is semantically safe to use the target as the return
	   slot, it may be not sufficiently aligned for the return type.  */
	if (CALL_EXPR_RETURN_SLOT_OPT (exp)
	    && target
	    && MEM_P (target)
	    /* If rettype is addressable, we may not create a temporary.
	       If target is properly aligned at runtime and the compiler
	       just doesn't know about it, it will work fine, otherwise it
	       will be UB.  */
	    && (TREE_ADDRESSABLE (rettype)
		|| !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
		     && targetm.slow_unaligned_access (TYPE_MODE (rettype),
						       MEM_ALIGN (target)))))
	  structure_value_addr = XEXP (target, 0);
	else
	  {
	    /* For variable-sized objects, we must be called with a target
	       specified.  If we were to allocate space on the stack here,
	       we would have no way of knowing when to free it.  */
	    rtx d = assign_temp (rettype, 1, 1);
	    structure_value_addr = XEXP (d, 0);
	    target = 0;
	  }
      }
#endif /* not PCC_STATIC_STRUCT_RETURN */
    }

  /* Figure out the amount to which the stack should be aligned.  */
  preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  if (fndecl)
    {
      struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl);
      /* Without automatic stack alignment, we can't increase preferred
	 stack boundary.  With automatic stack alignment, it is
	 unnecessary since unless we can guarantee that all callers will
	 align the outgoing stack properly, callee has to align its
	 stack anyway.  */
      if (i
	  && i->preferred_incoming_stack_boundary
	  && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
	preferred_stack_boundary = i->preferred_incoming_stack_boundary;
    }

  /* Operand 0 is a pointer-to-function; get the type of the function.  */
  funtype = TREE_TYPE (addr);
  gcc_assert (POINTER_TYPE_P (funtype));
  funtype = TREE_TYPE (funtype);

  /* Count whether there are actual complex arguments that need to be split
     into their real and imaginary parts.  Munge the type_arg_types
     appropriately here as well.  */
  if (targetm.calls.split_complex_arg)
    {
      call_expr_arg_iterator iter;
      tree arg;
      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	{
	  tree type = TREE_TYPE (arg);
	  if (type && TREE_CODE (type) == COMPLEX_TYPE
	      && targetm.calls.split_complex_arg (type))
	    num_complex_actuals++;
	}
      type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
    }
  else
    type_arg_types = TYPE_ARG_TYPES (funtype);

  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = 1;

  /* If struct_value_rtx is 0, it means pass the address
     as if it were an extra parameter.  Put the argument expression
     in structure_value_addr_value.  */
  if (structure_value_addr && struct_value == 0)
    {
      /* If structure_value_addr is a REG other than
	 virtual_outgoing_args_rtx, we can always use it.  If it
	 is not a REG, we must always copy it into a register.
	 If it is virtual_outgoing_args_rtx, we must copy it to another
	 register in some cases.  */
      rtx temp = (!REG_P (structure_value_addr)
		  || (ACCUMULATE_OUTGOING_ARGS
		      && stack_arg_under_construction
		      && structure_value_addr == virtual_outgoing_args_rtx)
		  ? copy_addr_to_reg (convert_memory_address
				      (Pmode, structure_value_addr))
		  : structure_value_addr);

      structure_value_addr_value =
	make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
      structure_value_addr_parm = 1;
    }

  /* Count the arguments and set NUM_ACTUALS.  */
  num_actuals
    = call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
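
  /* For instance (illustrative): a two-argument call whose second
     argument is a complex double split by the target, returning a
     struct whose address is passed as a hidden parameter, gives
     num_actuals == 2 + 1 + 1.  */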

  /* Compute number of named args.
     First, do a raw count of the args for INIT_CUMULATIVE_ARGS.  */

  if (type_arg_types != 0)
    n_named_args
      = (list_length (type_arg_types)
	 /* Count the struct value address, if it is passed as a parm.  */
	 + structure_value_addr_parm);
  else if (TYPE_NO_NAMED_ARGS_STDARG_P (funtype))
    n_named_args = 0;
  else
    /* If we know nothing, treat all args as named.  */
    n_named_args = num_actuals;

  /* Start updating where the next arg would go.

     On some machines (such as the PA) indirect calls have a different
     calling convention than normal calls.  The fourth argument in
     INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
     or not.  */
  INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
  args_so_far = pack_cumulative_args (&args_so_far_v);

  /* Now possibly adjust the number of named args.
     Normally, don't include the last named arg if anonymous args follow.
     We do include the last named arg if
     targetm.calls.strict_argument_naming() returns nonzero.
     (If no anonymous args follow, the result of list_length is actually
     one too large.  This is harmless.)

     If targetm.calls.pretend_outgoing_varargs_named() returns
     nonzero, and targetm.calls.strict_argument_naming() returns zero,
     this machine will be able to place unnamed args that were passed
     in registers into the stack.  So treat all args as named.  This
     allows the insns emitting for a specific argument list to be
     independent of the function declaration.

     If targetm.calls.pretend_outgoing_varargs_named() returns zero,
     we do not have any reliable way to pass unnamed args in
     registers, so we must force them into memory.  */

  if (type_arg_types != 0
      && targetm.calls.strict_argument_naming (args_so_far))
    ;
  else if (type_arg_types != 0
	   && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
    /* Don't include the last named arg.  */
    --n_named_args;
  else if (TYPE_NO_NAMED_ARGS_STDARG_P (funtype))
    n_named_args = 0;
  else
    /* Treat all args as named.  */
    n_named_args = num_actuals;

  /* Make a vector to hold all the information about each arg.  */
  args = XCNEWVEC (struct arg_data, num_actuals);

  /* Build up entries in the ARGS array, compute the size of the
     arguments into ARGS_SIZE, etc.  */
  initialize_argument_information (num_actuals, args, &args_size,
				   n_named_args, exp,
				   structure_value_addr_value, fndecl, fntype,
				   args_so_far, reg_parm_stack_space,
				   &old_stack_level, &old_pending_adj,
				   &must_preallocate, &flags,
				   &try_tail_call, CALL_FROM_THUNK_P (exp));

  if (args_size.var)
    must_preallocate = true;

  /* Now make final decision about preallocating stack space.  */
  must_preallocate = finalize_must_preallocate (must_preallocate,
						num_actuals, args,
						&args_size);

  /* If the structure value address will reference the stack pointer, we
     must stabilize it.  We don't need to do this if we know that we are
     not going to adjust the stack pointer in processing this call.  */

  if (structure_value_addr
      && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
	  || reg_mentioned_p (virtual_outgoing_args_rtx,
			      structure_value_addr))
      && (args_size.var
	  || (!ACCUMULATE_OUTGOING_ARGS
	      && maybe_ne (args_size.constant, 0))))
    structure_value_addr = copy_to_reg (structure_value_addr);

  /* Tail calls can make things harder to debug, and we've traditionally
     pushed these optimizations into -O2.  Don't try if we're already
     expanding a call, as that means we're an argument.  Don't try if
     there's cleanups, as we know there's code to follow the call.  */
  if (currently_expanding_call++ != 0
      || (!flag_optimize_sibling_calls && !CALL_FROM_THUNK_P (exp))
      || args_size.var
      || dbg_cnt (tail_call) == false)
    try_tail_call = 0;

  /* Workaround buggy C/C++ wrappers around Fortran routines with
     character(len=constant) arguments if the hidden string length arguments
     are passed on the stack; if the callers forget to pass those arguments,
     attempting to tail call in such routines leads to stack corruption.
     Avoid tail calls in functions where at least one such hidden string
     length argument is passed (partially or fully) on the stack in the
     caller and the callee needs to pass any arguments on the stack.
     See PR90329.  */
  if (try_tail_call && maybe_ne (args_size.constant, 0))
    for (tree arg = DECL_ARGUMENTS (current_function_decl);
	 arg; arg = DECL_CHAIN (arg))
      if (DECL_HIDDEN_STRING_LENGTH (arg) && DECL_INCOMING_RTL (arg))
	{
	  subrtx_iterator::array_type array;
	  FOR_EACH_SUBRTX (iter, array, DECL_INCOMING_RTL (arg), NONCONST)
	    if (MEM_P (*iter))
	      {
		try_tail_call = 0;
		break;
	      }
	}

  /* If the user has marked the function as requiring tail-call
     optimization, attempt it.  */
  if (must_tail_call)
    try_tail_call = 1;

  /* Remaining reasons for tail call optimization to fail.  */
  if (try_tail_call)
    try_tail_call = can_implement_as_sibling_call_p (exp,
						     structure_value_addr,
						     funtype,
						     fndecl,
						     flags, addr, args_size);

  /* Check if caller and callee disagree in promotion of function
     return value.  */
  if (try_tail_call)
    {
      machine_mode caller_mode, caller_promoted_mode;
      machine_mode callee_mode, callee_promoted_mode;
      int caller_unsignedp, callee_unsignedp;
      tree caller_res = DECL_RESULT (current_function_decl);

      caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
      caller_mode = DECL_MODE (caller_res);
      callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
      callee_mode = TYPE_MODE (TREE_TYPE (funtype));
      caller_promoted_mode
	= promote_function_mode (TREE_TYPE (caller_res), caller_mode,
				 &caller_unsignedp,
				 TREE_TYPE (current_function_decl), 1);
      callee_promoted_mode
	= promote_function_mode (TREE_TYPE (funtype), callee_mode,
				 &callee_unsignedp,
				 funtype, 1);
      if (caller_mode != VOIDmode
	  && (caller_promoted_mode != callee_promoted_mode
	      || ((caller_mode != caller_promoted_mode
		   || callee_mode != callee_promoted_mode)
		  && (caller_unsignedp != callee_unsignedp
		      || partial_subreg_p (caller_mode, callee_mode)))))
	{
	  try_tail_call = 0;
	  maybe_complain_about_tail_call (exp,
					  "caller and callee disagree in"
					  " promotion of function"
					  " return value");
	}
    }

  /* Ensure current function's preferred stack boundary is at least
     what we need.  Stack alignment may also increase preferred stack
     boundary.  */
  for (i = 0; i < num_actuals; i++)
    if (reg_parm_stack_space > 0
	|| args[i].reg == 0
	|| args[i].partial != 0
	|| args[i].pass_on_stack)
      update_stack_alignment_for_call (&args[i].locate);
  if (crtl->preferred_stack_boundary < preferred_stack_boundary)
    crtl->preferred_stack_boundary = preferred_stack_boundary;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;

  preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;

  if (flag_callgraph_info)
    record_final_call (fndecl, EXPR_LOCATION (exp));

  /* We want to make two insn chains; one for a sibling call, the other
     for a normal call.  We will select one of the two chains after
     initial RTL generation is complete.  */
  for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
    {
      bool sibcall_failure = false;
      bool normal_failure = false;
      /* We want to emit any pending stack adjustments before the tail
	 recursion "call".  That way we know any adjustment after the tail
	 recursion call can be ignored if we indeed use the tail
	 call expansion.  */
      saved_pending_stack_adjust save;
      rtx_insn *insns, *before_call, *after_args;
      rtx next_arg_reg;

      if (pass == 0)
	{
	  /* State variables we need to save and restore between
	     iterations.  */
	  save_pending_stack_adjust (&save);
	}
      if (pass)
	flags &= ~ECF_SIBCALL;
      else
	flags |= ECF_SIBCALL;

      /* Other state variables that we must reinitialize each time
	 through the loop (that are not initialized by the loop itself).  */
      argblock = 0;
      call_fusage = 0;

      /* Start a new sequence for the normal call case.

	 From this point on, if the sibling call fails, we want to set
	 sibcall_failure instead of continuing the loop.  */
      start_sequence ();

      /* Don't let pending stack adjusts add up to too much.
	 Also, do all pending adjustments now if there is any chance
	 this might be a call to alloca or if we are expanding a sibling
	 call sequence.
	 Also do the adjustments before a throwing call, otherwise
	 exception handling can fail; PR 19225.  */
      if (maybe_ge (pending_stack_adjust, 32)
	  || (maybe_ne (pending_stack_adjust, 0)
	      && (flags & ECF_MAY_BE_ALLOCA))
	  || (maybe_ne (pending_stack_adjust, 0)
	      && flag_exceptions && !(flags & ECF_NOTHROW))
	  || pass == 0)
	do_pending_stack_adjust ();

      /* Precompute any arguments as needed.  */
      if (pass)
	precompute_arguments (num_actuals, args);

      /* Now we are about to start emitting insns that can be deleted
	 if a libcall is deleted.  */
      if (pass && (flags & ECF_MALLOC))
	start_sequence ();

      /* Check the canary value for sibcall or function which doesn't
	 return and could throw.  */
      if ((pass == 0
	   || ((flags & ECF_NORETURN) != 0 && tree_could_throw_p (exp)))
	  && crtl->stack_protect_guard
	  && targetm.stack_protect_runtime_enabled_p ())
	stack_protect_epilogue ();

      adjusted_args_size = args_size;
      /* Compute the actual size of the argument block required.  The variable
	 and constant sizes must be combined, the size may have to be rounded,
	 and there may be a minimum required size.  When generating a sibcall
	 pattern, do not round up, since we'll be re-using whatever space our
	 caller provided.  */
      unadjusted_args_size
	= compute_argument_block_size (reg_parm_stack_space,
				       &adjusted_args_size,
				       fndecl, fntype,
				       (pass == 0 ? 0
					: preferred_stack_boundary));

      old_stack_allocated = stack_pointer_delta - pending_stack_adjust;

      /* The argument block when performing a sibling call is the
	 incoming argument block.  */
      if (pass == 0)
	{
	  argblock = crtl->args.internal_arg_pointer;
	  if (STACK_GROWS_DOWNWARD)
	    argblock
	      = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
	  else
	    argblock
	      = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);

	  HOST_WIDE_INT map_size = constant_lower_bound (args_size.constant);
	  stored_args_map = sbitmap_alloc (map_size);
	  bitmap_clear (stored_args_map);
	  stored_args_watermark = HOST_WIDE_INT_M1U;
	}
3219
3220 /* If we have no actual push instructions, or shouldn't use them,
3221 make space for all args right now. */
3222 else if (adjusted_args_size.var != 0)
3223 {
3224 if (old_stack_level == 0)
3225 {
3226 emit_stack_save (SAVE_BLOCK, &old_stack_level);
3227 old_stack_pointer_delta = stack_pointer_delta;
3228 old_pending_adj = pending_stack_adjust;
3229 pending_stack_adjust = 0;
3230 /* stack_arg_under_construction says whether a stack arg is
3231 being constructed at the old stack level. Pushing the stack
3232 gets a clean outgoing argument block. */
3233 old_stack_arg_under_construction = stack_arg_under_construction;
3234 stack_arg_under_construction = 0;
3235 }
3236 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
3237 if (flag_stack_usage_info)
3238 current_function_has_unbounded_dynamic_stack_size = 1;
3239 }
3240 else
3241 {
3242 /* Note that we must go through the motions of allocating an argument
3243 block even if the size is zero because we may be storing args
3244 in the area reserved for register arguments, which may be part of
3245 the stack frame. */
3246
3247 poly_int64 needed = adjusted_args_size.constant;
3248
3249 /* Store the maximum argument space used. It will be pushed by
3250 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
3251 checking). */
3252
	  crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
						  needed);
3255
3256 if (must_preallocate)
3257 {
3258 if (ACCUMULATE_OUTGOING_ARGS)
3259 {
3260 /* Since the stack pointer will never be pushed, it is
3261 possible for the evaluation of a parm to clobber
3262 something we have already written to the stack.
3263 Since most function calls on RISC machines do not use
3264 the stack, this is uncommon, but must work correctly.
3265
3266 Therefore, we save any area of the stack that was already
3267 written and that we are using. Here we set up to do this
3268 by making a new stack usage map from the old one. The
3269 actual save will be done by store_one_arg.
3270
3271 Another approach might be to try to reorder the argument
3272 evaluations to avoid this conflicting stack usage. */
3273
3274 /* Since we will be writing into the entire argument area,
3275 the map must be allocated for its entire size, not just
3276 the part that is the responsibility of the caller. */
3277 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
3278 needed += reg_parm_stack_space;
3279
3280 poly_int64 limit = needed;
3281 if (ARGS_GROW_DOWNWARD)
3282 limit += 1;
3283
3284 /* For polynomial sizes, this is the maximum possible
3285 size needed for arguments with a constant size
3286 and offset. */
	      HOST_WIDE_INT const_limit = constant_lower_bound (limit);
3288 highest_outgoing_arg_in_use
3289 = MAX (initial_highest_arg_in_use, const_limit);
3290
	      free (stack_usage_map_buf);
3292 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
3293 stack_usage_map = stack_usage_map_buf;
3294
3295 if (initial_highest_arg_in_use)
		memcpy (stack_usage_map, initial_stack_usage_map,
			initial_highest_arg_in_use);
3298
3299 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
		memset (&stack_usage_map[initial_highest_arg_in_use], 0,
			(highest_outgoing_arg_in_use
			 - initial_highest_arg_in_use));
3303 needed = 0;
3304
3305 /* The address of the outgoing argument list must not be
3306 copied to a register here, because argblock would be left
3307 pointing to the wrong place after the call to
3308 allocate_dynamic_stack_space below. */
3309
3310 argblock = virtual_outgoing_args_rtx;
3311 }
3312 else
3313 {
3314 /* Try to reuse some or all of the pending_stack_adjust
3315 to get this space. */
	      if (inhibit_defer_pop == 0
		  && (combine_pending_stack_adjustment_and_call
		      (&needed,
		       unadjusted_args_size,
		       &adjusted_args_size,
		       preferred_unit_stack_boundary)))
3322 {
3323 /* combine_pending_stack_adjustment_and_call computes
3324 an adjustment before the arguments are allocated.
3325 Account for them and see whether or not the stack
3326 needs to go up or down. */
3327 needed = unadjusted_args_size - needed;
3328
3329 /* Checked by
3330 combine_pending_stack_adjustment_and_call. */
3331 gcc_checking_assert (ordered_p (needed, 0));
		  if (maybe_lt (needed, 0))
3333 {
3334 /* We're releasing stack space. */
3335 /* ??? We can avoid any adjustment at all if we're
3336 already aligned. FIXME. */
3337 pending_stack_adjust = -needed;
3338 do_pending_stack_adjust ();
3339 needed = 0;
3340 }
3341 else
3342 /* We need to allocate space. We'll do that in
3343 push_block below. */
3344 pending_stack_adjust = 0;
3345 }
3346
	      /* Special-case NEEDED == 0, because the overhead of
		 `push_block' is non-trivial.  */
3349 if (known_eq (needed, 0))
3350 argblock = virtual_outgoing_args_rtx;
3351 else
3352 {
3353 rtx needed_rtx = gen_int_mode (needed, Pmode);
3354 argblock = push_block (needed_rtx, 0, 0);
3355 if (ARGS_GROW_DOWNWARD)
3356 argblock = plus_constant (Pmode, argblock, needed);
3357 }
3358
3359 /* We only really need to call `copy_to_reg' in the case
3360 where push insns are going to be used to pass ARGBLOCK
3361 to a function call in ARGS. In that case, the stack
3362 pointer changes value from the allocation point to the
3363 call point, and hence the value of
3364 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
3365 as well always do it. */
3366 argblock = copy_to_reg (argblock);
3367 }
3368 }
3369 }
3370
3371 if (ACCUMULATE_OUTGOING_ARGS)
3372 {
3373 /* The save/restore code in store_one_arg handles all
3374 cases except one: a constructor call (including a C
3375 function returning a BLKmode struct) to initialize
3376 an argument. */
3377 if (stack_arg_under_construction)
3378 {
3379 rtx push_size
3380 = (gen_int_mode
3381 (adjusted_args_size.constant
3382 + (OUTGOING_REG_PARM_STACK_SPACE (!fndecl ? fntype
3383 : TREE_TYPE (fndecl))
3384 ? 0 : reg_parm_stack_space), Pmode));
3385 if (old_stack_level == 0)
3386 {
3387 emit_stack_save (SAVE_BLOCK, &old_stack_level);
3388 old_stack_pointer_delta = stack_pointer_delta;
3389 old_pending_adj = pending_stack_adjust;
3390 pending_stack_adjust = 0;
3391 /* stack_arg_under_construction says whether a stack
3392 arg is being constructed at the old stack level.
3393 Pushing the stack gets a clean outgoing argument
3394 block. */
3395 old_stack_arg_under_construction
3396 = stack_arg_under_construction;
3397 stack_arg_under_construction = 0;
3398 /* Make a new map for the new argument list. */
		  free (stack_usage_map_buf);
3400 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
3401 stack_usage_map = stack_usage_map_buf;
3402 highest_outgoing_arg_in_use = 0;
3403 stack_usage_watermark = HOST_WIDE_INT_M1U;
3404 }
3405 /* We can pass TRUE as the 4th argument because we just
3406 saved the stack pointer and will restore it right after
3407 the call. */
3408 allocate_dynamic_stack_space (push_size, 0, BIGGEST_ALIGNMENT,
3409 -1, true);
3410 }
3411
3412 /* If argument evaluation might modify the stack pointer,
3413 copy the address of the argument list to a register. */
3414 for (i = 0; i < num_actuals; i++)
3415 if (args[i].pass_on_stack)
3416 {
3417 argblock = copy_addr_to_reg (argblock);
3418 break;
3419 }
3420 }
3421
3422 compute_argument_addresses (args, argblock, num_actuals);
3423
3424 /* Stack is properly aligned, pops can't safely be deferred during
3425 the evaluation of the arguments. */
3426 NO_DEFER_POP;
3427
3428 /* Precompute all register parameters. It isn't safe to compute
3429 anything once we have started filling any specific hard regs.
3430 TLS symbols sometimes need a call to resolve. Precompute
3431 register parameters before any stack pointer manipulation
3432 to avoid unaligned stack in the called function. */
      precompute_register_parameters (num_actuals, args, &reg_parm_seen);
3434
3435 OK_DEFER_POP;
3436
3437 /* Perform stack alignment before the first push (the last arg). */
3438 if (argblock == 0
3439 && maybe_gt (adjusted_args_size.constant, reg_parm_stack_space)
	  && maybe_ne (adjusted_args_size.constant, unadjusted_args_size))
3441 {
3442 /* When the stack adjustment is pending, we get better code
3443 by combining the adjustments. */
	  if (maybe_ne (pending_stack_adjust, 0)
	      && ! inhibit_defer_pop
	      && (combine_pending_stack_adjustment_and_call
		  (&pending_stack_adjust,
		   unadjusted_args_size,
		   &adjusted_args_size,
		   preferred_unit_stack_boundary)))
3451 do_pending_stack_adjust ();
3452 else if (argblock == 0)
3453 anti_adjust_stack (gen_int_mode (adjusted_args_size.constant
3454 - unadjusted_args_size,
3455 Pmode));
3456 }
3457 /* Now that the stack is properly aligned, pops can't safely
3458 be deferred during the evaluation of the arguments. */
3459 NO_DEFER_POP;
3460
3461 /* Record the maximum pushed stack space size. We need to delay
3462 doing it this far to take into account the optimization done
3463 by combine_pending_stack_adjustment_and_call. */
3464 if (flag_stack_usage_info
3465 && !ACCUMULATE_OUTGOING_ARGS
3466 && pass
3467 && adjusted_args_size.var == 0)
3468 {
3469 poly_int64 pushed = (adjusted_args_size.constant
3470 + pending_stack_adjust);
	  current_function_pushed_stack_size
	    = upper_bound (current_function_pushed_stack_size, pushed);
3473 }
3474
3475 funexp = rtx_for_function_call (fndecl, addr);
3476
3477 if (CALL_EXPR_STATIC_CHAIN (exp))
3478 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
3479 else
3480 static_chain_value = 0;
3481
3482#ifdef REG_PARM_STACK_SPACE
3483 /* Save the fixed argument area if it's part of the caller's frame and
3484 is clobbered by argument setup for this call. */
3485 if (ACCUMULATE_OUTGOING_ARGS && pass)
	save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
					      &low_to_save, &high_to_save);
3488#endif
3489
3490 /* Now store (and compute if necessary) all non-register parms.
3491 These come before register parms, since they can require block-moves,
3492 which could clobber the registers used for register parms.
3493 Parms which have partial registers are not stored here,
3494 but we do preallocate space here if they want that. */
3495
3496 for (i = 0; i < num_actuals; i++)
3497 {
3498 if (args[i].reg == 0 || args[i].pass_on_stack)
3499 {
3500 rtx_insn *before_arg = get_last_insn ();
3501
3502 /* We don't allow passing huge (> 2^30 B) arguments
3503 by value. It would cause an overflow later on. */
	    if (constant_lower_bound (adjusted_args_size.constant)
3505 >= (1 << (HOST_BITS_PER_INT - 2)))
3506 {
3507 sorry ("passing too large argument on stack");
3508 /* Don't worry about stack clean-up. */
3509 if (pass == 0)
3510 sibcall_failure = true;
3511 else
3512 normal_failure = true;
3513 continue;
3514 }
3515
3516 if (store_one_arg (&args[i], argblock, flags,
3517 adjusted_args_size.var != 0,
3518 reg_parm_stack_space)
		|| (pass == 0
		    && check_sibcall_argument_overlap (before_arg,
						       &args[i], true)))
3522 sibcall_failure = true;
3523 }
3524
3525 if (args[i].stack)
3526 call_fusage
3527 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
3528 gen_rtx_USE (VOIDmode, args[i].stack),
3529 call_fusage);
3530 }
3531
3532 /* If we have a parm that is passed in registers but not in memory
3533 and whose alignment does not permit a direct copy into registers,
3534 make a group of pseudos that correspond to each register that we
3535 will later fill. */
3536 if (STRICT_ALIGNMENT)
3537 store_unaligned_arguments_into_pseudos (args, num_actuals);
3538
3539 /* Now store any partially-in-registers parm.
3540 This is the last place a block-move can happen. */
3541 if (reg_parm_seen)
3542 for (i = 0; i < num_actuals; i++)
3543 if (args[i].partial != 0 && ! args[i].pass_on_stack)
3544 {
3545 rtx_insn *before_arg = get_last_insn ();
3546
3547 /* On targets with weird calling conventions (e.g. PA) it's
3548 hard to ensure that all cases of argument overlap between
3549 stack and registers work. Play it safe and bail out. */
3550 if (ARGS_GROW_DOWNWARD && !STACK_GROWS_DOWNWARD)
3551 {
3552 sibcall_failure = true;
3553 break;
3554 }
3555
3556 if (store_one_arg (&args[i], argblock, flags,
3557 adjusted_args_size.var != 0,
3558 reg_parm_stack_space)
	      || (pass == 0
		  && check_sibcall_argument_overlap (before_arg,
						     &args[i], true)))
3562 sibcall_failure = true;
3563 }
3564
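      /* Tell the target which registers will carry arguments (or that
	 none do) before any of them are loaded.  */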
3565 bool any_regs = false;
3566 for (i = 0; i < num_actuals; i++)
3567 if (args[i].reg != NULL_RTX)
3568 {
3569 any_regs = true;
3570 targetm.calls.call_args (args[i].reg, funtype);
3571 }
3572 if (!any_regs)
3573 targetm.calls.call_args (pc_rtx, funtype);
3574
3575 /* Figure out the register where the value, if any, will come back. */
3576 valreg = 0;
3577 if (TYPE_MODE (rettype) != VOIDmode
3578 && ! structure_value_addr)
3579 {
3580 if (pcc_struct_value)
3581 valreg = hard_function_value (build_pointer_type (rettype),
3582 fndecl, NULL, (pass == 0));
3583 else
3584 valreg = hard_function_value (rettype, fndecl, fntype,
3585 (pass == 0));
3586
3587 /* If VALREG is a PARALLEL whose first member has a zero
3588 offset, use that. This is for targets such as m68k that
3589 return the same value in multiple places. */
3590 if (GET_CODE (valreg) == PARALLEL)
3591 {
3592 rtx elem = XVECEXP (valreg, 0, 0);
3593 rtx where = XEXP (elem, 0);
3594 rtx offset = XEXP (elem, 1);
3595 if (offset == const0_rtx
3596 && GET_MODE (where) == GET_MODE (valreg))
3597 valreg = where;
3598 }
3599 }
3600
3601 /* If register arguments require space on the stack and stack space
3602 was not preallocated, allocate stack space here for arguments
3603 passed in registers. */
3604 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
3605 && !ACCUMULATE_OUTGOING_ARGS
3606 && !must_preallocate && reg_parm_stack_space > 0)
3607 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
3608
3609 /* Pass the function the address in which to return a
3610 structure value. */
3611 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
3612 {
3613 structure_value_addr
3614 = convert_memory_address (Pmode, structure_value_addr);
3615 emit_move_insn (struct_value,
3616 force_reg (Pmode,
3617 force_operand (structure_value_addr,
3618 NULL_RTX)));
3619
3620 if (REG_P (struct_value))
	    use_reg (&call_fusage, struct_value);
3622 }
3623
3624 after_args = get_last_insn ();
      funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp,
				     static_chain_value, &call_fusage,
				     reg_parm_seen, flags);
3628
      load_register_parameters (args, num_actuals, &call_fusage, flags,
				pass == 0, &sibcall_failure);
3631
3632 /* Save a pointer to the last insn before the call, so that we can
3633 later safely search backwards to find the CALL_INSN. */
3634 before_call = get_last_insn ();
3635
3636 /* Set up next argument register. For sibling calls on machines
3637 with register windows this should be the incoming register. */
3638 if (pass == 0)
3639 next_arg_reg = targetm.calls.function_incoming_arg
3640 (args_so_far, function_arg_info::end_marker ());
3641 else
3642 next_arg_reg = targetm.calls.function_arg
3643 (args_so_far, function_arg_info::end_marker ());
3644
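      /* For a function that returns one of its arguments (such as memcpy
	 returning its destination), record in CALL_FUSAGE that VALREG is
	 a copy of the corresponding argument register, so later passes
	 can exploit the equivalence.  */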
3645 if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
3646 {
3647 int arg_nr = return_flags & ERF_RETURN_ARG_MASK;
3648 arg_nr = num_actuals - arg_nr - 1;
3649 if (arg_nr >= 0
3650 && arg_nr < num_actuals
3651 && args[arg_nr].reg
3652 && valreg
3653 && REG_P (valreg)
3654 && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg))
3655 call_fusage
3656 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)),
3657 gen_rtx_SET (valreg, args[arg_nr].reg),
3658 call_fusage);
3659 }
3660 /* All arguments and registers used for the call must be set up by
3661 now! */
3662
3663 /* Stack must be properly aligned now. */
3664 gcc_assert (!pass
3665 || multiple_p (stack_pointer_delta,
3666 preferred_unit_stack_boundary));
3667
3668 /* Generate the actual call instruction. */
      emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
		   adjusted_args_size.constant, struct_value_size,
		   next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
		   flags, args_so_far);
3673
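      /* For -fipa-ra, note which decl this call resolves to, so that
	 register allocation can use the callee's actual clobbers.  */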
3674 if (flag_ipa_ra)
3675 {
3676 rtx_call_insn *last;
3677 rtx datum = NULL_RTX;
3678 if (fndecl != NULL_TREE)
3679 {
3680 datum = XEXP (DECL_RTL (fndecl), 0);
3681 gcc_assert (datum != NULL_RTX
3682 && GET_CODE (datum) == SYMBOL_REF);
3683 }
3684 last = last_call_insn ();
3685 add_reg_note (last, REG_CALL_DECL, datum);
3686 }
3687
3688 /* If the call setup or the call itself overlaps with anything
3689 of the argument setup we probably clobbered our call address.
3690 In that case we can't do sibcalls. */
3691 if (pass == 0
	  && check_sibcall_argument_overlap (after_args, 0, false))
3693 sibcall_failure = true;
3694
3695 /* If a non-BLKmode value is returned at the most significant end
3696 of a register, shift the register right by the appropriate amount
3697 and update VALREG accordingly. BLKmode values are handled by the
3698 group load/store machinery below. */
3699 if (!structure_value_addr
3700 && !pcc_struct_value
3701 && TYPE_MODE (rettype) != VOIDmode
3702 && TYPE_MODE (rettype) != BLKmode
3703 && REG_P (valreg)
3704 && targetm.calls.return_in_msb (rettype))
3705 {
	  if (shift_return_value (TYPE_MODE (rettype), false, valreg))
3707 sibcall_failure = true;
3708 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
3709 }
3710
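      /* Finish the sequence started above for a malloc-like function:
	 copy the result to a pseudo, mark it as pointing to fresh
	 storage, and emit the whole sequence.  */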
3711 if (pass && (flags & ECF_MALLOC))
3712 {
3713 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3714 rtx_insn *last, *insns;
3715
3716 /* The return value from a malloc-like function is a pointer. */
3717 if (TREE_CODE (rettype) == POINTER_TYPE)
3718 mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT);
3719
3720 emit_move_insn (temp, valreg);
3721
3722 /* The return value from a malloc-like function cannot alias
3723 anything else. */
3724 last = get_last_insn ();
3725 add_reg_note (last, REG_NOALIAS, temp);
3726
3727 /* Write out the sequence. */
3728 insns = get_insns ();
3729 end_sequence ();
3730 emit_insn (insns);
3731 valreg = temp;
3732 }
3733
3734 /* For calls to `setjmp', etc., inform
3735 function.cc:setjmp_warnings that it should complain if
3736 nonvolatile values are live. For functions that cannot
3737 return, inform flow that control does not fall through. */
3738
3739 if ((flags & ECF_NORETURN) || pass == 0)
3740 {
3741 /* The barrier must be emitted
3742 immediately after the CALL_INSN. Some ports emit more
3743 than just a CALL_INSN above, so we must search for it here. */
3744
3745 rtx_insn *last = get_last_insn ();
3746 while (!CALL_P (last))
3747 {
	      last = PREV_INSN (last);
3749 /* There was no CALL_INSN? */
3750 gcc_assert (last != before_call);
3751 }
3752
3753 emit_barrier_after (last);
3754
3755 /* Stack adjustments after a noreturn call are dead code.
3756 However when NO_DEFER_POP is in effect, we must preserve
3757 stack_pointer_delta. */
3758 if (inhibit_defer_pop == 0)
3759 {
3760 stack_pointer_delta = old_stack_allocated;
3761 pending_stack_adjust = 0;
3762 }
3763 }
3764
3765 /* If value type not void, return an rtx for the value. */
3766
3767 if (TYPE_MODE (rettype) == VOIDmode
3768 || ignore)
3769 target = const0_rtx;
3770 else if (structure_value_addr)
3771 {
3772 if (target == 0 || !MEM_P (target))
3773 {
3774 target
3775 = gen_rtx_MEM (TYPE_MODE (rettype),
3776 memory_address (TYPE_MODE (rettype),
3777 structure_value_addr));
3778 set_mem_attributes (target, rettype, 1);
3779 }
3780 }
3781 else if (pcc_struct_value)
3782 {
3783 /* This is the special C++ case where we need to
3784 know what the true target was. We take care to
3785 never use this value more than once in one expression. */
3786 target = gen_rtx_MEM (TYPE_MODE (rettype),
3787 copy_to_reg (valreg));
3788 set_mem_attributes (target, rettype, 1);
3789 }
3790 /* Handle calls that return values in multiple non-contiguous locations.
3791 The Irix 6 ABI has examples of this. */
3792 else if (GET_CODE (valreg) == PARALLEL)
3793 {
3794 if (target == 0)
3795 target = emit_group_move_into_temps (valreg);
3796 else if (rtx_equal_p (target, valreg))
3797 ;
3798 else if (GET_CODE (target) == PARALLEL)
	    /* Handle the result of an emit_group_move_into_temps
	       call in the previous pass.  */
3801 emit_group_move (target, valreg);
3802 else
3803 emit_group_store (target, valreg, rettype,
3804 int_size_in_bytes (rettype));
3805 }
3806 else if (target
3807 && GET_MODE (target) == TYPE_MODE (rettype)
3808 && GET_MODE (target) == GET_MODE (valreg))
3809 {
3810 bool may_overlap = false;
3811
3812 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
3813 reg to a plain register. */
3814 if (!REG_P (target) || HARD_REGISTER_P (target))
	    valreg = avoid_likely_spilled_reg (valreg);
3816
3817 /* If TARGET is a MEM in the argument area, and we have
3818 saved part of the argument area, then we can't store
3819 directly into TARGET as it may get overwritten when we
3820 restore the argument save area below. Don't work too
3821 hard though and simply force TARGET to a register if it
3822 is a MEM; the optimizer is quite likely to sort it out. */
3823 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
3824 for (i = 0; i < num_actuals; i++)
3825 if (args[i].save_area)
3826 {
3827 may_overlap = true;
3828 break;
3829 }
3830
3831 if (may_overlap)
3832 target = copy_to_reg (valreg);
3833 else
3834 {
3835 /* TARGET and VALREG cannot be equal at this point
3836 because the latter would not have
3837 REG_FUNCTION_VALUE_P true, while the former would if
3838 it were referring to the same register.
3839
3840 If they refer to the same register, this move will be
3841 a no-op, except when function inlining is being
3842 done. */
3843 emit_move_insn (target, valreg);
3844
3845 /* If we are setting a MEM, this code must be executed.
3846 Since it is emitted after the call insn, sibcall
3847 optimization cannot be performed in that case. */
3848 if (MEM_P (target))
3849 sibcall_failure = true;
3850 }
3851 }
3852 else
	target = copy_to_reg (avoid_likely_spilled_reg (valreg));
3854
3855 /* If we promoted this return value, make the proper SUBREG.
3856 TARGET might be const0_rtx here, so be careful. */
3857 if (REG_P (target)
3858 && TYPE_MODE (rettype) != BLKmode
3859 && GET_MODE (target) != TYPE_MODE (rettype))
3860 {
3861 tree type = rettype;
3862 int unsignedp = TYPE_UNSIGNED (type);
3863 machine_mode ret_mode = TYPE_MODE (type);
3864 machine_mode pmode;
3865
3866 /* Ensure we promote as expected, and get the new unsignedness. */
3867 pmode = promote_function_mode (type, ret_mode, &unsignedp,
3868 funtype, 1);
3869 gcc_assert (GET_MODE (target) == pmode);
3870
3871 if (SCALAR_INT_MODE_P (pmode)
3872 && SCALAR_FLOAT_MODE_P (ret_mode)
3873 && known_gt (GET_MODE_SIZE (pmode), GET_MODE_SIZE (ret_mode)))
	    target = convert_wider_int_to_float (ret_mode, pmode, target);
3875 else
3876 {
3877 target = gen_lowpart_SUBREG (ret_mode, target);
3878 SUBREG_PROMOTED_VAR_P (target) = 1;
3879 SUBREG_PROMOTED_SET (target, unsignedp);
3880 }
3881 }
3882
3883 /* If size of args is variable or this was a constructor call for a stack
3884 argument, restore saved stack-pointer value. */
3885
3886 if (old_stack_level)
3887 {
3888 rtx_insn *prev = get_last_insn ();
3889
3890 emit_stack_restore (SAVE_BLOCK, old_stack_level);
3891 stack_pointer_delta = old_stack_pointer_delta;
3892
3893 fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
3894
3895 pending_stack_adjust = old_pending_adj;
3896 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
3897 stack_arg_under_construction = old_stack_arg_under_construction;
3898 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3899 stack_usage_map = initial_stack_usage_map;
3900 stack_usage_watermark = initial_stack_usage_watermark;
3901 sibcall_failure = true;
3902 }
3903 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3904 {
3905#ifdef REG_PARM_STACK_SPACE
3906 if (save_area)
3907 restore_fixed_argument_area (save_area, argblock,
3908 high_to_save, low_to_save);
3909#endif
3910
3911 /* If we saved any argument areas, restore them. */
3912 for (i = 0; i < num_actuals; i++)
3913 if (args[i].save_area)
3914 {
3915 machine_mode save_mode = GET_MODE (args[i].save_area);
3916 rtx stack_area
3917 = gen_rtx_MEM (save_mode,
3918 memory_address (save_mode,
3919 XEXP (args[i].stack_slot, 0)));
3920
3921 if (save_mode != BLKmode)
3922 emit_move_insn (stack_area, args[i].save_area);
3923 else
3924 emit_block_move (stack_area, args[i].save_area,
3925 (gen_int_mode
3926 (args[i].locate.size.constant, Pmode)),
3927 BLOCK_OP_CALL_PARM);
3928 }
3929
3930 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3931 stack_usage_map = initial_stack_usage_map;
3932 stack_usage_watermark = initial_stack_usage_watermark;
3933 }
3934
3935 /* If this was alloca, record the new stack level. */
3936 if (flags & ECF_MAY_BE_ALLOCA)
3937 record_new_stack_level ();
3938
3939 /* Free up storage we no longer need. */
3940 for (i = 0; i < num_actuals; ++i)
	free (args[i].aligned_regs);
3942
3943 targetm.calls.end_call_args ();
3944
3945 insns = get_insns ();
3946 end_sequence ();
3947
3948 if (pass == 0)
3949 {
3950 tail_call_insns = insns;
3951
3952 /* Restore the pending stack adjustment now that we have
3953 finished generating the sibling call sequence. */
3954
3955 restore_pending_stack_adjust (&save);
3956
3957 /* Prepare arg structure for next iteration. */
3958 for (i = 0; i < num_actuals; i++)
3959 {
3960 args[i].value = 0;
3961 args[i].aligned_regs = 0;
3962 args[i].stack = 0;
3963 }
3964
	  sbitmap_free (stored_args_map);
3966 internal_arg_pointer_exp_state.scan_start = NULL;
3967 internal_arg_pointer_exp_state.cache.release ();
3968 }
3969 else
3970 {
3971 normal_call_insns = insns;
3972
3973 /* Verify that we've deallocated all the stack we used. */
3974 gcc_assert ((flags & ECF_NORETURN)
3975 || normal_failure
3976 || known_eq (old_stack_allocated,
3977 stack_pointer_delta
3978 - pending_stack_adjust));
3979 if (normal_failure)
3980 normal_call_insns = NULL;
3981 }
3982
3983 /* If something prevents making this a sibling call,
3984 zero out the sequence. */
3985 if (sibcall_failure)
3986 tail_call_insns = NULL;
3987 else
3988 break;
3989 }
3990
  /* If tail call production succeeded, we need to remove REG_EQUIV notes
     on the arguments too, as the argument area is now clobbered by the
     call (see fixup_tail_calls).  */
3993 if (tail_call_insns)
3994 {
3995 emit_insn (tail_call_insns);
3996 crtl->tail_call_emit = true;
3997 }
3998 else
3999 {
4000 emit_insn (normal_call_insns);
4001 if (try_tail_call)
4002 /* Ideally we'd emit a message for all of the ways that it could
4003 have failed. */
	maybe_complain_about_tail_call (exp, "tail call production failed");
4005 }
4006
4007 currently_expanding_call--;
4008
  free (stack_usage_map_buf);
  free (args);
4011 return target;
4012}
4013
4014/* A sibling call sequence invalidates any REG_EQUIV notes made for
4015 this function's incoming arguments.
4016
4017 At the start of RTL generation we know the only REG_EQUIV notes
4018 in the rtl chain are those for incoming arguments, so we can look
4019 for REG_EQUIV notes between the start of the function and the
4020 NOTE_INSN_FUNCTION_BEG.
4021
4022 This is (slight) overkill. We could keep track of the highest
4023 argument we clobber and be more selective in removing notes, but it
4024 does not seem to be worth the effort. */
4025
4026void
4027fixup_tail_calls (void)
4028{
4029 rtx_insn *insn;
4030
4031 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4032 {
4033 rtx note;
4034
4035 /* There are never REG_EQUIV notes for the incoming arguments
4036 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
4037 if (NOTE_P (insn)
4038 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
4039 break;
4040
4041 note = find_reg_note (insn, REG_EQUIV, 0);
4042 if (note)
4043 remove_note (insn, note);
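      /* No insn should carry more than one REG_EQUIV note.  */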
4044 note = find_reg_note (insn, REG_EQUIV, 0);
4045 gcc_assert (!note);
4046 }
4047}
4048
/* Traverse a list of TYPES and expand all complex types into their
   components, so that e.g. (complex double, int) becomes
   (double, double, int) when the target wants complex args split.  */
4051static tree
4052split_complex_types (tree types)
4053{
4054 tree p;
4055
  /* Before allocating memory, check for the common case of no complex
     types.  */
4057 for (p = types; p; p = TREE_CHAIN (p))
4058 {
4059 tree type = TREE_VALUE (p);
4060 if (TREE_CODE (type) == COMPLEX_TYPE
4061 && targetm.calls.split_complex_arg (type))
4062 goto found;
4063 }
4064 return types;
4065
4066 found:
4067 types = copy_list (types);
4068
4069 for (p = types; p; p = TREE_CHAIN (p))
4070 {
4071 tree complex_type = TREE_VALUE (p);
4072
4073 if (TREE_CODE (complex_type) == COMPLEX_TYPE
4074 && targetm.calls.split_complex_arg (complex_type))
4075 {
4076 tree next, imag;
4077
4078 /* Rewrite complex type with component type. */
4079 TREE_VALUE (p) = TREE_TYPE (complex_type);
4080 next = TREE_CHAIN (p);
4081
4082 /* Add another component type for the imaginary part. */
4083 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
4084 TREE_CHAIN (p) = imag;
4085 TREE_CHAIN (imag) = next;
4086
4087 /* Skip the newly created node. */
4088 p = TREE_CHAIN (p);
4089 }
4090 }
4091
4092 return types;
4093}
4094
4095/* Output a library call to function ORGFUN (a SYMBOL_REF rtx)
4096 for a value of mode OUTMODE,
4097 with NARGS different arguments, passed as ARGS.
4098 Store the return value if RETVAL is nonzero: store it in VALUE if
4099 VALUE is nonnull, otherwise pick a convenient location. In either
4100 case return the location of the stored value.
4101
4102 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
4103 `const' calls, LCT_PURE for `pure' calls, or another LCT_ value for
4104 other types of library calls. */
4105
4106rtx
4107emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
4108 enum libcall_type fn_type,
4109 machine_mode outmode, int nargs, rtx_mode_t *args)
4110{
4111 /* Total size in bytes of all the stack-parms scanned so far. */
4112 struct args_size args_size;
4113 /* Size of arguments before any adjustments (such as rounding). */
4114 struct args_size original_args_size;
4115 int argnum;
4116 rtx fun;
  /* TODO: choose the correct decl type of orgfun.  Sadly this information
     isn't present here, so we default to the native calling ABI.  */
  tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling ABI?  */
  tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling ABI?  */
4121 int count;
4122 rtx argblock = 0;
4123 CUMULATIVE_ARGS args_so_far_v;
4124 cumulative_args_t args_so_far;
  struct arg
  {
    /* RTL value for this argument.  */
    rtx value;
    /* Mode in which the argument is passed.  */
    machine_mode mode;
    /* Register to pass it in, or 0 if passed on the stack.  */
    rtx reg;
    /* Number of bytes passed in registers when the argument is split
       between registers and stack, else 0.  */
    int partial;
    /* Stack layout information from locate_and_pad_parm.  */
    struct locate_and_pad_arg_data locate;
    /* Saved contents of any stack area this argument overwrites.  */
    rtx save_area;
  };
4134 struct arg *argvec;
4135 int old_inhibit_defer_pop = inhibit_defer_pop;
4136 rtx call_fusage = 0;
4137 rtx mem_value = 0;
4138 rtx valreg;
4139 bool pcc_struct_value = false;
4140 poly_int64 struct_value_size = 0;
4141 int flags;
4142 int reg_parm_stack_space = 0;
4143 poly_int64 needed;
4144 rtx_insn *before_call;
4145 bool have_push_fusage;
4146 tree tfom; /* type_for_mode (outmode, 0) */
4147
4148#ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
4151 int low_to_save = 0, high_to_save = 0;
4152 rtx save_area = 0; /* Place that it is saved. */
4153#endif
4154
  /* Record the initial stack usage state, so it can be restored after
     the call.  */
4156 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
4157 char *initial_stack_usage_map = stack_usage_map;
4158 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
4159 char *stack_usage_map_buf = NULL;
4160
4161 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
4162
4163#ifdef REG_PARM_STACK_SPACE
4164 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
4165#endif
4166
4167 /* By default, library functions cannot throw. */
4168 flags = ECF_NOTHROW;
4169
4170 switch (fn_type)
4171 {
4172 case LCT_NORMAL:
4173 break;
4174 case LCT_CONST:
4175 flags |= ECF_CONST;
4176 break;
4177 case LCT_PURE:
4178 flags |= ECF_PURE;
4179 break;
4180 case LCT_NORETURN:
4181 flags |= ECF_NORETURN;
4182 break;
4183 case LCT_THROW:
4184 flags &= ~ECF_NOTHROW;
4185 break;
4186 case LCT_RETURNS_TWICE:
4187 flags = ECF_RETURNS_TWICE;
4188 break;
4189 }
4190 fun = orgfun;
4191
4192 /* Ensure current function's preferred stack boundary is at least
4193 what we need. */
4194 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
4195 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4196
4197 /* If this kind of value comes back in memory,
4198 decide where in memory it should come back. */
4199 if (outmode != VOIDmode)
4200 {
4201 tfom = lang_hooks.types.type_for_mode (outmode, 0);
4202 if (aggregate_value_p (tfom, 0))
4203 {
4204#ifdef PCC_STATIC_STRUCT_RETURN
4205 rtx pointer_reg
4206 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
4207 mem_value = gen_rtx_MEM (outmode, pointer_reg);
4208 pcc_struct_value = true;
4209 if (value == 0)
4210 value = gen_reg_rtx (outmode);
4211#else /* not PCC_STATIC_STRUCT_RETURN */
      struct_value_size = GET_MODE_SIZE (outmode);
4213 if (value != 0 && MEM_P (value))
4214 mem_value = value;
4215 else
4216 mem_value = assign_temp (tfom, 1, 1);
4217#endif
4218 /* This call returns a big structure. */
4219 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
4220 }
4221 }
4222 else
4223 tfom = void_type_node;
4224
4225 /* ??? Unfinished: must pass the memory address as an argument. */
4226
4227 /* Copy all the libcall-arguments out of the varargs data
4228 and into a vector ARGVEC.
4229
4230 Compute how to pass each argument. We only support a very small subset
4231 of the full argument passing conventions to limit complexity here since
4232 library functions shouldn't have many args. */
4233
4234 argvec = XALLOCAVEC (struct arg, nargs + 1);
  memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
4236
4237#ifdef INIT_CUMULATIVE_LIBCALL_ARGS
4238 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
4239#else
4240 INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
4241#endif
  args_so_far = pack_cumulative_args (&args_so_far_v);
4243
4244 args_size.constant = 0;
4245 args_size.var = 0;
4246
4247 count = 0;
4248
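  /* Temporaries made while setting up the arguments live only until
     the call; pop_temp_slots below frees them.  */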
4249 push_temp_slots ();
4250
4251 /* If there's a structure value address to be passed,
4252 either pass it in the special place, or pass it as an extra argument. */
4253 if (mem_value && struct_value == 0 && ! pcc_struct_value)
4254 {
4255 rtx addr = XEXP (mem_value, 0);
4256
4257 nargs++;
4258
4259 /* Make sure it is a reasonable operand for a move or push insn. */
4260 if (!REG_P (addr) && !MEM_P (addr)
4261 && !(CONSTANT_P (addr)
4262 && targetm.legitimate_constant_p (Pmode, addr)))
4263 addr = force_operand (addr, NULL_RTX);
4264
4265 argvec[count].value = addr;
4266 argvec[count].mode = Pmode;
4267 argvec[count].partial = 0;
4268
4269 function_arg_info ptr_arg (Pmode, /*named=*/true);
4270 argvec[count].reg = targetm.calls.function_arg (args_so_far, ptr_arg);
4271 gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, ptr_arg) == 0);
4272
4273 locate_and_pad_parm (Pmode, NULL_TREE,
4274#ifdef STACK_PARMS_IN_REG_PARM_AREA
4275 1,
4276#else
4277 argvec[count].reg != 0,
4278#endif
4279 reg_parm_stack_space, 0,
4280 NULL_TREE, &args_size, &argvec[count].locate);
4281
4282 if (argvec[count].reg == 0 || argvec[count].partial != 0
4283 || reg_parm_stack_space > 0)
4284 args_size.constant += argvec[count].locate.size.constant;
4285
4286 targetm.calls.function_arg_advance (args_so_far, ptr_arg);
4287
4288 count++;
4289 }
4290
4291 for (unsigned int i = 0; count < nargs; i++, count++)
4292 {
4293 rtx val = args[i].first;
4294 function_arg_info arg (args[i].second, /*named=*/true);
4295 int unsigned_p = 0;
4296
4297 /* We cannot convert the arg value to the mode the library wants here;
4298 must do it earlier where we know the signedness of the arg. */
4299 gcc_assert (arg.mode != BLKmode
4300 && (GET_MODE (val) == arg.mode
4301 || GET_MODE (val) == VOIDmode));
4302
4303 /* Make sure it is a reasonable operand for a move or push insn. */
4304 if (!REG_P (val) && !MEM_P (val)
4305 && !(CONSTANT_P (val)
4306 && targetm.legitimate_constant_p (arg.mode, val)))
4307 val = force_operand (val, NULL_RTX);
4308
      if (pass_by_reference (&args_so_far_v, arg))
4310 {
4311 rtx slot;
	  int must_copy = !reference_callee_copied (&args_so_far_v, arg);
4313
4314 /* If this was a CONST function, it is now PURE since it now
4315 reads memory. */
4316 if (flags & ECF_CONST)
4317 {
4318 flags &= ~ECF_CONST;
4319 flags |= ECF_PURE;
4320 }
4321
4322 if (MEM_P (val) && !must_copy)
4323 {
4324 tree val_expr = MEM_EXPR (val);
4325 if (val_expr)
4326 mark_addressable (val_expr);
4327 slot = val;
4328 }
4329 else
4330 {
4331 slot = assign_temp (lang_hooks.types.type_for_mode (arg.mode, 0),
4332 1, 1);
4333 emit_move_insn (slot, val);
4334 }
4335
4336 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
4337 gen_rtx_USE (VOIDmode, slot),
4338 call_fusage);
4339 if (must_copy)
4340 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
4341 gen_rtx_CLOBBER (VOIDmode,
4342 slot),
4343 call_fusage);
4344
4345 arg.mode = Pmode;
4346 arg.pass_by_reference = true;
4347 val = force_operand (XEXP (slot, 0), NULL_RTX);
4348 }
4349
4350 arg.mode = promote_function_mode (NULL_TREE, arg.mode, &unsigned_p,
4351 NULL_TREE, 0);
4352 argvec[count].mode = arg.mode;
      argvec[count].value = convert_modes (arg.mode, GET_MODE (val), val,
					   unsigned_p);
4355 argvec[count].reg = targetm.calls.function_arg (args_so_far, arg);
4356
4357 argvec[count].partial
4358 = targetm.calls.arg_partial_bytes (args_so_far, arg);
4359
4360 if (argvec[count].reg == 0
4361 || argvec[count].partial != 0
4362 || reg_parm_stack_space > 0)
4363 {
4364 locate_and_pad_parm (arg.mode, NULL_TREE,
4365#ifdef STACK_PARMS_IN_REG_PARM_AREA
4366 1,
4367#else
4368 argvec[count].reg != 0,
4369#endif
4370 reg_parm_stack_space, argvec[count].partial,
4371 NULL_TREE, &args_size, &argvec[count].locate);
4372 args_size.constant += argvec[count].locate.size.constant;
4373 gcc_assert (!argvec[count].locate.size.var);
4374 }
4375#ifdef BLOCK_REG_PADDING
4376 else
4377 /* The argument is passed entirely in registers. See at which
4378 end it should be padded. */
4379 argvec[count].locate.where_pad =
4380 BLOCK_REG_PADDING (arg.mode, NULL_TREE,
4381 known_le (GET_MODE_SIZE (arg.mode),
4382 UNITS_PER_WORD));
4383#endif
4384
4385 targetm.calls.function_arg_advance (args_so_far, arg);
4386 }
4387
4388 for (int i = 0; i < nargs; i++)
4389 if (reg_parm_stack_space > 0
4390 || argvec[i].reg == 0
4391 || argvec[i].partial != 0)
      update_stack_alignment_for_call (&argvec[i].locate);
4393
4394 /* If this machine requires an external definition for library
4395 functions, write one out. */
4396 assemble_external_libcall (fun);
4397
4398 original_args_size = args_size;
  args_size.constant = (aligned_upper_bound (args_size.constant
4400 + stack_pointer_delta,
4401 STACK_BYTES)
4402 - stack_pointer_delta);
4403
  args_size.constant = upper_bound (args_size.constant,
				    reg_parm_stack_space);
4406
4407 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
4408 args_size.constant -= reg_parm_stack_space;
4409
  crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
					  args_size.constant);
4412
4413 if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
4414 {
4415 poly_int64 pushed = args_size.constant + pending_stack_adjust;
      current_function_pushed_stack_size
	= upper_bound (current_function_pushed_stack_size, pushed);
4418 }
4419
4420 if (ACCUMULATE_OUTGOING_ARGS)
4421 {
4422 /* Since the stack pointer will never be pushed, it is possible for
4423 the evaluation of a parm to clobber something we have already
4424 written to the stack. Since most function calls on RISC machines
4425 do not use the stack, this is uncommon, but must work correctly.
4426
4427 Therefore, we save any area of the stack that was already written
4428 and that we are using. Here we set up to do this by making a new
4429 stack usage map from the old one.
4430
4431 Another approach might be to try to reorder the argument
4432 evaluations to avoid this conflicting stack usage. */
4433
4434 needed = args_size.constant;
4435
4436 /* Since we will be writing into the entire argument area, the
4437 map must be allocated for its entire size, not just the part that
4438 is the responsibility of the caller. */
4439 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
4440 needed += reg_parm_stack_space;
4441
4442 poly_int64 limit = needed;
4443 if (ARGS_GROW_DOWNWARD)
4444 limit += 1;
4445
4446 /* For polynomial sizes, this is the maximum possible size needed
4447 for arguments with a constant size and offset. */
    HOST_WIDE_INT const_limit = constant_lower_bound (limit);
4449 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
4450 const_limit);
4451
4452 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
4453 stack_usage_map = stack_usage_map_buf;
4454
4455 if (initial_highest_arg_in_use)
    memcpy (stack_usage_map, initial_stack_usage_map,
	    initial_highest_arg_in_use);
4458
4459 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
    memset (&stack_usage_map[initial_highest_arg_in_use], 0,
	    highest_outgoing_arg_in_use - initial_highest_arg_in_use);
4462 needed = 0;
4463
4464 /* We must be careful to use virtual regs before they're instantiated,
4465 and real regs afterwards. Loop optimization, for example, can create
4466 new libcalls after we've instantiated the virtual regs, and if we
4467 use virtuals anyway, they won't match the rtl patterns. */
4468
4469 if (virtuals_instantiated)
4470 argblock = plus_constant (Pmode, stack_pointer_rtx,
4471 STACK_POINTER_OFFSET);
4472 else
4473 argblock = virtual_outgoing_args_rtx;
4474 }
4475 else
4476 {
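      /* If the target does not want arguments pushed one at a time,
	 allocate the whole block of argument space up front.  */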
4477 if (!targetm.calls.push_argument (0))
4478 argblock = push_block (gen_int_mode (args_size.constant, Pmode), 0, 0);
4479 }
4480
  /* We push args individually in reverse order; perform stack alignment
     before the first push (the last arg).  */
4483 if (argblock == 0)
4484 anti_adjust_stack (gen_int_mode (args_size.constant
4485 - original_args_size.constant,
4486 Pmode));
4487
4488 argnum = nargs - 1;
4489
4490#ifdef REG_PARM_STACK_SPACE
4491 if (ACCUMULATE_OUTGOING_ARGS)
4492 {
4493 /* The argument list is the property of the called routine and it
4494 may clobber it. If the fixed area has been used for previous
4495 parameters, we must save and restore it. */
      save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
					    &low_to_save, &high_to_save);
4498 }
4499#endif
4500
4501 /* When expanding a normal call, args are stored in push order,
4502 which is the reverse of what we have here. */
4503 bool any_regs = false;
4504 for (int i = nargs; i-- > 0; )
4505 if (argvec[i].reg != NULL_RTX)
4506 {
4507 targetm.calls.call_args (argvec[i].reg, NULL_TREE);
4508 any_regs = true;
4509 }
4510 if (!any_regs)
4511 targetm.calls.call_args (pc_rtx, NULL_TREE);
4512
4513 /* Push the args that need to be pushed. */
4514
4515 have_push_fusage = false;
4516
4517 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4518 are to be pushed. */
4519 for (count = 0; count < nargs; count++, argnum--)
4520 {
4521 machine_mode mode = argvec[argnum].mode;
4522 rtx val = argvec[argnum].value;
4523 rtx reg = argvec[argnum].reg;
4524 int partial = argvec[argnum].partial;
4525 unsigned int parm_align = argvec[argnum].locate.boundary;
4526 poly_int64 lower_bound = 0, upper_bound = 0;
4527
4528 if (! (reg != 0 && partial == 0))
4529 {
4530 rtx use;
4531
4532 if (ACCUMULATE_OUTGOING_ARGS)
4533 {
4534 /* If this is being stored into a pre-allocated, fixed-size,
4535 stack area, save any previous data at that location. */
4536
4537 if (ARGS_GROW_DOWNWARD)
4538 {
4539 /* stack_slot is negative, but we want to index stack_usage_map
4540 with positive values. */
4541 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
4542 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
4543 }
4544 else
4545 {
4546 lower_bound = argvec[argnum].locate.slot_offset.constant;
4547 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
4548 }
4549
4550 if (stack_region_maybe_used_p (lower_bound, upper_bound,
4551 reg_parm_stack_space))
4552 {
4553 /* We need to make a save area. */
4554 poly_uint64 size
4555 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
4556 machine_mode save_mode
		= int_mode_for_size (size, 1).else_blk ();
4558 rtx adr
4559 = plus_constant (Pmode, argblock,
4560 argvec[argnum].locate.offset.constant);
4561 rtx stack_area
4562 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
4563
4564 if (save_mode == BLKmode)
4565 {
		      argvec[argnum].save_area
			= assign_stack_temp (BLKmode,
					     argvec[argnum].locate.size.constant);
4570
4571 emit_block_move (validize_mem
4572 (copy_rtx (argvec[argnum].save_area)),
4573 stack_area,
4574 (gen_int_mode
4575 (argvec[argnum].locate.size.constant,
4576 Pmode)),
4577 BLOCK_OP_CALL_PARM);
4578 }
4579 else
4580 {
4581 argvec[argnum].save_area = gen_reg_rtx (save_mode);
4582
4583 emit_move_insn (argvec[argnum].save_area, stack_area);
4584 }
4585 }
4586 }
4587
4588 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
4589 partial, reg, 0, argblock,
4590 (gen_int_mode
4591 (argvec[argnum].locate.offset.constant, Pmode)),
4592 reg_parm_stack_space,
4593 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad), false);
4594
4595 /* Now mark the segment we just used. */
4596 if (ACCUMULATE_OUTGOING_ARGS)
4597 mark_stack_region_used (lower_bound, upper_bound);
4598
4599 NO_DEFER_POP;
4600
4601 /* Indicate argument access so that alias.cc knows that these
4602 values are live. */
4603 if (argblock)
4604 use = plus_constant (Pmode, argblock,
4605 argvec[argnum].locate.offset.constant);
4606 else if (have_push_fusage)
4607 continue;
4608 else
4609 {
4610 /* When arguments are pushed, trying to tell alias.cc where
4611 exactly this argument is won't work, because the
4612 auto-increment causes confusion. So we merely indicate
4613 that we access something with a known mode somewhere on
4614 the stack. */
4615 use = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4616 gen_rtx_SCRATCH (Pmode));
4617 have_push_fusage = true;
4618 }
4619 use = gen_rtx_MEM (argvec[argnum].mode, use);
4620 use = gen_rtx_USE (VOIDmode, use);
4621 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
4622 }
4623 }
4624
4625 argnum = nargs - 1;
4626
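  /* Massage the function address into a form that a call insn accepts.  */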
  fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
4628
4629 /* Now load any reg parms into their regs. */
4630
4631 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4632 are to be pushed. */
4633 for (count = 0; count < nargs; count++, argnum--)
4634 {
4635 machine_mode mode = argvec[argnum].mode;
4636 rtx val = argvec[argnum].value;
4637 rtx reg = argvec[argnum].reg;
4638 int partial = argvec[argnum].partial;
4639
4640 /* Handle calls that pass values in multiple non-contiguous
4641 locations. The PA64 has examples of this for library calls. */
4642 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4643 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
4644 else if (reg != 0 && partial == 0)
4645 {
4646 emit_move_insn (reg, val);
4647#ifdef BLOCK_REG_PADDING
4648 poly_int64 size = GET_MODE_SIZE (argvec[argnum].mode);
4649
4650 /* Copied from load_register_parameters. */
4651
	  /* Handle the case where we have a value that needs shifting
	     up to the msb, e.g. a QImode value being padded upward on
	     a BYTES_BIG_ENDIAN machine.  */
4655 if (known_lt (size, UNITS_PER_WORD)
4656 && (argvec[argnum].locate.where_pad
4657 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
4658 {
4659 rtx x;
4660 poly_int64 shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
4661
4662 /* Assigning REG here rather than a temp makes CALL_FUSAGE
4663 report the whole reg as used. Strictly speaking, the
4664 call only uses SIZE bytes at the msb end, but it doesn't
4665 seem worth generating rtl to say that. */
4666 reg = gen_rtx_REG (word_mode, REGNO (reg));
4667 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
4668 if (x != reg)
4669 emit_move_insn (reg, x);
4670 }
4671#endif
4672 }
4673
4674 NO_DEFER_POP;
4675 }
4676
4677 /* Any regs containing parms remain in use through the call. */
4678 for (count = 0; count < nargs; count++)
4679 {
4680 rtx reg = argvec[count].reg;
4681 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4682 use_group_regs (&call_fusage, reg);
4683 else if (reg != 0)
4684 {
4685 int partial = argvec[count].partial;
4686 if (partial)
4687 {
4688 int nregs;
4689 gcc_assert (partial % UNITS_PER_WORD == 0);
4690 nregs = partial / UNITS_PER_WORD;
4691 use_regs (&call_fusage, REGNO (reg), nregs);
4692 }
4693 else
	    use_reg (&call_fusage, reg);
4695 }
4696 }
4697
4698 /* Pass the function the address in which to return a structure value. */
4699 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
4700 {
4701 emit_move_insn (struct_value,
4702 force_reg (Pmode,
4703 force_operand (XEXP (mem_value, 0),
4704 NULL_RTX)));
4705 if (REG_P (struct_value))
      use_reg (&call_fusage, struct_value);
4707 }
4708
4709 /* Don't allow popping to be deferred, since then
4710 cse'ing of library calls could delete a call and leave the pop. */
4711 NO_DEFER_POP;
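  /* Work out the hard register, if any, in which this libcall will
     return its value.  */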
4712 valreg = (mem_value == 0 && outmode != VOIDmode
4713 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
4714
4715 /* Stack must be properly aligned now. */
4716 gcc_assert (multiple_p (stack_pointer_delta,
4717 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));
4718
4719 before_call = get_last_insn ();
4720
4721 if (flag_callgraph_info)
4722 record_final_call (SYMBOL_REF_DECL (orgfun), UNKNOWN_LOCATION);
4723
4724 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
4725 will set inhibit_defer_pop to that value. */
4726 /* The return type is needed to decide how many bytes the function pops.
4727 Signedness plays no role in that, so for simplicity, we pretend it's
4728 always signed. We also assume that the list of arguments passed has
4729 no impact, so we pretend it is unknown. */
4730
  emit_call_1 (fun, NULL,
	       get_identifier (XSTR (orgfun, 0)),
	       build_function_type (tfom, NULL_TREE),
	       original_args_size.constant, args_size.constant,
	       struct_value_size,
	       targetm.calls.function_arg (args_so_far,
					   function_arg_info::end_marker ()),
	       valreg,
	       old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
4740
4741 if (flag_ipa_ra)
4742 {
4743 rtx datum = orgfun;
4744 gcc_assert (GET_CODE (datum) == SYMBOL_REF);
4745 rtx_call_insn *last = last_call_insn ();
4746 add_reg_note (last, REG_CALL_DECL, datum);
4747 }
4748
4749 /* Right-shift returned value if necessary. */
4750 if (!pcc_struct_value
4751 && TYPE_MODE (tfom) != BLKmode
4752 && targetm.calls.return_in_msb (tfom))
4753 {
      shift_return_value (TYPE_MODE (tfom), false, valreg);
4755 valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
4756 }
4757
4758 targetm.calls.end_call_args ();
4759
4760 /* For calls to `setjmp', etc., inform function.cc:setjmp_warnings
4761 that it should complain if nonvolatile values are live. For
4762 functions that cannot return, inform flow that control does not
4763 fall through. */
4764 if (flags & ECF_NORETURN)
4765 {
4766 /* The barrier note must be emitted
4767 immediately after the CALL_INSN. Some ports emit more than
4768 just a CALL_INSN above, so we must search for it here. */
4769 rtx_insn *last = get_last_insn ();
4770 while (!CALL_P (last))
4771 {
	  last = PREV_INSN (last);
4773 /* There was no CALL_INSN? */
4774 gcc_assert (last != before_call);
4775 }
4776
4777 emit_barrier_after (last);
4778 }
4779
4780 /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
4781 and LCT_RETURNS_TWICE, cannot perform non-local gotos. */
4782 if (flags & ECF_NOTHROW)
4783 {
4784 rtx_insn *last = get_last_insn ();
4785 while (!CALL_P (last))
4786 {
	  last = PREV_INSN (last);
4788 /* There was no CALL_INSN? */
4789 gcc_assert (last != before_call);
4790 }
4791
4792 make_reg_eh_region_note_nothrow_nononlocal (last);
4793 }
4794
4795 /* Now restore inhibit_defer_pop to its actual original value. */
4796 OK_DEFER_POP;
4797
4798 pop_temp_slots ();
4799
4800 /* Copy the value to the right place. */
4801 if (outmode != VOIDmode && retval)
4802 {
4803 if (mem_value)
4804 {
4805 if (value == 0)
4806 value = mem_value;
4807 if (value != mem_value)
4808 emit_move_insn (value, mem_value);
4809 }
4810 else if (GET_CODE (valreg) == PARALLEL)
4811 {
4812 if (value == 0)
4813 value = gen_reg_rtx (outmode);
	  emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
4815 }
4816 else
4817 {
4818 /* Convert to the proper mode if a promotion has been active. */
4819 if (GET_MODE (valreg) != outmode)
4820 {
4821 int unsignedp = TYPE_UNSIGNED (tfom);
4822
4823 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
4824 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
4825 == GET_MODE (valreg));
	      valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
4827 }
4828
4829 if (value != 0)
4830 emit_move_insn (value, valreg);
4831 else
4832 value = valreg;
4833 }
4834 }
4835
4836 if (ACCUMULATE_OUTGOING_ARGS)
4837 {
4838#ifdef REG_PARM_STACK_SPACE
4839 if (save_area)
4840 restore_fixed_argument_area (save_area, argblock,
4841 high_to_save, low_to_save);
4842#endif
4843
4844 /* If we saved any argument areas, restore them. */
4845 for (count = 0; count < nargs; count++)
4846 if (argvec[count].save_area)
4847 {
4848 machine_mode save_mode = GET_MODE (argvec[count].save_area);
4849 rtx adr = plus_constant (Pmode, argblock,
4850 argvec[count].locate.offset.constant);
4851 rtx stack_area = gen_rtx_MEM (save_mode,
4852 memory_address (save_mode, adr));
4853
4854 if (save_mode == BLKmode)
4855 emit_block_move (stack_area,
4856 validize_mem
4857 (copy_rtx (argvec[count].save_area)),
4858 (gen_int_mode
4859 (argvec[count].locate.size.constant, Pmode)),
4860 BLOCK_OP_CALL_PARM);
4861 else
4862 emit_move_insn (stack_area, argvec[count].save_area);
4863 }
4864
4865 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4866 stack_usage_map = initial_stack_usage_map;
4867 stack_usage_watermark = initial_stack_usage_watermark;
4868 }
4869
  free (stack_usage_map_buf);

  return value;
}
4875
4876
4877/* Store a single argument for a function call
4878 into the register or memory area where it must be passed.
4879 *ARG describes the argument value and where to pass it.
4880
4881 ARGBLOCK is the address of the stack-block for all the arguments,
4882 or 0 on a machine where arguments are pushed individually.
4883
   FLAGS may have ECF_MAY_BE_ALLOCA set, which says this could be a call
   to `alloca', so we must be careful about how the stack is used.

   VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
   argument stack.  If ACCUMULATE_OUTGOING_ARGS, it indicates that we
   need not worry about saving and restoring the stack.

   REG_PARM_STACK_SPACE is the size of the area reserved for arguments
   passed in registers, if any.

   Return true if this arg should cause sibcall failure,
   false otherwise.  */
4895
4896static bool
4897store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4898 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
4899{
4900 tree pval = arg->tree_value;
4901 rtx reg = 0;
4902 int partial = 0;
4903 poly_int64 used = 0;
4904 poly_int64 lower_bound = 0, upper_bound = 0;
4905 bool sibcall_failure = false;
4906
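  /* An erroneous argument: returning true makes the caller abandon
     any sibcall attempt.  */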
4907 if (TREE_CODE (pval) == ERROR_MARK)
4908 return true;
4909
4910 /* Push a new temporary level for any temporaries we make for
4911 this argument. */
4912 push_temp_slots ();
4913
4914 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4915 {
4916 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4917 save any previous data at that location. */
4918 if (argblock && ! variable_size && arg->stack)
4919 {
4920 if (ARGS_GROW_DOWNWARD)
4921 {
4922 /* stack_slot is negative, but we want to index stack_usage_map
4923 with positive values. */
4924 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4925 {
4926 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
	      upper_bound = -rtx_to_poly_int64 (offset) + 1;
4928 }
4929 else
4930 upper_bound = 0;
4931
4932 lower_bound = upper_bound - arg->locate.size.constant;
4933 }
4934 else
4935 {
4936 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4937 {
4938 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
	      lower_bound = rtx_to_poly_int64 (offset);
4940 }
4941 else
4942 lower_bound = 0;
4943
4944 upper_bound = lower_bound + arg->locate.size.constant;
4945 }
4946
4947 if (stack_region_maybe_used_p (lower_bound, upper_bound,
4948 reg_parm_stack_space))
4949 {
4950 /* We need to make a save area. */
4951 poly_uint64 size = arg->locate.size.constant * BITS_PER_UNIT;
4952 machine_mode save_mode
	    = int_mode_for_size (size, 1).else_blk ();
4954 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4955 rtx stack_area = gen_rtx_MEM (save_mode, adr);
4956
4957 if (save_mode == BLKmode)
4958 {
4959 arg->save_area
4960 = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
4961 preserve_temp_slots (arg->save_area);
4962 emit_block_move (validize_mem (copy_rtx (arg->save_area)),
4963 stack_area,
4964 (gen_int_mode
4965 (arg->locate.size.constant, Pmode)),
4966 BLOCK_OP_CALL_PARM);
4967 }
4968 else
4969 {
4970 arg->save_area = gen_reg_rtx (save_mode);
4971 emit_move_insn (arg->save_area, stack_area);
4972 }
4973 }
4974 }
4975 }
4976
4977 /* If this isn't going to be placed on both the stack and in registers,
4978 set up the register and number of words. */
4979 if (! arg->pass_on_stack)
4980 {
4981 if (flags & ECF_SIBCALL)
4982 reg = arg->tail_call_reg;
4983 else
4984 reg = arg->reg;
4985 partial = arg->partial;
4986 }
4987
4988 /* Being passed entirely in a register. We shouldn't be called in
4989 this case. */
4990 gcc_assert (reg == 0 || partial != 0);
4991
4992 /* If this arg needs special alignment, don't load the registers
4993 here. */
4994 if (arg->n_aligned_regs != 0)
4995 reg = 0;
4996
4997 /* If this is being passed partially in a register, we can't evaluate
4998 it directly into its stack slot. Otherwise, we can. */
4999 if (arg->value == 0)
5000 {
5001 /* stack_arg_under_construction is nonzero if a function argument is
5002 being evaluated directly into the outgoing argument list and
5003 expand_call must take special action to preserve the argument list
5004 if it is called recursively.
5005
5006 For scalar function arguments stack_usage_map is sufficient to
5007 determine which stack slots must be saved and restored. Scalar
5008 arguments in general have pass_on_stack == false.
5009
5010 If this argument is initialized by a function which takes the
5011 address of the argument (a C++ constructor or a C function
5012 returning a BLKmode structure), then stack_usage_map is
5013 insufficient and expand_call must push the stack around the
5014 function call. Such arguments have pass_on_stack == true.
5015
5016 Note that it is always safe to set stack_arg_under_construction,
5017 but this generates suboptimal code if set when not needed. */
5018
5019 if (arg->pass_on_stack)
5020 stack_arg_under_construction++;
5021
5022 arg->value = expand_expr (exp: pval,
5023 target: (partial
5024 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
5025 ? NULL_RTX : arg->stack,
5026 VOIDmode, modifier: EXPAND_STACK_PARM);
5027
5028 /* If we are promoting object (or for any other reason) the mode
5029 doesn't agree, convert the mode. */
5030
5031 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
5032 arg->value = convert_modes (mode: arg->mode, TYPE_MODE (TREE_TYPE (pval)),
5033 x: arg->value, unsignedp: arg->unsignedp);
5034
5035 if (arg->pass_on_stack)
5036 stack_arg_under_construction--;
5037 }
5038
5039 /* Check for overlap with already clobbered argument area. */
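  /* For a sibling call, the outgoing arguments occupy the caller's own
     incoming argument area, parts of which the stores above may already
     have clobbered; reading the value from such a slot would be wrong.  */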
  if ((flags & ECF_SIBCALL)
      && MEM_P (arg->value)
      && mem_might_overlap_already_clobbered_arg_p (XEXP (arg->value, 0),
                                                    arg->locate.size.constant))
    sibcall_failure = true;

  /* Don't allow anything left on stack from computation
     of argument to alloca.  */
  if (flags & ECF_MAY_BE_ALLOCA)
    do_pending_stack_adjust ();

  if (arg->value == arg->stack)
    /* If the value is already in the stack slot, we are done.  */
    ;
  else if (arg->mode != BLKmode)
    {
      unsigned int parm_align;

      /* Argument is a scalar, not entirely passed in registers.
         (If part is passed in registers, arg->partial says how much
         and emit_push_insn will take care of putting it there.)

         Push it, and if its size is less than the
         amount of space allocated to it,
         also bump stack pointer by the additional space.
         Note that in C the default argument promotions
         will prevent such mismatches.  */

      poly_int64 size = (TYPE_EMPTY_P (TREE_TYPE (pval))
                         ? 0 : GET_MODE_SIZE (arg->mode));

      /* Compute how much space the push instruction will push.
         On many machines, pushing a byte will advance the stack
         pointer by a halfword.  */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
      used = size;

      /* Compute how much space the argument should get:
         round up to a multiple of the alignment for arguments.  */
      if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
          != PAD_NONE)
        /* At the moment we don't (need to) support ABIs for which the
           padding isn't known at compile time.  In principle it should
           be easy to add though.  */
        used = force_align_up (size, PARM_BOUNDARY / BITS_PER_UNIT);

      /* Compute the alignment of the pushed argument.  */
      parm_align = arg->locate.boundary;
      if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
          == PAD_DOWNWARD)
        {
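          /* With downward padding the value sits USED - SIZE bytes into
             the slot, so the data itself is only as aligned as that
             padding offset allows.  */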
          poly_int64 pad = used - size;
          unsigned int pad_align = known_alignment (pad) * BITS_PER_UNIT;
          if (pad_align != 0)
            parm_align = MIN (parm_align, pad_align);
        }

      /* This isn't already where we want it on the stack, so put it there.
         This can either be done with push or copy insns.  */
      if (maybe_ne (used, 0)
          && !emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval),
                              NULL_RTX, parm_align, partial, reg, used - size,
                              argblock, ARGS_SIZE_RTX (arg->locate.offset),
                              reg_parm_stack_space,
                              ARGS_SIZE_RTX (arg->locate.alignment_pad), true))
        sibcall_failure = true;

      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.  */
      if (partial == 0)
        arg->value = arg->stack;
    }
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      unsigned int parm_align;
      poly_int64 excess;
      rtx size_rtx;

      /* Pushing a nonscalar.
         If part is passed in registers, PARTIAL says how much
         and emit_push_insn will take care of putting it there.  */

      /* Round its size up to a multiple
         of the allocation unit for arguments.  */

      if (arg->locate.size.var != 0)
        {
          excess = 0;
          size_rtx = ARGS_SIZE_RTX (arg->locate.size);
        }
      else
        {
          /* PUSH_ROUNDING has no effect on us, because emit_push_insn
             for BLKmode is careful to avoid it.  */
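          /* EXCESS is how much larger the allocated space is than the
             value itself: the stack-slot size, plus the PARTIAL bytes
             that travel in registers, minus the value's full size.  */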
          excess = (arg->locate.size.constant
                    - arg_int_size_in_bytes (TREE_TYPE (pval))
                    + partial);
          size_rtx = expand_expr (arg_size_in_bytes (TREE_TYPE (pval)),
                                  NULL_RTX, TYPE_MODE (sizetype),
                                  EXPAND_NORMAL);
        }

      parm_align = arg->locate.boundary;

      /* When an argument is padded down, the block is aligned to
         PARM_BOUNDARY, but the actual argument isn't.  */
      if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
          == PAD_DOWNWARD)
        {
          if (arg->locate.size.var)
            parm_align = BITS_PER_UNIT;
          else
            {
              unsigned int excess_align
                = known_alignment (excess) * BITS_PER_UNIT;
              if (excess_align != 0)
                parm_align = MIN (parm_align, excess_align);
            }
        }

      if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
        {
          /* emit_push_insn might not work properly if arg->value and
             argblock + arg->locate.offset areas overlap.  */
          rtx x = arg->value;
          poly_int64 i = 0;

          if (strip_offset (XEXP (x, 0), &i)
              == crtl->args.internal_arg_pointer)
            {
              /* arg.locate doesn't contain the pretend_args_size offset,
                 it's part of argblock.  Ensure we don't count it in I.  */
              if (STACK_GROWS_DOWNWARD)
                i -= crtl->args.pretend_args_size;
              else
                i += crtl->args.pretend_args_size;

              /* expand_call should ensure this.  */
              gcc_assert (!arg->locate.offset.var
                          && arg->locate.size.var == 0);
              poly_int64 size_val = rtx_to_poly_int64 (size_rtx);

              if (known_eq (arg->locate.offset.constant, i))
                {
                  /* Even though they appear to be at the same location,
                     if part of the outgoing argument is in registers,
                     they aren't really at the same location.  Check for
                     this by making sure that the incoming size is the
                     same as the outgoing size.  */
                  if (maybe_ne (arg->locate.size.constant, size_val))
                    sibcall_failure = true;
                }
              else if (maybe_in_range_p (arg->locate.offset.constant,
                                         i, size_val))
                sibcall_failure = true;
              /* Use arg->locate.size.constant instead of size_rtx
                 because we only care about the part of the argument
                 on the stack.  */
              else if (maybe_in_range_p (i, arg->locate.offset.constant,
                                         arg->locate.size.constant))
                sibcall_failure = true;
            }
        }

      if (!CONST_INT_P (size_rtx) || INTVAL (size_rtx) != 0)
        emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
                        parm_align, partial, reg, excess, argblock,
                        ARGS_SIZE_RTX (arg->locate.offset),
                        reg_parm_stack_space,
                        ARGS_SIZE_RTX (arg->locate.alignment_pad), false);
      /* If we bypass emit_push_insn because it is a zero sized argument,
         we still might need to adjust stack if such argument requires
         extra alignment.  See PR104558.  */
      else if ((arg->locate.alignment_pad.var
                || maybe_ne (arg->locate.alignment_pad.constant, 0))
               && !argblock)
        anti_adjust_stack (ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.

         ??? Unlike the case above, in which we want the actual
         address of the data, so that we can load it directly into a
         register, here we want the address of the stack slot, so that
         it's properly aligned for word-by-word copying or something
         like that.  It's not clear that this is always correct.  */
      if (partial == 0)
        arg->value = arg->stack_slot;
    }

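  /* If the ABI splits the value across several hard registers (REG is
     a PARALLEL), load the pieces into temporaries now; they are moved
     into the actual argument registers just before the call is emitted.  */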
  if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
    {
      tree type = TREE_TYPE (arg->tree_value);
      arg->parallel_value
        = emit_group_load_into_temps (arg->reg, arg->value, type,
                                      int_size_in_bytes (type));
    }

  /* Mark all slots this store used.  */
  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
      && argblock && ! variable_size && arg->stack)
    mark_stack_region_used (lower_bound, upper_bound);

  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;

  /* Free any temporary slots made in processing this argument.  */
  pop_temp_slots ();

  return sibcall_failure;
}

/* True if we do not know how to pass ARG solely in registers.  */

bool
must_pass_in_stack_var_size (const function_arg_info &arg)
{
  if (!arg.type)
    return false;

  /* If the type has variable size...  */
  if (!poly_int_tree_p (TYPE_SIZE (arg.type)))
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (arg.type))
    return true;

  return false;
}

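/* This variant is the usual default choice for the
   targetm.calls.must_pass_in_stack hook.  */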
/* Another version of the TARGET_MUST_PASS_IN_STACK hook.  This one
   additionally takes the trailing padding of a structure into account.  */
/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING.  */

bool
must_pass_in_stack_var_size_or_pad (const function_arg_info &arg)
{
  if (!arg.type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (arg.type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (arg.type))
    return true;

  if (TYPE_EMPTY_P (arg.type))
    return false;

  /* If the padding and mode of the type are such that a copy into
     a register would put it into the wrong part of the register.  */
  if (arg.mode == BLKmode
      && int_size_in_bytes (arg.type) % (PARM_BOUNDARY / BITS_PER_UNIT)
      && (targetm.calls.function_arg_padding (arg.mode, arg.type)
          == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
    return true;

  return false;
}

/* Return true if TYPE must be passed on the stack when passed to
   the "..." arguments of a function.  */

bool
must_pass_va_arg_in_stack (tree type)
{
  function_arg_info arg (type, /*named=*/false);
  return targetm.calls.must_pass_in_stack (arg);
}

/* Return true if FIELD is the C++17 empty base field that should
   be ignored for ABI calling convention decisions in order to
   maintain ABI compatibility between C++14 and earlier, which doesn't
   add this FIELD to classes with empty bases, and C++17 and later,
   which does.  */

bool
cxx17_empty_base_field_p (const_tree field)
{
  return (DECL_FIELD_ABI_IGNORED (field)
          && DECL_ARTIFICIAL (field)
          && RECORD_OR_UNION_TYPE_P (TREE_TYPE (field))
          && !lookup_attribute ("no_unique_address", DECL_ATTRIBUTES (field)));
}