1 | /* Convert function calls to rtl insns, for GNU C compiler. |
2 | Copyright (C) 1989-2025 Free Software Foundation, Inc. |
3 | |
4 | This file is part of GCC. |
5 | |
6 | GCC is free software; you can redistribute it and/or modify it under |
7 | the terms of the GNU General Public License as published by the Free |
8 | Software Foundation; either version 3, or (at your option) any later |
9 | version. |
10 | |
11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
14 | for more details. |
15 | |
16 | You should have received a copy of the GNU General Public License |
17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ |
19 | |
20 | #include "config.h" |
21 | #include "system.h" |
22 | #include "coretypes.h" |
23 | #include "backend.h" |
24 | #include "target.h" |
25 | #include "rtl.h" |
26 | #include "tree.h" |
27 | #include "gimple.h" |
28 | #include "predict.h" |
29 | #include "memmodel.h" |
30 | #include "tm_p.h" |
31 | #include "stringpool.h" |
32 | #include "expmed.h" |
33 | #include "optabs.h" |
34 | #include "emit-rtl.h" |
35 | #include "cgraph.h" |
36 | #include "diagnostic-core.h" |
37 | #include "fold-const.h" |
38 | #include "stor-layout.h" |
39 | #include "varasm.h" |
40 | #include "internal-fn.h" |
41 | #include "dojump.h" |
42 | #include "explow.h" |
43 | #include "calls.h" |
44 | #include "expr.h" |
45 | #include "output.h" |
46 | #include "langhooks.h" |
47 | #include "except.h" |
48 | #include "dbgcnt.h" |
49 | #include "rtl-iter.h" |
50 | #include "tree-vrp.h" |
51 | #include "tree-ssanames.h" |
52 | #include "intl.h" |
53 | #include "stringpool.h" |
54 | #include "hash-map.h" |
55 | #include "hash-traits.h" |
56 | #include "attribs.h" |
57 | #include "builtins.h" |
58 | #include "gimple-iterator.h" |
59 | #include "gimple-fold.h" |
60 | #include "attr-fnspec.h" |
61 | #include "value-query.h" |
62 | #include "tree-pretty-print.h" |
63 | #include "tree-eh.h" |
64 | |
65 | /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */ |
66 | #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) |
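/* Example (illustrative, values are target-defined): with a
   PREFERRED_STACK_BOUNDARY of 128 bits and BITS_PER_UNIT of 8,
   STACK_BYTES is 16, so outgoing argument block sizes are rounded
   up to multiples of 16 bytes.  */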
67 | |
68 | /* Data structure and subroutines used within expand_call. */ |
69 | |
70 | struct arg_data |
71 | { |
72 | /* Tree node for this argument. */ |
73 | tree tree_value; |
74 | /* Mode for value; TYPE_MODE unless promoted. */ |
75 | machine_mode mode; |
76 | /* Current RTL value for argument, or 0 if it isn't precomputed. */ |
77 | rtx value; |
78 | /* Initially-computed RTL value for argument; only for const functions. |
79 | rtx initial_value; |
80 | /* Register to pass this argument in, 0 if passed on stack, or a |
81 | PARALLEL if the arg is to be copied into multiple non-contiguous |
82 | registers. */ |
83 | rtx reg; |
84 | /* Register to pass this argument in when generating tail call sequence. |
85 | This is not the same register as for normal calls on machines with |
86 | register windows. */ |
87 | rtx tail_call_reg; |
88 | /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct |
89 | form for emit_group_move. */ |
90 | rtx parallel_value; |
91 | /* If REG was promoted from the actual mode of the argument expression, |
92 | indicates whether the promotion is sign- or zero-extended. */ |
93 | int unsignedp; |
94 | /* Number of bytes to put in registers. 0 means put the whole arg |
95 | in registers. Also 0 if not passed in registers. */ |
96 | int partial; |
97 | /* True if argument must be passed on stack. |
98 | Note that some arguments may be passed on the stack |
99 | even though pass_on_stack is false, just because FUNCTION_ARG says so. |
100 | pass_on_stack identifies arguments that *cannot* go in registers. */ |
101 | bool pass_on_stack; |
102 | /* Some fields packaged up for locate_and_pad_parm. */ |
103 | struct locate_and_pad_arg_data locate; |
104 | /* Location on the stack at which parameter should be stored. The store |
105 | has already been done if STACK == VALUE. */ |
106 | rtx stack; |
107 | /* Location on the stack of the start of this argument slot. This can |
108 | differ from STACK if this arg pads downward. This location is known |
109 | to be aligned to TARGET_FUNCTION_ARG_BOUNDARY. */ |
110 | rtx stack_slot; |
111 | /* Place that this stack area has been saved, if needed. */ |
112 | rtx save_area; |
113 | /* If an argument's alignment does not permit direct copying into registers, |
114 | copy in smaller-sized pieces into pseudos. These are stored in a |
115 | block pointed to by this field. The next field says how many |
116 | word-sized pseudos we made. */ |
117 | rtx *aligned_regs; |
118 | int n_aligned_regs; |
119 | }; |
120 | |
121 | /* A vector of one char per byte of stack space. A byte is nonzero if |
122 | the corresponding stack location has been used. |
123 | This vector is used to prevent a function call within an argument from |
124 | clobbering any stack already set up. */ |
125 | static char *stack_usage_map; |
126 | |
127 | /* Size of STACK_USAGE_MAP. */ |
128 | static unsigned int highest_outgoing_arg_in_use; |
129 | |
130 | /* Assume that any stack location at this byte index is used, |
131 | without checking the contents of stack_usage_map. */ |
132 | static unsigned HOST_WIDE_INT stack_usage_watermark = HOST_WIDE_INT_M1U; |
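/* Illustrative sketch (not part of GCC): for a constant byte index I,
   the map and the watermark combine roughly as

     static bool byte_maybe_used_p (unsigned HOST_WIDE_INT i)
     {
       return i >= stack_usage_watermark || stack_usage_map[i] != 0;
     }

   stack_region_maybe_used_p below generalizes this test to poly_int
   byte ranges, and mark_stack_region_used falls back to lowering the
   watermark when a region cannot be recorded in the map.  */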
133 | |
134 | /* A bitmap of virtual-incoming stack space. A bit is set if the corresponding |
135 | stack location's tail call argument has already been stored into the stack. |
136 | This bitmap is used to prevent sibling call optimization if the function tries |
137 | to use its parent's incoming argument slots when they have already been |
138 | overwritten with tail call arguments. */ |
139 | static sbitmap stored_args_map; |
140 | |
141 | /* Assume that any virtual-incoming location at this byte index has been |
142 | stored, without checking the contents of stored_args_map. */ |
143 | static unsigned HOST_WIDE_INT stored_args_watermark; |
144 | |
145 | /* stack_arg_under_construction is nonzero when an argument may be |
146 | initialized with a constructor call (including a C function that |
147 | returns a BLKmode struct) and expand_call must take special action |
148 | to make sure the object being constructed does not overlap the |
149 | argument list for the constructor call. */ |
150 | static int stack_arg_under_construction; |
151 | |
152 | static void precompute_register_parameters (int, struct arg_data *, int *); |
153 | static bool store_one_arg (struct arg_data *, rtx, int, int, int); |
154 | static void store_unaligned_arguments_into_pseudos (struct arg_data *, int); |
155 | static bool finalize_must_preallocate (bool, int, struct arg_data *, |
156 | struct args_size *); |
157 | static void precompute_arguments (int, struct arg_data *); |
158 | static void compute_argument_addresses (struct arg_data *, rtx, int); |
159 | static rtx rtx_for_function_call (tree, tree); |
160 | static void load_register_parameters (struct arg_data *, int, rtx *, int, |
161 | int, bool *); |
162 | static int special_function_p (const_tree, int); |
163 | static bool check_sibcall_argument_overlap_1 (rtx); |
164 | static bool check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, |
165 | bool); |
166 | static tree split_complex_types (tree); |
167 | |
168 | #ifdef REG_PARM_STACK_SPACE |
169 | static rtx save_fixed_argument_area (int, rtx, int *, int *); |
170 | static void restore_fixed_argument_area (rtx, rtx, int, int); |
171 | #endif |
172 | |
173 | /* Return true if bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing |
174 | stack region might already be in use. */ |
175 | |
176 | static bool |
177 | stack_region_maybe_used_p (poly_uint64 lower_bound, poly_uint64 upper_bound, |
178 | unsigned int reg_parm_stack_space) |
179 | { |
180 | unsigned HOST_WIDE_INT const_lower, const_upper; |
181 | const_lower = constant_lower_bound (a: lower_bound); |
182 | if (!upper_bound.is_constant (const_value: &const_upper)) |
183 | const_upper = HOST_WIDE_INT_M1U; |
184 | |
185 | if (const_upper > stack_usage_watermark) |
186 | return true; |
187 | |
188 | /* Don't worry about things in the fixed argument area; |
189 | it has already been saved. */ |
190 | const_lower = MAX (const_lower, reg_parm_stack_space); |
191 | const_upper = MIN (const_upper, highest_outgoing_arg_in_use); |
192 | for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i) |
193 | if (stack_usage_map[i]) |
194 | return true; |
195 | return false; |
196 | } |
197 | |
198 | /* Record that bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing |
199 | stack region are now in use. */ |
200 | |
201 | static void |
202 | mark_stack_region_used (poly_uint64 lower_bound, poly_uint64 upper_bound) |
203 | { |
204 | unsigned HOST_WIDE_INT const_lower, const_upper; |
205 | const_lower = constant_lower_bound (a: lower_bound); |
206 | if (upper_bound.is_constant (const_value: &const_upper) |
207 | && const_upper <= highest_outgoing_arg_in_use) |
208 | for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i) |
209 | stack_usage_map[i] = 1; |
210 | else |
211 | stack_usage_watermark = MIN (stack_usage_watermark, const_lower); |
212 | } |
213 | |
214 | /* Force FUNEXP into a form suitable for the address of a CALL, |
215 | and return that as an rtx. Also load the static chain register |
216 | if FNDECL is a nested function. |
217 | |
218 | CALL_FUSAGE points to a variable holding the prospective |
219 | CALL_INSN_FUNCTION_USAGE information. */ |
220 | |
221 | rtx |
222 | prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value, |
223 | rtx *call_fusage, int reg_parm_seen, int flags) |
224 | { |
225 | /* Make a valid memory address and copy constants through pseudo-regs, |
226 | but not for a constant address if -fno-function-cse. */ |
227 | if (GET_CODE (funexp) != SYMBOL_REF) |
228 | { |
229 | /* If it's an indirect call by descriptor, generate code to perform |
230 | runtime identification of the pointer and load the descriptor. */ |
231 | if ((flags & ECF_BY_DESCRIPTOR) && !flag_trampolines) |
232 | { |
233 | const int bit_val = targetm.calls.custom_function_descriptors; |
234 | rtx call_lab = gen_label_rtx (); |
235 | |
236 | gcc_assert (fndecl_or_type && TYPE_P (fndecl_or_type)); |
237 | fndecl_or_type |
238 | = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE, |
239 | fndecl_or_type); |
240 | DECL_STATIC_CHAIN (fndecl_or_type) = 1; |
241 | rtx chain = targetm.calls.static_chain (fndecl_or_type, false); |
242 | |
243 | if (GET_MODE (funexp) != Pmode) |
244 | funexp = convert_memory_address (Pmode, funexp); |
245 | |
246 | /* Avoid long live ranges around function calls. */ |
247 | funexp = copy_to_mode_reg (Pmode, funexp); |
248 | |
249 | if (REG_P (chain)) |
250 | emit_insn (gen_rtx_CLOBBER (VOIDmode, chain)); |
251 | |
252 | /* Emit the runtime identification pattern. */ |
253 | rtx mask = gen_rtx_AND (Pmode, funexp, GEN_INT (bit_val)); |
254 | emit_cmp_and_jump_insns (mask, const0_rtx, EQ, NULL_RTX, Pmode, 1, |
255 | call_lab); |
256 | |
257 | /* Statically predict the branch as very likely taken. */ |
258 | rtx_insn *insn = get_last_insn (); |
259 | if (JUMP_P (insn)) |
260 | predict_insn_def (insn, PRED_BUILTIN_EXPECT, TAKEN); |
261 | |
262 | /* Load the descriptor. */ |
263 | rtx mem = gen_rtx_MEM (ptr_mode, |
264 | plus_constant (Pmode, funexp, - bit_val)); |
265 | MEM_NOTRAP_P (mem) = 1; |
266 | mem = convert_memory_address (Pmode, mem); |
267 | emit_move_insn (chain, mem); |
268 | |
269 | mem = gen_rtx_MEM (ptr_mode, |
270 | plus_constant (Pmode, funexp, |
271 | POINTER_SIZE / BITS_PER_UNIT |
272 | - bit_val)); |
273 | MEM_NOTRAP_P (mem) = 1; |
274 | mem = convert_memory_address (Pmode, mem); |
275 | emit_move_insn (funexp, mem); |
276 | |
277 | emit_label (call_lab); |
278 | |
279 | if (REG_P (chain)) |
280 | { |
281 | use_reg (fusage: call_fusage, reg: chain); |
282 | STATIC_CHAIN_REG_P (chain) = 1; |
283 | } |
284 | |
285 | /* Make sure we're not going to be overwritten below. */ |
286 | gcc_assert (!static_chain_value); |
287 | } |
288 | |
289 | /* If we are using registers for parameters, force the |
290 | function address into a register now. */ |
291 | funexp = ((reg_parm_seen |
292 | && targetm.small_register_classes_for_mode_p (FUNCTION_MODE)) |
293 | ? force_not_mem (memory_address (FUNCTION_MODE, funexp)) |
294 | : memory_address (FUNCTION_MODE, funexp)); |
295 | } |
296 | else |
297 | { |
298 | /* funexp could be a SYMBOL_REF that represents a function pointer and is |
299 | therefore of ptr_mode. In this case, it should be converted into address |
300 | mode (Pmode) to be a valid address for the memory rtx pattern. See PR 64971. */ |
301 | if (GET_MODE (funexp) != Pmode) |
302 | funexp = convert_memory_address (Pmode, funexp); |
303 | |
304 | if (!(flags & ECF_SIBCALL)) |
305 | { |
306 | if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse) |
307 | funexp = force_reg (Pmode, funexp); |
308 | } |
309 | } |
310 | |
311 | if (static_chain_value != 0 |
312 | && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL |
313 | || DECL_STATIC_CHAIN (fndecl_or_type))) |
314 | { |
315 | rtx chain; |
316 | |
317 | chain = targetm.calls.static_chain (fndecl_or_type, false); |
318 | static_chain_value = convert_memory_address (Pmode, static_chain_value); |
319 | |
320 | emit_move_insn (chain, static_chain_value); |
321 | if (REG_P (chain)) |
322 | { |
323 | use_reg (fusage: call_fusage, reg: chain); |
324 | STATIC_CHAIN_REG_P (chain) = 1; |
325 | } |
326 | } |
327 | |
328 | return funexp; |
329 | } |
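/* Illustrative sketch (not GCC code) of the runtime test that
   prepare_call_address emits for an ECF_BY_DESCRIPTOR call when
   -fno-trampolines is in effect, in C-like pseudocode:

     if (funexp & bit_val)                 // low bits flag a descriptor
       {
         void **desc = (void **) (funexp - bit_val);
         chain  = desc[0];                 // static chain word
         funexp = desc[1];                 // real code address
       }

   where bit_val is targetm.calls.custom_function_descriptors.  Plain
   code addresses, whose low bits are clear, branch around the loads.  */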
330 | |
331 | /* Generate instructions to call function FUNEXP, |
332 | and optionally pop the results. |
333 | The CALL_INSN is the first insn generated. |
334 | |
335 | FNDECL is the declaration node of the function. This is given to the |
336 | hook TARGET_RETURN_POPS_ARGS to determine whether this function pops |
337 | its own args. |
338 | |
339 | FUNTYPE is the data type of the function. This is given to the hook |
340 | TARGET_RETURN_POPS_ARGS to determine whether this function pops its |
341 | own args. We used to allow an identifier for library functions, but |
342 | that doesn't work when the return type is an aggregate type and the |
343 | calling convention says that the pointer to this aggregate is to be |
344 | popped by the callee. |
345 | |
346 | STACK_SIZE is the number of bytes of arguments on the stack, |
347 | ROUNDED_STACK_SIZE is that number rounded up to |
348 | PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is |
349 | both to put into the call insn and to generate explicit popping |
350 | code if necessary. |
351 | |
352 | STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value. |
353 | It is zero if this call doesn't want a structure value. |
354 | |
355 | NEXT_ARG_REG is the rtx that results from executing |
356 | targetm.calls.function_arg (&args_so_far, |
357 | function_arg_info::end_marker ()); |
358 | just after all the args have had their registers assigned. |
359 | This could be whatever you like, but normally it is the first |
360 | arg-register beyond those used for args in this call, |
361 | or 0 if all the arg-registers are used in this call. |
362 | It is passed on to `gen_call' so you can put this info in the call insn. |
363 | |
364 | VALREG is a hard register in which a value is returned, |
365 | or 0 if the call does not return a value. |
366 | |
367 | OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before |
368 | the args to this call were processed. |
369 | We restore `inhibit_defer_pop' to that value. |
370 | |
371 | CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that |
372 | denote registers used by the called function. */ |
373 | |
374 | static void |
375 | emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED, |
376 | tree funtype ATTRIBUTE_UNUSED, |
377 | poly_int64 stack_size ATTRIBUTE_UNUSED, |
378 | poly_int64 rounded_stack_size, |
379 | poly_int64 struct_value_size ATTRIBUTE_UNUSED, |
380 | rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg, |
381 | int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags, |
382 | cumulative_args_t args_so_far ATTRIBUTE_UNUSED) |
383 | { |
384 | rtx rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode); |
385 | rtx call, funmem, pat; |
386 | bool already_popped = false; |
387 | poly_int64 n_popped = 0; |
388 | |
389 | /* Sibling call patterns never pop arguments (no sibcall(_value)_pop |
390 | patterns exist). Any popping that the callee does on return will |
391 | be from our caller's frame rather than ours. */ |
392 | if (!(ecf_flags & ECF_SIBCALL)) |
393 | { |
394 | n_popped += targetm.calls.return_pops_args (fndecl, funtype, stack_size); |
395 | |
396 | #ifdef CALL_POPS_ARGS |
397 | n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far)); |
398 | #endif |
399 | } |
400 | |
401 | /* Ensure the address is valid. A SYMBOL_REF is already valid, so no |
402 | conversion is needed, and we don't want to load it into a register as an |
403 | optimization, because prepare_call_address already did that if needed. */ |
404 | if (GET_CODE (funexp) != SYMBOL_REF) |
405 | funexp = memory_address (FUNCTION_MODE, funexp); |
406 | |
407 | funmem = gen_rtx_MEM (FUNCTION_MODE, funexp); |
408 | if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL) |
409 | { |
410 | tree t = fndecl; |
411 | |
412 | /* Although a built-in FUNCTION_DECL and its non-__builtin |
413 | counterpart compare equal and get a shared mem_attrs, they |
414 | produce different dump output in compare-debug compilations, |
415 | if an entry gets garbage collected in one compilation, then |
416 | adds a different (but equivalent) entry, while the other |
417 | doesn't run the garbage collector at the same spot and then |
418 | shares the mem_attr with the equivalent entry. */ |
419 | if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL) |
420 | { |
421 | tree t2 = builtin_decl_explicit (fncode: DECL_FUNCTION_CODE (decl: t)); |
422 | if (t2) |
423 | t = t2; |
424 | } |
425 | |
426 | set_mem_expr (funmem, t); |
427 | } |
428 | else if (fntree) |
429 | set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree))); |
430 | |
431 | if (ecf_flags & ECF_SIBCALL) |
432 | { |
433 | if (valreg) |
434 | pat = targetm.gen_sibcall_value (valreg, funmem, |
435 | rounded_stack_size_rtx, |
436 | next_arg_reg, NULL_RTX); |
437 | else |
438 | pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx, |
439 | next_arg_reg, |
440 | gen_int_mode (struct_value_size, Pmode)); |
441 | } |
442 | /* If the target has "call" or "call_value" insns, then prefer them |
443 | if no arguments are actually popped. If the target does not have |
444 | "call" or "call_value" insns, then we must use the popping versions |
445 | even if the call has no arguments to pop. */ |
446 | else if (maybe_ne (a: n_popped, b: 0) |
447 | || !(valreg |
448 | ? targetm.have_call_value () |
449 | : targetm.have_call ())) |
450 | { |
451 | rtx n_pop = gen_int_mode (n_popped, Pmode); |
452 | |
453 | /* If this subroutine pops its own args, record that in the call insn |
454 | if possible, for the sake of frame pointer elimination. */ |
455 | |
456 | if (valreg) |
457 | pat = targetm.gen_call_value_pop (valreg, funmem, |
458 | rounded_stack_size_rtx, |
459 | next_arg_reg, n_pop); |
460 | else |
461 | pat = targetm.gen_call_pop (funmem, rounded_stack_size_rtx, |
462 | next_arg_reg, n_pop); |
463 | |
464 | already_popped = true; |
465 | } |
466 | else |
467 | { |
468 | if (valreg) |
469 | pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx, |
470 | next_arg_reg, NULL_RTX); |
471 | else |
472 | pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg, |
473 | gen_int_mode (struct_value_size, Pmode)); |
474 | } |
475 | emit_insn (pat); |
476 | |
477 | /* Find the call we just emitted. */ |
478 | rtx_call_insn *call_insn = last_call_insn (); |
479 | |
480 | /* Some targets create a fresh MEM instead of reusing the one provided |
481 | above. Set its MEM_EXPR. */ |
482 | call = get_call_rtx_from (call_insn); |
483 | if (call |
484 | && MEM_EXPR (XEXP (call, 0)) == NULL_TREE |
485 | && MEM_EXPR (funmem) != NULL_TREE) |
486 | set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem)); |
487 | |
488 | /* Put the register usage information there. */ |
489 | add_function_usage_to (call_insn, call_fusage); |
490 | |
491 | /* If this is a const call, then set the insn's unchanging bit. */ |
492 | if (ecf_flags & ECF_CONST) |
493 | RTL_CONST_CALL_P (call_insn) = 1; |
494 | |
495 | /* If this is a pure call, then set the insn's unchanging bit. */ |
496 | if (ecf_flags & ECF_PURE) |
497 | RTL_PURE_CALL_P (call_insn) = 1; |
498 | |
499 | /* If this is a looping const or pure call, then set the corresponding bit. */ |
500 | if (ecf_flags & ECF_LOOPING_CONST_OR_PURE) |
501 | RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1; |
502 | |
503 | /* Create a nothrow REG_EH_REGION note, if needed. */ |
504 | make_reg_eh_region_note (insn: call_insn, ecf_flags, lp_nr: 0); |
505 | |
506 | if (ecf_flags & ECF_NORETURN) |
507 | add_reg_note (call_insn, REG_NORETURN, const0_rtx); |
508 | |
509 | if (ecf_flags & ECF_RETURNS_TWICE) |
510 | { |
511 | add_reg_note (call_insn, REG_SETJMP, const0_rtx); |
512 | cfun->calls_setjmp = 1; |
513 | } |
514 | |
515 | SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0); |
516 | |
517 | /* Restore this now, so that we do defer pops for this call's args |
518 | if the context of the call as a whole permits. */ |
519 | inhibit_defer_pop = old_inhibit_defer_pop; |
520 | |
521 | if (maybe_ne (a: n_popped, b: 0)) |
522 | { |
523 | if (!already_popped) |
524 | CALL_INSN_FUNCTION_USAGE (call_insn) |
525 | = gen_rtx_EXPR_LIST (VOIDmode, |
526 | gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx), |
527 | CALL_INSN_FUNCTION_USAGE (call_insn)); |
528 | rounded_stack_size -= n_popped; |
529 | rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode); |
530 | stack_pointer_delta -= n_popped; |
531 | |
532 | add_args_size_note (call_insn, stack_pointer_delta); |
533 | |
534 | /* If popping is needed, stack realignment must use DRAP. */ |
535 | if (SUPPORTS_STACK_ALIGNMENT) |
536 | crtl->need_drap = true; |
537 | } |
538 | /* For noreturn calls when not accumulating outgoing args force |
539 | REG_ARGS_SIZE note to prevent crossjumping of calls with different |
540 | args sizes. */ |
541 | else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0) |
542 | add_args_size_note (call_insn, stack_pointer_delta); |
543 | |
544 | if (!ACCUMULATE_OUTGOING_ARGS) |
545 | { |
546 | /* If returning from the subroutine does not automatically pop the args, |
547 | we need an instruction to pop them sooner or later. |
548 | Perhaps do it now; perhaps just record how much space to pop later. |
549 | |
550 | If returning from the subroutine does pop the args, indicate that the |
551 | stack pointer will be changed. */ |
552 | |
553 | if (maybe_ne (a: rounded_stack_size, b: 0)) |
554 | { |
555 | if (ecf_flags & ECF_NORETURN) |
556 | /* Just pretend we did the pop. */ |
557 | stack_pointer_delta -= rounded_stack_size; |
558 | else if (flag_defer_pop && inhibit_defer_pop == 0 |
559 | && ! (ecf_flags & (ECF_CONST | ECF_PURE))) |
560 | pending_stack_adjust += rounded_stack_size; |
561 | else |
562 | adjust_stack (rounded_stack_size_rtx); |
563 | } |
564 | } |
565 | /* When we accumulate outgoing args, we must avoid any stack manipulations. |
566 | Restore the stack pointer to its original value now. Usually |
567 | ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions. |
568 | On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and |
569 | popping variants of functions exist as well. |
570 | |
571 | ??? We may optimize similar to defer_pop above, but it is |
572 | probably not worthwhile. |
573 | |
574 | ??? It will be worthwhile to enable combine_stack_adjustments even for |
575 | such machines. */ |
576 | else if (maybe_ne (a: n_popped, b: 0)) |
577 | anti_adjust_stack (gen_int_mode (n_popped, Pmode)); |
578 | } |
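/* Example (illustrative): for a callee-pops convention such as stdcall
   on 32-bit x86,

     void __attribute__ ((stdcall)) f (int a, int b);   // callee pops 8 bytes

   targetm.calls.return_pops_args reports a nonzero N_POPPED, so
   emit_call_1 uses the gen_call_pop / gen_call_value_pop patterns and
   reduces ROUNDED_STACK_SIZE and stack_pointer_delta accordingly.  */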
579 | |
580 | /* Determine if the function identified by FNDECL is one with |
581 | special properties we wish to know about. Modify FLAGS accordingly. |
582 | |
583 | For example, if the function might return more than one time (setjmp), then |
584 | set ECF_RETURNS_TWICE. |
585 | |
586 | Set ECF_MAY_BE_ALLOCA for any memory allocation function that might allocate |
587 | space from the stack such as alloca. */ |
588 | |
589 | static int |
590 | special_function_p (const_tree fndecl, int flags) |
591 | { |
592 | tree name_decl = DECL_NAME (fndecl); |
593 | |
594 | if (maybe_special_function_p (fndecl) |
595 | && IDENTIFIER_LENGTH (name_decl) <= 11) |
596 | { |
597 | const char *name = IDENTIFIER_POINTER (name_decl); |
598 | const char *tname = name; |
599 | |
600 | /* We assume that alloca will always be called by name. It |
601 | makes no sense to pass it as a pointer-to-function to |
602 | anything that does not understand its behavior. */ |
603 | if (IDENTIFIER_LENGTH (name_decl) == 6 |
604 | && name[0] == 'a' |
605 | && ! strcmp (s1: name, s2: "alloca")) |
606 | flags |= ECF_MAY_BE_ALLOCA; |
607 | |
608 | /* Disregard prefix _ or __. */ |
609 | if (name[0] == '_') |
610 | { |
611 | if (name[1] == '_') |
612 | tname += 2; |
613 | else |
614 | tname += 1; |
615 | } |
616 | |
617 | /* ECF_RETURNS_TWICE is safe even for -ffreestanding. */ |
618 | if (! strcmp (s1: tname, s2: "setjmp") |
619 | || ! strcmp (s1: tname, s2: "sigsetjmp") |
620 | || ! strcmp (s1: name, s2: "savectx") |
621 | || ! strcmp (s1: name, s2: "vfork") |
622 | || ! strcmp (s1: name, s2: "getcontext")) |
623 | flags |= ECF_RETURNS_TWICE; |
624 | } |
625 | |
626 | if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL |
627 | && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl))) |
628 | flags |= ECF_MAY_BE_ALLOCA; |
629 | |
630 | return flags; |
631 | } |
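/* Example (illustrative): for a declaration named "_setjmp" or
   "__sigsetjmp", the leading underscores are skipped, TNAME matches
   "setjmp" resp. "sigsetjmp", and ECF_RETURNS_TWICE is added; a
   declaration named "alloca", as well as any BUILT_IN_ALLOCA variant,
   gets ECF_MAY_BE_ALLOCA.  */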
632 | |
633 | /* Return fnspec for DECL. */ |
634 | |
635 | static attr_fnspec |
636 | decl_fnspec (tree fndecl) |
637 | { |
638 | tree attr; |
639 | tree type = TREE_TYPE (fndecl); |
640 | if (type) |
641 | { |
642 | attr = lookup_attribute (attr_name: "fn spec", TYPE_ATTRIBUTES (type)); |
643 | if (attr) |
644 | { |
645 | return TREE_VALUE (TREE_VALUE (attr)); |
646 | } |
647 | } |
648 | if (fndecl_built_in_p (node: fndecl, klass: BUILT_IN_NORMAL)) |
649 | return builtin_fnspec (fndecl); |
650 | return ""; |
651 | } |
652 | |
653 | /* Similar to special_function_p; return a set of ERF_ flags for the |
654 | function FNDECL. */ |
655 | static int |
656 | decl_return_flags (tree fndecl) |
657 | { |
658 | attr_fnspec fnspec = decl_fnspec (fndecl); |
659 | |
660 | unsigned int arg; |
661 | if (fnspec.returns_arg (arg_no: &arg)) |
662 | return ERF_RETURNS_ARG | arg; |
663 | |
664 | if (fnspec.returns_noalias_p ()) |
665 | return ERF_NOALIAS; |
666 | return 0; |
667 | } |
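/* Example (illustrative, assuming builtin_fnspec encodes these
   properties): for __builtin_memcpy, whose fnspec says the function
   returns its first argument, returns_arg sets ARG to 0 and the result
   is ERF_RETURNS_ARG | 0; for an allocation function whose fnspec marks
   the returned pointer as not aliasing anything else, the result is
   ERF_NOALIAS.  */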
668 | |
669 | /* Return true when FNDECL represents a call to setjmp. */ |
670 | |
671 | bool |
672 | setjmp_call_p (const_tree fndecl) |
673 | { |
674 | if (DECL_IS_RETURNS_TWICE (fndecl)) |
675 | return true; |
676 | if (special_function_p (fndecl, flags: 0) & ECF_RETURNS_TWICE) |
677 | return true; |
678 | |
679 | return false; |
680 | } |
681 | |
682 | |
683 | /* Return true if STMT may be an alloca call. */ |
684 | |
685 | bool |
686 | gimple_maybe_alloca_call_p (const gimple *stmt) |
687 | { |
688 | tree fndecl; |
689 | |
690 | if (!is_gimple_call (gs: stmt)) |
691 | return false; |
692 | |
693 | fndecl = gimple_call_fndecl (gs: stmt); |
694 | if (fndecl && (special_function_p (fndecl, flags: 0) & ECF_MAY_BE_ALLOCA)) |
695 | return true; |
696 | |
697 | return false; |
698 | } |
699 | |
700 | /* Return true if STMT is a builtin alloca call. */ |
701 | |
702 | bool |
703 | gimple_alloca_call_p (const gimple *stmt) |
704 | { |
705 | tree fndecl; |
706 | |
707 | if (!is_gimple_call (gs: stmt)) |
708 | return false; |
709 | |
710 | fndecl = gimple_call_fndecl (gs: stmt); |
711 | if (fndecl && fndecl_built_in_p (node: fndecl, klass: BUILT_IN_NORMAL)) |
712 | switch (DECL_FUNCTION_CODE (decl: fndecl)) |
713 | { |
714 | CASE_BUILT_IN_ALLOCA: |
715 | return gimple_call_num_args (gs: stmt) > 0; |
716 | default: |
717 | break; |
718 | } |
719 | |
720 | return false; |
721 | } |
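/* Note the difference between the two predicates above:
   gimple_maybe_alloca_call_p also matches a user-declared function that
   merely happens to be named "alloca" (via special_function_p), whereas
   gimple_alloca_call_p only matches the BUILT_IN_ALLOCA family covered
   by CASE_BUILT_IN_ALLOCA, e.g. a statement expanded from

     p = __builtin_alloca (n);   // illustrative

   with at least one argument.  */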
722 | |
723 | /* Return true when EXP is a builtin alloca call. */ |
724 | |
725 | bool |
726 | alloca_call_p (const_tree exp) |
727 | { |
728 | tree fndecl; |
729 | if (TREE_CODE (exp) == CALL_EXPR |
730 | && (fndecl = get_callee_fndecl (exp)) |
731 | && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL) |
732 | switch (DECL_FUNCTION_CODE (decl: fndecl)) |
733 | { |
734 | CASE_BUILT_IN_ALLOCA: |
735 | return true; |
736 | default: |
737 | break; |
738 | } |
739 | |
740 | return false; |
741 | } |
742 | |
743 | /* Return TRUE if FNDECL is either a TM builtin or a TM cloned |
744 | function. Return FALSE otherwise. */ |
745 | |
746 | static bool |
747 | is_tm_builtin (const_tree fndecl) |
748 | { |
749 | if (fndecl == NULL) |
750 | return false; |
751 | |
752 | if (decl_is_tm_clone (fndecl)) |
753 | return true; |
754 | |
755 | if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL) |
756 | { |
757 | switch (DECL_FUNCTION_CODE (decl: fndecl)) |
758 | { |
759 | case BUILT_IN_TM_COMMIT: |
760 | case BUILT_IN_TM_COMMIT_EH: |
761 | case BUILT_IN_TM_ABORT: |
762 | case BUILT_IN_TM_IRREVOCABLE: |
763 | case BUILT_IN_TM_GETTMCLONE_IRR: |
764 | case BUILT_IN_TM_MEMCPY: |
765 | case BUILT_IN_TM_MEMMOVE: |
766 | case BUILT_IN_TM_MEMSET: |
767 | CASE_BUILT_IN_TM_STORE (1): |
768 | CASE_BUILT_IN_TM_STORE (2): |
769 | CASE_BUILT_IN_TM_STORE (4): |
770 | CASE_BUILT_IN_TM_STORE (8): |
771 | CASE_BUILT_IN_TM_STORE (FLOAT): |
772 | CASE_BUILT_IN_TM_STORE (DOUBLE): |
773 | CASE_BUILT_IN_TM_STORE (LDOUBLE): |
774 | CASE_BUILT_IN_TM_STORE (M64): |
775 | CASE_BUILT_IN_TM_STORE (M128): |
776 | CASE_BUILT_IN_TM_STORE (M256): |
777 | CASE_BUILT_IN_TM_LOAD (1): |
778 | CASE_BUILT_IN_TM_LOAD (2): |
779 | CASE_BUILT_IN_TM_LOAD (4): |
780 | CASE_BUILT_IN_TM_LOAD (8): |
781 | CASE_BUILT_IN_TM_LOAD (FLOAT): |
782 | CASE_BUILT_IN_TM_LOAD (DOUBLE): |
783 | CASE_BUILT_IN_TM_LOAD (LDOUBLE): |
784 | CASE_BUILT_IN_TM_LOAD (M64): |
785 | CASE_BUILT_IN_TM_LOAD (M128): |
786 | CASE_BUILT_IN_TM_LOAD (M256): |
787 | case BUILT_IN_TM_LOG: |
788 | case BUILT_IN_TM_LOG_1: |
789 | case BUILT_IN_TM_LOG_2: |
790 | case BUILT_IN_TM_LOG_4: |
791 | case BUILT_IN_TM_LOG_8: |
792 | case BUILT_IN_TM_LOG_FLOAT: |
793 | case BUILT_IN_TM_LOG_DOUBLE: |
794 | case BUILT_IN_TM_LOG_LDOUBLE: |
795 | case BUILT_IN_TM_LOG_M64: |
796 | case BUILT_IN_TM_LOG_M128: |
797 | case BUILT_IN_TM_LOG_M256: |
798 | return true; |
799 | default: |
800 | break; |
801 | } |
802 | } |
803 | return false; |
804 | } |
805 | |
806 | /* Detect flags (function attributes) from the function decl or type node. */ |
807 | |
808 | int |
809 | flags_from_decl_or_type (const_tree exp) |
810 | { |
811 | int flags = 0; |
812 | |
813 | if (DECL_P (exp)) |
814 | { |
815 | /* The function exp may have the `malloc' attribute. */ |
816 | if (DECL_IS_MALLOC (exp)) |
817 | flags |= ECF_MALLOC; |
818 | |
819 | /* The function exp may have the `returns_twice' attribute. */ |
820 | if (DECL_IS_RETURNS_TWICE (exp)) |
821 | flags |= ECF_RETURNS_TWICE; |
822 | |
823 | /* Process the pure and const attributes. */ |
824 | if (TREE_READONLY (exp)) |
825 | flags |= ECF_CONST; |
826 | if (DECL_PURE_P (exp)) |
827 | flags |= ECF_PURE; |
828 | if (DECL_LOOPING_CONST_OR_PURE_P (exp)) |
829 | flags |= ECF_LOOPING_CONST_OR_PURE; |
830 | |
831 | if (DECL_IS_NOVOPS (exp)) |
832 | flags |= ECF_NOVOPS; |
833 | if (lookup_attribute (attr_name: "leaf", DECL_ATTRIBUTES (exp))) |
834 | flags |= ECF_LEAF; |
835 | if (lookup_attribute (attr_name: "cold", DECL_ATTRIBUTES (exp))) |
836 | flags |= ECF_COLD; |
837 | |
838 | if (TREE_NOTHROW (exp)) |
839 | flags |= ECF_NOTHROW; |
840 | |
841 | if (flag_tm) |
842 | { |
843 | if (is_tm_builtin (fndecl: exp)) |
844 | flags |= ECF_TM_BUILTIN; |
845 | else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0 |
846 | || lookup_attribute (attr_name: "transaction_pure", |
847 | TYPE_ATTRIBUTES (TREE_TYPE (exp)))) |
848 | flags |= ECF_TM_PURE; |
849 | } |
850 | |
851 | if (lookup_attribute (attr_name: "expected_throw", DECL_ATTRIBUTES (exp))) |
852 | flags |= ECF_XTHROW; |
853 | |
854 | flags = special_function_p (fndecl: exp, flags); |
855 | |
856 | if ((flags & ECF_CONST) == 0 |
857 | && lookup_attribute (attr_name: "unsequenced noptr", |
858 | TYPE_ATTRIBUTES (TREE_TYPE (exp)))) |
859 | { |
860 | /* [[unsequenced]] with no pointers in arguments is like |
861 | [[gnu::const]] without finite guarantee. */ |
862 | flags |= ECF_CONST; |
863 | if ((flags & ECF_PURE) == 0) |
864 | flags |= ECF_LOOPING_CONST_OR_PURE; |
865 | } |
866 | if ((flags & (ECF_CONST | ECF_PURE)) == 0 |
867 | && lookup_attribute (attr_name: "reproducible noptr", |
868 | TYPE_ATTRIBUTES (TREE_TYPE (exp)))) |
869 | /* [[reproducible]] with no pointers in arguments is like |
870 | [[gnu::pure]] without finite guarantee. */ |
871 | flags |= ECF_PURE | ECF_LOOPING_CONST_OR_PURE; |
872 | } |
873 | else if (TYPE_P (exp)) |
874 | { |
875 | if (TYPE_READONLY (exp)) |
876 | flags |= ECF_CONST; |
877 | |
878 | if (flag_tm |
879 | && ((flags & ECF_CONST) != 0 |
880 | || lookup_attribute (attr_name: "transaction_pure", TYPE_ATTRIBUTES (exp)))) |
881 | flags |= ECF_TM_PURE; |
882 | |
883 | if ((flags & ECF_CONST) == 0 |
884 | && lookup_attribute (attr_name: "unsequenced noptr", TYPE_ATTRIBUTES (exp))) |
885 | /* [[unsequenced]] with no pointers in arguments is like |
886 | [[gnu::const]] without finite guarantee. */ |
887 | flags |= ECF_CONST | ECF_LOOPING_CONST_OR_PURE; |
888 | if ((flags & ECF_CONST) == 0 |
889 | && lookup_attribute (attr_name: "reproducible noptr", TYPE_ATTRIBUTES (exp))) |
890 | /* [[reproducible]] with no pointers in arguments is like |
891 | [[gnu::pure]] without finite guarantee. */ |
892 | flags |= ECF_PURE | ECF_LOOPING_CONST_OR_PURE; |
893 | } |
894 | else |
895 | gcc_unreachable (); |
896 | |
897 | if (TREE_THIS_VOLATILE (exp)) |
898 | { |
899 | flags |= ECF_NORETURN; |
900 | if (flags & (ECF_CONST|ECF_PURE)) |
901 | flags |= ECF_LOOPING_CONST_OR_PURE; |
902 | } |
903 | |
904 | return flags; |
905 | } |
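/* Example (illustrative): for a declaration such as

     extern int f (int) __attribute__ ((const, noreturn));

   TREE_READONLY yields ECF_CONST, TREE_THIS_VOLATILE yields ECF_NORETURN,
   and because a const (or pure) function that never returns cannot be
   assumed to terminate, ECF_LOOPING_CONST_OR_PURE is added as well.  */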
906 | |
907 | /* Detect flags from a CALL_EXPR. */ |
908 | |
909 | int |
910 | call_expr_flags (const_tree t) |
911 | { |
912 | int flags; |
913 | tree decl = get_callee_fndecl (t); |
914 | |
915 | if (decl) |
916 | flags = flags_from_decl_or_type (exp: decl); |
917 | else if (CALL_EXPR_FN (t) == NULL_TREE) |
918 | flags = internal_fn_flags (CALL_EXPR_IFN (t)); |
919 | else |
920 | { |
921 | tree type = TREE_TYPE (CALL_EXPR_FN (t)); |
922 | if (type && TREE_CODE (type) == POINTER_TYPE) |
923 | flags = flags_from_decl_or_type (TREE_TYPE (type)); |
924 | else |
925 | flags = 0; |
926 | if (CALL_EXPR_BY_DESCRIPTOR (t)) |
927 | flags |= ECF_BY_DESCRIPTOR; |
928 | } |
929 | |
930 | return flags; |
931 | } |
932 | |
933 | /* Return true if ARG should be passed by invisible reference. */ |
934 | |
935 | bool |
936 | pass_by_reference (CUMULATIVE_ARGS *ca, function_arg_info arg) |
937 | { |
938 | if (tree type = arg.type) |
939 | { |
940 | /* If this type contains non-trivial constructors, then it is |
941 | forbidden for the middle-end to create any new copies. */ |
942 | if (TREE_ADDRESSABLE (type)) |
943 | return true; |
944 | |
945 | /* GCC post 3.4 passes *all* variable sized types by reference. */ |
946 | if (!TYPE_SIZE (type) || !poly_int_tree_p (TYPE_SIZE (type))) |
947 | return true; |
948 | |
949 | /* If a record type should be passed the same as its first (and only) |
950 | member, use the type and mode of that member. */ |
951 | if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type)) |
952 | { |
953 | arg.type = TREE_TYPE (first_field (type)); |
954 | arg.mode = TYPE_MODE (arg.type); |
955 | } |
956 | } |
957 | |
958 | return targetm.calls.pass_by_reference (pack_cumulative_args (arg: ca), arg); |
959 | } |
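/* Example (illustrative): in C++,

     struct S { S (const S &); };   // non-trivial copy constructor
     void g (S);                    // S is TREE_ADDRESSABLE

   the parameter of g is passed by invisible reference on every target,
   because the middle end may not introduce extra copies of S.  A
   variable-sized type (no constant TYPE_SIZE) is likewise always passed
   by reference; everything else is left to the target hook.  */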
960 | |
961 | /* Return true if TYPE should be passed by reference when passed to |
962 | the "..." arguments of a function. */ |
963 | |
964 | bool |
965 | pass_va_arg_by_reference (tree type) |
966 | { |
967 | return pass_by_reference (NULL, arg: function_arg_info (type, /*named=*/false)); |
968 | } |
969 | |
970 | /* Decide whether ARG, which occurs in the state described by CA, |
971 | should be passed by reference. Return true if so and update |
972 | ARG accordingly. */ |
973 | |
974 | bool |
975 | apply_pass_by_reference_rules (CUMULATIVE_ARGS *ca, function_arg_info &arg) |
976 | { |
977 | if (pass_by_reference (ca, arg)) |
978 | { |
979 | arg.type = build_pointer_type (arg.type); |
980 | arg.mode = TYPE_MODE (arg.type); |
981 | arg.pass_by_reference = true; |
982 | return true; |
983 | } |
984 | return false; |
985 | } |
986 | |
987 | /* Return true if ARG, which is passed by reference, should be callee |
988 | copied instead of caller copied. */ |
989 | |
990 | bool |
991 | reference_callee_copied (CUMULATIVE_ARGS *ca, const function_arg_info &arg) |
992 | { |
993 | if (arg.type && TREE_ADDRESSABLE (arg.type)) |
994 | return false; |
995 | return targetm.calls.callee_copies (pack_cumulative_args (arg: ca), arg); |
996 | } |
997 | |
998 | |
999 | /* Precompute all register parameters as described by ARGS, storing values |
1000 | into fields within the ARGS array. |
1001 | |
1002 | NUM_ACTUALS indicates the total number of elements in the ARGS array. |
1003 | |
1004 | Set REG_PARM_SEEN if we encounter a register parameter. */ |
1005 | |
1006 | static void |
1007 | precompute_register_parameters (int num_actuals, struct arg_data *args, |
1008 | int *reg_parm_seen) |
1009 | { |
1010 | int i; |
1011 | |
1012 | *reg_parm_seen = 0; |
1013 | |
1014 | for (i = 0; i < num_actuals; i++) |
1015 | if (args[i].reg != 0 && ! args[i].pass_on_stack) |
1016 | { |
1017 | *reg_parm_seen = 1; |
1018 | |
1019 | if (args[i].value == 0) |
1020 | { |
1021 | push_temp_slots (); |
1022 | args[i].value = expand_normal (exp: args[i].tree_value); |
1023 | preserve_temp_slots (args[i].value); |
1024 | pop_temp_slots (); |
1025 | } |
1026 | |
1027 | /* If we are to promote the function arg to a wider mode, |
1028 | do it now. */ |
1029 | |
1030 | machine_mode old_mode = TYPE_MODE (TREE_TYPE (args[i].tree_value)); |
1031 | |
1032 | /* Some ABIs require scalar floating point modes to be passed |
1033 | in a wider scalar integer mode. We need to explicitly |
1034 | reinterpret to an integer mode of the correct precision |
1035 | before extending to the desired result. */ |
1036 | if (SCALAR_INT_MODE_P (args[i].mode) |
1037 | && SCALAR_FLOAT_MODE_P (old_mode) |
1038 | && known_gt (GET_MODE_SIZE (args[i].mode), |
1039 | GET_MODE_SIZE (old_mode))) |
1040 | args[i].value = convert_float_to_wider_int (mode: args[i].mode, fmode: old_mode, |
1041 | x: args[i].value); |
1042 | else if (args[i].mode != old_mode) |
1043 | args[i].value = convert_modes (mode: args[i].mode, oldmode: old_mode, |
1044 | x: args[i].value, unsignedp: args[i].unsignedp); |
1045 | |
1046 | /* If the value is a non-legitimate constant, force it into a |
1047 | pseudo now. TLS symbols sometimes need a call to resolve. */ |
1048 | if (CONSTANT_P (args[i].value) |
1049 | && (!targetm.legitimate_constant_p (args[i].mode, args[i].value) |
1050 | || targetm.precompute_tls_p (args[i].mode, args[i].value))) |
1051 | args[i].value = force_reg (args[i].mode, args[i].value); |
1052 | |
1053 | /* If we're going to have to load the value by parts, pull the |
1054 | parts into pseudos. The part extraction process can involve |
1055 | non-trivial computation. */ |
1056 | if (GET_CODE (args[i].reg) == PARALLEL) |
1057 | { |
1058 | tree type = TREE_TYPE (args[i].tree_value); |
1059 | args[i].parallel_value |
1060 | = emit_group_load_into_temps (args[i].reg, args[i].value, |
1061 | type, int_size_in_bytes (type)); |
1062 | } |
1063 | |
1064 | /* If the value is expensive, and we are inside an appropriately |
1065 | short loop, put the value into a pseudo and then put the pseudo |
1066 | into the hard reg. |
1067 | |
1068 | For small register classes, also do this if this call uses |
1069 | register parameters. This is to avoid reload conflicts while |
1070 | loading the parameter registers. */ |
1071 | |
1072 | else if ((! (REG_P (args[i].value) |
1073 | || (GET_CODE (args[i].value) == SUBREG |
1074 | && REG_P (SUBREG_REG (args[i].value))))) |
1075 | && args[i].mode != BLKmode |
1076 | && (set_src_cost (x: args[i].value, mode: args[i].mode, |
1077 | speed_p: optimize_insn_for_speed_p ()) |
1078 | > COSTS_N_INSNS (1)) |
1079 | && ((*reg_parm_seen |
1080 | && targetm.small_register_classes_for_mode_p (args[i].mode)) |
1081 | || optimize)) |
1082 | args[i].value = copy_to_mode_reg (args[i].mode, args[i].value); |
1083 | } |
1084 | } |
1085 | |
1086 | #ifdef REG_PARM_STACK_SPACE |
1087 | |
1088 | /* The argument list is the property of the called routine and it |
1089 | may clobber it. If the fixed area has been used for previous |
1090 | parameters, we must save and restore it. */ |
1091 | |
1092 | static rtx |
1093 | save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save) |
1094 | { |
1095 | unsigned int low; |
1096 | unsigned int high; |
1097 | |
1098 | /* Compute the boundary of the area that needs to be saved, if any. */ |
1099 | high = reg_parm_stack_space; |
1100 | if (ARGS_GROW_DOWNWARD) |
1101 | high += 1; |
1102 | |
1103 | if (high > highest_outgoing_arg_in_use) |
1104 | high = highest_outgoing_arg_in_use; |
1105 | |
1106 | for (low = 0; low < high; low++) |
1107 | if (stack_usage_map[low] != 0 || low >= stack_usage_watermark) |
1108 | { |
1109 | int num_to_save; |
1110 | machine_mode save_mode; |
1111 | int delta; |
1112 | rtx addr; |
1113 | rtx stack_area; |
1114 | rtx save_area; |
1115 | |
1116 | while (stack_usage_map[--high] == 0) |
1117 | ; |
1118 | |
1119 | *low_to_save = low; |
1120 | *high_to_save = high; |
1121 | |
1122 | num_to_save = high - low + 1; |
1123 | |
1124 | /* If we don't have the required alignment, we must do this |
1125 | in BLKmode. */ |
1126 | scalar_int_mode imode; |
1127 | if (int_mode_for_size (size: num_to_save * BITS_PER_UNIT, limit: 1).exists (mode: &imode) |
1128 | && (low & (MIN (GET_MODE_SIZE (imode), |
1129 | BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)) == 0) |
1130 | save_mode = imode; |
1131 | else |
1132 | save_mode = BLKmode; |
1133 | |
1134 | if (ARGS_GROW_DOWNWARD) |
1135 | delta = -high; |
1136 | else |
1137 | delta = low; |
1138 | |
1139 | addr = plus_constant (Pmode, argblock, delta); |
1140 | stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr)); |
1141 | |
1142 | set_mem_align (stack_area, PARM_BOUNDARY); |
1143 | if (save_mode == BLKmode) |
1144 | { |
1145 | save_area = assign_stack_temp (BLKmode, num_to_save); |
1146 | emit_block_move (validize_mem (save_area), stack_area, |
1147 | GEN_INT (num_to_save), BLOCK_OP_CALL_PARM); |
1148 | } |
1149 | else |
1150 | { |
1151 | save_area = gen_reg_rtx (save_mode); |
1152 | emit_move_insn (save_area, stack_area); |
1153 | } |
1154 | |
1155 | return save_area; |
1156 | } |
1157 | |
1158 | return NULL_RTX; |
1159 | } |
1160 | |
1161 | static void |
1162 | restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save) |
1163 | { |
1164 | machine_mode save_mode = GET_MODE (save_area); |
1165 | int delta; |
1166 | rtx addr, stack_area; |
1167 | |
1168 | if (ARGS_GROW_DOWNWARD) |
1169 | delta = -high_to_save; |
1170 | else |
1171 | delta = low_to_save; |
1172 | |
1173 | addr = plus_constant (Pmode, argblock, delta); |
1174 | stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr)); |
1175 | set_mem_align (stack_area, PARM_BOUNDARY); |
1176 | |
1177 | if (save_mode != BLKmode) |
1178 | emit_move_insn (stack_area, save_area); |
1179 | else |
1180 | emit_block_move (stack_area, validize_mem (save_area), |
1181 | GEN_INT (high_to_save - low_to_save + 1), |
1182 | BLOCK_OP_CALL_PARM); |
1183 | } |
1184 | #endif /* REG_PARM_STACK_SPACE */ |
1185 | |
1186 | /* If any elements in ARGS refer to parameters that are to be passed in |
1187 | registers, but not in memory, and whose alignment does not permit a |
1188 | direct copy into registers, copy the values into a group of pseudos |
1189 | which we will later copy into the appropriate hard registers. |
1190 | |
1191 | Pseudos for each unaligned argument will be stored into the array |
1192 | args[argnum].aligned_regs. The caller is responsible for deallocating |
1193 | the aligned_regs array if it is nonzero. */ |
1194 | |
1195 | static void |
1196 | store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals) |
1197 | { |
1198 | int i, j; |
1199 | |
1200 | for (i = 0; i < num_actuals; i++) |
1201 | if (args[i].reg != 0 && ! args[i].pass_on_stack |
1202 | && GET_CODE (args[i].reg) != PARALLEL |
1203 | && args[i].mode == BLKmode |
1204 | && MEM_P (args[i].value) |
1205 | && (MEM_ALIGN (args[i].value) |
1206 | < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD))) |
1207 | { |
1208 | int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value)); |
1209 | int endian_correction = 0; |
1210 | |
1211 | if (args[i].partial) |
1212 | { |
1213 | gcc_assert (args[i].partial % UNITS_PER_WORD == 0); |
1214 | args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD; |
1215 | } |
1216 | else |
1217 | { |
1218 | args[i].n_aligned_regs |
1219 | = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD; |
1220 | } |
1221 | |
1222 | args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs); |
1223 | |
1224 | /* Structures smaller than a word are normally aligned to the |
1225 | least significant byte. On a BYTES_BIG_ENDIAN machine, |
1226 | this means we must skip the empty high order bytes when |
1227 | calculating the bit offset. */ |
1228 | if (bytes < UNITS_PER_WORD |
1229 | #ifdef BLOCK_REG_PADDING |
1230 | && (BLOCK_REG_PADDING (args[i].mode, |
1231 | TREE_TYPE (args[i].tree_value), 1) |
1232 | == PAD_DOWNWARD) |
1233 | #else |
1234 | && BYTES_BIG_ENDIAN |
1235 | #endif |
1236 | ) |
1237 | endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT; |
1238 | |
1239 | for (j = 0; j < args[i].n_aligned_regs; j++) |
1240 | { |
1241 | rtx reg = gen_reg_rtx (word_mode); |
1242 | rtx word = operand_subword_force (args[i].value, j, BLKmode); |
1243 | int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD); |
1244 | |
1245 | args[i].aligned_regs[j] = reg; |
1246 | word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX, |
1247 | word_mode, word_mode, false, NULL); |
1248 | |
1249 | /* There is no need to restrict this code to loading items |
1250 | in TYPE_ALIGN sized hunks. The bitfield instructions can |
1251 | load up entire word sized registers efficiently. |
1252 | |
1253 | ??? This may not be needed anymore. |
1254 | We used to emit a clobber here, but that doesn't let later |
1255 | passes optimize the instructions we emit. By storing 0 into |
1256 | the register, later passes know that the first AND to zero out the |
1257 | bitfield being set in the register is unnecessary. The store |
1258 | of 0 will be deleted as will at least the first AND. */ |
1259 | |
1260 | emit_move_insn (reg, const0_rtx); |
1261 | |
1262 | bytes -= bitsize / BITS_PER_UNIT; |
1263 | store_bit_field (reg, bitsize, endian_correction, 0, 0, |
1264 | word_mode, word, false, false); |
1265 | } |
1266 | } |
1267 | } |
1268 | |
1269 | /* Issue an error if CALL_EXPR was flagged as requiring |
1270 | tail-call optimization. */ |
1271 | |
1272 | void |
1273 | maybe_complain_about_tail_call (tree call_expr, const char *reason) |
1274 | { |
1275 | gcc_assert (TREE_CODE (call_expr) == CALL_EXPR); |
1276 | if (CALL_EXPR_TAILCALL (call_expr) |
1277 | && dump_file |
1278 | && (dump_flags & TDF_DETAILS)) |
1279 | { |
1280 | fprintf (stream: dump_file, format: ";; Cannot tail-call: %s: ", reason); |
1281 | print_generic_expr (dump_file, call_expr, TDF_SLIM); |
1282 | fprintf (stream: dump_file, format: "\n"); |
1283 | } |
1284 | if (CALL_EXPR_MUST_TAIL_CALL (call_expr)) |
1285 | { |
1286 | error_at (EXPR_LOCATION (call_expr), "cannot tail-call: %s", reason); |
1287 | CALL_EXPR_MUST_TAIL_CALL (call_expr) = 0; |
1288 | } |
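/* Example (illustrative; assumes the musttail return-statement
   attribute is available in this compiler version): for

     __attribute__ ((musttail)) return f (x);

   CALL_EXPR_MUST_TAIL_CALL is set, so a reason such as "argument must
   be passed by copying" becomes a hard error at the call's location;
   for an ordinary CALL_EXPR_TAILCALL the reason is only written to the
   dump file when detailed dumping (dump_flags & TDF_DETAILS) is on.  */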
1289 | } |
1290 | |
1291 | /* Fill in ARGS_SIZE and ARGS array based on the parameters found in |
1292 | CALL_EXPR EXP. |
1293 | |
1294 | NUM_ACTUALS is the total number of parameters. |
1295 | |
1296 | N_NAMED_ARGS is the total number of named arguments. |
1297 | |
1298 | STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return |
1299 | value, or null. |
1300 | |
1301 | FNDECL is the tree code for the target of this call (if known) |
1302 | |
1303 | ARGS_SO_FAR holds state needed by the target to know where to place |
1304 | the next argument. |
1305 | |
1306 | REG_PARM_STACK_SPACE is the number of bytes of stack space reserved |
1307 | for arguments which are passed in registers. |
1308 | |
1309 | OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level |
1310 | and may be modified by this routine. |
1311 | |
1312 | OLD_PENDING_ADJ and FLAGS are pointers to integer flags which |
1313 | may be modified by this routine. |
1314 | |
1315 | MUST_PREALLOCATE is a pointer to a bool which may be |
1316 | modified by this routine. |
1317 | |
1318 | MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference |
1319 | that requires allocation of stack space. |
1320 | |
1321 | CALL_FROM_THUNK_P is true if this call is the jump from a thunk to |
1322 | the thunked-to function. */ |
1323 | |
1324 | static void |
1325 | initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED, |
1326 | struct arg_data *args, |
1327 | struct args_size *args_size, |
1328 | int n_named_args ATTRIBUTE_UNUSED, |
1329 | tree exp, tree struct_value_addr_value, |
1330 | tree fndecl, tree fntype, |
1331 | cumulative_args_t args_so_far, |
1332 | int reg_parm_stack_space, |
1333 | rtx *old_stack_level, |
1334 | poly_int64 *old_pending_adj, |
1335 | bool *must_preallocate, int *ecf_flags, |
1336 | bool *may_tailcall, bool call_from_thunk_p) |
1337 | { |
1338 | CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (arg: args_so_far); |
1339 | location_t loc = EXPR_LOCATION (exp); |
1340 | |
1341 | /* Count arg position in order args appear. */ |
1342 | int argpos; |
1343 | |
1344 | int i; |
1345 | |
1346 | args_size->constant = 0; |
1347 | args_size->var = 0; |
1348 | |
1349 | /* In this loop, we consider args in the order they are written. |
1350 | We fill up ARGS from the back. */ |
1351 | |
1352 | i = num_actuals - 1; |
1353 | { |
1354 | int j = i; |
1355 | call_expr_arg_iterator iter; |
1356 | tree arg; |
1357 | |
1358 | if (struct_value_addr_value) |
1359 | { |
1360 | args[j].tree_value = struct_value_addr_value; |
1361 | j--; |
1362 | } |
1363 | argpos = 0; |
1364 | FOR_EACH_CALL_EXPR_ARG (arg, iter, exp) |
1365 | { |
1366 | tree argtype = TREE_TYPE (arg); |
1367 | |
1368 | if (targetm.calls.split_complex_arg |
1369 | && argtype |
1370 | && TREE_CODE (argtype) == COMPLEX_TYPE |
1371 | && targetm.calls.split_complex_arg (argtype)) |
1372 | { |
1373 | tree subtype = TREE_TYPE (argtype); |
1374 | args[j].tree_value = build1 (REALPART_EXPR, subtype, arg); |
1375 | j--; |
1376 | args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg); |
1377 | } |
1378 | else |
1379 | args[j].tree_value = arg; |
1380 | j--; |
1381 | argpos++; |
1382 | } |
1383 | } |
1384 | |
1385 | bool promote_p |
1386 | = targetm.calls.promote_prototypes (fndecl |
1387 | ? TREE_TYPE (fndecl) |
1388 | : fntype); |
1389 | |
1390 | /* I counts args in order (to be) pushed; ARGPOS counts in order written. */ |
1391 | for (argpos = 0; argpos < num_actuals; i--, argpos++) |
1392 | { |
1393 | tree type = TREE_TYPE (args[i].tree_value); |
1394 | int unsignedp; |
1395 | |
1396 | /* Replace erroneous argument with constant zero. */ |
1397 | if (type == error_mark_node || !COMPLETE_TYPE_P (type)) |
1398 | args[i].tree_value = integer_zero_node, type = integer_type_node; |
1399 | else if (promote_p |
1400 | && INTEGRAL_TYPE_P (type) |
1401 | && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)) |
1402 | type = integer_type_node; |
1403 | |
1404 | /* If TYPE is a transparent union or record, pass things the way |
1405 | we would pass the first field of the union or record. We have |
1406 | already verified that the modes are the same. */ |
1407 | if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type)) |
1408 | type = TREE_TYPE (first_field (type)); |
1409 | |
1410 | /* Decide where to pass this arg. |
1411 | |
1412 | args[i].reg is nonzero if all or part is passed in registers. |
1413 | |
1414 | args[i].partial is nonzero if part but not all is passed in registers, |
1415 | and the exact value says how many bytes are passed in registers. |
1416 | |
1417 | args[i].pass_on_stack is true if the argument must at least be |
1418 | computed on the stack. It may then be loaded back into registers |
1419 | if args[i].reg is nonzero. |
1420 | |
1421 | These decisions are driven by the FUNCTION_... macros and must agree |
1422 | with those made by function.cc. */ |
1423 | |
1424 | /* See if this argument should be passed by invisible reference. */ |
1425 | function_arg_info arg (type, argpos < n_named_args); |
1426 | if (pass_by_reference (ca: args_so_far_pnt, arg)) |
1427 | { |
1428 | const bool callee_copies |
1429 | = reference_callee_copied (ca: args_so_far_pnt, arg); |
1430 | tree base; |
1431 | |
1432 | /* If we're compiling a thunk, pass directly the address of an object |
1433 | already in memory, instead of making a copy. Likewise if we want |
1434 | to make the copy in the callee instead of the caller. */ |
1435 | if ((call_from_thunk_p || callee_copies) |
1436 | && TREE_CODE (args[i].tree_value) != WITH_SIZE_EXPR |
1437 | && ((base = get_base_address (t: args[i].tree_value)), true) |
1438 | && TREE_CODE (base) != SSA_NAME |
1439 | && (!DECL_P (base) || MEM_P (DECL_RTL (base)))) |
1440 | { |
1441 | /* We may have turned the parameter value into an SSA name. |
1442 | Go back to the original parameter so we can take the |
1443 | address. */ |
1444 | if (TREE_CODE (args[i].tree_value) == SSA_NAME) |
1445 | { |
1446 | gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value)); |
1447 | args[i].tree_value = SSA_NAME_VAR (args[i].tree_value); |
1448 | gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL); |
1449 | } |
1450 | /* Argument setup code may have copied the value to a register. We |
1451 | revert that optimization now because the tail call code must |
1452 | use the original location. */ |
1453 | if (TREE_CODE (args[i].tree_value) == PARM_DECL |
1454 | && !MEM_P (DECL_RTL (args[i].tree_value)) |
1455 | && DECL_INCOMING_RTL (args[i].tree_value) |
1456 | && MEM_P (DECL_INCOMING_RTL (args[i].tree_value))) |
1457 | set_decl_rtl (args[i].tree_value, |
1458 | DECL_INCOMING_RTL (args[i].tree_value)); |
1459 | |
1460 | mark_addressable (args[i].tree_value); |
1461 | |
1462 | /* We can't use sibcalls if a callee-copied argument is |
1463 | stored in the current function's frame. */ |
1464 | if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base)) |
1465 | { |
1466 | *may_tailcall = false; |
1467 | maybe_complain_about_tail_call (call_expr: exp, _("a callee-copied " |
1468 | "argument is stored " |
1469 | "in the current " |
1470 | "function's frame")); |
1471 | } |
1472 | |
1473 | args[i].tree_value = build_fold_addr_expr_loc (loc, |
1474 | args[i].tree_value); |
1475 | type = TREE_TYPE (args[i].tree_value); |
1476 | |
1477 | if (*ecf_flags & ECF_CONST) |
1478 | *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE); |
1479 | } |
1480 | else |
1481 | { |
1482 | /* We make a copy of the object and pass the address to the |
1483 | function being called. */ |
1484 | rtx copy; |
1485 | |
1486 | if (!COMPLETE_TYPE_P (type) |
1487 | || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST |
1488 | || (flag_stack_check == GENERIC_STACK_CHECK |
1489 | && compare_tree_int (TYPE_SIZE_UNIT (type), |
1490 | STACK_CHECK_MAX_VAR_SIZE) > 0)) |
1491 | { |
1492 | /* This is a variable-sized object. Make space on the stack |
1493 | for it. */ |
1494 | rtx size_rtx = expr_size (args[i].tree_value); |
1495 | |
1496 | if (*old_stack_level == 0) |
1497 | { |
1498 | emit_stack_save (SAVE_BLOCK, old_stack_level); |
1499 | *old_pending_adj = pending_stack_adjust; |
1500 | pending_stack_adjust = 0; |
1501 | } |
1502 | |
1503 | /* We can pass TRUE as the 4th argument because we just |
1504 | saved the stack pointer and will restore it right after |
1505 | the call. */ |
1506 | copy = allocate_dynamic_stack_space (size_rtx, |
1507 | TYPE_ALIGN (type), |
1508 | TYPE_ALIGN (type), |
1509 | max_int_size_in_bytes |
1510 | (type), |
1511 | true); |
1512 | copy = gen_rtx_MEM (BLKmode, copy); |
1513 | set_mem_attributes (copy, type, 1); |
1514 | } |
1515 | else |
1516 | copy = assign_temp (type, 1, 0); |
1517 | |
1518 | store_expr (args[i].tree_value, copy, 0, false, false); |
1519 | |
1520 | /* Just change the const function to pure and then let |
1521 | the next test clear the pure based on |
1522 | callee_copies. */ |
1523 | if (*ecf_flags & ECF_CONST) |
1524 | { |
1525 | *ecf_flags &= ~ECF_CONST; |
1526 | *ecf_flags |= ECF_PURE; |
1527 | } |
1528 | |
1529 | if (!callee_copies && *ecf_flags & ECF_PURE) |
1530 | *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE); |
1531 | |
1532 | args[i].tree_value |
1533 | = build_fold_addr_expr_loc (loc, make_tree (type, copy)); |
1534 | type = TREE_TYPE (args[i].tree_value); |
1535 | *may_tailcall = false; |
maybe_complain_about_tail_call (exp,
1537 | _("argument must be passed" |
1538 | " by copying")); |
1539 | } |
1540 | arg.pass_by_reference = true; |
1541 | } |
1542 | |
1543 | unsignedp = TYPE_UNSIGNED (type); |
1544 | arg.type = type; |
1545 | arg.mode |
1546 | = promote_function_mode (type, TYPE_MODE (type), &unsignedp, |
1547 | fndecl ? TREE_TYPE (fndecl) : fntype, 0); |
1548 | |
1549 | args[i].unsignedp = unsignedp; |
1550 | args[i].mode = arg.mode; |
1551 | |
1552 | targetm.calls.warn_parameter_passing_abi (args_so_far, type); |
1553 | |
1554 | args[i].reg = targetm.calls.function_arg (args_so_far, arg); |
1555 | |
1556 | /* If this is a sibling call and the machine has register windows, the |
register window has to be unwound before calling the routine, so
1558 | arguments have to go into the incoming registers. */ |
1559 | if (targetm.calls.function_incoming_arg != targetm.calls.function_arg) |
1560 | args[i].tail_call_reg |
1561 | = targetm.calls.function_incoming_arg (args_so_far, arg); |
1562 | else |
1563 | args[i].tail_call_reg = args[i].reg; |
1564 | |
1565 | if (args[i].reg) |
1566 | args[i].partial = targetm.calls.arg_partial_bytes (args_so_far, arg); |
1567 | |
1568 | args[i].pass_on_stack = targetm.calls.must_pass_in_stack (arg); |
1569 | |
1570 | /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]), |
1571 | it means that we are to pass this arg in the register(s) designated |
1572 | by the PARALLEL, but also to pass it in the stack. */ |
1573 | if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL |
1574 | && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0) |
1575 | args[i].pass_on_stack = true; |
1576 | |
1577 | /* If this is an addressable type, we must preallocate the stack |
1578 | since we must evaluate the object into its final location. |
1579 | |
1580 | If this is to be passed in both registers and the stack, it is simpler |
1581 | to preallocate. */ |
1582 | if (TREE_ADDRESSABLE (type) |
1583 | || (args[i].pass_on_stack && args[i].reg != 0)) |
1584 | *must_preallocate = true; |
1585 | |
1586 | /* Compute the stack-size of this argument. */ |
1587 | if (args[i].reg == 0 || args[i].partial != 0 |
1588 | || reg_parm_stack_space > 0 |
1589 | || args[i].pass_on_stack) |
1590 | locate_and_pad_parm (arg.mode, type, |
1591 | #ifdef STACK_PARMS_IN_REG_PARM_AREA |
1592 | 1, |
1593 | #else |
1594 | args[i].reg != 0, |
1595 | #endif |
1596 | reg_parm_stack_space, |
1597 | args[i].pass_on_stack ? 0 : args[i].partial, |
1598 | fndecl, args_size, &args[i].locate); |
1599 | #ifdef BLOCK_REG_PADDING |
1600 | else |
1601 | /* The argument is passed entirely in registers. See at which |
1602 | end it should be padded. */ |
1603 | args[i].locate.where_pad = |
1604 | BLOCK_REG_PADDING (arg.mode, type, |
1605 | int_size_in_bytes (type) <= UNITS_PER_WORD); |
1606 | #endif |
1607 | |
1608 | /* Update ARGS_SIZE, the total stack space for args so far. */ |
1609 | |
1610 | args_size->constant += args[i].locate.size.constant; |
1611 | if (args[i].locate.size.var) |
1612 | ADD_PARM_SIZE (*args_size, args[i].locate.size.var); |
1613 | |
1614 | /* Increment ARGS_SO_FAR, which has info about which arg-registers |
1615 | have been used, etc. */ |
1616 | |
1617 | /* ??? Traditionally we've passed TYPE_MODE here, instead of the |
1618 | promoted_mode used for function_arg above. However, the |
1619 | corresponding handling of incoming arguments in function.cc |
1620 | does pass the promoted mode. */ |
1621 | arg.mode = TYPE_MODE (type); |
1622 | targetm.calls.function_arg_advance (args_so_far, arg); |
1623 | } |
1624 | } |
1625 | |
1626 | /* Update ARGS_SIZE to contain the total size for the argument block. |
1627 | Return the original constant component of the argument block's size. |
1628 | |
1629 | REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved |
1630 | for arguments passed in registers. */ |
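
/* As a purely illustrative example (hypothetical values): with a preferred
   boundary of 128 bits (16 bytes), a constant size of 20 bytes, a stack
   pointer delta of 4 and no reserved register-parameter space, the code
   below rounds the constant size up to 28, so that delta plus size (32) is
   a multiple of the boundary once the arguments are pushed.  */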
1631 | |
1632 | static poly_int64 |
1633 | compute_argument_block_size (int reg_parm_stack_space, |
1634 | struct args_size *args_size, |
1635 | tree fndecl ATTRIBUTE_UNUSED, |
1636 | tree fntype ATTRIBUTE_UNUSED, |
1637 | int preferred_stack_boundary ATTRIBUTE_UNUSED) |
1638 | { |
1639 | poly_int64 unadjusted_args_size = args_size->constant; |
1640 | |
1641 | /* For accumulate outgoing args mode we don't need to align, since the frame |
1642 | will be already aligned. Align to STACK_BOUNDARY in order to prevent |
1643 | backends from generating misaligned frame sizes. */ |
1644 | if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY) |
1645 | preferred_stack_boundary = STACK_BOUNDARY; |
1646 | |
1647 | /* Compute the actual size of the argument block required. The variable |
1648 | and constant sizes must be combined, the size may have to be rounded, |
1649 | and there may be a minimum required size. */ |
1650 | |
1651 | if (args_size->var) |
1652 | { |
1653 | args_size->var = ARGS_SIZE_TREE (*args_size); |
1654 | args_size->constant = 0; |
1655 | |
1656 | preferred_stack_boundary /= BITS_PER_UNIT; |
1657 | if (preferred_stack_boundary > 1) |
1658 | { |
1659 | /* We don't handle this case yet. To handle it correctly we have |
1660 | to add the delta, round and subtract the delta. |
1661 | Currently no machine description requires this support. */ |
1662 | gcc_assert (multiple_p (stack_pointer_delta, |
1663 | preferred_stack_boundary)); |
1664 | args_size->var = round_up (args_size->var, preferred_stack_boundary); |
1665 | } |
1666 | |
1667 | if (reg_parm_stack_space > 0) |
1668 | { |
1669 | args_size->var |
1670 | = size_binop (MAX_EXPR, args_size->var, |
1671 | ssize_int (reg_parm_stack_space)); |
1672 | |
1673 | /* The area corresponding to register parameters is not to count in |
1674 | the size of the block we need. So make the adjustment. */ |
1675 | if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))) |
1676 | args_size->var |
1677 | = size_binop (MINUS_EXPR, args_size->var, |
1678 | ssize_int (reg_parm_stack_space)); |
1679 | } |
1680 | } |
1681 | else |
1682 | { |
1683 | preferred_stack_boundary /= BITS_PER_UNIT; |
1684 | if (preferred_stack_boundary < 1) |
1685 | preferred_stack_boundary = 1; |
args_size->constant = (aligned_upper_bound (args_size->constant
+ stack_pointer_delta,
preferred_stack_boundary)
1689 | - stack_pointer_delta); |
1690 | |
args_size->constant = upper_bound (args_size->constant,
reg_parm_stack_space);
1693 | |
1694 | if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))) |
1695 | args_size->constant -= reg_parm_stack_space; |
1696 | } |
1697 | return unadjusted_args_size; |
1698 | } |
1699 | |
1700 | /* Precompute parameters as needed for a function call. |
1701 | |
1702 | FLAGS is mask of ECF_* constants. |
1703 | |
1704 | NUM_ACTUALS is the number of arguments. |
1705 | |
1706 | ARGS is an array containing information for each argument; this |
1707 | routine fills in the INITIAL_VALUE and VALUE fields for each |
1708 | precomputed argument. */ |
1709 | |
1710 | static void |
1711 | precompute_arguments (int num_actuals, struct arg_data *args) |
1712 | { |
1713 | int i; |
1714 | |
1715 | /* If this is a libcall, then precompute all arguments so that we do not |
1716 | get extraneous instructions emitted as part of the libcall sequence. */ |
1717 | |
1718 | /* If we preallocated the stack space, and some arguments must be passed |
1719 | on the stack, then we must precompute any parameter which contains a |
1720 | function call which will store arguments on the stack. |
1721 | Otherwise, evaluating the parameter may clobber previous parameters |
1722 | which have already been stored into the stack. (we have code to avoid |
such a case by saving the outgoing stack arguments, but it results in
1724 | worse code) */ |
1725 | if (!ACCUMULATE_OUTGOING_ARGS) |
1726 | return; |
1727 | |
1728 | for (i = 0; i < num_actuals; i++) |
1729 | { |
1730 | tree type; |
1731 | machine_mode mode; |
1732 | |
1733 | if (TREE_CODE (args[i].tree_value) != CALL_EXPR) |
1734 | continue; |
1735 | |
1736 | /* If this is an addressable type, we cannot pre-evaluate it. */ |
1737 | type = TREE_TYPE (args[i].tree_value); |
1738 | gcc_assert (!TREE_ADDRESSABLE (type)); |
1739 | |
1740 | args[i].initial_value = args[i].value |
= expand_normal (args[i].tree_value);
1742 | |
1743 | mode = TYPE_MODE (type); |
1744 | if (mode != args[i].mode) |
1745 | { |
1746 | int unsignedp = args[i].unsignedp; |
1747 | args[i].value |
= convert_modes (args[i].mode, mode,
args[i].value, args[i].unsignedp);
1750 | |
1751 | /* CSE will replace this only if it contains args[i].value |
1752 | pseudo, so convert it down to the declared mode using |
1753 | a SUBREG. */ |
1754 | if (REG_P (args[i].value) |
1755 | && GET_MODE_CLASS (args[i].mode) == MODE_INT |
1756 | && promote_mode (type, mode, &unsignedp) != args[i].mode) |
1757 | { |
1758 | args[i].initial_value |
1759 | = gen_lowpart_SUBREG (mode, args[i].value); |
1760 | SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1; |
1761 | SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp); |
1762 | } |
1763 | } |
1764 | } |
1765 | } |
1766 | |
1767 | /* Given the current state of MUST_PREALLOCATE and information about |
1768 | arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE, |
1769 | compute and return the final value for MUST_PREALLOCATE. */ |
1770 | |
1771 | static bool |
1772 | finalize_must_preallocate (bool must_preallocate, int num_actuals, |
1773 | struct arg_data *args, struct args_size *args_size) |
1774 | { |
1775 | /* See if we have or want to preallocate stack space. |
1776 | |
1777 | If we would have to push a partially-in-regs parm |
1778 | before other stack parms, preallocate stack space instead. |
1779 | |
1780 | If the size of some parm is not a multiple of the required stack |
1781 | alignment, we must preallocate. |
1782 | |
1783 | If the total size of arguments that would otherwise create a copy in |
1784 | a temporary (such as a CALL) is more than half the total argument list |
1785 | size, preallocation is faster. |
1786 | |
1787 | Another reason to preallocate is if we have a machine (like the m88k) |
1788 | where stack alignment is required to be maintained between every |
1789 | pair of insns, not just when the call is made. However, we assume here |
1790 | that such machines either do not have push insns (and hence preallocation |
1791 | would occur anyway) or the problem is taken care of with |
1792 | PUSH_ROUNDING. */ |
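
/* For instance (hypothetical sizes): two 16-byte BLKmode CALL_EXPR
   arguments inside a 48-byte argument block give COPY_TO_EVALUATE_SIZE * 2
   == 64 >= 48, so we preallocate.  */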
1793 | |
1794 | if (! must_preallocate) |
1795 | { |
1796 | bool partial_seen = false; |
1797 | poly_int64 copy_to_evaluate_size = 0; |
1798 | int i; |
1799 | |
1800 | for (i = 0; i < num_actuals && ! must_preallocate; i++) |
1801 | { |
1802 | if (args[i].partial > 0 && ! args[i].pass_on_stack) |
1803 | partial_seen = true; |
1804 | else if (partial_seen && args[i].reg == 0) |
1805 | must_preallocate = true; |
1806 | |
1807 | if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode |
1808 | && (TREE_CODE (args[i].tree_value) == CALL_EXPR |
1809 | || TREE_CODE (args[i].tree_value) == TARGET_EXPR |
1810 | || TREE_CODE (args[i].tree_value) == COND_EXPR |
1811 | || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))) |
1812 | copy_to_evaluate_size |
1813 | += int_size_in_bytes (TREE_TYPE (args[i].tree_value)); |
1814 | } |
1815 | |
if (maybe_ne (args_size->constant, 0)
1817 | && maybe_ge (copy_to_evaluate_size * 2, args_size->constant)) |
1818 | must_preallocate = true; |
1819 | } |
1820 | return must_preallocate; |
1821 | } |
1822 | |
1823 | /* If we preallocated stack space, compute the address of each argument |
1824 | and store it into the ARGS array. |
1825 | |
1826 | We need not ensure it is a valid memory address here; it will be |
1827 | validized when it is used. |
1828 | |
1829 | ARGBLOCK is an rtx for the address of the outgoing arguments. */ |
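
/* As an illustration (hypothetical RTL): if ARGBLOCK is
   (plus (reg R) (const_int 32)) and an argument's offset is (const_int 8),
   its stack address below becomes (plus (reg R) (const_int 40)).  */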
1830 | |
1831 | static void |
1832 | compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals) |
1833 | { |
1834 | if (argblock) |
1835 | { |
1836 | rtx arg_reg = argblock; |
1837 | int i; |
1838 | poly_int64 arg_offset = 0; |
1839 | |
1840 | if (GET_CODE (argblock) == PLUS) |
1841 | { |
1842 | arg_reg = XEXP (argblock, 0); |
1843 | arg_offset = rtx_to_poly_int64 (XEXP (argblock, 1)); |
1844 | } |
1845 | |
1846 | for (i = 0; i < num_actuals; i++) |
1847 | { |
1848 | rtx offset = ARGS_SIZE_RTX (args[i].locate.offset); |
1849 | rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset); |
1850 | rtx addr; |
1851 | unsigned int align, boundary; |
1852 | poly_uint64 units_on_stack = 0; |
1853 | machine_mode partial_mode = VOIDmode; |
1854 | |
1855 | /* Skip this parm if it will not be passed on the stack. */ |
1856 | if (! args[i].pass_on_stack |
1857 | && args[i].reg != 0 |
1858 | && args[i].partial == 0) |
1859 | continue; |
1860 | |
1861 | if (TYPE_EMPTY_P (TREE_TYPE (args[i].tree_value))) |
1862 | continue; |
1863 | |
addr = simplify_gen_binary (PLUS, Pmode, arg_reg, offset);
1865 | addr = plus_constant (Pmode, addr, arg_offset); |
1866 | |
1867 | if (args[i].partial != 0) |
1868 | { |
1869 | /* Only part of the parameter is being passed on the stack. |
1870 | Generate a simple memory reference of the correct size. */ |
1871 | units_on_stack = args[i].locate.size.constant; |
1872 | poly_uint64 bits_on_stack = units_on_stack * BITS_PER_UNIT; |
partial_mode = int_mode_for_size (bits_on_stack, 1).else_blk ();
1874 | args[i].stack = gen_rtx_MEM (partial_mode, addr); |
1875 | set_mem_size (args[i].stack, units_on_stack); |
1876 | } |
1877 | else |
1878 | { |
1879 | args[i].stack = gen_rtx_MEM (args[i].mode, addr); |
1880 | set_mem_attributes (args[i].stack, |
1881 | TREE_TYPE (args[i].tree_value), 1); |
1882 | } |
1883 | align = BITS_PER_UNIT; |
1884 | boundary = args[i].locate.boundary; |
1885 | poly_int64 offset_val; |
1886 | if (args[i].locate.where_pad != PAD_DOWNWARD) |
1887 | align = boundary; |
else if (poly_int_rtx_p (offset, &offset_val))
1889 | { |
align = least_bit_hwi (boundary);
1891 | unsigned int offset_align |
= known_alignment (offset_val) * BITS_PER_UNIT;
1893 | if (offset_align != 0) |
1894 | align = MIN (align, offset_align); |
1895 | } |
1896 | set_mem_align (args[i].stack, align); |
1897 | |
addr = simplify_gen_binary (PLUS, Pmode, arg_reg, slot_offset);
1899 | addr = plus_constant (Pmode, addr, arg_offset); |
1900 | |
1901 | if (args[i].partial != 0) |
1902 | { |
1903 | /* Only part of the parameter is being passed on the stack. |
Generate a simple memory reference of the correct size.  */
1906 | args[i].stack_slot = gen_rtx_MEM (partial_mode, addr); |
1907 | set_mem_size (args[i].stack_slot, units_on_stack); |
1908 | } |
1909 | else |
1910 | { |
1911 | args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr); |
1912 | set_mem_attributes (args[i].stack_slot, |
1913 | TREE_TYPE (args[i].tree_value), 1); |
1914 | } |
1915 | set_mem_align (args[i].stack_slot, args[i].locate.boundary); |
1916 | |
1917 | /* Function incoming arguments may overlap with sibling call |
1918 | outgoing arguments and we cannot allow reordering of reads |
1919 | from function arguments with stores to outgoing arguments |
1920 | of sibling calls. */ |
1921 | set_mem_alias_set (args[i].stack, 0); |
1922 | set_mem_alias_set (args[i].stack_slot, 0); |
1923 | } |
1924 | } |
1925 | } |
1926 | |
1927 | /* Given a FNDECL and EXP, return an rtx suitable for use as a target address |
1928 | in a call instruction. |
1929 | |
1930 | FNDECL is the tree node for the target function. For an indirect call |
1931 | FNDECL will be NULL_TREE. |
1932 | |
1933 | ADDR is the operand 0 of CALL_EXPR for this call. */ |
1934 | |
1935 | static rtx |
1936 | rtx_for_function_call (tree fndecl, tree addr) |
1937 | { |
1938 | rtx funexp; |
1939 | |
1940 | /* Get the function to call, in the form of RTL. */ |
1941 | if (fndecl) |
1942 | { |
1943 | if (!TREE_USED (fndecl) && fndecl != current_function_decl) |
1944 | TREE_USED (fndecl) = 1; |
1945 | |
1946 | /* Get a SYMBOL_REF rtx for the function address. */ |
1947 | funexp = XEXP (DECL_RTL (fndecl), 0); |
1948 | } |
1949 | else |
1950 | /* Generate an rtx (probably a pseudo-register) for the address. */ |
1951 | { |
1952 | push_temp_slots (); |
funexp = expand_normal (addr);
1954 | pop_temp_slots (); /* FUNEXP can't be BLKmode. */ |
1955 | } |
1956 | return funexp; |
1957 | } |
1958 | |
1959 | /* Return the static chain for this function, if any. */ |
1960 | |
1961 | rtx |
1962 | rtx_for_static_chain (const_tree fndecl_or_type, bool incoming_p) |
1963 | { |
1964 | if (DECL_P (fndecl_or_type) && !DECL_STATIC_CHAIN (fndecl_or_type)) |
1965 | return NULL; |
1966 | |
1967 | return targetm.calls.static_chain (fndecl_or_type, incoming_p); |
1968 | } |
1969 | |
1970 | /* Internal state for internal_arg_pointer_based_exp and its helpers. */ |
1971 | static struct |
1972 | { |
1973 | /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan, |
1974 | or NULL_RTX if none has been scanned yet. */ |
1975 | rtx_insn *scan_start; |
1976 | /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is |
1977 | based on crtl->args.internal_arg_pointer. The element is NULL_RTX if the |
1978 | pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it |
with a fixed offset, or PC if the offset is variable or unknown.  */
1980 | vec<rtx> cache; |
1981 | } internal_arg_pointer_exp_state; |
1982 | |
1983 | static rtx internal_arg_pointer_based_exp (const_rtx, bool); |
1984 | |
1985 | /* Helper function for internal_arg_pointer_based_exp. Scan insns in |
1986 | the tail call sequence, starting with first insn that hasn't been |
1987 | scanned yet, and note for each pseudo on the LHS whether it is based |
1988 | on crtl->args.internal_arg_pointer or not, and what offset from that |
pointer it has.  */
1990 | |
1991 | static void |
1992 | internal_arg_pointer_based_exp_scan (void) |
1993 | { |
1994 | rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start; |
1995 | |
1996 | if (scan_start == NULL_RTX) |
1997 | insn = get_insns (); |
1998 | else |
insn = NEXT_INSN (scan_start);
2000 | |
2001 | while (insn) |
2002 | { |
2003 | rtx set = single_set (insn); |
2004 | if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set))) |
2005 | { |
2006 | rtx val = NULL_RTX; |
2007 | unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER; |
2008 | /* Punt on pseudos set multiple times. */ |
2009 | if (idx < internal_arg_pointer_exp_state.cache.length () |
2010 | && (internal_arg_pointer_exp_state.cache[idx] |
2011 | != NULL_RTX)) |
2012 | val = pc_rtx; |
2013 | else |
2014 | val = internal_arg_pointer_based_exp (SET_SRC (set), false); |
2015 | if (val != NULL_RTX) |
2016 | { |
2017 | if (idx >= internal_arg_pointer_exp_state.cache.length ()) |
2018 | internal_arg_pointer_exp_state.cache |
.safe_grow_cleared (idx + 1, true);
2020 | internal_arg_pointer_exp_state.cache[idx] = val; |
2021 | } |
2022 | } |
2023 | if (NEXT_INSN (insn) == NULL_RTX) |
2024 | scan_start = insn; |
2025 | insn = NEXT_INSN (insn); |
2026 | } |
2027 | |
2028 | internal_arg_pointer_exp_state.scan_start = scan_start; |
2029 | } |
2030 | |
2031 | /* Compute whether RTL is based on crtl->args.internal_arg_pointer. Return |
2032 | NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on |
it with a fixed offset, or PC if the offset is variable or unknown.
2034 | TOPLEVEL is true if the function is invoked at the topmost level. */ |
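
/* For example (illustrative only): if RTL is crtl->args.internal_arg_pointer
   itself, the result is (const_int 0); for
   (plus internal_arg_pointer (const_int 16)) it is (const_int 16).  */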
2035 | |
2036 | static rtx |
2037 | internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel) |
2038 | { |
2039 | if (CONSTANT_P (rtl)) |
2040 | return NULL_RTX; |
2041 | |
2042 | if (rtl == crtl->args.internal_arg_pointer) |
2043 | return const0_rtx; |
2044 | |
2045 | if (REG_P (rtl) && HARD_REGISTER_P (rtl)) |
2046 | return NULL_RTX; |
2047 | |
2048 | poly_int64 offset; |
if (GET_CODE (rtl) == PLUS && poly_int_rtx_p (XEXP (rtl, 1), &offset))
2050 | { |
2051 | rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel); |
2052 | if (val == NULL_RTX || val == pc_rtx) |
2053 | return val; |
2054 | return plus_constant (Pmode, val, offset); |
2055 | } |
2056 | |
2057 | /* When called at the topmost level, scan pseudo assignments in between the |
2058 | last scanned instruction in the tail call sequence and the latest insn |
2059 | in that sequence. */ |
2060 | if (toplevel) |
2061 | internal_arg_pointer_based_exp_scan (); |
2062 | |
2063 | if (REG_P (rtl)) |
2064 | { |
2065 | unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER; |
2066 | if (idx < internal_arg_pointer_exp_state.cache.length ()) |
2067 | return internal_arg_pointer_exp_state.cache[idx]; |
2068 | |
2069 | return NULL_RTX; |
2070 | } |
2071 | |
2072 | subrtx_iterator::array_type array; |
2073 | FOR_EACH_SUBRTX (iter, array, rtl, NONCONST) |
2074 | { |
2075 | const_rtx x = *iter; |
if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX)
2077 | return pc_rtx; |
2078 | if (MEM_P (x)) |
2079 | iter.skip_subrtxes (); |
2080 | } |
2081 | |
2082 | return NULL_RTX; |
2083 | } |
2084 | |
2085 | /* Return true if SIZE bytes starting from address ADDR might overlap an |
2086 | already-clobbered argument area. This function is used to determine |
2087 | if we should give up a sibcall. */ |
2088 | |
2089 | static bool |
2090 | mem_might_overlap_already_clobbered_arg_p (rtx addr, poly_uint64 size) |
2091 | { |
2092 | poly_int64 i; |
2093 | unsigned HOST_WIDE_INT start, end; |
2094 | rtx val; |
2095 | |
2096 | if (bitmap_empty_p (stored_args_map) |
2097 | && stored_args_watermark == HOST_WIDE_INT_M1U) |
2098 | return false; |
val = internal_arg_pointer_based_exp (addr, true);
2100 | if (val == NULL_RTX) |
2101 | return false; |
else if (!poly_int_rtx_p (val, &i))
2103 | return true; |
2104 | |
2105 | if (known_eq (size, 0U)) |
2106 | return false; |
2107 | |
2108 | if (STACK_GROWS_DOWNWARD) |
2109 | i -= crtl->args.pretend_args_size; |
2110 | else |
2111 | i += crtl->args.pretend_args_size; |
2112 | |
2113 | if (ARGS_GROW_DOWNWARD) |
2114 | i = -i - size; |
2115 | |
2116 | /* We can ignore any references to the function's pretend args, |
2117 | which at this point would manifest as negative values of I. */ |
2118 | if (known_le (i, 0) && known_le (size, poly_uint64 (-i))) |
2119 | return false; |
2120 | |
start = maybe_lt (i, 0) ? 0 : constant_lower_bound (i);
if (!(i + size).is_constant (&end))
2123 | end = HOST_WIDE_INT_M1U; |
2124 | |
2125 | if (end > stored_args_watermark) |
2126 | return true; |
2127 | |
2128 | end = MIN (end, SBITMAP_SIZE (stored_args_map)); |
2129 | for (unsigned HOST_WIDE_INT k = start; k < end; ++k) |
if (bitmap_bit_p (stored_args_map, k))
2131 | return true; |
2132 | |
2133 | return false; |
2134 | } |
2135 | |
2136 | /* Do the register loads required for any wholly-register parms or any |
2137 | parms which are passed both on the stack and in a register. Their |
2138 | expressions were already evaluated. |
2139 | |
2140 | Mark all register-parms as living through the call, putting these USE |
2141 | insns in the CALL_INSN_FUNCTION_USAGE field. |
2142 | |
2143 | When IS_SIBCALL, perform the check_sibcall_argument_overlap |
2144 | checking, setting *SIBCALL_FAILURE if appropriate. */ |
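
/* For example (hypothetical 32-bit target, UNITS_PER_WORD == 4): a 12-byte
   BLKmode argument passed entirely in registers gives NREGS == 3 below, so
   the value is loaded a word at a time into three consecutive registers.  */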
2145 | |
2146 | static void |
2147 | load_register_parameters (struct arg_data *args, int num_actuals, |
2148 | rtx *call_fusage, int flags, int is_sibcall, |
2149 | bool *sibcall_failure) |
2150 | { |
2151 | int i, j; |
2152 | |
2153 | for (i = 0; i < num_actuals; i++) |
2154 | { |
2155 | rtx reg = ((flags & ECF_SIBCALL) |
2156 | ? args[i].tail_call_reg : args[i].reg); |
2157 | if (reg) |
2158 | { |
2159 | int partial = args[i].partial; |
2160 | int nregs; |
2161 | poly_int64 size = 0; |
2162 | HOST_WIDE_INT const_size = 0; |
2163 | rtx_insn *before_arg = get_last_insn (); |
2164 | tree tree_value = args[i].tree_value; |
2165 | tree type = TREE_TYPE (tree_value); |
2166 | if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type)) |
2167 | type = TREE_TYPE (first_field (type)); |
2168 | /* Set non-negative if we must move a word at a time, even if |
just one word (e.g., partial == 4 && mode == DFmode).  Set
2170 | to -1 if we just use a normal move insn. This value can be |
2171 | zero if the argument is a zero size structure. */ |
2172 | nregs = -1; |
2173 | if (GET_CODE (reg) == PARALLEL) |
2174 | ; |
2175 | else if (partial) |
2176 | { |
2177 | gcc_assert (partial % UNITS_PER_WORD == 0); |
2178 | nregs = partial / UNITS_PER_WORD; |
2179 | } |
2180 | else if (TYPE_MODE (type) == BLKmode) |
2181 | { |
2182 | /* Variable-sized parameters should be described by a |
2183 | PARALLEL instead. */ |
2184 | const_size = int_size_in_bytes (type); |
2185 | gcc_assert (const_size >= 0); |
2186 | nregs = (const_size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD; |
2187 | size = const_size; |
2188 | } |
2189 | else |
size = GET_MODE_SIZE (args[i].mode);
2191 | |
2192 | /* Handle calls that pass values in multiple non-contiguous |
2193 | locations. The Irix 6 ABI has examples of this. */ |
2194 | |
2195 | if (GET_CODE (reg) == PARALLEL) |
2196 | emit_group_move (reg, args[i].parallel_value); |
2197 | |
2198 | /* If simple case, just do move. If normal partial, store_one_arg |
2199 | has already loaded the register for us. In all other cases, |
2200 | load the register(s) from memory. */ |
2201 | |
2202 | else if (nregs == -1) |
2203 | { |
2204 | emit_move_insn (reg, args[i].value); |
2205 | #ifdef BLOCK_REG_PADDING |
/* Handle the case where we have a value that needs shifting
up to the msb, e.g. a QImode value when we're padding
2208 | upward on a BYTES_BIG_ENDIAN machine. */ |
2209 | if (args[i].locate.where_pad |
2210 | == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)) |
2211 | { |
2212 | gcc_checking_assert (ordered_p (size, UNITS_PER_WORD)); |
2213 | if (maybe_lt (size, UNITS_PER_WORD)) |
2214 | { |
2215 | rtx x; |
2216 | poly_int64 shift |
2217 | = (UNITS_PER_WORD - size) * BITS_PER_UNIT; |
2218 | |
2219 | /* Assigning REG here rather than a temp makes |
2220 | CALL_FUSAGE report the whole reg as used. |
2221 | Strictly speaking, the call only uses SIZE |
2222 | bytes at the msb end, but it doesn't seem worth |
2223 | generating rtl to say that. */ |
2224 | reg = gen_rtx_REG (word_mode, REGNO (reg)); |
2225 | x = expand_shift (LSHIFT_EXPR, word_mode, |
2226 | reg, shift, reg, 1); |
2227 | if (x != reg) |
2228 | emit_move_insn (reg, x); |
2229 | } |
2230 | } |
2231 | #endif |
2232 | } |
2233 | |
2234 | /* If we have pre-computed the values to put in the registers in |
2235 | the case of non-aligned structures, copy them in now. */ |
2236 | |
2237 | else if (args[i].n_aligned_regs != 0) |
2238 | for (j = 0; j < args[i].n_aligned_regs; j++) |
2239 | emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j), |
2240 | args[i].aligned_regs[j]); |
2241 | |
2242 | /* If we need a single register and the source is a constant |
2243 | VAR_DECL with a simple constructor, expand that constructor |
2244 | via a pseudo rather than read from (possibly misaligned) |
2245 | memory. PR middle-end/95126. */ |
2246 | else if (nregs == 1 |
2247 | && partial == 0 |
2248 | && !args[i].pass_on_stack |
2249 | && VAR_P (tree_value) |
2250 | && TREE_READONLY (tree_value) |
2251 | && !TREE_SIDE_EFFECTS (tree_value) |
2252 | && immediate_const_ctor_p (DECL_INITIAL (tree_value))) |
2253 | { |
2254 | rtx target = gen_reg_rtx (word_mode); |
2255 | store_constructor (DECL_INITIAL (tree_value), target, 0, |
2256 | int_expr_size (DECL_INITIAL (tree_value)), |
2257 | false); |
2258 | reg = gen_rtx_REG (word_mode, REGNO (reg)); |
2259 | emit_move_insn (reg, target); |
2260 | } |
2261 | else if (partial == 0 || args[i].pass_on_stack) |
2262 | { |
2263 | /* SIZE and CONST_SIZE are 0 for partial arguments and |
2264 | the size of a BLKmode type otherwise. */ |
2265 | gcc_checking_assert (known_eq (size, const_size)); |
2266 | rtx mem = validize_mem (copy_rtx (args[i].value)); |
2267 | |
2268 | /* Check for overlap with already clobbered argument area, |
2269 | providing that this has non-zero size. */ |
2270 | if (is_sibcall |
2271 | && const_size != 0 |
2272 | && (mem_might_overlap_already_clobbered_arg_p |
(XEXP (args[i].value, 0), const_size)))
2274 | *sibcall_failure = true; |
2275 | |
2276 | if (const_size % UNITS_PER_WORD == 0 |
2277 | || MEM_ALIGN (mem) % BITS_PER_WORD == 0) |
2278 | move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode); |
2279 | else |
2280 | { |
2281 | if (nregs > 1) |
2282 | move_block_to_reg (REGNO (reg), mem, nregs - 1, |
2283 | args[i].mode); |
2284 | rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1); |
2285 | unsigned int bitoff = (nregs - 1) * BITS_PER_WORD; |
2286 | unsigned int bitsize = const_size * BITS_PER_UNIT - bitoff; |
2287 | rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest, |
2288 | word_mode, word_mode, false, |
2289 | NULL); |
2290 | if (BYTES_BIG_ENDIAN) |
2291 | x = expand_shift (LSHIFT_EXPR, word_mode, x, |
2292 | BITS_PER_WORD - bitsize, dest, 1); |
2293 | if (x != dest) |
2294 | emit_move_insn (dest, x); |
2295 | } |
2296 | |
2297 | /* Handle a BLKmode that needs shifting. */ |
2298 | if (nregs == 1 && const_size < UNITS_PER_WORD |
2299 | #ifdef BLOCK_REG_PADDING |
2300 | && args[i].locate.where_pad == PAD_DOWNWARD |
2301 | #else |
2302 | && BYTES_BIG_ENDIAN |
2303 | #endif |
2304 | ) |
2305 | { |
2306 | rtx dest = gen_rtx_REG (word_mode, REGNO (reg)); |
2307 | int shift = (UNITS_PER_WORD - const_size) * BITS_PER_UNIT; |
2308 | enum tree_code dir = (BYTES_BIG_ENDIAN |
2309 | ? RSHIFT_EXPR : LSHIFT_EXPR); |
2310 | rtx x; |
2311 | |
2312 | x = expand_shift (dir, word_mode, dest, shift, dest, 1); |
2313 | if (x != dest) |
2314 | emit_move_insn (dest, x); |
2315 | } |
2316 | } |
2317 | |
2318 | /* When a parameter is a block, and perhaps in other cases, it is |
2319 | possible that it did a load from an argument slot that was |
2320 | already clobbered. */ |
2321 | if (is_sibcall |
2322 | && check_sibcall_argument_overlap (before_arg, &args[i], false)) |
2323 | *sibcall_failure = true; |
2324 | |
2325 | /* Handle calls that pass values in multiple non-contiguous |
2326 | locations. The Irix 6 ABI has examples of this. */ |
2327 | if (GET_CODE (reg) == PARALLEL) |
2328 | use_group_regs (call_fusage, reg); |
2329 | else if (nregs == -1) |
2330 | use_reg_mode (call_fusage, reg, TYPE_MODE (type)); |
2331 | else if (nregs > 0) |
2332 | use_regs (call_fusage, REGNO (reg), nregs); |
2333 | } |
2334 | } |
2335 | } |
2336 | |
2337 | /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments |
2338 | wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY |
2339 | bytes, then we would need to push some additional bytes to pad the |
2340 | arguments. So, we try to compute an adjust to the stack pointer for an |
2341 | amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE |
2342 | bytes. Then, when the arguments are pushed the stack will be perfectly |
2343 | aligned. |
2344 | |
2345 | Return true if this optimization is possible, storing the adjustment |
2346 | in ADJUSTMENT_OUT and setting ARGS_SIZE->CONSTANT to the number of |
2347 | bytes that should be popped after the call. */ |
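
/* A worked illustration (hypothetical values, not from any particular
   target): with a 16-byte preferred boundary, PENDING_STACK_ADJUST of 20
   bytes, a stack pointer delta of 0 and UNADJUSTED_ARGS_SIZE of 8, the code
   below picks an adjustment of 8; the 8 bytes of arguments pushed afterwards
   bring the stack back to a 16-byte boundary, and ARGS_SIZE->CONSTANT
   becomes 12 + 8 = 20 bytes to pop after the call.  */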
2348 | |
2349 | static bool |
2350 | combine_pending_stack_adjustment_and_call (poly_int64 *adjustment_out, |
2351 | poly_int64 unadjusted_args_size, |
2352 | struct args_size *args_size, |
2353 | unsigned int preferred_unit_stack_boundary) |
2354 | { |
2355 | /* The number of bytes to pop so that the stack will be |
2356 | under-aligned by UNADJUSTED_ARGS_SIZE bytes. */ |
2357 | poly_int64 adjustment; |
2358 | /* The alignment of the stack after the arguments are pushed, if we |
just pushed the arguments without adjusting the stack here.  */
2360 | unsigned HOST_WIDE_INT unadjusted_alignment; |
2361 | |
2362 | if (!known_misalignment (stack_pointer_delta + unadjusted_args_size, |
preferred_unit_stack_boundary,
&unadjusted_alignment))
2365 | return false; |
2366 | |
2367 | /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes |
2368 | as possible -- leaving just enough left to cancel out the |
2369 | UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the |
2370 | PENDING_STACK_ADJUST is non-negative, and congruent to |
2371 | -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */ |
2372 | |
2373 | /* Begin by trying to pop all the bytes. */ |
2374 | unsigned HOST_WIDE_INT tmp_misalignment; |
2375 | if (!known_misalignment (pending_stack_adjust, |
preferred_unit_stack_boundary,
&tmp_misalignment))
2378 | return false; |
2379 | unadjusted_alignment -= tmp_misalignment; |
2380 | adjustment = pending_stack_adjust; |
2381 | /* Push enough additional bytes that the stack will be aligned |
2382 | after the arguments are pushed. */ |
2383 | if (preferred_unit_stack_boundary > 1 && unadjusted_alignment) |
2384 | adjustment -= preferred_unit_stack_boundary - unadjusted_alignment; |
2385 | |
2386 | /* We need to know whether the adjusted argument size |
2387 | (UNADJUSTED_ARGS_SIZE - ADJUSTMENT) constitutes an allocation |
2388 | or a deallocation. */ |
if (!ordered_p (adjustment, unadjusted_args_size))
2390 | return false; |
2391 | |
/* Now set ARGS_SIZE->CONSTANT so that we pop the right number of
2393 | bytes after the call. The right number is the entire |
2394 | PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required |
2395 | by the arguments in the first place. */ |
2396 | args_size->constant |
2397 | = pending_stack_adjust - adjustment + unadjusted_args_size; |
2398 | |
2399 | *adjustment_out = adjustment; |
2400 | return true; |
2401 | } |
2402 | |
/* Scan expression X to see whether it dereferences any argument slots that
   we have already clobbered with tail call arguments (as noted in the
   stored_args_map bitmap).  Return true if X dereferences such an argument
   slot, false otherwise.  */
2408 | |
2409 | static bool |
2410 | check_sibcall_argument_overlap_1 (rtx x) |
2411 | { |
2412 | RTX_CODE code; |
2413 | int i, j; |
2414 | const char *fmt; |
2415 | |
2416 | if (x == NULL_RTX) |
2417 | return false; |
2418 | |
2419 | code = GET_CODE (x); |
2420 | |
2421 | /* We need not check the operands of the CALL expression itself. */ |
2422 | if (code == CALL) |
2423 | return false; |
2424 | |
2425 | if (code == MEM) |
2426 | return (mem_might_overlap_already_clobbered_arg_p |
(XEXP (x, 0), GET_MODE_SIZE (GET_MODE (x))));
2428 | |
2429 | /* Scan all subexpressions. */ |
2430 | fmt = GET_RTX_FORMAT (code); |
2431 | for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++) |
2432 | { |
2433 | if (*fmt == 'e') |
2434 | { |
2435 | if (check_sibcall_argument_overlap_1 (XEXP (x, i))) |
2436 | return true; |
2437 | } |
2438 | else if (*fmt == 'E') |
2439 | { |
2440 | for (j = 0; j < XVECLEN (x, i); j++) |
2441 | if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j))) |
2442 | return true; |
2443 | } |
2444 | } |
2445 | return false; |
2446 | } |
2447 | |
/* Scan the sequence after INSN to see whether it dereferences any argument
   slots that we have already clobbered with tail call arguments (as noted in
   the stored_args_map bitmap).  If MARK_STORED_ARGS_MAP, additionally add
   the stack slots for ARG to the stored_args_map bitmap afterwards (when ARG
   is a register, MARK_STORED_ARGS_MAP should be false).  Return true if the
   sequence after INSN dereferences such argument slots, false otherwise.  */
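
/* For example (illustrative, assuming !ARGS_GROW_DOWNWARD): an argument
   whose slot starts at constant offset 8 and occupies 8 bytes causes bits 8
   through 15 of STORED_ARGS_MAP to be set when MARK_STORED_ARGS_MAP is
   true.  */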
2454 | |
2455 | static bool |
2456 | check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg, |
2457 | bool mark_stored_args_map) |
2458 | { |
2459 | poly_uint64 low, high; |
2460 | unsigned HOST_WIDE_INT const_low, const_high; |
2461 | |
2462 | if (insn == NULL_RTX) |
2463 | insn = get_insns (); |
2464 | else |
2465 | insn = NEXT_INSN (insn); |
2466 | |
2467 | for (; insn; insn = NEXT_INSN (insn)) |
2468 | if (INSN_P (insn) |
&& check_sibcall_argument_overlap_1 (PATTERN (insn)))
2470 | break; |
2471 | |
2472 | if (mark_stored_args_map) |
2473 | { |
2474 | if (ARGS_GROW_DOWNWARD) |
2475 | low = -arg->locate.slot_offset.constant - arg->locate.size.constant; |
2476 | else |
2477 | low = arg->locate.slot_offset.constant; |
2478 | high = low + arg->locate.size.constant; |
2479 | |
const_low = constant_lower_bound (low);
if (high.is_constant (&const_high))
2482 | for (unsigned HOST_WIDE_INT i = const_low; i < const_high; ++i) |
bitmap_set_bit (stored_args_map, i);
2484 | else |
2485 | stored_args_watermark = MIN (stored_args_watermark, const_low); |
2486 | } |
2487 | return insn != NULL_RTX; |
2488 | } |
2489 | |
2490 | /* Given that a function returns a value of mode MODE at the most |
2491 | significant end of hard register VALUE, shift VALUE left or right |
2492 | as specified by LEFT_P. Return true if some action was needed. */ |
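
/* For example (illustrative only): an SImode value returned in the most
   significant half of a DImode hard register is shifted by
   GET_MODE_BITSIZE (DImode) - GET_MODE_BITSIZE (SImode) == 32 bits.  */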
2493 | |
2494 | bool |
2495 | shift_return_value (machine_mode mode, bool left_p, rtx value) |
2496 | { |
2497 | gcc_assert (REG_P (value) && HARD_REGISTER_P (value)); |
2498 | machine_mode value_mode = GET_MODE (value); |
poly_int64 shift = GET_MODE_BITSIZE (value_mode) - GET_MODE_BITSIZE (mode);
2500 | |
2501 | if (known_eq (shift, 0)) |
2502 | return false; |
2503 | |
2504 | /* Use ashr rather than lshr for right shifts. This is for the benefit |
2505 | of the MIPS port, which requires SImode values to be sign-extended |
2506 | when stored in 64-bit registers. */ |
2507 | if (!force_expand_binop (value_mode, left_p ? ashl_optab : ashr_optab, |
2508 | value, gen_int_shift_amount (value_mode, shift), |
2509 | value, 1, OPTAB_WIDEN)) |
2510 | gcc_unreachable (); |
2511 | return true; |
2512 | } |
2513 | |
2514 | /* If X is a likely-spilled register value, copy it to a pseudo |
2515 | register and return that register. Return X otherwise. */ |
2516 | |
2517 | static rtx |
2518 | avoid_likely_spilled_reg (rtx x) |
2519 | { |
2520 | rtx new_rtx; |
2521 | |
2522 | if (REG_P (x) |
2523 | && HARD_REGISTER_P (x) |
2524 | && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x)))) |
2525 | { |
2526 | /* Make sure that we generate a REG rather than a CONCAT. |
2527 | Moves into CONCATs can need nontrivial instructions, |
2528 | and the whole point of this function is to avoid |
2529 | using the hard register directly in such a situation. */ |
2530 | generating_concat_p = 0; |
2531 | new_rtx = gen_reg_rtx (GET_MODE (x)); |
2532 | generating_concat_p = 1; |
2533 | emit_move_insn (new_rtx, x); |
2534 | return new_rtx; |
2535 | } |
2536 | return x; |
2537 | } |
2538 | |
2539 | /* Helper function for expand_call. |
Return false if EXP is not implementable as a sibling call.  */
2541 | |
2542 | static bool |
2543 | can_implement_as_sibling_call_p (tree exp, |
2544 | rtx structure_value_addr, |
2545 | tree funtype, |
2546 | tree fndecl, |
2547 | int flags, |
2548 | tree addr, |
2549 | const args_size &args_size) |
2550 | { |
2551 | if (!targetm.have_sibcall_epilogue () |
2552 | && !targetm.emit_epilogue_for_sibcall) |
2553 | { |
maybe_complain_about_tail_call (exp, _("machine description does not "
2555 | "have a sibcall_epilogue " |
2556 | "instruction pattern")); |
2557 | return false; |
2558 | } |
2559 | |
2560 | /* Doing sibling call optimization needs some work, since |
2561 | structure_value_addr can be allocated on the stack. |
2562 | It does not seem worth the effort since few optimizable |
2563 | sibling calls will return a structure. */ |
2564 | if (structure_value_addr != NULL_RTX) |
2565 | { |
maybe_complain_about_tail_call (exp, _("callee returns a structure"));
2567 | return false; |
2568 | } |
2569 | |
2570 | /* Check whether the target is able to optimize the call |
2571 | into a sibcall. */ |
2572 | if (!targetm.function_ok_for_sibcall (fndecl, exp)) |
2573 | { |
maybe_complain_about_tail_call (exp, _("target is not able to optimize "
2575 | "the call into a sibling call")); |
2576 | return false; |
2577 | } |
2578 | |
2579 | /* Functions that do not return exactly once may not be sibcall |
2580 | optimized. */ |
2581 | if (flags & ECF_RETURNS_TWICE) |
2582 | { |
maybe_complain_about_tail_call (exp, _("callee returns twice"));
2584 | return false; |
2585 | } |
2586 | if ((flags & ECF_NORETURN) && !CALL_EXPR_MUST_TAIL_CALL (exp)) |
2587 | { |
maybe_complain_about_tail_call (exp, _("callee does not return"));
2589 | return false; |
2590 | } |
2591 | |
2592 | if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))) |
2593 | { |
maybe_complain_about_tail_call (exp, _("volatile function type"));
2595 | return false; |
2596 | } |
2597 | |
2598 | /* __sanitizer_cov_trace_pc is supposed to inspect its return address |
2599 | to identify the caller, and therefore should not be tailcalled. */ |
2600 | if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL |
&& DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SANITIZER_COV_TRACE_PC)
2602 | { |
2603 | /* No need for maybe_complain_about_tail_call here: |
2604 | the call is synthesized by the compiler. */ |
2605 | return false; |
2606 | } |
2607 | |
2608 | /* If the called function is nested in the current one, it might access |
2609 | some of the caller's arguments, but could clobber them beforehand if |
2610 | the argument areas are shared. */ |
2611 | if (fndecl && decl_function_context (fndecl) == current_function_decl) |
2612 | { |
maybe_complain_about_tail_call (exp, _("nested function"));
2614 | return false; |
2615 | } |
2616 | |
2617 | /* If this function requires more stack slots than the current |
2618 | function, we cannot change it into a sibling call. |
2619 | crtl->args.pretend_args_size is not part of the |
2620 | stack allocated by our caller. */ |
2621 | if (maybe_gt (args_size.constant, |
2622 | crtl->args.size - crtl->args.pretend_args_size)) |
2623 | { |
maybe_complain_about_tail_call (exp, _("callee required more stack "
2625 | "slots than the caller")); |
2626 | return false; |
2627 | } |
2628 | |
2629 | /* If the callee pops its own arguments, then it must pop exactly |
2630 | the same number of arguments as the current function. */ |
if (maybe_ne (targetm.calls.return_pops_args (fndecl, funtype,
args_size.constant),
targetm.calls.return_pops_args (current_function_decl,
2634 | TREE_TYPE |
2635 | (current_function_decl), |
2636 | crtl->args.size))) |
2637 | { |
maybe_complain_about_tail_call (exp, _("inconsistent number of"
2639 | " popped arguments")); |
2640 | return false; |
2641 | } |
2642 | |
2643 | if (!lang_hooks.decls.ok_for_sibcall (fndecl)) |
2644 | { |
maybe_complain_about_tail_call (exp, _("frontend does not support"
2646 | " sibling call")); |
2647 | return false; |
2648 | } |
2649 | |
2650 | /* All checks passed. */ |
2651 | return true; |
2652 | } |
2653 | |
2654 | /* Update stack alignment when the parameter is passed in the stack |
2655 | since the outgoing parameter requires extra alignment on the calling |
2656 | function side. */ |
2657 | |
2658 | static void |
2659 | update_stack_alignment_for_call (struct locate_and_pad_arg_data *locate) |
2660 | { |
2661 | if (crtl->stack_alignment_needed < locate->boundary) |
2662 | crtl->stack_alignment_needed = locate->boundary; |
2663 | if (crtl->preferred_stack_boundary < locate->boundary) |
2664 | crtl->preferred_stack_boundary = locate->boundary; |
2665 | } |
2666 | |
2667 | /* Generate all the code for a CALL_EXPR exp |
2668 | and return an rtx for its value. |
2669 | Store the value in TARGET (specified as an rtx) if convenient. |
2670 | If the value is stored in TARGET then TARGET is returned. |
2671 | If IGNORE is nonzero, then we ignore the value of the function call. */ |
2672 | |
2673 | rtx |
2674 | expand_call (tree exp, rtx target, int ignore) |
2675 | { |
2676 | /* Nonzero if we are currently expanding a call. */ |
2677 | static int currently_expanding_call = 0; |
2678 | |
2679 | /* RTX for the function to be called. */ |
2680 | rtx funexp; |
2681 | /* Sequence of insns to perform a normal "call". */ |
2682 | rtx_insn *normal_call_insns = NULL; |
2683 | /* Sequence of insns to perform a tail "call". */ |
2684 | rtx_insn *tail_call_insns = NULL; |
2685 | /* Data type of the function. */ |
2686 | tree funtype; |
2687 | tree type_arg_types; |
2688 | tree rettype; |
2689 | /* Declaration of the function being called, |
2690 | or 0 if the function is computed (not known by name). */ |
2691 | tree fndecl = 0; |
2692 | /* The type of the function being called. */ |
2693 | tree fntype; |
2694 | bool try_tail_call = CALL_EXPR_TAILCALL (exp); |
/* tree-tailcall decided not to do tail calls.  Error for the musttail case;
   unfortunately we don't know the reason, so the diagnostic is fairly vague.
   When tree-tailcall reported an error it already cleared the flag, so this
   shouldn't really happen unless the musttail pass gave up walking before
   finding the call.  */
2700 | if (!try_tail_call) |
maybe_complain_about_tail_call (exp, _("other reasons"));
2702 | int pass; |
2703 | |
2704 | /* Register in which non-BLKmode value will be returned, |
2705 | or 0 if no value or if value is BLKmode. */ |
2706 | rtx valreg; |
2707 | /* Address where we should return a BLKmode value; |
2708 | 0 if value not BLKmode. */ |
2709 | rtx structure_value_addr = 0; |
2710 | /* Nonzero if that address is being passed by treating it as |
2711 | an extra, implicit first parameter. Otherwise, |
2712 | it is passed by being copied directly into struct_value_rtx. */ |
2713 | int structure_value_addr_parm = 0; |
2714 | /* Holds the value of implicit argument for the struct value. */ |
2715 | tree structure_value_addr_value = NULL_TREE; |
2716 | /* Size of aggregate value wanted, or zero if none wanted |
2717 | or if we are using the non-reentrant PCC calling convention |
2718 | or expecting the value in registers. */ |
2719 | poly_int64 struct_value_size = 0; |
2720 | /* True if called function returns an aggregate in memory PCC style, |
2721 | by returning the address of where to find it. */ |
2722 | bool pcc_struct_value = false; |
2723 | rtx struct_value = 0; |
2724 | |
2725 | /* Number of actual parameters in this call, including struct value addr. */ |
2726 | int num_actuals; |
2727 | /* Number of named args. Args after this are anonymous ones |
2728 | and they must all go on the stack. */ |
2729 | int n_named_args; |
2730 | /* Number of complex actual arguments that need to be split. */ |
2731 | int num_complex_actuals = 0; |
2732 | |
2733 | /* Vector of information about each argument. |
2734 | Arguments are numbered in the order they will be pushed, |
2735 | not the order they are written. */ |
2736 | struct arg_data *args; |
2737 | |
2738 | /* Total size in bytes of all the stack-parms scanned so far. */ |
2739 | struct args_size args_size; |
2740 | struct args_size adjusted_args_size; |
2741 | /* Size of arguments before any adjustments (such as rounding). */ |
2742 | poly_int64 unadjusted_args_size; |
2743 | /* Data on reg parms scanned so far. */ |
2744 | CUMULATIVE_ARGS args_so_far_v; |
2745 | cumulative_args_t args_so_far; |
2746 | /* Nonzero if a reg parm has been scanned. */ |
2747 | int reg_parm_seen; |
2748 | |
2749 | /* True if we must avoid push-insns in the args for this call. |
2750 | If stack space is allocated for register parameters, but not by the |
2751 | caller, then it is preallocated in the fixed part of the stack frame. |
2752 | So the entire argument block must then be preallocated (i.e., we |
2753 | ignore PUSH_ROUNDING in that case). */ |
2754 | bool must_preallocate = !targetm.calls.push_argument (0); |
2755 | |
2756 | /* Size of the stack reserved for parameter registers. */ |
2757 | int reg_parm_stack_space = 0; |
2758 | |
2759 | /* Address of space preallocated for stack parms |
2760 | (on machines that lack push insns), or 0 if space not preallocated. */ |
2761 | rtx argblock = 0; |
2762 | |
2763 | /* Mask of ECF_ and ERF_ flags. */ |
2764 | int flags = 0; |
2765 | int return_flags = 0; |
2766 | #ifdef REG_PARM_STACK_SPACE |
2767 | /* Define the boundary of the register parm stack space that needs to be |
2768 | saved, if any. */ |
2769 | int low_to_save, high_to_save; |
2770 | rtx save_area = 0; /* Place that it is saved */ |
2771 | #endif |
2772 | |
2773 | unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use; |
2774 | char *initial_stack_usage_map = stack_usage_map; |
2775 | unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark; |
2776 | char *stack_usage_map_buf = NULL; |
2777 | |
2778 | poly_int64 old_stack_allocated; |
2779 | |
2780 | /* State variables to track stack modifications. */ |
2781 | rtx old_stack_level = 0; |
2782 | int old_stack_arg_under_construction = 0; |
2783 | poly_int64 old_pending_adj = 0; |
2784 | int old_inhibit_defer_pop = inhibit_defer_pop; |
2785 | |
2786 | /* Some stack pointer alterations we make are performed via |
2787 | allocate_dynamic_stack_space. This modifies the stack_pointer_delta, |
2788 | which we then also need to save/restore along the way. */ |
2789 | poly_int64 old_stack_pointer_delta = 0; |
2790 | |
2791 | rtx call_fusage; |
2792 | tree addr = CALL_EXPR_FN (exp); |
2793 | int i; |
2794 | /* The alignment of the stack, in bits. */ |
2795 | unsigned HOST_WIDE_INT preferred_stack_boundary; |
2796 | /* The alignment of the stack, in bytes. */ |
2797 | unsigned HOST_WIDE_INT preferred_unit_stack_boundary; |
2798 | /* The static chain value to use for this call. */ |
2799 | rtx static_chain_value; |
2800 | /* See if this is "nothrow" function call. */ |
2801 | if (TREE_NOTHROW (exp)) |
2802 | flags |= ECF_NOTHROW; |
2803 | |
2804 | /* See if we can find a DECL-node for the actual function, and get the |
2805 | function attributes (flags) from the function decl or type node. */ |
2806 | fndecl = get_callee_fndecl (exp); |
2807 | if (fndecl) |
2808 | { |
2809 | fntype = TREE_TYPE (fndecl); |
flags |= flags_from_decl_or_type (fndecl);
2811 | return_flags |= decl_return_flags (fndecl); |
2812 | } |
2813 | else |
2814 | { |
2815 | fntype = TREE_TYPE (TREE_TYPE (addr)); |
flags |= flags_from_decl_or_type (fntype);
2817 | if (CALL_EXPR_BY_DESCRIPTOR (exp)) |
2818 | flags |= ECF_BY_DESCRIPTOR; |
2819 | } |
2820 | rettype = TREE_TYPE (exp); |
2821 | |
2822 | struct_value = targetm.calls.struct_value_rtx (fntype, 0); |
2823 | |
2824 | /* Warn if this value is an aggregate type, |
2825 | regardless of which calling convention we are using for it. */ |
2826 | if (AGGREGATE_TYPE_P (rettype)) |
2827 | warning (OPT_Waggregate_return, "function call has aggregate value"); |
2828 | |
2829 | /* If the result of a non looping pure or const function call is |
2830 | ignored (or void), and none of its arguments are volatile, we can |
2831 | avoid expanding the call and just evaluate the arguments for |
2832 | side-effects. */ |
2833 | if ((flags & (ECF_CONST | ECF_PURE)) |
2834 | && (!(flags & ECF_LOOPING_CONST_OR_PURE)) |
2835 | && (flags & ECF_NOTHROW) |
2836 | && (ignore || target == const0_rtx |
2837 | || TYPE_MODE (rettype) == VOIDmode)) |
2838 | { |
2839 | bool volatilep = false; |
2840 | tree arg; |
2841 | call_expr_arg_iterator iter; |
2842 | |
2843 | FOR_EACH_CALL_EXPR_ARG (arg, iter, exp) |
2844 | if (TREE_THIS_VOLATILE (arg)) |
2845 | { |
2846 | volatilep = true; |
2847 | break; |
2848 | } |
2849 | |
2850 | if (! volatilep) |
2851 | { |
2852 | FOR_EACH_CALL_EXPR_ARG (arg, iter, exp) |
expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
2854 | return const0_rtx; |
2855 | } |
2856 | } |
2857 | |
2858 | #ifdef REG_PARM_STACK_SPACE |
2859 | reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl); |
2860 | #endif |
2861 | |
2862 | if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))) |
2863 | && reg_parm_stack_space > 0 && targetm.calls.push_argument (0)) |
2864 | must_preallocate = true; |
2865 | |
2866 | /* Set up a place to return a structure. */ |
2867 | |
2868 | /* Cater to broken compilers. */ |
2869 | if (aggregate_value_p (exp, fntype)) |
2870 | { |
2871 | /* This call returns a big structure. */ |
2872 | flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE); |
2873 | |
2874 | #ifdef PCC_STATIC_STRUCT_RETURN |
2875 | { |
2876 | pcc_struct_value = true; |
2877 | } |
2878 | #else /* not PCC_STATIC_STRUCT_RETURN */ |
2879 | { |
if (!poly_int_tree_p (TYPE_SIZE_UNIT (rettype), &struct_value_size))
2881 | struct_value_size = -1; |
2882 | |
2883 | /* Even if it is semantically safe to use the target as the return |
2884 | slot, it may be not sufficiently aligned for the return type. */ |
2885 | if (CALL_EXPR_RETURN_SLOT_OPT (exp) |
2886 | && target |
2887 | && MEM_P (target) |
2888 | /* If rettype is addressable, we may not create a temporary. |
2889 | If target is properly aligned at runtime and the compiler |
2890 | just doesn't know about it, it will work fine, otherwise it |
2891 | will be UB. */ |
2892 | && (TREE_ADDRESSABLE (rettype) |
2893 | || !(MEM_ALIGN (target) < TYPE_ALIGN (rettype) |
2894 | && targetm.slow_unaligned_access (TYPE_MODE (rettype), |
2895 | MEM_ALIGN (target))))) |
2896 | structure_value_addr = XEXP (target, 0); |
2897 | else |
2898 | { |
2899 | /* For variable-sized objects, we must be called with a target |
2900 | specified. If we were to allocate space on the stack here, |
2901 | we would have no way of knowing when to free it. */ |
2902 | rtx d = assign_temp (rettype, 1, 1); |
2903 | structure_value_addr = XEXP (d, 0); |
2904 | target = 0; |
2905 | } |
2906 | } |
2907 | #endif /* not PCC_STATIC_STRUCT_RETURN */ |
2908 | } |
2909 | |
2910 | /* Figure out the amount to which the stack should be aligned. */ |
2911 | preferred_stack_boundary = PREFERRED_STACK_BOUNDARY; |
2912 | if (fndecl) |
2913 | { |
2914 | struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl); |
2915 | /* Without automatic stack alignment, we can't increase preferred |
2916 | stack boundary. With automatic stack alignment, it is |
2917 | unnecessary since unless we can guarantee that all callers will |
2918 | align the outgoing stack properly, callee has to align its |
2919 | stack anyway. */ |
2920 | if (i |
2921 | && i->preferred_incoming_stack_boundary |
2922 | && i->preferred_incoming_stack_boundary < preferred_stack_boundary) |
2923 | preferred_stack_boundary = i->preferred_incoming_stack_boundary; |
2924 | } |
2925 | |
2926 | /* Operand 0 is a pointer-to-function; get the type of the function. */ |
2927 | funtype = TREE_TYPE (addr); |
2928 | gcc_assert (POINTER_TYPE_P (funtype)); |
2929 | funtype = TREE_TYPE (funtype); |
2930 | |
2931 | /* Count whether there are actual complex arguments that need to be split |
2932 | into their real and imaginary parts. Munge the type_arg_types |
2933 | appropriately here as well. */ |
2934 | if (targetm.calls.split_complex_arg) |
2935 | { |
2936 | call_expr_arg_iterator iter; |
2937 | tree arg; |
2938 | FOR_EACH_CALL_EXPR_ARG (arg, iter, exp) |
2939 | { |
2940 | tree type = TREE_TYPE (arg); |
2941 | if (type && TREE_CODE (type) == COMPLEX_TYPE |
2942 | && targetm.calls.split_complex_arg (type)) |
2943 | num_complex_actuals++; |
2944 | } |
2945 | type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype)); |
2946 | } |
2947 | else |
2948 | type_arg_types = TYPE_ARG_TYPES (funtype); |
2949 | |
2950 | if (flags & ECF_MAY_BE_ALLOCA) |
2951 | cfun->calls_alloca = 1; |
2952 | |
2953 | /* If struct_value is 0 (no struct_value_rtx on this target), pass the address |
2954 | as if it were an extra parameter. Put the argument expression |
2955 | in structure_value_addr_value. */ |
2956 | if (structure_value_addr && struct_value == 0) |
2957 | { |
2958 | /* If structure_value_addr is a REG other than |
2959 | virtual_outgoing_args_rtx, we can always use it.  If it |
2960 | is not a REG, we must always copy it into a register. |
2961 | If it is virtual_outgoing_args_rtx, we must copy it to another |
2962 | register in some cases. */ |
2963 | rtx temp = (!REG_P (structure_value_addr) |
2964 | || (ACCUMULATE_OUTGOING_ARGS |
2965 | && stack_arg_under_construction |
2966 | && structure_value_addr == virtual_outgoing_args_rtx) |
2967 | ? copy_addr_to_reg (convert_memory_address |
2968 | (Pmode, structure_value_addr)) |
2969 | : structure_value_addr); |
2970 | |
2971 | structure_value_addr_value = |
2972 | make_tree (build_pointer_type (TREE_TYPE (funtype)), temp); |
2973 | structure_value_addr_parm = 1; |
2974 | } |
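| /* From here on the return-slot address behaves like one extra pointer |
| argument: it is counted in NUM_ACTUALS and N_NAMED_ARGS below and is |
| set up by initialize_argument_information like any other parm.  */ |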
2975 | |
2976 | /* Count the arguments and set NUM_ACTUALS. */ |
2977 | num_actuals |
2978 | = call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm; |
2979 | |
2980 | /* Compute number of named args. |
2981 | First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */ |
2982 | |
2983 | if (type_arg_types != 0) |
2984 | n_named_args |
2985 | = (list_length (type_arg_types) |
2986 | /* Count the struct value address, if it is passed as a parm. */ |
2987 | + structure_value_addr_parm); |
2988 | else if (TYPE_NO_NAMED_ARGS_STDARG_P (funtype)) |
2989 | n_named_args = structure_value_addr_parm; |
2990 | else |
2991 | /* If we know nothing, treat all args as named. */ |
2992 | n_named_args = num_actuals; |
2993 | |
2994 | /* Start updating where the next arg would go. |
2995 | |
2996 | On some machines (such as the PA) indirect calls have a different |
2997 | calling convention than normal calls. The fourth argument in |
2998 | INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call |
2999 | or not. */ |
3000 | INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args); |
3001 | args_so_far = pack_cumulative_args (arg: &args_so_far_v); |
3002 | |
3003 | /* Now possibly adjust the number of named args. |
3004 | Normally, don't include the last named arg if anonymous args follow. |
3005 | We do include the last named arg if |
3006 | targetm.calls.strict_argument_naming() returns nonzero. |
3007 | (If no anonymous args follow, the result of list_length is actually |
3008 | one too large. This is harmless.) |
3009 | |
3010 | If targetm.calls.pretend_outgoing_varargs_named() returns |
3011 | nonzero, and targetm.calls.strict_argument_naming() returns zero, |
3012 | this machine will be able to place unnamed args that were passed |
3013 | in registers into the stack. So treat all args as named. This |
3014 | allows the insns emitted for a specific argument list to be |
3015 | independent of the function declaration. |
3016 | |
3017 | If targetm.calls.pretend_outgoing_varargs_named() returns zero, |
3018 | we do not have any reliable way to pass unnamed args in |
3019 | registers, so we must force them into memory. */ |
3020 | |
3021 | if ((type_arg_types != 0 || TYPE_NO_NAMED_ARGS_STDARG_P (funtype)) |
3022 | && targetm.calls.strict_argument_naming (args_so_far)) |
3023 | ; |
3024 | else if (type_arg_types != 0 |
3025 | && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far)) |
3026 | /* Don't include the last named arg. */ |
3027 | --n_named_args; |
3028 | else if (TYPE_NO_NAMED_ARGS_STDARG_P (funtype) |
3029 | && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far)) |
3030 | n_named_args = 0; |
3031 | else |
3032 | /* Treat all args as named. */ |
3033 | n_named_args = num_actuals; |
3034 | |
3035 | /* Make a vector to hold all the information about each arg. */ |
3036 | args = XCNEWVEC (struct arg_data, num_actuals); |
3037 | |
3038 | /* Build up entries in the ARGS array, compute the size of the |
3039 | arguments into ARGS_SIZE, etc. */ |
3040 | initialize_argument_information (num_actuals, args, args_size: &args_size, |
3041 | n_named_args, exp, |
3042 | struct_value_addr_value: structure_value_addr_value, fndecl, fntype, |
3043 | args_so_far, reg_parm_stack_space, |
3044 | old_stack_level: &old_stack_level, old_pending_adj: &old_pending_adj, |
3045 | must_preallocate: &must_preallocate, ecf_flags: &flags, |
3046 | may_tailcall: &try_tail_call, CALL_FROM_THUNK_P (exp)); |
3047 | |
3048 | if (args_size.var) |
3049 | must_preallocate = true; |
3050 | |
3051 | /* Now make final decision about preallocating stack space. */ |
3052 | must_preallocate = finalize_must_preallocate (must_preallocate, |
3053 | num_actuals, args, |
3054 | args_size: &args_size); |
3055 | |
3056 | /* If the structure value address will reference the stack pointer, we |
3057 | must stabilize it. We don't need to do this if we know that we are |
3058 | not going to adjust the stack pointer in processing this call. */ |
3059 | |
3060 | if (structure_value_addr |
3061 | && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr) |
3062 | || reg_mentioned_p (virtual_outgoing_args_rtx, |
3063 | structure_value_addr)) |
3064 | && (args_size.var |
3065 | || (!ACCUMULATE_OUTGOING_ARGS |
3066 | && maybe_ne (a: args_size.constant, b: 0)))) |
3067 | structure_value_addr = copy_to_reg (structure_value_addr); |
3068 | |
3069 | /* Tail calls can make things harder to debug, and we've traditionally |
3070 | pushed these optimizations into -O2. Don't try if we're already |
3071 | expanding a call, as that means we're an argument. Don't try if |
3072 | there are cleanups, as we know there's code to follow the call. */ |
3073 | if (currently_expanding_call++ != 0) |
3074 | { |
3075 | maybe_complain_about_tail_call (call_expr: exp, _("inside another call")); |
3076 | try_tail_call = 0; |
3077 | } |
3078 | if (!flag_optimize_sibling_calls |
3079 | && !CALL_FROM_THUNK_P (exp) |
3080 | && !CALL_EXPR_MUST_TAIL_CALL (exp)) |
3081 | try_tail_call = 0; |
3082 | if (args_size.var) |
3083 | { |
3084 | maybe_complain_about_tail_call (call_expr: exp, _("variable size arguments")); |
3085 | try_tail_call = 0; |
3086 | } |
3087 | if (dbg_cnt (index: tail_call) == false) |
3088 | try_tail_call = 0; |
3089 | |
3090 | /* Work around buggy C/C++ wrappers around Fortran routines with |
3091 | character(len=constant) arguments if the hidden string length arguments |
3092 | are passed on the stack; if the callers forget to pass those arguments, |
3093 | attempting to tail call in such routines leads to stack corruption. |
3094 | Avoid tail calls in functions where at least one such hidden string |
3095 | length argument is passed (partially or fully) on the stack in the |
3096 | caller and the callee needs to pass any arguments on the stack. |
3097 | See PR90329. */ |
3098 | if (try_tail_call && maybe_ne (a: args_size.constant, b: 0)) |
3099 | for (tree arg = DECL_ARGUMENTS (current_function_decl); |
3100 | arg; arg = DECL_CHAIN (arg)) |
3101 | if (DECL_HIDDEN_STRING_LENGTH (arg) && DECL_INCOMING_RTL (arg)) |
3102 | { |
3103 | subrtx_iterator::array_type array; |
3104 | FOR_EACH_SUBRTX (iter, array, DECL_INCOMING_RTL (arg), NONCONST) |
3105 | if (MEM_P (*iter)) |
3106 | { |
3107 | try_tail_call = 0; |
3108 | maybe_complain_about_tail_call (call_expr: exp, _("hidden string length " |
3109 | "argument passed on " |
3110 | "stack")); |
3111 | break; |
3112 | } |
3113 | } |
3114 | |
3115 | /* If the user has marked the function as requiring tail-call |
3116 | optimization, attempt it. */ |
3117 | if (CALL_EXPR_MUST_TAIL_CALL (exp)) |
3118 | try_tail_call = 1; |
3119 | |
3120 | /* Check the remaining reasons why tail call optimization could fail. */ |
3121 | if (try_tail_call) |
3122 | try_tail_call = can_implement_as_sibling_call_p (exp, |
3123 | structure_value_addr, |
3124 | funtype, |
3125 | fndecl, |
3126 | flags, addr, args_size); |
3127 | |
3128 | /* Check if caller and callee disagree in promotion of function |
3129 | return value. */ |
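| /* For example, if the caller's result is promoted to a wider mode as |
| signed while the callee's is promoted as unsigned (or to a different |
| mode altogether), a sibcall would hand back a value in the wrong |
| representation, so we refuse it below.  */ |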
3130 | if (try_tail_call) |
3131 | { |
3132 | machine_mode caller_mode, caller_promoted_mode; |
3133 | machine_mode callee_mode, callee_promoted_mode; |
3134 | int caller_unsignedp, callee_unsignedp; |
3135 | tree caller_res = DECL_RESULT (current_function_decl); |
3136 | |
3137 | caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res)); |
3138 | caller_mode = DECL_MODE (caller_res); |
3139 | callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype)); |
3140 | callee_mode = TYPE_MODE (TREE_TYPE (funtype)); |
3141 | caller_promoted_mode |
3142 | = promote_function_mode (TREE_TYPE (caller_res), caller_mode, |
3143 | &caller_unsignedp, |
3144 | TREE_TYPE (current_function_decl), 1); |
3145 | callee_promoted_mode |
3146 | = promote_function_mode (TREE_TYPE (funtype), callee_mode, |
3147 | &callee_unsignedp, |
3148 | funtype, 1); |
3149 | if (caller_mode != VOIDmode |
3150 | && (caller_promoted_mode != callee_promoted_mode |
3151 | || ((caller_mode != caller_promoted_mode |
3152 | || callee_mode != callee_promoted_mode) |
3153 | && (caller_unsignedp != callee_unsignedp |
3154 | || partial_subreg_p (outermode: caller_mode, innermode: callee_mode))))) |
3155 | { |
3156 | try_tail_call = 0; |
3157 | maybe_complain_about_tail_call (call_expr: exp, _("caller and callee disagree " |
3158 | "in promotion of function " |
3159 | "return value")); |
3160 | } |
3161 | } |
3162 | |
3163 | /* Ensure current function's preferred stack boundary is at least |
3164 | what we need. Stack alignment may also increase preferred stack |
3165 | boundary. */ |
3166 | for (i = 0; i < num_actuals; i++) |
3167 | if (reg_parm_stack_space > 0 |
3168 | || args[i].reg == 0 |
3169 | || args[i].partial != 0 |
3170 | || args[i].pass_on_stack) |
3171 | update_stack_alignment_for_call (locate: &args[i].locate); |
3172 | if (crtl->preferred_stack_boundary < preferred_stack_boundary) |
3173 | crtl->preferred_stack_boundary = preferred_stack_boundary; |
3174 | else |
3175 | preferred_stack_boundary = crtl->preferred_stack_boundary; |
3176 | |
3177 | preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT; |
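| /* PREFERRED_STACK_BOUNDARY is in bits, so e.g. a 128-bit boundary |
| gives a 16-byte unit boundary here.  */ |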
3178 | |
3179 | if (flag_callgraph_info) |
3180 | record_final_call (callee: fndecl, EXPR_LOCATION (exp)); |
3181 | |
3182 | /* We want to make two insn chains; one for a sibling call, the other |
3183 | for a normal call. We will select one of the two chains after |
3184 | initial RTL generation is complete. */ |
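| /* Pass 0 builds the sibling-call sequence and pass 1 the normal one; |
| pass 0 is skipped entirely when TRY_TAIL_CALL is zero.  If pass 0 |
| fails (SIBCALL_FAILURE), we fall through to pass 1; if it succeeds, |
| we break out and use the sibcall sequence directly.  */ |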
3185 | for (pass = try_tail_call ? 0 : 1; pass < 2; pass++) |
3186 | { |
3187 | bool sibcall_failure = false; |
3188 | bool normal_failure = false; |
3189 | /* We want to emit any pending stack adjustments before the tail |
3190 | recursion "call". That way we know any adjustment after the tail |
3191 | recursion call can be ignored if we indeed use the tail |
3192 | call expansion. */ |
3193 | saved_pending_stack_adjust save; |
3194 | rtx_insn *insns, *before_call, *after_args; |
3195 | rtx next_arg_reg; |
3196 | |
3197 | if (pass == 0) |
3198 | { |
3199 | /* State variables we need to save and restore between |
3200 | iterations. */ |
3201 | save_pending_stack_adjust (&save); |
3202 | } |
3203 | if (pass) |
3204 | flags &= ~ECF_SIBCALL; |
3205 | else |
3206 | flags |= ECF_SIBCALL; |
3207 | |
3208 | /* Other state variables that we must reinitialize each time |
3209 | through the loop (that are not initialized by the loop itself). */ |
3210 | argblock = 0; |
3211 | call_fusage = 0; |
3212 | |
3213 | /* Start a new sequence for the normal call case. |
3214 | |
3215 | From this point on, if the sibling call fails, we want to set |
3216 | sibcall_failure instead of continuing the loop. */ |
3217 | start_sequence (); |
3218 | |
3219 | /* Don't let pending stack adjusts add up to too much. |
3220 | Also, do all pending adjustments now if there is any chance |
3221 | this might be a call to alloca or if we are expanding a sibling |
3222 | call sequence. |
3223 | Also do the adjustments before a throwing call, otherwise |
3224 | exception handling can fail; PR 19225. */ |
3225 | if (maybe_ge (pending_stack_adjust, 32) |
3226 | || (maybe_ne (pending_stack_adjust, b: 0) |
3227 | && (flags & ECF_MAY_BE_ALLOCA)) |
3228 | || (maybe_ne (pending_stack_adjust, b: 0) |
3229 | && flag_exceptions && !(flags & ECF_NOTHROW)) |
3230 | || pass == 0) |
3231 | do_pending_stack_adjust (); |
3232 | |
3233 | /* Precompute any arguments as needed. */ |
3234 | if (pass) |
3235 | precompute_arguments (num_actuals, args); |
3236 | |
3237 | /* Now we are about to start emitting insns that can be deleted |
3238 | if a libcall is deleted. */ |
3239 | if (pass && (flags & ECF_MALLOC)) |
3240 | start_sequence (); |
3241 | |
3242 | /* Check the canary value for a sibcall, or for a function that |
3243 | doesn't return and could throw.  */ |
3244 | if ((pass == 0 |
3245 | || ((flags & ECF_NORETURN) != 0 && tree_could_throw_p (exp))) |
3246 | && crtl->stack_protect_guard |
3247 | && targetm.stack_protect_runtime_enabled_p ()) |
3248 | stack_protect_epilogue (); |
3249 | |
3250 | adjusted_args_size = args_size; |
3251 | /* Compute the actual size of the argument block required. The variable |
3252 | and constant sizes must be combined, the size may have to be rounded, |
3253 | and there may be a minimum required size. When generating a sibcall |
3254 | pattern, do not round up, since we'll be re-using whatever space our |
3255 | caller provided. */ |
3256 | unadjusted_args_size |
3257 | = compute_argument_block_size (reg_parm_stack_space, |
3258 | args_size: &adjusted_args_size, |
3259 | fndecl, fntype, |
3260 | preferred_stack_boundary: (pass == 0 ? 0 |
3261 | : preferred_stack_boundary)); |
3262 | |
3263 | old_stack_allocated = stack_pointer_delta - pending_stack_adjust; |
3264 | |
3265 | /* The argument block when performing a sibling call is the |
3266 | incoming argument block. */ |
3267 | if (pass == 0) |
3268 | { |
3269 | argblock = crtl->args.internal_arg_pointer; |
3270 | if (STACK_GROWS_DOWNWARD) |
3271 | argblock |
3272 | = plus_constant (Pmode, argblock, crtl->args.pretend_args_size); |
3273 | else |
3274 | argblock |
3275 | = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size); |
3276 | |
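| /* STORED_ARGS_MAP records which bytes of the incoming argument area |
| have already been overwritten with outgoing arguments, so that |
| check_sibcall_argument_overlap can detect later reads of clobbered |
| slots.  */ |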
3277 | HOST_WIDE_INT map_size = constant_lower_bound (a: args_size.constant); |
3278 | stored_args_map = sbitmap_alloc (map_size); |
3279 | bitmap_clear (stored_args_map); |
3280 | stored_args_watermark = HOST_WIDE_INT_M1U; |
3281 | } |
3282 | |
3283 | /* If we have no actual push instructions, or shouldn't use them, |
3284 | make space for all args right now. */ |
3285 | else if (adjusted_args_size.var != 0) |
3286 | { |
3287 | if (old_stack_level == 0) |
3288 | { |
3289 | emit_stack_save (SAVE_BLOCK, &old_stack_level); |
3290 | old_stack_pointer_delta = stack_pointer_delta; |
3291 | old_pending_adj = pending_stack_adjust; |
3292 | pending_stack_adjust = 0; |
3293 | /* stack_arg_under_construction says whether a stack arg is |
3294 | being constructed at the old stack level. Pushing the stack |
3295 | gets a clean outgoing argument block. */ |
3296 | old_stack_arg_under_construction = stack_arg_under_construction; |
3297 | stack_arg_under_construction = 0; |
3298 | } |
3299 | argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0); |
3300 | if (flag_stack_usage_info) |
3301 | current_function_has_unbounded_dynamic_stack_size = 1; |
3302 | } |
3303 | else |
3304 | { |
3305 | /* Note that we must go through the motions of allocating an argument |
3306 | block even if the size is zero because we may be storing args |
3307 | in the area reserved for register arguments, which may be part of |
3308 | the stack frame. */ |
3309 | |
3310 | poly_int64 needed = adjusted_args_size.constant; |
3311 | |
3312 | /* Store the maximum argument space used. It will be pushed by |
3313 | the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow |
3314 | checking). */ |
3315 | |
3316 | crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size, |
3317 | b: needed); |
3318 | |
3319 | if (must_preallocate) |
3320 | { |
3321 | if (ACCUMULATE_OUTGOING_ARGS) |
3322 | { |
3323 | /* Since the stack pointer will never be pushed, it is |
3324 | possible for the evaluation of a parm to clobber |
3325 | something we have already written to the stack. |
3326 | Since most function calls on RISC machines do not use |
3327 | the stack, this is uncommon, but must work correctly. |
3328 | |
3329 | Therefore, we save any area of the stack that was already |
3330 | written and that we are using. Here we set up to do this |
3331 | by making a new stack usage map from the old one. The |
3332 | actual save will be done by store_one_arg. |
3333 | |
3334 | Another approach might be to try to reorder the argument |
3335 | evaluations to avoid this conflicting stack usage. */ |
3336 | |
3337 | /* Since we will be writing into the entire argument area, |
3338 | the map must be allocated for its entire size, not just |
3339 | the part that is the responsibility of the caller. */ |
3340 | if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))) |
3341 | needed += reg_parm_stack_space; |
3342 | |
3343 | poly_int64 limit = needed; |
3344 | if (ARGS_GROW_DOWNWARD) |
3345 | limit += 1; |
3346 | |
3347 | /* For polynomial sizes, this is the maximum possible |
3348 | size needed for arguments with a constant size |
3349 | and offset. */ |
3350 | HOST_WIDE_INT const_limit = constant_lower_bound (a: limit); |
3351 | highest_outgoing_arg_in_use |
3352 | = MAX (initial_highest_arg_in_use, const_limit); |
3353 | |
3354 | free (ptr: stack_usage_map_buf); |
3355 | stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use); |
3356 | stack_usage_map = stack_usage_map_buf; |
3357 | |
3358 | if (initial_highest_arg_in_use) |
3359 | memcpy (dest: stack_usage_map, src: initial_stack_usage_map, |
3360 | n: initial_highest_arg_in_use); |
3361 | |
3362 | if (initial_highest_arg_in_use != highest_outgoing_arg_in_use) |
3363 | memset (s: &stack_usage_map[initial_highest_arg_in_use], c: 0, |
3364 | n: (highest_outgoing_arg_in_use |
3365 | - initial_highest_arg_in_use)); |
3366 | needed = 0; |
3367 | |
3368 | /* The address of the outgoing argument list must not be |
3369 | copied to a register here, because argblock would be left |
3370 | pointing to the wrong place after the call to |
3371 | allocate_dynamic_stack_space below. */ |
3372 | |
3373 | argblock = virtual_outgoing_args_rtx; |
3374 | } |
3375 | else |
3376 | { |
3377 | /* Try to reuse some or all of the pending_stack_adjust |
3378 | to get this space. */ |
3379 | if (inhibit_defer_pop == 0 |
3380 | && (combine_pending_stack_adjustment_and_call |
3381 | (adjustment_out: &needed, |
3382 | unadjusted_args_size, |
3383 | args_size: &adjusted_args_size, |
3384 | preferred_unit_stack_boundary))) |
3385 | { |
3386 | /* combine_pending_stack_adjustment_and_call computes |
3387 | an adjustment before the arguments are allocated. |
3388 | Account for them and see whether or not the stack |
3389 | needs to go up or down. */ |
3390 | needed = unadjusted_args_size - needed; |
3391 | |
3392 | /* Checked by |
3393 | combine_pending_stack_adjustment_and_call. */ |
3394 | gcc_checking_assert (ordered_p (needed, 0)); |
3395 | if (maybe_lt (a: needed, b: 0)) |
3396 | { |
3397 | /* We're releasing stack space. */ |
3398 | /* ??? We can avoid any adjustment at all if we're |
3399 | already aligned. FIXME. */ |
3400 | pending_stack_adjust = -needed; |
3401 | do_pending_stack_adjust (); |
3402 | needed = 0; |
3403 | } |
3404 | else |
3405 | /* We need to allocate space. We'll do that in |
3406 | push_block below. */ |
3407 | pending_stack_adjust = 0; |
3408 | } |
3409 | |
3410 | /* Special case this because overhead of `push_block' in |
3411 | this case is non-trivial. */ |
3412 | if (known_eq (needed, 0)) |
3413 | argblock = virtual_outgoing_args_rtx; |
3414 | else |
3415 | { |
3416 | rtx needed_rtx = gen_int_mode (needed, Pmode); |
3417 | argblock = push_block (needed_rtx, 0, 0); |
3418 | if (ARGS_GROW_DOWNWARD) |
3419 | argblock = plus_constant (Pmode, argblock, needed); |
3420 | } |
3421 | |
3422 | /* We only really need to call `copy_to_reg' in the case |
3423 | where push insns are going to be used to pass ARGBLOCK |
3424 | to a function call in ARGS. In that case, the stack |
3425 | pointer changes value from the allocation point to the |
3426 | call point, and hence the value of |
3427 | VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might |
3428 | as well always do it. */ |
3429 | argblock = copy_to_reg (argblock); |
3430 | } |
3431 | } |
3432 | } |
3433 | |
3434 | if (ACCUMULATE_OUTGOING_ARGS) |
3435 | { |
3436 | /* The save/restore code in store_one_arg handles all |
3437 | cases except one: a constructor call (including a C |
3438 | function returning a BLKmode struct) to initialize |
3439 | an argument. */ |
3440 | if (stack_arg_under_construction) |
3441 | { |
3442 | rtx push_size |
3443 | = (gen_int_mode |
3444 | (adjusted_args_size.constant |
3445 | + (OUTGOING_REG_PARM_STACK_SPACE (!fndecl ? fntype |
3446 | : TREE_TYPE (fndecl)) |
3447 | ? 0 : reg_parm_stack_space), Pmode)); |
3448 | if (old_stack_level == 0) |
3449 | { |
3450 | emit_stack_save (SAVE_BLOCK, &old_stack_level); |
3451 | old_stack_pointer_delta = stack_pointer_delta; |
3452 | old_pending_adj = pending_stack_adjust; |
3453 | pending_stack_adjust = 0; |
3454 | /* stack_arg_under_construction says whether a stack |
3455 | arg is being constructed at the old stack level. |
3456 | Pushing the stack gets a clean outgoing argument |
3457 | block. */ |
3458 | old_stack_arg_under_construction |
3459 | = stack_arg_under_construction; |
3460 | stack_arg_under_construction = 0; |
3461 | /* Make a new map for the new argument list. */ |
3462 | free (ptr: stack_usage_map_buf); |
3463 | stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use); |
3464 | stack_usage_map = stack_usage_map_buf; |
3465 | highest_outgoing_arg_in_use = 0; |
3466 | stack_usage_watermark = HOST_WIDE_INT_M1U; |
3467 | } |
3468 | /* We can pass TRUE as the last argument because we just |
3469 | saved the stack pointer and will restore it right after |
3470 | the call. */ |
3471 | allocate_dynamic_stack_space (push_size, 0, BIGGEST_ALIGNMENT, |
3472 | -1, true); |
3473 | } |
3474 | |
3475 | /* If argument evaluation might modify the stack pointer, |
3476 | copy the address of the argument list to a register. */ |
3477 | for (i = 0; i < num_actuals; i++) |
3478 | if (args[i].pass_on_stack) |
3479 | { |
3480 | argblock = copy_addr_to_reg (argblock); |
3481 | break; |
3482 | } |
3483 | } |
3484 | |
3485 | compute_argument_addresses (args, argblock, num_actuals); |
3486 | |
3487 | /* Stack is properly aligned, pops can't safely be deferred during |
3488 | the evaluation of the arguments. */ |
3489 | NO_DEFER_POP; |
3490 | |
3491 | /* Precompute all register parameters. It isn't safe to compute |
3492 | anything once we have started filling any specific hard regs. |
3493 | TLS symbols sometimes need a call to resolve. Precompute |
3494 | register parameters before any stack pointer manipulation |
3495 | to avoid unaligned stack in the called function. */ |
3496 | precompute_register_parameters (num_actuals, args, reg_parm_seen: ®_parm_seen); |
3497 | |
3498 | OK_DEFER_POP; |
3499 | |
3500 | /* Perform stack alignment before the first push (the last arg). */ |
3501 | if (argblock == 0 |
3502 | && maybe_gt (adjusted_args_size.constant, reg_parm_stack_space) |
3503 | && maybe_ne (a: adjusted_args_size.constant, b: unadjusted_args_size)) |
3504 | { |
3505 | /* When the stack adjustment is pending, we get better code |
3506 | by combining the adjustments. */ |
3507 | if (maybe_ne (pending_stack_adjust, b: 0) |
3508 | && ! inhibit_defer_pop |
3509 | && (combine_pending_stack_adjustment_and_call |
3510 | (adjustment_out: &pending_stack_adjust, |
3511 | unadjusted_args_size, |
3512 | args_size: &adjusted_args_size, |
3513 | preferred_unit_stack_boundary))) |
3514 | do_pending_stack_adjust (); |
3515 | else if (argblock == 0) |
3516 | anti_adjust_stack (gen_int_mode (adjusted_args_size.constant |
3517 | - unadjusted_args_size, |
3518 | Pmode)); |
3519 | } |
3520 | /* Now that the stack is properly aligned, pops can't safely |
3521 | be deferred during the evaluation of the arguments. */ |
3522 | NO_DEFER_POP; |
3523 | |
3524 | /* Record the maximum pushed stack space size. We need to delay |
3525 | doing it this far to take into account the optimization done |
3526 | by combine_pending_stack_adjustment_and_call. */ |
3527 | if (flag_stack_usage_info |
3528 | && !ACCUMULATE_OUTGOING_ARGS |
3529 | && pass |
3530 | && adjusted_args_size.var == 0) |
3531 | { |
3532 | poly_int64 pushed = (adjusted_args_size.constant |
3533 | + pending_stack_adjust); |
3534 | current_function_pushed_stack_size |
3535 | = upper_bound (current_function_pushed_stack_size, b: pushed); |
3536 | } |
3537 | |
3538 | funexp = rtx_for_function_call (fndecl, addr); |
3539 | |
3540 | if (CALL_EXPR_STATIC_CHAIN (exp)) |
3541 | static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp)); |
3542 | else |
3543 | static_chain_value = 0; |
3544 | |
3545 | #ifdef REG_PARM_STACK_SPACE |
3546 | /* Save the fixed argument area if it's part of the caller's frame and |
3547 | is clobbered by argument setup for this call. */ |
3548 | if (ACCUMULATE_OUTGOING_ARGS && pass) |
3549 | save_area = save_fixed_argument_area (reg_parm_stack_space, argblock, |
3550 | low_to_save: &low_to_save, high_to_save: &high_to_save); |
3551 | #endif |
3552 | |
3553 | /* Now store (and compute if necessary) all non-register parms. |
3554 | These come before register parms, since they can require block-moves, |
3555 | which could clobber the registers used for register parms. |
3556 | Parms which have partial registers are not stored here, |
3557 | but we do preallocate space here if they want that. */ |
3558 | |
3559 | for (i = 0; i < num_actuals; i++) |
3560 | { |
3561 | if (args[i].reg == 0 || args[i].pass_on_stack) |
3562 | { |
3563 | rtx_insn *before_arg = get_last_insn (); |
3564 | |
3565 | /* We don't allow passing huge (> 2^30 B) arguments |
3566 | by value. It would cause an overflow later on. */ |
3567 | if (constant_lower_bound (a: adjusted_args_size.constant) |
3568 | >= (1 << (HOST_BITS_PER_INT - 2))) |
3569 | { |
3570 | sorry ("passing too large argument on stack"); |
3571 | /* Don't worry about stack clean-up. */ |
3572 | if (pass == 0) |
3573 | sibcall_failure = true; |
3574 | else |
3575 | normal_failure = true; |
3576 | continue; |
3577 | } |
3578 | |
3579 | if (store_one_arg (&args[i], argblock, flags, |
3580 | adjusted_args_size.var != 0, |
3581 | reg_parm_stack_space) |
3582 | || (pass == 0 |
3583 | && check_sibcall_argument_overlap (insn: before_arg, |
3584 | arg: &args[i], mark_stored_args_map: true))) |
3585 | sibcall_failure = true; |
3586 | } |
3587 | |
3588 | if (args[i].stack) |
3589 | call_fusage |
3590 | = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)), |
3591 | gen_rtx_USE (VOIDmode, args[i].stack), |
3592 | call_fusage); |
3593 | } |
3594 | |
3595 | /* If we have a parm that is passed in registers but not in memory |
3596 | and whose alignment does not permit a direct copy into registers, |
3597 | make a group of pseudos that correspond to each register that we |
3598 | will later fill. */ |
3599 | if (STRICT_ALIGNMENT) |
3600 | store_unaligned_arguments_into_pseudos (args, num_actuals); |
3601 | |
3602 | /* Now store any partially-in-registers parm. |
3603 | This is the last place a block-move can happen. */ |
3604 | if (reg_parm_seen) |
3605 | for (i = 0; i < num_actuals; i++) |
3606 | if (args[i].partial != 0 && ! args[i].pass_on_stack) |
3607 | { |
3608 | rtx_insn *before_arg = get_last_insn (); |
3609 | |
3610 | /* On targets with weird calling conventions (e.g. PA) it's |
3611 | hard to ensure that all cases of argument overlap between |
3612 | stack and registers work. Play it safe and bail out. */ |
3613 | if (ARGS_GROW_DOWNWARD && !STACK_GROWS_DOWNWARD) |
3614 | { |
3615 | sibcall_failure = true; |
3616 | break; |
3617 | } |
3618 | |
3619 | if (store_one_arg (&args[i], argblock, flags, |
3620 | adjusted_args_size.var != 0, |
3621 | reg_parm_stack_space) |
3622 | || (pass == 0 |
3623 | && check_sibcall_argument_overlap (insn: before_arg, |
3624 | arg: &args[i], mark_stored_args_map: true))) |
3625 | sibcall_failure = true; |
3626 | } |
3627 | |
3628 | /* Set up the next argument register. For sibling calls on machines |
3629 | with register windows this should be the incoming register. */ |
3630 | if (pass == 0) |
3631 | next_arg_reg = targetm.calls.function_incoming_arg |
3632 | (args_so_far, function_arg_info::end_marker ()); |
3633 | else |
3634 | next_arg_reg = targetm.calls.function_arg |
3635 | (args_so_far, function_arg_info::end_marker ()); |
3636 | |
3637 | targetm.calls.start_call_args (args_so_far); |
3638 | |
3639 | bool any_regs = false; |
3640 | for (i = 0; i < num_actuals; i++) |
3641 | if (args[i].reg != NULL_RTX) |
3642 | { |
3643 | any_regs = true; |
3644 | targetm.calls.call_args (args_so_far, args[i].reg, funtype); |
3645 | } |
3646 | if (!any_regs) |
3647 | targetm.calls.call_args (args_so_far, pc_rtx, funtype); |
3648 | |
3649 | /* Figure out the register where the value, if any, will come back. */ |
3650 | valreg = 0; |
3651 | if (TYPE_MODE (rettype) != VOIDmode |
3652 | && ! structure_value_addr) |
3653 | { |
3654 | if (pcc_struct_value) |
3655 | valreg = hard_function_value (build_pointer_type (rettype), |
3656 | fndecl, NULL, (pass == 0)); |
3657 | else |
3658 | valreg = hard_function_value (rettype, fndecl, fntype, |
3659 | (pass == 0)); |
3660 | |
3661 | /* If VALREG is a PARALLEL whose first member has a zero |
3662 | offset, use that. This is for targets such as m68k that |
3663 | return the same value in multiple places. */ |
3664 | if (GET_CODE (valreg) == PARALLEL) |
3665 | { |
3666 | rtx elem = XVECEXP (valreg, 0, 0); |
3667 | rtx where = XEXP (elem, 0); |
3668 | rtx offset = XEXP (elem, 1); |
3669 | if (offset == const0_rtx |
3670 | && GET_MODE (where) == GET_MODE (valreg)) |
3671 | valreg = where; |
3672 | } |
3673 | } |
3674 | |
3675 | /* If register arguments require space on the stack and stack space |
3676 | was not preallocated, allocate stack space here for arguments |
3677 | passed in registers. */ |
3678 | if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))) |
3679 | && !ACCUMULATE_OUTGOING_ARGS |
3680 | && !must_preallocate && reg_parm_stack_space > 0) |
3681 | anti_adjust_stack (GEN_INT (reg_parm_stack_space)); |
3682 | |
3683 | /* Pass the function the address in which to return a |
3684 | structure value. */ |
3685 | if (pass != 0 && structure_value_addr && ! structure_value_addr_parm) |
3686 | { |
3687 | structure_value_addr |
3688 | = convert_memory_address (Pmode, structure_value_addr); |
3689 | emit_move_insn (struct_value, |
3690 | force_reg (Pmode, |
3691 | force_operand (structure_value_addr, |
3692 | NULL_RTX))); |
3693 | |
3694 | if (REG_P (struct_value)) |
3695 | use_reg (fusage: &call_fusage, reg: struct_value); |
3696 | } |
3697 | |
3698 | after_args = get_last_insn (); |
3699 | funexp = prepare_call_address (fndecl_or_type: fndecl ? fndecl : fntype, funexp, |
3700 | static_chain_value, call_fusage: &call_fusage, |
3701 | reg_parm_seen, flags); |
3702 | |
3703 | load_register_parameters (args, num_actuals, call_fusage: &call_fusage, flags, |
3704 | is_sibcall: pass == 0, sibcall_failure: &sibcall_failure); |
3705 | |
3706 | /* Save a pointer to the last insn before the call, so that we can |
3707 | later safely search backwards to find the CALL_INSN. */ |
3708 | before_call = get_last_insn (); |
3709 | |
3710 | if (pass == 1 && (return_flags & ERF_RETURNS_ARG)) |
3711 | { |
3712 | int arg_nr = return_flags & ERF_RETURN_ARG_MASK; |
3713 | arg_nr = num_actuals - arg_nr - 1; |
3714 | if (arg_nr >= 0 |
3715 | && arg_nr < num_actuals |
3716 | && args[arg_nr].reg |
3717 | && valreg |
3718 | && REG_P (valreg) |
3719 | && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg)) |
3720 | call_fusage |
3721 | = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)), |
3722 | gen_rtx_SET (valreg, args[arg_nr].reg), |
3723 | call_fusage); |
3724 | } |
3725 | /* All arguments and registers used for the call must be set up by |
3726 | now! */ |
3727 | |
3728 | /* Stack must be properly aligned now. */ |
3729 | gcc_assert (!pass |
3730 | || multiple_p (stack_pointer_delta, |
3731 | preferred_unit_stack_boundary)); |
3732 | |
3733 | /* Generate the actual call instruction. */ |
3734 | emit_call_1 (funexp, fntree: exp, fndecl, funtype, stack_size: unadjusted_args_size, |
3735 | rounded_stack_size: adjusted_args_size.constant, struct_value_size, |
3736 | next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage, |
3737 | ecf_flags: flags, args_so_far); |
3738 | |
3739 | rtx_call_insn *last; |
3740 | rtx datum = NULL_RTX; |
3741 | if (fndecl != NULL_TREE) |
3742 | { |
3743 | datum = XEXP (DECL_RTL (fndecl), 0); |
3744 | gcc_assert (datum != NULL_RTX |
3745 | && GET_CODE (datum) == SYMBOL_REF); |
3746 | } |
3747 | last = last_call_insn (); |
3748 | add_reg_note (last, REG_CALL_DECL, datum); |
3749 | |
3750 | /* If the call setup or the call itself overlaps with anything |
3751 | of the argument setup we probably clobbered our call address. |
3752 | In that case we can't do sibcalls. */ |
3753 | if (pass == 0 |
3754 | && check_sibcall_argument_overlap (insn: after_args, arg: 0, mark_stored_args_map: false)) |
3755 | sibcall_failure = true; |
3756 | |
3757 | /* If a non-BLKmode value is returned at the most significant end |
3758 | of a register, shift the register right by the appropriate amount |
3759 | and update VALREG accordingly. BLKmode values are handled by the |
3760 | group load/store machinery below. */ |
3761 | if (!structure_value_addr |
3762 | && !pcc_struct_value |
3763 | && TYPE_MODE (rettype) != VOIDmode |
3764 | && TYPE_MODE (rettype) != BLKmode |
3765 | && REG_P (valreg) |
3766 | && targetm.calls.return_in_msb (rettype)) |
3767 | { |
3768 | if (shift_return_value (TYPE_MODE (rettype), left_p: false, value: valreg)) |
3769 | sibcall_failure = true; |
3770 | valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg)); |
3771 | } |
3772 | |
3773 | if (pass && (flags & ECF_MALLOC)) |
3774 | { |
3775 | rtx temp = gen_reg_rtx (GET_MODE (valreg)); |
3776 | rtx_insn *last, *insns; |
3777 | |
3778 | /* The return value from a malloc-like function is a pointer. */ |
3779 | if (TREE_CODE (rettype) == POINTER_TYPE) |
3780 | mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT); |
3781 | |
3782 | emit_move_insn (temp, valreg); |
3783 | |
3784 | /* The return value from a malloc-like function cannot alias |
3785 | anything else. */ |
3786 | last = get_last_insn (); |
3787 | add_reg_note (last, REG_NOALIAS, temp); |
3788 | |
3789 | /* Write out the sequence. */ |
3790 | insns = end_sequence (); |
3791 | emit_insn (insns); |
3792 | valreg = temp; |
3793 | } |
3794 | |
3795 | /* For calls to `setjmp', etc., inform |
3796 | function.cc:setjmp_warnings that it should complain if |
3797 | nonvolatile values are live. For functions that cannot |
3798 | return, inform flow that control does not fall through. */ |
3799 | |
3800 | if ((flags & ECF_NORETURN) || pass == 0) |
3801 | { |
3802 | /* The barrier must be emitted |
3803 | immediately after the CALL_INSN. Some ports emit more |
3804 | than just a CALL_INSN above, so we must search for it here. */ |
3805 | |
3806 | rtx_insn *last = get_last_insn (); |
3807 | while (!CALL_P (last)) |
3808 | { |
3809 | last = PREV_INSN (insn: last); |
3810 | /* There was no CALL_INSN? */ |
3811 | gcc_assert (last != before_call); |
3812 | } |
3813 | |
3814 | emit_barrier_after (last); |
3815 | |
3816 | /* Stack adjustments after a noreturn call are dead code. |
3817 | However when NO_DEFER_POP is in effect, we must preserve |
3818 | stack_pointer_delta. */ |
3819 | if (inhibit_defer_pop == 0) |
3820 | { |
3821 | stack_pointer_delta = old_stack_allocated; |
3822 | pending_stack_adjust = 0; |
3823 | } |
3824 | } |
3825 | |
3826 | /* If value type not void, return an rtx for the value. */ |
3827 | |
3828 | if (TYPE_MODE (rettype) == VOIDmode |
3829 | || ignore) |
3830 | target = const0_rtx; |
3831 | else if (structure_value_addr) |
3832 | { |
3833 | if (target == 0 || !MEM_P (target)) |
3834 | { |
3835 | target |
3836 | = gen_rtx_MEM (TYPE_MODE (rettype), |
3837 | memory_address (TYPE_MODE (rettype), |
3838 | structure_value_addr)); |
3839 | set_mem_attributes (target, rettype, 1); |
3840 | } |
3841 | } |
3842 | else if (pcc_struct_value) |
3843 | { |
3844 | /* This is the special C++ case where we need to |
3845 | know what the true target was. We take care to |
3846 | never use this value more than once in one expression. */ |
3847 | target = gen_rtx_MEM (TYPE_MODE (rettype), |
3848 | copy_to_reg (valreg)); |
3849 | set_mem_attributes (target, rettype, 1); |
3850 | } |
3851 | /* Handle calls that return values in multiple non-contiguous locations. |
3852 | The Irix 6 ABI has examples of this. */ |
3853 | else if (GET_CODE (valreg) == PARALLEL) |
3854 | { |
3855 | if (target == 0) |
3856 | target = emit_group_move_into_temps (valreg); |
3857 | else if (rtx_equal_p (target, valreg)) |
3858 | ; |
3859 | else if (GET_CODE (target) == PARALLEL) |
3860 | /* Handle the result of an emit_group_move_into_temps |
3861 | call in the previous pass. */ |
3862 | emit_group_move (target, valreg); |
3863 | else |
3864 | emit_group_store (target, valreg, rettype, |
3865 | int_size_in_bytes (rettype)); |
3866 | } |
3867 | else if (target |
3868 | && GET_MODE (target) == TYPE_MODE (rettype) |
3869 | && GET_MODE (target) == GET_MODE (valreg)) |
3870 | { |
3871 | bool may_overlap = false; |
3872 | |
3873 | /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard |
3874 | reg to a plain register. */ |
3875 | if (!REG_P (target) || HARD_REGISTER_P (target)) |
3876 | valreg = avoid_likely_spilled_reg (x: valreg); |
3877 | |
3878 | /* If TARGET is a MEM in the argument area, and we have |
3879 | saved part of the argument area, then we can't store |
3880 | directly into TARGET as it may get overwritten when we |
3881 | restore the argument save area below. Don't work too |
3882 | hard though and simply force TARGET to a register if it |
3883 | is a MEM; the optimizer is quite likely to sort it out. */ |
3884 | if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target)) |
3885 | for (i = 0; i < num_actuals; i++) |
3886 | if (args[i].save_area) |
3887 | { |
3888 | may_overlap = true; |
3889 | break; |
3890 | } |
3891 | |
3892 | if (may_overlap) |
3893 | target = copy_to_reg (valreg); |
3894 | else |
3895 | { |
3896 | /* TARGET and VALREG cannot be equal at this point |
3897 | because the latter would not have |
3898 | REG_FUNCTION_VALUE_P true, while the former would if |
3899 | it were referring to the same register. |
3900 | |
3901 | If they refer to the same register, this move will be |
3902 | a no-op, except when function inlining is being |
3903 | done. */ |
3904 | emit_move_insn (target, valreg); |
3905 | |
3906 | /* If we are setting a MEM, this code must be executed. |
3907 | Since it is emitted after the call insn, sibcall |
3908 | optimization cannot be performed in that case. */ |
3909 | if (MEM_P (target)) |
3910 | sibcall_failure = true; |
3911 | } |
3912 | } |
3913 | else |
3914 | target = copy_to_reg (avoid_likely_spilled_reg (x: valreg)); |
3915 | |
3916 | /* If we promoted this return value, make the proper SUBREG. |
3917 | TARGET might be const0_rtx here, so be careful. */ |
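| /* For instance, on targets that promote small integer return values |
| to word mode, a signed char result arrives in a word-mode register |
| and is wrapped in a promoted SUBREG below; small float results that |
| come back in wider integer registers go through |
| convert_wider_int_to_float instead.  */ |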
3918 | if (REG_P (target) |
3919 | && TYPE_MODE (rettype) != BLKmode |
3920 | && GET_MODE (target) != TYPE_MODE (rettype)) |
3921 | { |
3922 | tree type = rettype; |
3923 | int unsignedp = TYPE_UNSIGNED (type); |
3924 | machine_mode ret_mode = TYPE_MODE (type); |
3925 | machine_mode pmode; |
3926 | |
3927 | /* Ensure we promote as expected, and get the new unsignedness. */ |
3928 | pmode = promote_function_mode (type, ret_mode, &unsignedp, |
3929 | funtype, 1); |
3930 | gcc_assert (GET_MODE (target) == pmode); |
3931 | |
3932 | if (SCALAR_INT_MODE_P (pmode) |
3933 | && SCALAR_FLOAT_MODE_P (ret_mode) |
3934 | && known_gt (GET_MODE_SIZE (pmode), GET_MODE_SIZE (ret_mode))) |
3935 | target = convert_wider_int_to_float (mode: ret_mode, imode: pmode, x: target); |
3936 | else |
3937 | { |
3938 | target = gen_lowpart_SUBREG (ret_mode, target); |
3939 | SUBREG_PROMOTED_VAR_P (target) = 1; |
3940 | SUBREG_PROMOTED_SET (target, unsignedp); |
3941 | } |
3942 | } |
3943 | |
3944 | /* If size of args is variable or this was a constructor call for a stack |
3945 | argument, restore saved stack-pointer value. */ |
3946 | |
3947 | if (old_stack_level) |
3948 | { |
3949 | rtx_insn *prev = get_last_insn (); |
3950 | |
3951 | emit_stack_restore (SAVE_BLOCK, old_stack_level); |
3952 | stack_pointer_delta = old_stack_pointer_delta; |
3953 | |
3954 | fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta); |
3955 | |
3956 | pending_stack_adjust = old_pending_adj; |
3957 | old_stack_allocated = stack_pointer_delta - pending_stack_adjust; |
3958 | stack_arg_under_construction = old_stack_arg_under_construction; |
3959 | highest_outgoing_arg_in_use = initial_highest_arg_in_use; |
3960 | stack_usage_map = initial_stack_usage_map; |
3961 | stack_usage_watermark = initial_stack_usage_watermark; |
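| /* Needing a stack save/restore means a variable-sized or |
| constructor-built argument block was involved, which is |
| incompatible with a sibling call sequence.  */ |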
3962 | sibcall_failure = true; |
3963 | } |
3964 | else if (ACCUMULATE_OUTGOING_ARGS && pass) |
3965 | { |
3966 | #ifdef REG_PARM_STACK_SPACE |
3967 | if (save_area) |
3968 | restore_fixed_argument_area (save_area, argblock, |
3969 | high_to_save, low_to_save); |
3970 | #endif |
3971 | |
3972 | /* If we saved any argument areas, restore them. */ |
3973 | for (i = 0; i < num_actuals; i++) |
3974 | if (args[i].save_area) |
3975 | { |
3976 | machine_mode save_mode = GET_MODE (args[i].save_area); |
3977 | rtx stack_area |
3978 | = gen_rtx_MEM (save_mode, |
3979 | memory_address (save_mode, |
3980 | XEXP (args[i].stack_slot, 0))); |
3981 | |
3982 | if (save_mode != BLKmode) |
3983 | emit_move_insn (stack_area, args[i].save_area); |
3984 | else |
3985 | emit_block_move (stack_area, args[i].save_area, |
3986 | (gen_int_mode |
3987 | (args[i].locate.size.constant, Pmode)), |
3988 | BLOCK_OP_CALL_PARM); |
3989 | } |
3990 | |
3991 | highest_outgoing_arg_in_use = initial_highest_arg_in_use; |
3992 | stack_usage_map = initial_stack_usage_map; |
3993 | stack_usage_watermark = initial_stack_usage_watermark; |
3994 | } |
3995 | |
3996 | /* If this was alloca, record the new stack level. */ |
3997 | if (flags & ECF_MAY_BE_ALLOCA) |
3998 | record_new_stack_level (); |
3999 | |
4000 | /* Free up storage we no longer need. */ |
4001 | for (i = 0; i < num_actuals; ++i) |
4002 | free (ptr: args[i].aligned_regs); |
4003 | |
4004 | targetm.calls.end_call_args (args_so_far); |
4005 | |
4006 | insns = end_sequence (); |
4007 | |
4008 | if (pass == 0) |
4009 | { |
4010 | tail_call_insns = insns; |
4011 | |
4012 | /* Restore the pending stack adjustment now that we have |
4013 | finished generating the sibling call sequence. */ |
4014 | |
4015 | restore_pending_stack_adjust (&save); |
4016 | |
4017 | /* Prepare arg structure for next iteration. */ |
4018 | for (i = 0; i < num_actuals; i++) |
4019 | { |
4020 | args[i].value = 0; |
4021 | args[i].aligned_regs = 0; |
4022 | args[i].stack = 0; |
4023 | } |
4024 | |
4025 | sbitmap_free (map: stored_args_map); |
4026 | internal_arg_pointer_exp_state.scan_start = NULL; |
4027 | internal_arg_pointer_exp_state.cache.release (); |
4028 | } |
4029 | else |
4030 | { |
4031 | normal_call_insns = insns; |
4032 | |
4033 | /* Verify that we've deallocated all the stack we used. */ |
4034 | gcc_assert ((flags & ECF_NORETURN) |
4035 | || normal_failure |
4036 | || known_eq (old_stack_allocated, |
4037 | stack_pointer_delta |
4038 | - pending_stack_adjust)); |
4039 | if (normal_failure) |
4040 | normal_call_insns = NULL; |
4041 | } |
4042 | |
4043 | /* If something prevents making this a sibling call, |
4044 | zero out the sequence. */ |
4045 | if (sibcall_failure) |
4046 | tail_call_insns = NULL; |
4047 | else |
4048 | break; |
4049 | } |
4050 | |
4051 | /* If tail call production succeeded, we need to remove REG_EQUIV notes on |
4052 | arguments too, as argument area is now clobbered by the call. */ |
4053 | if (tail_call_insns) |
4054 | { |
4055 | emit_insn (tail_call_insns); |
4056 | crtl->tail_call_emit = true; |
4057 | } |
4058 | else |
4059 | { |
4060 | emit_insn (normal_call_insns); |
4061 | if (try_tail_call) |
4062 | /* Ideally we'd emit a message for all of the ways that it could |
4063 | have failed. */ |
4064 | maybe_complain_about_tail_call (call_expr: exp, _("tail call production failed")); |
4065 | } |
4066 | |
4067 | currently_expanding_call--; |
4068 | |
4069 | free (ptr: stack_usage_map_buf); |
4070 | free (ptr: args); |
4071 | return target; |
4072 | } |
4073 | |
4074 | /* A sibling call sequence invalidates any REG_EQUIV notes made for |
4075 | this function's incoming arguments. |
4076 | |
4077 | At the start of RTL generation we know the only REG_EQUIV notes |
4078 | in the rtl chain are those for incoming arguments, so we can look |
4079 | for REG_EQUIV notes between the start of the function and the |
4080 | NOTE_INSN_FUNCTION_BEG. |
4081 | |
4082 | This is (slight) overkill. We could keep track of the highest |
4083 | argument we clobber and be more selective in removing notes, but it |
4084 | does not seem to be worth the effort. */ |
4085 | |
4086 | void |
4087 | fixup_tail_calls (void) |
4088 | { |
4089 | rtx_insn *insn; |
4090 | |
4091 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) |
4092 | { |
4093 | rtx note; |
4094 | |
4095 | /* There are never REG_EQUIV notes for the incoming arguments |
4096 | after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */ |
4097 | if (NOTE_P (insn) |
4098 | && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG) |
4099 | break; |
4100 | |
4101 | note = find_reg_note (insn, REG_EQUIV, 0); |
4102 | if (note) |
4103 | remove_note (insn, note); |
4104 | note = find_reg_note (insn, REG_EQUIV, 0); |
4105 | gcc_assert (!note); |
4106 | } |
4107 | } |
4108 | |
4109 | /* Traverse a list of TYPES and expand all complex types into their |
4110 | components. */ |
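| /* For example, a type list (complex double, int) becomes |
| (double, double, int) when the target splits complex double.  */ |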
4111 | static tree |
4112 | split_complex_types (tree types) |
4113 | { |
4114 | tree p; |
4115 | |
4116 | /* Before allocating memory, check for the common case of no complex types.  */ |
4117 | for (p = types; p; p = TREE_CHAIN (p)) |
4118 | { |
4119 | tree type = TREE_VALUE (p); |
4120 | if (TREE_CODE (type) == COMPLEX_TYPE |
4121 | && targetm.calls.split_complex_arg (type)) |
4122 | goto found; |
4123 | } |
4124 | return types; |
4125 | |
4126 | found: |
4127 | types = copy_list (types); |
4128 | |
4129 | for (p = types; p; p = TREE_CHAIN (p)) |
4130 | { |
4131 | tree complex_type = TREE_VALUE (p); |
4132 | |
4133 | if (TREE_CODE (complex_type) == COMPLEX_TYPE |
4134 | && targetm.calls.split_complex_arg (complex_type)) |
4135 | { |
4136 | tree next, imag; |
4137 | |
4138 | /* Rewrite complex type with component type. */ |
4139 | TREE_VALUE (p) = TREE_TYPE (complex_type); |
4140 | next = TREE_CHAIN (p); |
4141 | |
4142 | /* Add another component type for the imaginary part. */ |
4143 | imag = build_tree_list (NULL_TREE, TREE_VALUE (p)); |
4144 | TREE_CHAIN (p) = imag; |
4145 | TREE_CHAIN (imag) = next; |
4146 | |
4147 | /* Skip the newly created node. */ |
4148 | p = TREE_CHAIN (p); |
4149 | } |
4150 | } |
4151 | |
4152 | return types; |
4153 | } |
4154 | |
4155 | /* Output a library call to function ORGFUN (a SYMBOL_REF rtx) |
4156 | for a value of mode OUTMODE, |
4157 | with NARGS different arguments, passed as ARGS. |
4158 | Store the return value if RETVAL is nonzero: store it in VALUE if |
4159 | VALUE is nonnull, otherwise pick a convenient location. In either |
4160 | case return the location of the stored value. |
4161 | |
4162 | FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for |
4163 | `const' calls, LCT_PURE for `pure' calls, or another LCT_ value for |
4164 | other types of library calls. */ |
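| /* Callers normally go through the emit_library_call and |
| emit_library_call_value wrappers, passing (rtx, mode) pairs; |
| schematically, something like |
| emit_library_call_value (libfunc, NULL_RTX, LCT_CONST, DImode, |
| op0, DImode, op1, DImode); |
| where LIBFUNC, OP0 and OP1 stand for the callee SYMBOL_REF and the |
| operand rtxes.  */ |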
4165 | |
4166 | rtx |
4167 | emit_library_call_value_1 (int retval, rtx orgfun, rtx value, |
4168 | enum libcall_type fn_type, |
4169 | machine_mode outmode, int nargs, rtx_mode_t *args) |
4170 | { |
4171 | /* Total size in bytes of all the stack-parms scanned so far. */ |
4172 | struct args_size args_size; |
4173 | /* Size of arguments before any adjustments (such as rounding). */ |
4174 | struct args_size original_args_size; |
4175 | int argnum; |
4176 | rtx fun; |
4177 | /* TODO: choose the correct decl type of orgfun.  Sadly this information |
4178 | isn't present here, so we default to the native calling ABI.  */ |
4179 | tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* Library calls default to the host calling ABI?  */ |
4180 | tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* Library calls default to the host calling ABI?  */ |
4181 | int count; |
4182 | rtx argblock = 0; |
4183 | CUMULATIVE_ARGS args_so_far_v; |
4184 | cumulative_args_t args_so_far; |
4185 | struct arg |
4186 | { |
4187 | rtx value; |
4188 | machine_mode mode; |
4189 | rtx reg; |
4190 | int partial; |
4191 | struct locate_and_pad_arg_data locate; |
4192 | rtx save_area; |
4193 | }; |
4194 | struct arg *argvec; |
4195 | int old_inhibit_defer_pop = inhibit_defer_pop; |
4196 | rtx call_fusage = 0; |
4197 | rtx mem_value = 0; |
4198 | rtx valreg; |
4199 | bool pcc_struct_value = false; |
4200 | poly_int64 struct_value_size = 0; |
4201 | int flags; |
4202 | int reg_parm_stack_space = 0; |
4203 | poly_int64 needed; |
4204 | rtx_insn *before_call; |
4205 | bool have_push_fusage; |
4206 | tree tfom; /* type_for_mode (outmode, 0) */ |
4207 | |
4208 | #ifdef REG_PARM_STACK_SPACE |
4209 | /* Define the boundary of the register parm stack space that needs to be |
4210 | saved, if any.  */ |
4211 | int low_to_save = 0, high_to_save = 0; |
4212 | rtx save_area = 0; /* Place that it is saved. */ |
4213 | #endif |
4214 | |
4215 | /* Size of the stack reserved for parameter registers. */ |
4216 | unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use; |
4217 | char *initial_stack_usage_map = stack_usage_map; |
4218 | unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark; |
4219 | char *stack_usage_map_buf = NULL; |
4220 | |
4221 | rtx struct_value = targetm.calls.struct_value_rtx (0, 0); |
4222 | |
4223 | #ifdef REG_PARM_STACK_SPACE |
4224 | reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0); |
4225 | #endif |
4226 | |
4227 | /* By default, library functions cannot throw. */ |
4228 | flags = ECF_NOTHROW; |
4229 | |
4230 | switch (fn_type) |
4231 | { |
4232 | case LCT_NORMAL: |
4233 | break; |
4234 | case LCT_CONST: |
4235 | flags |= ECF_CONST; |
4236 | break; |
4237 | case LCT_PURE: |
4238 | flags |= ECF_PURE; |
4239 | break; |
4240 | case LCT_NORETURN: |
4241 | flags |= ECF_NORETURN; |
4242 | break; |
4243 | case LCT_THROW: |
4244 | flags &= ~ECF_NOTHROW; |
4245 | break; |
4246 | case LCT_RETURNS_TWICE: |
4247 | flags = ECF_RETURNS_TWICE; |
4248 | break; |
4249 | } |
4250 | fun = orgfun; |
4251 | |
4252 | /* Ensure current function's preferred stack boundary is at least |
4253 | what we need. */ |
4254 | if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY) |
4255 | crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY; |
4256 | |
4257 | /* If this kind of value comes back in memory, |
4258 | decide where in memory it should come back. */ |
4259 | if (outmode != VOIDmode) |
4260 | { |
4261 | tfom = lang_hooks.types.type_for_mode (outmode, 0); |
4262 | if (aggregate_value_p (tfom, 0)) |
4263 | { |
4264 | #ifdef PCC_STATIC_STRUCT_RETURN |
4265 | rtx pointer_reg |
4266 | = hard_function_value (build_pointer_type (tfom), 0, 0, 0); |
4267 | mem_value = gen_rtx_MEM (outmode, pointer_reg); |
4268 | pcc_struct_value = true; |
4269 | if (value == 0) |
4270 | value = gen_reg_rtx (outmode); |
4271 | #else /* not PCC_STATIC_STRUCT_RETURN */ |
4272 | struct_value_size = GET_MODE_SIZE (mode: outmode); |
4273 | if (value != 0 && MEM_P (value)) |
4274 | mem_value = value; |
4275 | else |
4276 | mem_value = assign_temp (tfom, 1, 1); |
4277 | #endif |
4278 | /* This call returns a big structure. */ |
4279 | flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE); |
4280 | } |
4281 | } |
4282 | else |
4283 | tfom = void_type_node; |
4284 | |
4285 | /* ??? Unfinished: must pass the memory address as an argument. */ |
4286 | |
4287 | /* Copy all the libcall-arguments out of the varargs data |
4288 | and into a vector ARGVEC. |
4289 | |
4290 | Compute how to pass each argument. We only support a very small subset |
4291 | of the full argument passing conventions to limit complexity here since |
4292 | library functions shouldn't have many args. */ |
4293 | |
4294 | argvec = XALLOCAVEC (struct arg, nargs + 1); |
4295 | memset (s: argvec, c: 0, n: (nargs + 1) * sizeof (struct arg)); |
4296 | |
4297 | #ifdef INIT_CUMULATIVE_LIBCALL_ARGS |
4298 | INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun); |
4299 | #else |
4300 | INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs); |
4301 | #endif |
4302 | args_so_far = pack_cumulative_args (arg: &args_so_far_v); |
4303 | |
4304 | args_size.constant = 0; |
4305 | args_size.var = 0; |
4306 | |
4307 | count = 0; |
4308 | |
4309 | push_temp_slots (); |
4310 | |
4311 | /* If there's a structure value address to be passed, |
4312 | either pass it in the special place, or pass it as an extra argument. */ |
4313 | if (mem_value && struct_value == 0 && ! pcc_struct_value) |
4314 | { |
4315 | rtx addr = XEXP (mem_value, 0); |
4316 | |
4317 | nargs++; |
4318 | |
4319 | /* Make sure it is a reasonable operand for a move or push insn. */ |
4320 | if (!REG_P (addr) && !MEM_P (addr) |
4321 | && !(CONSTANT_P (addr) |
4322 | && targetm.legitimate_constant_p (Pmode, addr))) |
4323 | addr = force_operand (addr, NULL_RTX); |
4324 | |
4325 | argvec[count].value = addr; |
4326 | argvec[count].mode = Pmode; |
4327 | argvec[count].partial = 0; |
4328 | |
4329 | function_arg_info ptr_arg (Pmode, /*named=*/true); |
4330 | argvec[count].reg = targetm.calls.function_arg (args_so_far, ptr_arg); |
4331 | gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, ptr_arg) == 0); |
4332 | |
4333 | locate_and_pad_parm (Pmode, NULL_TREE, |
4334 | #ifdef STACK_PARMS_IN_REG_PARM_AREA |
4335 | 1, |
4336 | #else |
4337 | argvec[count].reg != 0, |
4338 | #endif |
4339 | reg_parm_stack_space, 0, |
4340 | NULL_TREE, &args_size, &argvec[count].locate); |
4341 | |
4342 | if (argvec[count].reg == 0 || argvec[count].partial != 0 |
4343 | || reg_parm_stack_space > 0) |
4344 | args_size.constant += argvec[count].locate.size.constant; |
4345 | |
4346 | targetm.calls.function_arg_advance (args_so_far, ptr_arg); |
4347 | |
4348 | count++; |
4349 | } |
4350 | |
4351 | for (unsigned int i = 0; count < nargs; i++, count++) |
4352 | { |
4353 | rtx val = args[i].first; |
4354 | function_arg_info arg (args[i].second, /*named=*/true); |
4355 | int unsigned_p = 0; |
4356 | |
4357 | /* We cannot convert the arg value to the mode the library wants here; |
4358 | must do it earlier where we know the signedness of the arg. */ |
4359 | gcc_assert (arg.mode != BLKmode |
4360 | && (GET_MODE (val) == arg.mode |
4361 | || GET_MODE (val) == VOIDmode)); |
4362 | |
4363 | /* Make sure it is a reasonable operand for a move or push insn. */ |
4364 | if (!REG_P (val) && !MEM_P (val) |
4365 | && !(CONSTANT_P (val) |
4366 | && targetm.legitimate_constant_p (arg.mode, val))) |
4367 | val = force_operand (val, NULL_RTX); |
4368 | |
4369 | if (pass_by_reference (ca: &args_so_far_v, arg)) |
4370 | { |
4371 | rtx slot; |
4372 | int must_copy = !reference_callee_copied (ca: &args_so_far_v, arg); |
4373 | |
4374 | /* If this was a CONST function, it is now PURE since it now |
4375 | reads memory. */ |
4376 | if (flags & ECF_CONST) |
4377 | { |
4378 | flags &= ~ECF_CONST; |
4379 | flags |= ECF_PURE; |
4380 | } |
4381 | |
4382 | if (MEM_P (val) && !must_copy) |
4383 | { |
4384 | tree val_expr = MEM_EXPR (val); |
4385 | if (val_expr) |
4386 | mark_addressable (val_expr); |
4387 | slot = val; |
4388 | } |
4389 | else |
4390 | { |
4391 | slot = assign_temp (lang_hooks.types.type_for_mode (arg.mode, 0), |
4392 | 1, 1); |
4393 | emit_move_insn (slot, val); |
4394 | } |
4395 | |
4396 | call_fusage = gen_rtx_EXPR_LIST (VOIDmode, |
4397 | gen_rtx_USE (VOIDmode, slot), |
4398 | call_fusage); |
4399 | if (must_copy) |
4400 | call_fusage = gen_rtx_EXPR_LIST (VOIDmode, |
4401 | gen_rtx_CLOBBER (VOIDmode, |
4402 | slot), |
4403 | call_fusage); |
4404 | |
4405 | arg.mode = Pmode; |
4406 | arg.pass_by_reference = true; |
4407 | val = force_operand (XEXP (slot, 0), NULL_RTX); |
4408 | } |
4409 | |
4410 | arg.mode = promote_function_mode (NULL_TREE, arg.mode, &unsigned_p, |
4411 | NULL_TREE, 0); |
4412 | argvec[count].mode = arg.mode; |
4413 | argvec[count].value = convert_modes (arg.mode, GET_MODE (val), val,
4414 | unsigned_p);
4415 | argvec[count].reg = targetm.calls.function_arg (args_so_far, arg); |
4416 | |
4417 | argvec[count].partial |
4418 | = targetm.calls.arg_partial_bytes (args_so_far, arg); |
4419 | |
4420 | if (argvec[count].reg == 0 |
4421 | || argvec[count].partial != 0 |
4422 | || reg_parm_stack_space > 0) |
4423 | { |
4424 | locate_and_pad_parm (arg.mode, NULL_TREE, |
4425 | #ifdef STACK_PARMS_IN_REG_PARM_AREA |
4426 | 1, |
4427 | #else |
4428 | argvec[count].reg != 0, |
4429 | #endif |
4430 | reg_parm_stack_space, argvec[count].partial, |
4431 | NULL_TREE, &args_size, &argvec[count].locate); |
4432 | args_size.constant += argvec[count].locate.size.constant; |
4433 | gcc_assert (!argvec[count].locate.size.var); |
4434 | } |
4435 | #ifdef BLOCK_REG_PADDING |
4436 | else |
4437 | /* The argument is passed entirely in registers. See at which |
4438 | end it should be padded. */ |
4439 | argvec[count].locate.where_pad = |
4440 | BLOCK_REG_PADDING (arg.mode, NULL_TREE, |
4441 | known_le (GET_MODE_SIZE (arg.mode), |
4442 | UNITS_PER_WORD)); |
4443 | #endif |
4444 | |
4445 | targetm.calls.function_arg_advance (args_so_far, arg); |
4446 | } |
4447 | |
4448 | for (int i = 0; i < nargs; i++) |
4449 | if (reg_parm_stack_space > 0 |
4450 | || argvec[i].reg == 0 |
4451 | || argvec[i].partial != 0) |
4452 | update_stack_alignment_for_call (&argvec[i].locate);
4453 | |
4454 | /* If this machine requires an external definition for library |
4455 | functions, write one out. */ |
4456 | assemble_external_libcall (fun); |
4457 | |
4458 | original_args_size = args_size; |
4459 | args_size.constant = (aligned_upper_bound (args_size.constant
4460 | + stack_pointer_delta, |
4461 | STACK_BYTES) |
4462 | - stack_pointer_delta); |
4463 | |
4464 | args_size.constant = upper_bound (args_size.constant,
4465 | reg_parm_stack_space);
4466 | |
4467 | if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))) |
4468 | args_size.constant -= reg_parm_stack_space; |
4469 | |
4470 | crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size, |
4471 | args_size.constant);
4472 | |
4473 | if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS) |
4474 | { |
4475 | poly_int64 pushed = args_size.constant + pending_stack_adjust; |
4476 | current_function_pushed_stack_size |
4477 | = upper_bound (current_function_pushed_stack_size, pushed);
4478 | } |
4479 | |
4480 | if (ACCUMULATE_OUTGOING_ARGS) |
4481 | { |
4482 | /* Since the stack pointer will never be pushed, it is possible for |
4483 | the evaluation of a parm to clobber something we have already |
4484 | written to the stack. Since most function calls on RISC machines |
4485 | do not use the stack, this is uncommon, but must work correctly. |
4486 | |
4487 | Therefore, we save any area of the stack that was already written |
4488 | and that we are using. Here we set up to do this by making a new |
4489 | stack usage map from the old one. |
4490 | |
4491 | Another approach might be to try to reorder the argument |
4492 | evaluations to avoid this conflicting stack usage. */ |
4493 | |
4494 | needed = args_size.constant; |
4495 | |
4496 | /* Since we will be writing into the entire argument area, the |
4497 | map must be allocated for its entire size, not just the part that |
4498 | is the responsibility of the caller. */ |
4499 | if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))) |
4500 | needed += reg_parm_stack_space; |
4501 | |
4502 | poly_int64 limit = needed; |
4503 | if (ARGS_GROW_DOWNWARD) |
4504 | limit += 1; |
4505 | |
4506 | /* For polynomial sizes, this is the maximum possible size needed |
4507 | for arguments with a constant size and offset. */ |
4508 | HOST_WIDE_INT const_limit = constant_lower_bound (limit);
4509 | highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, |
4510 | const_limit); |
4511 | |
4512 | stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use); |
4513 | stack_usage_map = stack_usage_map_buf; |
4514 | |
4515 | if (initial_highest_arg_in_use) |
4516 | memcpy (stack_usage_map, initial_stack_usage_map,
4517 | initial_highest_arg_in_use);
4518 | |
4519 | if (initial_highest_arg_in_use != highest_outgoing_arg_in_use) |
4520 | memset (&stack_usage_map[initial_highest_arg_in_use], 0,
4521 | highest_outgoing_arg_in_use - initial_highest_arg_in_use);
4522 | needed = 0; |
4523 | |
4524 | /* We must be careful to use virtual regs before they're instantiated, |
4525 | and real regs afterwards. Loop optimization, for example, can create |
4526 | new libcalls after we've instantiated the virtual regs, and if we |
4527 | use virtuals anyway, they won't match the rtl patterns. */ |
4528 | |
4529 | if (virtuals_instantiated) |
4530 | argblock = plus_constant (Pmode, stack_pointer_rtx, |
4531 | STACK_POINTER_OFFSET); |
4532 | else |
4533 | argblock = virtual_outgoing_args_rtx; |
4534 | } |
4535 | else |
4536 | { |
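| /* If the target does not use push instructions for outgoing
| arguments, allocate the whole block of argument space at once.  */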
4537 | if (!targetm.calls.push_argument (0)) |
4538 | argblock = push_block (gen_int_mode (args_size.constant, Pmode), 0, 0); |
4539 | } |
4540 | |
4541 | /* We push args individually in reverse order; perform stack alignment
4542 | before the first push (the last arg). */ |
4543 | if (argblock == 0) |
4544 | anti_adjust_stack (gen_int_mode (args_size.constant |
4545 | - original_args_size.constant, |
4546 | Pmode)); |
4547 | |
4548 | argnum = nargs - 1; |
4549 | |
4550 | #ifdef REG_PARM_STACK_SPACE |
4551 | if (ACCUMULATE_OUTGOING_ARGS) |
4552 | { |
4553 | /* The argument list is the property of the called routine and it |
4554 | may clobber it. If the fixed area has been used for previous |
4555 | parameters, we must save and restore it. */ |
4556 | save_area = save_fixed_argument_area (reg_parm_stack_space, argblock, |
4557 | &low_to_save, &high_to_save);
4558 | } |
4559 | #endif |
4560 | |
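| /* Query the target once more, this time with the end-of-arguments
| marker; whatever it returns is passed to emit_call_1 below as its
| NEXT_ARG_REG operand for the call pattern.  */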
4561 | rtx call_cookie |
4562 | = targetm.calls.function_arg (args_so_far, |
4563 | function_arg_info::end_marker ()); |
4564 | |
4565 | /* Push the args that need to be pushed. */ |
4566 | |
4567 | have_push_fusage = false; |
4568 | |
4569 | /* ARGNUM indexes the ARGVEC array in the order in which the arguments |
4570 | are to be pushed. */ |
4571 | for (count = 0; count < nargs; count++, argnum--) |
4572 | { |
4573 | machine_mode mode = argvec[argnum].mode; |
4574 | rtx val = argvec[argnum].value; |
4575 | rtx reg = argvec[argnum].reg; |
4576 | int partial = argvec[argnum].partial; |
4577 | unsigned int parm_align = argvec[argnum].locate.boundary; |
4578 | poly_int64 lower_bound = 0, upper_bound = 0; |
4579 | |
4580 | if (! (reg != 0 && partial == 0)) |
4581 | { |
4582 | rtx use; |
4583 | |
4584 | if (ACCUMULATE_OUTGOING_ARGS) |
4585 | { |
4586 | /* If this is being stored into a pre-allocated, fixed-size, |
4587 | stack area, save any previous data at that location. */ |
4588 | |
4589 | if (ARGS_GROW_DOWNWARD) |
4590 | { |
4591 | /* stack_slot is negative, but we want to index stack_usage_map |
4592 | with positive values. */ |
4593 | upper_bound = -argvec[argnum].locate.slot_offset.constant + 1; |
4594 | lower_bound = upper_bound - argvec[argnum].locate.size.constant; |
4595 | } |
4596 | else |
4597 | { |
4598 | lower_bound = argvec[argnum].locate.slot_offset.constant; |
4599 | upper_bound = lower_bound + argvec[argnum].locate.size.constant; |
4600 | } |
4601 | |
4602 | if (stack_region_maybe_used_p (lower_bound, upper_bound, |
4603 | reg_parm_stack_space)) |
4604 | { |
4605 | /* We need to make a save area. */ |
4606 | poly_uint64 size |
4607 | = argvec[argnum].locate.size.constant * BITS_PER_UNIT; |
4608 | machine_mode save_mode |
4609 | = int_mode_for_size (size, 1).else_blk ();
4610 | rtx adr |
4611 | = plus_constant (Pmode, argblock, |
4612 | argvec[argnum].locate.offset.constant); |
4613 | rtx stack_area |
4614 | = gen_rtx_MEM (save_mode, memory_address (save_mode, adr)); |
4615 | |
4616 | if (save_mode == BLKmode) |
4617 | { |
4618 | argvec[argnum].save_area |
4619 | = assign_stack_temp (BLKmode, |
4620 | argvec[argnum].locate.size.constant |
4621 | ); |
4622 | |
4623 | emit_block_move (validize_mem |
4624 | (copy_rtx (argvec[argnum].save_area)), |
4625 | stack_area, |
4626 | (gen_int_mode |
4627 | (argvec[argnum].locate.size.constant, |
4628 | Pmode)), |
4629 | BLOCK_OP_CALL_PARM); |
4630 | } |
4631 | else |
4632 | { |
4633 | argvec[argnum].save_area = gen_reg_rtx (save_mode); |
4634 | |
4635 | emit_move_insn (argvec[argnum].save_area, stack_area); |
4636 | } |
4637 | } |
4638 | } |
4639 | |
4640 | emit_push_insn (val, mode, lang_hooks.types.type_for_mode (mode, 0), |
4641 | NULL_RTX, parm_align, partial, reg, 0, argblock, |
4642 | (gen_int_mode |
4643 | (argvec[argnum].locate.offset.constant, Pmode)), |
4644 | reg_parm_stack_space, |
4645 | ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad), false); |
4646 | |
4647 | /* Now mark the segment we just used. */ |
4648 | if (ACCUMULATE_OUTGOING_ARGS) |
4649 | mark_stack_region_used (lower_bound, upper_bound); |
4650 | |
4651 | NO_DEFER_POP; |
4652 | |
4653 | /* Indicate argument access so that alias.cc knows that these |
4654 | values are live. */ |
4655 | if (argblock) |
4656 | use = plus_constant (Pmode, argblock, |
4657 | argvec[argnum].locate.offset.constant); |
4658 | else if (have_push_fusage) |
4659 | continue; |
4660 | else |
4661 | { |
4662 | /* When arguments are pushed, trying to tell alias.cc where |
4663 | exactly this argument is won't work, because the |
4664 | auto-increment causes confusion. So we merely indicate |
4665 | that we access something with a known mode somewhere on |
4666 | the stack. */ |
4667 | use = gen_rtx_PLUS (Pmode, stack_pointer_rtx, |
4668 | gen_rtx_SCRATCH (Pmode)); |
4669 | have_push_fusage = true; |
4670 | } |
4671 | use = gen_rtx_MEM (argvec[argnum].mode, use); |
4672 | use = gen_rtx_USE (VOIDmode, use); |
4673 | call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage); |
4674 | } |
4675 | } |
4676 | |
4677 | argnum = nargs - 1; |
4678 | |
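| /* Put the callee's address into a form the call pattern accepts,
| copying it into a register if necessary.  */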
4679 | fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
4680 | |
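| /* Let the target emit any setup it needs before the argument
| registers are loaded and the call insn is generated.  */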
4681 | targetm.calls.start_call_args (args_so_far); |
4682 | |
4683 | /* When expanding a normal call, args are stored in push order, |
4684 | which is the reverse of what we have here. */ |
4685 | bool any_regs = false; |
4686 | for (int i = nargs; i-- > 0; ) |
4687 | if (argvec[i].reg != NULL_RTX) |
4688 | { |
4689 | targetm.calls.call_args (args_so_far, argvec[i].reg, NULL_TREE); |
4690 | any_regs = true; |
4691 | } |
4692 | if (!any_regs) |
4693 | targetm.calls.call_args (args_so_far, pc_rtx, NULL_TREE); |
4694 | |
4695 | /* Now load any reg parms into their regs. */ |
4696 | |
4697 | /* ARGNUM indexes the ARGVEC array in the order in which the arguments |
4698 | are to be pushed. */ |
4699 | for (count = 0; count < nargs; count++, argnum--) |
4700 | { |
4701 | machine_mode mode = argvec[argnum].mode; |
4702 | rtx val = argvec[argnum].value; |
4703 | rtx reg = argvec[argnum].reg; |
4704 | int partial = argvec[argnum].partial; |
4705 | |
4706 | /* Handle calls that pass values in multiple non-contiguous |
4707 | locations. The PA64 has examples of this for library calls. */ |
4708 | if (reg != 0 && GET_CODE (reg) == PARALLEL) |
4709 | emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode)); |
4710 | else if (reg != 0 && partial == 0) |
4711 | { |
4712 | emit_move_insn (reg, val); |
4713 | #ifdef BLOCK_REG_PADDING |
4714 | poly_int64 size = GET_MODE_SIZE (argvec[argnum].mode); |
4715 | |
4716 | /* Copied from load_register_parameters. */ |
4717 | |
4718 | /* Handle the case where we have a value that needs shifting
4719 | up to the msb, e.g. a QImode value when we're padding
4720 | upward on a BYTES_BIG_ENDIAN machine. */ |
4721 | if (known_lt (size, UNITS_PER_WORD) |
4722 | && (argvec[argnum].locate.where_pad |
4723 | == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))) |
4724 | { |
4725 | rtx x; |
4726 | poly_int64 shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT; |
4727 | |
4728 | /* Assigning REG here rather than a temp makes CALL_FUSAGE |
4729 | report the whole reg as used. Strictly speaking, the |
4730 | call only uses SIZE bytes at the msb end, but it doesn't |
4731 | seem worth generating rtl to say that. */ |
4732 | reg = gen_rtx_REG (word_mode, REGNO (reg)); |
4733 | x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1); |
4734 | if (x != reg) |
4735 | emit_move_insn (reg, x); |
4736 | } |
4737 | #endif |
4738 | } |
4739 | |
4740 | NO_DEFER_POP; |
4741 | } |
4742 | |
4743 | /* Any regs containing parms remain in use through the call. */ |
4744 | for (count = 0; count < nargs; count++) |
4745 | { |
4746 | rtx reg = argvec[count].reg; |
4747 | if (reg != 0 && GET_CODE (reg) == PARALLEL) |
4748 | use_group_regs (&call_fusage, reg); |
4749 | else if (reg != 0) |
4750 | { |
4751 | int partial = argvec[count].partial; |
4752 | if (partial) |
4753 | { |
4754 | int nregs; |
4755 | gcc_assert (partial % UNITS_PER_WORD == 0); |
4756 | nregs = partial / UNITS_PER_WORD; |
4757 | use_regs (&call_fusage, REGNO (reg), nregs); |
4758 | } |
4759 | else |
4760 | use_reg (&call_fusage, reg);
4761 | } |
4762 | } |
4763 | |
4764 | /* Pass the function the address in which to return a structure value. */ |
4765 | if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value) |
4766 | { |
4767 | emit_move_insn (struct_value, |
4768 | force_reg (Pmode, |
4769 | force_operand (XEXP (mem_value, 0), |
4770 | NULL_RTX))); |
4771 | if (REG_P (struct_value)) |
4772 | use_reg (&call_fusage, struct_value);
4773 | } |
4774 | |
4775 | /* Don't allow popping to be deferred, since then |
4776 | cse'ing of library calls could delete a call and leave the pop. */ |
4777 | NO_DEFER_POP; |
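| /* Unless the result is returned in memory, work out the hard register
| in which this library function returns its value.  */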
4778 | valreg = (mem_value == 0 && outmode != VOIDmode |
4779 | ? hard_libcall_value (outmode, orgfun) : NULL_RTX); |
4780 | |
4781 | /* Stack must be properly aligned now. */ |
4782 | gcc_assert (multiple_p (stack_pointer_delta, |
4783 | PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)); |
4784 | |
4785 | before_call = get_last_insn (); |
4786 | |
4787 | if (flag_callgraph_info) |
4788 | record_final_call (SYMBOL_REF_DECL (orgfun), UNKNOWN_LOCATION); |
4789 | |
4790 | /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which |
4791 | will set inhibit_defer_pop to that value. */ |
4792 | /* The return type is needed to decide how many bytes the function pops. |
4793 | Signedness plays no role in that, so for simplicity, we pretend it's |
4794 | always signed. We also assume that the list of arguments passed has |
4795 | no impact, so we pretend it is unknown. */ |
4796 | |
4797 | emit_call_1 (fun, NULL,
4798 | get_identifier (XSTR (orgfun, 0)),
4799 | build_function_type (tfom, NULL_TREE),
4800 | original_args_size.constant, args_size.constant,
4801 | struct_value_size, call_cookie, valreg,
4802 | old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
4803 | |
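| /* Attach a REG_CALL_DECL note identifying the callee, so later passes
| can relate this call insn to the function it calls.  */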
4804 | rtx datum = orgfun; |
4805 | gcc_assert (GET_CODE (datum) == SYMBOL_REF); |
4806 | rtx_call_insn *last = last_call_insn (); |
4807 | add_reg_note (last, REG_CALL_DECL, datum); |
4808 | |
4809 | /* Right-shift returned value if necessary. */ |
4810 | if (!pcc_struct_value |
4811 | && TYPE_MODE (tfom) != BLKmode |
4812 | && targetm.calls.return_in_msb (tfom)) |
4813 | { |
4814 | shift_return_value (TYPE_MODE (tfom), false, valreg);
4815 | valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg)); |
4816 | } |
4817 | |
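| /* Let the target emit any cleanup it needs now that the call and its
| argument sequence are complete.  */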
4818 | targetm.calls.end_call_args (args_so_far); |
4819 | |
4820 | /* For calls to `setjmp', etc., inform function.cc:setjmp_warnings |
4821 | that it should complain if nonvolatile values are live. For |
4822 | functions that cannot return, inform flow that control does not |
4823 | fall through. */ |
4824 | if (flags & ECF_NORETURN) |
4825 | { |
4826 | /* The barrier note must be emitted |
4827 | immediately after the CALL_INSN. Some ports emit more than |
4828 | just a CALL_INSN above, so we must search for it here. */ |
4829 | rtx_insn *last = get_last_insn (); |
4830 | while (!CALL_P (last)) |
4831 | { |
4832 | last = PREV_INSN (last);
4833 | /* There was no CALL_INSN? */ |
4834 | gcc_assert (last != before_call); |
4835 | } |
4836 | |
4837 | emit_barrier_after (last); |
4838 | } |
4839 | |
4840 | /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW |
4841 | and LCT_RETURNS_TWICE, cannot perform non-local gotos. */ |
4842 | if (flags & ECF_NOTHROW) |
4843 | { |
4844 | rtx_insn *last = get_last_insn (); |
4845 | while (!CALL_P (last)) |
4846 | { |
4847 | last = PREV_INSN (last);
4848 | /* There was no CALL_INSN? */ |
4849 | gcc_assert (last != before_call); |
4850 | } |
4851 | |
4852 | make_reg_eh_region_note_nothrow_nononlocal (last); |
4853 | } |
4854 | |
4855 | /* Now restore inhibit_defer_pop to its actual original value. */ |
4856 | OK_DEFER_POP; |
4857 | |
4858 | pop_temp_slots (); |
4859 | |
4860 | /* Copy the value to the right place. */ |
4861 | if (outmode != VOIDmode && retval) |
4862 | { |
4863 | if (mem_value) |
4864 | { |
4865 | if (value == 0) |
4866 | value = mem_value; |
4867 | if (value != mem_value) |
4868 | emit_move_insn (value, mem_value); |
4869 | } |
4870 | else if (GET_CODE (valreg) == PARALLEL) |
4871 | { |
4872 | if (value == 0) |
4873 | value = gen_reg_rtx (outmode); |
4874 | emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
4875 | } |
4876 | else |
4877 | { |
4878 | /* Convert to the proper mode if a promotion has been active. */ |
4879 | if (GET_MODE (valreg) != outmode) |
4880 | { |
4881 | int unsignedp = TYPE_UNSIGNED (tfom); |
4882 | |
4883 | gcc_assert (promote_function_mode (tfom, outmode, &unsignedp, |
4884 | fndecl ? TREE_TYPE (fndecl) : fntype, 1) |
4885 | == GET_MODE (valreg)); |
4886 | valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
4887 | } |
4888 | |
4889 | if (value != 0) |
4890 | emit_move_insn (value, valreg); |
4891 | else |
4892 | value = valreg; |
4893 | } |
4894 | } |
4895 | |
4896 | if (ACCUMULATE_OUTGOING_ARGS) |
4897 | { |
4898 | #ifdef REG_PARM_STACK_SPACE |
4899 | if (save_area) |
4900 | restore_fixed_argument_area (save_area, argblock, |
4901 | high_to_save, low_to_save); |
4902 | #endif |
4903 | |
4904 | /* If we saved any argument areas, restore them. */ |
4905 | for (count = 0; count < nargs; count++) |
4906 | if (argvec[count].save_area) |
4907 | { |
4908 | machine_mode save_mode = GET_MODE (argvec[count].save_area); |
4909 | rtx adr = plus_constant (Pmode, argblock, |
4910 | argvec[count].locate.offset.constant); |
4911 | rtx stack_area = gen_rtx_MEM (save_mode, |
4912 | memory_address (save_mode, adr)); |
4913 | |
4914 | if (save_mode == BLKmode) |
4915 | emit_block_move (stack_area, |
4916 | validize_mem |
4917 | (copy_rtx (argvec[count].save_area)), |
4918 | (gen_int_mode |
4919 | (argvec[count].locate.size.constant, Pmode)), |
4920 | BLOCK_OP_CALL_PARM); |
4921 | else |
4922 | emit_move_insn (stack_area, argvec[count].save_area); |
4923 | } |
4924 | |
4925 | highest_outgoing_arg_in_use = initial_highest_arg_in_use; |
4926 | stack_usage_map = initial_stack_usage_map; |
4927 | stack_usage_watermark = initial_stack_usage_watermark; |
4928 | } |
4929 | |
4930 | free (stack_usage_map_buf);
4931 | |
4932 | return value; |
4933 | |
4934 | } |
4935 | |
4936 | |
4937 | /* Store a single argument for a function call |
4938 | into the register or memory area where it must be passed. |
4939 | *ARG describes the argument value and where to pass it. |
4940 | |
4941 | ARGBLOCK is the address of the stack-block for all the arguments, |
4942 | or 0 on a machine where arguments are pushed individually. |
4943 | |
4944 | MAY_BE_ALLOCA nonzero says this could be a call to `alloca' |
4945 | so we must be careful about how the stack is used.
4946 | |
4947 | VARIABLE_SIZE nonzero says that this was a variable-sized outgoing |
4948 | argument stack. Under ACCUMULATE_OUTGOING_ARGS it indicates
4949 | that we need not worry about saving and restoring the stack. |
4950 | |
4951 | FNDECL is the declaration of the function we are calling. |
4952 | |
4953 | Return true if this arg should cause sibcall failure, |
4954 | false otherwise. */ |
4955 | |
4956 | static bool |
4957 | store_one_arg (struct arg_data *arg, rtx argblock, int flags, |
4958 | int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space) |
4959 | { |
4960 | tree pval = arg->tree_value; |
4961 | rtx reg = 0; |
4962 | int partial = 0; |
4963 | poly_int64 used = 0; |
4964 | poly_int64 lower_bound = 0, upper_bound = 0; |
4965 | bool sibcall_failure = false; |
4966 | |
4967 | if (TREE_CODE (pval) == ERROR_MARK) |
4968 | return true; |
4969 | |
4970 | /* Push a new temporary level for any temporaries we make for |
4971 | this argument. */ |
4972 | push_temp_slots (); |
4973 | |
4974 | if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)) |
4975 | { |
4976 | /* If this is being stored into a pre-allocated, fixed-size, stack area, |
4977 | save any previous data at that location. */ |
4978 | if (argblock && ! variable_size && arg->stack) |
4979 | { |
4980 | if (ARGS_GROW_DOWNWARD) |
4981 | { |
4982 | /* stack_slot is negative, but we want to index stack_usage_map |
4983 | with positive values. */ |
4984 | if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS) |
4985 | { |
4986 | rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1); |
4987 | upper_bound = -rtx_to_poly_int64 (offset) + 1;
4988 | } |
4989 | else |
4990 | upper_bound = 0; |
4991 | |
4992 | lower_bound = upper_bound - arg->locate.size.constant; |
4993 | } |
4994 | else |
4995 | { |
4996 | if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS) |
4997 | { |
4998 | rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1); |
4999 | lower_bound = rtx_to_poly_int64 (offset);
5000 | } |
5001 | else |
5002 | lower_bound = 0; |
5003 | |
5004 | upper_bound = lower_bound + arg->locate.size.constant; |
5005 | } |
5006 | |
5007 | if (stack_region_maybe_used_p (lower_bound, upper_bound, |
5008 | reg_parm_stack_space)) |
5009 | { |
5010 | /* We need to make a save area. */ |
5011 | poly_uint64 size = arg->locate.size.constant * BITS_PER_UNIT; |
5012 | machine_mode save_mode |
5013 | = int_mode_for_size (size, 1).else_blk ();
5014 | rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0)); |
5015 | rtx stack_area = gen_rtx_MEM (save_mode, adr); |
5016 | |
5017 | if (save_mode == BLKmode) |
5018 | { |
5019 | arg->save_area |
5020 | = assign_temp (TREE_TYPE (arg->tree_value), 1, 1); |
5021 | preserve_temp_slots (arg->save_area); |
5022 | emit_block_move (validize_mem (copy_rtx (arg->save_area)), |
5023 | stack_area, |
5024 | (gen_int_mode |
5025 | (arg->locate.size.constant, Pmode)), |
5026 | BLOCK_OP_CALL_PARM); |
5027 | } |
5028 | else |
5029 | { |
5030 | arg->save_area = gen_reg_rtx (save_mode); |
5031 | emit_move_insn (arg->save_area, stack_area); |
5032 | } |
5033 | } |
5034 | } |
5035 | } |
5036 | |
5037 | /* If this isn't going to be placed on both the stack and in registers, |
5038 | set up the register and number of words. */ |
5039 | if (! arg->pass_on_stack) |
5040 | { |
5041 | if (flags & ECF_SIBCALL) |
5042 | reg = arg->tail_call_reg; |
5043 | else |
5044 | reg = arg->reg; |
5045 | partial = arg->partial; |
5046 | } |
5047 | |
5048 | /* Being passed entirely in a register. We shouldn't be called in |
5049 | this case. */ |
5050 | gcc_assert (reg == 0 || partial != 0); |
5051 | |
5052 | /* If this arg needs special alignment, don't load the registers |
5053 | here. */ |
5054 | if (arg->n_aligned_regs != 0) |
5055 | reg = 0; |
5056 | |
5057 | /* If this is being passed partially in a register, we can't evaluate |
5058 | it directly into its stack slot. Otherwise, we can. */ |
5059 | if (arg->value == 0) |
5060 | { |
5061 | /* stack_arg_under_construction is nonzero if a function argument is |
5062 | being evaluated directly into the outgoing argument list and |
5063 | expand_call must take special action to preserve the argument list |
5064 | if it is called recursively. |
5065 | |
5066 | For scalar function arguments stack_usage_map is sufficient to |
5067 | determine which stack slots must be saved and restored. Scalar |
5068 | arguments in general have pass_on_stack == false. |
5069 | |
5070 | If this argument is initialized by a function which takes the |
5071 | address of the argument (a C++ constructor or a C function |
5072 | returning a BLKmode structure), then stack_usage_map is |
5073 | insufficient and expand_call must push the stack around the |
5074 | function call. Such arguments have pass_on_stack == true. |
5075 | |
5076 | Note that it is always safe to set stack_arg_under_construction, |
5077 | but this generates suboptimal code if set when not needed. */ |
5078 | |
5079 | if (arg->pass_on_stack) |
5080 | stack_arg_under_construction++; |
5081 | |
5082 | arg->value = expand_expr (pval,
5083 | (partial
5084 | || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
5085 | ? NULL_RTX : arg->stack,
5086 | VOIDmode, EXPAND_STACK_PARM);
5087 | |
5088 | /* If we are promoting object (or for any other reason) the mode |
5089 | doesn't agree, convert the mode. */ |
5090 | |
5091 | if (arg->mode != TYPE_MODE (TREE_TYPE (pval))) |
5092 | arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
5093 | arg->value, arg->unsignedp);
5094 | |
5095 | if (arg->pass_on_stack) |
5096 | stack_arg_under_construction--; |
5097 | } |
5098 | |
5099 | /* Check for overlap with already clobbered argument area. */ |
5100 | if ((flags & ECF_SIBCALL) |
5101 | && MEM_P (arg->value) |
5102 | && mem_might_overlap_already_clobbered_arg_p (XEXP (arg->value, 0), |
5103 | arg->locate.size.constant))
5104 | sibcall_failure = true; |
5105 | |
5106 | /* Don't allow anything left on stack from computation |
5107 | of argument to alloca. */ |
5108 | if (flags & ECF_MAY_BE_ALLOCA) |
5109 | do_pending_stack_adjust (); |
5110 | |
5111 | if (arg->value == arg->stack) |
5112 | /* If the value is already in the stack slot, we are done. */ |
5113 | ; |
5114 | else if (arg->mode != BLKmode) |
5115 | { |
5116 | unsigned int parm_align; |
5117 | |
5118 | /* Argument is a scalar, not entirely passed in registers. |
5119 | (If part is passed in registers, arg->partial says how much |
5120 | and emit_push_insn will take care of putting it there.) |
5121 | |
5122 | Push it, and if its size is less than the |
5123 | amount of space allocated to it, |
5124 | also bump stack pointer by the additional space. |
5125 | Note that in C the default argument promotions |
5126 | will prevent such mismatches. */ |
5127 | |
5128 | poly_int64 size = (TYPE_EMPTY_P (TREE_TYPE (pval)) |
5129 | ? 0 : GET_MODE_SIZE (arg->mode));
5130 | |
5131 | /* Compute how much space the push instruction will push. |
5132 | On many machines, pushing a byte will advance the stack |
5133 | pointer by a halfword. */ |
5134 | #ifdef PUSH_ROUNDING |
5135 | size = PUSH_ROUNDING (size); |
5136 | #endif |
5137 | used = size; |
5138 | |
5139 | /* Compute how much space the argument should get: |
5140 | round up to a multiple of the alignment for arguments. */ |
5141 | if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval)) |
5142 | != PAD_NONE) |
5143 | /* At the moment we don't (need to) support ABIs for which the |
5144 | padding isn't known at compile time. In principle it should |
5145 | be easy to add though. */ |
5146 | used = force_align_up (size, PARM_BOUNDARY / BITS_PER_UNIT);
5147 | |
5148 | /* Compute the alignment of the pushed argument. */ |
5149 | parm_align = arg->locate.boundary; |
5150 | if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval)) |
5151 | == PAD_DOWNWARD) |
5152 | { |
5153 | poly_int64 pad = used - size; |
5154 | unsigned int pad_align = known_alignment (pad) * BITS_PER_UNIT;
5155 | if (pad_align != 0) |
5156 | parm_align = MIN (parm_align, pad_align); |
5157 | } |
5158 | |
5159 | /* This isn't already where we want it on the stack, so put it there. |
5160 | This can either be done with push or copy insns. */ |
5161 | if (maybe_ne (used, 0)
5162 | && !emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), |
5163 | NULL_RTX, parm_align, partial, reg, used - size, |
5164 | argblock, ARGS_SIZE_RTX (arg->locate.offset), |
5165 | reg_parm_stack_space, |
5166 | ARGS_SIZE_RTX (arg->locate.alignment_pad), true)) |
5167 | sibcall_failure = true; |
5168 | |
5169 | /* Unless this is a partially-in-register argument, the argument is now |
5170 | in the stack. */ |
5171 | if (partial == 0) |
5172 | arg->value = arg->stack; |
5173 | } |
5174 | else |
5175 | { |
5176 | /* BLKmode, at least partly to be pushed. */ |
5177 | |
5178 | unsigned int parm_align; |
5179 | poly_int64 excess; |
5180 | rtx size_rtx; |
5181 | |
5182 | /* Pushing a nonscalar. |
5183 | If part is passed in registers, PARTIAL says how much |
5184 | and emit_push_insn will take care of putting it there. */ |
5185 | |
5186 | /* Round its size up to a multiple |
5187 | of the allocation unit for arguments. */ |
5188 | |
5189 | if (arg->locate.size.var != 0) |
5190 | { |
5191 | excess = 0; |
5192 | size_rtx = ARGS_SIZE_RTX (arg->locate.size); |
5193 | } |
5194 | else |
5195 | { |
5196 | /* PUSH_ROUNDING has no effect on us, because emit_push_insn |
5197 | for BLKmode is careful to avoid it. */ |
5198 | excess = (arg->locate.size.constant |
5199 | - arg_int_size_in_bytes (TREE_TYPE (pval)) |
5200 | + partial); |
5201 | size_rtx = expand_expr (arg_size_in_bytes (TREE_TYPE (pval)),
5202 | NULL_RTX, TYPE_MODE (sizetype),
5203 | EXPAND_NORMAL);
5204 | } |
5205 | |
5206 | parm_align = arg->locate.boundary; |
5207 | |
5208 | /* When an argument is padded down, the block is aligned to |
5209 | PARM_BOUNDARY, but the actual argument isn't. */ |
5210 | if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval)) |
5211 | == PAD_DOWNWARD) |
5212 | { |
5213 | if (arg->locate.size.var) |
5214 | parm_align = BITS_PER_UNIT; |
5215 | else |
5216 | { |
5217 | unsigned int excess_align |
5218 | = known_alignment (excess) * BITS_PER_UNIT;
5219 | if (excess_align != 0) |
5220 | parm_align = MIN (parm_align, excess_align); |
5221 | } |
5222 | } |
5223 | |
5224 | if ((flags & ECF_SIBCALL) && MEM_P (arg->value)) |
5225 | { |
5226 | /* emit_push_insn might not work properly if arg->value and |
5227 | argblock + arg->locate.offset areas overlap. */ |
5228 | rtx x = arg->value; |
5229 | poly_int64 i = 0; |
5230 | |
5231 | if (strip_offset (XEXP (x, 0), &i) |
5232 | == crtl->args.internal_arg_pointer) |
5233 | { |
5234 | /* arg->locate doesn't contain the pretend_args_size offset,
5235 | it's part of argblock. Ensure we don't count it in I. */ |
5236 | if (STACK_GROWS_DOWNWARD) |
5237 | i -= crtl->args.pretend_args_size; |
5238 | else |
5239 | i += crtl->args.pretend_args_size; |
5240 | |
5241 | /* expand_call should ensure this. */ |
5242 | gcc_assert (!arg->locate.offset.var |
5243 | && arg->locate.size.var == 0); |
5244 | poly_int64 size_val = rtx_to_poly_int64 (size_rtx);
5245 | |
5246 | if (known_eq (arg->locate.offset.constant, i)) |
5247 | { |
5248 | /* Even though they appear to be at the same location, |
5249 | if part of the outgoing argument is in registers, |
5250 | they aren't really at the same location. Check for |
5251 | this by making sure that the incoming size is the |
5252 | same as the outgoing size. */ |
5253 | if (partial != 0) |
5254 | sibcall_failure = true; |
5255 | } |
5256 | else if (maybe_in_range_p (arg->locate.offset.constant,
5257 | i, size_val))
5258 | sibcall_failure = true; |
5259 | /* Use arg->locate.size.constant instead of size_rtx |
5260 | because we only care about the part of the argument |
5261 | on the stack. */ |
5262 | else if (maybe_in_range_p (i, arg->locate.offset.constant,
5263 | arg->locate.size.constant))
5264 | sibcall_failure = true; |
5265 | } |
5266 | } |
5267 | |
5268 | if (!CONST_INT_P (size_rtx) || INTVAL (size_rtx) != 0) |
5269 | emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx, |
5270 | parm_align, partial, reg, excess, argblock, |
5271 | ARGS_SIZE_RTX (arg->locate.offset), |
5272 | reg_parm_stack_space, |
5273 | ARGS_SIZE_RTX (arg->locate.alignment_pad), false); |
5274 | /* If we bypass emit_push_insn because it is a zero sized argument, |
5275 | we still might need to adjust stack if such argument requires |
5276 | extra alignment. See PR104558. */ |
5277 | else if ((arg->locate.alignment_pad.var |
5278 | || maybe_ne (arg->locate.alignment_pad.constant, 0))
5279 | && !argblock) |
5280 | anti_adjust_stack (ARGS_SIZE_RTX (arg->locate.alignment_pad)); |
5281 | |
5282 | /* Unless this is a partially-in-register argument, the argument is now |
5283 | in the stack. |
5284 | |
5285 | ??? Unlike the case above, in which we want the actual |
5286 | address of the data, so that we can load it directly into a |
5287 | register, here we want the address of the stack slot, so that |
5288 | it's properly aligned for word-by-word copying or something |
5289 | like that. It's not clear that this is always correct. */ |
5290 | if (partial == 0) |
5291 | arg->value = arg->stack_slot; |
5292 | } |
5293 | |
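| /* If the value is to be passed in multiple non-contiguous registers,
| load it into temporaries now; it is moved into the real registers
| just before the call.  */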
5294 | if (arg->reg && GET_CODE (arg->reg) == PARALLEL) |
5295 | { |
5296 | tree type = TREE_TYPE (arg->tree_value); |
5297 | arg->parallel_value |
5298 | = emit_group_load_into_temps (arg->reg, arg->value, type, |
5299 | int_size_in_bytes (type)); |
5300 | } |
5301 | |
5302 | /* Mark all slots this store used. */ |
5303 | if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL) |
5304 | && argblock && ! variable_size && arg->stack) |
5305 | mark_stack_region_used (lower_bound, upper_bound); |
5306 | |
5307 | /* Once we have pushed something, pops can't safely |
5308 | be deferred during the rest of the arguments. */ |
5309 | NO_DEFER_POP; |
5310 | |
5311 | /* Free any temporary slots made in processing this argument. */ |
5312 | pop_temp_slots (); |
5313 | |
5314 | return sibcall_failure; |
5315 | } |
5316 | |
5317 | /* Nonzero if we do not know how to pass ARG solely in registers. */ |
5318 | |
5319 | bool |
5320 | must_pass_in_stack_var_size (const function_arg_info &arg) |
5321 | { |
5322 | if (!arg.type) |
5323 | return false; |
5324 | |
5325 | /* If the type has variable size... */ |
5326 | if (!poly_int_tree_p (TYPE_SIZE (arg.type))) |
5327 | return true; |
5328 | |
5329 | /* If the type is marked as addressable (it is required |
5330 | to be constructed into the stack)... */ |
5331 | if (TREE_ADDRESSABLE (arg.type)) |
5332 | return true; |
5333 | |
5334 | return false; |
5335 | } |
5336 | |
5337 | /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one |
5338 | takes trailing padding of a structure into account. */ |
5339 | /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */ |
5340 | |
5341 | bool |
5342 | must_pass_in_stack_var_size_or_pad (const function_arg_info &arg) |
5343 | { |
5344 | if (!arg.type) |
5345 | return false; |
5346 | |
5347 | /* If the type has variable size... */ |
5348 | if (TREE_CODE (TYPE_SIZE (arg.type)) != INTEGER_CST) |
5349 | return true; |
5350 | |
5351 | /* If the type is marked as addressable (it is required |
5352 | to be constructed into the stack)... */ |
5353 | if (TREE_ADDRESSABLE (arg.type)) |
5354 | return true; |
5355 | |
5356 | if (TYPE_EMPTY_P (arg.type)) |
5357 | return false; |
5358 | |
5359 | /* If the padding and mode of the type is such that a copy into |
5360 | a register would put it into the wrong part of the register. */ |
5361 | if (arg.mode == BLKmode |
5362 | && int_size_in_bytes (arg.type) % (PARM_BOUNDARY / BITS_PER_UNIT) |
5363 | && (targetm.calls.function_arg_padding (arg.mode, arg.type) |
5364 | == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))) |
5365 | return true; |
5366 | |
5367 | return false; |
5368 | } |
5369 | |
5370 | /* Return true if TYPE must be passed on the stack when passed to |
5371 | the "..." arguments of a function. */ |
5372 | |
5373 | bool |
5374 | must_pass_va_arg_in_stack (tree type) |
5375 | { |
5376 | function_arg_info arg (type, /*named=*/false); |
5377 | return targetm.calls.must_pass_in_stack (arg); |
5378 | } |
5379 | |
5380 | /* Return true if FIELD is the C++17 empty base field that should |
5381 | be ignored for ABI calling convention decisions in order to |
5382 | maintain ABI compatibility between C++14 and earlier, which doesn't |
5383 | add this FIELD to classes with empty bases, and C++17 and later |
5384 | which does. */ |
5385 | |
5386 | bool |
5387 | cxx17_empty_base_field_p (const_tree field) |
5388 | { |
5389 | return (DECL_FIELD_ABI_IGNORED (field) |
5390 | && DECL_ARTIFICIAL (field) |
5391 | && RECORD_OR_UNION_TYPE_P (TREE_TYPE (field)) |
5392 | && !lookup_attribute ("no_unique_address", DECL_ATTRIBUTES (field)));
5393 | } |
5394 |