1/* Expand builtin functions.
2 Copyright (C) 1988-2025 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8Software Foundation; either version 3, or (at your option) any later
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
19
20/* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.cc instead. */
23
24#include "config.h"
25#include "system.h"
26#include "coretypes.h"
27#include "backend.h"
28#include "target.h"
29#include "rtl.h"
30#include "tree.h"
31#include "memmodel.h"
32#include "gimple.h"
33#include "predict.h"
34#include "tm_p.h"
35#include "stringpool.h"
36#include "tree-vrp.h"
37#include "tree-ssanames.h"
38#include "expmed.h"
39#include "optabs.h"
40#include "emit-rtl.h"
41#include "recog.h"
42#include "diagnostic-core.h"
43#include "alias.h"
44#include "fold-const.h"
45#include "fold-const-call.h"
46#include "gimple-ssa-warn-access.h"
47#include "stor-layout.h"
48#include "calls.h"
49#include "varasm.h"
50#include "tree-object-size.h"
51#include "tree-ssa-strlen.h"
52#include "realmpfr.h"
53#include "cfgrtl.h"
54#include "except.h"
55#include "dojump.h"
56#include "explow.h"
57#include "stmt.h"
58#include "expr.h"
59#include "libfuncs.h"
60#include "output.h"
61#include "typeclass.h"
62#include "langhooks.h"
63#include "value-prof.h"
64#include "builtins.h"
65#include "stringpool.h"
66#include "attribs.h"
67#include "asan.h"
68#include "internal-fn.h"
69#include "case-cfn-macros.h"
70#include "gimple-iterator.h"
71#include "gimple-fold.h"
72#include "intl.h"
73#include "file-prefix-map.h" /* remap_macro_filename() */
74#include "ipa-strub.h" /* strub_watermark_parm() */
75#include "gomp-constants.h"
76#include "omp-general.h"
77#include "tree-dfa.h"
78#include "gimple-ssa.h"
79#include "tree-ssa-live.h"
80#include "tree-outof-ssa.h"
81#include "attr-fnspec.h"
82#include "demangle.h"
83#include "gimple-range.h"
84#include "pointer-query.h"
85
86struct target_builtins default_target_builtins;
87#if SWITCHABLE_TARGET
88struct target_builtins *this_target_builtins = &default_target_builtins;
89#endif
90
91/* Define the names of the builtin function types and codes. */
92const char *const built_in_class_names[BUILT_IN_LAST]
93 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
94
95#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
96const char * built_in_names[(int) END_BUILTINS] =
97{
98#include "builtins.def"
99};
100
101/* Set up an array of builtin_info_type, making sure each element's decl is
102 initialized to NULL_TREE. */
103builtin_info_type builtin_info[(int)END_BUILTINS];
104
105/* Non-zero if __builtin_constant_p should be folded right away. */
106bool force_folding_builtin_constant_p;
107
108static int target_char_cast (tree, char *);
109static int apply_args_size (void);
110static int apply_result_size (void);
111static rtx result_vector (int, rtx);
112static void expand_builtin_prefetch (tree);
113static rtx expand_builtin_apply_args (void);
114static rtx expand_builtin_apply_args_1 (void);
115static rtx expand_builtin_apply (rtx, rtx, rtx);
116static void expand_builtin_return (rtx);
117static rtx expand_builtin_classify_type (tree);
118static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
119static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
120static rtx expand_builtin_interclass_mathfn (tree, rtx);
121static rtx expand_builtin_sincos (tree);
122static rtx expand_builtin_fegetround (tree, rtx, machine_mode);
123static rtx expand_builtin_feclear_feraise_except (tree, rtx, machine_mode,
124 optab);
125static rtx expand_builtin_cexpi (tree, rtx);
126static rtx expand_builtin_issignaling (tree, rtx);
127static rtx expand_builtin_int_roundingfn (tree, rtx);
128static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
129static rtx expand_builtin_next_arg (void);
130static rtx expand_builtin_va_start (tree);
131static rtx expand_builtin_va_end (tree);
132static rtx expand_builtin_va_copy (tree);
133static rtx inline_expand_builtin_bytecmp (tree, rtx);
134static rtx expand_builtin_strcmp (tree, rtx);
135static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
136static rtx expand_builtin_memcpy (tree, rtx);
137static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
138 rtx target, tree exp,
139 memop_ret retmode,
140 bool might_overlap);
141static rtx expand_builtin_memmove (tree, rtx);
142static rtx expand_builtin_mempcpy (tree, rtx);
143static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
144static rtx expand_builtin_strcpy (tree, rtx);
145static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
146static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
147static rtx expand_builtin_strncpy (tree, rtx);
148static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
149static rtx expand_builtin_bzero (tree);
150static rtx expand_builtin_strlen (tree, rtx, machine_mode);
151static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
152static rtx expand_builtin_alloca (tree);
153static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
154static rtx expand_builtin_frame_address (tree, tree);
155static rtx expand_builtin_stack_address ();
156static tree stabilize_va_list_loc (location_t, tree, int);
157static rtx expand_builtin_expect (tree, rtx);
158static rtx expand_builtin_expect_with_probability (tree, rtx);
159static tree fold_builtin_constant_p (tree);
160static tree fold_builtin_classify_type (tree);
161static tree fold_builtin_strlen (location_t, tree, tree, tree);
162static tree fold_builtin_inf (location_t, tree, int);
163static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
164static bool validate_arg (const_tree, enum tree_code code);
165static rtx expand_builtin_fabs (tree, rtx, rtx);
166static rtx expand_builtin_signbit (tree, rtx);
167static tree fold_builtin_memcmp (location_t, tree, tree, tree);
168static tree fold_builtin_isascii (location_t, tree);
169static tree fold_builtin_toascii (location_t, tree);
170static tree fold_builtin_isdigit (location_t, tree);
171static tree fold_builtin_fabs (location_t, tree, tree);
172static tree fold_builtin_abs (location_t, tree, tree);
173static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
174 enum tree_code);
175static tree fold_builtin_iseqsig (location_t, tree, tree);
176static tree fold_builtin_varargs (location_t, tree, tree*, int);
177
178static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
179static tree fold_builtin_strspn (location_t, tree, tree, tree, tree);
180static tree fold_builtin_strcspn (location_t, tree, tree, tree, tree);
181
182static rtx expand_builtin_object_size (tree);
183static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
184 enum built_in_function);
185static void maybe_emit_chk_warning (tree, enum built_in_function);
186static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
187static tree fold_builtin_object_size (tree, tree, enum built_in_function);
188
189unsigned HOST_WIDE_INT target_newline;
190unsigned HOST_WIDE_INT target_percent;
191static unsigned HOST_WIDE_INT target_c;
192static unsigned HOST_WIDE_INT target_s;
193char target_percent_c[3];
194char target_percent_s[3];
195char target_percent_s_newline[4];
196static tree do_mpfr_remquo (tree, tree, tree);
197static tree do_mpfr_lgamma_r (tree, tree, tree);
198static void expand_builtin_sync_synchronize (void);
199
200/* Return true if NAME starts with __builtin_ or __sync_. */
201
202static bool
203is_builtin_name (const char *name)
204{
205 return (startswith (name, "__builtin_")
206 || startswith (name, "__sync_")
207 || startswith (name, "__atomic_"));
208}
209
210/* Return true if NODE should be considered for inline expansion regardless
211 of the optimization level. This means whenever a function is invoked with
212 its "internal" name, which normally contains the prefix "__builtin". */
213
214bool
215called_as_built_in (tree node)
216{
217 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
218 we want the name used to call the function, not the name it
219 will have. */
220 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
221 return is_builtin_name (name);
222}
223
224/* Compute values M and N such that M divides (address of EXP - N) and such
225 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
226 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
227 *ALIGNP and any bit-offset to *BITPOSP.
228
229 Note that the address (and thus the alignment) computed here is based
230 on the address to which a symbol resolves, whereas DECL_ALIGN is based
231 on the address at which an object is actually located. These two
232 addresses are not always the same. For example, on ARM targets,
233 the address &foo of a Thumb function foo() has the lowest bit set,
234 whereas foo() itself starts on an even address.
235
236 If ADDR_P is true we are taking the address of the memory reference EXP
237 and thus cannot rely on the access taking place. */
238
239bool
240get_object_alignment_2 (tree exp, unsigned int *alignp,
241 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
242{
243 poly_int64 bitsize, bitpos;
244 tree offset;
245 machine_mode mode;
246 int unsignedp, reversep, volatilep;
247 unsigned int align = BITS_PER_UNIT;
248 bool known_alignment = false;
249
250 /* Get the innermost object and the constant (bitpos) and possibly
251 variable (offset) offset of the access. */
252 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
253 &unsignedp, &reversep, &volatilep);
254
255 /* Extract alignment information from the innermost object and
256 possibly adjust bitpos and offset. */
257 if (TREE_CODE (exp) == FUNCTION_DECL)
258 {
259 /* Function addresses can encode extra information besides their
260 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
261 allows the low bit to be used as a virtual bit, we know
262 that the address itself must be at least 2-byte aligned. */
263 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
264 align = 2 * BITS_PER_UNIT;
265 }
266 else if (TREE_CODE (exp) == LABEL_DECL)
267 ;
268 else if (TREE_CODE (exp) == CONST_DECL)
269 {
270 /* The alignment of a CONST_DECL is determined by its initializer. */
271 exp = DECL_INITIAL (exp);
272 align = TYPE_ALIGN (TREE_TYPE (exp));
273 if (CONSTANT_CLASS_P (exp))
274 align = targetm.constant_alignment (exp, align);
275
276 known_alignment = true;
277 }
278 else if (DECL_P (exp))
279 {
280 align = DECL_ALIGN (exp);
281 known_alignment = true;
282 }
283 else if (TREE_CODE (exp) == INDIRECT_REF
284 || TREE_CODE (exp) == MEM_REF
285 || TREE_CODE (exp) == TARGET_MEM_REF)
286 {
287 tree addr = TREE_OPERAND (exp, 0);
288 unsigned ptr_align;
289 unsigned HOST_WIDE_INT ptr_bitpos;
290 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
291
292 /* If the address is explicitly aligned, handle that. */
293 if (TREE_CODE (addr) == BIT_AND_EXPR
294 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
295 {
296 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
297 ptr_bitmask *= BITS_PER_UNIT;
298 align = least_bit_hwi (ptr_bitmask);
299 addr = TREE_OPERAND (addr, 0);
300 }
301
302 known_alignment
303 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
304 align = MAX (ptr_align, align);
305
306 /* Re-apply explicit alignment to the bitpos. */
307 ptr_bitpos &= ptr_bitmask;
308
309 /* The alignment of the pointer operand in a TARGET_MEM_REF
310 has to take the variable offset parts into account. */
311 if (TREE_CODE (exp) == TARGET_MEM_REF)
312 {
313 if (TMR_INDEX (exp))
314 {
315 unsigned HOST_WIDE_INT step = 1;
316 if (TMR_STEP (exp))
317 step = TREE_INT_CST_LOW (TMR_STEP (exp));
318 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
319 }
320 if (TMR_INDEX2 (exp))
321 align = BITS_PER_UNIT;
322 known_alignment = false;
323 }
324
325 /* When EXP is an actual memory reference then we can use
326 TYPE_ALIGN of a pointer indirection to derive alignment.
327 Do so only if get_pointer_alignment_1 did not reveal absolute
328 alignment knowledge and if using that alignment would
329 improve the situation. */
330 unsigned int talign;
331 if (!addr_p && !known_alignment
332 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
333 && talign > align)
334 align = talign;
335 else
336 {
337 /* Else adjust bitpos accordingly. */
338 bitpos += ptr_bitpos;
339 if (TREE_CODE (exp) == MEM_REF
340 || TREE_CODE (exp) == TARGET_MEM_REF)
341 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
342 }
343 }
344 else if (TREE_CODE (exp) == STRING_CST)
345 {
346 /* STRING_CSTs are the only constant objects we allow not to be
347 wrapped inside a CONST_DECL. */
348 align = TYPE_ALIGN (TREE_TYPE (exp));
349 if (CONSTANT_CLASS_P (exp))
350 align = targetm.constant_alignment (exp, align);
351
352 known_alignment = true;
353 }
354
355 /* If there is a non-constant offset part extract the maximum
356 alignment that can prevail. */
357 if (offset)
358 {
359 unsigned int trailing_zeros = tree_ctz (offset);
360 if (trailing_zeros < HOST_BITS_PER_INT)
361 {
362 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
363 if (inner)
364 align = MIN (align, inner);
365 }
366 }
367
368 /* Account for the alignment of runtime coefficients, so that the constant
369 bitpos is guaranteed to be accurate. */
370 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
371 if (alt_align != 0 && alt_align < align)
372 {
373 align = alt_align;
374 known_alignment = false;
375 }
376
377 *alignp = align;
378 *bitposp = bitpos.coeffs[0] & (align - 1);
379 return known_alignment;
380}
381
382/* For a memory reference expression EXP compute values M and N such that M
383 divides (&EXP - N) and such that N < M. If these numbers can be determined,
384 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
385 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
386
387bool
388get_object_alignment_1 (tree exp, unsigned int *alignp,
389 unsigned HOST_WIDE_INT *bitposp)
390{
391 /* Strip a WITH_SIZE_EXPR, get_inner_reference doesn't know how to deal
392 with it. */
393 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
394 exp = TREE_OPERAND (exp, 0);
395 return get_object_alignment_2 (exp, alignp, bitposp, false);
396}
397
398/* Return the alignment in bits of EXP, an object. */
399
400unsigned int
401get_object_alignment (tree exp)
402{
403 unsigned HOST_WIDE_INT bitpos = 0;
404 unsigned int align;
405
406 get_object_alignment_1 (exp, &align, &bitpos);
407
408 /* align and bitpos now specify known low bits of the pointer.
409 ptr & (align - 1) == bitpos. */
410
411 if (bitpos != 0)
412 align = least_bit_hwi (bitpos);
413 return align;
414}
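
/* Worked example: if get_object_alignment_1 reports a 128-bit alignment
   together with a 32-bit bit-offset, the adjustment above reduces the
   result to least_bit_hwi (32) == 32, i.e. only 4-byte alignment is
   actually guaranteed for the object's address.  */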
415
416/* For a pointer valued expression EXP compute values M and N such that M
417 divides (EXP - N) and such that N < M. If these numbers can be determined,
418 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
419 the results are just a conservative approximation.
420
421 If EXP is not a pointer, false is returned too. */
422
423bool
424get_pointer_alignment_1 (tree exp, unsigned int *alignp,
425 unsigned HOST_WIDE_INT *bitposp)
426{
427 STRIP_NOPS (exp);
428
429 if (TREE_CODE (exp) == ADDR_EXPR)
430 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
431 alignp, bitposp, true);
432 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
433 {
434 unsigned int align;
435 unsigned HOST_WIDE_INT bitpos;
436 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
437 &align, &bitpos);
438 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
439 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
440 else
441 {
442 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
443 if (trailing_zeros < HOST_BITS_PER_INT)
444 {
445 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
446 if (inner)
447 align = MIN (align, inner);
448 }
449 }
450 *alignp = align;
451 *bitposp = bitpos & (align - 1);
452 return res;
453 }
454 else if (TREE_CODE (exp) == SSA_NAME
455 && POINTER_TYPE_P (TREE_TYPE (exp)))
456 {
457 unsigned int ptr_align, ptr_misalign;
458 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
459
460 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
461 {
462 *bitposp = ptr_misalign * BITS_PER_UNIT;
463 *alignp = ptr_align * BITS_PER_UNIT;
464 /* Make sure to return a sensible alignment when the multiplication
465 by BITS_PER_UNIT overflowed. */
466 if (*alignp == 0)
467 *alignp = 1u << (HOST_BITS_PER_INT - 1);
468 /* We cannot really tell whether this result is an approximation. */
469 return false;
470 }
471 else
472 {
473 *bitposp = 0;
474 *alignp = BITS_PER_UNIT;
475 return false;
476 }
477 }
478 else if (TREE_CODE (exp) == INTEGER_CST)
479 {
480 *alignp = BIGGEST_ALIGNMENT;
481 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
482 & (BIGGEST_ALIGNMENT - 1));
483 return true;
484 }
485
486 *bitposp = 0;
487 *alignp = BITS_PER_UNIT;
488 return false;
489}
490
491/* Return the alignment in bits of EXP, a pointer valued expression.
492 The alignment returned is, by default, the alignment of the thing that
493 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
494
495 Otherwise, look at the expression to see if we can do better, i.e., if the
496 expression is actually pointing at an object whose alignment is tighter. */
497
498unsigned int
499get_pointer_alignment (tree exp)
500{
501 unsigned HOST_WIDE_INT bitpos = 0;
502 unsigned int align;
503
504 get_pointer_alignment_1 (exp, &align, &bitpos);
505
506 /* align and bitpos now specify known low bits of the pointer.
507 ptr & (align - 1) == bitpos. */
508
509 if (bitpos != 0)
510 align = least_bit_hwi (bitpos);
511
512 return align;
513}
514
515/* Return the number of leading non-zero elements in the sequence
516 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
517 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
518
519unsigned
520string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
521{
522 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
523
524 unsigned n;
525
526 if (eltsize == 1)
527 {
528 /* Optimize the common case of plain char. */
529 for (n = 0; n < maxelts; n++)
530 {
531 const char *elt = (const char*) ptr + n;
532 if (!*elt)
533 break;
534 }
535 }
536 else
537 {
538 for (n = 0; n < maxelts; n++)
539 {
540 const char *elt = (const char*) ptr + n * eltsize;
541 if (!memcmp (elt, "\0\0\0\0", eltsize))
542 break;
543 }
544 }
545 return n;
546}
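
/* Worked example: for PTR pointing at the bytes "ab\0cd" with ELTSIZE 1,
   string_length (ptr, 1, 5) stops at the embedded NUL and returns 2;
   string_length (ptr, 1, 2) also returns 2, but only because MAXELTS was
   exhausted before a NUL was seen.  */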
547
548/* Compute the length of a null-terminated character string or wide
549 character string handling character sizes of 1, 2, and 4 bytes.
550 TREE_STRING_LENGTH is not the right way because it evaluates to
551 the size of the character array in bytes (as opposed to characters)
552 and because it can contain a zero byte in the middle.
553
554 ONLY_VALUE should be nonzero if the result is not going to be emitted
555 into the instruction stream and zero if it is going to be expanded.
556 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
557 is returned, otherwise NULL, since
558 len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
559 evaluate the side-effects.
560
561 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
562 accesses. Note that this implies the result is not going to be emitted
563 into the instruction stream.
564
565 Additional information about the string accessed may be recorded
566 in DATA. For example, if ARG references an unterminated string,
567 then the declaration will be stored in the DECL field. If the
568 length of the unterminated string can be determined, it'll be
569 stored in the LEN field. Note this length could well be different
570 than what a C strlen call would return.
571
572 ELTSIZE is 1 for normal single byte character strings, and 2 or
573 4 for wide character strings. ELTSIZE is by default 1.
574
575 The value returned is of type `ssizetype'. */
576
577tree
578c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
579{
580 /* If we were not passed a DATA pointer, then get one to a local
581 structure. That avoids having to check DATA for NULL before
582 each time we want to use it. */
583 c_strlen_data local_strlen_data = { };
584 if (!data)
585 data = &local_strlen_data;
586
587 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
588
589 tree src = STRIP_NOPS (arg);
590 if (TREE_CODE (src) == COND_EXPR
591 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
592 {
593 tree len1, len2;
594
595 len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
596 len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
597 if (tree_int_cst_equal (len1, len2))
598 return len1;
599 }
600
601 if (TREE_CODE (src) == COMPOUND_EXPR
602 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
603 return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
604
605 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
606
607 /* Offset from the beginning of the string in bytes. */
608 tree byteoff;
609 tree memsize;
610 tree decl;
611 src = string_constant (src, &byteoff, &memsize, &decl);
612 if (src == 0)
613 return NULL_TREE;
614
615 /* Determine the size of the string element. */
616 if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
617 return NULL_TREE;
618
619 /* Set MAXELTS to ARRAY_SIZE (SRC) - 1, the maximum possible
620 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
621 in case the latter is less than the size of the array, such as when
622 SRC refers to a short string literal used to initialize a large array.
623 In that case, the elements of the array after the terminating NUL are
624 all NUL. */
625 HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
626 strelts = strelts / eltsize;
627
628 if (!tree_fits_uhwi_p (memsize))
629 return NULL_TREE;
630
631 HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
632
633 /* PTR can point to the byte representation of any string type, including
634 char* and wchar_t*. */
635 const char *ptr = TREE_STRING_POINTER (src);
636
637 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
638 {
639 /* The code below works only for single byte character types. */
640 if (eltsize != 1)
641 return NULL_TREE;
642
643 /* If the string has an internal NUL character followed by any
644 non-NUL characters (e.g., "foo\0bar"), we can't compute
645 the offset to the following NUL if we don't know where to
646 start searching for it. */
647 unsigned len = string_length (ptr, eltsize, strelts);
648
649 /* Return when an embedded null character is found or none at all.
650 In the latter case, set the DECL/LEN field in the DATA structure
651 so that callers may examine them. */
652 if (len + 1 < strelts)
653 return NULL_TREE;
654 else if (len >= maxelts)
655 {
656 data->decl = decl;
657 data->off = byteoff;
658 data->minlen = ssize_int (len);
659 return NULL_TREE;
660 }
661
662 /* For empty strings the result should be zero. */
663 if (len == 0)
664 return ssize_int (0);
665
666 /* We don't know the starting offset, but we do know that the string
667 has no internal zero bytes. If the offset falls within the bounds
668 of the string subtract the offset from the length of the string,
669 and return that. Otherwise the length is zero. Take care to
670 use SAVE_EXPR in case the OFFSET has side-effects. */
671 tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
672 : byteoff;
673 offsave = fold_convert_loc (loc, sizetype, offsave);
674 tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
675 size_int (len));
676 tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
677 offsave);
678 lenexp = fold_convert_loc (loc, ssizetype, lenexp);
679 return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
680 build_zero_cst (ssizetype));
681 }
682
683 /* Offset from the beginning of the string in elements. */
684 HOST_WIDE_INT eltoff;
685
686 /* We have a known offset into the string. Start searching there for
687 a null character if we can represent it as a single HOST_WIDE_INT. */
688 if (byteoff == 0)
689 eltoff = 0;
690 else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
691 eltoff = -1;
692 else
693 eltoff = tree_to_uhwi (byteoff) / eltsize;
694
695 /* If the offset is known to be out of bounds, warn, and call strlen at
696 runtime. */
697 if (eltoff < 0 || eltoff >= maxelts)
698 {
699 /* Suppress multiple warnings for propagated constant strings. */
700 if (only_value != 2
701 && !warning_suppressed_p (arg, OPT_Warray_bounds_)
702 && warning_at (loc, OPT_Warray_bounds_,
703 "offset %qwi outside bounds of constant string",
704 eltoff))
705 {
706 if (decl)
707 inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
708 suppress_warning (arg, OPT_Warray_bounds_);
709 }
710 return NULL_TREE;
711 }
712
713 /* If eltoff is larger than strelts but less than maxelts the
714 string length is zero, since the excess memory will be zero. */
715 if (eltoff > strelts)
716 return ssize_int (0);
717
718 /* Use strlen to search for the first zero byte. Since any strings
719 constructed with build_string will have nulls appended, we win even
720 if we get handed something like (char[4])"abcd".
721
722 Since ELTOFF is our starting index into the string, no further
723 calculation is needed. */
724 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
725 strelts - eltoff);
726
727 /* Don't know what to return if there was no zero termination.
728 Ideally this would turn into a gcc_checking_assert over time.
729 Set DECL/LEN so callers can examine them. */
730 if (len >= maxelts - eltoff)
731 {
732 data->decl = decl;
733 data->off = byteoff;
734 data->minlen = ssize_int (len);
735 return NULL_TREE;
736 }
737
738 return ssize_int (len);
739}
740
741/* Return a constant integer corresponding to target reading
742 GET_MODE_BITSIZE (MODE) bits from string constant STR. If
743 NULL_TERMINATED_P, reading stops at the first '\0' character and all
744 further ones are assumed to be zero; otherwise it reads as many
745 characters as needed. */
746
747rtx
748c_readstr (const char *str, fixed_size_mode mode,
749 bool null_terminated_p/*=true*/)
750{
751 auto_vec<target_unit, MAX_BITSIZE_MODE_ANY_INT / BITS_PER_UNIT> bytes;
752
753 bytes.reserve (GET_MODE_SIZE (mode));
754
755 target_unit ch = 1;
756 for (unsigned int i = 0; i < GET_MODE_SIZE (mode); ++i)
757 {
758 if (ch || !null_terminated_p)
759 ch = (unsigned char) str[i];
760 bytes.quick_push (ch);
761 }
762
763 return native_decode_rtx (mode, bytes, 0);
764}
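
/* Illustrative example (exact value depends on target byte order): for
   STR "ab" and a 4-byte integer MODE, the bytes pushed above are
   { 'a', 'b', 0, 0 }, which native_decode_rtx turns into
   (const_int 0x6261) on a little-endian target and (const_int 0x61620000)
   on a big-endian one.  */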
765
766/* Cast a target constant CST to target CHAR and if that value fits into
767 host char type, return zero and put that value into variable pointed to by
768 P. */
769
770static int
771target_char_cast (tree cst, char *p)
772{
773 unsigned HOST_WIDE_INT val, hostval;
774
775 if (TREE_CODE (cst) != INTEGER_CST
776 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
777 return 1;
778
779 /* Do not care if it fits or not right here. */
780 val = TREE_INT_CST_LOW (cst);
781
782 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
783 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
784
785 hostval = val;
786 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
787 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
788
789 if (val != hostval)
790 return 1;
791
792 *p = hostval;
793 return 0;
794}
795
796/* Similar to save_expr, but assumes that arbitrary code is not executed
797 in between the multiple evaluations. In particular, we assume that a
798 non-addressable local variable will not be modified. */
799
800static tree
801builtin_save_expr (tree exp)
802{
803 if (TREE_CODE (exp) == SSA_NAME
804 || (TREE_ADDRESSABLE (exp) == 0
805 && (TREE_CODE (exp) == PARM_DECL
806 || (VAR_P (exp) && !TREE_STATIC (exp)))))
807 return exp;
808
809 return save_expr (exp);
810}
811
812/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
813 times to get the address of either a higher stack frame, or a return
814 address located within it (depending on FNDECL_CODE). */
815
816static rtx
817expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
818{
819 int i;
820 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
821 if (tem == NULL_RTX)
822 {
823 /* For a zero count with __builtin_return_address, we don't care what
824 frame address we return, because target-specific definitions will
825 override us. Therefore frame pointer elimination is OK, and using
826 the soft frame pointer is OK.
827
828 For a nonzero count, or a zero count with __builtin_frame_address,
829 we require a stable offset from the current frame pointer to the
830 previous one, so we must use the hard frame pointer, and
831 we must disable frame pointer elimination. */
832 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
833 tem = frame_pointer_rtx;
834 else
835 {
836 tem = hard_frame_pointer_rtx;
837
838 /* Tell reload not to eliminate the frame pointer. */
839 crtl->accesses_prior_frames = 1;
840 }
841 }
842
843 if (count > 0)
844 SETUP_FRAME_ADDRESSES ();
845
846 /* On the SPARC, the return address is not in the frame, it is in a
847 register. There is no way to access it off of the current frame
848 pointer, but it can be accessed off the previous frame pointer by
849 reading the value from the register window save area. */
850 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
851 count--;
852
853 /* Scan back COUNT frames to the specified frame. */
854 for (i = 0; i < count; i++)
855 {
856 /* Assume the dynamic chain pointer is in the word that the
857 frame address points to, unless otherwise specified. */
858 tem = DYNAMIC_CHAIN_ADDRESS (tem);
859 tem = memory_address (Pmode, tem);
860 tem = gen_frame_mem (Pmode, tem);
861 tem = copy_to_reg (tem);
862 }
863
864 /* For __builtin_frame_address, return what we've got. But, on
865 the SPARC for example, we may have to add a bias. */
866 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
867 return FRAME_ADDR_RTX (tem);
868
869 /* For __builtin_return_address, get the return address from that frame. */
870#ifdef RETURN_ADDR_RTX
871 tem = RETURN_ADDR_RTX (count, tem);
872#else
873 tem = memory_address (Pmode,
874 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
875 tem = gen_frame_mem (Pmode, tem);
876#endif
877 return tem;
878}
879
880/* Alias set used for setjmp buffer. */
881static alias_set_type setjmp_alias_set = -1;
882
883/* Construct the leading half of a __builtin_setjmp call. Control will
884 return to RECEIVER_LABEL. This is also called directly by the SJLJ
885 exception handling code. */
886
887void
888expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
889{
890 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
891 rtx stack_save;
892 rtx mem;
893
894 if (setjmp_alias_set == -1)
895 setjmp_alias_set = new_alias_set ();
896
897 buf_addr = convert_memory_address (Pmode, buf_addr);
898
899 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
900
901 /* We store the frame pointer and the address of receiver_label in
902 the buffer and use the rest of it for the stack save area, which
903 is machine-dependent. */
904
905 mem = gen_rtx_MEM (Pmode, buf_addr);
906 set_mem_alias_set (mem, setjmp_alias_set);
907 emit_move_insn (mem, hard_frame_pointer_rtx);
908
909 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
910 GET_MODE_SIZE (Pmode)));
911 set_mem_alias_set (mem, setjmp_alias_set);
912
913 emit_move_insn (validize_mem (mem),
914 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
915
916 stack_save = gen_rtx_MEM (sa_mode,
917 plus_constant (Pmode, buf_addr,
918 2 * GET_MODE_SIZE (Pmode)));
919 set_mem_alias_set (stack_save, setjmp_alias_set);
920 emit_stack_save (SAVE_NONLOCAL, &stack_save);
921
922 /* If there is further processing to do, do it. */
923 if (targetm.have_builtin_setjmp_setup ())
924 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
925
926 /* We have a nonlocal label. */
927 cfun->has_nonlocal_label = 1;
928}
929
930/* Construct the trailing part of a __builtin_setjmp call. This is
931 also called directly by the SJLJ exception handling code.
932 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
933
934void
935expand_builtin_setjmp_receiver (rtx receiver_label)
936{
937 rtx chain;
938
939 /* Mark the FP as used when we get here, so we have to make sure it's
940 marked as used by this function. */
941 emit_use (hard_frame_pointer_rtx);
942
943 /* Mark the static chain as clobbered here so life information
944 doesn't get messed up for it. */
945 chain = rtx_for_static_chain (current_function_decl, true);
946 if (chain && REG_P (chain))
947 emit_clobber (chain);
948
949 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
950 {
951 /* If the argument pointer can be eliminated in favor of the
952 frame pointer, we don't need to restore it. We assume here
953 that if such an elimination is present, it can always be used.
954 This is the case on all known machines; if we don't make this
955 assumption, we do unnecessary saving on many machines. */
956 size_t i;
957 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
958
959 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
960 if (elim_regs[i].from == ARG_POINTER_REGNUM
961 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
962 break;
963
964 if (i == ARRAY_SIZE (elim_regs))
965 {
966 /* Now restore our arg pointer from the address at which it
967 was saved in our stack frame. */
968 emit_move_insn (crtl->args.internal_arg_pointer,
969 copy_to_reg (get_arg_pointer_save_area ()));
970 }
971 }
972
973 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
974 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
975 else if (targetm.have_nonlocal_goto_receiver ())
976 emit_insn (targetm.gen_nonlocal_goto_receiver ());
977 else
978 { /* Nothing */ }
979
980 /* We must not allow the code we just generated to be reordered by
981 scheduling. Specifically, the update of the frame pointer must
982 happen immediately, not later. */
983 emit_insn (gen_blockage ());
984}
985
986/* __builtin_longjmp is passed a pointer to an array of five words (not
987 all will be used on all machines). It operates similarly to the C
988 library function of the same name, but is more efficient. Much of
989 the code below is copied from the handling of non-local gotos. */
990
991static void
992expand_builtin_longjmp (rtx buf_addr, rtx value)
993{
994 rtx fp, lab, stack;
995 rtx_insn *insn, *last;
996 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
997
998 /* DRAP is needed for stack realign if longjmp is expanded to current
999 function */
1000 if (SUPPORTS_STACK_ALIGNMENT)
1001 crtl->need_drap = true;
1002
1003 if (setjmp_alias_set == -1)
1004 setjmp_alias_set = new_alias_set ();
1005
1006 buf_addr = convert_memory_address (Pmode, buf_addr);
1007
1008 buf_addr = force_reg (Pmode, buf_addr);
1009
1010 /* We require the user to pass a second argument of 1, because
1011 that is what builtin_setjmp will return. */
1012 gcc_assert (value == const1_rtx);
1013
1014 last = get_last_insn ();
1015 if (targetm.have_builtin_longjmp ())
1016 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1017 else
1018 {
1019 fp = gen_rtx_MEM (Pmode, buf_addr);
1020 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1021 GET_MODE_SIZE (Pmode)));
1022
1023 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1024 2 * GET_MODE_SIZE (Pmode)));
1025 set_mem_alias_set (fp, setjmp_alias_set);
1026 set_mem_alias_set (lab, setjmp_alias_set);
1027 set_mem_alias_set (stack, setjmp_alias_set);
1028
1029 /* Pick up FP, label, and SP from the block and jump. This code is
1030 from expand_goto in stmt.cc; see there for detailed comments. */
1031 if (targetm.have_nonlocal_goto ())
1032 /* We have to pass a value to the nonlocal_goto pattern that will
1033 get copied into the static_chain pointer, but it does not matter
1034 what that value is, because builtin_setjmp does not use it. */
1035 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1036 else
1037 {
1038 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1039 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1040
1041 lab = copy_to_reg (lab);
1042
1043 /* Restore the frame pointer and stack pointer. We must use a
1044 temporary since the setjmp buffer may be a local. */
1045 fp = copy_to_reg (fp);
1046 emit_stack_restore (SAVE_NONLOCAL, stack);
1047
1048 /* Ensure the frame pointer move is not optimized. */
1049 emit_insn (gen_blockage ());
1050 emit_clobber (hard_frame_pointer_rtx);
1051 emit_clobber (frame_pointer_rtx);
1052 emit_move_insn (hard_frame_pointer_rtx, fp);
1053
1054 emit_use (hard_frame_pointer_rtx);
1055 emit_use (stack_pointer_rtx);
1056 emit_indirect_jump (lab);
1057 }
1058 }
1059
1060 /* Search backwards and mark the jump insn as a non-local goto.
1061 Note that this precludes the use of __builtin_longjmp to a
1062 __builtin_setjmp target in the same function. However, we've
1063 already cautioned the user that these functions are for
1064 internal exception handling use only. */
1065 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1066 {
1067 gcc_assert (insn != last);
1068
1069 if (JUMP_P (insn))
1070 {
1071 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1072 break;
1073 }
1074 else if (CALL_P (insn))
1075 break;
1076 }
1077}
1078
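/* Return true if the argument iterator ITER still has call arguments
   left to visit.  */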
1079static inline bool
1080more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1081{
1082 return (iter->i < iter->n);
1083}
1084
1085/* This function validates the types of a function call argument list
1086 against a specified list of tree_codes. If the last specifier is a 0,
1087 that represents an ellipsis; otherwise the last specifier must be a
1088 VOID_TYPE. */
1089
1090static bool
1091validate_arglist (const_tree callexpr, ...)
1092{
1093 enum tree_code code;
1094 bool res = false;
1095 va_list ap;
1096 const_call_expr_arg_iterator iter;
1097 const_tree arg;
1098
1099 va_start (ap, callexpr);
1100 init_const_call_expr_arg_iterator (callexpr, &iter);
1101
1102 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1103 tree fn = CALL_EXPR_FN (callexpr);
1104 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1105
1106 for (unsigned argno = 1; ; ++argno)
1107 {
1108 code = (enum tree_code) va_arg (ap, int);
1109
1110 switch (code)
1111 {
1112 case 0:
1113 /* This signifies an ellipsis; any further arguments are all ok. */
1114 res = true;
1115 goto end;
1116 case VOID_TYPE:
1117 /* This signifies an endlink, if no arguments remain, return
1118 true, otherwise return false. */
1119 res = !more_const_call_expr_args_p (&iter);
1120 goto end;
1121 case POINTER_TYPE:
1122 /* The actual argument must be nonnull when either the whole
1123 called function has been declared nonnull, or when the formal
1124 argument corresponding to the actual argument has been. */
1125 if (argmap
1126 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1127 {
1128 arg = next_const_call_expr_arg (&iter);
1129 if (!validate_arg (arg, code) || integer_zerop (arg))
1130 goto end;
1131 break;
1132 }
1133 /* FALLTHRU */
1134 default:
1135 /* If no parameters remain or the parameter's code does not
1136 match the specified code, return false. Otherwise continue
1137 checking any remaining arguments. */
1138 arg = next_const_call_expr_arg (&iter);
1139 if (!validate_arg (arg, code))
1140 goto end;
1141 break;
1142 }
1143 }
1144
1145 /* We need gotos here since we can only have one VA_CLOSE in a
1146 function. */
1147 end: ;
1148 va_end (ap);
1149
1150 BITMAP_FREE (argmap);
1151
1152 if (res)
1153 for (tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (TREE_TYPE (fn)));
1154 (attrs = lookup_attribute ("nonnull_if_nonzero", attrs));
1155 attrs = TREE_CHAIN (attrs))
1156 {
1157 tree args = TREE_VALUE (attrs);
1158 unsigned int idx = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1;
1159 unsigned int idx2
1160 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args))) - 1;
1161 if (idx < (unsigned) call_expr_nargs (callexpr)
1162 && idx2 < (unsigned) call_expr_nargs (callexpr)
1163 && POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (callexpr, idx)))
1164 && integer_zerop (CALL_EXPR_ARG (callexpr, idx))
1165 && INTEGRAL_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (callexpr, idx2)))
1166 && integer_nonzerop (CALL_EXPR_ARG (callexpr, idx2)))
1167 return false;
1168 }
1169
1170 return res;
1171}
1172
1173/* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1174 and the address of the save area. */
1175
1176static rtx
1177expand_builtin_nonlocal_goto (tree exp)
1178{
1179 tree t_label, t_save_area;
1180 rtx r_label, r_save_area, r_fp, r_sp;
1181 rtx_insn *insn;
1182
1183 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1184 return NULL_RTX;
1185
1186 t_label = CALL_EXPR_ARG (exp, 0);
1187 t_save_area = CALL_EXPR_ARG (exp, 1);
1188
1189 r_label = expand_normal (t_label);
1190 r_label = convert_memory_address (Pmode, r_label);
1191 r_save_area = expand_normal (t_save_area);
1192 r_save_area = convert_memory_address (Pmode, r_save_area);
1193 /* Copy the address of the save location to a register just in case it was
1194 based on the frame pointer. */
1195 r_save_area = copy_to_reg (r_save_area);
1196 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1197 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1198 plus_constant (Pmode, r_save_area,
1199 GET_MODE_SIZE (Pmode)));
1200
1201 crtl->has_nonlocal_goto = 1;
1202
1203 /* ??? We no longer need to pass the static chain value, afaik. */
1204 if (targetm.have_nonlocal_goto ())
1205 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1206 else
1207 {
1208 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1209 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1210
1211 r_label = copy_to_reg (r_label);
1212
1213 /* Restore the frame pointer and stack pointer. We must use a
1214 temporary since the setjmp buffer may be a local. */
1215 r_fp = copy_to_reg (r_fp);
1216 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1217
1218 /* Ensure the frame pointer move is not optimized. */
1219 emit_insn (gen_blockage ());
1220 emit_clobber (hard_frame_pointer_rtx);
1221 emit_clobber (frame_pointer_rtx);
1222 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1223
1224 /* USE of hard_frame_pointer_rtx added for consistency;
1225 not clear if really needed. */
1226 emit_use (hard_frame_pointer_rtx);
1227 emit_use (stack_pointer_rtx);
1228
1229 /* If the architecture is using a GP register, we must
1230 conservatively assume that the target function makes use of it.
1231 The prologue of functions with nonlocal gotos must therefore
1232 initialize the GP register to the appropriate value, and we
1233 must then make sure that this value is live at the point
1234 of the jump. (Note that this doesn't necessarily apply
1235 to targets with a nonlocal_goto pattern; they are free
1236 to implement it in their own way. Note also that this is
1237 a no-op if the GP register is a global invariant.) */
1238 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1239 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1240 emit_use (pic_offset_table_rtx);
1241
1242 emit_indirect_jump (r_label);
1243 }
1244
1245 /* Search backwards to the jump insn and mark it as a
1246 non-local goto. */
1247 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1248 {
1249 if (JUMP_P (insn))
1250 {
1251 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1252 break;
1253 }
1254 else if (CALL_P (insn))
1255 break;
1256 }
1257
1258 return const0_rtx;
1259}
1260
1261/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1262 (not all will be used on all machines) that was passed to __builtin_setjmp.
1263 It updates the stack pointer in that block to the current value. This is
1264 also called directly by the SJLJ exception handling code. */
1265
1266void
1267expand_builtin_update_setjmp_buf (rtx buf_addr)
1268{
1269 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1270 buf_addr = convert_memory_address (Pmode, buf_addr);
1271 rtx stack_save
1272 = gen_rtx_MEM (sa_mode,
1273 memory_address
1274 (sa_mode,
1275 plus_constant (Pmode, buf_addr,
1276 2 * GET_MODE_SIZE (Pmode))));
1277
1278 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1279}
1280
1281/* Expand a call to __builtin_prefetch. For a target that does not support
1282 data prefetch, evaluate the memory address argument in case it has side
1283 effects. */
1284
1285static void
1286expand_builtin_prefetch (tree exp)
1287{
1288 tree arg0, arg1, arg2;
1289 int nargs;
1290 rtx op0, op1, op2;
1291
1292 if (!validate_arglist (exp, POINTER_TYPE, 0))
1293 return;
1294
1295 arg0 = CALL_EXPR_ARG (exp, 0);
1296
1297 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1298 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1299 locality). */
1300 nargs = call_expr_nargs (exp);
1301 arg1 = nargs > 1 ? CALL_EXPR_ARG (exp, 1) : NULL_TREE;
1302 arg2 = nargs > 2 ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
1303
1304 /* Argument 0 is an address. */
1305 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1306
1307 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1308 if (arg1 == NULL_TREE)
1309 op1 = const0_rtx;
1310 else if (TREE_CODE (arg1) != INTEGER_CST)
1311 {
1312 error ("second argument to %<__builtin_prefetch%> must be a constant");
1313 op1 = const0_rtx;
1314 }
1315 else
1316 op1 = expand_normal (arg1);
1317 /* Argument 1 must be 0, 1 or 2. */
1318 if (!IN_RANGE (INTVAL (op1), 0, 2))
1319 {
1320 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1321 " using zero");
1322 op1 = const0_rtx;
1323 }
1324
1325 /* Argument 2 (locality) must be a compile-time constant int. */
1326 if (arg2 == NULL_TREE)
1327 op2 = GEN_INT (3);
1328 else if (TREE_CODE (arg2) != INTEGER_CST)
1329 {
1330 error ("third argument to %<__builtin_prefetch%> must be a constant");
1331 op2 = const0_rtx;
1332 }
1333 else
1334 op2 = expand_normal (arg2);
1335 /* Argument 2 must be 0, 1, 2, or 3. */
1336 if (!IN_RANGE (INTVAL (op2), 0, 3))
1337 {
1338 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1339 op2 = const0_rtx;
1340 }
1341
1342 if (targetm.have_prefetch ())
1343 {
1344 class expand_operand ops[3];
1345
1346 create_address_operand (&ops[0], op0);
1347 create_integer_operand (&ops[1], INTVAL (op1));
1348 create_integer_operand (&ops[2], INTVAL (op2));
1349 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1350 return;
1351 }
1352
1353 /* Don't do anything with direct references to volatile memory, but
1354 generate code to handle other side effects. */
1355 if (!MEM_P (op0) && side_effects_p (op0))
1356 emit_insn (op0);
1357}
1358
1359/* Get a MEM rtx for expression EXP which is the address of an operand
1360 to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is
1361 the maximum length of the block of memory that might be accessed or
1362 NULL if unknown. */
1363
1364rtx
1365get_memory_rtx (tree exp, tree len)
1366{
1367 tree orig_exp = exp, base;
1368 rtx addr, mem;
1369
1370 gcc_checking_assert
1371 (ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)))));
1372
1373 /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
1374 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1375 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1376 exp = TREE_OPERAND (exp, 0);
1377
1378 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1379 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1380
1381 /* Get an expression we can use to find the attributes to assign to MEM.
1382 First remove any nops. */
1383 while (CONVERT_EXPR_P (exp)
1384 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1385 exp = TREE_OPERAND (exp, 0);
1386
1387 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1388 (as builtin stringops may alias with anything). */
1389 exp = fold_build2 (MEM_REF,
1390 build_array_type (char_type_node,
1391 build_range_type (sizetype,
1392 size_one_node, len)),
1393 exp, build_int_cst (ptr_type_node, 0));
1394
1395 /* If the MEM_REF has no acceptable address, try to get the base object
1396 from the original address we got, and build an all-aliasing
1397 unknown-sized access to that one. */
1398 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1399 set_mem_attributes (mem, exp, 0);
1400 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1401 && (base = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1402 0))))
1403 {
1404 unsigned int align = get_pointer_alignment (TREE_OPERAND (exp, 0));
1405 exp = build_fold_addr_expr (base);
1406 exp = fold_build2 (MEM_REF,
1407 build_array_type (char_type_node,
1408 build_range_type (sizetype,
1409 size_zero_node,
1410 NULL)),
1411 exp, build_int_cst (ptr_type_node, 0));
1412 set_mem_attributes (mem, exp, 0);
1413 /* Since we stripped parts make sure the offset is unknown and the
1414 alignment is computed from the original address. */
1415 clear_mem_offset (mem);
1416 set_mem_align (mem, align);
1417 }
1418 set_mem_alias_set (mem, 0);
1419 return mem;
1420}
1421
1422/* Built-in functions to perform an untyped call and return. */
1423
1424/* Wrapper that implicitly applies a delta when getting or setting the
1425 enclosed value. */
1426template <typename T>
1427class delta_type
1428{
1429 T &value; T const delta;
1430public:
1431 delta_type (T &val, T dlt) : value (val), delta (dlt) {}
1432 operator T () const { return value + delta; }
1433 T operator = (T val) const { value = val - delta; return val; }
1434};
1435
1436#define saved_apply_args_size \
1437 (delta_type<int> (this_target_builtins->x_apply_args_size_plus_one, -1))
1438#define apply_args_mode \
1439 (this_target_builtins->x_apply_args_mode)
1440#define saved_apply_result_size \
1441 (delta_type<int> (this_target_builtins->x_apply_result_size_plus_one, -1))
1442#define apply_result_mode \
1443 (this_target_builtins->x_apply_result_mode)
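
/* The x_*_size_plus_one fields wrapped above store SIZE + 1, so a
   zero-initialized target_builtins structure reads back as -1 through
   saved_apply_args_size / saved_apply_result_size; apply_args_size and
   apply_result_size below use that negative value as the "not yet
   computed" marker.  */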
1444
1445/* Return the size required for the block returned by __builtin_apply_args,
1446 and initialize apply_args_mode. */
1447
1448static int
1449apply_args_size (void)
1450{
1451 int size = saved_apply_args_size;
1452 int align;
1453 unsigned int regno;
1454
1455 /* The values computed by this function never change. */
1456 if (size < 0)
1457 {
1458 /* The first value is the incoming arg-pointer. */
1459 size = GET_MODE_SIZE (Pmode);
1460
1461 /* The second value is the structure value address unless this is
1462 passed as an "invisible" first argument. */
1463 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1464 size += GET_MODE_SIZE (Pmode);
1465
1466 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1467 if (FUNCTION_ARG_REGNO_P (regno))
1468 {
1469 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1470
1471 if (mode != VOIDmode)
1472 {
1473 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1474 if (size % align != 0)
1475 size = CEIL (size, align) * align;
1476 size += GET_MODE_SIZE (mode);
1477 apply_args_mode[regno] = mode;
1478 }
1479 else
1480 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1481 }
1482 else
1483 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1484
1485 saved_apply_args_size = size;
1486 }
1487 return size;
1488}
1489
1490/* Return the size required for the block returned by __builtin_apply,
1491 and initialize apply_result_mode. */
1492
1493static int
1494apply_result_size (void)
1495{
1496 int size = saved_apply_result_size;
1497 int align, regno;
1498
1499 /* The values computed by this function never change. */
1500 if (size < 0)
1501 {
1502 size = 0;
1503
1504 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1505 if (targetm.calls.function_value_regno_p (regno))
1506 {
1507 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1508
1509 if (mode != VOIDmode)
1510 {
1511 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1512 if (size % align != 0)
1513 size = CEIL (size, align) * align;
1514 size += GET_MODE_SIZE (mode);
1515 apply_result_mode[regno] = mode;
1516 }
1517 else
1518 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1519 }
1520 else
1521 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1522
1523 /* Allow targets that use untyped_call and untyped_return to override
1524 the size so that machine-specific information can be stored here. */
1525#ifdef APPLY_RESULT_SIZE
1526 size = APPLY_RESULT_SIZE;
1527#endif
1528
1529 saved_apply_result_size = size;
1530 }
1531 return size;
1532}
1533
1534/* Create a vector describing the result block RESULT. If SAVEP is true,
1535 the result block is used to save the values; otherwise it is used to
1536 restore the values. */
1537
1538static rtx
1539result_vector (int savep, rtx result)
1540{
1541 int regno, size, align, nelts;
1542 fixed_size_mode mode;
1543 rtx reg, mem;
1544 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1545
1546 size = nelts = 0;
1547 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1548 if ((mode = apply_result_mode[regno]) != VOIDmode)
1549 {
1550 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1551 if (size % align != 0)
1552 size = CEIL (size, align) * align;
1553 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1554 mem = adjust_address (result, mode, size);
1555 savevec[nelts++] = (savep
1556 ? gen_rtx_SET (mem, reg)
1557 : gen_rtx_SET (reg, mem));
1558 size += GET_MODE_SIZE (mode);
1559 }
1560 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1561}
1562
1563/* Save the state required to perform an untyped call with the same
1564 arguments as were passed to the current function. */
1565
1566static rtx
1567expand_builtin_apply_args_1 (void)
1568{
1569 rtx registers, tem;
1570 int size, align, regno;
1571 fixed_size_mode mode;
1572 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1573
1574 /* Create a block where the arg-pointer, structure value address,
1575 and argument registers can be saved. */
1576 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1577
1578 /* Walk past the arg-pointer and structure value address. */
1579 size = GET_MODE_SIZE (Pmode);
1580 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1581 size += GET_MODE_SIZE (Pmode);
1582
1583 /* Save each register used in calling a function to the block. */
1584 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1585 if ((mode = apply_args_mode[regno]) != VOIDmode)
1586 {
1587 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1588 if (size % align != 0)
1589 size = CEIL (size, align) * align;
1590
1591 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1592
1593 emit_move_insn (adjust_address (registers, mode, size), tem);
1594 size += GET_MODE_SIZE (mode);
1595 }
1596
1597 /* Save the arg pointer to the block. */
1598 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1599 /* We need the pointer as the caller actually passed them to us, not
1600 as we might have pretended they were passed. Make sure it's a valid
1601 operand, as emit_move_insn isn't expected to handle a PLUS. */
1602 if (STACK_GROWS_DOWNWARD)
1603 tem
1604 = force_operand (plus_constant (Pmode, tem,
1605 crtl->args.pretend_args_size),
1606 NULL_RTX);
1607 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1608
1609 size = GET_MODE_SIZE (Pmode);
1610
1611 /* Save the structure value address unless this is passed as an
1612 "invisible" first argument. */
1613 if (struct_incoming_value)
1614 emit_move_insn (adjust_address (registers, Pmode, size),
1615 copy_to_reg (struct_incoming_value));
1616
1617 /* Return the address of the block. */
1618 return copy_addr_to_reg (XEXP (registers, 0));
1619}
1620
1621/* __builtin_apply_args returns block of memory allocated on
1622 the stack into which is stored the arg pointer, structure
1623 value address, static chain, and all the registers that might
1624 possibly be used in performing a function call. The code is
1625 moved to the start of the function so the incoming values are
1626 saved. */
1627
1628static rtx
1629expand_builtin_apply_args (void)
1630{
1631 /* Don't do __builtin_apply_args more than once in a function.
1632 Save the result of the first call and reuse it. */
1633 if (apply_args_value != 0)
1634 return apply_args_value;
1635 {
1636 /* When this function is called, it means that registers must be
1637 saved on entry to this function. So we migrate the
1638 call to the first insn of this function. */
1639 rtx temp;
1640
1641 start_sequence ();
1642 temp = expand_builtin_apply_args_1 ();
1643 rtx_insn *seq = end_sequence ();
1644
1645 apply_args_value = temp;
1646
1647 /* Put the insns after the NOTE that starts the function.
1648 If this is inside a start_sequence, make the outer-level insn
1649 chain current, so the code is placed at the start of the
1650 function. If internal_arg_pointer is a non-virtual pseudo,
1651       it needs to be placed after the insn that initializes
1652 that pseudo. */
1653 push_topmost_sequence ();
1654 if (REG_P (crtl->args.internal_arg_pointer)
1655 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1656 emit_insn_before (seq, parm_birth_insn);
1657 else
1658      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1659 pop_topmost_sequence ();
1660 return temp;
1661 }
1662}
1663
1664/* Perform an untyped call and save the state required to perform an
1665 untyped return of whatever value was returned by the given function. */
1666
1667static rtx
1668expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1669{
1670 int size, align, regno;
1671 fixed_size_mode mode;
1672 rtx incoming_args, result, reg, dest, src;
1673 rtx_call_insn *call_insn;
1674 rtx old_stack_level = 0;
1675 rtx call_fusage = 0;
1676 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1677
1678 arguments = convert_memory_address (Pmode, arguments);
1679
1680 /* Create a block where the return registers can be saved. */
1681 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1682
1683 /* Fetch the arg pointer from the ARGUMENTS block. */
1684 incoming_args = gen_reg_rtx (Pmode);
1685 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1686 if (!STACK_GROWS_DOWNWARD)
1687 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1688 incoming_args, 0, OPTAB_LIB_WIDEN);
1689
1690 /* Push a new argument block and copy the arguments. Do not allow
1691 the (potential) memcpy call below to interfere with our stack
1692 manipulations. */
1693 do_pending_stack_adjust ();
1694 NO_DEFER_POP;
1695
1696 /* Save the stack with nonlocal if available. */
1697 if (targetm.have_save_stack_nonlocal ())
1698 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1699 else
1700 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1701
1702 /* Allocate a block of memory onto the stack and copy the memory
1703 arguments to the outgoing arguments address. We can pass TRUE
1704 as the 4th argument because we just saved the stack pointer
1705 and will restore it right after the call. */
1706 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1707
1708 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1709 may have already set current_function_calls_alloca to true.
1710 current_function_calls_alloca won't be set if argsize is zero,
1711 so we have to guarantee need_drap is true here. */
1712 if (SUPPORTS_STACK_ALIGNMENT)
1713 crtl->need_drap = true;
1714
1715 dest = virtual_outgoing_args_rtx;
1716 if (!STACK_GROWS_DOWNWARD)
1717 {
1718 if (CONST_INT_P (argsize))
1719 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1720 else
1721 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1722 }
1723 dest = gen_rtx_MEM (BLKmode, dest);
1724 set_mem_align (dest, PARM_BOUNDARY);
1725 src = gen_rtx_MEM (BLKmode, incoming_args);
1726 set_mem_align (src, PARM_BOUNDARY);
1727 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1728
1729 /* Refer to the argument block. */
1730 apply_args_size ();
1731 arguments = gen_rtx_MEM (BLKmode, arguments);
1732 set_mem_align (arguments, PARM_BOUNDARY);
1733
1734 /* Walk past the arg-pointer and structure value address. */
1735 size = GET_MODE_SIZE (Pmode);
1736 if (struct_value)
1737 size += GET_MODE_SIZE (Pmode);
1738
1739 /* Restore each of the registers previously saved. Make USE insns
1740 for each of these registers for use in making the call. */
1741 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1742 if ((mode = apply_args_mode[regno]) != VOIDmode)
1743 {
1744 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1745 if (size % align != 0)
1746 size = CEIL (size, align) * align;
1747 reg = gen_rtx_REG (mode, regno);
1748 emit_move_insn (reg, adjust_address (arguments, mode, size));
1749	use_reg (&call_fusage, reg);
1750 size += GET_MODE_SIZE (mode);
1751 }
1752
1753 /* Restore the structure value address unless this is passed as an
1754 "invisible" first argument. */
1755 size = GET_MODE_SIZE (Pmode);
1756 if (struct_value)
1757 {
1758 rtx value = gen_reg_rtx (Pmode);
1759 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1760 emit_move_insn (struct_value, value);
1761 if (REG_P (struct_value))
1762	use_reg (&call_fusage, struct_value);
1763 }
1764
1765 /* All arguments and registers used for the call are set up by now! */
1766 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1767
1768 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1769 and we don't want to load it into a register as an optimization,
1770 because prepare_call_address already did it if it should be done. */
1771 if (GET_CODE (function) != SYMBOL_REF)
1772 function = memory_address (FUNCTION_MODE, function);
1773
1774 /* Generate the actual call instruction and save the return value. */
1775 if (targetm.have_untyped_call ())
1776 {
1777 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1778 rtx_insn *seq = targetm.gen_untyped_call (mem, result,
1779						 result_vector (1, result));
1780 for (rtx_insn *insn = seq; insn; insn = NEXT_INSN (insn))
1781 if (CALL_P (insn))
1782 add_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX);
1783 emit_insn (seq);
1784 }
1785 else if (targetm.have_call_value ())
1786 {
1787 rtx valreg = 0;
1788
1789 /* Locate the unique return register. It is not possible to
1790 express a call that sets more than one return register using
1791 call_value; use untyped_call for that. In fact, untyped_call
1792 only needs to save the return registers in the given block. */
1793 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1794 if ((mode = apply_result_mode[regno]) != VOIDmode)
1795 {
1796 gcc_assert (!valreg); /* have_untyped_call required. */
1797
1798 valreg = gen_rtx_REG (mode, regno);
1799 }
1800
1801 emit_insn (targetm.gen_call_value (valreg,
1802 gen_rtx_MEM (FUNCTION_MODE, function),
1803 const0_rtx, NULL_RTX, const0_rtx));
1804
1805 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1806 }
1807 else
1808 gcc_unreachable ();
1809
1810 /* Find the CALL insn we just emitted, and attach the register usage
1811 information. */
1812 call_insn = last_call_insn ();
1813 add_function_usage_to (call_insn, call_fusage);
1814
1815 /* Restore the stack. */
1816 if (targetm.have_save_stack_nonlocal ())
1817 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1818 else
1819 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1820 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1821
1822 OK_DEFER_POP;
1823
1824 /* Return the address of the result block. */
1825 result = copy_addr_to_reg (XEXP (result, 0));
1826 return convert_memory_address (ptr_mode, result);
1827}
1828
1829/* Perform an untyped return. */
1830
1831static void
1832expand_builtin_return (rtx result)
1833{
1834 int size, align, regno;
1835 fixed_size_mode mode;
1836 rtx reg;
1837 rtx_insn *call_fusage = 0;
1838
1839 result = convert_memory_address (Pmode, result);
1840
1841 apply_result_size ();
1842 result = gen_rtx_MEM (BLKmode, result);
1843
1844 if (targetm.have_untyped_return ())
1845 {
1846      rtx vector = result_vector (0, result);
1847 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1848 emit_barrier ();
1849 return;
1850 }
1851
1852 /* Restore the return value and note that each value is used. */
1853 size = 0;
1854 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1855 if ((mode = apply_result_mode[regno]) != VOIDmode)
1856 {
1857 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1858 if (size % align != 0)
1859 size = CEIL (size, align) * align;
1860 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1861 emit_move_insn (reg, adjust_address (result, mode, size));
1862
1863 push_to_sequence (call_fusage);
1864 emit_use (reg);
1865 call_fusage = end_sequence ();
1866 size += GET_MODE_SIZE (mode);
1867 }
1868
1869 /* Put the USE insns before the return. */
1870 emit_insn (call_fusage);
1871
1872  /* Return whatever values were restored by jumping directly to the end
1873 of the function. */
1874 expand_naked_return ();
1875}
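/* Taken together, expand_builtin_apply_args, expand_builtin_apply and
   expand_builtin_return implement GCC's untyped-call extension.  A hedged
   sketch of how the builtins are typically combined in user code (the
   function name target_fn and the 64-byte argument-block bound are purely
   illustrative assumptions, not something this file defines):

     void *args = __builtin_apply_args ();
     void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (ret);

   i.e. capture the incoming registers, forward them to another function,
   and then return whatever that function returned.  */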
1876
1877/* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1878
1879int
1880type_to_class (tree type)
1881{
1882 switch (TREE_CODE (type))
1883 {
1884 case VOID_TYPE: return void_type_class;
1885 case INTEGER_TYPE: return integer_type_class;
1886 case ENUMERAL_TYPE: return enumeral_type_class;
1887 case BOOLEAN_TYPE: return boolean_type_class;
1888 case POINTER_TYPE: return pointer_type_class;
1889 case REFERENCE_TYPE: return reference_type_class;
1890 case OFFSET_TYPE: return offset_type_class;
1891 case REAL_TYPE: return real_type_class;
1892 case COMPLEX_TYPE: return complex_type_class;
1893 case FUNCTION_TYPE: return function_type_class;
1894 case METHOD_TYPE: return method_type_class;
1895 case RECORD_TYPE: return record_type_class;
1896 case UNION_TYPE:
1897 case QUAL_UNION_TYPE: return union_type_class;
1898 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1899 ? string_type_class : array_type_class);
1900 case LANG_TYPE: return lang_type_class;
1901 case OPAQUE_TYPE: return opaque_type_class;
1902 case BITINT_TYPE: return bitint_type_class;
1903 case VECTOR_TYPE: return vector_type_class;
1904 default: return no_type_class;
1905 }
1906}
1907
1908/* Expand a call EXP to __builtin_classify_type. */
1909
1910static rtx
1911expand_builtin_classify_type (tree exp)
1912{
1913 if (call_expr_nargs (exp))
1914 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1915 return GEN_INT (no_type_class);
1916}
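/* For instance, __builtin_classify_type (1.0) evaluates to real_type_class
   and __builtin_classify_type ((void *) 0) to pointer_type_class, following
   the mapping in type_to_class above; a call without arguments falls back to
   no_type_class.  */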
1917
1918/* This helper macro, meant to be used in mathfn_built_in below, determines
1919 which among a set of builtin math functions is appropriate for a given type
1920 mode. The `F' (float) and `L' (long double) are automatically generated
1921 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1922 types, there are additional types that are considered with 'F32', 'F64',
1923 'F128', etc. suffixes. */
1924#define CASE_MATHFN(MATHFN) \
1925 CASE_CFN_##MATHFN: \
1926 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1927 fcodel = BUILT_IN_##MATHFN##L ; break;
1928/* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1929 types. */
1930#define CASE_MATHFN_FLOATN(MATHFN) \
1931 CASE_CFN_##MATHFN: \
1932 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1933 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1934 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1935 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1936 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1937 break;
1938/* Similar to above, but appends _R after any F/L suffix. */
1939#define CASE_MATHFN_REENT(MATHFN) \
1940 case CFN_BUILT_IN_##MATHFN##_R: \
1941 case CFN_BUILT_IN_##MATHFN##F_R: \
1942 case CFN_BUILT_IN_##MATHFN##L_R: \
1943 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1944 fcodel = BUILT_IN_##MATHFN##L_R ; break;
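/* As an illustrative hand expansion, CASE_MATHFN (CEXPI) used in the table
   below becomes

     CASE_CFN_CEXPI:
       fcode = BUILT_IN_CEXPI; fcodef = BUILT_IN_CEXPIF;
       fcodel = BUILT_IN_CEXPIL; break;

   while CASE_MATHFN_FLOATN additionally fills in the _Float<N> and
   _Float<N>x codes (fcodef16 through fcodef128x).  */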
1945
1946/* Return a function equivalent to FN but operating on floating-point
1947 values of type TYPE, or END_BUILTINS if no such function exists.
1948 This is purely an operation on function codes; it does not guarantee
1949 that the target actually has an implementation of the function. */
1950
1951static built_in_function
1952mathfn_built_in_2 (tree type, combined_fn fn)
1953{
1954 tree mtype;
1955 built_in_function fcode, fcodef, fcodel;
1956 built_in_function fcodef16 = END_BUILTINS;
1957 built_in_function fcodef32 = END_BUILTINS;
1958 built_in_function fcodef64 = END_BUILTINS;
1959 built_in_function fcodef128 = END_BUILTINS;
1960 built_in_function fcodef32x = END_BUILTINS;
1961 built_in_function fcodef64x = END_BUILTINS;
1962 built_in_function fcodef128x = END_BUILTINS;
1963
1964 /* If <math.h> has been included somehow, HUGE_VAL and NAN definitions
1965 break the uses below. */
1966#undef HUGE_VAL
1967#undef NAN
1968
1969 switch (fn)
1970 {
1971#define SEQ_OF_CASE_MATHFN \
1972 CASE_MATHFN_FLOATN (ACOS) \
1973 CASE_MATHFN_FLOATN (ACOSH) \
1974 CASE_MATHFN_FLOATN (ASIN) \
1975 CASE_MATHFN_FLOATN (ASINH) \
1976 CASE_MATHFN_FLOATN (ATAN) \
1977 CASE_MATHFN_FLOATN (ATAN2) \
1978 CASE_MATHFN_FLOATN (ATANH) \
1979 CASE_MATHFN_FLOATN (CBRT) \
1980 CASE_MATHFN_FLOATN (CEIL) \
1981 CASE_MATHFN (CEXPI) \
1982 CASE_MATHFN_FLOATN (COPYSIGN) \
1983 CASE_MATHFN_FLOATN (COS) \
1984 CASE_MATHFN_FLOATN (COSH) \
1985 CASE_MATHFN (DREM) \
1986 CASE_MATHFN_FLOATN (ERF) \
1987 CASE_MATHFN_FLOATN (ERFC) \
1988 CASE_MATHFN_FLOATN (EXP) \
1989 CASE_MATHFN (EXP10) \
1990 CASE_MATHFN_FLOATN (EXP2) \
1991 CASE_MATHFN_FLOATN (EXPM1) \
1992 CASE_MATHFN_FLOATN (FABS) \
1993 CASE_MATHFN_FLOATN (FDIM) \
1994 CASE_MATHFN_FLOATN (FLOOR) \
1995 CASE_MATHFN_FLOATN (FMA) \
1996 CASE_MATHFN_FLOATN (FMAX) \
1997 CASE_MATHFN_FLOATN (FMIN) \
1998 CASE_MATHFN_FLOATN (FMOD) \
1999 CASE_MATHFN_FLOATN (FREXP) \
2000 CASE_MATHFN (GAMMA) \
2001 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
2002 CASE_MATHFN_FLOATN (HUGE_VAL) \
2003 CASE_MATHFN_FLOATN (HYPOT) \
2004 CASE_MATHFN_FLOATN (ILOGB) \
2005 CASE_MATHFN (ICEIL) \
2006 CASE_MATHFN (IFLOOR) \
2007 CASE_MATHFN_FLOATN (INF) \
2008 CASE_MATHFN (IRINT) \
2009 CASE_MATHFN (IROUND) \
2010 CASE_MATHFN (ISINF) \
2011 CASE_MATHFN (J0) \
2012 CASE_MATHFN (J1) \
2013 CASE_MATHFN (JN) \
2014 CASE_MATHFN (LCEIL) \
2015 CASE_MATHFN_FLOATN (LDEXP) \
2016 CASE_MATHFN (LFLOOR) \
2017 CASE_MATHFN_FLOATN (LGAMMA) \
2018 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
2019 CASE_MATHFN (LLCEIL) \
2020 CASE_MATHFN (LLFLOOR) \
2021 CASE_MATHFN_FLOATN (LLRINT) \
2022 CASE_MATHFN_FLOATN (LLROUND) \
2023 CASE_MATHFN_FLOATN (LOG) \
2024 CASE_MATHFN_FLOATN (LOG10) \
2025 CASE_MATHFN_FLOATN (LOG1P) \
2026 CASE_MATHFN_FLOATN (LOG2) \
2027 CASE_MATHFN_FLOATN (LOGB) \
2028 CASE_MATHFN_FLOATN (LRINT) \
2029 CASE_MATHFN_FLOATN (LROUND) \
2030 CASE_MATHFN_FLOATN (MODF) \
2031 CASE_MATHFN_FLOATN (NAN) \
2032 CASE_MATHFN_FLOATN (NANS) \
2033 CASE_MATHFN_FLOATN (NEARBYINT) \
2034 CASE_MATHFN_FLOATN (NEXTAFTER) \
2035 CASE_MATHFN (NEXTTOWARD) \
2036 CASE_MATHFN_FLOATN (POW) \
2037 CASE_MATHFN (POWI) \
2038 CASE_MATHFN (POW10) \
2039 CASE_MATHFN_FLOATN (REMAINDER) \
2040 CASE_MATHFN_FLOATN (REMQUO) \
2041 CASE_MATHFN_FLOATN (RINT) \
2042 CASE_MATHFN_FLOATN (ROUND) \
2043 CASE_MATHFN_FLOATN (ROUNDEVEN) \
2044 CASE_MATHFN (SCALB) \
2045 CASE_MATHFN_FLOATN (SCALBLN) \
2046 CASE_MATHFN_FLOATN (SCALBN) \
2047 CASE_MATHFN (SIGNBIT) \
2048 CASE_MATHFN (SIGNIFICAND) \
2049 CASE_MATHFN_FLOATN (SIN) \
2050 CASE_MATHFN (SINCOS) \
2051 CASE_MATHFN_FLOATN (SINH) \
2052 CASE_MATHFN_FLOATN (SQRT) \
2053 CASE_MATHFN_FLOATN (TAN) \
2054 CASE_MATHFN_FLOATN (TANH) \
2055 CASE_MATHFN_FLOATN (TGAMMA) \
2056 CASE_MATHFN_FLOATN (TRUNC) \
2057 CASE_MATHFN (Y0) \
2058 CASE_MATHFN (Y1) \
2059 CASE_MATHFN (YN)
2060
2061 SEQ_OF_CASE_MATHFN
2062
2063 default:
2064 return END_BUILTINS;
2065 }
2066
2067 mtype = TYPE_MAIN_VARIANT (type);
2068 if (mtype == double_type_node)
2069 return fcode;
2070 else if (mtype == float_type_node)
2071 return fcodef;
2072 else if (mtype == long_double_type_node)
2073 return fcodel;
2074 else if (mtype == float16_type_node)
2075 return fcodef16;
2076 else if (mtype == float32_type_node)
2077 return fcodef32;
2078 else if (mtype == float64_type_node)
2079 return fcodef64;
2080 else if (mtype == float128_type_node)
2081 return fcodef128;
2082 else if (mtype == float32x_type_node)
2083 return fcodef32x;
2084 else if (mtype == float64x_type_node)
2085 return fcodef64x;
2086 else if (mtype == float128x_type_node)
2087 return fcodef128x;
2088 else
2089 return END_BUILTINS;
2090}
2091
2092#undef CASE_MATHFN
2093#undef CASE_MATHFN_FLOATN
2094#undef CASE_MATHFN_REENT
2095
2096/* Return the mathematical function equivalent to FN but operating directly on TYPE,
2097 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2098 otherwise use the explicit declaration. If we can't do the conversion,
2099 return null. */
2100
2101static tree
2102mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2103{
2104 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2105 if (fcode2 == END_BUILTINS)
2106 return NULL_TREE;
2107
2108  if (implicit_p && !builtin_decl_implicit_p (fcode2))
2109    return NULL_TREE;
2110
2111  return builtin_decl_explicit (fcode2);
2112}
2113
2114/* Like mathfn_built_in_1, but always use the implicit array. */
2115
2116tree
2117mathfn_built_in (tree type, combined_fn fn)
2118{
2119  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2120}
2121
2122/* Like mathfn_built_in_1, but always use the explicit array. */
2123
2124tree
2125mathfn_built_in_explicit (tree type, combined_fn fn)
2126{
2127  return mathfn_built_in_1 (type, fn, /*implicit=*/ 0);
2128}
2129
2130/* Like mathfn_built_in_1, but take a built_in_function and
2131 always use the implicit array. */
2132
2133tree
2134mathfn_built_in (tree type, enum built_in_function fn)
2135{
2136  return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2137}
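/* For example, mathfn_built_in (float_type_node, BUILT_IN_SIN) yields the
   declaration of sinf when the implicit builtin declaration is available and
   NULL_TREE otherwise; mathfn_built_in_explicit drops the implicit-declaration
   requirement.  */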
2138
2139/* Return the type associated with a built in function, i.e., the one
2140 to be passed to mathfn_built_in to get the type-specific
2141 function. */
2142
2143tree
2144mathfn_built_in_type (combined_fn fn)
2145{
2146#define CASE_MATHFN(MATHFN) \
2147 case CFN_BUILT_IN_##MATHFN: \
2148 return double_type_node; \
2149 case CFN_BUILT_IN_##MATHFN##F: \
2150 return float_type_node; \
2151 case CFN_BUILT_IN_##MATHFN##L: \
2152 return long_double_type_node;
2153
2154#define CASE_MATHFN_FLOATN(MATHFN) \
2155 CASE_MATHFN(MATHFN) \
2156 case CFN_BUILT_IN_##MATHFN##F16: \
2157 return float16_type_node; \
2158 case CFN_BUILT_IN_##MATHFN##F32: \
2159 return float32_type_node; \
2160 case CFN_BUILT_IN_##MATHFN##F64: \
2161 return float64_type_node; \
2162 case CFN_BUILT_IN_##MATHFN##F128: \
2163 return float128_type_node; \
2164 case CFN_BUILT_IN_##MATHFN##F32X: \
2165 return float32x_type_node; \
2166 case CFN_BUILT_IN_##MATHFN##F64X: \
2167 return float64x_type_node; \
2168 case CFN_BUILT_IN_##MATHFN##F128X: \
2169 return float128x_type_node;
2170
2171/* Similar to above, but appends _R after any F/L suffix. */
2172#define CASE_MATHFN_REENT(MATHFN) \
2173 case CFN_BUILT_IN_##MATHFN##_R: \
2174 return double_type_node; \
2175 case CFN_BUILT_IN_##MATHFN##F_R: \
2176 return float_type_node; \
2177 case CFN_BUILT_IN_##MATHFN##L_R: \
2178 return long_double_type_node;
2179
2180 switch (fn)
2181 {
2182 SEQ_OF_CASE_MATHFN
2183
2184 default:
2185 return NULL_TREE;
2186 }
2187
2188#undef CASE_MATHFN
2189#undef CASE_MATHFN_FLOATN
2190#undef CASE_MATHFN_REENT
2191#undef SEQ_OF_CASE_MATHFN
2192}
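/* For example, mathfn_built_in_type (CFN_BUILT_IN_SINF) is float_type_node,
   so mathfn_built_in (mathfn_built_in_type (fn), fn) round-trips back to the
   sinf declaration (when that declaration is available).  */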
2193
2194/* Check whether there is an internal function associated with function FN
2195 and return type RETURN_TYPE. Return the function if so, otherwise return
2196 IFN_LAST.
2197
2198 Note that this function only tests whether the function is defined in
2199 internals.def, not whether it is actually available on the target. */
2200
2201static internal_fn
2202associated_internal_fn (built_in_function fn, tree return_type)
2203{
2204 switch (fn)
2205 {
2206#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2207 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2208#define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2209 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2210 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2211#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2212 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2213#include "internal-fn.def"
2214
2215 CASE_FLT_FN (BUILT_IN_POW10):
2216 return IFN_EXP10;
2217
2218 CASE_FLT_FN (BUILT_IN_DREM):
2219 return IFN_REMAINDER;
2220
2221 CASE_FLT_FN (BUILT_IN_SCALBN):
2222 CASE_FLT_FN (BUILT_IN_SCALBLN):
2223 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2224 return IFN_LDEXP;
2225 return IFN_LAST;
2226 case BUILT_IN_CRC8_DATA8:
2227 case BUILT_IN_CRC16_DATA8:
2228 case BUILT_IN_CRC16_DATA16:
2229 case BUILT_IN_CRC32_DATA8:
2230 case BUILT_IN_CRC32_DATA16:
2231 case BUILT_IN_CRC32_DATA32:
2232 case BUILT_IN_CRC64_DATA8:
2233 case BUILT_IN_CRC64_DATA16:
2234 case BUILT_IN_CRC64_DATA32:
2235 case BUILT_IN_CRC64_DATA64:
2236 return IFN_CRC;
2237 case BUILT_IN_REV_CRC8_DATA8:
2238 case BUILT_IN_REV_CRC16_DATA8:
2239 case BUILT_IN_REV_CRC16_DATA16:
2240 case BUILT_IN_REV_CRC32_DATA8:
2241 case BUILT_IN_REV_CRC32_DATA16:
2242 case BUILT_IN_REV_CRC32_DATA32:
2243 case BUILT_IN_REV_CRC64_DATA8:
2244 case BUILT_IN_REV_CRC64_DATA16:
2245 case BUILT_IN_REV_CRC64_DATA32:
2246 case BUILT_IN_REV_CRC64_DATA64:
2247 return IFN_CRC_REV;
2248 default:
2249 return IFN_LAST;
2250 }
2251}
2252
2253/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2254 return its code, otherwise return IFN_LAST. Note that this function
2255 only tests whether the function is defined in internals.def, not whether
2256 it is actually available on the target. */
2257
2258internal_fn
2259associated_internal_fn (tree fndecl)
2260{
2261 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2262  return associated_internal_fn (DECL_FUNCTION_CODE (fndecl),
2263				 TREE_TYPE (TREE_TYPE (fndecl)));
2264}
2265
2266/* Check whether there is an internal function associated with function CFN
2267 and return type RETURN_TYPE. Return the function if so, otherwise return
2268 IFN_LAST.
2269
2270 Note that this function only tests whether the function is defined in
2271 internals.def, not whether it is actually available on the target. */
2272
2273internal_fn
2274associated_internal_fn (combined_fn cfn, tree return_type)
2275{
2276  if (internal_fn_p (cfn))
2277    return as_internal_fn (cfn);
2278  return associated_internal_fn (as_builtin_fn (cfn), return_type);
2279}
2280
2281/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2282 on the current target by a call to an internal function, return the
2283 code of that internal function, otherwise return IFN_LAST. The caller
2284 is responsible for ensuring that any side-effects of the built-in
2285 call are dealt with correctly. E.g. if CALL sets errno, the caller
2286 must decide that the errno result isn't needed or make it available
2287 in some other way. */
2288
2289internal_fn
2290replacement_internal_fn (gcall *call)
2291{
2292 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2293 {
2294      internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2295 if (ifn != IFN_LAST)
2296 {
2297 tree_pair types = direct_internal_fn_types (ifn, call);
2298	  optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2299 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2300 return ifn;
2301 }
2302 }
2303 return IFN_LAST;
2304}
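/* As a hedged example: a GIMPLE call to __builtin_sqrtf can be reported as
   replaceable by IFN_SQRT when direct_internal_fn_supported_p says the target
   can expand SQRT directly for SFmode; because sqrtf may set errno while
   IFN_SQRT never does, the caller still has to verify that the errno result
   is unneeded before performing the replacement.  */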
2305
2306/* Expand a call to the builtin ternary math functions (fma).
2307 Return NULL_RTX if a normal call should be emitted rather than expanding the
2308 function in-line. EXP is the expression that is a call to the builtin
2309 function; if convenient, the result should be placed in TARGET.
2310 SUBTARGET may be used as the target for computing one of EXP's
2311 operands. */
2312
2313static rtx
2314expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2315{
2316 optab builtin_optab;
2317 rtx op0, op1, op2, result;
2318 rtx_insn *insns;
2319 tree fndecl = get_callee_fndecl (exp);
2320 tree arg0, arg1, arg2;
2321 machine_mode mode;
2322
2323  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2324 return NULL_RTX;
2325
2326 arg0 = CALL_EXPR_ARG (exp, 0);
2327 arg1 = CALL_EXPR_ARG (exp, 1);
2328 arg2 = CALL_EXPR_ARG (exp, 2);
2329
2330  switch (DECL_FUNCTION_CODE (fndecl))
2331 {
2332 CASE_FLT_FN (BUILT_IN_FMA):
2333 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2334 builtin_optab = fma_optab; break;
2335 default:
2336 gcc_unreachable ();
2337 }
2338
2339 /* Make a suitable register to place result in. */
2340 mode = TYPE_MODE (TREE_TYPE (exp));
2341
2342 /* Before working hard, check whether the instruction is available. */
2343  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2344 return NULL_RTX;
2345
2346 result = gen_reg_rtx (mode);
2347
2348 /* Always stabilize the argument list. */
2349  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2350  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2351  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2352
2353  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2354  op1 = expand_normal (arg1);
2355  op2 = expand_normal (arg2);
2356
2357 start_sequence ();
2358
2359 /* Compute into RESULT.
2360 Set RESULT to wherever the result comes back. */
2361  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2362			      result, 0);
2363
2364 /* If we were unable to expand via the builtin, stop the sequence
2365 (without outputting the insns) and call to the library function
2366 with the stabilized argument list. */
2367 if (result == 0)
2368 {
2369 end_sequence ();
2370 return expand_call (exp, target, target == const0_rtx);
2371 }
2372
2373 /* Output the entire sequence. */
2374 insns = end_sequence ();
2375 emit_insn (insns);
2376
2377 return result;
2378}
2379
2380/* Expand a call to the builtin sin and cos math functions.
2381 Return NULL_RTX if a normal call should be emitted rather than expanding the
2382 function in-line. EXP is the expression that is a call to the builtin
2383 function; if convenient, the result should be placed in TARGET.
2384 SUBTARGET may be used as the target for computing one of EXP's
2385 operands. */
2386
2387static rtx
2388expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2389{
2390 optab builtin_optab;
2391 rtx op0;
2392 rtx_insn *insns;
2393 tree fndecl = get_callee_fndecl (exp);
2394 machine_mode mode;
2395 tree arg;
2396
2397  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2398 return NULL_RTX;
2399
2400 arg = CALL_EXPR_ARG (exp, 0);
2401
2402  switch (DECL_FUNCTION_CODE (fndecl))
2403 {
2404 CASE_FLT_FN (BUILT_IN_SIN):
2405 CASE_FLT_FN (BUILT_IN_COS):
2406 builtin_optab = sincos_optab; break;
2407 default:
2408 gcc_unreachable ();
2409 }
2410
2411 /* Make a suitable register to place result in. */
2412 mode = TYPE_MODE (TREE_TYPE (exp));
2413
2414 /* Check if sincos insn is available, otherwise fallback
2415 to sin or cos insn. */
2416  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2417    switch (DECL_FUNCTION_CODE (fndecl))
2418 {
2419 CASE_FLT_FN (BUILT_IN_SIN):
2420 builtin_optab = sin_optab; break;
2421 CASE_FLT_FN (BUILT_IN_COS):
2422 builtin_optab = cos_optab; break;
2423 default:
2424 gcc_unreachable ();
2425 }
2426
2427 /* Before working hard, check whether the instruction is available. */
2428  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2429 {
2430 rtx result = gen_reg_rtx (mode);
2431
2432      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2433	 need to expand the argument again.  This way, we will not perform
2434	 side-effects more than once.  */
2435      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2436
2437      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2438
2439 start_sequence ();
2440
2441 /* Compute into RESULT.
2442 Set RESULT to wherever the result comes back. */
2443 if (builtin_optab == sincos_optab)
2444 {
2445 int ok;
2446
2447	  switch (DECL_FUNCTION_CODE (fndecl))
2448 {
2449 CASE_FLT_FN (BUILT_IN_SIN):
2450 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2451 break;
2452 CASE_FLT_FN (BUILT_IN_COS):
2453 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2454 break;
2455 default:
2456 gcc_unreachable ();
2457 }
2458 gcc_assert (ok);
2459 }
2460 else
2461 result = expand_unop (mode, builtin_optab, op0, result, 0);
2462
2463 if (result != 0)
2464 {
2465 /* Output the entire sequence. */
2466 insns = end_sequence ();
2467 emit_insn (insns);
2468 return result;
2469 }
2470
2471 /* If we were unable to expand via the builtin, stop the sequence
2472 (without outputting the insns) and call to the library function
2473 with the stabilized argument list. */
2474 end_sequence ();
2475 }
2476
2477 return expand_call (exp, target, target == const0_rtx);
2478}
2479
2480/* Given an interclass math builtin decl FNDECL and its argument ARG
2481 return an RTL instruction code that implements the functionality.
2482 If that isn't possible or available return CODE_FOR_nothing. */
2483
2484static enum insn_code
2485interclass_mathfn_icode (tree arg, tree fndecl)
2486{
2487 bool errno_set = false;
2488 optab builtin_optab = unknown_optab;
2489 machine_mode mode;
2490
2491  switch (DECL_FUNCTION_CODE (fndecl))
2492 {
2493 CASE_FLT_FN (BUILT_IN_ILOGB):
2494 errno_set = true; builtin_optab = ilogb_optab; break;
2495 CASE_FLT_FN (BUILT_IN_ISINF):
2496 builtin_optab = isinf_optab; break;
2497 case BUILT_IN_ISFINITE:
2498 builtin_optab = isfinite_optab;
2499 break;
2500 case BUILT_IN_ISNORMAL:
2501 builtin_optab = isnormal_optab;
2502 break;
2503 CASE_FLT_FN (BUILT_IN_FINITE):
2504 case BUILT_IN_FINITED32:
2505 case BUILT_IN_FINITED64:
2506 case BUILT_IN_FINITED128:
2507 case BUILT_IN_ISINFD32:
2508 case BUILT_IN_ISINFD64:
2509 case BUILT_IN_ISINFD128:
2510 /* These builtins have no optabs (yet). */
2511 break;
2512 default:
2513 gcc_unreachable ();
2514 }
2515
2516 /* There's no easy way to detect the case we need to set EDOM. */
2517 if (flag_errno_math && errno_set)
2518 return CODE_FOR_nothing;
2519
2520 /* Optab mode depends on the mode of the input argument. */
2521 mode = TYPE_MODE (TREE_TYPE (arg));
2522
2523 if (builtin_optab)
2524    return optab_handler (builtin_optab, mode);
2525 return CODE_FOR_nothing;
2526}
2527
2528/* Expand a call to one of the builtin math functions that operate on a
2529   floating point argument and produce an integer result (ilogb, isinf,
2530   isnan, etc.).
2531 Return 0 if a normal call should be emitted rather than expanding the
2532 function in-line. EXP is the expression that is a call to the builtin
2533 function; if convenient, the result should be placed in TARGET. */
2534
2535static rtx
2536expand_builtin_interclass_mathfn (tree exp, rtx target)
2537{
2538 enum insn_code icode = CODE_FOR_nothing;
2539 rtx op0;
2540 tree fndecl = get_callee_fndecl (exp);
2541 machine_mode mode;
2542 tree arg;
2543
2544  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2545 return NULL_RTX;
2546
2547 arg = CALL_EXPR_ARG (exp, 0);
2548 icode = interclass_mathfn_icode (arg, fndecl);
2549 mode = TYPE_MODE (TREE_TYPE (arg));
2550
2551 if (icode != CODE_FOR_nothing)
2552 {
2553 class expand_operand ops[1];
2554 rtx_insn *last = get_last_insn ();
2555 tree orig_arg = arg;
2556
2557      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2558	 need to expand the argument again.  This way, we will not perform
2559	 side-effects more than once.  */
2560      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2561
2562      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2563
2564 if (mode != GET_MODE (op0))
2565 op0 = convert_to_mode (mode, op0, 0);
2566
2567      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2568      if (maybe_legitimize_operands (icode, 0, 1, ops)
2569 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2570 return ops[0].value;
2571
2572 delete_insns_since (last);
2573 CALL_EXPR_ARG (exp, 0) = orig_arg;
2574 }
2575
2576 return NULL_RTX;
2577}
2578
2579/* Expand a call to the builtin sincos math function.
2580 Return NULL_RTX if a normal call should be emitted rather than expanding the
2581 function in-line. EXP is the expression that is a call to the builtin
2582 function. */
2583
2584static rtx
2585expand_builtin_sincos (tree exp)
2586{
2587 rtx op0, op1, op2, target1, target2;
2588 machine_mode mode;
2589 tree arg, sinp, cosp;
2590 int result;
2591 location_t loc = EXPR_LOCATION (exp);
2592 tree alias_type, alias_off;
2593
2594  if (!validate_arglist (exp, REAL_TYPE,
2595 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2596 return NULL_RTX;
2597
2598 arg = CALL_EXPR_ARG (exp, 0);
2599 sinp = CALL_EXPR_ARG (exp, 1);
2600 cosp = CALL_EXPR_ARG (exp, 2);
2601
2602 /* Make a suitable register to place result in. */
2603 mode = TYPE_MODE (TREE_TYPE (arg));
2604
2605 /* Check if sincos insn is available, otherwise emit the call. */
2606  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2607 return NULL_RTX;
2608
2609 target1 = gen_reg_rtx (mode);
2610 target2 = gen_reg_rtx (mode);
2611
2612  op0 = expand_normal (arg);
2613  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2614  alias_off = build_int_cst (alias_type, 0);
2615  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2616					sinp, alias_off));
2617  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2618					cosp, alias_off));
2619
2620 /* Compute into target1 and target2.
2621 Set TARGET to wherever the result comes back. */
2622 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2623 gcc_assert (result);
2624
2625 /* Move target1 and target2 to the memory locations indicated
2626 by op1 and op2. */
2627 emit_move_insn (op1, target1);
2628 emit_move_insn (op2, target2);
2629
2630 return const0_rtx;
2631}
2632
2633/* Expand call EXP to the fegetround builtin (from C99 fenv.h), returning the
2634 result and setting it in TARGET. Otherwise return NULL_RTX on failure. */
2635static rtx
2636expand_builtin_fegetround (tree exp, rtx target, machine_mode target_mode)
2637{
2638  if (!validate_arglist (exp, VOID_TYPE))
2639 return NULL_RTX;
2640
2641  insn_code icode = direct_optab_handler (fegetround_optab, SImode);
2642 if (icode == CODE_FOR_nothing)
2643 return NULL_RTX;
2644
2645 if (target == 0
2646 || GET_MODE (target) != target_mode
2647 || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
2648 target = gen_reg_rtx (target_mode);
2649
2650 rtx pat = GEN_FCN (icode) (target);
2651 if (!pat)
2652 return NULL_RTX;
2653 emit_insn (pat);
2654
2655 return target;
2656}
2657
2658/* Expand call EXP to either feclearexcept or feraiseexcept builtins (from C99
2659 fenv.h), returning the result and setting it in TARGET. Otherwise return
2660 NULL_RTX on failure. */
2661static rtx
2662expand_builtin_feclear_feraise_except (tree exp, rtx target,
2663 machine_mode target_mode, optab op_optab)
2664{
2665  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
2666 return NULL_RTX;
2667 rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2668
2669  insn_code icode = direct_optab_handler (op_optab, SImode);
2670 if (icode == CODE_FOR_nothing)
2671 return NULL_RTX;
2672
2673 if (!(*insn_data[icode].operand[1].predicate) (op0, GET_MODE (op0)))
2674 return NULL_RTX;
2675
2676 if (target == 0
2677 || GET_MODE (target) != target_mode
2678 || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
2679 target = gen_reg_rtx (target_mode);
2680
2681 rtx pat = GEN_FCN (icode) (target, op0);
2682 if (!pat)
2683 return NULL_RTX;
2684 emit_insn (pat);
2685
2686 return target;
2687}
2688
2689/* Expand a call to the internal cexpi builtin to the sincos math function.
2690 EXP is the expression that is a call to the builtin function; if convenient,
2691 the result should be placed in TARGET. */
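/* Semantically __builtin_cexpi (x) is cos (x) + i * sin (x), i.e.
   cexp (i * x), which is why it can be lowered either through a sincos-style
   expansion or, as a last resort below, through a real cexp call on the
   complex value (0, x).  */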
2692
2693static rtx
2694expand_builtin_cexpi (tree exp, rtx target)
2695{
2696 tree fndecl = get_callee_fndecl (exp);
2697 tree arg, type;
2698 machine_mode mode;
2699 rtx op0, op1, op2;
2700 location_t loc = EXPR_LOCATION (exp);
2701
2702  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2703 return NULL_RTX;
2704
2705 arg = CALL_EXPR_ARG (exp, 0);
2706 type = TREE_TYPE (arg);
2707 mode = TYPE_MODE (TREE_TYPE (arg));
2708
2709  /* Try expanding via a sincos optab, falling back to a libcall to sincos
2710     or cexp.  One of those is sure to be available, because cexpi is only
2711     generated from sincos or cexp, or when either of them is available.  */
2712  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2713 {
2714 op1 = gen_reg_rtx (mode);
2715 op2 = gen_reg_rtx (mode);
2716
2717      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2718
2719 /* Compute into op1 and op2. */
2720 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2721 }
2722 else if (targetm.libc_has_function (function_sincos, type))
2723 {
2724 tree call, fn = NULL_TREE;
2725 tree top1, top2;
2726 rtx op1a, op2a;
2727
2728      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2729	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2730      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2731	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2732      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2733	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2734 else
2735 gcc_unreachable ();
2736
2737 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2738 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2739 op1a = copy_addr_to_reg (XEXP (op1, 0));
2740 op2a = copy_addr_to_reg (XEXP (op2, 0));
2741 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2742 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2743
2744 /* Make sure not to fold the sincos call again. */
2745 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2746      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2747 call, 3, arg, top1, top2));
2748 }
2749 else
2750 {
2751 tree call, fn = NULL_TREE, narg;
2752 tree ctype = build_complex_type (type);
2753
2754      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2755	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2756      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2757	fn = builtin_decl_explicit (BUILT_IN_CEXP);
2758      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2759	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2760 else
2761 gcc_unreachable ();
2762
2763      /* If we don't have a decl for cexp, create one.  This is the
2764	 friendliest fallback if the user calls __builtin_cexpi on a
2765	 target without full C99 function support.  */
2766 if (fn == NULL_TREE)
2767 {
2768 tree fntype;
2769 const char *name = NULL;
2770
2771	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2772	    name = "cexpf";
2773	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2774	    name = "cexp";
2775	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2776 name = "cexpl";
2777
2778 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2779 fn = build_fn_decl (name, fntype);
2780 }
2781
2782 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2783 build_real (type, dconst0), arg);
2784
2785 /* Make sure not to fold the cexp call again. */
2786 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2787      return expand_expr (build_call_nary (ctype, call, 1, narg),
2788			  target, VOIDmode, EXPAND_NORMAL);
2789 }
2790
2791 /* Now build the proper return type. */
2792  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2793			      make_tree (TREE_TYPE (arg), op2),
2794			      make_tree (TREE_TYPE (arg), op1)),
2795		      target, VOIDmode, EXPAND_NORMAL);
2796}
2797
2798/* Conveniently construct a function call expression. FNDECL names the
2799 function to be called, N is the number of arguments, and the "..."
2800   parameters are the argument expressions.  Unlike build_call_expr
2801 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2802
2803static tree
2804build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2805{
2806 va_list ap;
2807 tree fntype = TREE_TYPE (fndecl);
2808 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2809
2810 va_start (ap, n);
2811 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2812 va_end (ap);
2813 SET_EXPR_LOCATION (fn, loc);
2814 return fn;
2815}
2816
2817/* Expand the __builtin_issignaling builtin. This needs to handle
2818 all floating point formats that do support NaNs (for those that
2819 don't it just sets target to 0). */
2820
2821static rtx
2822expand_builtin_issignaling (tree exp, rtx target)
2823{
2824  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2825 return NULL_RTX;
2826
2827 tree arg = CALL_EXPR_ARG (exp, 0);
2828 scalar_float_mode fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
2829 const struct real_format *fmt = REAL_MODE_FORMAT (fmode);
2830
2831 /* Expand the argument yielding a RTX expression. */
2832  rtx temp = expand_normal (arg);
2833
2834 /* If mode doesn't support NaN, always return 0.
2835 Don't use !HONOR_SNANS (fmode) here, so there is some possibility of
2836 __builtin_issignaling working without -fsignaling-nans. Especially
2837 when -fno-signaling-nans is the default.
2838 On the other side, MODE_HAS_NANS (fmode) is unnecessary, with
2839 -ffinite-math-only even __builtin_isnan or __builtin_fpclassify
2840 fold to 0 or non-NaN/Inf classification. */
2841 if (!HONOR_NANS (fmode))
2842 {
2843 emit_move_insn (target, const0_rtx);
2844 return target;
2845 }
2846
2847 /* Check if the back end provides an insn that handles issignaling for the
2848 argument's mode. */
2849  enum insn_code icode = optab_handler (issignaling_optab, fmode);
2850 if (icode != CODE_FOR_nothing)
2851 {
2852 rtx_insn *last = get_last_insn ();
2853 rtx this_target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2854 if (maybe_emit_unop_insn (icode, this_target, temp, UNKNOWN))
2855 return this_target;
2856 delete_insns_since (last);
2857 }
2858
2859 if (DECIMAL_FLOAT_MODE_P (fmode))
2860 {
2861 scalar_int_mode imode;
2862 rtx hi;
2863 switch (fmt->ieee_bits)
2864 {
2865 case 32:
2866 case 64:
2867 imode = int_mode_for_mode (fmode).require ();
2868 temp = gen_lowpart (imode, temp);
2869 break;
2870 case 128:
2871	  imode = int_mode_for_size (64, 1).require ();
2872 hi = NULL_RTX;
2873 /* For decimal128, TImode support isn't always there and even when
2874 it is, working on the DImode high part is usually better. */
2875 if (!MEM_P (temp))
2876 {
2877 if (rtx t = force_highpart_subreg (imode, temp, fmode))
2878 hi = t;
2879 else
2880 {
2881 scalar_int_mode imode2;
2882	      if (int_mode_for_mode (fmode).exists (&imode2))
2883 {
2884 rtx temp2 = gen_lowpart (imode2, temp);
2885 if (rtx t = force_highpart_subreg (imode, temp2, imode2))
2886 hi = t;
2887 }
2888 }
2889 if (!hi)
2890 {
2891	      rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
2892 emit_move_insn (mem, temp);
2893 temp = mem;
2894 }
2895 }
2896 if (!hi)
2897 {
2898 poly_int64 offset
2899		= subreg_highpart_offset (imode, GET_MODE (temp));
2900 hi = adjust_address (temp, imode, offset);
2901 }
2902 temp = hi;
2903 break;
2904 default:
2905 gcc_unreachable ();
2906 }
2907  /* In all of decimal{32,64,128}, the MSB is the sign bit and sNaNs have
2908     the 6 bits below it all set.  */
2909 rtx val
2910 = GEN_INT (HOST_WIDE_INT_C (0x3f) << (GET_MODE_BITSIZE (imode) - 7));
2911 temp = expand_binop (imode, and_optab, temp, val,
2912 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2913 temp = emit_store_flag_force (target, EQ, temp, val, imode, 1, 1);
2914 return temp;
2915 }
2916
2917 /* Only PDP11 has these defined differently but doesn't support NaNs. */
2918 gcc_assert (FLOAT_WORDS_BIG_ENDIAN == WORDS_BIG_ENDIAN);
2919 gcc_assert (fmt->signbit_ro > 0 && fmt->b == 2);
2920 gcc_assert (MODE_COMPOSITE_P (fmode)
2921 || (fmt->pnan == fmt->p
2922 && fmt->signbit_ro == fmt->signbit_rw));
2923
2924 switch (fmt->p)
2925 {
2926 case 106: /* IBM double double */
2927 /* For IBM double double, recurse on the most significant double. */
2928 gcc_assert (MODE_COMPOSITE_P (fmode));
2929      temp = convert_modes (DFmode, fmode, temp, 0);
2930 fmode = DFmode;
2931 fmt = REAL_MODE_FORMAT (DFmode);
2932 /* FALLTHRU */
2933 case 8: /* bfloat */
2934 case 11: /* IEEE half */
2935 case 24: /* IEEE single */
2936 case 53: /* IEEE double or Intel extended with rounding to double */
2937 if (fmt->p == 53 && fmt->signbit_ro == 79)
2938 goto extended;
2939 {
2940 scalar_int_mode imode = int_mode_for_mode (fmode).require ();
2941 temp = gen_lowpart (imode, temp);
2942 rtx val = GEN_INT ((HOST_WIDE_INT_M1U << (fmt->p - 2))
2943 & ~(HOST_WIDE_INT_M1U << fmt->signbit_ro));
2944 if (fmt->qnan_msb_set)
2945 {
2946 rtx mask = GEN_INT (~(HOST_WIDE_INT_M1U << fmt->signbit_ro));
2947 rtx bit = GEN_INT (HOST_WIDE_INT_1U << (fmt->p - 2));
2948 /* For non-MIPS/PA IEEE single/double/half or bfloat, expand to:
2949 ((temp ^ bit) & mask) > val. */
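	    /* A worked instance for the IEEE single format (purely for
	       illustration): val is 0x7fc00000, mask 0x7fffffff and bit
	       0x00400000, so for the sNaN bit pattern 0x7fa00000 we get
	       ((0x7fa00000 ^ bit) & mask) == 0x7fe00000 > val, whereas the
	       qNaN 0x7fc00000 and +Inf 0x7f800000 do not exceed val.  */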
2950 temp = expand_binop (imode, xor_optab, temp, bit,
2951 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2952 temp = expand_binop (imode, and_optab, temp, mask,
2953 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2954 temp = emit_store_flag_force (target, GTU, temp, val, imode,
2955 1, 1);
2956 }
2957 else
2958 {
2959 /* For MIPS/PA IEEE single/double, expand to:
2960 (temp & val) == val. */
2961 temp = expand_binop (imode, and_optab, temp, val,
2962 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2963 temp = emit_store_flag_force (target, EQ, temp, val, imode,
2964 1, 1);
2965 }
2966 }
2967 break;
2968 case 113: /* IEEE quad */
2969 {
2970 rtx hi = NULL_RTX, lo = NULL_RTX;
2971	scalar_int_mode imode = int_mode_for_size (64, 1).require ();
2972 /* For IEEE quad, TImode support isn't always there and even when
2973 it is, working on DImode parts is usually better. */
2974 if (!MEM_P (temp))
2975 {
2976 hi = force_highpart_subreg (imode, temp, fmode);
2977 lo = force_lowpart_subreg (imode, temp, fmode);
2978 if (!hi || !lo)
2979 {
2980 scalar_int_mode imode2;
2981	    if (int_mode_for_mode (fmode).exists (&imode2))
2982 {
2983 rtx temp2 = gen_lowpart (imode2, temp);
2984 hi = force_highpart_subreg (imode, temp2, imode2);
2985 lo = force_lowpart_subreg (imode, temp2, imode2);
2986 }
2987 }
2988 if (!hi || !lo)
2989 {
2990	    rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
2991 emit_move_insn (mem, temp);
2992 temp = mem;
2993 }
2994 }
2995 if (!hi || !lo)
2996 {
2997 poly_int64 offset
2998	      = subreg_highpart_offset (imode, GET_MODE (temp));
2999	    hi = adjust_address (temp, imode, offset);
3000	    offset = subreg_lowpart_offset (imode, GET_MODE (temp));
3001 lo = adjust_address (temp, imode, offset);
3002 }
3003 rtx val = GEN_INT ((HOST_WIDE_INT_M1U << (fmt->p - 2 - 64))
3004 & ~(HOST_WIDE_INT_M1U << (fmt->signbit_ro - 64)));
3005 if (fmt->qnan_msb_set)
3006 {
3007 rtx mask = GEN_INT (~(HOST_WIDE_INT_M1U << (fmt->signbit_ro
3008 - 64)));
3009 rtx bit = GEN_INT (HOST_WIDE_INT_1U << (fmt->p - 2 - 64));
3010 /* For non-MIPS/PA IEEE quad, expand to:
3011 (((hi ^ bit) | ((lo | -lo) >> 63)) & mask) > val. */
3012 rtx nlo = expand_unop (imode, neg_optab, lo, NULL_RTX, 0);
3013 lo = expand_binop (imode, ior_optab, lo, nlo,
3014 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3015 lo = expand_shift (RSHIFT_EXPR, imode, lo, 63, NULL_RTX, 1);
3016 temp = expand_binop (imode, xor_optab, hi, bit,
3017 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3018 temp = expand_binop (imode, ior_optab, temp, lo,
3019 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3020 temp = expand_binop (imode, and_optab, temp, mask,
3021 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3022 temp = emit_store_flag_force (target, GTU, temp, val, imode,
3023 1, 1);
3024 }
3025 else
3026 {
3027 /* For MIPS/PA IEEE quad, expand to:
3028 (hi & val) == val. */
3029 temp = expand_binop (imode, and_optab, hi, val,
3030 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3031 temp = emit_store_flag_force (target, EQ, temp, val, imode,
3032 1, 1);
3033 }
3034 }
3035 break;
3036 case 64: /* Intel or Motorola extended */
3037 extended:
3038 {
3039 rtx ex, hi, lo;
3040	scalar_int_mode imode = int_mode_for_size (32, 1).require ();
3041	scalar_int_mode iemode = int_mode_for_size (16, 1).require ();
3042 if (!MEM_P (temp))
3043 {
3044	    rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
3045 emit_move_insn (mem, temp);
3046 temp = mem;
3047 }
3048 if (fmt->signbit_ro == 95)
3049 {
3050	    /* Motorola, always big endian, with a 16-bit gap between the
3051	       16-bit sign+exponent and the 64-bit mantissa.  */
3052 ex = adjust_address (temp, iemode, 0);
3053 hi = adjust_address (temp, imode, 4);
3054 lo = adjust_address (temp, imode, 8);
3055 }
3056 else if (!WORDS_BIG_ENDIAN)
3057 {
3058 /* Intel little endian, 64-bit mantissa followed by 16-bit
3059 sign+exponent and then either 16 or 48 bits of gap. */
3060 ex = adjust_address (temp, iemode, 8);
3061 hi = adjust_address (temp, imode, 4);
3062 lo = adjust_address (temp, imode, 0);
3063 }
3064 else
3065 {
3066 /* Big endian Itanium. */
3067 ex = adjust_address (temp, iemode, 0);
3068 hi = adjust_address (temp, imode, 2);
3069 lo = adjust_address (temp, imode, 6);
3070 }
3071 rtx val = GEN_INT (HOST_WIDE_INT_M1U << 30);
3072 gcc_assert (fmt->qnan_msb_set);
3073 rtx mask = GEN_INT (0x7fff);
3074 rtx bit = GEN_INT (HOST_WIDE_INT_1U << 30);
3075 /* For Intel/Motorola extended format, expand to:
3076 (ex & mask) == mask && ((hi ^ bit) | ((lo | -lo) >> 31)) > val. */
3077 rtx nlo = expand_unop (imode, neg_optab, lo, NULL_RTX, 0);
3078 lo = expand_binop (imode, ior_optab, lo, nlo,
3079 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3080 lo = expand_shift (RSHIFT_EXPR, imode, lo, 31, NULL_RTX, 1);
3081 temp = expand_binop (imode, xor_optab, hi, bit,
3082 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3083 temp = expand_binop (imode, ior_optab, temp, lo,
3084 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3085 temp = emit_store_flag_force (target, GTU, temp, val, imode, 1, 1);
3086 ex = expand_binop (iemode, and_optab, ex, mask,
3087 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3088 ex = emit_store_flag_force (gen_reg_rtx (GET_MODE (temp)), EQ,
3089 ex, mask, iemode, 1, 1);
3090 temp = expand_binop (GET_MODE (temp), and_optab, temp, ex,
3091 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3092 }
3093 break;
3094 default:
3095 gcc_unreachable ();
3096 }
3097
3098 return temp;
3099}
3100
3101/* Expand a call to one of the builtin rounding functions gcc defines
3102 as an extension (lfloor and lceil). As these are gcc extensions we
3103 do not need to worry about setting errno to EDOM.
3104   If expanding via the optab fails, lower the expression to (int)(floor(x)).
3105 EXP is the expression that is a call to the builtin function;
3106 if convenient, the result should be placed in TARGET. */
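/* In user terms the fallback below amounts to the (illustrative, not literal)
   equivalence __builtin_lceil (x) == (long) ceil (x) when no lceil expander
   is available, and likewise for the floor variants.  */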
3107
3108static rtx
3109expand_builtin_int_roundingfn (tree exp, rtx target)
3110{
3111 convert_optab builtin_optab;
3112 rtx op0, tmp;
3113 rtx_insn *insns;
3114 tree fndecl = get_callee_fndecl (exp);
3115 enum built_in_function fallback_fn;
3116 tree fallback_fndecl;
3117 machine_mode mode;
3118 tree arg;
3119
3120  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3121 return NULL_RTX;
3122
3123 arg = CALL_EXPR_ARG (exp, 0);
3124
3125  switch (DECL_FUNCTION_CODE (fndecl))
3126 {
3127 CASE_FLT_FN (BUILT_IN_ICEIL):
3128 CASE_FLT_FN (BUILT_IN_LCEIL):
3129 CASE_FLT_FN (BUILT_IN_LLCEIL):
3130 builtin_optab = lceil_optab;
3131 fallback_fn = BUILT_IN_CEIL;
3132 break;
3133
3134 CASE_FLT_FN (BUILT_IN_IFLOOR):
3135 CASE_FLT_FN (BUILT_IN_LFLOOR):
3136 CASE_FLT_FN (BUILT_IN_LLFLOOR):
3137 builtin_optab = lfloor_optab;
3138 fallback_fn = BUILT_IN_FLOOR;
3139 break;
3140
3141 default:
3142 gcc_unreachable ();
3143 }
3144
3145 /* Make a suitable register to place result in. */
3146 mode = TYPE_MODE (TREE_TYPE (exp));
3147
3148 target = gen_reg_rtx (mode);
3149
3150  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3151     need to expand the argument again.  This way, we will not perform
3152     side-effects more than once.  */
3153  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3154
3155  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3156
3157 start_sequence ();
3158
3159 /* Compute into TARGET. */
3160 if (expand_sfix_optab (target, op0, builtin_optab))
3161 {
3162 /* Output the entire sequence. */
3163 insns = end_sequence ();
3164 emit_insn (insns);
3165 return target;
3166 }
3167
3168 /* If we were unable to expand via the builtin, stop the sequence
3169 (without outputting the insns). */
3170 end_sequence ();
3171
3172 /* Fall back to floating point rounding optab. */
3173  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
3174
3175 /* For non-C99 targets we may end up without a fallback fndecl here
3176 if the user called __builtin_lfloor directly. In this case emit
3177 a call to the floor/ceil variants nevertheless. This should result
3178     in the best user experience on targets without full C99 support.  */
3179 if (fallback_fndecl == NULL_TREE)
3180 {
3181 tree fntype;
3182 const char *name = NULL;
3183
3184      switch (DECL_FUNCTION_CODE (fndecl))
3185 {
3186 case BUILT_IN_ICEIL:
3187 case BUILT_IN_LCEIL:
3188 case BUILT_IN_LLCEIL:
3189 name = "ceil";
3190 break;
3191 case BUILT_IN_ICEILF:
3192 case BUILT_IN_LCEILF:
3193 case BUILT_IN_LLCEILF:
3194 name = "ceilf";
3195 break;
3196 case BUILT_IN_ICEILL:
3197 case BUILT_IN_LCEILL:
3198 case BUILT_IN_LLCEILL:
3199 name = "ceill";
3200 break;
3201 case BUILT_IN_IFLOOR:
3202 case BUILT_IN_LFLOOR:
3203 case BUILT_IN_LLFLOOR:
3204 name = "floor";
3205 break;
3206 case BUILT_IN_IFLOORF:
3207 case BUILT_IN_LFLOORF:
3208 case BUILT_IN_LLFLOORF:
3209 name = "floorf";
3210 break;
3211 case BUILT_IN_IFLOORL:
3212 case BUILT_IN_LFLOORL:
3213 case BUILT_IN_LLFLOORL:
3214 name = "floorl";
3215 break;
3216 default:
3217 gcc_unreachable ();
3218 }
3219
3220 fntype = build_function_type_list (TREE_TYPE (arg),
3221 TREE_TYPE (arg), NULL_TREE);
3222 fallback_fndecl = build_fn_decl (name, fntype);
3223 }
3224
3225  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
3226
3227 tmp = expand_normal (exp);
3228 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
3229
3230 /* Truncate the result of floating point optab to integer
3231 via expand_fix (). */
3232 target = gen_reg_rtx (mode);
3233 expand_fix (target, tmp, 0);
3234
3235 return target;
3236}
3237
3238/* Expand a call to one of the builtin math functions doing integer
3239 conversion (lrint).
3240 Return 0 if a normal call should be emitted rather than expanding the
3241 function in-line. EXP is the expression that is a call to the builtin
3242 function; if convenient, the result should be placed in TARGET. */
3243
3244static rtx
3245expand_builtin_int_roundingfn_2 (tree exp, rtx target)
3246{
3247 convert_optab builtin_optab;
3248 rtx op0;
3249 rtx_insn *insns;
3250 tree fndecl = get_callee_fndecl (exp);
3251 tree arg;
3252 machine_mode mode;
3253 enum built_in_function fallback_fn = BUILT_IN_NONE;
3254
3255  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3256 return NULL_RTX;
3257
3258 arg = CALL_EXPR_ARG (exp, 0);
3259
3260  switch (DECL_FUNCTION_CODE (fndecl))
3261 {
3262 CASE_FLT_FN (BUILT_IN_IRINT):
3263 fallback_fn = BUILT_IN_LRINT;
3264 gcc_fallthrough ();
3265 CASE_FLT_FN (BUILT_IN_LRINT):
3266 CASE_FLT_FN (BUILT_IN_LLRINT):
3267 builtin_optab = lrint_optab;
3268 break;
3269
3270 CASE_FLT_FN (BUILT_IN_IROUND):
3271 fallback_fn = BUILT_IN_LROUND;
3272 gcc_fallthrough ();
3273 CASE_FLT_FN (BUILT_IN_LROUND):
3274 CASE_FLT_FN (BUILT_IN_LLROUND):
3275 builtin_optab = lround_optab;
3276 break;
3277
3278 default:
3279 gcc_unreachable ();
3280 }
3281
3282 /* There's no easy way to detect the case we need to set EDOM. */
3283 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
3284 return NULL_RTX;
3285
3286 /* Make a suitable register to place result in. */
3287 mode = TYPE_MODE (TREE_TYPE (exp));
3288
3289 /* There's no easy way to detect the case we need to set EDOM. */
3290 if (!flag_errno_math)
3291 {
3292 rtx result = gen_reg_rtx (mode);
3293
3294 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3295 need to expand the argument again. This way, we will not perform
3296 side-effects more than once. */
3297 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (exp: arg);
3298
3299 op0 = expand_expr (exp: arg, NULL, VOIDmode, modifier: EXPAND_NORMAL);
3300
3301 start_sequence ();
3302
3303 if (expand_sfix_optab (result, op0, builtin_optab))
3304 {
3305 /* Output the entire sequence. */
3306 insns = end_sequence ();
3307 emit_insn (insns);
3308 return result;
3309 }
3310
3311 /* If we were unable to expand via the builtin, stop the sequence
3312 (without outputting the insns) and call to the library function
3313 with the stabilized argument list. */
3314 end_sequence ();
3315 }
3316
3317 if (fallback_fn != BUILT_IN_NONE)
3318 {
3319 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
3320 targets, (int) round (x) should never be transformed into
3321 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
3322 a call to lround in the hope that the target provides at least some
3323 C99 functions. This should result in the best user experience for
3324 targets that are not fully C99.
3325 As scalar float conversions with same mode are useless in GIMPLE,
3326 we can end up e.g. with _Float32 argument passed to float builtin,
3327 try to get the type from the builtin prototype first. */
3328 tree fallback_fndecl = NULL_TREE;
3329 if (tree argtypes = TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
3330 fallback_fndecl
3331 = mathfn_built_in_1 (TREE_VALUE (argtypes),
3332 fn: as_combined_fn (fn: fallback_fn), implicit_p: 0);
3333 if (fallback_fndecl == NULL_TREE)
3334 fallback_fndecl
3335 = mathfn_built_in_1 (TREE_TYPE (arg),
3336 fn: as_combined_fn (fn: fallback_fn), implicit_p: 0);
3337 if (fallback_fndecl)
3338 {
3339 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
3340 fndecl: fallback_fndecl, n: 1, arg);
3341
3342 target = expand_call (exp, NULL_RTX, target == const0_rtx);
3343 target = maybe_emit_group_store (target, TREE_TYPE (exp));
3344 return convert_to_mode (mode, target, 0);
3345 }
3346 }
3347
3348 return expand_call (exp, target, target == const0_rtx);
3349}
3350
3351/* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3352 a normal call should be emitted rather than expanding the function
3353 in-line. EXP is the expression that is a call to the builtin
3354 function; if convenient, the result should be placed in TARGET. */
3355
3356static rtx
3357expand_builtin_powi (tree exp, rtx target)
3358{
3359 tree arg0, arg1;
3360 rtx op0, op1;
3361 machine_mode mode;
3362 machine_mode mode2;
3363
3364 if (! validate_arglist (callexpr: exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3365 return NULL_RTX;
3366
3367 arg0 = CALL_EXPR_ARG (exp, 0);
3368 arg1 = CALL_EXPR_ARG (exp, 1);
3369 mode = TYPE_MODE (TREE_TYPE (exp));
3370
3371 /* Emit a libcall to libgcc. */
3372
3373 /* Mode of the 2nd argument must match that of an int. */
3374 mode2 = int_mode_for_size (INT_TYPE_SIZE, limit: 0).require ();
3375
3376 if (target == NULL_RTX)
3377 target = gen_reg_rtx (mode);
3378
3379 op0 = expand_expr (exp: arg0, NULL_RTX, mode, modifier: EXPAND_NORMAL);
3380 if (GET_MODE (op0) != mode)
3381 op0 = convert_to_mode (mode, op0, 0);
3382 op1 = expand_expr (exp: arg1, NULL_RTX, mode: mode2, modifier: EXPAND_NORMAL);
3383 if (GET_MODE (op1) != mode2)
3384 op1 = convert_to_mode (mode2, op1, 0);
3385
3386 target = emit_library_call_value (fun: optab_libfunc (powi_optab, mode),
3387 value: target, fn_type: LCT_CONST, outmode: mode,
3388 arg1: op0, arg1_mode: mode, arg2: op1, arg2_mode: mode2);
3389
3390 return target;
3391}
3392
3393/* Expand expression EXP which is a call to the strlen builtin. Return
3394 NULL_RTX if we failed and the caller should emit a normal call, otherwise
3395 try to get the result in TARGET, if convenient. */
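/* Illustrative note (not from the original sources): a call whose argument is
   a known constant string, e.g.

     size_t n = __builtin_strlen ("abc");

   is folded to the constant 3 via c_strlen below, while for a non-constant
   argument the expansion relies on the target providing a strlen pattern
   (strlen_optab); otherwise a normal library call is emitted.  */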
3396
3397static rtx
3398expand_builtin_strlen (tree exp, rtx target,
3399 machine_mode target_mode)
3400{
3401 if (!validate_arglist (callexpr: exp, POINTER_TYPE, VOID_TYPE))
3402 return NULL_RTX;
3403
3404 tree src = CALL_EXPR_ARG (exp, 0);
3405
3406 /* If the length can be computed at compile-time, return it. */
3407 if (tree len = c_strlen (arg: src, only_value: 0))
3408 return expand_expr (exp: len, target, mode: target_mode, modifier: EXPAND_NORMAL);
3409
3410 /* If the length can be computed at compile-time and is a constant
3411 integer, but there are side-effects in src, evaluate
3412 src for side-effects, then return len.
3413 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3414 can be optimized into: i++; x = 3; */
3415 tree len = c_strlen (arg: src, only_value: 1);
3416 if (len && TREE_CODE (len) == INTEGER_CST)
3417 {
3418 expand_expr (exp: src, const0_rtx, VOIDmode, modifier: EXPAND_NORMAL);
3419 return expand_expr (exp: len, target, mode: target_mode, modifier: EXPAND_NORMAL);
3420 }
3421
3422 unsigned int align = get_pointer_alignment (exp: src) / BITS_PER_UNIT;
3423
3424 /* If SRC is not a pointer type, don't do this operation inline. */
3425 if (align == 0)
3426 return NULL_RTX;
3427
3428 /* Bail out if we can't compute strlen in the right mode. */
3429 machine_mode insn_mode;
3430 enum insn_code icode = CODE_FOR_nothing;
3431 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3432 {
3433 icode = optab_handler (op: strlen_optab, mode: insn_mode);
3434 if (icode != CODE_FOR_nothing)
3435 break;
3436 }
3437 if (insn_mode == VOIDmode)
3438 return NULL_RTX;
3439
3440 /* Make a place to hold the source address. We will not expand
3441 the actual source until we are sure that the expansion will
3442 not fail -- there are trees that cannot be expanded twice. */
3443 rtx src_reg = gen_reg_rtx (Pmode);
3444
3445 /* Mark the beginning of the strlen sequence so we can emit the
3446 source operand later. */
3447 rtx_insn *before_strlen = get_last_insn ();
3448
3449 class expand_operand ops[4];
3450 create_output_operand (op: &ops[0], x: target, mode: insn_mode);
3451 create_fixed_operand (op: &ops[1], x: gen_rtx_MEM (BLKmode, src_reg));
3452 create_integer_operand (&ops[2], 0);
3453 create_integer_operand (&ops[3], align);
3454 if (!maybe_expand_insn (icode, nops: 4, ops))
3455 return NULL_RTX;
3456
3457 /* Check to see if the argument was declared attribute nonstring
3458 and if so, issue a warning since at this point it's not known
3459 to be nul-terminated. */
3460 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3461
3462 /* Now that we are assured of success, expand the source. */
3463 start_sequence ();
3464 rtx pat = expand_expr (exp: src, target: src_reg, Pmode, modifier: EXPAND_NORMAL);
3465 if (pat != src_reg)
3466 {
3467#ifdef POINTERS_EXTEND_UNSIGNED
3468 if (GET_MODE (pat) != Pmode)
3469 pat = convert_to_mode (Pmode, pat,
3470 POINTERS_EXTEND_UNSIGNED);
3471#endif
3472 emit_move_insn (src_reg, pat);
3473 }
3474 pat = end_sequence ();
3475
3476 if (before_strlen)
3477 emit_insn_after (pat, before_strlen);
3478 else
3479 emit_insn_before (pat, get_insns ());
3480
3481 /* Return the value in the proper mode for this function. */
3482 if (GET_MODE (ops[0].value) == target_mode)
3483 target = ops[0].value;
3484 else if (target != 0)
3485 convert_move (target, ops[0].value, 0);
3486 else
3487 target = convert_to_mode (target_mode, ops[0].value, 0);
3488
3489 return target;
3490}
3491
3492 /* Expand call EXP to the strnlen built-in, returning the result
3493 in TARGET if convenient, or NULL_RTX on failure. */
3494
3495static rtx
3496expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3497{
3498 if (!validate_arglist (callexpr: exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3499 return NULL_RTX;
3500
3501 tree src = CALL_EXPR_ARG (exp, 0);
3502 tree bound = CALL_EXPR_ARG (exp, 1);
3503
3504 if (!bound)
3505 return NULL_RTX;
3506
3507 location_t loc = UNKNOWN_LOCATION;
3508 if (EXPR_HAS_LOCATION (exp))
3509 loc = EXPR_LOCATION (exp);
3510
3511 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3512 so these conversions aren't necessary. */
3513 c_strlen_data lendata = { };
3514 tree len = c_strlen (arg: src, only_value: 0, data: &lendata, eltsize: 1);
3515 if (len)
3516 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3517
3518 if (TREE_CODE (bound) == INTEGER_CST)
3519 {
3520 if (!len)
3521 return NULL_RTX;
3522
3523 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3524 return expand_expr (exp: len, target, mode: target_mode, modifier: EXPAND_NORMAL);
3525 }
3526
3527 if (TREE_CODE (bound) != SSA_NAME)
3528 return NULL_RTX;
3529
3530 wide_int min, max;
3531 int_range_max r;
3532 get_range_query (cfun)->range_of_expr (r, expr: bound,
3533 currently_expanding_gimple_stmt);
3534 if (r.varying_p () || r.undefined_p ())
3535 return NULL_RTX;
3536 min = r.lower_bound ();
3537 max = r.upper_bound ();
3538
3539 if (!len || TREE_CODE (len) != INTEGER_CST)
3540 {
3541 bool exact;
3542 lendata.decl = unterminated_array (src, &len, &exact);
3543 if (!lendata.decl)
3544 return NULL_RTX;
3545 }
3546
3547 if (lendata.decl)
3548 return NULL_RTX;
3549
3550 if (wi::gtu_p (x: min, y: wi::to_wide (t: len)))
3551 return expand_expr (exp: len, target, mode: target_mode, modifier: EXPAND_NORMAL);
3552
3553 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3554 return expand_expr (exp: len, target, mode: target_mode, modifier: EXPAND_NORMAL);
3555}
3556
3557/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3558 bytes starting at DATA + OFFSET and return it reinterpreted as
3559 a target constant. */
3560
3561static rtx
3562builtin_memcpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3563 fixed_size_mode mode)
3564{
3565 /* The REPresentation pointed to by DATA need not be a nul-terminated
3566 string but the caller guarantees it's large enough for MODE. */
3567 const char *rep = (const char *) data;
3568
3569 return c_readstr (str: rep + offset, mode, /*nul_terminated=*/null_terminated_p: false);
3570}
3571
3572 /* LEN specifies the length of the block for a memcpy/memset operation.
3573 Figure out its range and store it in MIN_SIZE/MAX_SIZE.
3574 In some cases we can make a very likely guess about the maximum size,
3575 which we then store in PROBABLE_MAX_SIZE. */
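/* Rough example (illustrative only, identifiers are placeholders): for

     void f (unsigned int n) { if (n >= 8 && n <= 64) memcpy (d, s, n); }

   the range query below can yield MIN_SIZE = 8 and MAX_SIZE =
   PROBABLE_MAX_SIZE = 64; when no range is known, MAX_SIZE falls back to
   the mask of LEN_RTX's mode.  */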
3576
3577static void
3578determine_block_size (tree len, rtx len_rtx,
3579 unsigned HOST_WIDE_INT *min_size,
3580 unsigned HOST_WIDE_INT *max_size,
3581 unsigned HOST_WIDE_INT *probable_max_size)
3582{
3583 if (CONST_INT_P (len_rtx))
3584 {
3585 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3586 return;
3587 }
3588 else
3589 {
3590 wide_int min, max;
3591 enum value_range_kind range_type = VR_UNDEFINED;
3592
3593 /* Determine bounds from the type. */
3594 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3595 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3596 else
3597 *min_size = 0;
3598 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3599 *probable_max_size = *max_size
3600 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3601 else
3602 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3603
3604 if (TREE_CODE (len) == SSA_NAME)
3605 {
3606 int_range_max r;
3607 tree tmin, tmax;
3608 gimple *cg = currently_expanding_gimple_stmt;
3609 get_range_query (cfun)->range_of_expr (r, expr: len, cg);
3610 range_type = get_legacy_range (r, min&: tmin, max&: tmax);
3611 if (range_type != VR_UNDEFINED)
3612 {
3613 min = wi::to_wide (t: tmin);
3614 max = wi::to_wide (t: tmax);
3615 }
3616 }
3617 if (range_type == VR_RANGE)
3618 {
3619 if (wi::fits_uhwi_p (x: min) && *min_size < min.to_uhwi ())
3620 *min_size = min.to_uhwi ();
3621 if (wi::fits_uhwi_p (x: max) && *max_size > max.to_uhwi ())
3622 *probable_max_size = *max_size = max.to_uhwi ();
3623 }
3624 else if (range_type == VR_ANTI_RANGE)
3625 {
3626 /* Code like
3627
3628 int n;
3629 if (n < 100)
3630 memcpy (a, b, n)
3631
3632 produces an anti-range allowing negative values of N. We can still
3633 use that information and guess that N is not negative.
3634 */
3635 if (!wi::leu_p (x: max, y: 1 << 30) && wi::fits_uhwi_p (x: min))
3636 *probable_max_size = min.to_uhwi () - 1;
3637 }
3638 }
3639 gcc_checking_assert (*max_size <=
3640 (unsigned HOST_WIDE_INT)
3641 GET_MODE_MASK (GET_MODE (len_rtx)));
3642}
3643
3644/* Expand a call EXP to the memcpy builtin.
3645 Return NULL_RTX if we failed, in which case the caller should emit a normal call;
3646 otherwise try to get the result in TARGET, if convenient (and in
3647 mode MODE if that's convenient). */
3648
3649static rtx
3650expand_builtin_memcpy (tree exp, rtx target)
3651{
3652 if (!validate_arglist (callexpr: exp,
3653 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3654 return NULL_RTX;
3655
3656 tree dest = CALL_EXPR_ARG (exp, 0);
3657 tree src = CALL_EXPR_ARG (exp, 1);
3658 tree len = CALL_EXPR_ARG (exp, 2);
3659
3660 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3661 /*retmode=*/ RETURN_BEGIN, might_overlap: false);
3662}
3663
3664 /* Expand a call EXP to the memmove built-in. Return NULL_RTX if we failed,
3665 in which case the caller should emit a normal call; otherwise try to get the result in TARGET. */
3666
3667static rtx
3668expand_builtin_memmove (tree exp, rtx target)
3669{
3670 if (!validate_arglist (callexpr: exp,
3671 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3672 return NULL_RTX;
3673
3674 tree dest = CALL_EXPR_ARG (exp, 0);
3675 tree src = CALL_EXPR_ARG (exp, 1);
3676 tree len = CALL_EXPR_ARG (exp, 2);
3677
3678 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3679 /*retmode=*/ RETURN_BEGIN, might_overlap: true);
3680}
3681
3682/* Expand a call EXP to the mempcpy builtin.
3683 Return NULL_RTX if we failed, in which case the caller should emit a normal call;
3684 otherwise try to get the result in TARGET, if convenient (and in
3685 mode MODE if that's convenient). */
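/* Note for illustration (not from the sources): mempcpy returns a pointer
   just past the last byte written, i.e. DEST + LEN, which is why the
   expansion below uses RETURN_END.  E.g. after

     char *p = mempcpy (dst, src, 4);

   p equals dst + 4.  */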
3686
3687static rtx
3688expand_builtin_mempcpy (tree exp, rtx target)
3689{
3690 if (!validate_arglist (callexpr: exp,
3691 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3692 return NULL_RTX;
3693
3694 tree dest = CALL_EXPR_ARG (exp, 0);
3695 tree src = CALL_EXPR_ARG (exp, 1);
3696 tree len = CALL_EXPR_ARG (exp, 2);
3697
3698 /* Policy does not generally allow using compute_objsize (which
3699 is used internally by check_memop_size) to change code generation
3700 or drive optimization decisions.
3701
3702 In this instance it is safe because the code we generate has
3703 the same semantics regardless of the return value of
3704 check_memop_sizes. Exactly the same amount of data is copied
3705 and the return value is exactly the same in both cases.
3706
3707 Furthermore, check_memop_size always uses mode 0 for the call to
3708 compute_objsize, so the imprecise nature of compute_objsize is
3709 avoided. */
3710
3711 /* Avoid expanding mempcpy into memcpy when the call is determined
3712 to overflow the buffer. This also prevents the same overflow
3713 from being diagnosed again when expanding memcpy. */
3714
3715 return expand_builtin_mempcpy_args (dest, src, len,
3716 target, exp, /*retmode=*/ RETURN_END);
3717}
3718
3719 /* Helper function that does the actual work of expanding the memory-copy
3720 family of functions (memcpy, mempcpy, stpcpy). The expansion should copy
3721 LEN bytes of memory from SRC to DEST and assign the result to TARGET if
3722 convenient. The return value is based on the RETMODE argument. */
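/* For reference (an informal summary, not part of the original comment): the
   RETMODE values correspond to what the user-visible functions return:
   RETURN_BEGIN yields DEST (memcpy), RETURN_END yields DEST + LEN (mempcpy),
   and RETURN_END_MINUS_ONE yields DEST + LEN - 1, the address of the
   terminating nul for stpcpy.  */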
3723
3724static rtx
3725expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3726 rtx target, tree exp, memop_ret retmode,
3727 bool might_overlap)
3728{
3729 unsigned int src_align = get_pointer_alignment (exp: src);
3730 unsigned int dest_align = get_pointer_alignment (exp: dest);
3731 rtx dest_mem, src_mem, dest_addr, len_rtx;
3732 HOST_WIDE_INT expected_size = -1;
3733 unsigned int expected_align = 0;
3734 unsigned HOST_WIDE_INT min_size;
3735 unsigned HOST_WIDE_INT max_size;
3736 unsigned HOST_WIDE_INT probable_max_size;
3737
3738 bool is_move_done;
3739
3740 /* If DEST is not a pointer type, call the normal function. */
3741 if (dest_align == 0)
3742 return NULL_RTX;
3743
3744 /* If either SRC is not a pointer type, don't do this
3745 operation in-line. */
3746 if (src_align == 0)
3747 return NULL_RTX;
3748
3749 if (currently_expanding_gimple_stmt)
3750 stringop_block_profile (currently_expanding_gimple_stmt,
3751 &expected_align, &expected_size);
3752
3753 if (expected_align < dest_align)
3754 expected_align = dest_align;
3755 dest_mem = get_memory_rtx (exp: dest, len);
3756 set_mem_align (dest_mem, dest_align);
3757 len_rtx = expand_normal (exp: len);
3758 determine_block_size (len, len_rtx, min_size: &min_size, max_size: &max_size,
3759 probable_max_size: &probable_max_size);
3760
3761 /* Try to get the byte representation of the constant SRC points to,
3762 with its byte size in NBYTES. */
3763 unsigned HOST_WIDE_INT nbytes;
3764 const char *rep = getbyterep (src, &nbytes);
3765
3766 /* If the function's constant bound LEN_RTX is less than or equal
3767 to the byte size of the representation of the constant argument,
3768 and if block move would be done by pieces, we can avoid loading
3769 the bytes from memory and only store the computed constant.
3770 This works in the overlap (memmove) case as well because
3771 store_by_pieces just generates a series of stores of constants
3772 from the representation returned by getbyterep(). */
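/* Illustrative example (not from the sources): for

     memcpy (buf, "abcd", 3);

   the constant length 3 is no larger than the size of the literal's
   representation, so the branch below can emit the three byte stores
   directly instead of loading them from memory.  */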
3773 if (rep
3774 && CONST_INT_P (len_rtx)
3775 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
3776 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3777 CONST_CAST (char *, rep),
3778 dest_align, false))
3779 {
3780 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3781 builtin_memcpy_read_str,
3782 CONST_CAST (char *, rep),
3783 dest_align, false, retmode);
3784 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3785 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3786 return dest_mem;
3787 }
3788
3789 src_mem = get_memory_rtx (exp: src, len);
3790 set_mem_align (src_mem, src_align);
3791
3792 /* Copy word part most expediently. */
3793 enum block_op_methods method = BLOCK_OP_NORMAL;
3794 if (CALL_EXPR_TAILCALL (exp)
3795 && (retmode == RETURN_BEGIN || target == const0_rtx))
3796 method = BLOCK_OP_TAILCALL;
3797 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
3798 && retmode == RETURN_END
3799 && !might_overlap
3800 && target != const0_rtx);
3801 if (use_mempcpy_call)
3802 method = BLOCK_OP_NO_LIBCALL_RET;
3803 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3804 expected_align, expected_size,
3805 min_size, max_size, probable_max_size,
3806 bail_out_libcall: use_mempcpy_call, is_move_done: &is_move_done,
3807 might_overlap, ctz_size: tree_ctz (len));
3808
3809 /* Bail out when a mempcpy call would be expanded as libcall and when
3810 we have a target that provides a fast implementation
3811 of mempcpy routine. */
3812 if (!is_move_done)
3813 return NULL_RTX;
3814
3815 if (dest_addr == pc_rtx)
3816 return NULL_RTX;
3817
3818 if (dest_addr == 0)
3819 {
3820 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3821 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3822 }
3823
3824 if (retmode != RETURN_BEGIN && target != const0_rtx)
3825 {
3826 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3827 /* stpcpy pointer to last byte. */
3828 if (retmode == RETURN_END_MINUS_ONE)
3829 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3830 }
3831
3832 return dest_addr;
3833}
3834
3835static rtx
3836expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3837 rtx target, tree orig_exp, memop_ret retmode)
3838{
3839 return expand_builtin_memory_copy_args (dest, src, len, target, exp: orig_exp,
3840 retmode, might_overlap: false);
3841}
3842
3843/* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3844 we failed, in which case the caller should emit a normal call; otherwise try to
3845 get the result in TARGET, if convenient.
3846 The return value is based on the RETMODE argument. */
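/* Illustrative note (not from the sources): a movstr pattern is expected to
   copy the source string including its terminating nul and to set its output
   operand to the address of that nul in the destination.  For a strcpy-style
   caller (RETURN_BEGIN) the original DEST is returned instead, and for a
   mempcpy-style caller (RETURN_END) the result is adjusted by one below.  */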
3847
3848static rtx
3849expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
3850{
3851 class expand_operand ops[3];
3852 rtx dest_mem;
3853 rtx src_mem;
3854
3855 if (!targetm.have_movstr ())
3856 return NULL_RTX;
3857
3858 dest_mem = get_memory_rtx (exp: dest, NULL);
3859 src_mem = get_memory_rtx (exp: src, NULL);
3860 if (retmode == RETURN_BEGIN)
3861 {
3862 target = force_reg (Pmode, XEXP (dest_mem, 0));
3863 dest_mem = replace_equiv_address (dest_mem, target);
3864 }
3865
3866 create_output_operand (op: &ops[0],
3867 x: retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
3868 create_fixed_operand (op: &ops[1], x: dest_mem);
3869 create_fixed_operand (op: &ops[2], x: src_mem);
3870 if (!maybe_expand_insn (icode: targetm.code_for_movstr, nops: 3, ops))
3871 return NULL_RTX;
3872
3873 if (retmode != RETURN_BEGIN && target != const0_rtx)
3874 {
3875 target = ops[0].value;
3876 /* movstr is supposed to set end to the address of the NUL
3877 terminator. If the caller requested a mempcpy-like return value,
3878 adjust it. */
3879 if (retmode == RETURN_END)
3880 {
3881 rtx tem = plus_constant (GET_MODE (target),
3882 gen_lowpart (GET_MODE (target), target), 1);
3883 emit_move_insn (target, force_operand (tem, NULL_RTX));
3884 }
3885 }
3886 return target;
3887}
3888
3889/* Expand expression EXP, which is a call to the strcpy builtin. Return
3890 NULL_RTX if we failed, in which case the caller should emit a normal call; otherwise
3891 try to get the result in TARGET, if convenient (and in mode MODE if that's
3892 convenient). */
3893
3894static rtx
3895expand_builtin_strcpy (tree exp, rtx target)
3896{
3897 if (!validate_arglist (callexpr: exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3898 return NULL_RTX;
3899
3900 tree dest = CALL_EXPR_ARG (exp, 0);
3901 tree src = CALL_EXPR_ARG (exp, 1);
3902
3903 return expand_builtin_strcpy_args (exp, dest, src, target);
3904}
3905
3906/* Helper function to do the actual work for expand_builtin_strcpy. The
3907 arguments to the builtin_strcpy call DEST and SRC are broken out
3908 so that this can also be called without constructing an actual CALL_EXPR.
3909 The other arguments and return value are the same as for
3910 expand_builtin_strcpy. */
3911
3912static rtx
3913expand_builtin_strcpy_args (tree, tree dest, tree src, rtx target)
3914{
3915 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
3916}
3917
3918/* Expand a call EXP to the stpcpy builtin.
3919 Return NULL_RTX if we failed, in which case the caller should emit a normal call;
3920 otherwise try to get the result in TARGET, if convenient (and in
3921 mode MODE if that's convenient). */
3922
3923static rtx
3924expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3925{
3926 tree dst, src;
3927 location_t loc = EXPR_LOCATION (exp);
3928
3929 if (!validate_arglist (callexpr: exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3930 return NULL_RTX;
3931
3932 dst = CALL_EXPR_ARG (exp, 0);
3933 src = CALL_EXPR_ARG (exp, 1);
3934
3935 /* If return value is ignored, transform stpcpy into strcpy. */
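/* For example (illustrative), a statement like

     stpcpy (d, s);

   whose value is unused can be emitted as a call to strcpy instead.  */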
3936 if (target == const0_rtx && builtin_decl_implicit (fncode: BUILT_IN_STRCPY))
3937 {
3938 tree fn = builtin_decl_implicit (fncode: BUILT_IN_STRCPY);
3939 tree result = build_call_nofold_loc (loc, fndecl: fn, n: 2, dst, src);
3940 return expand_expr (exp: result, target, mode, modifier: EXPAND_NORMAL);
3941 }
3942 else
3943 {
3944 tree len, lenp1;
3945 rtx ret;
3946
3947 /* Ensure we get an actual string whose length can be evaluated at
3948 compile-time, not an expression containing a string. This is
3949 because the latter will potentially produce pessimized code
3950 when used to produce the return value. */
3951 c_strlen_data lendata = { };
3952 if (!c_getstr (src)
3953 || !(len = c_strlen (arg: src, only_value: 0, data: &lendata, eltsize: 1)))
3954 return expand_movstr (dest: dst, src, target,
3955 /*retmode=*/ RETURN_END_MINUS_ONE);
3956
3957 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3958 ret = expand_builtin_mempcpy_args (dest: dst, src, len: lenp1,
3959 target, orig_exp: exp,
3960 /*retmode=*/ RETURN_END_MINUS_ONE);
3961
3962 if (ret)
3963 return ret;
3964
3965 if (TREE_CODE (len) == INTEGER_CST)
3966 {
3967 rtx len_rtx = expand_normal (exp: len);
3968
3969 if (CONST_INT_P (len_rtx))
3970 {
3971 ret = expand_builtin_strcpy_args (exp, dest: dst, src, target);
3972
3973 if (ret)
3974 {
3975 if (! target)
3976 {
3977 if (mode != VOIDmode)
3978 target = gen_reg_rtx (mode);
3979 else
3980 target = gen_reg_rtx (GET_MODE (ret));
3981 }
3982 if (GET_MODE (target) != GET_MODE (ret))
3983 ret = gen_lowpart (GET_MODE (target), ret);
3984
3985 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3986 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3987 gcc_assert (ret);
3988
3989 return target;
3990 }
3991 }
3992 }
3993
3994 return expand_movstr (dest: dst, src, target,
3995 /*retmode=*/ RETURN_END_MINUS_ONE);
3996 }
3997}
3998
3999/* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4000 arguments while being careful to avoid duplicate warnings (which could
4001 be issued if the expander were to expand the call, resulting in it
4002 being emitted in expand_call ()). */
4003
4004static rtx
4005expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4006{
4007 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4008 {
4009 /* The call has been successfully expanded. Check for nonstring
4010 arguments and issue warnings as appropriate. */
4011 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4012 return ret;
4013 }
4014
4015 return NULL_RTX;
4016}
4017
4018/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4019 bytes from constant string DATA + OFFSET and return it as target
4020 constant. */
4021
4022rtx
4023builtin_strncpy_read_str (void *data, void *, HOST_WIDE_INT offset,
4024 fixed_size_mode mode)
4025{
4026 const char *str = (const char *) data;
4027
4028 if ((unsigned HOST_WIDE_INT) offset > strlen (s: str))
4029 return const0_rtx;
4030
4031 return c_readstr (str: str + offset, mode);
4032}
4033
4034/* Helper to check the sizes of sequences and the destination of calls
4035 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4036 success (no overflow or invalid sizes), false otherwise. */
4037
4038static bool
4039check_strncat_sizes (tree exp, tree objsize)
4040{
4041 tree dest = CALL_EXPR_ARG (exp, 0);
4042 tree src = CALL_EXPR_ARG (exp, 1);
4043 tree maxread = CALL_EXPR_ARG (exp, 2);
4044
4045 /* Try to determine the range of lengths that the source expression
4046 refers to. */
4047 c_strlen_data lendata = { };
4048 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4049
4050 /* Try to verify that the destination is big enough for the shortest
4051 string. */
4052
4053 access_data data (nullptr, exp, access_read_write, maxread, true);
4054 if (!objsize && warn_stringop_overflow)
4055 {
4056 /* If it hasn't been provided by __strncat_chk, try to determine
4057 the size of the destination object into which the source is
4058 being copied. */
4059 objsize = compute_objsize (ptr: dest, warn_stringop_overflow - 1, pref: &data.dst);
4060 }
4061
4062 /* Add one for the terminating nul. */
4063 tree srclen = (lendata.minlen
4064 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
4065 size_one_node)
4066 : NULL_TREE);
4067
4068 /* The strncat function copies at most MAXREAD bytes and always appends
4069 the terminating nul so the specified upper bound should never be equal
4070 to (or greater than) the size of the destination. */
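/* An illustrative case (not from the sources):

     char d[8];
     strncat (d, s, sizeof d);

   specifies a bound equal to the destination size and is diagnosed by the
   warning below, since the terminating nul would not fit.  */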
4071 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4072 && tree_int_cst_equal (objsize, maxread))
4073 {
4074 location_t loc = EXPR_LOCATION (exp);
4075 warning_at (loc, OPT_Wstringop_overflow_,
4076 "%qD specified bound %E equals destination size",
4077 get_callee_fndecl (exp), maxread);
4078
4079 return false;
4080 }
4081
4082 if (!srclen
4083 || (maxread && tree_fits_uhwi_p (maxread)
4084 && tree_fits_uhwi_p (srclen)
4085 && tree_int_cst_lt (t1: maxread, t2: srclen)))
4086 srclen = maxread;
4087
4088 /* The number of bytes to write is LEN but check_access will also
4089 check SRCLEN if LEN's value isn't known. */
4090 return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
4091 objsize, data.mode, &data);
4092}
4093
4094/* Expand expression EXP, which is a call to the strncpy builtin. Return
4095 NULL_RTX if we failed, in which case the caller should emit a normal call. */
4096
4097static rtx
4098expand_builtin_strncpy (tree exp, rtx target)
4099{
4100 location_t loc = EXPR_LOCATION (exp);
4101
4102 if (!validate_arglist (callexpr: exp,
4103 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4104 return NULL_RTX;
4105 tree dest = CALL_EXPR_ARG (exp, 0);
4106 tree src = CALL_EXPR_ARG (exp, 1);
4107 /* The number of bytes to write (not the maximum). */
4108 tree len = CALL_EXPR_ARG (exp, 2);
4109
4110 /* The length of the source sequence. */
4111 tree slen = c_strlen (arg: src, only_value: 1);
4112
4113 /* We must be passed a constant len and src parameter. */
4114 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4115 return NULL_RTX;
4116
4117 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4118
4119 /* We're required to pad with trailing zeros if the requested
4120 len is greater than strlen(s2)+1. In that case try to
4121 use store_by_pieces; if that fails, punt. */
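/* Illustrative example (not from the sources): for

     char d[8];
     strncpy (d, "ab", 8);

   SLEN + 1 is 3 and LEN is 8, so store_by_pieces emits the two characters
   followed by six zero bytes (builtin_strncpy_read_str returns zero for
   offsets past the end of the source string).  */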
4122 if (tree_int_cst_lt (t1: slen, t2: len))
4123 {
4124 unsigned int dest_align = get_pointer_alignment (exp: dest);
4125 const char *p = c_getstr (src);
4126 rtx dest_mem;
4127
4128 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4129 || !can_store_by_pieces (tree_to_uhwi (len),
4130 builtin_strncpy_read_str,
4131 CONST_CAST (char *, p),
4132 dest_align, false))
4133 return NULL_RTX;
4134
4135 dest_mem = get_memory_rtx (exp: dest, len);
4136 store_by_pieces (dest_mem, tree_to_uhwi (len),
4137 builtin_strncpy_read_str,
4138 CONST_CAST (char *, p), dest_align, false,
4139 RETURN_BEGIN);
4140 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4141 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4142 return dest_mem;
4143 }
4144
4145 return NULL_RTX;
4146}
4147
4148/* Return the RTL of a register in MODE generated from PREV in the
4149 previous iteration. */
4150
4151static rtx
4152gen_memset_value_from_prev (by_pieces_prev *prev, fixed_size_mode mode)
4153{
4154 rtx target = nullptr;
4155 if (prev != nullptr && prev->data != nullptr)
4156 {
4157 /* Use the previous data in the same mode. */
4158 if (prev->mode == mode)
4159 return prev->data;
4160
4161 fixed_size_mode prev_mode = prev->mode;
4162
4163 /* Don't use the previous data to write QImode if it is in a
4164 vector mode. */
4165 if (VECTOR_MODE_P (prev_mode) && mode == QImode)
4166 return target;
4167
4168 rtx prev_rtx = prev->data;
4169
4170 if (REG_P (prev_rtx)
4171 && HARD_REGISTER_P (prev_rtx)
4172 && lowpart_subreg_regno (REGNO (prev_rtx), prev_mode, mode) < 0)
4173 {
4174 /* This case occurs when PREV_MODE is a vector and when
4175 MODE is too small to store using vector operations.
4176 After register allocation, the code will need to move the
4177 lowpart of the vector register into a non-vector register.
4178
4179 Also, the target has chosen to use a hard register
4180 instead of going with the default choice of using a
4181 pseudo register. We should respect that choice and try to
4182 avoid creating a pseudo register with the same mode as the
4183 current hard register.
4184
4185 In principle, we could just use a lowpart MODE subreg of
4186 the vector register. However, the vector register mode might
4187 be too wide for non-vector registers, and we already know
4188 that the non-vector mode is too small for vector registers.
4189 It's therefore likely that we'd need to spill to memory in
4190 the vector mode and reload the non-vector value from there.
4191
4192 Try to avoid that by reducing the vector register to the
4193 smallest size that it can hold. This should increase the
4194 chances that non-vector registers can hold both the inner
4195 and outer modes of the subreg that we generate later. */
4196 machine_mode m;
4197 fixed_size_mode candidate;
4198 FOR_EACH_MODE_IN_CLASS (m, GET_MODE_CLASS (mode))
4199 if (is_a<fixed_size_mode> (m, result: &candidate))
4200 {
4201 if (GET_MODE_SIZE (mode: candidate)
4202 >= GET_MODE_SIZE (mode: prev_mode))
4203 break;
4204 if (GET_MODE_SIZE (mode: candidate) >= GET_MODE_SIZE (mode)
4205 && lowpart_subreg_regno (REGNO (prev_rtx),
4206 prev_mode, candidate) >= 0)
4207 {
4208 target = lowpart_subreg (outermode: candidate, op: prev_rtx,
4209 innermode: prev_mode);
4210 prev_rtx = target;
4211 prev_mode = candidate;
4212 break;
4213 }
4214 }
4215 if (target == nullptr)
4216 prev_rtx = copy_to_reg (prev_rtx);
4217 }
4218
4219 target = lowpart_subreg (outermode: mode, op: prev_rtx, innermode: prev_mode);
4220 }
4221 return target;
4222}
4223
4224/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4225 bytes from constant string DATA + OFFSET and return it as target
4226 constant. If PREV isn't nullptr, it has the RTL info from the
4227 previous iteration. */
4228
4229rtx
4230builtin_memset_read_str (void *data, void *prev,
4231 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4232 fixed_size_mode mode)
4233{
4234 const char *c = (const char *) data;
4235 unsigned int size = GET_MODE_SIZE (mode);
4236
4237 rtx target = gen_memset_value_from_prev (prev: (by_pieces_prev *) prev,
4238 mode);
4239 if (target != nullptr)
4240 return target;
4241 rtx src = gen_int_mode (*c, QImode);
4242
4243 if (VECTOR_MODE_P (mode))
4244 {
4245 gcc_assert (GET_MODE_INNER (mode) == QImode);
4246
4247 rtx const_vec = gen_const_vec_duplicate (mode, src);
4248 if (prev == NULL)
4249 /* Return CONST_VECTOR when called by a query function. */
4250 return const_vec;
4251
4252 /* Use the move expander with CONST_VECTOR. */
4253 target = gen_reg_rtx (mode);
4254 emit_move_insn (target, const_vec);
4255 return target;
4256 }
4257
4258 char *p = XALLOCAVEC (char, size);
4259
4260 memset (s: p, c: *c, n: size);
4261
4262 return c_readstr (str: p, mode);
4263}
4264
4265/* Callback routine for store_by_pieces. Return the RTL of a register
4266 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4267 char value given in the RTL register data. For example, if mode is
4268 4 bytes wide, return the RTL for 0x01010101*data. If PREV isn't
4269 nullptr, it has the RTL info from the previous iteration. */
4270
4271static rtx
4272builtin_memset_gen_str (void *data, void *prev,
4273 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4274 fixed_size_mode mode)
4275{
4276 rtx target, coeff;
4277 size_t size;
4278 char *p;
4279
4280 size = GET_MODE_SIZE (mode);
4281 if (size == 1)
4282 return (rtx) data;
4283
4284 target = gen_memset_value_from_prev (prev: (by_pieces_prev *) prev, mode);
4285 if (target != nullptr)
4286 return target;
4287
4288 if (VECTOR_MODE_P (mode))
4289 {
4290 gcc_assert (GET_MODE_INNER (mode) == QImode);
4291
4292 /* vec_duplicate_optab is a precondition to pick a vector mode for
4293 the memset expander. */
4294 insn_code icode = optab_handler (op: vec_duplicate_optab, mode);
4295
4296 target = gen_reg_rtx (mode);
4297 class expand_operand ops[2];
4298 create_output_operand (op: &ops[0], x: target, mode);
4299 create_input_operand (op: &ops[1], value: (rtx) data, QImode);
4300 expand_insn (icode, nops: 2, ops);
4301 if (!rtx_equal_p (target, ops[0].value))
4302 emit_move_insn (target, ops[0].value);
4303
4304 return target;
4305 }
4306
4307 p = XALLOCAVEC (char, size);
4308 memset (s: p, c: 1, n: size);
4309 coeff = c_readstr (str: p, mode);
4310
4311 target = convert_to_mode (mode, (rtx) data, 1);
4312 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4313 return force_reg (mode, target);
4314}
4315
4316/* Expand expression EXP, which is a call to the memset builtin. Return
4317 NULL_RTX if we failed, in which case the caller should emit a normal call; otherwise
4318 try to get the result in TARGET, if convenient (and in mode MODE if that's
4319 convenient). */
4320
4321rtx
4322expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4323{
4324 if (!validate_arglist (callexpr: exp,
4325 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4326 return NULL_RTX;
4327
4328 tree dest = CALL_EXPR_ARG (exp, 0);
4329 tree val = CALL_EXPR_ARG (exp, 1);
4330 tree len = CALL_EXPR_ARG (exp, 2);
4331
4332 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4333}
4334
4335/* Check that store_by_pieces allows BITS + LEN (so that we don't
4336 expand something too unreasonably long), and every power of 2 in
4337 BITS. It is assumed that LEN has already been tested by
4338 itself. */
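/* A small worked example (illustrative): with BITS == 0xc (stores of 8 and 4
   bytes may be issued) and LEN == 16, this checks can_store_by_pieces for
   BITS + LEN == 28 bytes and then for the individual 4- and 8-byte pieces.  */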
4339static bool
4340can_store_by_multiple_pieces (unsigned HOST_WIDE_INT bits,
4341 by_pieces_constfn constfun,
4342 void *constfundata, unsigned int align,
4343 bool memsetp,
4344 unsigned HOST_WIDE_INT len)
4345{
4346 if (bits
4347 && !can_store_by_pieces (bits + len, constfun, constfundata,
4348 align, memsetp))
4349 return false;
4350
4351 /* The bits set in BITS are expected to be generally in the low range and
4352 contiguous. We do NOT want to repeat the test above in case BITS
4353 has a single bit set, so we terminate the loop when BITS == BIT.
4354 In the unlikely case that BITS has the MSB set, also terminate in
4355 case BIT gets shifted out. */
4356 for (unsigned HOST_WIDE_INT bit = 1; bit < bits && bit; bit <<= 1)
4357 {
4358 if ((bits & bit) == 0)
4359 continue;
4360
4361 if (!can_store_by_pieces (bit, constfun, constfundata,
4362 align, memsetp))
4363 return false;
4364 }
4365
4366 return true;
4367}
4368
4369/* Try to store VAL (or, if NULL_RTX, VALC) in LEN bytes starting at TO.
4370 Return TRUE if successful, FALSE otherwise. TO is assumed to be
4371 aligned at an ALIGN-bits boundary. LEN must be a multiple of
4372 1<<CTZ_LEN between MIN_LEN and MAX_LEN.
4373
4374 The strategy is to issue one store_by_pieces for each power of two,
4375 from most to least significant, guarded by a test on whether there
4376 are at least that many bytes left to copy in LEN.
4377
4378 ??? Should we skip some powers of two in favor of loops? Maybe start
4379 at the max of TO/LEN/word alignment, at least when optimizing for
4380 size, instead of ensuring O(log len) dynamic compares? */
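/* A rough illustration (assumed figures, not from the sources): if LEN is
   known to lie in [8, 24] and to be a multiple of 8 (CTZ_LEN == 3), and no
   fixed-size prefix block is used, the code below emits a store of 16 bytes
   guarded by a test on the remaining length, followed by a similarly guarded
   store of 8 bytes, covering the possible lengths 8, 16 and 24.  */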
4381
4382bool
4383try_store_by_multiple_pieces (rtx to, rtx len, unsigned int ctz_len,
4384 unsigned HOST_WIDE_INT min_len,
4385 unsigned HOST_WIDE_INT max_len,
4386 rtx val, char valc, unsigned int align)
4387{
4388 int max_bits = floor_log2 (x: max_len);
4389 int min_bits = floor_log2 (x: min_len);
4390 int sctz_len = ctz_len;
4391
4392 gcc_checking_assert (sctz_len >= 0);
4393
4394 if (val)
4395 valc = 1;
4396
4397 /* Bits more significant than TST_BITS are part of the shared prefix
4398 in the binary representation of both min_len and max_len. Since
4399 they're identical, we don't need to test them in the loop. */
4400 int tst_bits = (max_bits != min_bits ? max_bits
4401 : floor_log2 (x: max_len ^ min_len));
4402
4403 /* Save the pre-blksize values. */
4404 int orig_max_bits = max_bits;
4405 int orig_tst_bits = tst_bits;
4406
4407 /* Check whether it's profitable to start by storing a fixed BLKSIZE
4408 bytes, to lower max_bits. In the unlikely case of a constant LEN
4409 (implied by identical MAX_LEN and MIN_LEN), we want to issue a
4410 single store_by_pieces, but otherwise, select the minimum multiple
4411 of the ALIGN (in bytes) and of the GCD of the possible LENs that
4412 brings MAX_LEN below 2^TST_BITS, if that's lower than MIN_LEN. */
4413 unsigned HOST_WIDE_INT blksize;
4414 if (max_len > min_len)
4415 {
4416 unsigned HOST_WIDE_INT alrng = MAX (HOST_WIDE_INT_1U << ctz_len,
4417 align / BITS_PER_UNIT);
4418 blksize = max_len - (HOST_WIDE_INT_1U << tst_bits) + alrng;
4419 blksize &= ~(alrng - 1);
4420 }
4421 else if (max_len == min_len)
4422 blksize = max_len;
4423 else
4424 /* Huh, max_len < min_len? Punt. See pr100843.c. */
4425 return false;
4426 if (min_len >= blksize
4427 /* ??? Maybe try smaller fixed-prefix blksizes before
4428 punting? */
4429 && can_store_by_pieces (blksize, builtin_memset_read_str,
4430 &valc, align, true))
4431 {
4432 min_len -= blksize;
4433 min_bits = floor_log2 (x: min_len);
4434 max_len -= blksize;
4435 max_bits = floor_log2 (x: max_len);
4436
4437 tst_bits = (max_bits != min_bits ? max_bits
4438 : floor_log2 (x: max_len ^ min_len));
4439 }
4440 else
4441 blksize = 0;
4442
4443 /* Check that we can use store by pieces for the maximum store count
4444 we may issue (initial fixed-size block, plus conditional
4445 power-of-two-sized stores from max_bits down to ctz_len). */
4446 unsigned HOST_WIDE_INT xlenest = blksize;
4447 if (max_bits >= 0)
4448 xlenest += ((HOST_WIDE_INT_1U << max_bits) * 2
4449 - (HOST_WIDE_INT_1U << ctz_len));
4450 bool max_loop = false;
4451 bool use_store_by_pieces = true;
4452 /* Skip the test in case of overflow in xlenest. It shouldn't
4453 happen because of the way max_bits and blksize are related, but
4454 it doesn't hurt to test. */
4455 if (blksize > xlenest
4456 || !can_store_by_multiple_pieces (bits: xlenest - blksize,
4457 constfun: builtin_memset_read_str,
4458 constfundata: &valc, align, memsetp: true, len: blksize))
4459 {
4460 if (!(flag_inline_stringops & ILSOP_MEMSET))
4461 return false;
4462
4463 for (max_bits = orig_max_bits;
4464 max_bits >= sctz_len;
4465 --max_bits)
4466 {
4467 xlenest = ((HOST_WIDE_INT_1U << max_bits) * 2
4468 - (HOST_WIDE_INT_1U << ctz_len));
4469 /* Check that blksize plus the bits to be stored as blocks
4470 sized at powers of two can be stored by pieces. This is
4471 like the test above, but with smaller max_bits. Skip
4472 orig_max_bits (it would be redundant). Also skip in case
4473 of overflow. */
4474 if (max_bits < orig_max_bits
4475 && xlenest + blksize >= xlenest
4476 && can_store_by_multiple_pieces (bits: xlenest,
4477 constfun: builtin_memset_read_str,
4478 constfundata: &valc, align, memsetp: true, len: blksize))
4479 {
4480 max_loop = true;
4481 break;
4482 }
4483 if (blksize
4484 && can_store_by_multiple_pieces (bits: xlenest,
4485 constfun: builtin_memset_read_str,
4486 constfundata: &valc, align, memsetp: true, len: 0))
4487 {
4488 max_len += blksize;
4489 min_len += blksize;
4490 tst_bits = orig_tst_bits;
4491 blksize = 0;
4492 max_loop = true;
4493 break;
4494 }
4495 if (max_bits == sctz_len)
4496 {
4497 /* We'll get here if can_store_by_pieces refuses to
4498 store even a single QImode. We'll fall back to
4499 QImode stores then. */
4500 if (!sctz_len)
4501 {
4502 blksize = 0;
4503 max_loop = true;
4504 use_store_by_pieces = false;
4505 break;
4506 }
4507 --sctz_len;
4508 --ctz_len;
4509 }
4510 }
4511 if (!max_loop)
4512 return false;
4513 /* If the boundaries are such that min and max may run a
4514 different number of trips in the initial loop, the remainder
4515 need not be between the moduli, so set tst_bits to cover all
4516 bits. Otherwise, if the trip counts are the same, max_len
4517 has the common prefix, and the previously-computed tst_bits
4518 is usable. */
4519 if (max_len >> max_bits > min_len >> max_bits)
4520 tst_bits = max_bits;
4521 }
4522
4523 by_pieces_constfn constfun;
4524 void *constfundata;
4525 if (val)
4526 {
4527 constfun = builtin_memset_gen_str;
4528 constfundata = val = force_reg (TYPE_MODE (unsigned_char_type_node),
4529 val);
4530 }
4531 else
4532 {
4533 constfun = builtin_memset_read_str;
4534 constfundata = &valc;
4535 }
4536
4537 rtx ptr = copy_addr_to_reg (XEXP (to, 0));
4538 rtx rem = copy_to_mode_reg (ptr_mode, convert_to_mode (ptr_mode, len, 0));
4539 to = replace_equiv_address (to, ptr);
4540 set_mem_align (to, align);
4541
4542 if (blksize)
4543 {
4544 to = store_by_pieces (to, blksize,
4545 constfun, constfundata,
4546 align, true,
4547 max_len != 0 ? RETURN_END : RETURN_BEGIN);
4548 if (max_len == 0)
4549 return true;
4550
4551 /* Adjust PTR, TO and REM. Since TO's address is likely
4552 PTR+offset, we have to replace it. */
4553 emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
4554 to = replace_equiv_address (to, ptr);
4555 rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4556 emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4557 }
4558
4559 /* Iterate over power-of-two block sizes from the maximum length to
4560 the least significant bit possibly set in the length. */
4561 for (int i = max_bits; i >= sctz_len; i--)
4562 {
4563 rtx_code_label *loop_label = NULL;
4564 rtx_code_label *label = NULL;
4565
4566 blksize = HOST_WIDE_INT_1U << i;
4567
4568 /* If we're past the bits shared between min_ and max_len, expand
4569 a test on the dynamic length, comparing it with the
4570 BLKSIZE. */
4571 if (i <= tst_bits)
4572 {
4573 label = gen_label_rtx ();
4574 emit_cmp_and_jump_insns (rem, GEN_INT (blksize), LT, NULL,
4575 ptr_mode, 1, label,
4576 prob: profile_probability::even ());
4577 }
4578 /* If we are at a bit that is in the prefix shared by min_ and
4579 max_len, skip the current BLKSIZE if the bit is clear, but do
4580 not skip the loop, even if it doesn't require
4581 prechecking. */
4582 else if ((max_len & blksize) == 0
4583 && !(max_loop && i == max_bits))
4584 continue;
4585
4586 if (max_loop && i == max_bits)
4587 {
4588 loop_label = gen_label_rtx ();
4589 emit_label (loop_label);
4590 /* Since we may run this multiple times, don't assume we
4591 know anything about the offset. */
4592 clear_mem_offset (to);
4593 }
4594
4595 bool update_needed = i != sctz_len || loop_label;
4596 rtx next_ptr = NULL_RTX;
4597 if (!use_store_by_pieces)
4598 {
4599 gcc_checking_assert (blksize == 1);
4600 if (!val)
4601 val = gen_int_mode (valc, QImode);
4602 to = change_address (to, QImode, 0);
4603 emit_move_insn (to, val);
4604 if (update_needed)
4605 next_ptr = plus_constant (GET_MODE (ptr), ptr, blksize);
4606 }
4607 else
4608 {
4609 /* Issue a store of BLKSIZE bytes. */
4610 to = store_by_pieces (to, blksize,
4611 constfun, constfundata,
4612 align, true,
4613 update_needed ? RETURN_END : RETURN_BEGIN);
4614 next_ptr = XEXP (to, 0);
4615 }
4616 /* Adjust REM and PTR, unless this is the last iteration. */
4617 if (update_needed)
4618 {
4619 emit_move_insn (ptr, force_operand (next_ptr, NULL_RTX));
4620 to = replace_equiv_address (to, ptr);
4621 rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4622 emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4623 }
4624
4625 if (loop_label)
4626 emit_cmp_and_jump_insns (rem, GEN_INT (blksize), GE, NULL,
4627 ptr_mode, 1, loop_label,
4628 prob: profile_probability::likely ());
4629
4630 if (label)
4631 {
4632 emit_label (label);
4633
4634 /* Given conditional stores, the offset can no longer be
4635 known, so clear it. */
4636 clear_mem_offset (to);
4637 }
4638 }
4639
4640 return true;
4641}
4642
4643/* Helper function to do the actual work for expand_builtin_memset. The
4644 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4645 so that this can also be called without constructing an actual CALL_EXPR.
4646 The other arguments and return value are the same as for
4647 expand_builtin_memset. */
4648
4649static rtx
4650expand_builtin_memset_args (tree dest, tree val, tree len,
4651 rtx target, machine_mode mode, tree orig_exp)
4652{
4653 tree fndecl, fn;
4654 enum built_in_function fcode;
4655 machine_mode val_mode;
4656 char c;
4657 unsigned int dest_align;
4658 rtx dest_mem, dest_addr, len_rtx;
4659 HOST_WIDE_INT expected_size = -1;
4660 unsigned int expected_align = 0;
4661 unsigned HOST_WIDE_INT min_size;
4662 unsigned HOST_WIDE_INT max_size;
4663 unsigned HOST_WIDE_INT probable_max_size;
4664
4665 dest_align = get_pointer_alignment (exp: dest);
4666
4667 /* If DEST is not a pointer type, don't do this operation in-line. */
4668 if (dest_align == 0)
4669 return NULL_RTX;
4670
4671 if (currently_expanding_gimple_stmt)
4672 stringop_block_profile (currently_expanding_gimple_stmt,
4673 &expected_align, &expected_size);
4674
4675 if (expected_align < dest_align)
4676 expected_align = dest_align;
4677
4678 /* If the LEN parameter is zero, return DEST. */
4679 if (integer_zerop (len))
4680 {
4681 /* Evaluate and ignore VAL in case it has side-effects. */
4682 expand_expr (exp: val, const0_rtx, VOIDmode, modifier: EXPAND_NORMAL);
4683 return expand_expr (exp: dest, target, mode, modifier: EXPAND_NORMAL);
4684 }
4685
4686 /* Stabilize the arguments in case we fail. */
4687 dest = builtin_save_expr (exp: dest);
4688 val = builtin_save_expr (exp: val);
4689 len = builtin_save_expr (exp: len);
4690
4691 len_rtx = expand_normal (exp: len);
4692 determine_block_size (len, len_rtx, min_size: &min_size, max_size: &max_size,
4693 probable_max_size: &probable_max_size);
4694 dest_mem = get_memory_rtx (exp: dest, len);
4695 val_mode = TYPE_MODE (unsigned_char_type_node);
4696
4697 if (TREE_CODE (val) != INTEGER_CST
4698 || target_char_cast (cst: val, p: &c))
4699 {
4700 rtx val_rtx;
4701
4702 val_rtx = expand_normal (exp: val);
4703 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4704
4705 /* Assume that we can memset by pieces if we can store
4706 * the coefficients by pieces (in the required modes).
4707 * We can't pass builtin_memset_gen_str as that emits RTL. */
4708 c = 1;
4709 if (tree_fits_uhwi_p (len)
4710 && can_store_by_pieces (tree_to_uhwi (len),
4711 builtin_memset_read_str, &c, dest_align,
4712 true))
4713 {
4714 val_rtx = force_reg (val_mode, val_rtx);
4715 store_by_pieces (dest_mem, tree_to_uhwi (len),
4716 builtin_memset_gen_str, val_rtx, dest_align,
4717 true, RETURN_BEGIN);
4718 }
4719 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4720 dest_align, expected_align,
4721 expected_size, min_size, max_size,
4722 probable_max_size)
4723 && !try_store_by_multiple_pieces (to: dest_mem, len: len_rtx,
4724 ctz_len: tree_ctz (len),
4725 min_len: min_size, max_len: max_size,
4726 val: val_rtx, valc: 0,
4727 align: dest_align))
4728 goto do_libcall;
4729
4730 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4731 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4732 return dest_mem;
4733 }
4734
4735 if (c)
4736 {
4737 if (tree_fits_uhwi_p (len)
4738 && can_store_by_pieces (tree_to_uhwi (len),
4739 builtin_memset_read_str, &c, dest_align,
4740 true))
4741 store_by_pieces (dest_mem, tree_to_uhwi (len),
4742 builtin_memset_read_str, &c, dest_align, true,
4743 RETURN_BEGIN);
4744 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4745 gen_int_mode (c, val_mode),
4746 dest_align, expected_align,
4747 expected_size, min_size, max_size,
4748 probable_max_size)
4749 && !try_store_by_multiple_pieces (to: dest_mem, len: len_rtx,
4750 ctz_len: tree_ctz (len),
4751 min_len: min_size, max_len: max_size,
4752 NULL_RTX, valc: c,
4753 align: dest_align))
4754 goto do_libcall;
4755
4756 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4757 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4758 return dest_mem;
4759 }
4760
4761 set_mem_align (dest_mem, dest_align);
4762 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4763 CALL_EXPR_TAILCALL (orig_exp)
4764 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4765 expected_align, expected_size,
4766 min_size, max_size,
4767 probable_max_size, tree_ctz (len));
4768
4769 if (dest_addr == 0)
4770 {
4771 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4772 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4773 }
4774
4775 return dest_addr;
4776
4777 do_libcall:
4778 fndecl = get_callee_fndecl (orig_exp);
4779 fcode = DECL_FUNCTION_CODE (decl: fndecl);
4780 if (fcode == BUILT_IN_MEMSET)
4781 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, n: 3,
4782 dest, val, len);
4783 else if (fcode == BUILT_IN_BZERO)
4784 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, n: 2,
4785 dest, len);
4786 else
4787 gcc_unreachable ();
4788 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4789 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4790 return expand_call (fn, target, target == const0_rtx);
4791}
4792
4793/* Expand expression EXP, which is a call to the bzero builtin. Return
4794 NULL_RTX if we failed, in which case the caller should emit a normal call. */
4795
4796static rtx
4797expand_builtin_bzero (tree exp)
4798{
4799 if (!validate_arglist (callexpr: exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4800 return NULL_RTX;
4801
4802 tree dest = CALL_EXPR_ARG (exp, 0);
4803 tree size = CALL_EXPR_ARG (exp, 1);
4804
4805 /* New argument list transforming bzero(ptr x, int y) to
4806 memset(ptr x, int 0, size_t y). This is done this way
4807 so that if it isn't expanded inline, we fall back to
4808 calling bzero instead of memset. */
4809
4810 location_t loc = EXPR_LOCATION (exp);
4811
4812 return expand_builtin_memset_args (dest, integer_zero_node,
4813 len: fold_convert_loc (loc,
4814 size_type_node, size),
4815 const0_rtx, VOIDmode, orig_exp: exp);
4816}
4817
4818/* Try to expand cmpstr operation ICODE with the given operands.
4819 Return the result rtx on success, otherwise return null. */
4820
4821static rtx
4822expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4823 HOST_WIDE_INT align)
4824{
4825 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4826
4827 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4828 target = NULL_RTX;
4829
4830 class expand_operand ops[4];
4831 create_output_operand (op: &ops[0], x: target, mode: insn_mode);
4832 create_fixed_operand (op: &ops[1], x: arg1_rtx);
4833 create_fixed_operand (op: &ops[2], x: arg2_rtx);
4834 create_integer_operand (&ops[3], align);
4835 if (maybe_expand_insn (icode, nops: 4, ops))
4836 return ops[0].value;
4837 return NULL_RTX;
4838}
4839
4840/* Expand expression EXP, which is a call to the memcmp built-in function.
4841 Return NULL_RTX if we failed and the caller should emit a normal call,
4842 otherwise try to get the result in TARGET, if convenient.
4843 RESULT_EQ is true if we can relax the returned value to be either zero
4844 or nonzero, without caring about the sign. */
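/* For instance (illustrative), when the caller only tests

     memcmp (a, b, 16) == 0

   the sign of the result is irrelevant, RESULT_EQ is true, and the block
   comparison below may be expanded more cheaply.  */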
4845
4846static rtx
4847expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4848{
4849 if (!validate_arglist (callexpr: exp,
4850 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4851 return NULL_RTX;
4852
4853 tree arg1 = CALL_EXPR_ARG (exp, 0);
4854 tree arg2 = CALL_EXPR_ARG (exp, 1);
4855 tree len = CALL_EXPR_ARG (exp, 2);
4856
4857 /* Due to the performance benefit, always inline the calls first
4858 when result_eq is false. */
4859 rtx result = NULL_RTX;
4860 enum built_in_function fcode = DECL_FUNCTION_CODE (decl: get_callee_fndecl (exp));
4861 if (!result_eq && fcode != BUILT_IN_BCMP)
4862 {
4863 result = inline_expand_builtin_bytecmp (exp, target);
4864 if (result)
4865 return result;
4866 }
4867
4868 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4869 location_t loc = EXPR_LOCATION (exp);
4870
4871 unsigned int arg1_align = get_pointer_alignment (exp: arg1) / BITS_PER_UNIT;
4872 unsigned int arg2_align = get_pointer_alignment (exp: arg2) / BITS_PER_UNIT;
4873
4874 /* If we don't have POINTER_TYPE, call the function. */
4875 if (arg1_align == 0 || arg2_align == 0)
4876 return NULL_RTX;
4877
4878 rtx arg1_rtx = get_memory_rtx (exp: arg1, len);
4879 rtx arg2_rtx = get_memory_rtx (exp: arg2, len);
4880 rtx len_rtx = expand_normal (exp: fold_convert_loc (loc, sizetype, len));
4881
4882 /* Set MEM_SIZE as appropriate. */
4883 if (CONST_INT_P (len_rtx))
4884 {
4885 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4886 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4887 }
4888
4889 by_pieces_constfn constfn = NULL;
4890
4891 /* Try to get the byte representation of the constant ARG2 (or, only
4892 when the function's result is used for equality to zero, ARG1)
4893 points to, with its byte size in NBYTES. */
4894 unsigned HOST_WIDE_INT nbytes;
4895 const char *rep = getbyterep (arg2, &nbytes);
4896 if (result_eq && rep == NULL)
4897 {
4898 /* For equality to zero the arguments are interchangeable. */
4899 rep = getbyterep (arg1, &nbytes);
4900 if (rep != NULL)
4901 std::swap (a&: arg1_rtx, b&: arg2_rtx);
4902 }
4903
4904 /* If the function's constant bound LEN_RTX is less than or equal
4905 to the byte size of the representation of the constant argument,
4906 and if block move would be done by pieces, we can avoid loading
4907 the bytes from memory and only store the computed constant result. */
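 /* E.g. for memcmp (p, "abcd", 4) the bytes of "abcd" are known at compile
 time, so the constfn can supply them directly instead of loading them. */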
4908 if (rep
4909 && CONST_INT_P (len_rtx)
4910 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
4911 constfn = builtin_memcpy_read_str;
4912
4913 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4914 TREE_TYPE (len), target,
4915 result_eq, constfn,
4916 CONST_CAST (char *, rep),
4917 ctz_len: tree_ctz (len));
4918
4919 if (result)
4920 {
4921 /* Return the value in the proper mode for this function. */
4922 if (GET_MODE (result) == mode)
4923 return result;
4924
4925 if (target != 0)
4926 {
4927 convert_move (target, result, 0);
4928 return target;
4929 }
4930
4931 return convert_to_mode (mode, result, 0);
4932 }
4933
4934 return NULL_RTX;
4935}
4936
4937/* Expand expression EXP, which is a call to the strcmp builtin.  Return
4938 NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
4939 try to get the result in TARGET, if convenient. */
4940
4941static rtx
4942expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4943{
4944 if (!validate_arglist (callexpr: exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4945 return NULL_RTX;
4946
4947 tree arg1 = CALL_EXPR_ARG (exp, 0);
4948 tree arg2 = CALL_EXPR_ARG (exp, 1);
4949
4950 /* Due to the performance benefit, always inline the calls first. */
4951 rtx result = NULL_RTX;
4952 result = inline_expand_builtin_bytecmp (exp, target);
4953 if (result)
4954 return result;
4955
4956 insn_code cmpstr_icode = direct_optab_handler (op: cmpstr_optab, SImode);
4957 insn_code cmpstrn_icode = direct_optab_handler (op: cmpstrn_optab, SImode);
4958 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4959 return NULL_RTX;
4960
4961 unsigned int arg1_align = get_pointer_alignment (exp: arg1) / BITS_PER_UNIT;
4962 unsigned int arg2_align = get_pointer_alignment (exp: arg2) / BITS_PER_UNIT;
4963
4964 /* If we don't know at least byte alignment for both pointers, call the function. */
4965 if (arg1_align == 0 || arg2_align == 0)
4966 return NULL_RTX;
4967
4968 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4969 arg1 = builtin_save_expr (exp: arg1);
4970 arg2 = builtin_save_expr (exp: arg2);
4971
4972 rtx arg1_rtx = get_memory_rtx (exp: arg1, NULL);
4973 rtx arg2_rtx = get_memory_rtx (exp: arg2, NULL);
4974
4975 /* Try to call cmpstrsi. */
4976 if (cmpstr_icode != CODE_FOR_nothing)
4977 result = expand_cmpstr (icode: cmpstr_icode, target, arg1_rtx, arg2_rtx,
4978 MIN (arg1_align, arg2_align));
4979
4980 /* Try to determine at least one length and call cmpstrnsi. */
4981 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4982 {
4983 tree len;
4984 rtx arg3_rtx;
4985
4986 tree len1 = c_strlen (arg: arg1, only_value: 1);
4987 tree len2 = c_strlen (arg: arg2, only_value: 1);
4988
4989 if (len1)
4990 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4991 if (len2)
4992 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4993
4994 /* If we don't have a constant length for the first, use the length
4995 of the second, if we know it. We don't require a constant for
4996 this case; some cost analysis could be done if both are available
4997 but neither is constant. For now, assume they're equally cheap,
4998 unless one has side effects. If both strings have constant lengths,
4999 use the smaller. */
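 /* E.g. when comparing "ab" against "abcd", the result is decided within
 the first 3 bytes ("ab" plus its terminating NUL), so length 3 suffices. */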
5000
5001 if (!len1)
5002 len = len2;
5003 else if (!len2)
5004 len = len1;
5005 else if (TREE_SIDE_EFFECTS (len1))
5006 len = len2;
5007 else if (TREE_SIDE_EFFECTS (len2))
5008 len = len1;
5009 else if (TREE_CODE (len1) != INTEGER_CST)
5010 len = len2;
5011 else if (TREE_CODE (len2) != INTEGER_CST)
5012 len = len1;
5013 else if (tree_int_cst_lt (t1: len1, t2: len2))
5014 len = len1;
5015 else
5016 len = len2;
5017
5018 /* If both arguments have side effects, we cannot optimize. */
5019 if (len && !TREE_SIDE_EFFECTS (len))
5020 {
5021 arg3_rtx = expand_normal (exp: len);
5022 result = expand_cmpstrn_or_cmpmem
5023 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
5024 arg3_rtx, MIN (arg1_align, arg2_align));
5025 }
5026 }
5027
5028 tree fndecl = get_callee_fndecl (exp);
5029 if (result)
5030 {
5031 /* Return the value in the proper mode for this function. */
5032 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5033 if (GET_MODE (result) == mode)
5034 return result;
5035 if (target == 0)
5036 return convert_to_mode (mode, result, 0);
5037 convert_move (target, result, 0);
5038 return target;
5039 }
5040
5041 /* Expand the library call ourselves using a stabilized argument
5042 list to avoid re-evaluating the function's arguments twice. */
5043 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, n: 2, arg1, arg2);
5044 copy_warning (fn, exp);
5045 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5046 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5047 return expand_call (fn, target, target == const0_rtx);
5048}
5049
5050/* Expand expression EXP, which is a call to the strncmp builtin.  Return
5051 NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
5052 try to get the result in TARGET, if convenient. */
5053
5054static rtx
5055expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
5056 ATTRIBUTE_UNUSED machine_mode mode)
5057{
5058 if (!validate_arglist (callexpr: exp,
5059 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5060 return NULL_RTX;
5061
5062 tree arg1 = CALL_EXPR_ARG (exp, 0);
5063 tree arg2 = CALL_EXPR_ARG (exp, 1);
5064 tree arg3 = CALL_EXPR_ARG (exp, 2);
5065
5066 location_t loc = EXPR_LOCATION (exp);
5067 tree len1 = c_strlen (arg: arg1, only_value: 1);
5068 tree len2 = c_strlen (arg: arg2, only_value: 1);
5069
5070 /* Due to the performance benefit, always inline the calls first. */
5071 rtx result = NULL_RTX;
5072 result = inline_expand_builtin_bytecmp (exp, target);
5073 if (result)
5074 return result;
5075
5076 /* If c_strlen can determine an expression for one of the string
5077 lengths, and it doesn't have side effects, then emit cmpstrnsi
5078 using length MIN(strlen(string)+1, arg3). */
5079 insn_code cmpstrn_icode = direct_optab_handler (op: cmpstrn_optab, SImode);
5080 if (cmpstrn_icode == CODE_FOR_nothing)
5081 return NULL_RTX;
5082
5083 tree len;
5084
5085 unsigned int arg1_align = get_pointer_alignment (exp: arg1) / BITS_PER_UNIT;
5086 unsigned int arg2_align = get_pointer_alignment (exp: arg2) / BITS_PER_UNIT;
5087
5088 if (len1)
5089 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
5090 if (len2)
5091 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
5092
5093 tree len3 = fold_convert_loc (loc, sizetype, arg3);
5094
5095 /* If we don't have a constant length for the first, use the length
5096 of the second, if we know it. If neither string is constant length,
5097 use the given length argument. We don't require a constant for
5098 this case; some cost analysis could be done if both are available
5099 but neither is constant. For now, assume they're equally cheap,
5100 unless one has side effects. If both strings have constant lengths,
5101 use the smaller. */
5102
5103 if (!len1 && !len2)
5104 len = len3;
5105 else if (!len1)
5106 len = len2;
5107 else if (!len2)
5108 len = len1;
5109 else if (TREE_SIDE_EFFECTS (len1))
5110 len = len2;
5111 else if (TREE_SIDE_EFFECTS (len2))
5112 len = len1;
5113 else if (TREE_CODE (len1) != INTEGER_CST)
5114 len = len2;
5115 else if (TREE_CODE (len2) != INTEGER_CST)
5116 len = len1;
5117 else if (tree_int_cst_lt (t1: len1, t2: len2))
5118 len = len1;
5119 else
5120 len = len2;
5121
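 /* E.g. for strncmp (s, "abc", 16) we get len = 4 (strlen plus 1), and the
 comparison length below becomes MIN (4, 16) == 4. */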
5122 /* If we are not using the given length, we must incorporate it here.
5123 The actual new length parameter will be MIN(len,arg3) in this case. */
5124 if (len != len3)
5125 {
5126 len = fold_convert_loc (loc, sizetype, len);
5127 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
5128 }
5129 rtx arg1_rtx = get_memory_rtx (exp: arg1, len);
5130 rtx arg2_rtx = get_memory_rtx (exp: arg2, len);
5131 rtx arg3_rtx = expand_normal (exp: len);
5132 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
5133 arg2_rtx, TREE_TYPE (len), arg3_rtx,
5134 MIN (arg1_align, arg2_align));
5135
5136 tree fndecl = get_callee_fndecl (exp);
5137 if (result)
5138 {
5139 /* Return the value in the proper mode for this function. */
5140 mode = TYPE_MODE (TREE_TYPE (exp));
5141 if (GET_MODE (result) == mode)
5142 return result;
5143 if (target == 0)
5144 return convert_to_mode (mode, result, 0);
5145 convert_move (target, result, 0);
5146 return target;
5147 }
5148
5149 /* Expand the library call ourselves using a stabilized argument
5150 list to avoid re-evaluating the function's arguments twice. */
5151 tree call = build_call_nofold_loc (loc, fndecl, n: 3, arg1, arg2, len);
5152 copy_warning (call, exp);
5153 gcc_assert (TREE_CODE (call) == CALL_EXPR);
5154 CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
5155 return expand_call (call, target, target == const0_rtx);
5156}
5157
5158/* Expand a call to __builtin_saveregs, generating the result in TARGET,
5159 if that's convenient. */
5160
5161rtx
5162expand_builtin_saveregs (void)
5163{
5164 rtx val;
5165 rtx_insn *seq;
5166
5167 /* Don't do __builtin_saveregs more than once in a function.
5168 Save the result of the first call and reuse it. */
5169 if (saveregs_value != 0)
5170 return saveregs_value;
5171
5172 /* When this function is called, it means that registers must be
5173 saved on entry to this function. So we migrate the call to the
5174 first insn of this function. */
5175
5176 start_sequence ();
5177
5178 /* Do whatever the machine needs done in this case. */
5179 val = targetm.calls.expand_builtin_saveregs ();
5180
5181 seq = end_sequence ();
5182
5183 saveregs_value = val;
5184
5185 /* Put the insns after the NOTE that starts the function. If this
5186 is inside a start_sequence, make the outer-level insn chain current, so
5187 the code is placed at the start of the function. */
5188 push_topmost_sequence ();
5189 emit_insn_after (seq, entry_of_function ());
5190 pop_topmost_sequence ();
5191
5192 return val;
5193}
5194
5195/* Expand a call to __builtin_next_arg. */
5196
5197static rtx
5198expand_builtin_next_arg (void)
5199{
5200 /* Checking of arguments is already done in fold_builtin_next_arg,
5201 which must be called before this function. */
5202 return expand_binop (ptr_mode, add_optab,
5203 crtl->args.internal_arg_pointer,
5204 crtl->args.arg_offset_rtx,
5205 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5206}
5207
5208/* Make it easier for the backends by protecting the valist argument
5209 from multiple evaluations. */
5210
5211static tree
5212stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
5213{
5214 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5215
5216 /* The current way of determining the type of valist is completely
5217 bogus. We should have the information on the va builtin instead. */
5218 if (!vatype)
5219 vatype = targetm.fn_abi_va_list (cfun->decl);
5220
5221 if (TREE_CODE (vatype) == ARRAY_TYPE)
5222 {
5223 if (TREE_SIDE_EFFECTS (valist))
5224 valist = save_expr (valist);
5225
5226 /* For this case, the backends will be expecting a pointer to
5227 vatype, but it's possible we've actually been given an array
5228 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5229 So fix it. */
5230 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5231 {
5232 tree p1 = build_pointer_type (TREE_TYPE (vatype));
5233 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
5234 }
5235 }
5236 else
5237 {
5238 tree pt = build_pointer_type (vatype);
5239
5240 if (! needs_lvalue)
5241 {
5242 if (! TREE_SIDE_EFFECTS (valist))
5243 return valist;
5244
5245 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
5246 TREE_SIDE_EFFECTS (valist) = 1;
5247 }
5248
5249 if (TREE_SIDE_EFFECTS (valist))
5250 valist = save_expr (valist);
5251 valist = fold_build2_loc (loc, MEM_REF,
5252 vatype, valist, build_int_cst (pt, 0));
5253 }
5254
5255 return valist;
5256}
5257
5258/* The "standard" definition of va_list is void*. */
5259
5260tree
5261std_build_builtin_va_list (void)
5262{
5263 return ptr_type_node;
5264}
5265
5266/* The "standard" abi va_list is va_list_type_node. */
5267
5268tree
5269std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5270{
5271 return va_list_type_node;
5272}
5273
5274/* The "standard" type of va_list is va_list_type_node. */
5275
5276tree
5277std_canonical_va_list_type (tree type)
5278{
5279 tree wtype, htype;
5280
5281 wtype = va_list_type_node;
5282 htype = type;
5283
5284 if (TREE_CODE (wtype) == ARRAY_TYPE)
5285 {
5286 /* If va_list is an array type, the argument may have decayed
5287 to a pointer type, e.g. by being passed to another function.
5288 In that case, unwrap both types so that we can compare the
5289 underlying records. */
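 /* E.g. on x86_64 va_list is a one-element array of structs, so a va_list
 function argument arrives as a pointer to that struct. */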
5290 if (TREE_CODE (htype) == ARRAY_TYPE
5291 || POINTER_TYPE_P (htype))
5292 {
5293 wtype = TREE_TYPE (wtype);
5294 htype = TREE_TYPE (htype);
5295 }
5296 }
5297 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5298 return va_list_type_node;
5299
5300 return NULL_TREE;
5301}
5302
5303/* The "standard" implementation of va_start: just assign `nextarg' to
5304 the variable. */
5305
5306void
5307std_expand_builtin_va_start (tree valist, rtx nextarg)
5308{
5309 rtx va_r = expand_expr (exp: valist, NULL_RTX, VOIDmode, modifier: EXPAND_WRITE);
5310 convert_move (va_r, nextarg, 0);
5311}
5312
5313/* Expand EXP, a call to __builtin_va_start. */
5314
5315static rtx
5316expand_builtin_va_start (tree exp)
5317{
5318 rtx nextarg;
5319 tree valist;
5320 location_t loc = EXPR_LOCATION (exp);
5321
5322 if (call_expr_nargs (exp) < 2)
5323 {
5324 error_at (loc, "too few arguments to function %<va_start%>");
5325 return const0_rtx;
5326 }
5327
5328 if (fold_builtin_next_arg (exp, true))
5329 return const0_rtx;
5330
5331 nextarg = expand_builtin_next_arg ();
5332 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), needs_lvalue: 1);
5333
5334 if (targetm.expand_builtin_va_start)
5335 targetm.expand_builtin_va_start (valist, nextarg);
5336 else
5337 std_expand_builtin_va_start (valist, nextarg);
5338
5339 return const0_rtx;
5340}
5341
5342/* Expand EXP, a call to __builtin_va_end. */
5343
5344static rtx
5345expand_builtin_va_end (tree exp)
5346{
5347 tree valist = CALL_EXPR_ARG (exp, 0);
5348
5349 /* Evaluate for side effects, if needed. I hate macros that don't
5350 do that. */
5351 if (TREE_SIDE_EFFECTS (valist))
5352 expand_expr (exp: valist, const0_rtx, VOIDmode, modifier: EXPAND_NORMAL);
5353
5354 return const0_rtx;
5355}
5356
5357/* Expand EXP, a call to __builtin_va_copy. We do this as a
5358 builtin rather than just as an assignment in stdarg.h because of the
5359 nastiness of array-type va_list types. */
5360
5361static rtx
5362expand_builtin_va_copy (tree exp)
5363{
5364 tree dst, src, t;
5365 location_t loc = EXPR_LOCATION (exp);
5366
5367 dst = CALL_EXPR_ARG (exp, 0);
5368 src = CALL_EXPR_ARG (exp, 1);
5369
5370 dst = stabilize_va_list_loc (loc, valist: dst, needs_lvalue: 1);
5371 src = stabilize_va_list_loc (loc, valist: src, needs_lvalue: 0);
5372
5373 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5374
5375 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5376 {
5377 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5378 TREE_SIDE_EFFECTS (t) = 1;
5379 expand_expr (exp: t, const0_rtx, VOIDmode, modifier: EXPAND_NORMAL);
5380 }
5381 else
5382 {
5383 rtx dstb, srcb, size;
5384
5385 /* Evaluate to pointers. */
5386 dstb = expand_expr (exp: dst, NULL_RTX, Pmode, modifier: EXPAND_NORMAL);
5387 srcb = expand_expr (exp: src, NULL_RTX, Pmode, modifier: EXPAND_NORMAL);
5388 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5389 NULL_RTX, VOIDmode, modifier: EXPAND_NORMAL);
5390
5391 dstb = convert_memory_address (Pmode, dstb);
5392 srcb = convert_memory_address (Pmode, srcb);
5393
5394 /* "Dereference" to BLKmode memories. */
5395 dstb = gen_rtx_MEM (BLKmode, dstb);
5396 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5397 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5398 srcb = gen_rtx_MEM (BLKmode, srcb);
5399 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5400 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5401
5402 /* Copy. */
5403 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5404 }
5405
5406 return const0_rtx;
5407}
5408
5409/* Expand a call to one of the builtin functions __builtin_frame_address or
5410 __builtin_return_address. */
5411
5412static rtx
5413expand_builtin_frame_address (tree fndecl, tree exp)
5414{
5415 /* The argument must be a nonnegative integer constant.
5416 It counts the number of frames to scan up the stack.
5417 The value is either the frame pointer value or the return
5418 address saved in that frame. */
5419 if (call_expr_nargs (exp) == 0)
5420 /* Warning about missing arg was already issued. */
5421 return const0_rtx;
5422 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5423 {
5424 error ("invalid argument to %qD", fndecl);
5425 return const0_rtx;
5426 }
5427 else
5428 {
5429 /* Number of frames to scan up the stack. */
5430 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5431
5432 rtx tem = expand_builtin_return_addr (fndecl_code: DECL_FUNCTION_CODE (decl: fndecl), count);
5433
5434 /* Some ports cannot access arbitrary stack frames. */
5435 if (tem == NULL)
5436 {
5437 warning (0, "unsupported argument to %qD", fndecl);
5438 return const0_rtx;
5439 }
5440
5441 if (count)
5442 {
5443 /* Warn since no effort is made to ensure that any frame
5444 beyond the current one exists or can be safely reached. */
5445 warning (OPT_Wframe_address, "calling %qD with "
5446 "a nonzero argument is unsafe", fndecl);
5447 }
5448
5449 /* For __builtin_frame_address, return what we've got. */
5450 if (DECL_FUNCTION_CODE (decl: fndecl) == BUILT_IN_FRAME_ADDRESS)
5451 return tem;
5452
5453 if (!REG_P (tem)
5454 && ! CONSTANT_P (tem))
5455 tem = copy_addr_to_reg (tem);
5456 return tem;
5457 }
5458}
5459
5460#if ! STACK_GROWS_DOWNWARD
5461# define STACK_TOPS GT
5462#else
5463# define STACK_TOPS LT
5464#endif
5465
5466#ifdef POINTERS_EXTEND_UNSIGNED
5467# define STACK_UNSIGNED POINTERS_EXTEND_UNSIGNED
5468#else
5469# define STACK_UNSIGNED true
5470#endif
5471
5472/* Expand a call to builtin function __builtin_stack_address. */
5473
5474static rtx
5475expand_builtin_stack_address ()
5476{
5477 rtx ret = convert_to_mode (ptr_mode, copy_to_reg (stack_pointer_rtx),
5478 STACK_UNSIGNED);
5479
5480#ifdef STACK_ADDRESS_OFFSET
5481 /* Unbias the stack pointer, bringing it to the boundary between the
5482 stack area claimed by the active function calling this builtin,
5483 and stack ranges that could get clobbered if it called another
5484 function. It should NOT encompass any stack red zone, that is
5485 used in leaf functions.
5486
5487 On SPARC, the register save area is *not* considered active or
5488 used by the active function, but rather as akin to the area in
5489 which call-preserved registers are saved by callees. This
5490 enables __strub_leave to clear what would otherwise overlap with
5491 its own register save area.
5492
5493 If the address is computed too high or too low, parts of a stack
5494 range that should be scrubbed may be left unscrubbed, scrubbing
5495 may corrupt active portions of the stack frame, and stack ranges
5496 may be doubly-scrubbed by caller and callee.
5497
5498 In order for it to be just right, the area delimited by
5499 @code{__builtin_stack_address} and @code{__builtin_frame_address
5500 (0)} should encompass caller's registers saved by the function,
5501 local on-stack variables and @code{alloca} stack areas.
5502 Accumulated outgoing on-stack arguments, preallocated as part of
5503 a function's own prologue, are to be regarded as part of the
5504 (caller) function's active area as well, whereas those pushed or
5505 allocated temporarily for a call are regarded as part of the
5506 callee's stack range, rather than the caller's. */
5507 ret = plus_constant (ptr_mode, ret, STACK_ADDRESS_OFFSET);
5508#endif
5509
5510 return force_reg (ptr_mode, ret);
5511}
5512
5513/* Expand a call to builtin function __builtin_strub_enter. */
5514
5515static rtx
5516expand_builtin_strub_enter (tree exp)
5517{
5518 if (!validate_arglist (callexpr: exp, POINTER_TYPE, VOID_TYPE))
5519 return NULL_RTX;
5520
5521 if (optimize < 1 || flag_no_inline)
5522 return NULL_RTX;
5523
5524 rtx stktop = expand_builtin_stack_address ();
5525
5526 tree wmptr = CALL_EXPR_ARG (exp, 0);
5527 tree wmtype = TREE_TYPE (TREE_TYPE (wmptr));
5528 tree wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
5529 build_int_cst (TREE_TYPE (wmptr), 0));
5530 rtx wmark = expand_expr (exp: wmtree, NULL_RTX, mode: ptr_mode, modifier: EXPAND_MEMORY);
5531
5532 emit_move_insn (wmark, stktop);
5533
5534 return const0_rtx;
5535}
5536
5537/* Expand a call to builtin function __builtin_strub_update. */
5538
5539static rtx
5540expand_builtin_strub_update (tree exp)
5541{
5542 if (!validate_arglist (callexpr: exp, POINTER_TYPE, VOID_TYPE))
5543 return NULL_RTX;
5544
5545 if (optimize < 2 || flag_no_inline)
5546 return NULL_RTX;
5547
5548 rtx stktop = expand_builtin_stack_address ();
5549
5550#ifdef RED_ZONE_SIZE
5551 /* Here's how the strub enter, update and leave functions deal with red zones.
5552
5553 If it weren't for red zones, update, called from within a strub context,
5554 would bump the watermark to the top of the stack. Enter and leave, running
5555 in the caller, would use the caller's top of stack address both to
5556 initialize the watermark passed to the callee, and to start strubbing the
5557 stack afterwards.
5558
5559 Ideally, we'd update the watermark so as to cover the used amount of red
5560 zone, and strub starting at the caller's other end of the (presumably
5561 unused) red zone. Normally, only leaf functions use the red zone, but at
5562 this point we can't tell whether a function is a leaf, nor can we tell how
5563 much of the red zone it uses. Furthermore, some strub contexts may have
5564 been inlined so that update and leave are called from the same stack frame,
5565 and the strub builtins may all have been inlined, turning a strub function
5566 into a leaf.
5567
5568 So cleaning the range from the caller's stack pointer (one end of the red
5569 zone) to the (potentially inlined) callee's (other end of the) red zone
5570 could scribble over the caller's own red zone.
5571
5572 We avoid this possibility by arranging for callers that are strub contexts
5573 to use their own watermark as the strub starting point. So, if A calls B,
5574 and B calls C, B will tell A to strub up to the end of B's red zone, and
5575 will strub itself only the part of C's stack frame and red zone that
5576 doesn't overlap with B's. With that, we don't need to know who's leaf and
5577 who isn't: inlined calls will shrink their strub window to zero, each
5578 remaining call will strub some portion of the stack, and eventually the
5579 strub context will return to a caller that isn't a strub context itself,
5580 that will therefore use its own stack pointer as the strub starting point.
5581 It's not a leaf, because strub contexts can't be inlined into non-strub
5582 contexts, so it doesn't use the red zone, and it will therefore correctly
5583 strub up the callee's stack frame up to the end of the callee's red zone.
5584 Neat! */
5585 if (true /* (flags_from_decl_or_type (current_function_decl) & ECF_LEAF) */)
5586 {
5587 poly_int64 red_zone_size = RED_ZONE_SIZE;
5588#if STACK_GROWS_DOWNWARD
5589 red_zone_size = -red_zone_size;
5590#endif
5591 stktop = plus_constant (ptr_mode, stktop, red_zone_size);
5592 stktop = force_reg (ptr_mode, stktop);
5593 }
5594#endif
5595
5596 tree wmptr = CALL_EXPR_ARG (exp, 0);
5597 tree wmtype = TREE_TYPE (TREE_TYPE (wmptr));
5598 tree wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
5599 build_int_cst (TREE_TYPE (wmptr), 0));
5600 rtx wmark = expand_expr (exp: wmtree, NULL_RTX, mode: ptr_mode, modifier: EXPAND_MEMORY);
5601
5602 rtx wmarkr = force_reg (ptr_mode, wmark);
5603
5604 rtx_code_label *lab = gen_label_rtx ();
5605 do_compare_rtx_and_jump (stktop, wmarkr, STACK_TOPS, STACK_UNSIGNED,
5606 ptr_mode, NULL_RTX, lab, NULL,
5607 profile_probability::very_likely ());
5608 emit_move_insn (wmark, stktop);
5609
5610 /* If this is an inlined strub function, also bump the watermark for the
5611 enclosing function. This avoids a problem with the following scenario: A
5612 calls B and B calls C, and both B and C get inlined into A. B allocates
5613 temporary stack space before calling C. If we don't update A's watermark,
5614 we may use an outdated baseline for the post-C strub_leave, erasing B's
5615 temporary stack allocation. We only need this if we're fully expanding
5616 strub_leave inline. */
5617 tree xwmptr = (optimize > 2
5618 ? strub_watermark_parm (fndecl: current_function_decl)
5619 : wmptr);
5620 if (wmptr != xwmptr)
5621 {
5622 wmptr = xwmptr;
5623 wmtype = TREE_TYPE (TREE_TYPE (wmptr));
5624 wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
5625 build_int_cst (TREE_TYPE (wmptr), 0));
5626 wmark = expand_expr (exp: wmtree, NULL_RTX, mode: ptr_mode, modifier: EXPAND_MEMORY);
5627 wmarkr = force_reg (ptr_mode, wmark);
5628
5629 do_compare_rtx_and_jump (stktop, wmarkr, STACK_TOPS, STACK_UNSIGNED,
5630 ptr_mode, NULL_RTX, lab, NULL,
5631 profile_probability::very_likely ());
5632 emit_move_insn (wmark, stktop);
5633 }
5634
5635 emit_label (lab);
5636
5637 return const0_rtx;
5638}
5639
5640
5641/* Expand a call to builtin function __builtin_strub_leave. */
5642
5643static rtx
5644expand_builtin_strub_leave (tree exp)
5645{
5646 if (!validate_arglist (callexpr: exp, POINTER_TYPE, VOID_TYPE))
5647 return NULL_RTX;
5648
5649 if (optimize < 2 || optimize_size || flag_no_inline)
5650 return NULL_RTX;
5651
5652 rtx stktop = NULL_RTX;
5653
5654 if (tree wmptr = (optimize
5655 ? strub_watermark_parm (fndecl: current_function_decl)
5656 : NULL_TREE))
5657 {
5658 tree wmtype = TREE_TYPE (TREE_TYPE (wmptr));
5659 tree wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
5660 build_int_cst (TREE_TYPE (wmptr), 0));
5661 rtx wmark = expand_expr (exp: wmtree, NULL_RTX, mode: ptr_mode, modifier: EXPAND_MEMORY);
5662 stktop = force_reg (ptr_mode, wmark);
5663 }
5664
5665 if (!stktop)
5666 stktop = expand_builtin_stack_address ();
5667
5668 tree wmptr = CALL_EXPR_ARG (exp, 0);
5669 tree wmtype = TREE_TYPE (TREE_TYPE (wmptr));
5670 tree wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
5671 build_int_cst (TREE_TYPE (wmptr), 0));
5672 rtx wmark = expand_expr (exp: wmtree, NULL_RTX, mode: ptr_mode, modifier: EXPAND_MEMORY);
5673
5674 rtx wmarkr = force_reg (ptr_mode, wmark);
5675
5676#if ! STACK_GROWS_DOWNWARD
5677 rtx base = stktop;
5678 rtx end = wmarkr;
5679#else
5680 rtx base = wmarkr;
5681 rtx end = stktop;
5682#endif
5683
5684 /* We're going to modify it, so make sure it's not e.g. the stack pointer. */
5685 base = copy_to_reg (base);
5686
5687 rtx_code_label *done = gen_label_rtx ();
5688 do_compare_rtx_and_jump (base, end, LT, STACK_UNSIGNED,
5689 ptr_mode, NULL_RTX, done, NULL,
5690 profile_probability::very_likely ());
5691
5692 if (optimize < 3)
5693 expand_call (exp, NULL_RTX, true);
5694 else
5695 {
5696 /* Ok, now we've determined we want to copy the block, so convert the
5697 addresses to Pmode, as needed to dereference them to access ptr_mode
5698 memory locations, so that we don't have to convert anything within the
5699 loop. */
5700 base = memory_address (ptr_mode, base);
5701 end = memory_address (ptr_mode, end);
5702
5703 rtx zero = force_operand (const0_rtx, NULL_RTX);
5704 int ulen = GET_MODE_SIZE (mode: ptr_mode);
5705
5706 /* ??? It would be nice to use setmem or similar patterns here,
5707 but they do not necessarily obey the stack growth direction,
5708 which has security implications. We also have to avoid calls
5709 (memset, bzero or any machine-specific ones), which are
5710 likely unsafe here (see TARGET_STRUB_MAY_USE_MEMSET). */
5711#if ! STACK_GROWS_DOWNWARD
5712 rtx incr = plus_constant (Pmode, base, ulen);
5713 rtx dstm = gen_rtx_MEM (ptr_mode, base);
5714
5715 rtx_code_label *loop = gen_label_rtx ();
5716 emit_label (loop);
5717 emit_move_insn (dstm, zero);
5718 emit_move_insn (base, force_operand (incr, NULL_RTX));
5719#else
5720 rtx decr = plus_constant (Pmode, end, -ulen);
5721 rtx dstm = gen_rtx_MEM (ptr_mode, end);
5722
5723 rtx_code_label *loop = gen_label_rtx ();
5724 emit_label (loop);
5725 emit_move_insn (end, force_operand (decr, NULL_RTX));
5726 emit_move_insn (dstm, zero);
5727#endif
5728 do_compare_rtx_and_jump (base, end, LT, STACK_UNSIGNED,
5729 Pmode, NULL_RTX, NULL, loop,
5730 profile_probability::very_likely ());
5731 }
5732
5733 emit_label (done);
5734
5735 return const0_rtx;
5736}
5737
5738/* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5739 failed and the caller should emit a normal call. */
5740
5741static rtx
5742expand_builtin_alloca (tree exp)
5743{
5744 rtx op0;
5745 rtx result;
5746 unsigned int align;
5747 tree fndecl = get_callee_fndecl (exp);
5748 HOST_WIDE_INT max_size;
5749 enum built_in_function fcode = DECL_FUNCTION_CODE (decl: fndecl);
5750 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5751 bool valid_arglist
5752 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5753 ? validate_arglist (callexpr: exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5754 VOID_TYPE)
5755 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5756 ? validate_arglist (callexpr: exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5757 : validate_arglist (callexpr: exp, INTEGER_TYPE, VOID_TYPE));
5758
5759 if (!valid_arglist)
5760 return NULL_RTX;
5761
5762 /* Compute the argument. */
5763 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5764
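 /* For __builtin_alloca_with_align the second argument gives the requested
 alignment in bits, e.g. __builtin_alloca_with_align (n, 64) asks for
 8-byte alignment. */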
5765 /* Compute the alignment. */
5766 align = (fcode == BUILT_IN_ALLOCA
5767 ? BIGGEST_ALIGNMENT
5768 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5769
5770 /* Compute the maximum size. */
5771 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5772 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5773 : -1);
5774
5775 /* Allocate the desired space. If the allocation stems from the declaration
5776 of a variable-sized object, it cannot accumulate. */
5777 result
5778 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5779 result = convert_memory_address (ptr_mode, result);
5780
5781 /* Dynamic allocations for variables are recorded during gimplification. */
5782 if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
5783 record_dynamic_alloc (decl_or_exp: exp);
5784
5785 return result;
5786}
5787
5788/* Emit a call to the __asan_allocas_unpoison library function for EXP.
5789 Add virtual_stack_dynamic_rtx - stack_pointer_rtx (the
5790 STACK_DYNAMIC_OFFSET value) to the call's second argument.  See the
5791 comment on handle_builtin_stack_restore for the motivation. */
5792
5793static rtx
5794expand_asan_emit_allocas_unpoison (tree exp)
5795{
5796 tree arg0 = CALL_EXPR_ARG (exp, 0);
5797 tree arg1 = CALL_EXPR_ARG (exp, 1);
5798 rtx top = expand_expr (exp: arg0, NULL_RTX, mode: ptr_mode, modifier: EXPAND_NORMAL);
5799 rtx bot = expand_expr (exp: arg1, NULL_RTX, mode: ptr_mode, modifier: EXPAND_NORMAL);
5800 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5801 stack_pointer_rtx, NULL_RTX, 0,
5802 OPTAB_LIB_WIDEN);
5803 off = convert_modes (mode: ptr_mode, Pmode, x: off, unsignedp: 0);
5804 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5805 OPTAB_LIB_WIDEN);
5806 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5807 ret = emit_library_call_value (fun: ret, NULL_RTX, fn_type: LCT_NORMAL, outmode: ptr_mode,
5808 arg1: top, arg1_mode: ptr_mode, arg2: bot, arg2_mode: ptr_mode);
5809 return ret;
5810}
5811
5812/* Expand a call to bswap builtin in EXP.
5813 Return NULL_RTX if a normal call should be emitted rather than expanding the
5814 function in-line. If convenient, the result should be placed in TARGET.
5815 SUBTARGET may be used as the target for computing one of EXP's operands. */
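/* E.g. __builtin_bswap32 (0x12345678) evaluates to 0x78563412. */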
5816
5817static rtx
5818expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5819 rtx subtarget)
5820{
5821 tree arg;
5822 rtx op0;
5823
5824 if (!validate_arglist (callexpr: exp, INTEGER_TYPE, VOID_TYPE))
5825 return NULL_RTX;
5826
5827 arg = CALL_EXPR_ARG (exp, 0);
5828 op0 = expand_expr (exp: arg,
5829 target: subtarget && GET_MODE (subtarget) == target_mode
5830 ? subtarget : NULL_RTX,
5831 mode: target_mode, modifier: EXPAND_NORMAL);
5832 if (GET_MODE (op0) != target_mode)
5833 op0 = convert_to_mode (target_mode, op0, 1);
5834
5835 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5836
5837 gcc_assert (target);
5838
5839 return convert_to_mode (target_mode, target, 1);
5840}
5841
5842/* Expand a call to a unary builtin in EXP.
5843 Return NULL_RTX if a normal call should be emitted rather than expanding the
5844 function in-line. If convenient, the result should be placed in TARGET.
5845 SUBTARGET may be used as the target for computing one of EXP's operands. */
5846
5847static rtx
5848expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5849 rtx subtarget, optab op_optab)
5850{
5851 rtx op0;
5852
5853 if (!validate_arglist (callexpr: exp, INTEGER_TYPE, VOID_TYPE))
5854 return NULL_RTX;
5855
5856 /* Compute the argument. */
5857 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5858 target: (subtarget
5859 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5860 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5861 VOIDmode, modifier: EXPAND_NORMAL);
5862 /* Compute op, into TARGET if possible.
5863 Set TARGET to wherever the result comes back. */
5864 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5865 op_optab, op0, target, op_optab != clrsb_optab);
5866 gcc_assert (target);
5867
5868 return convert_to_mode (target_mode, target, 0);
5869}
5870
5871/* Expand a call to __builtin_expect. We just return our argument
5872 as the builtin_expect semantic should've been already executed by
5873 tree branch prediction pass. */
5874
5875static rtx
5876expand_builtin_expect (tree exp, rtx target)
5877{
5878 tree arg;
5879
5880 if (call_expr_nargs (exp) < 2)
5881 return const0_rtx;
5882 arg = CALL_EXPR_ARG (exp, 0);
5883
5884 target = expand_expr (exp: arg, target, VOIDmode, modifier: EXPAND_NORMAL);
5885 /* When guessing was done, the hints should be already stripped away. */
5886 gcc_assert (!flag_guess_branch_prob
5887 || optimize == 0 || seen_error ());
5888 return target;
5889}
5890
5891/* Expand a call to __builtin_expect_with_probability. We just return our
5892 argument as the builtin_expect semantic should've been already executed by
5893 tree branch prediction pass. */
5894
5895static rtx
5896expand_builtin_expect_with_probability (tree exp, rtx target)
5897{
5898 tree arg;
5899
5900 if (call_expr_nargs (exp) < 3)
5901 return const0_rtx;
5902 arg = CALL_EXPR_ARG (exp, 0);
5903
5904 target = expand_expr (exp: arg, target, VOIDmode, modifier: EXPAND_NORMAL);
5905 /* When guessing was done, the hints should be already stripped away. */
5906 gcc_assert (!flag_guess_branch_prob
5907 || optimize == 0 || seen_error ());
5908 return target;
5909}
5910
5911
5912/* Expand a call to __builtin_assume_aligned. We just return our first
5913 argument as the builtin_assume_aligned semantic should've been already
5914 executed by CCP. */
5915
5916static rtx
5917expand_builtin_assume_aligned (tree exp, rtx target)
5918{
5919 if (call_expr_nargs (exp) < 2)
5920 return const0_rtx;
5921 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5922 modifier: EXPAND_NORMAL);
5923 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5924 && (call_expr_nargs (exp) < 3
5925 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5926 return target;
5927}
5928
5929void
5930expand_builtin_trap (void)
5931{
5932 if (targetm.have_trap ())
5933 {
5934 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5935 /* For trap insns when not accumulating outgoing args force
5936 REG_ARGS_SIZE note to prevent crossjumping of calls with
5937 different args sizes. */
5938 if (!ACCUMULATE_OUTGOING_ARGS)
5939 add_args_size_note (insn, stack_pointer_delta);
5940 }
5941 else
5942 {
5943 tree fn = builtin_decl_implicit (fncode: BUILT_IN_ABORT);
5944 tree call_expr = build_call_expr (fn, 0);
5945 expand_call (call_expr, NULL_RTX, false);
5946 }
5947
5948 emit_barrier ();
5949}
5950
5951/* Expand a call to __builtin_unreachable. We do nothing except emit
5952 a barrier saying that control flow will not pass here.
5953
5954 It is the responsibility of the program being compiled to ensure
5955 that control flow never reaches __builtin_unreachable. */
5956static void
5957expand_builtin_unreachable (void)
5958{
5959 /* Use gimple_build_builtin_unreachable or builtin_decl_unreachable
5960 to avoid this. */
5961 gcc_checking_assert (!sanitize_flags_p (SANITIZE_UNREACHABLE));
5962 emit_barrier ();
5963}
5964
5965/* Expand EXP, a call to fabs, fabsf or fabsl.
5966 Return NULL_RTX if a normal call should be emitted rather than expanding
5967 the function inline. If convenient, the result should be placed
5968 in TARGET. SUBTARGET may be used as the target for computing
5969 the operand. */
5970
5971static rtx
5972expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5973{
5974 machine_mode mode;
5975 tree arg;
5976 rtx op0;
5977
5978 if (!validate_arglist (callexpr: exp, REAL_TYPE, VOID_TYPE))
5979 return NULL_RTX;
5980
5981 arg = CALL_EXPR_ARG (exp, 0);
5982 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (exp: arg);
5983 mode = TYPE_MODE (TREE_TYPE (arg));
5984 op0 = expand_expr (exp: arg, target: subtarget, VOIDmode, modifier: EXPAND_NORMAL);
5985 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5986}
5987
5988/* Expand EXP, a call to copysign, copysignf, or copysignl.
5989 Return NULL if a normal call should be emitted rather than expanding the
5990 function inline. If convenient, the result should be placed in TARGET.
5991 SUBTARGET may be used as the target for computing the operand. */
5992
5993static rtx
5994expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5995{
5996 rtx op0, op1;
5997 tree arg;
5998
5999 if (!validate_arglist (callexpr: exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
6000 return NULL_RTX;
6001
6002 arg = CALL_EXPR_ARG (exp, 0);
6003 op0 = expand_expr (exp: arg, target: subtarget, VOIDmode, modifier: EXPAND_NORMAL);
6004
6005 arg = CALL_EXPR_ARG (exp, 1);
6006 op1 = expand_normal (exp: arg);
6007
6008 return expand_copysign (op0, op1, target);
6009}
6010
6011/* Emit a call to __builtin___clear_cache. */
6012
6013void
6014default_emit_call_builtin___clear_cache (rtx begin, rtx end)
6015{
6016 rtx callee = gen_rtx_SYMBOL_REF (Pmode,
6017 BUILTIN_ASM_NAME_PTR
6018 (BUILT_IN_CLEAR_CACHE));
6019
6020 emit_library_call (fun: callee,
6021 fn_type: LCT_NORMAL, VOIDmode,
6022 convert_memory_address (ptr_mode, begin), arg1_mode: ptr_mode,
6023 convert_memory_address (ptr_mode, end), arg2_mode: ptr_mode);
6024}
6025
6026/* Emit a call to __builtin___clear_cache, unless the target specifies
6027 it as do-nothing. This function can be used by trampoline
6028 finalizers to duplicate the effects of expanding a call to the
6029 clear_cache builtin. */
6030
6031void
6032maybe_emit_call_builtin___clear_cache (rtx begin, rtx end)
6033{
6034 gcc_assert ((GET_MODE (begin) == ptr_mode || GET_MODE (begin) == Pmode
6035 || CONST_INT_P (begin))
6036 && (GET_MODE (end) == ptr_mode || GET_MODE (end) == Pmode
6037 || CONST_INT_P (end)));
6038
6039 if (targetm.have_clear_cache ())
6040 {
6041 /* We have a "clear_cache" insn, and it will handle everything. */
6042 class expand_operand ops[2];
6043
6044 create_address_operand (op: &ops[0], value: begin);
6045 create_address_operand (op: &ops[1], value: end);
6046
6047 if (maybe_expand_insn (icode: targetm.code_for_clear_cache, nops: 2, ops))
6048 return;
6049 }
6050 else
6051 {
6052#ifndef CLEAR_INSN_CACHE
6053 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
6054 does nothing, so there is nothing for us to do. */
6055 return;
6056#endif /* CLEAR_INSN_CACHE */
6057 }
6058
6059 targetm.calls.emit_call_builtin___clear_cache (begin, end);
6060}
6061
6062/* Expand a call to __builtin___clear_cache. */
6063
6064static void
6065expand_builtin___clear_cache (tree exp)
6066{
6067 tree begin, end;
6068 rtx begin_rtx, end_rtx;
6069
6070 /* We must not expand to a library call. If we did, any
6071 fallback library function in libgcc that might contain a call to
6072 __builtin___clear_cache() would recurse infinitely. */
6073 if (!validate_arglist (callexpr: exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6074 {
6075 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
6076 return;
6077 }
6078
6079 begin = CALL_EXPR_ARG (exp, 0);
6080 begin_rtx = expand_expr (exp: begin, NULL_RTX, Pmode, modifier: EXPAND_NORMAL);
6081
6082 end = CALL_EXPR_ARG (exp, 1);
6083 end_rtx = expand_expr (exp: end, NULL_RTX, Pmode, modifier: EXPAND_NORMAL);
6084
6085 maybe_emit_call_builtin___clear_cache (begin: begin_rtx, end: end_rtx);
6086}
6087
6088/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
6089
6090static rtx
6091round_trampoline_addr (rtx tramp)
6092{
6093 rtx temp, addend, mask;
6094
6095 /* If we don't need too much alignment, we'll have been guaranteed
6096 proper alignment by get_trampoline_type. */
6097 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
6098 return tramp;
6099
6100 /* Round address up to desired boundary. */
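 /* I.e. compute (TRAMP + ALIGN - 1) & -ALIGN, where ALIGN is
 TRAMPOLINE_ALIGNMENT in bytes. */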
6101 temp = gen_reg_rtx (Pmode);
6102 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
6103 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
6104
6105 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
6106 temp, 0, OPTAB_LIB_WIDEN);
6107 tramp = expand_simple_binop (Pmode, AND, temp, mask,
6108 temp, 0, OPTAB_LIB_WIDEN);
6109
6110 return tramp;
6111}
6112
6113static rtx
6114expand_builtin_init_trampoline (tree exp, bool onstack)
6115{
6116 tree t_tramp, t_func, t_chain;
6117 rtx m_tramp, r_tramp, r_chain, tmp;
6118
6119 if (!validate_arglist (callexpr: exp, POINTER_TYPE, POINTER_TYPE,
6120 POINTER_TYPE, VOID_TYPE))
6121 return NULL_RTX;
6122
6123 t_tramp = CALL_EXPR_ARG (exp, 0);
6124 t_func = CALL_EXPR_ARG (exp, 1);
6125 t_chain = CALL_EXPR_ARG (exp, 2);
6126
6127 r_tramp = expand_normal (exp: t_tramp);
6128 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
6129 MEM_NOTRAP_P (m_tramp) = 1;
6130
6131 /* If ONSTACK, the TRAMP argument should be the address of a field
6132 within the local function's FRAME decl. Either way, let's see if
6133 we can fill in the MEM_ATTRs for this memory. */
6134 if (TREE_CODE (t_tramp) == ADDR_EXPR)
6135 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
6136
6137 /* Creator of a heap trampoline is responsible for making sure the
6138 address is aligned to at least STACK_BOUNDARY. Normally malloc
6139 will ensure this anyhow. */
6140 tmp = round_trampoline_addr (tramp: r_tramp);
6141 if (tmp != r_tramp)
6142 {
6143 m_tramp = change_address (m_tramp, BLKmode, tmp);
6144 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
6145 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
6146 }
6147
6148 /* The FUNC argument should be the address of the nested function.
6149 Extract the actual function decl to pass to the hook. */
6150 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
6151 t_func = TREE_OPERAND (t_func, 0);
6152 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
6153
6154 r_chain = expand_normal (exp: t_chain);
6155
6156 /* Generate insns to initialize the trampoline. */
6157 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
6158
6159 if (onstack)
6160 {
6161 trampolines_created = 1;
6162
6163 if (targetm.calls.custom_function_descriptors != 0)
6164 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
6165 "trampoline generated for nested function %qD", t_func);
6166 }
6167
6168 return const0_rtx;
6169}
6170
6171static rtx
6172expand_builtin_adjust_trampoline (tree exp)
6173{
6174 rtx tramp;
6175
6176 if (!validate_arglist (callexpr: exp, POINTER_TYPE, VOID_TYPE))
6177 return NULL_RTX;
6178
6179 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6180 tramp = round_trampoline_addr (tramp);
6181 if (targetm.calls.trampoline_adjust_address)
6182 tramp = targetm.calls.trampoline_adjust_address (tramp);
6183
6184 return tramp;
6185}
6186
6187/* Expand a call to the builtin descriptor initialization routine.
6188 A descriptor is made up of a couple of pointers to the static
6189 chain and the code entry in this order. */
6190
6191static rtx
6192expand_builtin_init_descriptor (tree exp)
6193{
6194 tree t_descr, t_func, t_chain;
6195 rtx m_descr, r_descr, r_func, r_chain;
6196
6197 if (!validate_arglist (callexpr: exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
6198 VOID_TYPE))
6199 return NULL_RTX;
6200
6201 t_descr = CALL_EXPR_ARG (exp, 0);
6202 t_func = CALL_EXPR_ARG (exp, 1);
6203 t_chain = CALL_EXPR_ARG (exp, 2);
6204
6205 r_descr = expand_normal (exp: t_descr);
6206 m_descr = gen_rtx_MEM (BLKmode, r_descr);
6207 MEM_NOTRAP_P (m_descr) = 1;
6208 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
6209
6210 r_func = expand_normal (exp: t_func);
6211 r_chain = expand_normal (exp: t_chain);
6212
6213 /* Generate insns to initialize the descriptor. */
6214 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
6215 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
6216 POINTER_SIZE / BITS_PER_UNIT), r_func);
6217
6218 return const0_rtx;
6219}
6220
6221/* Expand a call to the builtin descriptor adjustment routine. */
6222
6223static rtx
6224expand_builtin_adjust_descriptor (tree exp)
6225{
6226 rtx tramp;
6227
6228 if (!validate_arglist (callexpr: exp, POINTER_TYPE, VOID_TYPE))
6229 return NULL_RTX;
6230
6231 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6232
6233 /* Unalign the descriptor to allow runtime identification. */
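 /* The resulting misaligned value is what lets an indirect call site
 distinguish a descriptor from an ordinary (aligned) code address. */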
6234 tramp = plus_constant (ptr_mode, tramp,
6235 targetm.calls.custom_function_descriptors);
6236
6237 return force_operand (tramp, NULL_RTX);
6238}
6239
6240/* Expand the call EXP to the built-in signbit, signbitf or signbitl
6241 function. The function first checks whether the back end provides
6242 an insn to implement signbit for the respective mode. If not, it
6243 checks whether the floating point format of the value is such that
6244 the sign bit can be extracted. If that is not the case, error out.
6245 EXP is the expression that is a call to the builtin function; if
6246 convenient, the result should be placed in TARGET. */
6247static rtx
6248expand_builtin_signbit (tree exp, rtx target)
6249{
6250 const struct real_format *fmt;
6251 scalar_float_mode fmode;
6252 scalar_int_mode rmode, imode;
6253 tree arg;
6254 int word, bitpos;
6255 enum insn_code icode;
6256 rtx temp;
6257 location_t loc = EXPR_LOCATION (exp);
6258
6259 if (!validate_arglist (callexpr: exp, REAL_TYPE, VOID_TYPE))
6260 return NULL_RTX;
6261
6262 arg = CALL_EXPR_ARG (exp, 0);
6263 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
6264 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
6265 fmt = REAL_MODE_FORMAT (fmode);
6266
6267 arg = builtin_save_expr (exp: arg);
6268
6269 /* Expand the argument yielding a RTX expression. */
6270 temp = expand_normal (exp: arg);
6271
6272 /* Check if the back end provides an insn that handles signbit for the
6273 argument's mode. */
6274 icode = optab_handler (op: signbit_optab, mode: fmode);
6275 if (icode != CODE_FOR_nothing)
6276 {
6277 rtx_insn *last = get_last_insn ();
6278 rtx this_target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6279 if (maybe_emit_unop_insn (icode, this_target, temp, UNKNOWN))
6280 return this_target;
6281 delete_insns_since (last);
6282 }
6283
6284 /* For floating point formats without a sign bit, implement signbit
6285 as "ARG < 0.0". */
6286 bitpos = fmt->signbit_ro;
6287 if (bitpos < 0)
6288 {
6289 /* But we can't do this if the format supports signed zero. */
6290 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
6291
6292 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
6293 build_real (TREE_TYPE (arg), dconst0));
6294 return expand_expr (exp: arg, target, VOIDmode, modifier: EXPAND_NORMAL);
6295 }
6296
6297 if (GET_MODE_SIZE (mode: fmode) <= UNITS_PER_WORD)
6298 {
6299 imode = int_mode_for_mode (fmode).require ();
6300 temp = gen_lowpart (imode, temp);
6301 }
6302 else
6303 {
6304 imode = word_mode;
6305 /* Handle targets with different FP word orders. */
6306 if (FLOAT_WORDS_BIG_ENDIAN)
6307 word = (GET_MODE_BITSIZE (mode: fmode) - bitpos) / BITS_PER_WORD;
6308 else
6309 word = bitpos / BITS_PER_WORD;
6310 temp = operand_subword_force (temp, word, fmode);
6311 bitpos = bitpos % BITS_PER_WORD;
6312 }
6313
6314 /* Force the intermediate word_mode (or narrower) result into a
6315 register. This avoids attempting to create paradoxical SUBREGs
6316 of floating point modes below. */
6317 temp = force_reg (imode, temp);
6318
6319 /* If the bitpos is within the "result mode" lowpart, the operation
6320 can be implemented with a single bitwise AND. Otherwise, we need
6321 a right shift and an AND. */
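 /* E.g. for IEEE single precision with a 32-bit result mode, BITPOS is 31
 and the mask below is 0x80000000. */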
6322
6323 if (bitpos < GET_MODE_BITSIZE (mode: rmode))
6324 {
6325 wide_int mask = wi::set_bit_in_zero (bit: bitpos, precision: GET_MODE_PRECISION (mode: rmode));
6326
6327 if (GET_MODE_SIZE (mode: imode) > GET_MODE_SIZE (mode: rmode))
6328 temp = gen_lowpart (rmode, temp);
6329 temp = expand_binop (rmode, and_optab, temp,
6330 immed_wide_int_const (mask, rmode),
6331 NULL_RTX, 1, OPTAB_LIB_WIDEN);
6332 }
6333 else
6334 {
6335 /* Perform a logical right shift to place the signbit in the least
6336 significant bit, then truncate the result to the desired mode
6337 and mask just this bit. */
6338 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
6339 temp = gen_lowpart (rmode, temp);
6340 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
6341 NULL_RTX, 1, OPTAB_LIB_WIDEN);
6342 }
6343
6344 return temp;
6345}
6346
6347/* Expand fork or exec calls. TARGET is the desired target of the
6348 call. EXP is the call. FN is the
6349 identifier of the actual function. IGNORE is nonzero if the
6350 value is to be ignored. */
6351
6352static rtx
6353expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
6354{
6355 tree id, decl;
6356 tree call;
6357
6358 /* If we are not profiling, just call the function. */
6359 if (!coverage_instrumentation_p ())
6360 return NULL_RTX;
6361
6362 /* Otherwise call the wrapper. This should be equivalent for the rest of
6363 the compiler, so the code does not diverge, and the wrapper may run the
6364 code necessary for keeping the profiling sane. */
6365
6366 switch (DECL_FUNCTION_CODE (decl: fn))
6367 {
6368 case BUILT_IN_FORK:
6369 id = get_identifier ("__gcov_fork");
6370 break;
6371
6372 case BUILT_IN_EXECL:
6373 id = get_identifier ("__gcov_execl");
6374 break;
6375
6376 case BUILT_IN_EXECV:
6377 id = get_identifier ("__gcov_execv");
6378 break;
6379
6380 case BUILT_IN_EXECLP:
6381 id = get_identifier ("__gcov_execlp");
6382 break;
6383
6384 case BUILT_IN_EXECLE:
6385 id = get_identifier ("__gcov_execle");
6386 break;
6387
6388 case BUILT_IN_EXECVP:
6389 id = get_identifier ("__gcov_execvp");
6390 break;
6391
6392 case BUILT_IN_EXECVE:
6393 id = get_identifier ("__gcov_execve");
6394 break;
6395
6396 default:
6397 gcc_unreachable ();
6398 }
6399
6400 decl = build_decl (DECL_SOURCE_LOCATION (fn),
6401 FUNCTION_DECL, id, TREE_TYPE (fn));
6402 DECL_EXTERNAL (decl) = 1;
6403 TREE_PUBLIC (decl) = 1;
6404 DECL_ARTIFICIAL (decl) = 1;
6405 TREE_NOTHROW (decl) = 1;
6406 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
6407 DECL_VISIBILITY_SPECIFIED (decl) = 1;
6408 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
6409 return expand_call (call, target, ignore);
6410}
6411
6412
6413
6414/* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6415 the pointer in these functions is void*, the tree optimizers may remove
6416 casts. The mode computed in expand_builtin isn't reliable either, due
6417 to __sync_bool_compare_and_swap.
6418
6419 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6420 group of builtins. This gives us log2 of the mode size. */
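/* E.g. the ..._4 variants have FCODE_DIFF == 2, giving a mode of
 8 << 2 == 32 bits, i.e. SImode on typical targets. */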
6421
6422static inline machine_mode
6423get_builtin_sync_mode (int fcode_diff)
6424{
6425 /* The size is not negotiable, so ask not to get BLKmode in return
6426 if the target indicates that a smaller size would be better. */
6427 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, limit: 0).require ();
6428}
6429
6430/* Expand the memory expression LOC and return the appropriate memory operand
6431 for the builtin_sync operations. */
6432
6433static rtx
6434get_builtin_sync_mem (tree loc, machine_mode mode)
6435{
6436 rtx addr, mem;
6437 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
6438 ? TREE_TYPE (TREE_TYPE (loc))
6439 : TREE_TYPE (loc));
6440 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
6441
6442 addr = expand_expr (exp: loc, NULL_RTX, mode: addr_mode, modifier: EXPAND_SUM);
6443 addr = convert_memory_address (addr_mode, addr);
6444
6445 /* Note that we explicitly do not want any alias information for this
6446 memory, so that we kill all other live memories. Otherwise we don't
6447 satisfy the full barrier semantics of the intrinsic. */
6448 mem = gen_rtx_MEM (mode, addr);
6449
6450 set_mem_addr_space (mem, addr_space);
6451
6452 mem = validize_mem (mem);
6453
6454 /* The alignment needs to be at least according to that of the mode. */
6455 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
6456 get_pointer_alignment (loc)));
6457 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6458 MEM_VOLATILE_P (mem) = 1;
6459
6460 return mem;
6461}
6462
6463/* Make sure an argument is in the right mode.
6464 EXP is the tree argument.
6465 MODE is the mode it should be in. */
6466
6467static rtx
6468expand_expr_force_mode (tree exp, machine_mode mode)
6469{
6470 rtx val;
6471 machine_mode old_mode;
6472
6473 if (TREE_CODE (exp) == SSA_NAME
6474 && TYPE_MODE (TREE_TYPE (exp)) != mode)
6475 {
6476 /* Undo argument promotion if possible, as combine might not
6477 be able to do it later due to MEM_VOLATILE_P uses in the
6478 patterns. */
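 /* E.g. MODE may be QImode while EXP is an int-typed SSA_NAME created by
 widening a char value; using that original char RHS avoids an explicit
 truncation. */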
6479 gimple *g = get_gimple_for_ssa_name (exp);
6480 if (g && gimple_assign_cast_p (s: g))
6481 {
6482 tree rhs = gimple_assign_rhs1 (gs: g);
6483 tree_code code = gimple_assign_rhs_code (gs: g);
6484 if (CONVERT_EXPR_CODE_P (code)
6485 && TYPE_MODE (TREE_TYPE (rhs)) == mode
6486 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
6487 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
6488 && (TYPE_PRECISION (TREE_TYPE (exp))
6489 > TYPE_PRECISION (TREE_TYPE (rhs))))
6490 exp = rhs;
6491 }
6492 }
6493
6494 val = expand_expr (exp, NULL_RTX, mode, modifier: EXPAND_NORMAL);
6495 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6496 of CONST_INTs, where we know the old_mode only from the call argument. */
6497
6498 old_mode = GET_MODE (val);
6499 if (old_mode == VOIDmode)
6500 old_mode = TYPE_MODE (TREE_TYPE (exp));
6501 val = convert_modes (mode, oldmode: old_mode, x: val, unsignedp: 1);
6502 return val;
6503}
6504
6505
6506/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6507 EXP is the CALL_EXPR. CODE is the rtx code
6508 that corresponds to the arithmetic or logical operation from the name;
6509 an exception here is that NOT actually means NAND. TARGET is an optional
6510 place for us to store the results; AFTER is true if this is the
6511 fetch_and_xxx form. */
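/* E.g. __sync_fetch_and_nand (p, v) atomically performs *p = ~(*p & v) and
 returns the old value; these are the GCC 4.4 semantics the warning below
 refers to. */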
6512
6513static rtx
6514expand_builtin_sync_operation (machine_mode mode, tree exp,
6515 enum rtx_code code, bool after,
6516 rtx target)
6517{
6518 rtx val, mem;
6519 location_t loc = EXPR_LOCATION (exp);
6520
6521 if (code == NOT && warn_sync_nand)
6522 {
6523 tree fndecl = get_callee_fndecl (exp);
6524 enum built_in_function fcode = DECL_FUNCTION_CODE (decl: fndecl);
6525
6526 static bool warned_f_a_n, warned_n_a_f;
6527
6528 switch (fcode)
6529 {
6530 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6531 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6532 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6533 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6534 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6535 if (warned_f_a_n)
6536 break;
6537
6538 fndecl = builtin_decl_implicit (fncode: BUILT_IN_SYNC_FETCH_AND_NAND_N);
6539 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6540 warned_f_a_n = true;
6541 break;
6542
6543 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6544 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6545 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6546 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6547 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6548 if (warned_n_a_f)
6549 break;
6550
6551 fndecl = builtin_decl_implicit (fncode: BUILT_IN_SYNC_NAND_AND_FETCH_N);
6552 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6553 warned_n_a_f = true;
6554 break;
6555
6556 default:
6557 gcc_unreachable ();
6558 }
6559 }
6560
6561 /* Expand the operands. */
6562 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6563 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6564
6565 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
6566 after);
6567}
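/* In source terms, a sketch of the two forms handled above (assuming int X):

     int oldval = __sync_fetch_and_add (&x, 1);    // AFTER == false: old value
     int newval = __sync_add_and_fetch (&x, 1);    // AFTER == true: new value

   Both map onto expand_atomic_fetch_op with MEMMODEL_SYNC_SEQ_CST; only which
   of the two values is returned differs.  */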
6568
6569/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6570 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6571 true if this is the boolean form. TARGET is a place for us to store the
6572 results; this is NOT optional if IS_BOOL is true. */
6573
6574static rtx
6575expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6576 bool is_bool, rtx target)
6577{
6578 rtx old_val, new_val, mem;
6579 rtx *pbool, *poval;
6580
6581 /* Expand the operands. */
6582 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6583 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6584 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6585
6586 pbool = poval = NULL;
6587 if (target != const0_rtx)
6588 {
6589 if (is_bool)
6590 pbool = &target;
6591 else
6592 poval = &target;
6593 }
6594 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
6595 false, MEMMODEL_SYNC_SEQ_CST,
6596 MEMMODEL_SYNC_SEQ_CST))
6597 return NULL_RTX;
6598
6599 return target;
6600}
6601
6602/* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6603 general form is actually an atomic exchange, and some targets only
6604 support a reduced form with the second argument being a constant 1.
6605 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6606 the results. */
6607
6608static rtx
6609expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6610 rtx target)
6611{
6612 rtx val, mem;
6613
6614 /* Expand the operands. */
6615 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6616 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6617
6618 return expand_sync_lock_test_and_set (target, mem, val);
6619}
6620
6621/* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6622
6623static rtx
6624expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6625{
6626 rtx mem;
6627
6628 /* Expand the operands. */
6629 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6630
6631 return expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
6632}
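/* In source terms, __sync_lock_release (&lock) is simply an atomic store of 0
   with release semantics, which is what the expansion above emits
   (expand_atomic_store with const0_rtx and MEMMODEL_SYNC_RELEASE).  */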
6633
6634/* Given an integer representing an ``enum memmodel'', verify its
6635 correctness and return the memory model enum. */
6636
6637static enum memmodel
6638get_memmodel (tree exp)
6639{
6640 /* If the parameter is not a constant, it's a run time value so we'll just
6641 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6642 if (TREE_CODE (exp) != INTEGER_CST)
6643 return MEMMODEL_SEQ_CST;
6644
6645 rtx op = expand_normal (exp);
6646
6647 unsigned HOST_WIDE_INT val = INTVAL (op);
6648 if (targetm.memmodel_check)
6649 val = targetm.memmodel_check (val);
6650 else if (val & ~MEMMODEL_MASK)
6651 return MEMMODEL_SEQ_CST;
6652
6653 /* Should never see a user-specified SYNC memory model, so >= LAST works. */
6654 if (memmodel_base (val) >= MEMMODEL_LAST)
6655 return MEMMODEL_SEQ_CST;
6656
6657 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6658 be conservative and promote consume to acquire. */
6659 if (val == MEMMODEL_CONSUME)
6660 val = MEMMODEL_ACQUIRE;
6661
6662 return (enum memmodel) val;
6663}
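/* The argument values follow the __ATOMIC_* macros (and C11 memory_order):
   0 relaxed, 1 consume, 2 acquire, 3 release, 4 acq_rel, 5 seq_cst.  For
   example, __atomic_load_n (p, __ATOMIC_CONSUME) is treated here as if
   __ATOMIC_ACQUIRE had been written, and any out-of-range constant falls back
   to MEMMODEL_SEQ_CST.  */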
6664
6665/* Expand the __atomic_exchange intrinsic:
6666 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6667 EXP is the CALL_EXPR.
6668 TARGET is an optional place for us to store the results. */
6669
6670static rtx
6671expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6672{
6673 rtx val, mem;
6674 enum memmodel model;
6675
6676 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6677
6678 if (!flag_inline_atomics)
6679 return NULL_RTX;
6680
6681 /* Expand the operands. */
6682 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6683 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6684
6685 return expand_atomic_exchange (target, mem, val, model);
6686}
6687
6688/* Expand the __atomic_compare_exchange intrinsic:
6689 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6690 TYPE desired, BOOL weak,
6691 enum memmodel success,
6692 enum memmodel failure)
6693 EXP is the CALL_EXPR.
6694 TARGET is an optional place for us to store the results. */
6695
6696static rtx
6697expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6698 rtx target)
6699{
6700 rtx expect, desired, mem, oldval;
6701 rtx_code_label *label;
6702 tree weak;
6703 bool is_weak;
6704
6705 memmodel success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6706 memmodel failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6707
6708 if (failure > success)
6709 success = MEMMODEL_SEQ_CST;
6710
6711 if (is_mm_release (model: failure) || is_mm_acq_rel (model: failure))
6712 {
6713 failure = MEMMODEL_SEQ_CST;
6714 success = MEMMODEL_SEQ_CST;
6715 }
6716
6717
6718 if (!flag_inline_atomics)
6719 return NULL_RTX;
6720
6721 /* Expand the operands. */
6722 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6723
6724 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6725 expect = convert_memory_address (Pmode, expect);
6726 expect = gen_rtx_MEM (mode, expect);
6727 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6728
6729 weak = CALL_EXPR_ARG (exp, 3);
6730 is_weak = false;
6731 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6732 is_weak = true;
6733
6734 if (target == const0_rtx)
6735 target = NULL;
6736
6737 /* Lest the rtl backend create a race condition with an improper store
6738 to memory, always create a new pseudo for OLDVAL. */
6739 oldval = NULL;
6740
6741 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6742 is_weak, success, failure))
6743 return NULL_RTX;
6744
6745 /* Conditionally store back to EXPECT, lest we create a race condition
6746 with an improper store to memory. */
6747 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6748 the normal case where EXPECT is totally private, i.e. a register. At
6749 which point the store can be unconditional. */
6750 label = gen_label_rtx ();
6751 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6752 GET_MODE (target), 1, label);
6753 emit_move_insn (expect, oldval);
6754 emit_label (label);
6755
6756 return target;
6757}
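/* A sketch of the source-level form expanded above (the fourth argument is
   WEAK):

     bool ok = __atomic_compare_exchange_n (&v, &expected, desired, false,
                                            __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);

   On failure the builtin must make the value observed in V visible through
   EXPECTED, which is why OLDVAL is copied back to EXPECT only on the
   not-equal path above.  */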
6758
6759/* Helper function for expand_ifn_atomic_compare_exchange - expand
6760 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6761 call. The weak parameter must be dropped to match the expected parameter
6762 list and the expected argument changed from value to pointer to memory
6763 slot. */
6764
6765static void
6766expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6767{
6768 unsigned int z;
6769 vec<tree, va_gc> *vec;
6770
6771 vec_alloc (v&: vec, nelems: 5);
6772 vec->quick_push (obj: gimple_call_arg (gs: call, index: 0));
6773 tree expected = gimple_call_arg (gs: call, index: 1);
6774 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6775 TREE_TYPE (expected));
6776 rtx expd = expand_expr (exp: expected, target: x, mode, modifier: EXPAND_NORMAL);
6777 if (expd != x)
6778 emit_move_insn (x, expd);
6779 tree v = make_tree (TREE_TYPE (expected), x);
6780 vec->quick_push (obj: build1 (ADDR_EXPR,
6781 build_pointer_type (TREE_TYPE (expected)), v));
6782 vec->quick_push (obj: gimple_call_arg (gs: call, index: 2));
6783 /* Skip the boolean weak parameter. */
6784 for (z = 4; z < 6; z++)
6785 vec->quick_push (obj: gimple_call_arg (gs: call, index: z));
6786 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6787 unsigned int bytes_log2 = exact_log2 (x: GET_MODE_SIZE (mode).to_constant ());
6788 gcc_assert (bytes_log2 < 5);
6789 built_in_function fncode
6790 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6791 + bytes_log2);
6792 tree fndecl = builtin_decl_explicit (fncode);
6793 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6794 fndecl);
6795 tree exp = build_call_vec (boolean_type_node, fn, vec);
6796 tree lhs = gimple_call_lhs (gs: call);
6797 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6798 if (lhs)
6799 {
6800 rtx target = expand_expr (exp: lhs, NULL_RTX, VOIDmode, modifier: EXPAND_WRITE);
6801 if (GET_MODE (boolret) != mode)
6802 boolret = convert_modes (mode, GET_MODE (boolret), x: boolret, unsignedp: 1);
6803 x = force_reg (mode, x);
6804 write_complex_part (target, boolret, true, true);
6805 write_complex_part (target, x, false, false);
6806 }
6807}
6808
6809/* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6810
6811void
6812expand_ifn_atomic_compare_exchange (gcall *call)
6813{
6814 int size = tree_to_shwi (gimple_call_arg (gs: call, index: 3)) & 255;
6815 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6816 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, limit: 0).require ();
6817
6818 memmodel success = get_memmodel (exp: gimple_call_arg (gs: call, index: 4));
6819 memmodel failure = get_memmodel (exp: gimple_call_arg (gs: call, index: 5));
6820
6821 if (failure > success)
6822 success = MEMMODEL_SEQ_CST;
6823
6824 if (is_mm_release (model: failure) || is_mm_acq_rel (model: failure))
6825 {
6826 failure = MEMMODEL_SEQ_CST;
6827 success = MEMMODEL_SEQ_CST;
6828 }
6829
6830 if (!flag_inline_atomics)
6831 {
6832 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6833 return;
6834 }
6835
6836 /* Expand the operands. */
6837 rtx mem = get_builtin_sync_mem (loc: gimple_call_arg (gs: call, index: 0), mode);
6838
6839 rtx expect = expand_expr_force_mode (exp: gimple_call_arg (gs: call, index: 1), mode);
6840 rtx desired = expand_expr_force_mode (exp: gimple_call_arg (gs: call, index: 2), mode);
6841
6842 bool is_weak = (tree_to_shwi (gimple_call_arg (gs: call, index: 3)) & 256) != 0;
6843
6844 rtx boolret = NULL;
6845 rtx oldval = NULL;
6846
6847 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6848 is_weak, success, failure))
6849 {
6850 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6851 return;
6852 }
6853
6854 tree lhs = gimple_call_lhs (gs: call);
6855 if (lhs)
6856 {
6857 rtx target = expand_expr (exp: lhs, NULL_RTX, VOIDmode, modifier: EXPAND_WRITE);
6858 if (GET_MODE (boolret) != mode)
6859 boolret = convert_modes (mode, GET_MODE (boolret), x: boolret, unsignedp: 1);
6860 write_complex_part (target, boolret, true, true);
6861 write_complex_part (target, oldval, false, false);
6862 }
6863}
6864
6865/* Expand the __atomic_load intrinsic:
6866 TYPE __atomic_load (TYPE *object, enum memmodel)
6867 EXP is the CALL_EXPR.
6868 TARGET is an optional place for us to store the results. */
6869
6870static rtx
6871expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6872{
6873 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6874 if (is_mm_release (model) || is_mm_acq_rel (model))
6875 model = MEMMODEL_SEQ_CST;
6876
6877 if (!flag_inline_atomics)
6878 return NULL_RTX;
6879
6880 /* Expand the operand. */
6881 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6882
6883 return expand_atomic_load (target, mem, model);
6884}
6885
6886
6887/* Expand the __atomic_store intrinsic:
6888 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6889 EXP is the CALL_EXPR.
6890 TARGET is an optional place for us to store the results. */
6891
6892static rtx
6893expand_builtin_atomic_store (machine_mode mode, tree exp)
6894{
6895 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6896 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6897 || is_mm_release (model)))
6898 model = MEMMODEL_SEQ_CST;
6899
6900 if (!flag_inline_atomics)
6901 return NULL_RTX;
6902
6903 /* Expand the operands. */
6904 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6905 rtx val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6906
6907 return expand_atomic_store (mem, val, model, false);
6908}
6909
6910/* Expand the __atomic_fetch_XXX intrinsic:
6911 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6912 EXP is the CALL_EXPR.
6913 TARGET is an optional place for us to store the results.
6914 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
6915 FETCH_AFTER is true if returning the result of the operation.
6916 FETCH_AFTER is false if returning the value before the operation.
6917 IGNORE is true if the result is not used.
6918 EXT_CALL is the correct builtin for an external call if this cannot be
6919 resolved to an instruction sequence. */
6920
6921static rtx
6922expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6923 enum rtx_code code, bool fetch_after,
6924 bool ignore, enum built_in_function ext_call)
6925{
6926 rtx val, mem, ret;
6927 enum memmodel model;
6928 tree fndecl;
6929 tree addr;
6930
6931 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6932
6933 /* Expand the operands. */
6934 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6935 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6936
6937 /* Only try generating instructions if inlining is turned on. */
6938 if (flag_inline_atomics)
6939 {
6940 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6941 if (ret)
6942 return ret;
6943 }
6944
6945 /* Return if a different routine isn't needed for the library call. */
6946 if (ext_call == BUILT_IN_NONE)
6947 return NULL_RTX;
6948
6949 /* Change the call to the specified function. */
6950 fndecl = get_callee_fndecl (exp);
6951 addr = CALL_EXPR_FN (exp);
6952 STRIP_NOPS (addr);
6953
6954 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6955 TREE_OPERAND (addr, 0) = builtin_decl_explicit (fncode: ext_call);
6956
6957 /* If we will emit code after the call, the call cannot be a tail call.
6958 If it is emitted as a tail call, a barrier is emitted after it, and
6959 then all trailing code is removed. */
6960 if (!ignore)
6961 CALL_EXPR_TAILCALL (exp) = 0;
6962
6963 /* Expand the call here so we can emit trailing code. */
6964 ret = expand_call (exp, target, ignore);
6965
6966 /* Replace the original function just in case it matters. */
6967 TREE_OPERAND (addr, 0) = fndecl;
6968
6969 /* Then issue the arithmetic correction to return the right result. */
6970 if (!ignore)
6971 {
6972 if (code == NOT)
6973 {
6974 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6975 OPTAB_LIB_WIDEN);
6976 ret = expand_simple_unop (mode, NOT, ret, target, true);
6977 }
6978 else
6979 ret = expand_simple_binop (mode, code, ret, val, target, true,
6980 OPTAB_LIB_WIDEN);
6981 }
6982 return ret;
6983}
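/* As an example of the correction above: if __atomic_add_fetch_4 cannot be
   inlined, EXT_CALL names __atomic_fetch_add_4, whose library implementation
   returns the pre-operation value; re-applying the addition (or ~(ret & val)
   in the NAND case) recovers the post-operation result the caller asked for.  */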
6984
6985/* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6986
6987void
6988expand_ifn_atomic_bit_test_and (gcall *call)
6989{
6990 tree ptr = gimple_call_arg (gs: call, index: 0);
6991 tree bit = gimple_call_arg (gs: call, index: 1);
6992 tree flag = gimple_call_arg (gs: call, index: 2);
6993 tree lhs = gimple_call_lhs (gs: call);
6994 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6995 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6996 enum rtx_code code;
6997 optab optab;
6998 class expand_operand ops[5];
6999
7000 gcc_assert (flag_inline_atomics);
7001
7002 if (gimple_call_num_args (gs: call) == 5)
7003 model = get_memmodel (exp: gimple_call_arg (gs: call, index: 3));
7004
7005 rtx mem = get_builtin_sync_mem (loc: ptr, mode);
7006 rtx val = expand_expr_force_mode (exp: bit, mode);
7007
7008 switch (gimple_call_internal_fn (gs: call))
7009 {
7010 case IFN_ATOMIC_BIT_TEST_AND_SET:
7011 code = IOR;
7012 optab = atomic_bit_test_and_set_optab;
7013 break;
7014 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
7015 code = XOR;
7016 optab = atomic_bit_test_and_complement_optab;
7017 break;
7018 case IFN_ATOMIC_BIT_TEST_AND_RESET:
7019 code = AND;
7020 optab = atomic_bit_test_and_reset_optab;
7021 break;
7022 default:
7023 gcc_unreachable ();
7024 }
7025
7026 if (lhs == NULL_TREE)
7027 {
7028 rtx val2 = expand_simple_binop (mode, ASHIFT, const1_rtx,
7029 val, NULL_RTX, true, OPTAB_DIRECT);
7030 if (code == AND)
7031 val2 = expand_simple_unop (mode, NOT, val2, NULL_RTX, true);
7032 if (expand_atomic_fetch_op (const0_rtx, mem, val2, code, model, false))
7033 return;
7034 }
7035
7036 rtx target;
7037 if (lhs)
7038 target = expand_expr (exp: lhs, NULL_RTX, VOIDmode, modifier: EXPAND_WRITE);
7039 else
7040 target = gen_reg_rtx (mode);
7041 enum insn_code icode = direct_optab_handler (op: optab, mode);
7042 gcc_assert (icode != CODE_FOR_nothing);
7043 create_output_operand (op: &ops[0], x: target, mode);
7044 create_fixed_operand (op: &ops[1], x: mem);
7045 create_convert_operand_to (op: &ops[2], value: val, mode, unsigned_p: true);
7046 create_integer_operand (&ops[3], model);
7047 create_integer_operand (&ops[4], integer_onep (flag));
7048 if (maybe_expand_insn (icode, nops: 5, ops))
7049 return;
7050
7051 rtx bitval = val;
7052 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
7053 val, NULL_RTX, true, OPTAB_DIRECT);
7054 rtx maskval = val;
7055 if (code == AND)
7056 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
7057 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
7058 code, model, false);
7059 if (!result)
7060 {
7061 bool is_atomic = gimple_call_num_args (gs: call) == 5;
7062 tree tcall = gimple_call_arg (gs: call, index: 3 + is_atomic);
7063 tree fndecl = gimple_call_addr_fndecl (fn: tcall);
7064 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7065 tree exp = build_call_nary (type, tcall, 2 + is_atomic, ptr,
7066 make_tree (type, val),
7067 is_atomic
7068 ? gimple_call_arg (gs: call, index: 3)
7069 : integer_zero_node);
7070 result = expand_builtin (exp, gen_reg_rtx (mode), NULL_RTX,
7071 mode, !lhs);
7072 }
7073 if (!lhs)
7074 return;
7075 if (integer_onep (flag))
7076 {
7077 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
7078 NULL_RTX, true, OPTAB_DIRECT);
7079 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
7080 true, OPTAB_DIRECT);
7081 }
7082 else
7083 result = expand_simple_binop (mode, AND, result, maskval, target, true,
7084 OPTAB_DIRECT);
7085 if (result != target)
7086 emit_move_insn (target, result);
7087}
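/* This internal function is not written by users; it is created earlier in
   the pipeline from source patterns such as (assuming unsigned int X)

     (__atomic_fetch_or (&x, 1u << bit, __ATOMIC_SEQ_CST) >> bit) & 1

   so that targets with an atomic bit-test-and-set style instruction can use
   it instead of a full fetch-and-or followed by masking.  */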
7088
7089/* Expand IFN_ATOMIC_*_FETCH_CMP_0 internal function. */
7090
7091void
7092expand_ifn_atomic_op_fetch_cmp_0 (gcall *call)
7093{
7094 tree cmp = gimple_call_arg (gs: call, index: 0);
7095 tree ptr = gimple_call_arg (gs: call, index: 1);
7096 tree arg = gimple_call_arg (gs: call, index: 2);
7097 tree lhs = gimple_call_lhs (gs: call);
7098 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
7099 machine_mode mode = TYPE_MODE (TREE_TYPE (cmp));
7100 optab optab;
7101 rtx_code code;
7102 class expand_operand ops[5];
7103
7104 gcc_assert (flag_inline_atomics);
7105
7106 if (gimple_call_num_args (gs: call) == 5)
7107 model = get_memmodel (exp: gimple_call_arg (gs: call, index: 3));
7108
7109 rtx mem = get_builtin_sync_mem (loc: ptr, mode);
7110 rtx op = expand_expr_force_mode (exp: arg, mode);
7111
7112 switch (gimple_call_internal_fn (gs: call))
7113 {
7114 case IFN_ATOMIC_ADD_FETCH_CMP_0:
7115 code = PLUS;
7116 optab = atomic_add_fetch_cmp_0_optab;
7117 break;
7118 case IFN_ATOMIC_SUB_FETCH_CMP_0:
7119 code = MINUS;
7120 optab = atomic_sub_fetch_cmp_0_optab;
7121 break;
7122 case IFN_ATOMIC_AND_FETCH_CMP_0:
7123 code = AND;
7124 optab = atomic_and_fetch_cmp_0_optab;
7125 break;
7126 case IFN_ATOMIC_OR_FETCH_CMP_0:
7127 code = IOR;
7128 optab = atomic_or_fetch_cmp_0_optab;
7129 break;
7130 case IFN_ATOMIC_XOR_FETCH_CMP_0:
7131 code = XOR;
7132 optab = atomic_xor_fetch_cmp_0_optab;
7133 break;
7134 default:
7135 gcc_unreachable ();
7136 }
7137
7138 enum rtx_code comp = UNKNOWN;
7139 switch (tree_to_uhwi (cmp))
7140 {
7141 case ATOMIC_OP_FETCH_CMP_0_EQ: comp = EQ; break;
7142 case ATOMIC_OP_FETCH_CMP_0_NE: comp = NE; break;
7143 case ATOMIC_OP_FETCH_CMP_0_GT: comp = GT; break;
7144 case ATOMIC_OP_FETCH_CMP_0_GE: comp = GE; break;
7145 case ATOMIC_OP_FETCH_CMP_0_LT: comp = LT; break;
7146 case ATOMIC_OP_FETCH_CMP_0_LE: comp = LE; break;
7147 default: gcc_unreachable ();
7148 }
7149
7150 rtx target;
7151 if (lhs == NULL_TREE)
7152 target = gen_reg_rtx (TYPE_MODE (boolean_type_node));
7153 else
7154 target = expand_expr (exp: lhs, NULL_RTX, VOIDmode, modifier: EXPAND_WRITE);
7155 enum insn_code icode = direct_optab_handler (op: optab, mode);
7156 gcc_assert (icode != CODE_FOR_nothing);
7157 create_output_operand (op: &ops[0], x: target, TYPE_MODE (boolean_type_node));
7158 create_fixed_operand (op: &ops[1], x: mem);
7159 create_convert_operand_to (op: &ops[2], value: op, mode, unsigned_p: true);
7160 create_integer_operand (&ops[3], model);
7161 create_integer_operand (&ops[4], comp);
7162 if (maybe_expand_insn (icode, nops: 5, ops))
7163 return;
7164
7165 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, op,
7166 code, model, true);
7167 if (!result)
7168 {
7169 bool is_atomic = gimple_call_num_args (gs: call) == 5;
7170 tree tcall = gimple_call_arg (gs: call, index: 3 + is_atomic);
7171 tree fndecl = gimple_call_addr_fndecl (fn: tcall);
7172 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7173 tree exp = build_call_nary (type, tcall,
7174 2 + is_atomic, ptr, arg,
7175 is_atomic
7176 ? gimple_call_arg (gs: call, index: 3)
7177 : integer_zero_node);
7178 result = expand_builtin (exp, gen_reg_rtx (mode), NULL_RTX,
7179 mode, !lhs);
7180 }
7181
7182 if (lhs)
7183 {
7184 result = emit_store_flag_force (target, comp, result, const0_rtx, mode,
7185 0, 1);
7186 if (result != target)
7187 emit_move_insn (target, result);
7188 }
7189}
7190
7191/* Expand an atomic clear operation.
7192 void __atomic_clear (BOOL *obj, enum memmodel)
7193 EXP is the call expression. */
7194
7195static rtx
7196expand_builtin_atomic_clear (tree exp)
7197{
7198 machine_mode mode = int_mode_for_size (BOOL_TYPE_SIZE, limit: 0).require ();
7199 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7200 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7201
7202 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
7203 model = MEMMODEL_SEQ_CST;
7204
7205 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
7206 Failing that, a store is issued by __atomic_store. The only way this can
7207 fail is if the bool type is larger than a word size. Unlikely, but
7208 handle it anyway for completeness. Assume a single threaded model since
7209 there is no atomic support in this case, and no barriers are required. */
7210 rtx ret = expand_atomic_store (mem, const0_rtx, model, true);
7211 if (!ret)
7212 emit_move_insn (mem, const0_rtx);
7213 return const0_rtx;
7214}
7215
7216/* Expand an atomic test_and_set operation.
7217 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
7218 EXP is the call expression. */
7219
7220static rtx
7221expand_builtin_atomic_test_and_set (tree exp, rtx target)
7222{
7223 rtx mem;
7224 enum memmodel model;
7225 machine_mode mode;
7226
7227 mode = int_mode_for_size (BOOL_TYPE_SIZE, limit: 0).require ();
7228 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7229 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7230
7231 return expand_atomic_test_and_set (target, mem, model);
7232}
7233
7234
7235/* Return true if accesses to an object of size ARG0, optionally pointed to by
7236 ARG1, are always lock free on this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
7237
7238static tree
7239fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
7240{
7241 int size;
7242 machine_mode mode;
7243 unsigned int mode_align, type_align;
7244
7245 if (TREE_CODE (arg0) != INTEGER_CST)
7246 return NULL_TREE;
7247
7248 /* We need a corresponding integer mode for the access to be lock-free. */
7249 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
7250 if (!int_mode_for_size (size, limit: 0).exists (mode: &mode))
7251 return boolean_false_node;
7252
7253 mode_align = GET_MODE_ALIGNMENT (mode);
7254
7255 if (TREE_CODE (arg1) == INTEGER_CST)
7256 {
7257 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
7258
7259 /* Either this argument is null, or it's a fake pointer encoding
7260 the alignment of the object. */
7261 val = least_bit_hwi (x: val);
7262 val *= BITS_PER_UNIT;
7263
7264 if (val == 0 || mode_align < val)
7265 type_align = mode_align;
7266 else
7267 type_align = val;
7268 }
7269 else
7270 {
7271 tree ttype = TREE_TYPE (arg1);
7272
7273 /* This function is usually invoked and folded immediately by the front
7274 end before anything else has a chance to look at it. The pointer
7275 parameter at this point is usually cast to a void *, so check for that
7276 and look past the cast. */
7277 if (CONVERT_EXPR_P (arg1)
7278 && POINTER_TYPE_P (ttype)
7279 && VOID_TYPE_P (TREE_TYPE (ttype))
7280 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
7281 arg1 = TREE_OPERAND (arg1, 0);
7282
7283 ttype = TREE_TYPE (arg1);
7284 gcc_assert (POINTER_TYPE_P (ttype));
7285
7286 /* Get the underlying type of the object. */
7287 ttype = TREE_TYPE (ttype);
7288 type_align = TYPE_ALIGN (ttype);
7289 }
7290
7291 /* If the object has smaller alignment, the lock free routines cannot
7292 be used. */
7293 if (type_align < mode_align)
7294 return boolean_false_node;
7295
7296 /* Check if a compare_and_swap pattern exists for the mode which represents
7297 the required size. The pattern is not allowed to fail, so the existence
7298 of the pattern indicates support is present. Also require that an
7299 atomic load exists for the required size. */
7300 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
7301 return boolean_true_node;
7302 else
7303 return boolean_false_node;
7304}
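/* For example, __atomic_always_lock_free (sizeof (long long), 0) folds to a
   compile-time constant using the typical alignment for that size, while
   passing the object itself, as in __atomic_always_lock_free (sizeof (x), &x),
   lets the object's declared alignment be taken into account.  */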
7305
7306/* Return true if the parameters to call EXP represent an object which will
7307 always generate lock free instructions. The first argument represents the
7308 size of the object, and the second parameter is a pointer to the object
7309 itself. If NULL is passed for the object, then the result is based on
7310 typical alignment for an object of the specified size. Return false when
7311 the access is not known to always be lock free. */
7312
7313static rtx
7314expand_builtin_atomic_always_lock_free (tree exp)
7315{
7316 tree size;
7317 tree arg0 = CALL_EXPR_ARG (exp, 0);
7318 tree arg1 = CALL_EXPR_ARG (exp, 1);
7319
7320 if (TREE_CODE (arg0) != INTEGER_CST)
7321 {
7322 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
7323 return const0_rtx;
7324 }
7325
7326 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
7327 if (size == boolean_true_node)
7328 return const1_rtx;
7329 return const0_rtx;
7330}
7331
7332/* Return boolean_true_node if it can be determined that object ARG1 of size
7333 ARG0 is always lock free on this architecture; otherwise return NULL_TREE. */
7334
7335static tree
7336fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
7337{
7338 if (!flag_inline_atomics)
7339 return NULL_TREE;
7340
7341 /* If it isn't always lock free, don't generate a result. */
7342 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
7343 return boolean_true_node;
7344
7345 return NULL_TREE;
7346}
7347
7348/* Return true if the parameters to call EXP represent an object which will
7349 always generate lock free instructions. The first argument represents the
7350 size of the object, and the second parameter is a pointer to the object
7351 itself. If NULL is passed for the object, then the result is based on
7352 typical alignment for an object of the specified size. Otherwise return
7353 NULL_RTX. */
7354
7355static rtx
7356expand_builtin_atomic_is_lock_free (tree exp)
7357{
7358 tree size;
7359 tree arg0 = CALL_EXPR_ARG (exp, 0);
7360 tree arg1 = CALL_EXPR_ARG (exp, 1);
7361
7362 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
7363 {
7364 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
7365 return NULL_RTX;
7366 }
7367
7368 if (!flag_inline_atomics)
7369 return NULL_RTX;
7370
7371 /* If the value is known at compile time, return the RTX for it. */
7372 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
7373 if (size == boolean_true_node)
7374 return const1_rtx;
7375
7376 return NULL_RTX;
7377}
7378
7379/* Expand the __atomic_thread_fence intrinsic:
7380 void __atomic_thread_fence (enum memmodel)
7381 EXP is the CALL_EXPR. */
7382
7383static void
7384expand_builtin_atomic_thread_fence (tree exp)
7385{
7386 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7387 expand_mem_thread_fence (model);
7388}
7389
7390/* Expand the __atomic_signal_fence intrinsic:
7391 void __atomic_signal_fence (enum memmodel)
7392 EXP is the CALL_EXPR. */
7393
7394static void
7395expand_builtin_atomic_signal_fence (tree exp)
7396{
7397 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7398 expand_mem_signal_fence (model);
7399}
7400
7401/* Expand the __sync_synchronize intrinsic. */
7402
7403static void
7404expand_builtin_sync_synchronize (void)
7405{
7406 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
7407}
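/* In source terms: __atomic_thread_fence (__ATOMIC_ACQUIRE) emits a real
   memory fence for the given model, __atomic_signal_fence only constrains the
   compiler (ordering against a signal handler on the same thread), and the
   legacy __sync_synchronize () is a full sequentially consistent barrier.  */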
7408
7409static rtx
7410expand_builtin_thread_pointer (tree exp, rtx target)
7411{
7412 enum insn_code icode;
7413 if (!validate_arglist (callexpr: exp, VOID_TYPE))
7414 return const0_rtx;
7415 icode = direct_optab_handler (op: get_thread_pointer_optab, Pmode);
7416 if (icode != CODE_FOR_nothing)
7417 {
7418 class expand_operand op;
7419 /* If the target is not suitable then create a new target. */
7420 if (target == NULL_RTX
7421 || !REG_P (target)
7422 || GET_MODE (target) != Pmode)
7423 target = gen_reg_rtx (Pmode);
7424 create_output_operand (op: &op, x: target, Pmode);
7425 expand_insn (icode, nops: 1, ops: &op);
7426 return target;
7427 }
7428 error ("%<__builtin_thread_pointer%> is not supported on this target");
7429 return const0_rtx;
7430}
7431
7432static void
7433expand_builtin_set_thread_pointer (tree exp)
7434{
7435 enum insn_code icode;
7436 if (!validate_arglist (callexpr: exp, POINTER_TYPE, VOID_TYPE))
7437 return;
7438 icode = direct_optab_handler (op: set_thread_pointer_optab, Pmode);
7439 if (icode != CODE_FOR_nothing)
7440 {
7441 class expand_operand op;
7442 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
7443 Pmode, modifier: EXPAND_NORMAL);
7444 create_input_operand (op: &op, value: val, Pmode);
7445 expand_insn (icode, nops: 1, ops: &op);
7446 return;
7447 }
7448 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
7449}
7450
7451
7452/* Emit code to restore the current value of the stack. */
7453
7454static void
7455expand_stack_restore (tree var)
7456{
7457 rtx_insn *prev;
7458 rtx sa = expand_normal (exp: var);
7459
7460 sa = convert_memory_address (Pmode, sa);
7461
7462 prev = get_last_insn ();
7463 emit_stack_restore (SAVE_BLOCK, sa);
7464
7465 record_new_stack_level ();
7466
7467 fixup_args_size_notes (prev, get_last_insn (), 0);
7468}
7469
7470/* Emit code to save the current value of the stack. */
7471
7472static rtx
7473expand_stack_save (void)
7474{
7475 rtx ret = NULL_RTX;
7476
7477 emit_stack_save (SAVE_BLOCK, &ret);
7478 return ret;
7479}
7480
7481/* Emit code to get the OpenACC gang, worker or vector id or size. */
7482
7483static rtx
7484expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
7485{
7486 const char *name;
7487 rtx fallback_retval;
7488 rtx_insn *(*gen_fn) (rtx, rtx);
7489 switch (DECL_FUNCTION_CODE (decl: get_callee_fndecl (exp)))
7490 {
7491 case BUILT_IN_GOACC_PARLEVEL_ID:
7492 name = "__builtin_goacc_parlevel_id";
7493 fallback_retval = const0_rtx;
7494 gen_fn = targetm.gen_oacc_dim_pos;
7495 break;
7496 case BUILT_IN_GOACC_PARLEVEL_SIZE:
7497 name = "__builtin_goacc_parlevel_size";
7498 fallback_retval = const1_rtx;
7499 gen_fn = targetm.gen_oacc_dim_size;
7500 break;
7501 default:
7502 gcc_unreachable ();
7503 }
7504
7505 if (oacc_get_fn_attrib (fn: current_function_decl) == NULL_TREE)
7506 {
7507 error ("%qs only supported in OpenACC code", name);
7508 return const0_rtx;
7509 }
7510
7511 tree arg = CALL_EXPR_ARG (exp, 0);
7512 if (TREE_CODE (arg) != INTEGER_CST)
7513 {
7514 error ("non-constant argument 0 to %qs", name);
7515 return const0_rtx;
7516 }
7517
7518 int dim = TREE_INT_CST_LOW (arg);
7519 switch (dim)
7520 {
7521 case GOMP_DIM_GANG:
7522 case GOMP_DIM_WORKER:
7523 case GOMP_DIM_VECTOR:
7524 break;
7525 default:
7526 error ("illegal argument 0 to %qs", name);
7527 return const0_rtx;
7528 }
7529
7530 if (ignore)
7531 return target;
7532
7533 if (target == NULL_RTX)
7534 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7535
7536 if (!targetm.have_oacc_dim_size ())
7537 {
7538 emit_move_insn (target, fallback_retval);
7539 return target;
7540 }
7541
7542 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7543 emit_insn (gen_fn (reg, GEN_INT (dim)));
7544 if (reg != target)
7545 emit_move_insn (target, reg);
7546
7547 return target;
7548}
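/* For example, inside an OpenACC offloaded region
   __builtin_goacc_parlevel_id (GOMP_DIM_VECTOR) yields the current vector
   lane; on targets without the oacc_dim_size/oacc_dim_pos patterns the
   fallback constants above (0 for the id, 1 for the size) are returned
   instead.  */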
7549
7550/* Expand a string compare operation using a sequence of char comparisons
7551 to get rid of the calling overhead, with result going to TARGET if
7552 that's convenient.
7553
7554 VAR_STR is the variable string source;
7555 CONST_STR is the constant string source;
7556 LENGTH is the number of chars to compare;
7557 CONST_STR_N indicates which source string is the constant string;
7558 IS_MEMCMP indicates whether it's a memcmp or strcmp.
7559
7560 The call is expanded to: (assume const_str_n is 2, i.e., arg2 is a constant string)
7561
7562 target = (int) (unsigned char) var_str[0]
7563 - (int) (unsigned char) const_str[0];
7564 if (target != 0)
7565 goto ne_label;
7566 ...
7567 target = (int) (unsigned char) var_str[length - 2]
7568 - (int) (unsigned char) const_str[length - 2];
7569 if (target != 0)
7570 goto ne_label;
7571 target = (int) (unsigned char) var_str[length - 1]
7572 - (int) (unsigned char) const_str[length - 1];
7573 ne_label:
7574 */
7575
7576static rtx
7577inline_string_cmp (rtx target, tree var_str, const char *const_str,
7578 unsigned HOST_WIDE_INT length,
7579 int const_str_n, machine_mode mode)
7580{
7581 HOST_WIDE_INT offset = 0;
7582 rtx var_rtx_array
7583 = get_memory_rtx (exp: var_str, len: build_int_cst (unsigned_type_node,length));
7584 rtx var_rtx = NULL_RTX;
7585 rtx const_rtx = NULL_RTX;
7586 rtx result = target ? target : gen_reg_rtx (mode);
7587 rtx_code_label *ne_label = gen_label_rtx ();
7588 tree unit_type_node = unsigned_char_type_node;
7589 scalar_int_mode unit_mode
7590 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
7591
7592 start_sequence ();
7593
7594 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7595 {
7596 var_rtx
7597 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
7598 const_rtx = c_readstr (str: const_str + offset, mode: unit_mode);
7599 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7600 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
7601
7602 op0 = convert_modes (mode, oldmode: unit_mode, x: op0, unsignedp: 1);
7603 op1 = convert_modes (mode, oldmode: unit_mode, x: op1, unsignedp: 1);
7604 rtx diff = expand_simple_binop (mode, MINUS, op0, op1,
7605 result, 1, OPTAB_WIDEN);
7606
7607 /* Force the difference into the result register. We cannot reassign
7608 result here ("result = diff") or we may end up returning an
7609 uninitialized result when expand_simple_binop allocates a new
7610 pseudo-register for the return value. */
7611 if (diff != result)
7612 emit_move_insn (result, diff);
7613
7614 if (i < length - 1)
7615 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7616 mode, true, ne_label);
7617 offset += GET_MODE_SIZE (mode: unit_mode);
7618 }
7619
7620 emit_label (ne_label);
7621 rtx_insn *insns = end_sequence ();
7622 emit_insn (insns);
7623
7624 return result;
7625}
7626
7627/* Inline expansion of a call to str(n)cmp and memcmp, with result going
7628 to TARGET if that's convenient.
7629 If the call has not been inlined, return NULL_RTX. */
7630
7631static rtx
7632inline_expand_builtin_bytecmp (tree exp, rtx target)
7633{
7634 tree fndecl = get_callee_fndecl (exp);
7635 enum built_in_function fcode = DECL_FUNCTION_CODE (decl: fndecl);
7636 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7637
7638 /* Do NOT apply this inlining expansion when optimizing for size, when the
7639 optimization level is below 2, or if the unused *cmp result hasn't been DCEd. */
7640 if (optimize < 2 || optimize_insn_for_size_p () || target == const0_rtx)
7641 return NULL_RTX;
7642
7643 gcc_checking_assert (fcode == BUILT_IN_STRCMP
7644 || fcode == BUILT_IN_STRNCMP
7645 || fcode == BUILT_IN_MEMCMP);
7646
7647 /* On a target where the type of the call (int) has the same or narrower precision
7648 than unsigned char, give up the inlining expansion. */
7649 if (TYPE_PRECISION (unsigned_char_type_node)
7650 >= TYPE_PRECISION (TREE_TYPE (exp)))
7651 return NULL_RTX;
7652
7653 tree arg1 = CALL_EXPR_ARG (exp, 0);
7654 tree arg2 = CALL_EXPR_ARG (exp, 1);
7655 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7656
7657 unsigned HOST_WIDE_INT len1 = 0;
7658 unsigned HOST_WIDE_INT len2 = 0;
7659 unsigned HOST_WIDE_INT len3 = 0;
7660
7661 /* Get the object representation of the initializers of ARG1 and ARG2
7662 as strings, provided they refer to constant objects, with their byte
7663 sizes in LEN1 and LEN2, respectively. */
7664 const char *bytes1 = getbyterep (arg1, &len1);
7665 const char *bytes2 = getbyterep (arg2, &len2);
7666
7667 /* Fail if neither argument refers to an initialized constant. */
7668 if (!bytes1 && !bytes2)
7669 return NULL_RTX;
7670
7671 if (is_ncmp)
7672 {
7673 /* Fail if the memcmp/strncmp bound is not a constant. */
7674 if (!tree_fits_uhwi_p (len3_tree))
7675 return NULL_RTX;
7676
7677 len3 = tree_to_uhwi (len3_tree);
7678
7679 if (fcode == BUILT_IN_MEMCMP)
7680 {
7681 /* Fail if the memcmp bound is greater than the size of either
7682 of the two constant objects. */
7683 if ((bytes1 && len1 < len3)
7684 || (bytes2 && len2 < len3))
7685 return NULL_RTX;
7686 }
7687 }
7688
7689 if (fcode != BUILT_IN_MEMCMP)
7690 {
7691 /* For string functions (i.e., strcmp and strncmp) reduce LEN1
7692 and LEN2 to the length of the nul-terminated string stored
7693 in each. */
7694 if (bytes1 != NULL)
7695 len1 = strnlen (string: bytes1, maxlen: len1) + 1;
7696 if (bytes2 != NULL)
7697 len2 = strnlen (string: bytes2, maxlen: len2) + 1;
7698 }
7699
7700 /* See inline_string_cmp. */
7701 int const_str_n;
7702 if (!len1)
7703 const_str_n = 2;
7704 else if (!len2)
7705 const_str_n = 1;
7706 else if (len2 > len1)
7707 const_str_n = 1;
7708 else
7709 const_str_n = 2;
7710
7711 /* For strncmp only, compute the new bound as the smallest of
7712 the lengths of the two strings (plus 1) and the bound provided
7713 to the function. */
7714 unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
7715 if (is_ncmp && len3 < bound)
7716 bound = len3;
7717
7718 /* If the bound of the comparison is larger than the threshold,
7719 do nothing. */
7720 if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
7721 return NULL_RTX;
7722
7723 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7724
7725 /* Now, start inline expansion of the call. */
7726 return inline_string_cmp (target, var_str: (const_str_n == 1) ? arg2 : arg1,
7727 const_str: (const_str_n == 1) ? bytes1 : bytes2, length: bound,
7728 const_str_n, mode);
7729}
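/* For example, assuming default parameters, a call such as

     if (strcmp (name, "ok") == 0) ...

   has a constant string of length 2, so BOUND is 3; when that is within
   param_builtin_string_cmp_inline_length the call is replaced by the
   byte-by-byte sequence produced by inline_string_cmp above.  */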
7730
7731/* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7732 represents the size of the first argument to that call, or VOIDmode
7733 if the argument is a pointer. IGNORE will be true if the result
7734 isn't used. */
7735static rtx
7736expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7737 bool ignore)
7738{
7739 rtx val, failsafe;
7740 unsigned nargs = call_expr_nargs (exp);
7741
7742 tree arg0 = CALL_EXPR_ARG (exp, 0);
7743
7744 if (mode == VOIDmode)
7745 {
7746 mode = TYPE_MODE (TREE_TYPE (arg0));
7747 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7748 }
7749
7750 val = expand_expr (exp: arg0, NULL_RTX, mode, modifier: EXPAND_NORMAL);
7751
7752 /* An optional second argument can be used as a failsafe value on
7753 some machines. If it isn't present, then the failsafe value is
7754 assumed to be 0. */
7755 if (nargs > 1)
7756 {
7757 tree arg1 = CALL_EXPR_ARG (exp, 1);
7758 failsafe = expand_expr (exp: arg1, NULL_RTX, mode, modifier: EXPAND_NORMAL);
7759 }
7760 else
7761 failsafe = const0_rtx;
7762
7763 /* If the result isn't used, the behavior is undefined. It would be
7764 nice to emit a warning here, but path splitting means this might
7765 happen with legitimate code. So simply drop the builtin
7766 expansion in that case; we've handled any side-effects above. */
7767 if (ignore)
7768 return const0_rtx;
7769
7770 /* If we don't have a suitable target, create one to hold the result. */
7771 if (target == NULL || GET_MODE (target) != mode)
7772 target = gen_reg_rtx (mode);
7773
7774 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7775 val = convert_modes (mode, VOIDmode, x: val, unsignedp: false);
7776
7777 return targetm.speculation_safe_value (mode, target, val, failsafe);
7778}
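/* Typical use, as a mitigation for Spectre-style attacks: after a bounds
   check that may be bypassed speculatively, the index is laundered as in

     i = __builtin_speculation_safe_value (i, 0);

   so that on targets providing speculation barriers the value is forced to
   the FAILSAFE (here 0) on mis-speculated paths.  */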
7779
7780/* Expand CRC* or REV_CRC* built-ins. */
7781
7782rtx
7783expand_builtin_crc_table_based (internal_fn fn, scalar_mode crc_mode,
7784 scalar_mode data_mode, machine_mode mode,
7785 tree exp, rtx target)
7786{
7787 tree rhs1 = CALL_EXPR_ARG (exp, 0); // crc
7788 tree rhs2 = CALL_EXPR_ARG (exp, 1); // data
7789 tree rhs3 = CALL_EXPR_ARG (exp, 2); // polynomial
7790
7791 if (!target || mode == VOIDmode)
7792 target = gen_reg_rtx (crc_mode);
7793
7794 rtx op1 = expand_normal (exp: rhs1);
7795 rtx op2 = expand_normal (exp: rhs2);
7796 gcc_assert (TREE_CODE (rhs3) == INTEGER_CST);
7797 rtx op3 = gen_int_mode (TREE_INT_CST_LOW (rhs3), crc_mode);
7798
7799 if (CONST_INT_P (op2))
7800 op2 = gen_int_mode (INTVAL (op2), crc_mode);
7801
7802 if (fn == IFN_CRC)
7803 expand_crc_table_based (target, op1, op2, op3, data_mode);
7804 else
7805 /* If it's IFN_CRC_REV, generate a bit-reversed CRC. */
7806 expand_reversed_crc_table_based (target, op1, op2, op3,
7807 data_mode,
7808 generate_reflecting_code_standard);
7809 return target;
7810}
7811
7812/* Expand an expression EXP that calls a built-in function,
7813 with result going to TARGET if that's convenient
7814 (and in mode MODE if that's convenient).
7815 SUBTARGET may be used as the target for computing one of EXP's operands.
7816 IGNORE is nonzero if the value is to be ignored. */
7817
7818rtx
7819expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7820 int ignore)
7821{
7822 tree fndecl = get_callee_fndecl (exp);
7823 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7824 int flags;
7825
7826 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7827 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7828
7829 /* When ASan is enabled, we don't want to expand some memory/string
7830 builtins and rely on libsanitizer's hooks. This allows us to avoid
7831 redundant checks and be sure that a possible overflow will be detected
7832 by ASan. */
7833
7834 enum built_in_function fcode = DECL_FUNCTION_CODE (decl: fndecl);
7835 if (param_asan_kernel_mem_intrinsic_prefix
7836 && sanitize_flags_p (flag: SANITIZE_KERNEL_ADDRESS
7837 | SANITIZE_KERNEL_HWADDRESS))
7838 switch (fcode)
7839 {
7840 rtx save_decl_rtl, ret;
7841 case BUILT_IN_MEMCPY:
7842 case BUILT_IN_MEMMOVE:
7843 case BUILT_IN_MEMSET:
7844 save_decl_rtl = DECL_RTL (fndecl);
7845 DECL_RTL (fndecl) = asan_memfn_rtl (fndecl);
7846 ret = expand_call (exp, target, ignore);
7847 DECL_RTL (fndecl) = save_decl_rtl;
7848 return ret;
7849 default:
7850 break;
7851 }
7852 if (sanitize_flags_p (flag: SANITIZE_ADDRESS | SANITIZE_HWADDRESS)
7853 && asan_intercepted_p (fcode))
7854 return expand_call (exp, target, ignore);
7855
7856 /* When not optimizing, generate calls to library functions for a certain
7857 set of builtins. */
7858 if (!optimize
7859 && !called_as_built_in (node: fndecl)
7860 && fcode != BUILT_IN_FORK
7861 && fcode != BUILT_IN_EXECL
7862 && fcode != BUILT_IN_EXECV
7863 && fcode != BUILT_IN_EXECLP
7864 && fcode != BUILT_IN_EXECLE
7865 && fcode != BUILT_IN_EXECVP
7866 && fcode != BUILT_IN_EXECVE
7867 && fcode != BUILT_IN_CLEAR_CACHE
7868 && !ALLOCA_FUNCTION_CODE_P (fcode)
7869 && fcode != BUILT_IN_FREE
7870 && (fcode != BUILT_IN_MEMSET
7871 || !(flag_inline_stringops & ILSOP_MEMSET))
7872 && (fcode != BUILT_IN_MEMCPY
7873 || !(flag_inline_stringops & ILSOP_MEMCPY))
7874 && (fcode != BUILT_IN_MEMMOVE
7875 || !(flag_inline_stringops & ILSOP_MEMMOVE))
7876 && (fcode != BUILT_IN_MEMCMP
7877 || !(flag_inline_stringops & ILSOP_MEMCMP)))
7878 return expand_call (exp, target, ignore);
7879
7880 /* The built-in function expanders test for target == const0_rtx
7881 to determine whether the function's result will be ignored. */
7882 if (ignore)
7883 target = const0_rtx;
7884
7885 /* If the result of a pure or const built-in function is ignored, and
7886 none of its arguments are volatile, we can avoid expanding the
7887 built-in call and just evaluate the arguments for side-effects. */
7888 if (target == const0_rtx
7889 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7890 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7891 {
7892 bool volatilep = false;
7893 tree arg;
7894 call_expr_arg_iterator iter;
7895
7896 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7897 if (TREE_THIS_VOLATILE (arg))
7898 {
7899 volatilep = true;
7900 break;
7901 }
7902
7903 if (! volatilep)
7904 {
7905 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7906 expand_expr (exp: arg, const0_rtx, VOIDmode, modifier: EXPAND_NORMAL);
7907 return const0_rtx;
7908 }
7909 }
7910
7911 switch (fcode)
7912 {
7913 CASE_FLT_FN (BUILT_IN_FABS):
7914 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7915 case BUILT_IN_FABSD32:
7916 case BUILT_IN_FABSD64:
7917 case BUILT_IN_FABSD128:
7918 case BUILT_IN_FABSD64X:
7919 target = expand_builtin_fabs (exp, target, subtarget);
7920 if (target)
7921 return target;
7922 break;
7923
7924 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7925 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7926 target = expand_builtin_copysign (exp, target, subtarget);
7927 if (target)
7928 return target;
7929 break;
7930
7931 /* Just do a normal library call if we were unable to fold
7932 the values. */
7933 CASE_FLT_FN (BUILT_IN_CABS):
7934 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CABS):
7935 break;
7936
7937 CASE_FLT_FN (BUILT_IN_FMA):
7938 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7939 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7940 if (target)
7941 return target;
7942 break;
7943
7944 CASE_FLT_FN (BUILT_IN_ILOGB):
7945 if (! flag_unsafe_math_optimizations)
7946 break;
7947 gcc_fallthrough ();
7948 CASE_FLT_FN (BUILT_IN_ISINF):
7949 CASE_FLT_FN (BUILT_IN_FINITE):
7950 case BUILT_IN_ISFINITE:
7951 case BUILT_IN_ISNORMAL:
7952 target = expand_builtin_interclass_mathfn (exp, target);
7953 if (target)
7954 return target;
7955 break;
7956
7957 case BUILT_IN_ISSIGNALING:
7958 target = expand_builtin_issignaling (exp, target);
7959 if (target)
7960 return target;
7961 break;
7962
7963 CASE_FLT_FN (BUILT_IN_ICEIL):
7964 CASE_FLT_FN (BUILT_IN_LCEIL):
7965 CASE_FLT_FN (BUILT_IN_LLCEIL):
7966 CASE_FLT_FN (BUILT_IN_LFLOOR):
7967 CASE_FLT_FN (BUILT_IN_IFLOOR):
7968 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7969 target = expand_builtin_int_roundingfn (exp, target);
7970 if (target)
7971 return target;
7972 break;
7973
7974 CASE_FLT_FN (BUILT_IN_IRINT):
7975 CASE_FLT_FN (BUILT_IN_LRINT):
7976 CASE_FLT_FN (BUILT_IN_LLRINT):
7977 CASE_FLT_FN (BUILT_IN_IROUND):
7978 CASE_FLT_FN (BUILT_IN_LROUND):
7979 CASE_FLT_FN (BUILT_IN_LLROUND):
7980 target = expand_builtin_int_roundingfn_2 (exp, target);
7981 if (target)
7982 return target;
7983 break;
7984
7985 CASE_FLT_FN (BUILT_IN_POWI):
7986 target = expand_builtin_powi (exp, target);
7987 if (target)
7988 return target;
7989 break;
7990
7991 CASE_FLT_FN (BUILT_IN_CEXPI):
7992 target = expand_builtin_cexpi (exp, target);
7993 gcc_assert (target);
7994 return target;
7995
7996 CASE_FLT_FN (BUILT_IN_SIN):
7997 CASE_FLT_FN (BUILT_IN_COS):
7998 if (! flag_unsafe_math_optimizations)
7999 break;
8000 target = expand_builtin_mathfn_3 (exp, target, subtarget);
8001 if (target)
8002 return target;
8003 break;
8004
8005 CASE_FLT_FN (BUILT_IN_SINCOS):
8006 if (! flag_unsafe_math_optimizations)
8007 break;
8008 target = expand_builtin_sincos (exp);
8009 if (target)
8010 return target;
8011 break;
8012
8013 case BUILT_IN_FEGETROUND:
8014 target = expand_builtin_fegetround (exp, target, target_mode);
8015 if (target)
8016 return target;
8017 break;
8018
8019 case BUILT_IN_FECLEAREXCEPT:
8020 target = expand_builtin_feclear_feraise_except (exp, target, target_mode,
8021 op_optab: feclearexcept_optab);
8022 if (target)
8023 return target;
8024 break;
8025
8026 case BUILT_IN_FERAISEEXCEPT:
8027 target = expand_builtin_feclear_feraise_except (exp, target, target_mode,
8028 op_optab: feraiseexcept_optab);
8029 if (target)
8030 return target;
8031 break;
8032
8033 case BUILT_IN_APPLY_ARGS:
8034 return expand_builtin_apply_args ();
8035
8036 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8037 FUNCTION with a copy of the parameters described by
8038 ARGUMENTS, and ARGSIZE. It returns a block of memory
8039 allocated on the stack into which is stored all the registers
8040 that might possibly be used for returning the result of a
8041 function. ARGUMENTS is the value returned by
8042 __builtin_apply_args. ARGSIZE is the number of bytes of
8043 arguments that must be copied. ??? How should this value be
8044 computed? We'll also need a safe worst case value for varargs
8045 functions. */
8046 case BUILT_IN_APPLY:
8047 if (!validate_arglist (callexpr: exp, POINTER_TYPE,
8048 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
8049 && !validate_arglist (callexpr: exp, REFERENCE_TYPE,
8050 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8051 return const0_rtx;
8052 else
8053 {
8054 rtx ops[3];
8055
8056 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
8057 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
8058 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
8059
8060 return expand_builtin_apply (function: ops[0], arguments: ops[1], argsize: ops[2]);
8061 }
8062
8063 /* __builtin_return (RESULT) causes the function to return the
8064 value described by RESULT. RESULT is address of the block of
8065 memory returned by __builtin_apply. */
8066 case BUILT_IN_RETURN:
8067 if (validate_arglist (callexpr: exp, POINTER_TYPE, VOID_TYPE))
8068 expand_builtin_return (result: expand_normal (CALL_EXPR_ARG (exp, 0)));
8069 return const0_rtx;
8070
8071 case BUILT_IN_SAVEREGS:
8072 return expand_builtin_saveregs ();
8073
8074 case BUILT_IN_VA_ARG_PACK:
8075 /* All valid uses of __builtin_va_arg_pack () are removed during
8076 inlining. */
8077 error ("invalid use of %<__builtin_va_arg_pack ()%>");
8078 return const0_rtx;
8079
8080 case BUILT_IN_VA_ARG_PACK_LEN:
8081 /* All valid uses of __builtin_va_arg_pack_len () are removed during
8082 inlining. */
8083 error ("invalid use of %<__builtin_va_arg_pack_len ()%>");
8084 return const0_rtx;
8085
8086 /* Return the address of the first anonymous stack arg. */
8087 case BUILT_IN_NEXT_ARG:
8088 if (fold_builtin_next_arg (exp, false))
8089 return const0_rtx;
8090 return expand_builtin_next_arg ();
8091
8092 case BUILT_IN_CLEAR_CACHE:
8093 expand_builtin___clear_cache (exp);
8094 return const0_rtx;
8095
8096 case BUILT_IN_CLASSIFY_TYPE:
8097 return expand_builtin_classify_type (exp);
8098
8099 case BUILT_IN_CONSTANT_P:
8100 return const0_rtx;
8101
8102 case BUILT_IN_FRAME_ADDRESS:
8103 case BUILT_IN_RETURN_ADDRESS:
8104 return expand_builtin_frame_address (fndecl, exp);
8105
8106 case BUILT_IN_STACK_ADDRESS:
8107 return expand_builtin_stack_address ();
8108
8109 case BUILT_IN___STRUB_ENTER:
8110 target = expand_builtin_strub_enter (exp);
8111 if (target)
8112 return target;
8113 break;
8114
8115 case BUILT_IN___STRUB_UPDATE:
8116 target = expand_builtin_strub_update (exp);
8117 if (target)
8118 return target;
8119 break;
8120
8121 case BUILT_IN___STRUB_LEAVE:
8122 target = expand_builtin_strub_leave (exp);
8123 if (target)
8124 return target;
8125 break;
8126
8127 /* Returns the address of the area where the structure is returned.
8128 0 otherwise. */
8129 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8130 if (call_expr_nargs (exp) != 0
8131 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8132 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
8133 return const0_rtx;
8134 else
8135 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
8136
8137 CASE_BUILT_IN_ALLOCA:
8138 target = expand_builtin_alloca (exp);
8139 if (target)
8140 return target;
8141 break;
8142
8143 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
8144 return expand_asan_emit_allocas_unpoison (exp);
8145
8146 case BUILT_IN_STACK_SAVE:
8147 return expand_stack_save ();
8148
8149 case BUILT_IN_STACK_RESTORE:
8150 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
8151 return const0_rtx;
8152
8153 case BUILT_IN_BSWAP16:
8154 case BUILT_IN_BSWAP32:
8155 case BUILT_IN_BSWAP64:
8156 case BUILT_IN_BSWAP128:
8157 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
8158 if (target)
8159 return target;
8160 break;
8161
8162 CASE_INT_FN (BUILT_IN_FFS):
8163 target = expand_builtin_unop (target_mode, exp, target,
8164 subtarget, op_optab: ffs_optab);
8165 if (target)
8166 return target;
8167 break;
8168
8169 CASE_INT_FN (BUILT_IN_CLZ):
8170 target = expand_builtin_unop (target_mode, exp, target,
8171 subtarget, op_optab: clz_optab);
8172 if (target)
8173 return target;
8174 break;
8175
8176 CASE_INT_FN (BUILT_IN_CTZ):
8177 target = expand_builtin_unop (target_mode, exp, target,
8178 subtarget, op_optab: ctz_optab);
8179 if (target)
8180 return target;
8181 break;
8182
8183 CASE_INT_FN (BUILT_IN_CLRSB):
8184 target = expand_builtin_unop (target_mode, exp, target,
8185 subtarget, op_optab: clrsb_optab);
8186 if (target)
8187 return target;
8188 break;
8189
8190 CASE_INT_FN (BUILT_IN_POPCOUNT):
8191 target = expand_builtin_unop (target_mode, exp, target,
8192 subtarget, op_optab: popcount_optab);
8193 if (target)
8194 return target;
8195 break;
8196
8197 CASE_INT_FN (BUILT_IN_PARITY):
8198 target = expand_builtin_unop (target_mode, exp, target,
8199 subtarget, op_optab: parity_optab);
8200 if (target)
8201 return target;
8202 break;
8203
8204 case BUILT_IN_STRLEN:
8205 target = expand_builtin_strlen (exp, target, target_mode);
8206 if (target)
8207 return target;
8208 break;
8209
8210 case BUILT_IN_STRNLEN:
8211 target = expand_builtin_strnlen (exp, target, target_mode);
8212 if (target)
8213 return target;
8214 break;
8215
8216 case BUILT_IN_STRCPY:
8217 target = expand_builtin_strcpy (exp, target);
8218 if (target)
8219 return target;
8220 break;
8221
8222 case BUILT_IN_STRNCPY:
8223 target = expand_builtin_strncpy (exp, target);
8224 if (target)
8225 return target;
8226 break;
8227
8228 case BUILT_IN_STPCPY:
8229 target = expand_builtin_stpcpy (exp, target, mode);
8230 if (target)
8231 return target;
8232 break;
8233
8234 case BUILT_IN_MEMCPY:
8235 target = expand_builtin_memcpy (exp, target);
8236 if (target)
8237 return target;
8238 break;
8239
8240 case BUILT_IN_MEMMOVE:
8241 target = expand_builtin_memmove (exp, target);
8242 if (target)
8243 return target;
8244 break;
8245
8246 case BUILT_IN_MEMPCPY:
8247 target = expand_builtin_mempcpy (exp, target);
8248 if (target)
8249 return target;
8250 break;
8251
8252 case BUILT_IN_MEMSET:
8253 target = expand_builtin_memset (exp, target, mode);
8254 if (target)
8255 return target;
8256 break;
8257
8258 case BUILT_IN_BZERO:
8259 target = expand_builtin_bzero (exp);
8260 if (target)
8261 return target;
8262 break;
8263
8264 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8265 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
8266 when changing it to a strcmp call. */
8267 case BUILT_IN_STRCMP_EQ:
8268 target = expand_builtin_memcmp (exp, target, result_eq: true);
8269 if (target)
8270 return target;
8271
8272 /* Change this call back to a BUILT_IN_STRCMP. */
8273 TREE_OPERAND (exp, 1)
8274 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
8275
8276 /* Delete the last parameter. */
8277 unsigned int i;
8278 vec<tree, va_gc> *arg_vec;
8279 vec_alloc (v&: arg_vec, nelems: 2);
8280 for (i = 0; i < 2; i++)
8281 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
8282 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
8283 /* FALLTHROUGH */
8284
8285 case BUILT_IN_STRCMP:
8286 target = expand_builtin_strcmp (exp, target);
8287 if (target)
8288 return target;
8289 break;
8290
8291 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8292 back to a BUILT_IN_STRNCMP. */
8293 case BUILT_IN_STRNCMP_EQ:
8294      target = expand_builtin_memcmp (exp, target, true);
8295 if (target)
8296 return target;
8297
8298 /* Change it back to a BUILT_IN_STRNCMP. */
8299 TREE_OPERAND (exp, 1)
8300 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
8301 /* FALLTHROUGH */
8302
8303 case BUILT_IN_STRNCMP:
8304 target = expand_builtin_strncmp (exp, target, mode);
8305 if (target)
8306 return target;
8307 break;
8308
8309 case BUILT_IN_BCMP:
8310 case BUILT_IN_MEMCMP:
8311 case BUILT_IN_MEMCMP_EQ:
8312      target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
8313 if (target)
8314 return target;
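      /* Inline expansion failed.  If this was the _EQ variant, redirect
	 the call to the ordinary memcmp decl so that the fall-through
	 library call below has a real entry point.  */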
8315 if (fcode == BUILT_IN_MEMCMP_EQ)
8316 {
8317	  tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
8318 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
8319 }
8320 break;
8321
8322 case BUILT_IN_SETJMP:
8323 /* This should have been lowered to the builtins below. */
8324 gcc_unreachable ();
8325
8326 case BUILT_IN_SETJMP_SETUP:
8327 /* __builtin_setjmp_setup is passed a pointer to an array of five words
8328 and the receiver label. */
8329      if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8330 {
8331	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8332				      VOIDmode, EXPAND_NORMAL);
8333 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
8334 rtx_insn *label_r = label_rtx (label);
8335
8336	  expand_builtin_setjmp_setup (buf_addr, label_r);
8337 return const0_rtx;
8338 }
8339 break;
8340
8341 case BUILT_IN_SETJMP_RECEIVER:
8342 /* __builtin_setjmp_receiver is passed the receiver label. */
8343      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8344 {
8345 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
8346 rtx_insn *label_r = label_rtx (label);
8347
8348	  expand_builtin_setjmp_receiver (label_r);
8349 nonlocal_goto_handler_labels
8350 = gen_rtx_INSN_LIST (VOIDmode, label_r,
8351 nonlocal_goto_handler_labels);
8352 /* ??? Do not let expand_label treat us as such since we would
8353 not want to be both on the list of non-local labels and on
8354 the list of forced labels. */
8355 FORCED_LABEL (label) = 0;
8356 return const0_rtx;
8357 }
8358 break;
8359
8360 /* __builtin_longjmp is passed a pointer to an array of five words.
8361 It's similar to the C library longjmp function but works with
8362 __builtin_setjmp above. */
8363 case BUILT_IN_LONGJMP:
8364      if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8365 {
8366	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8367				      VOIDmode, EXPAND_NORMAL);
8368 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
8369
8370 if (value != const1_rtx)
8371 {
8372 error ("%<__builtin_longjmp%> second argument must be 1");
8373 return const0_rtx;
8374 }
8375
8376 expand_builtin_longjmp (buf_addr, value);
8377 return const0_rtx;
8378 }
8379 break;
8380
8381 case BUILT_IN_NONLOCAL_GOTO:
8382 target = expand_builtin_nonlocal_goto (exp);
8383 if (target)
8384 return target;
8385 break;
8386
8387 /* This updates the setjmp buffer that is its argument with the value
8388 of the current stack pointer. */
8389 case BUILT_IN_UPDATE_SETJMP_BUF:
8390      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8391 {
8392 rtx buf_addr
8393 = expand_normal (CALL_EXPR_ARG (exp, 0));
8394
8395 expand_builtin_update_setjmp_buf (buf_addr);
8396 return const0_rtx;
8397 }
8398 break;
8399
8400 case BUILT_IN_TRAP:
8401 case BUILT_IN_UNREACHABLE_TRAP:
8402 expand_builtin_trap ();
8403 return const0_rtx;
8404
8405 case BUILT_IN_UNREACHABLE:
8406 expand_builtin_unreachable ();
8407 return const0_rtx;
8408
8409 CASE_FLT_FN (BUILT_IN_SIGNBIT):
8410 case BUILT_IN_SIGNBITD32:
8411 case BUILT_IN_SIGNBITD64:
8412 case BUILT_IN_SIGNBITD128:
8413 target = expand_builtin_signbit (exp, target);
8414 if (target)
8415 return target;
8416 break;
8417
8418 /* Various hooks for the DWARF 2 __throw routine. */
8419 case BUILT_IN_UNWIND_INIT:
8420 expand_builtin_unwind_init ();
8421 return const0_rtx;
8422 case BUILT_IN_DWARF_CFA:
8423 return virtual_cfa_rtx;
8424#ifdef DWARF2_UNWIND_INFO
8425 case BUILT_IN_DWARF_SP_COLUMN:
8426 return expand_builtin_dwarf_sp_column ();
8427 case BUILT_IN_INIT_DWARF_REG_SIZES:
8428 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
8429 return const0_rtx;
8430#endif
8431 case BUILT_IN_FROB_RETURN_ADDR:
8432 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
8433 case BUILT_IN_EXTRACT_RETURN_ADDR:
8434 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
8435 case BUILT_IN_EH_RETURN:
8436 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
8437 CALL_EXPR_ARG (exp, 1));
8438 return const0_rtx;
8439 case BUILT_IN_EH_RETURN_DATA_REGNO:
8440 return expand_builtin_eh_return_data_regno (exp);
8441 case BUILT_IN_EXTEND_POINTER:
8442 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
8443 case BUILT_IN_EH_POINTER:
8444 return expand_builtin_eh_pointer (exp);
8445 case BUILT_IN_EH_FILTER:
8446 return expand_builtin_eh_filter (exp);
8447 case BUILT_IN_EH_COPY_VALUES:
8448 return expand_builtin_eh_copy_values (exp);
8449
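    /* The stdarg, expect and assume_aligned builtins below return
       directly instead of breaking out to the generic library-call path
       at the end of the switch.  */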
8450 case BUILT_IN_VA_START:
8451 return expand_builtin_va_start (exp);
8452 case BUILT_IN_VA_END:
8453 return expand_builtin_va_end (exp);
8454 case BUILT_IN_VA_COPY:
8455 return expand_builtin_va_copy (exp);
8456 case BUILT_IN_EXPECT:
8457 return expand_builtin_expect (exp, target);
8458 case BUILT_IN_EXPECT_WITH_PROBABILITY:
8459 return expand_builtin_expect_with_probability (exp, target);
8460 case BUILT_IN_ASSUME_ALIGNED:
8461 return expand_builtin_assume_aligned (exp, target);
8462 case BUILT_IN_PREFETCH:
8463 expand_builtin_prefetch (exp);
8464 return const0_rtx;
8465
8466 case BUILT_IN_INIT_TRAMPOLINE:
8467      return expand_builtin_init_trampoline (exp, true);
8468 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
8469      return expand_builtin_init_trampoline (exp, false);
8470 case BUILT_IN_ADJUST_TRAMPOLINE:
8471 return expand_builtin_adjust_trampoline (exp);
8472
8473 case BUILT_IN_INIT_DESCRIPTOR:
8474 return expand_builtin_init_descriptor (exp);
8475 case BUILT_IN_ADJUST_DESCRIPTOR:
8476 return expand_builtin_adjust_descriptor (exp);
8477
8478 case BUILT_IN_GCC_NESTED_PTR_CREATED:
8479 case BUILT_IN_GCC_NESTED_PTR_DELETED:
8480 break; /* At present, no expansion, just call the function. */
8481
8482 case BUILT_IN_FORK:
8483 case BUILT_IN_EXECL:
8484 case BUILT_IN_EXECV:
8485 case BUILT_IN_EXECLP:
8486 case BUILT_IN_EXECLE:
8487 case BUILT_IN_EXECVP:
8488 case BUILT_IN_EXECVE:
8489      target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
8490 if (target)
8491 return target;
8492 break;
8493
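    /* The __sync_fetch_and_OP builtins.  The _1/_2/_4/_8/_16 suffix
       selects the memory mode, and the boolean passed to
       expand_builtin_sync_operation is false because these return the
       value the memory held before the operation; the __sync_OP_and_fetch
       variants further down pass true instead.  */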
8494 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
8495 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
8496 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
8497 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
8498 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
8499      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
8500      target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
8501 if (target)
8502 return target;
8503 break;
8504
8505 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
8506 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
8507 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
8508 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
8509 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
8510      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
8511      target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
8512 if (target)
8513 return target;
8514 break;
8515
8516 case BUILT_IN_SYNC_FETCH_AND_OR_1:
8517 case BUILT_IN_SYNC_FETCH_AND_OR_2:
8518 case BUILT_IN_SYNC_FETCH_AND_OR_4:
8519 case BUILT_IN_SYNC_FETCH_AND_OR_8:
8520 case BUILT_IN_SYNC_FETCH_AND_OR_16:
8521      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
8522      target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
8523 if (target)
8524 return target;
8525 break;
8526
8527 case BUILT_IN_SYNC_FETCH_AND_AND_1:
8528 case BUILT_IN_SYNC_FETCH_AND_AND_2:
8529 case BUILT_IN_SYNC_FETCH_AND_AND_4:
8530 case BUILT_IN_SYNC_FETCH_AND_AND_8:
8531 case BUILT_IN_SYNC_FETCH_AND_AND_16:
8532      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
8533      target = expand_builtin_sync_operation (mode, exp, AND, false, target);
8534 if (target)
8535 return target;
8536 break;
8537
8538 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
8539 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
8540 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
8541 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
8542 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
8543      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
8544      target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
8545 if (target)
8546 return target;
8547 break;
8548
8549 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
8550 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
8551 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
8552 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
8553 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
8554      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
8555      target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
8556 if (target)
8557 return target;
8558 break;
8559
8560 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
8561 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
8562 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
8563 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
8564 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
8565      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
8566      target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
8567 if (target)
8568 return target;
8569 break;
8570
8571 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
8572 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
8573 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
8574 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
8575 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
8576      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
8577      target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
8578 if (target)
8579 return target;
8580 break;
8581
8582 case BUILT_IN_SYNC_OR_AND_FETCH_1:
8583 case BUILT_IN_SYNC_OR_AND_FETCH_2:
8584 case BUILT_IN_SYNC_OR_AND_FETCH_4:
8585 case BUILT_IN_SYNC_OR_AND_FETCH_8:
8586 case BUILT_IN_SYNC_OR_AND_FETCH_16:
8587      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
8588      target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
8589 if (target)
8590 return target;
8591 break;
8592
8593 case BUILT_IN_SYNC_AND_AND_FETCH_1:
8594 case BUILT_IN_SYNC_AND_AND_FETCH_2:
8595 case BUILT_IN_SYNC_AND_AND_FETCH_4:
8596 case BUILT_IN_SYNC_AND_AND_FETCH_8:
8597 case BUILT_IN_SYNC_AND_AND_FETCH_16:
8598      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
8599      target = expand_builtin_sync_operation (mode, exp, AND, true, target);
8600 if (target)
8601 return target;
8602 break;
8603
8604 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
8605 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
8606 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
8607 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
8608 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
8609      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
8610      target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
8611 if (target)
8612 return target;
8613 break;
8614
8615 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
8616 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
8617 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
8618 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8619 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8620      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
8621      target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
8622 if (target)
8623 return target;
8624 break;
8625
8626 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
8627 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
8628 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
8629 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
8630 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
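      /* The result of the bool variant is a flag, so set up a register
	 in boolean mode for it before MODE is reused below for the
	 memory operand.  */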
8631 if (mode == VOIDmode)
8632 mode = TYPE_MODE (boolean_type_node);
8633 if (!target || !register_operand (target, mode))
8634 target = gen_reg_rtx (mode);
8635
8636 mode = get_builtin_sync_mode
8637	(fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
8638      target = expand_builtin_compare_and_swap (mode, exp, true, target);
8639 if (target)
8640 return target;
8641 break;
8642
8643 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
8644 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
8645 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
8646 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
8647 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
8648 mode = get_builtin_sync_mode
8649	(fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
8650      target = expand_builtin_compare_and_swap (mode, exp, false, target);
8651 if (target)
8652 return target;
8653 break;
8654
8655 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
8656 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
8657 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8658 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8659 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8660      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8661 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
8662 if (target)
8663 return target;
8664 break;
8665
8666 case BUILT_IN_SYNC_LOCK_RELEASE_1:
8667 case BUILT_IN_SYNC_LOCK_RELEASE_2:
8668 case BUILT_IN_SYNC_LOCK_RELEASE_4:
8669 case BUILT_IN_SYNC_LOCK_RELEASE_8:
8670 case BUILT_IN_SYNC_LOCK_RELEASE_16:
8671      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8672 if (expand_builtin_sync_lock_release (mode, exp))
8673 return const0_rtx;
8674 break;
8675
8676 case BUILT_IN_SYNC_SYNCHRONIZE:
8677 expand_builtin_sync_synchronize ();
8678 return const0_rtx;
8679
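    /* The __atomic_* builtins.  As with the __sync_* family above, the
       size suffix on the function code selects the memory mode via
       get_builtin_sync_mode.  */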
8680 case BUILT_IN_ATOMIC_EXCHANGE_1:
8681 case BUILT_IN_ATOMIC_EXCHANGE_2:
8682 case BUILT_IN_ATOMIC_EXCHANGE_4:
8683 case BUILT_IN_ATOMIC_EXCHANGE_8:
8684 case BUILT_IN_ATOMIC_EXCHANGE_16:
8685      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8686 target = expand_builtin_atomic_exchange (mode, exp, target);
8687 if (target)
8688 return target;
8689 break;
8690
8691 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8692 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8693 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8694 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8695 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
8696 {
8697 unsigned int nargs, z;
8698 vec<tree, va_gc> *vec;
8699
8700 mode =
8701	  get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8702 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8703 if (target)
8704 return target;
8705
8706 /* If this is turned into an external library call, the weak parameter
8707 must be dropped to match the expected parameter list. */
8708 nargs = call_expr_nargs (exp);
8709	vec_alloc (vec, nargs - 1);
8710 for (z = 0; z < 3; z++)
8711 vec->quick_push (CALL_EXPR_ARG (exp, z));
8712 /* Skip the boolean weak parameter. */
8713 for (z = 4; z < 6; z++)
8714 vec->quick_push (CALL_EXPR_ARG (exp, z));
8715 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8716 break;
8717 }
8718
8719 case BUILT_IN_ATOMIC_LOAD_1:
8720 case BUILT_IN_ATOMIC_LOAD_2:
8721 case BUILT_IN_ATOMIC_LOAD_4:
8722 case BUILT_IN_ATOMIC_LOAD_8:
8723 case BUILT_IN_ATOMIC_LOAD_16:
8724      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8725 target = expand_builtin_atomic_load (mode, exp, target);
8726 if (target)
8727 return target;
8728 break;
8729
8730 case BUILT_IN_ATOMIC_STORE_1:
8731 case BUILT_IN_ATOMIC_STORE_2:
8732 case BUILT_IN_ATOMIC_STORE_4:
8733 case BUILT_IN_ATOMIC_STORE_8:
8734 case BUILT_IN_ATOMIC_STORE_16:
8735      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8736 target = expand_builtin_atomic_store (mode, exp);
8737 if (target)
8738 return const0_rtx;
8739 break;
8740
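    /* For the __atomic_OP_fetch builtins, also compute the matching
       __atomic_fetch_OP function code; it is passed to
       expand_builtin_atomic_fetch_op as the external-call fallback to use
       if the operation cannot be expanded inline.  */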
8741 case BUILT_IN_ATOMIC_ADD_FETCH_1:
8742 case BUILT_IN_ATOMIC_ADD_FETCH_2:
8743 case BUILT_IN_ATOMIC_ADD_FETCH_4:
8744 case BUILT_IN_ATOMIC_ADD_FETCH_8:
8745 case BUILT_IN_ATOMIC_ADD_FETCH_16:
8746 {
8747 enum built_in_function lib;
8748	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8749	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8750				       (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8751	target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8752						 ignore, lib);
8753 if (target)
8754 return target;
8755 break;
8756 }
8757 case BUILT_IN_ATOMIC_SUB_FETCH_1:
8758 case BUILT_IN_ATOMIC_SUB_FETCH_2:
8759 case BUILT_IN_ATOMIC_SUB_FETCH_4:
8760 case BUILT_IN_ATOMIC_SUB_FETCH_8:
8761 case BUILT_IN_ATOMIC_SUB_FETCH_16:
8762 {
8763 enum built_in_function lib;
8764	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8765	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8766				       (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8767	target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8768						 ignore, lib);
8769 if (target)
8770 return target;
8771 break;
8772 }
8773 case BUILT_IN_ATOMIC_AND_FETCH_1:
8774 case BUILT_IN_ATOMIC_AND_FETCH_2:
8775 case BUILT_IN_ATOMIC_AND_FETCH_4:
8776 case BUILT_IN_ATOMIC_AND_FETCH_8:
8777 case BUILT_IN_ATOMIC_AND_FETCH_16:
8778 {
8779 enum built_in_function lib;
8780	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8781	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8782				       (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8783	target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8784						 ignore, lib);
8785 if (target)
8786 return target;
8787 break;
8788 }
8789 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8790 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8791 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8792 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8793 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8794 {
8795 enum built_in_function lib;
8796	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8797	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8798				       (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8799	target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8800						 ignore, lib);
8801 if (target)
8802 return target;
8803 break;
8804 }
8805 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8806 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8807 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8808 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8809 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8810 {
8811 enum built_in_function lib;
8812	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8813	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8814				       (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8815	target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8816						 ignore, lib);
8817 if (target)
8818 return target;
8819 break;
8820 }
8821 case BUILT_IN_ATOMIC_OR_FETCH_1:
8822 case BUILT_IN_ATOMIC_OR_FETCH_2:
8823 case BUILT_IN_ATOMIC_OR_FETCH_4:
8824 case BUILT_IN_ATOMIC_OR_FETCH_8:
8825 case BUILT_IN_ATOMIC_OR_FETCH_16:
8826 {
8827 enum built_in_function lib;
8828	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8829	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8830				       (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8831	target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8832						 ignore, lib);
8833 if (target)
8834 return target;
8835 break;
8836 }
8837 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8838 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8839 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8840 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8841 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8842      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8843      target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8844					       ignore, BUILT_IN_NONE);
8845 if (target)
8846 return target;
8847 break;
8848
8849 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8850 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8851 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8852 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8853 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8854      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8855      target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8856					       ignore, BUILT_IN_NONE);
8857 if (target)
8858 return target;
8859 break;
8860
8861 case BUILT_IN_ATOMIC_FETCH_AND_1:
8862 case BUILT_IN_ATOMIC_FETCH_AND_2:
8863 case BUILT_IN_ATOMIC_FETCH_AND_4:
8864 case BUILT_IN_ATOMIC_FETCH_AND_8:
8865 case BUILT_IN_ATOMIC_FETCH_AND_16:
8866      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8867      target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8868					       ignore, BUILT_IN_NONE);
8869 if (target)
8870 return target;
8871 break;
8872
8873 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8874 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8875 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8876 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8877 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8878      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8879      target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8880					       ignore, BUILT_IN_NONE);
8881 if (target)
8882 return target;
8883 break;
8884
8885 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8886 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8887 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8888 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8889 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8890      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8891      target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8892					       ignore, BUILT_IN_NONE);
8893 if (target)
8894 return target;
8895 break;
8896
8897 case BUILT_IN_ATOMIC_FETCH_OR_1:
8898 case BUILT_IN_ATOMIC_FETCH_OR_2:
8899 case BUILT_IN_ATOMIC_FETCH_OR_4:
8900 case BUILT_IN_ATOMIC_FETCH_OR_8:
8901 case BUILT_IN_ATOMIC_FETCH_OR_16:
8902      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8903      target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8904					       ignore, BUILT_IN_NONE);
8905 if (target)
8906 return target;
8907 break;
8908
8909 case BUILT_IN_ATOMIC_TEST_AND_SET:
8910 target = expand_builtin_atomic_test_and_set (exp, target);
8911 if (target)
8912 return target;
8913 break;
8914
8915 case BUILT_IN_ATOMIC_CLEAR:
8916 return expand_builtin_atomic_clear (exp);
8917
8918 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8919 return expand_builtin_atomic_always_lock_free (exp);
8920
8921 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8922 target = expand_builtin_atomic_is_lock_free (exp);
8923 if (target)
8924 return target;
8925 break;
8926
8927 case BUILT_IN_ATOMIC_THREAD_FENCE:
8928 expand_builtin_atomic_thread_fence (exp);
8929 return const0_rtx;
8930
8931 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8932 expand_builtin_atomic_signal_fence (exp);
8933 return const0_rtx;
8934
8935 case BUILT_IN_OBJECT_SIZE:
8936 case BUILT_IN_DYNAMIC_OBJECT_SIZE:
8937 return expand_builtin_object_size (exp);
8938
8939 case BUILT_IN_MEMCPY_CHK:
8940 case BUILT_IN_MEMPCPY_CHK:
8941 case BUILT_IN_MEMMOVE_CHK:
8942 case BUILT_IN_MEMSET_CHK:
8943 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8944 if (target)
8945 return target;
8946 break;
8947
8948 case BUILT_IN_STRCPY_CHK:
8949 case BUILT_IN_STPCPY_CHK:
8950 case BUILT_IN_STRNCPY_CHK:
8951 case BUILT_IN_STPNCPY_CHK:
8952 case BUILT_IN_STRCAT_CHK:
8953 case BUILT_IN_STRNCAT_CHK:
8954 case BUILT_IN_SNPRINTF_CHK:
8955 case BUILT_IN_VSNPRINTF_CHK:
8956 maybe_emit_chk_warning (exp, fcode);
8957 break;
8958
8959 case BUILT_IN_SPRINTF_CHK:
8960 case BUILT_IN_VSPRINTF_CHK:
8961 maybe_emit_sprintf_chk_warning (exp, fcode);
8962 break;
8963
8964 case BUILT_IN_THREAD_POINTER:
8965 return expand_builtin_thread_pointer (exp, target);
8966
8967 case BUILT_IN_SET_THREAD_POINTER:
8968 expand_builtin_set_thread_pointer (exp);
8969 return const0_rtx;
8970
8971 case BUILT_IN_ACC_ON_DEVICE:
8972 /* Do library call, if we failed to expand the builtin when
8973 folding. */
8974 break;
8975
8976 case BUILT_IN_GOACC_PARLEVEL_ID:
8977 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8978 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8979
8980 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8981 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8982
8983 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8984 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8985 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8986 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8987 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8988      mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8989 return expand_speculation_safe_value (mode, exp, target, ignore);
8990
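    /* Table-based CRC builtins.  The first mode argument is the mode of
       the CRC value and the second the mode of the data chunk, matching
       the CRCn_DATAm encoding of the names; the _REV variants use the
       bit-reversed internal function.  */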
8991 case BUILT_IN_CRC8_DATA8:
8992      return expand_builtin_crc_table_based (IFN_CRC, QImode, QImode, mode,
8993					     exp, target);
8994    case BUILT_IN_CRC16_DATA8:
8995      return expand_builtin_crc_table_based (IFN_CRC, HImode, QImode, mode,
8996					     exp, target);
8997    case BUILT_IN_CRC16_DATA16:
8998      return expand_builtin_crc_table_based (IFN_CRC, HImode, HImode, mode,
8999					     exp, target);
9000    case BUILT_IN_CRC32_DATA8:
9001      return expand_builtin_crc_table_based (IFN_CRC, SImode, QImode, mode,
9002					     exp, target);
9003    case BUILT_IN_CRC32_DATA16:
9004      return expand_builtin_crc_table_based (IFN_CRC, SImode, HImode, mode,
9005					     exp, target);
9006    case BUILT_IN_CRC32_DATA32:
9007      return expand_builtin_crc_table_based (IFN_CRC, SImode, SImode, mode,
9008					     exp, target);
9009    case BUILT_IN_CRC64_DATA8:
9010      return expand_builtin_crc_table_based (IFN_CRC, DImode, QImode, mode,
9011					     exp, target);
9012    case BUILT_IN_CRC64_DATA16:
9013      return expand_builtin_crc_table_based (IFN_CRC, DImode, HImode, mode,
9014					     exp, target);
9015    case BUILT_IN_CRC64_DATA32:
9016      return expand_builtin_crc_table_based (IFN_CRC, DImode, SImode, mode,
9017					     exp, target);
9018    case BUILT_IN_CRC64_DATA64:
9019      return expand_builtin_crc_table_based (IFN_CRC, DImode, DImode, mode,
9020					     exp, target);
9021    case BUILT_IN_REV_CRC8_DATA8:
9022      return expand_builtin_crc_table_based (IFN_CRC_REV, QImode, QImode,
9023					     mode, exp, target);
9024    case BUILT_IN_REV_CRC16_DATA8:
9025      return expand_builtin_crc_table_based (IFN_CRC_REV, HImode, QImode,
9026					     mode, exp, target);
9027    case BUILT_IN_REV_CRC16_DATA16:
9028      return expand_builtin_crc_table_based (IFN_CRC_REV, HImode, HImode,
9029					     mode, exp, target);
9030    case BUILT_IN_REV_CRC32_DATA8:
9031      return expand_builtin_crc_table_based (IFN_CRC_REV, SImode, QImode,
9032					     mode, exp, target);
9033    case BUILT_IN_REV_CRC32_DATA16:
9034      return expand_builtin_crc_table_based (IFN_CRC_REV, SImode, HImode,
9035					     mode, exp, target);
9036    case BUILT_IN_REV_CRC32_DATA32:
9037      return expand_builtin_crc_table_based (IFN_CRC_REV, SImode, SImode,
9038					     mode, exp, target);
9039    case BUILT_IN_REV_CRC64_DATA8:
9040      return expand_builtin_crc_table_based (IFN_CRC_REV, DImode, QImode,
9041					     mode, exp, target);
9042    case BUILT_IN_REV_CRC64_DATA16:
9043      return expand_builtin_crc_table_based (IFN_CRC_REV, DImode, HImode,
9044					     mode, exp, target);
9045    case BUILT_IN_REV_CRC64_DATA32:
9046      return expand_builtin_crc_table_based (IFN_CRC_REV, DImode, SImode,
9047					     mode, exp, target);
9048    case BUILT_IN_REV_CRC64_DATA64:
9049      return expand_builtin_crc_table_based (IFN_CRC_REV, DImode, DImode,
9050					     mode, exp, target);
9051 default: /* just do library call, if unknown builtin */
9052 break;
9053 }
9054
9055 /* The switch statement above can drop through to cause the function
9056 to be called normally. */
9057 return expand_call (exp, target, ignore);
9058}
9059
9060/* Determine whether a tree node represents a call to a built-in
9061 function. If the tree T is a call to a built-in function with
9062 the right number of arguments of the appropriate types, return
9063 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
9064 Otherwise the return value is END_BUILTINS. */
9065
9066enum built_in_function
9067builtin_mathfn_code (const_tree t)
9068{
9069 const_tree fndecl, arg, parmlist;
9070 const_tree argtype, parmtype;
9071 const_call_expr_arg_iterator iter;
9072
9073 if (TREE_CODE (t) != CALL_EXPR)
9074 return END_BUILTINS;
9075
9076 fndecl = get_callee_fndecl (t);
9077  if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
9078 return END_BUILTINS;
9079
9080 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
9081  init_const_call_expr_arg_iterator (t, &iter);
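  /* Walk the prototype's parameter list in parallel with the call's
     arguments, requiring each argument to be in the same type class as
     the corresponding parameter.  */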
9082 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
9083 {
9084 /* If a function doesn't take a variable number of arguments,
9085 the last element in the list will have type `void'. */
9086 parmtype = TREE_VALUE (parmlist);
9087 if (VOID_TYPE_P (parmtype))
9088 {
9089	  if (more_const_call_expr_args_p (&iter))
9090 return END_BUILTINS;
9091	  return DECL_FUNCTION_CODE (fndecl);
9092 }
9093
9094      if (! more_const_call_expr_args_p (&iter))
9095 return END_BUILTINS;
9096
9097      arg = next_const_call_expr_arg (&iter);
9098 argtype = TREE_TYPE (arg);
9099
9100 if (SCALAR_FLOAT_TYPE_P (parmtype))
9101 {
9102 if (! SCALAR_FLOAT_TYPE_P (argtype))
9103 return END_BUILTINS;
9104 }
9105 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
9106 {
9107 if (! COMPLEX_FLOAT_TYPE_P (argtype))
9108 return END_BUILTINS;
9109 }
9110 else if (POINTER_TYPE_P (parmtype))
9111 {
9112 if (! POINTER_TYPE_P (argtype))
9113 return END_BUILTINS;
9114 }
9115 else if (INTEGRAL_TYPE_P (parmtype))
9116 {
9117 if (! INTEGRAL_TYPE_P (argtype))
9118 return END_BUILTINS;
9119 }
9120 else
9121 return END_BUILTINS;
9122 }
9123
9124 /* Variable-length argument list. */
9125  return DECL_FUNCTION_CODE (fndecl);
9126}
9127
9128/* Fold a call to __builtin_constant_p, if we know its argument ARG will
9129 evaluate to a constant. */
9130
9131static tree
9132fold_builtin_constant_p (tree arg)
9133{
9134 /* We return 1 for a numeric type that's known to be a constant
9135 value at compile-time or for an aggregate type that's a
9136 literal constant. */
9137 STRIP_NOPS (arg);
9138
9139 /* If we know this is a constant, emit the constant of one. */
9140 if (CONSTANT_CLASS_P (arg)
9141 || (TREE_CODE (arg) == CONSTRUCTOR
9142 && TREE_CONSTANT (arg)))
9143 return integer_one_node;
9144 if (TREE_CODE (arg) == ADDR_EXPR)
9145 {
9146 tree op = TREE_OPERAND (arg, 0);
9147 if (TREE_CODE (op) == STRING_CST
9148 || (TREE_CODE (op) == ARRAY_REF
9149 && integer_zerop (TREE_OPERAND (op, 1))
9150 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
9151 return integer_one_node;
9152 }
9153
9154 /* If this expression has side effects, show we don't know it to be a
9155 constant. Likewise if it's a pointer or aggregate type since in
9156     those cases we only want literals, since those are only optimized
9157 when generating RTL, not later.
9158 And finally, if we are compiling an initializer, not code, we
9159 need to return a definite result now; there's not going to be any
9160 more optimization done. */
9161 if (TREE_SIDE_EFFECTS (arg)
9162 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
9163 || POINTER_TYPE_P (TREE_TYPE (arg))
9164 || cfun == 0
9165 || folding_initializer
9166 || force_folding_builtin_constant_p)
9167 return integer_zero_node;
9168
9169 return NULL_TREE;
9170}
9171
9172/* Create builtin_expect or builtin_expect_with_probability
9173 with PRED and EXPECTED as its arguments and return it as a truthvalue.
9174   The Fortran FE can also produce builtin_expect with PREDICTOR as the third
9175   argument; builtin_expect_with_probability instead uses the third argument
9176   as the PROBABILITY value.  */
9177
9178static tree
9179build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
9180 tree predictor, tree probability)
9181{
9182 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
9183
9184  fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
9185 : BUILT_IN_EXPECT_WITH_PROBABILITY);
9186 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
9187 ret_type = TREE_TYPE (TREE_TYPE (fn));
9188 pred_type = TREE_VALUE (arg_types);
9189 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
9190
9191 pred = fold_convert_loc (loc, pred_type, pred);
9192 expected = fold_convert_loc (loc, expected_type, expected);
9193
9194 if (probability)
9195 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
9196 else
9197 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
9198 predictor);
9199
9200 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
9201 build_int_cst (ret_type, 0));
9202}
9203
9204/* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
9205 NULL_TREE if no simplification is possible. */
9206
9207tree
9208fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
9209 tree arg3)
9210{
9211 tree inner, fndecl, inner_arg0;
9212 enum tree_code code;
9213
9214 /* Distribute the expected value over short-circuiting operators.
9215 See through the cast from truthvalue_type_node to long. */
9216 inner_arg0 = arg0;
9217 while (CONVERT_EXPR_P (inner_arg0)
9218 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
9219 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
9220 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
9221
9222 /* If this is a builtin_expect within a builtin_expect keep the
9223 inner one. See through a comparison against a constant. It
9224     might have been added to create a truthvalue.  */
9225 inner = inner_arg0;
9226
9227 if (COMPARISON_CLASS_P (inner)
9228 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
9229 inner = TREE_OPERAND (inner, 0);
9230
9231 if (TREE_CODE (inner) == CALL_EXPR
9232 && (fndecl = get_callee_fndecl (inner))
9233      && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT,
9234			    BUILT_IN_EXPECT_WITH_PROBABILITY))
9235 return arg0;
9236
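  /* For example, __builtin_expect (a && b, 1) is rewritten below as
     __builtin_expect (a, 1) && __builtin_expect (b, 1) so that each
     operand of the short-circuit operator carries the hint.  */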
9237 inner = inner_arg0;
9238 code = TREE_CODE (inner);
9239 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
9240 {
9241 tree op0 = TREE_OPERAND (inner, 0);
9242 tree op1 = TREE_OPERAND (inner, 1);
9243 arg1 = save_expr (arg1);
9244
9245      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
9246      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
9247 inner = build2 (code, TREE_TYPE (inner), op0, op1);
9248
9249 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
9250 }
9251
9252 /* If the argument isn't invariant then there's nothing else we can do. */
9253 if (!TREE_CONSTANT (inner_arg0))
9254 return NULL_TREE;
9255
9256 /* If we expect that a comparison against the argument will fold to
9257 a constant return the constant. In practice, this means a true
9258 constant or the address of a non-weak symbol. */
9259 inner = inner_arg0;
9260 STRIP_NOPS (inner);
9261 if (TREE_CODE (inner) == ADDR_EXPR)
9262 {
9263 do
9264 {
9265 inner = TREE_OPERAND (inner, 0);
9266 }
9267 while (TREE_CODE (inner) == COMPONENT_REF
9268 || TREE_CODE (inner) == ARRAY_REF);
9269 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
9270 return NULL_TREE;
9271 }
9272
9273 /* Otherwise, ARG0 already has the proper type for the return value. */
9274 return arg0;
9275}
9276
9277/* Fold a call to __builtin_classify_type with argument ARG. */
9278
9279static tree
9280fold_builtin_classify_type (tree arg)
9281{
9282 if (arg == 0)
9283 return build_int_cst (integer_type_node, no_type_class);
9284
9285 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
9286}
9287
9288/* Fold a call EXPR (which may be null) to __builtin_strlen with argument
9289 ARG. */
9290
9291static tree
9292fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
9293{
9294  if (!validate_arg (arg, POINTER_TYPE))
9295 return NULL_TREE;
9296 else
9297 {
9298 c_strlen_data lendata = { };
9299      tree len = c_strlen (arg, 0, &lendata);
9300
9301 if (len)
9302 return fold_convert_loc (loc, type, len);
9303
9304 /* TODO: Move this to gimple-ssa-warn-access once the pass runs
9305	 also early enough to detect invalid reads in multidimensional
9306 arrays and struct members. */
9307 if (!lendata.decl)
9308	c_strlen (arg, 1, &lendata);
9309
9310 if (lendata.decl)
9311 {
9312 if (EXPR_HAS_LOCATION (arg))
9313 loc = EXPR_LOCATION (arg);
9314 else if (loc == UNKNOWN_LOCATION)
9315 loc = input_location;
9316 warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
9317 }
9318
9319 return NULL_TREE;
9320 }
9321}
9322
9323/* Fold a call to __builtin_inf or __builtin_huge_val. */
9324
9325static tree
9326fold_builtin_inf (location_t loc, tree type, int warn)
9327{
9328 /* __builtin_inff is intended to be usable to define INFINITY on all
9329 targets. If an infinity is not available, INFINITY expands "to a
9330 positive constant of type float that overflows at translation
9331 time", footnote "In this case, using INFINITY will violate the
9332 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
9333 Thus we pedwarn to ensure this constraint violation is
9334 diagnosed. */
9335 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
9336 pedwarn (loc, 0, "target format does not support infinity");
9337
9338 return build_real (type, dconstinf);
9339}
9340
9341/* Fold function call to builtin sincos, sincosf, or sincosl. Return
9342 NULL_TREE if no simplification can be made. */
9343
9344static tree
9345fold_builtin_sincos (location_t loc,
9346 tree arg0, tree arg1, tree arg2)
9347{
9348 tree type;
9349 tree fndecl, call = NULL_TREE;
9350
9351  if (!validate_arg (arg0, REAL_TYPE)
9352      || !validate_arg (arg1, POINTER_TYPE)
9353      || !validate_arg (arg2, POINTER_TYPE))
9354 return NULL_TREE;
9355
9356 type = TREE_TYPE (arg0);
9357
9358 /* Calculate the result when the argument is a constant. */
9359  built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
9360 if (fn == END_BUILTINS)
9361 return NULL_TREE;
9362
9363 /* Canonicalize sincos to cexpi. */
9364 if (TREE_CODE (arg0) == REAL_CST)
9365 {
9366 tree complex_type = build_complex_type (type);
9367 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
9368 }
9369 if (!call)
9370 {
9371 if (!targetm.libc_has_function (function_c99_math_complex, type)
9372	  || !builtin_decl_implicit_p (fn))
9373 return NULL_TREE;
9374      fndecl = builtin_decl_explicit (fn);
9375 call = build_call_expr_loc (loc, fndecl, 1, arg0);
9376      call = builtin_save_expr (call);
9377 }
9378
9379 tree ptype = build_pointer_type (type);
9380 arg1 = fold_convert (ptype, arg1);
9381 arg2 = fold_convert (ptype, arg2);
9382 return build2 (COMPOUND_EXPR, void_type_node,
9383 build2 (MODIFY_EXPR, void_type_node,
9384 build_fold_indirect_ref_loc (loc, arg1),
9385 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
9386 build2 (MODIFY_EXPR, void_type_node,
9387 build_fold_indirect_ref_loc (loc, arg2),
9388 fold_build1_loc (loc, REALPART_EXPR, type, call)));
9389}
9390
9391/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9392 Return NULL_TREE if no simplification can be made. */
9393
9394static tree
9395fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9396{
9397  if (!validate_arg (arg1, POINTER_TYPE)
9398      || !validate_arg (arg2, POINTER_TYPE)
9399      || !validate_arg (len, INTEGER_TYPE))
9400 return NULL_TREE;
9401
9402 /* If the LEN parameter is zero, return zero. */
9403 if (integer_zerop (len))
9404 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9405 arg1, arg2);
9406
9407 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9408  if (operand_equal_p (arg1, arg2, 0))
9409 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9410
9411  /* If the LEN parameter is one, return an expression corresponding to
9412     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
9413 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9414 {
9415 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9416 tree cst_uchar_ptr_node
9417 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9418
9419 tree ind1
9420 = fold_convert_loc (loc, integer_type_node,
9421 build1 (INDIRECT_REF, cst_uchar_node,
9422 fold_convert_loc (loc,
9423 cst_uchar_ptr_node,
9424 arg1)));
9425 tree ind2
9426 = fold_convert_loc (loc, integer_type_node,
9427 build1 (INDIRECT_REF, cst_uchar_node,
9428 fold_convert_loc (loc,
9429 cst_uchar_ptr_node,
9430 arg2)));
9431 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9432 }
9433
9434 return NULL_TREE;
9435}
9436
9437/* Fold a call to builtin isascii with argument ARG. */
9438
9439static tree
9440fold_builtin_isascii (location_t loc, tree arg)
9441{
9442  if (!validate_arg (arg, INTEGER_TYPE))
9443 return NULL_TREE;
9444 else
9445 {
9446 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9447 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9448 build_int_cst (integer_type_node,
9449 ~ HOST_WIDE_INT_UC (0x7f)));
9450 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9451 arg, integer_zero_node);
9452 }
9453}
9454
9455/* Fold a call to builtin toascii with argument ARG. */
9456
9457static tree
9458fold_builtin_toascii (location_t loc, tree arg)
9459{
9460  if (!validate_arg (arg, INTEGER_TYPE))
9461 return NULL_TREE;
9462
9463 /* Transform toascii(c) -> (c & 0x7f). */
9464 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9465 build_int_cst (integer_type_node, 0x7f));
9466}
9467
9468/* Fold a call to builtin isdigit with argument ARG. */
9469
9470static tree
9471fold_builtin_isdigit (location_t loc, tree arg)
9472{
9473  if (!validate_arg (arg, INTEGER_TYPE))
9474 return NULL_TREE;
9475 else
9476 {
9477 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9478 /* According to the C standard, isdigit is unaffected by locale.
9479 However, it definitely is affected by the target character set. */
9480 unsigned HOST_WIDE_INT target_digit0
9481 = lang_hooks.to_target_charset ('0');
9482
9483 if (target_digit0 == 0)
9484 return NULL_TREE;
9485
9486 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9487 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9488 build_int_cst (unsigned_type_node, target_digit0));
9489 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9490 build_int_cst (unsigned_type_node, 9));
9491 }
9492}
9493
9494/* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9495
9496static tree
9497fold_builtin_fabs (location_t loc, tree arg, tree type)
9498{
9499  if (!validate_arg (arg, REAL_TYPE))
9500 return NULL_TREE;
9501
9502 arg = fold_convert_loc (loc, type, arg);
9503 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9504}
9505
9506/* Fold a call to abs, labs, llabs, imaxabs, uabs, ulabs, ullabs or uimaxabs
9507 with argument ARG. */
9508
9509static tree
9510fold_builtin_abs (location_t loc, tree arg, tree type)
9511{
9512  if (!validate_arg (arg, INTEGER_TYPE))
9513 return NULL_TREE;
9514
9515 if (TYPE_UNSIGNED (type))
9516 {
9517 if (TYPE_PRECISION (TREE_TYPE (arg))
9518 != TYPE_PRECISION (type)
9519 || TYPE_UNSIGNED (TREE_TYPE (arg)))
9520 return NULL_TREE;
9521 return fold_build1_loc (loc, ABSU_EXPR, type, arg);
9522 }
9523 arg = fold_convert_loc (loc, type, arg);
9524 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9525}
9526
9527/* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9528
9529static tree
9530fold_builtin_carg (location_t loc, tree arg, tree type)
9531{
9532  if (validate_arg (arg, COMPLEX_TYPE)
9533 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg))))
9534 {
9535      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9536
9537 if (atan2_fn)
9538 {
9539	  tree new_arg = builtin_save_expr (arg);
9540 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9541 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9542 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9543 }
9544 }
9545
9546 return NULL_TREE;
9547}
9548
9549/* Fold a call to builtin frexp, we can assume the base is 2. */
9550
9551static tree
9552fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9553{
9554  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9555 return NULL_TREE;
9556
9557 STRIP_NOPS (arg0);
9558
9559 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9560 return NULL_TREE;
9561
9562 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9563
9564 /* Proceed if a valid pointer type was passed in. */
9565 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9566 {
9567 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9568 tree frac, exp, res;
9569
9570 switch (value->cl)
9571 {
9572 case rvc_zero:
9573 case rvc_nan:
9574 case rvc_inf:
9575 /* For +-0, return (*exp = 0, +-0). */
9576 /* For +-NaN or +-Inf, *exp is unspecified, but something should
9577	     be stored there so that it isn't read from an uninitialized object.
9578 As glibc and newlib store *exp = 0 for +-Inf/NaN, storing
9579 0 here as well is easiest. */
9580 exp = integer_zero_node;
9581 frac = arg0;
9582 break;
9583 case rvc_normal:
9584 {
9585 /* Since the frexp function always expects base 2, and in
9586 GCC normalized significands are already in the range
9587 [0.5, 1.0), we have exactly what frexp wants. */
9588 REAL_VALUE_TYPE frac_rvt = *value;
9589 SET_REAL_EXP (&frac_rvt, 0);
9590 frac = build_real (rettype, frac_rvt);
9591 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9592 }
9593 break;
9594 default:
9595 gcc_unreachable ();
9596 }
9597
9598      /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
9599 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9600 TREE_SIDE_EFFECTS (arg1) = 1;
9601 res = fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9602 suppress_warning (res, OPT_Wunused_value);
9603 return res;
9604 }
9605
9606 return NULL_TREE;
9607}
9608
9609/* Fold a call to builtin modf. */
9610
9611static tree
9612fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9613{
9614  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9615 return NULL_TREE;
9616
9617 STRIP_NOPS (arg0);
9618
9619 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9620 return NULL_TREE;
9621
9622 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9623
9624 /* Proceed if a valid pointer type was passed in. */
9625 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9626 {
9627 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9628 REAL_VALUE_TYPE trunc, frac;
9629 tree res;
9630
9631 switch (value->cl)
9632 {
9633 case rvc_nan:
9634 case rvc_zero:
9635 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9636 trunc = frac = *value;
9637 break;
9638 case rvc_inf:
9639 /* For +-Inf, return (*arg1 = arg0, +-0). */
9640 frac = dconst0;
9641 frac.sign = value->sign;
9642 trunc = *value;
9643 break;
9644 case rvc_normal:
9645 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9646 real_trunc (&trunc, VOIDmode, value);
9647 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9648 /* If the original number was negative and already
9649 integral, then the fractional part is -0.0. */
9650 if (value->sign && frac.cl == rvc_zero)
9651 frac.sign = value->sign;
9652 break;
9653 }
9654
9655 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9656 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9657 build_real (rettype, trunc));
9658 TREE_SIDE_EFFECTS (arg1) = 1;
9659 res = fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9660 build_real (rettype, frac));
9661 suppress_warning (res, OPT_Wunused_value);
9662 return res;
9663 }
9664
9665 return NULL_TREE;
9666}
9667
9668/* Given a location LOC, an interclass builtin function decl FNDECL
9669 and its single argument ARG, return an folded expression computing
9670   and its single argument ARG, return a folded expression computing
9671   the same, or NULL_TREE if we either couldn't or didn't want to fold
9672   (the latter happens if there's an RTL instruction available).  */
9673static tree
9674fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9675{
9676 machine_mode mode;
9677
9678  if (!validate_arg (arg, REAL_TYPE))
9679 return NULL_TREE;
9680
9681 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9682 return NULL_TREE;
9683
9684 mode = TYPE_MODE (TREE_TYPE (arg));
9685
9686 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
9687
9688 /* If there is no optab, try generic code. */
9689  switch (DECL_FUNCTION_CODE (fndecl))
9690 {
9691 tree result;
9692
9693 CASE_FLT_FN (BUILT_IN_ISINF):
9694 {
9695 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9696	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9697 tree type = TREE_TYPE (arg);
9698 REAL_VALUE_TYPE r;
9699 char buf[128];
9700
9701 if (is_ibm_extended)
9702 {
9703 /* NaN and Inf are encoded in the high-order double value
9704 only. The low-order value is not significant. */
9705 type = double_type_node;
9706 mode = DFmode;
9707 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9708 }
9709 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9710 real_from_string3 (&r, buf, mode);
9711 result = build_call_expr (isgr_fn, 2,
9712 fold_build1_loc (loc, ABS_EXPR, type, arg),
9713 build_real (type, r));
9714 return result;
9715 }
9716 CASE_FLT_FN (BUILT_IN_FINITE):
9717 case BUILT_IN_ISFINITE:
9718 {
9719 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9720	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9721 tree type = TREE_TYPE (arg);
9722 REAL_VALUE_TYPE r;
9723 char buf[128];
9724
9725 if (is_ibm_extended)
9726 {
9727 /* NaN and Inf are encoded in the high-order double value
9728 only. The low-order value is not significant. */
9729 type = double_type_node;
9730 mode = DFmode;
9731 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9732 }
9733 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9734 real_from_string3 (&r, buf, mode);
9735 result = build_call_expr (isle_fn, 2,
9736 fold_build1_loc (loc, ABS_EXPR, type, arg),
9737 build_real (type, r));
9738 /*result = fold_build2_loc (loc, UNGT_EXPR,
9739 TREE_TYPE (TREE_TYPE (fndecl)),
9740 fold_build1_loc (loc, ABS_EXPR, type, arg),
9741 build_real (type, r));
9742 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9743 TREE_TYPE (TREE_TYPE (fndecl)),
9744 result);*/
9745 return result;
9746 }
9747 case BUILT_IN_ISNORMAL:
9748 {
9749 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9750 islessequal(fabs(x),DBL_MAX). */
9751	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9752 tree type = TREE_TYPE (arg);
9753 tree orig_arg, max_exp, min_exp;
9754 machine_mode orig_mode = mode;
9755 REAL_VALUE_TYPE rmax, rmin;
9756 char buf[128];
9757
9758	orig_arg = arg = builtin_save_expr (arg);
9759 if (is_ibm_extended)
9760 {
9761 /* Use double to test the normal range of IBM extended
9762 precision. Emin for IBM extended precision is
9763 different to emin for IEEE double, being 53 higher
9764 since the low double exponent is at least 53 lower
9765 than the high double exponent. */
9766 type = double_type_node;
9767 mode = DFmode;
9768 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9769 }
9770 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9771
9772 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9773 real_from_string3 (&rmax, buf, mode);
9774 if (DECIMAL_FLOAT_MODE_P (mode))
9775	  sprintf (buf, "1E%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9776	else
9777	  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9778 real_from_string3 (&rmin, buf, orig_mode);
9779 max_exp = build_real (type, rmax);
9780 min_exp = build_real (type, rmin);
9781
9782 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9783 if (is_ibm_extended)
9784 {
9785 /* Testing the high end of the range is done just using
9786 the high double, using the same test as isfinite().
9787 For the subnormal end of the range we first test the
9788 high double, then if its magnitude is equal to the
9789 limit of 0x1p-969, we test whether the low double is
9790 non-zero and opposite sign to the high double. */
9791	    tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9792	    tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9793 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9794 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9795 arg, min_exp);
9796 tree as_complex = build1 (VIEW_CONVERT_EXPR,
9797 complex_double_type_node, orig_arg);
9798 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9799 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9800 tree zero = build_real (type, dconst0);
9801 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9802 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9803 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9804 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9805 fold_build3 (COND_EXPR,
9806 integer_type_node,
9807 hilt, logt, lolt));
9808 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9809 eq_min, ok_lo);
9810 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9811 gt_min, eq_min);
9812 }
9813 else
9814 {
9815 tree const isge_fn
9816	      = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9817 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9818 }
9819 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9820 max_exp, min_exp);
9821 return result;
9822 }
9823 default:
9824 break;
9825 }
9826
9827 return NULL_TREE;
9828}
9829
9830/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9831 ARG is the argument for the call. */
9832
9833static tree
9834fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9835{
9836 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9837
9838  if (!validate_arg (arg, REAL_TYPE))
9839 return NULL_TREE;
9840
9841 switch (builtin_index)
9842 {
9843 case BUILT_IN_ISINF:
9844 if (tree_expr_infinite_p (arg))
9845 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9846 if (!tree_expr_maybe_infinite_p (arg))
9847 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9848 return NULL_TREE;
9849
9850 case BUILT_IN_ISINF_SIGN:
9851 {
9852 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9853 /* In a boolean context, GCC will fold the inner COND_EXPR to
9854 1. So e.g. "if (isinf_sign(x))" would be folded to just
9855 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9856	tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
9857	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9858 tree tmp = NULL_TREE;
9859
9860	arg = builtin_save_expr (arg);
9861
9862 if (signbit_fn && isinf_fn)
9863 {
9864 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9865 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9866
9867 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9868 signbit_call, integer_zero_node);
9869 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9870 isinf_call, integer_zero_node);
9871
9872 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9873 integer_minus_one_node, integer_one_node);
9874 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9875 isinf_call, tmp,
9876 integer_zero_node);
9877 }
9878
9879 return tmp;
9880 }
9881
9882 case BUILT_IN_ISFINITE:
9883 if (tree_expr_finite_p (arg))
9884 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9885 if (tree_expr_nan_p (arg) || tree_expr_infinite_p (arg))
9886 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9887 return NULL_TREE;
9888
9889 case BUILT_IN_ISNAN:
9890 if (tree_expr_nan_p (arg))
9891 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9892 if (!tree_expr_maybe_nan_p (arg))
9893 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9894
9895 {
9896 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9897 if (is_ibm_extended)
9898 {
9899 /* NaN and Inf are encoded in the high-order double value
9900 only. The low-order value is not significant. */
9901 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9902 }
9903 }
9904 arg = builtin_save_expr (exp: arg);
9905 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9906
9907 case BUILT_IN_ISSIGNALING:
9908 /* Folding to true for REAL_CST is done in fold_const_call_ss.
9909 Don't use tree_expr_signaling_nan_p (arg) -> integer_one_node
9910 and !tree_expr_maybe_signaling_nan_p (arg) -> integer_zero_node
9911 here, so there is some possibility of __builtin_issignaling working
9912 without -fsignaling-nans, especially when -fno-signaling-nans is
9913 the default. */
9914 if (!tree_expr_maybe_nan_p (arg))
9915 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9916 return NULL_TREE;
9917
9918 default:
9919 gcc_unreachable ();
9920 }
9921}
9922
9923/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9924 This builtin will generate code to return the appropriate floating
9925 point classification depending on the value of the floating point
9926 number passed in. The possible return values must be supplied as
9927 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9928 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9929 one floating point argument, which is "type generic". */
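/* As a point of reference (a sketch of typical library usage, not something
 this file relies on), C libraries commonly define the <math.h> type-generic
 macro along the lines of
 #define fpclassify(x) \
 __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL, \
 FP_SUBNORMAL, FP_ZERO, (x))
 so the int arguments seen here are the library's FP_* values. */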
9930
9931static tree
9932fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9933{
9934 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9935 arg, type, res, tmp;
9936 machine_mode mode;
9937 REAL_VALUE_TYPE r;
9938 char buf[128];
9939
9940 /* Verify the required arguments in the original call. */
9941 if (nargs != 6
9942 || !validate_arg (args[0], code: INTEGER_TYPE)
9943 || !validate_arg (args[1], code: INTEGER_TYPE)
9944 || !validate_arg (args[2], code: INTEGER_TYPE)
9945 || !validate_arg (args[3], code: INTEGER_TYPE)
9946 || !validate_arg (args[4], code: INTEGER_TYPE)
9947 || !validate_arg (args[5], code: REAL_TYPE))
9948 return NULL_TREE;
9949
9950 fp_nan = args[0];
9951 fp_infinite = args[1];
9952 fp_normal = args[2];
9953 fp_subnormal = args[3];
9954 fp_zero = args[4];
9955 arg = args[5];
9956 type = TREE_TYPE (arg);
9957 mode = TYPE_MODE (type);
9958 arg = builtin_save_expr (exp: fold_build1_loc (loc, ABS_EXPR, type, arg));
9959
9960 /* fpclassify(x) ->
9961 isnan(x) ? FP_NAN :
9962 (fabs(x) == Inf ? FP_INFINITE :
9963 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9964 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9965
9966 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9967 build_real (type, dconst0));
9968 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9969 tmp, fp_zero, fp_subnormal);
9970
9971 if (DECIMAL_FLOAT_MODE_P (mode))
9972 sprintf (s: buf, format: "1E%d", REAL_MODE_FORMAT (mode)->emin - 1);
9973 else
9974 sprintf (s: buf, format: "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9975 real_from_string3 (&r, buf, mode);
9976 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9977 arg, build_real (type, r));
9978 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9979 fp_normal, res);
9980
9981 if (tree_expr_maybe_infinite_p (arg))
9982 {
9983 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9984 build_real (type, dconstinf));
9985 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9986 fp_infinite, res);
9987 }
9988
9989 if (tree_expr_maybe_nan_p (arg))
9990 {
9991 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9992 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9993 res, fp_nan);
9994 }
9995
9996 return res;
9997}
9998
9999/* Fold a call to an unordered comparison function such as
10000 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10001 being called and ARG0 and ARG1 are the arguments for the call.
10002 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10003 the opposite of the desired result. UNORDERED_CODE is used
10004 for modes that can hold NaNs and ORDERED_CODE is used for
10005 the rest. */
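/* For example (one concrete instance of this scheme),
 __builtin_isgreater (x, y) reaches this function with
 UNORDERED_CODE == UNLE_EXPR and ORDERED_CODE == LE_EXPR, and when either
 operand may be a NaN it is folded to !(x unle y), i.e. the negation of the
 NaN-aware "less than or equal" (or plain !(x <= y) when NaNs are
 impossible). */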
10006
10007static tree
10008fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10009 enum tree_code unordered_code,
10010 enum tree_code ordered_code)
10011{
10012 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10013 enum tree_code code;
10014 tree type0, type1;
10015 enum tree_code code0, code1;
10016 tree cmp_type = NULL_TREE;
10017
10018 type0 = TREE_TYPE (arg0);
10019 type1 = TREE_TYPE (arg1);
10020
10021 code0 = TREE_CODE (type0);
10022 code1 = TREE_CODE (type1);
10023
10024 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10025 /* Choose the wider of two real types. */
10026 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10027 ? type0 : type1;
10028 else if (code0 == REAL_TYPE
10029 && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE))
10030 cmp_type = type0;
10031 else if ((code0 == INTEGER_TYPE || code0 == BITINT_TYPE)
10032 && code1 == REAL_TYPE)
10033 cmp_type = type1;
10034
10035 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10036 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10037
10038 if (unordered_code == UNORDERED_EXPR)
10039 {
10040 if (tree_expr_nan_p (arg0) || tree_expr_nan_p (arg1))
10041 return omit_two_operands_loc (loc, type, integer_one_node, arg0, arg1);
10042 if (!tree_expr_maybe_nan_p (arg0) && !tree_expr_maybe_nan_p (arg1))
10043 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10044 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10045 }
10046
10047 code = (tree_expr_maybe_nan_p (arg0) || tree_expr_maybe_nan_p (arg1))
10048 ? unordered_code : ordered_code;
10049 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10050 fold_build2_loc (loc, code, type, arg0, arg1));
10051}
10052
10053/* Fold a call to __builtin_iseqsig(). ARG0 and ARG1 are the arguments.
10054 After choosing the wider floating-point type for the comparison,
10055 the code is folded to:
10056 SAVE_EXPR<ARG0> >= SAVE_EXPR<ARG1> && SAVE_EXPR<ARG0> <= SAVE_EXPR<ARG1> */
10057
10058static tree
10059fold_builtin_iseqsig (location_t loc, tree arg0, tree arg1)
10060{
10061 tree type0, type1;
10062 enum tree_code code0, code1;
10063 tree cmp1, cmp2, cmp_type = NULL_TREE;
10064
10065 type0 = TREE_TYPE (arg0);
10066 type1 = TREE_TYPE (arg1);
10067
10068 code0 = TREE_CODE (type0);
10069 code1 = TREE_CODE (type1);
10070
10071 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10072 /* Choose the wider of two real types. */
10073 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10074 ? type0 : type1;
10075 else if (code0 == REAL_TYPE
10076 && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE))
10077 cmp_type = type0;
10078 else if ((code0 == INTEGER_TYPE || code0 == BITINT_TYPE)
10079 && code1 == REAL_TYPE)
10080 cmp_type = type1;
10081
10082 arg0 = builtin_save_expr (exp: fold_convert_loc (loc, cmp_type, arg0));
10083 arg1 = builtin_save_expr (exp: fold_convert_loc (loc, cmp_type, arg1));
10084
10085 cmp1 = fold_build2_loc (loc, GE_EXPR, integer_type_node, arg0, arg1);
10086 cmp2 = fold_build2_loc (loc, LE_EXPR, integer_type_node, arg0, arg1);
10087
10088 return fold_build2_loc (loc, TRUTH_AND_EXPR, integer_type_node, cmp1, cmp2);
10089}
10090
10091/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
10092 arithmetic if it can never overflow, or into internal functions that
10093 return both the result of the arithmetic and a boolean overflow flag in
10094 a complex integer result, or some other check for overflow.
10095 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
10096 checking part of that. */
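/* As an illustrative sketch (assuming non-constant operands), a call such as
 __builtin_add_overflow (a, b, &res)
 is folded roughly into
 tmp = .ADD_OVERFLOW (a, b);
 res = REALPART_EXPR <tmp>, (bool) IMAGPART_EXPR <tmp>
 i.e. one internal-function call whose complex result carries both values,
 while __builtin_add_overflow_p with two INTEGER_CST operands folds directly
 to a boolean constant. */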
10097
10098static tree
10099fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
10100 tree arg0, tree arg1, tree arg2)
10101{
10102 enum internal_fn ifn = IFN_LAST;
10103 /* The code of the expression corresponding to the built-in. */
10104 enum tree_code opcode = ERROR_MARK;
10105 bool ovf_only = false;
10106
10107 switch (fcode)
10108 {
10109 case BUILT_IN_ADD_OVERFLOW_P:
10110 ovf_only = true;
10111 /* FALLTHRU */
10112 case BUILT_IN_ADD_OVERFLOW:
10113 case BUILT_IN_SADD_OVERFLOW:
10114 case BUILT_IN_SADDL_OVERFLOW:
10115 case BUILT_IN_SADDLL_OVERFLOW:
10116 case BUILT_IN_UADD_OVERFLOW:
10117 case BUILT_IN_UADDL_OVERFLOW:
10118 case BUILT_IN_UADDLL_OVERFLOW:
10119 opcode = PLUS_EXPR;
10120 ifn = IFN_ADD_OVERFLOW;
10121 break;
10122 case BUILT_IN_SUB_OVERFLOW_P:
10123 ovf_only = true;
10124 /* FALLTHRU */
10125 case BUILT_IN_SUB_OVERFLOW:
10126 case BUILT_IN_SSUB_OVERFLOW:
10127 case BUILT_IN_SSUBL_OVERFLOW:
10128 case BUILT_IN_SSUBLL_OVERFLOW:
10129 case BUILT_IN_USUB_OVERFLOW:
10130 case BUILT_IN_USUBL_OVERFLOW:
10131 case BUILT_IN_USUBLL_OVERFLOW:
10132 opcode = MINUS_EXPR;
10133 ifn = IFN_SUB_OVERFLOW;
10134 break;
10135 case BUILT_IN_MUL_OVERFLOW_P:
10136 ovf_only = true;
10137 /* FALLTHRU */
10138 case BUILT_IN_MUL_OVERFLOW:
10139 case BUILT_IN_SMUL_OVERFLOW:
10140 case BUILT_IN_SMULL_OVERFLOW:
10141 case BUILT_IN_SMULLL_OVERFLOW:
10142 case BUILT_IN_UMUL_OVERFLOW:
10143 case BUILT_IN_UMULL_OVERFLOW:
10144 case BUILT_IN_UMULLL_OVERFLOW:
10145 opcode = MULT_EXPR;
10146 ifn = IFN_MUL_OVERFLOW;
10147 break;
10148 default:
10149 gcc_unreachable ();
10150 }
10151
10152 /* For the "generic" overloads, the first two arguments can have different
10153 types and the last argument determines the target type to use to check
10154 for overflow. The arguments of the other overloads all have the same
10155 type. */
10156 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
10157
10158 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
10159 arguments are constant, attempt to fold the built-in call into a constant
10160 expression indicating whether or not it detected an overflow. */
10161 if (ovf_only
10162 && TREE_CODE (arg0) == INTEGER_CST
10163 && TREE_CODE (arg1) == INTEGER_CST)
10164 /* Perform the computation in the target type and check for overflow. */
10165 return omit_one_operand_loc (loc, boolean_type_node,
10166 arith_overflowed_p (opcode, type, arg0, arg1)
10167 ? boolean_true_node : boolean_false_node,
10168 arg2);
10169
10170 tree intres, ovfres;
10171 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10172 {
10173 intres = fold_binary_loc (loc, opcode, type,
10174 fold_convert_loc (loc, type, arg0),
10175 fold_convert_loc (loc, type, arg1));
10176 if (TREE_OVERFLOW (intres))
10177 intres = drop_tree_overflow (intres);
10178 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
10179 ? boolean_true_node : boolean_false_node);
10180 }
10181 else
10182 {
10183 tree ctype = build_complex_type (type);
10184 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
10185 arg0, arg1);
10186 tree tgt;
10187 if (ovf_only)
10188 {
10189 tgt = call;
10190 intres = NULL_TREE;
10191 }
10192 else
10193 {
10194 /* Force SAVE_EXPR even for calls which satisfy tree_invariant_p_1,
10195 as while the call itself is const, the REALPART_EXPR store is
10196 certainly not. And in any case, we want just one call,
10197 not multiple calls that we would then try to CSE later. */
10198 TREE_SIDE_EFFECTS (call) = 1;
10199 tgt = save_expr (call);
10200 }
10201 intres = build1_loc (loc, code: REALPART_EXPR, type, arg1: tgt);
10202 ovfres = build1_loc (loc, code: IMAGPART_EXPR, type, arg1: tgt);
10203 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
10204 }
10205
10206 if (ovf_only)
10207 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
10208
10209 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
10210 tree store
10211 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
10212 return build2_loc (loc, code: COMPOUND_EXPR, boolean_type_node, arg0: store, arg1: ovfres);
10213}
10214
10215/* Fold __builtin_{clz,ctz,clrsb,ffs,parity,popcount}g into the corresponding
10216 internal function, non-generic builtin, or double-word expansion. */
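/* A hedged example of one fallback path: for an unsigned short S on a target
 where no suitable IFN_CLZ is available, __builtin_clzg (s, -1) ends up
 folded roughly as
 (unsigned int) s != 0 ? __builtin_clz ((unsigned int) s) - 16 : -1
 i.e. the non-generic builtin on the promoted operand, adjusted by ADDEND
 for the narrower type and guarded by the zero check. */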
10217
10218static tree
10219fold_builtin_bit_query (location_t loc, enum built_in_function fcode,
10220 tree arg0, tree arg1)
10221{
10222 enum internal_fn ifn;
10223 enum built_in_function fcodei, fcodel, fcodell;
10224 tree arg0_type = TREE_TYPE (arg0);
10225 tree cast_type = NULL_TREE;
10226 int addend = 0;
10227
10228 switch (fcode)
10229 {
10230 case BUILT_IN_CLZG:
10231 if (arg1 && TREE_CODE (arg1) != INTEGER_CST)
10232 return NULL_TREE;
10233 ifn = IFN_CLZ;
10234 fcodei = BUILT_IN_CLZ;
10235 fcodel = BUILT_IN_CLZL;
10236 fcodell = BUILT_IN_CLZLL;
10237 break;
10238 case BUILT_IN_CTZG:
10239 if (arg1 && TREE_CODE (arg1) != INTEGER_CST)
10240 return NULL_TREE;
10241 ifn = IFN_CTZ;
10242 fcodei = BUILT_IN_CTZ;
10243 fcodel = BUILT_IN_CTZL;
10244 fcodell = BUILT_IN_CTZLL;
10245 break;
10246 case BUILT_IN_CLRSBG:
10247 ifn = IFN_CLRSB;
10248 fcodei = BUILT_IN_CLRSB;
10249 fcodel = BUILT_IN_CLRSBL;
10250 fcodell = BUILT_IN_CLRSBLL;
10251 break;
10252 case BUILT_IN_FFSG:
10253 ifn = IFN_FFS;
10254 fcodei = BUILT_IN_FFS;
10255 fcodel = BUILT_IN_FFSL;
10256 fcodell = BUILT_IN_FFSLL;
10257 break;
10258 case BUILT_IN_PARITYG:
10259 ifn = IFN_PARITY;
10260 fcodei = BUILT_IN_PARITY;
10261 fcodel = BUILT_IN_PARITYL;
10262 fcodell = BUILT_IN_PARITYLL;
10263 break;
10264 case BUILT_IN_POPCOUNTG:
10265 ifn = IFN_POPCOUNT;
10266 fcodei = BUILT_IN_POPCOUNT;
10267 fcodel = BUILT_IN_POPCOUNTL;
10268 fcodell = BUILT_IN_POPCOUNTLL;
10269 break;
10270 default:
10271 gcc_unreachable ();
10272 }
10273
10274 if (TYPE_PRECISION (arg0_type)
10275 <= TYPE_PRECISION (long_long_unsigned_type_node))
10276 {
10277 if (TYPE_PRECISION (arg0_type) <= TYPE_PRECISION (unsigned_type_node))
10279 cast_type = (TYPE_UNSIGNED (arg0_type)
10280 ? unsigned_type_node : integer_type_node);
10281 else if (TYPE_PRECISION (arg0_type)
10282 <= TYPE_PRECISION (long_unsigned_type_node))
10283 {
10284 cast_type = (TYPE_UNSIGNED (arg0_type)
10285 ? long_unsigned_type_node : long_integer_type_node);
10286 fcodei = fcodel;
10287 }
10288 else
10289 {
10290 cast_type = (TYPE_UNSIGNED (arg0_type)
10291 ? long_long_unsigned_type_node
10292 : long_long_integer_type_node);
10293 fcodei = fcodell;
10294 }
10295 }
10296 else if (TYPE_PRECISION (arg0_type) <= MAX_FIXED_MODE_SIZE)
10297 {
10298 cast_type
10299 = build_nonstandard_integer_type (MAX_FIXED_MODE_SIZE,
10300 TYPE_UNSIGNED (arg0_type));
10301 gcc_assert (TYPE_PRECISION (cast_type)
10302 == 2 * TYPE_PRECISION (long_long_unsigned_type_node));
10303 fcodei = END_BUILTINS;
10304 }
10305 else
10306 fcodei = END_BUILTINS;
10307 if (cast_type)
10308 {
10309 switch (fcode)
10310 {
10311 case BUILT_IN_CLZG:
10312 case BUILT_IN_CLRSBG:
10313 addend = TYPE_PRECISION (arg0_type) - TYPE_PRECISION (cast_type);
10314 break;
10315 default:
10316 break;
10317 }
10318 arg0 = fold_convert (cast_type, arg0);
10319 arg0_type = cast_type;
10320 }
10321
10322 if (arg1)
10323 arg1 = fold_convert (integer_type_node, arg1);
10324
10325 tree arg2 = arg1;
10326 if (fcode == BUILT_IN_CLZG && addend)
10327 {
10328 if (arg1)
10329 arg0 = save_expr (arg0);
10330 arg2 = NULL_TREE;
10331 }
10332 tree call = NULL_TREE, tem;
10333 if (TYPE_PRECISION (arg0_type) == MAX_FIXED_MODE_SIZE
10334 && (TYPE_PRECISION (arg0_type)
10335 == 2 * TYPE_PRECISION (long_long_unsigned_type_node))
10336 /* If the target supports the optab, then don't do the expansion. */
10337 && !direct_internal_fn_supported_p (ifn, arg0_type, OPTIMIZE_FOR_BOTH))
10338 {
10339 /* __int128 expansions using up to 2 long long builtins. */
10340 arg0 = save_expr (arg0);
10341 tree type = (TYPE_UNSIGNED (arg0_type)
10342 ? long_long_unsigned_type_node
10343 : long_long_integer_type_node);
10344 tree hi = fold_build2 (RSHIFT_EXPR, arg0_type, arg0,
10345 build_int_cst (integer_type_node,
10346 MAX_FIXED_MODE_SIZE / 2));
10347 hi = fold_convert (type, hi);
10348 tree lo = fold_convert (type, arg0);
10349 switch (fcode)
10350 {
10351 case BUILT_IN_CLZG:
10352 call = fold_builtin_bit_query (loc, fcode, arg0: lo, NULL_TREE);
10353 call = fold_build2 (PLUS_EXPR, integer_type_node, call,
10354 build_int_cst (integer_type_node,
10355 MAX_FIXED_MODE_SIZE / 2));
10356 if (arg2)
10357 call = fold_build3 (COND_EXPR, integer_type_node,
10358 fold_build2 (NE_EXPR, boolean_type_node,
10359 lo, build_zero_cst (type)),
10360 call, arg2);
10361 call = fold_build3 (COND_EXPR, integer_type_node,
10362 fold_build2 (NE_EXPR, boolean_type_node,
10363 hi, build_zero_cst (type)),
10364 fold_builtin_bit_query (loc, fcode, hi,
10365 NULL_TREE),
10366 call);
10367 break;
10368 case BUILT_IN_CTZG:
10369 call = fold_builtin_bit_query (loc, fcode, arg0: hi, NULL_TREE);
10370 call = fold_build2 (PLUS_EXPR, integer_type_node, call,
10371 build_int_cst (integer_type_node,
10372 MAX_FIXED_MODE_SIZE / 2));
10373 if (arg2)
10374 call = fold_build3 (COND_EXPR, integer_type_node,
10375 fold_build2 (NE_EXPR, boolean_type_node,
10376 hi, build_zero_cst (type)),
10377 call, arg2);
10378 call = fold_build3 (COND_EXPR, integer_type_node,
10379 fold_build2 (NE_EXPR, boolean_type_node,
10380 lo, build_zero_cst (type)),
10381 fold_builtin_bit_query (loc, fcode, lo,
10382 NULL_TREE),
10383 call);
10384 break;
10385 case BUILT_IN_CLRSBG:
10386 tem = fold_builtin_bit_query (loc, fcode, arg0: lo, NULL_TREE);
10387 tem = fold_build2 (PLUS_EXPR, integer_type_node, tem,
10388 build_int_cst (integer_type_node,
10389 MAX_FIXED_MODE_SIZE / 2));
10390 tem = fold_build3 (COND_EXPR, integer_type_node,
10391 fold_build2 (LT_EXPR, boolean_type_node,
10392 fold_build2 (BIT_XOR_EXPR, type,
10393 lo, hi),
10394 build_zero_cst (type)),
10395 build_int_cst (integer_type_node,
10396 MAX_FIXED_MODE_SIZE / 2 - 1),
10397 tem);
10398 call = fold_builtin_bit_query (loc, fcode, arg0: hi, NULL_TREE);
10399 call = save_expr (call);
10400 call = fold_build3 (COND_EXPR, integer_type_node,
10401 fold_build2 (NE_EXPR, boolean_type_node,
10402 call,
10403 build_int_cst (integer_type_node,
10404 MAX_FIXED_MODE_SIZE
10405 / 2 - 1)),
10406 call, tem);
10407 break;
10408 case BUILT_IN_FFSG:
10409 call = fold_builtin_bit_query (loc, fcode, arg0: hi, NULL_TREE);
10410 call = fold_build2 (PLUS_EXPR, integer_type_node, call,
10411 build_int_cst (integer_type_node,
10412 MAX_FIXED_MODE_SIZE / 2));
10413 call = fold_build3 (COND_EXPR, integer_type_node,
10414 fold_build2 (NE_EXPR, boolean_type_node,
10415 hi, build_zero_cst (type)),
10416 call, integer_zero_node);
10417 call = fold_build3 (COND_EXPR, integer_type_node,
10418 fold_build2 (NE_EXPR, boolean_type_node,
10419 lo, build_zero_cst (type)),
10420 fold_builtin_bit_query (loc, fcode, lo,
10421 NULL_TREE),
10422 call);
10423 break;
10424 case BUILT_IN_PARITYG:
10425 call = fold_builtin_bit_query (loc, fcode,
10426 fold_build2 (BIT_XOR_EXPR, type,
10427 lo, hi), NULL_TREE);
10428 break;
10429 case BUILT_IN_POPCOUNTG:
10430 call = fold_build2 (PLUS_EXPR, integer_type_node,
10431 fold_builtin_bit_query (loc, fcode, hi,
10432 NULL_TREE),
10433 fold_builtin_bit_query (loc, fcode, lo,
10434 NULL_TREE));
10435 break;
10436 default:
10437 gcc_unreachable ();
10438 }
10439 }
10440 else
10441 {
10442 /* Only keep the second argument to IFN_CLZ/IFN_CTZ if it is the
10443 value defined at zero during GIMPLE, or for large/huge _BitInt
10444 (which are then lowered during bitint lowering). */
10445 if (arg2 && TREE_CODE (TREE_TYPE (arg0)) != BITINT_TYPE)
10446 {
10447 int val;
10448 if (fcode == BUILT_IN_CLZG)
10449 {
10450 if (CLZ_DEFINED_VALUE_AT_ZERO (SCALAR_TYPE_MODE (arg0_type),
10451 val) != 2
10452 || wi::to_widest (t: arg2) != val)
10453 arg2 = NULL_TREE;
10454 }
10455 else if (CTZ_DEFINED_VALUE_AT_ZERO (SCALAR_TYPE_MODE (arg0_type),
10456 val) != 2
10457 || wi::to_widest (t: arg2) != val)
10458 arg2 = NULL_TREE;
10459 if (!direct_internal_fn_supported_p (ifn, arg0_type,
10460 OPTIMIZE_FOR_BOTH))
10461 arg2 = NULL_TREE;
10462 if (arg2 == NULL_TREE)
10463 arg0 = save_expr (arg0);
10464 }
10465 if (fcodei == END_BUILTINS || arg2)
10466 call = build_call_expr_internal_loc (loc, ifn, integer_type_node,
10467 arg2 ? 2 : 1, arg0, arg2);
10468 else
10469 call = build_call_expr_loc (loc, builtin_decl_explicit (fncode: fcodei), 1,
10470 arg0);
10471 }
10472 if (addend)
10473 call = fold_build2 (PLUS_EXPR, integer_type_node, call,
10474 build_int_cst (integer_type_node, addend));
10475 if (arg1 && arg2 == NULL_TREE)
10476 call = fold_build3 (COND_EXPR, integer_type_node,
10477 fold_build2 (NE_EXPR, boolean_type_node,
10478 arg0, build_zero_cst (arg0_type)),
10479 call, arg1);
10480
10481 return call;
10482}
10483
10484/* Fold __builtin_{add,sub}c{,l,ll} into a pair of internal functions
10485 that return both the result of the arithmetic and a boolean overflow
10486 flag in a complex integer result. */
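/* A rough sketch of the result (names purely illustrative):
 __builtin_addc (a, b, carry_in, &carry_out) becomes approximately
 t1 = .ADD_OVERFLOW (a, b);
 t2 = .ADD_OVERFLOW (REALPART_EXPR <t1>, carry_in);
 carry_out = IMAGPART_EXPR <t1> | IMAGPART_EXPR <t2>;
 result = REALPART_EXPR <t2>;
 with both internal-function calls wrapped in SAVE_EXPRs so each is
 evaluated exactly once. */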
10487
10488static tree
10489fold_builtin_addc_subc (location_t loc, enum built_in_function fcode,
10490 tree *args)
10491{
10492 enum internal_fn ifn;
10493
10494 switch (fcode)
10495 {
10496 case BUILT_IN_ADDC:
10497 case BUILT_IN_ADDCL:
10498 case BUILT_IN_ADDCLL:
10499 ifn = IFN_ADD_OVERFLOW;
10500 break;
10501 case BUILT_IN_SUBC:
10502 case BUILT_IN_SUBCL:
10503 case BUILT_IN_SUBCLL:
10504 ifn = IFN_SUB_OVERFLOW;
10505 break;
10506 default:
10507 gcc_unreachable ();
10508 }
10509
10510 tree type = TREE_TYPE (args[0]);
10511 tree ctype = build_complex_type (type);
10512 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
10513 args[0], args[1]);
10514 /* Force SAVE_EXPR even for calls which satisfy tree_invariant_p_1,
10515 as while the call itself is const, the REALPART_EXPR store is
10516 certainly not. And in any case, we want just one call,
10517 not multiple calls that we would then try to CSE later. */
10518 TREE_SIDE_EFFECTS (call) = 1;
10519 tree tgt = save_expr (call);
10520 tree intres = build1_loc (loc, code: REALPART_EXPR, type, arg1: tgt);
10521 tree ovfres = build1_loc (loc, code: IMAGPART_EXPR, type, arg1: tgt);
10522 call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
10523 intres, args[2]);
10524 TREE_SIDE_EFFECTS (call) = 1;
10525 tgt = save_expr (call);
10526 intres = build1_loc (loc, code: REALPART_EXPR, type, arg1: tgt);
10527 tree ovfres2 = build1_loc (loc, code: IMAGPART_EXPR, type, arg1: tgt);
10528 ovfres = build2_loc (loc, code: BIT_IOR_EXPR, type, arg0: ovfres, arg1: ovfres2);
10529 tree mem_arg3 = build_fold_indirect_ref_loc (loc, args[3]);
10530 tree store
10531 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg3, ovfres);
10532 return build2_loc (loc, code: COMPOUND_EXPR, type, arg0: store, arg1: intres);
10533}
10534
10535/* Fold a call to __builtin_FILE to a constant string. */
10536
10537static inline tree
10538fold_builtin_FILE (location_t loc)
10539{
10540 if (const char *fname = LOCATION_FILE (loc))
10541 {
10542 /* The documentation says this builtin is equivalent to the preprocessor
10543 __FILE__ macro so it appears appropriate to use the same file prefix
10544 mappings. */
10545 fname = remap_macro_filename (fname);
10546 return build_string_literal (p: fname);
10547 }
10548
10549 return build_string_literal (p: "");
10550}
10551
10552/* Fold a call to __builtin_FUNCTION to a constant string. */
10553
10554static inline tree
10555fold_builtin_FUNCTION ()
10556{
10557 const char *name = "";
10558
10559 if (current_function_decl)
10560 name = lang_hooks.decl_printable_name (current_function_decl, 0);
10561
10562 return build_string_literal (p: name);
10563}
10564
10565/* Fold a call to __builtin_LINE to an integer constant. */
10566
10567static inline tree
10568fold_builtin_LINE (location_t loc, tree type)
10569{
10570 return build_int_cst (type, LOCATION_LINE (loc));
10571}
10572
10573/* Fold a call to built-in function FNDECL with 0 arguments.
10574 This function returns NULL_TREE if no simplification was possible. */
10575
10576static tree
10577fold_builtin_0 (location_t loc, tree fndecl)
10578{
10579 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10580 enum built_in_function fcode = DECL_FUNCTION_CODE (decl: fndecl);
10581 switch (fcode)
10582 {
10583 case BUILT_IN_FILE:
10584 return fold_builtin_FILE (loc);
10585
10586 case BUILT_IN_FUNCTION:
10587 return fold_builtin_FUNCTION ();
10588
10589 case BUILT_IN_LINE:
10590 return fold_builtin_LINE (loc, type);
10591
10592 CASE_FLT_FN (BUILT_IN_INF):
10593 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
10594 case BUILT_IN_INFD32:
10595 case BUILT_IN_INFD64:
10596 case BUILT_IN_INFD128:
10597 case BUILT_IN_INFD64X:
10598 return fold_builtin_inf (loc, type, warn: true);
10599
10600 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10601 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
10602 return fold_builtin_inf (loc, type, warn: false);
10603
10604 case BUILT_IN_CLASSIFY_TYPE:
10605 return fold_builtin_classify_type (NULL_TREE);
10606
10607 case BUILT_IN_UNREACHABLE:
10608 /* Rewrite any explicit calls to __builtin_unreachable. */
10609 if (sanitize_flags_p (flag: SANITIZE_UNREACHABLE))
10610 return build_builtin_unreachable (loc);
10611 break;
10612
10613 default:
10614 break;
10615 }
10616 return NULL_TREE;
10617}
10618
10619/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10620 This function returns NULL_TREE if no simplification was possible. */
10621
10622static tree
10623fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
10624{
10625 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10626 enum built_in_function fcode = DECL_FUNCTION_CODE (decl: fndecl);
10627
10628 if (error_operand_p (t: arg0))
10629 return NULL_TREE;
10630
10631 if (tree ret = fold_const_call (as_combined_fn (fn: fcode), type, arg0))
10632 return ret;
10633
10634 switch (fcode)
10635 {
10636 case BUILT_IN_CONSTANT_P:
10637 {
10638 tree val = fold_builtin_constant_p (arg: arg0);
10639
10640 /* Gimplification will pull the CALL_EXPR for the builtin out of
10641 an if condition. When not optimizing, we'll not CSE it back.
10642 To avoid regressions such as link errors, return false now. */
10643 if (!val && !optimize)
10644 val = integer_zero_node;
10645
10646 return val;
10647 }
10648
10649 case BUILT_IN_CLASSIFY_TYPE:
10650 return fold_builtin_classify_type (arg: arg0);
10651
10652 case BUILT_IN_STRLEN:
10653 return fold_builtin_strlen (loc, expr, type, arg: arg0);
10654
10655 CASE_FLT_FN (BUILT_IN_FABS):
10656 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
10657 case BUILT_IN_FABSD32:
10658 case BUILT_IN_FABSD64:
10659 case BUILT_IN_FABSD128:
10660 case BUILT_IN_FABSD64X:
10661 return fold_builtin_fabs (loc, arg: arg0, type);
10662
10663 case BUILT_IN_ABS:
10664 case BUILT_IN_LABS:
10665 case BUILT_IN_LLABS:
10666 case BUILT_IN_IMAXABS:
10667 case BUILT_IN_UABS:
10668 case BUILT_IN_ULABS:
10669 case BUILT_IN_ULLABS:
10670 case BUILT_IN_UIMAXABS:
10671 return fold_builtin_abs (loc, arg: arg0, type);
10672
10673 CASE_FLT_FN (BUILT_IN_CONJ):
10674 if (validate_arg (arg0, code: COMPLEX_TYPE)
10675 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10676 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10677 break;
10678
10679 CASE_FLT_FN (BUILT_IN_CREAL):
10680 if (validate_arg (arg0, code: COMPLEX_TYPE)
10681 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10682 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10683 break;
10684
10685 CASE_FLT_FN (BUILT_IN_CIMAG):
10686 if (validate_arg (arg0, code: COMPLEX_TYPE)
10687 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10688 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10689 break;
10690
10691 CASE_FLT_FN (BUILT_IN_CARG):
10692 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CARG):
10693 return fold_builtin_carg (loc, arg: arg0, type);
10694
10695 case BUILT_IN_ISASCII:
10696 return fold_builtin_isascii (loc, arg: arg0);
10697
10698 case BUILT_IN_TOASCII:
10699 return fold_builtin_toascii (loc, arg: arg0);
10700
10701 case BUILT_IN_ISDIGIT:
10702 return fold_builtin_isdigit (loc, arg: arg0);
10703
10704 CASE_FLT_FN (BUILT_IN_FINITE):
10705 case BUILT_IN_FINITED32:
10706 case BUILT_IN_FINITED64:
10707 case BUILT_IN_FINITED128:
10708 case BUILT_IN_ISFINITE:
10709 {
10710 tree ret = fold_builtin_classify (loc, fndecl, arg: arg0, builtin_index: BUILT_IN_ISFINITE);
10711 if (ret)
10712 return ret;
10713 return fold_builtin_interclass_mathfn (loc, fndecl, arg: arg0);
10714 }
10715
10716 CASE_FLT_FN (BUILT_IN_ISINF):
10717 case BUILT_IN_ISINFD32:
10718 case BUILT_IN_ISINFD64:
10719 case BUILT_IN_ISINFD128:
10720 {
10721 tree ret = fold_builtin_classify (loc, fndecl, arg: arg0, builtin_index: BUILT_IN_ISINF);
10722 if (ret)
10723 return ret;
10724 return fold_builtin_interclass_mathfn (loc, fndecl, arg: arg0);
10725 }
10726
10727 case BUILT_IN_ISNORMAL:
10728 return fold_builtin_interclass_mathfn (loc, fndecl, arg: arg0);
10729
10730 case BUILT_IN_ISINF_SIGN:
10731 return fold_builtin_classify (loc, fndecl, arg: arg0, builtin_index: BUILT_IN_ISINF_SIGN);
10732
10733 CASE_FLT_FN (BUILT_IN_ISNAN):
10734 case BUILT_IN_ISNAND32:
10735 case BUILT_IN_ISNAND64:
10736 case BUILT_IN_ISNAND128:
10737 return fold_builtin_classify (loc, fndecl, arg: arg0, builtin_index: BUILT_IN_ISNAN);
10738
10739 case BUILT_IN_ISSIGNALING:
10740 return fold_builtin_classify (loc, fndecl, arg: arg0, builtin_index: BUILT_IN_ISSIGNALING);
10741
10742 case BUILT_IN_FREE:
10743 if (integer_zerop (arg0))
10744 return build_empty_stmt (loc);
10745 break;
10746
10747 case BUILT_IN_CLZG:
10748 case BUILT_IN_CTZG:
10749 case BUILT_IN_CLRSBG:
10750 case BUILT_IN_FFSG:
10751 case BUILT_IN_PARITYG:
10752 case BUILT_IN_POPCOUNTG:
10753 return fold_builtin_bit_query (loc, fcode, arg0, NULL_TREE);
10754
10755 default:
10756 break;
10757 }
10758
10759 return NULL_TREE;
10760
10761}
10762
10763/* Folds a call EXPR (which may be null) to built-in function FNDECL
10764 with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
10765 if no simplification was possible. */
10766
10767static tree
10768fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
10769{
10770 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10771 enum built_in_function fcode = DECL_FUNCTION_CODE (decl: fndecl);
10772
10773 if (error_operand_p (t: arg0)
10774 || error_operand_p (t: arg1))
10775 return NULL_TREE;
10776
10777 if (tree ret = fold_const_call (as_combined_fn (fn: fcode), type, arg0, arg1))
10778 return ret;
10779
10780 switch (fcode)
10781 {
10782 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10783 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10784 if (validate_arg (arg0, code: REAL_TYPE)
10785 && validate_arg (arg1, code: POINTER_TYPE))
10786 return do_mpfr_lgamma_r (arg0, arg1, type);
10787 break;
10788
10789 CASE_FLT_FN (BUILT_IN_FREXP):
10790 return fold_builtin_frexp (loc, arg0, arg1, rettype: type);
10791
10792 CASE_FLT_FN (BUILT_IN_MODF):
10793 return fold_builtin_modf (loc, arg0, arg1, rettype: type);
10794
10795 case BUILT_IN_STRSPN:
10796 return fold_builtin_strspn (loc, expr, arg0, arg1, type);
10797
10798 case BUILT_IN_STRCSPN:
10799 return fold_builtin_strcspn (loc, expr, arg0, arg1, type);
10800
10801 case BUILT_IN_STRPBRK:
10802 return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
10803
10804 case BUILT_IN_EXPECT:
10805 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
10806
10807 case BUILT_IN_ISGREATER:
10808 return fold_builtin_unordered_cmp (loc, fndecl,
10809 arg0, arg1, unordered_code: UNLE_EXPR, ordered_code: LE_EXPR);
10810 case BUILT_IN_ISGREATEREQUAL:
10811 return fold_builtin_unordered_cmp (loc, fndecl,
10812 arg0, arg1, unordered_code: UNLT_EXPR, ordered_code: LT_EXPR);
10813 case BUILT_IN_ISLESS:
10814 return fold_builtin_unordered_cmp (loc, fndecl,
10815 arg0, arg1, unordered_code: UNGE_EXPR, ordered_code: GE_EXPR);
10816 case BUILT_IN_ISLESSEQUAL:
10817 return fold_builtin_unordered_cmp (loc, fndecl,
10818 arg0, arg1, unordered_code: UNGT_EXPR, ordered_code: GT_EXPR);
10819 case BUILT_IN_ISLESSGREATER:
10820 return fold_builtin_unordered_cmp (loc, fndecl,
10821 arg0, arg1, unordered_code: UNEQ_EXPR, ordered_code: EQ_EXPR);
10822 case BUILT_IN_ISUNORDERED:
10823 return fold_builtin_unordered_cmp (loc, fndecl,
10824 arg0, arg1, unordered_code: UNORDERED_EXPR,
10825 ordered_code: NOP_EXPR);
10826
10827 case BUILT_IN_ISEQSIG:
10828 return fold_builtin_iseqsig (loc, arg0, arg1);
10829
10830 /* We do the folding for va_start in the expander. */
10831 case BUILT_IN_VA_START:
10832 break;
10833
10834 case BUILT_IN_OBJECT_SIZE:
10835 case BUILT_IN_DYNAMIC_OBJECT_SIZE:
10836 return fold_builtin_object_size (arg0, arg1, fcode);
10837
10838 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10839 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10840
10841 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10842 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10843
10844 case BUILT_IN_CLZG:
10845 case BUILT_IN_CTZG:
10846 return fold_builtin_bit_query (loc, fcode, arg0, arg1);
10847
10848 default:
10849 break;
10850 }
10851 return NULL_TREE;
10852}
10853
10854/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10855 and ARG2.
10856 This function returns NULL_TREE if no simplification was possible. */
10857
10858static tree
10859fold_builtin_3 (location_t loc, tree fndecl,
10860 tree arg0, tree arg1, tree arg2)
10861{
10862 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10863 enum built_in_function fcode = DECL_FUNCTION_CODE (decl: fndecl);
10864
10865 if (error_operand_p (t: arg0)
10866 || error_operand_p (t: arg1)
10867 || error_operand_p (t: arg2))
10868 return NULL_TREE;
10869
10870 if (tree ret = fold_const_call (as_combined_fn (fn: fcode), type,
10871 arg0, arg1, arg2))
10872 return ret;
10873
10874 switch (fcode)
10875 {
10876
10877 CASE_FLT_FN (BUILT_IN_SINCOS):
10878 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10879
10880 CASE_FLT_FN (BUILT_IN_REMQUO):
10881 if (validate_arg (arg0, code: REAL_TYPE)
10882 && validate_arg (arg1, code: REAL_TYPE)
10883 && validate_arg (arg2, code: POINTER_TYPE))
10884 return do_mpfr_remquo (arg0, arg1, arg2);
10885 break;
10886
10887 case BUILT_IN_MEMCMP:
10888 return fold_builtin_memcmp (loc, arg1: arg0, arg2: arg1, len: arg2);
10889
10890 case BUILT_IN_EXPECT:
10891 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
10892
10893 case BUILT_IN_EXPECT_WITH_PROBABILITY:
10894 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg3: arg2);
10895
10896 case BUILT_IN_ADD_OVERFLOW:
10897 case BUILT_IN_SUB_OVERFLOW:
10898 case BUILT_IN_MUL_OVERFLOW:
10899 case BUILT_IN_ADD_OVERFLOW_P:
10900 case BUILT_IN_SUB_OVERFLOW_P:
10901 case BUILT_IN_MUL_OVERFLOW_P:
10902 case BUILT_IN_SADD_OVERFLOW:
10903 case BUILT_IN_SADDL_OVERFLOW:
10904 case BUILT_IN_SADDLL_OVERFLOW:
10905 case BUILT_IN_SSUB_OVERFLOW:
10906 case BUILT_IN_SSUBL_OVERFLOW:
10907 case BUILT_IN_SSUBLL_OVERFLOW:
10908 case BUILT_IN_SMUL_OVERFLOW:
10909 case BUILT_IN_SMULL_OVERFLOW:
10910 case BUILT_IN_SMULLL_OVERFLOW:
10911 case BUILT_IN_UADD_OVERFLOW:
10912 case BUILT_IN_UADDL_OVERFLOW:
10913 case BUILT_IN_UADDLL_OVERFLOW:
10914 case BUILT_IN_USUB_OVERFLOW:
10915 case BUILT_IN_USUBL_OVERFLOW:
10916 case BUILT_IN_USUBLL_OVERFLOW:
10917 case BUILT_IN_UMUL_OVERFLOW:
10918 case BUILT_IN_UMULL_OVERFLOW:
10919 case BUILT_IN_UMULLL_OVERFLOW:
10920 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10921
10922 default:
10923 break;
10924 }
10925 return NULL_TREE;
10926}
10927
10928/* Folds a call EXPR (which may be null) to built-in function FNDECL.
10929 ARGS is an array of NARGS arguments. IGNORE is true if the result
10930 of the function call is ignored. This function returns NULL_TREE
10931 if no simplification was possible. */
10932
10933static tree
10934fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
10935 int nargs, bool)
10936{
10937 tree ret = NULL_TREE;
10938
10939 switch (nargs)
10940 {
10941 case 0:
10942 ret = fold_builtin_0 (loc, fndecl);
10943 break;
10944 case 1:
10945 ret = fold_builtin_1 (loc, expr, fndecl, arg0: args[0]);
10946 break;
10947 case 2:
10948 ret = fold_builtin_2 (loc, expr, fndecl, arg0: args[0], arg1: args[1]);
10949 break;
10950 case 3:
10951 ret = fold_builtin_3 (loc, fndecl, arg0: args[0], arg1: args[1], arg2: args[2]);
10952 break;
10953 default:
10954 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10955 break;
10956 }
10957 if (ret)
10958 {
10959 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10960 SET_EXPR_LOCATION (ret, loc);
10961 return ret;
10962 }
10963 return NULL_TREE;
10964}
10965
10966/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10967 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10968 of arguments in ARGS to be omitted. OLDNARGS is the number of
10969 elements in ARGS. */
10970
10971static tree
10972rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10973 int skip, tree fndecl, int n, va_list newargs)
10974{
10975 int nargs = oldnargs - skip + n;
10976 tree *buffer;
10977
10978 if (n > 0)
10979 {
10980 int i, j;
10981
10982 buffer = XALLOCAVEC (tree, nargs);
10983 for (i = 0; i < n; i++)
10984 buffer[i] = va_arg (newargs, tree);
10985 for (j = skip; j < oldnargs; j++, i++)
10986 buffer[i] = args[j];
10987 }
10988 else
10989 buffer = args + skip;
10990
10991 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10992}
10993
10994/* Return true if FNDECL shouldn't be folded right now.
10995 If a built-in function has an inline always_inline wrapper,
10996 defer folding it until after always_inline functions have
10997 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10998 might not be performed. */
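/* For instance (a glibc-style sketch, not something this file depends on),
 with -D_FORTIFY_SOURCE <string.h> may provide
 extern __inline __attribute__ ((__always_inline__)) void *
 memcpy (void *d, const void *s, size_t n)
 {
 return __builtin___memcpy_chk (d, s, n, __builtin_object_size (d, 0));
 }
 and folding a memcpy call before that wrapper is inlined would bypass the
 object-size check it adds. */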
10999
11000bool
11001avoid_folding_inline_builtin (tree fndecl)
11002{
11003 return (DECL_DECLARED_INLINE_P (fndecl)
11004 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11005 && cfun
11006 && !cfun->always_inline_functions_inlined
11007 && lookup_attribute (attr_name: "always_inline", DECL_ATTRIBUTES (fndecl)));
11008}
11009
11010/* A wrapper function for builtin folding that prevents warnings for
11011 "statement without effect" and the like, caused by removing the
11012 call node earlier than the warning is generated. */
11013
11014tree
11015fold_call_expr (location_t loc, tree exp, bool ignore)
11016{
11017 tree ret = NULL_TREE;
11018 tree fndecl = get_callee_fndecl (exp);
11019 if (fndecl && fndecl_built_in_p (node: fndecl)
11020 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11021 yet. Defer folding until we see all the arguments
11022 (after inlining). */
11023 && !CALL_EXPR_VA_ARG_PACK (exp))
11024 {
11025 int nargs = call_expr_nargs (exp);
11026
11027 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11028 instead last argument is __builtin_va_arg_pack (). Defer folding
11029 even in that case, until arguments are finalized. */
11030 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11031 {
11032 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11033 if (fndecl2 && fndecl_built_in_p (node: fndecl2, name1: BUILT_IN_VA_ARG_PACK))
11034 return NULL_TREE;
11035 }
11036
11037 if (avoid_folding_inline_builtin (fndecl))
11038 return NULL_TREE;
11039
11040 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11041 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11042 CALL_EXPR_ARGP (exp), ignore);
11043 else
11044 {
11045 tree *args = CALL_EXPR_ARGP (exp);
11046 ret = fold_builtin_n (loc, expr: exp, fndecl, args, nargs, ignore);
11047 if (ret)
11048 return ret;
11049 }
11050 }
11051 return NULL_TREE;
11052}
11053
11054/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
11055 N arguments are passed in the array ARGARRAY. Return a folded
11056 expression or NULL_TREE if no simplification was possible. */
11057
11058tree
11059fold_builtin_call_array (location_t loc, tree,
11060 tree fn,
11061 int n,
11062 tree *argarray)
11063{
11064 if (TREE_CODE (fn) != ADDR_EXPR)
11065 return NULL_TREE;
11066
11067 tree fndecl = TREE_OPERAND (fn, 0);
11068 if (TREE_CODE (fndecl) == FUNCTION_DECL
11069 && fndecl_built_in_p (node: fndecl))
11070 {
11071 /* If last argument is __builtin_va_arg_pack (), arguments to this
11072 function are not finalized yet. Defer folding until they are. */
11073 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11074 {
11075 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11076 if (fndecl2 && fndecl_built_in_p (node: fndecl2, name1: BUILT_IN_VA_ARG_PACK))
11077 return NULL_TREE;
11078 }
11079 if (avoid_folding_inline_builtin (fndecl))
11080 return NULL_TREE;
11081 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11082 return targetm.fold_builtin (fndecl, n, argarray, false);
11083 else
11084 return fold_builtin_n (loc, NULL_TREE, fndecl, args: argarray, nargs: n, false);
11085 }
11086
11087 return NULL_TREE;
11088}
11089
11090/* Construct a new CALL_EXPR using the tail of the argument list of EXP
11091 along with N new arguments specified as the "..." parameters. SKIP
11092 is the number of arguments in EXP to be omitted. This function is used
11093 to do varargs-to-varargs transformations. */
11094
11095static tree
11096rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11097{
11098 va_list ap;
11099 tree t;
11100
11101 va_start (ap, n);
11102 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11103 CALL_EXPR_ARGP (exp), skip, fndecl, n, newargs: ap);
11104 va_end (ap);
11105
11106 return t;
11107}
11108
11109/* Validate a single argument ARG against a tree code CODE representing
11110 a type. Return true when argument is valid. */
11111
11112static bool
11113validate_arg (const_tree arg, enum tree_code code)
11114{
11115 if (!arg)
11116 return false;
11117 else if (code == POINTER_TYPE)
11118 return POINTER_TYPE_P (TREE_TYPE (arg));
11119 else if (code == INTEGER_TYPE)
11120 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11121 return code == TREE_CODE (TREE_TYPE (arg));
11122}
11123
11124/* This function validates the types of a function call argument list
11125 against a specified list of tree_codes. If the last specifier is a 0,
11126 that represents an ellipsis, otherwise the last specifier must be a
11127 VOID_TYPE.
11128
11129 This is the GIMPLE version of validate_arglist. Eventually we want to
11130 completely convert builtins.cc to work from GIMPLEs and the tree based
11131 validate_arglist will then be removed. */
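/* For example, a caller checking a (pointer, size) argument list would use
 something like
 if (!validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
 return false;
 passing a trailing 0 instead of VOID_TYPE when extra arguments are allowed. */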
11132
11133bool
11134validate_gimple_arglist (const gcall *call, ...)
11135{
11136 enum tree_code code;
11137 bool res = false;
11138 va_list ap;
11139 const_tree arg;
11140 size_t i;
11141
11142 va_start (ap, call);
11143 i = 0;
11144
11145 do
11146 {
11147 code = (enum tree_code) va_arg (ap, int);
11148 switch (code)
11149 {
11150 case 0:
11151 /* This signifies an ellipsis; any further arguments are all ok. */
11152 res = true;
11153 goto end;
11154 case VOID_TYPE:
11155 /* This signifies an endlink; if no arguments remain, return
11156 true, otherwise return false. */
11157 res = (i == gimple_call_num_args (gs: call));
11158 goto end;
11159 default:
11160 /* If no parameters remain or the parameter's code does not
11161 match the specified code, return false. Otherwise continue
11162 checking any remaining arguments. */
11163 arg = gimple_call_arg (gs: call, index: i++);
11164 if (!validate_arg (arg, code))
11165 goto end;
11166 break;
11167 }
11168 }
11169 while (1);
11170
11171 /* We need gotos here since we want a single va_end call on every
11172 exit path from the function. */
11173 end: ;
11174 va_end (ap);
11175
11176 return res;
11177}
11178
11179/* Default target-specific builtin expander that does nothing. */
11180
11181rtx
11182default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11183 rtx target ATTRIBUTE_UNUSED,
11184 rtx subtarget ATTRIBUTE_UNUSED,
11185 machine_mode mode ATTRIBUTE_UNUSED,
11186 int ignore ATTRIBUTE_UNUSED)
11187{
11188 return NULL_RTX;
11189}
11190
11191/* Return true if EXP represents data that would potentially reside
11192 in a readonly section. */
11193
11194bool
11195readonly_data_expr (tree exp)
11196{
11197 STRIP_NOPS (exp);
11198
11199 if (TREE_CODE (exp) != ADDR_EXPR)
11200 return false;
11201
11202 exp = get_base_address (TREE_OPERAND (exp, 0));
11203 if (!exp)
11204 return false;
11205
11206 /* Make sure we call decl_readonly_section only for trees it
11207 can handle (since it returns true for everything it doesn't
11208 understand). */
11209 if (TREE_CODE (exp) == STRING_CST
11210 || TREE_CODE (exp) == CONSTRUCTOR
11211 || (VAR_P (exp) && TREE_STATIC (exp)))
11212 return decl_readonly_section (exp, 0);
11213 else
11214 return false;
11215}
11216
11217/* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11218 to the call, and TYPE is its return type.
11219
11220 Return NULL_TREE if no simplification was possible, otherwise return the
11221 simplified form of the call as a tree.
11222
11223 The simplified form may be a constant or other expression which
11224 computes the same value, but in a more efficient manner (including
11225 calls to other builtin functions).
11226
11227 The call may contain arguments which need to be evaluated, but
11228 which are not useful to determine the result of the call. In
11229 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11230 COMPOUND_EXPR will be an argument which must be evaluated.
11231 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11232 COMPOUND_EXPR in the chain will contain the tree for the simplified
11233 form of the builtin function call. */
11234
11235static tree
11236fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
11237{
11238 if (!validate_arg (arg: s1, code: POINTER_TYPE)
11239 || !validate_arg (arg: s2, code: POINTER_TYPE))
11240 return NULL_TREE;
11241
11242 tree fn;
11243 const char *p1, *p2;
11244
11245 p2 = c_getstr (s2);
11246 if (p2 == NULL)
11247 return NULL_TREE;
11248
11249 p1 = c_getstr (s1);
11250 if (p1 != NULL)
11251 {
11252 const char *r = strpbrk (s: p1, accept: p2);
11253 tree tem;
11254
11255 if (r == NULL)
11256 return build_int_cst (TREE_TYPE (s1), 0);
11257
11258 /* Return an offset into the constant string argument. */
11259 tem = fold_build_pointer_plus_hwi_loc (loc, ptr: s1, off: r - p1);
11260 return fold_convert_loc (loc, type, tem);
11261 }
11262
11263 if (p2[0] == '\0')
11264 /* strpbrk(x, "") == NULL.
11265 Evaluate and ignore s1 in case it had side-effects. */
11266 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
11267
11268 if (p2[1] != '\0')
11269 return NULL_TREE; /* Really call strpbrk. */
11270
11271 fn = builtin_decl_implicit (fncode: BUILT_IN_STRCHR);
11272 if (!fn)
11273 return NULL_TREE;
11274
11275 /* New argument list transforming strpbrk(s1, s2) to
11276 strchr(s1, s2[0]). */
11277 return build_call_expr_loc (loc, fn, 2, s1,
11278 build_int_cst (integer_type_node, p2[0]));
11279}
11280
11281/* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11282 to the call.
11283
11284 Return NULL_TREE if no simplification was possible, otherwise return the
11285 simplified form of the call as a tree.
11286
11287 The simplified form may be a constant or other expression which
11288 computes the same value, but in a more efficient manner (including
11289 calls to other builtin functions).
11290
11291 The call may contain arguments which need to be evaluated, but
11292 which are not useful to determine the result of the call. In
11293 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11294 COMPOUND_EXPR will be an argument which must be evaluated.
11295 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11296 COMPOUND_EXPR in the chain will contain the tree for the simplified
11297 form of the builtin function call. */
11298
11299static tree
11300fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2, tree type)
11301{
11302 if (!validate_arg (arg: s1, code: POINTER_TYPE)
11303 || !validate_arg (arg: s2, code: POINTER_TYPE))
11304 return NULL_TREE;
11305
11306 if (!check_nul_terminated_array (expr, s1)
11307 || !check_nul_terminated_array (expr, s2))
11308 return NULL_TREE;
11309
11310 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11311
11312 /* If either argument is "", the result is zero. */
11313 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11314 /* Evaluate and ignore both arguments in case either one has
11315 side-effects. */
11316 return omit_two_operands_loc (loc, type, size_zero_node, s1, s2);
11317 return NULL_TREE;
11318}
11319
11320/* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11321 to the call.
11322
11323 Return NULL_TREE if no simplification was possible, otherwise return the
11324 simplified form of the call as a tree.
11325
11326 The simplified form may be a constant or other expression which
11327 computes the same value, but in a more efficient manner (including
11328 calls to other builtin functions).
11329
11330 The call may contain arguments which need to be evaluated, but
11331 which are not useful to determine the result of the call. In
11332 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11333 COMPOUND_EXPR will be an argument which must be evaluated.
11334 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11335 COMPOUND_EXPR in the chain will contain the tree for the simplified
11336 form of the builtin function call. */
11337
11338static tree
11339fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2, tree type)
11340{
11341 if (!validate_arg (arg: s1, code: POINTER_TYPE)
11342 || !validate_arg (arg: s2, code: POINTER_TYPE))
11343 return NULL_TREE;
11344
11345 if (!check_nul_terminated_array (expr, s1)
11346 || !check_nul_terminated_array (expr, s2))
11347 return NULL_TREE;
11348
11349 /* If the first argument is "", the result is zero. */
11350 const char *p1 = c_getstr (s1);
11351 if (p1 && *p1 == '\0')
11352 {
11353 /* Evaluate and ignore argument s2 in case it has
11354 side-effects. */
11355 return omit_one_operand_loc (loc, type, size_zero_node, s2);
11356 }
11357
11358 /* If the second argument is "", return __builtin_strlen(s1). */
11359 const char *p2 = c_getstr (s2);
11360 if (p2 && *p2 == '\0')
11361 {
11362 tree fn = builtin_decl_implicit (fncode: BUILT_IN_STRLEN);
11363
11364 /* If the replacement _DECL isn't initialized, don't do the
11365 transformation. */
11366 if (!fn)
11367 return NULL_TREE;
11368
11369 return fold_convert_loc (loc, type,
11370 build_call_expr_loc (loc, fn, 1, s1));
11371 }
11372 return NULL_TREE;
11373}
11374
11375/* Fold the next_arg or va_start call EXP. Return true if an error was
11376 produced, false otherwise. This is done so that we don't output the error
11377 or warning twice or three times. */
11378
11379bool
11380fold_builtin_next_arg (tree exp, bool va_start_p)
11381{
11382 tree fntype = TREE_TYPE (current_function_decl);
11383 int nargs = call_expr_nargs (exp);
11384 tree arg;
11385 /* There is a good chance the current input_location points inside the
11386 definition of the va_start macro (perhaps on the token for the
11387 builtin) in a system header, so warnings will not be emitted.
11388 Use the location in real source code. */
11389 location_t current_location =
11390 linemap_unwind_to_first_non_reserved_loc (line_table, loc: input_location,
11391 NULL);
11392
11393 if (!stdarg_p (fntype))
11394 {
11395 error ("%<va_start%> used in function with fixed arguments");
11396 return true;
11397 }
11398
11399 if (va_start_p)
11400 {
11401 if (nargs != 2)
11402 {
11403 error ("wrong number of arguments to function %<va_start%>");
11404 return true;
11405 }
11406 arg = CALL_EXPR_ARG (exp, 1);
11407 }
11408 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0) once we
11409 have checked the arguments and, if needed, issued a warning. */
11410 else
11411 {
11412 if (nargs == 0)
11413 {
11414 /* Evidently an out of date version of <stdarg.h>; can't validate
11415 va_start's second argument, but can still work as intended. */
11416 warning_at (current_location,
11417 OPT_Wvarargs,
11418 "%<__builtin_next_arg%> called without an argument");
11419 return true;
11420 }
11421 else if (nargs > 1)
11422 {
11423 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11424 return true;
11425 }
11426 arg = CALL_EXPR_ARG (exp, 0);
11427 }
11428
11429 if (TREE_CODE (arg) == SSA_NAME
11430 && SSA_NAME_VAR (arg))
11431 arg = SSA_NAME_VAR (arg);
11432
11433 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11434 or __builtin_next_arg (0) the first time we see it, after checking
11435 the arguments and if needed issuing a warning. */
11436 if (!integer_zerop (arg))
11437 {
11438 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11439
11440 /* Strip off all nops for the sake of the comparison. This
11441 is not quite the same as STRIP_NOPS. It does more.
11442 We must also strip off INDIRECT_EXPR for C++ reference
11443 parameters. */
11444 while (CONVERT_EXPR_P (arg)
11445 || INDIRECT_REF_P (arg))
11446 arg = TREE_OPERAND (arg, 0);
11447 if (arg != last_parm)
11448 {
11449 /* FIXME: Sometimes with the tree optimizers we can get something
11450 other than the last argument even though the user used the last
11451 argument. We just warn and set the arg to be the last
11452 argument so that we will get wrong-code because of
11453 it. */
11454 warning_at (current_location,
11455 OPT_Wvarargs,
11456 "second parameter of %<va_start%> not last named argument");
11457 }
11458
11459 /* Undefined by C99 7.15.1.4p4 (va_start):
11460 "If the parameter parmN is declared with the register storage
11461 class, with a function or array type, or with a type that is
11462 not compatible with the type that results after application of
11463 the default argument promotions, the behavior is undefined."
11464 */
11465 else if (DECL_REGISTER (arg))
11466 {
11467 warning_at (current_location,
11468 OPT_Wvarargs,
11469 "undefined behavior when second parameter of "
11470 "%<va_start%> is declared with %<register%> storage");
11471 }
11472
11473 /* We want to verify the second parameter just once before the tree
11474 optimizers are run and then avoid keeping it in the tree,
11475 as otherwise we could warn even for correct code like:
11476 void foo (int i, ...)
11477 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11478 if (va_start_p)
11479 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11480 else
11481 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11482 }
11483 return false;
11484}
11485
11486
11487/* Expand a call EXP to __builtin_object_size. */
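/* By the time expansion runs, any call whose result could be determined has
 already been folded, so what remains here expands to the "don't know"
 answer, roughly
 __builtin_object_size (p, 0) -> (size_t) -1
 __builtin_object_size (p, 2) -> (size_t) 0
 matching the constm1_rtx / const0_rtx returned below. */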
11488
11489static rtx
11490expand_builtin_object_size (tree exp)
11491{
11492 tree ost;
11493 int object_size_type;
11494 tree fndecl = get_callee_fndecl (exp);
11495
11496 if (!validate_arglist (callexpr: exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11497 {
11498 error ("first argument of %qD must be a pointer, second integer constant",
11499 fndecl);
11500 expand_builtin_trap ();
11501 return const0_rtx;
11502 }
11503
11504 ost = CALL_EXPR_ARG (exp, 1);
11505 STRIP_NOPS (ost);
11506
11507 if (TREE_CODE (ost) != INTEGER_CST
11508 || tree_int_cst_sgn (ost) < 0
11509 || compare_tree_int (ost, 3) > 0)
11510 {
11511 error ("last argument of %qD is not integer constant between 0 and 3",
11512 fndecl);
11513 expand_builtin_trap ();
11514 return const0_rtx;
11515 }
11516
11517 object_size_type = tree_to_shwi (ost);
11518
11519 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11520}
11521
11522/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11523 FCODE is the BUILT_IN_* to use.
11524 Return NULL_RTX if we failed; the caller should emit a normal call,
11525 otherwise try to get the result in TARGET, if convenient (and in
11526 mode MODE if that's convenient). */
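/* A hedged example of the intended transformation: when the length is a
 constant known to fit the destination, a call such as
 __builtin___memcpy_chk (d, s, 32, 64)
 is expanded as a plain
 memcpy (d, s, 32)
 while calls whose sizes cannot be validated are left as library calls. */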
11527
11528static rtx
11529expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11530 enum built_in_function fcode)
11531{
11532 if (!validate_arglist (callexpr: exp,
11533 POINTER_TYPE,
11534 fcode == BUILT_IN_MEMSET_CHK
11535 ? INTEGER_TYPE : POINTER_TYPE,
11536 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11537 return NULL_RTX;
11538
11539 tree dest = CALL_EXPR_ARG (exp, 0);
11540 tree src = CALL_EXPR_ARG (exp, 1);
11541 tree len = CALL_EXPR_ARG (exp, 2);
11542 tree size = CALL_EXPR_ARG (exp, 3);
11543
11544 /* FIXME: Set access mode to write only for memset et al. */
11545 bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
11546 /*srcstr=*/NULL_TREE, size, access_read_write);
11547
11548 if (!tree_fits_uhwi_p (size))
11549 return NULL_RTX;
11550
11551 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11552 {
11553 /* Avoid transforming the checking call to an ordinary one when
11554 an overflow has been detected or when the call couldn't be
11555 validated because the size is not constant. */
11556 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (t1: size, t2: len))
11557 return NULL_RTX;
11558
11559 tree fn = NULL_TREE;
11560 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11561 mem{cpy,pcpy,move,set} is available. */
11562 switch (fcode)
11563 {
11564 case BUILT_IN_MEMCPY_CHK:
11565          fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11566 break;
11567 case BUILT_IN_MEMPCPY_CHK:
11568          fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11569 break;
11570 case BUILT_IN_MEMMOVE_CHK:
11571          fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11572 break;
11573 case BUILT_IN_MEMSET_CHK:
11574          fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11575 break;
11576 default:
11577 break;
11578 }
11579
11580 if (! fn)
11581 return NULL_RTX;
11582
11583      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11584 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11585 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11586      return expand_expr (fn, target, mode, EXPAND_NORMAL);
11587 }
11588 else if (fcode == BUILT_IN_MEMSET_CHK)
11589 return NULL_RTX;
11590 else
11591 {
11592      unsigned int dest_align = get_pointer_alignment (dest);
11593
11594 /* If DEST is not a pointer type, call the normal function. */
11595 if (dest_align == 0)
11596 return NULL_RTX;
11597
11598 /* If SRC and DEST are the same (and not volatile), do nothing. */
11599      if (operand_equal_p (src, dest, 0))
11600 {
11601 tree expr;
11602
11603 if (fcode != BUILT_IN_MEMPCPY_CHK)
11604 {
11605 /* Evaluate and ignore LEN in case it has side-effects. */
11606              expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11607              return expand_expr (dest, target, mode, EXPAND_NORMAL);
11608 }
11609
11610 expr = fold_build_pointer_plus (dest, len);
11611          return expand_expr (expr, target, mode, EXPAND_NORMAL);
11612 }
11613
11614 /* __memmove_chk special case. */
11615 if (fcode == BUILT_IN_MEMMOVE_CHK)
11616 {
11617          unsigned int src_align = get_pointer_alignment (src);
11618
11619 if (src_align == 0)
11620 return NULL_RTX;
11621
11622 /* If src is categorized for a readonly section we can use
11623 normal __memcpy_chk. */
11624          if (readonly_data_expr (src))
11625 {
11626              tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11627 if (!fn)
11628 return NULL_RTX;
11629              fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11630 dest, src, len, size);
11631 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11632 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11633              return expand_expr (fn, target, mode, EXPAND_NORMAL);
11634 }
11635 }
11636 return NULL_RTX;
11637 }
11638}
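
/* A sketch (not part of builtins.cc) of the transformation performed above:
   when both the length and the destination object size are known constants
   and no overflow is possible, the checking variant is expanded as the
   ordinary library call.

     char dst[16];
     // 5 bytes into a 16-byte object: expands as a plain memcpy.
     __builtin___memcpy_chk (dst, "data", 5, __builtin_object_size (dst, 0));
*/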
11639
11640/* Emit warning if a buffer overflow is detected at compile time. */
11641
11642static void
11643maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11644{
11645 /* The source string. */
11646 tree srcstr = NULL_TREE;
11647 /* The size of the destination object returned by __builtin_object_size. */
11648 tree objsize = NULL_TREE;
11649  /* The string being appended to (as in __strcat_chk),
11650     or null if the operation isn't a concatenation. */
11651 tree catstr = NULL_TREE;
11652 /* The maximum length of the source sequence in a bounded operation
11653 (such as __strncat_chk) or null if the operation isn't bounded
11654 (such as __strcat_chk). */
11655 tree maxread = NULL_TREE;
11656 /* The exact size of the access (such as in __strncpy_chk). */
11657 tree size = NULL_TREE;
11658  /* The mode of access by the function that's checked.  Except for
11659     snprintf both writing and reading are checked. */
11660 access_mode mode = access_read_write;
11661
11662 switch (fcode)
11663 {
11664 case BUILT_IN_STRCPY_CHK:
11665 case BUILT_IN_STPCPY_CHK:
11666 srcstr = CALL_EXPR_ARG (exp, 1);
11667 objsize = CALL_EXPR_ARG (exp, 2);
11668 break;
11669
11670 case BUILT_IN_STRCAT_CHK:
11671 /* For __strcat_chk the warning will be emitted only if overflowing
11672 by at least strlen (dest) + 1 bytes. */
11673 catstr = CALL_EXPR_ARG (exp, 0);
11674 srcstr = CALL_EXPR_ARG (exp, 1);
11675 objsize = CALL_EXPR_ARG (exp, 2);
11676 break;
11677
11678 case BUILT_IN_STRNCAT_CHK:
11679 catstr = CALL_EXPR_ARG (exp, 0);
11680 srcstr = CALL_EXPR_ARG (exp, 1);
11681 maxread = CALL_EXPR_ARG (exp, 2);
11682 objsize = CALL_EXPR_ARG (exp, 3);
11683 break;
11684
11685 case BUILT_IN_STRNCPY_CHK:
11686 case BUILT_IN_STPNCPY_CHK:
11687 srcstr = CALL_EXPR_ARG (exp, 1);
11688 size = CALL_EXPR_ARG (exp, 2);
11689 objsize = CALL_EXPR_ARG (exp, 3);
11690 break;
11691
11692 case BUILT_IN_SNPRINTF_CHK:
11693 case BUILT_IN_VSNPRINTF_CHK:
11694 maxread = CALL_EXPR_ARG (exp, 1);
11695 objsize = CALL_EXPR_ARG (exp, 3);
11696      /* The only checked access is the write to the destination. */
11697 mode = access_write_only;
11698 break;
11699 default:
11700 gcc_unreachable ();
11701 }
11702
11703 if (catstr && maxread)
11704 {
11705 /* Check __strncat_chk. There is no way to determine the length
11706 of the string to which the source string is being appended so
11707 just warn when the length of the source string is not known. */
11708 check_strncat_sizes (exp, objsize);
11709 return;
11710 }
11711
11712 check_access (exp, size, maxread, srcstr, objsize, mode);
11713}
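
/* An example (not from the GCC sources) of a call the routine above
   diagnoses at compile time; with _FORTIFY_SOURCE, strcpy of a too-long
   literal is expanded into a __strcpy_chk call of this shape and typically
   draws a -Wstringop-overflow warning.

     char buf[4];
     // Needs 13 bytes (including the nul) but the object is 4 bytes.
     __builtin___strcpy_chk (buf, "hello world!",
                             __builtin_object_size (buf, 0));
*/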
11714
11715/* Emit warning if a buffer overflow is detected at compile time
11716 in __sprintf_chk/__vsprintf_chk calls. */
11717
11718static void
11719maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11720{
11721 tree size, len, fmt;
11722 const char *fmt_str;
11723 int nargs = call_expr_nargs (exp);
11724
11725 /* Verify the required arguments in the original call. */
11726
11727 if (nargs < 4)
11728 return;
11729 size = CALL_EXPR_ARG (exp, 2);
11730 fmt = CALL_EXPR_ARG (exp, 3);
11731
11732 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11733 return;
11734
11735 /* Check whether the format is a literal string constant. */
11736 fmt_str = c_getstr (fmt);
11737 if (fmt_str == NULL)
11738 return;
11739
11740 if (!init_target_chars ())
11741 return;
11742
11743 /* If the format doesn't contain % args or %%, we know its size. */
11744  if (strchr (fmt_str, target_percent) == 0)
11745    len = build_int_cstu (size_type_node, strlen (fmt_str));
11746 /* If the format is "%s" and first ... argument is a string literal,
11747 we know it too. */
11748 else if (fcode == BUILT_IN_SPRINTF_CHK
11749           && strcmp (fmt_str, target_percent_s) == 0)
11750 {
11751 tree arg;
11752
11753 if (nargs < 5)
11754 return;
11755 arg = CALL_EXPR_ARG (exp, 4);
11756 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11757 return;
11758
11759      len = c_strlen (arg, 1);
11760 if (!len || ! tree_fits_uhwi_p (len))
11761 return;
11762 }
11763 else
11764 return;
11765
11766 /* Add one for the terminating nul. */
11767 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
11768
11769 check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
11770 access_write_only);
11771}
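
/* An illustration (not part of builtins.cc) of the case handled above where
   the format string contains no '%' directives, so the output length is
   known at compile time and can be checked against the destination size.

     char buf[4];
     // "hello world" needs 12 bytes including the nul, exceeding buf.
     __builtin___sprintf_chk (buf, 0, __builtin_object_size (buf, 0),
                              "hello world");
*/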
11772
11773/* Fold a call to __builtin_object_size with arguments PTR and OST,
11774 if possible. */
11775
11776static tree
11777fold_builtin_object_size (tree ptr, tree ost, enum built_in_function fcode)
11778{
11779 tree bytes;
11780 int object_size_type;
11781
11782  if (!validate_arg (ptr, POINTER_TYPE)
11783      || !validate_arg (ost, INTEGER_TYPE))
11784 return NULL_TREE;
11785
11786 STRIP_NOPS (ost);
11787
11788 if (TREE_CODE (ost) != INTEGER_CST
11789 || tree_int_cst_sgn (ost) < 0
11790 || compare_tree_int (ost, 3) > 0)
11791 return NULL_TREE;
11792
11793 object_size_type = tree_to_shwi (ost);
11794
11795 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11796 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11797 and (size_t) 0 for types 2 and 3. */
11798 if (TREE_SIDE_EFFECTS (ptr))
11799 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11800
11801 if (fcode == BUILT_IN_DYNAMIC_OBJECT_SIZE)
11802 object_size_type |= OST_DYNAMIC;
11803
11804 if (TREE_CODE (ptr) == ADDR_EXPR)
11805 {
11806 compute_builtin_object_size (ptr, object_size_type, &bytes);
11807 if ((object_size_type & OST_DYNAMIC)
11808 || int_fits_type_p (bytes, size_type_node))
11809 return fold_convert (size_type_node, bytes);
11810 }
11811 else if (TREE_CODE (ptr) == SSA_NAME)
11812 {
11813 /* If object size is not known yet, delay folding until
11814         later.  Maybe subsequent passes will help determine
11815         it. */
11816 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
11817 && ((object_size_type & OST_DYNAMIC)
11818 || int_fits_type_p (bytes, size_type_node)))
11819 return fold_convert (size_type_node, bytes);
11820 }
11821
11822 return NULL_TREE;
11823}
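
/* A small example (not from the GCC sources) of the ADDR_EXPR case folded
   above: when the object and the offset into it are known, the builtin
   folds to a constant at compile time.

     char a[16];
     size_t whole = __builtin_object_size (a, 0);      // folds to 16
     size_t rest  = __builtin_object_size (&a[4], 0);  // folds to 12
*/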
11824
11825/* Builtins with folding operations that operate on "..." arguments
11826 need special handling; we need to store the arguments in a convenient
11827 data structure before attempting any folding. Fortunately there are
11828 only a few builtins that fall into this category. FNDECL is the
11829 function, EXP is the CALL_EXPR for the call. */
11830
11831static tree
11832fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11833{
11834  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11835 tree ret = NULL_TREE;
11836
11837 switch (fcode)
11838 {
11839 case BUILT_IN_FPCLASSIFY:
11840 ret = fold_builtin_fpclassify (loc, args, nargs);
11841 break;
11842
11843 case BUILT_IN_ADDC:
11844 case BUILT_IN_ADDCL:
11845 case BUILT_IN_ADDCLL:
11846 case BUILT_IN_SUBC:
11847 case BUILT_IN_SUBCL:
11848 case BUILT_IN_SUBCLL:
11849 return fold_builtin_addc_subc (loc, fcode, args);
11850
11851 default:
11852 break;
11853 }
11854 if (ret)
11855 {
11856 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11857 SET_EXPR_LOCATION (ret, loc);
11858 suppress_warning (ret);
11859 return ret;
11860 }
11861 return NULL_TREE;
11862}
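
/* A usage sketch (not part of builtins.cc) for the __builtin_fpclassify case
   handled above: the first five arguments are the values to return for each
   floating-point class, the last is the operand; with a constant operand the
   call folds to one of the class values.

     #include <math.h>
     int c = __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                                   FP_SUBNORMAL, FP_ZERO, 2.0);  // FP_NORMAL
*/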
11863
11864/* Initialize format string characters in the target charset. */
11865
11866bool
11867init_target_chars (void)
11868{
11869 static bool init;
11870 if (!init)
11871 {
11872 target_newline = lang_hooks.to_target_charset ('\n');
11873 target_percent = lang_hooks.to_target_charset ('%');
11874 target_c = lang_hooks.to_target_charset ('c');
11875 target_s = lang_hooks.to_target_charset ('s');
11876 if (target_newline == 0 || target_percent == 0 || target_c == 0
11877 || target_s == 0)
11878 return false;
11879
11880 target_percent_c[0] = target_percent;
11881 target_percent_c[1] = target_c;
11882 target_percent_c[2] = '\0';
11883
11884 target_percent_s[0] = target_percent;
11885 target_percent_s[1] = target_s;
11886 target_percent_s[2] = '\0';
11887
11888 target_percent_s_newline[0] = target_percent;
11889 target_percent_s_newline[1] = target_s;
11890 target_percent_s_newline[2] = target_newline;
11891 target_percent_s_newline[3] = '\0';
11892
11893 init = true;
11894 }
11895 return true;
11896}
11897
11898/* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11899 and no overflow/underflow occurred. INEXACT is true if M was not
11900 exactly calculated. TYPE is the tree type for the result. This
11901   function assumes that the caller cleared the MPFR flags and then
11902   calculated M, so that any exception flag set since then can be
11903   detected here.  Return NULL_TREE if any checks fail. */
11904
11905static tree
11906do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11907{
11908 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11909 overflow/underflow occurred. If -frounding-math, proceed iff the
11910 result of calling FUNC was exact. */
11911 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11912 && (!flag_rounding_math || !inexact))
11913 {
11914 REAL_VALUE_TYPE rr;
11915
11916 real_from_mpfr (&rr, m, type, MPFR_RNDN);
11917 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11918 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11919 but the mpfr_t is not, then we underflowed in the
11920 conversion. */
11921 if (real_isfinite (&rr)
11922 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11923 {
11924 REAL_VALUE_TYPE rmode;
11925
11926 real_convert (&rmode, TYPE_MODE (type), &rr);
11927 /* Proceed iff the specified mode can hold the value. */
11928 if (real_identical (&rmode, &rr))
11929 return build_real (type, rmode);
11930 }
11931 }
11932 return NULL_TREE;
11933}
11934
11935/* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11936 number and no overflow/underflow occurred. INEXACT is true if M
11937 was not exactly calculated. TYPE is the tree type for the result.
11938   This function assumes that the caller cleared the MPFR flags and
11939   then calculated M, so that any exception flag set since then can be
11940   detected here.  Return NULL_TREE if any checks fail; if
11941   FORCE_CONVERT is true, bypass the checks. */
11942
11943static tree
11944do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11945{
11946 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11947 overflow/underflow occurred. If -frounding-math, proceed iff the
11948 result of calling FUNC was exact. */
11949 if (force_convert
11950 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11951 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11952 && (!flag_rounding_math || !inexact)))
11953 {
11954 REAL_VALUE_TYPE re, im;
11955
11956 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
11957 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
11958 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11959 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11960 but the mpfr_t is not, then we underflowed in the
11961 conversion. */
11962 if (force_convert
11963 || (real_isfinite (&re) && real_isfinite (&im)
11964 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11965 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11966 {
11967 REAL_VALUE_TYPE re_mode, im_mode;
11968
11969 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11970 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11971 /* Proceed iff the specified mode can hold the value. */
11972 if (force_convert
11973 || (real_identical (&re_mode, &re)
11974 && real_identical (&im_mode, &im)))
11975 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11976 build_real (TREE_TYPE (type), im_mode));
11977 }
11978 }
11979 return NULL_TREE;
11980}
11981
11982/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
11983 the pointer *(ARG_QUO) and return the result. The type is taken
11984 from the type of ARG0 and is used for setting the precision of the
11985 calculation and results. */
11986
11987static tree
11988do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
11989{
11990 tree const type = TREE_TYPE (arg0);
11991 tree result = NULL_TREE;
11992
11993 STRIP_NOPS (arg0);
11994 STRIP_NOPS (arg1);
11995
11996 /* To proceed, MPFR must exactly represent the target floating point
11997 format, which only happens when the target base equals two. */
11998 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11999 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12000 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12001 {
12002 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12003 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12004
12005 if (real_isfinite (ra0) && real_isfinite (ra1))
12006 {
12007 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12008 const int prec = fmt->p;
12009 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
12010 tree result_rem;
12011 long integer_quo;
12012 mpfr_t m0, m1;
12013
12014 mpfr_inits2 (prec, m0, m1, NULL);
12015 mpfr_from_real (m0, ra0, MPFR_RNDN);
12016 mpfr_from_real (m1, ra1, MPFR_RNDN);
12017 mpfr_clear_flags ();
12018 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
12019 /* Remquo is independent of the rounding mode, so pass
12020 inexact=0 to do_mpfr_ckconv(). */
12021          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12022 mpfr_clears (m0, m1, NULL);
12023 if (result_rem)
12024 {
12025 /* MPFR calculates quo in the host's long so it may
12026 return more bits in quo than the target int can hold
12027 if sizeof(host long) > sizeof(target int). This can
12028 happen even for native compilers in LP64 mode. In
12029 these cases, modulo the quo value with the largest
12030 number that the target int can hold while leaving one
12031 bit for the sign. */
12032 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12033 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12034
12035 /* Dereference the quo pointer argument. */
12036 arg_quo = build_fold_indirect_ref (arg_quo);
12037 /* Proceed iff a valid pointer type was passed in. */
12038 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12039 {
12040 /* Set the value. */
12041 tree result_quo
12042 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12043 build_int_cst (TREE_TYPE (arg_quo),
12044 integer_quo));
12045 TREE_SIDE_EFFECTS (result_quo) = 1;
12046 /* Combine the quo assignment with the rem. */
12047 result = fold_build2 (COMPOUND_EXPR, type,
12048 result_quo, result_rem);
12049 suppress_warning (result, OPT_Wunused_value);
12050 result = non_lvalue (result);
12051 }
12052 }
12053 }
12054 }
12055 return result;
12056}
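
/* An example (not from the GCC sources) of the folding performed above when
   both operands of remquo are constants: the IEEE remainder of 5.0 and 3.0
   is -1.0, and the low bits of the rounded quotient (2) are stored through
   the pointer argument.

     int quo;
     double rem = __builtin_remquo (5.0, 3.0, &quo);  // rem == -1.0, quo == 2
*/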
12057
12058/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12059 resulting value as a tree with type TYPE. The mpfr precision is
12060 set to the precision of TYPE. We assume that this mpfr function
12061 returns zero if the result could be calculated exactly within the
12062 requested precision. In addition, the integer pointer represented
12063 by ARG_SG will be dereferenced and set to the appropriate signgam
12064 (-1,1) value. */
12065
12066static tree
12067do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12068{
12069 tree result = NULL_TREE;
12070
12071 STRIP_NOPS (arg);
12072
12073 /* To proceed, MPFR must exactly represent the target floating point
12074 format, which only happens when the target base equals two. Also
12075 verify ARG is a constant and that ARG_SG is an int pointer. */
12076 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12077 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12078 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12079 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12080 {
12081 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12082
12083 /* In addition to NaN and Inf, the argument cannot be zero or a
12084 negative integer. */
12085 if (real_isfinite (ra)
12086 && ra->cl != rvc_zero
12087 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
12088 {
12089 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12090 const int prec = fmt->p;
12091 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
12092 int inexact, sg;
12093 tree result_lg;
12094
12095 auto_mpfr m (prec);
12096 mpfr_from_real (m, ra, MPFR_RNDN);
12097 mpfr_clear_flags ();
12098 inexact = mpfr_lgamma (m, &sg, m, rnd);
12099 result_lg = do_mpfr_ckconv (m, type, inexact);
12100 if (result_lg)
12101 {
12102 tree result_sg;
12103
12104 /* Dereference the arg_sg pointer argument. */
12105 arg_sg = build_fold_indirect_ref (arg_sg);
12106 /* Assign the signgam value into *arg_sg. */
12107 result_sg = fold_build2 (MODIFY_EXPR,
12108 TREE_TYPE (arg_sg), arg_sg,
12109 build_int_cst (TREE_TYPE (arg_sg), sg));
12110 TREE_SIDE_EFFECTS (result_sg) = 1;
12111 /* Combine the signgam assignment with the lgamma result. */
12112 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12113 result_sg, result_lg));
12114 }
12115 }
12116 }
12117
12118 return result;
12119}
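
/* A sketch (not part of builtins.cc) of the lgamma_r folding above:
   Gamma (-0.5) is negative, so the sign written through the pointer is -1
   and the returned value is log (|Gamma (-0.5)|), roughly 1.2655.

     int sign;
     double v = __builtin_lgamma_r (-0.5, &sign);  // v ~= 1.2655, sign == -1
*/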
12120
12121/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
12122 mpc function FUNC on it and return the resulting value as a tree
12123 with type TYPE. The mpfr precision is set to the precision of
12124 TYPE. We assume that function FUNC returns zero if the result
12125 could be calculated exactly within the requested precision. If
12126 DO_NONFINITE is true, then fold expressions containing Inf or NaN
12127 in the arguments and/or results. */
12128
12129tree
12130do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
12131 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
12132{
12133 tree result = NULL_TREE;
12134
12135 STRIP_NOPS (arg0);
12136 STRIP_NOPS (arg1);
12137
12138 /* To proceed, MPFR must exactly represent the target floating point
12139 format, which only happens when the target base equals two. */
12140 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
12141 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg0)))
12142 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
12143 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg1)))
12144 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
12145 {
12146 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
12147 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
12148 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
12149 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
12150
12151 if (do_nonfinite
12152 || (real_isfinite (re0) && real_isfinite (im0)
12153 && real_isfinite (re1) && real_isfinite (im1)))
12154 {
12155 const struct real_format *const fmt =
12156 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12157 const int prec = fmt->p;
12158 const mpfr_rnd_t rnd = fmt->round_towards_zero
12159 ? MPFR_RNDZ : MPFR_RNDN;
12160 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12161 int inexact;
12162 mpc_t m0, m1;
12163
12164 mpc_init2 (m0, prec);
12165 mpc_init2 (m1, prec);
12166 mpfr_from_real (mpc_realref (m0), re0, rnd);
12167 mpfr_from_real (mpc_imagref (m0), im0, rnd);
12168 mpfr_from_real (mpc_realref (m1), re1, rnd);
12169 mpfr_from_real (mpc_imagref (m1), im1, rnd);
12170 mpfr_clear_flags ();
12171 inexact = func (m0, m0, m1, crnd);
12172          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
12173 mpc_clear (m0);
12174 mpc_clear (m1);
12175 }
12176 }
12177
12178 return result;
12179}
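
/* An illustration (not from the GCC sources) of the two-argument complex
   folding this helper enables, assuming it is reached for a cpow call with
   constant operands: (1 + i) squared is 2i, so the result folds to
   approximately 0.0 + 2.0i.

     #include <complex.h>
     _Complex double z = __builtin_cpow (1.0 + 1.0 * I, 2.0);
*/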
12180
12181/* A wrapper function for builtin folding that prevents warnings for
12182 "statement without effect" and the like, caused by removing the
12183 call node earlier than the warning is generated. */
12184
12185tree
12186fold_call_stmt (gcall *stmt, bool ignore)
12187{
12188 tree ret = NULL_TREE;
12189  tree fndecl = gimple_call_fndecl (stmt);
12190  location_t loc = gimple_location (stmt);
12191  if (fndecl && fndecl_built_in_p (fndecl)
12192      && !gimple_call_va_arg_pack_p (stmt))
12193 {
12194      int nargs = gimple_call_num_args (stmt);
12195      tree *args = (nargs > 0
12196                    ? gimple_call_arg_ptr (stmt, 0)
12197 : &error_mark_node);
12198
12199 if (avoid_folding_inline_builtin (fndecl))
12200 return NULL_TREE;
12201 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12202 {
12203 return targetm.fold_builtin (fndecl, nargs, args, ignore);
12204 }
12205 else
12206 {
12207 ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
12208 if (ret)
12209 {
12210 /* Propagate location information from original call to
12211 expansion of builtin. Otherwise things like
12212 maybe_emit_chk_warning, that operate on the expansion
12213 of a builtin, will use the wrong location information. */
12214              if (gimple_has_location (stmt))
12215 {
12216 tree realret = ret;
12217 if (TREE_CODE (ret) == NOP_EXPR)
12218 realret = TREE_OPERAND (ret, 0);
12219 if (CAN_HAVE_LOCATION_P (realret)
12220 && !EXPR_HAS_LOCATION (realret))
12221 SET_EXPR_LOCATION (realret, loc);
12222 return realret;
12223 }
12224 return ret;
12225 }
12226 }
12227 }
12228 return NULL_TREE;
12229}
12230
12231/* Look up the function in builtin_decl that corresponds to DECL
12232 and set ASMSPEC as its user assembler name. DECL must be a
12233 function decl that declares a builtin. */
12234
12235void
12236set_builtin_user_assembler_name (tree decl, const char *asmspec)
12237{
12238 gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
12239 && asmspec != 0);
12240
12241  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
12242 set_user_assembler_name (builtin, asmspec);
12243
12244 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
12245 && INT_TYPE_SIZE < BITS_PER_WORD)
12246 {
12247      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
12248 set_user_assembler_libfunc ("ffs", asmspec);
12249 set_optab_libfunc (ffs_optab, mode, "ffs");
12250 }
12251}
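
/* A minimal example (not part of builtins.cc) of how a builtin acquires a
   user assembler name, which is what triggers the routine above; the name
   `my_memcpy' is purely illustrative.

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("my_memcpy");
*/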
12252
12253/* Return true if DECL is a builtin that expands to a constant or similarly
12254 simple code. */
12255bool
12256is_simple_builtin (tree decl)
12257{
12258  if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
12259 switch (DECL_FUNCTION_CODE (decl))
12260 {
12261 /* Builtins that expand to constants. */
12262 case BUILT_IN_CONSTANT_P:
12263 case BUILT_IN_EXPECT:
12264 case BUILT_IN_OBJECT_SIZE:
12265 case BUILT_IN_UNREACHABLE:
12266 /* Simple register moves or loads from stack. */
12267 case BUILT_IN_ASSUME_ALIGNED:
12268 case BUILT_IN_RETURN_ADDRESS:
12269 case BUILT_IN_EXTRACT_RETURN_ADDR:
12270 case BUILT_IN_FROB_RETURN_ADDR:
12271 case BUILT_IN_RETURN:
12272 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
12273 case BUILT_IN_FRAME_ADDRESS:
12274 case BUILT_IN_VA_END:
12275 case BUILT_IN_STACK_SAVE:
12276 case BUILT_IN_STACK_RESTORE:
12277 case BUILT_IN_DWARF_CFA:
12278 /* Exception state returns or moves registers around. */
12279 case BUILT_IN_EH_FILTER:
12280 case BUILT_IN_EH_POINTER:
12281 case BUILT_IN_EH_COPY_VALUES:
12282 return true;
12283
12284 default:
12285 return false;
12286 }
12287
12288 return false;
12289}
12290
12291/* Return true if DECL is a builtin that is not expensive, i.e., it is
12292   most probably expanded inline into reasonably simple code. This is a
12293 superset of is_simple_builtin. */
12294bool
12295is_inexpensive_builtin (tree decl)
12296{
12297 if (!decl)
12298 return false;
12299 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
12300 return true;
12301 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12302 switch (DECL_FUNCTION_CODE (decl))
12303 {
12304 case BUILT_IN_ABS:
12305 CASE_BUILT_IN_ALLOCA:
12306 case BUILT_IN_BSWAP16:
12307 case BUILT_IN_BSWAP32:
12308 case BUILT_IN_BSWAP64:
12309 case BUILT_IN_BSWAP128:
12310 case BUILT_IN_CLZ:
12311 case BUILT_IN_CLZIMAX:
12312 case BUILT_IN_CLZL:
12313 case BUILT_IN_CLZLL:
12314 case BUILT_IN_CTZ:
12315 case BUILT_IN_CTZIMAX:
12316 case BUILT_IN_CTZL:
12317 case BUILT_IN_CTZLL:
12318 case BUILT_IN_FFS:
12319 case BUILT_IN_FFSIMAX:
12320 case BUILT_IN_FFSL:
12321 case BUILT_IN_FFSLL:
12322 case BUILT_IN_IMAXABS:
12323 case BUILT_IN_FINITE:
12324 case BUILT_IN_FINITEF:
12325 case BUILT_IN_FINITEL:
12326 case BUILT_IN_FINITED32:
12327 case BUILT_IN_FINITED64:
12328 case BUILT_IN_FINITED128:
12329 case BUILT_IN_FPCLASSIFY:
12330 case BUILT_IN_ISFINITE:
12331 case BUILT_IN_ISINF_SIGN:
12332 case BUILT_IN_ISINF:
12333 case BUILT_IN_ISINFF:
12334 case BUILT_IN_ISINFL:
12335 case BUILT_IN_ISINFD32:
12336 case BUILT_IN_ISINFD64:
12337 case BUILT_IN_ISINFD128:
12338 case BUILT_IN_ISNAN:
12339 case BUILT_IN_ISNANF:
12340 case BUILT_IN_ISNANL:
12341 case BUILT_IN_ISNAND32:
12342 case BUILT_IN_ISNAND64:
12343 case BUILT_IN_ISNAND128:
12344 case BUILT_IN_ISNORMAL:
12345 case BUILT_IN_ISGREATER:
12346 case BUILT_IN_ISGREATEREQUAL:
12347 case BUILT_IN_ISLESS:
12348 case BUILT_IN_ISLESSEQUAL:
12349 case BUILT_IN_ISLESSGREATER:
12350 case BUILT_IN_ISUNORDERED:
12351 case BUILT_IN_ISEQSIG:
12352 case BUILT_IN_VA_ARG_PACK:
12353 case BUILT_IN_VA_ARG_PACK_LEN:
12354 case BUILT_IN_VA_COPY:
12355 case BUILT_IN_TRAP:
12356 case BUILT_IN_UNREACHABLE_TRAP:
12357 case BUILT_IN_SAVEREGS:
12358 case BUILT_IN_POPCOUNTL:
12359 case BUILT_IN_POPCOUNTLL:
12360 case BUILT_IN_POPCOUNTIMAX:
12361 case BUILT_IN_POPCOUNT:
12362 case BUILT_IN_PARITYL:
12363 case BUILT_IN_PARITYLL:
12364 case BUILT_IN_PARITYIMAX:
12365 case BUILT_IN_PARITY:
12366 case BUILT_IN_LABS:
12367 case BUILT_IN_LLABS:
12368 case BUILT_IN_PREFETCH:
12369 case BUILT_IN_ACC_ON_DEVICE:
12370 return true;
12371
12372 default:
12373 return is_simple_builtin (decl);
12374 }
12375
12376 return false;
12377}
12378
12379/* Return true if T is a constant and the value cast to a target char
12380 can be represented by a host char.
12381   Store the resulting char constant in *P if so. */
12382
12383bool
12384target_char_cst_p (tree t, char *p)
12385{
12386 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
12387 return false;
12388
12389 *p = (char)tree_to_uhwi (t);
12390 return true;
12391}
12392
12393/* Return true if the builtin DECL is implemented in a standard library.
12394   Otherwise return false, which doesn't guarantee it is not (thus the list
12395 of handled builtins below may be incomplete). */
12396
12397bool
12398builtin_with_linkage_p (tree decl)
12399{
12400 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12401 switch (DECL_FUNCTION_CODE (decl))
12402 {
12403 CASE_FLT_FN (BUILT_IN_ACOS):
12404 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ACOS):
12405 CASE_FLT_FN (BUILT_IN_ACOSH):
12406 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ACOSH):
12407 CASE_FLT_FN (BUILT_IN_ASIN):
12408 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ASIN):
12409 CASE_FLT_FN (BUILT_IN_ASINH):
12410 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ASINH):
12411 CASE_FLT_FN (BUILT_IN_ATAN):
12412 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATAN):
12413 CASE_FLT_FN (BUILT_IN_ATANH):
12414 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATANH):
12415 CASE_FLT_FN (BUILT_IN_ATAN2):
12416 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATAN2):
12417 CASE_FLT_FN (BUILT_IN_CBRT):
12418 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CBRT):
12419 CASE_FLT_FN (BUILT_IN_CEIL):
12420 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
12421 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12422 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
12423 CASE_FLT_FN (BUILT_IN_COS):
12424 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COS):
12425 CASE_FLT_FN (BUILT_IN_COSH):
12426 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COSH):
12427 CASE_FLT_FN (BUILT_IN_ERF):
12428 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ERF):
12429 CASE_FLT_FN (BUILT_IN_ERFC):
12430 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ERFC):
12431 CASE_FLT_FN (BUILT_IN_EXP):
12432 CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXP):
12433 CASE_FLT_FN (BUILT_IN_EXP2):
12434 CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXP2):
12435 CASE_FLT_FN (BUILT_IN_EXPM1):
12436 CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXPM1):
12437 CASE_FLT_FN (BUILT_IN_FABS):
12438 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
12439 CASE_FLT_FN (BUILT_IN_FDIM):
12440 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FDIM):
12441 CASE_FLT_FN (BUILT_IN_FLOOR):
12442 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
12443 CASE_FLT_FN (BUILT_IN_FMA):
12444 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
12445 CASE_FLT_FN (BUILT_IN_FMAX):
12446 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
12447 CASE_FLT_FN (BUILT_IN_FMIN):
12448 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
12449 CASE_FLT_FN (BUILT_IN_FMOD):
12450 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMOD):
12451 CASE_FLT_FN (BUILT_IN_FREXP):
12452 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FREXP):
12453 CASE_FLT_FN (BUILT_IN_HYPOT):
12454 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HYPOT):
12455 CASE_FLT_FN (BUILT_IN_ILOGB):
12456 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ILOGB):
12457 CASE_FLT_FN (BUILT_IN_LDEXP):
12458 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LDEXP):
12459 CASE_FLT_FN (BUILT_IN_LGAMMA):
12460 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LGAMMA):
12461 CASE_FLT_FN (BUILT_IN_LLRINT):
12462 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LLRINT):
12463 CASE_FLT_FN (BUILT_IN_LLROUND):
12464 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LLROUND):
12465 CASE_FLT_FN (BUILT_IN_LOG):
12466 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG):
12467 CASE_FLT_FN (BUILT_IN_LOG10):
12468 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG10):
12469 CASE_FLT_FN (BUILT_IN_LOG1P):
12470 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG1P):
12471 CASE_FLT_FN (BUILT_IN_LOG2):
12472 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG2):
12473 CASE_FLT_FN (BUILT_IN_LOGB):
12474 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOGB):
12475 CASE_FLT_FN (BUILT_IN_LRINT):
12476 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LRINT):
12477 CASE_FLT_FN (BUILT_IN_LROUND):
12478 CASE_FLT_FN_FLOATN_NX (BUILT_IN_LROUND):
12479 CASE_FLT_FN (BUILT_IN_MODF):
12480 CASE_FLT_FN_FLOATN_NX (BUILT_IN_MODF):
12481 CASE_FLT_FN (BUILT_IN_NAN):
12482 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NAN):
12483 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12484 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
12485 CASE_FLT_FN (BUILT_IN_NEXTAFTER):
12486 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEXTAFTER):
12487 CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
12488 CASE_FLT_FN (BUILT_IN_POW):
12489 CASE_FLT_FN_FLOATN_NX (BUILT_IN_POW):
12490 CASE_FLT_FN (BUILT_IN_REMAINDER):
12491 CASE_FLT_FN_FLOATN_NX (BUILT_IN_REMAINDER):
12492 CASE_FLT_FN (BUILT_IN_REMQUO):
12493 CASE_FLT_FN_FLOATN_NX (BUILT_IN_REMQUO):
12494 CASE_FLT_FN (BUILT_IN_RINT):
12495 CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
12496 CASE_FLT_FN (BUILT_IN_ROUND):
12497 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
12498 CASE_FLT_FN (BUILT_IN_SCALBLN):
12499 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SCALBLN):
12500 CASE_FLT_FN (BUILT_IN_SCALBN):
12501 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SCALBN):
12502 CASE_FLT_FN (BUILT_IN_SIN):
12503 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SIN):
12504 CASE_FLT_FN (BUILT_IN_SINH):
12505 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SINH):
12506 CASE_FLT_FN (BUILT_IN_SINCOS):
12507 CASE_FLT_FN (BUILT_IN_SQRT):
12508 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
12509 CASE_FLT_FN (BUILT_IN_TAN):
12510 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TAN):
12511 CASE_FLT_FN (BUILT_IN_TANH):
12512 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TANH):
12513 CASE_FLT_FN (BUILT_IN_TGAMMA):
12514 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TGAMMA):
12515 CASE_FLT_FN (BUILT_IN_TRUNC):
12516 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
12517 return true;
12518
12519 case BUILT_IN_STPCPY:
12520 case BUILT_IN_STPNCPY:
12521 /* stpcpy is both referenced in libiberty's pex-win32.c and provided
12522 by libiberty's stpcpy.c for MinGW targets so we need to return true
12523 in order to be able to build libiberty in LTO mode for them. */
12524 return true;
12525
12526 default:
12527 break;
12528 }
12529 return false;
12530}
12531
12532/* Return true if OFFRNG is bounded to a subrange of offset values
12533 valid for the largest possible object. */
12534
12535bool
12536access_ref::offset_bounded () const
12537{
12538 tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
12539 tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
12540  return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
12541}
12542
12543/* Return an fnspec string describing the known side effects of builtin
12544   CALLEE (empty if nothing is known).  See tree-ssa-structalias.cc:
12545   find_func_aliases for the list of builtins we might need to handle here. */
12546
12547attr_fnspec
12548builtin_fnspec (tree callee)
12549{
12550  built_in_function code = DECL_FUNCTION_CODE (callee);
12551
12552 switch (code)
12553 {
12554 /* All the following functions read memory pointed to by
12555 their second argument and write memory pointed to by first
12556 argument.
12557 strcat/strncat additionally reads memory pointed to by the first
12558 argument. */
12559 case BUILT_IN_STRCAT:
12560 case BUILT_IN_STRCAT_CHK:
12561 return "1cW 1 ";
12562 case BUILT_IN_STRNCAT:
12563 case BUILT_IN_STRNCAT_CHK:
12564 return "1cW 13";
12565 case BUILT_IN_STRCPY:
12566 case BUILT_IN_STRCPY_CHK:
12567 return "1cO 1 ";
12568 case BUILT_IN_STPCPY:
12569 case BUILT_IN_STPCPY_CHK:
12570 return ".cO 1 ";
12571 case BUILT_IN_STRNCPY:
12572 case BUILT_IN_MEMCPY:
12573 case BUILT_IN_MEMMOVE:
12574 case BUILT_IN_TM_MEMCPY:
12575 case BUILT_IN_TM_MEMMOVE:
12576 case BUILT_IN_STRNCPY_CHK:
12577 case BUILT_IN_MEMCPY_CHK:
12578 case BUILT_IN_MEMMOVE_CHK:
12579 return "1cO313";
12580 case BUILT_IN_MEMPCPY:
12581 case BUILT_IN_MEMPCPY_CHK:
12582 return ".cO313";
12583 case BUILT_IN_STPNCPY:
12584 case BUILT_IN_STPNCPY_CHK:
12585 return ".cO313";
12586 case BUILT_IN_BCOPY:
12587 return ".c23O3";
12588 case BUILT_IN_BZERO:
12589 return ".cO2";
12590 case BUILT_IN_MEMCMP:
12591 case BUILT_IN_MEMCMP_EQ:
12592 case BUILT_IN_BCMP:
12593 case BUILT_IN_STRNCMP:
12594 case BUILT_IN_STRNCMP_EQ:
12595 case BUILT_IN_STRNCASECMP:
12596 return ".cR3R3";
12597
12598 /* The following functions read memory pointed to by their
12599 first argument. */
12600 CASE_BUILT_IN_TM_LOAD (1):
12601 CASE_BUILT_IN_TM_LOAD (2):
12602 CASE_BUILT_IN_TM_LOAD (4):
12603 CASE_BUILT_IN_TM_LOAD (8):
12604 CASE_BUILT_IN_TM_LOAD (FLOAT):
12605 CASE_BUILT_IN_TM_LOAD (DOUBLE):
12606 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
12607 CASE_BUILT_IN_TM_LOAD (M64):
12608 CASE_BUILT_IN_TM_LOAD (M128):
12609 CASE_BUILT_IN_TM_LOAD (M256):
12610 case BUILT_IN_TM_LOG:
12611 case BUILT_IN_TM_LOG_1:
12612 case BUILT_IN_TM_LOG_2:
12613 case BUILT_IN_TM_LOG_4:
12614 case BUILT_IN_TM_LOG_8:
12615 case BUILT_IN_TM_LOG_FLOAT:
12616 case BUILT_IN_TM_LOG_DOUBLE:
12617 case BUILT_IN_TM_LOG_LDOUBLE:
12618 case BUILT_IN_TM_LOG_M64:
12619 case BUILT_IN_TM_LOG_M128:
12620 case BUILT_IN_TM_LOG_M256:
12621 return ".cR ";
12622
12623 case BUILT_IN_INDEX:
12624 case BUILT_IN_RINDEX:
12625 case BUILT_IN_STRCHR:
12626 case BUILT_IN_STRLEN:
12627 case BUILT_IN_STRRCHR:
12628 return ".cR ";
12629 case BUILT_IN_STRNLEN:
12630 return ".cR2";
12631
12632 /* These read memory pointed to by the first argument.
12633 Allocating memory does not have any side-effects apart from
12634 being the definition point for the pointer.
12635 Unix98 specifies that errno is set on allocation failure. */
12636 case BUILT_IN_STRDUP:
12637 return "mCR ";
12638 case BUILT_IN_STRNDUP:
12639 return "mCR2";
12640 /* Allocating memory does not have any side-effects apart from
12641 being the definition point for the pointer. */
12642 case BUILT_IN_MALLOC:
12643 case BUILT_IN_ALIGNED_ALLOC:
12644 case BUILT_IN_CALLOC:
12645 case BUILT_IN_GOMP_ALLOC:
12646 return "mC";
12647 CASE_BUILT_IN_ALLOCA:
12648 return "mc";
12649 /* These read memory pointed to by the first argument with size
12650 in the third argument. */
12651 case BUILT_IN_MEMCHR:
12652 return ".cR3";
12653 /* These read memory pointed to by the first and second arguments. */
12654 case BUILT_IN_STRSTR:
12655 case BUILT_IN_STRPBRK:
12656 case BUILT_IN_STRCASECMP:
12657 case BUILT_IN_STRCSPN:
12658 case BUILT_IN_STRSPN:
12659 case BUILT_IN_STRCMP:
12660 case BUILT_IN_STRCMP_EQ:
12661 return ".cR R ";
12662 /* Freeing memory kills the pointed-to memory. More importantly
12663 the call has to serve as a barrier for moving loads and stores
12664 across it. */
12665 case BUILT_IN_STACK_RESTORE:
12666 case BUILT_IN_FREE:
12667 case BUILT_IN_GOMP_FREE:
12668 return ".co ";
12669 case BUILT_IN_VA_END:
12670 return ".cO ";
12671 /* Realloc serves both as allocation point and deallocation point. */
12672 case BUILT_IN_REALLOC:
12673 case BUILT_IN_GOMP_REALLOC:
12674 return ".Cw ";
12675 case BUILT_IN_GAMMA_R:
12676 case BUILT_IN_GAMMAF_R:
12677 case BUILT_IN_GAMMAL_R:
12678 case BUILT_IN_LGAMMA_R:
12679 case BUILT_IN_LGAMMAF_R:
12680 case BUILT_IN_LGAMMAL_R:
12681 return ".C. Ot";
12682 case BUILT_IN_FREXP:
12683 case BUILT_IN_FREXPF:
12684 case BUILT_IN_FREXPL:
12685 case BUILT_IN_MODF:
12686 case BUILT_IN_MODFF:
12687 case BUILT_IN_MODFL:
12688 return ".c. Ot";
12689 case BUILT_IN_REMQUO:
12690 case BUILT_IN_REMQUOF:
12691 case BUILT_IN_REMQUOL:
12692 return ".c. . Ot";
12693 case BUILT_IN_SINCOS:
12694 case BUILT_IN_SINCOSF:
12695 case BUILT_IN_SINCOSL:
12696 return ".c. OtOt";
12697 case BUILT_IN_MEMSET:
12698 case BUILT_IN_MEMSET_CHK:
12699 case BUILT_IN_TM_MEMSET:
12700 return "1cO3";
12701 CASE_BUILT_IN_TM_STORE (1):
12702 CASE_BUILT_IN_TM_STORE (2):
12703 CASE_BUILT_IN_TM_STORE (4):
12704 CASE_BUILT_IN_TM_STORE (8):
12705 CASE_BUILT_IN_TM_STORE (FLOAT):
12706 CASE_BUILT_IN_TM_STORE (DOUBLE):
12707 CASE_BUILT_IN_TM_STORE (LDOUBLE):
12708 CASE_BUILT_IN_TM_STORE (M64):
12709 CASE_BUILT_IN_TM_STORE (M128):
12710 CASE_BUILT_IN_TM_STORE (M256):
12711 return ".cO ";
12712 case BUILT_IN_STACK_SAVE:
12713 case BUILT_IN_RETURN:
12714 case BUILT_IN_EH_POINTER:
12715 case BUILT_IN_EH_FILTER:
12716 case BUILT_IN_UNWIND_RESUME:
12717 case BUILT_IN_CXA_END_CLEANUP:
12718 case BUILT_IN_EH_COPY_VALUES:
12719 case BUILT_IN_FRAME_ADDRESS:
12720 case BUILT_IN_APPLY_ARGS:
12721 case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
12722 case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
12723 case BUILT_IN_PREFETCH:
12724 case BUILT_IN_DWARF_CFA:
12725 case BUILT_IN_RETURN_ADDRESS:
12726 return ".c";
12727 case BUILT_IN_ASSUME_ALIGNED:
12728 case BUILT_IN_EXPECT:
12729 case BUILT_IN_EXPECT_WITH_PROBABILITY:
12730 return "1cX ";
12731 /* But posix_memalign stores a pointer into the memory pointed to
12732 by its first argument. */
12733 case BUILT_IN_POSIX_MEMALIGN:
12734 return ".cOt";
12735 case BUILT_IN_OMP_GET_MAPPED_PTR:
12736 return ". R ";
12737
12738 default:
12739 return "";
12740 }
12741}
12742
