/* Subroutines shared by all languages that are variants of C.
   Copyright (C) 1992-2024 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#define GCC_C_COMMON_C

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "function.h"
#include "tree.h"
#include "memmodel.h"
#include "c-common.h"
#include "gimple-expr.h"
#include "tm_p.h"
#include "stringpool.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "intl.h"
#include "stor-layout.h"
#include "calls.h"
#include "attribs.h"
#include "varasm.h"
#include "trans-mem.h"
#include "c-objc.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "tree-inline.h"
#include "toplev.h"
#include "tree-iterator.h"
#include "opts.h"
#include "gimplify.h"
#include "substring-locations.h"
#include "spellcheck.h"
#include "c-spellcheck.h"
#include "selftest.h"
#include "debug.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"

cpp_reader *parse_in;  /* Declared in c-pragma.h.  */

/* Mode used to build pointers (VOIDmode means ptr_mode).  */

machine_mode c_default_pointer_mode = VOIDmode;

/* The following symbols are subsumed in the c_global_trees array, and
   listed here individually for documentation purposes.

   INTEGER_TYPE and REAL_TYPE nodes for the standard data types.

        tree short_integer_type_node;
        tree long_integer_type_node;
        tree long_long_integer_type_node;

        tree short_unsigned_type_node;
        tree long_unsigned_type_node;
        tree long_long_unsigned_type_node;

        tree truthvalue_type_node;
        tree truthvalue_false_node;
        tree truthvalue_true_node;

        tree ptrdiff_type_node;

        tree unsigned_char_type_node;
        tree signed_char_type_node;
        tree wchar_type_node;

        tree char8_type_node;
        tree char16_type_node;
        tree char32_type_node;

        tree float_type_node;
        tree double_type_node;
        tree long_double_type_node;

        tree complex_integer_type_node;
        tree complex_float_type_node;
        tree complex_double_type_node;
        tree complex_long_double_type_node;

        tree dfloat32_type_node;
        tree dfloat64_type_node;
        tree dfloat128_type_node;

        tree intQI_type_node;
        tree intHI_type_node;
        tree intSI_type_node;
        tree intDI_type_node;
        tree intTI_type_node;

        tree unsigned_intQI_type_node;
        tree unsigned_intHI_type_node;
        tree unsigned_intSI_type_node;
        tree unsigned_intDI_type_node;
        tree unsigned_intTI_type_node;

        tree widest_integer_literal_type_node;
        tree widest_unsigned_literal_type_node;

   Nodes for types `void *' and `const void *'.

        tree ptr_type_node, const_ptr_type_node;

   Nodes for types `char *' and `const char *'.

        tree string_type_node, const_string_type_node;

   Type `char[SOMENUMBER]'.
   Used when an array of char is needed and the size is irrelevant.

        tree char_array_type_node;

   Type `wchar_t[SOMENUMBER]' or something like it.
   Used when a wide string literal is created.

        tree wchar_array_type_node;

   Type `char8_t[SOMENUMBER]' or something like it.
   Used when a UTF-8 string literal is created.

        tree char8_array_type_node;

   Type `char16_t[SOMENUMBER]' or something like it.
   Used when a UTF-16 string literal is created.

        tree char16_array_type_node;

   Type `char32_t[SOMENUMBER]' or something like it.
   Used when a UTF-32 string literal is created.

        tree char32_array_type_node;

   Type `int ()' -- used for implicit declaration of functions.

        tree default_function_type;

   A VOID_TYPE node, packaged in a TREE_LIST.

        tree void_list_node;

   The lazily created VAR_DECLs for __FUNCTION__, __PRETTY_FUNCTION__,
   and __func__.  (C doesn't generate __FUNCTION__ and __PRETTY_FUNCTION__
   VAR_DECLS, but C++ does.)

        tree function_name_decl_node;
        tree pretty_function_name_decl_node;
        tree c99_function_name_decl_node;

   Stack of nested function name VAR_DECLs.

        tree saved_function_name_decls;

*/

tree c_global_trees[CTI_MAX];

/* Switches common to the C front ends.  */

/* Nonzero means don't output line number information.  */

char flag_no_line_commands;

/* Nonzero causes -E output not to be done, but directives such as
   #define that have side effects are still obeyed.  */

char flag_no_output;

/* Nonzero means dump macros in some fashion.  */

char flag_dump_macros;

/* Nonzero means pass #include lines through to the output.  */

char flag_dump_includes;

/* Nonzero means process PCH files while preprocessing.  */

bool flag_pch_preprocess;

/* The file name to which we should write a precompiled header, or
   NULL if no header will be written in this compile.  */

const char *pch_file;

/* Nonzero if an ISO standard was selected.  It rejects macros in the
   user's namespace.  */
int flag_iso;

/* C/ObjC language option variables.  */


/* Nonzero means allow type mismatches in conditional expressions;
   just make their values `void'.  */

int flag_cond_mismatch;

/* Nonzero means enable C89 Amendment 1 features.  */

int flag_isoc94;

/* Nonzero means use the ISO C99 (or C11) dialect of C.  */

int flag_isoc99;

/* Nonzero means use the ISO C11 dialect of C.  */

int flag_isoc11;

/* Nonzero means use the ISO C23 dialect of C.  */

int flag_isoc23;

/* Nonzero means that we have builtin functions, and main is an int.  */

int flag_hosted = 1;


/* ObjC language option variables.  */


/* Tells the compiler that this is a special run.  Do not perform any
   compiling, instead we are to test some platform dependent features
   and output a C header file with appropriate definitions.  */

int print_struct_values;

/* Tells the compiler what is the constant string class for ObjC.  */

const char *constant_string_class_name;


/* C++ language option variables.  */

/* The reference version of the ABI for -Wabi.  */

int warn_abi_version = -1;

/* The C++ dialect being used.  Default set in c_common_post_options.  */

enum cxx_dialect cxx_dialect = cxx_unset;

/* Maximum template instantiation depth.  This limit exists to limit the
   time it takes to notice excessively recursive template instantiations.

   The default is lower than the 1024 recommended by the C++0x standard
   because G++ runs out of stack before 1024 with highly recursive template
   argument deduction substitution (g++.dg/cpp0x/enum11.C).  */

int max_tinst_depth = 900;

/* The elements of `ridpointers' are identifier nodes for the reserved
   type names and storage classes.  It is indexed by a RID_... value.  */
tree *ridpointers;

tree (*make_fname_decl) (location_t, tree, int);

/* Nonzero means don't warn about problems that occur when the code is
   executed.  */
int c_inhibit_evaluation_warnings;

/* Whether we are building a boolean conversion inside
   convert_for_assignment, or some other late binary operation.  If
   build_binary_op is called for C (from code shared by C and C++) in
   this case, then the operands have already been folded and the
   result will not be folded again, so C_MAYBE_CONST_EXPR should not
   be generated.  */
bool in_late_binary_op;

/* Depending on which phase of processing we are in, we may need
   to prefer input_location to libcpp's locations.  (Specifically,
   after the C++ lexer is done lexing tokens, but prior to calling
   cpp_finish (), we need to do so.)  */
bool override_libcpp_locations;

/* Information about how a function name is generated.  */
struct fname_var_t
{
  tree *const decl;     /* pointer to the VAR_DECL.  */
  const unsigned rid;   /* RID number for the identifier.  */
  const int pretty;     /* How pretty is it? */
};

/* The three ways of getting the name of the current function.  */

const struct fname_var_t fname_vars[] =
{
  /* C99 compliant __func__, must be first.  */
  {&c99_function_name_decl_node, RID_C99_FUNCTION_NAME, 0},
  /* GCC __FUNCTION__ compliant.  */
  {&function_name_decl_node, RID_FUNCTION_NAME, 0},
  /* GCC __PRETTY_FUNCTION__ compliant.  */
  {&pretty_function_name_decl_node, RID_PRETTY_FUNCTION_NAME, 1},
  {NULL, 0, 0},
};

/* Flags to restrict availability of generic features that
   are known to __has_{feature,extension}.  */

enum
{
  HF_FLAG_NONE = 0,
  HF_FLAG_EXT = 1,      /* Available only as an extension.  */
  HF_FLAG_SANITIZE = 2, /* Availability depends on sanitizer flags.  */
};

/* Info for generic features which can be queried through
   __has_{feature,extension}.  */

struct hf_feature_info
{
  const char *ident;
  unsigned flags;
  unsigned mask;
};

/* Table of generic features which can be queried through
   __has_{feature,extension}.  */

static constexpr hf_feature_info has_feature_table[] =
{
  { "address_sanitizer", HF_FLAG_SANITIZE, SANITIZE_ADDRESS },
  { "thread_sanitizer", HF_FLAG_SANITIZE, SANITIZE_THREAD },
  { "leak_sanitizer", HF_FLAG_SANITIZE, SANITIZE_LEAK },
  { "hwaddress_sanitizer", HF_FLAG_SANITIZE, SANITIZE_HWADDRESS },
  { "undefined_behavior_sanitizer", HF_FLAG_SANITIZE, SANITIZE_UNDEFINED },
  { "attribute_deprecated_with_message", HF_FLAG_NONE, 0 },
  { "attribute_unavailable_with_message", HF_FLAG_NONE, 0 },
  { "enumerator_attributes", HF_FLAG_NONE, 0 },
  { "tls", HF_FLAG_NONE, 0 },
  { "gnu_asm_goto_with_outputs", HF_FLAG_EXT, 0 },
  { "gnu_asm_goto_with_outputs_full", HF_FLAG_EXT, 0 }
};
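
/* Illustration of the table above (not an exhaustive statement of the
   query machinery): an entry marked HF_FLAG_SANITIZE is reported as
   available only when the corresponding sanitizer bit is active, so
   __has_feature (address_sanitizer) is expected to yield 1 only when
   compiling with -fsanitize=address, while entries marked HF_FLAG_NONE,
   such as "tls", are reported unconditionally.  */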

/* Global visibility options.  */
struct visibility_flags visibility_options;

static tree check_case_value (location_t, tree);


static void check_nonnull_arg (void *, tree, unsigned HOST_WIDE_INT);
static bool nonnull_check_p (tree, unsigned HOST_WIDE_INT);

/* Reserved words.  The third field is a mask: keywords are disabled
   if they match the mask.

   Masks for languages:
   C --std=c89: D_C99 | D_C23 | D_CXXONLY | D_OBJC | D_CXX_OBJC
   C --std=c99: D_C23 | D_CXXONLY | D_OBJC
   C --std=c17: D_C23 | D_CXXONLY | D_OBJC
   C --std=c23: D_CXXONLY | D_OBJC
   ObjC is like C except that D_OBJC and D_CXX_OBJC are not set
   C++ --std=c++98: D_CONLY | D_CXX11 | D_CXX20 | D_OBJC
   C++ --std=c++11: D_CONLY | D_CXX20 | D_OBJC
   C++ --std=c++20: D_CONLY | D_OBJC
   ObjC++ is like C++ except that D_OBJC is not set

   If -fno-asm is used, D_ASM is added to the mask.  If
   -fno-gnu-keywords is used, D_EXT is added.  If -fno-asm and C in
   C89 mode, D_EXT89 is added for both -fno-asm and -fno-gnu-keywords.
   In C with -Wc++-compat, we warn if D_CXXWARN is set.

   Note the complication of the D_CXX_OBJC keywords.  These are
   reserved words such as 'class'.  In C++, 'class' is a reserved
   word.  In Objective-C++ it is too.  In Objective-C, it is a
   reserved word too, but only if it follows an '@' sign.
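
   For example, with --std=c99 the mask is D_C23 | D_CXXONLY | D_OBJC,
   so an entry whose third field includes D_C23 (such as "nullptr"
   below) is not recognized as a keyword; with --std=c23 that bit is
   no longer in the mask and the keyword is recognized.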
*/
const struct c_common_resword c_common_reswords[] =
{
  { "_Alignas", RID_ALIGNAS, D_CONLY },
  { "_Alignof", RID_ALIGNOF, D_CONLY },
  { "_Atomic", RID_ATOMIC, D_CONLY },
  { "_BitInt", RID_BITINT, D_CONLY },
  { "_Bool", RID_BOOL, D_CONLY },
  { "_Complex", RID_COMPLEX, 0 },
  { "_Imaginary", RID_IMAGINARY, D_CONLY },
  { "_Float16", RID_FLOAT16, 0 },
  { "_Float32", RID_FLOAT32, 0 },
  { "_Float64", RID_FLOAT64, 0 },
  { "_Float128", RID_FLOAT128, 0 },
  { "_Float32x", RID_FLOAT32X, 0 },
  { "_Float64x", RID_FLOAT64X, 0 },
  { "_Float128x", RID_FLOAT128X, 0 },
  { "_Decimal32", RID_DFLOAT32, D_CONLY },
  { "_Decimal64", RID_DFLOAT64, D_CONLY },
  { "_Decimal128", RID_DFLOAT128, D_CONLY },
  { "_Fract", RID_FRACT, D_CONLY | D_EXT },
  { "_Accum", RID_ACCUM, D_CONLY | D_EXT },
  { "_Sat", RID_SAT, D_CONLY | D_EXT },
  { "_Static_assert", RID_STATIC_ASSERT, D_CONLY },
  { "_Noreturn", RID_NORETURN, D_CONLY },
  { "_Generic", RID_GENERIC, D_CONLY },
  { "_Thread_local", RID_THREAD, D_CONLY },
  { "__FUNCTION__", RID_FUNCTION_NAME, 0 },
  { "__PRETTY_FUNCTION__", RID_PRETTY_FUNCTION_NAME, 0 },
  { "__alignof", RID_ALIGNOF, 0 },
  { "__alignof__", RID_ALIGNOF, 0 },
  { "__asm", RID_ASM, 0 },
  { "__asm__", RID_ASM, 0 },
  { "__attribute", RID_ATTRIBUTE, 0 },
  { "__attribute__", RID_ATTRIBUTE, 0 },
  { "__auto_type", RID_AUTO_TYPE, D_CONLY },
  { "__builtin_addressof", RID_ADDRESSOF, D_CXXONLY },
  { "__builtin_assoc_barrier", RID_BUILTIN_ASSOC_BARRIER, 0 },
  { "__builtin_bit_cast", RID_BUILTIN_BIT_CAST, D_CXXONLY },
  { "__builtin_call_with_static_chain",
    RID_BUILTIN_CALL_WITH_STATIC_CHAIN, D_CONLY },
  { "__builtin_choose_expr", RID_CHOOSE_EXPR, D_CONLY },
  { "__builtin_complex", RID_BUILTIN_COMPLEX, D_CONLY },
  { "__builtin_convertvector", RID_BUILTIN_CONVERTVECTOR, 0 },
  { "__builtin_has_attribute", RID_BUILTIN_HAS_ATTRIBUTE, 0 },
  { "__builtin_launder", RID_BUILTIN_LAUNDER, D_CXXONLY },
  { "__builtin_shuffle", RID_BUILTIN_SHUFFLE, 0 },
  { "__builtin_shufflevector", RID_BUILTIN_SHUFFLEVECTOR, 0 },
  { "__builtin_stdc_bit_ceil", RID_BUILTIN_STDC, D_CONLY },
  { "__builtin_stdc_bit_floor", RID_BUILTIN_STDC, D_CONLY },
  { "__builtin_stdc_bit_width", RID_BUILTIN_STDC, D_CONLY },
  { "__builtin_stdc_count_ones", RID_BUILTIN_STDC, D_CONLY },
  { "__builtin_stdc_count_zeros", RID_BUILTIN_STDC, D_CONLY },
  { "__builtin_stdc_first_leading_one", RID_BUILTIN_STDC, D_CONLY },
  { "__builtin_stdc_first_leading_zero", RID_BUILTIN_STDC, D_CONLY },
  { "__builtin_stdc_first_trailing_one", RID_BUILTIN_STDC, D_CONLY },
  { "__builtin_stdc_first_trailing_zero", RID_BUILTIN_STDC, D_CONLY },
  { "__builtin_stdc_has_single_bit", RID_BUILTIN_STDC, D_CONLY },
  { "__builtin_stdc_leading_ones", RID_BUILTIN_STDC, D_CONLY },
  { "__builtin_stdc_leading_zeros", RID_BUILTIN_STDC, D_CONLY },
  { "__builtin_stdc_trailing_ones", RID_BUILTIN_STDC, D_CONLY },
  { "__builtin_stdc_trailing_zeros", RID_BUILTIN_STDC, D_CONLY },
  { "__builtin_tgmath", RID_BUILTIN_TGMATH, D_CONLY },
  { "__builtin_offsetof", RID_OFFSETOF, 0 },
  { "__builtin_types_compatible_p", RID_TYPES_COMPATIBLE_P, D_CONLY },
  { "__builtin_va_arg", RID_VA_ARG, 0 },
  { "__complex", RID_COMPLEX, 0 },
  { "__complex__", RID_COMPLEX, 0 },
  { "__const", RID_CONST, 0 },
  { "__const__", RID_CONST, 0 },
  { "__constinit", RID_CONSTINIT, D_CXXONLY },
  { "__decltype", RID_DECLTYPE, D_CXXONLY },
  { "__extension__", RID_EXTENSION, 0 },
  { "__func__", RID_C99_FUNCTION_NAME, 0 },
  { "__imag", RID_IMAGPART, 0 },
  { "__imag__", RID_IMAGPART, 0 },
  { "__inline", RID_INLINE, 0 },
  { "__inline__", RID_INLINE, 0 },
  { "__label__", RID_LABEL, 0 },
  { "__null", RID_NULL, 0 },
  { "__real", RID_REALPART, 0 },
  { "__real__", RID_REALPART, 0 },
  { "__restrict", RID_RESTRICT, 0 },
  { "__restrict__", RID_RESTRICT, 0 },
  { "__signed", RID_SIGNED, 0 },
  { "__signed__", RID_SIGNED, 0 },
  { "__thread", RID_THREAD, 0 },
  { "__transaction_atomic", RID_TRANSACTION_ATOMIC, 0 },
  { "__transaction_relaxed", RID_TRANSACTION_RELAXED, 0 },
  { "__transaction_cancel", RID_TRANSACTION_CANCEL, 0 },
  { "__typeof", RID_TYPEOF, 0 },
  { "__typeof__", RID_TYPEOF, 0 },
  { "__typeof_unqual", RID_TYPEOF_UNQUAL, D_CONLY },
  { "__typeof_unqual__", RID_TYPEOF_UNQUAL, D_CONLY },
  { "__volatile", RID_VOLATILE, 0 },
  { "__volatile__", RID_VOLATILE, 0 },
  { "__GIMPLE", RID_GIMPLE, D_CONLY },
  { "__PHI", RID_PHI, D_CONLY },
  { "__RTL", RID_RTL, D_CONLY },
  { "alignas", RID_ALIGNAS, D_C23 | D_CXX11 | D_CXXWARN },
  { "alignof", RID_ALIGNOF, D_C23 | D_CXX11 | D_CXXWARN },
  { "asm", RID_ASM, D_ASM },
  { "auto", RID_AUTO, 0 },
  { "bool", RID_BOOL, D_C23 | D_CXXWARN },
  { "break", RID_BREAK, 0 },
  { "case", RID_CASE, 0 },
  { "catch", RID_CATCH, D_CXX_OBJC | D_CXXWARN },
  { "char", RID_CHAR, 0 },
  { "char8_t", RID_CHAR8, D_CXX_CHAR8_T_FLAGS | D_CXXWARN },
  { "char16_t", RID_CHAR16, D_CXXONLY | D_CXX11 | D_CXXWARN },
  { "char32_t", RID_CHAR32, D_CXXONLY | D_CXX11 | D_CXXWARN },
  { "class", RID_CLASS, D_CXX_OBJC | D_CXXWARN },
  { "const", RID_CONST, 0 },
  { "consteval", RID_CONSTEVAL, D_CXXONLY | D_CXX20 | D_CXXWARN },
  { "constexpr", RID_CONSTEXPR, D_C23 | D_CXX11 | D_CXXWARN },
  { "constinit", RID_CONSTINIT, D_CXXONLY | D_CXX20 | D_CXXWARN },
  { "const_cast", RID_CONSTCAST, D_CXXONLY | D_CXXWARN },
  { "continue", RID_CONTINUE, 0 },
  { "decltype", RID_DECLTYPE, D_CXXONLY | D_CXX11 | D_CXXWARN },
  { "default", RID_DEFAULT, 0 },
  { "delete", RID_DELETE, D_CXXONLY | D_CXXWARN },
  { "do", RID_DO, 0 },
  { "double", RID_DOUBLE, 0 },
  { "dynamic_cast", RID_DYNCAST, D_CXXONLY | D_CXXWARN },
  { "else", RID_ELSE, 0 },
  { "enum", RID_ENUM, 0 },
  { "explicit", RID_EXPLICIT, D_CXXONLY | D_CXXWARN },
  { "export", RID_EXPORT, D_CXXONLY | D_CXXWARN },
  { "extern", RID_EXTERN, 0 },
  { "false", RID_FALSE, D_C23 | D_CXXWARN },
  { "float", RID_FLOAT, 0 },
  { "for", RID_FOR, 0 },
  { "friend", RID_FRIEND, D_CXXONLY | D_CXXWARN },
  { "goto", RID_GOTO, 0 },
  { "if", RID_IF, 0 },
  { "inline", RID_INLINE, D_EXT89 },
  { "int", RID_INT, 0 },
  { "long", RID_LONG, 0 },
  { "mutable", RID_MUTABLE, D_CXXONLY | D_CXXWARN },
  { "namespace", RID_NAMESPACE, D_CXXONLY | D_CXXWARN },
  { "new", RID_NEW, D_CXXONLY | D_CXXWARN },
  { "noexcept", RID_NOEXCEPT, D_CXXONLY | D_CXX11 | D_CXXWARN },
  { "nullptr", RID_NULLPTR, D_C23 | D_CXX11 | D_CXXWARN },
  { "operator", RID_OPERATOR, D_CXXONLY | D_CXXWARN },
  { "private", RID_PRIVATE, D_CXX_OBJC | D_CXXWARN },
  { "protected", RID_PROTECTED, D_CXX_OBJC | D_CXXWARN },
  { "public", RID_PUBLIC, D_CXX_OBJC | D_CXXWARN },
  { "register", RID_REGISTER, 0 },
  { "reinterpret_cast", RID_REINTCAST, D_CXXONLY | D_CXXWARN },
  { "restrict", RID_RESTRICT, D_CONLY | D_C99 },
  { "return", RID_RETURN, 0 },
  { "short", RID_SHORT, 0 },
  { "signed", RID_SIGNED, 0 },
  { "sizeof", RID_SIZEOF, 0 },
  { "static", RID_STATIC, 0 },
  { "static_assert", RID_STATIC_ASSERT, D_C23 | D_CXX11 | D_CXXWARN },
  { "static_cast", RID_STATCAST, D_CXXONLY | D_CXXWARN },
  { "struct", RID_STRUCT, 0 },
  { "switch", RID_SWITCH, 0 },
  { "template", RID_TEMPLATE, D_CXXONLY | D_CXXWARN },
  { "this", RID_THIS, D_CXXONLY | D_CXXWARN },
  { "thread_local", RID_THREAD, D_C23 | D_CXX11 | D_CXXWARN },
  { "throw", RID_THROW, D_CXX_OBJC | D_CXXWARN },
  { "true", RID_TRUE, D_C23 | D_CXXWARN },
  { "try", RID_TRY, D_CXX_OBJC | D_CXXWARN },
  { "typedef", RID_TYPEDEF, 0 },
  { "typename", RID_TYPENAME, D_CXXONLY | D_CXXWARN },
  { "typeid", RID_TYPEID, D_CXXONLY | D_CXXWARN },
  { "typeof", RID_TYPEOF, D_EXT11 },
  { "typeof_unqual", RID_TYPEOF_UNQUAL, D_CONLY | D_C23 },
  { "union", RID_UNION, 0 },
  { "unsigned", RID_UNSIGNED, 0 },
  { "using", RID_USING, D_CXXONLY | D_CXXWARN },
  { "virtual", RID_VIRTUAL, D_CXXONLY | D_CXXWARN },
  { "void", RID_VOID, 0 },
  { "volatile", RID_VOLATILE, 0 },
  { "wchar_t", RID_WCHAR, D_CXXONLY },
  { "while", RID_WHILE, 0 },

  /* C++ transactional memory.  */
  { "synchronized", RID_SYNCHRONIZED, D_CXX_OBJC | D_TRANSMEM },
  { "atomic_noexcept", RID_ATOMIC_NOEXCEPT, D_CXXONLY | D_TRANSMEM },
  { "atomic_cancel", RID_ATOMIC_CANCEL, D_CXXONLY | D_TRANSMEM },
  { "atomic_commit", RID_TRANSACTION_ATOMIC, D_CXXONLY | D_TRANSMEM },

  /* Concepts-related keywords */
  { "concept", RID_CONCEPT, D_CXX_CONCEPTS_FLAGS | D_CXXWARN },
  { "requires", RID_REQUIRES, D_CXX_CONCEPTS_FLAGS | D_CXXWARN },

  /* Modules-related keywords, these are internal unspellable tokens,
     created by the preprocessor.  */
  { "module ", RID__MODULE, D_CXX_MODULES_FLAGS | D_CXXWARN },
  { "import ", RID__IMPORT, D_CXX_MODULES_FLAGS | D_CXXWARN },
  { "export ", RID__EXPORT, D_CXX_MODULES_FLAGS | D_CXXWARN },

  /* Coroutines-related keywords */
  { "co_await", RID_CO_AWAIT, D_CXX_COROUTINES_FLAGS | D_CXXWARN },
  { "co_yield", RID_CO_YIELD, D_CXX_COROUTINES_FLAGS | D_CXXWARN },
  { "co_return", RID_CO_RETURN, D_CXX_COROUTINES_FLAGS | D_CXXWARN },

  /* These Objective-C keywords are recognized only immediately after
     an '@'.  */
  { "compatibility_alias", RID_AT_ALIAS, D_OBJC },
  { "defs", RID_AT_DEFS, D_OBJC },
  { "encode", RID_AT_ENCODE, D_OBJC },
  { "end", RID_AT_END, D_OBJC },
  { "implementation", RID_AT_IMPLEMENTATION, D_OBJC },
  { "interface", RID_AT_INTERFACE, D_OBJC },
  { "protocol", RID_AT_PROTOCOL, D_OBJC },
  { "selector", RID_AT_SELECTOR, D_OBJC },
  { "finally", RID_AT_FINALLY, D_OBJC },
  { "optional", RID_AT_OPTIONAL, D_OBJC },
  { "required", RID_AT_REQUIRED, D_OBJC },
  { "property", RID_AT_PROPERTY, D_OBJC },
  { "package", RID_AT_PACKAGE, D_OBJC },
  { "synthesize", RID_AT_SYNTHESIZE, D_OBJC },
  { "dynamic", RID_AT_DYNAMIC, D_OBJC },
  /* These are recognized only in protocol-qualifier context
     (see above) */
  { "bycopy", RID_BYCOPY, D_OBJC },
  { "byref", RID_BYREF, D_OBJC },
  { "in", RID_IN, D_OBJC },
  { "inout", RID_INOUT, D_OBJC },
  { "oneway", RID_ONEWAY, D_OBJC },
  { "out", RID_OUT, D_OBJC },
  /* These are recognized inside a property attribute list */
  { "assign", RID_ASSIGN, D_OBJC },
  { "atomic", RID_PROPATOMIC, D_OBJC },
  { "copy", RID_COPY, D_OBJC },
  { "getter", RID_GETTER, D_OBJC },
  { "nonatomic", RID_NONATOMIC, D_OBJC },
  { "readonly", RID_READONLY, D_OBJC },
  { "readwrite", RID_READWRITE, D_OBJC },
  { "retain", RID_RETAIN, D_OBJC },
  { "setter", RID_SETTER, D_OBJC },
  /* These are Objective C implementation of nullability, accepted only in
     specific contexts.  */
  { "null_unspecified", RID_NULL_UNSPECIFIED, D_OBJC },
  { "nullable", RID_NULLABLE, D_OBJC },
  { "nonnull", RID_NONNULL, D_OBJC },
  { "null_resettable", RID_NULL_RESETTABLE, D_OBJC },
};

const unsigned int num_c_common_reswords = ARRAY_SIZE (c_common_reswords);

/* Return identifier for address space AS.  */

const char *
c_addr_space_name (addr_space_t as)
{
  int rid = RID_FIRST_ADDR_SPACE + as;
  gcc_assert (ridpointers [rid]);
  return IDENTIFIER_POINTER (ridpointers [rid]);
}

/* Push current bindings for the function name VAR_DECLS.  */

void
start_fname_decls (void)
{
  unsigned ix;
  tree saved = NULL_TREE;

  for (ix = 0; fname_vars[ix].decl; ix++)
    {
      tree decl = *fname_vars[ix].decl;

      if (decl)
        {
          saved = tree_cons (decl, build_int_cst (integer_type_node, ix),
                             saved);
          *fname_vars[ix].decl = NULL_TREE;
        }
    }
  if (saved || saved_function_name_decls)
    /* Normally they'll have been NULL, so only push if we've got a
       stack, or they are non-NULL.  */
    saved_function_name_decls = tree_cons (saved, NULL_TREE,
                                           saved_function_name_decls);
}

/* Finish up the current bindings, adding them into the current function's
   statement tree.  This must be done _before_ finish_stmt_tree is called.
   If there is no current function, we must be at file scope and no statements
   are involved.  Pop the previous bindings.  */

void
finish_fname_decls (void)
{
  unsigned ix;
  tree stmts = NULL_TREE;
  tree stack = saved_function_name_decls;

  for (; stack && TREE_VALUE (stack); stack = TREE_CHAIN (stack))
    append_to_statement_list (TREE_VALUE (stack), &stmts);

  if (stmts)
    {
      tree *bodyp = &DECL_SAVED_TREE (current_function_decl);

      if (TREE_CODE (*bodyp) == BIND_EXPR)
        bodyp = &BIND_EXPR_BODY (*bodyp);

      append_to_statement_list_force (*bodyp, &stmts);
      *bodyp = stmts;
    }

  for (ix = 0; fname_vars[ix].decl; ix++)
    *fname_vars[ix].decl = NULL_TREE;

  if (stack)
    {
      /* We had saved values, restore them.  */
      tree saved;

      for (saved = TREE_PURPOSE (stack); saved; saved = TREE_CHAIN (saved))
        {
          tree decl = TREE_PURPOSE (saved);
          unsigned ix = TREE_INT_CST_LOW (TREE_VALUE (saved));

          *fname_vars[ix].decl = decl;
        }
      stack = TREE_CHAIN (stack);
    }
  saved_function_name_decls = stack;
}

/* Return the text name of the current function, suitably prettified
   by PRETTY_P.  Return string must be freed by caller.  */

const char *
fname_as_string (int pretty_p)
{
  const char *name = "top level";
  char *namep;
  int vrb = 2, len;
  cpp_string cstr = { 0, 0 }, strname;

  if (!pretty_p)
    {
      name = "";
      vrb = 0;
    }

  if (current_function_decl)
    name = lang_hooks.decl_printable_name (current_function_decl, vrb);

  len = strlen (name) + 3; /* Two for '"'s.  One for NULL.  */

  namep = XNEWVEC (char, len);
  snprintf (namep, len, "\"%s\"", name);
  strname.text = (unsigned char *) namep;
  strname.len = len - 1;

  if (cpp_interpret_string (parse_in, &strname, 1, &cstr, CPP_STRING))
    {
      XDELETEVEC (namep);
      return (const char *) cstr.text;
    }

  return namep;
}

/* Return the VAR_DECL for a const char array naming the current
   function.  If the VAR_DECL has not yet been created, create it
   now.  RID indicates how it should be formatted and IDENTIFIER_NODE
   ID is its name (unfortunately C and C++ hold the RID values of
   keywords in different places, so we can't derive RID from ID in
   this language independent code).  LOC is the location of the
   function.  */

tree
fname_decl (location_t loc, unsigned int rid, tree id)
{
  unsigned ix;
  tree decl = NULL_TREE;

  for (ix = 0; fname_vars[ix].decl; ix++)
    if (fname_vars[ix].rid == rid)
      break;

  decl = *fname_vars[ix].decl;
  if (!decl)
    {
      /* If a tree is built here, it would normally have the lineno of
         the current statement.  Later this tree will be moved to the
         beginning of the function and this line number will be wrong.
         To avoid this problem set the lineno to 0 here; that prevents
         it from appearing in the RTL.  */
      tree stmts;
      location_t saved_location = input_location;
      input_location = UNKNOWN_LOCATION;

      stmts = push_stmt_list ();
      decl = (*make_fname_decl) (loc, id, fname_vars[ix].pretty);
      stmts = pop_stmt_list (stmts);
      if (!IS_EMPTY_STMT (stmts))
        saved_function_name_decls
          = tree_cons (decl, stmts, saved_function_name_decls);
      *fname_vars[ix].decl = decl;
      input_location = saved_location;
    }
  if (!ix && !current_function_decl)
    pedwarn (loc, 0, "%qD is not defined outside of function scope", decl);

  return decl;
}

/* Given a STRING_CST, give it a suitable array-of-chars data type.  */

tree
fix_string_type (tree value)
{
  int length = TREE_STRING_LENGTH (value);
  int nchars, charsz;
  tree e_type, i_type, a_type;

  /* Compute the number of elements, for the array type.  */
  if (TREE_TYPE (value) == char_array_type_node || !TREE_TYPE (value))
    {
      charsz = 1;
      e_type = char_type_node;
    }
  else if (flag_char8_t && TREE_TYPE (value) == char8_array_type_node)
    {
      charsz = TYPE_PRECISION (char8_type_node) / BITS_PER_UNIT;
      e_type = char8_type_node;
    }
  else if (TREE_TYPE (value) == char16_array_type_node)
    {
      charsz = TYPE_PRECISION (char16_type_node) / BITS_PER_UNIT;
      e_type = char16_type_node;
    }
  else if (TREE_TYPE (value) == char32_array_type_node)
    {
      charsz = TYPE_PRECISION (char32_type_node) / BITS_PER_UNIT;
      e_type = char32_type_node;
    }
  else
    {
      charsz = TYPE_PRECISION (wchar_type_node) / BITS_PER_UNIT;
      e_type = wchar_type_node;
    }

  /* This matters only for targets where ssizetype has smaller precision
     than 32 bits.  */
  if (wi::lts_p (wi::to_wide (TYPE_MAX_VALUE (ssizetype)), length))
    {
      error ("size of string literal is too large");
      length = tree_to_shwi (TYPE_MAX_VALUE (ssizetype)) / charsz * charsz;
      char *str = CONST_CAST (char *, TREE_STRING_POINTER (value));
      memset (str + length, '\0',
              MIN (TREE_STRING_LENGTH (value) - length, charsz));
      TREE_STRING_LENGTH (value) = length;
    }
  nchars = length / charsz;

  /* C89 2.2.4.1, C99 5.2.4.1 (Translation limits).  The analogous
     limit in C++98 Annex B is very large (65536) and is not normative,
     so we do not diagnose it (warn_overlength_strings is forced off
     in c_common_post_options).  */
  if (warn_overlength_strings)
    {
      const int nchars_max = flag_isoc99 ? 4095 : 509;
      const int relevant_std = flag_isoc99 ? 99 : 90;
      if (nchars - 1 > nchars_max)
        /* Translators: The %d after 'ISO C' will be 90 or 99.  Do not
           separate the %d from the 'C'.  'ISO' should not be
           translated, but it may be moved after 'C%d' in languages
           where modifiers follow nouns.  */
        pedwarn (input_location, OPT_Woverlength_strings,
                 "string length %qd is greater than the length %qd "
                 "ISO C%d compilers are required to support",
                 nchars - 1, nchars_max, relevant_std);
    }

  /* Create the array type for the string constant.  The ISO C++
     standard says that a string literal has type `const char[N]' or
     `const wchar_t[N]'.  We use the same logic when invoked as a C
     front-end with -Wwrite-strings.
     ??? We should not change the type of an expression depending on the
     state of a warning flag.  We should just be warning -- see how
     this is handled in the C++ front-end for the deprecated implicit
     conversion from string literals to `char*' or `wchar_t*'.

     The C++ front end relies on TYPE_MAIN_VARIANT of a cv-qualified
     array type being the unqualified version of that type.
     Therefore, if we are constructing an array of const char, we must
     construct the matching unqualified array type first.  The C front
     end does not require this, but it does no harm, so we do it
     unconditionally.  */
  i_type = build_index_type (size_int (nchars - 1));
  a_type = build_array_type (e_type, i_type);
  if (c_dialect_cxx () || warn_write_strings)
    a_type = c_build_qualified_type (a_type, TYPE_QUAL_CONST);

  TREE_TYPE (value) = a_type;
  TREE_CONSTANT (value) = 1;
  TREE_READONLY (value) = 1;
  TREE_STATIC (value) = 1;
  return value;
}

/* Given a string of type STRING_TYPE, determine what kind of string
   token would give an equivalent execution encoding: CPP_STRING,
   CPP_STRING16, or CPP_STRING32.  Return CPP_OTHER in case of error.
   This may not be exactly the string token type that initially created
   the string, since CPP_WSTRING is indistinguishable from the 16/32 bit
   string type, and CPP_UTF8STRING is indistinguishable from CPP_STRING
   at this point.

   This effectively reverses part of the logic in lex_string and
   fix_string_type.  */

static enum cpp_ttype
get_cpp_ttype_from_string_type (tree string_type)
{
  gcc_assert (string_type);
  if (TREE_CODE (string_type) == POINTER_TYPE)
    string_type = TREE_TYPE (string_type);

  if (TREE_CODE (string_type) != ARRAY_TYPE)
    return CPP_OTHER;

  tree element_type = TREE_TYPE (string_type);
  if (TREE_CODE (element_type) != INTEGER_TYPE)
    return CPP_OTHER;

  int bits_per_character = TYPE_PRECISION (element_type);
  switch (bits_per_character)
    {
    case 8:
      return CPP_STRING;  /* It could have also been CPP_UTF8STRING.  */
    case 16:
      return CPP_STRING16;
    case 32:
      return CPP_STRING32;
    }

  return CPP_OTHER;
}

/* The global record of string concatenations, for use in
   extracting locations within string literals.  */

GTY(()) string_concat_db *g_string_concat_db;

/* Implementation of LANG_HOOKS_GET_SUBSTRING_LOCATION.  */

const char *
c_get_substring_location (const substring_loc &substr_loc,
                          location_t *out_loc)
{
  enum cpp_ttype tok_type
    = get_cpp_ttype_from_string_type (substr_loc.get_string_type ());
  if (tok_type == CPP_OTHER)
    return "unrecognized string type";

  return get_location_within_string (parse_in,
                                     global_dc->get_file_cache (),
                                     g_string_concat_db,
                                     substr_loc.get_fmt_string_loc (),
                                     tok_type,
                                     substr_loc.get_caret_idx (),
                                     substr_loc.get_start_idx (),
                                     substr_loc.get_end_idx (),
                                     out_loc);
}


/* Return true iff T is a boolean promoted to int.  */

bool
bool_promoted_to_int_p (tree t)
{
  return (CONVERT_EXPR_P (t)
          && TREE_TYPE (t) == integer_type_node
          && TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0))) == BOOLEAN_TYPE);
}

/* vector_targets_convertible_p is used for vector pointer types.  The
   callers perform various checks that the qualifiers are satisfactory,
   while OTOH vector_targets_convertible_p ignores the number of elements
   in the vectors.  That's fine with vector pointers as we can consider,
   say, a vector of 8 elements as two consecutive vectors of 4 elements,
   and that does not require any conversion of the pointer values.
   In contrast, vector_types_convertible_p and
   vector_types_compatible_elements_p are used for vector value types.  */
/* True if pointers to distinct types T1 and T2 can be converted to
   each other without an explicit cast.  Only returns true for opaque
   vector types.  */
bool
vector_targets_convertible_p (const_tree t1, const_tree t2)
{
  if (VECTOR_TYPE_P (t1) && VECTOR_TYPE_P (t2)
      && (TYPE_VECTOR_OPAQUE (t1) || TYPE_VECTOR_OPAQUE (t2))
      && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
    return true;

  return false;
}

/* vector_types_convertible_p is used for vector value types.
   It could in principle call vector_targets_convertible_p as a subroutine,
   but then the check for vector type would be duplicated with its callers,
   and also the purpose of vector_targets_convertible_p would become
   muddled.
   Where vector_types_convertible_p returns true, a conversion might still be
   needed to make the types match.
   In contrast, vector_targets_convertible_p is used for vector pointer
   values, and vector_types_compatible_elements_p is used specifically
   in the context for binary operators, as a check if use is possible without
   conversion.  */
/* True if vector types T1 and T2 can be converted to each other
   without an explicit cast.  If EMIT_LAX_NOTE is true, and T1 and T2
   can only be converted with -flax-vector-conversions yet that is not
   in effect, emit a note telling the user about that option if such
   a note has not previously been emitted.  */
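/* As an illustration of the checks below (not an exhaustive rule): a
   vector of 4 ints and a vector of 4 unsigned ints have the same size
   and both have integral elements, so they are convertible under
   -flax-vector-conversions; without that option the conversion is
   rejected, because int and unsigned int are not compatible element
   types for lang_hooks.types_compatible_p in C.  */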
bool
vector_types_convertible_p (const_tree t1, const_tree t2, bool emit_lax_note)
{
  static bool emitted_lax_note = false;
  bool convertible_lax;

  if ((TYPE_VECTOR_OPAQUE (t1) || TYPE_VECTOR_OPAQUE (t2))
      && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
    return true;

  convertible_lax =
    (tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2))
     && (TREE_CODE (TREE_TYPE (t1)) != REAL_TYPE
         || known_eq (TYPE_VECTOR_SUBPARTS (t1),
                      TYPE_VECTOR_SUBPARTS (t2)))
     && (INTEGRAL_TYPE_P (TREE_TYPE (t1))
         == INTEGRAL_TYPE_P (TREE_TYPE (t2))));

  if (!convertible_lax || flag_lax_vector_conversions)
    return convertible_lax;

  if (known_eq (TYPE_VECTOR_SUBPARTS (t1), TYPE_VECTOR_SUBPARTS (t2))
      && lang_hooks.types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
    return true;

  if (emit_lax_note && !emitted_lax_note)
    {
      emitted_lax_note = true;
      inform (input_location, "use %<-flax-vector-conversions%> to permit "
              "conversions between vectors with differing "
              "element types or numbers of subparts");
    }

  return false;
}

/* Build a VEC_PERM_EXPR if V0, V1 and MASK are not error_mark_nodes
   and have vector types, V0 has the same type as V1, and the number of
   elements of V0, V1, MASK is the same.

   In case V1 is a NULL_TREE it is assumed that __builtin_shuffle was
   called with two arguments.  In this case implementation passes the
   first argument twice in order to share the same tree code.  This fact
   could enable the mask-values being twice the vector length.  This is
   an implementation accident and this semantics is not guaranteed to
   the user.  */
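/* For instance (illustrative only): with four-element vectors, the
   two-argument form __builtin_shuffle (v, m) is built below as
   VEC_PERM_EXPR <v, v, m>, so a mask element of 5 happens to select the
   same element as 1 would; relying on that aliasing is exactly the
   accident described above.  */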
tree
c_build_vec_perm_expr (location_t loc, tree v0, tree v1, tree mask,
                       bool complain)
{
  tree ret;
  bool wrap = true;
  bool maybe_const = false;
  bool two_arguments = false;

  if (v1 == NULL_TREE)
    {
      two_arguments = true;
      v1 = v0;
    }

  if (v0 == error_mark_node || v1 == error_mark_node
      || mask == error_mark_node)
    return error_mark_node;

  if (!gnu_vector_type_p (TREE_TYPE (mask))
      || !VECTOR_INTEGER_TYPE_P (TREE_TYPE (mask)))
    {
      if (complain)
        error_at (loc, "%<__builtin_shuffle%> last argument must "
                  "be an integer vector");
      return error_mark_node;
    }

  if (!gnu_vector_type_p (TREE_TYPE (v0))
      || !gnu_vector_type_p (TREE_TYPE (v1)))
    {
      if (complain)
        error_at (loc, "%<__builtin_shuffle%> arguments must be vectors");
      return error_mark_node;
    }

  if (TYPE_MAIN_VARIANT (TREE_TYPE (v0)) != TYPE_MAIN_VARIANT (TREE_TYPE (v1)))
    {
      if (complain)
        error_at (loc, "%<__builtin_shuffle%> argument vectors must be of "
                  "the same type");
      return error_mark_node;
    }

  if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (v0)),
                TYPE_VECTOR_SUBPARTS (TREE_TYPE (mask)))
      && maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (v1)),
                   TYPE_VECTOR_SUBPARTS (TREE_TYPE (mask))))
    {
      if (complain)
        error_at (loc, "%<__builtin_shuffle%> number of elements of the "
                  "argument vector(s) and the mask vector should "
                  "be the same");
      return error_mark_node;
    }

  if (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (TREE_TYPE (v0))))
      != GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (TREE_TYPE (mask)))))
    {
      if (complain)
        error_at (loc, "%<__builtin_shuffle%> argument vector(s) inner type "
                  "must have the same size as inner type of the mask");
      return error_mark_node;
    }

  if (!c_dialect_cxx ())
    {
      /* Avoid C_MAYBE_CONST_EXPRs inside VEC_PERM_EXPR.  */
      v0 = c_fully_fold (v0, false, &maybe_const);
      wrap &= maybe_const;

      if (two_arguments)
        v1 = v0 = save_expr (v0);
      else
        {
          v1 = c_fully_fold (v1, false, &maybe_const);
          wrap &= maybe_const;
        }

      mask = c_fully_fold (mask, false, &maybe_const);
      wrap &= maybe_const;
    }
  else if (two_arguments)
    v1 = v0 = save_expr (v0);

  ret = build3_loc (loc, VEC_PERM_EXPR, TREE_TYPE (v0), v0, v1, mask);

  if (!c_dialect_cxx () && !wrap)
    ret = c_wrap_maybe_const (ret, true);

  return ret;
}

/* Build a VEC_PERM_EXPR if V0, V1 are not error_mark_nodes
   and have vector types, V0 has the same element type as V1, and the
   number of elements of the result is that of MASK.  */
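/* For example (illustrative): with four-element vectors A and B,
   __builtin_shufflevector (a, b, 0, 4, 1, 5) selects from the
   concatenation of A and B and yields { a[0], b[0], a[1], b[1] }.  */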
tree
c_build_shufflevector (location_t loc, tree v0, tree v1,
                       const vec<tree> &mask, bool complain)
{
  tree ret;
  bool wrap = true;
  bool maybe_const = false;

  if (v0 == error_mark_node || v1 == error_mark_node)
    return error_mark_node;

  if (!gnu_vector_type_p (TREE_TYPE (v0))
      || !gnu_vector_type_p (TREE_TYPE (v1)))
    {
      if (complain)
        error_at (loc, "%<__builtin_shufflevector%> arguments must be vectors");
      return error_mark_node;
    }

  /* ??? In principle one could select a constant part of a variable size
     vector but things get a bit awkward with trying to support this here.  */
  unsigned HOST_WIDE_INT v0n, v1n;
  if (!TYPE_VECTOR_SUBPARTS (TREE_TYPE (v0)).is_constant (&v0n)
      || !TYPE_VECTOR_SUBPARTS (TREE_TYPE (v1)).is_constant (&v1n))
    {
      if (complain)
        error_at (loc, "%<__builtin_shufflevector%> arguments must be constant"
                  " size vectors");
      return error_mark_node;
    }

  if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (v0)))
      != TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (v1))))
    {
      if (complain)
        error_at (loc, "%<__builtin_shufflevector%> argument vectors must "
                  "have the same element type");
      return error_mark_node;
    }

  if (!pow2p_hwi (mask.length ()))
    {
      if (complain)
        error_at (loc, "%<__builtin_shufflevector%> must specify a result "
                  "with a power of two number of elements");
      return error_mark_node;
    }

  if (!c_dialect_cxx ())
    {
      /* Avoid C_MAYBE_CONST_EXPRs inside VEC_PERM_EXPR.  */
      v0 = c_fully_fold (v0, false, &maybe_const);
      wrap &= maybe_const;

      v1 = c_fully_fold (v1, false, &maybe_const);
      wrap &= maybe_const;
    }

  unsigned HOST_WIDE_INT maskl = MAX (mask.length (), MAX (v0n, v1n));
  unsigned HOST_WIDE_INT pad = (v0n < maskl ? maskl - v0n : 0);
  vec_perm_builder sel (maskl, maskl, 1);
  unsigned i;
  for (i = 0; i < mask.length (); ++i)
    {
      tree idx = mask[i];
      if (!tree_fits_shwi_p (idx))
        {
          if (complain)
            error_at (loc, "invalid element index %qE to "
                      "%<__builtin_shufflevector%>", idx);
          return error_mark_node;
        }
      HOST_WIDE_INT iidx = tree_to_shwi (idx);
      if (iidx < -1
          || (iidx != -1
              && (unsigned HOST_WIDE_INT) iidx >= v0n + v1n))
        {
          if (complain)
            error_at (loc, "invalid element index %qE to "
                      "%<__builtin_shufflevector%>", idx);
          return error_mark_node;
        }
      /* ??? Our VEC_PERM_EXPR does not allow for -1 yet.  */
      if (iidx == -1)
        iidx = i;
      /* ??? Our VEC_PERM_EXPR does not allow different sized inputs,
         so pad out a smaller v0.  */
      else if ((unsigned HOST_WIDE_INT) iidx >= v0n)
        iidx += pad;
      sel.quick_push (iidx);
    }
  /* ??? VEC_PERM_EXPR does not support a result that is smaller than
     the inputs, so we have to pad it out.  */
  for (; i < maskl; ++i)
    sel.quick_push (i);

  vec_perm_indices indices (sel, 2, maskl);

  tree ret_type = build_vector_type (TREE_TYPE (TREE_TYPE (v0)), maskl);
  tree mask_type = build_vector_type (build_nonstandard_integer_type
                (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (ret_type))), 1),
                maskl);
  /* Pad out arguments to the common vector size.  */
  if (v0n < maskl)
    {
      constructor_elt elt = { NULL_TREE, build_zero_cst (TREE_TYPE (v0)) };
      v0 = build_constructor_single (ret_type, NULL_TREE, v0);
      for (i = 1; i < maskl / v0n; ++i)
        vec_safe_push (CONSTRUCTOR_ELTS (v0), elt);
    }
  if (v1n < maskl)
    {
      constructor_elt elt = { NULL_TREE, build_zero_cst (TREE_TYPE (v1)) };
      v1 = build_constructor_single (ret_type, NULL_TREE, v1);
      for (i = 1; i < maskl / v1n; ++i)
        vec_safe_push (CONSTRUCTOR_ELTS (v1), elt);
    }
  ret = build3_loc (loc, VEC_PERM_EXPR, ret_type, v0, v1,
                    vec_perm_indices_to_tree (mask_type, indices));
  /* Get the lowpart we are interested in.  */
  if (mask.length () < maskl)
    {
      tree lpartt = build_vector_type (TREE_TYPE (ret_type), mask.length ());
      ret = build3_loc (loc, BIT_FIELD_REF,
                        lpartt, ret, TYPE_SIZE (lpartt), bitsize_zero_node);
      /* Wrap the lowpart operation in a TARGET_EXPR so it gets a separate
         temporary during gimplification.  See PR101530 for cases where
         we'd otherwise end up with non-toplevel BIT_FIELD_REFs.  */
      tree tem = create_tmp_var_raw (lpartt);
      DECL_CONTEXT (tem) = current_function_decl;
      ret = build4 (TARGET_EXPR, lpartt, tem, ret, NULL_TREE, NULL_TREE);
      TREE_SIDE_EFFECTS (ret) = 1;
    }

  if (!c_dialect_cxx () && !wrap)
    ret = c_wrap_maybe_const (ret, true);

  return ret;
}

/* Build a VEC_CONVERT ifn for __builtin_convertvector builtin.  */

tree
c_build_vec_convert (location_t loc1, tree expr, location_t loc2, tree type,
                     bool complain)
{
  if (error_operand_p (type))
    return error_mark_node;
  if (error_operand_p (expr))
    return error_mark_node;

  if (!gnu_vector_type_p (TREE_TYPE (expr))
      || (!VECTOR_INTEGER_TYPE_P (TREE_TYPE (expr))
          && !VECTOR_FLOAT_TYPE_P (TREE_TYPE (expr))))
    {
      if (complain)
        error_at (loc1, "%<__builtin_convertvector%> first argument must "
                  "be an integer or floating vector");
      return error_mark_node;
    }

  if (!gnu_vector_type_p (type)
      || (!VECTOR_INTEGER_TYPE_P (type) && !VECTOR_FLOAT_TYPE_P (type)))
    {
      if (complain)
        error_at (loc2, "%<__builtin_convertvector%> second argument must "
                  "be an integer or floating vector type");
      return error_mark_node;
    }

  if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)),
                TYPE_VECTOR_SUBPARTS (type)))
    {
      if (complain)
        error_at (loc1, "%<__builtin_convertvector%> number of elements "
                  "of the first argument vector and the second argument "
                  "vector type should be the same");
      return error_mark_node;
    }

  if ((TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (expr)))
       == TYPE_MAIN_VARIANT (TREE_TYPE (type)))
      || (VECTOR_INTEGER_TYPE_P (TREE_TYPE (expr))
          && VECTOR_INTEGER_TYPE_P (type)
          && (TYPE_PRECISION (TREE_TYPE (TREE_TYPE (expr)))
              == TYPE_PRECISION (TREE_TYPE (type)))))
    return build1_loc (loc1, VIEW_CONVERT_EXPR, type, expr);

  bool wrap = true;
  bool maybe_const = false;
  tree ret;
  if (!c_dialect_cxx ())
    {
      /* Avoid C_MAYBE_CONST_EXPRs inside of VEC_CONVERT argument.  */
      expr = c_fully_fold (expr, false, &maybe_const);
      wrap &= maybe_const;
    }

  ret = build_call_expr_internal_loc (loc1, IFN_VEC_CONVERT, type, 1, expr);

  if (!wrap)
    ret = c_wrap_maybe_const (ret, true);

  return ret;
}

/* Like tree.cc:get_narrower, but retain conversion from C++0x scoped enum
   to integral type.  */

tree
c_common_get_narrower (tree op, int *unsignedp_ptr)
{
  op = get_narrower (op, unsignedp_ptr);

  if (TREE_CODE (TREE_TYPE (op)) == ENUMERAL_TYPE
      && ENUM_IS_SCOPED (TREE_TYPE (op)))
    {
      /* C++0x scoped enumerations don't implicitly convert to integral
         type; if we stripped an explicit conversion to a larger type we
         need to replace it so common_type will still work.  */
      tree type = c_common_type_for_size (TYPE_PRECISION (TREE_TYPE (op)),
                                          TYPE_UNSIGNED (TREE_TYPE (op)));
      op = fold_convert (type, op);
    }
  return op;
}

/* This is a helper function of build_binary_op.

   For certain operations if both args were extended from the same
   smaller type, do the arithmetic in that type and then extend.

   BITWISE indicates a bitwise operation.
   For them, this optimization is safe only if
   both args are zero-extended or both are sign-extended.
   Otherwise, we might change the result.
   Eg, (short)-1 | (unsigned short)-1 is (int)-1
   but calculated in (unsigned short) it would be (unsigned short)-1.
*/
tree
shorten_binary_op (tree result_type, tree op0, tree op1, bool bitwise)
{
  int unsigned0, unsigned1;
  tree arg0, arg1;
  int uns;
  tree type;

  /* Do not shorten vector operations.  */
  if (VECTOR_TYPE_P (result_type))
    return result_type;

  /* Cast OP0 and OP1 to RESULT_TYPE.  Doing so prevents
     excessive narrowing when we call get_narrower below.  For
     example, suppose that OP0 is of unsigned int extended
     from signed char and that RESULT_TYPE is long long int.
     If we explicitly cast OP0 to RESULT_TYPE, OP0 would look
     like

       (long long int) (unsigned int) signed_char

     which get_narrower would narrow down to

       (unsigned int) signed char

     If we do not cast OP0 first, get_narrower would return
     signed_char, which is inconsistent with the case of the
     explicit cast.  */
  op0 = convert (result_type, op0);
  op1 = convert (result_type, op1);

  arg0 = c_common_get_narrower (op0, &unsigned0);
  arg1 = c_common_get_narrower (op1, &unsigned1);

  /* UNS is 1 if the operation to be done is an unsigned one.  */
  uns = TYPE_UNSIGNED (result_type);

  /* Handle the case that OP0 (or OP1) does not *contain* a conversion
     but it *requires* conversion to FINAL_TYPE.  */

  if ((TYPE_PRECISION (TREE_TYPE (op0))
       == TYPE_PRECISION (TREE_TYPE (arg0)))
      && TREE_TYPE (op0) != result_type)
    unsigned0 = TYPE_UNSIGNED (TREE_TYPE (op0));
  if ((TYPE_PRECISION (TREE_TYPE (op1))
       == TYPE_PRECISION (TREE_TYPE (arg1)))
      && TREE_TYPE (op1) != result_type)
    unsigned1 = TYPE_UNSIGNED (TREE_TYPE (op1));

  /* Now UNSIGNED0 is 1 if ARG0 zero-extends to FINAL_TYPE.  */

  /* For bitwise operations, signedness of nominal type
     does not matter.  Consider only how operands were extended.  */
  if (bitwise)
    uns = unsigned0;

  /* Note that in all three cases below we refrain from optimizing
     an unsigned operation on sign-extended args.
     That would not be valid.  */

  /* Both args variable: if both extended in same way
     from same width, do it in that width.
     Do it unsigned if args were zero-extended.  */
  if ((TYPE_PRECISION (TREE_TYPE (arg0))
       < TYPE_PRECISION (result_type))
      && (TYPE_PRECISION (TREE_TYPE (arg1))
          == TYPE_PRECISION (TREE_TYPE (arg0)))
      && unsigned0 == unsigned1
      && (unsigned0 || !uns))
    {
      tree ctype = common_type (TREE_TYPE (arg0), TREE_TYPE (arg1));
      if (ctype != error_mark_node)
        return c_common_signed_or_unsigned_type (unsigned0, ctype);
    }

  else if (TREE_CODE (arg0) == INTEGER_CST
           && (unsigned1 || !uns)
           && (TYPE_PRECISION (TREE_TYPE (arg1))
               < TYPE_PRECISION (result_type))
           && (type
               = c_common_signed_or_unsigned_type (unsigned1,
                                                   TREE_TYPE (arg1)))
           && !POINTER_TYPE_P (type)
           && int_fits_type_p (arg0, type))
    return type;

  else if (TREE_CODE (arg1) == INTEGER_CST
           && (unsigned0 || !uns)
           && (TYPE_PRECISION (TREE_TYPE (arg0))
               < TYPE_PRECISION (result_type))
           && (type
               = c_common_signed_or_unsigned_type (unsigned0,
                                                   TREE_TYPE (arg0)))
           && !POINTER_TYPE_P (type)
           && int_fits_type_p (arg1, type))
    return type;

  return result_type;
}

/* Returns true iff any integer value of type FROM_TYPE can be represented as
   real of type TO_TYPE.  This is a helper function for unsafe_conversion_p.  */

static bool
int_safely_convertible_to_real_p (const_tree from_type, const_tree to_type)
{
  tree type_low_bound = TYPE_MIN_VALUE (from_type);
  tree type_high_bound = TYPE_MAX_VALUE (from_type);
  REAL_VALUE_TYPE real_low_bound =
    real_value_from_int_cst (0, type_low_bound);
  REAL_VALUE_TYPE real_high_bound =
    real_value_from_int_cst (0, type_high_bound);

  return exact_real_truncate (TYPE_MODE (to_type), &real_low_bound)
         && exact_real_truncate (TYPE_MODE (to_type), &real_high_bound);
}

/* Checks if expression EXPR of complex/real/integer type cannot be converted
   to the complex/real/integer type TYPE.  Function returns non-zero when:
        * EXPR is a constant which cannot be exactly converted to TYPE.
1503    * EXPR is not a constant and the size of EXPR's type is greater than
1504      the size of TYPE, for EXPR's type and TYPE both integer, both real,
1505      or both complex.
1506    * EXPR is not a constant, has complex type, and TYPE is a real or
1507      an integer type.
1508    * EXPR is not a constant, has real type, and TYPE is an integer type.
1509    * EXPR is not a constant, has integer type, and that type cannot be
1510      exactly converted to the real type TYPE.
1511
1512    The function allows conversions between types of different signedness if
1513    CHECK_SIGN is false and can return SAFE_CONVERSION (zero) in that
1514    case.  It can return UNSAFE_SIGN only if CHECK_SIGN is true.
1515
1516    RESULT, when non-null, is the result of the conversion.  When constant,
1517 it is included in the text of diagnostics.
1518
1519    The function allows conversions from complex constants to non-complex
1520    types, provided that the imaginary part is zero and the real part can be
1521    safely converted to TYPE. */
1522
1523enum conversion_safety
1524unsafe_conversion_p (tree type, tree expr, tree result, bool check_sign)
1525{
1526 enum conversion_safety give_warning = SAFE_CONVERSION; /* is 0 or false */
1527 tree expr_type = TREE_TYPE (expr);
1528
1529 expr = fold_for_warn (expr);
1530
1531 if (TREE_CODE (expr) == REAL_CST || TREE_CODE (expr) == INTEGER_CST)
1532 {
1533 /* If type is complex, we are interested in compatibility with
1534 underlying type. */
1535 if (TREE_CODE (type) == COMPLEX_TYPE)
1536 type = TREE_TYPE (type);
1537
1538 /* Warn for real constant that is not an exact integer converted
1539 to integer type. */
1540 if (SCALAR_FLOAT_TYPE_P (expr_type)
1541 && (TREE_CODE (type) == INTEGER_TYPE
1542 || TREE_CODE (type) == BITINT_TYPE))
1543 {
1544 if (!real_isinteger (TREE_REAL_CST_PTR (expr), TYPE_MODE (expr_type)))
1545 give_warning = UNSAFE_REAL;
1546 }
1547 /* Warn for an integer constant that does not fit into integer type. */
1548 else if ((TREE_CODE (expr_type) == INTEGER_TYPE
1549 || TREE_CODE (expr_type) == BITINT_TYPE)
1550 && (TREE_CODE (type) == INTEGER_TYPE
1551 || TREE_CODE (type) == BITINT_TYPE)
1552 && !int_fits_type_p (expr, type))
1553 {
1554 if (TYPE_UNSIGNED (type) && !TYPE_UNSIGNED (expr_type)
1555 && tree_int_cst_sgn (expr) < 0)
1556 {
1557 if (check_sign)
1558 give_warning = UNSAFE_SIGN;
1559 }
1560 else if (!TYPE_UNSIGNED (type) && TYPE_UNSIGNED (expr_type))
1561 {
1562 if (check_sign)
1563 give_warning = UNSAFE_SIGN;
1564 }
1565 else
1566 give_warning = UNSAFE_OTHER;
1567 }
1568 else if (SCALAR_FLOAT_TYPE_P (type))
1569 {
1570 /* Warn for an integer constant that does not fit into real type. */
1571 if (TREE_CODE (expr_type) == INTEGER_TYPE
1572 || TREE_CODE (expr_type) == BITINT_TYPE)
1573 {
1574 REAL_VALUE_TYPE a = real_value_from_int_cst (0, expr);
1575 if (!exact_real_truncate (TYPE_MODE (type), &a))
1576 give_warning = UNSAFE_REAL;
1577 }
1578 /* Warn for a real constant that does not fit into a smaller
1579 real type. */
1580 else if (SCALAR_FLOAT_TYPE_P (expr_type)
1581 && TYPE_PRECISION (type) < TYPE_PRECISION (expr_type))
1582 {
1583 REAL_VALUE_TYPE a = TREE_REAL_CST (expr);
1584 if (!exact_real_truncate (TYPE_MODE (type), &a))
1585 give_warning = UNSAFE_REAL;
1586 }
1587 }
1588 }
1589
1590 else if (TREE_CODE (expr) == COMPLEX_CST)
1591 {
1592 tree imag_part = TREE_IMAGPART (expr);
1593 /* For a conversion from a complex constant with zero imaginary part,
1594 it is enough to check the conversion of the real part. */
1595 if ((TREE_CODE (imag_part) == REAL_CST
1596 && real_zerop (imag_part))
1597 || (TREE_CODE (imag_part) == INTEGER_CST
1598 && integer_zerop (imag_part)))
1599 /* Note: in this branch we use a recursive call to unsafe_conversion_p
1600 with a different type of EXPR, but it is still safe, because when EXPR
1601 is a constant, its type is not used in the text of the generated warnings
1602 (otherwise they could sound misleading). */
1603 return unsafe_conversion_p (type, TREE_REALPART (expr), result,
1604 check_sign);
1605 /* Conversion from complex constant with non-zero imaginary part. */
1606 else
1607 {
1608 /* Conversion to complex type.
1609 Perform checks for both real and imaginary parts. */
1610 if (TREE_CODE (type) == COMPLEX_TYPE)
1611 {
1612 enum conversion_safety re_safety =
1613 unsafe_conversion_p (type, TREE_REALPART (expr),
1614 result, check_sign);
1615 enum conversion_safety im_safety =
1616 unsafe_conversion_p (type, imag_part, result, check_sign);
1617
1618 /* Merge the results into appropriate single warning. */
1619
1620 /* Note: this case includes SAFE_CONVERSION, i.e. success. */
1621 if (re_safety == im_safety)
1622 give_warning = re_safety;
1623 else if (!re_safety && im_safety)
1624 give_warning = im_safety;
1625 else if (re_safety && !im_safety)
1626 give_warning = re_safety;
1627 else
1628 give_warning = UNSAFE_OTHER;
1629 }
1630 /* Warn about conversion from complex to real or integer type. */
1631 else
1632 give_warning = UNSAFE_IMAGINARY;
1633 }
1634 }
1635
1636 /* Checks for remaining case: EXPR is not constant. */
1637 else
1638 {
1639 /* Warn for real types converted to integer types. */
1640 if (SCALAR_FLOAT_TYPE_P (expr_type)
1641 && (TREE_CODE (type) == INTEGER_TYPE
1642 || TREE_CODE (type) == BITINT_TYPE))
1643 give_warning = UNSAFE_REAL;
1644
1645 else if ((TREE_CODE (expr_type) == INTEGER_TYPE
1646 || TREE_CODE (expr_type) == BITINT_TYPE)
1647 && (TREE_CODE (type) == INTEGER_TYPE
1648 || TREE_CODE (type) == BITINT_TYPE))
1649 {
1650 /* Don't warn about unsigned char y = 0xff, x = (int) y; */
1651 expr = get_unwidened (expr, 0);
1652 expr_type = TREE_TYPE (expr);
1653
1654 /* Don't warn for short y; short x = ((int)y & 0xff); */
1655 if (TREE_CODE (expr) == BIT_AND_EXPR
1656 || TREE_CODE (expr) == BIT_IOR_EXPR
1657 || TREE_CODE (expr) == BIT_XOR_EXPR)
1658 {
1659 /* If both args were extended from a shorter type,
1660 use that type if that is safe. */
1661 expr_type = shorten_binary_op (expr_type,
1662 TREE_OPERAND (expr, 0),
1663 TREE_OPERAND (expr, 1),
1664 /* bitwise */1);
1665
1666 if (TREE_CODE (expr) == BIT_AND_EXPR)
1667 {
1668 tree op0 = TREE_OPERAND (expr, 0);
1669 tree op1 = TREE_OPERAND (expr, 1);
1670 bool unsigned0 = TYPE_UNSIGNED (TREE_TYPE (op0));
1671 bool unsigned1 = TYPE_UNSIGNED (TREE_TYPE (op1));
1672
1673 /* If one of the operands is a non-negative constant
1674 that fits in the target type, then the type of the
1675 other operand does not matter. */
1676 if ((TREE_CODE (op0) == INTEGER_CST
1677 && int_fits_type_p (op0, c_common_signed_type (type))
1678 && int_fits_type_p (op0, c_common_unsigned_type (type)))
1679 || (TREE_CODE (op1) == INTEGER_CST
1680 && int_fits_type_p (op1, c_common_signed_type (type))
1681 && int_fits_type_p (op1,
1682 c_common_unsigned_type (type))))
1683 return SAFE_CONVERSION;
1684 /* If constant is unsigned and fits in the target
1685 type, then the result will also fit. */
1686 else if ((TREE_CODE (op0) == INTEGER_CST
1687 && unsigned0
1688 && int_fits_type_p (op0, type))
1689 || (TREE_CODE (op1) == INTEGER_CST
1690 && unsigned1
1691 && int_fits_type_p (op1, type)))
1692 return SAFE_CONVERSION;
1693 }
1694 }
1695 /* Warn for integer types converted to smaller integer types. */
1696 if (TYPE_PRECISION (type) < TYPE_PRECISION (expr_type))
1697 give_warning = UNSAFE_OTHER;
1698
1699 /* When they are the same width but different signedness,
1700 then the value may change. */
1701 else if (((TYPE_PRECISION (type) == TYPE_PRECISION (expr_type)
1702 && TYPE_UNSIGNED (expr_type) != TYPE_UNSIGNED (type))
1703 /* Even when converted to a bigger type, if the type is
1704 unsigned but expr is signed, then negative values
1705 will be changed. */
1706 || (TYPE_UNSIGNED (type) && !TYPE_UNSIGNED (expr_type)))
1707 && check_sign)
1708 give_warning = UNSAFE_SIGN;
1709 }
1710
1711 /* Warn for integer types converted to real types if and only if
1712 all the range of values of the integer type cannot be
1713 represented by the real type. */
1714 else if ((TREE_CODE (expr_type) == INTEGER_TYPE
1715 || TREE_CODE (expr_type) == BITINT_TYPE)
1716 && SCALAR_FLOAT_TYPE_P (type))
1717 {
1718 /* Don't warn about char y = 0xff; float x = (int) y; */
1719 expr = get_unwidened (expr, 0);
1720 expr_type = TREE_TYPE (expr);
1721
1722 if (!int_safely_convertible_to_real_p (expr_type, type))
1723 give_warning = UNSAFE_OTHER;
1724 }
1725
1726 /* Warn for real types converted to smaller real types. */
1727 else if (SCALAR_FLOAT_TYPE_P (expr_type)
1728 && SCALAR_FLOAT_TYPE_P (type)
1729 && TYPE_PRECISION (type) < TYPE_PRECISION (expr_type))
1730 give_warning = UNSAFE_REAL;
1731
1732 /* Check conversion between two complex types. */
1733 else if (TREE_CODE (expr_type) == COMPLEX_TYPE
1734 && TREE_CODE (type) == COMPLEX_TYPE)
1735 {
1736 /* Extract underlying types (i.e., type of real and imaginary
1737 parts) of expr_type and type. */
1738 tree from_type = TREE_TYPE (expr_type);
1739 tree to_type = TREE_TYPE (type);
1740
1741 /* Warn for real types converted to integer types. */
1742 if (SCALAR_FLOAT_TYPE_P (from_type)
1743 && TREE_CODE (to_type) == INTEGER_TYPE)
1744 give_warning = UNSAFE_REAL;
1745
1746 /* Warn for real types converted to smaller real types. */
1747 else if (SCALAR_FLOAT_TYPE_P (from_type)
1748 && SCALAR_FLOAT_TYPE_P (to_type)
1749 && TYPE_PRECISION (to_type) < TYPE_PRECISION (from_type))
1750 give_warning = UNSAFE_REAL;
1751
1752 /* Check conversion for complex integer types. Here the implementation
1753 is simpler than for real-domain integers because it does not
1754 involve sophisticated cases, such as bitmasks, casts, etc. */
1755 else if (TREE_CODE (from_type) == INTEGER_TYPE
1756 && TREE_CODE (to_type) == INTEGER_TYPE)
1757 {
1758 /* Warn for integer types converted to smaller integer types. */
1759 if (TYPE_PRECISION (to_type) < TYPE_PRECISION (from_type))
1760 give_warning = UNSAFE_OTHER;
1761
1762 /* Check for different signedness, see case for real-domain
1763 integers (above) for a more detailed comment. */
1764 else if (((TYPE_PRECISION (to_type) == TYPE_PRECISION (from_type)
1765 && TYPE_UNSIGNED (to_type) != TYPE_UNSIGNED (from_type))
1766 || (TYPE_UNSIGNED (to_type) && !TYPE_UNSIGNED (from_type)))
1767 && check_sign)
1768 give_warning = UNSAFE_SIGN;
1769 }
1770 else if (TREE_CODE (from_type) == INTEGER_TYPE
1771 && SCALAR_FLOAT_TYPE_P (to_type)
1772 && !int_safely_convertible_to_real_p (from_type, to_type))
1773 give_warning = UNSAFE_OTHER;
1774 }
1775
1776 /* Warn for complex types converted to real or integer types. */
1777 else if (TREE_CODE (expr_type) == COMPLEX_TYPE
1778 && TREE_CODE (type) != COMPLEX_TYPE)
1779 give_warning = UNSAFE_IMAGINARY;
1780 }
1781
1782 return give_warning;
1783}
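
/* A few illustrative classifications implied by the checks above (assuming
   the usual type widths; the wording of any resulting diagnostic is up to
   the callers):

     double d; int i = d;                  UNSAFE_REAL
     int n; unsigned u = n;                UNSAFE_SIGN (when CHECK_SIGN)
     long long l; int i = l;               UNSAFE_OTHER (narrower target)
     _Complex double z; double r = z;      UNSAFE_IMAGINARY  */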
1784
1785
1786/* Convert EXPR to TYPE, warning about conversion problems with constants.
1787 Invoke this function on every expression that is converted implicitly,
1788 i.e. because of language rules and not because of an explicit cast.
1789 INIT_CONST is true if the conversion is for arithmetic types for a static
1790 initializer and folding must apply accordingly (discarding floating-point
1791 exceptions and assuming the default rounding mode is in effect). */
1792
1793tree
1794convert_and_check (location_t loc, tree type, tree expr, bool init_const)
1795{
1796 tree result;
1797 tree expr_for_warning;
1798
1799 /* Convert from a value with possible excess precision rather than
1800 via the semantic type, but do not warn about values not fitting
1801 exactly in the semantic type. */
1802 if (TREE_CODE (expr) == EXCESS_PRECISION_EXPR)
1803 {
1804 tree orig_type = TREE_TYPE (expr);
1805 expr = TREE_OPERAND (expr, 0);
1806 expr_for_warning = (init_const
1807 ? convert_init (orig_type, expr)
1808 : convert (orig_type, expr));
1809 if (orig_type == type)
1810 return expr_for_warning;
1811 }
1812 else
1813 expr_for_warning = expr;
1814
1815 if (TREE_TYPE (expr) == type)
1816 return expr;
1817
1818 result = init_const ? convert_init (type, expr) : convert (type, expr);
1819
1820 if (c_inhibit_evaluation_warnings == 0
1821 && !TREE_OVERFLOW_P (expr)
1822 && result != error_mark_node
1823 && !c_hardbool_type_attr (type))
1824 warnings_for_convert_and_check (loc, type, expr_for_warning, result);
1825
1826 return result;
1827}
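
/* For illustration: an implicit conversion such as

     unsigned char c = 300;

   goes through this function, and since the constant does not fit in the
   target type, warnings_for_convert_and_check gets a chance to diagnose
   it.  An explicit cast such as (unsigned char) 300 does not take this
   path, per the comment above.  */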
1828
1829/* A node in a list that describes references to variables (EXPR), which are
1830 either read accesses if WRITER is zero, or write accesses, in which case
1831 WRITER is the parent of EXPR. */
1832struct tlist
1833{
1834 struct tlist *next;
1835 tree expr, writer;
1836};
1837
1838/* Used to implement a cache of the results of a call to verify_tree. We only
1839 use this for SAVE_EXPRs. */
1840struct tlist_cache
1841{
1842 struct tlist_cache *next;
1843 struct tlist *cache_before_sp;
1844 struct tlist *cache_after_sp;
1845 tree expr;
1846};
1847
1848/* Obstack to use when allocating tlist structures, and corresponding
1849 firstobj. */
1850static struct obstack tlist_obstack;
1851static char *tlist_firstobj = 0;
1852
1853/* Keep track of the identifiers we've warned about, so we can avoid duplicate
1854 warnings. */
1855static struct tlist *warned_ids;
1856/* SAVE_EXPRs need special treatment. We process them only once and then
1857 cache the results. */
1858static struct tlist_cache *save_expr_cache;
1859
1860static void add_tlist (struct tlist **, struct tlist *, tree, int);
1861static void merge_tlist (struct tlist **, struct tlist *, int);
1862static void verify_tree (tree, struct tlist **, struct tlist **, tree);
1863static bool warning_candidate_p (tree);
1864static bool candidate_equal_p (const_tree, const_tree);
1865static void warn_for_collisions (struct tlist *);
1866static void warn_for_collisions_1 (tree, tree, struct tlist *, int);
1867static struct tlist *new_tlist (struct tlist *, tree, tree);
1868
1869/* Create a new struct tlist and fill in its fields. */
1870static struct tlist *
1871new_tlist (struct tlist *next, tree t, tree writer)
1872{
1873 struct tlist *l;
1874 l = XOBNEW (&tlist_obstack, struct tlist);
1875 l->next = next;
1876 l->expr = t;
1877 l->writer = writer;
1878 return l;
1879}
1880
1881/* Add duplicates of the nodes found in ADD to the list *TO. If EXCLUDE_WRITER
1882 is nonnull, we ignore any node we find which has a writer equal to it. */
1883
1884static void
1885add_tlist (struct tlist **to, struct tlist *add, tree exclude_writer, int copy)
1886{
1887 while (add)
1888 {
1889 struct tlist *next = add->next;
1890 if (!copy)
1891 add->next = *to;
1892 if (!exclude_writer || !candidate_equal_p (add->writer, exclude_writer))
1893 *to = copy ? new_tlist (*to, add->expr, add->writer) : add;
1894 add = next;
1895 }
1896}
1897
1898/* Merge the nodes of ADD into TO. This merging process is done so that for
1899 each variable that already exists in TO, no new node is added; however if
1900 there is a write access recorded in ADD, and an occurrence on TO is only
1901 a read access, then the occurrence in TO will be modified to record the
1902 write. */
1903
1904static void
1905merge_tlist (struct tlist **to, struct tlist *add, int copy)
1906{
1907 struct tlist **end = to;
1908
1909 while (*end)
1910 end = &(*end)->next;
1911
1912 while (add)
1913 {
1914 int found = 0;
1915 struct tlist *tmp2;
1916 struct tlist *next = add->next;
1917
1918 for (tmp2 = *to; tmp2; tmp2 = tmp2->next)
1919 if (candidate_equal_p (tmp2->expr, add->expr))
1920 {
1921 found = 1;
1922 if (!tmp2->writer)
1923 tmp2->writer = add->writer;
1924 }
1925 if (!found)
1926 {
1927 *end = copy ? new_tlist (NULL, add->expr, add->writer) : add;
1928 end = &(*end)->next;
1929 *end = 0;
1930 }
1931 add = next;
1932 }
1933}
1934
1935/* WRITTEN is a variable, WRITER is its parent. Warn if any of the variable
1936 references in list LIST conflict with it, excluding reads if ONLY_WRITES
1937 is nonzero. */
1938
1939static void
1940warn_for_collisions_1 (tree written, tree writer, struct tlist *list,
1941 int only_writes)
1942{
1943 struct tlist *tmp;
1944
1945 /* Avoid duplicate warnings. */
1946 for (tmp = warned_ids; tmp; tmp = tmp->next)
1947 if (candidate_equal_p (tmp->expr, written))
1948 return;
1949
1950 while (list)
1951 {
1952 if (candidate_equal_p (list->expr, written)
1953 && !candidate_equal_p (list->writer, writer)
1954 && (!only_writes || list->writer))
1955 {
1956 warned_ids = new_tlist (warned_ids, written, NULL_TREE);
1957 warning_at (EXPR_LOC_OR_LOC (writer, input_location),
1958 OPT_Wsequence_point, "operation on %qE may be undefined",
1959 list->expr);
1960 }
1961 list = list->next;
1962 }
1963}
1964
1965/* Given a list LIST of references to variables, find whether any of these
1966 can cause conflicts due to missing sequence points. */
1967
1968static void
1969warn_for_collisions (struct tlist *list)
1970{
1971 struct tlist *tmp;
1972
1973 for (tmp = list; tmp; tmp = tmp->next)
1974 {
1975 if (tmp->writer)
1976 warn_for_collisions_1 (tmp->expr, tmp->writer, list, 0);
1977 }
1978}
1979
1980/* Return nonzero if X is a tree that can be verified by the sequence point
1981 warnings. */
1982
1983static bool
1984warning_candidate_p (tree x)
1985{
1986 if (DECL_P (x) && DECL_ARTIFICIAL (x))
1987 return false;
1988
1989 if (TREE_CODE (x) == BLOCK)
1990 return false;
1991
1992 /* VOID_TYPE_P (TREE_TYPE (x)) is workaround for cp/tree.cc
1993 (lvalue_p) crash on TRY/CATCH. */
1994 if (TREE_TYPE (x) == NULL_TREE || VOID_TYPE_P (TREE_TYPE (x)))
1995 return false;
1996
1997 if (!lvalue_p (x))
1998 return false;
1999
2000 /* No point to track non-const calls, they will never satisfy
2001 operand_equal_p. */
2002 if (TREE_CODE (x) == CALL_EXPR && (call_expr_flags (x) & ECF_CONST) == 0)
2003 return false;
2004
2005 if (TREE_CODE (x) == STRING_CST)
2006 return false;
2007
2008 return true;
2009}
2010
2011/* Return nonzero if X and Y appear to be the same candidate (or NULL) */
2012static bool
2013candidate_equal_p (const_tree x, const_tree y)
2014{
2015 return (x == y) || (x && y && operand_equal_p (x, y, 0));
2016}
2017
2018/* Walk the tree X, and record accesses to variables. If X is written by the
2019 parent tree, WRITER is the parent.
2020 We store accesses in one of the two lists: PBEFORE_SP, and PNO_SP. If this
2021 expression or its only operand forces a sequence point, then everything up
2022 to the sequence point is stored in PBEFORE_SP. Everything else gets stored
2023 in PNO_SP.
2024 Once we return, we will have emitted warnings if any subexpression before
2025 such a sequence point could be undefined. On a higher level, however, the
2026 sequence point may not be relevant, and we'll merge the two lists.
2027
2028 Example: (b++, a) + b;
2029 The call that processes the COMPOUND_EXPR will store the increment of B
2030 in PBEFORE_SP, and the use of A in PNO_SP. The higher-level call that
2031 processes the PLUS_EXPR will need to merge the two lists so that
2032 eventually, all accesses end up on the same list (and we'll warn about the
2033 unordered subexpressions b++ and b).
2034
2035 A note on merging. If we modify the former example so that our expression
2036 becomes
2037 (b++, b) + a
2038 care must be taken not simply to add all three expressions into the final
2039 PNO_SP list. The function merge_tlist takes care of that by merging the
2040 before-SP list of the COMPOUND_EXPR into its after-SP list in a special
2041 way, so that no more than one access to B is recorded. */
2042
2043static void
2044verify_tree (tree x, struct tlist **pbefore_sp, struct tlist **pno_sp,
2045 tree writer)
2046{
2047 struct tlist *tmp_before, *tmp_nosp, *tmp_list2, *tmp_list3;
2048 enum tree_code code;
2049 enum tree_code_class cl;
2050
2051 restart:
2052 /* X may be NULL if it is the operand of an empty statement expression
2053 ({ }). */
2054 if (x == NULL)
2055 return;
2056
2057 code = TREE_CODE (x);
2058 cl = TREE_CODE_CLASS (code);
2059
2060 if (warning_candidate_p (x))
2061 *pno_sp = new_tlist (*pno_sp, x, writer);
2062
2063 switch (code)
2064 {
2065 case CONSTRUCTOR:
2066 case SIZEOF_EXPR:
2067 case PAREN_SIZEOF_EXPR:
2068 return;
2069
2070 case COMPOUND_EXPR:
2071 case TRUTH_ANDIF_EXPR:
2072 case TRUTH_ORIF_EXPR:
2073 sequenced_binary:
2074 tmp_before = tmp_nosp = tmp_list2 = tmp_list3 = 0;
2075 verify_tree (TREE_OPERAND (x, 0), &tmp_before, &tmp_nosp, NULL_TREE);
2076 warn_for_collisions (tmp_nosp);
2077 merge_tlist (pbefore_sp, tmp_before, 0);
2078 merge_tlist (pbefore_sp, tmp_nosp, 0);
2079 verify_tree (TREE_OPERAND (x, 1), &tmp_list3, &tmp_list2, NULL_TREE);
2080 warn_for_collisions (tmp_list2);
2081 merge_tlist (pbefore_sp, tmp_list3, 0);
2082 merge_tlist (pno_sp, tmp_list2, 0);
2083 return;
2084
2085 case COND_EXPR:
2086 tmp_before = tmp_list2 = 0;
2087 verify_tree (TREE_OPERAND (x, 0), &tmp_before, &tmp_list2, NULL_TREE);
2088 warn_for_collisions (tmp_list2);
2089 merge_tlist (pbefore_sp, tmp_before, 0);
2090 merge_tlist (pbefore_sp, tmp_list2, 0);
2091
2092 tmp_list3 = tmp_nosp = 0;
2093 verify_tree (TREE_OPERAND (x, 1), &tmp_list3, &tmp_nosp, NULL_TREE);
2094 warn_for_collisions (tmp_nosp);
2095 merge_tlist (pbefore_sp, tmp_list3, 0);
2096
2097 tmp_list3 = tmp_list2 = 0;
2098 verify_tree (TREE_OPERAND (x, 2), &tmp_list3, &tmp_list2, NULL_TREE);
2099 warn_for_collisions (tmp_list2);
2100 merge_tlist (pbefore_sp, tmp_list3, 0);
2101 /* Rather than add both tmp_nosp and tmp_list2, we have to merge the
2102 two first, to avoid warning for (a ? b++ : b++). */
2103 merge_tlist (&tmp_nosp, tmp_list2, 0);
2104 add_tlist (pno_sp, tmp_nosp, NULL_TREE, 0);
2105 return;
2106
2107 case PREDECREMENT_EXPR:
2108 case PREINCREMENT_EXPR:
2109 case POSTDECREMENT_EXPR:
2110 case POSTINCREMENT_EXPR:
2111 verify_tree (TREE_OPERAND (x, 0), pno_sp, pno_sp, x);
2112 return;
2113
2114 case MODIFY_EXPR:
2115 tmp_before = tmp_nosp = tmp_list3 = 0;
2116 verify_tree (TREE_OPERAND (x, 1), &tmp_before, &tmp_nosp, NULL_TREE);
2117 verify_tree (TREE_OPERAND (x, 0), &tmp_list3, &tmp_list3, x);
2118 /* Expressions inside the LHS are not ordered wrt. the sequence points
2119 in the RHS. Example:
2120 *a = (a++, 2)
2121 Despite the fact that the modification of "a" is in the before_sp
2122 list (tmp_before), it conflicts with the use of "a" in the LHS.
2123 We can handle this by adding the contents of tmp_list3
2124 to those of tmp_before, and redoing the collision warnings for that
2125 list. */
2126 add_tlist (&tmp_before, tmp_list3, x, 1);
2127 warn_for_collisions (tmp_before);
2128 /* Exclude the LHS itself here; we first have to merge it into the
2129 tmp_nosp list. This is done to avoid warning for "a = a"; if we
2130 didn't exclude the LHS, we'd get it twice, once as a read and once
2131 as a write. */
2132 add_tlist (pno_sp, tmp_list3, x, 0);
2133 warn_for_collisions_1 (TREE_OPERAND (x, 0), x, tmp_nosp, 1);
2134
2135 merge_tlist (pbefore_sp, tmp_before, 0);
2136 if (warning_candidate_p (TREE_OPERAND (x, 0)))
2137 merge_tlist (&tmp_nosp, new_tlist (NULL, TREE_OPERAND (x, 0), x), 0);
2138 add_tlist (pno_sp, tmp_nosp, NULL_TREE, 1);
2139 return;
2140
2141 case CALL_EXPR:
2142 /* We need to warn about conflicts among arguments and conflicts between
2143 args and the function address. Side effects of the function address,
2144 however, are not ordered by the sequence point of the call. */
2145 {
2146 call_expr_arg_iterator iter;
2147 tree arg;
2148 tmp_before = tmp_nosp = 0;
2149 verify_tree (CALL_EXPR_FN (x), &tmp_before, &tmp_nosp, NULL_TREE);
2150 FOR_EACH_CALL_EXPR_ARG (arg, iter, x)
2151 {
2152 tmp_list2 = tmp_list3 = 0;
2153 verify_tree (arg, &tmp_list2, &tmp_list3, NULL_TREE);
2154 merge_tlist (&tmp_list3, tmp_list2, 0);
2155 add_tlist (&tmp_before, tmp_list3, NULL_TREE, 0);
2156 }
2157 add_tlist (&tmp_before, tmp_nosp, NULL_TREE, 0);
2158 warn_for_collisions (tmp_before);
2159 add_tlist (pbefore_sp, tmp_before, NULL_TREE, 0);
2160 return;
2161 }
2162
2163 case TREE_LIST:
2164 /* Scan the whole list, e.g. the indices of a multidimensional array. */
2165 while (x)
2166 {
2167 tmp_before = tmp_nosp = 0;
2168 verify_tree (TREE_VALUE (x), &tmp_before, &tmp_nosp, NULL_TREE);
2169 merge_tlist (&tmp_nosp, tmp_before, 0);
2170 add_tlist (pno_sp, tmp_nosp, NULL_TREE, 0);
2171 x = TREE_CHAIN (x);
2172 }
2173 return;
2174
2175 case SAVE_EXPR:
2176 {
2177 struct tlist_cache *t;
2178 for (t = save_expr_cache; t; t = t->next)
2179 if (candidate_equal_p (t->expr, x))
2180 break;
2181
2182 if (!t)
2183 {
2184 t = XOBNEW (&tlist_obstack, struct tlist_cache);
2185 t->next = save_expr_cache;
2186 t->expr = x;
2187 save_expr_cache = t;
2188
2189 tmp_before = tmp_nosp = 0;
2190 verify_tree (TREE_OPERAND (x, 0), &tmp_before, &tmp_nosp, NULL_TREE);
2191 warn_for_collisions (tmp_nosp);
2192
2193 tmp_list3 = 0;
2194 merge_tlist (&tmp_list3, tmp_nosp, 0);
2195 t->cache_before_sp = tmp_before;
2196 t->cache_after_sp = tmp_list3;
2197 }
2198 merge_tlist (pbefore_sp, t->cache_before_sp, 1);
2199 add_tlist (pno_sp, t->cache_after_sp, NULL_TREE, 1);
2200 return;
2201 }
2202
2203 case ADDR_EXPR:
2204 x = TREE_OPERAND (x, 0);
2205 if (DECL_P (x))
2206 return;
2207 writer = 0;
2208 goto restart;
2209
2210 case VIEW_CONVERT_EXPR:
2211 if (location_wrapper_p (x))
2212 {
2213 x = TREE_OPERAND (x, 0);
2214 goto restart;
2215 }
2216 goto do_default;
2217
2218 case LSHIFT_EXPR:
2219 case RSHIFT_EXPR:
2220 case ARRAY_REF:
2221 if (cxx_dialect >= cxx17)
2222 goto sequenced_binary;
2223 goto do_default;
2224
2225 case COMPONENT_REF:
2226 /* Treat as unary, the other operands aren't evaluated. */
2227 x = TREE_OPERAND (x, 0);
2228 writer = 0;
2229 goto restart;
2230
2231 default:
2232 do_default:
2233 /* For other expressions, simply recurse on their operands.
2234 Manual tail recursion for unary expressions.
2235 Other non-expressions need not be processed. */
2236 if (cl == tcc_unary)
2237 {
2238 x = TREE_OPERAND (x, 0);
2239 writer = 0;
2240 goto restart;
2241 }
2242 else if (IS_EXPR_CODE_CLASS (cl))
2243 {
2244 int lp;
2245 int max = TREE_OPERAND_LENGTH (x);
2246 for (lp = 0; lp < max; lp++)
2247 {
2248 tmp_before = tmp_nosp = 0;
2249 verify_tree (TREE_OPERAND (x, lp), &tmp_before, &tmp_nosp, 0);
2250 merge_tlist (&tmp_nosp, tmp_before, 0);
2251 add_tlist (pno_sp, tmp_nosp, NULL_TREE, 0);
2252 }
2253 }
2254 return;
2255 }
2256}
2257
2258static constexpr size_t verify_sequence_points_limit = 1024;
2259
2260/* Called from verify_sequence_points via walk_tree. */
2261
2262static tree
2263verify_tree_lim_r (tree *tp, int *walk_subtrees, void *data)
2264{
2265 if (++*((size_t *) data) > verify_sequence_points_limit)
2266 return integer_zero_node;
2267
2268 if (TYPE_P (*tp))
2269 *walk_subtrees = 0;
2270
2271 return NULL_TREE;
2272}
2273
2274/* Try to warn for undefined behavior in EXPR due to missing sequence
2275 points. */
2276
2277void
2278verify_sequence_points (tree expr)
2279{
2280 tlist *before_sp = nullptr, *after_sp = nullptr;
2281
2282 /* verify_tree is highly recursive, and merge_tlist is O(n^2),
2283 so we return early if the expression is too big. */
2284 size_t n = 0;
2285 if (walk_tree (&expr, verify_tree_lim_r, &n, nullptr))
2286 return;
2287
2288 warned_ids = nullptr;
2289 save_expr_cache = nullptr;
2290 if (!tlist_firstobj)
2291 {
2292 gcc_obstack_init (&tlist_obstack);
2293 tlist_firstobj = (char *) obstack_alloc (&tlist_obstack, 0);
2294 }
2295
2296 verify_tree (expr, &before_sp, &after_sp, NULL_TREE);
2297 warn_for_collisions (after_sp);
2298 obstack_free (&tlist_obstack, tlist_firstobj);
2299}
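
/* Two illustrative inputs:

     i = i++ + 1;       modifies i twice with no intervening sequence
                        point, so -Wsequence-point reports that the
                        operation on "i" may be undefined;
     i = (i++, i + 1);  the comma introduces a sequence point, so no
                        warning is expected.  */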
2300
2301/* Validate the expression after `case' and apply default promotions. */
2302
2303static tree
2304check_case_value (location_t loc, tree value)
2305{
2306 if (value == NULL_TREE)
2307 return value;
2308
2309 if (INTEGRAL_TYPE_P (TREE_TYPE (value))
2310 && TREE_CODE (value) == INTEGER_CST)
2311 /* Promote char or short to int. */
2312 value = perform_integral_promotions (value);
2313 else if (value != error_mark_node)
2314 {
2315 error_at (loc, "case label does not reduce to an integer constant");
2316 value = error_mark_node;
2317 }
2318
2319 constant_expression_warning (value);
2320
2321 return value;
2322}
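
/* For example, a label such as "case (short) 3:" has its operand
   promoted to int above, while "case 2.5:" is rejected with the
   "case label does not reduce to an integer constant" error.  */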
2323
2324/* Return an integer type with BITS bits of precision,
2325 that is unsigned if UNSIGNEDP is nonzero, otherwise signed. */
2326
2327tree
2328c_common_type_for_size (unsigned int bits, int unsignedp)
2329{
2330 int i;
2331
2332 if (bits == TYPE_PRECISION (integer_type_node))
2333 return unsignedp ? unsigned_type_node : integer_type_node;
2334
2335 if (bits == TYPE_PRECISION (signed_char_type_node))
2336 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
2337
2338 if (bits == TYPE_PRECISION (short_integer_type_node))
2339 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
2340
2341 if (bits == TYPE_PRECISION (long_integer_type_node))
2342 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
2343
2344 if (bits == TYPE_PRECISION (long_long_integer_type_node))
2345 return (unsignedp ? long_long_unsigned_type_node
2346 : long_long_integer_type_node);
2347
2348 for (i = 0; i < NUM_INT_N_ENTS; i ++)
2349 if (int_n_enabled_p[i]
2350 && bits == int_n_data[i].bitsize)
2351 return (unsignedp ? int_n_trees[i].unsigned_type
2352 : int_n_trees[i].signed_type);
2353
2354 if (bits == TYPE_PRECISION (widest_integer_literal_type_node))
2355 return (unsignedp ? widest_unsigned_literal_type_node
2356 : widest_integer_literal_type_node);
2357
2358 for (tree t = registered_builtin_types; t; t = TREE_CHAIN (t))
2359 {
2360 tree type = TREE_VALUE (t);
2361 if (TREE_CODE (type) == INTEGER_TYPE
2362 && bits == TYPE_PRECISION (type)
2363 && !!unsignedp == !!TYPE_UNSIGNED (type))
2364 return type;
2365 }
2366
2367 if (bits <= TYPE_PRECISION (intQI_type_node))
2368 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
2369
2370 if (bits <= TYPE_PRECISION (intHI_type_node))
2371 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
2372
2373 if (bits <= TYPE_PRECISION (intSI_type_node))
2374 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
2375
2376 if (bits <= TYPE_PRECISION (intDI_type_node))
2377 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
2378
2379 if (bits <= TYPE_PRECISION (widest_integer_literal_type_node))
2380 return (unsignedp ? widest_unsigned_literal_type_node
2381 : widest_integer_literal_type_node);
2382
2383 return NULL_TREE;
2384}
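
/* Illustrative results, assuming a typical LP64 target:
   c_common_type_for_size (32, 0) yields int,
   c_common_type_for_size (64, 1) yields unsigned long, and an odd width
   such as 24 falls through to the intN nodes (intSI_type_node here,
   since 24 <= 32).  */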
2385
2386/* Return a fixed-point type that has at least IBIT ibits and FBIT fbits
2387 that is unsigned if UNSIGNEDP is nonzero, otherwise signed;
2388 and saturating if SATP is nonzero, otherwise not saturating. */
2389
2390tree
2391c_common_fixed_point_type_for_size (unsigned int ibit, unsigned int fbit,
2392 int unsignedp, int satp)
2393{
2394 enum mode_class mclass;
2395 if (ibit == 0)
2396 mclass = unsignedp ? MODE_UFRACT : MODE_FRACT;
2397 else
2398 mclass = unsignedp ? MODE_UACCUM : MODE_ACCUM;
2399
2400 opt_scalar_mode opt_mode;
2401 scalar_mode mode;
2402 FOR_EACH_MODE_IN_CLASS (opt_mode, mclass)
2403 {
2404 mode = opt_mode.require ();
2405 if (GET_MODE_IBIT (mode) >= ibit && GET_MODE_FBIT (mode) >= fbit)
2406 break;
2407 }
2408
2409 if (!opt_mode.exists (&mode) || !targetm.scalar_mode_supported_p (mode))
2410 {
2411 sorry ("GCC cannot support operators with integer types and "
2412 "fixed-point types that have too many integral and "
2413 "fractional bits together");
2414 return NULL_TREE;
2415 }
2416
2417 return c_common_type_for_mode (mode, satp);
2418}
2419
2420/* Used for communication between c_common_type_for_mode and
2421 c_register_builtin_type. */
2422tree registered_builtin_types;
2423
2424/* Return a data type that has machine mode MODE.
2425 If the mode is an integer,
2426 then UNSIGNEDP selects between signed and unsigned types.
2427 If the mode is a fixed-point mode,
2428 then UNSIGNEDP selects between saturating and nonsaturating types. */
2429
2430tree
2431c_common_type_for_mode (machine_mode mode, int unsignedp)
2432{
2433 tree t;
2434 int i;
2435
2436 if (mode == TYPE_MODE (integer_type_node))
2437 return unsignedp ? unsigned_type_node : integer_type_node;
2438
2439 if (mode == TYPE_MODE (signed_char_type_node))
2440 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
2441
2442 if (mode == TYPE_MODE (short_integer_type_node))
2443 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
2444
2445 if (mode == TYPE_MODE (long_integer_type_node))
2446 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
2447
2448 if (mode == TYPE_MODE (long_long_integer_type_node))
2449 return unsignedp ? long_long_unsigned_type_node : long_long_integer_type_node;
2450
2451 for (i = 0; i < NUM_INT_N_ENTS; i ++)
2452 if (int_n_enabled_p[i]
2453 && mode == int_n_data[i].m)
2454 return (unsignedp ? int_n_trees[i].unsigned_type
2455 : int_n_trees[i].signed_type);
2456
2457 if (mode == QImode)
2458 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
2459
2460 if (mode == HImode)
2461 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
2462
2463 if (mode == SImode)
2464 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
2465
2466 if (mode == DImode)
2467 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
2468
2469#if HOST_BITS_PER_WIDE_INT >= 64
2470 if (mode == TYPE_MODE (intTI_type_node))
2471 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
2472#endif
2473
2474 if (mode == TYPE_MODE (float_type_node))
2475 return float_type_node;
2476
2477 if (mode == TYPE_MODE (double_type_node))
2478 return double_type_node;
2479
2480 if (mode == TYPE_MODE (long_double_type_node))
2481 return long_double_type_node;
2482
2483 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
2484 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE
2485 && mode == TYPE_MODE (FLOATN_NX_TYPE_NODE (i)))
2486 return FLOATN_NX_TYPE_NODE (i);
2487
2488 if (mode == TYPE_MODE (void_type_node))
2489 return void_type_node;
2490
2491 if (mode == TYPE_MODE (build_pointer_type (char_type_node))
2492 || mode == TYPE_MODE (build_pointer_type (integer_type_node)))
2493 {
2494 unsigned int precision
2495 = GET_MODE_PRECISION (as_a <scalar_int_mode> (mode));
2496 return (unsignedp
2497 ? make_unsigned_type (precision)
2498 : make_signed_type (precision));
2499 }
2500
2501 if (COMPLEX_MODE_P (mode))
2502 {
2503 machine_mode inner_mode;
2504 tree inner_type;
2505
2506 if (mode == TYPE_MODE (complex_float_type_node))
2507 return complex_float_type_node;
2508 if (mode == TYPE_MODE (complex_double_type_node))
2509 return complex_double_type_node;
2510 if (mode == TYPE_MODE (complex_long_double_type_node))
2511 return complex_long_double_type_node;
2512
2513 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
2514 if (COMPLEX_FLOATN_NX_TYPE_NODE (i) != NULL_TREE
2515 && mode == TYPE_MODE (COMPLEX_FLOATN_NX_TYPE_NODE (i)))
2516 return COMPLEX_FLOATN_NX_TYPE_NODE (i);
2517
2518 if (mode == TYPE_MODE (complex_integer_type_node) && !unsignedp)
2519 return complex_integer_type_node;
2520
2521 inner_mode = GET_MODE_INNER (mode);
2522 inner_type = c_common_type_for_mode (inner_mode, unsignedp);
2523 if (inner_type != NULL_TREE)
2524 return build_complex_type (inner_type);
2525 }
2526 else if (GET_MODE_CLASS (mode) == MODE_VECTOR_BOOL
2527 && valid_vector_subparts_p (GET_MODE_NUNITS (mode)))
2528 {
2529 unsigned int elem_bits = vector_element_size (GET_MODE_PRECISION (mode),
2530 GET_MODE_NUNITS (mode));
2531 tree bool_type = build_nonstandard_boolean_type (elem_bits);
2532 return build_vector_type_for_mode (bool_type, mode);
2533 }
2534 else if (VECTOR_MODE_P (mode)
2535 && valid_vector_subparts_p (GET_MODE_NUNITS (mode)))
2536 {
2537 machine_mode inner_mode = GET_MODE_INNER (mode);
2538 tree inner_type = c_common_type_for_mode (inner_mode, unsignedp);
2539 if (inner_type != NULL_TREE)
2540 return build_vector_type_for_mode (inner_type, mode);
2541 }
2542
2543 if (dfloat32_type_node != NULL_TREE
2544 && mode == TYPE_MODE (dfloat32_type_node))
2545 return dfloat32_type_node;
2546 if (dfloat64_type_node != NULL_TREE
2547 && mode == TYPE_MODE (dfloat64_type_node))
2548 return dfloat64_type_node;
2549 if (dfloat128_type_node != NULL_TREE
2550 && mode == TYPE_MODE (dfloat128_type_node))
2551 return dfloat128_type_node;
2552
2553 if (ALL_SCALAR_FIXED_POINT_MODE_P (mode))
2554 {
2555 if (mode == TYPE_MODE (short_fract_type_node))
2556 return unsignedp ? sat_short_fract_type_node : short_fract_type_node;
2557 if (mode == TYPE_MODE (fract_type_node))
2558 return unsignedp ? sat_fract_type_node : fract_type_node;
2559 if (mode == TYPE_MODE (long_fract_type_node))
2560 return unsignedp ? sat_long_fract_type_node : long_fract_type_node;
2561 if (mode == TYPE_MODE (long_long_fract_type_node))
2562 return unsignedp ? sat_long_long_fract_type_node
2563 : long_long_fract_type_node;
2564
2565 if (mode == TYPE_MODE (unsigned_short_fract_type_node))
2566 return unsignedp ? sat_unsigned_short_fract_type_node
2567 : unsigned_short_fract_type_node;
2568 if (mode == TYPE_MODE (unsigned_fract_type_node))
2569 return unsignedp ? sat_unsigned_fract_type_node
2570 : unsigned_fract_type_node;
2571 if (mode == TYPE_MODE (unsigned_long_fract_type_node))
2572 return unsignedp ? sat_unsigned_long_fract_type_node
2573 : unsigned_long_fract_type_node;
2574 if (mode == TYPE_MODE (unsigned_long_long_fract_type_node))
2575 return unsignedp ? sat_unsigned_long_long_fract_type_node
2576 : unsigned_long_long_fract_type_node;
2577
2578 if (mode == TYPE_MODE (short_accum_type_node))
2579 return unsignedp ? sat_short_accum_type_node : short_accum_type_node;
2580 if (mode == TYPE_MODE (accum_type_node))
2581 return unsignedp ? sat_accum_type_node : accum_type_node;
2582 if (mode == TYPE_MODE (long_accum_type_node))
2583 return unsignedp ? sat_long_accum_type_node : long_accum_type_node;
2584 if (mode == TYPE_MODE (long_long_accum_type_node))
2585 return unsignedp ? sat_long_long_accum_type_node
2586 : long_long_accum_type_node;
2587
2588 if (mode == TYPE_MODE (unsigned_short_accum_type_node))
2589 return unsignedp ? sat_unsigned_short_accum_type_node
2590 : unsigned_short_accum_type_node;
2591 if (mode == TYPE_MODE (unsigned_accum_type_node))
2592 return unsignedp ? sat_unsigned_accum_type_node
2593 : unsigned_accum_type_node;
2594 if (mode == TYPE_MODE (unsigned_long_accum_type_node))
2595 return unsignedp ? sat_unsigned_long_accum_type_node
2596 : unsigned_long_accum_type_node;
2597 if (mode == TYPE_MODE (unsigned_long_long_accum_type_node))
2598 return unsignedp ? sat_unsigned_long_long_accum_type_node
2599 : unsigned_long_long_accum_type_node;
2600
2601 if (mode == QQmode)
2602 return unsignedp ? sat_qq_type_node : qq_type_node;
2603 if (mode == HQmode)
2604 return unsignedp ? sat_hq_type_node : hq_type_node;
2605 if (mode == SQmode)
2606 return unsignedp ? sat_sq_type_node : sq_type_node;
2607 if (mode == DQmode)
2608 return unsignedp ? sat_dq_type_node : dq_type_node;
2609 if (mode == TQmode)
2610 return unsignedp ? sat_tq_type_node : tq_type_node;
2611
2612 if (mode == UQQmode)
2613 return unsignedp ? sat_uqq_type_node : uqq_type_node;
2614 if (mode == UHQmode)
2615 return unsignedp ? sat_uhq_type_node : uhq_type_node;
2616 if (mode == USQmode)
2617 return unsignedp ? sat_usq_type_node : usq_type_node;
2618 if (mode == UDQmode)
2619 return unsignedp ? sat_udq_type_node : udq_type_node;
2620 if (mode == UTQmode)
2621 return unsignedp ? sat_utq_type_node : utq_type_node;
2622
2623 if (mode == HAmode)
2624 return unsignedp ? sat_ha_type_node : ha_type_node;
2625 if (mode == SAmode)
2626 return unsignedp ? sat_sa_type_node : sa_type_node;
2627 if (mode == DAmode)
2628 return unsignedp ? sat_da_type_node : da_type_node;
2629 if (mode == TAmode)
2630 return unsignedp ? sat_ta_type_node : ta_type_node;
2631
2632 if (mode == UHAmode)
2633 return unsignedp ? sat_uha_type_node : uha_type_node;
2634 if (mode == USAmode)
2635 return unsignedp ? sat_usa_type_node : usa_type_node;
2636 if (mode == UDAmode)
2637 return unsignedp ? sat_uda_type_node : uda_type_node;
2638 if (mode == UTAmode)
2639 return unsignedp ? sat_uta_type_node : uta_type_node;
2640 }
2641
2642 for (t = registered_builtin_types; t; t = TREE_CHAIN (t))
2643 {
2644 tree type = TREE_VALUE (t);
2645 if (TYPE_MODE (type) == mode
2646 && VECTOR_TYPE_P (type) == VECTOR_MODE_P (mode)
2647 && !!unsignedp == !!TYPE_UNSIGNED (type))
2648 return type;
2649 }
2650 return NULL_TREE;
2651}
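
/* For instance, on a target where int has mode SImode,
   c_common_type_for_mode (SImode, 1) returns unsigned int, while a
   vector mode such as V4SImode is typically handled by the
   VECTOR_MODE_P branch and yields a vector type built from the inner
   mode's element type.  */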
2652
2653tree
2654c_common_unsigned_type (tree type)
2655{
2656 return c_common_signed_or_unsigned_type (1, type);
2657}
2658
2659/* Return a signed type the same as TYPE in other respects. */
2660
2661tree
2662c_common_signed_type (tree type)
2663{
2664 return c_common_signed_or_unsigned_type (0, type);
2665}
2666
2667/* Return a type the same as TYPE except unsigned or
2668 signed according to UNSIGNEDP. */
2669
2670tree
2671c_common_signed_or_unsigned_type (int unsignedp, tree type)
2672{
2673 tree type1;
2674 int i;
2675
2676 /* This block of code emulates the behavior of the old
2677 c_common_unsigned_type. In particular, it returns
2678 long_unsigned_type_node if passed a long, even when an int would
2679 have the same size. This is necessary for warnings to work
2680 correctly on architectures where sizeof (int) == sizeof (long). */
2681
2682 type1 = TYPE_MAIN_VARIANT (type);
2683 if (type1 == signed_char_type_node || type1 == char_type_node || type1 == unsigned_char_type_node)
2684 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
2685 if (type1 == integer_type_node || type1 == unsigned_type_node)
2686 return unsignedp ? unsigned_type_node : integer_type_node;
2687 if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
2688 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
2689 if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
2690 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
2691 if (type1 == long_long_integer_type_node || type1 == long_long_unsigned_type_node)
2692 return unsignedp ? long_long_unsigned_type_node : long_long_integer_type_node;
2693
2694 for (i = 0; i < NUM_INT_N_ENTS; i ++)
2695 if (int_n_enabled_p[i]
2696 && (type1 == int_n_trees[i].unsigned_type
2697 || type1 == int_n_trees[i].signed_type))
2698 return (unsignedp ? int_n_trees[i].unsigned_type
2699 : int_n_trees[i].signed_type);
2700
2701#if HOST_BITS_PER_WIDE_INT >= 64
2702 if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
2703 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
2704#endif
2705 if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
2706 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
2707 if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
2708 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
2709 if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
2710 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
2711 if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
2712 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
2713
2714#define C_COMMON_FIXED_TYPES(NAME) \
2715 if (type1 == short_ ## NAME ## _type_node \
2716 || type1 == unsigned_short_ ## NAME ## _type_node) \
2717 return unsignedp ? unsigned_short_ ## NAME ## _type_node \
2718 : short_ ## NAME ## _type_node; \
2719 if (type1 == NAME ## _type_node \
2720 || type1 == unsigned_ ## NAME ## _type_node) \
2721 return unsignedp ? unsigned_ ## NAME ## _type_node \
2722 : NAME ## _type_node; \
2723 if (type1 == long_ ## NAME ## _type_node \
2724 || type1 == unsigned_long_ ## NAME ## _type_node) \
2725 return unsignedp ? unsigned_long_ ## NAME ## _type_node \
2726 : long_ ## NAME ## _type_node; \
2727 if (type1 == long_long_ ## NAME ## _type_node \
2728 || type1 == unsigned_long_long_ ## NAME ## _type_node) \
2729 return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
2730 : long_long_ ## NAME ## _type_node;
2731
2732#define C_COMMON_FIXED_MODE_TYPES(NAME) \
2733 if (type1 == NAME ## _type_node \
2734 || type1 == u ## NAME ## _type_node) \
2735 return unsignedp ? u ## NAME ## _type_node \
2736 : NAME ## _type_node;
2737
2738#define C_COMMON_FIXED_TYPES_SAT(NAME) \
2739 if (type1 == sat_ ## short_ ## NAME ## _type_node \
2740 || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
2741 return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
2742 : sat_ ## short_ ## NAME ## _type_node; \
2743 if (type1 == sat_ ## NAME ## _type_node \
2744 || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
2745 return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
2746 : sat_ ## NAME ## _type_node; \
2747 if (type1 == sat_ ## long_ ## NAME ## _type_node \
2748 || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
2749 return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
2750 : sat_ ## long_ ## NAME ## _type_node; \
2751 if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
2752 || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
2753 return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
2754 : sat_ ## long_long_ ## NAME ## _type_node;
2755
2756#define C_COMMON_FIXED_MODE_TYPES_SAT(NAME) \
2757 if (type1 == sat_ ## NAME ## _type_node \
2758 || type1 == sat_ ## u ## NAME ## _type_node) \
2759 return unsignedp ? sat_ ## u ## NAME ## _type_node \
2760 : sat_ ## NAME ## _type_node;
2761
2762 C_COMMON_FIXED_TYPES (fract);
2763 C_COMMON_FIXED_TYPES_SAT (fract);
2764 C_COMMON_FIXED_TYPES (accum);
2765 C_COMMON_FIXED_TYPES_SAT (accum);
2766
2767 C_COMMON_FIXED_MODE_TYPES (qq);
2768 C_COMMON_FIXED_MODE_TYPES (hq);
2769 C_COMMON_FIXED_MODE_TYPES (sq);
2770 C_COMMON_FIXED_MODE_TYPES (dq);
2771 C_COMMON_FIXED_MODE_TYPES (tq);
2772 C_COMMON_FIXED_MODE_TYPES_SAT (qq);
2773 C_COMMON_FIXED_MODE_TYPES_SAT (hq);
2774 C_COMMON_FIXED_MODE_TYPES_SAT (sq);
2775 C_COMMON_FIXED_MODE_TYPES_SAT (dq);
2776 C_COMMON_FIXED_MODE_TYPES_SAT (tq);
2777 C_COMMON_FIXED_MODE_TYPES (ha);
2778 C_COMMON_FIXED_MODE_TYPES (sa);
2779 C_COMMON_FIXED_MODE_TYPES (da);
2780 C_COMMON_FIXED_MODE_TYPES (ta);
2781 C_COMMON_FIXED_MODE_TYPES_SAT (ha);
2782 C_COMMON_FIXED_MODE_TYPES_SAT (sa);
2783 C_COMMON_FIXED_MODE_TYPES_SAT (da);
2784 C_COMMON_FIXED_MODE_TYPES_SAT (ta);
2785
2786 /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
2787 the precision; they have precision set to match their range, but
2788 may use a wider mode to match an ABI. If we change modes, we may
2789 wind up with bad conversions. For INTEGER_TYPEs in C, must check
2790 the precision as well, so as to yield correct results for
2791 bit-field types. C++ does not have these separate bit-field
2792 types, and producing a signed or unsigned variant of an
2793 ENUMERAL_TYPE may cause other problems as well. */
2794
2795 if (!INTEGRAL_TYPE_P (type)
2796 || TYPE_UNSIGNED (type) == unsignedp)
2797 return type;
2798
2799 if (TREE_CODE (type) == BITINT_TYPE
2800 /* signed _BitInt(1) is invalid, avoid creating that. */
2801 && (unsignedp || TYPE_PRECISION (type) > 1))
2802 return build_bitint_type (TYPE_PRECISION (type), unsignedp);
2803
2804#define TYPE_OK(node) \
2805 (TYPE_MODE (type) == TYPE_MODE (node) \
2806 && TYPE_PRECISION (type) == TYPE_PRECISION (node))
2807 if (TYPE_OK (signed_char_type_node))
2808 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
2809 if (TYPE_OK (integer_type_node))
2810 return unsignedp ? unsigned_type_node : integer_type_node;
2811 if (TYPE_OK (short_integer_type_node))
2812 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
2813 if (TYPE_OK (long_integer_type_node))
2814 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
2815 if (TYPE_OK (long_long_integer_type_node))
2816 return (unsignedp ? long_long_unsigned_type_node
2817 : long_long_integer_type_node);
2818
2819 for (i = 0; i < NUM_INT_N_ENTS; i ++)
2820 if (int_n_enabled_p[i]
2821 && TYPE_MODE (type) == int_n_data[i].m
2822 && TYPE_PRECISION (type) == int_n_data[i].bitsize)
2823 return (unsignedp ? int_n_trees[i].unsigned_type
2824 : int_n_trees[i].signed_type);
2825
2826#if HOST_BITS_PER_WIDE_INT >= 64
2827 if (TYPE_OK (intTI_type_node))
2828 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
2829#endif
2830 if (TYPE_OK (intDI_type_node))
2831 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
2832 if (TYPE_OK (intSI_type_node))
2833 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
2834 if (TYPE_OK (intHI_type_node))
2835 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
2836 if (TYPE_OK (intQI_type_node))
2837 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
2838#undef TYPE_OK
2839
2840 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
2841}
2842
2843/* Build a bit-field integer type for the given WIDTH and UNSIGNEDP. */
2844
2845tree
2846c_build_bitfield_integer_type (unsigned HOST_WIDE_INT width, int unsignedp)
2847{
2848 int i;
2849
2850 /* Extended integer types of the same width as a standard type have
2851 lesser rank, so those of the same width as int promote to int or
2852 unsigned int and are valid for printf formats expecting int or
2853 unsigned int. To avoid such special cases, avoid creating
2854 extended integer types for bit-fields if a standard integer type
2855 is available. */
2856 if (width == TYPE_PRECISION (integer_type_node))
2857 return unsignedp ? unsigned_type_node : integer_type_node;
2858 if (width == TYPE_PRECISION (signed_char_type_node))
2859 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
2860 if (width == TYPE_PRECISION (short_integer_type_node))
2861 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
2862 if (width == TYPE_PRECISION (long_integer_type_node))
2863 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
2864 if (width == TYPE_PRECISION (long_long_integer_type_node))
2865 return (unsignedp ? long_long_unsigned_type_node
2866 : long_long_integer_type_node);
2867 for (i = 0; i < NUM_INT_N_ENTS; i ++)
2868 if (int_n_enabled_p[i]
2869 && width == int_n_data[i].bitsize)
2870 return (unsignedp ? int_n_trees[i].unsigned_type
2871 : int_n_trees[i].signed_type);
2872 return build_nonstandard_integer_type (width, unsignedp);
2873}
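
/* For example, assuming 8-bit chars and 32-bit ints: a bit-field
   declared "unsigned int f : 8" gets unsigned char here, because a
   standard type of that width exists, whereas "int f : 24" has no
   standard counterpart and gets a nonstandard 24-bit integer type.  */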
2874
2875/* The C version of the register_builtin_type langhook. */
2876
2877void
2878c_register_builtin_type (tree type, const char* name)
2879{
2880 tree decl;
2881
2882 decl = build_decl (UNKNOWN_LOCATION,
2883 TYPE_DECL, get_identifier (name), type);
2884 DECL_ARTIFICIAL (decl) = 1;
2885 if (!TYPE_NAME (type))
2886 TYPE_NAME (type) = decl;
2887 lang_hooks.decls.pushdecl (decl);
2888
2889 registered_builtin_types = tree_cons (0, type, registered_builtin_types);
2890}
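
/* A hypothetical use (the type node and name below are made up for
   illustration): a target could call

     c_register_builtin_type (example_type_node, "__builtin_example_t");

   which pushes a TYPE_DECL for the name and also records the type in
   registered_builtin_types so that c_common_type_for_mode and
   c_common_type_for_size can find it later.  */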
2891
2892/* Print an error message for invalid operands to the arithmetic operation
2893 CODE with TYPE0 for operand 0, and TYPE1 for operand 1.
2894 RICHLOC is a rich location for the message, containing either
2895 three separate locations, one for each of the operator and the operands
2896
2897 lhs op rhs
2898 ~~~ ^~ ~~~
2899
2900 (C FE), or one location ranging over all of them
2901
2902 lhs op rhs
2903 ~~~~^~~~~~
2904
2905 (C++ FE). */
2906
2907void
2908binary_op_error (rich_location *richloc, enum tree_code code,
2909 tree type0, tree type1)
2910{
2911 const char *opname;
2912
2913 switch (code)
2914 {
2915 case PLUS_EXPR:
2916 opname = "+"; break;
2917 case MINUS_EXPR:
2918 opname = "-"; break;
2919 case MULT_EXPR:
2920 opname = "*"; break;
2921 case MAX_EXPR:
2922 opname = "max"; break;
2923 case MIN_EXPR:
2924 opname = "min"; break;
2925 case EQ_EXPR:
2926 opname = "=="; break;
2927 case NE_EXPR:
2928 opname = "!="; break;
2929 case LE_EXPR:
2930 opname = "<="; break;
2931 case GE_EXPR:
2932 opname = ">="; break;
2933 case LT_EXPR:
2934 opname = "<"; break;
2935 case GT_EXPR:
2936 opname = ">"; break;
2937 case LSHIFT_EXPR:
2938 opname = "<<"; break;
2939 case RSHIFT_EXPR:
2940 opname = ">>"; break;
2941 case TRUNC_MOD_EXPR:
2942 case FLOOR_MOD_EXPR:
2943 opname = "%"; break;
2944 case TRUNC_DIV_EXPR:
2945 case FLOOR_DIV_EXPR:
2946 opname = "/"; break;
2947 case BIT_AND_EXPR:
2948 opname = "&"; break;
2949 case BIT_IOR_EXPR:
2950 opname = "|"; break;
2951 case TRUTH_ANDIF_EXPR:
2952 opname = "&&"; break;
2953 case TRUTH_ORIF_EXPR:
2954 opname = "||"; break;
2955 case BIT_XOR_EXPR:
2956 opname = "^"; break;
2957 default:
2958 gcc_unreachable ();
2959 }
2960 error_at (richloc,
2961 "invalid operands to binary %s (have %qT and %qT)",
2962 opname, type0, type1);
2963}
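
/* The resulting diagnostic reads, for example,

     error: invalid operands to binary + (have 'struct s' and 'float')

   with the rich location underlining the operator and operands as
   sketched in the comment above.  */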
2964
2965/* Given an expression as a tree, return its original type. Do this
2966 by stripping any conversion that preserves the sign and precision. */
2967static tree
2968expr_original_type (tree expr)
2969{
2970 STRIP_SIGN_NOPS (expr);
2971 return TREE_TYPE (expr);
2972}
2973
2974/* Subroutine of build_binary_op, used for comparison operations.
2975 See if the operands have both been converted from subword integer types
2976 and, if so, perhaps change them both back to their original type.
2977 This function is also responsible for converting the two operands
2978 to the proper common type for comparison.
2979
2980 The arguments of this function are all pointers to local variables
2981 of build_binary_op: OP0_PTR is &OP0, OP1_PTR is &OP1,
2982 RESTYPE_PTR is &RESULT_TYPE and RESCODE_PTR is &RESULTCODE.
2983
2984 LOC is the location of the comparison.
2985
2986 If this function returns non-NULL_TREE, it means that the comparison has
2987 a constant value. What this function returns is an expression for
2988 that value. */
2989
2990tree
2991shorten_compare (location_t loc, tree *op0_ptr, tree *op1_ptr,
2992 tree *restype_ptr, enum tree_code *rescode_ptr)
2993{
2994 tree type;
2995 tree op0 = *op0_ptr;
2996 tree op1 = *op1_ptr;
2997 int unsignedp0, unsignedp1;
2998 int real1, real2;
2999 tree primop0, primop1;
3000 enum tree_code code = *rescode_ptr;
3001
3002 /* Throw away any conversions to wider types
3003 already present in the operands. */
3004
3005 primop0 = c_common_get_narrower (op0, &unsignedp0);
3006 primop1 = c_common_get_narrower (op1, &unsignedp1);
3007
3008 /* If primopN is first sign-extended from primopN's precision to opN's
3009 precision, then zero-extended from opN's precision to
3010 *restype_ptr precision, shortenings might be invalid. */
3011 if (TYPE_PRECISION (TREE_TYPE (primop0)) < TYPE_PRECISION (TREE_TYPE (op0))
3012 && TYPE_PRECISION (TREE_TYPE (op0)) < TYPE_PRECISION (*restype_ptr)
3013 && !unsignedp0
3014 && TYPE_UNSIGNED (TREE_TYPE (op0)))
3015 primop0 = op0;
3016 if (TYPE_PRECISION (TREE_TYPE (primop1)) < TYPE_PRECISION (TREE_TYPE (op1))
3017 && TYPE_PRECISION (TREE_TYPE (op1)) < TYPE_PRECISION (*restype_ptr)
3018 && !unsignedp1
3019 && TYPE_UNSIGNED (TREE_TYPE (op1)))
3020 primop1 = op1;
3021
3022 /* Handle the case that OP0 does not *contain* a conversion
3023 but it *requires* conversion to FINAL_TYPE. */
3024
3025 if (op0 == primop0 && TREE_TYPE (op0) != *restype_ptr)
3026 unsignedp0 = TYPE_UNSIGNED (TREE_TYPE (op0));
3027 if (op1 == primop1 && TREE_TYPE (op1) != *restype_ptr)
3028 unsignedp1 = TYPE_UNSIGNED (TREE_TYPE (op1));
3029
3030 /* If one of the operands must be floated, we cannot optimize. */
3031 real1 = SCALAR_FLOAT_TYPE_P (TREE_TYPE (primop0));
3032 real2 = SCALAR_FLOAT_TYPE_P (TREE_TYPE (primop1));
3033
3034 /* If first arg is constant, swap the args (changing operation
3035 so value is preserved), for canonicalization. Don't do this if
3036 the second arg is 0. */
3037
3038 if (TREE_CONSTANT (primop0)
3039 && !integer_zerop (primop1) && !real_zerop (primop1)
3040 && !fixed_zerop (primop1))
3041 {
3042 std::swap (primop0, primop1);
3043 std::swap (op0, op1);
3044 *op0_ptr = op0;
3045 *op1_ptr = op1;
3046 std::swap (unsignedp0, unsignedp1);
3047 std::swap (real1, real2);
3048
3049 switch (code)
3050 {
3051 case LT_EXPR:
3052 code = GT_EXPR;
3053 break;
3054 case GT_EXPR:
3055 code = LT_EXPR;
3056 break;
3057 case LE_EXPR:
3058 code = GE_EXPR;
3059 break;
3060 case GE_EXPR:
3061 code = LE_EXPR;
3062 break;
3063 default:
3064 break;
3065 }
3066 *rescode_ptr = code;
3067 }
3068
3069 /* If comparing an integer against a constant more bits wide,
3070 maybe we can deduce a value of 1 or 0 independent of the data.
3071 Or else truncate the constant now
3072 rather than extend the variable at run time.
3073
3074 This is only interesting if the constant is the wider arg.
3075 Also, it is not safe if the constant is unsigned and the
3076 variable arg is signed, since in this case the variable
3077 would be sign-extended and then regarded as unsigned.
3078 Our technique fails in this case because the lowest/highest
3079 possible unsigned results don't follow naturally from the
3080 lowest/highest possible values of the variable operand.
3081 For just EQ_EXPR and NE_EXPR there is another technique that
3082 could be used: see if the constant can be faithfully represented
3083 in the other operand's type, by truncating it and reextending it
3084 and see if that preserves the constant's value. */
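 Sketch of the common case: comparing a signed char C with the int
 constant 1000 need not extend C at run time, because 1000 lies outside
 [SCHAR_MIN, SCHAR_MAX]; C < 1000 is therefore always true and C > 1000
 always false, which the min_/max_ tests below detect.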
3085
3086 if (!real1 && !real2
3087 && TREE_CODE (TREE_TYPE (primop0)) != FIXED_POINT_TYPE
3088 && TREE_CODE (primop1) == INTEGER_CST
3089 && TYPE_PRECISION (TREE_TYPE (primop0)) < TYPE_PRECISION (*restype_ptr))
3090 {
3091 int min_gt, max_gt, min_lt, max_lt;
3092 tree maxval, minval;
3093 /* 1 if comparison is nominally unsigned. */
3094 int unsignedp = TYPE_UNSIGNED (*restype_ptr);
3095 tree val;
3096
3097 type = c_common_signed_or_unsigned_type (unsignedp0,
3098 TREE_TYPE (primop0));
3099
3100 maxval = TYPE_MAX_VALUE (type);
3101 minval = TYPE_MIN_VALUE (type);
3102
3103 if (unsignedp && !unsignedp0)
3104 *restype_ptr = c_common_signed_type (*restype_ptr);
3105
3106 if (TREE_TYPE (primop1) != *restype_ptr)
3107 {
3108 /* Convert primop1 to target type, but do not introduce
3109 additional overflow. We know primop1 is an int_cst. */
3110 primop1 = force_fit_type (*restype_ptr,
3111 wi::to_wide
3112 (primop1,
3113 TYPE_PRECISION (*restype_ptr)),
3114 0, TREE_OVERFLOW (primop1));
3115 }
3116 if (type != *restype_ptr)
3117 {
3118 minval = convert (*restype_ptr, minval);
3119 maxval = convert (*restype_ptr, maxval);
3120 }
3121
3122 min_gt = tree_int_cst_lt (primop1, minval);
3123 max_gt = tree_int_cst_lt (primop1, maxval);
3124 min_lt = tree_int_cst_lt (minval, primop1);
3125 max_lt = tree_int_cst_lt (maxval, primop1);
3126
3127 val = 0;
3128 /* This used to be a switch, but Genix compiler can't handle that. */
3129 if (code == NE_EXPR)
3130 {
3131 if (max_lt || min_gt)
3132 val = truthvalue_true_node;
3133 }
3134 else if (code == EQ_EXPR)
3135 {
3136 if (max_lt || min_gt)
3137 val = truthvalue_false_node;
3138 }
3139 else if (code == LT_EXPR)
3140 {
3141 if (max_lt)
3142 val = truthvalue_true_node;
3143 if (!min_lt)
3144 val = truthvalue_false_node;
3145 }
3146 else if (code == GT_EXPR)
3147 {
3148 if (min_gt)
3149 val = truthvalue_true_node;
3150 if (!max_gt)
3151 val = truthvalue_false_node;
3152 }
3153 else if (code == LE_EXPR)
3154 {
3155 if (!max_gt)
3156 val = truthvalue_true_node;
3157 if (min_gt)
3158 val = truthvalue_false_node;
3159 }
3160 else if (code == GE_EXPR)
3161 {
3162 if (!min_lt)
3163 val = truthvalue_true_node;
3164 if (max_lt)
3165 val = truthvalue_false_node;
3166 }
3167
3168 /* If primop0 was sign-extended and an unsigned comparison was specified,
3169 we did a signed comparison above using the signed type bounds.
3170 But the comparison we output must be unsigned.
3171
3172 Also, for inequalities, VAL is no good; but if the signed
3173 comparison had *any* fixed result, it follows that the
3174 unsigned comparison just tests the sign in reverse
3175 (positive values are LE, negative ones GE).
3176 So we can generate an unsigned comparison
3177 against an extreme value of the signed type. */
3178
3179 if (unsignedp && !unsignedp0)
3180 {
3181 if (val != 0)
3182 switch (code)
3183 {
3184 case LT_EXPR:
3185 case GE_EXPR:
3186 primop1 = TYPE_MIN_VALUE (type);
3187 val = 0;
3188 break;
3189
3190 case LE_EXPR:
3191 case GT_EXPR:
3192 primop1 = TYPE_MAX_VALUE (type);
3193 val = 0;
3194 break;
3195
3196 default:
3197 break;
3198 }
3199 type = c_common_unsigned_type (type);
3200 }
3201
3202 if (TREE_CODE (primop0) != INTEGER_CST
3203 /* Don't warn if it's from a (non-system) macro. */
3204 && !(from_macro_expansion_at
3205 (expansion_point_location_if_in_system_header
3206 (EXPR_LOCATION (primop0)))))
3207 {
3208 if (val == truthvalue_false_node)
3209 warning_at (loc, OPT_Wtype_limits,
3210 "comparison is always false due to limited range of data type");
3211 if (val == truthvalue_true_node)
3212 warning_at (loc, OPT_Wtype_limits,
3213 "comparison is always true due to limited range of data type");
3214 }
3215
3216 if (val != 0)
3217 {
3218 /* Don't forget to evaluate PRIMOP0 if it has side effects. */
3219 if (TREE_SIDE_EFFECTS (primop0))
3220 return build2 (COMPOUND_EXPR, TREE_TYPE (val), primop0, val);
3221 return val;
3222 }
3223
3224 /* Value is not predetermined, but do the comparison
3225 in the type of the operand that is not constant.
3226 TYPE is already properly set. */
3227 }
3228
3229 /* If either arg is decimal float and the other is float, find the
3230 proper common type to use for comparison. */
3231 else if (real1 && real2
3232 && DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (primop0)))
3233 && DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (primop1))))
3234 type = common_type (TREE_TYPE (primop0), TREE_TYPE (primop1));
3235
3236 /* If either arg is decimal float and the other is float, fail. */
3237 else if (real1 && real2
3238 && (DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (primop0)))
3239 || DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (primop1)))))
3240 {
3241 type = *restype_ptr;
3242 primop0 = op0;
3243 primop1 = op1;
3244 }
3245
3246 else if (real1 && real2
3247 && (TYPE_PRECISION (TREE_TYPE (primop0))
3248 == TYPE_PRECISION (TREE_TYPE (primop1))))
3249 type = TREE_TYPE (primop0);
3250
3251 /* If args' natural types are both narrower than nominal type
3252 and both extend in the same manner, compare them
3253 in the type of the wider arg.
3254 Otherwise must actually extend both to the nominal
3255 common type lest different ways of extending
3256 alter the result.
3257 (eg, (short)-1 == (unsigned short)-1 should be 0.) */
3258
3259 else if (unsignedp0 == unsignedp1 && real1 == real2
3260 && TYPE_PRECISION (TREE_TYPE (primop0)) < TYPE_PRECISION (*restype_ptr)
3261 && TYPE_PRECISION (TREE_TYPE (primop1)) < TYPE_PRECISION (*restype_ptr)
3262 && (type = common_type (TREE_TYPE (primop0), TREE_TYPE (primop1)))
3263 != error_mark_node)
3264 {
3265 type = c_common_signed_or_unsigned_type (unsignedp0
3266 || TYPE_UNSIGNED (*restype_ptr),
3267 type);
3268 /* Make sure shorter operand is extended the right way
3269 to match the longer operand. */
3270 primop0
3271 = convert (c_common_signed_or_unsigned_type (unsignedp0,
3272 TREE_TYPE (primop0)),
3273 primop0);
3274 primop1
3275 = convert (c_common_signed_or_unsigned_type (unsignedp1,
3276 TREE_TYPE (primop1)),
3277 primop1);
3278 }
3279 else
3280 {
3281 /* Here we must do the comparison on the nominal type
3282 using the args exactly as we received them. */
3283 type = *restype_ptr;
3284 primop0 = op0;
3285 primop1 = op1;
3286
3287 /* We want to fold unsigned comparisons of >= and < against zero.
3288 For these, we may also issue a warning if we have a non-constant
3289 compared against zero, where the zero was spelled as "0" (rather
3290 than merely folding to it).
3291 If we have at least one constant, then op1 is constant
3292 and we may have a non-constant expression as op0. */
3293 if (!real1 && !real2 && integer_zerop (primop1)
3294 && TYPE_UNSIGNED (*restype_ptr))
3295 {
3296 tree value = NULL_TREE;
3297 /* All unsigned values are >= 0, so we warn. However,
3298 if OP0 is a constant that is >= 0, the signedness of
3299 the comparison isn't an issue, so suppress the
3300 warning. */
3301 tree folded_op0 = fold_for_warn (op0);
3302 bool warn =
3303 warn_type_limits && !in_system_header_at (loc)
3304 && !(TREE_CODE (folded_op0) == INTEGER_CST
3305 && !TREE_OVERFLOW (convert (c_common_signed_type (type),
3306 folded_op0)))
3307 /* Do not warn for enumeration types. */
3308 && (TREE_CODE (expr_original_type (folded_op0)) != ENUMERAL_TYPE);
3309
3310 switch (code)
3311 {
3312 case GE_EXPR:
3313 if (warn)
3314 warning_at (loc, OPT_Wtype_limits,
3315 "comparison of unsigned expression in %<>= 0%> "
3316 "is always true");
3317 value = truthvalue_true_node;
3318 break;
3319
3320 case LT_EXPR:
3321 if (warn)
3322 warning_at (loc, OPT_Wtype_limits,
3323 "comparison of unsigned expression in %<< 0%> "
3324 "is always false");
3325 value = truthvalue_false_node;
3326 break;
3327
3328 default:
3329 break;
3330 }
3331
3332 if (value != NULL_TREE)
3333 {
3334 /* Don't forget to evaluate PRIMOP0 if it has side effects. */
3335 if (TREE_SIDE_EFFECTS (primop0))
3336 return build2 (COMPOUND_EXPR, TREE_TYPE (value),
3337 primop0, value);
3338 return value;
3339 }
3340 }
3341 }
3342
3343 *op0_ptr = convert (type, primop0);
3344 *op1_ptr = convert (type, primop1);
3345
3346 *restype_ptr = truthvalue_type_node;
3347
3348 return NULL_TREE;
3349}
3350
3351/* Return a tree for the sum or difference (RESULTCODE says which)
3352 of pointer PTROP and integer INTOP. */
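 A sketch of the usual case: for int *p and int n, p + n is built here
 roughly as POINTER_PLUS_EXPR (p, (sizetype) (n * sizeof (int))), and
 p - n negates the scaled offset first.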
3353
3354tree
3355pointer_int_sum (location_t loc, enum tree_code resultcode,
3356 tree ptrop, tree intop, bool complain)
3357{
3358 tree size_exp, ret;
3359
3360 /* The result is a pointer of the same type that is being added. */
3361 tree result_type = TREE_TYPE (ptrop);
3362
3363 if (VOID_TYPE_P (TREE_TYPE (result_type)))
3364 {
3365 if (complain && warn_pointer_arith)
3366 pedwarn (loc, OPT_Wpointer_arith,
3367 "pointer of type %<void *%> used in arithmetic");
3368 else if (!complain)
3369 return error_mark_node;
3370 size_exp = integer_one_node;
3371 }
3372 else if (TREE_CODE (TREE_TYPE (result_type)) == FUNCTION_TYPE)
3373 {
3374 if (complain && warn_pointer_arith)
3375 pedwarn (loc, OPT_Wpointer_arith,
3376 "pointer to a function used in arithmetic");
3377 else if (!complain)
3378 return error_mark_node;
3379 size_exp = integer_one_node;
3380 }
3381 else if (!verify_type_context (loc, TCTX_POINTER_ARITH,
3382 TREE_TYPE (result_type)))
3383 size_exp = integer_one_node;
3384 else
3385 {
3386 if (!complain && !COMPLETE_TYPE_P (TREE_TYPE (result_type)))
3387 return error_mark_node;
3388 size_exp = size_in_bytes_loc (loc, TREE_TYPE (result_type));
3389 /* Wrap the pointer expression in a SAVE_EXPR to make sure it
3390 is evaluated first when the size expression may depend
3391 on it for VM types. */
3392 if (TREE_SIDE_EFFECTS (size_exp)
3393 && TREE_SIDE_EFFECTS (ptrop)
3394 && variably_modified_type_p (TREE_TYPE (ptrop), NULL))
3395 {
3396 ptrop = save_expr (ptrop);
3397 size_exp = build2 (COMPOUND_EXPR, TREE_TYPE (intop), ptrop, size_exp);
3398 }
3399 }
3400
3401 /* We are manipulating pointer values, so we don't need to warn
3402 about relying on undefined signed overflow. We disable the
3403 warning here because we use integer types so fold won't know that
3404 they are really pointers. */
3405 fold_defer_overflow_warnings ();
3406
3407 /* If what we are about to multiply by the size of the elements
3408 contains a constant term, apply distributive law
3409 and multiply that constant term separately.
3410 This helps produce common subexpressions. */
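 For instance (sketch): given p + (i + 4), the constant term 4 is added
 to the pointer by a recursive call first, leaving only the variable part
 i to be scaled by the element size below; nearby uses of p + 4 can then
 share that subexpression.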
3411 if ((TREE_CODE (intop) == PLUS_EXPR || TREE_CODE (intop) == MINUS_EXPR)
3412 && !TREE_CONSTANT (intop)
3413 && TREE_CONSTANT (TREE_OPERAND (intop, 1))
3414 && TREE_CONSTANT (size_exp)
3415 /* If the constant comes from pointer subtraction,
3416 skip this optimization--it would cause an error. */
3417 && TREE_CODE (TREE_TYPE (TREE_OPERAND (intop, 0))) == INTEGER_TYPE
3418 /* If the constant is unsigned, and smaller than the pointer size,
3419 then we must skip this optimization. This is because it could cause
3420 an overflow error if the constant is negative but INTOP is not. */
3421 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (intop))
3422 || (TYPE_PRECISION (TREE_TYPE (intop))
3423 == TYPE_PRECISION (TREE_TYPE (ptrop)))))
3424 {
3425 enum tree_code subcode = resultcode;
3426 tree int_type = TREE_TYPE (intop);
3427 if (TREE_CODE (intop) == MINUS_EXPR)
3428 subcode = (subcode == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR);
3429 /* Convert both subexpression types to the type of intop,
3430 because weird cases involving pointer arithmetic
3431 can result in a sum or difference with different type args. */
3432 ptrop = build_binary_op (EXPR_LOCATION (TREE_OPERAND (intop, 1)),
3433 subcode, ptrop,
3434 convert (int_type, TREE_OPERAND (intop, 1)),
3435 true);
3436 intop = convert (int_type, TREE_OPERAND (intop, 0));
3437 }
3438
3439 /* Convert the integer argument to a type the same size as sizetype
3440 so the multiply won't overflow spuriously. */
3441 if (TYPE_PRECISION (TREE_TYPE (intop)) != TYPE_PRECISION (sizetype)
3442 || TYPE_UNSIGNED (TREE_TYPE (intop)) != TYPE_UNSIGNED (sizetype))
3443 intop = convert (c_common_type_for_size (TYPE_PRECISION (sizetype),
3444 TYPE_UNSIGNED (sizetype)), intop);
3445
3446 /* Replace the integer argument with a suitable product by the object size.
3447 Do this multiplication as signed, then convert to the appropriate type
3448 for the pointer operation and disregard an overflow that occurred only
3449 because of the sign-extension change in the latter conversion. */
3450 {
3451 tree t = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (intop), intop,
3452 convert (TREE_TYPE (intop), size_exp));
3453 intop = convert (sizetype, t);
3454 if (TREE_OVERFLOW_P (intop) && !TREE_OVERFLOW (t))
3455 intop = wide_int_to_tree (TREE_TYPE (intop), wi::to_wide (intop));
3456 }
3457
3458 /* Create the sum or difference. */
3459 if (resultcode == MINUS_EXPR)
3460 intop = fold_build1_loc (loc, NEGATE_EXPR, sizetype, intop);
3461
3462 ret = fold_build_pointer_plus_loc (loc, ptrop, intop);
3463
3464 fold_undefer_and_ignore_overflow_warnings ();
3465
3466 return ret;
3467}
3468
3469/* Wrap a C_MAYBE_CONST_EXPR around EXPR, which is fully folded; NON_CONST
3470 is true if EXPR is known not to be permitted in an evaluated part of a
3471 constant expression. */
3472
3473tree
3474c_wrap_maybe_const (tree expr, bool non_const)
3475{
3476 location_t loc = EXPR_LOCATION (expr);
3477
3478 /* This should never be called for C++. */
3479 if (c_dialect_cxx ())
3480 gcc_unreachable ();
3481
3482 /* The result of folding may have a NOP_EXPR to set TREE_NO_WARNING. */
3483 STRIP_TYPE_NOPS (expr);
3484 expr = build2 (C_MAYBE_CONST_EXPR, TREE_TYPE (expr), NULL, expr);
3485 C_MAYBE_CONST_EXPR_NON_CONST (expr) = non_const;
3486 protected_set_expr_location (expr, loc);
3487
3488 return expr;
3489}
3490
3491/* Return whether EXPR is a declaration whose address can never be NULL.
3492 The address of the first struct member could be NULL only if it were
3493 accessed through a NULL pointer, and such an access would be invalid.
3494 The address of a weak symbol may be null unless it has a definition. */
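/* For example (sketch): an ordinary global such as "int x;" yields true
   here, while "extern int w __attribute__ ((weak));" with no definition
   yields false, since &w may legitimately compare equal to a null
   pointer.  */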
3495
3496bool
3497decl_with_nonnull_addr_p (const_tree expr)
3498{
3499 if (!DECL_P (expr))
3500 return false;
3501
3502 if (TREE_CODE (expr) == FIELD_DECL
3503 || TREE_CODE (expr) == PARM_DECL
3504 || TREE_CODE (expr) == LABEL_DECL)
3505 return true;
3506
3507 if (!VAR_OR_FUNCTION_DECL_P (expr))
3508 return false;
3509
3510 if (!DECL_WEAK (expr))
3511 /* Ordinary (non-weak) symbols have nonnull addresses. */
3512 return true;
3513
3514 if (DECL_INITIAL (expr) && DECL_INITIAL (expr) != error_mark_node)
3515 /* Initialized weak symbols have nonnull addresses. */
3516 return true;
3517
3518 if (DECL_EXTERNAL (expr) || !TREE_STATIC (expr))
3519 /* Uninitialized extern weak symbols and weak symbols with no
3520 allocated storage might have a null address. */
3521 return false;
3522
3523 tree attribs = DECL_ATTRIBUTES (expr);
3524 if (lookup_attribute ("weakref", attribs))
3525 /* Weakref symbols might have a null address unless their referent
3526 is known not to. Don't bother following weakref targets here. */
3527 return false;
3528
3529 return true;
3530}
3531
3532/* Prepare expr to be an argument of a TRUTH_NOT_EXPR,
3533 or for an `if' or `while' statement or ?..: exp. It should already
3534 have been validated to be of suitable type; otherwise, a bad
3535 diagnostic may result.
3536
3537 The EXPR is located at LOCATION.
3538
3539 This preparation consists of taking the ordinary
3540 representation of an expression expr and producing a valid tree
3541 boolean expression describing whether expr is nonzero. We could
3542 simply always do build_binary_op (NE_EXPR, expr, truthvalue_false_node, 1),
3543 but we optimize comparisons, &&, ||, and !.
3544
3545 The resulting type should always be `truthvalue_type_node'. */
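/* For example (sketch): for a pointer p, "if (p)" is lowered via the
   default handling below to the test p != 0, while "if (x = 5)" also
   triggers the -Wparentheses suggestion handled in the MODIFY_EXPR
   case.  */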
3546
3547tree
3548c_common_truthvalue_conversion (location_t location, tree expr)
3549{
3550 STRIP_ANY_LOCATION_WRAPPER (expr);
3551 switch (TREE_CODE (expr))
3552 {
3553 case EQ_EXPR: case NE_EXPR: case UNEQ_EXPR: case LTGT_EXPR:
3554 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
3555 case UNLE_EXPR: case UNGE_EXPR: case UNLT_EXPR: case UNGT_EXPR:
3556 case ORDERED_EXPR: case UNORDERED_EXPR:
3557 if (TREE_TYPE (expr) == truthvalue_type_node)
3558 return expr;
3559 expr = build2 (TREE_CODE (expr), truthvalue_type_node,
3560 TREE_OPERAND (expr, 0), TREE_OPERAND (expr, 1));
3561 goto ret;
3562
3563 case TRUTH_ANDIF_EXPR:
3564 case TRUTH_ORIF_EXPR:
3565 case TRUTH_AND_EXPR:
3566 case TRUTH_OR_EXPR:
3567 case TRUTH_XOR_EXPR:
3568 if (TREE_TYPE (expr) == truthvalue_type_node)
3569 return expr;
3570 expr = build2 (TREE_CODE (expr), truthvalue_type_node,
3571 c_common_truthvalue_conversion (location,
3572 TREE_OPERAND (expr, 0)),
3573 c_common_truthvalue_conversion (location,
3574 TREE_OPERAND (expr, 1)));
3575 goto ret;
3576
3577 case TRUTH_NOT_EXPR:
3578 if (TREE_TYPE (expr) == truthvalue_type_node)
3579 return expr;
3580 expr = build1 (TREE_CODE (expr), truthvalue_type_node,
3581 c_common_truthvalue_conversion (location,
3582 TREE_OPERAND (expr, 0)));
3583 goto ret;
3584
3585 case ERROR_MARK:
3586 return expr;
3587
3588 case INTEGER_CST:
3589 if (TREE_CODE (TREE_TYPE (expr)) == ENUMERAL_TYPE
3590 && !integer_zerop (expr)
3591 && !integer_onep (expr))
3592 warning_at (location, OPT_Wint_in_bool_context,
3593 "enum constant in boolean context");
3594 return integer_zerop (expr) ? truthvalue_false_node
3595 : truthvalue_true_node;
3596
3597 case REAL_CST:
3598 return real_compare (NE_EXPR, &TREE_REAL_CST (expr), &dconst0)
3599 ? truthvalue_true_node
3600 : truthvalue_false_node;
3601
3602 case FIXED_CST:
3603 return fixed_compare (NE_EXPR, &TREE_FIXED_CST (expr),
3604 &FCONST0 (TYPE_MODE (TREE_TYPE (expr))))
3605 ? truthvalue_true_node
3606 : truthvalue_false_node;
3607
3608 case FUNCTION_DECL:
3609 expr = build_unary_op (location, ADDR_EXPR, expr, false);
3610 /* Fall through. */
3611
3612 case ADDR_EXPR:
3613 {
3614 tree inner = TREE_OPERAND (expr, 0);
3615 if (decl_with_nonnull_addr_p (inner)
3616 /* Check both EXPR and INNER for suppression. */
3617 && !warning_suppressed_p (expr, OPT_Waddress)
3618 && !warning_suppressed_p (inner, OPT_Waddress))
3619 {
3620 /* Common Ada programmer's mistake. */
3621 warning_at (location,
3622 OPT_Waddress,
3623 "the address of %qD will always evaluate as %<true%>",
3624 inner);
3625 suppress_warning (inner, OPT_Waddress);
3626 return truthvalue_true_node;
3627 }
3628 break;
3629 }
3630
3631 case COMPLEX_EXPR:
3632 expr = build_binary_op (EXPR_LOCATION (expr),
3633 (TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1))
3634 ? TRUTH_OR_EXPR : TRUTH_ORIF_EXPR),
3635 c_common_truthvalue_conversion (location,
3636 TREE_OPERAND (expr, 0)),
3637 c_common_truthvalue_conversion (location,
3638 TREE_OPERAND (expr, 1)),
3639 false);
3640 goto ret;
3641
3642 case NEGATE_EXPR:
3643 case ABS_EXPR:
3644 case ABSU_EXPR:
3645 case FLOAT_EXPR:
3646 case EXCESS_PRECISION_EXPR:
3647 /* These don't change whether an object is nonzero or zero. */
3648 return c_common_truthvalue_conversion (location, TREE_OPERAND (expr, 0));
3649
3650 case LROTATE_EXPR:
3651 case RROTATE_EXPR:
3652 /* These don't change whether an object is zero or nonzero, but
3653 we can't ignore them if their second arg has side-effects. */
3654 if (TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)))
3655 {
3656 expr = build2 (COMPOUND_EXPR, truthvalue_type_node,
3657 TREE_OPERAND (expr, 1),
3658 c_common_truthvalue_conversion
3659 (location, TREE_OPERAND (expr, 0)));
3660 goto ret;
3661 }
3662 else
3663 return c_common_truthvalue_conversion (location,
3664 TREE_OPERAND (expr, 0));
3665
3666 case MULT_EXPR:
3667 warning_at (EXPR_LOCATION (expr), OPT_Wint_in_bool_context,
3668 "%<*%> in boolean context, suggest %<&&%> instead");
3669 break;
3670
3671 case LSHIFT_EXPR:
3672 /* We will only warn on signed shifts here, because the majority of
3673 false positive warnings happen in code where unsigned arithmetic
3674 was used in anticipation of a possible overflow.
3675 Furthermore, if we see an unsigned type here we know that the
3676 result of the shift is not subject to integer promotion rules. */
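 Sketch: "if (x << 2)" with signed x gets this warning, since the author
 may well have meant "x < 2"; the same shift on an unsigned operand is
 accepted silently.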
3677 if ((TREE_CODE (TREE_TYPE (expr)) == INTEGER_TYPE
3678 || TREE_CODE (TREE_TYPE (expr)) == BITINT_TYPE)
3679 && !TYPE_UNSIGNED (TREE_TYPE (expr)))
3680 warning_at (EXPR_LOCATION (expr), OPT_Wint_in_bool_context,
3681 "%<<<%> in boolean context, did you mean %<<%>?");
3682 break;
3683
3684 case COND_EXPR:
3685 if (warn_int_in_bool_context
3686 && !from_macro_definition_at (EXPR_LOCATION (expr)))
3687 {
3688 tree val1 = fold_for_warn (TREE_OPERAND (expr, 1));
3689 tree val2 = fold_for_warn (TREE_OPERAND (expr, 2));
3690 if (TREE_CODE (val1) == INTEGER_CST
3691 && TREE_CODE (val2) == INTEGER_CST
3692 && !integer_zerop (val1)
3693 && !integer_zerop (val2)
3694 && (!integer_onep (val1)
3695 || !integer_onep (val2)))
3696 warning_at (EXPR_LOCATION (expr), OPT_Wint_in_bool_context,
3697 "%<?:%> using integer constants in boolean context, "
3698 "the expression will always evaluate to %<true%>");
3699 else if ((TREE_CODE (val1) == INTEGER_CST
3700 && !integer_zerop (val1)
3701 && !integer_onep (val1))
3702 || (TREE_CODE (val2) == INTEGER_CST
3703 && !integer_zerop (val2)
3704 && !integer_onep (val2)))
3705 warning_at (EXPR_LOCATION (expr), OPT_Wint_in_bool_context,
3706 "%<?:%> using integer constants in boolean context");
3707 }
3708 /* Distribute the conversion into the arms of a COND_EXPR. */
3709 if (c_dialect_cxx ())
3710 /* Avoid premature folding. */
3711 break;
3712 else
3713 {
3714 int w = warn_int_in_bool_context;
3715 warn_int_in_bool_context = 0;
3716 /* Folding will happen later for C. */
3717 expr = build3 (COND_EXPR, truthvalue_type_node,
3718 TREE_OPERAND (expr, 0),
3719 c_common_truthvalue_conversion (location,
3720 TREE_OPERAND (expr, 1)),
3721 c_common_truthvalue_conversion (location,
3722 TREE_OPERAND (expr, 2)));
3723 warn_int_in_bool_context = w;
3724 goto ret;
3725 }
3726
3727 CASE_CONVERT:
3728 {
3729 tree totype = TREE_TYPE (expr);
3730 tree fromtype = TREE_TYPE (TREE_OPERAND (expr, 0));
3731
3732 if (POINTER_TYPE_P (totype)
3733 && !c_inhibit_evaluation_warnings
3734 && TREE_CODE (fromtype) == REFERENCE_TYPE)
3735 {
3736 tree inner = expr;
3737 STRIP_NOPS (inner);
3738
3739 if (DECL_P (inner))
3740 warning_at (location,
3741 OPT_Waddress,
3742 "the compiler can assume that the address of "
3743 "%qD will always evaluate to %<true%>",
3744 inner);
3745 }
3746
3747 /* Don't cancel the effect of a CONVERT_EXPR from a REFERENCE_TYPE,
3748 since that affects how `default_conversion' will behave. */
3749 if (TREE_CODE (totype) == REFERENCE_TYPE
3750 || TREE_CODE (fromtype) == REFERENCE_TYPE)
3751 break;
3752 /* Don't strip a conversion from C++0x scoped enum, since they
3753 don't implicitly convert to other types. */
3754 if (TREE_CODE (fromtype) == ENUMERAL_TYPE
3755 && ENUM_IS_SCOPED (fromtype))
3756 break;
3757 /* If this isn't narrowing the argument, we can ignore it. */
3758 if (TYPE_PRECISION (totype) >= TYPE_PRECISION (fromtype))
3759 {
3760 tree op0 = TREE_OPERAND (expr, 0);
3761 if ((TREE_CODE (fromtype) == POINTER_TYPE
3762 && (TREE_CODE (totype) == INTEGER_TYPE
3763 || TREE_CODE (totype) == BITINT_TYPE))
3764 || warning_suppressed_p (expr, OPT_Waddress))
3765 /* Suppress -Waddress for casts to intptr_t, propagating
3766 any suppression from the enclosing expression to its
3767 operand. */
3768 suppress_warning (op0, OPT_Waddress);
3769 return c_common_truthvalue_conversion (location, op0);
3770 }
3771 }
3772 break;
3773
3774 case MODIFY_EXPR:
3775 if (!warning_suppressed_p (expr, OPT_Wparentheses)
3776 && warn_parentheses
3777 && warning_at (location, OPT_Wparentheses,
3778 "suggest parentheses around assignment used as "
3779 "truth value"))
3780 suppress_warning (expr, OPT_Wparentheses);
3781 break;
3782
3783 case CONST_DECL:
3784 {
3785 tree folded_expr = fold_for_warn (expr);
3786 if (folded_expr != expr)
3787 return c_common_truthvalue_conversion (location, folded_expr);
3788 }
3789 break;
3790
3791 default:
3792 break;
3793 }
3794
3795 if (TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE)
3796 {
3797 tree t = save_expr (expr);
3798 expr = (build_binary_op
3799 (EXPR_LOCATION (expr),
3800 (TREE_SIDE_EFFECTS (expr)
3801 ? TRUTH_OR_EXPR : TRUTH_ORIF_EXPR),
3802 c_common_truthvalue_conversion
3803 (location,
3804 build_unary_op (location, REALPART_EXPR, t, false)),
3805 c_common_truthvalue_conversion
3806 (location,
3807 build_unary_op (location, IMAGPART_EXPR, t, false)),
3808 false));
3809 goto ret;
3810 }
3811
3812 if (FIXED_POINT_TYPE_P (TREE_TYPE (expr)))
3813 {
3814 tree fixed_zero_node = build_fixed (TREE_TYPE (expr),
3815 FCONST0 (TYPE_MODE
3816 (TREE_TYPE (expr))));
3817 return build_binary_op (location, NE_EXPR, expr, fixed_zero_node, true);
3818 }
3819 else
3820 return build_binary_op (location, NE_EXPR, expr, integer_zero_node, true);
3821
3822 ret:
3823 protected_set_expr_location (expr, location);
3824 return expr;
3825}
3826
3827static void def_builtin_1 (enum built_in_function fncode,
3828 const char *name,
3829 enum built_in_class fnclass,
3830 tree fntype, tree libtype,
3831 bool both_p, bool fallback_p, bool nonansi_p,
3832 tree fnattrs, bool implicit_p);
3833
3834
3835/* Apply the TYPE_QUALS to the new DECL. */
3836
3837void
3838c_apply_type_quals_to_decl (int type_quals, tree decl)
3839{
3840 tree type = TREE_TYPE (decl);
3841
3842 if (type == error_mark_node)
3843 return;
3844
3845 if ((type_quals & TYPE_QUAL_CONST)
3846 || (type && TREE_CODE (type) == REFERENCE_TYPE))
3847 /* We used to check TYPE_NEEDS_CONSTRUCTING here, but now a constexpr
3848 constructor can produce constant init, so rely on cp_finish_decl to
3849 clear TREE_READONLY if the variable has non-constant init. */
3850 TREE_READONLY (decl) = 1;
3851 if (type_quals & TYPE_QUAL_VOLATILE)
3852 {
3853 TREE_SIDE_EFFECTS (decl) = 1;
3854 TREE_THIS_VOLATILE (decl) = 1;
3855 }
3856 if (type_quals & TYPE_QUAL_RESTRICT)
3857 {
3858 while (type && TREE_CODE (type) == ARRAY_TYPE)
3859 /* Allow 'restrict' on arrays of pointers.
3860 FIXME currently we just ignore it. */
3861 type = TREE_TYPE (type);
3862 if (!type
3863 || !POINTER_TYPE_P (type)
3864 || !C_TYPE_OBJECT_OR_INCOMPLETE_P (TREE_TYPE (type)))
3865 error ("invalid use of %<restrict%>");
3866 }
3867}
3868
3869/* Return the typed-based alias set for T, which may be an expression
3870 or a type. Return -1 if we don't do anything special. */
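/* For example (sketch): accessing an int object through an unsigned int
   lvalue is permitted because both map to the signed variant's alias set
   below, and any object may be accessed through a plain, signed or
   unsigned char lvalue, which get alias set 0.  */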
3871
3872alias_set_type
3873c_common_get_alias_set (tree t)
3874{
3875 /* For VLAs, use the alias set of the element type rather than the
3876 default of alias set 0 for types compared structurally. */
3877 if (TYPE_P (t) && TYPE_STRUCTURAL_EQUALITY_P (t))
3878 {
3879 if (TREE_CODE (t) == ARRAY_TYPE)
3880 return get_alias_set (TREE_TYPE (t));
3881 return -1;
3882 }
3883
3884 /* That's all the expressions we handle specially. */
3885 if (!TYPE_P (t))
3886 return -1;
3887
3888 /* Unlike char, char8_t doesn't alias in C++. (In C, char8_t is not
3889 a distinct type.) */
3890 if (flag_char8_t && t == char8_type_node && c_dialect_cxx ())
3891 return -1;
3892
3893 /* The C standard guarantees that any object may be accessed via an
3894 lvalue that has narrow character type. */
3895 if (t == char_type_node
3896 || t == signed_char_type_node
3897 || t == unsigned_char_type_node)
3898 return 0;
3899
3900 /* The C standard specifically allows aliasing between signed and
3901 unsigned variants of the same type. We treat the signed
3902 variant as canonical. */
3903 if ((TREE_CODE (t) == INTEGER_TYPE || TREE_CODE (t) == BITINT_TYPE)
3904 && TYPE_UNSIGNED (t))
3905 {
3906 tree t1 = c_common_signed_type (t);
3907
3908 /* t1 == t can happen for boolean nodes which are always unsigned. */
3909 if (t1 != t)
3910 return get_alias_set (t1);
3911 }
3912
3913 return -1;
3914}
3915
3916/* Compute the value of 'sizeof (TYPE)' or '__alignof__ (TYPE)', where
3917 the IS_SIZEOF parameter indicates which operator is being applied.
3918 The COMPLAIN flag controls whether we should diagnose possibly
3919 ill-formed constructs or not. LOC is the location of the SIZEOF or
3920 ALIGNOF operator. If MIN_ALIGNOF, the least alignment required for
3921 a type in any context should be returned, rather than the normal
3922 alignment for that type. */
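/* For example (sketch): "sizeof (void)" and sizeof applied to a function
   type are GNU extensions that yield 1 here, with a pedwarn controlled by
   -Wpointer-arith, whereas applying either operator to an incomplete
   struct type is a hard error.  */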
3923
3924tree
3925c_sizeof_or_alignof_type (location_t loc,
3926 tree type, bool is_sizeof, bool min_alignof,
3927 int complain)
3928{
3929 const char *op_name;
3930 tree value = NULL;
3931 enum tree_code type_code = TREE_CODE (type);
3932
3933 op_name = is_sizeof ? "sizeof" : "__alignof__";
3934
3935 if (type_code == FUNCTION_TYPE)
3936 {
3937 if (is_sizeof)
3938 {
3939 if (complain && warn_pointer_arith)
3940 pedwarn (loc, OPT_Wpointer_arith,
3941 "invalid application of %<sizeof%> to a function type");
3942 else if (!complain)
3943 return error_mark_node;
3944 value = size_one_node;
3945 }
3946 else
3947 {
3948 if (complain)
3949 {
3950 if (c_dialect_cxx ())
3951 pedwarn (loc, OPT_Wpedantic, "ISO C++ does not permit "
3952 "%<alignof%> applied to a function type");
3953 else
3954 pedwarn (loc, OPT_Wpedantic, "ISO C does not permit "
3955 "%<_Alignof%> applied to a function type");
3956 }
3957 value = size_int (FUNCTION_BOUNDARY / BITS_PER_UNIT);
3958 }
3959 }
3960 else if (type_code == VOID_TYPE || type_code == ERROR_MARK)
3961 {
3962 if (type_code == VOID_TYPE
3963 && complain && warn_pointer_arith)
3964 pedwarn (loc, OPT_Wpointer_arith,
3965 "invalid application of %qs to a void type", op_name);
3966 else if (!complain)
3967 return error_mark_node;
3968 value = size_one_node;
3969 }
3970 else if (!COMPLETE_TYPE_P (type)
3971 && (!c_dialect_cxx () || is_sizeof || type_code != ARRAY_TYPE))
3972 {
3973 if (complain)
3974 error_at (loc, "invalid application of %qs to incomplete type %qT",
3975 op_name, type);
3976 return error_mark_node;
3977 }
3978 else if (c_dialect_cxx () && type_code == ARRAY_TYPE
3979 && !COMPLETE_TYPE_P (TREE_TYPE (type)))
3980 {
3981 if (complain)
3982 error_at (loc, "invalid application of %qs to array type %qT of "
3983 "incomplete element type", op_name, type);
3984 return error_mark_node;
3985 }
3986 else if (!verify_type_context (loc, is_sizeof ? TCTX_SIZEOF : TCTX_ALIGNOF,
3987 type, !complain))
3988 {
3989 if (!complain)
3990 return error_mark_node;
3991 value = size_one_node;
3992 }
3993 else
3994 {
3995 if (is_sizeof)
3996 /* Convert in case a char is more than one unit. */
3997 value = size_binop_loc (loc, CEIL_DIV_EXPR, TYPE_SIZE_UNIT (type),
3998 size_int (TYPE_PRECISION (char_type_node)
3999 / BITS_PER_UNIT));
4000 else if (min_alignof)
4001 value = size_int (min_align_of_type (type));
4002 else
4003 value = size_int (TYPE_ALIGN_UNIT (type));
4004 }
4005
4006 /* VALUE will have the middle-end integer type sizetype.
4007 However, we should really return a value of type `size_t',
4008 which is just a typedef for an ordinary integer type. */
4009 value = fold_convert_loc (loc, size_type_node, value);
4010
4011 return value;
4012}
4013
4014/* Implement the __alignof keyword: Return the minimum required
4015 alignment of EXPR, measured in bytes. For VAR_DECLs,
4016 FUNCTION_DECLs and FIELD_DECLs return DECL_ALIGN (which can be set
4017 from an "aligned" __attribute__ specification). LOC is the
4018 location of the ALIGNOF operator. */
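/* For example (sketch): __alignof__ applied to a declaration honours any
   "aligned" attribute via DECL_ALIGN, so a variable declared with
   __attribute__ ((aligned (16))) reports 16, whereas applying it to a
   bit-field member is diagnosed below.  */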
4019
4020tree
4021c_alignof_expr (location_t loc, tree expr)
4022{
4023 tree t;
4024
4025 if (!verify_type_context (loc, TCTX_ALIGNOF, TREE_TYPE (expr)))
4026 t = size_one_node;
4027
4028 else if (VAR_OR_FUNCTION_DECL_P (expr))
4029 t = size_int (DECL_ALIGN_UNIT (expr));
4030
4031 else if (TREE_CODE (expr) == COMPONENT_REF
4032 && DECL_C_BIT_FIELD (TREE_OPERAND (expr, 1)))
4033 {
4034 error_at (loc, "%<__alignof%> applied to a bit-field");
4035 t = size_one_node;
4036 }
4037 else if (TREE_CODE (expr) == COMPONENT_REF
4038 && TREE_CODE (TREE_OPERAND (expr, 1)) == FIELD_DECL)
4039 t = size_int (DECL_ALIGN_UNIT (TREE_OPERAND (expr, 1)));
4040
4041 else if (INDIRECT_REF_P (expr))
4042 {
4043 tree t = TREE_OPERAND (expr, 0);
4044 tree best = t;
4045 int bestalign = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (t)));
4046
4047 while (CONVERT_EXPR_P (t)
4048 && TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0))) == POINTER_TYPE)
4049 {
4050 int thisalign;
4051
4052 t = TREE_OPERAND (t, 0);
4053 thisalign = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (t)));
4054 if (thisalign > bestalign)
4055 best = t, bestalign = thisalign;
4056 }
4057 return c_alignof (loc, TREE_TYPE (TREE_TYPE (best)));
4058 }
4059 else
4060 return c_alignof (loc, TREE_TYPE (expr));
4061
4062 return fold_convert_loc (loc, size_type_node, t);
4063}
4064
4065/* Handle C and C++ default attributes. */
4066
4067enum built_in_attribute
4068{
4069#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
4070#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
4071#define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
4072#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
4073#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
4074#include "builtin-attrs.def"
4075#undef DEF_ATTR_NULL_TREE
4076#undef DEF_ATTR_INT
4077#undef DEF_ATTR_STRING
4078#undef DEF_ATTR_IDENT
4079#undef DEF_ATTR_TREE_LIST
4080 ATTR_LAST
4081};
4082
4083static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
4084
4085static void c_init_attributes (void);
4086
4087enum c_builtin_type
4088{
4089#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
4090#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
4091#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
4092#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
4093#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4094#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4095#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
4096#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4097 ARG6) NAME,
4098#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4099 ARG6, ARG7) NAME,
4100#define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4101 ARG6, ARG7, ARG8) NAME,
4102#define DEF_FUNCTION_TYPE_9(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4103 ARG6, ARG7, ARG8, ARG9) NAME,
4104#define DEF_FUNCTION_TYPE_10(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4105 ARG6, ARG7, ARG8, ARG9, ARG10) NAME,
4106#define DEF_FUNCTION_TYPE_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4107 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME,
4108#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
4109#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
4110#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
4111#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4112#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4113#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4114 NAME,
4115#define DEF_FUNCTION_TYPE_VAR_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4116 ARG6) NAME,
4117#define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4118 ARG6, ARG7) NAME,
4119#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
4120#include "builtin-types.def"
4121#undef DEF_PRIMITIVE_TYPE
4122#undef DEF_FUNCTION_TYPE_0
4123#undef DEF_FUNCTION_TYPE_1
4124#undef DEF_FUNCTION_TYPE_2
4125#undef DEF_FUNCTION_TYPE_3
4126#undef DEF_FUNCTION_TYPE_4
4127#undef DEF_FUNCTION_TYPE_5
4128#undef DEF_FUNCTION_TYPE_6
4129#undef DEF_FUNCTION_TYPE_7
4130#undef DEF_FUNCTION_TYPE_8
4131#undef DEF_FUNCTION_TYPE_9
4132#undef DEF_FUNCTION_TYPE_10
4133#undef DEF_FUNCTION_TYPE_11
4134#undef DEF_FUNCTION_TYPE_VAR_0
4135#undef DEF_FUNCTION_TYPE_VAR_1
4136#undef DEF_FUNCTION_TYPE_VAR_2
4137#undef DEF_FUNCTION_TYPE_VAR_3
4138#undef DEF_FUNCTION_TYPE_VAR_4
4139#undef DEF_FUNCTION_TYPE_VAR_5
4140#undef DEF_FUNCTION_TYPE_VAR_6
4141#undef DEF_FUNCTION_TYPE_VAR_7
4142#undef DEF_POINTER_TYPE
4143 BT_LAST
4144};
4145
4146typedef enum c_builtin_type builtin_type;
4147
4148/* A temporary array for c_common_nodes_and_builtins. Used in
4149 communication with def_fn_type. */
4150static tree builtin_types[(int) BT_LAST + 1];
4151
4152/* A helper function for c_common_nodes_and_builtins. Build function type
4153 for DEF with return type RET and N arguments. If VAR is true, then the
4154 function should be variadic after those N arguments, or, if N is zero,
4155 unprototyped.
4156
4157 Takes special care not to ICE if any of the types involved are
4158 error_mark_node, which indicates that said type is not in fact available
4159 (see builtin_type_for_size). In which case the function type as a whole
4160 should be error_mark_node. */
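/* For instance (sketch): an entry such as
   DEF_FUNCTION_TYPE_1 (BT_FN_INT_INT, BT_INT, BT_INT) in builtin-types.def
   expands, via the macros defined in c_define_builtins below, to
   def_fn_type (BT_FN_INT_INT, BT_INT, 0, 1, BT_INT), recording the
   function type int (int) in builtin_types[].  */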
4161
4162static void
4163def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
4164{
4165 tree t;
4166 tree *args = XALLOCAVEC (tree, n);
4167 va_list list;
4168 int i;
4169
4170 va_start (list, n);
4171 for (i = 0; i < n; ++i)
4172 {
4173 builtin_type a = (builtin_type) va_arg (list, int);
4174 t = builtin_types[a];
4175 if (t == error_mark_node)
4176 goto egress;
4177 args[i] = t;
4178 }
4179
4180 t = builtin_types[ret];
4181 if (t == error_mark_node)
4182 goto egress;
4183 if (var)
4184 if (n == 0)
4185 t = build_function_type (t, NULL_TREE);
4186 else
4187 t = build_varargs_function_type_array (t, n, args);
4188 else
4189 t = build_function_type_array (t, n, args);
4190
4191 egress:
4192 builtin_types[def] = t;
4193 va_end (list);
4194}
4195
4196/* Build builtin functions common to both C and C++ language
4197 frontends. */
4198
4199static void
4200c_define_builtins (tree va_list_ref_type_node, tree va_list_arg_type_node)
4201{
4202#define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
4203 builtin_types[ENUM] = VALUE;
4204#define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
4205 def_fn_type (ENUM, RETURN, 0, 0);
4206#define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
4207 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
4208#define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
4209 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
4210#define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4211 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
4212#define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4213 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
4214#define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4215 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4216#define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4217 ARG6) \
4218 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
4219#define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4220 ARG6, ARG7) \
4221 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
4222#define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4223 ARG6, ARG7, ARG8) \
4224 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
4225 ARG7, ARG8);
4226#define DEF_FUNCTION_TYPE_9(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4227 ARG6, ARG7, ARG8, ARG9) \
4228 def_fn_type (ENUM, RETURN, 0, 9, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
4229 ARG7, ARG8, ARG9);
4230#define DEF_FUNCTION_TYPE_10(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4231 ARG6, ARG7, ARG8, ARG9, ARG10) \
4232 def_fn_type (ENUM, RETURN, 0, 10, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
4233 ARG7, ARG8, ARG9, ARG10);
4234#define DEF_FUNCTION_TYPE_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4235 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) \
4236 def_fn_type (ENUM, RETURN, 0, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
4237 ARG7, ARG8, ARG9, ARG10, ARG11);
4238#define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
4239 def_fn_type (ENUM, RETURN, 1, 0);
4240#define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
4241 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
4242#define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
4243 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
4244#define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4245 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
4246#define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4247 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
4248#define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4249 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4250#define DEF_FUNCTION_TYPE_VAR_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4251 ARG6) \
4252 def_fn_type (ENUM, RETURN, 1, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
4253#define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4254 ARG6, ARG7) \
4255 def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
4256#define DEF_POINTER_TYPE(ENUM, TYPE) \
4257 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
4258
4259#include "builtin-types.def"
4260
4261#undef DEF_PRIMITIVE_TYPE
4262#undef DEF_FUNCTION_TYPE_0
4263#undef DEF_FUNCTION_TYPE_1
4264#undef DEF_FUNCTION_TYPE_2
4265#undef DEF_FUNCTION_TYPE_3
4266#undef DEF_FUNCTION_TYPE_4
4267#undef DEF_FUNCTION_TYPE_5
4268#undef DEF_FUNCTION_TYPE_6
4269#undef DEF_FUNCTION_TYPE_7
4270#undef DEF_FUNCTION_TYPE_8
4271#undef DEF_FUNCTION_TYPE_9
4272#undef DEF_FUNCTION_TYPE_10
4273#undef DEF_FUNCTION_TYPE_11
4274#undef DEF_FUNCTION_TYPE_VAR_0
4275#undef DEF_FUNCTION_TYPE_VAR_1
4276#undef DEF_FUNCTION_TYPE_VAR_2
4277#undef DEF_FUNCTION_TYPE_VAR_3
4278#undef DEF_FUNCTION_TYPE_VAR_4
4279#undef DEF_FUNCTION_TYPE_VAR_5
4280#undef DEF_FUNCTION_TYPE_VAR_6
4281#undef DEF_FUNCTION_TYPE_VAR_7
4282#undef DEF_POINTER_TYPE
4283 builtin_types[(int) BT_LAST] = NULL_TREE;
4284
4285 c_init_attributes ();
4286
4287#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
4288 NONANSI_P, ATTRS, IMPLICIT, COND) \
4289 if (NAME && COND) \
4290 def_builtin_1 (ENUM, NAME, CLASS, \
4291 builtin_types[(int) TYPE], \
4292 builtin_types[(int) LIBTYPE], \
4293 BOTH_P, FALLBACK_P, NONANSI_P, \
4294 built_in_attributes[(int) ATTRS], IMPLICIT);
4295#include "builtins.def"
4296
4297 targetm.init_builtins ();
4298
4299 build_common_builtin_nodes ();
4300}
4301
4302/* Like get_identifier, but avoid warnings about null arguments when
4303 the argument may be NULL for targets where GCC lacks stdint.h type
4304 information. */
4305
4306static inline tree
4307c_get_ident (const char *id)
4308{
4309 return get_identifier (id);
4310}
4311
4312/* Build tree nodes and builtin functions common to both C and C++ language
4313 frontends. */
4314
4315void
4316c_common_nodes_and_builtins (void)
4317{
4318 int char8_type_size;
4319 int char16_type_size;
4320 int char32_type_size;
4321 int wchar_type_size;
4322 tree array_domain_type;
4323 tree va_list_ref_type_node;
4324 tree va_list_arg_type_node;
4325 int i;
4326
4327 build_common_tree_nodes (flag_signed_char);
4328
4329 /* Define `int' and `char' first so that dbx will output them first. */
4330 record_builtin_type (RID_INT, NULL, integer_type_node);
4331 record_builtin_type (RID_CHAR, "char", char_type_node);
4332
4333 /* `signed' is the same as `int'. FIXME: the declarations of "signed",
4334 "unsigned long", "long long unsigned" and "unsigned short" were in C++
4335 but not C. Are the conditionals here needed? */
4336 if (c_dialect_cxx ())
4337 record_builtin_type (RID_SIGNED, NULL, integer_type_node);
4338 record_builtin_type (RID_LONG, "long int", long_integer_type_node);
4339 record_builtin_type (RID_UNSIGNED, "unsigned int", unsigned_type_node);
4340 record_builtin_type (RID_MAX, "long unsigned int",
4341 long_unsigned_type_node);
4342
4343 for (i = 0; i < NUM_INT_N_ENTS; i ++)
4344 {
4345 char name[25];
4346
4347 sprintf (name, "__int%d", int_n_data[i].bitsize);
4348 record_builtin_type ((enum rid)(RID_FIRST_INT_N + i), name,
4349 int_n_trees[i].signed_type);
4350 sprintf (name, "__int%d__", int_n_data[i].bitsize);
4351 record_builtin_type ((enum rid)(RID_FIRST_INT_N + i), name,
4352 int_n_trees[i].signed_type);
4353 ridpointers[RID_FIRST_INT_N + i]
4354 = DECL_NAME (TYPE_NAME (int_n_trees[i].signed_type));
4355
4356 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
4357 record_builtin_type (RID_MAX, name, int_n_trees[i].unsigned_type);
4358 sprintf (name, "__int%d__ unsigned", int_n_data[i].bitsize);
4359 record_builtin_type (RID_MAX, name, int_n_trees[i].unsigned_type);
4360 }
4361
4362 if (c_dialect_cxx ())
4363 record_builtin_type (RID_MAX, "unsigned long", long_unsigned_type_node);
4364 record_builtin_type (RID_MAX, "long long int",
4365 long_long_integer_type_node);
4366 record_builtin_type (RID_MAX, "long long unsigned int",
4367 long_long_unsigned_type_node);
4368 if (c_dialect_cxx ())
4369 record_builtin_type (RID_MAX, "long long unsigned",
4370 long_long_unsigned_type_node);
4371 record_builtin_type (RID_SHORT, "short int", short_integer_type_node);
4372 record_builtin_type (RID_MAX, "short unsigned int",
4373 short_unsigned_type_node);
4374 if (c_dialect_cxx ())
4375 record_builtin_type (RID_MAX, "unsigned short",
4376 short_unsigned_type_node);
4377
4378 /* Define both `signed char' and `unsigned char'. */
4379 record_builtin_type (RID_MAX, "signed char", signed_char_type_node);
4380 record_builtin_type (RID_MAX, "unsigned char", unsigned_char_type_node);
4381
4382 /* These are types that c_common_type_for_size and
4383 c_common_type_for_mode use. */
4384 lang_hooks.decls.pushdecl (build_decl (UNKNOWN_LOCATION,
4385 TYPE_DECL, NULL_TREE,
4386 intQI_type_node));
4387 lang_hooks.decls.pushdecl (build_decl (UNKNOWN_LOCATION,
4388 TYPE_DECL, NULL_TREE,
4389 intHI_type_node));
4390 lang_hooks.decls.pushdecl (build_decl (UNKNOWN_LOCATION,
4391 TYPE_DECL, NULL_TREE,
4392 intSI_type_node));
4393 lang_hooks.decls.pushdecl (build_decl (UNKNOWN_LOCATION,
4394 TYPE_DECL, NULL_TREE,
4395 intDI_type_node));
4396#if HOST_BITS_PER_WIDE_INT >= 64
4397 /* Note that this is different than the __int128 type that's part of
4398 the generic __intN support. */
4399 if (targetm.scalar_mode_supported_p (TImode))
4400 lang_hooks.decls.pushdecl (build_decl (UNKNOWN_LOCATION,
4401 TYPE_DECL,
4402 get_identifier ("__int128_t"),
4403 intTI_type_node));
4404#endif
4405 lang_hooks.decls.pushdecl (build_decl (UNKNOWN_LOCATION,
4406 TYPE_DECL, NULL_TREE,
4407 unsigned_intQI_type_node));
4408 lang_hooks.decls.pushdecl (build_decl (UNKNOWN_LOCATION,
4409 TYPE_DECL, NULL_TREE,
4410 unsigned_intHI_type_node));
4411 lang_hooks.decls.pushdecl (build_decl (UNKNOWN_LOCATION,
4412 TYPE_DECL, NULL_TREE,
4413 unsigned_intSI_type_node));
4414 lang_hooks.decls.pushdecl (build_decl (UNKNOWN_LOCATION,
4415 TYPE_DECL, NULL_TREE,
4416 unsigned_intDI_type_node));
4417#if HOST_BITS_PER_WIDE_INT >= 64
4418 if (targetm.scalar_mode_supported_p (TImode))
4419 lang_hooks.decls.pushdecl (build_decl (UNKNOWN_LOCATION,
4420 TYPE_DECL,
4421 get_identifier ("__uint128_t"),
4422 unsigned_intTI_type_node));
4423#endif
4424
4425 /* Create the widest literal types. */
4426 if (targetm.scalar_mode_supported_p (TImode))
4427 {
4428 widest_integer_literal_type_node = intTI_type_node;
4429 widest_unsigned_literal_type_node = unsigned_intTI_type_node;
4430 }
4431 else
4432 {
4433 widest_integer_literal_type_node = intDI_type_node;
4434 widest_unsigned_literal_type_node = unsigned_intDI_type_node;
4435 }
4436
4437 signed_size_type_node = c_common_signed_type (size_type_node);
4438
4439 pid_type_node =
4440 TREE_TYPE (identifier_global_value (get_identifier (PID_TYPE)));
4441
4442 record_builtin_type (RID_FLOAT, NULL, float_type_node);
4443 record_builtin_type (RID_DOUBLE, NULL, double_type_node);
4444 record_builtin_type (RID_MAX, "long double", long_double_type_node);
4445
4446 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
4447 {
4448 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
4449 record_builtin_type ((enum rid) (RID_FLOATN_NX_FIRST + i), NULL,
4450 FLOATN_NX_TYPE_NODE (i));
4451 }
4452
4453 /* For C, let float128t_type_node (__float128 in some backends) be the
4454 same type as float128_type_node (_Float128), for C++ let those
4455 be distinct types that mangle and behave differently. */
4456 if (c_dialect_cxx ())
4457 float128t_type_node = NULL_TREE;
4458
4459 /* Only support the decimal floating point extension if the target
4460 actually supports the underlying modes. */
4461 if (targetm.scalar_mode_supported_p (SDmode)
4462 && targetm.scalar_mode_supported_p (DDmode)
4463 && targetm.scalar_mode_supported_p (TDmode))
4464 {
4465 record_builtin_type (RID_DFLOAT32, NULL, dfloat32_type_node);
4466 record_builtin_type (RID_DFLOAT64, NULL, dfloat64_type_node);
4467 record_builtin_type (RID_DFLOAT128, NULL, dfloat128_type_node);
4468 }
4469
4470 if (targetm.fixed_point_supported_p ())
4471 {
4472 record_builtin_type (RID_MAX, "short _Fract", short_fract_type_node);
4473 record_builtin_type (RID_FRACT, NULL, fract_type_node);
4474 record_builtin_type (RID_MAX, "long _Fract", long_fract_type_node);
4475 record_builtin_type (RID_MAX, "long long _Fract",
4476 long_long_fract_type_node);
4477 record_builtin_type (RID_MAX, "unsigned short _Fract",
4478 unsigned_short_fract_type_node);
4479 record_builtin_type (RID_MAX, "unsigned _Fract",
4480 unsigned_fract_type_node);
4481 record_builtin_type (RID_MAX, "unsigned long _Fract",
4482 unsigned_long_fract_type_node);
4483 record_builtin_type (RID_MAX, "unsigned long long _Fract",
4484 unsigned_long_long_fract_type_node);
4485 record_builtin_type (RID_MAX, "_Sat short _Fract",
4486 sat_short_fract_type_node);
4487 record_builtin_type (RID_MAX, "_Sat _Fract", sat_fract_type_node);
4488 record_builtin_type (RID_MAX, "_Sat long _Fract",
4489 sat_long_fract_type_node);
4490 record_builtin_type (RID_MAX, "_Sat long long _Fract",
4491 sat_long_long_fract_type_node);
4492 record_builtin_type (RID_MAX, "_Sat unsigned short _Fract",
4493 sat_unsigned_short_fract_type_node);
4494 record_builtin_type (RID_MAX, "_Sat unsigned _Fract",
4495 sat_unsigned_fract_type_node);
4496 record_builtin_type (RID_MAX, "_Sat unsigned long _Fract",
4497 sat_unsigned_long_fract_type_node);
4498 record_builtin_type (RID_MAX, "_Sat unsigned long long _Fract",
4499 sat_unsigned_long_long_fract_type_node);
4500 record_builtin_type (RID_MAX, "short _Accum", short_accum_type_node);
4501 record_builtin_type (RID_ACCUM, NULL, accum_type_node);
4502 record_builtin_type (RID_MAX, "long _Accum", long_accum_type_node);
4503 record_builtin_type (RID_MAX, "long long _Accum",
4504 long_long_accum_type_node);
4505 record_builtin_type (RID_MAX, "unsigned short _Accum",
4506 unsigned_short_accum_type_node);
4507 record_builtin_type (RID_MAX, "unsigned _Accum",
4508 unsigned_accum_type_node);
4509 record_builtin_type (RID_MAX, "unsigned long _Accum",
4510 unsigned_long_accum_type_node);
4511 record_builtin_type (RID_MAX, "unsigned long long _Accum",
4512 unsigned_long_long_accum_type_node);
4513 record_builtin_type (RID_MAX, "_Sat short _Accum",
4514 sat_short_accum_type_node);
4515 record_builtin_type (RID_MAX, "_Sat _Accum", sat_accum_type_node);
4516 record_builtin_type (RID_MAX, "_Sat long _Accum",
4517 sat_long_accum_type_node);
4518 record_builtin_type (RID_MAX, "_Sat long long _Accum",
4519 sat_long_long_accum_type_node);
4520 record_builtin_type (RID_MAX, "_Sat unsigned short _Accum",
4521 sat_unsigned_short_accum_type_node);
4522 record_builtin_type (RID_MAX, "_Sat unsigned _Accum",
4523 sat_unsigned_accum_type_node);
4524 record_builtin_type (RID_MAX, "_Sat unsigned long _Accum",
4525 sat_unsigned_long_accum_type_node);
4526 record_builtin_type (RID_MAX, "_Sat unsigned long long _Accum",
4527 sat_unsigned_long_long_accum_type_node);
4528
4529 }
4530
4531 lang_hooks.decls.pushdecl (build_decl (UNKNOWN_LOCATION,
4532 TYPE_DECL,
4533 get_identifier ("complex int"),
4534 complex_integer_type_node));
4535 lang_hooks.decls.pushdecl (build_decl (UNKNOWN_LOCATION,
4536 TYPE_DECL,
4537 get_identifier ("complex float"),
4538 complex_float_type_node));
4539 lang_hooks.decls.pushdecl (build_decl (UNKNOWN_LOCATION,
4540 TYPE_DECL,
4541 get_identifier ("complex double"),
4542 complex_double_type_node));
4543 lang_hooks.decls.pushdecl
4544 (build_decl (UNKNOWN_LOCATION,
4545 TYPE_DECL, get_identifier ("complex long double"),
4546 complex_long_double_type_node));
4547
4548 if (!c_dialect_cxx ())
4549 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
4550 if (COMPLEX_FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
4551 {
4552 char buf[30];
4553 sprintf (buf, "complex _Float%d%s", floatn_nx_types[i].n,
4554 floatn_nx_types[i].extended ? "x" : "");
4555 lang_hooks.decls.pushdecl
4556 (build_decl (UNKNOWN_LOCATION,
4557 TYPE_DECL,
4558 get_identifier (buf),
4559 COMPLEX_FLOATN_NX_TYPE_NODE (i)));
4560 }
4561
4562 /* Make fileptr_type_node a distinct void * type until
4563 FILE type is defined. Likewise for const struct tm*. */
4564 for (unsigned i = 0; i < ARRAY_SIZE (builtin_structptr_types); ++i)
4565 builtin_structptr_types[i].node
4566 = build_variant_type_copy (builtin_structptr_types[i].base);
4567
4568 record_builtin_type (RID_VOID, NULL, void_type_node);
4569
4570 /* Set the TYPE_NAME for any variants that were built before
4571 record_builtin_type gave names to the built-in types. */
4572 {
4573 tree void_name = TYPE_NAME (void_type_node);
4574 TYPE_NAME (void_type_node) = NULL_TREE;
4575 TYPE_NAME (build_qualified_type (void_type_node, TYPE_QUAL_CONST))
4576 = void_name;
4577 TYPE_NAME (void_type_node) = void_name;
4578 }
4579
4580 /* Make a type to be the domain of a few array types
4581 whose domains don't really matter.
4582 200 is small enough that it always fits in size_t
4583 and large enough that it can hold most function names for the
4584 initializations of __FUNCTION__ and __PRETTY_FUNCTION__. */
4585 array_domain_type = build_index_type (size_int (200));
4586
4587 /* Make a type for arrays of characters.
4588 With luck nothing will ever really depend on the length of this
4589 array type. */
4590 char_array_type_node
4591 = build_array_type (char_type_node, array_domain_type);
4592
4593 string_type_node = build_pointer_type (char_type_node);
4594 const_string_type_node
4595 = build_pointer_type (build_qualified_type
4596 (char_type_node, TYPE_QUAL_CONST));
4597
4598 /* This is special for C++ so functions can be overloaded. */
4599 wchar_type_node = get_identifier (MODIFIED_WCHAR_TYPE);
4600 wchar_type_node = TREE_TYPE (identifier_global_value (wchar_type_node));
4601 wchar_type_size = TYPE_PRECISION (wchar_type_node);
4602 underlying_wchar_type_node = wchar_type_node;
4603 if (c_dialect_cxx ())
4604 {
4605 if (TYPE_UNSIGNED (wchar_type_node))
4606 wchar_type_node = make_unsigned_type (wchar_type_size);
4607 else
4608 wchar_type_node = make_signed_type (wchar_type_size);
4609 record_builtin_type (RID_WCHAR, "wchar_t", wchar_type_node);
4610 }
4611
4612 /* This is for wide string constants. */
4613 wchar_array_type_node
4614 = build_array_type (wchar_type_node, array_domain_type);
4615
4616 /* Define 'char8_t'. */
4617 char8_type_node = get_identifier (CHAR8_TYPE);
4618 char8_type_node = TREE_TYPE (identifier_global_value (char8_type_node));
4619 char8_type_size = TYPE_PRECISION (char8_type_node);
4620 if (c_dialect_cxx ())
4621 {
4622 char8_type_node = make_unsigned_type (char8_type_size);
4623 TYPE_STRING_FLAG (char8_type_node) = true;
4624
4625 if (flag_char8_t)
4626 record_builtin_type (RID_CHAR8, "char8_t", char8_type_node);
4627 }
4628
4629 /* This is for UTF-8 string constants. */
4630 char8_array_type_node
4631 = build_array_type (char8_type_node, array_domain_type);
4632
4633 /* Define 'char16_t'. */
4634 char16_type_node = get_identifier (CHAR16_TYPE);
4635 char16_type_node = TREE_TYPE (identifier_global_value (char16_type_node));
4636 char16_type_size = TYPE_PRECISION (char16_type_node);
4637 if (c_dialect_cxx ())
4638 {
4639 char16_type_node = make_unsigned_type (char16_type_size);
4640
4641 if (cxx_dialect >= cxx11)
4642 record_builtin_type (RID_CHAR16, "char16_t", char16_type_node);
4643 }
4644
4645 /* This is for UTF-16 string constants. */
4646 char16_array_type_node
4647 = build_array_type (char16_type_node, array_domain_type);
4648
4649 /* Define 'char32_t'. */
4650 char32_type_node = get_identifier (CHAR32_TYPE);
4651 char32_type_node = TREE_TYPE (identifier_global_value (char32_type_node));
4652 char32_type_size = TYPE_PRECISION (char32_type_node);
4653 if (c_dialect_cxx ())
4654 {
4655 char32_type_node = make_unsigned_type (char32_type_size);
4656
4657 if (cxx_dialect >= cxx11)
4658 record_builtin_type (RID_CHAR32, "char32_t", char32_type_node);
4659 }
4660
4661 /* This is for UTF-32 string constants. */
4662 char32_array_type_node
4663 = build_array_type (char32_type_node, array_domain_type);
4664
4665  if (strcmp (WINT_TYPE, "wchar_t") == 0)
4666 wint_type_node = wchar_type_node;
4667 else
4668 wint_type_node =
4669 TREE_TYPE (identifier_global_value (get_identifier (WINT_TYPE)));
4670
4671 intmax_type_node =
4672 TREE_TYPE (identifier_global_value (get_identifier (INTMAX_TYPE)));
4673 uintmax_type_node =
4674 TREE_TYPE (identifier_global_value (get_identifier (UINTMAX_TYPE)));
4675
4676 if (SIG_ATOMIC_TYPE)
4677 sig_atomic_type_node =
4678 TREE_TYPE (identifier_global_value (c_get_ident (SIG_ATOMIC_TYPE)));
4679 if (INT8_TYPE)
4680 int8_type_node =
4681 TREE_TYPE (identifier_global_value (c_get_ident (INT8_TYPE)));
4682 if (INT16_TYPE)
4683 int16_type_node =
4684 TREE_TYPE (identifier_global_value (c_get_ident (INT16_TYPE)));
4685 if (INT32_TYPE)
4686 int32_type_node =
4687 TREE_TYPE (identifier_global_value (c_get_ident (INT32_TYPE)));
4688 if (INT64_TYPE)
4689 int64_type_node =
4690 TREE_TYPE (identifier_global_value (c_get_ident (INT64_TYPE)));
4691 if (UINT8_TYPE)
4692 uint8_type_node =
4693 TREE_TYPE (identifier_global_value (c_get_ident (UINT8_TYPE)));
4694 if (UINT16_TYPE)
4695 c_uint16_type_node = uint16_type_node =
4696 TREE_TYPE (identifier_global_value (c_get_ident (UINT16_TYPE)));
4697 if (UINT32_TYPE)
4698 c_uint32_type_node = uint32_type_node =
4699 TREE_TYPE (identifier_global_value (c_get_ident (UINT32_TYPE)));
4700 if (UINT64_TYPE)
4701 c_uint64_type_node = uint64_type_node =
4702 TREE_TYPE (identifier_global_value (c_get_ident (UINT64_TYPE)));
4703 if (INT_LEAST8_TYPE)
4704 int_least8_type_node =
4705 TREE_TYPE (identifier_global_value (c_get_ident (INT_LEAST8_TYPE)));
4706 if (INT_LEAST16_TYPE)
4707 int_least16_type_node =
4708 TREE_TYPE (identifier_global_value (c_get_ident (INT_LEAST16_TYPE)));
4709 if (INT_LEAST32_TYPE)
4710 int_least32_type_node =
4711 TREE_TYPE (identifier_global_value (c_get_ident (INT_LEAST32_TYPE)));
4712 if (INT_LEAST64_TYPE)
4713 int_least64_type_node =
4714 TREE_TYPE (identifier_global_value (c_get_ident (INT_LEAST64_TYPE)));
4715 if (UINT_LEAST8_TYPE)
4716 uint_least8_type_node =
4717 TREE_TYPE (identifier_global_value (c_get_ident (UINT_LEAST8_TYPE)));
4718 if (UINT_LEAST16_TYPE)
4719 uint_least16_type_node =
4720 TREE_TYPE (identifier_global_value (c_get_ident (UINT_LEAST16_TYPE)));
4721 if (UINT_LEAST32_TYPE)
4722 uint_least32_type_node =
4723 TREE_TYPE (identifier_global_value (c_get_ident (UINT_LEAST32_TYPE)));
4724 if (UINT_LEAST64_TYPE)
4725 uint_least64_type_node =
4726 TREE_TYPE (identifier_global_value (c_get_ident (UINT_LEAST64_TYPE)));
4727 if (INT_FAST8_TYPE)
4728 int_fast8_type_node =
4729 TREE_TYPE (identifier_global_value (c_get_ident (INT_FAST8_TYPE)));
4730 if (INT_FAST16_TYPE)
4731 int_fast16_type_node =
4732 TREE_TYPE (identifier_global_value (c_get_ident (INT_FAST16_TYPE)));
4733 if (INT_FAST32_TYPE)
4734 int_fast32_type_node =
4735 TREE_TYPE (identifier_global_value (c_get_ident (INT_FAST32_TYPE)));
4736 if (INT_FAST64_TYPE)
4737 int_fast64_type_node =
4738 TREE_TYPE (identifier_global_value (c_get_ident (INT_FAST64_TYPE)));
4739 if (UINT_FAST8_TYPE)
4740 uint_fast8_type_node =
4741 TREE_TYPE (identifier_global_value (c_get_ident (UINT_FAST8_TYPE)));
4742 if (UINT_FAST16_TYPE)
4743 uint_fast16_type_node =
4744 TREE_TYPE (identifier_global_value (c_get_ident (UINT_FAST16_TYPE)));
4745 if (UINT_FAST32_TYPE)
4746 uint_fast32_type_node =
4747 TREE_TYPE (identifier_global_value (c_get_ident (UINT_FAST32_TYPE)));
4748 if (UINT_FAST64_TYPE)
4749 uint_fast64_type_node =
4750 TREE_TYPE (identifier_global_value (c_get_ident (UINT_FAST64_TYPE)));
4751 if (INTPTR_TYPE)
4752 intptr_type_node =
4753 TREE_TYPE (identifier_global_value (c_get_ident (INTPTR_TYPE)));
4754 if (UINTPTR_TYPE)
4755 uintptr_type_node =
4756 TREE_TYPE (identifier_global_value (c_get_ident (UINTPTR_TYPE)));
4757
4758 default_function_type = build_function_type (integer_type_node, NULL_TREE);
4759 unsigned_ptrdiff_type_node = c_common_unsigned_type (ptrdiff_type_node);
4760
4761 lang_hooks.decls.pushdecl
4762 (build_decl (UNKNOWN_LOCATION,
4763 TYPE_DECL, get_identifier ("__builtin_va_list"),
4764 va_list_type_node));
4765 if (targetm.enum_va_list_p)
4766 {
4767 int l;
4768 const char *pname;
4769 tree ptype;
4770
4771 for (l = 0; targetm.enum_va_list_p (l, &pname, &ptype); ++l)
4772 {
4773 lang_hooks.decls.pushdecl
4774 (build_decl (UNKNOWN_LOCATION,
4775 TYPE_DECL, get_identifier (pname),
4776 ptype));
4777
4778 }
4779 }
4780
4781 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4782 {
4783 va_list_arg_type_node = va_list_ref_type_node =
4784 build_pointer_type (TREE_TYPE (va_list_type_node));
4785 }
4786 else
4787 {
4788 va_list_arg_type_node = va_list_type_node;
4789 va_list_ref_type_node = build_reference_type (va_list_type_node);
4790 }
4791
4792 c_define_builtins (va_list_ref_type_node, va_list_arg_type_node);
4793
4794 main_identifier_node = get_identifier ("main");
4795
4796 /* Create the built-in __null node. It is important that this is
4797 not shared. */
4798 null_node = make_int_cst (1, 1);
4799  TREE_TYPE (null_node) = c_common_type_for_size (POINTER_SIZE, 0);
4800
4801 /* Create the built-in nullptr node. This part of its initialization is
4802 common to C and C++. The front ends can further adjust its definition
4803 in {c,cxx}_init_decl_processing. In particular, we aren't setting the
4804 alignment here for C++ backward ABI bug compatibility. */
4805 nullptr_type_node = make_node (NULLPTR_TYPE);
4806 TYPE_SIZE (nullptr_type_node) = bitsize_int (GET_MODE_BITSIZE (ptr_mode));
4807 TYPE_SIZE_UNIT (nullptr_type_node) = size_int (GET_MODE_SIZE (ptr_mode));
4808 TYPE_UNSIGNED (nullptr_type_node) = 1;
4809  TYPE_PRECISION (nullptr_type_node) = GET_MODE_BITSIZE (ptr_mode);
4810 SET_TYPE_MODE (nullptr_type_node, ptr_mode);
4811 nullptr_node = build_int_cst (nullptr_type_node, 0);
4812
4813 /* Since builtin_types isn't gc'ed, don't export these nodes. */
4814  memset (builtin_types, 0, sizeof (builtin_types));
4815}
4816
4817/* The number of named compound-literals generated thus far. */
4818static GTY(()) int compound_literal_number;
4819
4820/* Set DECL_NAME for DECL, a VAR_DECL for a compound-literal. */
4821
4822void
4823set_compound_literal_name (tree decl)
4824{
4825 char *name;
4826 ASM_FORMAT_PRIVATE_NAME (name, "__compound_literal",
4827 compound_literal_number);
4828 compound_literal_number++;
4829 DECL_NAME (decl) = get_identifier (name);
4830}
4831
4832/* build_va_arg helper function. Return a VA_ARG_EXPR with location LOC, type
4833 TYPE and operand OP. */
4834
4835static tree
4836build_va_arg_1 (location_t loc, tree type, tree op)
4837{
4838 tree expr = build1 (VA_ARG_EXPR, type, op);
4839 SET_EXPR_LOCATION (expr, loc);
4840 return expr;
4841}
4842
4843/* Return a VA_ARG_EXPR corresponding to a source-level expression
4844 va_arg (EXPR, TYPE) at source location LOC. */
4845
4846tree
4847build_va_arg (location_t loc, tree expr, tree type)
4848{
4849 tree va_type = TREE_TYPE (expr);
4850 tree canon_va_type = (va_type == error_mark_node
4851 ? error_mark_node
4852 : targetm.canonical_va_list_type (va_type));
4853
4854 if (va_type == error_mark_node
4855 || canon_va_type == NULL_TREE)
4856 {
4857 if (canon_va_type == NULL_TREE)
4858 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4859
4860 /* Let's handle things neutrally, if expr:
4861 - has undeclared type, or
4862	 - is not a va_list type.  */
4863 return build_va_arg_1 (loc, type, error_mark_node);
4864 }
4865
4866 if (TREE_CODE (canon_va_type) != ARRAY_TYPE)
4867 {
4868 /* Case 1: Not an array type. */
4869
4870 /* Take the address, to get '&ap'. Note that &ap is not a va_list
4871 type. */
4872 c_common_mark_addressable_vec (expr);
4873 expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (expr)), expr);
4874
4875      return build_va_arg_1 (loc, type, expr);
4876 }
4877
4878 /* Case 2: Array type.
4879
4880 Background:
4881
4882 For contrast, let's start with the simple case (case 1). If
4883 canon_va_type is not an array type, but say a char *, then when
4884 passing-by-value a va_list, the type of the va_list param decl is
4885 the same as for another va_list decl (all ap's are char *):
4886
4887 f2_1 (char * ap)
4888 D.1815 = VA_ARG (&ap, 0B, 1);
4889 return D.1815;
4890
4891 f2 (int i)
4892 char * ap.0;
4893 char * ap;
4894 __builtin_va_start (&ap, 0);
4895 ap.0 = ap;
4896 res = f2_1 (ap.0);
4897 __builtin_va_end (&ap);
4898 D.1812 = res;
4899 return D.1812;
4900
4901 However, if canon_va_type is ARRAY_TYPE, then when passing-by-value a
4902 va_list the type of the va_list param decl (case 2b, struct * ap) is not
4903 the same as for another va_list decl (case 2a, struct ap[1]).
4904
4905 f2_1 (struct * ap)
4906 D.1844 = VA_ARG (ap, 0B, 0);
4907 return D.1844;
4908
4909 f2 (int i)
4910 struct ap[1];
4911 __builtin_va_start (&ap, 0);
4912 res = f2_1 (&ap);
4913 __builtin_va_end (&ap);
4914 D.1841 = res;
4915 return D.1841;
4916
4917 Case 2b is different because:
4918 - on the callee side, the parm decl has declared type va_list, but
4919 grokdeclarator changes the type of the parm decl to a pointer to the
4920 array elem type.
4921 - on the caller side, the pass-by-value uses &ap.
4922
4923 We unify these two cases (case 2a: va_list is array type,
4924 case 2b: va_list is pointer to array elem type), by adding '&' for the
4925 array type case, such that we have a pointer to array elem in both
4926 cases. */
4927
4928 if (TREE_CODE (va_type) == ARRAY_TYPE)
4929 {
4930 /* Case 2a: va_list is array type. */
4931
4932 /* Take the address, to get '&ap'. Make sure it's a pointer to array
4933 elem type. */
4934 c_common_mark_addressable_vec (expr);
4935 expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (canon_va_type)),
4936 expr);
4937
4938 /* Verify that &ap is still recognized as having va_list type. */
4939 tree canon_expr_type
4940 = targetm.canonical_va_list_type (TREE_TYPE (expr));
4941 gcc_assert (canon_expr_type != NULL_TREE);
4942 }
4943 else
4944 {
4945 /* Case 2b: va_list is pointer to array elem type. */
4946 gcc_assert (POINTER_TYPE_P (va_type));
4947
4948 /* Comparison as in std_canonical_va_list_type. */
4949 gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (va_type))
4950 == TYPE_MAIN_VARIANT (TREE_TYPE (canon_va_type)));
4951
4952 /* Don't take the address. We've already got '&ap'. */
4953 ;
4954 }
4955
4956  return build_va_arg_1 (loc, type, expr);
4957}
4958
4959
4960/* Linked list of disabled built-in functions. */
4961
4962struct disabled_builtin
4963{
4964 const char *name;
4965 struct disabled_builtin *next;
4966};
4967static disabled_builtin *disabled_builtins = NULL;
4968
4969static bool builtin_function_disabled_p (const char *);
4970
4971/* Disable a built-in function specified by -fno-builtin-NAME. If NAME
4972 begins with "__builtin_", give an error. */
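/* For example, -fno-builtin-strlen reaches this function with NAME set to
   "strlen"; the name is recorded so that def_builtin_1 below only declares
   the __builtin_strlen spelling, while -fno-builtin-__builtin_strlen is
   rejected with an error.  */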
4973
4974void
4975disable_builtin_function (const char *name)
4976{
4977  if (startswith (name, "__builtin_"))
4978 error ("cannot disable built-in function %qs", name);
4979 else
4980 {
4981 disabled_builtin *new_disabled_builtin = XNEW (disabled_builtin);
4982 new_disabled_builtin->name = name;
4983 new_disabled_builtin->next = disabled_builtins;
4984 disabled_builtins = new_disabled_builtin;
4985 }
4986}
4987
4988
4989/* Return true if the built-in function NAME has been disabled, false
4990 otherwise. */
4991
4992static bool
4993builtin_function_disabled_p (const char *name)
4994{
4995 disabled_builtin *p;
4996 for (p = disabled_builtins; p != NULL; p = p->next)
4997 {
4998      if (strcmp (name, p->name) == 0)
4999 return true;
5000 }
5001 return false;
5002}
5003
5004
5005/* Worker for DEF_BUILTIN.
5006 Possibly define a builtin function with one or two names.
5007 Does not declare a non-__builtin_ function if flag_no_builtin, or if
5008 nonansi_p and flag_no_nonansi_builtin. */
5009
5010static void
5011def_builtin_1 (enum built_in_function fncode,
5012 const char *name,
5013 enum built_in_class fnclass,
5014 tree fntype, tree libtype,
5015 bool both_p, bool fallback_p, bool nonansi_p,
5016 tree fnattrs, bool implicit_p)
5017{
5018 tree decl;
5019 const char *libname;
5020
5021 if (fntype == error_mark_node)
5022 return;
5023
5024 gcc_assert ((!both_p && !fallback_p)
5025 || startswith (name, "__builtin_"));
5026
5027  libname = name + strlen ("__builtin_");
5028  decl = add_builtin_function (name, fntype, fncode, fnclass,
5029			       (fallback_p ? libname : NULL),
5030			       fnattrs);
5031
5032 set_builtin_decl (fncode, decl, implicit_p);
5033
5034 if (both_p
5035      && !flag_no_builtin && !builtin_function_disabled_p (libname)
5036 && !(nonansi_p && flag_no_nonansi_builtin))
5037    add_builtin_function (libname, libtype, fncode, fnclass,
5038			  NULL, fnattrs);
5039}
5040
5041/* Nonzero if the type T promotes to int. This is (nearly) the
5042 integral promotions defined in ISO C99 6.3.1.1/2. */
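/* For example, given

     void f (char c, short s, _Bool b, int i);

   the types of C, S and B all answer true here, since each promotes to
   int, whereas the type of I (and any type at least as wide as int)
   answers false.  */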
5043
5044bool
5045c_promoting_integer_type_p (const_tree t)
5046{
5047 switch (TREE_CODE (t))
5048 {
5049 case INTEGER_TYPE:
5050 return (TYPE_MAIN_VARIANT (t) == char_type_node
5051 || TYPE_MAIN_VARIANT (t) == signed_char_type_node
5052 || TYPE_MAIN_VARIANT (t) == unsigned_char_type_node
5053 || TYPE_MAIN_VARIANT (t) == short_integer_type_node
5054 || TYPE_MAIN_VARIANT (t) == short_unsigned_type_node
5055 || TYPE_PRECISION (t) < TYPE_PRECISION (integer_type_node));
5056
5057 case ENUMERAL_TYPE:
5058 /* ??? Technically all enumerations not larger than an int
5059 promote to an int. But this is used along code paths
5060 that only want to notice a size change. */
5061 return TYPE_PRECISION (t) < TYPE_PRECISION (integer_type_node);
5062
5063 case BOOLEAN_TYPE:
5064 return true;
5065
5066 default:
5067 return false;
5068 }
5069}
5070
5071/* Return 1 if PARMS specifies a fixed number of parameters
5072 and none of their types is affected by default promotions. */
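/* For example, among

     int f (int, long);
     int g (char, float);
     int h (int, ...);

   only the parameter list of f satisfies this predicate: in g, char
   promotes to int and float to double, and h does not have a fixed
   number of parameters.  */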
5073
5074bool
5075self_promoting_args_p (const_tree parms)
5076{
5077 const_tree t;
5078 for (t = parms; t; t = TREE_CHAIN (t))
5079 {
5080 tree type = TREE_VALUE (t);
5081
5082 if (type == error_mark_node)
5083 continue;
5084
5085 if (TREE_CHAIN (t) == NULL_TREE && type != void_type_node)
5086 return false;
5087
5088 if (type == NULL_TREE)
5089 return false;
5090
5091 if (TYPE_MAIN_VARIANT (type) == float_type_node)
5092 return false;
5093
5094      if (c_promoting_integer_type_p (type))
5095 return false;
5096 }
5097 return true;
5098}
5099
5100/* Recursively remove any '*' or '&' operator from TYPE. */
5101tree
5102strip_pointer_operator (tree t)
5103{
5104 while (POINTER_TYPE_P (t))
5105 t = TREE_TYPE (t);
5106 return t;
5107}
5108
5109/* Recursively remove pointer or array type from TYPE. */
5110tree
5111strip_pointer_or_array_types (tree t)
5112{
5113 while (TREE_CODE (t) == ARRAY_TYPE || POINTER_TYPE_P (t))
5114 t = TREE_TYPE (t);
5115 return t;
5116}
5117
5118/* Used to compare case labels. K1 and K2 are actually tree nodes
5119 representing case labels, or NULL_TREE for a `default' label.
5120   Returns -1 if K1 is ordered before K2, 1 if K1 is ordered after
5121   K2, and 0 if K1 and K2 are equal.  */
5122
5123int
5124case_compare (splay_tree_key k1, splay_tree_key k2)
5125{
5126 /* Consider a NULL key (such as arises with a `default' label) to be
5127 smaller than anything else. */
5128 if (!k1)
5129 return k2 ? -1 : 0;
5130 else if (!k2)
5131 return k1 ? 1 : 0;
5132
5133  return tree_int_cst_compare ((tree) k1, (tree) k2);
5134}
5135
5136/* Process a case label, located at LOC, for the range LOW_VALUE
5137 ... HIGH_VALUE. If LOW_VALUE and HIGH_VALUE are both NULL_TREE
5138 then this case label is actually a `default' label. If only
5139   HIGH_VALUE is NULL_TREE, then the case label was declared using the
5140   usual C/C++ syntax, rather than the GNU case range extension.
5141 CASES is a tree containing all the case ranges processed so far;
5142 COND is the condition for the switch-statement itself.
5143 Returns the CASE_LABEL_EXPR created, or ERROR_MARK_NODE if no
5144 CASE_LABEL_EXPR is created. ATTRS are the attributes to be applied
5145 to the label. */
5146
5147tree
5148c_add_case_label (location_t loc, splay_tree cases, tree cond,
5149 tree low_value, tree high_value, tree attrs)
5150{
5151 tree type;
5152 tree label;
5153 tree case_label;
5154 splay_tree_node node;
5155
5156 /* Create the LABEL_DECL itself. */
5157 label = create_artificial_label (loc);
5158 decl_attributes (&label, attrs, 0);
5159
5160 /* If there was an error processing the switch condition, bail now
5161 before we get more confused. */
5162 if (!cond || cond == error_mark_node)
5163 goto error_out;
5164
5165 if ((low_value && TREE_TYPE (low_value)
5166 && POINTER_TYPE_P (TREE_TYPE (low_value)))
5167 || (high_value && TREE_TYPE (high_value)
5168 && POINTER_TYPE_P (TREE_TYPE (high_value))))
5169 {
5170 error_at (loc, "pointers are not permitted as case values");
5171 goto error_out;
5172 }
5173
5174 /* Case ranges are a GNU extension. */
5175 if (high_value)
5176 pedwarn (loc, OPT_Wpedantic,
5177 "range expressions in switch statements are non-standard");
5178
5179 type = TREE_TYPE (cond);
5180 if (low_value)
5181 {
5182      low_value = check_case_value (loc, low_value);
5183      low_value = convert_and_check (loc, type, low_value);
5184 low_value = fold (low_value);
5185 if (low_value == error_mark_node)
5186 goto error_out;
5187 }
5188 if (high_value)
5189 {
5190      high_value = check_case_value (loc, high_value);
5191      high_value = convert_and_check (loc, type, high_value);
5192 high_value = fold (high_value);
5193 if (high_value == error_mark_node)
5194 goto error_out;
5195 }
5196
5197 if (low_value && high_value)
5198 {
5199 /* If the LOW_VALUE and HIGH_VALUE are the same, then this isn't
5200 really a case range, even though it was written that way.
5201 Remove the HIGH_VALUE to simplify later processing. */
5202 if (tree_int_cst_equal (low_value, high_value))
5203 high_value = NULL_TREE;
5204      else if (!tree_int_cst_lt (low_value, high_value))
5205 warning_at (loc, 0, "empty range specified");
5206 }
5207
5208 /* Look up the LOW_VALUE in the table of case labels we already
5209 have. */
5210 node = splay_tree_lookup (cases, (splay_tree_key) low_value);
5211 /* If there was not an exact match, check for overlapping ranges.
5212 There's no need to do this if there's no LOW_VALUE or HIGH_VALUE;
5213 that's a `default' label and the only overlap is an exact match. */
5214 if (!node && (low_value || high_value))
5215 {
5216 splay_tree_node low_bound;
5217 splay_tree_node high_bound;
5218
5219 /* Even though there wasn't an exact match, there might be an
5220 overlap between this case range and another case range.
5221 Since we've (inductively) not allowed any overlapping case
5222 ranges, we simply need to find the greatest low case label
5223	 that is smaller than LOW_VALUE, and the smallest low case
5224 label that is greater than LOW_VALUE. If there is an overlap
5225 it will occur in one of these two ranges. */
5226 low_bound = splay_tree_predecessor (cases,
5227 (splay_tree_key) low_value);
5228 high_bound = splay_tree_successor (cases,
5229 (splay_tree_key) low_value);
5230
5231 /* Check to see if the LOW_BOUND overlaps. It is smaller than
5232 the LOW_VALUE, so there is no need to check unless the
5233 LOW_BOUND is in fact itself a case range. */
5234 if (low_bound
5235 && CASE_HIGH ((tree) low_bound->value)
5236 && tree_int_cst_compare (CASE_HIGH ((tree) low_bound->value),
5237				   low_value) >= 0)
5238 node = low_bound;
5239 /* Check to see if the HIGH_BOUND overlaps. The low end of that
5240 range is bigger than the low end of the current range, so we
5241 are only interested if the current range is a real range, and
5242 not an ordinary case label. */
5243 else if (high_bound
5244 && high_value
5245	       && (tree_int_cst_compare ((tree) high_bound->key,
5246					 high_value)
5247 <= 0))
5248 node = high_bound;
5249 }
5250 /* If there was an overlap, issue an error. */
5251 if (node)
5252 {
5253 tree duplicate = CASE_LABEL ((tree) node->value);
5254
5255 if (high_value)
5256 {
5257 error_at (loc, "duplicate (or overlapping) case value");
5258 inform (DECL_SOURCE_LOCATION (duplicate),
5259 "this is the first entry overlapping that value");
5260 }
5261 else if (low_value)
5262 {
5263 error_at (loc, "duplicate case value") ;
5264 inform (DECL_SOURCE_LOCATION (duplicate), "previously used here");
5265 }
5266 else
5267 {
5268 error_at (loc, "multiple default labels in one switch");
5269 inform (DECL_SOURCE_LOCATION (duplicate),
5270 "this is the first default label");
5271 }
5272 goto error_out;
5273 }
5274
5275 /* Add a CASE_LABEL to the statement-tree. */
5276 case_label = add_stmt (build_case_label (low_value, high_value, label));
5277 /* Register this case label in the splay tree. */
5278 splay_tree_insert (cases,
5279 (splay_tree_key) low_value,
5280 (splay_tree_value) case_label);
5281
5282 return case_label;
5283
5284 error_out:
5285 /* Add a label so that the back-end doesn't think that the beginning of
5286 the switch is unreachable. Note that we do not add a case label, as
5287 that just leads to duplicates and thence to failure later on. */
5288 if (!cases->root)
5289 {
5290 tree t = create_artificial_label (loc);
5291 add_stmt (build_stmt (loc, LABEL_EXPR, t));
5292 }
5293 return error_mark_node;
5294}
5295
5296/* Subroutine of c_switch_covers_all_cases_p, called via
5297 splay_tree_foreach. Return 1 if it doesn't cover all the cases.
5298 ARGS[0] is initially NULL and after the first iteration is the
5299 so far highest case label. ARGS[1] is the minimum of SWITCH_COND's
5300 type. */
5301
5302static int
5303c_switch_covers_all_cases_p_1 (splay_tree_node node, void *data)
5304{
5305 tree label = (tree) node->value;
5306 tree *args = (tree *) data;
5307
5308 /* If there is a default case, we shouldn't have called this. */
5309 gcc_assert (CASE_LOW (label));
5310
5311 if (args[0] == NULL_TREE)
5312 {
5313      if (wi::to_widest (args[1]) < wi::to_widest (CASE_LOW (label)))
5314 return 1;
5315 }
5316  else if (wi::add (wi::to_widest (args[0]), 1)
5317 != wi::to_widest (CASE_LOW (label)))
5318 return 1;
5319 if (CASE_HIGH (label))
5320 args[0] = CASE_HIGH (label);
5321 else
5322 args[0] = CASE_LOW (label);
5323 return 0;
5324}
5325
5326/* Return true if switch with CASES and switch condition with type
5327 covers all possible values in the case labels. */
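/* For example, for a switch on a value of type bool, a body containing
   both "case 0:" and "case 1:" (or any body with a "default:" label)
   covers every possible value and this function returns true; a body
   with only "case 1:" does not, and it returns false.  */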
5328
5329bool
5330c_switch_covers_all_cases_p (splay_tree cases, tree type)
5331{
5332 /* If there is default:, this is always the case. */
5333 splay_tree_node default_node
5334 = splay_tree_lookup (cases, (splay_tree_key) NULL);
5335 if (default_node)
5336 return true;
5337
5338 if (!INTEGRAL_TYPE_P (type))
5339 return false;
5340
5341 tree args[2] = { NULL_TREE, TYPE_MIN_VALUE (type) };
5342 if (splay_tree_foreach (cases, c_switch_covers_all_cases_p_1, args))
5343 return false;
5344
5345 /* If there are no cases at all, or if the highest case label
5346 is smaller than TYPE_MAX_VALUE, return false. */
5347 if (args[0] == NULL_TREE
5348      || wi::to_widest (args[0]) < wi::to_widest (TYPE_MAX_VALUE (type)))
5349 return false;
5350
5351 return true;
5352}
5353
5354/* Return true if stmt can fall through. Used by block_may_fallthru
5355 default case. */
5356
5357bool
5358c_block_may_fallthru (const_tree stmt)
5359{
5360 switch (TREE_CODE (stmt))
5361 {
5362 case SWITCH_STMT:
5363 return (!SWITCH_STMT_ALL_CASES_P (stmt)
5364 || !SWITCH_STMT_NO_BREAK_P (stmt)
5365 || block_may_fallthru (SWITCH_STMT_BODY (stmt)));
5366
5367 default:
5368 return true;
5369 }
5370}
5371
5372/* Finish an expression taking the address of LABEL (an
5373 IDENTIFIER_NODE). Returns an expression for the address.
5374
5375 LOC is the location for the expression returned. */
5376
5377tree
5378finish_label_address_expr (tree label, location_t loc)
5379{
5380 tree result;
5381
5382 pedwarn (input_location, OPT_Wpedantic, "taking the address of a label is non-standard");
5383
5384 if (label == error_mark_node)
5385 return error_mark_node;
5386
5387 label = lookup_label (label);
5388 if (label == NULL_TREE)
5389 result = null_pointer_node;
5390 else
5391 {
5392 TREE_USED (label) = 1;
5393 result = build1 (ADDR_EXPR, ptr_type_node, label);
5394 /* The current function is not necessarily uninlinable.
5395 Computed gotos are incompatible with inlining, but the value
5396 here could be used only in a diagnostic, for example. */
5397 protected_set_expr_location (result, loc);
5398 }
5399
5400 return result;
5401}
5402
5403
5404/* Given a boolean expression ARG, return a tree representing an increment
5405 or decrement (as indicated by CODE) of ARG. The front end must check for
5406 invalid cases (e.g., decrement in C++). */
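/* For example, for a C "_Bool b" (or a C++ "bool b" where the operation
   is still accepted), ++b and b++ both store true into b, with b++
   yielding the prior value via a SAVE_EXPR, while --b and b-- store the
   logical negation of b, as built below.  */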
5407tree
5408boolean_increment (enum tree_code code, tree arg)
5409{
5410 tree val;
5411 tree true_res = build_int_cst (TREE_TYPE (arg), 1);
5412
5413 arg = stabilize_reference (arg);
5414 switch (code)
5415 {
5416 case PREINCREMENT_EXPR:
5417 val = build2 (MODIFY_EXPR, TREE_TYPE (arg), arg, true_res);
5418 break;
5419 case POSTINCREMENT_EXPR:
5420 val = build2 (MODIFY_EXPR, TREE_TYPE (arg), arg, true_res);
5421 arg = save_expr (arg);
5422 val = build2 (COMPOUND_EXPR, TREE_TYPE (arg), val, arg);
5423 val = build2 (COMPOUND_EXPR, TREE_TYPE (arg), arg, val);
5424 break;
5425 case PREDECREMENT_EXPR:
5426 val = build2 (MODIFY_EXPR, TREE_TYPE (arg), arg,
5427 invert_truthvalue_loc (input_location, arg));
5428 break;
5429 case POSTDECREMENT_EXPR:
5430 val = build2 (MODIFY_EXPR, TREE_TYPE (arg), arg,
5431 invert_truthvalue_loc (input_location, arg));
5432 arg = save_expr (arg);
5433 val = build2 (COMPOUND_EXPR, TREE_TYPE (arg), val, arg);
5434 val = build2 (COMPOUND_EXPR, TREE_TYPE (arg), arg, val);
5435 break;
5436 default:
5437 gcc_unreachable ();
5438 }
5439 TREE_SIDE_EFFECTS (val) = 1;
5440 return val;
5441}
5442
5443/* Built-in macros for stddef.h and stdint.h, that require macros
5444 defined in this file. */
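/* For example, on a typical x86_64 GNU/Linux target this defines
   __SIZE_TYPE__ to "long unsigned int" and __PTRDIFF_TYPE__ to
   "long int", which the installed <stddef.h> then uses as

     typedef __SIZE_TYPE__ size_t;
     typedef __PTRDIFF_TYPE__ ptrdiff_t;  */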
5445void
5446c_stddef_cpp_builtins(void)
5447{
5448 builtin_define_with_value ("__SIZE_TYPE__", SIZE_TYPE, 0);
5449 builtin_define_with_value ("__PTRDIFF_TYPE__", PTRDIFF_TYPE, 0);
5450 builtin_define_with_value ("__WCHAR_TYPE__", MODIFIED_WCHAR_TYPE, 0);
5451 /* C++ has wchar_t as a builtin type, C doesn't, so if WINT_TYPE
5452 maps to wchar_t, define it to the underlying WCHAR_TYPE in C, and
5453 to wchar_t in C++, so the desired type equivalence holds. */
5454 if (!c_dialect_cxx ()
5455      && strcmp (WINT_TYPE, "wchar_t") == 0)
5456 builtin_define_with_value ("__WINT_TYPE__", WCHAR_TYPE, 0);
5457 else
5458 builtin_define_with_value ("__WINT_TYPE__", WINT_TYPE, 0);
5459 builtin_define_with_value ("__INTMAX_TYPE__", INTMAX_TYPE, 0);
5460 builtin_define_with_value ("__UINTMAX_TYPE__", UINTMAX_TYPE, 0);
5461 if (flag_char8_t)
5462 builtin_define_with_value ("__CHAR8_TYPE__", CHAR8_TYPE, 0);
5463 builtin_define_with_value ("__CHAR16_TYPE__", CHAR16_TYPE, 0);
5464 builtin_define_with_value ("__CHAR32_TYPE__", CHAR32_TYPE, 0);
5465 if (SIG_ATOMIC_TYPE)
5466 builtin_define_with_value ("__SIG_ATOMIC_TYPE__", SIG_ATOMIC_TYPE, 0);
5467 if (INT8_TYPE)
5468 builtin_define_with_value ("__INT8_TYPE__", INT8_TYPE, 0);
5469 if (INT16_TYPE)
5470 builtin_define_with_value ("__INT16_TYPE__", INT16_TYPE, 0);
5471 if (INT32_TYPE)
5472 builtin_define_with_value ("__INT32_TYPE__", INT32_TYPE, 0);
5473 if (INT64_TYPE)
5474 builtin_define_with_value ("__INT64_TYPE__", INT64_TYPE, 0);
5475 if (UINT8_TYPE)
5476 builtin_define_with_value ("__UINT8_TYPE__", UINT8_TYPE, 0);
5477 if (UINT16_TYPE)
5478 builtin_define_with_value ("__UINT16_TYPE__", UINT16_TYPE, 0);
5479 if (UINT32_TYPE)
5480 builtin_define_with_value ("__UINT32_TYPE__", UINT32_TYPE, 0);
5481 if (UINT64_TYPE)
5482 builtin_define_with_value ("__UINT64_TYPE__", UINT64_TYPE, 0);
5483 if (INT_LEAST8_TYPE)
5484 builtin_define_with_value ("__INT_LEAST8_TYPE__", INT_LEAST8_TYPE, 0);
5485 if (INT_LEAST16_TYPE)
5486 builtin_define_with_value ("__INT_LEAST16_TYPE__", INT_LEAST16_TYPE, 0);
5487 if (INT_LEAST32_TYPE)
5488 builtin_define_with_value ("__INT_LEAST32_TYPE__", INT_LEAST32_TYPE, 0);
5489 if (INT_LEAST64_TYPE)
5490 builtin_define_with_value ("__INT_LEAST64_TYPE__", INT_LEAST64_TYPE, 0);
5491 if (UINT_LEAST8_TYPE)
5492 builtin_define_with_value ("__UINT_LEAST8_TYPE__", UINT_LEAST8_TYPE, 0);
5493 if (UINT_LEAST16_TYPE)
5494 builtin_define_with_value ("__UINT_LEAST16_TYPE__", UINT_LEAST16_TYPE, 0);
5495 if (UINT_LEAST32_TYPE)
5496 builtin_define_with_value ("__UINT_LEAST32_TYPE__", UINT_LEAST32_TYPE, 0);
5497 if (UINT_LEAST64_TYPE)
5498 builtin_define_with_value ("__UINT_LEAST64_TYPE__", UINT_LEAST64_TYPE, 0);
5499 if (INT_FAST8_TYPE)
5500 builtin_define_with_value ("__INT_FAST8_TYPE__", INT_FAST8_TYPE, 0);
5501 if (INT_FAST16_TYPE)
5502 builtin_define_with_value ("__INT_FAST16_TYPE__", INT_FAST16_TYPE, 0);
5503 if (INT_FAST32_TYPE)
5504 builtin_define_with_value ("__INT_FAST32_TYPE__", INT_FAST32_TYPE, 0);
5505 if (INT_FAST64_TYPE)
5506 builtin_define_with_value ("__INT_FAST64_TYPE__", INT_FAST64_TYPE, 0);
5507 if (UINT_FAST8_TYPE)
5508 builtin_define_with_value ("__UINT_FAST8_TYPE__", UINT_FAST8_TYPE, 0);
5509 if (UINT_FAST16_TYPE)
5510 builtin_define_with_value ("__UINT_FAST16_TYPE__", UINT_FAST16_TYPE, 0);
5511 if (UINT_FAST32_TYPE)
5512 builtin_define_with_value ("__UINT_FAST32_TYPE__", UINT_FAST32_TYPE, 0);
5513 if (UINT_FAST64_TYPE)
5514 builtin_define_with_value ("__UINT_FAST64_TYPE__", UINT_FAST64_TYPE, 0);
5515 if (INTPTR_TYPE)
5516 builtin_define_with_value ("__INTPTR_TYPE__", INTPTR_TYPE, 0);
5517 if (UINTPTR_TYPE)
5518 builtin_define_with_value ("__UINTPTR_TYPE__", UINTPTR_TYPE, 0);
5519 /* GIMPLE FE testcases need access to the GCC internal 'sizetype'.
5520 Expose it as __SIZETYPE__. */
5521 if (flag_gimple)
5522 builtin_define_with_value ("__SIZETYPE__", SIZETYPE, 0);
5523}
5524
5525static void
5526c_init_attributes (void)
5527{
5528 /* Fill in the built_in_attributes array. */
5529#define DEF_ATTR_NULL_TREE(ENUM) \
5530 built_in_attributes[(int) ENUM] = NULL_TREE;
5531#define DEF_ATTR_INT(ENUM, VALUE) \
5532 built_in_attributes[(int) ENUM] = build_int_cst (integer_type_node, VALUE);
5533#define DEF_ATTR_STRING(ENUM, VALUE) \
5534 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
5535#define DEF_ATTR_IDENT(ENUM, STRING) \
5536 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5537#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5538 built_in_attributes[(int) ENUM] \
5539 = tree_cons (built_in_attributes[(int) PURPOSE], \
5540 built_in_attributes[(int) VALUE], \
5541 built_in_attributes[(int) CHAIN]);
5542#include "builtin-attrs.def"
5543#undef DEF_ATTR_NULL_TREE
5544#undef DEF_ATTR_INT
5545#undef DEF_ATTR_IDENT
5546#undef DEF_ATTR_TREE_LIST
5547}
5548
5549/* Check whether the byte alignment ALIGN is a valid user-specified
5550 alignment less than the supported maximum. If so, return ALIGN's
5551 base-2 log; if not, output an error and return -1. If OBJFILE
5552 then reject alignments greater than MAX_OFILE_ALIGNMENT when
5553 converted to bits. Otherwise, consider valid only alignments
5554 that are less than HOST_BITS_PER_INT - LOG2_BITS_PER_UNIT.
5555 Zero is not considered a valid argument (and results in -1 on
5556 return) but it only triggers a warning when WARN_ZERO is set. */
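/* For example, __attribute__ ((aligned (16))) reaches this function with
   ALIGN equal to 16 and yields 4 (log2 of 16), whereas aligned (3) is
   rejected with an error and aligned (0) returns -1, warning only when
   WARN_ZERO is set.  */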
5557
5558int
5559check_user_alignment (const_tree align, bool objfile, bool warn_zero)
5560{
5561  if (error_operand_p (align))
5562 return -1;
5563
5564 if (TREE_CODE (align) != INTEGER_CST
5565 || !INTEGRAL_TYPE_P (TREE_TYPE (align)))
5566 {
5567 error ("requested alignment is not an integer constant");
5568 return -1;
5569 }
5570
5571 if (integer_zerop (align))
5572 {
5573 if (warn_zero)
5574 warning (OPT_Wattributes,
5575 "requested alignment %qE is not a positive power of 2",
5576 align);
5577 return -1;
5578 }
5579
5580 /* Log2 of the byte alignment ALIGN. */
5581 int log2align;
5582 if (tree_int_cst_sgn (align) == -1
5583 || (log2align = tree_log2 (align)) == -1)
5584 {
5585 error ("requested alignment %qE is not a positive power of 2",
5586 align);
5587 return -1;
5588 }
5589
5590 if (objfile)
5591 {
5592 unsigned maxalign = MAX_OFILE_ALIGNMENT / BITS_PER_UNIT;
5593 if (!tree_fits_uhwi_p (align) || tree_to_uhwi (align) > maxalign)
5594 {
5595 error ("requested alignment %qE exceeds object file maximum %u",
5596 align, maxalign);
5597 return -1;
5598 }
5599 }
5600
5601 if (log2align >= HOST_BITS_PER_INT - LOG2_BITS_PER_UNIT)
5602 {
5603 error ("requested alignment %qE exceeds maximum %u",
5604 align, 1U << (HOST_BITS_PER_INT - LOG2_BITS_PER_UNIT - 1));
5605 return -1;
5606 }
5607
5608 return log2align;
5609}
5610
5611/* Determine the ELF symbol visibility for DECL, which is either a
5612 variable or a function. It is an error to use this function if a
5613 definition of DECL is not available in this translation unit.
5614 Returns true if the final visibility has been determined by this
5615 function; false if the caller is free to make additional
5616 modifications. */
5617
5618bool
5619c_determine_visibility (tree decl)
5620{
5621 gcc_assert (VAR_OR_FUNCTION_DECL_P (decl));
5622
5623 /* If the user explicitly specified the visibility with an
5624 attribute, honor that. DECL_VISIBILITY will have been set during
5625 the processing of the attribute. We check for an explicit
5626 attribute, rather than just checking DECL_VISIBILITY_SPECIFIED,
5627 to distinguish the use of an attribute from the use of a "#pragma
5628 GCC visibility push(...)"; in the latter case we still want other
5629 considerations to be able to overrule the #pragma. */
5630  if (lookup_attribute ("visibility", DECL_ATTRIBUTES (decl))
5631      || (TARGET_DLLIMPORT_DECL_ATTRIBUTES
5632	  && (lookup_attribute ("dllimport", DECL_ATTRIBUTES (decl))
5633	      || lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl)))))
5634 return true;
5635
5636 /* Set default visibility to whatever the user supplied with
5637 visibility_specified depending on #pragma GCC visibility. */
5638 if (!DECL_VISIBILITY_SPECIFIED (decl))
5639 {
5640 if (visibility_options.inpragma
5641 || DECL_VISIBILITY (decl) != default_visibility)
5642 {
5643 DECL_VISIBILITY (decl) = default_visibility;
5644 DECL_VISIBILITY_SPECIFIED (decl) = visibility_options.inpragma;
5645 /* If visibility changed and DECL already has DECL_RTL, ensure
5646 symbol flags are updated. */
5647 if (((VAR_P (decl) && TREE_STATIC (decl))
5648 || TREE_CODE (decl) == FUNCTION_DECL)
5649 && DECL_RTL_SET_P (decl))
5650 make_decl_rtl (decl);
5651 }
5652 }
5653 return false;
5654}
5655
5656/* Data to communicate through check_function_arguments_recurse between
5657 check_function_nonnull and check_nonnull_arg. */
5658
5659struct nonnull_arg_ctx
5660{
5661 /* Location of the call. */
5662 location_t loc;
5663 /* The function whose arguments are being checked and its type (used
5664 for calls through function pointers). */
5665 const_tree fndecl, fntype;
5666 /* True if a warning has been issued. */
5667 bool warned_p;
5668};
5669
5670/* Check the argument list of a function call to CTX.FNDECL of CTX.FNTYPE
5671 for null in argument slots that are marked as requiring a non-null
5672 pointer argument. The NARGS arguments are passed in the array ARGARRAY.
5673 Return true if we have warned. */
5674
5675static bool
5676check_function_nonnull (nonnull_arg_ctx &ctx, int nargs, tree *argarray)
5677{
5678 int firstarg = 0;
5679 if (TREE_CODE (ctx.fntype) == METHOD_TYPE)
5680 {
5681 bool closure = false;
5682 if (ctx.fndecl)
5683 {
5684 /* For certain lambda expressions the C++ front end emits calls
5685 that pass a null this pointer as an argument named __closure
5686 to the member operator() of empty function. Detect those
5687 and avoid checking them, but proceed to check the remaining
5688 arguments. */
5689 tree arg0 = DECL_ARGUMENTS (ctx.fndecl);
5690 if (tree arg0name = DECL_NAME (arg0))
5691	    closure = id_equal (arg0name, "__closure");
5692 }
5693
5694 /* In calls to C++ non-static member functions check the this
5695 pointer regardless of whether the function is declared with
5696 attribute nonnull. */
5697 firstarg = 1;
5698 if (!closure)
5699 check_function_arguments_recurse (check_nonnull_arg, &ctx, argarray[0],
5700 firstarg, OPT_Wnonnull);
5701 }
5702
5703  tree attrs = lookup_attribute ("nonnull", TYPE_ATTRIBUTES (ctx.fntype));
5704 if (attrs == NULL_TREE)
5705 return ctx.warned_p;
5706
5707 tree a = attrs;
5708 /* See if any of the nonnull attributes has no arguments. If so,
5709 then every pointer argument is checked (in which case the check
5710 for pointer type is done in check_nonnull_arg). */
5711 if (TREE_VALUE (a) != NULL_TREE)
5712 do
5713      a = lookup_attribute ("nonnull", TREE_CHAIN (a));
5714 while (a != NULL_TREE && TREE_VALUE (a) != NULL_TREE);
5715
5716 if (a != NULL_TREE)
5717 for (int i = firstarg; i < nargs; i++)
5718 check_function_arguments_recurse (check_nonnull_arg, &ctx, argarray[i],
5719 i + 1, OPT_Wnonnull);
5720 else
5721 {
5722 /* Walk the argument list. If we encounter an argument number we
5723 should check for non-null, do it. */
5724 for (int i = firstarg; i < nargs; i++)
5725 {
5726 for (a = attrs; ; a = TREE_CHAIN (a))
5727 {
5728	      a = lookup_attribute ("nonnull", a);
5729 if (a == NULL_TREE || nonnull_check_p (TREE_VALUE (a), i + 1))
5730 break;
5731 }
5732
5733 if (a != NULL_TREE)
5734 check_function_arguments_recurse (check_nonnull_arg, &ctx,
5735 argarray[i], i + 1,
5736 OPT_Wnonnull);
5737 }
5738 }
5739 return ctx.warned_p;
5740}
5741
5742/* Check that the Nth argument of a function call (counting backwards
5743 from the end) is a (pointer)0. The NARGS arguments are passed in the
5744 array ARGARRAY. */
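/* For example, for a function declared

     void logv (const char *fmt, ...) __attribute__ ((sentinel));

   a call such as logv ("%s", s, t) is warned about unless its last
   argument is a null pointer, as in logv ("%s", s, t, NULL); with
   sentinel (N) the null must instead appear N positions before the end
   of the argument list.  */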
5745
5746static void
5747check_function_sentinel (const_tree fntype, int nargs, tree *argarray)
5748{
5749  tree attr = lookup_attribute ("sentinel", TYPE_ATTRIBUTES (fntype));
5750
5751 if (attr)
5752 {
5753 int len = 0;
5754 int pos = 0;
5755 tree sentinel;
5756 function_args_iterator iter;
5757 tree t;
5758
5759 /* Skip over the named arguments. */
5760 FOREACH_FUNCTION_ARGS (fntype, t, iter)
5761 {
5762 if (len == nargs)
5763 break;
5764 len++;
5765 }
5766
5767 if (TREE_VALUE (attr))
5768 {
5769 tree p = TREE_VALUE (TREE_VALUE (attr));
5770 pos = TREE_INT_CST_LOW (p);
5771 }
5772
5773 /* The sentinel must be one of the varargs, i.e.
5774 in position >= the number of fixed arguments. */
5775 if ((nargs - 1 - pos) < len)
5776 {
5777 warning (OPT_Wformat_,
5778 "not enough variable arguments to fit a sentinel");
5779 return;
5780 }
5781
5782 /* Validate the sentinel. */
5783 sentinel = fold_for_warn (argarray[nargs - 1 - pos]);
5784 if ((!POINTER_TYPE_P (TREE_TYPE (sentinel))
5785 || !integer_zerop (sentinel))
5786 && TREE_CODE (TREE_TYPE (sentinel)) != NULLPTR_TYPE
5787 /* Although __null (in C++) is only an integer we allow it
5788 nevertheless, as we are guaranteed that it's exactly
5789 as wide as a pointer, and we don't want to force
5790 users to cast the NULL they have written there.
5791 We warn with -Wstrict-null-sentinel, though. */
5792 && (warn_strict_null_sentinel || null_node != sentinel))
5793 warning (OPT_Wformat_, "missing sentinel in function call");
5794 }
5795}
5796
5797/* Check that the same argument isn't passed to two or more
5798 restrict-qualified formal and issue a -Wrestrict warning
5799 if it is. Return true if a warning has been issued. */
5800
5801static bool
5802check_function_restrict (const_tree fndecl, const_tree fntype,
5803 int nargs, tree *unfolded_argarray)
5804{
5805 int i;
5806 tree parms = TYPE_ARG_TYPES (fntype);
5807
5808 /* Call fold_for_warn on all of the arguments. */
5809 auto_vec<tree> argarray (nargs);
5810 for (i = 0; i < nargs; i++)
5811    argarray.quick_push (fold_for_warn (unfolded_argarray[i]));
5812
5813 if (fndecl
5814 && TREE_CODE (fndecl) == FUNCTION_DECL)
5815 {
5816 /* Avoid diagnosing calls built-ins with a zero size/bound
5817 here. They are checked in more detail elsewhere. */
5818      if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
5819 && nargs == 3
5820 && TREE_CODE (argarray[2]) == INTEGER_CST
5821 && integer_zerop (argarray[2]))
5822 return false;
5823
5824 if (DECL_ARGUMENTS (fndecl))
5825 parms = DECL_ARGUMENTS (fndecl);
5826 }
5827
5828 for (i = 0; i < nargs; i++)
5829 TREE_VISITED (argarray[i]) = 0;
5830
5831 bool warned = false;
5832
5833 for (i = 0; i < nargs && parms && parms != void_list_node; i++)
5834 {
5835 tree type;
5836 if (TREE_CODE (parms) == PARM_DECL)
5837 {
5838 type = TREE_TYPE (parms);
5839 parms = DECL_CHAIN (parms);
5840 }
5841 else
5842 {
5843 type = TREE_VALUE (parms);
5844 parms = TREE_CHAIN (parms);
5845 }
5846 if (POINTER_TYPE_P (type)
5847 && TYPE_RESTRICT (type)
5848 && !TYPE_READONLY (TREE_TYPE (type)))
5849 warned |= warn_for_restrict (i, argarray.address (), nargs);
5850 }
5851
5852 for (i = 0; i < nargs; i++)
5853 TREE_VISITED (argarray[i]) = 0;
5854
5855 return warned;
5856}
5857
5858/* Helper for check_function_nonnull; given a list of operands which
5859 must be non-null in ARGS, determine if operand PARAM_NUM should be
5860 checked. */
5861
5862static bool
5863nonnull_check_p (tree args, unsigned HOST_WIDE_INT param_num)
5864{
5865 unsigned HOST_WIDE_INT arg_num = 0;
5866
5867 for (; args; args = TREE_CHAIN (args))
5868 {
5869 bool found = get_attribute_operand (TREE_VALUE (args), &arg_num);
5870
5871 gcc_assert (found);
5872
5873 if (arg_num == param_num)
5874 return true;
5875 }
5876 return false;
5877}
5878
5879/* Check that the function argument PARAM (which is operand number
5880 PARAM_NUM) is non-null. This is called by check_function_nonnull
5881 via check_function_arguments_recurse. */
5882
5883static void
5884check_nonnull_arg (void *ctx, tree param, unsigned HOST_WIDE_INT param_num)
5885{
5886 struct nonnull_arg_ctx *pctx = (struct nonnull_arg_ctx *) ctx;
5887
5888 /* Just skip checking the argument if it's not a pointer. This can
5889 happen if the "nonnull" attribute was given without an operand
5890 list (which means to check every pointer argument). */
5891
5892 tree paramtype = TREE_TYPE (param);
5893 if (TREE_CODE (paramtype) != POINTER_TYPE
5894 && TREE_CODE (paramtype) != NULLPTR_TYPE)
5895 return;
5896
5897 /* Diagnose the simple cases of null arguments. */
5898 if (!integer_zerop (fold_for_warn (param)))
5899 return;
5900
5901 auto_diagnostic_group adg;
5902
5903 const location_t loc = EXPR_LOC_OR_LOC (param, pctx->loc);
5904
5905 if (TREE_CODE (pctx->fntype) == METHOD_TYPE)
5906 --param_num;
5907
5908 bool warned;
5909 if (param_num == 0)
5910 {
5911 warned = warning_at (loc, OPT_Wnonnull,
5912 "%qs pointer is null", "this");
5913 if (warned && pctx->fndecl)
5914 inform (DECL_SOURCE_LOCATION (pctx->fndecl),
5915 "in a call to non-static member function %qD",
5916 pctx->fndecl);
5917 }
5918 else
5919 {
5920 warned = warning_at (loc, OPT_Wnonnull,
5921 "argument %u null where non-null expected",
5922 (unsigned) param_num);
5923 if (warned && pctx->fndecl)
5924 inform (DECL_SOURCE_LOCATION (pctx->fndecl),
5925 "in a call to function %qD declared %qs",
5926 pctx->fndecl, "nonnull");
5927 }
5928
5929 if (warned)
5930 pctx->warned_p = true;
5931}
5932
5933/* Helper for attribute handling; fetch the operand number from
5934 the attribute argument list. */
5935
5936bool
5937get_attribute_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5938{
5939 /* Verify the arg number is a small constant. */
5940 if (tree_fits_uhwi_p (arg_num_expr))
5941 {
5942 *valp = tree_to_uhwi (arg_num_expr);
5943 return true;
5944 }
5945 else
5946 return false;
5947}
5948
5949/* Arguments being collected for optimization. */
5950typedef const char *const_char_p; /* For DEF_VEC_P. */
5951static GTY(()) vec<const_char_p, va_gc> *optimize_args;
5952
5953
5954/* Inner function to convert a TREE_LIST to argv string to parse the optimize
5955 options in ARGS. ATTR_P is true if this is for attribute(optimize), and
5956 false for #pragma GCC optimize. */
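/* For example, __attribute__ ((optimize ("O2,unroll-loops"))) arrives here
   as a TREE_LIST holding the string "O2,unroll-loops"; the string is split
   at commas and rewritten to the switches -O2 and -funroll-loops, which
   are then decoded like normal command-line options below.  */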
5957
5958bool
5959parse_optimize_options (tree args, bool attr_p)
5960{
5961 bool ret = true;
5962 unsigned opt_argc;
5963 unsigned i;
5964 const char **opt_argv;
5965 struct cl_decoded_option *decoded_options;
5966 unsigned int decoded_options_count;
5967 tree ap;
5968
5969 /* Build up argv vector. Just in case the string is stored away, use garbage
5970 collected strings. */
5971  vec_safe_truncate (optimize_args, 0);
5972  vec_safe_push (optimize_args, (const char *) NULL);
5973
5974 for (ap = args; ap != NULL_TREE; ap = TREE_CHAIN (ap))
5975 {
5976 tree value = TREE_VALUE (ap);
5977
5978 if (TREE_CODE (value) == INTEGER_CST)
5979 {
5980 char buffer[HOST_BITS_PER_LONG / 3 + 4];
5981	  sprintf (buffer, "-O%ld", (long) TREE_INT_CST_LOW (value));
5982	  vec_safe_push (optimize_args, ggc_strdup (buffer));
5983 }
5984
5985 else if (TREE_CODE (value) == STRING_CST)
5986 {
5987 /* Split string into multiple substrings. */
5988 size_t len = TREE_STRING_LENGTH (value);
5989 char *p = ASTRDUP (TREE_STRING_POINTER (value));
5990 char *end = p + len;
5991 char *comma;
5992 char *next_p = p;
5993
5994 while (next_p != NULL)
5995 {
5996 size_t len2;
5997 char *q, *r;
5998
5999 p = next_p;
6000	      comma = strchr (p, ',');
6001 if (comma)
6002 {
6003 len2 = comma - p;
6004 *comma = '\0';
6005 next_p = comma+1;
6006 }
6007 else
6008 {
6009 len2 = end - p;
6010 next_p = NULL;
6011 }
6012
6013 /* If the user supplied -Oxxx or -fxxx, only allow -Oxxx or -fxxx
6014 options. */
6015 if (*p == '-' && p[1] != 'O' && p[1] != 'f')
6016 {
6017 ret = false;
6018 if (attr_p)
6019 warning (OPT_Wattributes,
6020 "bad option %qs to attribute %<optimize%>", p);
6021 else
6022 warning (OPT_Wpragmas,
6023 "bad option %qs to pragma %<optimize%>", p);
6024 continue;
6025 }
6026
6027 /* Can't use GC memory here, see PR88007. */
6028 r = q = XOBNEWVEC (&opts_obstack, char, len2 + 3);
6029
6030 if (*p != '-')
6031 {
6032 *r++ = '-';
6033
6034	      /* Assume that Ox is -Ox, a numeric value is -Ox, an 's' by
6035		 itself is -Os, and any other switch begins with a -f.  */
6036 if ((*p >= '0' && *p <= '9')
6037 || (p[0] == 's' && p[1] == '\0'))
6038 *r++ = 'O';
6039 else if (*p != 'O')
6040 *r++ = 'f';
6041 }
6042
6043	      memcpy (r, p, len2);
6044 r[len2] = '\0';
6045	      vec_safe_push (optimize_args, (const char *) q);
6046 }
6047
6048 }
6049 }
6050
6051 opt_argc = optimize_args->length ();
6052 opt_argv = (const char **) alloca (sizeof (char *) * (opt_argc + 1));
6053
6054 for (i = 1; i < opt_argc; i++)
6055 opt_argv[i] = (*optimize_args)[i];
6056
6057 /* Now parse the options. */
6058  decode_cmdline_options_to_array_default_mask (opt_argc, opt_argv,
6059						 &decoded_options,
6060						 &decoded_options_count);
6061 /* Drop non-Optimization options. */
6062 unsigned j = 1;
6063 for (i = 1; i < decoded_options_count; ++i)
6064 {
6065 if (! (cl_options[decoded_options[i].opt_index].flags & CL_OPTIMIZATION))
6066 {
6067 ret = false;
6068 if (attr_p)
6069 warning (OPT_Wattributes,
6070 "bad option %qs to attribute %<optimize%>",
6071 decoded_options[i].orig_option_with_args_text);
6072 else
6073 warning (OPT_Wpragmas,
6074 "bad option %qs to pragma %<optimize%>",
6075 decoded_options[i].orig_option_with_args_text);
6076 continue;
6077 }
6078 if (i != j)
6079 decoded_options[j] = decoded_options[i];
6080 j++;
6081 }
6082 decoded_options_count = j;
6083
6084 /* Merge the decoded options with save_decoded_options. */
6085 unsigned save_opt_count = save_opt_decoded_options->length ();
6086 unsigned merged_decoded_options_count
6087 = save_opt_count + decoded_options_count;
6088 cl_decoded_option *merged_decoded_options
6089 = XNEWVEC (cl_decoded_option, merged_decoded_options_count);
6090
6091 /* Note the first decoded_options is used for the program name. */
6092 for (unsigned i = 0; i < save_opt_count; ++i)
6093 merged_decoded_options[i + 1] = (*save_opt_decoded_options)[i];
6094 for (unsigned i = 1; i < decoded_options_count; ++i)
6095 merged_decoded_options[save_opt_count + i] = decoded_options[i];
6096
6097 /* And apply them. */
6098  decode_options (&global_options, &global_options_set,
6099		  merged_decoded_options, merged_decoded_options_count,
6100		  input_location, global_dc, NULL);
6101  free (decoded_options);
6102
6103 targetm.override_options_after_change();
6104
6105  optimize_args->truncate (0);
6106 return ret;
6107}
6108
6109/* Check whether ATTR is a valid attribute fallthrough. */
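/* For example, in

     case 1:
       x = 1;
       [[fallthrough]];
     case 2:

   the attribute on the empty statement is accepted here and the function
   returns true; writing [[fallthrough (1)]], or repeating the attribute
   in one attribute-list, only produces a -Wattributes warning.  */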
6110
6111bool
6112attribute_fallthrough_p (tree attr)
6113{
6114 if (attr == error_mark_node)
6115 return false;
6116  tree t = lookup_attribute ("", "fallthrough", attr);
6117 if (t == NULL_TREE)
6118 return false;
6119 /* It is no longer true that "this attribute shall appear at most once in
6120 each attribute-list", but we still give a warning. */
6121  if (lookup_attribute ("", "fallthrough", TREE_CHAIN (t)))
6122 warning (OPT_Wattributes, "attribute %<fallthrough%> specified multiple "
6123 "times");
6124 /* No attribute-argument-clause shall be present. */
6125 else if (TREE_VALUE (t) != NULL_TREE)
6126 warning (OPT_Wattributes, "%<fallthrough%> attribute specified with "
6127 "a parameter");
6128 /* Warn if other attributes are found. */
6129 for (t = attr; t != NULL_TREE; t = TREE_CHAIN (t))
6130 {
6131 tree name = get_attribute_name (t);
6132      if (!is_attribute_p ("fallthrough", name)
6133	  || !is_attribute_namespace_p ("", t))
6134 {
6135 if (!c_dialect_cxx () && get_attribute_namespace (t) == NULL_TREE)
6136 /* The specifications of standard attributes in C mean
6137 this is a constraint violation. */
6138 pedwarn (input_location, OPT_Wattributes, "%qE attribute ignored",
6139 get_attribute_name (t));
6140 else
6141 warning (OPT_Wattributes, "%qE attribute ignored", name);
6142 }
6143 }
6144 return true;
6145}
6146
6147
6148/* Check for valid arguments being passed to a function with FNTYPE.
6149 There are NARGS arguments in the array ARGARRAY. LOC should be used
6150 for diagnostics. Return true if either -Wnonnull or -Wrestrict has
6151 been issued.
6152
6153 The arguments in ARGARRAY may not have been folded yet (e.g. for C++,
6154 to preserve location wrappers); checks that require folded arguments
6155 should call fold_for_warn on them. */
6156
6157bool
6158check_function_arguments (location_t loc, const_tree fndecl, const_tree fntype,
6159 int nargs, tree *argarray, vec<location_t> *arglocs)
6160{
6161 bool warned_p = false;
6162
6163 /* Check for null being passed in a pointer argument that must be
6164 non-null. In C++, this includes the this pointer. We also need
6165 to do this if format checking is enabled. */
6166 if (warn_nonnull)
6167 {
6168      nonnull_arg_ctx ctx = { loc, fndecl, fntype, false };
6169 warned_p = check_function_nonnull (ctx, nargs, argarray);
6170 }
6171
6172 /* Check for errors in format strings. */
6173
6174 if (warn_format || warn_suggest_attribute_format)
6175 check_function_format (fndecl ? fndecl : fntype, TYPE_ATTRIBUTES (fntype), nargs,
6176 argarray, arglocs);
6177
6178 if (warn_format)
6179 check_function_sentinel (fntype, nargs, argarray);
6180
6181  if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
6182 {
6183      switch (DECL_FUNCTION_CODE (fndecl))
6184 {
6185 case BUILT_IN_SPRINTF:
6186 case BUILT_IN_SPRINTF_CHK:
6187 case BUILT_IN_SNPRINTF:
6188 case BUILT_IN_SNPRINTF_CHK:
6189 /* Let the sprintf pass handle these. */
6190 return warned_p;
6191
6192 default:
6193 break;
6194 }
6195 }
6196
6197 /* check_function_restrict sets the DECL_READ_P for arguments
6198 so it must be called unconditionally. */
6199  warned_p |= check_function_restrict (fndecl, fntype, nargs, argarray);
6200
6201 return warned_p;
6202}
6203
6204/* Generic argument checking recursion routine. PARAM is the argument to
6205 be checked. PARAM_NUM is the number of the argument. CALLBACK is invoked
6206 once the argument is resolved. CTX is context for the callback.
6207 OPT is the warning for which this is done. */
6208void
6209check_function_arguments_recurse (void (*callback)
6210 (void *, tree, unsigned HOST_WIDE_INT),
6211 void *ctx, tree param,
6212 unsigned HOST_WIDE_INT param_num,
6213 opt_code opt)
6214{
6215 if (opt != OPT_Wformat_ && warning_suppressed_p (param))
6216 return;
6217
6218 if (CONVERT_EXPR_P (param)
6219 && (TYPE_PRECISION (TREE_TYPE (param))
6220 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (param, 0)))))
6221 {
6222 /* Strip coercion. */
6223 check_function_arguments_recurse (callback, ctx,
6224 TREE_OPERAND (param, 0), param_num,
6225 opt);
6226 return;
6227 }
6228
6229 if (TREE_CODE (param) == CALL_EXPR && CALL_EXPR_FN (param))
6230 {
6231 tree type = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (param)));
6232 tree attrs;
6233 bool found_format_arg = false;
6234
6235 /* See if this is a call to a known internationalization function
6236 that modifies a format arg. Such a function may have multiple
6237 format_arg attributes (for example, ngettext). */
6238
6239 for (attrs = TYPE_ATTRIBUTES (type);
6240 attrs;
6241 attrs = TREE_CHAIN (attrs))
6242      if (is_attribute_p ("format_arg", get_attribute_name (attrs)))
6243 {
6244 tree inner_arg;
6245 tree format_num_expr;
6246 int format_num;
6247 int i;
6248 call_expr_arg_iterator iter;
6249
6250 /* Extract the argument number, which was previously checked
6251 to be valid. */
6252 format_num_expr = TREE_VALUE (TREE_VALUE (attrs));
6253
6254 format_num = tree_to_uhwi (format_num_expr);
6255
6256	  for (inner_arg = first_call_expr_arg (param, &iter), i = 1;
6257 inner_arg != NULL_TREE;
6258	       inner_arg = next_call_expr_arg (&iter), i++)
6259 if (i == format_num)
6260 {
6261 check_function_arguments_recurse (callback, ctx,
6262						      inner_arg, param_num,
6263 opt);
6264 found_format_arg = true;
6265 break;
6266 }
6267 }
6268
6269 /* If we found a format_arg attribute and did a recursive check,
6270 we are done with checking this argument. Otherwise, we continue
6271 and this will be considered a non-literal. */
6272 if (found_format_arg)
6273 return;
6274 }
6275
6276 if (TREE_CODE (param) == COND_EXPR)
6277 {
6278 /* Simplify to avoid warning for an impossible case. */
6279 param = fold_for_warn (param);
6280 if (TREE_CODE (param) == COND_EXPR)
6281 {
6282 /* Check both halves of the conditional expression. */
6283 check_function_arguments_recurse (callback, ctx,
6284 TREE_OPERAND (param, 1),
6285 param_num, opt);
6286 check_function_arguments_recurse (callback, ctx,
6287 TREE_OPERAND (param, 2),
6288 param_num, opt);
6289 return;
6290 }
6291 }
6292
6293 (*callback) (ctx, param, param_num);
6294}
6295
6296/* Check that the number of arguments NARGS to the built-in function
6297   FNDECL matches the required number REQUIRED, issuing an error if
6298   there is a mismatch.  Returns true if the number of arguments is
6299 correct, otherwise false. LOC is the location of FNDECL. */
6300
6301static bool
6302builtin_function_validate_nargs (location_t loc, tree fndecl, int nargs,
6303 int required)
6304{
6305 if (nargs < required)
6306 {
6307 error_at (loc, "too few arguments to function %qE", fndecl);
6308 return false;
6309 }
6310 else if (nargs > required)
6311 {
6312 error_at (loc, "too many arguments to function %qE", fndecl);
6313 return false;
6314 }
6315 return true;
6316}
6317
6318/* Helper macro for check_builtin_function_arguments. */
6319#define ARG_LOCATION(N) \
6320 (arg_loc.is_empty () \
6321 ? EXPR_LOC_OR_LOC (args[(N)], input_location) \
6322 : expansion_point_location (arg_loc[(N)]))
6323
6324/* Verifies the NARGS arguments ARGS to the builtin function FNDECL.
6325 Returns false if there was an error, otherwise true. LOC is the
6326 location of the function; ARG_LOC is a vector of locations of the
6327 arguments. If FNDECL is the result of resolving an overloaded
6328 target built-in, ORIG_FNDECL is the original function decl,
6329 otherwise it is null. */
6330
6331bool
6332check_builtin_function_arguments (location_t loc, vec<location_t> arg_loc,
6333 tree fndecl, tree orig_fndecl,
6334 int nargs, tree *args)
6335{
6336 if (!fndecl_built_in_p (fndecl))
6337 return true;
6338
6339 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6340 return (!targetm.check_builtin_call
6341 || targetm.check_builtin_call (loc, arg_loc, fndecl,
6342 orig_fndecl, nargs, args));
6343
6344 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_FRONTEND)
6345 return true;
6346
6347 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
6348 switch (DECL_FUNCTION_CODE (fndecl))
6349 {
6350 case BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX:
6351 if (!tree_fits_uhwi_p (args[2]))
6352 {
6353 error_at (ARG_LOCATION (2),
6354 "third argument to function %qE must be a constant integer",
6355 fndecl);
6356 return false;
6357 }
6358 /* fall through */
6359
6360 case BUILT_IN_ALLOCA_WITH_ALIGN:
6361 {
6362 /* Get the requested alignment (in bits) if it's a constant
6363 integer expression. */
6364 unsigned HOST_WIDE_INT align
6365 = tree_fits_uhwi_p (args[1]) ? tree_to_uhwi (args[1]) : 0;
6366
6367 /* Determine if the requested alignment is a power of 2. */
6368 if ((align & (align - 1)))
6369 align = 0;
6370
6371 /* The maximum alignment in bits corresponding to the same
6372 maximum in bytes enforced in check_user_alignment(). */
6373 unsigned maxalign = (UINT_MAX >> 1) + 1;
6374
6375 /* Reject invalid alignments. */
6376 if (align < BITS_PER_UNIT || maxalign < align)
6377 {
6378 error_at (ARG_LOCATION (1),
6379 "second argument to function %qE must be a constant "
6380 "integer power of 2 between %qi and %qu bits",
6381 fndecl, BITS_PER_UNIT, maxalign);
6382 return false;
6383 }
6384 return true;
6385 }
6386
6387 case BUILT_IN_CONSTANT_P:
6388 return builtin_function_validate_nargs (loc, fndecl, nargs, 1);
6389
6390 case BUILT_IN_ISFINITE:
6391 case BUILT_IN_ISINF:
6392 case BUILT_IN_ISINF_SIGN:
6393 case BUILT_IN_ISNAN:
6394 case BUILT_IN_ISNORMAL:
6395 case BUILT_IN_ISSIGNALING:
6396 case BUILT_IN_SIGNBIT:
6397 if (builtin_function_validate_nargs (loc, fndecl, nargs, 1))
6398 {
6399 if (TREE_CODE (TREE_TYPE (args[0])) != REAL_TYPE)
6400 {
6401 error_at (ARG_LOCATION (0), "non-floating-point argument in "
6402 "call to function %qE", fndecl);
6403 return false;
6404 }
6405 return true;
6406 }
6407 return false;
6408
6409 case BUILT_IN_ISGREATER:
6410 case BUILT_IN_ISGREATEREQUAL:
6411 case BUILT_IN_ISLESS:
6412 case BUILT_IN_ISLESSEQUAL:
6413 case BUILT_IN_ISLESSGREATER:
6414 case BUILT_IN_ISUNORDERED:
6415 case BUILT_IN_ISEQSIG:
6416 if (builtin_function_validate_nargs (loc, fndecl, nargs, 2))
6417 {
6418 enum tree_code code0, code1;
6419 code0 = TREE_CODE (TREE_TYPE (args[0]));
6420 code1 = TREE_CODE (TREE_TYPE (args[1]));
6421 if (!((code0 == REAL_TYPE && code1 == REAL_TYPE)
6422 || (code0 == REAL_TYPE
6423 && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE))
6424 || ((code0 == INTEGER_TYPE || code0 == BITINT_TYPE)
6425 && code1 == REAL_TYPE)))
6426 {
6427 error_at (loc, "non-floating-point arguments in call to "
6428 "function %qE", fndecl);
6429 return false;
6430 }
6431 return true;
6432 }
6433 return false;
6434
6435 case BUILT_IN_FPCLASSIFY:
6436 if (builtin_function_validate_nargs (loc, fndecl, nargs, 6))
6437 {
6438 for (unsigned int i = 0; i < 5; i++)
6439 if (TREE_CODE (args[i]) != INTEGER_CST)
6440 {
6441 error_at (ARG_LOCATION (i), "non-const integer argument %u in "
6442 "call to function %qE", i + 1, fndecl);
6443 return false;
6444 }
6445
6446 if (TREE_CODE (TREE_TYPE (args[5])) != REAL_TYPE)
6447 {
6448 error_at (ARG_LOCATION (5), "non-floating-point argument in "
6449 "call to function %qE", fndecl);
6450 return false;
6451 }
6452 return true;
6453 }
6454 return false;
6455
6456 case BUILT_IN_ASSUME_ALIGNED:
6457 if (builtin_function_validate_nargs (loc, fndecl, nargs, 2 + (nargs > 2)))
6458 {
6459 if (nargs >= 3
6460 && TREE_CODE (TREE_TYPE (args[2])) != INTEGER_TYPE
6461 && TREE_CODE (TREE_TYPE (args[2])) != BITINT_TYPE)
6462 {
6463 error_at (ARG_LOCATION (2), "non-integer argument 3 in call to "
6464 "function %qE", fndecl);
6465 return false;
6466 }
6467 return true;
6468 }
6469 return false;
6470
6471 case BUILT_IN_ADD_OVERFLOW:
6472 case BUILT_IN_SUB_OVERFLOW:
6473 case BUILT_IN_MUL_OVERFLOW:
6474 if (builtin_function_validate_nargs (loc, fndecl, nargs, 3))
6475 {
6476 unsigned i;
6477 for (i = 0; i < 2; i++)
6478 if (!INTEGRAL_TYPE_P (TREE_TYPE (args[i])))
6479 {
6480 error_at (ARG_LOCATION (i), "argument %u in call to function "
6481 "%qE does not have integral type", i + 1, fndecl);
6482 return false;
6483 }
6484 if (TREE_CODE (TREE_TYPE (args[2])) != POINTER_TYPE
6485 || !INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (args[2]))))
6486 {
6487 error_at (ARG_LOCATION (2), "argument 3 in call to function %qE "
6488 "does not have pointer to integral type", fndecl);
6489 return false;
6490 }
6491 else if (TREE_CODE (TREE_TYPE (TREE_TYPE (args[2]))) == ENUMERAL_TYPE)
6492 {
6493 error_at (ARG_LOCATION (2), "argument 3 in call to function %qE "
6494 "has pointer to enumerated type", fndecl);
6495 return false;
6496 }
6497 else if (TREE_CODE (TREE_TYPE (TREE_TYPE (args[2]))) == BOOLEAN_TYPE)
6498 {
6499 error_at (ARG_LOCATION (2), "argument 3 in call to function %qE "
6500 "has pointer to boolean type", fndecl);
6501 return false;
6502 }
6503 else if (TYPE_READONLY (TREE_TYPE (TREE_TYPE (args[2]))))
6504 {
6505 error_at (ARG_LOCATION (2), "argument %u in call to function %qE "
6506 "has pointer to %qs type (%qT)", 3, fndecl, "const",
6507 TREE_TYPE (args[2]));
6508 return false;
6509 }
6510 else if (TYPE_ATOMIC (TREE_TYPE (TREE_TYPE (args[2]))))
6511 {
6512 error_at (ARG_LOCATION (2), "argument %u in call to function %qE "
6513 "has pointer to %qs type (%qT)", 3, fndecl,
6514 "_Atomic", TREE_TYPE (args[2]));
6515 return false;
6516 }
6517 return true;
6518 }
6519 return false;
6520
6521 case BUILT_IN_ADD_OVERFLOW_P:
6522 case BUILT_IN_SUB_OVERFLOW_P:
6523 case BUILT_IN_MUL_OVERFLOW_P:
6524 if (builtin_function_validate_nargs (loc, fndecl, nargs, 3))
6525 {
6526 unsigned i;
6527 for (i = 0; i < 3; i++)
6528 if (!INTEGRAL_TYPE_P (TREE_TYPE (args[i])))
6529 {
6530 error_at (ARG_LOCATION (i), "argument %u in call to function "
6531 "%qE does not have integral type", i + 1, fndecl);
6532 return false;
6533 }
6534 if (TREE_CODE (TREE_TYPE (args[2])) == ENUMERAL_TYPE)
6535 {
6536 error_at (ARG_LOCATION (2), "argument %u in call to function "
6537 "%qE has enumerated type", 3, fndecl);
6538 return false;
6539 }
6540 else if (TREE_CODE (TREE_TYPE (args[2])) == BOOLEAN_TYPE)
6541 {
6542 error_at (ARG_LOCATION (2), "argument %u in call to function "
6543 "%qE has boolean type", 3, fndecl);
6544 return false;
6545 }
6546 return true;
6547 }
6548 return false;
6549
6550 case BUILT_IN_CLEAR_PADDING:
6551 if (builtin_function_validate_nargs (loc, fndecl, nargs, 1))
6552 {
6553 if (!POINTER_TYPE_P (TREE_TYPE (args[0])))
6554 {
6555 error_at (ARG_LOCATION (0), "argument %u in call to function "
6556 "%qE does not have pointer type", 1, fndecl);
6557 return false;
6558 }
6559 else if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_TYPE (args[0]))))
6560 {
6561 error_at (ARG_LOCATION (0), "argument %u in call to function "
6562 "%qE points to incomplete type", 1, fndecl);
6563 return false;
6564 }
6565 else if (TYPE_READONLY (TREE_TYPE (TREE_TYPE (args[0]))))
6566 {
6567 error_at (ARG_LOCATION (0), "argument %u in call to function %qE "
6568 "has pointer to %qs type (%qT)", 1, fndecl, "const",
6569 TREE_TYPE (args[0]));
6570 return false;
6571 }
6572 else if (TYPE_ATOMIC (TREE_TYPE (TREE_TYPE (args[0]))))
6573 {
6574 error_at (ARG_LOCATION (0), "argument %u in call to function %qE "
6575 "has pointer to %qs type (%qT)", 1, fndecl,
6576 "_Atomic", TREE_TYPE (args[0]));
6577 return false;
6578 }
6579 return true;
6580 }
6581 return false;
6582
6583 case BUILT_IN_CLZG:
6584 case BUILT_IN_CTZG:
6585 case BUILT_IN_CLRSBG:
6586 case BUILT_IN_FFSG:
6587 case BUILT_IN_PARITYG:
6588 case BUILT_IN_POPCOUNTG:
6589 if (nargs == 2
6590 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CLZG
6591 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CTZG))
6592 {
6593 if (!INTEGRAL_TYPE_P (TREE_TYPE (args[1])))
6594 {
6595 error_at (ARG_LOCATION (1), "argument %u in call to function "
6596 "%qE does not have integral type", 2, fndecl);
6597 return false;
6598 }
6599 if ((TYPE_PRECISION (TREE_TYPE (args[1]))
6600 > TYPE_PRECISION (integer_type_node))
6601 || (TYPE_PRECISION (TREE_TYPE (args[1]))
6602 == TYPE_PRECISION (integer_type_node)
6603 && TYPE_UNSIGNED (TREE_TYPE (args[1]))))
6604 {
6605 error_at (ARG_LOCATION (1), "argument %u in call to function "
6606 "%qE does not have %<int%> type", 2, fndecl);
6607 return false;
6608 }
6609 }
6610 else if (!builtin_function_validate_nargs (loc, fndecl, nargs, 1))
6611 return false;
6612
6613 if (!INTEGRAL_TYPE_P (TREE_TYPE (args[0])))
6614 {
6615 error_at (ARG_LOCATION (0), "argument %u in call to function "
6616 "%qE does not have integral type", 1, fndecl);
6617 return false;
6618 }
6619 if (TREE_CODE (TREE_TYPE (args[0])) == ENUMERAL_TYPE)
6620 {
6621 error_at (ARG_LOCATION (0), "argument %u in call to function "
6622 "%qE has enumerated type", 1, fndecl);
6623 return false;
6624 }
6625 if (TREE_CODE (TREE_TYPE (args[0])) == BOOLEAN_TYPE)
6626 {
6627 error_at (ARG_LOCATION (0), "argument %u in call to function "
6628 "%qE has boolean type", 1, fndecl);
6629 return false;
6630 }
6631 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FFSG
6632 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CLRSBG)
6633 {
6634 if (TYPE_UNSIGNED (TREE_TYPE (args[0])))
6635 {
6636 error_at (ARG_LOCATION (0), "argument 1 in call to function "
6637 "%qE has unsigned type", fndecl);
6638 return false;
6639 }
6640 }
6641 else if (!TYPE_UNSIGNED (TREE_TYPE (args[0])))
6642 {
6643 error_at (ARG_LOCATION (0), "argument 1 in call to function "
6644 "%qE has signed type", fndecl);
6645 return false;
6646 }
6647 return true;
6648
6649 default:
6650 return true;
6651 }
6652}
6653
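/* A few hypothetical calls the checks above reject, for illustration
   (X, Y, B and P are assumed user declarations, not GCC names):

     __builtin_add_overflow (x, y, &b)   where B is a bool
     __builtin_add_overflow (x, y, p)    where P is a const int *
     __builtin_clzg (1)                  since the argument has signed type  */
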
6654/* Subroutine of c_parse_error.
6655 Return the result of concatenating LHS and RHS. RHS is really
6656 a string literal; its first character is indicated by RHS_START and
6657 RHS_SIZE is its length (including the terminating NUL character).
6658
6659 The caller is responsible for deleting the returned pointer. */
6660
6661static char *
6662catenate_strings (const char *lhs, const char *rhs_start, int rhs_size)
6663{
6664 const size_t lhs_size = strlen (lhs);
6665 char *result = XNEWVEC (char, lhs_size + rhs_size);
6666 memcpy (result, lhs, lhs_size);
6667 memcpy (result + lhs_size, rhs_start, rhs_size);
6668 return result;
6669}
6670
6671/* Issue the error given by GMSGID at RICHLOC, indicating that it occurred
6672 before TOKEN, which had the associated VALUE. */
6673
6674void
6675c_parse_error (const char *gmsgid, enum cpp_ttype token_type,
6676 tree value, unsigned char token_flags,
6677 rich_location *richloc)
6678{
6679#define catenate_messages(M1, M2) catenate_strings ((M1), (M2), sizeof (M2))
6680
6681 char *message = NULL;
6682
6683 if (token_type == CPP_EOF)
6684 message = catenate_messages (gmsgid, " at end of input");
6685 else if (token_type == CPP_CHAR
6686 || token_type == CPP_WCHAR
6687 || token_type == CPP_CHAR16
6688 || token_type == CPP_CHAR32
6689 || token_type == CPP_UTF8CHAR)
6690 {
6691 unsigned int val = TREE_INT_CST_LOW (value);
6692 const char *prefix;
6693
6694 switch (token_type)
6695 {
6696 default:
6697 prefix = "";
6698 break;
6699 case CPP_WCHAR:
6700 prefix = "L";
6701 break;
6702 case CPP_CHAR16:
6703 prefix = "u";
6704 break;
6705 case CPP_CHAR32:
6706 prefix = "U";
6707 break;
6708 case CPP_UTF8CHAR:
6709 prefix = "u8";
6710 break;
6711 }
6712
6713 if (val <= UCHAR_MAX && ISGRAPH (val))
6714 message = catenate_messages (gmsgid, " before %s'%c'");
6715 else
6716 message = catenate_messages (gmsgid, " before %s'\\x%x'");
6717
6718 error_at (richloc, message, prefix, val);
6719 free (message);
6720 message = NULL;
6721 }
6722 else if (token_type == CPP_CHAR_USERDEF
6723 || token_type == CPP_WCHAR_USERDEF
6724 || token_type == CPP_CHAR16_USERDEF
6725 || token_type == CPP_CHAR32_USERDEF
6726 || token_type == CPP_UTF8CHAR_USERDEF)
6727 message = catenate_messages (gmsgid,
6728 " before user-defined character literal");
6729 else if (token_type == CPP_STRING_USERDEF
6730 || token_type == CPP_WSTRING_USERDEF
6731 || token_type == CPP_STRING16_USERDEF
6732 || token_type == CPP_STRING32_USERDEF
6733 || token_type == CPP_UTF8STRING_USERDEF)
6734 message = catenate_messages (gmsgid, " before user-defined string literal");
6735 else if (token_type == CPP_STRING
6736 || token_type == CPP_WSTRING
6737 || token_type == CPP_STRING16
6738 || token_type == CPP_STRING32
6739 || token_type == CPP_UTF8STRING)
6740 message = catenate_messages (gmsgid, " before string constant");
6741 else if (token_type == CPP_NUMBER)
6742 message = catenate_messages (gmsgid, " before numeric constant");
6743 else if (token_type == CPP_NAME)
6744 {
6745 message = catenate_messages (gmsgid, " before %qE");
6746 error_at (richloc, message, value);
6747 free (message);
6748 message = NULL;
6749 }
6750 else if (token_type == CPP_PRAGMA)
6751 message = catenate_messages (gmsgid, " before %<#pragma%>");
6752 else if (token_type == CPP_PRAGMA_EOL)
6753 message = catenate_messages (gmsgid, " before end of line");
6754 else if (token_type == CPP_DECLTYPE)
6755 message = catenate_messages (gmsgid, " before %<decltype%>");
6756 else if (token_type < N_TTYPES)
6757 {
6758 message = catenate_messages (gmsgid, " before %qs token");
6759 error_at (richloc, message, cpp_type2name (token_type, token_flags));
6760 free (message);
6761 message = NULL;
6762 }
6763 else
6764 error_at (richloc, gmsgid);
6765
6766 if (message)
6767 {
6768 error_at (richloc, message);
6769 free (message);
6770 }
6771#undef catenate_messages
6772}
6773
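/* For illustration: with GMSGID "expected %<;%>", a CPP_NAME token
   spelled "foo" produces "expected ';' before 'foo'", while CPP_EOF
   produces "expected ';' at end of input".  */
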
6774/* Return the gcc option code associated with the reason for a cpp
6775 message, or 0 if none. */
6776
6777static int
6778c_option_controlling_cpp_diagnostic (enum cpp_warning_reason reason)
6779{
6780 const struct cpp_reason_option_codes_t *entry;
6781
6782 for (entry = cpp_reason_option_codes; entry->reason != CPP_W_NONE; entry++)
6783 {
6784 if (entry->reason == reason)
6785 return entry->option_code;
6786 }
6787 return 0;
6788}
6789
6790/* Return TRUE if the given option index corresponds to a diagnostic
6791 issued by libcpp. Linear search seems fine for now. */
6792bool
6793c_option_is_from_cpp_diagnostics (int option_index)
6794{
6795 for (auto entry = cpp_reason_option_codes; entry->reason != CPP_W_NONE;
6796 ++entry)
6797 {
6798 if (entry->option_code == option_index)
6799 return true;
6800 }
6801 return false;
6802}
6803
6804/* Callback from cpp_diagnostic for PFILE to print diagnostics from the
6805 preprocessor. The diagnostic is of type LEVEL, with REASON set
6806 to the reason code if LEVEL represents a warning, at location
6807 RICHLOC unless this is after lexing and the compiler's location
6808 should be used instead; MSG is the translated message and AP
6809 the arguments. Returns true if a diagnostic was emitted, false
6810 otherwise. */
6811
6812bool
6813c_cpp_diagnostic (cpp_reader *pfile ATTRIBUTE_UNUSED,
6814 enum cpp_diagnostic_level level,
6815 enum cpp_warning_reason reason,
6816 rich_location *richloc,
6817 const char *msg, va_list *ap)
6818{
6819 diagnostic_info diagnostic;
6820 diagnostic_t dlevel;
6821 bool save_warn_system_headers = global_dc->m_warn_system_headers;
6822 bool ret;
6823
6824 switch (level)
6825 {
6826 case CPP_DL_WARNING_SYSHDR:
6827 if (flag_no_output)
6828 return false;
6829 global_dc->m_warn_system_headers = 1;
6830 /* Fall through. */
6831 case CPP_DL_WARNING:
6832 if (flag_no_output)
6833 return false;
6834 dlevel = DK_WARNING;
6835 break;
6836 case CPP_DL_PEDWARN:
6837 if (flag_no_output && !flag_pedantic_errors)
6838 return false;
6839 dlevel = DK_PEDWARN;
6840 break;
6841 case CPP_DL_ERROR:
6842 dlevel = DK_ERROR;
6843 break;
6844 case CPP_DL_ICE:
6845 dlevel = DK_ICE;
6846 break;
6847 case CPP_DL_NOTE:
6848 dlevel = DK_NOTE;
6849 break;
6850 case CPP_DL_FATAL:
6851 dlevel = DK_FATAL;
6852 break;
6853 default:
6854 gcc_unreachable ();
6855 }
6856 if (override_libcpp_locations)
6857 richloc->set_range (0, input_location, SHOW_RANGE_WITH_CARET);
6858 diagnostic_set_info_translated (&diagnostic, msg, ap,
6859 richloc, dlevel);
6860 diagnostic_override_option_index
6861 (&diagnostic,
6862 c_option_controlling_cpp_diagnostic (reason));
6863 ret = diagnostic_report_diagnostic (global_dc, &diagnostic);
6864 if (level == CPP_DL_WARNING_SYSHDR)
6865 global_dc->m_warn_system_headers = save_warn_system_headers;
6866 return ret;
6867}
6868
6869/* Convert a character from the host to the target execution character
6870 set. cpplib handles this, mostly. */
6871
6872HOST_WIDE_INT
6873c_common_to_target_charset (HOST_WIDE_INT c)
6874{
6875 /* Character constants in GCC proper are sign-extended under -fsigned-char,
6876 zero-extended under -fno-signed-char. cpplib insists that characters
6877 and character constants are always unsigned. Hence we must convert
6878 back and forth. */
6879 cppchar_t uc = ((cppchar_t)c) & ((((cppchar_t)1) << CHAR_BIT)-1);
6880
6881 uc = cpp_host_to_exec_charset (parse_in, uc);
6882
6883 if (flag_signed_char)
6884 return ((HOST_WIDE_INT)uc) << (HOST_BITS_PER_WIDE_INT - CHAR_TYPE_SIZE)
6885 >> (HOST_BITS_PER_WIDE_INT - CHAR_TYPE_SIZE);
6886 else
6887 return uc;
6888}
6889
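/* A worked example, assuming CHAR_TYPE_SIZE is 8 and the host and
   execution character sets agree: for C == -1 under -fsigned-char,
   UC becomes 0xff, and the shift pair above sign-extends it back to -1;
   under -fno-signed-char the same input is returned as 255.  */
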
6890/* Fold an offsetof-like expression. EXPR is a nested sequence of component
6891 references with an INDIRECT_REF of a constant at the bottom; much like the
6892 traditional rendering of offsetof as a macro. TYPE is the desired type of
6893 the whole expression. Return the folded result. */
6894
6895tree
6896fold_offsetof (tree expr, tree type, enum tree_code ctx)
6897{
6898 tree base, off, t;
6899 tree_code code = TREE_CODE (expr);
6900 switch (code)
6901 {
6902 case ERROR_MARK:
6903 return expr;
6904
6905 case VAR_DECL:
6906 error ("cannot apply %<offsetof%> to static data member %qD", expr);
6907 return error_mark_node;
6908
6909 case CALL_EXPR:
6910 case TARGET_EXPR:
6911 error ("cannot apply %<offsetof%> when %<operator[]%> is overloaded");
6912 return error_mark_node;
6913
6914 case NOP_EXPR:
6915 case INDIRECT_REF:
6916 if (!TREE_CONSTANT (TREE_OPERAND (expr, 0)))
6917 {
6918 error ("cannot apply %<offsetof%> to a non constant address");
6919 return error_mark_node;
6920 }
6921 return convert (type, TREE_OPERAND (expr, 0));
6922
6923 case COMPONENT_REF:
6924 base = fold_offsetof (TREE_OPERAND (expr, 0), type, code);
6925 if (base == error_mark_node)
6926 return base;
6927
6928 t = TREE_OPERAND (expr, 1);
6929 if (DECL_C_BIT_FIELD (t))
6930 {
6931 error ("attempt to take address of bit-field structure "
6932 "member %qD", t);
6933 return error_mark_node;
6934 }
6935 off = size_binop_loc (input_location, PLUS_EXPR, DECL_FIELD_OFFSET (t),
6936 size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
6937 / BITS_PER_UNIT));
6938 break;
6939
6940 case ARRAY_REF:
6941 base = fold_offsetof (TREE_OPERAND (expr, 0), type, code);
6942 if (base == error_mark_node)
6943 return base;
6944
6945 t = TREE_OPERAND (expr, 1);
6946 STRIP_ANY_LOCATION_WRAPPER (t);
6947
6948 /* Check if the offset goes beyond the upper bound of the array. */
6949 if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) >= 0)
6950 {
6951 tree upbound = array_ref_up_bound (expr);
6952 if (upbound != NULL_TREE
6953 && TREE_CODE (upbound) == INTEGER_CST
6954 && !tree_int_cst_equal (upbound,
6955 TYPE_MAX_VALUE (TREE_TYPE (upbound))))
6956 {
6957 if (ctx != ARRAY_REF && ctx != COMPONENT_REF)
6958 upbound = size_binop (PLUS_EXPR, upbound,
6959 build_int_cst (TREE_TYPE (upbound), 1));
6960 if (tree_int_cst_lt (upbound, t))
6961 {
6962 tree v;
6963
6964 for (v = TREE_OPERAND (expr, 0);
6965 TREE_CODE (v) == COMPONENT_REF;
6966 v = TREE_OPERAND (v, 0))
6967 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
6968 == RECORD_TYPE)
6969 {
6970 tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
6971 for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
6972 if (TREE_CODE (fld_chain) == FIELD_DECL)
6973 break;
6974
6975 if (fld_chain)
6976 break;
6977 }
6978 /* Don't warn if the array might be considered a poor
6979 man's flexible array member with a very permissive
6980 definition thereof. */
6981 if (TREE_CODE (v) == ARRAY_REF
6982 || TREE_CODE (v) == COMPONENT_REF)
6983 warning (OPT_Warray_bounds_,
6984 "index %E denotes an offset "
6985 "greater than size of %qT",
6986 t, TREE_TYPE (TREE_OPERAND (expr, 0)));
6987 }
6988 }
6989 }
6990
6991 t = convert (sizetype, t);
6992 off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
6993 break;
6994
6995 case COMPOUND_EXPR:
6996 /* Handle static members of volatile structs. */
6997 t = TREE_OPERAND (expr, 1);
6998 gcc_checking_assert (VAR_P (get_base_address (t)));
6999 return fold_offsetof (t, type);
7000
7001 default:
7002 gcc_unreachable ();
7003 }
7004
7005 if (!POINTER_TYPE_P (type))
7006 return size_binop (PLUS_EXPR, base, convert (type, off));
7007 return fold_build_pointer_plus (base, off);
7008}
7009
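/* A sketch of the folding above, assuming a 4-byte int and no padding:
   for struct S { int a; int b[4]; }, __builtin_offsetof (struct S, b[2])
   arrives here as an ARRAY_REF of a COMPONENT_REF over an INDIRECT_REF
   of a constant, and folds to 4 + 2 * 4 == 12.  */
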
7010/* *PTYPE is an incomplete array. Complete it with a domain based on
7011 INITIAL_VALUE. If INITIAL_VALUE is not present, use 1 if DO_DEFAULT
7012 is true. Return 0 if successful, 1 if INITIAL_VALUE can't be deciphered,
7013 2 if INITIAL_VALUE was NULL, and 3 if INITIAL_VALUE was empty. */
7014
7015int
7016complete_array_type (tree *ptype, tree initial_value, bool do_default)
7017{
7018 tree maxindex, type, main_type, elt, unqual_elt;
7019 int failure = 0, quals;
7020 bool overflow_p = false;
7021
7022 maxindex = size_zero_node;
7023 if (initial_value)
7024 {
7025 STRIP_ANY_LOCATION_WRAPPER (initial_value);
7026
7027 if (TREE_CODE (initial_value) == STRING_CST)
7028 {
7029 int eltsize
7030 = int_size_in_bytes (TREE_TYPE (TREE_TYPE (initial_value)));
7031 maxindex = size_int (TREE_STRING_LENGTH (initial_value)/eltsize - 1);
7032 }
7033 else if (TREE_CODE (initial_value) == CONSTRUCTOR)
7034 {
7035 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (initial_value);
7036
7037 if (vec_safe_is_empty (v))
7038 {
7039 if (pedantic)
7040 failure = 3;
7041 maxindex = ssize_int (-1);
7042 }
7043 else
7044 {
7045 tree curindex;
7046 unsigned HOST_WIDE_INT cnt;
7047 constructor_elt *ce;
7048 bool fold_p = false;
7049
7050 if ((*v)[0].index)
7051 maxindex = (*v)[0].index, fold_p = true;
7052
7053 curindex = maxindex;
7054
7055 for (cnt = 1; vec_safe_iterate (v, cnt, &ce); cnt++)
7056 {
7057 bool curfold_p = false;
7058 if (ce->index)
7059 curindex = ce->index, curfold_p = true;
7060 else
7061 {
7062 if (fold_p)
7063 {
7064 /* Since we treat size types now as ordinary
7065 unsigned types, we need an explicit overflow
7066 check. */
7067 tree orig = curindex;
7068 curindex = fold_convert (sizetype, curindex);
7069 overflow_p |= tree_int_cst_lt (curindex, orig);
7070 }
7071 curindex = size_binop (PLUS_EXPR, curindex,
7072 size_one_node);
7073 }
7074 if (tree_int_cst_lt (maxindex, curindex))
7075 maxindex = curindex, fold_p = curfold_p;
7076 }
7077 if (fold_p)
7078 {
7079 tree orig = maxindex;
7080 maxindex = fold_convert (sizetype, maxindex);
7081 overflow_p |= tree_int_cst_lt (maxindex, orig);
7082 }
7083 }
7084 }
7085 else
7086 {
7087 /* Make an error message unless that happened already. */
7088 if (initial_value != error_mark_node)
7089 failure = 1;
7090 }
7091 }
7092 else
7093 {
7094 failure = 2;
7095 if (!do_default)
7096 return failure;
7097 }
7098
7099 type = *ptype;
7100 elt = TREE_TYPE (type);
7101 quals = TYPE_QUALS (strip_array_types (elt));
7102 if (quals == 0)
7103 unqual_elt = elt;
7104 else
7105 unqual_elt = c_build_qualified_type (elt, KEEP_QUAL_ADDR_SPACE (quals));
7106
7107 /* Using build_distinct_type_copy and modifying things afterward instead
7108 of using build_array_type to create a new type preserves all of the
7109 TYPE_LANG_FLAG_? bits that the front end may have set. */
7110 main_type = build_distinct_type_copy (TYPE_MAIN_VARIANT (type));
7111 TREE_TYPE (main_type) = unqual_elt;
7112 TYPE_DOMAIN (main_type)
7113 = build_range_type (TREE_TYPE (maxindex),
7114 build_int_cst (TREE_TYPE (maxindex), 0), maxindex);
7115 TYPE_TYPELESS_STORAGE (main_type) = TYPE_TYPELESS_STORAGE (type);
7116 layout_type (main_type);
7117
7118 /* Make sure we have the canonical MAIN_TYPE. */
7119 hashval_t hashcode = type_hash_canon_hash (main_type);
7120 main_type = type_hash_canon (hashcode, main_type);
7121
7122 /* Fix the canonical type. */
7123 if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (main_type))
7124 || TYPE_STRUCTURAL_EQUALITY_P (TYPE_DOMAIN (main_type)))
7125 SET_TYPE_STRUCTURAL_EQUALITY (main_type);
7126 else if (TYPE_CANONICAL (TREE_TYPE (main_type)) != TREE_TYPE (main_type)
7127 || (TYPE_CANONICAL (TYPE_DOMAIN (main_type))
7128 != TYPE_DOMAIN (main_type)))
7129 TYPE_CANONICAL (main_type)
7130 = build_array_type (TYPE_CANONICAL (TREE_TYPE (main_type)),
7131 TYPE_CANONICAL (TYPE_DOMAIN (main_type)),
7132 TYPE_TYPELESS_STORAGE (main_type));
7133 else
7134 TYPE_CANONICAL (main_type) = main_type;
7135
7136 if (quals == 0)
7137 type = main_type;
7138 else
7139 type = c_build_qualified_type (main_type, quals);
7140
7141 if (COMPLETE_TYPE_P (type)
7142 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST
7143 && (overflow_p || TREE_OVERFLOW (TYPE_SIZE_UNIT (type))))
7144 {
7145 error ("size of array is too large");
7146 /* If we proceed with the array type as it is, we'll eventually
7147 crash in tree_to_[su]hwi(). */
7148 type = error_mark_node;
7149 }
7150
7151 *ptype = type;
7152 return failure;
7153}
7154
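/* Illustrative uses of the above: for int a[] = { 1, 2, 3 } the
   CONSTRUCTOR yields MAXINDEX 2, so the domain becomes [0, 2] and 0 is
   returned; for char s[] = "hi" the STRING_CST length of 3 (including
   the NUL) likewise gives [0, 2]; with no initializer 2 is returned
   and, when DO_DEFAULT, the array is completed with a single element.  */
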
7155/* INIT is a constructor of a structure with a flexible array member.
7156 Complete the flexible array member with a domain based on its value. */
7157void
7158complete_flexible_array_elts (tree init)
7159{
7160 tree elt, type;
7161
7162 if (init == NULL_TREE || TREE_CODE (init) != CONSTRUCTOR)
7163 return;
7164
7165 if (vec_safe_is_empty (CONSTRUCTOR_ELTS (init)))
7166 return;
7167
7168 elt = CONSTRUCTOR_ELTS (init)->last ().value;
7169 type = TREE_TYPE (elt);
7170 if (TREE_CODE (type) == ARRAY_TYPE
7171 && TYPE_SIZE (type) == NULL_TREE)
7172 complete_array_type (&TREE_TYPE (elt), elt, false);
7173 else
7174 complete_flexible_array_elts (elt);
7175}
7176
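/* For instance, given struct S { int n; int data[]; } and the
   initializer struct S s = { 2, { 10, 20 } } (a GNU extension), the
   last constructor element initializes DATA, and its incomplete array
   type is completed above to int[2].  */
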
7177/* Like c_mark_addressable but don't check register qualifier. */
7178void
7179c_common_mark_addressable_vec (tree t)
7180{
7181 while (handled_component_p (t) || TREE_CODE (t) == C_MAYBE_CONST_EXPR)
7182 {
7183 if (TREE_CODE (t) == C_MAYBE_CONST_EXPR)
7184 t = C_MAYBE_CONST_EXPR_EXPR (t);
7185 else
7186 t = TREE_OPERAND (t, 0);
7187 }
7188 if (!VAR_P (t)
7189 && TREE_CODE (t) != PARM_DECL
7190 && TREE_CODE (t) != COMPOUND_LITERAL_EXPR
7191 && TREE_CODE (t) != TARGET_EXPR)
7192 return;
7193 if (!VAR_P (t) || !DECL_HARD_REGISTER (t))
7194 TREE_ADDRESSABLE (t) = 1;
7195 if (TREE_CODE (t) == COMPOUND_LITERAL_EXPR)
7196 TREE_ADDRESSABLE (COMPOUND_LITERAL_EXPR_DECL (t)) = 1;
7197 else if (TREE_CODE (t) == TARGET_EXPR)
7198 TREE_ADDRESSABLE (TARGET_EXPR_SLOT (t)) = 1;
7199}
7200
7201
7202
7203/* Used to help initialize the builtin-types.def table. When a type of
7204 the correct size doesn't exist, use error_mark_node instead of NULL.
7205 The latter results in segfaults even when a decl using the type doesn't
7206 get invoked. */
7207
7208tree
7209builtin_type_for_size (int size, bool unsignedp)
7210{
7211 tree type = c_common_type_for_size (size, unsignedp);
7212 return type ? type : error_mark_node;
7213}
7214
7215/* Work out the size of the first argument of a call to
7216 __builtin_speculation_safe_value. Only pointers and integral types
7217 are permitted. Return BUILT_IN_NONE if the argument type is unsupported or
7218 its size is not 1, 2, 4, 8 or 16 bytes; BUILT_IN_SPECULATION_SAFE_VALUE_PTR
7219 if the argument is a pointer; or the size-specific variant if it is integral. */
7220static enum built_in_function
7221speculation_safe_value_resolve_call (tree function, vec<tree, va_gc> *params)
7222{
7223 /* Type of the argument. */
7224 tree type;
7225 int size;
7226
7227 if (vec_safe_is_empty (params))
7228 {
7229 error ("too few arguments to function %qE", function);
7230 return BUILT_IN_NONE;
7231 }
7232
7233 type = TREE_TYPE ((*params)[0]);
7234 if (TREE_CODE (type) == ARRAY_TYPE && c_dialect_cxx ())
7235 {
7236 /* Force array-to-pointer decay for C++. */
7237 (*params)[0] = default_conversion ((*params)[0]);
7238 type = TREE_TYPE ((*params)[0]);
7239 }
7240
7241 if (POINTER_TYPE_P (type))
7242 return BUILT_IN_SPECULATION_SAFE_VALUE_PTR;
7243
7244 if (!INTEGRAL_TYPE_P (type))
7245 goto incompatible;
7246
7247 if (!COMPLETE_TYPE_P (type))
7248 goto incompatible;
7249
7250 size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
7251 if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
7252 return ((enum built_in_function)
7253 ((int) BUILT_IN_SPECULATION_SAFE_VALUE_1 + exact_log2 (size)));
7254
7255 incompatible:
7256 /* Issue the diagnostic only if the argument is valid, otherwise
7257 it would be redundant at best and could be misleading. */
7258 if (type != error_mark_node)
7259 error ("operand type %qT is incompatible with argument %d of %qE",
7260 type, 1, function);
7261
7262 return BUILT_IN_NONE;
7263}
7264
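/* For example, __builtin_speculation_safe_value on an int argument of
   size 4 resolves above to BUILT_IN_SPECULATION_SAFE_VALUE_4, while a
   pointer argument resolves to BUILT_IN_SPECULATION_SAFE_VALUE_PTR.  */
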
7265/* Validate and coerce PARAMS, the arguments to ORIG_FUNCTION. The first
7266 argument is mandatory; a second argument, if present, must be type
7267 compatible with the first. */
7268static bool
7269speculation_safe_value_resolve_params (location_t loc, tree orig_function,
7270 vec<tree, va_gc> *params)
7271{
7272 tree val;
7273
7274 if (params->length () == 0)
7275 {
7276 error_at (loc, "too few arguments to function %qE", orig_function);
7277 return false;
7278 }
7279
7280 else if (params->length () > 2)
7281 {
7282 error_at (loc, "too many arguments to function %qE", orig_function);
7283 return false;
7284 }
7285
7286 val = (*params)[0];
7287 if (TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE)
7288 val = default_conversion (val);
7289 if (!(TREE_CODE (TREE_TYPE (val)) == POINTER_TYPE
7290 || TREE_CODE (TREE_TYPE (val)) == INTEGER_TYPE))
7291 {
7292 error_at (loc,
7293 "expecting argument of type pointer or of type integer "
7294 "for argument 1");
7295 return false;
7296 }
7297 (*params)[0] = val;
7298
7299 if (params->length () == 2)
7300 {
7301 tree val2 = (*params)[1];
7302 if (TREE_CODE (TREE_TYPE (val2)) == ARRAY_TYPE)
7303 val2 = default_conversion (val2);
7304 if (error_operand_p (val2))
7305 return false;
7306 if (!(TREE_TYPE (val) == TREE_TYPE (val2)
7307 || useless_type_conversion_p (TREE_TYPE (val), TREE_TYPE (val2))))
7308 {
7309 error_at (loc, "both arguments must be compatible");
7310 return false;
7311 }
7312 (*params)[1] = val2;
7313 }
7314
7315 return true;
7316}
7317
7318/* Cast the result of the builtin back to the type of the first argument,
7319 preserving any qualifiers that it might have. */
7320static tree
7321speculation_safe_value_resolve_return (tree first_param, tree result)
7322{
7323 tree ptype = TREE_TYPE (first_param);
7324 tree rtype = TREE_TYPE (result);
7325 ptype = TYPE_MAIN_VARIANT (ptype);
7326
7327 if (tree_int_cst_equal (TYPE_SIZE (ptype), TYPE_SIZE (rtype)))
7328 return convert (ptype, result);
7329
7330 return result;
7331}
7332
7333/* A helper function for resolve_overloaded_builtin in resolving the
7334 overloaded __sync_ builtins. Returns a positive power of 2 if the
7335 first operand of PARAMS is a pointer to a supported data type.
7336 Returns 0 if an error is encountered. Returns -1 for a _BitInt
7337 __atomic*fetch* with an unsupported type, which should be handled by
7338 a CAS loop.
7339 FETCH is true when FUNCTION is one of the _FETCH_OP_ or _OP_FETCH_
7340 built-ins. ORIG_FORMAT is for __sync_* rather than __atomic_*
7341 built-ins. */
7342
7343static int
7344sync_resolve_size (tree function, vec<tree, va_gc> *params, bool fetch,
7345 bool orig_format)
7346{
7347 /* Type of the argument. */
7348 tree argtype;
7349 /* Type the argument points to. */
7350 tree type;
7351 int size;
7352
7353 if (vec_safe_is_empty (params))
7354 {
7355 error ("too few arguments to function %qE", function);
7356 return 0;
7357 }
7358
7359 argtype = type = TREE_TYPE ((*params)[0]);
7360 if (TREE_CODE (type) == ARRAY_TYPE && c_dialect_cxx ())
7361 {
7362 /* Force array-to-pointer decay for C++. */
7363 (*params)[0] = default_conversion ((*params)[0]);
7364 type = TREE_TYPE ((*params)[0]);
7365 }
7366 if (TREE_CODE (type) != POINTER_TYPE)
7367 goto incompatible;
7368
7369 type = TREE_TYPE (type);
7370 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
7371 goto incompatible;
7372
7373 if (!COMPLETE_TYPE_P (type))
7374 goto incompatible;
7375
7376 if (fetch && TREE_CODE (type) == BOOLEAN_TYPE)
7377 goto incompatible;
7378
7379 size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
7380 if (size == 16
7381 && fetch
7382 && !orig_format
7383 && TREE_CODE (type) == BITINT_TYPE
7384 && !targetm.scalar_mode_supported_p (TImode))
7385 return -1;
7386
7387 if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
7388 return size;
7389
7390 if (fetch && !orig_format && TREE_CODE (type) == BITINT_TYPE)
7391 return -1;
7392
7393 incompatible:
7394 /* Issue the diagnostic only if the argument is valid, otherwise
7395 it would be redundant at best and could be misleading. */
7396 if (argtype != error_mark_node)
7397 error ("operand type %qT is incompatible with argument %d of %qE",
7398 argtype, 1, function);
7399 return 0;
7400}
7401
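/* For example, __atomic_fetch_add on a 4-byte integer object yields 4
   here, selecting the _4 variant later, whereas the same built-in on a
   large _BitInt yields -1 so that it can be expanded as a
   compare-and-swap loop instead.  */
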
7402/* A helper function for resolve_overloaded_builtin. Adds casts to
7403 PARAMS to make arguments match up with those of FUNCTION. Drops
7404 the variadic arguments at the end. Returns false if some error
7405 was encountered; true on success. */
7406
7407static bool
7408sync_resolve_params (location_t loc, tree orig_function, tree function,
7409 vec<tree, va_gc> *params, bool orig_format)
7410{
7411 function_args_iterator iter;
7412 tree ptype;
7413 unsigned int parmnum;
7414
7415 function_args_iter_init (&iter, TREE_TYPE (function));
7416 /* We've declared the implementation functions to use "volatile void *"
7417 as the pointer parameter, so we shouldn't get any complaints from the
7418 call to check_function_arguments whatever type the user used. */
7419 function_args_iter_next (&iter);
7420 ptype = TREE_TYPE (TREE_TYPE ((*params)[0]));
7421 ptype = TYPE_MAIN_VARIANT (ptype);
7422
7423 /* For the rest of the values, we need to cast these to FTYPE, so that we
7424 don't get warnings for passing pointer types, etc. */
7425 parmnum = 0;
7426 while (1)
7427 {
7428 tree val, arg_type;
7429
7430 arg_type = function_args_iter_cond (&iter);
7431 /* XXX void_type_node belies the abstraction. */
7432 if (arg_type == void_type_node)
7433 break;
7434
7435 ++parmnum;
7436 if (params->length () <= parmnum)
7437 {
7438 error_at (loc, "too few arguments to function %qE", orig_function);
7439 return false;
7440 }
7441
7442 /* Only convert parameters if arg_type is unsigned integer type with
7443 new format sync routines, i.e. don't attempt to convert pointer
7444 arguments (e.g. EXPECTED argument of __atomic_compare_exchange_n),
7445 bool arguments (e.g. WEAK argument) or signed int arguments (memmodel
7446 kinds). */
7447 if (TREE_CODE (arg_type) == INTEGER_TYPE && TYPE_UNSIGNED (arg_type))
7448 {
7449 /* Ideally for the first conversion we'd use convert_for_assignment
7450 so that we get warnings for anything that doesn't match the pointer
7451 type. This isn't portable across the C and C++ front ends atm. */
7452 val = (*params)[parmnum];
7453 val = convert (ptype, val);
7454 val = convert (arg_type, val);
7455 (*params)[parmnum] = val;
7456 }
7457
7458 function_args_iter_next (&iter);
7459 }
7460
7461 /* __atomic routines are not variadic. */
7462 if (!orig_format && params->length () != parmnum + 1)
7463 {
7464 error_at (loc, "too many arguments to function %qE", orig_function);
7465 return false;
7466 }
7467
7468 /* The definition of these primitives is variadic, with the remaining
7469 being "an optional list of variables protected by the memory barrier".
7470 No clue what that's supposed to mean, precisely, but we consider all
7471 call-clobbered variables to be protected so we're safe. */
7472 params->truncate (parmnum + 1);
7473
7474 return true;
7475}
7476
7477/* A helper function for resolve_overloaded_builtin. Adds a cast to
7478 RESULT to make it match the type of the first pointer argument in
7479 PARAMS. */
7480
7481static tree
7482sync_resolve_return (tree first_param, tree result, bool orig_format)
7483{
7484 tree ptype = TREE_TYPE (TREE_TYPE (first_param));
7485 tree rtype = TREE_TYPE (result);
7486 ptype = TYPE_MAIN_VARIANT (ptype);
7487
7488 /* New format doesn't require casting unless the types are the same size. */
7489 if (orig_format || tree_int_cst_equal (TYPE_SIZE (ptype), TYPE_SIZE (rtype)))
7490 return convert (ptype, result);
7491 else
7492 return result;
7493}
7494
7495/* This function verifies the PARAMS to generic atomic FUNCTION.
7496 It returns the common size if all the parameters have the same size;
7497 otherwise 0 is returned and the parameters are invalid. */
7498
7499static int
7500get_atomic_generic_size (location_t loc, tree function,
7501 vec<tree, va_gc> *params)
7502{
7503 unsigned int n_param;
7504 unsigned int n_model;
7505 unsigned int outputs = 0; // bitset of output parameters
7506 unsigned int x;
7507 int size_0;
7508 tree type_0;
7509
7510 /* Determine the parameter makeup. */
7511 switch (DECL_FUNCTION_CODE (function))
7512 {
7513 case BUILT_IN_ATOMIC_EXCHANGE:
7514 n_param = 4;
7515 n_model = 1;
7516 outputs = 5;
7517 break;
7518 case BUILT_IN_ATOMIC_LOAD:
7519 n_param = 3;
7520 n_model = 1;
7521 outputs = 2;
7522 break;
7523 case BUILT_IN_ATOMIC_STORE:
7524 n_param = 3;
7525 n_model = 1;
7526 outputs = 1;
7527 break;
7528 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE:
7529 n_param = 6;
7530 n_model = 2;
7531 outputs = 3;
7532 break;
7533 default:
7534 gcc_unreachable ();
7535 }
7536
7537 if (vec_safe_length (params) != n_param)
7538 {
7539 error_at (loc, "incorrect number of arguments to function %qE", function);
7540 return 0;
7541 }
7542
7543 /* Get type of first parameter, and determine its size. */
7544 type_0 = TREE_TYPE ((*params)[0]);
7545 if (TREE_CODE (type_0) == ARRAY_TYPE && c_dialect_cxx ())
7546 {
7547 /* Force array-to-pointer decay for C++. */
7548 (*params)[0] = default_conversion ((*params)[0]);
7549 type_0 = TREE_TYPE ((*params)[0]);
7550 }
7551 if (TREE_CODE (type_0) != POINTER_TYPE || VOID_TYPE_P (TREE_TYPE (type_0)))
7552 {
7553 error_at (loc, "argument 1 of %qE must be a non-void pointer type",
7554 function);
7555 return 0;
7556 }
7557
7558 if (!COMPLETE_TYPE_P (TREE_TYPE (type_0)))
7559 {
7560 error_at (loc, "argument 1 of %qE must be a pointer to a complete type",
7561 function);
7562 return 0;
7563 }
7564
7565 /* Types must be compile time constant sizes. */
7566 if (!tree_fits_uhwi_p ((TYPE_SIZE_UNIT (TREE_TYPE (type_0)))))
7567 {
7568 error_at (loc,
7569 "argument 1 of %qE must be a pointer to a constant size type",
7570 function);
7571 return 0;
7572 }
7573
7574 size_0 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (type_0)));
7575
7576 /* Zero size objects are not allowed. */
7577 if (size_0 == 0)
7578 {
7579 error_at (loc,
7580 "argument 1 of %qE must be a pointer to a nonzero size object",
7581 function);
7582 return 0;
7583 }
7584
7585 /* Check each other parameter is a pointer and the same size. */
7586 for (x = 0; x < n_param - n_model; x++)
7587 {
7588 int size;
7589 tree type = TREE_TYPE ((*params)[x]);
7590 /* __atomic_compare_exchange has a bool in the 4th position, skip it. */
7591 if (n_param == 6 && x == 3)
7592 continue;
7593 if (TREE_CODE (type) == ARRAY_TYPE && c_dialect_cxx ())
7594 {
7595 /* Force array-to-pointer decay for C++. */
7596 (*params)[x] = default_conversion ((*params)[x]);
7597 type = TREE_TYPE ((*params)[x]);
7598 }
7599 if (!POINTER_TYPE_P (type))
7600 {
7601 error_at (loc, "argument %d of %qE must be a pointer type", x + 1,
7602 function);
7603 return 0;
7604 }
7605 else if (TYPE_SIZE_UNIT (TREE_TYPE (type))
7606 && TREE_CODE ((TYPE_SIZE_UNIT (TREE_TYPE (type))))
7607 != INTEGER_CST)
7608 {
7609 error_at (loc, "argument %d of %qE must be a pointer to a constant "
7610 "size type", x + 1, function);
7611 return 0;
7612 }
7613 else if (FUNCTION_POINTER_TYPE_P (type))
7614 {
7615 error_at (loc, "argument %d of %qE must not be a pointer to a "
7616 "function", x + 1, function);
7617 return 0;
7618 }
7619 tree type_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
7620 size = type_size ? tree_to_uhwi (type_size) : 0;
7621 if (size != size_0)
7622 {
7623 error_at (loc, "size mismatch in argument %d of %qE", x + 1,
7624 function);
7625 return 0;
7626 }
7627
7628 {
7629 auto_diagnostic_group d;
7630 int quals = TYPE_QUALS (TREE_TYPE (type));
7631 /* Must not write to an argument of a const-qualified type. */
7632 if (outputs & (1 << x) && quals & TYPE_QUAL_CONST)
7633 {
7634 if (c_dialect_cxx ())
7635 {
7636 error_at (loc, "argument %d of %qE must not be a pointer to "
7637 "a %<const%> type", x + 1, function);
7638 return 0;
7639 }
7640 else
7641 pedwarn (loc, OPT_Wdiscarded_qualifiers, "argument %d "
7642 "of %qE discards %<const%> qualifier", x + 1,
7643 function);
7644 }
7645 /* Only the first argument is allowed to be volatile. */
7646 if (x > 0 && quals & TYPE_QUAL_VOLATILE)
7647 {
7648 if (c_dialect_cxx ())
7649 {
7650 error_at (loc, "argument %d of %qE must not be a pointer to "
7651 "a %<volatile%> type", x + 1, function);
7652 return 0;
7653 }
7654 else
7655 pedwarn (loc, OPT_Wdiscarded_qualifiers, "argument %d "
7656 "of %qE discards %<volatile%> qualifier", x + 1,
7657 function);
7658 }
7659 }
7660 }
7661
7662 /* Check memory model parameters for validity. */
7663 for (x = n_param - n_model ; x < n_param; x++)
7664 {
7665 tree p = (*params)[x];
7666 if (!INTEGRAL_TYPE_P (TREE_TYPE (p)))
7667 {
7668 error_at (loc, "non-integer memory model argument %d of %qE", x + 1,
7669 function);
7670 return 0;
7671 }
7672 p = fold_for_warn (p);
7673 if (TREE_CODE (p) == INTEGER_CST)
7674 {
7675 /* memmodel_base masks the low 16 bits, thus ignore any bits above
7676 it by using TREE_INT_CST_LOW instead of tree_to_*hwi. Those high
7677 bits will be checked later during expansion in target specific
7678 way. */
7679 if (memmodel_base (TREE_INT_CST_LOW (p)) >= MEMMODEL_LAST)
7680 warning_at (loc, OPT_Winvalid_memory_model,
7681 "invalid memory model argument %d of %qE", x + 1,
7682 function);
7683 }
7684 }
7685
7686 return size_0;
7687}
7688
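/* For instance, for __atomic_load (&obj, &ret, __ATOMIC_SEQ_CST) the
   checks above require both pointers to reference complete, equally
   sized, non-function types, allow only the first argument to be a
   pointer to volatile, diagnose a const RET pointer (an output), and
   return sizeof (obj).  */
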
7689
7690/* This will take an __atomic_ generic FUNCTION call, and add a size parameter N
7691 at the beginning of the parameter list PARAMS representing the size of the
7692 objects. This is to match the library ABI requirement. LOC is the location
7693 of the function call.
7694 The new function is returned if it needed rebuilding, otherwise NULL_TREE is
7695 returned to allow the external call to be constructed. */
7696
7697static tree
7698add_atomic_size_parameter (unsigned n, location_t loc, tree function,
7699 vec<tree, va_gc> *params)
7700{
7701 tree size_node;
7702
7703 /* Insert a SIZE_T parameter as the first param. If there isn't
7704 enough space, allocate a new vector and recursively re-build with that. */
7705 if (!params->space (1))
7706 {
7707 unsigned int z, len;
7708 vec<tree, va_gc> *v;
7709 tree f;
7710
7711 len = params->length ();
7712 vec_alloc (v, len + 1);
7713 v->quick_push (build_int_cst (size_type_node, n));
7714 for (z = 0; z < len; z++)
7715 v->quick_push ((*params)[z]);
7716 f = build_function_call_vec (loc, vNULL, function, v, NULL);
7717 vec_free (v);
7718 return f;
7719 }
7720
7721 /* Add the size parameter and leave as a function call for processing. */
7722 size_node = build_int_cst (size_type_node, n);
7723 params->quick_insert (0, size_node);
7724 return NULL_TREE;
7725}
7726
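/* As a sketch: if *P is a 32-byte type, __atomic_exchange (p, &des,
   &ret, model) is not lock-free here, so the call is rewritten for the
   library as __atomic_exchange (32, p, &des, &ret, model), matching
   libatomic's generic entry point.  */
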
7727
7728/* Return whether atomic operations for naturally aligned N-byte
7729 arguments are supported, whether inline or through libatomic. */
7730static bool
7731atomic_size_supported_p (int n)
7732{
7733 switch (n)
7734 {
7735 case 1:
7736 case 2:
7737 case 4:
7738 case 8:
7739 return true;
7740
7741 case 16:
7742 return targetm.scalar_mode_supported_p (TImode);
7743
7744 default:
7745 return false;
7746 }
7747}
7748
7749/* This will process an __atomic_exchange function call, determine whether it
7750 needs to be mapped to the _N variation, or turned into a library call.
7751 LOC is the location of the builtin call.
7752 FUNCTION is the DECL that has been invoked;
7753 PARAMS is the argument list for the call.
7754 TRUE is returned if it is translated into the proper format for a call to the
7755 external library, and NEW_RETURN is set to the tree for that call.
7756 FALSE is returned if processing for the _N variation is required, and
7757 NEW_RETURN is set to the return value the result is copied into. */
7758static bool
7759resolve_overloaded_atomic_exchange (location_t loc, tree function,
7760 vec<tree, va_gc> *params, tree *new_return)
7761{
7762 tree p0, p1, p2, p3;
7763 tree I_type, I_type_ptr;
7764 int n = get_atomic_generic_size (loc, function, params);
7765
7766 /* Size of 0 is an error condition. */
7767 if (n == 0)
7768 {
7769 *new_return = error_mark_node;
7770 return true;
7771 }
7772
7773 /* If not a lock-free size, change to the library generic format. */
7774 if (!atomic_size_supported_p (n))
7775 {
7776 *new_return = add_atomic_size_parameter (n, loc, function, params);
7777 return true;
7778 }
7779
7780 /* Otherwise there is a lockfree match, transform the call from:
7781 void fn(T* mem, T* desired, T* return, model)
7782 into
7783 *return = (T) (fn (In* mem, (In) *desired, model)) */
7784
7785 p0 = (*params)[0];
7786 p1 = (*params)[1];
7787 p2 = (*params)[2];
7788 p3 = (*params)[3];
7789
7790 /* Create pointer to appropriate size. */
7791 I_type = builtin_type_for_size (BITS_PER_UNIT * n, 1);
7792 I_type_ptr = build_pointer_type (I_type);
7793
7794 /* Convert object pointer to required type. */
7795 p0 = build1 (VIEW_CONVERT_EXPR, I_type_ptr, p0);
7796 (*params)[0] = p0;
7797 /* Convert new value to required type, and dereference it.
7798 If *p1 type can have padding or may involve floating point which
7799 could e.g. be promoted to wider precision and demoted afterwards,
7800 state of padding bits might not be preserved. */
7801 build_indirect_ref (loc, p1, RO_UNARY_STAR);
7802 p1 = build2_loc (loc, MEM_REF, I_type,
7803 build1 (VIEW_CONVERT_EXPR, I_type_ptr, p1),
7804 build_zero_cst (TREE_TYPE (p1)));
7805 (*params)[1] = p1;
7806
7807 /* Move memory model to the 3rd position, and end param list. */
7808 (*params)[2] = p3;
7809 params->truncate (3);
7810
7811 /* Convert return pointer and dereference it for later assignment. */
7812 *new_return = build_indirect_ref (loc, p2, RO_UNARY_STAR);
7813
7814 return false;
7815}
7816
7817
7818/* This will process an __atomic_compare_exchange function call, determine
7819 whether it needs to be mapped to the _N variation, or turned into a lib call.
7820 LOC is the location of the builtin call.
7821 FUNCTION is the DECL that has been invoked;
7822 PARAMS is the argument list for the call.
7823 TRUE is returned if it is translated into the proper format for a call to the
7824 external library, and NEW_RETURN is set to the tree for that call.
7825 FALSE is returned if processing for the _N variation is required. */
7826
7827static bool
7828resolve_overloaded_atomic_compare_exchange (location_t loc, tree function,
7829 vec<tree, va_gc> *params,
7830 tree *new_return)
7831{
7832 tree p0, p1, p2;
7833 tree I_type, I_type_ptr;
7834 int n = get_atomic_generic_size (loc, function, params);
7835
7836 /* Size of 0 is an error condition. */
7837 if (n == 0)
7838 {
7839 *new_return = error_mark_node;
7840 return true;
7841 }
7842
7843 /* If not a lock-free size, change to the library generic format. */
7844 if (!atomic_size_supported_p (n))
7845 {
7846 /* The library generic format does not have the weak parameter, so
7847 remove it from the param list. Since a parameter has been removed,
7848 we can be sure that there is room for the SIZE_T parameter, meaning
7849 there will not be a recursive rebuilding of the parameter list, so
7850 there is no danger this will be done twice. */
7851 if (n > 0)
7852 {
7853 (*params)[3] = (*params)[4];
7854 (*params)[4] = (*params)[5];
7855 params->truncate (5);
7856 }
7857 *new_return = add_atomic_size_parameter (n, loc, function, params);
7858 return true;
7859 }
7860
7861 /* Otherwise, there is a match, so the call needs to be transformed from:
7862 bool fn(T* mem, T* desired, T* return, weak, success, failure)
7863 into
7864 bool fn ((In *)mem, (In *)expected, (In) *desired, weak, succ, fail) */
7865
7866 p0 = (*params)[0];
7867 p1 = (*params)[1];
7868 p2 = (*params)[2];
7869
7870 /* Create pointer to appropriate size. */
7871 I_type = builtin_type_for_size (BITS_PER_UNIT * n, 1);
7872 I_type_ptr = build_pointer_type (I_type);
7873
7874 /* Convert object pointer to required type. */
7875 p0 = build1 (VIEW_CONVERT_EXPR, I_type_ptr, p0);
7876 (*params)[0] = p0;
7877
7878 /* Convert expected pointer to required type. */
7879 p1 = build1 (VIEW_CONVERT_EXPR, I_type_ptr, p1);
7880 (*params)[1] = p1;
7881
7882 /* Convert desired value to required type, and dereference it.
7883 If *p2 type can have padding or may involve floating point which
7884 could e.g. be promoted to wider precision and demoted afterwards,
7885 state of padding bits might not be preserved. */
7886 build_indirect_ref (loc, p2, RO_UNARY_STAR);
7887 p2 = build2_loc (loc, MEM_REF, I_type,
7888 build1 (VIEW_CONVERT_EXPR, I_type_ptr, p2),
7889 build_zero_cst (TREE_TYPE (p2)));
7890 (*params)[2] = p2;
7891
7892 /* The rest of the parameters are fine. NULL means no special return value
7893 processing.  */
7894 *new_return = NULL;
7895 return false;
7896}
7897
7898
7899/* This will process an __atomic_load function call, determine whether it
7900 needs to be mapped to the _N variation, or turned into a library call.
7901 LOC is the location of the builtin call.
7902 FUNCTION is the DECL that has been invoked;
7903 PARAMS is the argument list for the call.
7904 TRUE is returned if it is translated into the proper format for a call to the
7905 external library, and NEW_RETURN is set to the tree for that call.
7906 FALSE is returned if processing for the _N variation is required, and
7907 NEW_RETURN is set to the return value the result is copied into. */
7908
7909static bool
7910resolve_overloaded_atomic_load (location_t loc, tree function,
7911 vec<tree, va_gc> *params, tree *new_return)
7912{
7913 tree p0, p1, p2;
7914 tree I_type, I_type_ptr;
7915 int n = get_atomic_generic_size (loc, function, params);
7916
7917 /* Size of 0 is an error condition. */
7918 if (n == 0)
7919 {
7920 *new_return = error_mark_node;
7921 return true;
7922 }
7923
7924 /* If not a lock-free size, change to the library generic format. */
7925 if (!atomic_size_supported_p (n))
7926 {
7927 *new_return = add_atomic_size_parameter (n, loc, function, params);
7928 return true;
7929 }
7930
7931 /* Otherwise, there is a match, so the call needs to be transformed from:
7932 void fn(T* mem, T* return, model)
7933 into
7934 *return = (T) (fn ((In *) mem, model)) */
7935
7936 p0 = (*params)[0];
7937 p1 = (*params)[1];
7938 p2 = (*params)[2];
7939
7940 /* Create pointer to appropriate size. */
7941 I_type = builtin_type_for_size (BITS_PER_UNIT * n, 1);
7942 I_type_ptr = build_pointer_type (I_type);
7943
7944 /* Convert object pointer to required type. */
7945 p0 = build1 (VIEW_CONVERT_EXPR, I_type_ptr, p0);
7946 (*params)[0] = p0;
7947
7948 /* Move memory model to the 2nd position, and end param list. */
7949 (*params)[1] = p2;
7950 params->truncate (2);
7951
7952 /* Convert return pointer and dereference it for later assignment. */
7953 *new_return = build_indirect_ref (loc, p1, RO_UNARY_STAR);
7954
7955 return false;
7956}
7957
7958
7959/* This will process an __atomic_store function call, determine whether it
7960 needs to be mapped to the _N variation, or turned into a library call.
7961 LOC is the location of the builtin call.
7962 FUNCTION is the DECL that has been invoked;
7963 PARAMS is the argument list for the call.
7964 TRUE is returned if it is translated into the proper format for a call to the
7965 external library, and NEW_RETURN is set to the tree for that call.
7966 FALSE is returned if processing for the _N variation is required, and
7967 NEW_RETURN is set to the return value the result is copied into. */
7968
7969static bool
7970resolve_overloaded_atomic_store (location_t loc, tree function,
7971 vec<tree, va_gc> *params, tree *new_return)
7972{
7973 tree p0, p1;
7974 tree I_type, I_type_ptr;
7975 int n = get_atomic_generic_size (loc, function, params);
7976
7977 /* Size of 0 is an error condition. */
7978 if (n == 0)
7979 {
7980 *new_return = error_mark_node;
7981 return true;
7982 }
7983
7984 /* If not a lock-free size, change to the library generic format. */
7985 if (!atomic_size_supported_p (n))
7986 {
7987 *new_return = add_atomic_size_parameter (n, loc, function, params);
7988 return true;
7989 }
7990
7991 /* Otherwise, there is a match, so the call needs to be transformed from:
7992 void fn(T* mem, T* value, model)
7993 into
7994 fn ((In *) mem, (In) *value, model) */
7995
7996 p0 = (*params)[0];
7997 p1 = (*params)[1];
7998
7999 /* Create pointer to appropriate size. */
8000 I_type = builtin_type_for_size (BITS_PER_UNIT * n, 1);
8001 I_type_ptr = build_pointer_type (I_type);
8002
8003 /* Convert object pointer to required type. */
8004 p0 = build1 (VIEW_CONVERT_EXPR, I_type_ptr, p0);
8005 (*params)[0] = p0;
8006
8007 /* Convert new value to required type, and dereference it. */
8008 p1 = build_indirect_ref (loc, p1, RO_UNARY_STAR);
8009 p1 = build1 (VIEW_CONVERT_EXPR, I_type, p1);
8010 (*params)[1] = p1;
8011
8012 /* The memory model is in the right spot already. Return is void. */
8013 *new_return = NULL_TREE;
8014
8015 return false;
8016}
8017
8018
8019/* Emit __atomic*fetch* on a _BitInt which doesn't have a size of
8020 1, 2, 4, 8 or 16 bytes using an __atomic_compare_exchange loop.
8021 ORIG_CODE is the DECL_FUNCTION_CODE of ORIG_FUNCTION and ORIG_PARAMS
8022 are the arguments of the call. */
8023
8024static tree
8025atomic_bitint_fetch_using_cas_loop (location_t loc,
8026 enum built_in_function orig_code,
8027 tree orig_function,
8028 vec<tree, va_gc> *orig_params)
8029{
8030 enum tree_code code = ERROR_MARK;
8031 bool return_old_p = false;
8032 switch (orig_code)
8033 {
8034 case BUILT_IN_ATOMIC_ADD_FETCH_N:
8035 code = PLUS_EXPR;
8036 break;
8037 case BUILT_IN_ATOMIC_SUB_FETCH_N:
8038 code = MINUS_EXPR;
8039 break;
8040 case BUILT_IN_ATOMIC_AND_FETCH_N:
8041 code = BIT_AND_EXPR;
8042 break;
8043 case BUILT_IN_ATOMIC_NAND_FETCH_N:
8044 break;
8045 case BUILT_IN_ATOMIC_XOR_FETCH_N:
8046 code = BIT_XOR_EXPR;
8047 break;
8048 case BUILT_IN_ATOMIC_OR_FETCH_N:
8049 code = BIT_IOR_EXPR;
8050 break;
8051 case BUILT_IN_ATOMIC_FETCH_ADD_N:
8052 code = PLUS_EXPR;
8053 return_old_p = true;
8054 break;
8055 case BUILT_IN_ATOMIC_FETCH_SUB_N:
8056 code = MINUS_EXPR;
8057 return_old_p = true;
8058 break;
8059 case BUILT_IN_ATOMIC_FETCH_AND_N:
8060 code = BIT_AND_EXPR;
8061 return_old_p = true;
8062 break;
8063 case BUILT_IN_ATOMIC_FETCH_NAND_N:
8064 return_old_p = true;
8065 break;
8066 case BUILT_IN_ATOMIC_FETCH_XOR_N:
8067 code = BIT_XOR_EXPR;
8068 return_old_p = true;
8069 break;
8070 case BUILT_IN_ATOMIC_FETCH_OR_N:
8071 code = BIT_IOR_EXPR;
8072 return_old_p = true;
8073 break;
8074 default:
8075 gcc_unreachable ();
8076 }
8077
8078 if (orig_params->length () != 3)
8079 {
8080 if (orig_params->length () < 3)
8081 error_at (loc, "too few arguments to function %qE", orig_function);
8082 else
8083 error_at (loc, "too many arguments to function %qE", orig_function);
8084 return error_mark_node;
8085 }
8086
8087 tree stmts = push_stmt_list ();
8088
8089 tree nonatomic_lhs_type = TREE_TYPE (TREE_TYPE ((*orig_params)[0]));
8090 nonatomic_lhs_type = TYPE_MAIN_VARIANT (nonatomic_lhs_type);
8091 gcc_assert (TREE_CODE (nonatomic_lhs_type) == BITINT_TYPE);
8092
8093 tree lhs_addr = (*orig_params)[0];
8094 tree val = convert (nonatomic_lhs_type, (*orig_params)[1]);
8095 tree model = convert (integer_type_node, (*orig_params)[2]);
8096 if (!c_dialect_cxx ())
8097 {
8098 lhs_addr = c_fully_fold (lhs_addr, false, NULL);
8099 val = c_fully_fold (val, false, NULL);
8100 model = c_fully_fold (model, false, NULL);
8101 }
8102 if (TREE_SIDE_EFFECTS (lhs_addr))
8103 {
8104 tree var = create_tmp_var_raw (TREE_TYPE (lhs_addr));
8105 lhs_addr = build4 (TARGET_EXPR, TREE_TYPE (lhs_addr), var, lhs_addr,
8106 NULL_TREE, NULL_TREE);
8107 add_stmt (lhs_addr);
8108 }
8109 if (TREE_SIDE_EFFECTS (val))
8110 {
8111 tree var = create_tmp_var_raw (nonatomic_lhs_type);
8112 val = build4 (TARGET_EXPR, nonatomic_lhs_type, var, val, NULL_TREE,
8113 NULL_TREE);
8114 add_stmt (val);
8115 }
8116 if (TREE_SIDE_EFFECTS (model))
8117 {
8118 tree var = create_tmp_var_raw (integer_type_node);
8119 model = build4 (TARGET_EXPR, integer_type_node, var, model, NULL_TREE,
8120 NULL_TREE);
8121 add_stmt (model);
8122 }
8123
8124 tree old = create_tmp_var_raw (nonatomic_lhs_type);
8125 tree old_addr = build_unary_op (loc, ADDR_EXPR, old, false);
8126 TREE_ADDRESSABLE (old) = 1;
8127 suppress_warning (old);
8128
8129 tree newval = create_tmp_var_raw (nonatomic_lhs_type);
8130 tree newval_addr = build_unary_op (loc, ADDR_EXPR, newval, false);
8131 TREE_ADDRESSABLE (newval) = 1;
8132 suppress_warning (newval);
8133
8134 tree loop_decl = create_artificial_label (loc);
8135 tree loop_label = build1 (LABEL_EXPR, void_type_node, loop_decl);
8136
8137 tree done_decl = create_artificial_label (loc);
8138 tree done_label = build1 (LABEL_EXPR, void_type_node, done_decl);
8139
8140 vec<tree, va_gc> *params;
8141 vec_alloc (params, 6);
8142
8143 /* __atomic_load (addr, &old, SEQ_CST). */
8144 tree fndecl = builtin_decl_explicit (BUILT_IN_ATOMIC_LOAD);
8145 params->quick_push (lhs_addr);
8146 params->quick_push (old_addr);
8147 params->quick_push (build_int_cst (integer_type_node, MEMMODEL_RELAXED));
8148 tree func_call = resolve_overloaded_builtin (loc, fndecl, params);
8149 if (func_call == NULL_TREE)
8150 func_call = build_function_call_vec (loc, vNULL, fndecl, params, NULL);
8151 old = build4 (TARGET_EXPR, nonatomic_lhs_type, old, func_call, NULL_TREE,
8152 NULL_TREE);
8153 add_stmt (old);
8154 params->truncate (0);
8155
8156 /* loop: */
8157 add_stmt (loop_label);
8158
8159 /* newval = old + val; */
8160 tree rhs;
8161 switch (code)
8162 {
8163 case PLUS_EXPR:
8164 case MINUS_EXPR:
8165 if (!TYPE_OVERFLOW_WRAPS (nonatomic_lhs_type))
8166 {
8167 tree utype
8168 = build_bitint_type (TYPE_PRECISION (nonatomic_lhs_type), 1);
8169 rhs = convert (nonatomic_lhs_type,
8170 build2_loc (loc, code, utype,
8171 convert (utype, old),
8172 convert (utype, val)));
8173 }
8174 else
8175 rhs = build2_loc (loc, code, nonatomic_lhs_type, old, val);
8176 break;
8177 case BIT_AND_EXPR:
8178 case BIT_IOR_EXPR:
8179 case BIT_XOR_EXPR:
8180 rhs = build2_loc (loc, code, nonatomic_lhs_type, old, val);
8181 break;
8182 case ERROR_MARK:
8183 rhs = build2_loc (loc, BIT_AND_EXPR, nonatomic_lhs_type,
8184 build1_loc (loc, BIT_NOT_EXPR,
8185 nonatomic_lhs_type, old), val);
8186 break;
8187 default:
8188 gcc_unreachable ();
8189 }
8190 rhs = build4 (TARGET_EXPR, nonatomic_lhs_type, newval, rhs, NULL_TREE,
8191 NULL_TREE);
8192 SET_EXPR_LOCATION (rhs, loc);
8193 add_stmt (rhs);
8194
8195 /* if (__atomic_compare_exchange (addr, &old, &new, false, model, model))
8196 goto done; */
8197 fndecl = builtin_decl_explicit (BUILT_IN_ATOMIC_COMPARE_EXCHANGE);
8198 params->quick_push (lhs_addr);
8199 params->quick_push (old_addr);
8200 params->quick_push (newval_addr);
8201 params->quick_push (integer_zero_node);
8202 params->quick_push (model);
8203 if (tree_fits_uhwi_p (model)
8204 && (tree_to_uhwi (model) == MEMMODEL_RELEASE
8205 || tree_to_uhwi (model) == MEMMODEL_ACQ_REL))
8206 params->quick_push (build_int_cst (integer_type_node, MEMMODEL_RELAXED));
8207 else
8208 params->quick_push (model);
8209 func_call = resolve_overloaded_builtin (loc, fndecl, params);
8210 if (func_call == NULL_TREE)
8211 func_call = build_function_call_vec (loc, vNULL, fndecl, params, NULL);
8212
8213 tree goto_stmt = build1 (GOTO_EXPR, void_type_node, done_decl);
8214 SET_EXPR_LOCATION (goto_stmt, loc);
8215
8216 tree stmt
8217 = build3 (COND_EXPR, void_type_node, func_call, goto_stmt, NULL_TREE);
8218 SET_EXPR_LOCATION (stmt, loc);
8219 add_stmt (stmt);
8220
8221 /* goto loop; */
8222 goto_stmt = build1 (GOTO_EXPR, void_type_node, loop_decl);
8223 SET_EXPR_LOCATION (goto_stmt, loc);
8224 add_stmt (goto_stmt);
8225
8226 /* done: */
8227 add_stmt (done_label);
8228
8229 tree ret = create_tmp_var_raw (nonatomic_lhs_type);
8230 stmt = build2_loc (loc, MODIFY_EXPR, void_type_node, ret,
8231 return_old_p ? old : newval);
8232 add_stmt (stmt);
8233
8234 /* Finish the compound statement. */
8235 stmts = pop_stmt_list (stmts);
8236
8237 return build4 (TARGET_EXPR, nonatomic_lhs_type, ret, stmts, NULL_TREE,
8238 NULL_TREE);
8239}
8240
8241
8242/* Some builtin functions are placeholders for other expressions. This
8243 function should be called immediately after parsing the call expression
8244 before surrounding code has committed to the type of the expression.
8245
8246 LOC is the location of the builtin call.
8247
8248 FUNCTION is the DECL that has been invoked; it is known to be a builtin.
8249 PARAMS is the argument list for the call. The return value is non-null
8250 when expansion is complete, and null if normal processing should
8251 continue. */
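
/* For example (illustrative only), a generic call parsed as

     int x, y;
     __atomic_load (&x, &y, __ATOMIC_SEQ_CST);

   is resolved here into roughly

     y = __atomic_load_4 (&x, __ATOMIC_SEQ_CST);

   while a call whose operand size has no lock-free support is instead
   rewritten into the library form that takes an explicit size.  */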
8252
8253tree
8254resolve_overloaded_builtin (location_t loc, tree function,
8255 vec<tree, va_gc> *params)
8256{
8257 /* Is function one of the _FETCH_OP_ or _OP_FETCH_ built-ins?
8258 Those are not valid to call with a pointer to _Bool (or C++ bool)
8259 and so must be rejected. */
8260 bool fetch_op = true;
8261 bool orig_format = true;
8262 tree new_return = NULL_TREE;
8263
8264 switch (DECL_BUILT_IN_CLASS (function))
8265 {
8266 case BUILT_IN_NORMAL:
8267 break;
8268 case BUILT_IN_MD:
8269 if (targetm.resolve_overloaded_builtin)
8270 return targetm.resolve_overloaded_builtin (loc, function, params);
8271 else
8272 return NULL_TREE;
8273 default:
8274 return NULL_TREE;
8275 }
8276
8277 /* Handle BUILT_IN_NORMAL here. */
8278 enum built_in_function orig_code = DECL_FUNCTION_CODE (function);
8279 switch (orig_code)
8280 {
8281 case BUILT_IN_SPECULATION_SAFE_VALUE_N:
8282 {
8283 tree new_function, first_param, result;
8284 enum built_in_function fncode
8285 = speculation_safe_value_resolve_call (function, params);
8286
8287 if (fncode == BUILT_IN_NONE)
8288 return error_mark_node;
8289
8290 first_param = (*params)[0];
8291 if (!speculation_safe_value_resolve_params (loc, function, params))
8292 return error_mark_node;
8293
8294 if (targetm.have_speculation_safe_value (true))
8295 {
8296 new_function = builtin_decl_explicit (fncode);
8297 result = build_function_call_vec (loc, vNULL, new_function, params,
8298 NULL);
8299
8300 if (result == error_mark_node)
8301 return result;
8302
8303 return speculation_safe_value_resolve_return (first_param, result);
8304 }
8305 else
8306 {
8307 /* This target doesn't have, or doesn't need, active mitigation
8308 against incorrect speculative execution. Simply return the
8309 first parameter to the builtin. */
8310 if (!targetm.have_speculation_safe_value (false))
8311 /* The user has invoked __builtin_speculation_safe_value
8312 even though __HAVE_SPECULATION_SAFE_VALUE is not
8313 defined: emit a warning. */
8314 warning_at (input_location, 0,
8315 "this target does not define a speculation barrier; "
8316 "your program will still execute correctly, "
8317 "but incorrect speculation may not be "
8318 "restricted");
8319
8320 /* If the optional second argument is present, handle any side
8321 effects now. */
8322 if (params->length () == 2
8323 && TREE_SIDE_EFFECTS ((*params)[1]))
8324 return build2 (COMPOUND_EXPR, TREE_TYPE (first_param),
8325 (*params)[1], first_param);
8326
8327 return first_param;
8328 }
8329 }
8330
8331 case BUILT_IN_ATOMIC_EXCHANGE:
8332 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE:
8333 case BUILT_IN_ATOMIC_LOAD:
8334 case BUILT_IN_ATOMIC_STORE:
8335 {
8336 /* Handle these 4 together so that they can fall through to the next
8337 case if the call is transformed to an _N variant. */
8338 switch (orig_code)
8339 {
8340 case BUILT_IN_ATOMIC_EXCHANGE:
8341 {
8342 if (resolve_overloaded_atomic_exchange (loc, function, params,
8343 &new_return))
8344 return new_return;
8345 /* Change to the _N variant. */
8346 orig_code = BUILT_IN_ATOMIC_EXCHANGE_N;
8347 break;
8348 }
8349
8350 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE:
8351 {
8352 if (resolve_overloaded_atomic_compare_exchange (loc, function,
8353 params,
8354 &new_return))
8355 return new_return;
8356 /* Change to the _N variant. */
8357 orig_code = BUILT_IN_ATOMIC_COMPARE_EXCHANGE_N;
8358 break;
8359 }
8360 case BUILT_IN_ATOMIC_LOAD:
8361 {
8362 if (resolve_overloaded_atomic_load (loc, function, params,
8363 &new_return))
8364 return new_return;
8365 /* Change to the _N variant. */
8366 orig_code = BUILT_IN_ATOMIC_LOAD_N;
8367 break;
8368 }
8369 case BUILT_IN_ATOMIC_STORE:
8370 {
8371 if (resolve_overloaded_atomic_store (loc, function, params,
8372 &new_return))
8373 return new_return;
8374 /* Change to the _N variant. */
8375 orig_code = BUILT_IN_ATOMIC_STORE_N;
8376 break;
8377 }
8378 default:
8379 gcc_unreachable ();
8380 }
8381 }
8382 /* FALLTHRU */
8383 case BUILT_IN_ATOMIC_EXCHANGE_N:
8384 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_N:
8385 case BUILT_IN_ATOMIC_LOAD_N:
8386 case BUILT_IN_ATOMIC_STORE_N:
8387 fetch_op = false;
8388 /* FALLTHRU */
8389 case BUILT_IN_ATOMIC_ADD_FETCH_N:
8390 case BUILT_IN_ATOMIC_SUB_FETCH_N:
8391 case BUILT_IN_ATOMIC_AND_FETCH_N:
8392 case BUILT_IN_ATOMIC_NAND_FETCH_N:
8393 case BUILT_IN_ATOMIC_XOR_FETCH_N:
8394 case BUILT_IN_ATOMIC_OR_FETCH_N:
8395 case BUILT_IN_ATOMIC_FETCH_ADD_N:
8396 case BUILT_IN_ATOMIC_FETCH_SUB_N:
8397 case BUILT_IN_ATOMIC_FETCH_AND_N:
8398 case BUILT_IN_ATOMIC_FETCH_NAND_N:
8399 case BUILT_IN_ATOMIC_FETCH_XOR_N:
8400 case BUILT_IN_ATOMIC_FETCH_OR_N:
8401 orig_format = false;
8402 /* FALLTHRU */
8403 case BUILT_IN_SYNC_FETCH_AND_ADD_N:
8404 case BUILT_IN_SYNC_FETCH_AND_SUB_N:
8405 case BUILT_IN_SYNC_FETCH_AND_OR_N:
8406 case BUILT_IN_SYNC_FETCH_AND_AND_N:
8407 case BUILT_IN_SYNC_FETCH_AND_XOR_N:
8408 case BUILT_IN_SYNC_FETCH_AND_NAND_N:
8409 case BUILT_IN_SYNC_ADD_AND_FETCH_N:
8410 case BUILT_IN_SYNC_SUB_AND_FETCH_N:
8411 case BUILT_IN_SYNC_OR_AND_FETCH_N:
8412 case BUILT_IN_SYNC_AND_AND_FETCH_N:
8413 case BUILT_IN_SYNC_XOR_AND_FETCH_N:
8414 case BUILT_IN_SYNC_NAND_AND_FETCH_N:
8415 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_N:
8416 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_N:
8417 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_N:
8418 case BUILT_IN_SYNC_LOCK_RELEASE_N:
8419 {
8420 /* The following are not _FETCH_OPs and must be accepted with
8421 pointers to _Bool (or C++ bool). */
8422 if (fetch_op)
8423 fetch_op = (orig_code != BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_N
8424 && orig_code != BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_N
8425 && orig_code != BUILT_IN_SYNC_LOCK_TEST_AND_SET_N
8426 && orig_code != BUILT_IN_SYNC_LOCK_RELEASE_N);
8427
8428 int n = sync_resolve_size (function, params, fetch_op, orig_format);
8429 tree new_function, first_param, result;
8430 enum built_in_function fncode;
8431
8432 if (n == 0)
8433 return error_mark_node;
8434
8435 if (n == -1)
8436 return atomic_bitint_fetch_using_cas_loop (loc, orig_code,
8437 function, params);
8438
8439 fncode = (enum built_in_function)((int)orig_code + exact_log2 (n) + 1);
8440 new_function = builtin_decl_explicit (fncode);
8441 if (!sync_resolve_params (loc, function, new_function, params,
8442 orig_format))
8443 return error_mark_node;
8444
8445 first_param = (*params)[0];
8446 result = build_function_call_vec (loc, vNULL, new_function, params,
8447 NULL);
8448 if (result == error_mark_node)
8449 return result;
8450 if (orig_code != BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_N
8451 && orig_code != BUILT_IN_SYNC_LOCK_RELEASE_N
8452 && orig_code != BUILT_IN_ATOMIC_STORE_N
8453 && orig_code != BUILT_IN_ATOMIC_COMPARE_EXCHANGE_N)
8454 result = sync_resolve_return (first_param, result, orig_format);
8455
8456 if (fetch_op)
8457 /* Prevent -Wunused-value warning. */
8458 TREE_USED (result) = true;
8459
8460 /* If new_return is set, assign function to that expr and cast the
8461 result to void since the generic interface returned void. */
8462 if (new_return)
8463 {
8464 /* Cast function result from I{1,2,4,8,16} to the required type. */
8465 if (TREE_CODE (TREE_TYPE (new_return)) == BITINT_TYPE)
8466 {
8467 struct bitint_info info;
8468 unsigned prec = TYPE_PRECISION (TREE_TYPE (new_return));
8469 targetm.c.bitint_type_info (prec, &info);
8470 if (!info.extended)
8471 /* For _BitInt which has the padding bits undefined
8472 convert to the _BitInt type rather than VCE to force
8473 zero or sign extension. */
8474 result = build1 (NOP_EXPR, TREE_TYPE (new_return), result);
8475 }
8476 result
8477 = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (new_return), result);
8478 result = build2 (MODIFY_EXPR, TREE_TYPE (new_return), new_return,
8479 result);
8480 TREE_SIDE_EFFECTS (result) = 1;
8481 protected_set_expr_location (result, loc);
8482 result = convert (void_type_node, result);
8483 }
8484 return result;
8485 }
8486
8487 default:
8488 return NULL_TREE;
8489 }
8490}
8491
8492/* vector_types_compatible_elements_p is used in type checks of vectors
8493 values used as operands of binary operators. Where it returns true, and
8494 the other checks of the caller succeed (being vector types in the first
8495 place, and matching number of elements), we can just treat the types
8496 as essentially the same.
8497 Contrast with vector_targets_convertible_p, which is used for vector
8498 pointer types, and vector_types_convertible_p, which will allow
8499 language-specific matches under the control of flag_lax_vector_conversions,
8500 and might still require a conversion. */
8501/* True if vector types T1 and T2 can be inputs to the same binary
8502 operator without conversion.
8503 We don't check the overall vector size here because some of our callers
8504 want to give different error messages when the vectors are compatible
8505 except for the element count. */
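
/* For instance (illustrative), the operands below are accepted because
   their element types differ only in signedness:

     typedef int v4si __attribute__ ((vector_size (16)));
     typedef unsigned int v4usi __attribute__ ((vector_size (16)));
     v4si a; v4usi b;
     a + b;   // element types compare equal after c_common_signed_type

   whereas a vector of int and a vector of float are rejected here.  */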
8506
8507bool
8508vector_types_compatible_elements_p (tree t1, tree t2)
8509{
8510 bool opaque = TYPE_VECTOR_OPAQUE (t1) || TYPE_VECTOR_OPAQUE (t2);
8511 t1 = TREE_TYPE (t1);
8512 t2 = TREE_TYPE (t2);
8513
8514 enum tree_code c1 = TREE_CODE (t1), c2 = TREE_CODE (t2);
8515
8516 gcc_assert ((INTEGRAL_TYPE_P (t1)
8517 || c1 == REAL_TYPE
8518 || c1 == FIXED_POINT_TYPE)
8519 && (INTEGRAL_TYPE_P (t2)
8520 || c2 == REAL_TYPE
8521 || c2 == FIXED_POINT_TYPE));
8522
8523 t1 = c_common_signed_type (t1);
8524 t2 = c_common_signed_type (t2);
8525 /* Equality works here because c_common_signed_type uses
8526 TYPE_MAIN_VARIANT. */
8527 if (t1 == t2)
8528 return true;
8529 if (opaque && c1 == c2
8530 && (INTEGRAL_TYPE_P (t1) || c1 == REAL_TYPE)
8531 && TYPE_PRECISION (t1) == TYPE_PRECISION (t2))
8532 return true;
8533 return false;
8534}
8535
8536/* Check for missing format attributes on function pointers. LTYPE is
8537 the new type or left-hand side type. RTYPE is the old type or
8538 right-hand side type. Returns TRUE if LTYPE is missing the desired
8539 attribute. */
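
/* Illustrative example of the situation being detected (the names are
   hypothetical): copying a function pointer that carries a format
   attribute into one that lacks it loses checking, so this returns
   true for

     typedef void (*log_fn) (const char *, ...)
       __attribute__ ((format (printf, 1, 2)));
     void (*plain) (const char *, ...);
     log_fn src = 0;
     plain = src;   // LTYPE lacks the "format" attribute of RTYPE

   and false when both types, or neither, have the attribute.  */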
8540
8541bool
8542check_missing_format_attribute (tree ltype, tree rtype)
8543{
8544 tree const ttr = TREE_TYPE (rtype), ttl = TREE_TYPE (ltype);
8545 tree ra;
8546
8547 for (ra = TYPE_ATTRIBUTES (ttr); ra; ra = TREE_CHAIN (ra))
8548 if (is_attribute_p ("format", get_attribute_name (ra)))
8549 break;
8550 if (ra)
8551 {
8552 tree la;
8553 for (la = TYPE_ATTRIBUTES (ttl); la; la = TREE_CHAIN (la))
8554 if (is_attribute_p ("format", get_attribute_name (la)))
8555 break;
8556 return !la;
8557 }
8558 else
8559 return false;
8560}
8561
8562/* Set up a TYPE_DECL node as a typedef representation.
8563
8564 X is a TYPE_DECL for a typedef statement. Create a brand new
8565 ..._TYPE node (which will be just a variant of the existing
8566 ..._TYPE node with identical properties) and then install X
8567 as the TYPE_NAME of this brand new (duplicate) ..._TYPE node.
8568
8569 The whole point here is to end up with a situation where each
8570 and every ..._TYPE node the compiler creates will be uniquely
8571 associated with AT MOST one node representing a typedef name.
8572 This way, even though the compiler substitutes corresponding
8573 ..._TYPE nodes for TYPE_DECL (i.e. "typedef name") nodes very
8574 early on, later parts of the compiler can always do the reverse
8575 translation and get back the corresponding typedef name. For
8576 example, given:
8577
8578 typedef struct S MY_TYPE;
8579 MY_TYPE object;
8580
8581 Later parts of the compiler might only know that `object' was of
8582 type `struct S' if it were not for code just below. With this
8583 code however, later parts of the compiler see something like:
8584
8585 struct S' == struct S
8586 typedef struct S' MY_TYPE;
8587 struct S' object;
8588
8589 And they can then deduce (from the node for type struct S') that
8590 the original object declaration was:
8591
8592 MY_TYPE object;
8593
8594 Being able to do this is important for proper support of protoize,
8595 and also for generating precise symbolic debugging information
8596 which takes full account of the programmer's (typedef) vocabulary.
8597
8598 Obviously, we don't want to generate a duplicate ..._TYPE node if
8599 the TYPE_DECL node that we are now processing really represents a
8600 standard built-in type. */
8601
8602void
8603set_underlying_type (tree x)
8604{
8605 if (x == error_mark_node || TREE_TYPE (x) == error_mark_node)
8606 return;
8607 if (DECL_IS_UNDECLARED_BUILTIN (x) && TREE_CODE (TREE_TYPE (x)) != ARRAY_TYPE)
8608 {
8609 if (TYPE_NAME (TREE_TYPE (x)) == 0)
8610 TYPE_NAME (TREE_TYPE (x)) = x;
8611 }
8612 else if (DECL_ORIGINAL_TYPE (x))
8613 gcc_checking_assert (TYPE_NAME (TREE_TYPE (x)) == x);
8614 else
8615 {
8616 tree tt = TREE_TYPE (x);
8617 DECL_ORIGINAL_TYPE (x) = tt;
8618 tt = build_variant_type_copy (tt);
8619 TYPE_STUB_DECL (tt) = TYPE_STUB_DECL (DECL_ORIGINAL_TYPE (x));
8620 TYPE_NAME (tt) = x;
8621
8622 /* Mark the type as used only when its type decl is decorated
8623 with attribute unused. */
8624 if (lookup_attribute ("unused", DECL_ATTRIBUTES (x)))
8625 TREE_USED (tt) = 1;
8626
8627 TREE_TYPE (x) = tt;
8628 }
8629}
8630
8631/* Return true if it is worth exposing the DECL_ORIGINAL_TYPE of TYPE to
8632 the user in diagnostics, false if it would be better to use TYPE itself.
8633 TYPE is known to satisfy typedef_variant_p. */
8634
8635bool
8636user_facing_original_type_p (const_tree type)
8637{
8638 gcc_assert (typedef_variant_p (type));
8639 tree decl = TYPE_NAME (type);
8640
8641 /* Look through any typedef in "user" code. */
8642 if (!DECL_IN_SYSTEM_HEADER (decl) && !DECL_IS_UNDECLARED_BUILTIN (decl))
8643 return true;
8644
8645 /* If the original type is also named and is in the user namespace,
8646 assume it too is a user-facing type. */
8647 tree orig_type = DECL_ORIGINAL_TYPE (decl);
8648 if (tree orig_id = TYPE_IDENTIFIER (orig_type))
8649 if (!name_reserved_for_implementation_p (IDENTIFIER_POINTER (orig_id)))
8650 return true;
8651
8652 switch (TREE_CODE (orig_type))
8653 {
8654 /* Don't look through to an anonymous vector type, since the syntax
8655 we use for them in diagnostics isn't real C or C++ syntax.
8656 And if ORIG_TYPE is named but in the implementation namespace,
8657 TYPE is likely to be more meaningful to the user. */
8658 case VECTOR_TYPE:
8659 return false;
8660
8661 /* Don't expose anonymous tag types that are presumably meant to be
8662 known by their typedef name. Also don't expose tags that are in
8663 the implementation namespace, such as:
8664
8665 typedef struct __foo foo; */
8666 case RECORD_TYPE:
8667 case UNION_TYPE:
8668 case ENUMERAL_TYPE:
8669 return false;
8670
8671 /* Look through to anything else. */
8672 default:
8673 return true;
8674 }
8675}
8676
8677/* Record the types used by the current global variable declaration
8678 being parsed, so that we can decide later to emit their debug info.
8679 Those types are in types_used_by_cur_var_decl, and we are going to
8680 store them in the types_used_by_vars_hash hash table.
8681 DECL is the declaration of the global variable that has been parsed. */
8682
8683void
8684record_types_used_by_current_var_decl (tree decl)
8685{
8686 gcc_assert (decl && DECL_P (decl) && TREE_STATIC (decl));
8687
8688 while (types_used_by_cur_var_decl && !types_used_by_cur_var_decl->is_empty ())
8689 {
8690 tree type = types_used_by_cur_var_decl->pop ();
8691 types_used_by_var_decl_insert (type, decl);
8692 }
8693}
8694
8695/* The C and C++ parsers both use vectors to hold function arguments.
8696 For efficiency, we keep a cache of unused vectors. This is the
8697 cache. */
8698
8699typedef vec<tree, va_gc> *tree_gc_vec;
8700static GTY((deletable)) vec<tree_gc_vec, va_gc> *tree_vector_cache;
8701
8702/* Return a new vector from the cache. If the cache is empty,
8703 allocate a new vector. These vectors are GC'ed, so it is OK if the
8704 pointer is not released. */
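
/* Typical (illustrative) usage pattern in the parsers:

     vec<tree, va_gc> *args = make_tree_vector ();
     vec_safe_push (args, arg);
     ...
     release_tree_vector (args);   // hands the vector back to the cache

   so hot paths that build short argument lists avoid a fresh GC
   allocation for every call.  */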
8705
8706vec<tree, va_gc> *
8707make_tree_vector (void)
8708{
8709 if (tree_vector_cache && !tree_vector_cache->is_empty ())
8710 return tree_vector_cache->pop ();
8711 else
8712 {
8713 /* Passing 0 to vec::alloc returns NULL, and our callers require
8714 that we always return a non-NULL value. The vector code uses
8715 4 when growing a NULL vector, so we do too. */
8716 vec<tree, va_gc> *v;
8717 vec_alloc (v, 4);
8718 return v;
8719 }
8720}
8721
8722/* Release a vector of trees back to the cache. */
8723
8724void
8725release_tree_vector (vec<tree, va_gc> *vec)
8726{
8727 if (vec != NULL)
8728 {
8729 if (vec->allocated () >= 16)
8730 /* Don't cache vecs that have expanded more than once. On a p64
8731 target, vecs double in alloc size with each power of 2 elements, e.g.
8732 at 16 elements the alloc increases from 128 to 256 bytes. */
8733 vec_free (vec);
8734 else
8735 {
8736 vec->truncate (0);
8737 vec_safe_push (tree_vector_cache, vec);
8738 }
8739 }
8740}
8741
8742/* Get a new tree vector holding a single tree. */
8743
8744vec<tree, va_gc> *
8745make_tree_vector_single (tree t)
8746{
8747 vec<tree, va_gc> *ret = make_tree_vector ();
8748 ret->quick_push (t);
8749 return ret;
8750}
8751
8752/* Get a new tree vector of the TREE_VALUEs of a TREE_LIST chain. */
8753
8754vec<tree, va_gc> *
8755make_tree_vector_from_list (tree list)
8756{
8757 vec<tree, va_gc> *ret = make_tree_vector ();
8758 for (; list; list = TREE_CHAIN (list))
8759 vec_safe_push (ret, TREE_VALUE (list));
8760 return ret;
8761}
8762
8763/* Get a new tree vector of the values of a CONSTRUCTOR. */
8764
8765vec<tree, va_gc> *
8766make_tree_vector_from_ctor (tree ctor)
8767{
8768 vec<tree,va_gc> *ret = make_tree_vector ();
8769 vec_safe_reserve (ret, CONSTRUCTOR_NELTS (ctor));
8770 for (unsigned i = 0; i < CONSTRUCTOR_NELTS (ctor); ++i)
8771 ret->quick_push (CONSTRUCTOR_ELT (ctor, i)->value);
8772 return ret;
8773}
8774
8775/* Get a new tree vector which is a copy of an existing one. */
8776
8777vec<tree, va_gc> *
8778make_tree_vector_copy (const vec<tree, va_gc> *orig)
8779{
8780 vec<tree, va_gc> *ret;
8781 unsigned int ix;
8782 tree t;
8783
8784 ret = make_tree_vector ();
8785 vec_safe_reserve (ret, vec_safe_length (orig));
8786 FOR_EACH_VEC_SAFE_ELT (orig, ix, t)
8787 ret->quick_push (t);
8788 return ret;
8789}
8790
8791/* Return true if KEYWORD starts a type specifier. */
8792
8793bool
8794keyword_begins_type_specifier (enum rid keyword)
8795{
8796 switch (keyword)
8797 {
8798 case RID_AUTO_TYPE:
8799 case RID_INT:
8800 case RID_CHAR:
8801 case RID_FLOAT:
8802 case RID_DOUBLE:
8803 case RID_VOID:
8804 case RID_UNSIGNED:
8805 case RID_LONG:
8806 case RID_SHORT:
8807 case RID_SIGNED:
8808 CASE_RID_FLOATN_NX:
8809 case RID_DFLOAT32:
8810 case RID_DFLOAT64:
8811 case RID_DFLOAT128:
8812 case RID_FRACT:
8813 case RID_ACCUM:
8814 case RID_BOOL:
8815 case RID_BITINT:
8816 case RID_WCHAR:
8817 case RID_CHAR8:
8818 case RID_CHAR16:
8819 case RID_CHAR32:
8820 case RID_SAT:
8821 case RID_COMPLEX:
8822 case RID_TYPEOF:
8823 case RID_STRUCT:
8824 case RID_CLASS:
8825 case RID_UNION:
8826 case RID_ENUM:
8827 return true;
8828 default:
8829 if (keyword >= RID_FIRST_INT_N
8830 && keyword < RID_FIRST_INT_N + NUM_INT_N_ENTS
8831 && int_n_enabled_p[keyword-RID_FIRST_INT_N])
8832 return true;
8833 return false;
8834 }
8835}
8836
8837/* Return true if KEYWORD names a type qualifier. */
8838
8839bool
8840keyword_is_type_qualifier (enum rid keyword)
8841{
8842 switch (keyword)
8843 {
8844 case RID_CONST:
8845 case RID_VOLATILE:
8846 case RID_RESTRICT:
8847 case RID_ATOMIC:
8848 return true;
8849 default:
8850 return false;
8851 }
8852}
8853
8854/* Return true if KEYWORD names a storage class specifier.
8855
8856 RID_TYPEDEF is not included in this list despite `typedef' being
8857 listed in C99 6.7.1.1. 6.7.1.3 indicates that `typedef' is listed as
8858 such for syntactic convenience only. */
8859
8860bool
8861keyword_is_storage_class_specifier (enum rid keyword)
8862{
8863 switch (keyword)
8864 {
8865 case RID_STATIC:
8866 case RID_EXTERN:
8867 case RID_REGISTER:
8868 case RID_AUTO:
8869 case RID_MUTABLE:
8870 case RID_THREAD:
8871 return true;
8872 default:
8873 return false;
8874 }
8875}
8876
8877/* Return true if KEYWORD names a function-specifier [dcl.fct.spec]. */
8878
8879static bool
8880keyword_is_function_specifier (enum rid keyword)
8881{
8882 switch (keyword)
8883 {
8884 case RID_INLINE:
8885 case RID_NORETURN:
8886 case RID_VIRTUAL:
8887 case RID_EXPLICIT:
8888 return true;
8889 default:
8890 return false;
8891 }
8892}
8893
8894/* Return true if KEYWORD names a decl-specifier [dcl.spec] or a
8895 declaration-specifier (C99 6.7). */
8896
8897bool
8898keyword_is_decl_specifier (enum rid keyword)
8899{
8900 if (keyword_is_storage_class_specifier (keyword)
8901 || keyword_is_type_qualifier (keyword)
8902 || keyword_is_function_specifier (keyword))
8903 return true;
8904
8905 switch (keyword)
8906 {
8907 case RID_TYPEDEF:
8908 case RID_FRIEND:
8909 case RID_CONSTEXPR:
8910 case RID_CONSTINIT:
8911 return true;
8912 default:
8913 return false;
8914 }
8915}
8916
8917/* Initialize language-specific-bits of tree_contains_struct. */
8918
8919void
8920c_common_init_ts (void)
8921{
8922 MARK_TS_EXP (SIZEOF_EXPR);
8923 MARK_TS_EXP (PAREN_SIZEOF_EXPR);
8924 MARK_TS_EXP (C_MAYBE_CONST_EXPR);
8925 MARK_TS_EXP (EXCESS_PRECISION_EXPR);
8926 MARK_TS_EXP (BREAK_STMT);
8927 MARK_TS_EXP (CONTINUE_STMT);
8928 MARK_TS_EXP (DO_STMT);
8929 MARK_TS_EXP (FOR_STMT);
8930 MARK_TS_EXP (SWITCH_STMT);
8931 MARK_TS_EXP (WHILE_STMT);
8932
8933 MARK_TS_DECL_COMMON (CONCEPT_DECL);
8934}
8935
8936/* Build a user-defined numeric literal out of an integer constant type VALUE
8937 with identifier SUFFIX. */
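
/* For example (C++ only, illustrative): given

     unsigned long long operator"" _ms (unsigned long long);
     auto t = 250_ms;

   the parser calls this with the identifier for "_ms" as SUFFIX_ID,
   the constant 250 as VALUE and the spelling "250" as NUM_STRING; the
   resulting USERDEF_LITERAL is later lowered to a call to the
   user-declared literal operator.  */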
8938
8939tree
8940build_userdef_literal (tree suffix_id, tree value,
8941 enum overflow_type overflow, tree num_string)
8942{
8943 tree literal = make_node (USERDEF_LITERAL);
8944 USERDEF_LITERAL_SUFFIX_ID (literal) = suffix_id;
8945 USERDEF_LITERAL_VALUE (literal) = value;
8946 USERDEF_LITERAL_OVERFLOW (literal) = overflow;
8947 USERDEF_LITERAL_NUM_STRING (literal) = num_string;
8948 return literal;
8949}
8950
8951/* For vector[index], convert the vector to an array of the underlying type.
8952 Return true if the resulting ARRAY_REF should not be an lvalue. */
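
/* Illustrative effect on the tree (the typedef is an assumption): for

     typedef int v4si __attribute__ ((vector_size (16)));
     v4si v;
     ... v[i] ...

   *VECP is rewritten into VIEW_CONVERT_EXPR<int[4]>(v) so that the
   caller can build an ordinary ARRAY_REF on the result.  */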
8953
8954bool
8955convert_vector_to_array_for_subscript (location_t loc,
8956 tree *vecp, tree index)
8957{
8958 bool ret = false;
8959 if (gnu_vector_type_p (TREE_TYPE (*vecp)))
8960 {
8961 tree type = TREE_TYPE (*vecp);
8962
8963 ret = !lvalue_p (*vecp);
8964
8965 index = fold_for_warn (index);
8966 if (TREE_CODE (index) == INTEGER_CST)
8967 if (!tree_fits_uhwi_p (index)
8968 || maybe_ge (tree_to_uhwi (index), TYPE_VECTOR_SUBPARTS (type)))
8969 warning_at (loc, OPT_Warray_bounds_, "index value is out of bound");
8970
8971 /* We are building an ARRAY_REF so mark the vector as addressable
8972 to not run into the gimplifier's premature setting of DECL_GIMPLE_REG_P
8973 for function parameters. */
8974 c_common_mark_addressable_vec (*vecp);
8975
8976 *vecp = build1 (VIEW_CONVERT_EXPR,
8977 build_array_type_nelts (TREE_TYPE (type),
8978 TYPE_VECTOR_SUBPARTS (type)),
8979 *vecp);
8980 }
8981 return ret;
8982}
8983
8984/* Determine which of the operands, if any, is a scalar that needs to be
8985 converted to a vector, for the range of operations. */
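
/* For example (illustrative), with

     typedef int v4si __attribute__ ((vector_size (16)));
     v4si v;

   "v + 1" yields stv_secondarg (the scalar 1 must be widened to a
   v4si), "v + (1LL << 32)" yields stv_error because the scalar would
   be truncated by the conversion, and an expression whose operands
   are both vectors yields stv_nothing.  */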
8986enum stv_conv
8987scalar_to_vector (location_t loc, enum tree_code code, tree op0, tree op1,
8988 bool complain)
8989{
8990 tree type0 = TREE_TYPE (op0);
8991 tree type1 = TREE_TYPE (op1);
8992 bool integer_only_op = false;
8993 enum stv_conv ret = stv_firstarg;
8994
8995 gcc_assert (gnu_vector_type_p (type0) || gnu_vector_type_p (type1));
8996 switch (code)
8997 {
8998 /* Most GENERIC binary expressions require homogeneous arguments.
8999 LSHIFT_EXPR and RSHIFT_EXPR are exceptions and accept a first
9000 argument that is a vector and a second one that is a scalar, so
9001 we never return stv_secondarg for them. */
9002 case RSHIFT_EXPR:
9003 case LSHIFT_EXPR:
9004 if (TREE_CODE (type0) == INTEGER_TYPE
9005 && TREE_CODE (TREE_TYPE (type1)) == INTEGER_TYPE)
9006 {
9007 if (unsafe_conversion_p (TREE_TYPE (type1), op0,
9008 NULL_TREE, false))
9009 {
9010 if (complain)
9011 error_at (loc, "conversion of scalar %qT to vector %qT "
9012 "involves truncation", type0, type1);
9013 return stv_error;
9014 }
9015 else
9016 return stv_firstarg;
9017 }
9018 break;
9019
9020 case BIT_IOR_EXPR:
9021 case BIT_XOR_EXPR:
9022 case BIT_AND_EXPR:
9023 integer_only_op = true;
9024 /* fall through */
9025
9026 case VEC_COND_EXPR:
9027
9028 case PLUS_EXPR:
9029 case MINUS_EXPR:
9030 case MULT_EXPR:
9031 case TRUNC_DIV_EXPR:
9032 case CEIL_DIV_EXPR:
9033 case FLOOR_DIV_EXPR:
9034 case ROUND_DIV_EXPR:
9035 case EXACT_DIV_EXPR:
9036 case TRUNC_MOD_EXPR:
9037 case FLOOR_MOD_EXPR:
9038 case RDIV_EXPR:
9039 case EQ_EXPR:
9040 case NE_EXPR:
9041 case LE_EXPR:
9042 case GE_EXPR:
9043 case LT_EXPR:
9044 case GT_EXPR:
9045 /* What about UNLT_EXPR? */
9046 if (gnu_vector_type_p (type0))
9047 {
9048 ret = stv_secondarg;
9049 std::swap (type0, type1);
9050 std::swap (op0, op1);
9051 }
9052
9053 if (TREE_CODE (type0) == INTEGER_TYPE
9054 && TREE_CODE (TREE_TYPE (type1)) == INTEGER_TYPE)
9055 {
9056 if (unsafe_conversion_p (TREE_TYPE (type1), op0,
9057 NULL_TREE, false))
9058 {
9059 if (complain)
9060 error_at (loc, "conversion of scalar %qT to vector %qT "
9061 "involves truncation", type0, type1);
9062 return stv_error;
9063 }
9064 return ret;
9065 }
9066 else if (!integer_only_op
9067 /* Allow integer --> real conversion if safe. */
9068 && (SCALAR_FLOAT_TYPE_P (type0)
9069 || TREE_CODE (type0) == INTEGER_TYPE)
9070 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (type1)))
9071 {
9072 if (unsafe_conversion_p (TREE_TYPE (type1), op0,
9073 NULL_TREE, false))
9074 {
9075 if (complain)
9076 error_at (loc, "conversion of scalar %qT to vector %qT "
9077 "involves truncation", type0, type1);
9078 return stv_error;
9079 }
9080 return ret;
9081 }
9082 default:
9083 break;
9084 }
9085
9086 return stv_nothing;
9087}
9088
9089/* Return the alignment of std::max_align_t.
9090
9091 [support.types.layout] The type max_align_t is a POD type whose alignment
9092 requirement is at least as great as that of every scalar type, and whose
9093 alignment requirement is supported in every context. */
9094
9095unsigned
9096max_align_t_align ()
9097{
9098 unsigned int max_align = MAX (TYPE_ALIGN (long_long_integer_type_node),
9099 TYPE_ALIGN (long_double_type_node));
9100 if (float128_type_node != NULL_TREE)
9101 max_align = MAX (max_align, TYPE_ALIGN (float128_type_node));
9102 return max_align;
9103}
9104
9105/* Return true iff ALIGN is an integral constant that is a fundamental
9106 alignment, as defined by [basic.align] in the c++-11
9107 specifications.
9108
9109 That is:
9110
9111 [A fundamental alignment is represented by an alignment less than or
9112 equal to the greatest alignment supported by the implementation
9113 in all contexts, which is equal to alignof(max_align_t)]. */
9114
9115bool
9116cxx_fundamental_alignment_p (unsigned align)
9117{
9118 return (align <= max_align_t_align ());
9119}
9120
9121/* Return true if T is a pointer to a zero-sized aggregate. */
9122
9123bool
9124pointer_to_zero_sized_aggr_p (tree t)
9125{
9126 if (!POINTER_TYPE_P (t))
9127 return false;
9128 t = TREE_TYPE (t);
9129 return (TYPE_SIZE (t) && integer_zerop (TYPE_SIZE (t)));
9130}
9131
9132/* For an EXPR of a FUNCTION_TYPE that references a GCC built-in function
9133 with no library fallback, or for an ADDR_EXPR whose operand is such an
9134 expression, issue an error pointing to the location LOC.
9135 Returns true when the expression has been diagnosed and false
9136 otherwise. */
9137
9138bool
9139reject_gcc_builtin (const_tree expr, location_t loc /* = UNKNOWN_LOCATION */)
9140{
9141 if (TREE_CODE (expr) == ADDR_EXPR)
9142 expr = TREE_OPERAND (expr, 0);
9143
9144 STRIP_ANY_LOCATION_WRAPPER (expr);
9145
9146 if (TREE_TYPE (expr)
9147 && TREE_CODE (TREE_TYPE (expr)) == FUNCTION_TYPE
9148 && TREE_CODE (expr) == FUNCTION_DECL
9149 /* The intersection of DECL_BUILT_IN and DECL_IS_UNDECLARED_BUILTIN avoids
9150 false positives for user-declared built-ins such as abs or
9151 strlen, and for C++ operators new and delete.
9152 The c_decl_implicit() test avoids false positives for implicitly
9153 declared built-ins with library fallbacks (such as abs). */
9154 && fndecl_built_in_p (expr)
9155 && DECL_IS_UNDECLARED_BUILTIN (expr)
9156 && !c_decl_implicit (expr)
9157 && !DECL_ASSEMBLER_NAME_SET_P (expr))
9158 {
9159 if (loc == UNKNOWN_LOCATION)
9160 loc = EXPR_LOC_OR_LOC (expr, input_location);
9161
9162 /* Reject arguments that are built-in functions with
9163 no library fallback. */
9164 error_at (loc, "built-in function %qE must be directly called", expr);
9165
9166 return true;
9167 }
9168
9169 return false;
9170}
9171
9172/* Issue an ERROR for an invalid SIZE of array NAME which is null
9173 for unnamed arrays. */
9174
9175void
9176invalid_array_size_error (location_t loc, cst_size_error error,
9177 const_tree size, const_tree name)
9178{
9179 tree maxsize = max_object_size ();
9180 switch (error)
9181 {
9182 case cst_size_not_constant:
9183 if (name)
9184 error_at (loc, "size of array %qE is not a constant expression",
9185 name);
9186 else
9187 error_at (loc, "size of array is not a constant expression");
9188 break;
9189 case cst_size_negative:
9190 if (name)
9191 error_at (loc, "size %qE of array %qE is negative",
9192 size, name);
9193 else
9194 error_at (loc, "size %qE of array is negative",
9195 size);
9196 break;
9197 case cst_size_too_big:
9198 if (name)
9199 error_at (loc, "size %qE of array %qE exceeds maximum "
9200 "object size %qE", size, name, maxsize);
9201 else
9202 error_at (loc, "size %qE of array exceeds maximum "
9203 "object size %qE", size, maxsize);
9204 break;
9205 case cst_size_overflow:
9206 if (name)
9207 error_at (loc, "size of array %qE exceeds maximum "
9208 "object size %qE", name, maxsize);
9209 else
9210 error_at (loc, "size of array exceeds maximum "
9211 "object size %qE", maxsize);
9212 break;
9213 default:
9214 gcc_unreachable ();
9215 }
9216}
9217
9218/* Check if array size calculations overflow or if the array covers more
9219 than half of the address space. Return true if the size of the array
9220 is valid, false otherwise. T is either the type of the array or its
9221 size, and NAME is the name of the array, or null for unnamed arrays. */
9222
9223bool
9224valid_array_size_p (location_t loc, const_tree t, tree name, bool complain)
9225{
9226 if (t == error_mark_node)
9227 return true;
9228
9229 const_tree size;
9230 if (TYPE_P (t))
9231 {
9232 if (!COMPLETE_TYPE_P (t))
9233 return true;
9234 size = TYPE_SIZE_UNIT (t);
9235 }
9236 else
9237 size = t;
9238
9239 if (TREE_CODE (size) != INTEGER_CST)
9240 return true;
9241
9242 cst_size_error error;
9243 if (valid_constant_size_p (size, &error))
9244 return true;
9245
9246 if (!complain)
9247 return false;
9248
9249 if (TREE_CODE (TREE_TYPE (size)) == ENUMERAL_TYPE)
9250 /* Show the value of the enumerator rather than its name. */
9251 size = convert (ssizetype, const_cast<tree> (size));
9252
9253 invalid_array_size_error (loc, error, size, name);
9254 return false;
9255}
9256
9257/* Read SOURCE_DATE_EPOCH from environment to have a deterministic
9258 timestamp to replace embedded current dates to get reproducible
9259 results. Returns -1 if SOURCE_DATE_EPOCH is not defined. */
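
/* For example (illustrative), a reproducible build might invoke the
   compiler as

     SOURCE_DATE_EPOCH=1577836800 gcc -c foo.c

   so that __DATE__ and __TIME__ expand to the fixed timestamp
   2020-01-01 00:00:00 UTC instead of the current time.  */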
9260
9261time_t
9262cb_get_source_date_epoch (cpp_reader *pfile ATTRIBUTE_UNUSED)
9263{
9264 char *source_date_epoch;
9265 int64_t epoch;
9266 char *endptr;
9267
9268 source_date_epoch = getenv ("SOURCE_DATE_EPOCH");
9269 if (!source_date_epoch)
9270 return (time_t) -1;
9271
9272 errno = 0;
9273#if defined(INT64_T_IS_LONG)
9274 epoch = strtol (source_date_epoch, &endptr, 10);
9275#else
9276 epoch = strtoll (source_date_epoch, &endptr, 10);
9277#endif
9278 if (errno != 0 || endptr == source_date_epoch || *endptr != '\0'
9279 || epoch < 0 || epoch > MAX_SOURCE_DATE_EPOCH)
9280 {
9281 error_at (input_location, "environment variable %qs must "
9282 "expand to a non-negative integer less than or equal to %wd",
9283 "SOURCE_DATE_EPOCH", MAX_SOURCE_DATE_EPOCH);
9284 return (time_t) -1;
9285 }
9286
9287 return (time_t) epoch;
9288}
9289
9290/* Callback for libcpp for offering spelling suggestions for misspelled
9291 directives. GOAL is an unrecognized string; CANDIDATES is a
9292 NULL-terminated array of candidate strings. Return the closest
9293 match to GOAL within CANDIDATES, or NULL if none are good
9294 suggestions. */
9295
9296const char *
9297cb_get_suggestion (cpp_reader *, const char *goal,
9298 const char *const *candidates)
9299{
9300 best_match<const char *, const char *> bm (goal);
9301 while (*candidates)
9302 bm.consider (*candidates++);
9303 return bm.get_best_meaningful_candidate ();
9304}
9305
9306/* Return the lattice point which is the wider of the two FLT_EVAL_METHOD
9307 modes X, Y. This isn't just >, as the FLT_EVAL_METHOD values added
9308 by C TS 18661-3 for interchange types that are computed in their
9309 native precision are larger than the C11 values for evaluating in the
9310 precision of float/double/long double. If either mode is
9311 FLT_EVAL_METHOD_UNPREDICTABLE, return that. */
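
/* A few illustrative joins under this ordering:

     join (PROMOTE_TO_FLOAT16, PROMOTE_TO_FLOAT)  == PROMOTE_TO_FLOAT
     join (PROMOTE_TO_FLOAT,   PROMOTE_TO_DOUBLE) == PROMOTE_TO_DOUBLE
     join (anything,           UNPREDICTABLE)     == UNPREDICTABLE

   (FLT_EVAL_METHOD_ prefixes omitted for brevity).  */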
9312
9313enum flt_eval_method
9314excess_precision_mode_join (enum flt_eval_method x,
9315 enum flt_eval_method y)
9316{
9317 if (x == FLT_EVAL_METHOD_UNPREDICTABLE
9318 || y == FLT_EVAL_METHOD_UNPREDICTABLE)
9319 return FLT_EVAL_METHOD_UNPREDICTABLE;
9320
9321 /* GCC only supports one interchange type right now, _Float16. If
9322 we're evaluating _Float16 in 16-bit precision, then flt_eval_method
9323 will be FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16. */
9324 if (x == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
9325 return y;
9326 if (y == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
9327 return x;
9328
9329 /* Other values for flt_eval_method are directly comparable, and we want
9330 the maximum. */
9331 return MAX (x, y);
9332}
9333
9334/* Return the value that should be set for FLT_EVAL_METHOD in the
9335 context of ISO/IEC TS 18661-3.
9336
9337 This relates to the effective excess precision seen by the user,
9338 which is the join point of the precision the target requests for
9339 -fexcess-precision={standard,fast,16} and the implicit excess precision
9340 the target uses. */
9341
9342static enum flt_eval_method
9343c_ts18661_flt_eval_method (void)
9344{
9345 enum flt_eval_method implicit
9346 = targetm.c.excess_precision (EXCESS_PRECISION_TYPE_IMPLICIT);
9347
9348 enum excess_precision_type flag_type
9349 = (flag_excess_precision == EXCESS_PRECISION_STANDARD
9350 ? EXCESS_PRECISION_TYPE_STANDARD
9351 : (flag_excess_precision == EXCESS_PRECISION_FLOAT16
9352 ? EXCESS_PRECISION_TYPE_FLOAT16
9353 : EXCESS_PRECISION_TYPE_FAST));
9354
9355 enum flt_eval_method requested
9356 = targetm.c.excess_precision (flag_type);
9357
9358 return excess_precision_mode_join (implicit, requested);
9359}
9360
9361/* As c_ts18661_flt_eval_method, but clamps the expected values to
9362 those that were permitted by C11. That is to say, eliminates
9363 FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16. */
9364
9365static enum flt_eval_method
9366c_c11_flt_eval_method (void)
9367{
9368 return excess_precision_mode_join (c_ts18661_flt_eval_method (),
9369 FLT_EVAL_METHOD_PROMOTE_TO_FLOAT);
9370}
9371
9372/* Return the value that should be set for FLT_EVAL_METHOD.
9373 MAYBE_C11_ONLY_P is TRUE if we should check
9374 FLAG_PERMITTED_EVAL_METHODS as to whether we should limit the possible
9375 values we can return to those from C99/C11, and FALSE otherwise.
9376 See the comments on c_ts18661_flt_eval_method for what value we choose
9377 to set here. */
9378
9379int
9380c_flt_eval_method (bool maybe_c11_only_p)
9381{
9382 if (maybe_c11_only_p
9383 && flag_permitted_flt_eval_methods
9384 == PERMITTED_FLT_EVAL_METHODS_C11)
9385 return c_c11_flt_eval_method ();
9386 else
9387 return c_ts18661_flt_eval_method ();
9388}
9389
9390/* An enum for get_missing_token_insertion_kind for describing the best
9391 place to insert a missing token, if there is one. */
9392
9393enum missing_token_insertion_kind
9394{
9395 MTIK_IMPOSSIBLE,
9396 MTIK_INSERT_BEFORE_NEXT,
9397 MTIK_INSERT_AFTER_PREV
9398};
9399
9400/* Given a missing token of TYPE, determine if it is reasonable to
9401 emit a fix-it hint suggesting the insertion of the token, and,
9402 if so, where the token should be inserted relative to other tokens.
9403
9404 It only makes sense to do this for values of TYPE that are symbols.
9405
9406 Some symbols should go before the next token, e.g. in:
9407 if flag)
9408 we want to insert the missing '(' immediately before "flag",
9409 giving:
9410 if (flag)
9411 rather than:
9412 if( flag)
9413 These use MTIK_INSERT_BEFORE_NEXT.
9414
9415 Other symbols should go after the previous token, e.g. in:
9416 if (flag
9417 do_something ();
9418 we want to insert the missing ')' immediately after the "flag",
9419 giving:
9420 if (flag)
9421 do_something ();
9422 rather than:
9423 if (flag
9424 )do_something ();
9425 These use MTIK_INSERT_AFTER_PREV. */
9426
9427static enum missing_token_insertion_kind
9428get_missing_token_insertion_kind (enum cpp_ttype type)
9429{
9430 switch (type)
9431 {
9432 /* Insert missing "opening" brackets immediately
9433 before the next token. */
9434 case CPP_OPEN_SQUARE:
9435 case CPP_OPEN_PAREN:
9436 return MTIK_INSERT_BEFORE_NEXT;
9437
9438 /* Insert other missing symbols immediately after
9439 the previous token. */
9440 case CPP_CLOSE_PAREN:
9441 case CPP_CLOSE_SQUARE:
9442 case CPP_SEMICOLON:
9443 case CPP_COMMA:
9444 case CPP_COLON:
9445 return MTIK_INSERT_AFTER_PREV;
9446
9447 /* Other kinds of token don't get fix-it hints. */
9448 default:
9449 return MTIK_IMPOSSIBLE;
9450 }
9451}
9452
9453/* Given RICHLOC, a location for a diagnostic describing a missing token
9454 of kind TOKEN_TYPE, potentially add a fix-it hint suggesting the
9455 insertion of the token.
9456
9457 The location of the attempted fix-it hint depends on TOKEN_TYPE:
9458 it will either be:
9459 (a) immediately after PREV_TOKEN_LOC, or
9460
9461 (b) immediately before the primary location within RICHLOC (taken to
9462 be that of the token following where the token was expected).
9463
9464 If we manage to add a fix-it hint, then the location of the
9465 fix-it hint is likely to be more useful as the primary location
9466 of the diagnostic than that of the following token, so we swap
9467 these locations.
9468
9469 For example, given this bogus code:
9470 123456789012345678901234567890
9471 1 | int missing_semicolon (void)
9472 2 | {
9473 3 | return 42
9474 4 | }
9475
9476 we will emit:
9477
9478 "expected ';' before '}'"
9479
9480 RICHLOC's primary location is at the closing brace, so before "swapping"
9481 we would emit the error at line 4 column 1:
9482
9483 123456789012345678901234567890
9484 3 | return 42 |< fix-it hint emitted for this line
9485 | ; |
9486 4 | } |< "expected ';' before '}'" emitted at this line
9487 | ^ |
9488
9489 It's more useful for the location of the diagnostic to be at the
9490 fix-it hint, so we swap the locations, so the primary location
9491 is at the fix-it hint, with the old primary location inserted
9492 as a secondary location, giving this, with the error at line 3
9493 column 12:
9494
9495 123456789012345678901234567890
9496 3 | return 42 |< "expected ';' before '}'" emitted at this line,
9497 | ^ | with fix-it hint
9498 4 | ; |
9499 | } |< secondary range emitted here
9500 | ~ |. */
9501
9502void
9503maybe_suggest_missing_token_insertion (rich_location *richloc,
9504 enum cpp_ttype token_type,
9505 location_t prev_token_loc)
9506{
9507 gcc_assert (richloc);
9508
9509 enum missing_token_insertion_kind mtik
9510 = get_missing_token_insertion_kind (token_type);
9511
9512 switch (mtik)
9513 {
9514 default:
9515 gcc_unreachable ();
9516 break;
9517
9518 case MTIK_IMPOSSIBLE:
9519 return;
9520
9521 case MTIK_INSERT_BEFORE_NEXT:
9522 /* Attempt to add the fix-it hint before the primary location
9523 of RICHLOC. */
9524 richloc->add_fixit_insert_before (cpp_type2name (token_type, 0));
9525 break;
9526
9527 case MTIK_INSERT_AFTER_PREV:
9528 /* Attempt to add the fix-it hint after PREV_TOKEN_LOC. */
9529 richloc->add_fixit_insert_after (prev_token_loc,
9530 cpp_type2name (token_type, 0));
9531 break;
9532 }
9533
9534 /* If we were successful, use the fix-it hint's location as the
9535 primary location within RICHLOC, adding the old primary location
9536 back as a secondary location. */
9537 if (!richloc->seen_impossible_fixit_p ())
9538 {
9539 fixit_hint *hint = richloc->get_last_fixit_hint ();
9540 location_t hint_loc = hint->get_start_loc ();
9541 location_t old_loc = richloc->get_loc ();
9542
9543 richloc->set_range (0, hint_loc, SHOW_RANGE_WITH_CARET);
9544 richloc->add_range (old_loc);
9545 }
9546}
9547
9548#if CHECKING_P
9549
9550namespace selftest {
9551
9552/* Verify that fold_for_warn on error_mark_node is safe. */
9553
9554static void
9555test_fold_for_warn ()
9556{
9557 ASSERT_EQ (error_mark_node, fold_for_warn (error_mark_node));
9558}
9559
9560/* Run all of the selftests within this file. */
9561
9562static void
9563c_common_cc_tests ()
9564{
9565 test_fold_for_warn ();
9566}
9567
9568/* Run all of the tests within c-family. */
9569
9570void
9571c_family_tests (void)
9572{
9573 c_common_cc_tests ();
9574 c_format_cc_tests ();
9575 c_indentation_cc_tests ();
9576 c_pretty_print_cc_tests ();
9577 c_spellcheck_cc_tests ();
9578 c_diagnostic_cc_tests ();
9579 c_opt_problem_cc_tests ();
9580}
9581
9582} // namespace selftest
9583
9584#endif /* #if CHECKING_P */
9585
9586/* Attempt to locate a suitable location within FILE for a
9587 #include directive to be inserted before.
9588 LOC is the location of the relevant diagnostic.
9589
9590 Attempt to return the location within FILE immediately
9591 after the last #include within that file, or the start of
9592 that file if it has no #include directives.
9593
9594 Return UNKNOWN_LOCATION if no suitable location is found,
9595 or if an error occurs. */
9596
9597static location_t
9598try_to_locate_new_include_insertion_point (const char *file, location_t loc)
9599{
9600 /* Locate the last ordinary map within FILE that ended with a #include. */
9601 const line_map_ordinary *last_include_ord_map = NULL;
9602
9603 /* ...and the next ordinary map within FILE after that one. */
9604 const line_map_ordinary *last_ord_map_after_include = NULL;
9605
9606 /* ...and the first ordinary map within FILE. */
9607 const line_map_ordinary *first_ord_map_in_file = NULL;
9608
9609 /* Get ordinary map containing LOC (or its expansion). */
9610 const line_map_ordinary *ord_map_for_loc = NULL;
9611 linemap_resolve_location (line_table, loc, LRK_MACRO_EXPANSION_POINT,
9612 &ord_map_for_loc);
9613 gcc_assert (ord_map_for_loc);
9614
9615 for (unsigned int i = 0; i < LINEMAPS_ORDINARY_USED (line_table); i++)
9616 {
9617 const line_map_ordinary *ord_map
9618 = LINEMAPS_ORDINARY_MAP_AT (line_table, i);
9619
9620 if (const line_map_ordinary *from
9621 = linemap_included_from_linemap (line_table, ord_map))
9622 /* We cannot use pointer equality, because with preprocessed
9623 input all filename strings are unique. */
9624 if (0 == strcmp (from->to_file, file))
9625 {
9626 last_include_ord_map = from;
9627 last_ord_map_after_include = NULL;
9628 }
9629
9630 /* Likewise, use strcmp, and reject any line-zero introductory
9631 map. */
9632 if (ord_map->to_line && 0 == strcmp (ord_map->to_file, file))
9633 {
9634 if (!first_ord_map_in_file)
9635 first_ord_map_in_file = ord_map;
9636 if (last_include_ord_map && !last_ord_map_after_include)
9637 last_ord_map_after_include = ord_map;
9638 }
9639
9640 /* Stop searching when reaching the ord_map containing LOC,
9641 as it makes no sense to provide fix-it hints that appear
9642 after the diagnostic in question. */
9643 if (ord_map == ord_map_for_loc)
9644 break;
9645 }
9646
9647 /* Determine where to insert the #include. */
9648 const line_map_ordinary *ord_map_for_insertion;
9649
9650 /* We want the next ordmap in the file after the last one that's a
9651 #include, but failing that, the start of the file. */
9652 if (last_ord_map_after_include)
9653 ord_map_for_insertion = last_ord_map_after_include;
9654 else
9655 ord_map_for_insertion = first_ord_map_in_file;
9656
9657 if (!ord_map_for_insertion)
9658 return UNKNOWN_LOCATION;
9659
9660 /* The "start_location" is column 0, meaning "the whole line".
9661 rich_location and edit_context can't cope with this, so use
9662 column 1 instead. */
9663 location_t col_0 = ord_map_for_insertion->start_location;
9664 return linemap_position_for_loc_and_offset (line_table, col_0, 1);
9665}
9666
9667/* A map from filenames to sets of headers added to them, for
9668 ensuring idempotency within maybe_add_include_fixit. */
9669
9670/* The values within the map. We need string comparison as there's
9671 no guarantee that two different diagnostics that are recommending
9672 adding e.g. "<stdio.h>" are using the same buffer. */
9673
9674typedef hash_set <const char *, false, nofree_string_hash> per_file_includes_t;
9675
9676/* The map itself. We don't need string comparison for the filename keys,
9677 as they come from libcpp. */
9678
9679typedef hash_map <const char *, per_file_includes_t *> added_includes_t;
9680static added_includes_t *added_includes;
9681
9682/* Attempt to add a fix-it hint to RICHLOC, adding "#include HEADER\n"
9683 in a suitable location within the file of RICHLOC's primary
9684 location.
9685
9686 This function is idempotent: a header will be added at most once to
9687 any given file.
9688
9689 If OVERRIDE_LOCATION is true, then if a fix-it is added and will be
9690 printed, then RICHLOC's primary location will be replaced by that of
9691 the fix-it hint (for use by "inform" notes where the location of the
9692 issue has already been reported). */
9693
9694void
9695maybe_add_include_fixit (rich_location *richloc, const char *header,
9696 bool override_location)
9697{
9698 location_t loc = richloc->get_loc ();
9699 const char *file = LOCATION_FILE (loc);
9700 if (!file)
9701 return;
9702
9703 /* Idempotency: don't add the same header more than once to a given file. */
9704 if (!added_includes)
9705 added_includes = new added_includes_t ();
9706 per_file_includes_t *&set = added_includes->get_or_insert (file);
9707 if (set)
9708 if (set->contains (header))
9709 /* ...then we've already added HEADER to that file. */
9710 return;
9711 if (!set)
9712 set = new per_file_includes_t ();
9713 set->add (header);
9714
9715 /* Attempt to locate a suitable place for the new directive. */
9716 location_t include_insert_loc
9717 = try_to_locate_new_include_insertion_point (file, loc);
9718 if (include_insert_loc == UNKNOWN_LOCATION)
9719 return;
9720
9721 char *text = xasprintf ("#include %s\n", header);
9722 richloc->add_fixit_insert_before (include_insert_loc, text);
9723 free (text);
9724
9725 if (override_location && global_dc->m_source_printing.enabled)
9726 {
9727 /* Replace the primary location with that of the insertion point for the
9728 fix-it hint.
9729
9730 We use SHOW_LINES_WITHOUT_RANGE so that we don't meaninglessly print a
9731 caret for the insertion point (or colorize it).
9732
9733 Hence we print e.g.:
9734
9735 ../x86_64-pc-linux-gnu/libstdc++-v3/include/vector:74:1: note: msg 2
9736 73 | # include <debug/vector>
9737 +++ |+#include <vector>
9738 74 | #endif
9739
9740 rather than:
9741
9742 ../x86_64-pc-linux-gnu/libstdc++-v3/include/vector:74:1: note: msg 2
9743 73 | # include <debug/vector>
9744 +++ |+#include <vector>
9745 74 | #endif
9746 | ^
9747
9748 avoiding the caret on the first column of line 74. */
9749 richloc->set_range (0, include_insert_loc, SHOW_LINES_WITHOUT_RANGE);
9750 }
9751}
9752
9753/* Attempt to convert a braced array initializer list CTOR for array
9754 TYPE into a STRING_CST for convenience and efficiency. Return
9755 the converted string on success or the original ctor on failure. */
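
/* For example (illustrative), the initializer

     const char a[] = { 'h', 'i', 0 };

   is converted into the STRING_CST "hi" (with its terminating nul),
   and

     const char b[8] = { [7] = 0 };

   becomes an eight-character string of nuls, while initializers with
   non-constant or out-of-order elements are returned unchanged.  */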
9756
9757static tree
9758braced_list_to_string (tree type, tree ctor, bool member)
9759{
9760 /* Ignore non-members with unknown size like arrays with unspecified
9761 bound. */
9762 tree typesize = TYPE_SIZE_UNIT (type);
9763 if (!member && !tree_fits_uhwi_p (typesize))
9764 return ctor;
9765
9766 /* If the target char size differs from the host char size, we'd risk
9767 losing data and getting object sizes wrong by converting to
9768 host chars. */
9769 if (TYPE_PRECISION (char_type_node) != CHAR_BIT)
9770 return ctor;
9771
9772 /* STRING_CST doesn't support wide characters. */
9773 gcc_checking_assert (TYPE_PRECISION (TREE_TYPE (type)) == CHAR_BIT);
9774
9775 /* If the array has an explicit bound, use it to constrain the size
9776 of the string. If it doesn't, be sure to create a string that's
9777 as long as implied by the index of the last zero specified via
9778 a designator, as in:
9779 const char a[] = { [7] = 0 }; */
9780 unsigned HOST_WIDE_INT maxelts;
9781 if (typesize)
9782 {
9783 maxelts = tree_to_uhwi (typesize);
9784 maxelts /= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (type)));
9785 }
9786 else
9787 maxelts = HOST_WIDE_INT_M1U;
9788
9789 /* Avoid converting initializers for zero-length arrays (but do
9790 create them for flexible array members). */
9791 if (!maxelts)
9792 return ctor;
9793
9794 unsigned HOST_WIDE_INT nelts = CONSTRUCTOR_NELTS (ctor);
9795
9796 auto_vec<char> str;
9797 str.reserve (nelts + 1);
9798
9799 unsigned HOST_WIDE_INT i;
9800 tree index, value;
9801
9802 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), i, index, value)
9803 {
9804 unsigned HOST_WIDE_INT idx = i;
9805 if (index)
9806 {
9807 if (!tree_fits_uhwi_p (index))
9808 return ctor;
9809 idx = tree_to_uhwi (index);
9810 }
9811
9812 /* auto_vec is limited to UINT_MAX elements. */
9813 if (idx > UINT_MAX)
9814 return ctor;
9815
9816 /* Avoid non-constant initializers. */
9817 if (!tree_fits_shwi_p (value))
9818 return ctor;
9819
9820 /* Skip over embedded nuls except the last one (initializer
9821 elements are in ascending order of indices). */
9822 HOST_WIDE_INT val = tree_to_shwi (value);
9823 if (!val && i + 1 < nelts)
9824 continue;
9825
9826 if (idx < str.length())
9827 return ctor;
9828
9829 /* Bail if the CTOR has a block of more than 256 embedded nuls
9830 due to implicitly initialized elements. */
9831 unsigned nchars = (idx - str.length ()) + 1;
9832 if (nchars > 256)
9833 return ctor;
9834
9835 if (nchars > 1)
9836 {
9837 str.reserve (nelems: idx);
9838 str.quick_grow_cleared (len: idx);
9839 }
9840
9841 if (idx >= maxelts)
9842 return ctor;
9843
9844 str.safe_insert (ix: idx, obj: val);
9845 }
9846
9847 /* Append a nul string termination. */
9848 if (maxelts != HOST_WIDE_INT_M1U && str.length () < maxelts)
9849 str.safe_push (obj: 0);
9850
9851 /* Build a STRING_CST with the same type as the array. */
9852 tree res = build_string (str.length (), str.begin ());
9853 TREE_TYPE (res) = type;
9854 return res;
9855}
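
/* A minimal illustration (hypothetical user code, assuming the usual
   8-bit char target) of what the conversion above achieves: an
   initializer such as

     const char a[4] = { 'g', 'c', 'c', 0 };

   becomes the equivalent of

     const char a[4] = "gcc";

   and, as the designator comment above notes, the resulting string is as
   long as the last designated index implies.  Only narrow character arrays
   are handled here (see the checking assert); non-constant elements or runs
   of more than 256 implicit nuls keep the original CONSTRUCTOR.  */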

/* Implementation of the two-argument braced_lists_to_strings, with
   the same arguments plus MEMBER which is set for struct members
   to allow initializers for flexible member arrays.  */

static tree
braced_lists_to_strings (tree type, tree ctor, bool member)
{
  if (TREE_CODE (ctor) != CONSTRUCTOR)
    return ctor;

  tree_code code = TREE_CODE (type);

  tree ttp;
  if (code == ARRAY_TYPE)
    ttp = TREE_TYPE (type);
  else if (code == RECORD_TYPE)
    {
      ttp = TREE_TYPE (ctor);
      if (TREE_CODE (ttp) == ARRAY_TYPE)
	{
	  type = ttp;
	  ttp = TREE_TYPE (ttp);
	}
    }
  else
    return ctor;

  if ((TREE_CODE (ttp) == ARRAY_TYPE || TREE_CODE (ttp) == INTEGER_TYPE)
      && TYPE_STRING_FLAG (ttp))
    return braced_list_to_string (type, ctor, member);

  code = TREE_CODE (ttp);
  if (code == ARRAY_TYPE || RECORD_OR_UNION_TYPE_P (ttp))
    {
      bool rec = RECORD_OR_UNION_TYPE_P (ttp);

      /* Handle array of arrays or struct member initializers.  */
      tree val;
      unsigned HOST_WIDE_INT idx;
      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), idx, val)
	{
	  val = braced_lists_to_strings (ttp, val, rec);
	  CONSTRUCTOR_ELT (ctor, idx)->value = val;
	}
    }

  return ctor;
}
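
/* A short illustration (hypothetical user code) of the recursion above:
   for a nested initializer such as

     struct S { int n; char name[4]; };
     const struct S tab[2] = { { 1, { 'f', 'o', 'o', 0 } },
			       { 2, { 'b', 'a', 'r', 0 } } };

   each inner character-array CONSTRUCTOR is replaced by a STRING_CST
   ("foo", "bar") while the outer CONSTRUCTORs for the array and the
   struct elements are left in place.  */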

/* Attempt to convert a CTOR containing braced array initializer lists
   for array TYPE into one containing STRING_CSTs, for convenience and
   efficiency.  Recurse for arrays of arrays and member initializers.
   Return the converted CTOR or STRING_CST on success or the original
   CTOR otherwise.  */

tree
braced_lists_to_strings (tree type, tree ctor)
{
  return braced_lists_to_strings (type, ctor, false);
}
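
/* Hedged usage sketch (the call site shown is an assumption, not quoted
   from this file): the C and C++ front ends are expected to invoke the
   entry point above on a digested initializer, roughly as in

     init = braced_lists_to_strings (TREE_TYPE (decl), init);

   so that later consumers see compact STRING_CSTs rather than
   element-wise CONSTRUCTORs.  */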


/* Emit debug for functions before finalizing early debug.  */

void
c_common_finalize_early_debug (void)
{
  /* Emit early debug for reachable functions, and by consequence,
     locally scoped symbols.  Also emit debug for extern declared
     functions that are still reachable at this point.  */
  struct cgraph_node *cnode;
  FOR_EACH_FUNCTION (cnode)
    if (!cnode->alias && !cnode->thunk
	&& (cnode->has_gimple_body_p ()
	    || !DECL_IS_UNDECLARED_BUILTIN (cnode->decl)))
      (*debug_hooks->early_global_decl) (cnode->decl);
}
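
/* Illustrative example (user code invented for this note): given

     extern int imported (int);
     int use (int i) { return imported (i); }

   both symbols end up in the call graph, and the loop above emits early
   debug for use (it has a GIMPLE body) as well as for the bodiless
   imported declaration, since it is not an undeclared built-in.  */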

/* Get the strict flex array LEVEL for ARRAY_FIELD, based on the value of
   the strict_flex_array attribute and of -fstrict-flex-arrays
   (flag_strict_flex_arrays).  */
unsigned int
c_strict_flex_array_level_of (tree array_field)
{
  gcc_assert (TREE_CODE (array_field) == FIELD_DECL);
  unsigned int strict_flex_array_level = flag_strict_flex_arrays;

  tree attr_strict_flex_array
    = lookup_attribute ("strict_flex_array", DECL_ATTRIBUTES (array_field));
  /* If there is a strict_flex_array attribute attached to the field,
     override flag_strict_flex_arrays.  */
  if (attr_strict_flex_array)
    {
      /* Get the value of the level first from the attribute.  */
      unsigned HOST_WIDE_INT attr_strict_flex_array_level = 0;
      gcc_assert (TREE_VALUE (attr_strict_flex_array) != NULL_TREE);
      attr_strict_flex_array = TREE_VALUE (attr_strict_flex_array);
      gcc_assert (TREE_VALUE (attr_strict_flex_array) != NULL_TREE);
      attr_strict_flex_array = TREE_VALUE (attr_strict_flex_array);
      gcc_assert (tree_fits_uhwi_p (attr_strict_flex_array));
      attr_strict_flex_array_level = tree_to_uhwi (attr_strict_flex_array);

      /* The attribute has higher priority than flag_strict_flex_arrays.  */
      strict_flex_array_level = attr_strict_flex_array_level;
    }
  return strict_flex_array_level;
}
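
/* A short illustration (user code invented for this note) of the level
   computed above: with

     struct S
     {
       int n;
       int tail[1] __attribute__ ((strict_flex_array (3)));
     };

   the attribute overrides any -fstrict-flex-arrays= setting for this
   field.  Per the GCC documentation, level 0 treats any trailing array as
   a flexible array member, level 1 only [], [0] and [1], level 2 only []
   and [0], and level 3 only [].  The consumers of the level live outside
   this file.  */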

/* Map from identifiers to booleans.  Value is true for features, and
   false for extensions.  Used to implement __has_{feature,extension}.  */

using feature_map_t = hash_map <tree, bool>;
static feature_map_t *feature_map;

/* Register a feature for __has_{feature,extension}.  FEATURE_P is true
   if the feature identified by NAME is a feature (as opposed to an
   extension).  */

void
c_common_register_feature (const char *name, bool feature_p)
{
  bool dup = feature_map->put (get_identifier (name), feature_p);
  gcc_checking_assert (!dup);
}
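
/* Hypothetical example (the feature name below is invented): a front end's
   registration hook would call

     c_common_register_feature ("my_frontend_feature", true);

   once per feature; registering the same identifier twice trips the
   checking assert above.  */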

/* Lazily initialize hash table for __has_{feature,extension},
   dispatching to the appropriate front end to register language-specific
   features.  */

static void
init_has_feature ()
{
  gcc_checking_assert (!feature_map);
  feature_map = new feature_map_t;

  for (unsigned i = 0; i < ARRAY_SIZE (has_feature_table); i++)
    {
      const hf_feature_info *info = has_feature_table + i;

      if ((info->flags & HF_FLAG_SANITIZE) && !(flag_sanitize & info->mask))
	continue;

      const bool feature_p = !(info->flags & HF_FLAG_EXT);
      c_common_register_feature (info->ident, feature_p);
    }

  /* Register language-specific features.  */
  c_family_register_lang_features ();
}

/* If STRICT_P is true, evaluate __has_feature (IDENT).
   Otherwise, evaluate __has_extension (IDENT).  */

bool
has_feature_p (const char *ident, bool strict_p)
{
  if (!feature_map)
    init_has_feature ();

  tree name = canonicalize_attr_name (get_identifier (ident));
  bool *feat_p = feature_map->get (name);
  if (!feat_p)
    return false;

  return !strict_p || *feat_p;
}
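
/* Illustration of the resulting user-visible behaviour (the feature name is
   only an example; sanitizer features are registered above only when the
   corresponding -fsanitize= option is enabled):

     #if defined __has_feature
     # if __has_feature (address_sanitizer)
     #  define UNDER_ASAN 1
     # endif
     #endif

   __has_extension (X) additionally returns true when X is registered as an
   extension rather than a feature, matching the !strict_p case above.  */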

/* This is the slow path of c-common.h's c_hardbool_type_attr.  */

tree
c_hardbool_type_attr_1 (tree type, tree *false_value, tree *true_value)
{
  tree attr = lookup_attribute ("hardbool", TYPE_ATTRIBUTES (type));
  if (!attr)
    return attr;

  if (false_value)
    *false_value = TREE_VALUE (TYPE_VALUES (type));

  if (true_value)
    *true_value = TREE_VALUE (TREE_CHAIN (TYPE_VALUES (type)));

  return attr;
}
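
/* Illustrative example (attribute arguments chosen arbitrarily): for a
   hardened boolean type such as

     typedef char __attribute__ ((hardbool (0x5a, 0xa5))) hbool;

   the helper above returns the hardbool attribute and, when the out
   parameters are non-null, stores the trees for the designated false
   (0x5a) and true (0xa5) representations taken from TYPE_VALUES.  */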

#include "gt-c-family-c-common.h"
