/* Subroutines shared by all languages that are variants of C.
   Copyright (C) 1992-2025 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#define GCC_C_COMMON_C

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "function.h"
#include "tree.h"
#include "memmodel.h"
#include "c-common.h"
#include "gimple-expr.h"
#include "tm_p.h"
#include "stringpool.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "intl.h"
#include "stor-layout.h"
#include "calls.h"
#include "attribs.h"
#include "varasm.h"
#include "trans-mem.h"
#include "c-objc.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "tree-inline.h"
#include "toplev.h"
#include "tree-iterator.h"
#include "opts.h"
#include "gimplify.h"
#include "substring-locations.h"
#include "spellcheck.h"
#include "c-spellcheck.h"
#include "selftest.h"
#include "debug.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"
#include "tree-pretty-print-markup.h"
#include "gcc-rich-location.h"
#include "gcc-urlifier.h"

cpp_reader *parse_in;		/* Declared in c-pragma.h.  */

/* Mode used to build pointers (VOIDmode means ptr_mode).  */

machine_mode c_default_pointer_mode = VOIDmode;

/* The following symbols are subsumed in the c_global_trees array, and
   listed here individually for documentation purposes.

   INTEGER_TYPE and REAL_TYPE nodes for the standard data types.

        tree short_integer_type_node;
        tree long_integer_type_node;
        tree long_long_integer_type_node;

        tree short_unsigned_type_node;
        tree long_unsigned_type_node;
        tree long_long_unsigned_type_node;

        tree truthvalue_type_node;
        tree truthvalue_false_node;
        tree truthvalue_true_node;

        tree ptrdiff_type_node;

        tree unsigned_char_type_node;
        tree signed_char_type_node;
        tree wchar_type_node;

        tree char8_type_node;
        tree char16_type_node;
        tree char32_type_node;

        tree float_type_node;
        tree double_type_node;
        tree long_double_type_node;

        tree complex_integer_type_node;
        tree complex_float_type_node;
        tree complex_double_type_node;
        tree complex_long_double_type_node;

        tree dfloat32_type_node;
        tree dfloat64_type_node;
        tree dfloat128_type_node;
        tree dfloat64x_type_node;

        tree intQI_type_node;
        tree intHI_type_node;
        tree intSI_type_node;
        tree intDI_type_node;
        tree intTI_type_node;

        tree unsigned_intQI_type_node;
        tree unsigned_intHI_type_node;
        tree unsigned_intSI_type_node;
        tree unsigned_intDI_type_node;
        tree unsigned_intTI_type_node;

        tree widest_integer_literal_type_node;
        tree widest_unsigned_literal_type_node;

   Nodes for types `void *' and `const void *'.

        tree ptr_type_node, const_ptr_type_node;

   Nodes for types `char *' and `const char *'.

        tree string_type_node, const_string_type_node;

   Type `char[SOMENUMBER]'.
   Used when an array of char is needed and the size is irrelevant.

        tree char_array_type_node;

   Type `wchar_t[SOMENUMBER]' or something like it.
   Used when a wide string literal is created.

        tree wchar_array_type_node;

   Type `char8_t[SOMENUMBER]' or something like it.
   Used when a UTF-8 string literal is created.

        tree char8_array_type_node;

   Type `char16_t[SOMENUMBER]' or something like it.
   Used when a UTF-16 string literal is created.

        tree char16_array_type_node;

   Type `char32_t[SOMENUMBER]' or something like it.
   Used when a UTF-32 string literal is created.

        tree char32_array_type_node;

   Type `int ()' -- used for implicit declaration of functions.

        tree default_function_type;

   A VOID_TYPE node, packaged in a TREE_LIST.

        tree void_list_node;

   The lazily created VAR_DECLs for __FUNCTION__, __PRETTY_FUNCTION__,
   and __func__.  (C doesn't generate __FUNCTION__ and __PRETTY_FUNCTION__
   VAR_DECLS, but C++ does.)

        tree function_name_decl_node;
        tree pretty_function_name_decl_node;
        tree c99_function_name_decl_node;

   Stack of nested function name VAR_DECLs.

        tree saved_function_name_decls;

*/

tree c_global_trees[CTI_MAX];

/* Switches common to the C front ends.  */

/* Nonzero means don't output line number information.  */

char flag_no_line_commands;

/* Nonzero causes -E output not to be done, but directives such as
   #define that have side effects are still obeyed.  */

char flag_no_output;

/* Nonzero means dump macros in some fashion.  */

char flag_dump_macros;

/* Nonzero means pass #include lines through to the output.  */

char flag_dump_includes;

/* Nonzero means process PCH files while preprocessing.  */

bool flag_pch_preprocess;

/* The file name to which we should write a precompiled header, or
   NULL if no header will be written in this compile.  */

const char *pch_file;

/* Nonzero if an ISO standard was selected.  It rejects macros in the
   user's namespace.  */
int flag_iso;

/* C/ObjC language option variables.  */


/* Nonzero means allow type mismatches in conditional expressions;
   just make their values `void'.  */

int flag_cond_mismatch;

/* Nonzero means enable C89 Amendment 1 features.  */

int flag_isoc94;

/* Nonzero means use the ISO C99 (or later) dialect of C.  */

int flag_isoc99;

/* Nonzero means use the ISO C11 (or later) dialect of C.  */

int flag_isoc11;

/* Nonzero means use the ISO C23 (or later) dialect of C.  */

int flag_isoc23;

/* Nonzero means use the ISO C2Y (or later) dialect of C.  */

int flag_isoc2y;

/* Nonzero means that we have builtin functions, and main is an int.  */

int flag_hosted = 1;


/* ObjC language option variables.  */


/* Tells the compiler that this is a special run.  Do not perform any
   compiling, instead we are to test some platform dependent features
   and output a C header file with appropriate definitions.  */

int print_struct_values;

/* Tells the compiler what is the constant string class for ObjC.  */

const char *constant_string_class_name;


/* C++ language option variables.  */

/* The reference version of the ABI for -Wabi.  */

int warn_abi_version = -1;

/* The C++ dialect being used.  Default set in c_common_post_options.  */

enum cxx_dialect cxx_dialect = cxx_unset;

/* Maximum template instantiation depth.  This limit exists to limit the
   time it takes to notice excessively recursive template instantiations.

   The default is lower than the 1024 recommended by the C++0x standard
   because G++ runs out of stack before 1024 with highly recursive template
   argument deduction substitution (g++.dg/cpp0x/enum11.C).  */

int max_tinst_depth = 900;

/* The elements of `ridpointers' are identifier nodes for the reserved
   type names and storage classes.  It is indexed by a RID_... value.  */
tree *ridpointers;

tree (*make_fname_decl) (location_t, tree, int);

/* Nonzero means don't warn about problems that occur when the code is
   executed.  */
int c_inhibit_evaluation_warnings;

/* Whether we are building a boolean conversion inside
   convert_for_assignment, or some other late binary operation.  If
   build_binary_op is called for C (from code shared by C and C++) in
   this case, then the operands have already been folded and the
   result will not be folded again, so C_MAYBE_CONST_EXPR should not
   be generated.  */
bool in_late_binary_op;

/* Depending on which phase of processing we are in, we may need
   to prefer input_location to libcpp's locations.  (Specifically,
   after the C++ lexer is done lexing tokens, but prior to calling
   cpp_finish (), we need to do so.)  */
bool override_libcpp_locations;

/* Information about how a function name is generated.  */
struct fname_var_t
{
  tree *const decl;	/* pointer to the VAR_DECL.  */
  const unsigned rid;	/* RID number for the identifier.  */
  const int pretty;	/* How pretty is it?  */
};

/* The three ways of getting the name of the current function.  */

const struct fname_var_t fname_vars[] =
{
  /* C99 compliant __func__, must be first.  */
  {&c99_function_name_decl_node, RID_C99_FUNCTION_NAME, 0},
  /* GCC __FUNCTION__ compliant.  */
  {&function_name_decl_node, RID_FUNCTION_NAME, 0},
  /* GCC __PRETTY_FUNCTION__ compliant.  */
  {&pretty_function_name_decl_node, RID_PRETTY_FUNCTION_NAME, 1},
  {NULL, 0, 0},
};
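
/* Illustrative sketch, not part of the implementation: given

     void foo (void) { puts (__func__); puts (__PRETTY_FUNCTION__); }

   __func__ and __FUNCTION__ yield the plain name "foo", while in C++
   __PRETTY_FUNCTION__ (the entry with pretty == 1 above) yields the
   full signature, e.g. "void foo()".  */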

/* Flags to restrict availability of generic features that
   are known to __has_{feature,extension}.  */

enum
{
  HF_FLAG_NONE = 0,
  HF_FLAG_EXT = 1,	/* Available only as an extension.  */
  HF_FLAG_SANITIZE = 2,	/* Availability depends on sanitizer flags.  */
};

/* Info for generic features which can be queried through
   __has_{feature,extension}.  */

struct hf_feature_info
{
  const char *ident;
  unsigned flags;
  unsigned mask;
};

/* Table of generic features which can be queried through
   __has_{feature,extension}.  */

static constexpr hf_feature_info has_feature_table[] =
{
  { "address_sanitizer", HF_FLAG_SANITIZE, SANITIZE_ADDRESS },
  { "thread_sanitizer", HF_FLAG_SANITIZE, SANITIZE_THREAD },
  { "leak_sanitizer", HF_FLAG_SANITIZE, SANITIZE_LEAK },
  { "hwaddress_sanitizer", HF_FLAG_SANITIZE, SANITIZE_HWADDRESS },
  { "undefined_behavior_sanitizer", HF_FLAG_SANITIZE, SANITIZE_UNDEFINED },
  { "attribute_deprecated_with_message", HF_FLAG_NONE, 0 },
  { "attribute_unavailable_with_message", HF_FLAG_NONE, 0 },
  { "enumerator_attributes", HF_FLAG_NONE, 0 },
  { "tls", HF_FLAG_NONE, 0 },
  { "gnu_asm_goto_with_outputs", HF_FLAG_EXT, 0 },
  { "gnu_asm_goto_with_outputs_full", HF_FLAG_EXT, 0 }
};
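
/* Illustrative sketch, not part of the implementation: user code
   typically tests these entries via the preprocessor, e.g.

     #if defined(__has_feature) && __has_feature(address_sanitizer)
     ...
     #endif

   which holds only when -fsanitize=address is in effect, per the
   HF_FLAG_SANITIZE entry above.  */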

/* Global visibility options.  */
struct visibility_flags visibility_options;

static tree check_case_value (location_t, tree);


static void check_nonnull_arg (void *, tree, unsigned HOST_WIDE_INT);
static bool nonnull_check_p (tree, unsigned HOST_WIDE_INT);

/* Reserved words.  The third field is a mask: keywords are disabled
   if they match the mask.

   Masks for languages:
   C --std=c89: D_C99 | D_C23 | D_CXXONLY | D_OBJC | D_CXX_OBJC
   C --std=c99: D_C23 | D_CXXONLY | D_OBJC
   C --std=c17: D_C23 | D_CXXONLY | D_OBJC
   C --std=c23: D_CXXONLY | D_OBJC
   ObjC is like C except that D_OBJC and D_CXX_OBJC are not set
   C++ --std=c++98: D_CONLY | D_CXX11 | D_CXX20 | D_OBJC
   C++ --std=c++11: D_CONLY | D_CXX20 | D_OBJC
   C++ --std=c++20: D_CONLY | D_OBJC
   ObjC++ is like C++ except that D_OBJC is not set

   If -fno-asm is used, D_ASM is added to the mask.  If
   -fno-gnu-keywords is used, D_EXT is added.  If -fno-asm and C in
   C89 mode, D_EXT89 is added for both -fno-asm and -fno-gnu-keywords.
   In C with -Wc++-compat, we warn if D_CXXWARN is set.

   Note the complication of the D_CXX_OBJC keywords.  These are
   reserved words such as 'class'.  In C++, 'class' is a reserved
   word.  In Objective-C++ it is too.  In Objective-C, it is a
   reserved word too, but only if it follows an '@' sign.
*/
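
/* Illustrative sketch of how the masks above are read, not part of the
   implementation: "bool" is tabled below as D_C23 | D_CXXWARN, so with
   -std=c99 (mask D_C23 | D_CXXONLY | D_OBJC) it matches D_C23 and stays
   an ordinary identifier, with -std=c23 or in C++ it is a keyword, and
   in pre-C23 C with -Wc++-compat the D_CXXWARN bit triggers a warning.  */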
const struct c_common_resword c_common_reswords[] =
{
  { "_Alignas", RID_ALIGNAS, D_CONLY },
  { "_Alignof", RID_ALIGNOF, D_CONLY },
  { "_Countof", RID_COUNTOF, D_CONLY },
  { "_Atomic", RID_ATOMIC, D_CONLY },
  { "_BitInt", RID_BITINT, D_CONLY },
  { "_Bool", RID_BOOL, D_CONLY },
  { "_Complex", RID_COMPLEX, 0 },
  { "_Imaginary", RID_IMAGINARY, D_CONLY },
  { "_Float16", RID_FLOAT16, 0 },
  { "_Float32", RID_FLOAT32, 0 },
  { "_Float64", RID_FLOAT64, 0 },
  { "_Float128", RID_FLOAT128, 0 },
  { "_Float32x", RID_FLOAT32X, 0 },
  { "_Float64x", RID_FLOAT64X, 0 },
  { "_Float128x", RID_FLOAT128X, 0 },
  { "_Decimal32", RID_DFLOAT32, D_CONLY },
  { "_Decimal64", RID_DFLOAT64, D_CONLY },
  { "_Decimal128", RID_DFLOAT128, D_CONLY },
  { "_Decimal64x", RID_DFLOAT64X, D_CONLY },
  { "_Fract", RID_FRACT, D_CONLY | D_EXT },
  { "_Accum", RID_ACCUM, D_CONLY | D_EXT },
  { "_Sat", RID_SAT, D_CONLY | D_EXT },
  { "_Static_assert", RID_STATIC_ASSERT, D_CONLY },
  { "_Noreturn", RID_NORETURN, D_CONLY },
  { "_Generic", RID_GENERIC, D_CONLY },
  { "_Thread_local", RID_THREAD, D_CONLY },
  { "__FUNCTION__", RID_FUNCTION_NAME, 0 },
  { "__PRETTY_FUNCTION__", RID_PRETTY_FUNCTION_NAME, 0 },
  { "__alignof", RID_ALIGNOF, 0 },
  { "__alignof__", RID_ALIGNOF, 0 },
  { "__asm", RID_ASM, 0 },
  { "__asm__", RID_ASM, 0 },
  { "__attribute", RID_ATTRIBUTE, 0 },
  { "__attribute__", RID_ATTRIBUTE, 0 },
  { "__auto_type", RID_AUTO_TYPE, D_CONLY },
  { "__builtin_addressof", RID_ADDRESSOF, D_CXXONLY },
  { "__builtin_assoc_barrier", RID_BUILTIN_ASSOC_BARRIER, 0 },
  { "__builtin_bit_cast", RID_BUILTIN_BIT_CAST, D_CXXONLY },
  { "__builtin_call_with_static_chain",
    RID_BUILTIN_CALL_WITH_STATIC_CHAIN, D_CONLY },
  { "__builtin_choose_expr", RID_CHOOSE_EXPR, D_CONLY },
  { "__builtin_complex", RID_BUILTIN_COMPLEX, D_CONLY },
  { "__builtin_convertvector", RID_BUILTIN_CONVERTVECTOR, 0 },
  { "__builtin_counted_by_ref", RID_BUILTIN_COUNTED_BY_REF, D_CONLY },
  { "__builtin_has_attribute", RID_BUILTIN_HAS_ATTRIBUTE, 0 },
  { "__builtin_launder", RID_BUILTIN_LAUNDER, D_CXXONLY },
  { "__builtin_operator_new", RID_BUILTIN_OPERATOR_NEW, D_CXXONLY },
  { "__builtin_operator_delete", RID_BUILTIN_OPERATOR_DELETE, D_CXXONLY },
  { "__builtin_shuffle", RID_BUILTIN_SHUFFLE, 0 },
  { "__builtin_shufflevector", RID_BUILTIN_SHUFFLEVECTOR, 0 },
  { "__builtin_stdc_bit_ceil", RID_BUILTIN_STDC, D_CONLY },
  { "__builtin_stdc_bit_floor", RID_BUILTIN_STDC, D_CONLY },
  { "__builtin_stdc_bit_width", RID_BUILTIN_STDC, D_CONLY },
  { "__builtin_stdc_count_ones", RID_BUILTIN_STDC, D_CONLY },
  { "__builtin_stdc_count_zeros", RID_BUILTIN_STDC, D_CONLY },
  { "__builtin_stdc_first_leading_one", RID_BUILTIN_STDC, D_CONLY },
  { "__builtin_stdc_first_leading_zero", RID_BUILTIN_STDC, D_CONLY },
  { "__builtin_stdc_first_trailing_one", RID_BUILTIN_STDC, D_CONLY },
  { "__builtin_stdc_first_trailing_zero", RID_BUILTIN_STDC, D_CONLY },
  { "__builtin_stdc_has_single_bit", RID_BUILTIN_STDC, D_CONLY },
  { "__builtin_stdc_leading_ones", RID_BUILTIN_STDC, D_CONLY },
  { "__builtin_stdc_leading_zeros", RID_BUILTIN_STDC, D_CONLY },
  { "__builtin_stdc_rotate_left", RID_BUILTIN_STDC, D_CONLY },
  { "__builtin_stdc_rotate_right", RID_BUILTIN_STDC, D_CONLY },
  { "__builtin_stdc_trailing_ones", RID_BUILTIN_STDC, D_CONLY },
  { "__builtin_stdc_trailing_zeros", RID_BUILTIN_STDC, D_CONLY },
  { "__builtin_tgmath", RID_BUILTIN_TGMATH, D_CONLY },
  { "__builtin_offsetof", RID_OFFSETOF, 0 },
  { "__builtin_types_compatible_p", RID_TYPES_COMPATIBLE_P, D_CONLY },
  { "__builtin_c23_va_start", RID_C23_VA_START, D_C23 },
  { "__builtin_va_arg", RID_VA_ARG, 0 },
  { "__complex", RID_COMPLEX, 0 },
  { "__complex__", RID_COMPLEX, 0 },
  { "__const", RID_CONST, 0 },
  { "__const__", RID_CONST, 0 },
  { "__constinit", RID_CONSTINIT, D_CXXONLY },
  { "__decltype", RID_DECLTYPE, D_CXXONLY },
  { "__extension__", RID_EXTENSION, 0 },
  { "__func__", RID_C99_FUNCTION_NAME, 0 },
  { "__imag", RID_IMAGPART, 0 },
  { "__imag__", RID_IMAGPART, 0 },
  { "__inline", RID_INLINE, 0 },
  { "__inline__", RID_INLINE, 0 },
  { "__label__", RID_LABEL, 0 },
  { "__null", RID_NULL, 0 },
  { "__real", RID_REALPART, 0 },
  { "__real__", RID_REALPART, 0 },
  { "__restrict", RID_RESTRICT, 0 },
  { "__restrict__", RID_RESTRICT, 0 },
  { "__signed", RID_SIGNED, 0 },
  { "__signed__", RID_SIGNED, 0 },
  { "__thread", RID_THREAD, 0 },
  { "__transaction_atomic", RID_TRANSACTION_ATOMIC, 0 },
  { "__transaction_relaxed", RID_TRANSACTION_RELAXED, 0 },
  { "__transaction_cancel", RID_TRANSACTION_CANCEL, 0 },
  { "__typeof", RID_TYPEOF, 0 },
  { "__typeof__", RID_TYPEOF, 0 },
  { "__typeof_unqual", RID_TYPEOF_UNQUAL, D_CONLY },
  { "__typeof_unqual__", RID_TYPEOF_UNQUAL, D_CONLY },
  { "__volatile", RID_VOLATILE, 0 },
  { "__volatile__", RID_VOLATILE, 0 },
  { "__GIMPLE", RID_GIMPLE, D_CONLY },
  { "__PHI", RID_PHI, D_CONLY },
  { "__RTL", RID_RTL, D_CONLY },
  { "alignas", RID_ALIGNAS, D_C23 | D_CXX11 | D_CXXWARN },
  { "alignof", RID_ALIGNOF, D_C23 | D_CXX11 | D_CXXWARN },
  { "asm", RID_ASM, D_ASM },
  { "auto", RID_AUTO, 0 },
  { "bool", RID_BOOL, D_C23 | D_CXXWARN },
  { "break", RID_BREAK, 0 },
  { "case", RID_CASE, 0 },
  { "catch", RID_CATCH, D_CXX_OBJC | D_CXXWARN },
  { "char", RID_CHAR, 0 },
  { "char8_t", RID_CHAR8, D_CXX_CHAR8_T_FLAGS | D_CXXWARN },
  { "char16_t", RID_CHAR16, D_CXXONLY | D_CXX11 | D_CXXWARN },
  { "char32_t", RID_CHAR32, D_CXXONLY | D_CXX11 | D_CXXWARN },
  { "class", RID_CLASS, D_CXX_OBJC | D_CXXWARN },
  { "const", RID_CONST, 0 },
  { "consteval", RID_CONSTEVAL, D_CXXONLY | D_CXX20 | D_CXXWARN },
  { "constexpr", RID_CONSTEXPR, D_C23 | D_CXX11 | D_CXXWARN },
  { "constinit", RID_CONSTINIT, D_CXXONLY | D_CXX20 | D_CXXWARN },
  { "const_cast", RID_CONSTCAST, D_CXXONLY | D_CXXWARN },
  { "continue", RID_CONTINUE, 0 },
  { "decltype", RID_DECLTYPE, D_CXXONLY | D_CXX11 | D_CXXWARN },
  { "default", RID_DEFAULT, 0 },
  { "delete", RID_DELETE, D_CXXONLY | D_CXXWARN },
  { "do", RID_DO, 0 },
  { "double", RID_DOUBLE, 0 },
  { "dynamic_cast", RID_DYNCAST, D_CXXONLY | D_CXXWARN },
  { "else", RID_ELSE, 0 },
  { "enum", RID_ENUM, 0 },
  { "explicit", RID_EXPLICIT, D_CXXONLY | D_CXXWARN },
  { "export", RID_EXPORT, D_CXXONLY | D_CXXWARN },
  { "extern", RID_EXTERN, 0 },
  { "false", RID_FALSE, D_C23 | D_CXXWARN },
  { "float", RID_FLOAT, 0 },
  { "for", RID_FOR, 0 },
  { "friend", RID_FRIEND, D_CXXONLY | D_CXXWARN },
  { "goto", RID_GOTO, 0 },
  { "if", RID_IF, 0 },
  { "inline", RID_INLINE, D_EXT89 },
  { "int", RID_INT, 0 },
  { "long", RID_LONG, 0 },
  { "mutable", RID_MUTABLE, D_CXXONLY | D_CXXWARN },
  { "namespace", RID_NAMESPACE, D_CXXONLY | D_CXXWARN },
  { "new", RID_NEW, D_CXXONLY | D_CXXWARN },
  { "noexcept", RID_NOEXCEPT, D_CXXONLY | D_CXX11 | D_CXXWARN },
  { "nullptr", RID_NULLPTR, D_C23 | D_CXX11 | D_CXXWARN },
  { "operator", RID_OPERATOR, D_CXXONLY | D_CXXWARN },
  { "private", RID_PRIVATE, D_CXX_OBJC | D_CXXWARN },
  { "protected", RID_PROTECTED, D_CXX_OBJC | D_CXXWARN },
  { "public", RID_PUBLIC, D_CXX_OBJC | D_CXXWARN },
  { "register", RID_REGISTER, 0 },
  { "reinterpret_cast", RID_REINTCAST, D_CXXONLY | D_CXXWARN },
  { "restrict", RID_RESTRICT, D_CONLY | D_C99 },
  { "return", RID_RETURN, 0 },
  { "short", RID_SHORT, 0 },
  { "signed", RID_SIGNED, 0 },
  { "sizeof", RID_SIZEOF, 0 },
  { "static", RID_STATIC, 0 },
  { "static_assert", RID_STATIC_ASSERT, D_C23 | D_CXX11 | D_CXXWARN },
  { "static_cast", RID_STATCAST, D_CXXONLY | D_CXXWARN },
  { "struct", RID_STRUCT, 0 },
  { "switch", RID_SWITCH, 0 },
  { "template", RID_TEMPLATE, D_CXXONLY | D_CXXWARN },
  { "this", RID_THIS, D_CXXONLY | D_CXXWARN },
  { "thread_local", RID_THREAD, D_C23 | D_CXX11 | D_CXXWARN },
  { "throw", RID_THROW, D_CXX_OBJC | D_CXXWARN },
  { "true", RID_TRUE, D_C23 | D_CXXWARN },
  { "try", RID_TRY, D_CXX_OBJC | D_CXXWARN },
  { "typedef", RID_TYPEDEF, 0 },
  { "typename", RID_TYPENAME, D_CXXONLY | D_CXXWARN },
  { "typeid", RID_TYPEID, D_CXXONLY | D_CXXWARN },
  { "typeof", RID_TYPEOF, D_EXT11 },
  { "typeof_unqual", RID_TYPEOF_UNQUAL, D_CONLY | D_C23 },
  { "union", RID_UNION, 0 },
  { "unsigned", RID_UNSIGNED, 0 },
  { "using", RID_USING, D_CXXONLY | D_CXXWARN },
  { "virtual", RID_VIRTUAL, D_CXXONLY | D_CXXWARN },
  { "void", RID_VOID, 0 },
  { "volatile", RID_VOLATILE, 0 },
  { "wchar_t", RID_WCHAR, D_CXXONLY },
  { "while", RID_WHILE, 0 },

  /* C++ transactional memory.  */
  { "synchronized", RID_SYNCHRONIZED, D_CXX_OBJC | D_TRANSMEM },
  { "atomic_noexcept", RID_ATOMIC_NOEXCEPT, D_CXXONLY | D_TRANSMEM },
  { "atomic_cancel", RID_ATOMIC_CANCEL, D_CXXONLY | D_TRANSMEM },
  { "atomic_commit", RID_TRANSACTION_ATOMIC, D_CXXONLY | D_TRANSMEM },

  /* Concepts-related keywords */
  { "concept", RID_CONCEPT, D_CXX_CONCEPTS_FLAGS | D_CXXWARN },
  { "requires", RID_REQUIRES, D_CXX_CONCEPTS_FLAGS | D_CXXWARN },

  /* Modules-related keywords, these are internal unspellable tokens,
     created by the preprocessor.  */
  { "module ", RID__MODULE, D_CXX_MODULES_FLAGS | D_CXXWARN },
  { "import ", RID__IMPORT, D_CXX_MODULES_FLAGS | D_CXXWARN },
  { "export ", RID__EXPORT, D_CXX_MODULES_FLAGS | D_CXXWARN },

  /* Coroutines-related keywords */
  { "co_await", RID_CO_AWAIT, D_CXX_COROUTINES_FLAGS | D_CXXWARN },
  { "co_yield", RID_CO_YIELD, D_CXX_COROUTINES_FLAGS | D_CXXWARN },
  { "co_return", RID_CO_RETURN, D_CXX_COROUTINES_FLAGS | D_CXXWARN },

  /* These Objective-C keywords are recognized only immediately after
     an '@'.  */
  { "compatibility_alias", RID_AT_ALIAS, D_OBJC },
  { "defs", RID_AT_DEFS, D_OBJC },
  { "encode", RID_AT_ENCODE, D_OBJC },
  { "end", RID_AT_END, D_OBJC },
  { "implementation", RID_AT_IMPLEMENTATION, D_OBJC },
  { "interface", RID_AT_INTERFACE, D_OBJC },
  { "protocol", RID_AT_PROTOCOL, D_OBJC },
  { "selector", RID_AT_SELECTOR, D_OBJC },
  { "finally", RID_AT_FINALLY, D_OBJC },
  { "optional", RID_AT_OPTIONAL, D_OBJC },
  { "required", RID_AT_REQUIRED, D_OBJC },
  { "property", RID_AT_PROPERTY, D_OBJC },
  { "package", RID_AT_PACKAGE, D_OBJC },
  { "synthesize", RID_AT_SYNTHESIZE, D_OBJC },
  { "dynamic", RID_AT_DYNAMIC, D_OBJC },
  /* These are recognized only in protocol-qualifier context
     (see above) */
  { "bycopy", RID_BYCOPY, D_OBJC },
  { "byref", RID_BYREF, D_OBJC },
  { "in", RID_IN, D_OBJC },
  { "inout", RID_INOUT, D_OBJC },
  { "oneway", RID_ONEWAY, D_OBJC },
  { "out", RID_OUT, D_OBJC },
  /* These are recognized inside a property attribute list */
  { "assign", RID_ASSIGN, D_OBJC },
  { "atomic", RID_PROPATOMIC, D_OBJC },
  { "copy", RID_COPY, D_OBJC },
  { "getter", RID_GETTER, D_OBJC },
  { "nonatomic", RID_NONATOMIC, D_OBJC },
  { "readonly", RID_READONLY, D_OBJC },
  { "readwrite", RID_READWRITE, D_OBJC },
  { "retain", RID_RETAIN, D_OBJC },
  { "setter", RID_SETTER, D_OBJC },
  /* These are Objective C implementation of nullability, accepted only in
     specific contexts.  */
  { "null_unspecified", RID_NULL_UNSPECIFIED, D_OBJC },
  { "nullable", RID_NULLABLE, D_OBJC },
  { "nonnull", RID_NONNULL, D_OBJC },
  { "null_resettable", RID_NULL_RESETTABLE, D_OBJC },
};

const unsigned int num_c_common_reswords = ARRAY_SIZE (c_common_reswords);

/* Return identifier for address space AS.  */

const char *
c_addr_space_name (addr_space_t as)
{
  int rid = RID_FIRST_ADDR_SPACE + as;
  gcc_assert (ridpointers [rid]);
  return IDENTIFIER_POINTER (ridpointers [rid]);
}

/* Push current bindings for the function name VAR_DECLS.  */

void
start_fname_decls (void)
{
  unsigned ix;
  tree saved = NULL_TREE;

  for (ix = 0; fname_vars[ix].decl; ix++)
    {
      tree decl = *fname_vars[ix].decl;

      if (decl)
        {
          saved = tree_cons (decl, build_int_cst (integer_type_node, ix),
                             saved);
          *fname_vars[ix].decl = NULL_TREE;
        }
    }
  if (saved || saved_function_name_decls)
    /* Normally they'll have been NULL, so only push if we've got a
       stack, or they are non-NULL.  */
    saved_function_name_decls = tree_cons (saved, NULL_TREE,
                                           saved_function_name_decls);
}

/* Finish up the current bindings, adding them into the current function's
   statement tree.  This must be done _before_ finish_stmt_tree is called.
   If there is no current function, we must be at file scope and no statements
   are involved.  Pop the previous bindings.  */

void
finish_fname_decls (void)
{
  unsigned ix;
  tree stmts = NULL_TREE;
  tree stack = saved_function_name_decls;

  for (; stack && TREE_VALUE (stack); stack = TREE_CHAIN (stack))
    append_to_statement_list (TREE_VALUE (stack), &stmts);

  if (stmts)
    {
      tree *bodyp = &DECL_SAVED_TREE (current_function_decl);

      if (TREE_CODE (*bodyp) == BIND_EXPR)
        bodyp = &BIND_EXPR_BODY (*bodyp);

      append_to_statement_list_force (*bodyp, &stmts);
      *bodyp = stmts;
    }

  for (ix = 0; fname_vars[ix].decl; ix++)
    *fname_vars[ix].decl = NULL_TREE;

  if (stack)
    {
      /* We had saved values, restore them.  */
      tree saved;

      for (saved = TREE_PURPOSE (stack); saved; saved = TREE_CHAIN (saved))
        {
          tree decl = TREE_PURPOSE (saved);
          unsigned ix = TREE_INT_CST_LOW (TREE_VALUE (saved));

          *fname_vars[ix].decl = decl;
        }
      stack = TREE_CHAIN (stack);
    }
  saved_function_name_decls = stack;
}

/* Return the text name of the current function, suitably prettified
   by PRETTY_P.  Return string must be freed by caller.  */

const char *
fname_as_string (int pretty_p)
{
  const char *name = "top level";
  char *namep;
  int vrb = 2, len;
  cpp_string cstr = { 0, 0 }, strname;

  if (!pretty_p)
    {
      name = "";
      vrb = 0;
    }

  if (current_function_decl)
    name = lang_hooks.decl_printable_name (current_function_decl, vrb);

  len = strlen (name) + 3; /* Two for '"'s.  One for NULL.  */

  namep = XNEWVEC (char, len);
  snprintf (namep, len, "\"%s\"", name);
  strname.text = (unsigned char *) namep;
  strname.len = len - 1;

  if (cpp_interpret_string (parse_in, &strname, 1, &cstr, CPP_STRING))
    {
      XDELETEVEC (namep);
      return (const char *) cstr.text;
    }

  return namep;
}

/* Return the VAR_DECL for a const char array naming the current
   function.  If the VAR_DECL has not yet been created, create it
   now.  RID indicates how it should be formatted and IDENTIFIER_NODE
   ID is its name (unfortunately C and C++ hold the RID values of
   keywords in different places, so we can't derive RID from ID in
   this language independent code).  LOC is the location of the
   function.  */

tree
fname_decl (location_t loc, unsigned int rid, tree id)
{
  unsigned ix;
  tree decl = NULL_TREE;

  for (ix = 0; fname_vars[ix].decl; ix++)
    if (fname_vars[ix].rid == rid)
      break;

  decl = *fname_vars[ix].decl;
  if (!decl)
    {
      /* If a tree is built here, it would normally have the lineno of
         the current statement.  Later this tree will be moved to the
         beginning of the function and this line number will be wrong.
         To avoid this problem set the lineno to 0 here; that prevents
         it from appearing in the RTL.  */
      tree stmts;
      location_t saved_location = input_location;
      input_location = UNKNOWN_LOCATION;

      stmts = push_stmt_list ();
      decl = (*make_fname_decl) (loc, id, fname_vars[ix].pretty);
      stmts = pop_stmt_list (stmts);
      if (!IS_EMPTY_STMT (stmts))
        saved_function_name_decls
          = tree_cons (decl, stmts, saved_function_name_decls);
      *fname_vars[ix].decl = decl;
      input_location = saved_location;
    }
  if (!ix && !current_function_decl)
    pedwarn (loc, 0, "%qD is not defined outside of function scope", decl);

  return decl;
}

/* Given a STRING_CST, give it a suitable array-of-chars data type.  */

tree
fix_string_type (tree value)
{
  int length = TREE_STRING_LENGTH (value);
  int nchars, charsz;
  tree e_type, i_type, a_type;

  /* Compute the number of elements, for the array type.  */
  if (TREE_TYPE (value) == char_array_type_node || !TREE_TYPE (value))
    {
      charsz = 1;
      e_type = char_type_node;
    }
  else if (flag_char8_t && TREE_TYPE (value) == char8_array_type_node)
    {
      charsz = TYPE_PRECISION (char8_type_node) / BITS_PER_UNIT;
      e_type = char8_type_node;
    }
  else if (TREE_TYPE (value) == char16_array_type_node)
    {
      charsz = TYPE_PRECISION (char16_type_node) / BITS_PER_UNIT;
      e_type = char16_type_node;
    }
  else if (TREE_TYPE (value) == char32_array_type_node)
    {
      charsz = TYPE_PRECISION (char32_type_node) / BITS_PER_UNIT;
      e_type = char32_type_node;
    }
  else
    {
      charsz = TYPE_PRECISION (wchar_type_node) / BITS_PER_UNIT;
      e_type = wchar_type_node;
    }

  /* This matters only for targets where ssizetype has smaller precision
     than 32 bits.  */
  if (wi::lts_p (wi::to_wide (TYPE_MAX_VALUE (ssizetype)), length))
    {
      error ("size of string literal is too large");
      length = tree_to_shwi (TYPE_MAX_VALUE (ssizetype)) / charsz * charsz;
      char *str = CONST_CAST (char *, TREE_STRING_POINTER (value));
      memset (str + length, '\0',
              MIN (TREE_STRING_LENGTH (value) - length, charsz));
      TREE_STRING_LENGTH (value) = length;
    }
  nchars = length / charsz;

  /* C89 2.2.4.1, C99 5.2.4.1 (Translation limits).  The analogous
     limit in C++98 Annex B is very large (65536) and is not normative,
     so we do not diagnose it (warn_overlength_strings is forced off
     in c_common_post_options).  */
  if (warn_overlength_strings)
    {
      const int nchars_max = flag_isoc99 ? 4095 : 509;
      const int relevant_std = flag_isoc99 ? 99 : 90;
      if (nchars - 1 > nchars_max)
        /* Translators: The %d after 'ISO C' will be 90 or 99.  Do not
           separate the %d from the 'C'.  'ISO' should not be
           translated, but it may be moved after 'C%d' in languages
           where modifiers follow nouns.  */
        pedwarn (input_location, OPT_Woverlength_strings,
                 "string length %qd is greater than the length %qd "
                 "ISO C%d compilers are required to support",
                 nchars - 1, nchars_max, relevant_std);
    }

  /* Create the array type for the string constant.  The ISO C++
     standard says that a string literal has type `const char[N]' or
     `const wchar_t[N]'.  We use the same logic when invoked as a C
     front-end with -Wwrite-strings.
     ??? We should change the type of an expression depending on the
     state of a warning flag.  We should just be warning -- see how
     this is handled in the C++ front-end for the deprecated implicit
     conversion from string literals to `char*' or `wchar_t*'.

     The C++ front end relies on TYPE_MAIN_VARIANT of a cv-qualified
     array type being the unqualified version of that type.
     Therefore, if we are constructing an array of const char, we must
     construct the matching unqualified array type first.  The C front
     end does not require this, but it does no harm, so we do it
     unconditionally.  */
  i_type = build_index_type (size_int (nchars - 1));
  a_type = build_array_type (e_type, i_type);
  if (c_dialect_cxx () || warn_write_strings)
    a_type = c_build_qualified_type (a_type, TYPE_QUAL_CONST);

  TREE_TYPE (value) = a_type;
  TREE_CONSTANT (value) = 1;
  TREE_READONLY (value) = 1;
  TREE_STATIC (value) = 1;
  return value;
}
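
/* Illustrative sketch, not part of the implementation: after this
   fixup, "abc" is a STRING_CST of type char[4] (const char[4] in C++
   or with -Wwrite-strings), and L"ab" gets wchar_t[3]; the array
   length counts the terminating NUL in units of the element type.  */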

/* Given a string of type STRING_TYPE, determine what kind of string
   token would give an equivalent execution encoding: CPP_STRING,
   CPP_STRING16, or CPP_STRING32.  Return CPP_OTHER in case of error.
   This may not be exactly the string token type that initially created
   the string, since CPP_WSTRING is indistinguishable from the 16/32 bit
   string type, and CPP_UTF8STRING is indistinguishable from CPP_STRING
   at this point.

   This effectively reverses part of the logic in lex_string and
   fix_string_type.  */

static enum cpp_ttype
get_cpp_ttype_from_string_type (tree string_type)
{
  gcc_assert (string_type);
  if (TREE_CODE (string_type) == POINTER_TYPE)
    string_type = TREE_TYPE (string_type);

  if (TREE_CODE (string_type) != ARRAY_TYPE)
    return CPP_OTHER;

  tree element_type = TREE_TYPE (string_type);
  if (TREE_CODE (element_type) != INTEGER_TYPE)
    return CPP_OTHER;

  int bits_per_character = TYPE_PRECISION (element_type);
  switch (bits_per_character)
    {
    case 8:
      return CPP_STRING;  /* It could have also been CPP_UTF8STRING.  */
    case 16:
      return CPP_STRING16;
    case 32:
      return CPP_STRING32;
    }

  return CPP_OTHER;
}

/* The global record of string concatenations, for use in
   extracting locations within string literals.  */

GTY(()) string_concat_db *g_string_concat_db;

/* Implementation of LANG_HOOKS_GET_SUBSTRING_LOCATION.  */

const char *
c_get_substring_location (const substring_loc &substr_loc,
                          location_t *out_loc)
{
  enum cpp_ttype tok_type
    = get_cpp_ttype_from_string_type (substr_loc.get_string_type ());
  if (tok_type == CPP_OTHER)
    return "unrecognized string type";

  return get_location_within_string (parse_in,
                                     global_dc->get_file_cache (),
                                     g_string_concat_db,
                                     substr_loc.get_fmt_string_loc (),
                                     tok_type,
                                     substr_loc.get_caret_idx (),
                                     substr_loc.get_start_idx (),
                                     substr_loc.get_end_idx (),
                                     out_loc);
}


/* Return true iff T is a boolean promoted to int.  */

bool
bool_promoted_to_int_p (tree t)
{
  return (CONVERT_EXPR_P (t)
          && TREE_TYPE (t) == integer_type_node
          && TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0))) == BOOLEAN_TYPE);
}

/* vector_targets_convertible_p is used for vector pointer types.  The
   callers perform various checks that the qualifiers are satisfactory,
   while OTOH vector_targets_convertible_p ignores the number of elements
   in the vectors.  That's fine with vector pointers as we can consider,
   say, a vector of 8 elements as two consecutive vectors of 4 elements,
   and that does not require any conversion of the pointer values.
   In contrast, vector_types_convertible_p and
   vector_types_compatible_elements_p are used for vector value types.  */
/* True if pointers to distinct types T1 and T2 can be converted to
   each other without an explicit cast.  Only returns true for opaque
   vector types.  */
bool
vector_targets_convertible_p (const_tree t1, const_tree t2)
{
  if (VECTOR_TYPE_P (t1) && VECTOR_TYPE_P (t2)
      && (TYPE_VECTOR_OPAQUE (t1) || TYPE_VECTOR_OPAQUE (t2))
      && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
    return true;

  return false;
}

/* vector_types_convertible_p is used for vector value types.
   It could in principle call vector_targets_convertible_p as a subroutine,
   but then the check for vector type would be duplicated with its callers,
   and also the purpose of vector_targets_convertible_p would become
   muddled.
   Where vector_types_convertible_p returns true, a conversion might still be
   needed to make the types match.
   In contrast, vector_targets_convertible_p is used for vector pointer
   values, and vector_types_compatible_elements_p is used specifically
   in the context for binary operators, as a check if use is possible without
   conversion.  */
/* True if vector types T1 and T2 can be converted to each other
   without an explicit cast.  If EMIT_LAX_NOTE is true, and T1 and T2
   can only be converted with -flax-vector-conversions yet that is not
   in effect, emit a note telling the user about that option if such
   a note has not previously been emitted.  */
bool
vector_types_convertible_p (const_tree t1, const_tree t2, bool emit_lax_note)
{
  static bool emitted_lax_note = false;
  bool convertible_lax;

  if ((TYPE_VECTOR_OPAQUE (t1) || TYPE_VECTOR_OPAQUE (t2))
      && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
    return true;

  convertible_lax =
    (tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2))
     && (TREE_CODE (TREE_TYPE (t1)) != REAL_TYPE
         || known_eq (TYPE_VECTOR_SUBPARTS (t1),
                      TYPE_VECTOR_SUBPARTS (t2)))
     && (INTEGRAL_TYPE_P (TREE_TYPE (t1))
         == INTEGRAL_TYPE_P (TREE_TYPE (t2))));

  if (!convertible_lax || flag_lax_vector_conversions)
    return convertible_lax;

  if (known_eq (TYPE_VECTOR_SUBPARTS (t1), TYPE_VECTOR_SUBPARTS (t2))
      && lang_hooks.types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
    return true;

  if (emit_lax_note && !emitted_lax_note)
    {
      emitted_lax_note = true;
      inform (input_location, "use %<-flax-vector-conversions%> to permit "
              "conversions between vectors with differing "
              "element types or numbers of subparts");
    }

  return false;
}
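
/* Illustrative sketch, not part of the implementation: with

     typedef int v4si __attribute__ ((vector_size (16)));
     typedef short v8hi __attribute__ ((vector_size (16)));
     v4si a;
     v8hi b = a;

   the initialization is rejected by default; the two types only become
   convertible (lax) under -flax-vector-conversions, which is when the
   note above is suggested.  */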

/* Build a VEC_PERM_EXPR if V0, V1 and MASK are not error_mark_nodes
   and have vector types, V0 has the same type as V1, and the number of
   elements of V0, V1, MASK is the same.

   In case V1 is a NULL_TREE it is assumed that __builtin_shuffle was
   called with two arguments.  In this case implementation passes the
   first argument twice in order to share the same tree code.  This fact
   could enable the mask-values being twice the vector length.  This is
   an implementation accident and this semantics is not guaranteed to
   the user.  */
tree
c_build_vec_perm_expr (location_t loc, tree v0, tree v1, tree mask,
                       bool complain)
{
  tree ret;
  bool wrap = true;
  bool maybe_const = false;
  bool two_arguments = false;

  if (v1 == NULL_TREE)
    {
      two_arguments = true;
      v1 = v0;
    }

  if (v0 == error_mark_node || v1 == error_mark_node
      || mask == error_mark_node)
    return error_mark_node;

  if (!gnu_vector_type_p (TREE_TYPE (mask))
      || !VECTOR_INTEGER_TYPE_P (TREE_TYPE (mask)))
    {
      if (complain)
        error_at (loc, "%<__builtin_shuffle%> last argument must "
                       "be an integer vector");
      return error_mark_node;
    }

  if (!gnu_vector_type_p (TREE_TYPE (v0))
      || !gnu_vector_type_p (TREE_TYPE (v1)))
    {
      if (complain)
        error_at (loc, "%<__builtin_shuffle%> arguments must be vectors");
      return error_mark_node;
    }

  if (TYPE_MAIN_VARIANT (TREE_TYPE (v0)) != TYPE_MAIN_VARIANT (TREE_TYPE (v1)))
    {
      if (complain)
        error_at (loc, "%<__builtin_shuffle%> argument vectors must be of "
                       "the same type");
      return error_mark_node;
    }

  if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (v0)),
                TYPE_VECTOR_SUBPARTS (TREE_TYPE (mask)))
      && maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (v1)),
                   TYPE_VECTOR_SUBPARTS (TREE_TYPE (mask))))
    {
      if (complain)
        error_at (loc, "%<__builtin_shuffle%> number of elements of the "
                       "argument vector(s) and the mask vector should "
                       "be the same");
      return error_mark_node;
    }

  if (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (TREE_TYPE (v0))))
      != GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (TREE_TYPE (mask)))))
    {
      if (complain)
        error_at (loc, "%<__builtin_shuffle%> argument vector(s) inner type "
                       "must have the same size as inner type of the mask");
      return error_mark_node;
    }

  if (!c_dialect_cxx ())
    {
      /* Avoid C_MAYBE_CONST_EXPRs inside VEC_PERM_EXPR.  */
      v0 = c_fully_fold (v0, false, &maybe_const);
      wrap &= maybe_const;

      if (two_arguments)
        v1 = v0 = save_expr (v0);
      else
        {
          v1 = c_fully_fold (v1, false, &maybe_const);
          wrap &= maybe_const;
        }

      mask = c_fully_fold (mask, false, &maybe_const);
      wrap &= maybe_const;
    }
  else if (two_arguments)
    v1 = v0 = save_expr (v0);

  ret = build3_loc (loc, VEC_PERM_EXPR, TREE_TYPE (v0), v0, v1, mask);

  if (!c_dialect_cxx () && !wrap)
    ret = c_wrap_maybe_const (ret, true);

  return ret;
}
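
/* Illustrative sketch, not part of the implementation: user code such as

     typedef int v4si __attribute__ ((vector_size (16)));
     v4si a = { 1, 2, 3, 4 };
     v4si m = { 3, 2, 1, 0 };
     v4si r = __builtin_shuffle (a, m);     yields { 4, 3, 2, 1 }
     v4si s = __builtin_shuffle (a, a, m);  two-vector form, mask 0..7

   reaches this function with V1 == NULL_TREE for the two-argument form
   and is lowered to a VEC_PERM_EXPR.  */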

/* Build a VEC_PERM_EXPR if V0, V1 are not error_mark_nodes
   and have vector types, V0 has the same element type as V1, and the
   number of elements the result is that of MASK.  */
tree
c_build_shufflevector (location_t loc, tree v0, tree v1,
                       const vec<tree> &mask, bool complain)
{
  tree ret;
  bool wrap = true;
  bool maybe_const = false;

  if (v0 == error_mark_node || v1 == error_mark_node)
    return error_mark_node;

  if (!gnu_vector_type_p (TREE_TYPE (v0))
      || !gnu_vector_type_p (TREE_TYPE (v1)))
    {
      if (complain)
        error_at (loc, "%<__builtin_shufflevector%> arguments must be vectors");
      return error_mark_node;
    }

  /* ??? In principle one could select a constant part of a variable size
     vector but things get a bit awkward with trying to support this here.  */
  unsigned HOST_WIDE_INT v0n, v1n;
  if (!TYPE_VECTOR_SUBPARTS (TREE_TYPE (v0)).is_constant (&v0n)
      || !TYPE_VECTOR_SUBPARTS (TREE_TYPE (v1)).is_constant (&v1n))
    {
      if (complain)
        error_at (loc, "%<__builtin_shufflevector%> arguments must be constant"
                       " size vectors");
      return error_mark_node;
    }

  if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (v0)))
      != TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (v1))))
    {
      if (complain)
        error_at (loc, "%<__builtin_shufflevector%> argument vectors must "
                       "have the same element type");
      return error_mark_node;
    }

  if (!pow2p_hwi (mask.length ()))
    {
      if (complain)
        error_at (loc, "%<__builtin_shufflevector%> must specify a result "
                       "with a power of two number of elements");
      return error_mark_node;
    }

  if (!c_dialect_cxx ())
    {
      /* Avoid C_MAYBE_CONST_EXPRs inside VEC_PERM_EXPR.  */
      v0 = c_fully_fold (v0, false, &maybe_const);
      wrap &= maybe_const;

      v1 = c_fully_fold (v1, false, &maybe_const);
      wrap &= maybe_const;
    }

  unsigned HOST_WIDE_INT maskl = MAX (mask.length (), MAX (v0n, v1n));
  unsigned HOST_WIDE_INT pad = (v0n < maskl ? maskl - v0n : 0);
  vec_perm_builder sel (maskl, maskl, 1);
  unsigned i;
  for (i = 0; i < mask.length (); ++i)
    {
      tree idx = mask[i];
      if (!tree_fits_shwi_p (idx))
        {
          if (complain)
            error_at (loc, "invalid element index %qE to "
                           "%<__builtin_shufflevector%>", idx);
          return error_mark_node;
        }
      HOST_WIDE_INT iidx = tree_to_shwi (idx);
      if (iidx < -1
          || (iidx != -1
              && (unsigned HOST_WIDE_INT) iidx >= v0n + v1n))
        {
          if (complain)
            error_at (loc, "invalid element index %qE to "
                           "%<__builtin_shufflevector%>", idx);
          return error_mark_node;
        }
      /* ??? Our VEC_PERM_EXPR does not allow for -1 yet.  */
      if (iidx == -1)
        iidx = i;
      /* ??? Our VEC_PERM_EXPR does not allow different sized inputs,
         so pad out a smaller v0.  */
      else if ((unsigned HOST_WIDE_INT) iidx >= v0n)
        iidx += pad;
      sel.quick_push (iidx);
    }
  /* ??? VEC_PERM_EXPR does not support a result that is smaller than
     the inputs, so we have to pad it out.  */
  for (; i < maskl; ++i)
    sel.quick_push (i);

  vec_perm_indices indices (sel, 2, maskl);

  tree ret_type = build_vector_type (TREE_TYPE (TREE_TYPE (v0)), maskl);
  tree mask_type = build_vector_type (build_nonstandard_integer_type
                (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (ret_type))), 1),
                maskl);
  /* Pad out arguments to the common vector size.  */
  if (v0n < maskl)
    {
      constructor_elt elt = { NULL_TREE, build_zero_cst (TREE_TYPE (v0)) };
      v0 = build_constructor_single (ret_type, NULL_TREE, v0);
      for (i = 1; i < maskl / v0n; ++i)
        vec_safe_push (CONSTRUCTOR_ELTS (v0), elt);
    }
  if (v1n < maskl)
    {
      constructor_elt elt = { NULL_TREE, build_zero_cst (TREE_TYPE (v1)) };
      v1 = build_constructor_single (ret_type, NULL_TREE, v1);
      for (i = 1; i < maskl / v1n; ++i)
        vec_safe_push (CONSTRUCTOR_ELTS (v1), elt);
    }
  ret = build3_loc (loc, VEC_PERM_EXPR, ret_type, v0, v1,
                    vec_perm_indices_to_tree (mask_type, indices));
  /* Get the lowpart we are interested in.  */
  if (mask.length () < maskl)
    {
      tree lpartt = build_vector_type (TREE_TYPE (ret_type), mask.length ());
      ret = build3_loc (loc, BIT_FIELD_REF,
                        lpartt, ret, TYPE_SIZE (lpartt), bitsize_zero_node);
      /* Wrap the lowpart operation in a TARGET_EXPR so it gets a separate
         temporary during gimplification.  See PR101530 for cases where
         we'd otherwise end up with non-toplevel BIT_FIELD_REFs.  */
      tree tem = create_tmp_var_raw (lpartt);
      DECL_CONTEXT (tem) = current_function_decl;
      ret = build4 (TARGET_EXPR, lpartt, tem, ret, NULL_TREE, NULL_TREE);
      TREE_SIDE_EFFECTS (ret) = 1;
    }

  if (!c_dialect_cxx () && !wrap)
    ret = c_wrap_maybe_const (ret, true);

  return ret;
}
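
/* Illustrative sketch, not part of the implementation: user code such as

     typedef int v4si __attribute__ ((vector_size (16)));
     v4si a = { 0, 1, 2, 3 };
     v4si b = { 4, 5, 6, 7 };
     v4si r = __builtin_shufflevector (a, b, 0, 4, 2, 6);

   selects elements across both inputs (indices 0..7, with -1 meaning
   "don't care"), here giving { 0, 4, 2, 6 }; the result width is the
   number of index arguments, which this function requires to be a
   power of two.  */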
1299 | |
1300 | /* Build a VEC_CONVERT ifn for __builtin_convertvector builtin. */ |
1301 | |
1302 | tree |
1303 | c_build_vec_convert (location_t loc1, tree expr, location_t loc2, tree type, |
1304 | bool complain) |
1305 | { |
1306 | if (error_operand_p (t: type)) |
1307 | return error_mark_node; |
1308 | if (error_operand_p (t: expr)) |
1309 | return error_mark_node; |
1310 | |
1311 | if (!gnu_vector_type_p (TREE_TYPE (expr)) |
1312 | || (!VECTOR_INTEGER_TYPE_P (TREE_TYPE (expr)) |
1313 | && !VECTOR_FLOAT_TYPE_P (TREE_TYPE (expr)))) |
1314 | { |
1315 | if (complain) |
1316 | error_at (loc1, "%<__builtin_convertvector%> first argument must " |
1317 | "be an integer or floating vector"); |
1318 | return error_mark_node; |
1319 | } |
1320 | |
1321 | if (!gnu_vector_type_p (type) |
1322 | || (!VECTOR_INTEGER_TYPE_P (type) && !VECTOR_FLOAT_TYPE_P (type))) |
1323 | { |
1324 | if (complain) |
1325 | error_at (loc2, "%<__builtin_convertvector%> second argument must " |
1326 | "be an integer or floating vector type"); |
1327 | return error_mark_node; |
1328 | } |
1329 | |
1330 | if (maybe_ne (a: TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)), |
1331 | b: TYPE_VECTOR_SUBPARTS (node: type))) |
1332 | { |
1333 | if (complain) |
1334 | error_at (loc1, "%<__builtin_convertvector%> number of elements " |
1335 | "of the first argument vector and the second argument " |
1336 | "vector type should be the same"); |
1337 | return error_mark_node; |
1338 | } |
1339 | |
1340 | if ((TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (expr))) |
1341 | == TYPE_MAIN_VARIANT (TREE_TYPE (type))) |
1342 | || (VECTOR_INTEGER_TYPE_P (TREE_TYPE (expr)) |
1343 | && VECTOR_INTEGER_TYPE_P (type) |
1344 | && (TYPE_PRECISION (TREE_TYPE (TREE_TYPE (expr))) |
1345 | == TYPE_PRECISION (TREE_TYPE (type))))) |
1346 | return build1_loc (loc1, VIEW_CONVERT_EXPR, type, expr); |
1347 | |
1348 | bool wrap = true; |
1349 | bool maybe_const = false; |
1350 | tree ret; |
1351 | if (!c_dialect_cxx ()) |
1352 | { |
1353 | /* Avoid C_MAYBE_CONST_EXPRs inside of VEC_CONVERT argument. */ |
1354 | expr = c_fully_fold (expr, false, &maybe_const); |
1355 | wrap &= maybe_const; |
1356 | } |
1357 | |
1358 | ret = build_call_expr_internal_loc (loc1, IFN_VEC_CONVERT, type, 1, expr); |
1359 | |
1360 | if (!wrap) |
1361 | ret = c_wrap_maybe_const (ret, true); |
1362 | |
1363 | return ret; |
1364 | } |
1365 | |
1366 | /* Like tree.cc:get_narrower, but retain conversion from C++0x scoped enum |
1367 | to integral type. */ |
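 | /* For illustration only (C++): for |
 | |
 |      enum class E : short { A }; |
 |      E e = E::A; |
 | |
 |    an operand spelled (int) e is narrowed here to an integral type with |
 |    short's precision instead of back to E itself, because a scoped |
 |    enumeration does not convert implicitly and would break common_type.  */ |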
1368 | |
1369 | tree |
1370 | c_common_get_narrower (tree op, int *unsignedp_ptr) |
1371 | { |
1372 | op = get_narrower (op, unsignedp_ptr); |
1373 | |
1374 | if (TREE_CODE (TREE_TYPE (op)) == ENUMERAL_TYPE |
1375 | && ENUM_IS_SCOPED (TREE_TYPE (op))) |
1376 | { |
1377 | /* C++0x scoped enumerations don't implicitly convert to integral |
1378 | type; if we stripped an explicit conversion to a larger type we |
1379 | need to replace it so common_type will still work. */ |
1380 | tree type = c_common_type_for_size (TYPE_PRECISION (TREE_TYPE (op)), |
1381 | TYPE_UNSIGNED (TREE_TYPE (op))); |
1382 | op = fold_convert (type, op); |
1383 | } |
1384 | return op; |
1385 | } |
1386 | |
1387 | /* This is a helper function of build_binary_op. |
1388 | |
1389 | For certain operations if both args were extended from the same |
1390 | smaller type, do the arithmetic in that type and then extend. |
1391 | |
1392 | BITWISE indicates a bitwise operation. |
1393 | For them, this optimization is safe only if |
1394 | both args are zero-extended or both are sign-extended. |
1395 | Otherwise, we might change the result. |
1396 | Eg, (short)-1 | (unsigned short)-1 is (int)-1 |
1397 | but calculated in (unsigned short) it would be (unsigned short)-1. |
1398 | */ |
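 | /* For illustration only: with the usual promotions, in |
 | |
 |      unsigned char a, b, c; |
 |      c = a & b; |
 | |
 |    both operands of the AND are zero-extended from unsigned char, so the |
 |    operation may be carried out in unsigned char; by contrast, computing |
 |    (short)-1 | (unsigned short)-1 in unsigned short would change the |
 |    result, as noted above, so that case is not shortened.  */ |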
1399 | tree |
1400 | shorten_binary_op (tree result_type, tree op0, tree op1, bool bitwise) |
1401 | { |
1402 | int unsigned0, unsigned1; |
1403 | tree arg0, arg1; |
1404 | int uns; |
1405 | tree type; |
1406 | |
1407 | /* Do not shorten vector operations. */ |
1408 | if (VECTOR_TYPE_P (result_type)) |
1409 | return result_type; |
1410 | |
1411 | /* Cast OP0 and OP1 to RESULT_TYPE. Doing so prevents |
1412 | excessive narrowing when we call get_narrower below. For |
1413 | example, suppose that OP0 is of unsigned int extended |
1414 | from signed char and that RESULT_TYPE is long long int. |
1415 | If we explicitly cast OP0 to RESULT_TYPE, OP0 would look |
1416 | like |
1417 | |
1418 | (long long int) (unsigned int) signed_char |
1419 | |
1420 | which get_narrower would narrow down to |
1421 | |
1422 | (unsigned int) signed char |
1423 | |
1424 | If we do not cast OP0 first, get_narrower would return |
1425 | signed_char, which is inconsistent with the case of the |
1426 | explicit cast. */ |
1427 | op0 = convert (result_type, op0); |
1428 | op1 = convert (result_type, op1); |
1429 | |
1430 | arg0 = c_common_get_narrower (op0, &unsigned0); |
1431 | arg1 = c_common_get_narrower (op1, &unsigned1); |
1432 | |
1433 | /* UNS is 1 if the operation to be done is an unsigned one. */ |
1434 | uns = TYPE_UNSIGNED (result_type); |
1435 | |
1436 | /* Handle the case that OP0 (or OP1) does not *contain* a conversion |
1437 | but it *requires* conversion to FINAL_TYPE. */ |
1438 | |
1439 | if ((TYPE_PRECISION (TREE_TYPE (op0)) |
1440 | == TYPE_PRECISION (TREE_TYPE (arg0))) |
1441 | && TREE_TYPE (op0) != result_type) |
1442 | unsigned0 = TYPE_UNSIGNED (TREE_TYPE (op0)); |
1443 | if ((TYPE_PRECISION (TREE_TYPE (op1)) |
1444 | == TYPE_PRECISION (TREE_TYPE (arg1))) |
1445 | && TREE_TYPE (op1) != result_type) |
1446 | unsigned1 = TYPE_UNSIGNED (TREE_TYPE (op1)); |
1447 | |
1448 | /* Now UNSIGNED0 is 1 if ARG0 zero-extends to FINAL_TYPE. */ |
1449 | |
1450 | /* For bitwise operations, signedness of nominal type |
1451 | does not matter. Consider only how operands were extended. */ |
1452 | if (bitwise) |
1453 | uns = unsigned0; |
1454 | |
1455 | /* Note that in all three cases below we refrain from optimizing |
1456 | an unsigned operation on sign-extended args. |
1457 | That would not be valid. */ |
1458 | |
1459 | /* Both args variable: if both extended in same way |
1460 | from same width, do it in that width. |
1461 | Do it unsigned if args were zero-extended. */ |
1462 | if ((TYPE_PRECISION (TREE_TYPE (arg0)) |
1463 | < TYPE_PRECISION (result_type)) |
1464 | && (TYPE_PRECISION (TREE_TYPE (arg1)) |
1465 | == TYPE_PRECISION (TREE_TYPE (arg0))) |
1466 | && unsigned0 == unsigned1 |
1467 | && (unsigned0 || !uns)) |
1468 | { |
1469 | tree ctype = common_type (TREE_TYPE (arg0), TREE_TYPE (arg1)); |
1470 | if (ctype != error_mark_node) |
1471 | return c_common_signed_or_unsigned_type (unsigned0, ctype); |
1472 | } |
1473 | |
1474 | else if (TREE_CODE (arg0) == INTEGER_CST |
1475 | && (unsigned1 || !uns) |
1476 | && (TYPE_PRECISION (TREE_TYPE (arg1)) |
1477 | < TYPE_PRECISION (result_type)) |
1478 | && (type |
1479 | = c_common_signed_or_unsigned_type (unsigned1, |
1480 | TREE_TYPE (arg1))) |
1481 | && !POINTER_TYPE_P (type) |
1482 | && int_fits_type_p (arg0, type)) |
1483 | return type; |
1484 | |
1485 | else if (TREE_CODE (arg1) == INTEGER_CST |
1486 | && (unsigned0 || !uns) |
1487 | && (TYPE_PRECISION (TREE_TYPE (arg0)) |
1488 | < TYPE_PRECISION (result_type)) |
1489 | && (type |
1490 | = c_common_signed_or_unsigned_type (unsigned0, |
1491 | TREE_TYPE (arg0))) |
1492 | && !POINTER_TYPE_P (type) |
1493 | && int_fits_type_p (arg1, type)) |
1494 | return type; |
1495 | |
1496 | return result_type; |
1497 | } |
1498 | |
1499 | /* Returns true iff any integer value of type FROM_TYPE can be represented as |
1500 | real of type TO_TYPE. This is a helper function for unsafe_conversion_p. */ |
1501 | |
1502 | static bool |
1503 | int_safely_convertible_to_real_p (const_tree from_type, const_tree to_type) |
1504 | { |
1505 | tree type_low_bound = TYPE_MIN_VALUE (from_type); |
1506 | tree type_high_bound = TYPE_MAX_VALUE (from_type); |
1507 | REAL_VALUE_TYPE real_low_bound = |
1508 | real_value_from_int_cst (0, type_low_bound); |
1509 | REAL_VALUE_TYPE real_high_bound = |
1510 | real_value_from_int_cst (0, type_high_bound); |
1511 | |
1512 | return exact_real_truncate (TYPE_MODE (to_type), &real_low_bound) |
1513 | && exact_real_truncate (TYPE_MODE (to_type), &real_high_bound); |
1514 | } |
1515 | |
1516 | /* Checks if expression EXPR of complex/real/integer type cannot be converted |
1517 | to the complex/real/integer type TYPE. Function returns non-zero when: |
1518 | * EXPR is a constant which cannot be exactly converted to TYPE. |
1519 | * EXPR is not a constant and the size of EXPR's type is greater |
1520 | than the size of TYPE, for EXPR's type and TYPE being both |
1521 | integer, both real, or both complex. |
1522 | * EXPR is a non-constant of complex type and TYPE is a real or |
1523 | an integer type. |
1524 | * EXPR is a non-constant of real type and TYPE is an integer type. |
1525 | * EXPR is a non-constant of an integer type whose values cannot |
1526 | all be exactly represented in the real type TYPE. |
1527 | |
1528 | Function allows conversions between types of different signedness if |
1529 | CHECK_SIGN is false and can return SAFE_CONVERSION (zero) in that |
1530 | case. Function can return UNSAFE_SIGN if CHECK_SIGN is true. |
1531 | |
1532 | RESULT, when non-null, is the result of the conversion. When constant, |
1533 | it is included in the text of diagnostics. |
1534 | |
1535 | Function allows conversions from complex constants to non-complex types, |
1536 | provided that the imaginary part is zero and the real part can be |
1537 | safely converted to TYPE. */ |
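 | /* For illustration only, assuming the usual type sizes and a non-constant |
 |    right-hand side: |
 | |
 |      int i1 = some_double;          -> UNSAFE_REAL |
 |      unsigned u = some_int;         -> UNSAFE_SIGN (with CHECK_SIGN) |
 |      int i2 = some_long_long;       -> UNSAFE_OTHER |
 |      double d = some_complex;       -> UNSAFE_IMAGINARY |
 | |
 |    whereas a constant that converts exactly, e.g. int i3 = 1.0;, is |
 |    SAFE_CONVERSION.  The some_* names are placeholders for arbitrary |
 |    non-constant operands of the indicated types.  */ |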
1538 | |
1539 | enum conversion_safety |
1540 | unsafe_conversion_p (tree type, tree expr, tree result, bool check_sign) |
1541 | { |
1542 | enum conversion_safety give_warning = SAFE_CONVERSION; /* is 0 or false */ |
1543 | tree expr_type = TREE_TYPE (expr); |
1544 | |
1545 | expr = fold_for_warn (expr); |
1546 | |
1547 | if (TREE_CODE (expr) == REAL_CST || TREE_CODE (expr) == INTEGER_CST) |
1548 | { |
1549 | /* If type is complex, we are interested in compatibility with |
1550 | underlying type. */ |
1551 | if (TREE_CODE (type) == COMPLEX_TYPE) |
1552 | type = TREE_TYPE (type); |
1553 | |
1554 | /* Warn for real constant that is not an exact integer converted |
1555 | to integer type. */ |
1556 | if (SCALAR_FLOAT_TYPE_P (expr_type) |
1557 | && (TREE_CODE (type) == INTEGER_TYPE |
1558 | || TREE_CODE (type) == BITINT_TYPE)) |
1559 | { |
1560 | if (!real_isinteger (TREE_REAL_CST_PTR (expr), TYPE_MODE (expr_type))) |
1561 | give_warning = UNSAFE_REAL; |
1562 | } |
1563 | /* Warn for an integer constant that does not fit into integer type. */ |
1564 | else if ((TREE_CODE (expr_type) == INTEGER_TYPE |
1565 | || TREE_CODE (expr_type) == BITINT_TYPE) |
1566 | && (TREE_CODE (type) == INTEGER_TYPE |
1567 | || TREE_CODE (type) == BITINT_TYPE) |
1568 | && !int_fits_type_p (expr, type)) |
1569 | { |
1570 | if (TYPE_UNSIGNED (type) && !TYPE_UNSIGNED (expr_type) |
1571 | && tree_int_cst_sgn (expr) < 0) |
1572 | { |
1573 | if (check_sign) |
1574 | give_warning = UNSAFE_SIGN; |
1575 | } |
1576 | else if (!TYPE_UNSIGNED (type) && TYPE_UNSIGNED (expr_type)) |
1577 | { |
1578 | if (check_sign) |
1579 | give_warning = UNSAFE_SIGN; |
1580 | } |
1581 | else |
1582 | give_warning = UNSAFE_OTHER; |
1583 | } |
1584 | else if (SCALAR_FLOAT_TYPE_P (type)) |
1585 | { |
1586 | /* Warn for an integer constant that does not fit into real type. */ |
1587 | if (TREE_CODE (expr_type) == INTEGER_TYPE |
1588 | || TREE_CODE (expr_type) == BITINT_TYPE) |
1589 | { |
1590 | REAL_VALUE_TYPE a = real_value_from_int_cst (0, expr); |
1591 | if (!exact_real_truncate (TYPE_MODE (type), &a)) |
1592 | give_warning = UNSAFE_REAL; |
1593 | } |
1594 | /* Warn for a real constant that does not fit into a smaller |
1595 | real type. */ |
1596 | else if (SCALAR_FLOAT_TYPE_P (expr_type) |
1597 | && TYPE_PRECISION (type) < TYPE_PRECISION (expr_type)) |
1598 | { |
1599 | REAL_VALUE_TYPE a = TREE_REAL_CST (expr); |
1600 | if (!exact_real_truncate (TYPE_MODE (type), &a)) |
1601 | give_warning = UNSAFE_REAL; |
1602 | } |
1603 | } |
1604 | } |
1605 | |
1606 | else if (TREE_CODE (expr) == COMPLEX_CST) |
1607 | { |
1608 | tree imag_part = TREE_IMAGPART (expr); |
1609 | /* Conversion from complex constant with zero imaginary part, |
1610 | perform check for conversion of real part. */ |
1611 | if ((TREE_CODE (imag_part) == REAL_CST |
1612 | && real_zerop (imag_part)) |
1613 | || (TREE_CODE (imag_part) == INTEGER_CST |
1614 | && integer_zerop (imag_part))) |
1615 | /* Note: in this branch we use a recursive call to unsafe_conversion_p |
1616 | with a different type of EXPR, but it is still safe, because when EXPR |
1617 | is a constant, its type is not used in the text of generated warnings |
1618 | (otherwise they could sound misleading). */ |
1619 | return unsafe_conversion_p (type, TREE_REALPART (expr), result, |
1620 | check_sign); |
1621 | /* Conversion from complex constant with non-zero imaginary part. */ |
1622 | else |
1623 | { |
1624 | /* Conversion to complex type. |
1625 | Perform checks for both real and imaginary parts. */ |
1626 | if (TREE_CODE (type) == COMPLEX_TYPE) |
1627 | { |
1628 | enum conversion_safety re_safety = |
1629 | unsafe_conversion_p (type, TREE_REALPART (expr), |
1630 | result, check_sign); |
1631 | enum conversion_safety im_safety = |
1632 | unsafe_conversion_p (type, imag_part, result, check_sign); |
1633 | |
1634 | /* Merge the results into appropriate single warning. */ |
1635 | |
1636 | /* Note: this case includes SAFE_CONVERSION, i.e. success. */ |
1637 | if (re_safety == im_safety) |
1638 | give_warning = re_safety; |
1639 | else if (!re_safety && im_safety) |
1640 | give_warning = im_safety; |
1641 | else if (re_safety && !im_safety) |
1642 | give_warning = re_safety; |
1643 | else |
1644 | give_warning = UNSAFE_OTHER; |
1645 | } |
1646 | /* Warn about conversion from complex to real or integer type. */ |
1647 | else |
1648 | give_warning = UNSAFE_IMAGINARY; |
1649 | } |
1650 | } |
1651 | |
1652 | /* Checks for remaining case: EXPR is not constant. */ |
1653 | else |
1654 | { |
1655 | /* Warn for real types converted to integer types. */ |
1656 | if (SCALAR_FLOAT_TYPE_P (expr_type) |
1657 | && (TREE_CODE (type) == INTEGER_TYPE |
1658 | || TREE_CODE (type) == BITINT_TYPE)) |
1659 | give_warning = UNSAFE_REAL; |
1660 | |
1661 | else if ((TREE_CODE (expr_type) == INTEGER_TYPE |
1662 | || TREE_CODE (expr_type) == BITINT_TYPE) |
1663 | && (TREE_CODE (type) == INTEGER_TYPE |
1664 | || TREE_CODE (type) == BITINT_TYPE)) |
1665 | { |
1666 | /* Don't warn about unsigned char y = 0xff, x = (int) y; */ |
1667 | expr = get_unwidened (expr, 0); |
1668 | expr_type = TREE_TYPE (expr); |
1669 | |
1670 | /* Don't warn for short y; short x = ((int)y & 0xff); */ |
1671 | if (TREE_CODE (expr) == BIT_AND_EXPR |
1672 | || TREE_CODE (expr) == BIT_IOR_EXPR |
1673 | || TREE_CODE (expr) == BIT_XOR_EXPR) |
1674 | { |
1675 | /* If both args were extended from the same shorter type, |
1676 | use that type if that is safe. */ |
1677 | expr_type = shorten_binary_op (expr_type, |
1678 | TREE_OPERAND (expr, 0), |
1679 | TREE_OPERAND (expr, 1), |
1680 | /* bitwise */1); |
1681 | |
1682 | if (TREE_CODE (expr) == BIT_AND_EXPR) |
1683 | { |
1684 | tree op0 = TREE_OPERAND (expr, 0); |
1685 | tree op1 = TREE_OPERAND (expr, 1); |
1686 | bool unsigned0 = TYPE_UNSIGNED (TREE_TYPE (op0)); |
1687 | bool unsigned1 = TYPE_UNSIGNED (TREE_TYPE (op1)); |
1688 | |
1689 | /* If one of the operands is a non-negative constant |
1690 | that fits in the target type, then the type of the |
1691 | other operand does not matter. */ |
1692 | if ((TREE_CODE (op0) == INTEGER_CST |
1693 | && int_fits_type_p (op0, c_common_signed_type (type)) |
1694 | && int_fits_type_p (op0, c_common_unsigned_type (type))) |
1695 | || (TREE_CODE (op1) == INTEGER_CST |
1696 | && int_fits_type_p (op1, c_common_signed_type (type)) |
1697 | && int_fits_type_p (op1, |
1698 | c_common_unsigned_type (type)))) |
1699 | return SAFE_CONVERSION; |
1700 | /* If constant is unsigned and fits in the target |
1701 | type, then the result will also fit. */ |
1702 | else if ((TREE_CODE (op0) == INTEGER_CST |
1703 | && unsigned0 |
1704 | && int_fits_type_p (op0, type)) |
1705 | || (TREE_CODE (op1) == INTEGER_CST |
1706 | && unsigned1 |
1707 | && int_fits_type_p (op1, type))) |
1708 | return SAFE_CONVERSION; |
1709 | } |
1710 | } |
1711 | /* Warn for integer types converted to smaller integer types. */ |
1712 | if (TYPE_PRECISION (type) < TYPE_PRECISION (expr_type)) |
1713 | give_warning = UNSAFE_OTHER; |
1714 | |
1715 | /* When they are the same width but different signedness, |
1716 | then the value may change. */ |
1717 | else if (((TYPE_PRECISION (type) == TYPE_PRECISION (expr_type) |
1718 | && TYPE_UNSIGNED (expr_type) != TYPE_UNSIGNED (type)) |
1719 | /* Even when converted to a bigger type, if the type is |
1720 | unsigned but expr is signed, then negative values |
1721 | will be changed. */ |
1722 | || (TYPE_UNSIGNED (type) && !TYPE_UNSIGNED (expr_type))) |
1723 | && check_sign) |
1724 | give_warning = UNSAFE_SIGN; |
1725 | } |
1726 | |
1727 | /* Warn for integer types converted to real types if and only if |
1728 | all the range of values of the integer type cannot be |
1729 | represented by the real type. */ |
1730 | else if ((TREE_CODE (expr_type) == INTEGER_TYPE |
1731 | || TREE_CODE (expr_type) == BITINT_TYPE) |
1732 | && SCALAR_FLOAT_TYPE_P (type)) |
1733 | { |
1734 | /* Don't warn about char y = 0xff; float x = (int) y; */ |
1735 | expr = get_unwidened (expr, 0); |
1736 | expr_type = TREE_TYPE (expr); |
1737 | |
1738 | if (!int_safely_convertible_to_real_p (expr_type, type)) |
1739 | give_warning = UNSAFE_OTHER; |
1740 | } |
1741 | |
1742 | /* Warn for real types converted to smaller real types. */ |
1743 | else if (SCALAR_FLOAT_TYPE_P (expr_type) |
1744 | && SCALAR_FLOAT_TYPE_P (type) |
1745 | && TYPE_PRECISION (type) < TYPE_PRECISION (expr_type)) |
1746 | give_warning = UNSAFE_REAL; |
1747 | |
1748 | /* Check conversion between two complex types. */ |
1749 | else if (TREE_CODE (expr_type) == COMPLEX_TYPE |
1750 | && TREE_CODE (type) == COMPLEX_TYPE) |
1751 | { |
1752 | /* Extract underlying types (i.e., type of real and imaginary |
1753 | parts) of expr_type and type. */ |
1754 | tree from_type = TREE_TYPE (expr_type); |
1755 | tree to_type = TREE_TYPE (type); |
1756 | |
1757 | /* Warn for real types converted to integer types. */ |
1758 | if (SCALAR_FLOAT_TYPE_P (from_type) |
1759 | && TREE_CODE (to_type) == INTEGER_TYPE) |
1760 | give_warning = UNSAFE_REAL; |
1761 | |
1762 | /* Warn for real types converted to smaller real types. */ |
1763 | else if (SCALAR_FLOAT_TYPE_P (from_type) |
1764 | && SCALAR_FLOAT_TYPE_P (to_type) |
1765 | && TYPE_PRECISION (to_type) < TYPE_PRECISION (from_type)) |
1766 | give_warning = UNSAFE_REAL; |
1767 | |
1768 | /* Check conversion for complex integer types. Here implementation |
1769 | is simpler than for real-domain integers because it does not |
1770 | involve sophisticated cases, such as bitmasks, casts, etc. */ |
1771 | else if (TREE_CODE (from_type) == INTEGER_TYPE |
1772 | && TREE_CODE (to_type) == INTEGER_TYPE) |
1773 | { |
1774 | /* Warn for integer types converted to smaller integer types. */ |
1775 | if (TYPE_PRECISION (to_type) < TYPE_PRECISION (from_type)) |
1776 | give_warning = UNSAFE_OTHER; |
1777 | |
1778 | /* Check for different signedness, see case for real-domain |
1779 | integers (above) for a more detailed comment. */ |
1780 | else if (((TYPE_PRECISION (to_type) == TYPE_PRECISION (from_type) |
1781 | && TYPE_UNSIGNED (to_type) != TYPE_UNSIGNED (from_type)) |
1782 | || (TYPE_UNSIGNED (to_type) && !TYPE_UNSIGNED (from_type))) |
1783 | && check_sign) |
1784 | give_warning = UNSAFE_SIGN; |
1785 | } |
1786 | else if (TREE_CODE (from_type) == INTEGER_TYPE |
1787 | && SCALAR_FLOAT_TYPE_P (to_type) |
1788 | && !int_safely_convertible_to_real_p (from_type, to_type)) |
1789 | give_warning = UNSAFE_OTHER; |
1790 | } |
1791 | |
1792 | /* Warn for complex types converted to real or integer types. */ |
1793 | else if (TREE_CODE (expr_type) == COMPLEX_TYPE |
1794 | && TREE_CODE (type) != COMPLEX_TYPE) |
1795 | give_warning = UNSAFE_IMAGINARY; |
1796 | } |
1797 | |
1798 | return give_warning; |
1799 | } |
1800 | |
1801 | |
1802 | /* Convert EXPR to TYPE, warning about conversion problems with constants. |
1803 | Invoke this function on every expression that is converted implicitly, |
1804 | i.e. because of language rules and not because of an explicit cast. |
1805 | INIT_CONST is true if the conversion is for arithmetic types for a static |
1806 | initializer and folding must apply accordingly (discarding floating-point |
1807 | exceptions and assuming the default rounding mode is in effect). */ |
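 | /* For illustration only: the implicit conversion in |
 | |
 |      unsigned char |
 |      narrow (int i) |
 |      { |
 |        return i; |
 |      } |
 | |
 |    goes through this function (and is typically diagnosed under |
 |    -Wconversion), while an explicit (unsigned char) cast does not.  */ |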
1808 | |
1809 | tree |
1810 | convert_and_check (location_t loc, tree type, tree expr, bool init_const) |
1811 | { |
1812 | tree result; |
1813 | tree expr_for_warning; |
1814 | |
1815 | /* Convert from a value with possible excess precision rather than |
1816 | via the semantic type, but do not warn about values not fitting |
1817 | exactly in the semantic type. */ |
1818 | if (TREE_CODE (expr) == EXCESS_PRECISION_EXPR) |
1819 | { |
1820 | tree orig_type = TREE_TYPE (expr); |
1821 | expr = TREE_OPERAND (expr, 0); |
1822 | expr_for_warning = (init_const |
1823 | ? convert_init (orig_type, expr) |
1824 | : convert (orig_type, expr)); |
1825 | if (orig_type == type) |
1826 | return expr_for_warning; |
1827 | } |
1828 | else |
1829 | expr_for_warning = expr; |
1830 | |
1831 | if (TREE_TYPE (expr) == type) |
1832 | return expr; |
1833 | |
1834 | result = init_const ? convert_init (type, expr) : convert (type, expr); |
1835 | |
1836 | if (c_inhibit_evaluation_warnings == 0 |
1837 | && !TREE_OVERFLOW_P (expr) |
1838 | && result != error_mark_node |
1839 | && !c_hardbool_type_attr (type)) |
1840 | warnings_for_convert_and_check (loc, type, expr_for_warning, result); |
1841 | |
1842 | return result; |
1843 | } |
1844 | |
1845 | /* A node in a list that describes references to variables (EXPR), which are |
1846 | either read accesses if WRITER is zero, or write accesses, in which case |
1847 | WRITER is the parent of EXPR. */ |
1848 | struct tlist |
1849 | { |
1850 | struct tlist *next; |
1851 | tree expr, writer; |
1852 | }; |
1853 | |
1854 | /* Used to implement a cache of the results of a call to verify_tree. We only |
1855 | use this for SAVE_EXPRs. */ |
1856 | struct tlist_cache |
1857 | { |
1858 | struct tlist_cache *next; |
1859 | struct tlist *cache_before_sp; |
1860 | struct tlist *cache_after_sp; |
1861 | tree expr; |
1862 | }; |
1863 | |
1864 | /* Obstack to use when allocating tlist structures, and corresponding |
1865 | firstobj. */ |
1866 | static struct obstack tlist_obstack; |
1867 | static char *tlist_firstobj = 0; |
1868 | |
1869 | /* Keep track of the identifiers we've warned about, so we can avoid duplicate |
1870 | warnings. */ |
1871 | static struct tlist *warned_ids; |
1872 | /* SAVE_EXPRs need special treatment. We process them only once and then |
1873 | cache the results. */ |
1874 | static struct tlist_cache *save_expr_cache; |
1875 | |
1876 | static void add_tlist (struct tlist **, struct tlist *, tree, int); |
1877 | static void merge_tlist (struct tlist **, struct tlist *, int); |
1878 | static void verify_tree (tree, struct tlist **, struct tlist **, tree); |
1879 | static bool warning_candidate_p (tree); |
1880 | static bool candidate_equal_p (const_tree, const_tree); |
1881 | static void warn_for_collisions (struct tlist *); |
1882 | static void warn_for_collisions_1 (tree, tree, struct tlist *, int); |
1883 | static struct tlist *new_tlist (struct tlist *, tree, tree); |
1884 | |
1885 | /* Create a new struct tlist and fill in its fields. */ |
1886 | static struct tlist * |
1887 | new_tlist (struct tlist *next, tree t, tree writer) |
1888 | { |
1889 | struct tlist *l; |
1890 | l = XOBNEW (&tlist_obstack, struct tlist); |
1891 | l->next = next; |
1892 | l->expr = t; |
1893 | l->writer = writer; |
1894 | return l; |
1895 | } |
1896 | |
1897 | /* Add duplicates of the nodes found in ADD to the list *TO. If EXCLUDE_WRITER |
1898 | is nonnull, we ignore any node we find which has a writer equal to it. */ |
1899 | |
1900 | static void |
1901 | add_tlist (struct tlist **to, struct tlist *add, tree exclude_writer, int copy) |
1902 | { |
1903 | while (add) |
1904 | { |
1905 | struct tlist *next = add->next; |
1906 | if (!copy) |
1907 | add->next = *to; |
1908 | if (!exclude_writer || !candidate_equal_p (add->writer, exclude_writer)) |
1909 | *to = copy ? new_tlist (*to, add->expr, add->writer) : add; |
1910 | add = next; |
1911 | } |
1912 | } |
1913 | |
1914 | /* Merge the nodes of ADD into TO. This merging process is done so that for |
1915 | each variable that already exists in TO, no new node is added; however if |
1916 | there is a write access recorded in ADD, and an occurrence on TO is only |
1917 | a read access, then the occurrence in TO will be modified to record the |
1918 | write. */ |
1919 | |
1920 | static void |
1921 | merge_tlist (struct tlist **to, struct tlist *add, int copy) |
1922 | { |
1923 | struct tlist **end = to; |
1924 | |
1925 | while (*end) |
1926 | end = &(*end)->next; |
1927 | |
1928 | while (add) |
1929 | { |
1930 | int found = 0; |
1931 | struct tlist *tmp2; |
1932 | struct tlist *next = add->next; |
1933 | |
1934 | for (tmp2 = *to; tmp2; tmp2 = tmp2->next) |
1935 | if (candidate_equal_p (tmp2->expr, add->expr)) |
1936 | { |
1937 | found = 1; |
1938 | if (!tmp2->writer) |
1939 | tmp2->writer = add->writer; |
1940 | } |
1941 | if (!found) |
1942 | { |
1943 | *end = copy ? new_tlist (NULL, add->expr, add->writer) : add; |
1944 | end = &(*end)->next; |
1945 | *end = 0; |
1946 | } |
1947 | add = next; |
1948 | } |
1949 | } |
1950 | |
1951 | /* WRITTEN is a variable, WRITER is its parent. Warn if any of the variable |
1952 | references in list LIST conflict with it, excluding reads if ONLY_WRITES |
1953 | is nonzero. */ |
1954 | |
1955 | static void |
1956 | warn_for_collisions_1 (tree written, tree writer, struct tlist *list, |
1957 | int only_writes) |
1958 | { |
1959 | struct tlist *tmp; |
1960 | |
1961 | /* Avoid duplicate warnings. */ |
1962 | for (tmp = warned_ids; tmp; tmp = tmp->next) |
1963 | if (candidate_equal_p (tmp->expr, written)) |
1964 | return; |
1965 | |
1966 | while (list) |
1967 | { |
1968 | if (candidate_equal_p (list->expr, written) |
1969 | && !candidate_equal_p (list->writer, writer) |
1970 | && (!only_writes || list->writer)) |
1971 | { |
1972 | warned_ids = new_tlist (warned_ids, written, NULL_TREE); |
1973 | warning_at (EXPR_LOC_OR_LOC (writer, input_location), |
1974 | OPT_Wsequence_point, "operation on %qE may be undefined", |
1975 | list->expr); |
1976 | } |
1977 | list = list->next; |
1978 | } |
1979 | } |
1980 | |
1981 | /* Given a list LIST of references to variables, find whether any of these |
1982 | can cause conflicts due to missing sequence points. */ |
1983 | |
1984 | static void |
1985 | warn_for_collisions (struct tlist *list) |
1986 | { |
1987 | struct tlist *tmp; |
1988 | |
1989 | for (tmp = list; tmp; tmp = tmp->next) |
1990 | { |
1991 | if (tmp->writer) |
1992 | warn_for_collisions_1 (tmp->expr, tmp->writer, list, 0); |
1993 | } |
1994 | } |
1995 | |
1996 | /* Return nonzero if X is a tree that can be verified by the sequence point |
1997 | warnings. */ |
1998 | |
1999 | static bool |
2000 | warning_candidate_p (tree x) |
2001 | { |
2002 | if (DECL_P (x) && DECL_ARTIFICIAL (x)) |
2003 | return false; |
2004 | |
2005 | if (TREE_CODE (x) == BLOCK) |
2006 | return false; |
2007 | |
2008 | /* VOID_TYPE_P (TREE_TYPE (x)) is a workaround for a cp/tree.cc |
2009 | (lvalue_p) crash on TRY/CATCH. */ |
2010 | if (TREE_TYPE (x) == NULL_TREE || VOID_TYPE_P (TREE_TYPE (x))) |
2011 | return false; |
2012 | |
2013 | if (!lvalue_p (x)) |
2014 | return false; |
2015 | |
2016 | /* No point in tracking non-const calls, as they will never satisfy |
2017 | operand_equal_p. */ |
2018 | if (TREE_CODE (x) == CALL_EXPR && (call_expr_flags (x) & ECF_CONST) == 0) |
2019 | return false; |
2020 | |
2021 | if (TREE_CODE (x) == STRING_CST) |
2022 | return false; |
2023 | |
2024 | return true; |
2025 | } |
2026 | |
2027 | /* Return nonzero if X and Y appear to be the same candidate (or NULL). */ |
2028 | static bool |
2029 | candidate_equal_p (const_tree x, const_tree y) |
2030 | { |
2031 | return (x == y) || (x && y && operand_equal_p (x, y, 0)); |
2032 | } |
2033 | |
2034 | /* Walk the tree X, and record accesses to variables. If X is written by the |
2035 | parent tree, WRITER is the parent. |
2036 | We store accesses in one of the two lists: PBEFORE_SP, and PNO_SP. If this |
2037 | expression or its only operand forces a sequence point, then everything up |
2038 | to the sequence point is stored in PBEFORE_SP. Everything else gets stored |
2039 | in PNO_SP. |
2040 | Once we return, we will have emitted warnings if any subexpression before |
2041 | such a sequence point could be undefined. On a higher level, however, the |
2042 | sequence point may not be relevant, and we'll merge the two lists. |
2043 | |
2044 | Example: (b++, a) + b; |
2045 | The call that processes the COMPOUND_EXPR will store the increment of B |
2046 | in PBEFORE_SP, and the use of A in PNO_SP. The higher-level call that |
2047 | processes the PLUS_EXPR will need to merge the two lists so that |
2048 | eventually, all accesses end up on the same list (and we'll warn about the |
2049 | unordered subexpressions b++ and b). |
2050 | |
2051 | A note on merging. If we modify the former example so that our expression |
2052 | becomes |
2053 | (b++, b) + a |
2054 | care must be taken not simply to add all three expressions into the final |
2055 | PNO_SP list. The function merge_tlist takes care of that by merging the |
2056 | before-SP list of the COMPOUND_EXPR into its after-SP list in a special |
2057 | way, so that no more than one access to B is recorded. */ |
2058 | |
2059 | static void |
2060 | verify_tree (tree x, struct tlist **pbefore_sp, struct tlist **pno_sp, |
2061 | tree writer) |
2062 | { |
2063 | struct tlist *tmp_before, *tmp_nosp, *tmp_list2, *tmp_list3; |
2064 | enum tree_code code; |
2065 | enum tree_code_class cl; |
2066 | |
2067 | restart: |
2068 | /* X may be NULL if it is the operand of an empty statement expression |
2069 | ({ }). */ |
2070 | if (x == NULL) |
2071 | return; |
2072 | |
2073 | code = TREE_CODE (x); |
2074 | cl = TREE_CODE_CLASS (code); |
2075 | |
2076 | if (warning_candidate_p (x)) |
2077 | *pno_sp = new_tlist (*pno_sp, x, writer); |
2078 | |
2079 | switch (code) |
2080 | { |
2081 | case CONSTRUCTOR: |
2082 | case SIZEOF_EXPR: |
2083 | case PAREN_SIZEOF_EXPR: |
2084 | return; |
2085 | |
2086 | case COMPOUND_EXPR: |
2087 | case TRUTH_ANDIF_EXPR: |
2088 | case TRUTH_ORIF_EXPR: |
2089 | sequenced_binary: |
2090 | tmp_before = tmp_nosp = tmp_list2 = tmp_list3 = 0; |
2091 | verify_tree (TREE_OPERAND (x, 0), &tmp_before, &tmp_nosp, NULL_TREE); |
2092 | warn_for_collisions (tmp_nosp); |
2093 | merge_tlist (pbefore_sp, tmp_before, 0); |
2094 | merge_tlist (pbefore_sp, tmp_nosp, 0); |
2095 | verify_tree (TREE_OPERAND (x, 1), &tmp_list3, &tmp_list2, NULL_TREE); |
2096 | warn_for_collisions (tmp_list2); |
2097 | merge_tlist (pbefore_sp, tmp_list3, 0); |
2098 | merge_tlist (pno_sp, tmp_list2, 0); |
2099 | return; |
2100 | |
2101 | case COND_EXPR: |
2102 | tmp_before = tmp_list2 = 0; |
2103 | verify_tree (TREE_OPERAND (x, 0), &tmp_before, &tmp_list2, NULL_TREE); |
2104 | warn_for_collisions (tmp_list2); |
2105 | merge_tlist (pbefore_sp, tmp_before, 0); |
2106 | merge_tlist (pbefore_sp, tmp_list2, 0); |
2107 | |
2108 | tmp_list3 = tmp_nosp = 0; |
2109 | verify_tree (TREE_OPERAND (x, 1), &tmp_list3, &tmp_nosp, NULL_TREE); |
2110 | warn_for_collisions (tmp_nosp); |
2111 | merge_tlist (pbefore_sp, tmp_list3, 0); |
2112 | |
2113 | tmp_list3 = tmp_list2 = 0; |
2114 | verify_tree (TREE_OPERAND (x, 2), &tmp_list3, &tmp_list2, NULL_TREE); |
2115 | warn_for_collisions (tmp_list2); |
2116 | merge_tlist (pbefore_sp, tmp_list3, 0); |
2117 | /* Rather than add both tmp_nosp and tmp_list2, we have to merge the |
2118 | two first, to avoid warning for (a ? b++ : b++). */ |
2119 | merge_tlist (&tmp_nosp, tmp_list2, 0); |
2120 | add_tlist (pno_sp, tmp_nosp, NULL_TREE, 0); |
2121 | return; |
2122 | |
2123 | case PREDECREMENT_EXPR: |
2124 | case PREINCREMENT_EXPR: |
2125 | case POSTDECREMENT_EXPR: |
2126 | case POSTINCREMENT_EXPR: |
2127 | verify_tree (TREE_OPERAND (x, 0), pno_sp, pno_sp, x); |
2128 | return; |
2129 | |
2130 | case MODIFY_EXPR: |
2131 | tmp_before = tmp_nosp = tmp_list3 = 0; |
2132 | verify_tree (TREE_OPERAND (x, 1), &tmp_before, &tmp_nosp, NULL_TREE); |
2133 | verify_tree (TREE_OPERAND (x, 0), &tmp_list3, &tmp_list3, x); |
2134 | /* Expressions inside the LHS are not ordered wrt. the sequence points |
2135 | in the RHS. Example: |
2136 | *a = (a++, 2) |
2137 | Despite the fact that the modification of "a" is in the before_sp |
2138 | list (tmp_before), it conflicts with the use of "a" in the LHS. |
2139 | We can handle this by adding the contents of tmp_list3 |
2140 | to those of tmp_before, and redoing the collision warnings for that |
2141 | list. */ |
2142 | add_tlist (&tmp_before, tmp_list3, x, 1); |
2143 | warn_for_collisions (tmp_before); |
2144 | /* Exclude the LHS itself here; we first have to merge it into the |
2145 | tmp_nosp list. This is done to avoid warning for "a = a"; if we |
2146 | didn't exclude the LHS, we'd get it twice, once as a read and once |
2147 | as a write. */ |
2148 | add_tlist (pno_sp, tmp_list3, x, 0); |
2149 | warn_for_collisions_1 (TREE_OPERAND (x, 0), x, tmp_nosp, 1); |
2150 | |
2151 | merge_tlist (pbefore_sp, tmp_before, 0); |
2152 | if (warning_candidate_p (TREE_OPERAND (x, 0))) |
2153 | merge_tlist (&tmp_nosp, new_tlist (NULL, TREE_OPERAND (x, 0), x), 0); |
2154 | add_tlist (pno_sp, tmp_nosp, NULL_TREE, 1); |
2155 | return; |
2156 | |
2157 | case CALL_EXPR: |
2158 | /* We need to warn about conflicts among arguments and conflicts between |
2159 | args and the function address. Side effects of the function address, |
2160 | however, are not ordered by the sequence point of the call. */ |
2161 | { |
2162 | call_expr_arg_iterator iter; |
2163 | tree arg; |
2164 | tmp_before = tmp_nosp = 0; |
2165 | verify_tree (CALL_EXPR_FN (x), &tmp_before, &tmp_nosp, NULL_TREE); |
2166 | FOR_EACH_CALL_EXPR_ARG (arg, iter, x) |
2167 | { |
2168 | tmp_list2 = tmp_list3 = 0; |
2169 | verify_tree (arg, &tmp_list2, &tmp_list3, NULL_TREE); |
2170 | merge_tlist (&tmp_list3, tmp_list2, 0); |
2171 | add_tlist (&tmp_before, tmp_list3, NULL_TREE, 0); |
2172 | } |
2173 | add_tlist (&tmp_before, tmp_nosp, NULL_TREE, 0); |
2174 | warn_for_collisions (tmp_before); |
2175 | add_tlist (pbefore_sp, tmp_before, NULL_TREE, 0); |
2176 | return; |
2177 | } |
2178 | |
2179 | case TREE_LIST: |
2180 | /* Scan the whole list, e.g. the indices of a multidimensional array. */ |
2181 | while (x) |
2182 | { |
2183 | tmp_before = tmp_nosp = 0; |
2184 | verify_tree (TREE_VALUE (x), &tmp_before, &tmp_nosp, NULL_TREE); |
2185 | merge_tlist (&tmp_nosp, tmp_before, 0); |
2186 | add_tlist (pno_sp, tmp_nosp, NULL_TREE, 0); |
2187 | x = TREE_CHAIN (x); |
2188 | } |
2189 | return; |
2190 | |
2191 | case SAVE_EXPR: |
2192 | { |
2193 | struct tlist_cache *t; |
2194 | for (t = save_expr_cache; t; t = t->next) |
2195 | if (candidate_equal_p (t->expr, x)) |
2196 | break; |
2197 | |
2198 | if (!t) |
2199 | { |
2200 | t = XOBNEW (&tlist_obstack, struct tlist_cache); |
2201 | t->next = save_expr_cache; |
2202 | t->expr = x; |
2203 | save_expr_cache = t; |
2204 | |
2205 | tmp_before = tmp_nosp = 0; |
2206 | verify_tree (TREE_OPERAND (x, 0), &tmp_before, &tmp_nosp, NULL_TREE); |
2207 | warn_for_collisions (tmp_nosp); |
2208 | |
2209 | tmp_list3 = 0; |
2210 | merge_tlist (&tmp_list3, tmp_nosp, 0); |
2211 | t->cache_before_sp = tmp_before; |
2212 | t->cache_after_sp = tmp_list3; |
2213 | } |
2214 | merge_tlist (pbefore_sp, t->cache_before_sp, 1); |
2215 | add_tlist (pno_sp, t->cache_after_sp, NULL_TREE, 1); |
2216 | return; |
2217 | } |
2218 | |
2219 | case ADDR_EXPR: |
2220 | x = TREE_OPERAND (x, 0); |
2221 | if (DECL_P (x)) |
2222 | return; |
2223 | writer = 0; |
2224 | goto restart; |
2225 | |
2226 | case VIEW_CONVERT_EXPR: |
2227 | if (location_wrapper_p (x)) |
2228 | { |
2229 | x = TREE_OPERAND (x, 0); |
2230 | goto restart; |
2231 | } |
2232 | goto do_default; |
2233 | |
2234 | case LSHIFT_EXPR: |
2235 | case RSHIFT_EXPR: |
2236 | case ARRAY_REF: |
2237 | if (cxx_dialect >= cxx17) |
2238 | goto sequenced_binary; |
2239 | goto do_default; |
2240 | |
2241 | case COMPONENT_REF: |
2242 | /* Treat as unary, the other operands aren't evaluated. */ |
2243 | x = TREE_OPERAND (x, 0); |
2244 | writer = 0; |
2245 | goto restart; |
2246 | |
2247 | default: |
2248 | do_default: |
2249 | /* For other expressions, simply recurse on their operands. |
2250 | Manual tail recursion for unary expressions. |
2251 | Other non-expressions need not be processed. */ |
2252 | if (cl == tcc_unary) |
2253 | { |
2254 | x = TREE_OPERAND (x, 0); |
2255 | writer = 0; |
2256 | goto restart; |
2257 | } |
2258 | else if (IS_EXPR_CODE_CLASS (cl)) |
2259 | { |
2260 | int lp; |
2261 | int max = TREE_OPERAND_LENGTH (x); |
2262 | for (lp = 0; lp < max; lp++) |
2263 | { |
2264 | tmp_before = tmp_nosp = 0; |
2265 | verify_tree (TREE_OPERAND (x, lp), &tmp_before, &tmp_nosp, 0); |
2266 | merge_tlist (&tmp_nosp, tmp_before, 0); |
2267 | add_tlist (pno_sp, tmp_nosp, NULL_TREE, 0); |
2268 | } |
2269 | } |
2270 | return; |
2271 | } |
2272 | } |
2273 | |
2274 | static constexpr size_t verify_sequence_points_limit = 1024; |
2275 | |
2276 | /* Called from verify_sequence_points via walk_tree. */ |
2277 | |
2278 | static tree |
2279 | verify_tree_lim_r (tree *tp, int *walk_subtrees, void *data) |
2280 | { |
2281 | if (++*((size_t *) data) > verify_sequence_points_limit) |
2282 | return integer_zero_node; |
2283 | |
2284 | if (TYPE_P (*tp)) |
2285 | *walk_subtrees = 0; |
2286 | |
2287 | return NULL_TREE; |
2288 | } |
2289 | |
2290 | /* Try to warn for undefined behavior in EXPR due to missing sequence |
2291 | points. */ |
2292 | |
2293 | void |
2294 | verify_sequence_points (tree expr) |
2295 | { |
2296 | tlist *before_sp = nullptr, *after_sp = nullptr; |
2297 | |
2298 | /* verify_tree is highly recursive, and merge_tlist is O(n^2), |
2299 | so we return early if the expression is too big. */ |
2300 | size_t n = 0; |
2301 | if (walk_tree (&expr, verify_tree_lim_r, &n, nullptr)) |
2302 | return; |
2303 | |
2304 | warned_ids = nullptr; |
2305 | save_expr_cache = nullptr; |
2306 | if (!tlist_firstobj) |
2307 | { |
2308 | gcc_obstack_init (&tlist_obstack); |
2309 | tlist_firstobj = (char *) obstack_alloc (&tlist_obstack, 0); |
2310 | } |
2311 | |
2312 | verify_tree (expr, &before_sp, &after_sp, NULL_TREE); |
2313 | warn_for_collisions (after_sp); |
2314 | obstack_free (&tlist_obstack, tlist_firstobj); |
2315 | } |
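 | |
 | /* For illustration only: in C, |
 | |
 |      int i = 0; |
 |      i = i++ + 1; |
 | |
 |    contains two unsequenced modifications of i and draws the |
 |    "operation on 'i' may be undefined" warning above; splitting it into |
 |    two statements removes the warning.  */ |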
2316 | |
2317 | /* Validate the expression after `case' and apply default promotions. */ |
2318 | |
2319 | static tree |
2320 | check_case_value (location_t loc, tree value) |
2321 | { |
2322 | if (value == NULL_TREE) |
2323 | return value; |
2324 | |
2325 | if (INTEGRAL_TYPE_P (TREE_TYPE (value)) |
2326 | && TREE_CODE (value) == INTEGER_CST) |
2327 | /* Promote char or short to int. */ |
2328 | value = perform_integral_promotions (value); |
2329 | else if (value != error_mark_node) |
2330 | { |
2331 | error_at (loc, "case label does not reduce to an integer constant"); |
2332 | value = error_mark_node; |
2333 | } |
2334 | |
2335 | constant_expression_warning (value); |
2336 | |
2337 | return value; |
2338 | } |
2339 | |
2340 | /* Return an integer type with BITS bits of precision, |
2341 | that is unsigned if UNSIGNEDP is nonzero, otherwise signed. */ |
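 | /* For illustration only: on a typical LP64 target, |
 |    c_common_type_for_size (32, 1) yields unsigned_type_node and |
 |    c_common_type_for_size (64, 0) yields long_integer_type_node, because |
 |    those precisions match the standard types checked first below.  */ |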
2342 | |
2343 | tree |
2344 | c_common_type_for_size (unsigned int bits, int unsignedp) |
2345 | { |
2346 | int i; |
2347 | |
2348 | if (bits == TYPE_PRECISION (integer_type_node)) |
2349 | return unsignedp ? unsigned_type_node : integer_type_node; |
2350 | |
2351 | if (bits == TYPE_PRECISION (signed_char_type_node)) |
2352 | return unsignedp ? unsigned_char_type_node : signed_char_type_node; |
2353 | |
2354 | if (bits == TYPE_PRECISION (short_integer_type_node)) |
2355 | return unsignedp ? short_unsigned_type_node : short_integer_type_node; |
2356 | |
2357 | if (bits == TYPE_PRECISION (long_integer_type_node)) |
2358 | return unsignedp ? long_unsigned_type_node : long_integer_type_node; |
2359 | |
2360 | if (bits == TYPE_PRECISION (long_long_integer_type_node)) |
2361 | return (unsignedp ? long_long_unsigned_type_node |
2362 | : long_long_integer_type_node); |
2363 | |
2364 | for (i = 0; i < NUM_INT_N_ENTS; i ++) |
2365 | if (int_n_enabled_p[i] |
2366 | && bits == int_n_data[i].bitsize) |
2367 | return (unsignedp ? int_n_trees[i].unsigned_type |
2368 | : int_n_trees[i].signed_type); |
2369 | |
2370 | if (bits == TYPE_PRECISION (widest_integer_literal_type_node)) |
2371 | return (unsignedp ? widest_unsigned_literal_type_node |
2372 | : widest_integer_literal_type_node); |
2373 | |
2374 | for (tree t = registered_builtin_types; t; t = TREE_CHAIN (t)) |
2375 | { |
2376 | tree type = TREE_VALUE (t); |
2377 | if (TREE_CODE (type) == INTEGER_TYPE |
2378 | && bits == TYPE_PRECISION (type) |
2379 | && !!unsignedp == !!TYPE_UNSIGNED (type)) |
2380 | return type; |
2381 | } |
2382 | |
2383 | if (bits <= TYPE_PRECISION (intQI_type_node)) |
2384 | return unsignedp ? unsigned_intQI_type_node : intQI_type_node; |
2385 | |
2386 | if (bits <= TYPE_PRECISION (intHI_type_node)) |
2387 | return unsignedp ? unsigned_intHI_type_node : intHI_type_node; |
2388 | |
2389 | if (bits <= TYPE_PRECISION (intSI_type_node)) |
2390 | return unsignedp ? unsigned_intSI_type_node : intSI_type_node; |
2391 | |
2392 | if (bits <= TYPE_PRECISION (intDI_type_node)) |
2393 | return unsignedp ? unsigned_intDI_type_node : intDI_type_node; |
2394 | |
2395 | if (bits <= TYPE_PRECISION (widest_integer_literal_type_node)) |
2396 | return (unsignedp ? widest_unsigned_literal_type_node |
2397 | : widest_integer_literal_type_node); |
2398 | |
2399 | return NULL_TREE; |
2400 | } |
2401 | |
2402 | /* Return a fixed-point type that has at least IBIT ibits and FBIT fbits |
2403 | that is unsigned if UNSIGNEDP is nonzero, otherwise signed; |
2404 | and saturating if SATP is nonzero, otherwise not saturating. */ |
2405 | |
2406 | tree |
2407 | c_common_fixed_point_type_for_size (unsigned int ibit, unsigned int fbit, |
2408 | int unsignedp, int satp) |
2409 | { |
2410 | enum mode_class mclass; |
2411 | if (ibit == 0) |
2412 | mclass = unsignedp ? MODE_UFRACT : MODE_FRACT; |
2413 | else |
2414 | mclass = unsignedp ? MODE_UACCUM : MODE_ACCUM; |
2415 | |
2416 | opt_scalar_mode opt_mode; |
2417 | scalar_mode mode; |
2418 | FOR_EACH_MODE_IN_CLASS (opt_mode, mclass) |
2419 | { |
2420 | mode = opt_mode.require (); |
2421 | if (GET_MODE_IBIT (mode) >= ibit && GET_MODE_FBIT (mode) >= fbit) |
2422 | break; |
2423 | } |
2424 | |
2425 | if (!opt_mode.exists (&mode) || !targetm.scalar_mode_supported_p (mode)) |
2426 | { |
2427 | sorry ("GCC cannot support operators with integer types and " |
2428 | "fixed-point types that have too many integral and " |
2429 | "fractional bits together"); |
2430 | return NULL_TREE; |
2431 | } |
2432 | |
2433 | return c_common_type_for_mode (mode, satp); |
2434 | } |
2435 | |
2436 | /* Used for communication between c_common_type_for_mode and |
2437 | c_register_builtin_type. */ |
2438 | tree registered_builtin_types; |
2439 | |
2440 | /* Return a data type that has machine mode MODE. |
2441 | If the mode is an integer, |
2442 | then UNSIGNEDP selects between signed and unsigned types. |
2443 | If the mode is a fixed-point mode, |
2444 | then UNSIGNEDP selects between saturating and nonsaturating types. */ |
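 | /* For illustration only: on most targets c_common_type_for_mode (SImode, 0) |
 |    returns integer_type_node, while a vector mode such as V4SImode falls |
 |    through to the vector handling below and yields a vector type built |
 |    from the corresponding inner integer type.  */ |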
2445 | |
2446 | tree |
2447 | c_common_type_for_mode (machine_mode mode, int unsignedp) |
2448 | { |
2449 | tree t; |
2450 | int i; |
2451 | |
2452 | if (mode == TYPE_MODE (integer_type_node)) |
2453 | return unsignedp ? unsigned_type_node : integer_type_node; |
2454 | |
2455 | if (mode == TYPE_MODE (signed_char_type_node)) |
2456 | return unsignedp ? unsigned_char_type_node : signed_char_type_node; |
2457 | |
2458 | if (mode == TYPE_MODE (short_integer_type_node)) |
2459 | return unsignedp ? short_unsigned_type_node : short_integer_type_node; |
2460 | |
2461 | if (mode == TYPE_MODE (long_integer_type_node)) |
2462 | return unsignedp ? long_unsigned_type_node : long_integer_type_node; |
2463 | |
2464 | if (mode == TYPE_MODE (long_long_integer_type_node)) |
2465 | return unsignedp ? long_long_unsigned_type_node : long_long_integer_type_node; |
2466 | |
2467 | for (i = 0; i < NUM_INT_N_ENTS; i ++) |
2468 | if (int_n_enabled_p[i] |
2469 | && mode == int_n_data[i].m) |
2470 | return (unsignedp ? int_n_trees[i].unsigned_type |
2471 | : int_n_trees[i].signed_type); |
2472 | |
2473 | if (mode == QImode) |
2474 | return unsignedp ? unsigned_intQI_type_node : intQI_type_node; |
2475 | |
2476 | if (mode == HImode) |
2477 | return unsignedp ? unsigned_intHI_type_node : intHI_type_node; |
2478 | |
2479 | if (mode == SImode) |
2480 | return unsignedp ? unsigned_intSI_type_node : intSI_type_node; |
2481 | |
2482 | if (mode == DImode) |
2483 | return unsignedp ? unsigned_intDI_type_node : intDI_type_node; |
2484 | |
2485 | #if HOST_BITS_PER_WIDE_INT >= 64 |
2486 | if (mode == TYPE_MODE (intTI_type_node)) |
2487 | return unsignedp ? unsigned_intTI_type_node : intTI_type_node; |
2488 | #endif |
2489 | |
2490 | if (mode == TYPE_MODE (float_type_node)) |
2491 | return float_type_node; |
2492 | |
2493 | if (mode == TYPE_MODE (double_type_node)) |
2494 | return double_type_node; |
2495 | |
2496 | if (mode == TYPE_MODE (long_double_type_node)) |
2497 | return long_double_type_node; |
2498 | |
2499 | for (i = 0; i < NUM_FLOATN_NX_TYPES; i++) |
2500 | if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE |
2501 | && mode == TYPE_MODE (FLOATN_NX_TYPE_NODE (i))) |
2502 | return FLOATN_NX_TYPE_NODE (i); |
2503 | |
2504 | if (mode == TYPE_MODE (void_type_node)) |
2505 | return void_type_node; |
2506 | |
2507 | if (mode == TYPE_MODE (build_pointer_type (char_type_node)) |
2508 | || mode == TYPE_MODE (build_pointer_type (integer_type_node))) |
2509 | { |
2510 | unsigned int precision |
2511 | = GET_MODE_PRECISION (as_a <scalar_int_mode> (mode)); |
2512 | return (unsignedp |
2513 | ? make_unsigned_type (precision) |
2514 | : make_signed_type (precision)); |
2515 | } |
2516 | |
2517 | if (COMPLEX_MODE_P (mode)) |
2518 | { |
2519 | machine_mode inner_mode; |
2520 | tree inner_type; |
2521 | |
2522 | if (mode == TYPE_MODE (complex_float_type_node)) |
2523 | return complex_float_type_node; |
2524 | if (mode == TYPE_MODE (complex_double_type_node)) |
2525 | return complex_double_type_node; |
2526 | if (mode == TYPE_MODE (complex_long_double_type_node)) |
2527 | return complex_long_double_type_node; |
2528 | |
2529 | for (i = 0; i < NUM_FLOATN_NX_TYPES; i++) |
2530 | if (COMPLEX_FLOATN_NX_TYPE_NODE (i) != NULL_TREE |
2531 | && mode == TYPE_MODE (COMPLEX_FLOATN_NX_TYPE_NODE (i))) |
2532 | return COMPLEX_FLOATN_NX_TYPE_NODE (i); |
2533 | |
2534 | if (mode == TYPE_MODE (complex_integer_type_node) && !unsignedp) |
2535 | return complex_integer_type_node; |
2536 | |
2537 | inner_mode = GET_MODE_INNER (mode); |
2538 | inner_type = c_common_type_for_mode (inner_mode, unsignedp); |
2539 | if (inner_type != NULL_TREE) |
2540 | return build_complex_type (inner_type); |
2541 | } |
2542 | else if (GET_MODE_CLASS (mode) == MODE_VECTOR_BOOL |
2543 | && valid_vector_subparts_p (GET_MODE_NUNITS (mode))) |
2544 | { |
2545 | unsigned int elem_bits = vector_element_size (GET_MODE_PRECISION (mode), |
2546 | GET_MODE_NUNITS (mode)); |
2547 | tree bool_type = build_nonstandard_boolean_type (elem_bits); |
2548 | return build_vector_type_for_mode (bool_type, mode); |
2549 | } |
2550 | else if (VECTOR_MODE_P (mode) |
2551 | && valid_vector_subparts_p (GET_MODE_NUNITS (mode))) |
2552 | { |
2553 | machine_mode inner_mode = GET_MODE_INNER (mode); |
2554 | tree inner_type = c_common_type_for_mode (inner_mode, unsignedp); |
2555 | if (inner_type != NULL_TREE) |
2556 | return build_vector_type_for_mode (inner_type, mode); |
2557 | } |
2558 | |
2559 | if (dfloat32_type_node != NULL_TREE |
2560 | && mode == TYPE_MODE (dfloat32_type_node)) |
2561 | return dfloat32_type_node; |
2562 | if (dfloat64_type_node != NULL_TREE |
2563 | && mode == TYPE_MODE (dfloat64_type_node)) |
2564 | return dfloat64_type_node; |
2565 | if (dfloat128_type_node != NULL_TREE |
2566 | && mode == TYPE_MODE (dfloat128_type_node)) |
2567 | return dfloat128_type_node; |
2568 | |
2569 | if (ALL_SCALAR_FIXED_POINT_MODE_P (mode)) |
2570 | { |
2571 | if (mode == TYPE_MODE (short_fract_type_node)) |
2572 | return unsignedp ? sat_short_fract_type_node : short_fract_type_node; |
2573 | if (mode == TYPE_MODE (fract_type_node)) |
2574 | return unsignedp ? sat_fract_type_node : fract_type_node; |
2575 | if (mode == TYPE_MODE (long_fract_type_node)) |
2576 | return unsignedp ? sat_long_fract_type_node : long_fract_type_node; |
2577 | if (mode == TYPE_MODE (long_long_fract_type_node)) |
2578 | return unsignedp ? sat_long_long_fract_type_node |
2579 | : long_long_fract_type_node; |
2580 | |
2581 | if (mode == TYPE_MODE (unsigned_short_fract_type_node)) |
2582 | return unsignedp ? sat_unsigned_short_fract_type_node |
2583 | : unsigned_short_fract_type_node; |
2584 | if (mode == TYPE_MODE (unsigned_fract_type_node)) |
2585 | return unsignedp ? sat_unsigned_fract_type_node |
2586 | : unsigned_fract_type_node; |
2587 | if (mode == TYPE_MODE (unsigned_long_fract_type_node)) |
2588 | return unsignedp ? sat_unsigned_long_fract_type_node |
2589 | : unsigned_long_fract_type_node; |
2590 | if (mode == TYPE_MODE (unsigned_long_long_fract_type_node)) |
2591 | return unsignedp ? sat_unsigned_long_long_fract_type_node |
2592 | : unsigned_long_long_fract_type_node; |
2593 | |
2594 | if (mode == TYPE_MODE (short_accum_type_node)) |
2595 | return unsignedp ? sat_short_accum_type_node : short_accum_type_node; |
2596 | if (mode == TYPE_MODE (accum_type_node)) |
2597 | return unsignedp ? sat_accum_type_node : accum_type_node; |
2598 | if (mode == TYPE_MODE (long_accum_type_node)) |
2599 | return unsignedp ? sat_long_accum_type_node : long_accum_type_node; |
2600 | if (mode == TYPE_MODE (long_long_accum_type_node)) |
2601 | return unsignedp ? sat_long_long_accum_type_node |
2602 | : long_long_accum_type_node; |
2603 | |
2604 | if (mode == TYPE_MODE (unsigned_short_accum_type_node)) |
2605 | return unsignedp ? sat_unsigned_short_accum_type_node |
2606 | : unsigned_short_accum_type_node; |
2607 | if (mode == TYPE_MODE (unsigned_accum_type_node)) |
2608 | return unsignedp ? sat_unsigned_accum_type_node |
2609 | : unsigned_accum_type_node; |
2610 | if (mode == TYPE_MODE (unsigned_long_accum_type_node)) |
2611 | return unsignedp ? sat_unsigned_long_accum_type_node |
2612 | : unsigned_long_accum_type_node; |
2613 | if (mode == TYPE_MODE (unsigned_long_long_accum_type_node)) |
2614 | return unsignedp ? sat_unsigned_long_long_accum_type_node |
2615 | : unsigned_long_long_accum_type_node; |
2616 | |
2617 | if (mode == QQmode) |
2618 | return unsignedp ? sat_qq_type_node : qq_type_node; |
2619 | if (mode == HQmode) |
2620 | return unsignedp ? sat_hq_type_node : hq_type_node; |
2621 | if (mode == SQmode) |
2622 | return unsignedp ? sat_sq_type_node : sq_type_node; |
2623 | if (mode == DQmode) |
2624 | return unsignedp ? sat_dq_type_node : dq_type_node; |
2625 | if (mode == TQmode) |
2626 | return unsignedp ? sat_tq_type_node : tq_type_node; |
2627 | |
2628 | if (mode == UQQmode) |
2629 | return unsignedp ? sat_uqq_type_node : uqq_type_node; |
2630 | if (mode == UHQmode) |
2631 | return unsignedp ? sat_uhq_type_node : uhq_type_node; |
2632 | if (mode == USQmode) |
2633 | return unsignedp ? sat_usq_type_node : usq_type_node; |
2634 | if (mode == UDQmode) |
2635 | return unsignedp ? sat_udq_type_node : udq_type_node; |
2636 | if (mode == UTQmode) |
2637 | return unsignedp ? sat_utq_type_node : utq_type_node; |
2638 | |
2639 | if (mode == HAmode) |
2640 | return unsignedp ? sat_ha_type_node : ha_type_node; |
2641 | if (mode == SAmode) |
2642 | return unsignedp ? sat_sa_type_node : sa_type_node; |
2643 | if (mode == DAmode) |
2644 | return unsignedp ? sat_da_type_node : da_type_node; |
2645 | if (mode == TAmode) |
2646 | return unsignedp ? sat_ta_type_node : ta_type_node; |
2647 | |
2648 | if (mode == UHAmode) |
2649 | return unsignedp ? sat_uha_type_node : uha_type_node; |
2650 | if (mode == USAmode) |
2651 | return unsignedp ? sat_usa_type_node : usa_type_node; |
2652 | if (mode == UDAmode) |
2653 | return unsignedp ? sat_uda_type_node : uda_type_node; |
2654 | if (mode == UTAmode) |
2655 | return unsignedp ? sat_uta_type_node : uta_type_node; |
2656 | } |
2657 | |
2658 | for (t = registered_builtin_types; t; t = TREE_CHAIN (t)) |
2659 | { |
2660 | tree type = TREE_VALUE (t); |
2661 | if (TYPE_MODE (type) == mode |
2662 | && VECTOR_TYPE_P (type) == VECTOR_MODE_P (mode) |
2663 | && !!unsignedp == !!TYPE_UNSIGNED (type)) |
2664 | return type; |
2665 | } |
2666 | return NULL_TREE; |
2667 | } |
2668 | |
2669 | tree |
2670 | c_common_unsigned_type (tree type) |
2671 | { |
2672 | return c_common_signed_or_unsigned_type (1, type); |
2673 | } |
2674 | |
2675 | /* Return a signed type the same as TYPE in other respects. */ |
2676 | |
2677 | tree |
2678 | c_common_signed_type (tree type) |
2679 | { |
2680 | return c_common_signed_or_unsigned_type (0, type); |
2681 | } |
2682 | |
2683 | /* Return a type the same as TYPE except unsigned or |
2684 | signed according to UNSIGNEDP. */ |
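 | /* For illustration only: |
 |    c_common_signed_or_unsigned_type (1, long_integer_type_node) returns |
 |    long_unsigned_type_node rather than unsigned_type_node, even on targets |
 |    where int and long have the same precision, as the comment below |
 |    explains.  */ |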
2685 | |
2686 | tree |
2687 | c_common_signed_or_unsigned_type (int unsignedp, tree type) |
2688 | { |
2689 | tree type1; |
2690 | int i; |
2691 | |
2692 | /* This block of code emulates the behavior of the old |
2693 | c_common_unsigned_type. In particular, it returns |
2694 | long_unsigned_type_node if passed a long, even when an int would |
2695 | have the same size. This is necessary for warnings to work |
2696 | correctly on architectures where sizeof(int) == sizeof(long). */ |
2697 | |
2698 | type1 = TYPE_MAIN_VARIANT (type); |
2699 | if (type1 == signed_char_type_node || type1 == char_type_node || type1 == unsigned_char_type_node) |
2700 | return unsignedp ? unsigned_char_type_node : signed_char_type_node; |
2701 | if (type1 == integer_type_node || type1 == unsigned_type_node) |
2702 | return unsignedp ? unsigned_type_node : integer_type_node; |
2703 | if (type1 == short_integer_type_node || type1 == short_unsigned_type_node) |
2704 | return unsignedp ? short_unsigned_type_node : short_integer_type_node; |
2705 | if (type1 == long_integer_type_node || type1 == long_unsigned_type_node) |
2706 | return unsignedp ? long_unsigned_type_node : long_integer_type_node; |
2707 | if (type1 == long_long_integer_type_node || type1 == long_long_unsigned_type_node) |
2708 | return unsignedp ? long_long_unsigned_type_node : long_long_integer_type_node; |
2709 | |
2710 | for (i = 0; i < NUM_INT_N_ENTS; i ++) |
2711 | if (int_n_enabled_p[i] |
2712 | && (type1 == int_n_trees[i].unsigned_type |
2713 | || type1 == int_n_trees[i].signed_type)) |
2714 | return (unsignedp ? int_n_trees[i].unsigned_type |
2715 | : int_n_trees[i].signed_type); |
2716 | |
2717 | #if HOST_BITS_PER_WIDE_INT >= 64 |
2718 | if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node) |
2719 | return unsignedp ? unsigned_intTI_type_node : intTI_type_node; |
2720 | #endif |
2721 | if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node) |
2722 | return unsignedp ? unsigned_intDI_type_node : intDI_type_node; |
2723 | if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node) |
2724 | return unsignedp ? unsigned_intSI_type_node : intSI_type_node; |
2725 | if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node) |
2726 | return unsignedp ? unsigned_intHI_type_node : intHI_type_node; |
2727 | if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node) |
2728 | return unsignedp ? unsigned_intQI_type_node : intQI_type_node; |
2729 | |
2730 | #define C_COMMON_FIXED_TYPES(NAME) \ |
2731 | if (type1 == short_ ## NAME ## _type_node \ |
2732 | || type1 == unsigned_short_ ## NAME ## _type_node) \ |
2733 | return unsignedp ? unsigned_short_ ## NAME ## _type_node \ |
2734 | : short_ ## NAME ## _type_node; \ |
2735 | if (type1 == NAME ## _type_node \ |
2736 | || type1 == unsigned_ ## NAME ## _type_node) \ |
2737 | return unsignedp ? unsigned_ ## NAME ## _type_node \ |
2738 | : NAME ## _type_node; \ |
2739 | if (type1 == long_ ## NAME ## _type_node \ |
2740 | || type1 == unsigned_long_ ## NAME ## _type_node) \ |
2741 | return unsignedp ? unsigned_long_ ## NAME ## _type_node \ |
2742 | : long_ ## NAME ## _type_node; \ |
2743 | if (type1 == long_long_ ## NAME ## _type_node \ |
2744 | || type1 == unsigned_long_long_ ## NAME ## _type_node) \ |
2745 | return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \ |
2746 | : long_long_ ## NAME ## _type_node; |
2747 | |
2748 | #define C_COMMON_FIXED_MODE_TYPES(NAME) \ |
2749 | if (type1 == NAME ## _type_node \ |
2750 | || type1 == u ## NAME ## _type_node) \ |
2751 | return unsignedp ? u ## NAME ## _type_node \ |
2752 | : NAME ## _type_node; |
2753 | |
2754 | #define C_COMMON_FIXED_TYPES_SAT(NAME) \ |
2755 | if (type1 == sat_ ## short_ ## NAME ## _type_node \ |
2756 | || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \ |
2757 | return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \ |
2758 | : sat_ ## short_ ## NAME ## _type_node; \ |
2759 | if (type1 == sat_ ## NAME ## _type_node \ |
2760 | || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \ |
2761 | return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \ |
2762 | : sat_ ## NAME ## _type_node; \ |
2763 | if (type1 == sat_ ## long_ ## NAME ## _type_node \ |
2764 | || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \ |
2765 | return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \ |
2766 | : sat_ ## long_ ## NAME ## _type_node; \ |
2767 | if (type1 == sat_ ## long_long_ ## NAME ## _type_node \ |
2768 | || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \ |
2769 | return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \ |
2770 | : sat_ ## long_long_ ## NAME ## _type_node; |
2771 | |
2772 | #define C_COMMON_FIXED_MODE_TYPES_SAT(NAME) \ |
2773 | if (type1 == sat_ ## NAME ## _type_node \ |
2774 | || type1 == sat_ ## u ## NAME ## _type_node) \ |
2775 | return unsignedp ? sat_ ## u ## NAME ## _type_node \ |
2776 | : sat_ ## NAME ## _type_node; |
2777 | |
2778 | C_COMMON_FIXED_TYPES (fract); |
2779 | C_COMMON_FIXED_TYPES_SAT (fract); |
2780 | C_COMMON_FIXED_TYPES (accum); |
2781 | C_COMMON_FIXED_TYPES_SAT (accum); |
2782 | |
2783 | C_COMMON_FIXED_MODE_TYPES (qq); |
2784 | C_COMMON_FIXED_MODE_TYPES (hq); |
2785 | C_COMMON_FIXED_MODE_TYPES (sq); |
2786 | C_COMMON_FIXED_MODE_TYPES (dq); |
2787 | C_COMMON_FIXED_MODE_TYPES (tq); |
2788 | C_COMMON_FIXED_MODE_TYPES_SAT (qq); |
2789 | C_COMMON_FIXED_MODE_TYPES_SAT (hq); |
2790 | C_COMMON_FIXED_MODE_TYPES_SAT (sq); |
2791 | C_COMMON_FIXED_MODE_TYPES_SAT (dq); |
2792 | C_COMMON_FIXED_MODE_TYPES_SAT (tq); |
2793 | C_COMMON_FIXED_MODE_TYPES (ha); |
2794 | C_COMMON_FIXED_MODE_TYPES (sa); |
2795 | C_COMMON_FIXED_MODE_TYPES (da); |
2796 | C_COMMON_FIXED_MODE_TYPES (ta); |
2797 | C_COMMON_FIXED_MODE_TYPES_SAT (ha); |
2798 | C_COMMON_FIXED_MODE_TYPES_SAT (sa); |
2799 | C_COMMON_FIXED_MODE_TYPES_SAT (da); |
2800 | C_COMMON_FIXED_MODE_TYPES_SAT (ta); |
2801 | |
2802 | /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not |
2803 | the precision; they have precision set to match their range, but |
2804 | may use a wider mode to match an ABI. If we change modes, we may |
2805 | wind up with bad conversions. For INTEGER_TYPEs in C, must check |
2806 | the precision as well, so as to yield correct results for |
2807 | bit-field types. C++ does not have these separate bit-field |
2808 | types, and producing a signed or unsigned variant of an |
2809 | ENUMERAL_TYPE may cause other problems as well. */ |
2810 | |
2811 | if (!INTEGRAL_TYPE_P (type) |
2812 | || TYPE_UNSIGNED (type) == unsignedp) |
2813 | return type; |
2814 | |
2815 | if (TREE_CODE (type) == BITINT_TYPE |
2816 | /* signed _BitInt(1) is invalid, avoid creating that. */ |
2817 | && (unsignedp || TYPE_PRECISION (type) > 1)) |
2818 | return build_bitint_type (TYPE_PRECISION (type), unsignedp); |
2819 | |
2820 | #define TYPE_OK(node) \ |
2821 | (TYPE_MODE (type) == TYPE_MODE (node) \ |
2822 | && TYPE_PRECISION (type) == TYPE_PRECISION (node)) |
2823 | if (TYPE_OK (signed_char_type_node)) |
2824 | return unsignedp ? unsigned_char_type_node : signed_char_type_node; |
2825 | if (TYPE_OK (integer_type_node)) |
2826 | return unsignedp ? unsigned_type_node : integer_type_node; |
2827 | if (TYPE_OK (short_integer_type_node)) |
2828 | return unsignedp ? short_unsigned_type_node : short_integer_type_node; |
2829 | if (TYPE_OK (long_integer_type_node)) |
2830 | return unsignedp ? long_unsigned_type_node : long_integer_type_node; |
2831 | if (TYPE_OK (long_long_integer_type_node)) |
2832 | return (unsignedp ? long_long_unsigned_type_node |
2833 | : long_long_integer_type_node); |
2834 | |
2835 | for (i = 0; i < NUM_INT_N_ENTS; i ++) |
2836 | if (int_n_enabled_p[i] |
2837 | && TYPE_MODE (type) == int_n_data[i].m |
2838 | && TYPE_PRECISION (type) == int_n_data[i].bitsize) |
2839 | return (unsignedp ? int_n_trees[i].unsigned_type |
2840 | : int_n_trees[i].signed_type); |
2841 | |
2842 | #if HOST_BITS_PER_WIDE_INT >= 64 |
2843 | if (TYPE_OK (intTI_type_node)) |
2844 | return unsignedp ? unsigned_intTI_type_node : intTI_type_node; |
2845 | #endif |
2846 | if (TYPE_OK (intDI_type_node)) |
2847 | return unsignedp ? unsigned_intDI_type_node : intDI_type_node; |
2848 | if (TYPE_OK (intSI_type_node)) |
2849 | return unsignedp ? unsigned_intSI_type_node : intSI_type_node; |
2850 | if (TYPE_OK (intHI_type_node)) |
2851 | return unsignedp ? unsigned_intHI_type_node : intHI_type_node; |
2852 | if (TYPE_OK (intQI_type_node)) |
2853 | return unsignedp ? unsigned_intQI_type_node : intQI_type_node; |
2854 | #undef TYPE_OK |
2855 | |
2856 | return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp); |
2857 | } |
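| |
| /* Example (illustrative only): asking for the signed variant of |
| `unsigned long' hits the identity checks at the top and yields |
| `long' itself, even on targets where `int' has the same width: |
| |
| tree t = c_common_signed_or_unsigned_type (0, long_unsigned_type_node); |
| gcc_assert (t == long_integer_type_node); |
| |
| whereas a 24-bit C bit-field type matches neither the identity nor |
| the TYPE_OK checks and falls through to |
| build_nonstandard_integer_type (24, unsignedp). */ |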
2858 | |
2859 | /* Build a bit-field integer type for the given WIDTH and UNSIGNEDP. */ |
2860 | |
2861 | tree |
2862 | c_build_bitfield_integer_type (unsigned HOST_WIDE_INT width, int unsignedp) |
2863 | { |
2864 | int i; |
2865 | |
2866 | /* Extended integer types of the same width as a standard type have |
2867 | lesser rank, so those of the same width as int promote to int or |
2868 | unsigned int and are valid for printf formats expecting int or |
2869 | unsigned int. To avoid such special cases, avoid creating |
2870 | extended integer types for bit-fields if a standard integer type |
2871 | is available. */ |
2872 | if (width == TYPE_PRECISION (integer_type_node)) |
2873 | return unsignedp ? unsigned_type_node : integer_type_node; |
2874 | if (width == TYPE_PRECISION (signed_char_type_node)) |
2875 | return unsignedp ? unsigned_char_type_node : signed_char_type_node; |
2876 | if (width == TYPE_PRECISION (short_integer_type_node)) |
2877 | return unsignedp ? short_unsigned_type_node : short_integer_type_node; |
2878 | if (width == TYPE_PRECISION (long_integer_type_node)) |
2879 | return unsignedp ? long_unsigned_type_node : long_integer_type_node; |
2880 | if (width == TYPE_PRECISION (long_long_integer_type_node)) |
2881 | return (unsignedp ? long_long_unsigned_type_node |
2882 | : long_long_integer_type_node); |
2883 | for (i = 0; i < NUM_INT_N_ENTS; i ++) |
2884 | if (int_n_enabled_p[i] |
2885 | && width == int_n_data[i].bitsize) |
2886 | return (unsignedp ? int_n_trees[i].unsigned_type |
2887 | : int_n_trees[i].signed_type); |
2888 | return build_nonstandard_integer_type (width, unsignedp); |
2889 | } |
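| |
| /* Illustrative sketch (assuming a target with 32-bit int): a bit-field |
| declared `unsigned int f : 32' is given unsigned_type_node by the |
| checks above, while `unsigned int f : 24' matches no standard width |
| and becomes build_nonstandard_integer_type (24, 1). */ |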
2890 | |
2891 | /* The C version of the register_builtin_type langhook. */ |
2892 | |
2893 | void |
2894 | c_register_builtin_type (tree type, const char* name) |
2895 | { |
2896 | tree decl; |
2897 | |
2898 | decl = build_decl (UNKNOWN_LOCATION, |
2899 | TYPE_DECL, get_identifier (name), type); |
2900 | DECL_ARTIFICIAL (decl) = 1; |
2901 | if (!TYPE_NAME (type)) |
2902 | TYPE_NAME (type) = decl; |
2903 | lang_hooks.decls.pushdecl (decl); |
2904 | |
2905 | registered_builtin_types = tree_cons (0, type, registered_builtin_types); |
2906 | } |
2907 | |
2908 | /* Print an error message for invalid operands to arith operation |
2909 | CODE with TYPE0 for operand 0, and TYPE1 for operand 1. |
2910 | RICHLOC is a rich location for the message, containing either |
2911 | three separate locations for each of the operator and operands |
2912 | |
2913 | lhs op rhs |
2914 | ~~~ ^~ ~~~ |
2915 | |
2916 | (C FE), or one location ranging over all of them |
2917 | |
2918 | lhs op rhs |
2919 | ~~~~^~~~~~ |
2920 | |
2921 | (C++ FE). */ |
2922 | |
2923 | void |
2924 | binary_op_error (rich_location *richloc, enum tree_code code, |
2925 | tree type0, tree type1) |
2926 | { |
2927 | const char *opname; |
2928 | |
2929 | switch (code) |
2930 | { |
2931 | case PLUS_EXPR: |
2932 | opname = "+"; break; |
2933 | case MINUS_EXPR: |
2934 | opname = "-"; break; |
2935 | case MULT_EXPR: |
2936 | opname = "*"; break; |
2937 | case MAX_EXPR: |
2938 | opname = "max"; break; |
2939 | case MIN_EXPR: |
2940 | opname = "min"; break; |
2941 | case EQ_EXPR: |
2942 | opname = "=="; break; |
2943 | case NE_EXPR: |
2944 | opname = "!="; break; |
2945 | case LE_EXPR: |
2946 | opname = "<="; break; |
2947 | case GE_EXPR: |
2948 | opname = ">="; break; |
2949 | case LT_EXPR: |
2950 | opname = "<"; break; |
2951 | case GT_EXPR: |
2952 | opname = ">"; break; |
2953 | case LSHIFT_EXPR: |
2954 | opname = "<<"; break; |
2955 | case RSHIFT_EXPR: |
2956 | opname = ">>"; break; |
2957 | case TRUNC_MOD_EXPR: |
2958 | case FLOOR_MOD_EXPR: |
2959 | opname = "%"; break; |
2960 | case TRUNC_DIV_EXPR: |
2961 | case FLOOR_DIV_EXPR: |
2962 | opname = "/"; break; |
2963 | case BIT_AND_EXPR: |
2964 | opname = "&"; break; |
2965 | case BIT_IOR_EXPR: |
2966 | opname = "|"; break; |
2967 | case TRUTH_ANDIF_EXPR: |
2968 | opname = "&&"; break; |
2969 | case TRUTH_ORIF_EXPR: |
2970 | opname = "||"; break; |
2971 | case BIT_XOR_EXPR: |
2972 | opname = "^"; break; |
2973 | default: |
2974 | gcc_unreachable (); |
2975 | } |
2976 | pp_markup::element_quoted_type element_0 (type0, highlight_colors::lhs); |
2977 | pp_markup::element_quoted_type element_1 (type1, highlight_colors::rhs); |
2978 | error_at (richloc, |
2979 | "invalid operands to binary %s (have %e and %e)", |
2980 | opname, &element_0, &element_1); |
2981 | } |
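| |
| /* For instance (illustrative), adding a pointer and a float produces |
| |
| error: invalid operands to binary + (have 'int *' and 'float') |
| |
| with the two operands underlined separately by the C front end and |
| the whole expression underlined as one range by the C++ front end. */ |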
2982 | |
2983 | /* Given an expression as a tree, return its original type. Do this |
2984 | by stripping any conversion that preserves the sign and precision. */ |
2985 | static tree |
2986 | expr_original_type (tree expr) |
2987 | { |
2988 | STRIP_SIGN_NOPS (expr); |
2989 | return TREE_TYPE (expr); |
2990 | } |
2991 | |
2992 | /* Subroutine of build_binary_op, used for comparison operations. |
2993 | See if the operands have both been converted from subword integer types |
2994 | and, if so, perhaps change them both back to their original type. |
2995 | This function is also responsible for converting the two operands |
2996 | to the proper common type for comparison. |
2997 | |
2998 | The arguments of this function are all pointers to local variables |
2999 | of build_binary_op: OP0_PTR is &OP0, OP1_PTR is &OP1, |
3000 | RESTYPE_PTR is &RESULT_TYPE and RESCODE_PTR is &RESULTCODE. |
3001 | |
3002 | LOC is the location of the comparison. |
3003 | |
3004 | If this function returns non-NULL_TREE, it means that the comparison has |
3005 | a constant value. What this function returns is an expression for |
3006 | that value. */ |
3007 | |
3008 | tree |
3009 | shorten_compare (location_t loc, tree *op0_ptr, tree *op1_ptr, |
3010 | tree *restype_ptr, enum tree_code *rescode_ptr) |
3011 | { |
3012 | tree type; |
3013 | tree op0 = *op0_ptr; |
3014 | tree op1 = *op1_ptr; |
3015 | int unsignedp0, unsignedp1; |
3016 | int real1, real2; |
3017 | tree primop0, primop1; |
3018 | enum tree_code code = *rescode_ptr; |
3019 | |
3020 | /* Throw away any conversions to wider types |
3021 | already present in the operands. */ |
3022 | |
3023 | primop0 = c_common_get_narrower (op0, &unsignedp0); |
3024 | primop1 = c_common_get_narrower (op1, &unsignedp1); |
3025 | |
3026 | /* If primopN is first sign-extended from primopN's precision to opN's |
3027 | precision, then zero-extended from opN's precision to |
3028 | *restype_ptr precision, shortenings might be invalid. */ |
3029 | if (TYPE_PRECISION (TREE_TYPE (primop0)) < TYPE_PRECISION (TREE_TYPE (op0)) |
3030 | && TYPE_PRECISION (TREE_TYPE (op0)) < TYPE_PRECISION (*restype_ptr) |
3031 | && !unsignedp0 |
3032 | && TYPE_UNSIGNED (TREE_TYPE (op0))) |
3033 | primop0 = op0; |
3034 | if (TYPE_PRECISION (TREE_TYPE (primop1)) < TYPE_PRECISION (TREE_TYPE (op1)) |
3035 | && TYPE_PRECISION (TREE_TYPE (op1)) < TYPE_PRECISION (*restype_ptr) |
3036 | && !unsignedp1 |
3037 | && TYPE_UNSIGNED (TREE_TYPE (op1))) |
3038 | primop1 = op1; |
3039 | |
3040 | /* Handle the case that OP0 does not *contain* a conversion |
3041 | but it *requires* conversion to FINAL_TYPE. */ |
3042 | |
3043 | if (op0 == primop0 && TREE_TYPE (op0) != *restype_ptr) |
3044 | unsignedp0 = TYPE_UNSIGNED (TREE_TYPE (op0)); |
3045 | if (op1 == primop1 && TREE_TYPE (op1) != *restype_ptr) |
3046 | unsignedp1 = TYPE_UNSIGNED (TREE_TYPE (op1)); |
3047 | |
3048 | /* If one of the operands must be floated, we cannot optimize. */ |
3049 | real1 = SCALAR_FLOAT_TYPE_P (TREE_TYPE (primop0)); |
3050 | real2 = SCALAR_FLOAT_TYPE_P (TREE_TYPE (primop1)); |
3051 | |
3052 | /* If first arg is constant, swap the args (changing operation |
3053 | so value is preserved), for canonicalization. Don't do this if |
3054 | the second arg is 0. */ |
3055 | |
3056 | if (TREE_CONSTANT (primop0) |
3057 | && !integer_zerop (primop1) && !real_zerop (primop1) |
3058 | && !fixed_zerop (primop1)) |
3059 | { |
3060 | std::swap (primop0, primop1); |
3061 | std::swap (op0, op1); |
3062 | *op0_ptr = op0; |
3063 | *op1_ptr = op1; |
3064 | std::swap (unsignedp0, unsignedp1); |
3065 | std::swap (real1, real2); |
3066 | |
3067 | switch (code) |
3068 | { |
3069 | case LT_EXPR: |
3070 | code = GT_EXPR; |
3071 | break; |
3072 | case GT_EXPR: |
3073 | code = LT_EXPR; |
3074 | break; |
3075 | case LE_EXPR: |
3076 | code = GE_EXPR; |
3077 | break; |
3078 | case GE_EXPR: |
3079 | code = LE_EXPR; |
3080 | break; |
3081 | default: |
3082 | break; |
3083 | } |
3084 | *rescode_ptr = code; |
3085 | } |
3086 | |
3087 | /* If comparing an integer against a constant more bits wide, |
3088 | maybe we can deduce a value of 1 or 0 independent of the data. |
3089 | Or else truncate the constant now |
3090 | rather than extend the variable at run time. |
3091 | |
3092 | This is only interesting if the constant is the wider arg. |
3093 | Also, it is not safe if the constant is unsigned and the |
3094 | variable arg is signed, since in this case the variable |
3095 | would be sign-extended and then regarded as unsigned. |
3096 | Our technique fails in this case because the lowest/highest |
3097 | possible unsigned results don't follow naturally from the |
3098 | lowest/highest possible values of the variable operand. |
3099 | For just EQ_EXPR and NE_EXPR there is another technique that |
3100 | could be used: see if the constant can be faithfully represented |
3101 | in the other operand's type, by truncating it and reextending it |
3102 | and see if that preserves the constant's value. */ |
3103 | |
3104 | if (!real1 && !real2 |
3105 | && TREE_CODE (TREE_TYPE (primop0)) != FIXED_POINT_TYPE |
3106 | && TREE_CODE (primop1) == INTEGER_CST |
3107 | && TYPE_PRECISION (TREE_TYPE (primop0)) < TYPE_PRECISION (*restype_ptr)) |
3108 | { |
3109 | int min_gt, max_gt, min_lt, max_lt; |
3110 | tree maxval, minval; |
3111 | /* 1 if comparison is nominally unsigned. */ |
3112 | int unsignedp = TYPE_UNSIGNED (*restype_ptr); |
3113 | tree val; |
3114 | |
3115 | type = c_common_signed_or_unsigned_type (unsignedp0, |
3116 | TREE_TYPE (primop0)); |
3117 | |
3118 | maxval = TYPE_MAX_VALUE (type); |
3119 | minval = TYPE_MIN_VALUE (type); |
3120 | |
3121 | if (unsignedp && !unsignedp0) |
3122 | *restype_ptr = c_common_signed_type (*restype_ptr); |
3123 | |
3124 | if (TREE_TYPE (primop1) != *restype_ptr) |
3125 | { |
3126 | /* Convert primop1 to target type, but do not introduce |
3127 | additional overflow. We know primop1 is an int_cst. */ |
3128 | primop1 = force_fit_type (*restype_ptr, |
3129 | wi::to_wide |
3130 | (primop1, |
3131 | TYPE_PRECISION (*restype_ptr)), |
3132 | 0, TREE_OVERFLOW (primop1)); |
3133 | } |
3134 | if (type != *restype_ptr) |
3135 | { |
3136 | minval = convert (*restype_ptr, minval); |
3137 | maxval = convert (*restype_ptr, maxval); |
3138 | } |
3139 | |
3140 | min_gt = tree_int_cst_lt (primop1, minval); |
3141 | max_gt = tree_int_cst_lt (primop1, maxval); |
3142 | min_lt = tree_int_cst_lt (minval, primop1); |
3143 | max_lt = tree_int_cst_lt (maxval, primop1); |
3144 | |
3145 | val = 0; |
3146 | /* This used to be a switch, but Genix compiler can't handle that. */ |
3147 | if (code == NE_EXPR) |
3148 | { |
3149 | if (max_lt || min_gt) |
3150 | val = truthvalue_true_node; |
3151 | } |
3152 | else if (code == EQ_EXPR) |
3153 | { |
3154 | if (max_lt || min_gt) |
3155 | val = truthvalue_false_node; |
3156 | } |
3157 | else if (code == LT_EXPR) |
3158 | { |
3159 | if (max_lt) |
3160 | val = truthvalue_true_node; |
3161 | if (!min_lt) |
3162 | val = truthvalue_false_node; |
3163 | } |
3164 | else if (code == GT_EXPR) |
3165 | { |
3166 | if (min_gt) |
3167 | val = truthvalue_true_node; |
3168 | if (!max_gt) |
3169 | val = truthvalue_false_node; |
3170 | } |
3171 | else if (code == LE_EXPR) |
3172 | { |
3173 | if (!max_gt) |
3174 | val = truthvalue_true_node; |
3175 | if (min_gt) |
3176 | val = truthvalue_false_node; |
3177 | } |
3178 | else if (code == GE_EXPR) |
3179 | { |
3180 | if (!min_lt) |
3181 | val = truthvalue_true_node; |
3182 | if (max_lt) |
3183 | val = truthvalue_false_node; |
3184 | } |
3185 | |
3186 | /* If primop0 was sign-extended and an unsigned comparison was specified, |
3187 | we did a signed comparison above using the signed type bounds. |
3188 | But the comparison we output must be unsigned. |
3189 | |
3190 | Also, for inequalities, VAL is no good; but if the signed |
3191 | comparison had *any* fixed result, it follows that the |
3192 | unsigned comparison just tests the sign in reverse |
3193 | (positive values are LE, negative ones GE). |
3194 | So we can generate an unsigned comparison |
3195 | against an extreme value of the signed type. */ |
3196 | |
3197 | if (unsignedp && !unsignedp0) |
3198 | { |
3199 | if (val != 0) |
3200 | switch (code) |
3201 | { |
3202 | case LT_EXPR: |
3203 | case GE_EXPR: |
3204 | primop1 = TYPE_MIN_VALUE (type); |
3205 | val = 0; |
3206 | break; |
3207 | |
3208 | case LE_EXPR: |
3209 | case GT_EXPR: |
3210 | primop1 = TYPE_MAX_VALUE (type); |
3211 | val = 0; |
3212 | break; |
3213 | |
3214 | default: |
3215 | break; |
3216 | } |
3217 | type = c_common_unsigned_type (type); |
3218 | } |
3219 | |
3220 | if (TREE_CODE (primop0) != INTEGER_CST |
3221 | /* Don't warn if it's from a (non-system) macro. */ |
3222 | && !(from_macro_expansion_at |
3223 | (expansion_point_location_if_in_system_header |
3224 | (EXPR_LOCATION (primop0))))) |
3225 | { |
3226 | if (val == truthvalue_false_node) |
3227 | warning_at (loc, OPT_Wtype_limits, |
3228 | "comparison is always false due to limited range of data type"); |
3229 | if (val == truthvalue_true_node) |
3230 | warning_at (loc, OPT_Wtype_limits, |
3231 | "comparison is always true due to limited range of data type"); |
3232 | } |
3233 | |
3234 | if (val != 0) |
3235 | { |
3236 | /* Don't forget to evaluate PRIMOP0 if it has side effects. */ |
3237 | if (TREE_SIDE_EFFECTS (primop0)) |
3238 | return build2 (COMPOUND_EXPR, TREE_TYPE (val), primop0, val); |
3239 | return val; |
3240 | } |
3241 | |
3242 | /* Value is not predetermined, but do the comparison |
3243 | in the type of the operand that is not constant. |
3244 | TYPE is already properly set. */ |
3245 | } |
3246 | |
3247 | /* If either arg is decimal float and the other is float, find the |
3248 | proper common type to use for comparison. */ |
3249 | else if (real1 && real2 |
3250 | && DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (primop0))) |
3251 | && DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (primop1)))) |
3252 | type = common_type (TREE_TYPE (primop0), TREE_TYPE (primop1)); |
3253 | |
3254 | /* If either arg is decimal float and the other is float, fail. */ |
3255 | else if (real1 && real2 |
3256 | && (DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (primop0))) |
3257 | || DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (primop1))))) |
3258 | { |
3259 | type = *restype_ptr; |
3260 | primop0 = op0; |
3261 | primop1 = op1; |
3262 | } |
3263 | |
3264 | else if (real1 && real2 |
3265 | && (TYPE_PRECISION (TREE_TYPE (primop0)) |
3266 | == TYPE_PRECISION (TREE_TYPE (primop1)))) |
3267 | type = TREE_TYPE (primop0); |
3268 | |
3269 | /* If args' natural types are both narrower than nominal type |
3270 | and both extend in the same manner, compare them |
3271 | in the type of the wider arg. |
3272 | Otherwise must actually extend both to the nominal |
3273 | common type lest different ways of extending |
3274 | alter the result. |
3275 | (e.g., (short)-1 == (unsigned short)-1 should be 0.) */ |
3276 | |
3277 | else if (unsignedp0 == unsignedp1 && real1 == real2 |
3278 | && TYPE_PRECISION (TREE_TYPE (primop0)) < TYPE_PRECISION (*restype_ptr) |
3279 | && TYPE_PRECISION (TREE_TYPE (primop1)) < TYPE_PRECISION (*restype_ptr) |
3280 | && (type = common_type (TREE_TYPE (primop0), TREE_TYPE (primop1))) |
3281 | != error_mark_node) |
3282 | { |
3283 | type = c_common_signed_or_unsigned_type (unsignedp0 |
3284 | || TYPE_UNSIGNED (*restype_ptr), |
3285 | type); |
3286 | /* Make sure shorter operand is extended the right way |
3287 | to match the longer operand. */ |
3288 | primop0 |
3289 | = convert (c_common_signed_or_unsigned_type (unsignedp0, |
3290 | TREE_TYPE (primop0)), |
3291 | primop0); |
3292 | primop1 |
3293 | = convert (c_common_signed_or_unsigned_type (unsignedp1, |
3294 | TREE_TYPE (primop1)), |
3295 | primop1); |
3296 | } |
3297 | else |
3298 | { |
3299 | /* Here we must do the comparison on the nominal type |
3300 | using the args exactly as we received them. */ |
3301 | type = *restype_ptr; |
3302 | primop0 = op0; |
3303 | primop1 = op1; |
3304 | |
3305 | /* We want to fold unsigned comparisons of >= and < against zero. |
3306 | For these, we may also issue a warning if we have a non-constant |
3307 | compared against zero, where the zero was spelled as "0" (rather |
3308 | than merely folding to it). |
3309 | If we have at least one constant, then op1 is constant |
3310 | and we may have a non-constant expression as op0. */ |
3311 | if (!real1 && !real2 && integer_zerop (primop1) |
3312 | && TYPE_UNSIGNED (*restype_ptr)) |
3313 | { |
3314 | tree value = NULL_TREE; |
3315 | /* All unsigned values are >= 0, so we warn. However, |
3316 | if OP0 is a constant that is >= 0, the signedness of |
3317 | the comparison isn't an issue, so suppress the |
3318 | warning. */ |
3319 | tree folded_op0 = fold_for_warn (op0); |
3320 | bool warn = |
3321 | warn_type_limits && !in_system_header_at (loc) |
3322 | && !(TREE_CODE (folded_op0) == INTEGER_CST |
3323 | && !TREE_OVERFLOW (convert (c_common_signed_type (type), |
3324 | folded_op0))) |
3325 | /* Do not warn for enumeration types. */ |
3326 | && (TREE_CODE (expr_original_type (folded_op0)) != ENUMERAL_TYPE); |
3327 | |
3328 | switch (code) |
3329 | { |
3330 | case GE_EXPR: |
3331 | if (warn) |
3332 | warning_at (loc, OPT_Wtype_limits, |
3333 | "comparison of unsigned expression in %<>= 0%> " |
3334 | "is always true"); |
3335 | value = truthvalue_true_node; |
3336 | break; |
3337 | |
3338 | case LT_EXPR: |
3339 | if (warn) |
3340 | warning_at (loc, OPT_Wtype_limits, |
3341 | "comparison of unsigned expression in %<< 0%> " |
3342 | "is always false"); |
3343 | value = truthvalue_false_node; |
3344 | break; |
3345 | |
3346 | default: |
3347 | break; |
3348 | } |
3349 | |
3350 | if (value != NULL_TREE) |
3351 | { |
3352 | /* Don't forget to evaluate PRIMOP0 if it has side effects. */ |
3353 | if (TREE_SIDE_EFFECTS (primop0)) |
3354 | return build2 (COMPOUND_EXPR, TREE_TYPE (value), |
3355 | primop0, value); |
3356 | return value; |
3357 | } |
3358 | } |
3359 | } |
3360 | |
3361 | *op0_ptr = convert (type, primop0); |
3362 | *op1_ptr = convert (type, primop1); |
3363 | |
3364 | *restype_ptr = truthvalue_type_node; |
3365 | |
3366 | return NULL_TREE; |
3367 | } |
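| |
| /* Rough sketch of the effect (illustrative): for |
| |
| unsigned char c; ... c == 300 ... |
| |
| primop0 has 8-bit precision and the constant lies outside [0, 255], |
| so the result folds to truthvalue_false_node and -Wtype-limits warns |
| that the comparison is always false. For `c < 200' no value can be |
| deduced, so the constant is instead converted to the 8-bit type and |
| the comparison is carried out there rather than widening `c' at run |
| time. */ |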
3368 | |
3369 | /* Return a tree for the sum or difference (RESULTCODE says which) |
3370 | of pointer PTROP and integer INTOP. */ |
3371 | |
3372 | tree |
3373 | pointer_int_sum (location_t loc, enum tree_code resultcode, |
3374 | tree ptrop, tree intop, bool complain) |
3375 | { |
3376 | tree size_exp, ret; |
3377 | |
3378 | /* The result is a pointer of the same type that is being added. */ |
3379 | tree result_type = TREE_TYPE (ptrop); |
3380 | |
3381 | if (VOID_TYPE_P (TREE_TYPE (result_type))) |
3382 | { |
3383 | if (complain && warn_pointer_arith) |
3384 | pedwarn (loc, OPT_Wpointer_arith, |
3385 | "pointer of type %<void *%> used in arithmetic"); |
3386 | else if (!complain) |
3387 | return error_mark_node; |
3388 | size_exp = integer_one_node; |
3389 | } |
3390 | else if (TREE_CODE (TREE_TYPE (result_type)) == FUNCTION_TYPE) |
3391 | { |
3392 | if (complain && warn_pointer_arith) |
3393 | pedwarn (loc, OPT_Wpointer_arith, |
3394 | "pointer to a function used in arithmetic"); |
3395 | else if (!complain) |
3396 | return error_mark_node; |
3397 | size_exp = integer_one_node; |
3398 | } |
3399 | else if (!verify_type_context (loc, TCTX_POINTER_ARITH, |
3400 | TREE_TYPE (result_type))) |
3401 | size_exp = integer_one_node; |
3402 | else |
3403 | { |
3404 | if (!complain && !COMPLETE_TYPE_P (TREE_TYPE (result_type))) |
3405 | return error_mark_node; |
3406 | size_exp = size_in_bytes_loc (loc, TREE_TYPE (result_type)); |
3407 | /* Wrap the pointer expression in a SAVE_EXPR to make sure it |
3408 | is evaluated first when the size expression may depend |
3409 | on it for VM types. */ |
3410 | if (TREE_SIDE_EFFECTS (size_exp) |
3411 | && TREE_SIDE_EFFECTS (ptrop) |
3412 | && variably_modified_type_p (TREE_TYPE (ptrop), NULL)) |
3413 | { |
3414 | ptrop = save_expr (ptrop); |
3415 | size_exp = build2 (COMPOUND_EXPR, TREE_TYPE (intop), ptrop, size_exp); |
3416 | } |
3417 | } |
3418 | |
3419 | /* We are manipulating pointer values, so we don't need to warn |
3420 | about relying on undefined signed overflow. We disable the |
3421 | warning here because we use integer types so fold won't know that |
3422 | they are really pointers. */ |
3423 | fold_defer_overflow_warnings (); |
3424 | |
3425 | /* If what we are about to multiply by the size of the elements |
3426 | contains a constant term, apply distributive law |
3427 | and multiply that constant term separately. |
3428 | This helps produce common subexpressions. */ |
3429 | if ((TREE_CODE (intop) == PLUS_EXPR || TREE_CODE (intop) == MINUS_EXPR) |
3430 | && !TREE_CONSTANT (intop) |
3431 | && TREE_CONSTANT (TREE_OPERAND (intop, 1)) |
3432 | && TREE_CONSTANT (size_exp) |
3433 | /* If the constant comes from pointer subtraction, |
3434 | skip this optimization--it would cause an error. */ |
3435 | && TREE_CODE (TREE_TYPE (TREE_OPERAND (intop, 0))) == INTEGER_TYPE |
3436 | /* If the constant is unsigned, and smaller than the pointer size, |
3437 | then we must skip this optimization. This is because it could cause |
3438 | an overflow error if the constant is negative but INTOP is not. */ |
3439 | && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (intop)) |
3440 | || (TYPE_PRECISION (TREE_TYPE (intop)) |
3441 | == TYPE_PRECISION (TREE_TYPE (ptrop))))) |
3442 | { |
3443 | enum tree_code subcode = resultcode; |
3444 | tree int_type = TREE_TYPE (intop); |
3445 | if (TREE_CODE (intop) == MINUS_EXPR) |
3446 | subcode = (subcode == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR); |
3447 | /* Convert both subexpression types to the type of intop, |
3448 | because weird cases involving pointer arithmetic |
3449 | can result in a sum or difference with different type args. */ |
3450 | ptrop = build_binary_op (EXPR_LOCATION (TREE_OPERAND (intop, 1)), |
3451 | subcode, ptrop, |
3452 | convert (int_type, TREE_OPERAND (intop, 1)), |
3453 | true); |
3454 | intop = convert (int_type, TREE_OPERAND (intop, 0)); |
3455 | } |
3456 | |
3457 | /* Convert the integer argument to a type the same size as sizetype |
3458 | so the multiply won't overflow spuriously. */ |
3459 | if (TYPE_PRECISION (TREE_TYPE (intop)) != TYPE_PRECISION (sizetype) |
3460 | || TYPE_UNSIGNED (TREE_TYPE (intop)) != TYPE_UNSIGNED (sizetype)) |
3461 | intop = convert (c_common_type_for_size (TYPE_PRECISION (sizetype), |
3462 | TYPE_UNSIGNED (sizetype)), intop); |
3463 | |
3464 | /* Replace the integer argument with a suitable product by the object size. |
3465 | Do this multiplication as signed, then convert to the appropriate type |
3466 | for the pointer operation and disregard an overflow that occurred only |
3467 | because of the sign-extension change in the latter conversion. */ |
3468 | { |
3469 | tree t = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (intop), intop, |
3470 | convert (TREE_TYPE (intop), size_exp)); |
3471 | intop = convert (sizetype, t); |
3472 | if (TREE_OVERFLOW_P (intop) && !TREE_OVERFLOW (t)) |
3473 | intop = wide_int_to_tree (TREE_TYPE (intop), wi::to_wide (intop)); |
3474 | } |
3475 | |
3476 | /* Create the sum or difference. */ |
3477 | if (resultcode == MINUS_EXPR) |
3478 | intop = fold_build1_loc (loc, NEGATE_EXPR, sizetype, intop); |
3479 | |
3480 | ret = fold_build_pointer_plus_loc (loc, ptrop, intop); |
3481 | |
3482 | fold_undefer_and_ignore_overflow_warnings (); |
3483 | |
3484 | return ret; |
3485 | } |
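| |
| /* Rough example of the transformation (assuming an 8-byte `double'): |
| for `double *p; ... p + n ...' the integer operand is converted to a |
| sizetype-wide type, multiplied by 8, and the result is built as |
| |
| POINTER_PLUS_EXPR <p, (sizetype) (n * 8)> |
| |
| with the offset negated first when RESULTCODE is MINUS_EXPR. */ |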
3486 | |
3487 | /* Wrap a C_MAYBE_CONST_EXPR around an expression that is fully folded |
3488 | and if NON_CONST is known not to be permitted in an evaluated part |
3489 | of a constant expression. */ |
3490 | |
3491 | tree |
3492 | c_wrap_maybe_const (tree expr, bool non_const) |
3493 | { |
3494 | location_t loc = EXPR_LOCATION (expr); |
3495 | |
3496 | /* This should never be called for C++. */ |
3497 | if (c_dialect_cxx ()) |
3498 | gcc_unreachable (); |
3499 | |
3500 | /* The result of folding may have a NOP_EXPR to set TREE_NO_WARNING. */ |
3501 | STRIP_TYPE_NOPS (expr); |
3502 | expr = build2 (C_MAYBE_CONST_EXPR, TREE_TYPE (expr), NULL, expr); |
3503 | C_MAYBE_CONST_EXPR_NON_CONST (expr) = non_const; |
3504 | protected_set_expr_location (expr, loc); |
3505 | |
3506 | return expr; |
3507 | } |
3508 | |
3509 | /* Return whether EXPR is a declaration whose address can never be NULL. |
3510 | The address of the first struct member could be NULL only if it were |
3511 | accessed through a NULL pointer, and such an access would be invalid. |
3512 | The address of a weak symbol may be null unless it has a definition. */ |
3513 | |
3514 | bool |
3515 | decl_with_nonnull_addr_p (const_tree expr) |
3516 | { |
3517 | if (!DECL_P (expr)) |
3518 | return false; |
3519 | |
3520 | if (TREE_CODE (expr) == FIELD_DECL |
3521 | || TREE_CODE (expr) == PARM_DECL |
3522 | || TREE_CODE (expr) == LABEL_DECL) |
3523 | return true; |
3524 | |
3525 | if (!VAR_OR_FUNCTION_DECL_P (expr)) |
3526 | return false; |
3527 | |
3528 | if (!DECL_WEAK (expr)) |
3529 | /* Ordinary (non-weak) symbols have nonnull addresses. */ |
3530 | return true; |
3531 | |
3532 | if (DECL_INITIAL (expr) && DECL_INITIAL (expr) != error_mark_node) |
3533 | /* Initialized weak symbols have nonnull addresses. */ |
3534 | return true; |
3535 | |
3536 | if (DECL_EXTERNAL (expr) || !TREE_STATIC (expr)) |
3537 | /* Uninitialized extern weak symbols and weak symbols with no |
3538 | allocated storage might have a null address. */ |
3539 | return false; |
3540 | |
3541 | tree attribs = DECL_ATTRIBUTES (expr); |
3542 | if (lookup_attribute ("weakref", attribs)) |
3543 | /* Weakref symbols might have a null address unless their referent |
3544 | is known not to. Don't bother following weakref targets here. */ |
3545 | return false; |
3546 | |
3547 | return true; |
3548 | } |
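| |
| /* For example (illustrative): given |
| |
| int x; |
| extern int w __attribute__ ((weak)); |
| |
| the predicate is true for `x' and false for `w', whose address may |
| legitimately be null if no definition is supplied at link time. |
| c_common_truthvalue_conversion below relies on this when deciding |
| whether `&x' may be folded to true with a -Waddress warning. */ |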
3549 | |
3550 | /* Prepare expr to be an argument of a TRUTH_NOT_EXPR, |
3551 | or for an `if' or `while' statement or ?..: exp. It should already |
3552 | have been validated to be of suitable type; otherwise, a bad |
3553 | diagnostic may result. |
3554 | |
3555 | The EXPR is located at LOCATION. |
3556 | |
3557 | This preparation consists of taking the ordinary |
3558 | representation of an expression expr and producing a valid tree |
3559 | boolean expression describing whether expr is nonzero. We could |
3560 | simply always do build_binary_op (NE_EXPR, expr, truthvalue_false_node, 1), |
3561 | but we optimize comparisons, &&, ||, and !. |
3562 | |
3563 | The resulting type should always be `truthvalue_type_node'. */ |
3564 | |
3565 | tree |
3566 | c_common_truthvalue_conversion (location_t location, tree expr) |
3567 | { |
3568 | STRIP_ANY_LOCATION_WRAPPER (expr); |
3569 | switch (TREE_CODE (expr)) |
3570 | { |
3571 | case EQ_EXPR: case NE_EXPR: case UNEQ_EXPR: case LTGT_EXPR: |
3572 | case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR: |
3573 | case UNLE_EXPR: case UNGE_EXPR: case UNLT_EXPR: case UNGT_EXPR: |
3574 | case ORDERED_EXPR: case UNORDERED_EXPR: |
3575 | if (TREE_TYPE (expr) == truthvalue_type_node) |
3576 | return expr; |
3577 | expr = build2 (TREE_CODE (expr), truthvalue_type_node, |
3578 | TREE_OPERAND (expr, 0), TREE_OPERAND (expr, 1)); |
3579 | goto ret; |
3580 | |
3581 | case TRUTH_ANDIF_EXPR: |
3582 | case TRUTH_ORIF_EXPR: |
3583 | case TRUTH_AND_EXPR: |
3584 | case TRUTH_OR_EXPR: |
3585 | case TRUTH_XOR_EXPR: |
3586 | if (TREE_TYPE (expr) == truthvalue_type_node) |
3587 | return expr; |
3588 | expr = build2 (TREE_CODE (expr), truthvalue_type_node, |
3589 | c_common_truthvalue_conversion (location, |
3590 | TREE_OPERAND (expr, 0)), |
3591 | c_common_truthvalue_conversion (location, |
3592 | TREE_OPERAND (expr, 1))); |
3593 | goto ret; |
3594 | |
3595 | case TRUTH_NOT_EXPR: |
3596 | if (TREE_TYPE (expr) == truthvalue_type_node) |
3597 | return expr; |
3598 | expr = build1 (TREE_CODE (expr), truthvalue_type_node, |
3599 | c_common_truthvalue_conversion (location, |
3600 | TREE_OPERAND (expr, 0))); |
3601 | goto ret; |
3602 | |
3603 | case ERROR_MARK: |
3604 | return expr; |
3605 | |
3606 | case INTEGER_CST: |
3607 | if (TREE_CODE (TREE_TYPE (expr)) == ENUMERAL_TYPE |
3608 | && !integer_zerop (expr) |
3609 | && !integer_onep (expr)) |
3610 | warning_at (location, OPT_Wint_in_bool_context, |
3611 | "enum constant in boolean context"); |
3612 | return integer_zerop (expr) ? truthvalue_false_node |
3613 | : truthvalue_true_node; |
3614 | |
3615 | case REAL_CST: |
3616 | return real_compare (NE_EXPR, &TREE_REAL_CST (expr), &dconst0) |
3617 | ? truthvalue_true_node |
3618 | : truthvalue_false_node; |
3619 | |
3620 | case FIXED_CST: |
3621 | return fixed_compare (NE_EXPR, &TREE_FIXED_CST (expr), |
3622 | &FCONST0 (TYPE_MODE (TREE_TYPE (expr)))) |
3623 | ? truthvalue_true_node |
3624 | : truthvalue_false_node; |
3625 | |
3626 | case FUNCTION_DECL: |
3627 | expr = build_unary_op (location, ADDR_EXPR, expr, false); |
3628 | /* Fall through. */ |
3629 | |
3630 | case ADDR_EXPR: |
3631 | { |
3632 | tree inner = TREE_OPERAND (expr, 0); |
3633 | if (decl_with_nonnull_addr_p (inner) |
3634 | /* Check both EXPR and INNER for suppression. */ |
3635 | && !warning_suppressed_p (expr, OPT_Waddress) |
3636 | && !warning_suppressed_p (inner, OPT_Waddress)) |
3637 | { |
3638 | /* Common Ada programmer's mistake. */ |
3639 | warning_at (location, |
3640 | OPT_Waddress, |
3641 | "the address of %qD will always evaluate as %<true%>", |
3642 | inner); |
3643 | suppress_warning (inner, OPT_Waddress); |
3644 | return truthvalue_true_node; |
3645 | } |
3646 | break; |
3647 | } |
3648 | |
3649 | case COMPLEX_EXPR: |
3650 | expr = build_binary_op (EXPR_LOCATION (expr), |
3651 | (TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)) |
3652 | ? TRUTH_OR_EXPR : TRUTH_ORIF_EXPR), |
3653 | c_common_truthvalue_conversion (location, |
3654 | TREE_OPERAND (expr, 0)), |
3655 | c_common_truthvalue_conversion (location, |
3656 | TREE_OPERAND (expr, 1)), |
3657 | false); |
3658 | goto ret; |
3659 | |
3660 | case NEGATE_EXPR: |
3661 | case ABS_EXPR: |
3662 | case ABSU_EXPR: |
3663 | case FLOAT_EXPR: |
3664 | case EXCESS_PRECISION_EXPR: |
3665 | /* These don't change whether an object is nonzero or zero. */ |
3666 | return c_common_truthvalue_conversion (location, TREE_OPERAND (expr, 0)); |
3667 | |
3668 | case LROTATE_EXPR: |
3669 | case RROTATE_EXPR: |
3670 | /* These don't change whether an object is zero or nonzero, but |
3671 | we can't ignore them if their second arg has side-effects. */ |
3672 | if (TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1))) |
3673 | { |
3674 | expr = build2 (COMPOUND_EXPR, truthvalue_type_node, |
3675 | TREE_OPERAND (expr, 1), |
3676 | c_common_truthvalue_conversion |
3677 | (location, TREE_OPERAND (expr, 0))); |
3678 | goto ret; |
3679 | } |
3680 | else |
3681 | return c_common_truthvalue_conversion (location, |
3682 | TREE_OPERAND (expr, 0)); |
3683 | |
3684 | case MULT_EXPR: |
3685 | warning_at (EXPR_LOCATION (expr), OPT_Wint_in_bool_context, |
3686 | "%<*%> in boolean context, suggest %<&&%> instead"); |
3687 | break; |
3688 | |
3689 | case LSHIFT_EXPR: |
3690 | /* We will only warn on signed shifts here, because the majority of |
3691 | false positive warnings happen in code where unsigned arithmetic |
3692 | was used in anticipation of a possible overflow. |
3693 | Furthermore, if we see an unsigned type here we know that the |
3694 | result of the shift is not subject to integer promotion rules. */ |
3695 | if ((TREE_CODE (TREE_TYPE (expr)) == INTEGER_TYPE |
3696 | || TREE_CODE (TREE_TYPE (expr)) == BITINT_TYPE) |
3697 | && !TYPE_UNSIGNED (TREE_TYPE (expr))) |
3698 | warning_at (EXPR_LOCATION (expr), OPT_Wint_in_bool_context, |
3699 | "%<<<%> in boolean context, did you mean %<<%>?"); |
3700 | break; |
3701 | |
3702 | case COND_EXPR: |
3703 | if (warn_int_in_bool_context |
3704 | && !from_macro_definition_at (EXPR_LOCATION (expr))) |
3705 | { |
3706 | tree val1 = fold_for_warn (TREE_OPERAND (expr, 1)); |
3707 | tree val2 = fold_for_warn (TREE_OPERAND (expr, 2)); |
3708 | if (TREE_CODE (val1) == INTEGER_CST |
3709 | && TREE_CODE (val2) == INTEGER_CST |
3710 | && !integer_zerop (val1) |
3711 | && !integer_zerop (val2) |
3712 | && (!integer_onep (val1) |
3713 | || !integer_onep (val2))) |
3714 | warning_at (EXPR_LOCATION (expr), OPT_Wint_in_bool_context, |
3715 | "%<?:%> using integer constants in boolean context, " |
3716 | "the expression will always evaluate to %<true%>"); |
3717 | else if ((TREE_CODE (val1) == INTEGER_CST |
3718 | && !integer_zerop (val1) |
3719 | && !integer_onep (val1)) |
3720 | || (TREE_CODE (val2) == INTEGER_CST |
3721 | && !integer_zerop (val2) |
3722 | && !integer_onep (val2))) |
3723 | warning_at (EXPR_LOCATION (expr), OPT_Wint_in_bool_context, |
3724 | "%<?:%> using integer constants in boolean context"); |
3725 | } |
3726 | /* Distribute the conversion into the arms of a COND_EXPR. */ |
3727 | if (c_dialect_cxx ()) |
3728 | /* Avoid premature folding. */ |
3729 | break; |
3730 | else |
3731 | { |
3732 | int w = warn_int_in_bool_context; |
3733 | warn_int_in_bool_context = 0; |
3734 | /* Folding will happen later for C. */ |
3735 | expr = build3 (COND_EXPR, truthvalue_type_node, |
3736 | TREE_OPERAND (expr, 0), |
3737 | c_common_truthvalue_conversion (location, |
3738 | TREE_OPERAND (expr, 1)), |
3739 | c_common_truthvalue_conversion (location, |
3740 | TREE_OPERAND (expr, 2))); |
3741 | warn_int_in_bool_context = w; |
3742 | goto ret; |
3743 | } |
3744 | |
3745 | CASE_CONVERT: |
3746 | { |
3747 | tree totype = TREE_TYPE (expr); |
3748 | tree fromtype = TREE_TYPE (TREE_OPERAND (expr, 0)); |
3749 | |
3750 | if (POINTER_TYPE_P (totype) |
3751 | && !c_inhibit_evaluation_warnings |
3752 | && TREE_CODE (fromtype) == REFERENCE_TYPE) |
3753 | { |
3754 | tree inner = expr; |
3755 | STRIP_NOPS (inner); |
3756 | |
3757 | if (DECL_P (inner)) |
3758 | warning_at (location, |
3759 | OPT_Waddress, |
3760 | "the compiler can assume that the address of " |
3761 | "%qD will always evaluate to %<true%>", |
3762 | inner); |
3763 | } |
3764 | |
3765 | /* Don't cancel the effect of a CONVERT_EXPR from a REFERENCE_TYPE, |
3766 | since that affects how `default_conversion' will behave. */ |
3767 | if (TREE_CODE (totype) == REFERENCE_TYPE |
3768 | || TREE_CODE (fromtype) == REFERENCE_TYPE) |
3769 | break; |
3770 | /* Don't strip a conversion from C++0x scoped enum, since they |
3771 | don't implicitly convert to other types. */ |
3772 | if (TREE_CODE (fromtype) == ENUMERAL_TYPE |
3773 | && ENUM_IS_SCOPED (fromtype)) |
3774 | break; |
3775 | /* If this isn't narrowing the argument, we can ignore it. */ |
3776 | if (TYPE_PRECISION (totype) >= TYPE_PRECISION (fromtype)) |
3777 | { |
3778 | tree op0 = TREE_OPERAND (expr, 0); |
3779 | if ((TREE_CODE (fromtype) == POINTER_TYPE |
3780 | && (TREE_CODE (totype) == INTEGER_TYPE |
3781 | || TREE_CODE (totype) == BITINT_TYPE)) |
3782 | || warning_suppressed_p (expr, OPT_Waddress)) |
3783 | /* Suppress -Waddress for casts to intptr_t, propagating |
3784 | any suppression from the enclosing expression to its |
3785 | operand. */ |
3786 | suppress_warning (op0, OPT_Waddress); |
3787 | return c_common_truthvalue_conversion (location, op0); |
3788 | } |
3789 | } |
3790 | break; |
3791 | |
3792 | case MODIFY_EXPR: |
3793 | if (!warning_suppressed_p (expr, OPT_Wparentheses) |
3794 | && warn_parentheses |
3795 | && warning_at (location, OPT_Wparentheses, |
3796 | "suggest parentheses around assignment used as " |
3797 | "truth value")) |
3798 | suppress_warning (expr, OPT_Wparentheses); |
3799 | break; |
3800 | |
3801 | case CONST_DECL: |
3802 | { |
3803 | tree folded_expr = fold_for_warn (expr); |
3804 | if (folded_expr != expr) |
3805 | return c_common_truthvalue_conversion (location, folded_expr); |
3806 | } |
3807 | break; |
3808 | |
3809 | default: |
3810 | break; |
3811 | } |
3812 | |
3813 | if (TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE) |
3814 | { |
3815 | tree t = save_expr (expr); |
3816 | expr = (build_binary_op |
3817 | (EXPR_LOCATION (expr), |
3818 | (TREE_SIDE_EFFECTS (expr) |
3819 | ? TRUTH_OR_EXPR : TRUTH_ORIF_EXPR), |
3820 | c_common_truthvalue_conversion |
3821 | (location, |
3822 | build_unary_op (location, REALPART_EXPR, t, false)), |
3823 | c_common_truthvalue_conversion |
3824 | (location, |
3825 | build_unary_op (location, IMAGPART_EXPR, t, false)), |
3826 | false)); |
3827 | goto ret; |
3828 | } |
3829 | |
3830 | if (FIXED_POINT_TYPE_P (TREE_TYPE (expr))) |
3831 | { |
3832 | tree fixed_zero_node = build_fixed (TREE_TYPE (expr), |
3833 | FCONST0 (TYPE_MODE |
3834 | (TREE_TYPE (expr)))); |
3835 | return build_binary_op (location, NE_EXPR, expr, fixed_zero_node, true); |
3836 | } |
3837 | else |
3838 | return build_binary_op (location, NE_EXPR, expr, integer_zero_node, true); |
3839 | |
3840 | ret: |
3841 | protected_set_expr_location (expr, location); |
3842 | return expr; |
3843 | } |
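| |
| /* A few illustrative cases of the conversion above (sketch only): |
| |
| if (x = 5) -> -Wparentheses, then `(x = 5) != 0' |
| if (p) -> `p != 0' for a pointer or integer `p' |
| if (z) -> `REALPART (z) != 0 || IMAGPART (z) != 0' for complex `z' |
| if (a << 1) -> -Wint-in-bool-context for signed `a' |
| |
| In each case the type of the result is truthvalue_type_node. */ |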
3844 | |
3845 | static void def_builtin_1 (enum built_in_function fncode, |
3846 | const char *name, |
3847 | enum built_in_class fnclass, |
3848 | tree fntype, tree libtype, |
3849 | bool both_p, bool fallback_p, bool nonansi_p, |
3850 | tree fnattrs, bool implicit_p); |
3851 | |
3852 | |
3853 | /* Apply the TYPE_QUALS to the new DECL. */ |
3854 | |
3855 | void |
3856 | c_apply_type_quals_to_decl (int type_quals, tree decl) |
3857 | { |
3858 | tree type = TREE_TYPE (decl); |
3859 | |
3860 | if (type == error_mark_node) |
3861 | return; |
3862 | |
3863 | if ((type_quals & TYPE_QUAL_CONST) |
3864 | || (type && TREE_CODE (type) == REFERENCE_TYPE)) |
3865 | /* We used to check TYPE_NEEDS_CONSTRUCTING here, but now a constexpr |
3866 | constructor can produce constant init, so rely on cp_finish_decl to |
3867 | clear TREE_READONLY if the variable has non-constant init. */ |
3868 | TREE_READONLY (decl) = 1; |
3869 | if (type_quals & TYPE_QUAL_VOLATILE) |
3870 | { |
3871 | TREE_SIDE_EFFECTS (decl) = 1; |
3872 | TREE_THIS_VOLATILE (decl) = 1; |
3873 | } |
3874 | if (type_quals & TYPE_QUAL_RESTRICT) |
3875 | { |
3876 | while (type && TREE_CODE (type) == ARRAY_TYPE) |
3877 | /* Allow 'restrict' on arrays of pointers. |
3878 | FIXME currently we just ignore it. */ |
3879 | type = TREE_TYPE (type); |
3880 | if (!type |
3881 | || !POINTER_TYPE_P (type) |
3882 | || !C_TYPE_OBJECT_OR_INCOMPLETE_P (TREE_TYPE (type))) |
3883 | error ("invalid use of %<restrict%>"); |
3884 | } |
3885 | } |
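| |
| /* For example (illustrative): `volatile int v;' sets both |
| TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS on the decl, while |
| `int *restrict p;' passes the check because the (possibly |
| array-wrapped) pointer targets an object type; `restrict' on a |
| pointer to a function is rejected with the error above. */ |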
3886 | |
3887 | /* Return the typed-based alias set for T, which may be an expression |
3888 | or a type. Return -1 if we don't do anything special. */ |
3889 | |
3890 | alias_set_type |
3891 | c_common_get_alias_set (tree t) |
3892 | { |
3893 | /* For VLAs, use the alias set of the element type rather than the |
3894 | default of alias set 0 for types compared structurally. */ |
3895 | if (TYPE_P (t) && TYPE_STRUCTURAL_EQUALITY_P (t)) |
3896 | { |
3897 | if (TREE_CODE (t) == ARRAY_TYPE) |
3898 | return get_alias_set (TREE_TYPE (t)); |
3899 | return -1; |
3900 | } |
3901 | |
3902 | /* That's all the expressions we handle specially. */ |
3903 | if (!TYPE_P (t)) |
3904 | return -1; |
3905 | |
3906 | /* Unlike char, char8_t doesn't alias in C++. (In C, char8_t is not |
3907 | a distinct type.) */ |
3908 | if (flag_char8_t && t == char8_type_node && c_dialect_cxx ()) |
3909 | return -1; |
3910 | |
3911 | /* The C standard guarantees that any object may be accessed via an |
3912 | lvalue that has narrow character type. */ |
3913 | if (t == char_type_node |
3914 | || t == signed_char_type_node |
3915 | || t == unsigned_char_type_node) |
3916 | return 0; |
3917 | |
3918 | /* The C standard specifically allows aliasing between signed and |
3919 | unsigned variants of the same type. We treat the signed |
3920 | variant as canonical. */ |
3921 | if ((TREE_CODE (t) == INTEGER_TYPE || TREE_CODE (t) == BITINT_TYPE) |
3922 | && TYPE_UNSIGNED (t)) |
3923 | { |
3924 | tree t1 = c_common_signed_type (t); |
3925 | |
3926 | /* t1 == t can happen for boolean nodes which are always unsigned. */ |
3927 | if (t1 != t) |
3928 | return get_alias_set (t1); |
3929 | } |
3930 | |
3931 | return -1; |
3932 | } |
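| |
| /* For instance (illustrative): `unsigned long' is given the alias set |
| of `long', so accesses through `unsigned long *' and `long *' are |
| treated as conflicting, while the three narrow character types get |
| alias set 0 and may alias any object. */ |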
3933 | |
3934 | /* Compute the value of 'sizeof (TYPE)' or '__alignof__ (TYPE)', where |
3935 | the IS_SIZEOF parameter indicates which operator is being applied. |
3936 | The COMPLAIN flag controls whether we should diagnose possibly |
3937 | ill-formed constructs or not. LOC is the location of the SIZEOF or |
3938 | ALIGNOF operator. If MIN_ALIGNOF, the least alignment required for |
3939 | a type in any context should be returned, rather than the normal |
3940 | alignment for that type. */ |
3941 | |
3942 | tree |
3943 | c_sizeof_or_alignof_type (location_t loc, |
3944 | tree type, bool is_sizeof, bool min_alignof, |
3945 | int complain) |
3946 | { |
3947 | const char *op_name; |
3948 | tree value = NULL; |
3949 | enum tree_code type_code = TREE_CODE (type); |
3950 | |
3951 | op_name = is_sizeof ? "sizeof" : "__alignof__"; |
3952 | |
3953 | if (type_code == FUNCTION_TYPE) |
3954 | { |
3955 | if (is_sizeof) |
3956 | { |
3957 | if (complain && warn_pointer_arith) |
3958 | pedwarn (loc, OPT_Wpointer_arith, |
3959 | "invalid application of %<sizeof%> to a function type"); |
3960 | else if (!complain) |
3961 | return error_mark_node; |
3962 | value = size_one_node; |
3963 | } |
3964 | else |
3965 | { |
3966 | if (complain) |
3967 | { |
3968 | if (c_dialect_cxx ()) |
3969 | pedwarn (loc, OPT_Wpedantic, "ISO C++ does not permit " |
3970 | "%<alignof%> applied to a function type"); |
3971 | else |
3972 | pedwarn (loc, OPT_Wpedantic, "ISO C does not permit " |
3973 | "%<_Alignof%> applied to a function type"); |
3974 | } |
3975 | value = size_int (FUNCTION_BOUNDARY / BITS_PER_UNIT); |
3976 | } |
3977 | } |
3978 | else if (type_code == VOID_TYPE || type_code == ERROR_MARK) |
3979 | { |
3980 | if (type_code == VOID_TYPE |
3981 | && complain && warn_pointer_arith) |
3982 | pedwarn (loc, OPT_Wpointer_arith, |
3983 | "invalid application of %qs to a void type", op_name); |
3984 | else if (!complain) |
3985 | return error_mark_node; |
3986 | value = size_one_node; |
3987 | } |
3988 | else if (!COMPLETE_TYPE_P (type) |
3989 | && ((!c_dialect_cxx () && !flag_isoc2y) |
3990 | || is_sizeof |
3991 | || type_code != ARRAY_TYPE)) |
3992 | { |
3993 | if (complain) |
3994 | error_at (loc, "invalid application of %qs to incomplete type %qT", |
3995 | op_name, type); |
3996 | return error_mark_node; |
3997 | } |
3998 | else if (c_dialect_cxx () && type_code == ARRAY_TYPE |
3999 | && !COMPLETE_TYPE_P (TREE_TYPE (type))) |
4000 | { |
4001 | if (complain) |
4002 | error_at (loc, "invalid application of %qs to array type %qT of " |
4003 | "incomplete element type", op_name, type); |
4004 | return error_mark_node; |
4005 | } |
4006 | else if (!verify_type_context (loc, is_sizeof ? TCTX_SIZEOF : TCTX_ALIGNOF, |
4007 | type, !complain)) |
4008 | { |
4009 | if (!complain) |
4010 | return error_mark_node; |
4011 | value = size_one_node; |
4012 | } |
4013 | else |
4014 | { |
4015 | if (is_sizeof) |
4016 | /* Convert in case a char is more than one unit. */ |
4017 | value = size_binop_loc (loc, CEIL_DIV_EXPR, TYPE_SIZE_UNIT (type), |
4018 | size_int (TYPE_PRECISION (char_type_node) |
4019 | / BITS_PER_UNIT)); |
4020 | else if (min_alignof) |
4021 | value = size_int (min_align_of_type (type)); |
4022 | else |
4023 | value = size_int (TYPE_ALIGN_UNIT (type)); |
4024 | } |
4025 | |
4026 | /* VALUE will have the middle-end integer type sizetype. |
4027 | However, we should really return a value of type `size_t', |
4028 | which is just a typedef for an ordinary integer type. */ |
4029 | value = fold_convert_loc (loc, size_type_node, value); |
4030 | |
4031 | return value; |
4032 | } |
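| |
| /* Behavioural sketch (illustrative): `sizeof (void)' pedwarns under |
| -Wpointer-arith and evaluates to 1, `sizeof' of an incomplete |
| structure type is a hard error, and every result is returned with |
| type `size_t' rather than the internal sizetype. */ |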
4033 | |
4034 | /* Implement the __alignof keyword: Return the minimum required |
4035 | alignment of EXPR, measured in bytes. For VAR_DECLs, |
4036 | FUNCTION_DECLs and FIELD_DECLs return DECL_ALIGN (which can be set |
4037 | from an "aligned" __attribute__ specification). LOC is the |
4038 | location of the ALIGNOF operator. */ |
4039 | |
4040 | tree |
4041 | c_alignof_expr (location_t loc, tree expr) |
4042 | { |
4043 | tree t; |
4044 | |
4045 | if (!verify_type_context (loc, TCTX_ALIGNOF, TREE_TYPE (expr))) |
4046 | t = size_one_node; |
4047 | |
4048 | else if (VAR_OR_FUNCTION_DECL_P (expr)) |
4049 | t = size_int (DECL_ALIGN_UNIT (expr)); |
4050 | |
4051 | else if (TREE_CODE (expr) == COMPONENT_REF |
4052 | && DECL_C_BIT_FIELD (TREE_OPERAND (expr, 1))) |
4053 | { |
4054 | error_at (loc, "%<__alignof%> applied to a bit-field"); |
4055 | t = size_one_node; |
4056 | } |
4057 | else if (TREE_CODE (expr) == COMPONENT_REF |
4058 | && TREE_CODE (TREE_OPERAND (expr, 1)) == FIELD_DECL) |
4059 | t = size_int (DECL_ALIGN_UNIT (TREE_OPERAND (expr, 1))); |
4060 | |
4061 | else if (INDIRECT_REF_P (expr)) |
4062 | { |
4063 | tree t = TREE_OPERAND (expr, 0); |
4064 | tree best = t; |
4065 | int bestalign = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (t))); |
4066 | |
4067 | while (CONVERT_EXPR_P (t) |
4068 | && TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0))) == POINTER_TYPE) |
4069 | { |
4070 | int thisalign; |
4071 | |
4072 | t = TREE_OPERAND (t, 0); |
4073 | thisalign = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (t))); |
4074 | if (thisalign > bestalign) |
4075 | best = t, bestalign = thisalign; |
4076 | } |
4077 | return c_alignof (loc, TREE_TYPE (TREE_TYPE (best))); |
4078 | } |
4079 | else |
4080 | return c_alignof (loc, TREE_TYPE (expr)); |
4081 | |
4082 | return fold_convert_loc (loc, size_type_node, t); |
4083 | } |
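| |
| /* Sketch of the behaviour (illustrative): `__alignof__' of a bit-field |
| member is rejected, `__alignof__ (var)' honours an `aligned' |
| attribute via DECL_ALIGN, and for `__alignof__ (*p)' the operand is |
| walked through pointer casts to report the strictest alignment |
| implied by any of the pointer types seen. */ |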
4084 | |
4085 | /* Implement the _Countof keyword: |
4086 | Return the number of elements of an array. */ |
4087 | |
4088 | tree |
4089 | c_countof_type (location_t loc, tree type) |
4090 | { |
4091 | enum tree_code type_code; |
4092 | |
4093 | type_code = TREE_CODE (type); |
4094 | if (type_code != ARRAY_TYPE) |
4095 | { |
4096 | error_at (loc, "invalid application of %<_Countof%> to type %qT", type); |
4097 | return error_mark_node; |
4098 | } |
4099 | if (!COMPLETE_TYPE_P (type)) |
4100 | { |
4101 | error_at (loc, |
4102 | "invalid application of %<_Countof%> to incomplete type %qT", |
4103 | type); |
4104 | return error_mark_node; |
4105 | } |
4106 | |
4107 | return array_type_nelts_top (type); |
4108 | } |
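| |
| /* Example (illustrative, C2Y): `_Countof (int[8])' yields the constant |
| 8 via array_type_nelts_top, while applying it to a pointer or to an |
| array of unknown bound is diagnosed by the errors above. */ |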
4109 | |
4110 | /* Handle C and C++ default attributes. */ |
4111 | |
4112 | enum built_in_attribute |
4113 | { |
4114 | #define DEF_ATTR_NULL_TREE(ENUM) ENUM, |
4115 | #define DEF_ATTR_INT(ENUM, VALUE) ENUM, |
4116 | #define DEF_ATTR_STRING(ENUM, VALUE) ENUM, |
4117 | #define DEF_ATTR_IDENT(ENUM, STRING) ENUM, |
4118 | #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM, |
4119 | #include "builtin-attrs.def" |
4120 | #undef DEF_ATTR_NULL_TREE |
4121 | #undef DEF_ATTR_INT |
4122 | #undef DEF_ATTR_STRING |
4123 | #undef DEF_ATTR_IDENT |
4124 | #undef DEF_ATTR_TREE_LIST |
4125 | ATTR_LAST |
4126 | }; |
4127 | |
4128 | static GTY(()) tree built_in_attributes[(int) ATTR_LAST]; |
4129 | |
4130 | static void c_init_attributes (void); |
4131 | |
4132 | enum c_builtin_type |
4133 | { |
4134 | #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME, |
4135 | #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME, |
4136 | #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME, |
4137 | #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME, |
4138 | #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME, |
4139 | #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME, |
4140 | #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME, |
4141 | #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ |
4142 | ARG6) NAME, |
4143 | #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ |
4144 | ARG6, ARG7) NAME, |
4145 | #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ |
4146 | ARG6, ARG7, ARG8) NAME, |
4147 | #define DEF_FUNCTION_TYPE_9(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ |
4148 | ARG6, ARG7, ARG8, ARG9) NAME, |
4149 | #define DEF_FUNCTION_TYPE_10(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ |
4150 | ARG6, ARG7, ARG8, ARG9, ARG10) NAME, |
4151 | #define DEF_FUNCTION_TYPE_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ |
4152 | ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME, |
4153 | #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME, |
4154 | #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME, |
4155 | #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME, |
4156 | #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME, |
4157 | #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME, |
4158 | #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \ |
4159 | NAME, |
4160 | #define DEF_FUNCTION_TYPE_VAR_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ |
4161 | ARG6) NAME, |
4162 | #define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ |
4163 | ARG6, ARG7) NAME, |
4164 | #define DEF_POINTER_TYPE(NAME, TYPE) NAME, |
4165 | #include "builtin-types.def" |
4166 | #undef DEF_PRIMITIVE_TYPE |
4167 | #undef DEF_FUNCTION_TYPE_0 |
4168 | #undef DEF_FUNCTION_TYPE_1 |
4169 | #undef DEF_FUNCTION_TYPE_2 |
4170 | #undef DEF_FUNCTION_TYPE_3 |
4171 | #undef DEF_FUNCTION_TYPE_4 |
4172 | #undef DEF_FUNCTION_TYPE_5 |
4173 | #undef DEF_FUNCTION_TYPE_6 |
4174 | #undef DEF_FUNCTION_TYPE_7 |
4175 | #undef DEF_FUNCTION_TYPE_8 |
4176 | #undef DEF_FUNCTION_TYPE_9 |
4177 | #undef DEF_FUNCTION_TYPE_10 |
4178 | #undef DEF_FUNCTION_TYPE_11 |
4179 | #undef DEF_FUNCTION_TYPE_VAR_0 |
4180 | #undef DEF_FUNCTION_TYPE_VAR_1 |
4181 | #undef DEF_FUNCTION_TYPE_VAR_2 |
4182 | #undef DEF_FUNCTION_TYPE_VAR_3 |
4183 | #undef DEF_FUNCTION_TYPE_VAR_4 |
4184 | #undef DEF_FUNCTION_TYPE_VAR_5 |
4185 | #undef DEF_FUNCTION_TYPE_VAR_6 |
4186 | #undef DEF_FUNCTION_TYPE_VAR_7 |
4187 | #undef DEF_POINTER_TYPE |
4188 | BT_LAST |
4189 | }; |
4190 | |
4191 | typedef enum c_builtin_type builtin_type; |
4192 | |
4193 | /* A temporary array for c_common_nodes_and_builtins. Used in |
4194 | communication with def_fn_type. */ |
4195 | static tree builtin_types[(int) BT_LAST + 1]; |
4196 | |
4197 | /* A helper function for c_common_nodes_and_builtins. Build function type |
4198 | for DEF with return type RET and N arguments. If VAR is true, then the |
4199 | function should be variadic after those N arguments, or, if N is zero, |
4200 | unprototyped. |
4201 | |
4202 | Takes special care not to ICE if any of the types involved are |
4203 | error_mark_node, which indicates that said type is not in fact available |
4204 | (see builtin_type_for_size), in which case the function type as a whole |
4205 | should be error_mark_node. */ |
4206 | |
4207 | static void |
4208 | def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...) |
4209 | { |
4210 | tree t; |
4211 | tree *args = XALLOCAVEC (tree, n); |
4212 | va_list list; |
4213 | int i; |
4214 | |
4215 | va_start (list, n); |
4216 | for (i = 0; i < n; ++i) |
4217 | { |
4218 | builtin_type a = (builtin_type) va_arg (list, int); |
4219 | t = builtin_types[a]; |
4220 | if (t == error_mark_node) |
4221 | goto egress; |
4222 | args[i] = t; |
4223 | } |
4224 | |
4225 | t = builtin_types[ret]; |
4226 | if (t == error_mark_node) |
4227 | goto egress; |
4228 | if (var) |
4229 | if (n == 0) |
4230 | t = build_function_type (t, NULL_TREE); |
4231 | else |
4232 | t = build_varargs_function_type_array (t, n, args); |
4233 | else |
4234 | t = build_function_type_array (t, n, args); |
4235 | |
4236 | egress: |
4237 | builtin_types[def] = t; |
4238 | va_end (list); |
4239 | } |
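 | |
 | /* Illustrative sketch (hypothetical entry; the authoritative list lives |
 |    in builtin-types.def): an entry such as |
 | |
 |      DEF_FUNCTION_TYPE_2 (BT_FN_INT_INT_INT, BT_INT, BT_INT, BT_INT) |
 | |
 |    is expanded by c_define_builtins below into |
 | |
 |      def_fn_type (BT_FN_INT_INT_INT, BT_INT, 0, 2, BT_INT, BT_INT); |
 | |
 |    which records the tree for 'int (int, int)' in |
 |    builtin_types[BT_FN_INT_INT_INT], or error_mark_node if any of the |
 |    slots involved was itself error_mark_node.  */ |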
4240 | |
4241 | /* Build builtin functions common to both C and C++ language |
4242 | frontends. */ |
4243 | |
4244 | static void |
4245 | c_define_builtins (tree va_list_ref_type_node, tree va_list_arg_type_node) |
4246 | { |
4247 | #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \ |
4248 | builtin_types[ENUM] = VALUE; |
4249 | #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \ |
4250 | def_fn_type (ENUM, RETURN, 0, 0); |
4251 | #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \ |
4252 | def_fn_type (ENUM, RETURN, 0, 1, ARG1); |
4253 | #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \ |
4254 | def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2); |
4255 | #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \ |
4256 | def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3); |
4257 | #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \ |
4258 | def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4); |
4259 | #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \ |
4260 | def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5); |
4261 | #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ |
4262 | ARG6) \ |
4263 | def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6); |
4264 | #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ |
4265 | ARG6, ARG7) \ |
4266 | def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7); |
4267 | #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ |
4268 | ARG6, ARG7, ARG8) \ |
4269 | def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \ |
4270 | ARG7, ARG8); |
4271 | #define DEF_FUNCTION_TYPE_9(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ |
4272 | ARG6, ARG7, ARG8, ARG9) \ |
4273 | def_fn_type (ENUM, RETURN, 0, 9, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \ |
4274 | ARG7, ARG8, ARG9); |
4275 | #define DEF_FUNCTION_TYPE_10(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ |
4276 | ARG6, ARG7, ARG8, ARG9, ARG10) \ |
4277 | def_fn_type (ENUM, RETURN, 0, 10, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \ |
4278 | ARG7, ARG8, ARG9, ARG10); |
4279 | #define DEF_FUNCTION_TYPE_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ |
4280 | ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) \ |
4281 | def_fn_type (ENUM, RETURN, 0, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \ |
4282 | ARG7, ARG8, ARG9, ARG10, ARG11); |
4283 | #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \ |
4284 | def_fn_type (ENUM, RETURN, 1, 0); |
4285 | #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \ |
4286 | def_fn_type (ENUM, RETURN, 1, 1, ARG1); |
4287 | #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \ |
4288 | def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2); |
4289 | #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \ |
4290 | def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3); |
4291 | #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \ |
4292 | def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4); |
4293 | #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \ |
4294 | def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5); |
4295 | #define DEF_FUNCTION_TYPE_VAR_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ |
4296 | ARG6) \ |
4297 | def_fn_type (ENUM, RETURN, 1, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6); |
4298 | #define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ |
4299 | ARG6, ARG7) \ |
4300 | def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7); |
4301 | #define DEF_POINTER_TYPE(ENUM, TYPE) \ |
4302 | builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]); |
4303 | |
4304 | #include "builtin-types.def" |
4305 | |
4306 | #undef DEF_PRIMITIVE_TYPE |
4307 | #undef DEF_FUNCTION_TYPE_0 |
4308 | #undef DEF_FUNCTION_TYPE_1 |
4309 | #undef DEF_FUNCTION_TYPE_2 |
4310 | #undef DEF_FUNCTION_TYPE_3 |
4311 | #undef DEF_FUNCTION_TYPE_4 |
4312 | #undef DEF_FUNCTION_TYPE_5 |
4313 | #undef DEF_FUNCTION_TYPE_6 |
4314 | #undef DEF_FUNCTION_TYPE_7 |
4315 | #undef DEF_FUNCTION_TYPE_8 |
4316 | #undef DEF_FUNCTION_TYPE_9 |
4317 | #undef DEF_FUNCTION_TYPE_10 |
4318 | #undef DEF_FUNCTION_TYPE_11 |
4319 | #undef DEF_FUNCTION_TYPE_VAR_0 |
4320 | #undef DEF_FUNCTION_TYPE_VAR_1 |
4321 | #undef DEF_FUNCTION_TYPE_VAR_2 |
4322 | #undef DEF_FUNCTION_TYPE_VAR_3 |
4323 | #undef DEF_FUNCTION_TYPE_VAR_4 |
4324 | #undef DEF_FUNCTION_TYPE_VAR_5 |
4325 | #undef DEF_FUNCTION_TYPE_VAR_6 |
4326 | #undef DEF_FUNCTION_TYPE_VAR_7 |
4327 | #undef DEF_POINTER_TYPE |
4328 | builtin_types[(int) BT_LAST] = NULL_TREE; |
4329 | |
4330 | c_init_attributes (); |
4331 | |
4332 | #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \ |
4333 | NONANSI_P, ATTRS, IMPLICIT, COND) \ |
4334 | if (NAME && COND) \ |
4335 | def_builtin_1 (ENUM, NAME, CLASS, \ |
4336 | builtin_types[(int) TYPE], \ |
4337 | builtin_types[(int) LIBTYPE], \ |
4338 | BOTH_P, FALLBACK_P, NONANSI_P, \ |
4339 | built_in_attributes[(int) ATTRS], IMPLICIT); |
4340 | #include "builtins.def" |
4341 | |
4342 | targetm.init_builtins (); |
4343 | |
4344 | build_common_builtin_nodes (); |
4345 | } |
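 | |
 | /* Illustrative sketch of the DEF_BUILTIN expansion above, using a |
 |    hypothetical entry (the real table is builtins.def): |
 | |
 |      DEF_BUILTIN (BUILT_IN_FOO, "__builtin_foo", BUILT_IN_NORMAL, |
 | 		  BT_FN_INT_INT, BT_FN_INT_INT, true, true, false, |
 | 		  ATTR_NOTHROW_LEAF_LIST, true, true) |
 | |
 |    expands to the guarded call |
 | |
 |      if ("__builtin_foo" && true) |
 | 	def_builtin_1 (BUILT_IN_FOO, "__builtin_foo", BUILT_IN_NORMAL, |
 | 		       builtin_types[(int) BT_FN_INT_INT], |
 | 		       builtin_types[(int) BT_FN_INT_INT], |
 | 		       true, true, false, |
 | 		       built_in_attributes[(int) ATTR_NOTHROW_LEAF_LIST], |
 | 		       true); |
 | |
 |    so entries whose NAME or COND is zero are skipped entirely.  */ |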
4346 | |
4347 | /* Like get_identifier, but avoid warnings about null arguments when |
4348 | the argument may be NULL for targets where GCC lacks stdint.h type |
4349 | information. */ |
4350 | |
4351 | static inline tree |
4352 | c_get_ident (const char *id) |
4353 | { |
4354 | return get_identifier (id); |
4355 | } |
4356 | |
4357 | /* Build tree nodes and builtin functions common to both C and C++ language |
4358 | frontends. */ |
4359 | |
4360 | void |
4361 | c_common_nodes_and_builtins (void) |
4362 | { |
4363 | int char8_type_size; |
4364 | int char16_type_size; |
4365 | int char32_type_size; |
4366 | int wchar_type_size; |
4367 | tree array_domain_type; |
4368 | tree va_list_ref_type_node; |
4369 | tree va_list_arg_type_node; |
4370 | int i; |
4371 | |
4372 | build_common_tree_nodes (flag_signed_char); |
4373 | |
4374 | /* Define `int' and `char' first so that dbx will output them first. */ |
4375 | record_builtin_type (RID_INT, NULL, integer_type_node); |
4376 | record_builtin_type (RID_CHAR, "char", char_type_node); |
4377 | |
4378 | /* `signed' is the same as `int'. FIXME: the declarations of "signed", |
4379 | "unsigned long", "long long unsigned" and "unsigned short" were in C++ |
4380 | but not C. Are the conditionals here needed? */ |
4381 | if (c_dialect_cxx ()) |
4382 | record_builtin_type (RID_SIGNED, NULL, integer_type_node); |
4383 | record_builtin_type (RID_LONG, "long int", long_integer_type_node); |
4384 | record_builtin_type (RID_UNSIGNED, "unsigned int", unsigned_type_node); |
4385 | record_builtin_type (RID_MAX, "long unsigned int", |
4386 | long_unsigned_type_node); |
4387 | |
4388 | for (i = 0; i < NUM_INT_N_ENTS; i ++) |
4389 | { |
4390 | char name[25]; |
4391 | |
4392 |       sprintf (name, "__int%d", int_n_data[i].bitsize); |
4393 | record_builtin_type ((enum rid)(RID_FIRST_INT_N + i), name, |
4394 | int_n_trees[i].signed_type); |
4395 |       sprintf (name, "__int%d__", int_n_data[i].bitsize); |
4396 | record_builtin_type ((enum rid)(RID_FIRST_INT_N + i), name, |
4397 | int_n_trees[i].signed_type); |
4398 | ridpointers[RID_FIRST_INT_N + i] |
4399 | = DECL_NAME (TYPE_NAME (int_n_trees[i].signed_type)); |
4400 | |
4401 |       sprintf (name, "__int%d unsigned", int_n_data[i].bitsize); |
4402 | record_builtin_type (RID_MAX, name, int_n_trees[i].unsigned_type); |
4403 |       sprintf (name, "__int%d__ unsigned", int_n_data[i].bitsize); |
4404 | record_builtin_type (RID_MAX, name, int_n_trees[i].unsigned_type); |
4405 | } |
4406 | |
4407 | if (c_dialect_cxx ()) |
4408 | record_builtin_type (RID_MAX, "unsigned long", long_unsigned_type_node); |
4409 | record_builtin_type (RID_MAX, "long long int", |
4410 | long_long_integer_type_node); |
4411 | record_builtin_type (RID_MAX, "long long unsigned int", |
4412 | long_long_unsigned_type_node); |
4413 | if (c_dialect_cxx ()) |
4414 | record_builtin_type (RID_MAX, "long long unsigned", |
4415 | long_long_unsigned_type_node); |
4416 | record_builtin_type (RID_SHORT, "short int", short_integer_type_node); |
4417 | record_builtin_type (RID_MAX, "short unsigned int", |
4418 | short_unsigned_type_node); |
4419 | if (c_dialect_cxx ()) |
4420 | record_builtin_type (RID_MAX, "unsigned short", |
4421 | short_unsigned_type_node); |
4422 | |
4423 | /* Define both `signed char' and `unsigned char'. */ |
4424 | record_builtin_type (RID_MAX, "signed char", signed_char_type_node); |
4425 | record_builtin_type (RID_MAX, "unsigned char", unsigned_char_type_node); |
4426 | |
4427 | /* These are types that c_common_type_for_size and |
4428 | c_common_type_for_mode use. */ |
4429 | lang_hooks.decls.pushdecl (build_decl (UNKNOWN_LOCATION, |
4430 | TYPE_DECL, NULL_TREE, |
4431 | intQI_type_node)); |
4432 | lang_hooks.decls.pushdecl (build_decl (UNKNOWN_LOCATION, |
4433 | TYPE_DECL, NULL_TREE, |
4434 | intHI_type_node)); |
4435 | lang_hooks.decls.pushdecl (build_decl (UNKNOWN_LOCATION, |
4436 | TYPE_DECL, NULL_TREE, |
4437 | intSI_type_node)); |
4438 | lang_hooks.decls.pushdecl (build_decl (UNKNOWN_LOCATION, |
4439 | TYPE_DECL, NULL_TREE, |
4440 | intDI_type_node)); |
4441 | #if HOST_BITS_PER_WIDE_INT >= 64 |
4442 |   /* Note that this is different from the __int128 type that's part of |
4443 |      the generic __intN support.  */ |
4444 | if (targetm.scalar_mode_supported_p (TImode)) |
4445 | lang_hooks.decls.pushdecl (build_decl (UNKNOWN_LOCATION, |
4446 | TYPE_DECL, |
4447 | get_identifier ("__int128_t"), |
4448 | intTI_type_node)); |
4449 | #endif |
4450 | lang_hooks.decls.pushdecl (build_decl (UNKNOWN_LOCATION, |
4451 | TYPE_DECL, NULL_TREE, |
4452 | unsigned_intQI_type_node)); |
4453 | lang_hooks.decls.pushdecl (build_decl (UNKNOWN_LOCATION, |
4454 | TYPE_DECL, NULL_TREE, |
4455 | unsigned_intHI_type_node)); |
4456 | lang_hooks.decls.pushdecl (build_decl (UNKNOWN_LOCATION, |
4457 | TYPE_DECL, NULL_TREE, |
4458 | unsigned_intSI_type_node)); |
4459 | lang_hooks.decls.pushdecl (build_decl (UNKNOWN_LOCATION, |
4460 | TYPE_DECL, NULL_TREE, |
4461 | unsigned_intDI_type_node)); |
4462 | #if HOST_BITS_PER_WIDE_INT >= 64 |
4463 | if (targetm.scalar_mode_supported_p (TImode)) |
4464 | lang_hooks.decls.pushdecl (build_decl (UNKNOWN_LOCATION, |
4465 | TYPE_DECL, |
4466 | get_identifier ("__uint128_t"), |
4467 | unsigned_intTI_type_node)); |
4468 | #endif |
4469 | |
4470 | /* Create the widest literal types. */ |
4471 | if (targetm.scalar_mode_supported_p (TImode)) |
4472 | { |
4473 | widest_integer_literal_type_node = intTI_type_node; |
4474 | widest_unsigned_literal_type_node = unsigned_intTI_type_node; |
4475 | } |
4476 | else |
4477 | { |
4478 | widest_integer_literal_type_node = intDI_type_node; |
4479 | widest_unsigned_literal_type_node = unsigned_intDI_type_node; |
4480 | } |
4481 | |
4482 | signed_size_type_node = c_common_signed_type (size_type_node); |
4483 | |
4484 | pid_type_node = |
4485 | TREE_TYPE (identifier_global_value (get_identifier (PID_TYPE))); |
4486 | |
4487 | record_builtin_type (RID_FLOAT, NULL, float_type_node); |
4488 | record_builtin_type (RID_DOUBLE, NULL, double_type_node); |
4489 | record_builtin_type (RID_MAX, "long double", long_double_type_node); |
4490 | |
4491 | for (i = 0; i < NUM_FLOATN_NX_TYPES; i++) |
4492 | { |
4493 | if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE) |
4494 | record_builtin_type ((enum rid) (RID_FLOATN_NX_FIRST + i), NULL, |
4495 | FLOATN_NX_TYPE_NODE (i)); |
4496 | } |
4497 | |
4498 | /* For C, let float128t_type_node (__float128 in some backends) be the |
4499 |      same type as float128_type_node (_Float128); for C++, let those |
4500 | be distinct types that mangle and behave differently. */ |
4501 | if (c_dialect_cxx ()) |
4502 | float128t_type_node = NULL_TREE; |
4503 | |
4504 |   /* Only support the decimal floating point extension if the target |
4505 |      actually supports the underlying modes.  */ |
4506 | if (targetm.scalar_mode_supported_p (SDmode) |
4507 | && targetm.scalar_mode_supported_p (DDmode) |
4508 | && targetm.scalar_mode_supported_p (TDmode)) |
4509 | { |
4510 | record_builtin_type (RID_DFLOAT32, NULL, dfloat32_type_node); |
4511 | record_builtin_type (RID_DFLOAT64, NULL, dfloat64_type_node); |
4512 | record_builtin_type (RID_DFLOAT128, NULL, dfloat128_type_node); |
4513 | record_builtin_type (RID_DFLOAT64X, NULL, dfloat64x_type_node); |
4514 | } |
4515 | |
4516 | if (targetm.fixed_point_supported_p ()) |
4517 | { |
4518 | record_builtin_type (RID_MAX, "short _Fract", short_fract_type_node); |
4519 | record_builtin_type (RID_FRACT, NULL, fract_type_node); |
4520 | record_builtin_type (RID_MAX, "long _Fract", long_fract_type_node); |
4521 | record_builtin_type (RID_MAX, "long long _Fract", |
4522 | long_long_fract_type_node); |
4523 | record_builtin_type (RID_MAX, "unsigned short _Fract", |
4524 | unsigned_short_fract_type_node); |
4525 | record_builtin_type (RID_MAX, "unsigned _Fract", |
4526 | unsigned_fract_type_node); |
4527 | record_builtin_type (RID_MAX, "unsigned long _Fract", |
4528 | unsigned_long_fract_type_node); |
4529 | record_builtin_type (RID_MAX, "unsigned long long _Fract", |
4530 | unsigned_long_long_fract_type_node); |
4531 | record_builtin_type (RID_MAX, "_Sat short _Fract", |
4532 | sat_short_fract_type_node); |
4533 | record_builtin_type (RID_MAX, "_Sat _Fract", sat_fract_type_node); |
4534 | record_builtin_type (RID_MAX, "_Sat long _Fract", |
4535 | sat_long_fract_type_node); |
4536 | record_builtin_type (RID_MAX, "_Sat long long _Fract", |
4537 | sat_long_long_fract_type_node); |
4538 | record_builtin_type (RID_MAX, "_Sat unsigned short _Fract", |
4539 | sat_unsigned_short_fract_type_node); |
4540 | record_builtin_type (RID_MAX, "_Sat unsigned _Fract", |
4541 | sat_unsigned_fract_type_node); |
4542 | record_builtin_type (RID_MAX, "_Sat unsigned long _Fract", |
4543 | sat_unsigned_long_fract_type_node); |
4544 | record_builtin_type (RID_MAX, "_Sat unsigned long long _Fract", |
4545 | sat_unsigned_long_long_fract_type_node); |
4546 | record_builtin_type (RID_MAX, "short _Accum", short_accum_type_node); |
4547 | record_builtin_type (RID_ACCUM, NULL, accum_type_node); |
4548 | record_builtin_type (RID_MAX, "long _Accum", long_accum_type_node); |
4549 | record_builtin_type (RID_MAX, "long long _Accum", |
4550 | long_long_accum_type_node); |
4551 | record_builtin_type (RID_MAX, "unsigned short _Accum", |
4552 | unsigned_short_accum_type_node); |
4553 | record_builtin_type (RID_MAX, "unsigned _Accum", |
4554 | unsigned_accum_type_node); |
4555 | record_builtin_type (RID_MAX, "unsigned long _Accum", |
4556 | unsigned_long_accum_type_node); |
4557 | record_builtin_type (RID_MAX, "unsigned long long _Accum", |
4558 | unsigned_long_long_accum_type_node); |
4559 | record_builtin_type (RID_MAX, "_Sat short _Accum", |
4560 | sat_short_accum_type_node); |
4561 | record_builtin_type (RID_MAX, "_Sat _Accum", sat_accum_type_node); |
4562 | record_builtin_type (RID_MAX, "_Sat long _Accum", |
4563 | sat_long_accum_type_node); |
4564 | record_builtin_type (RID_MAX, "_Sat long long _Accum", |
4565 | sat_long_long_accum_type_node); |
4566 | record_builtin_type (RID_MAX, "_Sat unsigned short _Accum", |
4567 | sat_unsigned_short_accum_type_node); |
4568 | record_builtin_type (RID_MAX, "_Sat unsigned _Accum", |
4569 | sat_unsigned_accum_type_node); |
4570 | record_builtin_type (RID_MAX, "_Sat unsigned long _Accum", |
4571 | sat_unsigned_long_accum_type_node); |
4572 | record_builtin_type (RID_MAX, "_Sat unsigned long long _Accum", |
4573 | sat_unsigned_long_long_accum_type_node); |
4574 | |
4575 | } |
4576 | |
4577 | lang_hooks.decls.pushdecl (build_decl (UNKNOWN_LOCATION, |
4578 | TYPE_DECL, |
4579 | get_identifier ("complex int"), |
4580 | complex_integer_type_node)); |
4581 | lang_hooks.decls.pushdecl (build_decl (UNKNOWN_LOCATION, |
4582 | TYPE_DECL, |
4583 | get_identifier ("complex float"), |
4584 | complex_float_type_node)); |
4585 | lang_hooks.decls.pushdecl (build_decl (UNKNOWN_LOCATION, |
4586 | TYPE_DECL, |
4587 | get_identifier ("complex double"), |
4588 | complex_double_type_node)); |
4589 | lang_hooks.decls.pushdecl |
4590 | (build_decl (UNKNOWN_LOCATION, |
4591 | TYPE_DECL, get_identifier ("complex long double"), |
4592 | complex_long_double_type_node)); |
4593 | |
4594 | if (!c_dialect_cxx ()) |
4595 | for (i = 0; i < NUM_FLOATN_NX_TYPES; i++) |
4596 | if (COMPLEX_FLOATN_NX_TYPE_NODE (i) != NULL_TREE) |
4597 | { |
4598 | char buf[30]; |
4599 | 	  sprintf (buf, "complex _Float%d%s", floatn_nx_types[i].n, |
4600 | floatn_nx_types[i].extended ? "x": ""); |
4601 | lang_hooks.decls.pushdecl |
4602 | (build_decl (UNKNOWN_LOCATION, |
4603 | TYPE_DECL, |
4604 | get_identifier (buf), |
4605 | COMPLEX_FLOATN_NX_TYPE_NODE (i))); |
4606 | } |
4607 | |
4608 | /* Make fileptr_type_node a distinct void * type until |
4609 | FILE type is defined. Likewise for const struct tm*. */ |
4610 | for (unsigned i = 0; i < ARRAY_SIZE (builtin_structptr_types); ++i) |
4611 | builtin_structptr_types[i].node |
4612 | = build_variant_type_copy (builtin_structptr_types[i].base); |
4613 | |
4614 | record_builtin_type (RID_VOID, NULL, void_type_node); |
4615 | |
4616 | /* Set the TYPE_NAME for any variants that were built before |
4617 | record_builtin_type gave names to the built-in types. */ |
4618 | { |
4619 | tree void_name = TYPE_NAME (void_type_node); |
4620 | TYPE_NAME (void_type_node) = NULL_TREE; |
4621 | TYPE_NAME (build_qualified_type (void_type_node, TYPE_QUAL_CONST)) |
4622 | = void_name; |
4623 | TYPE_NAME (void_type_node) = void_name; |
4624 | } |
4625 | |
4626 | /* Make a type to be the domain of a few array types |
4627 | whose domains don't really matter. |
4628 | 200 is small enough that it always fits in size_t |
4629 | and large enough that it can hold most function names for the |
4630 | initializations of __FUNCTION__ and __PRETTY_FUNCTION__. */ |
4631 | array_domain_type = build_index_type (size_int (200)); |
4632 | |
4633 | /* Make a type for arrays of characters. |
4634 | With luck nothing will ever really depend on the length of this |
4635 | array type. */ |
4636 | char_array_type_node |
4637 | = build_array_type (char_type_node, array_domain_type); |
4638 | |
4639 | string_type_node = build_pointer_type (char_type_node); |
4640 | const_string_type_node |
4641 | = build_pointer_type (build_qualified_type |
4642 | (char_type_node, TYPE_QUAL_CONST)); |
4643 | |
4644 | /* This is special for C++ so functions can be overloaded. */ |
4645 | wchar_type_node = get_identifier (MODIFIED_WCHAR_TYPE); |
4646 | wchar_type_node = TREE_TYPE (identifier_global_value (wchar_type_node)); |
4647 | wchar_type_size = TYPE_PRECISION (wchar_type_node); |
4648 | underlying_wchar_type_node = wchar_type_node; |
4649 | if (c_dialect_cxx ()) |
4650 | { |
4651 | if (TYPE_UNSIGNED (wchar_type_node)) |
4652 | wchar_type_node = make_unsigned_type (wchar_type_size); |
4653 | else |
4654 | wchar_type_node = make_signed_type (wchar_type_size); |
4655 | record_builtin_type (RID_WCHAR, "wchar_t", wchar_type_node); |
4656 | } |
4657 | |
4658 | /* This is for wide string constants. */ |
4659 | wchar_array_type_node |
4660 | = build_array_type (wchar_type_node, array_domain_type); |
4661 | |
4662 | /* Define 'char8_t'. */ |
4663 | char8_type_node = get_identifier (CHAR8_TYPE); |
4664 | char8_type_node = TREE_TYPE (identifier_global_value (char8_type_node)); |
4665 | char8_type_size = TYPE_PRECISION (char8_type_node); |
4666 | if (c_dialect_cxx ()) |
4667 | { |
4668 | char8_type_node = make_unsigned_type (char8_type_size); |
4669 | TYPE_STRING_FLAG (char8_type_node) = true; |
4670 | |
4671 | if (flag_char8_t) |
4672 | record_builtin_type (RID_CHAR8, "char8_t", char8_type_node); |
4673 | } |
4674 | |
4675 | /* This is for UTF-8 string constants. */ |
4676 | char8_array_type_node |
4677 | = build_array_type (char8_type_node, array_domain_type); |
4678 | |
4679 | /* Define 'char16_t'. */ |
4680 | char16_type_node = get_identifier (CHAR16_TYPE); |
4681 | char16_type_node = TREE_TYPE (identifier_global_value (char16_type_node)); |
4682 | char16_type_size = TYPE_PRECISION (char16_type_node); |
4683 | if (c_dialect_cxx ()) |
4684 | { |
4685 | char16_type_node = make_unsigned_type (char16_type_size); |
4686 | |
4687 | if (cxx_dialect >= cxx11) |
4688 | record_builtin_type (RID_CHAR16, "char16_t", char16_type_node); |
4689 | } |
4690 | |
4691 | /* This is for UTF-16 string constants. */ |
4692 | char16_array_type_node |
4693 | = build_array_type (char16_type_node, array_domain_type); |
4694 | |
4695 | /* Define 'char32_t'. */ |
4696 | char32_type_node = get_identifier (CHAR32_TYPE); |
4697 | char32_type_node = TREE_TYPE (identifier_global_value (char32_type_node)); |
4698 | char32_type_size = TYPE_PRECISION (char32_type_node); |
4699 | if (c_dialect_cxx ()) |
4700 | { |
4701 | char32_type_node = make_unsigned_type (char32_type_size); |
4702 | |
4703 | if (cxx_dialect >= cxx11) |
4704 | record_builtin_type (RID_CHAR32, "char32_t", char32_type_node); |
4705 | } |
4706 | |
4707 | /* This is for UTF-32 string constants. */ |
4708 | char32_array_type_node |
4709 | = build_array_type (char32_type_node, array_domain_type); |
4710 | |
4711 |   if (strcmp (WINT_TYPE, "wchar_t") == 0) |
4712 | wint_type_node = wchar_type_node; |
4713 | else |
4714 | wint_type_node = |
4715 | TREE_TYPE (identifier_global_value (get_identifier (WINT_TYPE))); |
4716 | |
4717 | intmax_type_node = |
4718 | TREE_TYPE (identifier_global_value (get_identifier (INTMAX_TYPE))); |
4719 | uintmax_type_node = |
4720 | TREE_TYPE (identifier_global_value (get_identifier (UINTMAX_TYPE))); |
4721 | |
4722 | if (SIG_ATOMIC_TYPE) |
4723 | sig_atomic_type_node = |
4724 | TREE_TYPE (identifier_global_value (c_get_ident (SIG_ATOMIC_TYPE))); |
4725 | if (INT8_TYPE) |
4726 | int8_type_node = |
4727 | TREE_TYPE (identifier_global_value (c_get_ident (INT8_TYPE))); |
4728 | if (INT16_TYPE) |
4729 | int16_type_node = |
4730 | TREE_TYPE (identifier_global_value (c_get_ident (INT16_TYPE))); |
4731 | if (INT32_TYPE) |
4732 | int32_type_node = |
4733 | TREE_TYPE (identifier_global_value (c_get_ident (INT32_TYPE))); |
4734 | if (INT64_TYPE) |
4735 | int64_type_node = |
4736 | TREE_TYPE (identifier_global_value (c_get_ident (INT64_TYPE))); |
4737 | if (UINT8_TYPE) |
4738 | uint8_type_node = |
4739 | TREE_TYPE (identifier_global_value (c_get_ident (UINT8_TYPE))); |
4740 | if (UINT16_TYPE) |
4741 | c_uint16_type_node = uint16_type_node = |
4742 | TREE_TYPE (identifier_global_value (c_get_ident (UINT16_TYPE))); |
4743 | if (UINT32_TYPE) |
4744 | c_uint32_type_node = uint32_type_node = |
4745 | TREE_TYPE (identifier_global_value (c_get_ident (UINT32_TYPE))); |
4746 | if (UINT64_TYPE) |
4747 | c_uint64_type_node = uint64_type_node = |
4748 | TREE_TYPE (identifier_global_value (c_get_ident (UINT64_TYPE))); |
4749 | if (INT_LEAST8_TYPE) |
4750 | int_least8_type_node = |
4751 | TREE_TYPE (identifier_global_value (c_get_ident (INT_LEAST8_TYPE))); |
4752 | if (INT_LEAST16_TYPE) |
4753 | int_least16_type_node = |
4754 | TREE_TYPE (identifier_global_value (c_get_ident (INT_LEAST16_TYPE))); |
4755 | if (INT_LEAST32_TYPE) |
4756 | int_least32_type_node = |
4757 | TREE_TYPE (identifier_global_value (c_get_ident (INT_LEAST32_TYPE))); |
4758 | if (INT_LEAST64_TYPE) |
4759 | int_least64_type_node = |
4760 | TREE_TYPE (identifier_global_value (c_get_ident (INT_LEAST64_TYPE))); |
4761 | if (UINT_LEAST8_TYPE) |
4762 | uint_least8_type_node = |
4763 | TREE_TYPE (identifier_global_value (c_get_ident (UINT_LEAST8_TYPE))); |
4764 | if (UINT_LEAST16_TYPE) |
4765 | uint_least16_type_node = |
4766 | TREE_TYPE (identifier_global_value (c_get_ident (UINT_LEAST16_TYPE))); |
4767 | if (UINT_LEAST32_TYPE) |
4768 | uint_least32_type_node = |
4769 | TREE_TYPE (identifier_global_value (c_get_ident (UINT_LEAST32_TYPE))); |
4770 | if (UINT_LEAST64_TYPE) |
4771 | uint_least64_type_node = |
4772 | TREE_TYPE (identifier_global_value (c_get_ident (UINT_LEAST64_TYPE))); |
4773 | if (INT_FAST8_TYPE) |
4774 | int_fast8_type_node = |
4775 | TREE_TYPE (identifier_global_value (c_get_ident (INT_FAST8_TYPE))); |
4776 | if (INT_FAST16_TYPE) |
4777 | int_fast16_type_node = |
4778 | TREE_TYPE (identifier_global_value (c_get_ident (INT_FAST16_TYPE))); |
4779 | if (INT_FAST32_TYPE) |
4780 | int_fast32_type_node = |
4781 | TREE_TYPE (identifier_global_value (c_get_ident (INT_FAST32_TYPE))); |
4782 | if (INT_FAST64_TYPE) |
4783 | int_fast64_type_node = |
4784 | TREE_TYPE (identifier_global_value (c_get_ident (INT_FAST64_TYPE))); |
4785 | if (UINT_FAST8_TYPE) |
4786 | uint_fast8_type_node = |
4787 | TREE_TYPE (identifier_global_value (c_get_ident (UINT_FAST8_TYPE))); |
4788 | if (UINT_FAST16_TYPE) |
4789 | uint_fast16_type_node = |
4790 | TREE_TYPE (identifier_global_value (c_get_ident (UINT_FAST16_TYPE))); |
4791 | if (UINT_FAST32_TYPE) |
4792 | uint_fast32_type_node = |
4793 | TREE_TYPE (identifier_global_value (c_get_ident (UINT_FAST32_TYPE))); |
4794 | if (UINT_FAST64_TYPE) |
4795 | uint_fast64_type_node = |
4796 | TREE_TYPE (identifier_global_value (c_get_ident (UINT_FAST64_TYPE))); |
4797 | if (INTPTR_TYPE) |
4798 | intptr_type_node = |
4799 | TREE_TYPE (identifier_global_value (c_get_ident (INTPTR_TYPE))); |
4800 | if (UINTPTR_TYPE) |
4801 | uintptr_type_node = |
4802 | TREE_TYPE (identifier_global_value (c_get_ident (UINTPTR_TYPE))); |
4803 | |
4804 | default_function_type = build_function_type (integer_type_node, NULL_TREE); |
4805 | unsigned_ptrdiff_type_node = c_common_unsigned_type (ptrdiff_type_node); |
4806 | |
4807 | lang_hooks.decls.pushdecl |
4808 | (build_decl (UNKNOWN_LOCATION, |
4809 | TYPE_DECL, get_identifier ("__builtin_va_list"), |
4810 | va_list_type_node)); |
4811 | if (targetm.enum_va_list_p) |
4812 | { |
4813 | int l; |
4814 | const char *pname; |
4815 | tree ptype; |
4816 | |
4817 | for (l = 0; targetm.enum_va_list_p (l, &pname, &ptype); ++l) |
4818 | { |
4819 | lang_hooks.decls.pushdecl |
4820 | (build_decl (UNKNOWN_LOCATION, |
4821 | TYPE_DECL, get_identifier (pname), |
4822 | ptype)); |
4823 | |
4824 | } |
4825 | } |
4826 | |
4827 | if (TREE_CODE (va_list_type_node) == ARRAY_TYPE) |
4828 | { |
4829 | va_list_arg_type_node = va_list_ref_type_node = |
4830 | build_pointer_type (TREE_TYPE (va_list_type_node)); |
4831 | } |
4832 | else |
4833 | { |
4834 | va_list_arg_type_node = va_list_type_node; |
4835 | va_list_ref_type_node = build_reference_type (va_list_type_node); |
4836 | } |
4837 | |
4838 | c_define_builtins (va_list_ref_type_node, va_list_arg_type_node); |
4839 | |
4840 | main_identifier_node = get_identifier ("main"); |
4841 | |
4842 | /* Create the built-in __null node. It is important that this is |
4843 | not shared. */ |
4844 | null_node = make_int_cst (1, 1); |
4845 |   TREE_TYPE (null_node) = c_common_type_for_size (POINTER_SIZE, 0); |
4846 | |
4847 | /* Create the built-in nullptr node. This part of its initialization is |
4848 | common to C and C++. The front ends can further adjust its definition |
4849 | in {c,cxx}_init_decl_processing. In particular, we aren't setting the |
4850 | alignment here for C++ backward ABI bug compatibility. */ |
4851 | nullptr_type_node = make_node (NULLPTR_TYPE); |
4852 | TYPE_SIZE (nullptr_type_node) = bitsize_int (GET_MODE_BITSIZE (ptr_mode)); |
4853 | TYPE_SIZE_UNIT (nullptr_type_node) = size_int (GET_MODE_SIZE (ptr_mode)); |
4854 | TYPE_UNSIGNED (nullptr_type_node) = 1; |
4855 |   TYPE_PRECISION (nullptr_type_node) = GET_MODE_BITSIZE (ptr_mode); |
4856 | SET_TYPE_MODE (nullptr_type_node, ptr_mode); |
4857 | nullptr_node = build_int_cst (nullptr_type_node, 0); |
4858 | |
4859 | /* Since builtin_types isn't gc'ed, don't export these nodes. */ |
4860 |   memset (builtin_types, 0, sizeof (builtin_types)); |
4861 | } |
4862 | |
4863 | /* The number of named compound-literals generated thus far. */ |
4864 | static GTY(()) int compound_literal_number; |
4865 | |
4866 | /* Set DECL_NAME for DECL, a VAR_DECL for a compound-literal. */ |
4867 | |
4868 | void |
4869 | set_compound_literal_name (tree decl) |
4870 | { |
4871 | char *name; |
4872 | ASM_FORMAT_PRIVATE_NAME (name, "__compound_literal", |
4873 | compound_literal_number); |
4874 | compound_literal_number++; |
4875 | DECL_NAME (decl) = get_identifier (name); |
4876 | } |
4877 | |
4878 | /* build_va_arg helper function. Return a VA_ARG_EXPR with location LOC, type |
4879 | TYPE and operand OP. */ |
4880 | |
4881 | static tree |
4882 | build_va_arg_1 (location_t loc, tree type, tree op) |
4883 | { |
4884 | tree expr = build1 (VA_ARG_EXPR, type, op); |
4885 | SET_EXPR_LOCATION (expr, loc); |
4886 | return expr; |
4887 | } |
4888 | |
4889 | /* Return a VA_ARG_EXPR corresponding to a source-level expression |
4890 | va_arg (EXPR, TYPE) at source location LOC. */ |
4891 | |
4892 | tree |
4893 | build_va_arg (location_t loc, tree expr, tree type) |
4894 | { |
4895 | tree va_type = TREE_TYPE (expr); |
4896 | tree canon_va_type = (va_type == error_mark_node |
4897 | ? error_mark_node |
4898 | : targetm.canonical_va_list_type (va_type)); |
4899 | |
4900 | if (va_type == error_mark_node |
4901 | || canon_va_type == NULL_TREE) |
4902 | { |
4903 | if (canon_va_type == NULL_TREE) |
4904 | error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>"); |
4905 | |
4906 | /* Let's handle things neutrally, if expr: |
4907 | - has undeclared type, or |
4908 | 	 - is not a va_list type.  */ |
4909 | return build_va_arg_1 (loc, type, error_mark_node); |
4910 | } |
4911 | |
4912 | if (TREE_CODE (canon_va_type) != ARRAY_TYPE) |
4913 | { |
4914 | /* Case 1: Not an array type. */ |
4915 | |
4916 | /* Take the address, to get '&ap'. Note that &ap is not a va_list |
4917 | type. */ |
4918 | c_common_mark_addressable_vec (expr); |
4919 | expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (expr)), expr); |
4920 | |
4921 |       return build_va_arg_1 (loc, type, expr); |
4922 | } |
4923 | |
4924 | /* Case 2: Array type. |
4925 | |
4926 | Background: |
4927 | |
4928 | For contrast, let's start with the simple case (case 1). If |
4929 | canon_va_type is not an array type, but say a char *, then when |
4930 | passing-by-value a va_list, the type of the va_list param decl is |
4931 | the same as for another va_list decl (all ap's are char *): |
4932 | |
4933 | f2_1 (char * ap) |
4934 | D.1815 = VA_ARG (&ap, 0B, 1); |
4935 | return D.1815; |
4936 | |
4937 | f2 (int i) |
4938 | char * ap.0; |
4939 | char * ap; |
4940 | __builtin_va_start (&ap, 0); |
4941 | ap.0 = ap; |
4942 | res = f2_1 (ap.0); |
4943 | __builtin_va_end (&ap); |
4944 | D.1812 = res; |
4945 | return D.1812; |
4946 | |
4947 | However, if canon_va_type is ARRAY_TYPE, then when passing-by-value a |
4948 | va_list the type of the va_list param decl (case 2b, struct * ap) is not |
4949 | the same as for another va_list decl (case 2a, struct ap[1]). |
4950 | |
4951 | f2_1 (struct * ap) |
4952 | D.1844 = VA_ARG (ap, 0B, 0); |
4953 | return D.1844; |
4954 | |
4955 | f2 (int i) |
4956 | struct ap[1]; |
4957 | __builtin_va_start (&ap, 0); |
4958 | res = f2_1 (&ap); |
4959 | __builtin_va_end (&ap); |
4960 | D.1841 = res; |
4961 | return D.1841; |
4962 | |
4963 | Case 2b is different because: |
4964 | - on the callee side, the parm decl has declared type va_list, but |
4965 | grokdeclarator changes the type of the parm decl to a pointer to the |
4966 | array elem type. |
4967 | - on the caller side, the pass-by-value uses &ap. |
4968 | |
4969 | We unify these two cases (case 2a: va_list is array type, |
4970 | case 2b: va_list is pointer to array elem type), by adding '&' for the |
4971 | array type case, such that we have a pointer to array elem in both |
4972 | cases. */ |
4973 | |
4974 | if (TREE_CODE (va_type) == ARRAY_TYPE) |
4975 | { |
4976 | /* Case 2a: va_list is array type. */ |
4977 | |
4978 | /* Take the address, to get '&ap'. Make sure it's a pointer to array |
4979 | elem type. */ |
4980 | c_common_mark_addressable_vec (expr); |
4981 | expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (canon_va_type)), |
4982 | expr); |
4983 | |
4984 | /* Verify that &ap is still recognized as having va_list type. */ |
4985 | tree canon_expr_type |
4986 | = targetm.canonical_va_list_type (TREE_TYPE (expr)); |
4987 | gcc_assert (canon_expr_type != NULL_TREE); |
4988 | } |
4989 | else |
4990 | { |
4991 | /* Case 2b: va_list is pointer to array elem type. */ |
4992 | gcc_assert (POINTER_TYPE_P (va_type)); |
4993 | |
4994 | /* Comparison as in std_canonical_va_list_type. */ |
4995 | gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (va_type)) |
4996 | == TYPE_MAIN_VARIANT (TREE_TYPE (canon_va_type))); |
4997 | |
4998 | /* Don't take the address. We've already got '&ap'. */ |
4999 | ; |
5000 | } |
5001 | |
5002 |   return build_va_arg_1 (loc, type, expr); |
5003 | } |
5004 | |
5005 | |
5006 | /* Linked list of disabled built-in functions. */ |
5007 | |
5008 | struct disabled_builtin |
5009 | { |
5010 | const char *name; |
5011 | struct disabled_builtin *next; |
5012 | }; |
5013 | static disabled_builtin *disabled_builtins = NULL; |
5014 | |
5015 | static bool builtin_function_disabled_p (const char *); |
5016 | |
5017 | /* Disable a built-in function specified by -fno-builtin-NAME. If NAME |
5018 | begins with "__builtin_", give an error. */ |
5019 | |
5020 | void |
5021 | disable_builtin_function (const char *name) |
5022 | { |
5023 |   if (startswith (name, "__builtin_")) |
5024 | error ("cannot disable built-in function %qs", name); |
5025 | else |
5026 | { |
5027 | disabled_builtin *new_disabled_builtin = XNEW (disabled_builtin); |
5028 | new_disabled_builtin->name = name; |
5029 | new_disabled_builtin->next = disabled_builtins; |
5030 | disabled_builtins = new_disabled_builtin; |
5031 | } |
5032 | } |
5033 | |
5034 | |
5035 | /* Return true if the built-in function NAME has been disabled, false |
5036 | otherwise. */ |
5037 | |
5038 | static bool |
5039 | builtin_function_disabled_p (const char *name) |
5040 | { |
5041 | disabled_builtin *p; |
5042 | for (p = disabled_builtins; p != NULL; p = p->next) |
5043 | { |
5044 |       if (strcmp (name, p->name) == 0) |
5045 | return true; |
5046 | } |
5047 | return false; |
5048 | } |
5049 | |
5050 | |
5051 | /* Worker for DEF_BUILTIN. |
5052 | Possibly define a builtin function with one or two names. |
5053 | Does not declare a non-__builtin_ function if flag_no_builtin, or if |
5054 | nonansi_p and flag_no_nonansi_builtin. */ |
5055 | |
5056 | static void |
5057 | def_builtin_1 (enum built_in_function fncode, |
5058 | const char *name, |
5059 | enum built_in_class fnclass, |
5060 | tree fntype, tree libtype, |
5061 | bool both_p, bool fallback_p, bool nonansi_p, |
5062 | tree fnattrs, bool implicit_p) |
5063 | { |
5064 | tree decl; |
5065 | const char *libname; |
5066 | |
5067 | if (fntype == error_mark_node) |
5068 | return; |
5069 | |
5070 | gcc_assert ((!both_p && !fallback_p) |
5071 | || startswith (name, "__builtin_")); |
5072 | |
5073 |   libname = name + strlen ("__builtin_"); |
5074 |   decl = add_builtin_function (name, fntype, fncode, fnclass, |
5075 | 			       (fallback_p ? libname : NULL), |
5076 | 			       fnattrs); |
5077 | |
5078 | set_builtin_decl (fncode, decl, implicit_p); |
5079 | |
5080 | if (both_p |
5081 | && !flag_no_builtin && !builtin_function_disabled_p (name: libname) |
5082 | && !(nonansi_p && flag_no_nonansi_builtin)) |
5083 |     add_builtin_function (libname, libtype, fncode, fnclass, |
5084 | 			  NULL, fnattrs); |
5085 | } |
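 | |
 | /* For example (illustration only): for the standard builtin |
 |    "__builtin_strlen", both_p and fallback_p are set, so besides |
 |    __builtin_strlen the plain library name "strlen" (NAME with the |
 |    "__builtin_" prefix stripped) is also declared, unless -fno-builtin |
 |    or -fno-builtin-strlen has disabled it.  */ |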
5086 | |
5087 | /* Nonzero if the type T promotes to int. This is (nearly) the |
5088 | integral promotions defined in ISO C99 6.3.1.1/2. */ |
5089 | |
5090 | bool |
5091 | c_promoting_integer_type_p (const_tree t) |
5092 | { |
5093 | switch (TREE_CODE (t)) |
5094 | { |
5095 | case INTEGER_TYPE: |
5096 | return (TYPE_MAIN_VARIANT (t) == char_type_node |
5097 | || TYPE_MAIN_VARIANT (t) == signed_char_type_node |
5098 | || TYPE_MAIN_VARIANT (t) == unsigned_char_type_node |
5099 | || TYPE_MAIN_VARIANT (t) == short_integer_type_node |
5100 | || TYPE_MAIN_VARIANT (t) == short_unsigned_type_node |
5101 | || TYPE_PRECISION (t) < TYPE_PRECISION (integer_type_node)); |
5102 | |
5103 | case ENUMERAL_TYPE: |
5104 | /* ??? Technically all enumerations not larger than an int |
5105 | promote to an int. But this is used along code paths |
5106 | that only want to notice a size change. */ |
5107 | return TYPE_PRECISION (t) < TYPE_PRECISION (integer_type_node); |
5108 | |
5109 | case BOOLEAN_TYPE: |
5110 | return true; |
5111 | |
5112 | default: |
5113 | return false; |
5114 | } |
5115 | } |
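 | |
 | /* For example, 'char', 'short int', small enumeration types and |
 |    'bool'/'_Bool' all promote to int and yield true here, while 'int', |
 |    'long' and wider types yield false.  (Illustrative, not exhaustive.)  */ |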
5116 | |
5117 | /* Return 1 if PARMS specifies a fixed number of parameters |
5118 | and none of their types is affected by default promotions. */ |
5119 | |
5120 | bool |
5121 | self_promoting_args_p (const_tree parms) |
5122 | { |
5123 | const_tree t; |
5124 | for (t = parms; t; t = TREE_CHAIN (t)) |
5125 | { |
5126 | tree type = TREE_VALUE (t); |
5127 | |
5128 | if (type == error_mark_node) |
5129 | continue; |
5130 | |
5131 | if (TREE_CHAIN (t) == NULL_TREE && type != void_type_node) |
5132 | return false; |
5133 | |
5134 | if (type == NULL_TREE) |
5135 | return false; |
5136 | |
5137 | if (TYPE_MAIN_VARIANT (type) == float_type_node) |
5138 | return false; |
5139 | |
5140 |       if (c_promoting_integer_type_p (type)) |
5141 | return false; |
5142 | } |
5143 | return true; |
5144 | } |
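 | |
 | /* Illustrative examples: a prototype such as 'int (long, double)' is |
 |    self-promoting, while 'int (short)' and 'int (float)' are not (short |
 |    promotes to int and float to double), and neither is an unprototyped |
 |    or variadic parameter list, whose argument list does not end with |
 |    void_type_node.  */ |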
5145 | |
5146 | /* Recursively remove any '*' or '&' operator from TYPE. */ |
5147 | tree |
5148 | strip_pointer_operator (tree t) |
5149 | { |
5150 | while (POINTER_TYPE_P (t)) |
5151 | t = TREE_TYPE (t); |
5152 | return t; |
5153 | } |
5154 | |
5155 | /* Recursively remove pointer or array type from TYPE. */ |
5156 | tree |
5157 | strip_pointer_or_array_types (tree t) |
5158 | { |
5159 | while (TREE_CODE (t) == ARRAY_TYPE || POINTER_TYPE_P (t)) |
5160 | t = TREE_TYPE (t); |
5161 | return t; |
5162 | } |
5163 | |
5164 | /* Used to compare case labels. K1 and K2 are actually tree nodes |
5165 | representing case labels, or NULL_TREE for a `default' label. |
5166 |    Returns -1 if K1 is ordered before K2, 1 if K1 is ordered after |
5167 | K2, and 0 if K1 and K2 are equal. */ |
5168 | |
5169 | int |
5170 | case_compare (splay_tree_key k1, splay_tree_key k2) |
5171 | { |
5172 | /* Consider a NULL key (such as arises with a `default' label) to be |
5173 | smaller than anything else. */ |
5174 | if (!k1) |
5175 | return k2 ? -1 : 0; |
5176 | else if (!k2) |
5177 | return k1 ? 1 : 0; |
5178 | |
5179 |   return tree_int_cst_compare ((tree) k1, (tree) k2); |
5180 | } |
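 | |
 | /* E.g. the NULL key used for a `default' label sorts before every real |
 |    case label, and two INTEGER_CST keys are ordered with |
 |    tree_int_cst_compare, so the splay tree keeps the cases in ascending |
 |    case-value order.  */ |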
5181 | |
5182 | /* Process a case label, located at LOC, for the range LOW_VALUE |
5183 | ... HIGH_VALUE. If LOW_VALUE and HIGH_VALUE are both NULL_TREE |
5184 | then this case label is actually a `default' label. If only |
5185 |    HIGH_VALUE is NULL_TREE, then the case label was declared using the |
5186 | usual C/C++ syntax, rather than the GNU case range extension. |
5187 | CASES is a tree containing all the case ranges processed so far; |
5188 | COND is the condition for the switch-statement itself. |
5189 | Returns the CASE_LABEL_EXPR created, or ERROR_MARK_NODE if no |
5190 | CASE_LABEL_EXPR is created. ATTRS are the attributes to be applied |
5191 | to the label. */ |
5192 | |
5193 | tree |
5194 | c_add_case_label (location_t loc, splay_tree cases, tree cond, |
5195 | tree low_value, tree high_value, tree attrs) |
5196 | { |
5197 | tree type; |
5198 | tree label; |
5199 | tree case_label; |
5200 | splay_tree_node node; |
5201 | |
5202 | /* Create the LABEL_DECL itself. */ |
5203 | label = create_artificial_label (loc); |
5204 | decl_attributes (&label, attrs, 0); |
5205 | |
5206 | /* If there was an error processing the switch condition, bail now |
5207 | before we get more confused. */ |
5208 | if (!cond || cond == error_mark_node) |
5209 | goto error_out; |
5210 | |
5211 | if ((low_value && TREE_TYPE (low_value) |
5212 | && POINTER_TYPE_P (TREE_TYPE (low_value))) |
5213 | || (high_value && TREE_TYPE (high_value) |
5214 | && POINTER_TYPE_P (TREE_TYPE (high_value)))) |
5215 | { |
5216 | error_at (loc, "pointers are not permitted as case values"); |
5217 | goto error_out; |
5218 | } |
5219 | |
5220 | /* Case ranges are a GNU extension. */ |
5221 | if (high_value) |
5222 | { |
5223 | if (c_dialect_cxx ()) |
5224 | pedwarn (loc, OPT_Wpedantic, |
5225 | "range expressions in switch statements are non-standard"); |
5226 | else if (warn_c23_c2y_compat > 0) |
5227 | { |
5228 | if (pedantic && !flag_isoc2y) |
5229 | pedwarn (loc, OPT_Wc23_c2y_compat, |
5230 | "ISO C does not support range expressions in switch " |
5231 | "statements before C2Y"); |
5232 | else |
5233 | warning_at (loc, OPT_Wc23_c2y_compat, |
5234 | "ISO C does not support range expressions in switch " |
5235 | "statements before C2Y"); |
5236 | } |
5237 | else if (warn_c23_c2y_compat && pedantic && !flag_isoc2y) |
5238 | pedwarn (loc, OPT_Wpedantic, |
5239 | "ISO C does not support range expressions in switch " |
5240 | "statements before C2Y"); |
5241 | } |
5242 | |
5243 | type = TREE_TYPE (cond); |
5244 | if (low_value) |
5245 | { |
5246 |       low_value = check_case_value (loc, low_value); |
5247 | tree tem = NULL_TREE; |
5248 | if (high_value |
5249 | && !c_dialect_cxx () |
5250 | && low_value != error_mark_node |
5251 | && !int_fits_type_p (low_value, type) |
5252 | && pedwarn (loc, OPT_Wpedantic, |
5253 | "conversion of %qE to %qT in range expression changes " |
5254 | "value to %qE", low_value, type, |
5255 | (tem = fold_convert (type, low_value)))) |
5256 | low_value = tem; |
5257 |       low_value = convert_and_check (loc, type, low_value); |
5258 | low_value = fold (low_value); |
5259 | if (low_value == error_mark_node) |
5260 | goto error_out; |
5261 | } |
5262 | if (high_value) |
5263 | { |
5264 |       high_value = check_case_value (loc, high_value); |
5265 | tree tem = NULL_TREE; |
5266 | if (!c_dialect_cxx () |
5267 | && high_value != error_mark_node |
5268 | && !int_fits_type_p (high_value, type) |
5269 | && pedwarn (loc, OPT_Wpedantic, |
5270 | "conversion of %qE to %qT in range expression changes " |
5271 | "value to %qE", high_value, type, |
5272 | (tem = fold_convert (type, high_value)))) |
5273 | high_value = tem; |
5274 |       high_value = convert_and_check (loc, type, high_value); |
5275 | high_value = fold (high_value); |
5276 | if (high_value == error_mark_node) |
5277 | goto error_out; |
5278 | } |
5279 | |
5280 | if (low_value && high_value) |
5281 | { |
5282 | /* If the LOW_VALUE and HIGH_VALUE are the same, then this isn't |
5283 | really a case range, even though it was written that way. |
5284 | Remove the HIGH_VALUE to simplify later processing. */ |
5285 | if (tree_int_cst_equal (low_value, high_value)) |
5286 | high_value = NULL_TREE; |
5287 |       else if (!tree_int_cst_lt (low_value, high_value)) |
5288 | { |
5289 | warning_at (loc, 0, "empty range specified"); |
5290 | goto error_out; |
5291 | } |
5292 | } |
5293 | |
5294 | /* Look up the LOW_VALUE in the table of case labels we already |
5295 | have. */ |
5296 | node = splay_tree_lookup (cases, (splay_tree_key) low_value); |
5297 | /* If there was not an exact match, check for overlapping ranges. |
5298 | There's no need to do this if there's no LOW_VALUE or HIGH_VALUE; |
5299 | that's a `default' label and the only overlap is an exact match. */ |
5300 | if (!node && (low_value || high_value)) |
5301 | { |
5302 | splay_tree_node low_bound; |
5303 | splay_tree_node high_bound; |
5304 | |
5305 | /* Even though there wasn't an exact match, there might be an |
5306 | overlap between this case range and another case range. |
5307 | Since we've (inductively) not allowed any overlapping case |
5308 | ranges, we simply need to find the greatest low case label |
5309 | 	 that is smaller than LOW_VALUE, and the smallest low case |
5310 | label that is greater than LOW_VALUE. If there is an overlap |
5311 | it will occur in one of these two ranges. */ |
5312 | low_bound = splay_tree_predecessor (cases, |
5313 | (splay_tree_key) low_value); |
5314 | high_bound = splay_tree_successor (cases, |
5315 | (splay_tree_key) low_value); |
5316 | |
5317 | /* Check to see if the LOW_BOUND overlaps. It is smaller than |
5318 | the LOW_VALUE, so there is no need to check unless the |
5319 | LOW_BOUND is in fact itself a case range. */ |
5320 | if (low_bound |
5321 | && CASE_HIGH ((tree) low_bound->value) |
5322 | 	  && tree_int_cst_compare (CASE_HIGH ((tree) low_bound->value), |
5323 | 				   low_value) >= 0) |
5324 | node = low_bound; |
5325 | /* Check to see if the HIGH_BOUND overlaps. The low end of that |
5326 | range is bigger than the low end of the current range, so we |
5327 | are only interested if the current range is a real range, and |
5328 | not an ordinary case label. */ |
5329 | else if (high_bound |
5330 | && high_value |
5331 | 	       && (tree_int_cst_compare ((tree) high_bound->key, |
5332 | 					 high_value) |
5333 | <= 0)) |
5334 | node = high_bound; |
5335 | } |
5336 | /* If there was an overlap, issue an error. */ |
5337 | if (node) |
5338 | { |
5339 | tree duplicate = CASE_LABEL ((tree) node->value); |
5340 | |
5341 | if (high_value) |
5342 | { |
5343 | error_at (loc, "duplicate (or overlapping) case value"); |
5344 | inform (DECL_SOURCE_LOCATION (duplicate), |
5345 | "this is the first entry overlapping that value"); |
5346 | } |
5347 | else if (low_value) |
5348 | { |
5349 | 	  error_at (loc, "duplicate case value"); |
5350 | inform (DECL_SOURCE_LOCATION (duplicate), "previously used here"); |
5351 | } |
5352 | else |
5353 | { |
5354 | error_at (loc, "multiple default labels in one switch"); |
5355 | inform (DECL_SOURCE_LOCATION (duplicate), |
5356 | "this is the first default label"); |
5357 | } |
5358 | goto error_out; |
5359 | } |
5360 | |
5361 | /* Add a CASE_LABEL to the statement-tree. */ |
5362 | case_label = add_stmt (build_case_label (low_value, high_value, label)); |
5363 | /* Register this case label in the splay tree. */ |
5364 | splay_tree_insert (cases, |
5365 | (splay_tree_key) low_value, |
5366 | (splay_tree_value) case_label); |
5367 | |
5368 | return case_label; |
5369 | |
5370 | error_out: |
5371 | /* Add a label so that the back-end doesn't think that the beginning of |
5372 | the switch is unreachable. Note that we do not add a case label, as |
5373 | that just leads to duplicates and thence to failure later on. */ |
5374 | if (!cases->root) |
5375 | { |
5376 | tree t = create_artificial_label (loc); |
5377 | add_stmt (build_stmt (loc, LABEL_EXPR, t)); |
5378 | } |
5379 | return error_mark_node; |
5380 | } |
5381 | |
5382 | /* Subroutine of c_switch_covers_all_cases_p, called via |
5383 | splay_tree_foreach. Return 1 if it doesn't cover all the cases. |
5384 | ARGS[0] is initially NULL and after the first iteration is the |
5385 | so far highest case label. ARGS[1] is the minimum of SWITCH_COND's |
5386 | type. */ |
5387 | |
5388 | static int |
5389 | c_switch_covers_all_cases_p_1 (splay_tree_node node, void *data) |
5390 | { |
5391 | tree label = (tree) node->value; |
5392 | tree *args = (tree *) data; |
5393 | |
5394 | /* If there is a default case, we shouldn't have called this. */ |
5395 | gcc_assert (CASE_LOW (label)); |
5396 | |
5397 | if (args[0] == NULL_TREE) |
5398 | { |
5399 |       if (wi::to_widest (args[1]) < wi::to_widest (CASE_LOW (label))) |
5400 | return 1; |
5401 | } |
5402 |   else if (wi::add (wi::to_widest (args[0]), 1) |
5403 | != wi::to_widest (CASE_LOW (label))) |
5404 | return 1; |
5405 | if (CASE_HIGH (label)) |
5406 | args[0] = CASE_HIGH (label); |
5407 | else |
5408 | args[0] = CASE_LOW (label); |
5409 | return 0; |
5410 | } |
5411 | |
5412 | /* Return true if the case labels CASES of a switch whose condition has |
5413 |    type TYPE cover all possible values of that type.  */ |
5414 | |
5415 | bool |
5416 | c_switch_covers_all_cases_p (splay_tree cases, tree type) |
5417 | { |
5418 | /* If there is default:, this is always the case. */ |
5419 | splay_tree_node default_node |
5420 | = splay_tree_lookup (cases, (splay_tree_key) NULL); |
5421 | if (default_node) |
5422 | return true; |
5423 | |
5424 | if (!INTEGRAL_TYPE_P (type)) |
5425 | return false; |
5426 | |
5427 | tree args[2] = { NULL_TREE, TYPE_MIN_VALUE (type) }; |
5428 | if (splay_tree_foreach (cases, c_switch_covers_all_cases_p_1, args)) |
5429 | return false; |
5430 | |
5431 | /* If there are no cases at all, or if the highest case label |
5432 | is smaller than TYPE_MAX_VALUE, return false. */ |
5433 | if (args[0] == NULL_TREE |
5434 |       || wi::to_widest (args[0]) < wi::to_widest (TYPE_MAX_VALUE (type))) |
5435 | return false; |
5436 | |
5437 | return true; |
5438 | } |
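 | |
 | /* For instance (illustration only), 'switch (b)' on a _Bool/bool |
 |    condition whose body has both 'case 0' and 'case 1' covers all cases, |
 |    as does any switch with a 'default:' label; a gap between labels or a |
 |    missing extreme value makes this return false.  */ |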
5439 | |
5440 | /* Return true if stmt can fall through. Used by block_may_fallthru |
5441 | default case. */ |
5442 | |
5443 | bool |
5444 | c_block_may_fallthru (const_tree stmt) |
5445 | { |
5446 | switch (TREE_CODE (stmt)) |
5447 | { |
5448 | case SWITCH_STMT: |
5449 | return (!SWITCH_STMT_ALL_CASES_P (stmt) |
5450 | || !SWITCH_STMT_NO_BREAK_P (stmt) |
5451 | || block_may_fallthru (SWITCH_STMT_BODY (stmt))); |
5452 | |
5453 | default: |
5454 | return true; |
5455 | } |
5456 | } |
5457 | |
5458 | /* Finish an expression taking the address of LABEL (an |
5459 | IDENTIFIER_NODE). Returns an expression for the address. |
5460 | |
5461 | LOC is the location for the expression returned. */ |
5462 | |
5463 | tree |
5464 | finish_label_address_expr (tree label, location_t loc) |
5465 | { |
5466 | tree result; |
5467 | |
5468 | pedwarn (input_location, OPT_Wpedantic, "taking the address of a label is non-standard"); |
5469 | |
5470 | if (label == error_mark_node) |
5471 | return error_mark_node; |
5472 | |
5473 | label = lookup_label (label); |
5474 | if (label == NULL_TREE) |
5475 | result = null_pointer_node; |
5476 | else |
5477 | { |
5478 | TREE_USED (label) = 1; |
5479 | result = build1 (ADDR_EXPR, ptr_type_node, label); |
5480 | /* The current function is not necessarily uninlinable. |
5481 | Computed gotos are incompatible with inlining, but the value |
5482 | here could be used only in a diagnostic, for example. */ |
5483 | protected_set_expr_location (result, loc); |
5484 | } |
5485 | |
5486 | return result; |
5487 | } |
5488 | |
5489 | |
5490 | /* Given a boolean expression ARG, return a tree representing an increment |
5491 | or decrement (as indicated by CODE) of ARG. The front end must check for |
5492 | invalid cases (e.g., decrement in C++). */ |
5493 | tree |
5494 | boolean_increment (enum tree_code code, tree arg) |
5495 | { |
5496 | tree val; |
5497 | tree true_res = build_int_cst (TREE_TYPE (arg), 1); |
5498 | |
5499 | arg = stabilize_reference (arg); |
5500 | switch (code) |
5501 | { |
5502 | case PREINCREMENT_EXPR: |
5503 | val = build2 (MODIFY_EXPR, TREE_TYPE (arg), arg, true_res); |
5504 | break; |
5505 | case POSTINCREMENT_EXPR: |
5506 | val = build2 (MODIFY_EXPR, TREE_TYPE (arg), arg, true_res); |
5507 | arg = save_expr (arg); |
5508 | val = build2 (COMPOUND_EXPR, TREE_TYPE (arg), val, arg); |
5509 | val = build2 (COMPOUND_EXPR, TREE_TYPE (arg), arg, val); |
5510 | break; |
5511 | case PREDECREMENT_EXPR: |
5512 | val = build2 (MODIFY_EXPR, TREE_TYPE (arg), arg, |
5513 | invert_truthvalue_loc (input_location, arg)); |
5514 | break; |
5515 | case POSTDECREMENT_EXPR: |
5516 | val = build2 (MODIFY_EXPR, TREE_TYPE (arg), arg, |
5517 | invert_truthvalue_loc (input_location, arg)); |
5518 | arg = save_expr (arg); |
5519 | val = build2 (COMPOUND_EXPR, TREE_TYPE (arg), val, arg); |
5520 | val = build2 (COMPOUND_EXPR, TREE_TYPE (arg), arg, val); |
5521 | break; |
5522 | default: |
5523 | gcc_unreachable (); |
5524 | } |
5525 | TREE_SIDE_EFFECTS (val) = 1; |
5526 | return val; |
5527 | } |
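 | |
 | /* Roughly, for a boolean variable b the trees built above behave as |
 |    (illustration only): |
 | |
 |      ++b   acts as   b = true |
 |      b++   acts as   (old = b, b = true, old) |
 |      --b   acts as   b = !b |
 |      b--   acts as   (old = b, b = !b, old) |
 | |
 |    The C++ front end rejects decrement of bool; in C, decrementing a |
 |    _Bool toggles it as shown.  */ |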
5528 | |
5529 | /* Built-in macros for stddef.h and stdint.h that require macros |
5530 |    defined in this file.  */ |
5531 | void |
5532 | c_stddef_cpp_builtins (void) |
5533 | { |
5534 | builtin_define_with_value ("__SIZE_TYPE__", SIZE_TYPE, 0); |
5535 | builtin_define_with_value ("__PTRDIFF_TYPE__", PTRDIFF_TYPE, 0); |
5536 | builtin_define_with_value ("__WCHAR_TYPE__", MODIFIED_WCHAR_TYPE, 0); |
5537 | /* C++ has wchar_t as a builtin type, C doesn't, so if WINT_TYPE |
5538 | maps to wchar_t, define it to the underlying WCHAR_TYPE in C, and |
5539 | to wchar_t in C++, so the desired type equivalence holds. */ |
5540 | if (!c_dialect_cxx () |
5541 |       && strcmp (WINT_TYPE, "wchar_t") == 0) |
5542 | builtin_define_with_value ("__WINT_TYPE__", WCHAR_TYPE, 0); |
5543 | else |
5544 | builtin_define_with_value ("__WINT_TYPE__", WINT_TYPE, 0); |
5545 | builtin_define_with_value ("__INTMAX_TYPE__", INTMAX_TYPE, 0); |
5546 | builtin_define_with_value ("__UINTMAX_TYPE__", UINTMAX_TYPE, 0); |
5547 | if (flag_char8_t) |
5548 | builtin_define_with_value ("__CHAR8_TYPE__", CHAR8_TYPE, 0); |
5549 | builtin_define_with_value ("__CHAR16_TYPE__", CHAR16_TYPE, 0); |
5550 | builtin_define_with_value ("__CHAR32_TYPE__", CHAR32_TYPE, 0); |
5551 | if (SIG_ATOMIC_TYPE) |
5552 | builtin_define_with_value ("__SIG_ATOMIC_TYPE__", SIG_ATOMIC_TYPE, 0); |
5553 | if (INT8_TYPE) |
5554 | builtin_define_with_value ("__INT8_TYPE__", INT8_TYPE, 0); |
5555 | if (INT16_TYPE) |
5556 | builtin_define_with_value ("__INT16_TYPE__", INT16_TYPE, 0); |
5557 | if (INT32_TYPE) |
5558 | builtin_define_with_value ("__INT32_TYPE__", INT32_TYPE, 0); |
5559 | if (INT64_TYPE) |
5560 | builtin_define_with_value ("__INT64_TYPE__", INT64_TYPE, 0); |
5561 | if (UINT8_TYPE) |
5562 | builtin_define_with_value ("__UINT8_TYPE__", UINT8_TYPE, 0); |
5563 | if (UINT16_TYPE) |
5564 | builtin_define_with_value ("__UINT16_TYPE__", UINT16_TYPE, 0); |
5565 | if (UINT32_TYPE) |
5566 | builtin_define_with_value ("__UINT32_TYPE__", UINT32_TYPE, 0); |
5567 | if (UINT64_TYPE) |
5568 | builtin_define_with_value ("__UINT64_TYPE__", UINT64_TYPE, 0); |
5569 | if (INT_LEAST8_TYPE) |
5570 | builtin_define_with_value ("__INT_LEAST8_TYPE__", INT_LEAST8_TYPE, 0); |
5571 | if (INT_LEAST16_TYPE) |
5572 | builtin_define_with_value ("__INT_LEAST16_TYPE__", INT_LEAST16_TYPE, 0); |
5573 | if (INT_LEAST32_TYPE) |
5574 | builtin_define_with_value ("__INT_LEAST32_TYPE__", INT_LEAST32_TYPE, 0); |
5575 | if (INT_LEAST64_TYPE) |
5576 | builtin_define_with_value ("__INT_LEAST64_TYPE__", INT_LEAST64_TYPE, 0); |
5577 | if (UINT_LEAST8_TYPE) |
5578 | builtin_define_with_value ("__UINT_LEAST8_TYPE__", UINT_LEAST8_TYPE, 0); |
5579 | if (UINT_LEAST16_TYPE) |
5580 | builtin_define_with_value ("__UINT_LEAST16_TYPE__", UINT_LEAST16_TYPE, 0); |
5581 | if (UINT_LEAST32_TYPE) |
5582 | builtin_define_with_value ("__UINT_LEAST32_TYPE__", UINT_LEAST32_TYPE, 0); |
5583 | if (UINT_LEAST64_TYPE) |
5584 | builtin_define_with_value ("__UINT_LEAST64_TYPE__", UINT_LEAST64_TYPE, 0); |
5585 | if (INT_FAST8_TYPE) |
5586 | builtin_define_with_value ("__INT_FAST8_TYPE__", INT_FAST8_TYPE, 0); |
5587 | if (INT_FAST16_TYPE) |
5588 | builtin_define_with_value ("__INT_FAST16_TYPE__", INT_FAST16_TYPE, 0); |
5589 | if (INT_FAST32_TYPE) |
5590 | builtin_define_with_value ("__INT_FAST32_TYPE__", INT_FAST32_TYPE, 0); |
5591 | if (INT_FAST64_TYPE) |
5592 | builtin_define_with_value ("__INT_FAST64_TYPE__", INT_FAST64_TYPE, 0); |
5593 | if (UINT_FAST8_TYPE) |
5594 | builtin_define_with_value ("__UINT_FAST8_TYPE__", UINT_FAST8_TYPE, 0); |
5595 | if (UINT_FAST16_TYPE) |
5596 | builtin_define_with_value ("__UINT_FAST16_TYPE__", UINT_FAST16_TYPE, 0); |
5597 | if (UINT_FAST32_TYPE) |
5598 | builtin_define_with_value ("__UINT_FAST32_TYPE__", UINT_FAST32_TYPE, 0); |
5599 | if (UINT_FAST64_TYPE) |
5600 | builtin_define_with_value ("__UINT_FAST64_TYPE__", UINT_FAST64_TYPE, 0); |
5601 | if (INTPTR_TYPE) |
5602 | builtin_define_with_value ("__INTPTR_TYPE__", INTPTR_TYPE, 0); |
5603 | if (UINTPTR_TYPE) |
5604 | builtin_define_with_value ("__UINTPTR_TYPE__", UINTPTR_TYPE, 0); |
5605 | /* GIMPLE FE testcases need access to the GCC internal 'sizetype'. |
5606 | Expose it as __SIZETYPE__. */ |
5607 | if (flag_gimple) |
5608 | builtin_define_with_value ("__SIZETYPE__", SIZETYPE, 0); |
5609 | } |
5610 | |
5611 | static void |
5612 | c_init_attributes (void) |
5613 | { |
5614 | /* Fill in the built_in_attributes array. */ |
5615 | #define DEF_ATTR_NULL_TREE(ENUM) \ |
5616 | built_in_attributes[(int) ENUM] = NULL_TREE; |
5617 | #define DEF_ATTR_INT(ENUM, VALUE) \ |
5618 | built_in_attributes[(int) ENUM] = build_int_cst (integer_type_node, VALUE); |
5619 | #define DEF_ATTR_STRING(ENUM, VALUE) \ |
5620 | built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE); |
5621 | #define DEF_ATTR_IDENT(ENUM, STRING) \ |
5622 | built_in_attributes[(int) ENUM] = get_identifier (STRING); |
5623 | #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \ |
5624 | built_in_attributes[(int) ENUM] \ |
5625 | = tree_cons (built_in_attributes[(int) PURPOSE], \ |
5626 | built_in_attributes[(int) VALUE], \ |
5627 | built_in_attributes[(int) CHAIN]); |
5628 | #include "builtin-attrs.def" |
5629 | #undef DEF_ATTR_NULL_TREE |
5630 | #undef DEF_ATTR_INT |
 | #undef DEF_ATTR_STRING |
5631 | #undef DEF_ATTR_IDENT |
5632 | #undef DEF_ATTR_TREE_LIST |
5633 | } |
5634 | |
5635 | /* Check whether the byte alignment ALIGN is a valid user-specified |
5636 | alignment less than the supported maximum. If so, return ALIGN's |
5637 | base-2 log; if not, output an error and return -1. If OBJFILE |
5638 | then reject alignments greater than MAX_OFILE_ALIGNMENT when |
5639 | converted to bits. Otherwise, consider valid only alignments |
5640 | that are less than HOST_BITS_PER_INT - LOG2_BITS_PER_UNIT. |
5641 | Zero is not considered a valid argument (and results in -1 on |
5642 | return) but it only triggers a warning when WARN_ZERO is set. */ |
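 | /* Illustrative example (an editor's sketch, not from the GCC sources): |
 | for __attribute__ ((aligned (16))) the argument 16 is accepted and this |
 | function returns 4 (the base-2 log of 16 bytes), whereas 0, 3 or a |
 | negative value yields -1 together with the diagnostics described above.  */ |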
5643 | |
5644 | int |
5645 | check_user_alignment (const_tree align, bool objfile, bool warn_zero) |
5646 | { |
5647 | if (error_operand_p (align)) |
5648 | return -1; |
5649 | |
5650 | if (TREE_CODE (align) != INTEGER_CST |
5651 | || !INTEGRAL_TYPE_P (TREE_TYPE (align))) |
5652 | { |
5653 | error ("requested alignment is not an integer constant"); |
5654 | return -1; |
5655 | } |
5656 | |
5657 | if (integer_zerop (align)) |
5658 | { |
5659 | if (warn_zero) |
5660 | warning (OPT_Wattributes, |
5661 | "requested alignment %qE is not a positive power of 2", |
5662 | align); |
5663 | return -1; |
5664 | } |
5665 | |
5666 | /* Log2 of the byte alignment ALIGN. */ |
5667 | int log2align; |
5668 | if (tree_int_cst_sgn (align) == -1 |
5669 | || (log2align = tree_log2 (align)) == -1) |
5670 | { |
5671 | error ("requested alignment %qE is not a positive power of 2", |
5672 | align); |
5673 | return -1; |
5674 | } |
5675 | |
5676 | if (objfile) |
5677 | { |
5678 | unsigned maxalign = MAX_OFILE_ALIGNMENT / BITS_PER_UNIT; |
5679 | if (!tree_fits_uhwi_p (align) || tree_to_uhwi (align) > maxalign) |
5680 | { |
5681 | error ("requested alignment %qE exceeds object file maximum %u", |
5682 | align, maxalign); |
5683 | return -1; |
5684 | } |
5685 | } |
5686 | |
5687 | if (log2align >= HOST_BITS_PER_INT - LOG2_BITS_PER_UNIT) |
5688 | { |
5689 | error ("requested alignment %qE exceeds maximum %u", |
5690 | align, 1U << (HOST_BITS_PER_INT - LOG2_BITS_PER_UNIT - 1)); |
5691 | return -1; |
5692 | } |
5693 | |
5694 | return log2align; |
5695 | } |
5696 | |
5697 | /* Determine the ELF symbol visibility for DECL, which is either a |
5698 | variable or a function. It is an error to use this function if a |
5699 | definition of DECL is not available in this translation unit. |
5700 | Returns true if the final visibility has been determined by this |
5701 | function; false if the caller is free to make additional |
5702 | modifications. */ |
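 | /* Illustrative example (an editor's sketch, not from the GCC sources): |
 | under "#pragma GCC visibility push (hidden)" a plain "void f (void);" |
 | picks up the pragma's visibility but this function returns false, so the |
 | caller may still adjust it; a declaration carrying |
 | __attribute__ ((visibility ("default"))) is settled here and the |
 | function returns true.  */ |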
5703 | |
5704 | bool |
5705 | c_determine_visibility (tree decl) |
5706 | { |
5707 | gcc_assert (VAR_OR_FUNCTION_DECL_P (decl)); |
5708 | |
5709 | /* If the user explicitly specified the visibility with an |
5710 | attribute, honor that. DECL_VISIBILITY will have been set during |
5711 | the processing of the attribute. We check for an explicit |
5712 | attribute, rather than just checking DECL_VISIBILITY_SPECIFIED, |
5713 | to distinguish the use of an attribute from the use of a "#pragma |
5714 | GCC visibility push(...)"; in the latter case we still want other |
5715 | considerations to be able to overrule the #pragma. */ |
5716 | if (lookup_attribute ("visibility", DECL_ATTRIBUTES (decl)) |
5717 | || (TARGET_DLLIMPORT_DECL_ATTRIBUTES |
5718 | && (lookup_attribute ("dllimport", DECL_ATTRIBUTES (decl)) |
5719 | || lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))))) |
5720 | return true; |
5721 | |
5722 | /* Set default visibility to whatever the user supplied with |
5723 | visibility_specified depending on #pragma GCC visibility. */ |
5724 | if (!DECL_VISIBILITY_SPECIFIED (decl)) |
5725 | { |
5726 | if (visibility_options.inpragma |
5727 | || DECL_VISIBILITY (decl) != default_visibility) |
5728 | { |
5729 | DECL_VISIBILITY (decl) = default_visibility; |
5730 | DECL_VISIBILITY_SPECIFIED (decl) = visibility_options.inpragma; |
5731 | /* If visibility changed and DECL already has DECL_RTL, ensure |
5732 | symbol flags are updated. */ |
5733 | if (((VAR_P (decl) && TREE_STATIC (decl)) |
5734 | || TREE_CODE (decl) == FUNCTION_DECL) |
5735 | && DECL_RTL_SET_P (decl)) |
5736 | make_decl_rtl (decl); |
5737 | } |
5738 | } |
5739 | return false; |
5740 | } |
5741 | |
5742 | /* Data to communicate through check_function_arguments_recurse between |
5743 | check_function_nonnull and check_nonnull_arg. */ |
5744 | |
5745 | struct nonnull_arg_ctx |
5746 | { |
5747 | /* Location of the call. */ |
5748 | location_t loc; |
5749 | /* The function whose arguments are being checked and its type (used |
5750 | for calls through function pointers). */ |
5751 | const_tree fndecl, fntype; |
5752 | /* For nonnull_if_nonzero, index of the other argument. */ |
5753 | unsigned HOST_WIDE_INT other; |
5754 | /* True if a warning has been issued. */ |
5755 | bool warned_p; |
5756 | }; |
5757 | |
5758 | /* Check the argument list of a function call to CTX.FNDECL of CTX.FNTYPE |
5759 | for null in argument slots that are marked as requiring a non-null |
5760 | pointer argument. The NARGS arguments are passed in the array ARGARRAY. |
5761 | Return true if we have warned. */ |
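 | /* Illustrative example (an editor's sketch, not from the GCC sources): |
 | given "extern void f (void *, void *) __attribute__ ((nonnull (2)));", |
 | the call "f (NULL, NULL)" is diagnosed only for argument 2, while a |
 | bare __attribute__ ((nonnull)) makes every pointer argument subject to |
 | the -Wnonnull check below.  */ |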
5762 | |
5763 | static bool |
5764 | check_function_nonnull (nonnull_arg_ctx &ctx, int nargs, tree *argarray) |
5765 | { |
5766 | int firstarg = 0; |
5767 | if (TREE_CODE (ctx.fntype) == METHOD_TYPE) |
5768 | { |
5769 | bool closure = false; |
5770 | if (ctx.fndecl) |
5771 | { |
5772 | /* For certain lambda expressions the C++ front end emits calls |
5773 | that pass a null this pointer as an argument named __closure |
5774 | to the member operator() of an empty function object. Detect those |
5775 | and avoid checking them, but proceed to check the remaining |
5776 | arguments. */ |
5777 | tree arg0 = DECL_ARGUMENTS (ctx.fndecl); |
5778 | if (tree arg0name = DECL_NAME (arg0)) |
5779 | closure = id_equal (arg0name, "__closure"); |
5780 | } |
5781 | |
5782 | /* In calls to C++ non-static member functions check the this |
5783 | pointer regardless of whether the function is declared with |
5784 | attribute nonnull. */ |
5785 | firstarg = 1; |
5786 | if (!closure) |
5787 | check_function_arguments_recurse (check_nonnull_arg, &ctx, argarray[0], |
5788 | firstarg, OPT_Wnonnull); |
5789 | } |
5790 | |
5791 | tree attrs = lookup_attribute ("nonnull", TYPE_ATTRIBUTES (ctx.fntype)); |
5792 | |
5793 | tree a = attrs; |
5794 | /* See if any of the nonnull attributes has no arguments. If so, |
5795 | then every pointer argument is checked (in which case the check |
5796 | for pointer type is done in check_nonnull_arg). */ |
5797 | while (a != NULL_TREE && TREE_VALUE (a) != NULL_TREE) |
5798 | a = lookup_attribute ("nonnull", TREE_CHAIN (a)); |
5799 | |
5800 | if (a != NULL_TREE) |
5801 | for (int i = firstarg; i < nargs; i++) |
5802 | check_function_arguments_recurse (check_nonnull_arg, &ctx, argarray[i], |
5803 | i + 1, OPT_Wnonnull); |
5804 | else if (attrs) |
5805 | { |
5806 | /* Walk the argument list. If we encounter an argument number we |
5807 | should check for non-null, do it. */ |
5808 | for (int i = firstarg; i < nargs; i++) |
5809 | { |
5810 | for (a = attrs; ; a = TREE_CHAIN (a)) |
5811 | { |
5812 | a = lookup_attribute ("nonnull", a); |
5813 | if (a == NULL_TREE || nonnull_check_p (TREE_VALUE (a), i + 1)) |
5814 | break; |
5815 | } |
5816 | |
5817 | if (a != NULL_TREE) |
5818 | check_function_arguments_recurse (check_nonnull_arg, &ctx, |
5819 | argarray[i], i + 1, |
5820 | OPT_Wnonnull); |
5821 | } |
5822 | } |
5823 | if (a == NULL_TREE) |
5824 | for (attrs = TYPE_ATTRIBUTES (ctx.fntype); |
5825 | (attrs = lookup_attribute ("nonnull_if_nonzero", attrs)); |
5826 | attrs = TREE_CHAIN (attrs)) |
5827 | { |
5828 | tree args = TREE_VALUE (attrs); |
5829 | unsigned int idx = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1; |
5830 | unsigned int idx2 |
5831 | = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args))) - 1; |
5832 | if (idx < (unsigned) nargs - firstarg |
5833 | && idx2 < (unsigned) nargs - firstarg |
5834 | && INTEGRAL_TYPE_P (TREE_TYPE (argarray[firstarg + idx2])) |
5835 | && integer_nonzerop (argarray[firstarg + idx2])) |
5836 | { |
5837 | ctx.other = firstarg + idx2 + 1; |
5838 | check_function_arguments_recurse (check_nonnull_arg, &ctx, |
5839 | argarray[firstarg + idx], |
5840 | firstarg + idx + 1, |
5841 | OPT_Wnonnull); |
5842 | ctx.other = 0; |
5843 | } |
5844 | } |
5845 | return ctx.warned_p; |
5846 | } |
5847 | |
5848 | /* Check that the Nth argument of a function call (counting backwards |
5849 | from the end) is a (pointer)0. The NARGS arguments are passed in the |
5850 | array ARGARRAY. */ |
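 | /* Illustrative example (an editor's sketch, not from the GCC sources): |
 | for "extern void my_execl (const char *, ...) __attribute__ ((sentinel));" |
 | a call such as my_execl ("sh", "-c", "ls") that lacks a trailing |
 | (char *) NULL is diagnosed under -Wformat as missing a sentinel.  */ |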
5851 | |
5852 | static void |
5853 | check_function_sentinel (const_tree fntype, int nargs, tree *argarray) |
5854 | { |
5855 | tree attr = lookup_attribute ("sentinel", TYPE_ATTRIBUTES (fntype)); |
5856 | |
5857 | if (attr) |
5858 | { |
5859 | int len = 0; |
5860 | int pos = 0; |
5861 | tree sentinel; |
5862 | function_args_iterator iter; |
5863 | tree t; |
5864 | |
5865 | /* Skip over the named arguments. */ |
5866 | FOREACH_FUNCTION_ARGS (fntype, t, iter) |
5867 | { |
5868 | if (len == nargs) |
5869 | break; |
5870 | len++; |
5871 | } |
5872 | |
5873 | if (TREE_VALUE (attr)) |
5874 | { |
5875 | tree p = TREE_VALUE (TREE_VALUE (attr)); |
5876 | pos = TREE_INT_CST_LOW (p); |
5877 | } |
5878 | |
5879 | /* The sentinel must be one of the varargs, i.e. |
5880 | in position >= the number of fixed arguments. */ |
5881 | if ((nargs - 1 - pos) < len) |
5882 | { |
5883 | warning (OPT_Wformat_, |
5884 | "not enough variable arguments to fit a sentinel"); |
5885 | return; |
5886 | } |
5887 | |
5888 | /* Validate the sentinel. */ |
5889 | sentinel = fold_for_warn (argarray[nargs - 1 - pos]); |
5890 | if ((!POINTER_TYPE_P (TREE_TYPE (sentinel)) |
5891 | || !integer_zerop (sentinel)) |
5892 | && TREE_CODE (TREE_TYPE (sentinel)) != NULLPTR_TYPE |
5893 | /* Although __null (in C++) is only an integer we allow it |
5894 | nevertheless, as we are guaranteed that it's exactly |
5895 | as wide as a pointer, and we don't want to force |
5896 | users to cast the NULL they have written there. |
5897 | We warn with -Wstrict-null-sentinel, though. */ |
5898 | && (warn_strict_null_sentinel || null_node != sentinel)) |
5899 | warning (OPT_Wformat_, "missing sentinel in function call"); |
5900 | } |
5901 | } |
5902 | |
5903 | /* Check that the same argument isn't passed to two or more |
5904 | restrict-qualified formal parameters and issue a -Wrestrict warning |
5905 | if it is. Return true if a warning has been issued. */ |
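 | /* Illustrative example (an editor's sketch, not from the GCC sources): |
 | with "extern char *strcat (char *__restrict, const char *__restrict);" |
 | the call strcat (buf, buf) passes the same object both as the restrict- |
 | qualified destination and as another argument, which warn_for_restrict |
 | reports under -Wrestrict.  */ |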
5906 | |
5907 | static bool |
5908 | check_function_restrict (const_tree fndecl, const_tree fntype, |
5909 | int nargs, tree *unfolded_argarray) |
5910 | { |
5911 | int i; |
5912 | tree parms = TYPE_ARG_TYPES (fntype); |
5913 | |
5914 | /* Call fold_for_warn on all of the arguments. */ |
5915 | auto_vec<tree> argarray (nargs); |
5916 | for (i = 0; i < nargs; i++) |
5917 | argarray.quick_push (fold_for_warn (unfolded_argarray[i])); |
5918 | |
5919 | if (fndecl |
5920 | && TREE_CODE (fndecl) == FUNCTION_DECL) |
5921 | { |
5922 | /* Avoid diagnosing calls to built-ins with a zero size/bound |
5923 | here. They are checked in more detail elsewhere. */ |
5924 | if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL) |
5925 | && nargs == 3 |
5926 | && TREE_CODE (argarray[2]) == INTEGER_CST |
5927 | && integer_zerop (argarray[2])) |
5928 | return false; |
5929 | |
5930 | if (DECL_ARGUMENTS (fndecl)) |
5931 | parms = DECL_ARGUMENTS (fndecl); |
5932 | } |
5933 | |
5934 | for (i = 0; i < nargs; i++) |
5935 | TREE_VISITED (argarray[i]) = 0; |
5936 | |
5937 | bool warned = false; |
5938 | |
5939 | for (i = 0; i < nargs && parms && parms != void_list_node; i++) |
5940 | { |
5941 | tree type; |
5942 | if (TREE_CODE (parms) == PARM_DECL) |
5943 | { |
5944 | type = TREE_TYPE (parms); |
5945 | parms = DECL_CHAIN (parms); |
5946 | } |
5947 | else |
5948 | { |
5949 | type = TREE_VALUE (parms); |
5950 | parms = TREE_CHAIN (parms); |
5951 | } |
5952 | if (POINTER_TYPE_P (type) |
5953 | && TYPE_RESTRICT (type) |
5954 | && !TYPE_READONLY (TREE_TYPE (type))) |
5955 | warned |= warn_for_restrict (i, argarray.address (), nargs); |
5956 | } |
5957 | |
5958 | for (i = 0; i < nargs; i++) |
5959 | TREE_VISITED (argarray[i]) = 0; |
5960 | |
5961 | return warned; |
5962 | } |
5963 | |
5964 | /* Helper for check_function_nonnull; given a list of operands which |
5965 | must be non-null in ARGS, determine if operand PARAM_NUM should be |
5966 | checked. */ |
5967 | |
5968 | static bool |
5969 | nonnull_check_p (tree args, unsigned HOST_WIDE_INT param_num) |
5970 | { |
5971 | unsigned HOST_WIDE_INT arg_num = 0; |
5972 | |
5973 | for (; args; args = TREE_CHAIN (args)) |
5974 | { |
5975 | bool found = get_attribute_operand (TREE_VALUE (args), &arg_num); |
5976 | |
5977 | gcc_assert (found); |
5978 | |
5979 | if (arg_num == param_num) |
5980 | return true; |
5981 | } |
5982 | return false; |
5983 | } |
5984 | |
5985 | /* Check that the function argument PARAM (which is operand number |
5986 | PARAM_NUM) is non-null. This is called by check_function_nonnull |
5987 | via check_function_arguments_recurse. */ |
5988 | |
5989 | static void |
5990 | check_nonnull_arg (void *ctx, tree param, unsigned HOST_WIDE_INT param_num) |
5991 | { |
5992 | struct nonnull_arg_ctx *pctx = (struct nonnull_arg_ctx *) ctx; |
5993 | |
5994 | /* Just skip checking the argument if it's not a pointer. This can |
5995 | happen if the "nonnull" attribute was given without an operand |
5996 | list (which means to check every pointer argument). */ |
5997 | |
5998 | tree paramtype = TREE_TYPE (param); |
5999 | if (TREE_CODE (paramtype) != POINTER_TYPE |
6000 | && TREE_CODE (paramtype) != NULLPTR_TYPE) |
6001 | return; |
6002 | |
6003 | /* Diagnose the simple cases of null arguments. */ |
6004 | if (!integer_zerop (fold_for_warn (param))) |
6005 | return; |
6006 | |
6007 | auto_diagnostic_group adg; |
6008 | |
6009 | const location_t loc = EXPR_LOC_OR_LOC (param, pctx->loc); |
6010 | |
6011 | if (TREE_CODE (pctx->fntype) == METHOD_TYPE) |
6012 | --param_num; |
6013 | |
6014 | bool warned; |
6015 | if (param_num == 0) |
6016 | { |
6017 | warned = warning_at (loc, OPT_Wnonnull, |
6018 | "%qs pointer is null", "this"); |
6019 | if (warned && pctx->fndecl) |
6020 | inform (DECL_SOURCE_LOCATION (pctx->fndecl), |
6021 | "in a call to non-static member function %qD", |
6022 | pctx->fndecl); |
6023 | } |
6024 | else |
6025 | { |
6026 | if (pctx->other) |
6027 | warned = warning_at (loc, OPT_Wnonnull, |
6028 | "argument %u null where non-null expected " |
6029 | "because argument %u is nonzero", |
6030 | (unsigned) param_num, |
6031 | TREE_CODE (pctx->fntype) == METHOD_TYPE |
6032 | ? (unsigned) pctx->other - 1 |
6033 | : (unsigned) pctx->other); |
6034 | else |
6035 | warned = warning_at (loc, OPT_Wnonnull, |
6036 | "argument %u null where non-null expected", |
6037 | (unsigned) param_num); |
6038 | if (warned && pctx->fndecl) |
6039 | inform (DECL_SOURCE_LOCATION (pctx->fndecl), |
6040 | "in a call to function %qD declared %qs", |
6041 | pctx->fndecl, |
6042 | pctx->other ? "nonnull_if_nonzero": "nonnull"); |
6043 | } |
6044 | |
6045 | if (warned) |
6046 | pctx->warned_p = true; |
6047 | } |
6048 | |
6049 | /* Helper for attribute handling; fetch the operand number from |
6050 | the attribute argument list. */ |
6051 | |
6052 | bool |
6053 | get_attribute_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp) |
6054 | { |
6055 | /* Verify the arg number is a small constant. */ |
6056 | if (tree_fits_uhwi_p (arg_num_expr)) |
6057 | { |
6058 | *valp = tree_to_uhwi (arg_num_expr); |
6059 | return true; |
6060 | } |
6061 | else |
6062 | return false; |
6063 | } |
6064 | |
6065 | /* Arguments being collected for optimization. */ |
6066 | typedef const char *const_char_p; /* For DEF_VEC_P. */ |
6067 | static GTY(()) vec<const_char_p, va_gc> *optimize_args; |
6068 | |
6069 | |
6070 | /* Inner function to convert a TREE_LIST to an argv array of strings and parse the optimize |
6071 | options in ARGS. ATTR_P is true if this is for attribute(optimize), and |
6072 | false for #pragma GCC optimize. */ |
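 | /* Illustrative example (an editor's sketch, not from the GCC sources): |
 | __attribute__ ((optimize ("O2,unroll-loops"))) and |
 | #pragma GCC optimize ("O2,unroll-loops") are both split at the comma and |
 | rewritten to "-O2" and "-funroll-loops" before being handed to the |
 | command-line option decoder below.  */ |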
6073 | |
6074 | bool |
6075 | parse_optimize_options (tree args, bool attr_p) |
6076 | { |
6077 | bool ret = true; |
6078 | unsigned opt_argc; |
6079 | unsigned i; |
6080 | const char **opt_argv; |
6081 | struct cl_decoded_option *decoded_options; |
6082 | unsigned int decoded_options_count; |
6083 | tree ap; |
6084 | |
6085 | /* Build up argv vector. Just in case the string is stored away, use garbage |
6086 | collected strings. */ |
6087 | vec_safe_truncate (optimize_args, 0); |
6088 | vec_safe_push (optimize_args, (const char *) NULL); |
6089 | |
6090 | for (ap = args; ap != NULL_TREE; ap = TREE_CHAIN (ap)) |
6091 | { |
6092 | tree value = TREE_VALUE (ap); |
6093 | |
6094 | if (TREE_CODE (value) == INTEGER_CST) |
6095 | { |
6096 | char buffer[HOST_BITS_PER_LONG / 3 + 4]; |
6097 | sprintf (buffer, "-O%ld", (long) TREE_INT_CST_LOW (value)); |
6098 | vec_safe_push (optimize_args, ggc_strdup (buffer)); |
6099 | } |
6100 | |
6101 | else if (TREE_CODE (value) == STRING_CST) |
6102 | { |
6103 | /* Split string into multiple substrings. */ |
6104 | size_t len = TREE_STRING_LENGTH (value); |
6105 | char *p = ASTRDUP (TREE_STRING_POINTER (value)); |
6106 | char *end = p + len; |
6107 | char *comma; |
6108 | char *next_p = p; |
6109 | |
6110 | while (next_p != NULL) |
6111 | { |
6112 | size_t len2; |
6113 | char *q, *r; |
6114 | |
6115 | p = next_p; |
6116 | comma = strchr (p, ','); |
6117 | if (comma) |
6118 | { |
6119 | len2 = comma - p; |
6120 | *comma = '\0'; |
6121 | next_p = comma+1; |
6122 | } |
6123 | else |
6124 | { |
6125 | len2 = end - p; |
6126 | next_p = NULL; |
6127 | } |
6128 | |
6129 | /* If the user supplied -Oxxx or -fxxx, only allow -Oxxx or -fxxx |
6130 | options. */ |
6131 | if (*p == '-' && p[1] != 'O' && p[1] != 'f') |
6132 | { |
6133 | ret = false; |
6134 | if (attr_p) |
6135 | { |
6136 | auto_urlify_attributes sentinel; |
6137 | warning (OPT_Wattributes, |
6138 | "bad option %qs to attribute %<optimize%>", p); |
6139 | } |
6140 | else |
6141 | warning (OPT_Wpragmas, |
6142 | "bad option %qs to pragma %<optimize%>", p); |
6143 | continue; |
6144 | } |
6145 | |
6146 | /* Can't use GC memory here, see PR88007. */ |
6147 | r = q = XOBNEWVEC (&opts_obstack, char, len2 + 3); |
6148 | |
6149 | if (*p != '-') |
6150 | { |
6151 | *r++ = '-'; |
6152 | |
6153 | /* Assume that Ox is -Ox, a numeric value is -Ox, an 's' by |
6154 | itself is -Os, and any other switch begins with a -f. */ |
6155 | if ((*p >= '0' && *p <= '9') |
6156 | || (p[0] == 's' && p[1] == '\0')) |
6157 | *r++ = 'O'; |
6158 | else if (*p != 'O') |
6159 | *r++ = 'f'; |
6160 | } |
6161 | |
6162 | memcpy (r, p, len2); |
6163 | r[len2] = '\0'; |
6164 | vec_safe_push (optimize_args, (const char *) q); |
6165 | } |
6166 | |
6167 | } |
6168 | } |
6169 | |
6170 | opt_argc = optimize_args->length (); |
6171 | opt_argv = (const char **) alloca (sizeof (char *) * (opt_argc + 1)); |
6172 | |
6173 | for (i = 1; i < opt_argc; i++) |
6174 | opt_argv[i] = (*optimize_args)[i]; |
6175 | |
6176 | /* Now parse the options. */ |
6177 | decode_cmdline_options_to_array_default_mask (opt_argc, opt_argv, |
6178 | &decoded_options, |
6179 | &decoded_options_count); |
6180 | /* Drop non-Optimization options. */ |
6181 | unsigned j = 1; |
6182 | for (i = 1; i < decoded_options_count; ++i) |
6183 | { |
6184 | if (! (cl_options[decoded_options[i].opt_index].flags & CL_OPTIMIZATION)) |
6185 | { |
6186 | ret = false; |
6187 | if (attr_p) |
6188 | { |
6189 | auto_urlify_attributes sentinel; |
6190 | warning (OPT_Wattributes, |
6191 | "bad option %qs to attribute %<optimize%>", |
6192 | decoded_options[i].orig_option_with_args_text); |
6193 | } |
6194 | else |
6195 | warning (OPT_Wpragmas, |
6196 | "bad option %qs to pragma %<optimize%>", |
6197 | decoded_options[i].orig_option_with_args_text); |
6198 | continue; |
6199 | } |
6200 | if (i != j) |
6201 | decoded_options[j] = decoded_options[i]; |
6202 | j++; |
6203 | } |
6204 | decoded_options_count = j; |
6205 | |
6206 | /* Merge the decoded options with save_decoded_options. */ |
6207 | unsigned save_opt_count = save_opt_decoded_options->length (); |
6208 | unsigned merged_decoded_options_count |
6209 | = save_opt_count + decoded_options_count; |
6210 | cl_decoded_option *merged_decoded_options |
6211 | = XNEWVEC (cl_decoded_option, merged_decoded_options_count); |
6212 | |
6213 | /* Note that the first entry of decoded_options is used for the program name. */ |
6214 | for (unsigned i = 0; i < save_opt_count; ++i) |
6215 | merged_decoded_options[i + 1] = (*save_opt_decoded_options)[i]; |
6216 | for (unsigned i = 1; i < decoded_options_count; ++i) |
6217 | merged_decoded_options[save_opt_count + i] = decoded_options[i]; |
6218 | |
6219 | /* And apply them. */ |
6220 | decode_options (&global_options, &global_options_set, |
6221 | merged_decoded_options, merged_decoded_options_count, |
6222 | input_location, global_dc, NULL); |
6223 | free (decoded_options); |
6224 | |
6225 | targetm.override_options_after_change(); |
6226 | |
6227 | optimize_args->truncate (0); |
6228 | return ret; |
6229 | } |
6230 | |
6231 | /* Check whether ATTR is a valid attribute fallthrough. */ |
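 | /* Illustrative example (an editor's sketch, not from the GCC sources): |
 | in "case 1: x++; [[fallthrough]]; case 2:" the attribute is accepted and |
 | this function returns true, while an attribute-argument-clause or any |
 | additional attribute in the same list draws one of the warnings coded |
 | below.  */ |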
6232 | |
6233 | bool |
6234 | attribute_fallthrough_p (tree attr) |
6235 | { |
6236 | if (attr == error_mark_node) |
6237 | return false; |
6238 | tree t = lookup_attribute ("", "fallthrough", attr); |
6239 | if (t == NULL_TREE) |
6240 | return false; |
6241 | auto_urlify_attributes sentinel; |
6242 | /* It is no longer true that "this attribute shall appear at most once in |
6243 | each attribute-list", but we still give a warning. */ |
6244 | if (lookup_attribute ("", "fallthrough", TREE_CHAIN (t))) |
6245 | warning (OPT_Wattributes, "attribute %<fallthrough%> specified multiple " |
6246 | "times"); |
6247 | /* No attribute-argument-clause shall be present. */ |
6248 | else if (TREE_VALUE (t) != NULL_TREE) |
6249 | warning (OPT_Wattributes, "%<fallthrough%> attribute specified with " |
6250 | "a parameter"); |
6251 | /* Warn if other attributes are found. */ |
6252 | for (t = attr; t != NULL_TREE; t = TREE_CHAIN (t)) |
6253 | { |
6254 | tree name = get_attribute_name (t); |
6255 | if (!is_attribute_p ("fallthrough", name) |
6256 | || !is_attribute_namespace_p ("", t)) |
6257 | { |
6258 | if (!c_dialect_cxx () && get_attribute_namespace (t) == NULL_TREE) |
6259 | /* The specifications of standard attributes in C mean |
6260 | this is a constraint violation. */ |
6261 | pedwarn (input_location, OPT_Wattributes, "%qE attribute ignored", |
6262 | get_attribute_name (t)); |
6263 | else |
6264 | warning (OPT_Wattributes, "%qE attribute ignored", name); |
6265 | } |
6266 | } |
6267 | return true; |
6268 | } |
6269 | |
6270 | |
6271 | /* Check for valid arguments being passed to a function with FNTYPE. |
6272 | There are NARGS arguments in the array ARGARRAY. LOC should be used |
6273 | for diagnostics. Return true if either -Wnonnull or -Wrestrict has |
6274 | been issued. |
6275 | |
6276 | The arguments in ARGARRAY may not have been folded yet (e.g. for C++, |
6277 | to preserve location wrappers); checks that require folded arguments |
6278 | should call fold_for_warn on them. |
6279 | |
6280 | Use the frontend-supplied COMP_TYPES when determining if |
6281 | one type is a subclass of another. */ |
6282 | |
6283 | bool |
6284 | check_function_arguments (location_t loc, const_tree fndecl, const_tree fntype, |
6285 | int nargs, tree *argarray, vec<location_t> *arglocs, |
6286 | bool (*comp_types) (tree, tree)) |
6287 | { |
6288 | bool warned_p = false; |
6289 | |
6290 | if (c_inhibit_evaluation_warnings) |
6291 | return warned_p; |
6292 | |
6293 | /* Check for null being passed in a pointer argument that must be |
6294 | non-null. In C++, this includes the this pointer. We also need |
6295 | to do this if format checking is enabled. */ |
6296 | if (warn_nonnull) |
6297 | { |
6298 | nonnull_arg_ctx ctx = { loc, fndecl, fntype, 0, false }; |
6299 | warned_p = check_function_nonnull (ctx, nargs, argarray); |
6300 | } |
6301 | |
6302 | /* Check for errors in format strings. */ |
6303 | |
6304 | if (warn_format || warn_suggest_attribute_format) |
6305 | check_function_format (fndecl ? fndecl : fntype, TYPE_ATTRIBUTES (fntype), nargs, |
6306 | argarray, arglocs, comp_types); |
6307 | |
6308 | if (warn_format) |
6309 | check_function_sentinel (fntype, nargs, argarray); |
6310 | |
6311 | if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)) |
6312 | { |
6313 | switch (DECL_FUNCTION_CODE (fndecl)) |
6314 | { |
6315 | case BUILT_IN_SPRINTF: |
6316 | case BUILT_IN_SPRINTF_CHK: |
6317 | case BUILT_IN_SNPRINTF: |
6318 | case BUILT_IN_SNPRINTF_CHK: |
6319 | /* Let the sprintf pass handle these. */ |
6320 | return warned_p; |
6321 | |
6322 | default: |
6323 | break; |
6324 | } |
6325 | } |
6326 | |
6327 | /* check_function_restrict sets the DECL_READ_P for arguments |
6328 | so it must be called unconditionally. */ |
6329 | warned_p |= check_function_restrict (fndecl, fntype, nargs, argarray); |
6330 | |
6331 | return warned_p; |
6332 | } |
6333 | |
6334 | /* Generic argument checking recursion routine. PARAM is the argument to |
6335 | be checked. PARAM_NUM is the number of the argument. CALLBACK is invoked |
6336 | once the argument is resolved. CTX is context for the callback. |
6337 | OPT is the warning for which this is done. */ |
6338 | void |
6339 | check_function_arguments_recurse (void (*callback) |
6340 | (void *, tree, unsigned HOST_WIDE_INT), |
6341 | void *ctx, tree param, |
6342 | unsigned HOST_WIDE_INT param_num, |
6343 | opt_code opt) |
6344 | { |
6345 | if (opt != OPT_Wformat_ && warning_suppressed_p (param)) |
6346 | return; |
6347 | |
6348 | if (CONVERT_EXPR_P (param) |
6349 | && (TYPE_PRECISION (TREE_TYPE (param)) |
6350 | == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (param, 0))))) |
6351 | { |
6352 | /* Strip coercion. */ |
6353 | check_function_arguments_recurse (callback, ctx, |
6354 | TREE_OPERAND (param, 0), param_num, |
6355 | opt); |
6356 | return; |
6357 | } |
6358 | |
6359 | if (TREE_CODE (param) == CALL_EXPR && CALL_EXPR_FN (param)) |
6360 | { |
6361 | tree type = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (param))); |
6362 | tree attrs; |
6363 | bool found_format_arg = false; |
6364 | |
6365 | /* See if this is a call to a known internationalization function |
6366 | that modifies a format arg. Such a function may have multiple |
6367 | format_arg attributes (for example, ngettext). */ |
6368 | |
6369 | for (attrs = TYPE_ATTRIBUTES (type); |
6370 | attrs; |
6371 | attrs = TREE_CHAIN (attrs)) |
6372 | if (is_attribute_p ("format_arg", get_attribute_name (attrs))) |
6373 | { |
6374 | tree inner_arg; |
6375 | tree format_num_expr; |
6376 | int format_num; |
6377 | int i; |
6378 | call_expr_arg_iterator iter; |
6379 | |
6380 | /* Extract the argument number, which was previously checked |
6381 | to be valid. */ |
6382 | format_num_expr = TREE_VALUE (TREE_VALUE (attrs)); |
6383 | |
6384 | format_num = tree_to_uhwi (format_num_expr); |
6385 | |
6386 | for (inner_arg = first_call_expr_arg (param, &iter), i = 1; |
6387 | inner_arg != NULL_TREE; |
6388 | inner_arg = next_call_expr_arg (&iter), i++) |
6389 | if (i == format_num) |
6390 | { |
6391 | check_function_arguments_recurse (callback, ctx, |
6392 | inner_arg, param_num, |
6393 | opt); |
6394 | found_format_arg = true; |
6395 | break; |
6396 | } |
6397 | } |
6398 | |
6399 | /* If we found a format_arg attribute and did a recursive check, |
6400 | we are done with checking this argument. Otherwise, we continue |
6401 | and this will be considered a non-literal. */ |
6402 | if (found_format_arg) |
6403 | return; |
6404 | } |
6405 | |
6406 | if (TREE_CODE (param) == COND_EXPR) |
6407 | { |
6408 | /* Simplify to avoid warning for an impossible case. */ |
6409 | param = fold_for_warn (param); |
6410 | if (TREE_CODE (param) == COND_EXPR) |
6411 | { |
6412 | /* Check both halves of the conditional expression. */ |
6413 | check_function_arguments_recurse (callback, ctx, |
6414 | TREE_OPERAND (param, 1), |
6415 | param_num, opt); |
6416 | check_function_arguments_recurse (callback, ctx, |
6417 | TREE_OPERAND (param, 2), |
6418 | param_num, opt); |
6419 | return; |
6420 | } |
6421 | } |
6422 | |
6423 | (*callback) (ctx, param, param_num); |
6424 | } |
6425 | |
6426 | /* Checks, for the built-in function FNDECL, that the number of arguments |
6427 | NARGS matches the required number REQUIRED and issues an error if |
6428 | there is a mismatch. Returns true if the number of arguments is |
6429 | correct, otherwise false. LOC is the location of FNDECL. */ |
6430 | |
6431 | static bool |
6432 | builtin_function_validate_nargs (location_t loc, tree fndecl, int nargs, |
6433 | int required, bool complain) |
6434 | { |
6435 | if (nargs < required) |
6436 | { |
6437 | if (complain) |
6438 | error_at (loc, "too few arguments to function %qE", fndecl); |
6439 | return false; |
6440 | } |
6441 | else if (nargs > required) |
6442 | { |
6443 | if (complain) |
6444 | error_at (loc, "too many arguments to function %qE", fndecl); |
6445 | return false; |
6446 | } |
6447 | return true; |
6448 | } |
6449 | |
6450 | /* Helper macro for check_builtin_function_arguments. */ |
6451 | #define ARG_LOCATION(N) \ |
6452 | (arg_loc.is_empty () \ |
6453 | ? EXPR_LOC_OR_LOC (args[(N)], input_location) \ |
6454 | : expansion_point_location (arg_loc[(N)])) |
6455 | |
6456 | /* Verifies the NARGS arguments ARGS to the builtin function FNDECL. |
6457 | Returns false if there was an error, otherwise true. LOC is the |
6458 | location of the function; ARG_LOC is a vector of locations of the |
6459 | arguments. If FNDECL is the result of resolving an overloaded |
6460 | target built-in, ORIG_FNDECL is the original function decl, |
6461 | otherwise it is null. */ |
6462 | |
6463 | bool |
6464 | check_builtin_function_arguments (location_t loc, vec<location_t> arg_loc, |
6465 | tree fndecl, tree orig_fndecl, int nargs, |
6466 | tree *args, bool complain) |
6467 | { |
6468 | if (!fndecl_built_in_p (fndecl)) |
6469 | return true; |
6470 | |
6471 | if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD) |
6472 | return (!targetm.check_builtin_call |
6473 | || targetm.check_builtin_call (loc, arg_loc, fndecl, orig_fndecl, |
6474 | nargs, args, complain)); |
6475 | |
6476 | if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_FRONTEND) |
6477 | return true; |
6478 | |
6479 | gcc_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL); |
6480 | switch (DECL_FUNCTION_CODE (fndecl)) |
6481 | { |
6482 | case BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX: |
6483 | if (!tree_fits_uhwi_p (args[2])) |
6484 | { |
6485 | if (complain) |
6486 | error_at ( |
6487 | ARG_LOCATION (2), |
6488 | "third argument to function %qE must be a constant integer", |
6489 | fndecl); |
6490 | return false; |
6491 | } |
6492 | /* fall through */ |
6493 | |
6494 | case BUILT_IN_ALLOCA_WITH_ALIGN: |
6495 | { |
6496 | /* Get the requested alignment (in bits) if it's a constant |
6497 | integer expression. */ |
6498 | unsigned HOST_WIDE_INT align |
6499 | = tree_fits_uhwi_p (args[1]) ? tree_to_uhwi (args[1]) : 0; |
6500 | |
6501 | /* Determine if the requested alignment is a power of 2. */ |
6502 | if ((align & (align - 1))) |
6503 | align = 0; |
6504 | |
6505 | /* The maximum alignment in bits corresponding to the same |
6506 | maximum in bytes enforced in check_user_alignment(). */ |
6507 | unsigned maxalign = (UINT_MAX >> 1) + 1; |
6508 | |
6509 | /* Reject invalid alignments. */ |
6510 | if (align < BITS_PER_UNIT || maxalign < align) |
6511 | { |
6512 | if (complain) |
6513 | error_at (ARG_LOCATION (1), |
6514 | "second argument to function %qE must be a constant " |
6515 | "integer power of 2 between %qi and %qu bits", |
6516 | fndecl, BITS_PER_UNIT, maxalign); |
6517 | return false; |
6518 | } |
6519 | return true; |
6520 | } |
6521 | |
6522 | case BUILT_IN_CONSTANT_P: |
6523 | return builtin_function_validate_nargs (loc, fndecl, nargs, 1, complain); |
6524 | |
6525 | case BUILT_IN_ISFINITE: |
6526 | case BUILT_IN_ISINF: |
6527 | case BUILT_IN_ISINF_SIGN: |
6528 | case BUILT_IN_ISNAN: |
6529 | case BUILT_IN_ISNORMAL: |
6530 | case BUILT_IN_ISSIGNALING: |
6531 | case BUILT_IN_SIGNBIT: |
6532 | if (builtin_function_validate_nargs (loc, fndecl, nargs, 1, complain)) |
6533 | { |
6534 | if (TREE_CODE (TREE_TYPE (args[0])) != REAL_TYPE) |
6535 | { |
6536 | if (complain) |
6537 | error_at (ARG_LOCATION (0), |
6538 | "non-floating-point argument in " |
6539 | "call to function %qE", |
6540 | fndecl); |
6541 | return false; |
6542 | } |
6543 | return true; |
6544 | } |
6545 | return false; |
6546 | |
6547 | case BUILT_IN_ISGREATER: |
6548 | case BUILT_IN_ISGREATEREQUAL: |
6549 | case BUILT_IN_ISLESS: |
6550 | case BUILT_IN_ISLESSEQUAL: |
6551 | case BUILT_IN_ISLESSGREATER: |
6552 | case BUILT_IN_ISUNORDERED: |
6553 | case BUILT_IN_ISEQSIG: |
6554 | if (builtin_function_validate_nargs (loc, fndecl, nargs, 2, complain)) |
6555 | { |
6556 | enum tree_code code0, code1; |
6557 | code0 = TREE_CODE (TREE_TYPE (args[0])); |
6558 | code1 = TREE_CODE (TREE_TYPE (args[1])); |
6559 | if (!((code0 == REAL_TYPE && code1 == REAL_TYPE) |
6560 | || (code0 == REAL_TYPE |
6561 | && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE)) |
6562 | || ((code0 == INTEGER_TYPE || code0 == BITINT_TYPE) |
6563 | && code1 == REAL_TYPE))) |
6564 | { |
6565 | if (complain) |
6566 | error_at (loc, |
6567 | "non-floating-point arguments in call to " |
6568 | "function %qE", |
6569 | fndecl); |
6570 | return false; |
6571 | } |
6572 | return true; |
6573 | } |
6574 | return false; |
6575 | |
6576 | case BUILT_IN_FPCLASSIFY: |
6577 | if (builtin_function_validate_nargs (loc, fndecl, nargs, 6, complain)) |
6578 | { |
6579 | for (unsigned int i = 0; i < 5; i++) |
6580 | if (TREE_CODE (args[i]) != INTEGER_CST) |
6581 | { |
6582 | if (complain) |
6583 | error_at (ARG_LOCATION (i), |
6584 | "non-const integer argument %u in " |
6585 | "call to function %qE", |
6586 | i + 1, fndecl); |
6587 | return false; |
6588 | } |
6589 | |
6590 | if (TREE_CODE (TREE_TYPE (args[5])) != REAL_TYPE) |
6591 | { |
6592 | if (complain) |
6593 | error_at (ARG_LOCATION (5), |
6594 | "non-floating-point argument in " |
6595 | "call to function %qE", |
6596 | fndecl); |
6597 | return false; |
6598 | } |
6599 | return true; |
6600 | } |
6601 | return false; |
6602 | |
6603 | case BUILT_IN_ASSUME_ALIGNED: |
6604 | if (builtin_function_validate_nargs (loc, fndecl, nargs, 2 + (nargs > 2), |
6605 | complain)) |
6606 | { |
6607 | if (nargs >= 3 |
6608 | && TREE_CODE (TREE_TYPE (args[2])) != INTEGER_TYPE |
6609 | && TREE_CODE (TREE_TYPE (args[2])) != BITINT_TYPE) |
6610 | { |
6611 | if (complain) |
6612 | error_at (ARG_LOCATION (2), |
6613 | "non-integer argument 3 in call to " |
6614 | "function %qE", |
6615 | fndecl); |
6616 | return false; |
6617 | } |
6618 | return true; |
6619 | } |
6620 | return false; |
6621 | |
6622 | case BUILT_IN_ADD_OVERFLOW: |
6623 | case BUILT_IN_SUB_OVERFLOW: |
6624 | case BUILT_IN_MUL_OVERFLOW: |
6625 | if (builtin_function_validate_nargs (loc, fndecl, nargs, 3, complain)) |
6626 | { |
6627 | unsigned i; |
6628 | for (i = 0; i < 2; i++) |
6629 | if (!INTEGRAL_TYPE_P (TREE_TYPE (args[i]))) |
6630 | { |
6631 | if (complain) |
6632 | error_at (ARG_LOCATION (i), |
6633 | "argument %u in call to function " |
6634 | "%qE does not have integral type", |
6635 | i + 1, fndecl); |
6636 | return false; |
6637 | } |
6638 | if (TREE_CODE (TREE_TYPE (args[2])) != POINTER_TYPE |
6639 | || !INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (args[2])))) |
6640 | { |
6641 | if (complain) |
6642 | error_at (ARG_LOCATION (2), |
6643 | "argument 3 in call to function %qE " |
6644 | "does not have pointer to integral type", |
6645 | fndecl); |
6646 | return false; |
6647 | } |
6648 | else if (TREE_CODE (TREE_TYPE (TREE_TYPE (args[2]))) == ENUMERAL_TYPE) |
6649 | { |
6650 | if (complain) |
6651 | error_at (ARG_LOCATION (2), |
6652 | "argument 3 in call to function %qE " |
6653 | "has pointer to enumerated type", |
6654 | fndecl); |
6655 | return false; |
6656 | } |
6657 | else if (TREE_CODE (TREE_TYPE (TREE_TYPE (args[2]))) == BOOLEAN_TYPE) |
6658 | { |
6659 | if (complain) |
6660 | error_at (ARG_LOCATION (2), |
6661 | "argument 3 in call to function %qE " |
6662 | "has pointer to boolean type", |
6663 | fndecl); |
6664 | return false; |
6665 | } |
6666 | else if (TYPE_READONLY (TREE_TYPE (TREE_TYPE (args[2])))) |
6667 | { |
6668 | if (complain) |
6669 | error_at (ARG_LOCATION (2), |
6670 | "argument %u in call to function %qE " |
6671 | "has pointer to %qs type (%qT)", |
6672 | 3, fndecl, "const", TREE_TYPE (args[2])); |
6673 | return false; |
6674 | } |
6675 | else if (TYPE_ATOMIC (TREE_TYPE (TREE_TYPE (args[2])))) |
6676 | { |
6677 | if (complain) |
6678 | error_at (ARG_LOCATION (2), |
6679 | "argument %u in call to function %qE " |
6680 | "has pointer to %qs type (%qT)", |
6681 | 3, fndecl, "_Atomic", TREE_TYPE (args[2])); |
6682 | return false; |
6683 | } |
6684 | return true; |
6685 | } |
6686 | return false; |
6687 | |
6688 | case BUILT_IN_ADD_OVERFLOW_P: |
6689 | case BUILT_IN_SUB_OVERFLOW_P: |
6690 | case BUILT_IN_MUL_OVERFLOW_P: |
6691 | if (builtin_function_validate_nargs (loc, fndecl, nargs, 3, complain)) |
6692 | { |
6693 | unsigned i; |
6694 | for (i = 0; i < 3; i++) |
6695 | if (!INTEGRAL_TYPE_P (TREE_TYPE (args[i]))) |
6696 | { |
6697 | if (complain) |
6698 | error_at (ARG_LOCATION (i), |
6699 | "argument %u in call to function " |
6700 | "%qE does not have integral type", |
6701 | i + 1, fndecl); |
6702 | return false; |
6703 | } |
6704 | if (TREE_CODE (TREE_TYPE (args[2])) == ENUMERAL_TYPE) |
6705 | { |
6706 | if (complain) |
6707 | error_at (ARG_LOCATION (2), |
6708 | "argument %u in call to function " |
6709 | "%qE has enumerated type", |
6710 | 3, fndecl); |
6711 | return false; |
6712 | } |
6713 | else if (TREE_CODE (TREE_TYPE (args[2])) == BOOLEAN_TYPE) |
6714 | { |
6715 | if (complain) |
6716 | error_at (ARG_LOCATION (2), |
6717 | "argument %u in call to function " |
6718 | "%qE has boolean type", |
6719 | 3, fndecl); |
6720 | return false; |
6721 | } |
6722 | return true; |
6723 | } |
6724 | return false; |
6725 | |
6726 | case BUILT_IN_CLEAR_PADDING: |
6727 | if (builtin_function_validate_nargs (loc, fndecl, nargs, 1, complain)) |
6728 | { |
6729 | if (!POINTER_TYPE_P (TREE_TYPE (args[0]))) |
6730 | { |
6731 | if (complain) |
6732 | error_at (ARG_LOCATION (0), |
6733 | "argument %u in call to function " |
6734 | "%qE does not have pointer type", |
6735 | 1, fndecl); |
6736 | return false; |
6737 | } |
6738 | else if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_TYPE (args[0])))) |
6739 | { |
6740 | if (complain) |
6741 | error_at (ARG_LOCATION (0), |
6742 | "argument %u in call to function " |
6743 | "%qE points to incomplete type", |
6744 | 1, fndecl); |
6745 | return false; |
6746 | } |
6747 | else if (TYPE_READONLY (TREE_TYPE (TREE_TYPE (args[0])))) |
6748 | { |
6749 | if (complain) |
6750 | error_at (ARG_LOCATION (0), |
6751 | "argument %u in call to function %qE " |
6752 | "has pointer to %qs type (%qT)", |
6753 | 1, fndecl, "const", TREE_TYPE (args[0])); |
6754 | return false; |
6755 | } |
6756 | else if (TYPE_ATOMIC (TREE_TYPE (TREE_TYPE (args[0])))) |
6757 | { |
6758 | if (complain) |
6759 | error_at (ARG_LOCATION (0), |
6760 | "argument %u in call to function %qE " |
6761 | "has pointer to %qs type (%qT)", |
6762 | 1, fndecl, "_Atomic", TREE_TYPE (args[0])); |
6763 | return false; |
6764 | } |
6765 | return true; |
6766 | } |
6767 | return false; |
6768 | |
6769 | case BUILT_IN_CLZG: |
6770 | case BUILT_IN_CTZG: |
6771 | case BUILT_IN_CLRSBG: |
6772 | case BUILT_IN_FFSG: |
6773 | case BUILT_IN_PARITYG: |
6774 | case BUILT_IN_POPCOUNTG: |
6775 | if (nargs == 2 |
6776 | && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CLZG |
6777 | || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CTZG)) |
6778 | { |
6779 | if (!INTEGRAL_TYPE_P (TREE_TYPE (args[1]))) |
6780 | { |
6781 | if (complain) |
6782 | error_at (ARG_LOCATION (1), |
6783 | "argument %u in call to function " |
6784 | "%qE does not have integral type", |
6785 | 2, fndecl); |
6786 | return false; |
6787 | } |
6788 | if ((TYPE_PRECISION (TREE_TYPE (args[1])) |
6789 | > TYPE_PRECISION (integer_type_node)) |
6790 | || (TYPE_PRECISION (TREE_TYPE (args[1])) |
6791 | == TYPE_PRECISION (integer_type_node) |
6792 | && TYPE_UNSIGNED (TREE_TYPE (args[1])))) |
6793 | { |
6794 | if (complain) |
6795 | error_at (ARG_LOCATION (1), |
6796 | "argument %u in call to function " |
6797 | "%qE does not have %<int%> type", |
6798 | 2, fndecl); |
6799 | return false; |
6800 | } |
6801 | } |
6802 | else if (!builtin_function_validate_nargs (loc, fndecl, nargs, 1, |
6803 | complain)) |
6804 | return false; |
6805 | |
6806 | if (!INTEGRAL_TYPE_P (TREE_TYPE (args[0]))) |
6807 | { |
6808 | if (complain) |
6809 | error_at (ARG_LOCATION (0), |
6810 | "argument %u in call to function " |
6811 | "%qE does not have integral type", |
6812 | 1, fndecl); |
6813 | return false; |
6814 | } |
6815 | if (TREE_CODE (TREE_TYPE (args[0])) == ENUMERAL_TYPE) |
6816 | { |
6817 | if (complain) |
6818 | error_at (ARG_LOCATION (0), |
6819 | "argument %u in call to function " |
6820 | "%qE has enumerated type", |
6821 | 1, fndecl); |
6822 | return false; |
6823 | } |
6824 | if (TREE_CODE (TREE_TYPE (args[0])) == BOOLEAN_TYPE) |
6825 | { |
6826 | if (complain) |
6827 | error_at (ARG_LOCATION (0), |
6828 | "argument %u in call to function " |
6829 | "%qE has boolean type", |
6830 | 1, fndecl); |
6831 | return false; |
6832 | } |
6833 | if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FFSG |
6834 | || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CLRSBG) |
6835 | { |
6836 | if (TYPE_UNSIGNED (TREE_TYPE (args[0]))) |
6837 | { |
6838 | if (complain) |
6839 | error_at (ARG_LOCATION (0), |
6840 | "argument 1 in call to function " |
6841 | "%qE has unsigned type", |
6842 | fndecl); |
6843 | return false; |
6844 | } |
6845 | } |
6846 | else if (!TYPE_UNSIGNED (TREE_TYPE (args[0]))) |
6847 | { |
6848 | if (complain) |
6849 | error_at (ARG_LOCATION (0), |
6850 | "argument 1 in call to function " |
6851 | "%qE has signed type", |
6852 | fndecl); |
6853 | return false; |
6854 | } |
6855 | return true; |
6856 | |
6857 | default: |
6858 | return true; |
6859 | } |
6860 | } |
6861 | |
6862 | /* Subroutine of c_parse_error. |
6863 | Return the result of concatenating LHS and RHS. RHS is really |
6864 | a string literal; its first character is indicated by RHS_START and |
6865 | RHS_SIZE is its length (including the terminating NUL character). |
6866 | |
6867 | The caller is responsible for deleting the returned pointer. */ |
6868 | |
6869 | static char * |
6870 | catenate_strings (const char *lhs, const char *rhs_start, int rhs_size) |
6871 | { |
6872 | const size_t lhs_size = strlen (lhs); |
6873 | char *result = XNEWVEC (char, lhs_size + rhs_size); |
6874 | memcpy (result, lhs, lhs_size); |
6875 | memcpy (result + lhs_size, rhs_start, rhs_size); |
6876 | return result; |
6877 | } |
6878 | |
6879 | /* Issue the error given by GMSGID at RICHLOC, indicating that it occurred |
6880 | before TOKEN, which had the associated VALUE. */ |
6881 | |
6882 | void |
6883 | c_parse_error (const char *gmsgid, enum cpp_ttype token_type, |
6884 | tree value, unsigned char token_flags, |
6885 | rich_location *richloc) |
6886 | { |
6887 | #define catenate_messages(M1, M2) catenate_strings ((M1), (M2), sizeof (M2)) |
6888 | |
6889 | char *message = NULL; |
6890 | |
6891 | if (token_type == CPP_EOF) |
6892 | message = catenate_messages (gmsgid, " at end of input"); |
6893 | else if (token_type == CPP_CHAR |
6894 | || token_type == CPP_WCHAR |
6895 | || token_type == CPP_CHAR16 |
6896 | || token_type == CPP_CHAR32 |
6897 | || token_type == CPP_UTF8CHAR) |
6898 | { |
6899 | unsigned int val = TREE_INT_CST_LOW (value); |
6900 | const char *prefix; |
6901 | |
6902 | switch (token_type) |
6903 | { |
6904 | default: |
6905 | prefix = ""; |
6906 | break; |
6907 | case CPP_WCHAR: |
6908 | prefix = "L"; |
6909 | break; |
6910 | case CPP_CHAR16: |
6911 | prefix = "u"; |
6912 | break; |
6913 | case CPP_CHAR32: |
6914 | prefix = "U"; |
6915 | break; |
6916 | case CPP_UTF8CHAR: |
6917 | prefix = "u8"; |
6918 | break; |
6919 | } |
6920 | |
6921 | if (val <= UCHAR_MAX && ISGRAPH (val)) |
6922 | message = catenate_messages (gmsgid, " before %s'%c'"); |
6923 | else |
6924 | message = catenate_messages (gmsgid, " before %s'\\x%x'"); |
6925 | |
6926 | error_at (richloc, message, prefix, val); |
6927 | free (message); |
6928 | message = NULL; |
6929 | } |
6930 | else if (token_type == CPP_CHAR_USERDEF |
6931 | || token_type == CPP_WCHAR_USERDEF |
6932 | || token_type == CPP_CHAR16_USERDEF |
6933 | || token_type == CPP_CHAR32_USERDEF |
6934 | || token_type == CPP_UTF8CHAR_USERDEF) |
6935 | message = catenate_messages (gmsgid, |
6936 | " before user-defined character literal"); |
6937 | else if (token_type == CPP_STRING_USERDEF |
6938 | || token_type == CPP_WSTRING_USERDEF |
6939 | || token_type == CPP_STRING16_USERDEF |
6940 | || token_type == CPP_STRING32_USERDEF |
6941 | || token_type == CPP_UTF8STRING_USERDEF) |
6942 | message = catenate_messages (gmsgid, " before user-defined string literal"); |
6943 | else if (token_type == CPP_STRING |
6944 | || token_type == CPP_WSTRING |
6945 | || token_type == CPP_STRING16 |
6946 | || token_type == CPP_STRING32 |
6947 | || token_type == CPP_UTF8STRING) |
6948 | message = catenate_messages (gmsgid, " before string constant"); |
6949 | else if (token_type == CPP_NUMBER) |
6950 | message = catenate_messages (gmsgid, " before numeric constant"); |
6951 | else if (token_type == CPP_NAME) |
6952 | { |
6953 | message = catenate_messages (gmsgid, " before %qE"); |
6954 | error_at (richloc, message, value); |
6955 | free (message); |
6956 | message = NULL; |
6957 | } |
6958 | else if (token_type == CPP_PRAGMA) |
6959 | message = catenate_messages (gmsgid, " before %<#pragma%>"); |
6960 | else if (token_type == CPP_PRAGMA_EOL) |
6961 | message = catenate_messages (gmsgid, " before end of line"); |
6962 | else if (token_type == CPP_DECLTYPE) |
6963 | message = catenate_messages (gmsgid, " before %<decltype%>"); |
6964 | else if (token_type == CPP_EMBED) |
6965 | message = catenate_messages (gmsgid, " before %<#embed%>"); |
6966 | else if (token_type < N_TTYPES) |
6967 | { |
6968 | message = catenate_messages (gmsgid, " before %qs token"); |
6969 | error_at (richloc, message, cpp_type2name (token_type, token_flags)); |
6970 | free (message); |
6971 | message = NULL; |
6972 | } |
6973 | else |
6974 | error_at (richloc, gmsgid); |
6975 | |
6976 | if (message) |
6977 | { |
6978 | error_at (richloc, message); |
6979 | free (message); |
6980 | } |
6981 | #undef catenate_messages |
6982 | } |
6983 | |
6984 | /* Return the gcc option code associated with the reason for a cpp |
6985 | message, or 0 if none. */ |
6986 | |
6987 | static diagnostic_option_id |
6988 | c_option_controlling_cpp_diagnostic (enum cpp_warning_reason reason) |
6989 | { |
6990 | const struct cpp_reason_option_codes_t *entry; |
6991 | |
6992 | for (entry = cpp_reason_option_codes; entry->reason != CPP_W_NONE; entry++) |
6993 | { |
6994 | if (entry->reason == reason) |
6995 | return entry->option_code; |
6996 | } |
6997 | return 0; |
6998 | } |
6999 | |
7000 | /* Return TRUE if the given option index corresponds to a diagnostic |
7001 | issued by libcpp. Linear search seems fine for now. */ |
7002 | bool |
7003 | c_option_is_from_cpp_diagnostics (int option_index) |
7004 | { |
7005 | for (auto entry = cpp_reason_option_codes; entry->reason != CPP_W_NONE; |
7006 | ++entry) |
7007 | { |
7008 | if (entry->option_code == option_index) |
7009 | return true; |
7010 | } |
7011 | return false; |
7012 | } |
7013 | |
7014 | /* Callback from cpp_diagnostic for PFILE to print diagnostics from the |
7015 | preprocessor. The diagnostic is of type LEVEL, with REASON set |
7016 | to the reason code if LEVEL is represents a warning, at location |
7017 | RICHLOC unless this is after lexing and the compiler's location |
7018 | should be used instead; MSG is the translated message and AP |
7019 | the arguments. Returns true if a diagnostic was emitted, false |
7020 | otherwise. */ |
7021 | |
7022 | bool |
7023 | c_cpp_diagnostic (cpp_reader *pfile ATTRIBUTE_UNUSED, |
7024 | enum cpp_diagnostic_level level, |
7025 | enum cpp_warning_reason reason, |
7026 | rich_location *richloc, |
7027 | const char *msg, va_list *ap) |
7028 | { |
7029 | diagnostic_info diagnostic; |
7030 | diagnostic_t dlevel; |
7031 | bool save_warn_system_headers = global_dc->m_warn_system_headers; |
7032 | bool ret; |
7033 | |
7034 | switch (level) |
7035 | { |
7036 | case CPP_DL_WARNING_SYSHDR: |
7037 | if (flag_no_output) |
7038 | return false; |
7039 | global_dc->m_warn_system_headers = 1; |
7040 | /* Fall through. */ |
7041 | case CPP_DL_WARNING: |
7042 | if (flag_no_output) |
7043 | return false; |
7044 | dlevel = DK_WARNING; |
7045 | break; |
7046 | case CPP_DL_PEDWARN: |
7047 | if (flag_no_output && !flag_pedantic_errors) |
7048 | return false; |
7049 | dlevel = DK_PEDWARN; |
7050 | break; |
7051 | case CPP_DL_ERROR: |
7052 | dlevel = DK_ERROR; |
7053 | break; |
7054 | case CPP_DL_ICE: |
7055 | dlevel = DK_ICE; |
7056 | break; |
7057 | case CPP_DL_NOTE: |
7058 | dlevel = DK_NOTE; |
7059 | break; |
7060 | case CPP_DL_FATAL: |
7061 | dlevel = DK_FATAL; |
7062 | break; |
7063 | default: |
7064 | gcc_unreachable (); |
7065 | } |
7066 | if (override_libcpp_locations) |
7067 | richloc->set_range (0, input_location, SHOW_RANGE_WITH_CARET); |
7068 | diagnostic_set_info_translated (&diagnostic, msg, ap, |
7069 | richloc, dlevel); |
7070 | diagnostic_set_option_id (&diagnostic, |
7071 | c_option_controlling_cpp_diagnostic (reason)); |
7072 | ret = diagnostic_report_diagnostic (global_dc, &diagnostic); |
7073 | if (level == CPP_DL_WARNING_SYSHDR) |
7074 | global_dc->m_warn_system_headers = save_warn_system_headers; |
7075 | return ret; |
7076 | } |
7077 | |
7078 | /* Convert a character from the host to the target execution character |
7079 | set. cpplib handles this, mostly. */ |
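 | /* Illustrative example (an editor's sketch, not from the GCC sources): |
 | with an EBCDIC execution character set, 'A' (0x41 on the host) maps to |
 | 0xC1; under -fsigned-char the result is then sign-extended, so values |
 | with the high bit set come back negative.  */ |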
7080 | |
7081 | HOST_WIDE_INT |
7082 | c_common_to_target_charset (HOST_WIDE_INT c) |
7083 | { |
7084 | /* Character constants in GCC proper are sign-extended under -fsigned-char, |
7085 | zero-extended under -fno-signed-char. cpplib insists that characters |
7086 | and character constants are always unsigned. Hence we must convert |
7087 | back and forth. */ |
7088 | cppchar_t uc = ((cppchar_t)c) & ((((cppchar_t)1) << CHAR_BIT)-1); |
7089 | |
7090 | uc = cpp_host_to_exec_charset (parse_in, uc); |
7091 | |
7092 | if (flag_signed_char) |
7093 | return ((HOST_WIDE_INT)uc) << (HOST_BITS_PER_WIDE_INT - CHAR_TYPE_SIZE) |
7094 | >> (HOST_BITS_PER_WIDE_INT - CHAR_TYPE_SIZE); |
7095 | else |
7096 | return uc; |
7097 | } |
7098 | |
7099 | /* Fold an offsetof-like expression. EXPR is a nested sequence of component |
7100 | references with an INDIRECT_REF of a constant at the bottom; much like the |
7101 | traditional rendering of offsetof as a macro. TYPE is the desired type of |
7102 | the whole expression. Return the folded result. */ |
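 | /* Illustrative example (an editor's sketch, not from the GCC sources): |
 | for "struct S { char c; int i[4]; };" the expression |
 | offsetof (struct S, i[2]) folds to 4 + 2 * sizeof (int) on a target |
 | where int is 4-byte aligned, i.e. the field offset plus the scaled |
 | array index handled in the COMPONENT_REF and ARRAY_REF cases below.  */ |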
7103 | |
7104 | tree |
7105 | fold_offsetof (tree expr, tree type, enum tree_code ctx) |
7106 | { |
7107 | tree base, off, t; |
7108 | tree_code code = TREE_CODE (expr); |
7109 | switch (code) |
7110 | { |
7111 | case ERROR_MARK: |
7112 | return expr; |
7113 | |
7114 | case VAR_DECL: |
7115 | error ("cannot apply %<offsetof%> to static data member %qD", expr); |
7116 | return error_mark_node; |
7117 | |
7118 | case CALL_EXPR: |
7119 | case TARGET_EXPR: |
7120 | error ("cannot apply %<offsetof%> when %<operator[]%> is overloaded"); |
7121 | return error_mark_node; |
7122 | |
7123 | case NOP_EXPR: |
7124 | case INDIRECT_REF: |
7125 | if (!TREE_CONSTANT (TREE_OPERAND (expr, 0))) |
7126 | { |
7127 | error ("cannot apply %<offsetof%> to a non constant address"); |
7128 | return error_mark_node; |
7129 | } |
7130 | return convert (type, TREE_OPERAND (expr, 0)); |
7131 | |
7132 | case COMPONENT_REF: |
7133 | base = fold_offsetof (TREE_OPERAND (expr, 0), type, code); |
7134 | if (base == error_mark_node) |
7135 | return base; |
7136 | |
7137 | t = TREE_OPERAND (expr, 1); |
7138 | if (DECL_C_BIT_FIELD (t)) |
7139 | { |
7140 | error ("attempt to take address of bit-field structure " |
7141 | "member %qD", t); |
7142 | return error_mark_node; |
7143 | } |
7144 | off = size_binop_loc (input_location, PLUS_EXPR, DECL_FIELD_OFFSET (t), |
7145 | size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t)) |
7146 | / BITS_PER_UNIT)); |
7147 | break; |
7148 | |
7149 | case ARRAY_REF: |
7150 | base = fold_offsetof (TREE_OPERAND (expr, 0), type, code); |
7151 | if (base == error_mark_node) |
7152 | return base; |
7153 | |
7154 | t = TREE_OPERAND (expr, 1); |
7155 | STRIP_ANY_LOCATION_WRAPPER (t); |
7156 | |
7157 | /* Check if the offset goes beyond the upper bound of the array. */ |
7158 | if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) >= 0) |
7159 | { |
7160 | tree upbound = array_ref_up_bound (expr); |
7161 | if (upbound != NULL_TREE |
7162 | && TREE_CODE (upbound) == INTEGER_CST |
7163 | && !tree_int_cst_equal (upbound, |
7164 | TYPE_MAX_VALUE (TREE_TYPE (upbound)))) |
7165 | { |
7166 | if (ctx != ARRAY_REF && ctx != COMPONENT_REF) |
7167 | upbound = size_binop (PLUS_EXPR, upbound, |
7168 | build_int_cst (TREE_TYPE (upbound), 1)); |
7169 | if (tree_int_cst_lt (upbound, t)) |
7170 | { |
7171 | tree v; |
7172 | |
7173 | for (v = TREE_OPERAND (expr, 0); |
7174 | TREE_CODE (v) == COMPONENT_REF; |
7175 | v = TREE_OPERAND (v, 0)) |
7176 | if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0))) |
7177 | == RECORD_TYPE) |
7178 | { |
7179 | tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1)); |
7180 | for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain)) |
7181 | if (TREE_CODE (fld_chain) == FIELD_DECL) |
7182 | break; |
7183 | |
7184 | if (fld_chain) |
7185 | break; |
7186 | } |
7187 | /* Don't warn if the array might be considered a poor |
7188 | man's flexible array member with a very permissive |
7189 | definition thereof. */ |
7190 | if (TREE_CODE (v) == ARRAY_REF |
7191 | || TREE_CODE (v) == COMPONENT_REF) |
7192 | warning (OPT_Warray_bounds_, |
7193 | "index %E denotes an offset " |
7194 | "greater than size of %qT", |
7195 | t, TREE_TYPE (TREE_OPERAND (expr, 0))); |
7196 | } |
7197 | } |
7198 | } |
7199 | |
7200 | t = convert (sizetype, t); |
7201 | off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t); |
7202 | break; |
7203 | |
7204 | case COMPOUND_EXPR: |
7205 | /* Handle static members of volatile structs. */ |
7206 | t = TREE_OPERAND (expr, 1); |
7207 | gcc_checking_assert (VAR_P (get_base_address (t))); |
7208 | return fold_offsetof (t, type); |
7209 | |
7210 | default: |
7211 | gcc_unreachable (); |
7212 | } |
7213 | |
7214 | if (!POINTER_TYPE_P (type)) |
7215 | return size_binop (PLUS_EXPR, base, convert (type, off)); |
7216 | return fold_build_pointer_plus (base, off); |
7217 | } |
7218 | |
7219 | /* *PTYPE is an incomplete array. Complete it with a domain based on |
7220 | INITIAL_VALUE. If INITIAL_VALUE is not present, use 1 if DO_DEFAULT |
7221 | is true. Return 0 if successful, 1 if INITIAL_VALUE can't be deciphered, |
7222 | 2 if INITIAL_VALUE was NULL, and 3 if INITIAL_VALUE was empty. */ |
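/* A few illustrative cases (a sketch, not tied to any particular caller):
     int a[] = { 1, 2, 3 };   -- domain becomes [0, 2], 0 is returned
     int b[] = { };           -- maxindex is -1; failure 3 when pedantic
     char s[] = "hi";         -- the string length counts the NUL: [0, 2]  */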
7223 | |
7224 | int |
7225 | complete_array_type (tree *ptype, tree initial_value, bool do_default) |
7226 | { |
7227 | tree maxindex, type, main_type, elt, unqual_elt; |
7228 | int failure = 0, quals; |
7229 | bool overflow_p = false; |
7230 | |
7231 | maxindex = size_zero_node; |
7232 | if (initial_value) |
7233 | { |
7234 | STRIP_ANY_LOCATION_WRAPPER (initial_value); |
7235 | |
7236 | if (TREE_CODE (initial_value) == STRING_CST) |
7237 | { |
7238 | int eltsize |
7239 | = int_size_in_bytes (TREE_TYPE (TREE_TYPE (initial_value))); |
7240 | maxindex = size_int (TREE_STRING_LENGTH (initial_value) / eltsize |
7241 | - 1); |
7242 | } |
7243 | else if (TREE_CODE (initial_value) == CONSTRUCTOR) |
7244 | { |
7245 | vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (initial_value); |
7246 | |
7247 | if (vec_safe_is_empty (v)) |
7248 | { |
7249 | if (pedantic) |
7250 | failure = 3; |
7251 | maxindex = ssize_int (-1); |
7252 | } |
7253 | else |
7254 | { |
7255 | tree curindex; |
7256 | unsigned HOST_WIDE_INT cnt = 1; |
7257 | constructor_elt *ce; |
7258 | bool fold_p = false; |
7259 | |
7260 | if ((*v)[0].index) |
7261 | maxindex = (*v)[0].index, fold_p = true; |
7262 | if (TREE_CODE ((*v)[0].value) == RAW_DATA_CST) |
7263 | cnt = 0; |
7264 | |
7265 | curindex = maxindex; |
7266 | |
7267 | for (; vec_safe_iterate (v, cnt, &ce); cnt++) |
7268 | { |
7269 | bool curfold_p = false; |
7270 | if (ce->index) |
7271 | curindex = ce->index, curfold_p = true; |
7272 | if (!ce->index || TREE_CODE (ce->value) == RAW_DATA_CST) |
7273 | { |
7274 | if (fold_p || curfold_p) |
7275 | { |
7276 | /* Since we treat size types now as ordinary |
7277 | unsigned types, we need an explicit overflow |
7278 | check. */ |
7279 | tree orig = curindex; |
7280 | curindex = fold_convert (sizetype, curindex); |
7281 | overflow_p |= tree_int_cst_lt (curindex, orig); |
7282 | curfold_p = false; |
7283 | } |
7284 | if (TREE_CODE (ce->value) == RAW_DATA_CST) |
7285 | curindex |
7286 | = size_binop (PLUS_EXPR, curindex, |
7287 | size_int (RAW_DATA_LENGTH (ce->value) |
7288 | - ((ce->index || !cnt) |
7289 | ? 1 : 0))); |
7290 | else |
7291 | curindex = size_binop (PLUS_EXPR, curindex, |
7292 | size_one_node); |
7293 | } |
7294 | if (tree_int_cst_lt (maxindex, curindex)) |
7295 | maxindex = curindex, fold_p = curfold_p; |
7296 | } |
7297 | if (fold_p) |
7298 | { |
7299 | tree orig = maxindex; |
7300 | maxindex = fold_convert (sizetype, maxindex); |
7301 | overflow_p |= tree_int_cst_lt (maxindex, orig); |
7302 | } |
7303 | } |
7304 | } |
7305 | else |
7306 | { |
7307 | /* Make an error message unless that happened already. */ |
7308 | if (initial_value != error_mark_node) |
7309 | failure = 1; |
7310 | } |
7311 | } |
7312 | else |
7313 | { |
7314 | failure = 2; |
7315 | if (!do_default) |
7316 | return failure; |
7317 | } |
7318 | |
7319 | type = *ptype; |
7320 | elt = TREE_TYPE (type); |
7321 | quals = TYPE_QUALS (strip_array_types (elt)); |
7322 | if (quals == 0) |
7323 | unqual_elt = elt; |
7324 | else |
7325 | unqual_elt = c_build_qualified_type (elt, KEEP_QUAL_ADDR_SPACE (quals)); |
7326 | |
7327 | /* Using build_distinct_type_copy and modifying things afterward instead |
7328 | of using build_array_type to create a new type preserves all of the |
7329 | TYPE_LANG_FLAG_? bits that the front end may have set. */ |
7330 | main_type = build_distinct_type_copy (TYPE_MAIN_VARIANT (type)); |
7331 | TREE_TYPE (main_type) = unqual_elt; |
7332 | TYPE_DOMAIN (main_type) |
7333 | = build_range_type (TREE_TYPE (maxindex), |
7334 | build_int_cst (TREE_TYPE (maxindex), 0), maxindex); |
7335 | TYPE_TYPELESS_STORAGE (main_type) = TYPE_TYPELESS_STORAGE (type); |
7336 | layout_type (main_type); |
7337 | |
7338 | /* Set TYPE_STRUCTURAL_EQUALITY_P early. */ |
7339 | if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (main_type)) |
7340 | || TYPE_STRUCTURAL_EQUALITY_P (TYPE_DOMAIN (main_type))) |
7341 | SET_TYPE_STRUCTURAL_EQUALITY (main_type); |
7342 | else |
7343 | TYPE_CANONICAL (main_type) = main_type; |
7344 | |
7345 | /* Make sure we have the canonical MAIN_TYPE. */ |
7346 | hashval_t hashcode = type_hash_canon_hash (main_type); |
7347 | main_type = type_hash_canon (hashcode, main_type); |
7348 | |
7349 | /* Fix the canonical type. */ |
7350 | if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (main_type)) |
7351 | || TYPE_STRUCTURAL_EQUALITY_P (TYPE_DOMAIN (main_type))) |
7352 | gcc_assert (TYPE_STRUCTURAL_EQUALITY_P (main_type)); |
7353 | else if (TYPE_CANONICAL (TREE_TYPE (main_type)) != TREE_TYPE (main_type) |
7354 | || (TYPE_CANONICAL (TYPE_DOMAIN (main_type)) |
7355 | != TYPE_DOMAIN (main_type))) |
7356 | TYPE_CANONICAL (main_type) |
7357 | = build_array_type (TYPE_CANONICAL (TREE_TYPE (main_type)), |
7358 | TYPE_CANONICAL (TYPE_DOMAIN (main_type)), |
7359 | TYPE_TYPELESS_STORAGE (main_type)); |
7360 | |
7361 | if (quals == 0) |
7362 | type = main_type; |
7363 | else |
7364 | type = c_build_qualified_type (main_type, quals); |
7365 | |
7366 | if (COMPLETE_TYPE_P (type) |
7367 | && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST |
7368 | && (overflow_p || TREE_OVERFLOW (TYPE_SIZE_UNIT (type)))) |
7369 | { |
7370 | error ("size of array is too large"); |
7371 | /* If we proceed with the array type as it is, we'll eventually |
7372 | crash in tree_to_[su]hwi(). */ |
7373 | type = error_mark_node; |
7374 | } |
7375 | |
7376 | *ptype = type; |
7377 | return failure; |
7378 | } |
7379 | |
7380 | /* INIT is a constructor of a structure with a flexible array member. |
7381 | Complete the flexible array member with a domain based on its value. */ |
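/* Illustrative sketch of the GNU extension this handles:
     struct S { int n; int d[]; } s = { 1, { 2, 3, 4 } };
   the initializer of the trailing member gets its type completed to
   int[3] by the complete_array_type call below.  */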
7382 | void |
7383 | complete_flexible_array_elts (tree init) |
7384 | { |
7385 | tree elt, type; |
7386 | |
7387 | if (init == NULL_TREE || TREE_CODE (init) != CONSTRUCTOR) |
7388 | return; |
7389 | |
7390 | if (vec_safe_is_empty (CONSTRUCTOR_ELTS (init))) |
7391 | return; |
7392 | |
7393 | elt = CONSTRUCTOR_ELTS (init)->last ().value; |
7394 | type = TREE_TYPE (elt); |
7395 | if (TREE_CODE (type) == ARRAY_TYPE |
7396 | && TYPE_SIZE (type) == NULL_TREE) |
7397 | complete_array_type (&TREE_TYPE (elt), elt, false); |
7398 | else |
7399 | complete_flexible_array_elts (elt); |
7400 | } |
7401 | |
7402 | /* Like c_mark_addressable but don't check register qualifier. */ |
7403 | void |
7404 | c_common_mark_addressable_vec (tree t) |
7405 | { |
7406 | while (handled_component_p (t) || TREE_CODE (t) == C_MAYBE_CONST_EXPR) |
7407 | { |
7408 | if (TREE_CODE (t) == C_MAYBE_CONST_EXPR) |
7409 | t = C_MAYBE_CONST_EXPR_EXPR (t); |
7410 | else |
7411 | t = TREE_OPERAND (t, 0); |
7412 | } |
7413 | if (!VAR_P (t) |
7414 | && TREE_CODE (t) != PARM_DECL |
7415 | && TREE_CODE (t) != COMPOUND_LITERAL_EXPR |
7416 | && TREE_CODE (t) != TARGET_EXPR) |
7417 | return; |
7418 | if (!VAR_P (t) || !DECL_HARD_REGISTER (t)) |
7419 | TREE_ADDRESSABLE (t) = 1; |
7420 | if (TREE_CODE (t) == COMPOUND_LITERAL_EXPR) |
7421 | TREE_ADDRESSABLE (COMPOUND_LITERAL_EXPR_DECL (t)) = 1; |
7422 | else if (TREE_CODE (t) == TARGET_EXPR) |
7423 | TREE_ADDRESSABLE (TARGET_EXPR_SLOT (t)) = 1; |
7424 | } |
7425 | |
7426 | |
7427 | |
7428 | /* Used to help initialize the builtin-types.def table. When a type of |
7429 | the correct size doesn't exist, use error_mark_node instead of NULL. |
7430 | The latter results in segfaults even when a decl using the type doesn't |
7431 | get invoked. */ |
7432 | |
7433 | tree |
7434 | builtin_type_for_size (int size, bool unsignedp) |
7435 | { |
7436 | tree type = c_common_type_for_size (size, unsignedp); |
7437 | return type ? type : error_mark_node; |
7438 | } |
7439 | |
7440 | /* Work out which __builtin_speculation_safe_value_* variant matches the |
7441 | first argument of a call to __builtin_speculation_safe_value.  Only |
7442 | pointers and integral types are permitted.  Return the matching |
7443 | built-in function code (the _PTR variant for pointer arguments), or |
7444 | BUILT_IN_NONE if the type is unsupported or its size is not 1, 2, 4, 8 or 16. */ |
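/* A sketch of the mapping (illustrative only):
     int *p; __builtin_speculation_safe_value (p)  -- the _PTR variant
     int i;  __builtin_speculation_safe_value (i)  -- the _4 variant
   Unsupported argument types resolve to BUILT_IN_NONE.  */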
7445 | static enum built_in_function |
7446 | speculation_safe_value_resolve_call (tree function, vec<tree, va_gc> *params, |
7447 | bool complain) |
7448 | { |
7449 | /* Type of the argument. */ |
7450 | tree type; |
7451 | int size; |
7452 | |
7453 | if (vec_safe_is_empty (params)) |
7454 | { |
7455 | if (complain) |
7456 | error ("too few arguments to function %qE", function); |
7457 | return BUILT_IN_NONE; |
7458 | } |
7459 | |
7460 | type = TREE_TYPE ((*params)[0]); |
7461 | if (TREE_CODE (type) == ARRAY_TYPE && c_dialect_cxx ()) |
7462 | { |
7463 | /* Force array-to-pointer decay for C++. */ |
7464 | (*params)[0] = default_conversion ((*params)[0]); |
7465 | type = TREE_TYPE ((*params)[0]); |
7466 | } |
7467 | |
7468 | if (POINTER_TYPE_P (type)) |
7469 | return BUILT_IN_SPECULATION_SAFE_VALUE_PTR; |
7470 | |
7471 | if (!INTEGRAL_TYPE_P (type)) |
7472 | goto incompatible; |
7473 | |
7474 | if (!COMPLETE_TYPE_P (type)) |
7475 | goto incompatible; |
7476 | |
7477 | size = tree_to_uhwi (TYPE_SIZE_UNIT (type)); |
7478 | if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16) |
7479 | return ((enum built_in_function) |
7480 | ((int) BUILT_IN_SPECULATION_SAFE_VALUE_1 + exact_log2 (size))); |
7481 | |
7482 | incompatible: |
7483 | /* Issue the diagnostic only if the argument is valid, otherwise |
7484 | it would be redundant at best and could be misleading. */ |
7485 | if (type != error_mark_node && complain) |
7486 | error ("operand type %qT is incompatible with argument %d of %qE", |
7487 | type, 1, function); |
7488 | |
7489 | return BUILT_IN_NONE; |
7490 | } |
7491 | |
7492 | /* Validate and coerce PARAMS, the arguments to ORIG_FUNCTION to fit |
7493 | the prototype for FUNCTION. The first argument is mandatory, a second |
7494 | argument, if present, must be type compatible with the first. */ |
7495 | static bool |
7496 | speculation_safe_value_resolve_params (location_t loc, tree orig_function, |
7497 | vec<tree, va_gc> *params, bool complain) |
7498 | { |
7499 | tree val; |
7500 | |
7501 | if (params->length () == 0) |
7502 | { |
7503 | if (complain) |
7504 | error_at (loc, "too few arguments to function %qE", orig_function); |
7505 | return false; |
7506 | } |
7507 | |
7508 | else if (params->length () > 2) |
7509 | { |
7510 | if (complain) |
7511 | error_at (loc, "too many arguments to function %qE", orig_function); |
7512 | return false; |
7513 | } |
7514 | |
7515 | val = (*params)[0]; |
7516 | if (TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE) |
7517 | val = default_conversion (val); |
7518 | if (!(TREE_CODE (TREE_TYPE (val)) == POINTER_TYPE |
7519 | || TREE_CODE (TREE_TYPE (val)) == INTEGER_TYPE)) |
7520 | { |
7521 | if (complain) |
7522 | error_at (loc, "expecting argument of type pointer or of type integer " |
7523 | "for argument 1"); |
7524 | return false; |
7525 | } |
7526 | (*params)[0] = val; |
7527 | |
7528 | if (params->length () == 2) |
7529 | { |
7530 | tree val2 = (*params)[1]; |
7531 | if (TREE_CODE (TREE_TYPE (val2)) == ARRAY_TYPE) |
7532 | val2 = default_conversion (val2); |
7533 | if (error_operand_p (val2)) |
7534 | return false; |
7535 | if (!(TREE_TYPE (val) == TREE_TYPE (val2) |
7536 | || useless_type_conversion_p (TREE_TYPE (val), TREE_TYPE (val2)))) |
7537 | { |
7538 | if (complain) |
7539 | error_at (loc, "both arguments must be compatible"); |
7540 | return false; |
7541 | } |
7542 | (*params)[1] = val2; |
7543 | } |
7544 | |
7545 | return true; |
7546 | } |
7547 | |
7548 | /* Cast the result of the builtin back to the type of the first argument, |
7549 | preserving any qualifiers that it might have. */ |
7550 | static tree |
7551 | speculation_safe_value_resolve_return (tree first_param, tree result) |
7552 | { |
7553 | tree ptype = TREE_TYPE (first_param); |
7554 | tree rtype = TREE_TYPE (result); |
7555 | ptype = TYPE_MAIN_VARIANT (ptype); |
7556 | |
7557 | if (tree_int_cst_equal (TYPE_SIZE (ptype), TYPE_SIZE (rtype))) |
7558 | return convert (ptype, result); |
7559 | |
7560 | return result; |
7561 | } |
7562 | |
7563 | /* A helper function for resolve_overloaded_builtin in resolving the |
7564 | overloaded __sync_ builtins. Returns a positive power of 2 if the |
7565 | first operand of PARAMS is a pointer to a supported data type. |
7566 | Returns 0 if an error is encountered. Return -1 for _BitInt |
7567 | __atomic*fetch* with unsupported type which should be handled by |
7568 | a cas loop. |
7569 | FETCH is true when FUNCTION is one of the _FETCH_OP_ or _OP_FETCH_ |
7570 | built-ins. ORIG_FORMAT is for __sync_* rather than __atomic_* |
7571 | built-ins. */ |
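/* Illustrative returns, assuming a 32-bit int and no TImode support:
     int i;          __atomic_fetch_add (&i, 1, ...)  -- returns 4
     _BitInt(128) b; __atomic_fetch_add (&b, 1, ...)  -- returns -1, so the
   caller falls back to a compare-and-swap loop for the _BitInt case.  */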
7572 | |
7573 | static int |
7574 | sync_resolve_size (tree function, vec<tree, va_gc> *params, bool fetch, |
7575 | bool orig_format, bool complain) |
7576 | { |
7577 | /* Type of the argument. */ |
7578 | tree argtype; |
7579 | /* Type the argument points to. */ |
7580 | tree type; |
7581 | int size; |
7582 | |
7583 | if (vec_safe_is_empty (params)) |
7584 | { |
7585 | if (complain) |
7586 | error ("too few arguments to function %qE", function); |
7587 | return 0; |
7588 | } |
7589 | |
7590 | argtype = type = TREE_TYPE ((*params)[0]); |
7591 | if (TREE_CODE (type) == ARRAY_TYPE && c_dialect_cxx ()) |
7592 | { |
7593 | /* Force array-to-pointer decay for C++. */ |
7594 | (*params)[0] = default_conversion ((*params)[0]); |
7595 | type = TREE_TYPE ((*params)[0]); |
7596 | } |
7597 | if (TREE_CODE (type) != POINTER_TYPE) |
7598 | goto incompatible; |
7599 | |
7600 | type = TREE_TYPE (type); |
7601 | if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type)) |
7602 | goto incompatible; |
7603 | |
7604 | if (!COMPLETE_TYPE_P (type)) |
7605 | goto incompatible; |
7606 | |
7607 | if (fetch && TREE_CODE (type) == BOOLEAN_TYPE) |
7608 | goto incompatible; |
7609 | |
7610 | size = tree_to_uhwi (TYPE_SIZE_UNIT (type)); |
7611 | if (size == 16 |
7612 | && TREE_CODE (type) == BITINT_TYPE |
7613 | && !targetm.scalar_mode_supported_p (TImode)) |
7614 | { |
7615 | if (fetch && !orig_format) |
7616 | return -1; |
7617 | goto incompatible; |
7618 | } |
7619 | |
7620 | if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16) |
7621 | return size; |
7622 | |
7623 | if (fetch && !orig_format && TREE_CODE (type) == BITINT_TYPE) |
7624 | return -1; |
7625 | |
7626 | incompatible: |
7627 | /* Issue the diagnostic only if the argument is valid, otherwise |
7628 | it would be redundant at best and could be misleading. */ |
7629 | if (argtype != error_mark_node && complain) |
7630 | error ("operand type %qT is incompatible with argument %d of %qE", |
7631 | argtype, 1, function); |
7632 | return 0; |
7633 | } |
7634 | |
7635 | /* A helper function for resolve_overloaded_builtin. Adds casts to |
7636 | PARAMS to make arguments match up with those of FUNCTION. Drops |
7637 | the variadic arguments at the end. Returns false if some error |
7638 | was encountered; true on success. */ |
7639 | |
7640 | static bool |
7641 | sync_resolve_params (location_t loc, tree orig_function, tree function, |
7642 | vec<tree, va_gc> *params, bool orig_format, bool complain) |
7643 | { |
7644 | function_args_iterator iter; |
7645 | tree ptype; |
7646 | unsigned int parmnum; |
7647 | |
7648 | function_args_iter_init (&iter, TREE_TYPE (function)); |
7649 | /* We've declared the implementation functions to use "volatile void *" |
7650 | as the pointer parameter, so we shouldn't get any complaints from the |
7651 | call to check_function_arguments whatever type the user used. */ |
7652 | function_args_iter_next (&iter); |
7653 | ptype = TREE_TYPE (TREE_TYPE ((*params)[0])); |
7654 | ptype = TYPE_MAIN_VARIANT (ptype); |
7655 | |
7656 | /* For the rest of the values, we need to cast these to FTYPE, so that we |
7657 | don't get warnings for passing pointer types, etc. */ |
7658 | parmnum = 0; |
7659 | while (1) |
7660 | { |
7661 | tree val, arg_type; |
7662 | |
7663 | arg_type = function_args_iter_cond (&iter); |
7664 | /* XXX void_type_node belies the abstraction. */ |
7665 | if (arg_type == void_type_node) |
7666 | break; |
7667 | |
7668 | ++parmnum; |
7669 | if (params->length () <= parmnum) |
7670 | { |
7671 | if (complain) |
7672 | error_at (loc, "too few arguments to function %qE", orig_function); |
7673 | return false; |
7674 | } |
7675 | |
7676 | /* Only convert parameters if arg_type is unsigned integer type with |
7677 | new format sync routines, i.e. don't attempt to convert pointer |
7678 | arguments (e.g. EXPECTED argument of __atomic_compare_exchange_n), |
7679 | bool arguments (e.g. WEAK argument) or signed int arguments (memmodel |
7680 | kinds). */ |
7681 | if (TREE_CODE (arg_type) == INTEGER_TYPE && TYPE_UNSIGNED (arg_type)) |
7682 | { |
7683 | /* Ideally for the first conversion we'd use convert_for_assignment |
7684 | so that we get warnings for anything that doesn't match the pointer |
7685 | type. This isn't portable across the C and C++ front ends at present. */ |
7686 | val = (*params)[parmnum]; |
7687 | val = convert (ptype, val); |
7688 | val = convert (arg_type, val); |
7689 | (*params)[parmnum] = val; |
7690 | } |
7691 | |
7692 | function_args_iter_next (&iter); |
7693 | } |
7694 | |
7695 | /* __atomic routines are not variadic. */ |
7696 | if (!orig_format && params->length () != parmnum + 1) |
7697 | { |
7698 | if (complain) |
7699 | error_at (loc, "too many arguments to function %qE", orig_function); |
7700 | return false; |
7701 | } |
7702 | |
7703 | /* The definition of these primitives is variadic, with the remaining |
7704 | being "an optional list of variables protected by the memory barrier". |
7705 | No clue what that's supposed to mean, precisely, but we consider all |
7706 | call-clobbered variables to be protected so we're safe. */ |
7707 | params->truncate (parmnum + 1); |
7708 | |
7709 | return true; |
7710 | } |
7711 | |
7712 | /* A helper function for resolve_overloaded_builtin. Adds a cast to |
7713 | RESULT to make it match the type of the first pointer argument in |
7714 | PARAMS. */ |
7715 | |
7716 | static tree |
7717 | sync_resolve_return (tree first_param, tree result, bool orig_format) |
7718 | { |
7719 | tree ptype = TREE_TYPE (TREE_TYPE (first_param)); |
7720 | tree rtype = TREE_TYPE (result); |
7721 | ptype = TYPE_MAIN_VARIANT (ptype); |
7722 | |
7723 | /* New format doesn't require casting unless the types are the same size. */ |
7724 | if (orig_format || tree_int_cst_equal (TYPE_SIZE (ptype), TYPE_SIZE (rtype))) |
7725 | return convert (ptype, result); |
7726 | else |
7727 | return result; |
7728 | } |
7729 | |
7730 | /* This function verifies the PARAMS to the generic atomic FUNCTION. |
7731 | It returns the common size of the objects the pointer parameters point |
7732 | to, or 0 if the parameters are invalid. */ |
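/* E.g. for __atomic_exchange (&i, &v, &r, model) where all three arguments
   are int pointers this returns 4, while mixing in a pointer to an object
   of a different size reports a size mismatch and returns 0 (a sketch).  */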
7733 | |
7734 | static int |
7735 | get_atomic_generic_size (location_t loc, tree function, |
7736 | vec<tree, va_gc> *params, bool complain) |
7737 | { |
7738 | unsigned int n_param; |
7739 | unsigned int n_model; |
7740 | unsigned int outputs = 0; // bitset of output parameters |
7741 | unsigned int x; |
7742 | int size_0; |
7743 | tree type_0; |
7744 | |
7745 | /* Determine the parameter makeup. */ |
7746 | switch (DECL_FUNCTION_CODE (function)) |
7747 | { |
7748 | case BUILT_IN_ATOMIC_EXCHANGE: |
7749 | n_param = 4; |
7750 | n_model = 1; |
7751 | outputs = 5; |
7752 | break; |
7753 | case BUILT_IN_ATOMIC_LOAD: |
7754 | n_param = 3; |
7755 | n_model = 1; |
7756 | outputs = 2; |
7757 | break; |
7758 | case BUILT_IN_ATOMIC_STORE: |
7759 | n_param = 3; |
7760 | n_model = 1; |
7761 | outputs = 1; |
7762 | break; |
7763 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE: |
7764 | n_param = 6; |
7765 | n_model = 2; |
7766 | outputs = 3; |
7767 | break; |
7768 | default: |
7769 | gcc_unreachable (); |
7770 | } |
7771 | |
7772 | if (vec_safe_length (params) != n_param) |
7773 | { |
7774 | if (complain) |
7775 | error_at (loc, "incorrect number of arguments to function %qE", |
7776 | function); |
7777 | return 0; |
7778 | } |
7779 | |
7780 | /* Get type of first parameter, and determine its size. */ |
7781 | type_0 = TREE_TYPE ((*params)[0]); |
7782 | if (TREE_CODE (type_0) == ARRAY_TYPE && c_dialect_cxx ()) |
7783 | { |
7784 | /* Force array-to-pointer decay for C++. */ |
7785 | (*params)[0] = default_conversion ((*params)[0]); |
7786 | type_0 = TREE_TYPE ((*params)[0]); |
7787 | } |
7788 | if (TREE_CODE (type_0) != POINTER_TYPE || VOID_TYPE_P (TREE_TYPE (type_0))) |
7789 | { |
7790 | if (complain) |
7791 | error_at (loc, "argument 1 of %qE must be a non-void pointer type", |
7792 | function); |
7793 | return 0; |
7794 | } |
7795 | |
7796 | if (!COMPLETE_TYPE_P (TREE_TYPE (type_0))) |
7797 | { |
7798 | if (complain) |
7799 | error_at (loc, "argument 1 of %qE must be a pointer to a complete type", |
7800 | function); |
7801 | return 0; |
7802 | } |
7803 | |
7804 | /* Types must be compile time constant sizes. */ |
7805 | if (!tree_fits_uhwi_p ((TYPE_SIZE_UNIT (TREE_TYPE (type_0))))) |
7806 | { |
7807 | if (complain) |
7808 | error_at (loc, |
7809 | "argument 1 of %qE must be a pointer to a constant size type", |
7810 | function); |
7811 | return 0; |
7812 | } |
7813 | |
7814 | size_0 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (type_0))); |
7815 | |
7816 | /* Zero size objects are not allowed. */ |
7817 | if (size_0 == 0) |
7818 | { |
7819 | if (complain) |
7820 | error_at ( |
7821 | loc, "argument 1 of %qE must be a pointer to a nonzero size object", |
7822 | function); |
7823 | return 0; |
7824 | } |
7825 | |
7826 | /* Check each other parameter is a pointer and the same size. */ |
7827 | for (x = 0; x < n_param - n_model; x++) |
7828 | { |
7829 | int size; |
7830 | tree type = TREE_TYPE ((*params)[x]); |
7831 | /* __atomic_compare_exchange has a bool in the 4th position, skip it. */ |
7832 | if (n_param == 6 && x == 3) |
7833 | continue; |
7834 | if (TREE_CODE (type) == ARRAY_TYPE && c_dialect_cxx ()) |
7835 | { |
7836 | /* Force array-to-pointer decay for C++. */ |
7837 | (*params)[x] = default_conversion ((*params)[x]); |
7838 | type = TREE_TYPE ((*params)[x]); |
7839 | } |
7840 | if (!POINTER_TYPE_P (type)) |
7841 | { |
7842 | if (complain) |
7843 | error_at (loc, "argument %d of %qE must be a pointer type", x + 1, |
7844 | function); |
7845 | return 0; |
7846 | } |
7847 | else if (TYPE_SIZE_UNIT (TREE_TYPE (type)) |
7848 | && TREE_CODE ((TYPE_SIZE_UNIT (TREE_TYPE (type)))) |
7849 | != INTEGER_CST) |
7850 | { |
7851 | if (complain) |
7852 | error_at (loc, |
7853 | "argument %d of %qE must be a pointer to a constant " |
7854 | "size type", |
7855 | x + 1, function); |
7856 | return 0; |
7857 | } |
7858 | else if (FUNCTION_POINTER_TYPE_P (type)) |
7859 | { |
7860 | if (complain) |
7861 | error_at (loc, |
7862 | "argument %d of %qE must not be a pointer to a " |
7863 | "function", |
7864 | x + 1, function); |
7865 | return 0; |
7866 | } |
7867 | tree type_size = TYPE_SIZE_UNIT (TREE_TYPE (type)); |
7868 | size = type_size ? tree_to_uhwi (type_size) : 0; |
7869 | if (size != size_0) |
7870 | { |
7871 | if (complain) |
7872 | error_at (loc, "size mismatch in argument %d of %qE", x + 1, |
7873 | function); |
7874 | return 0; |
7875 | } |
7876 | |
7877 | { |
7878 | auto_diagnostic_group d; |
7879 | int quals = TYPE_QUALS (TREE_TYPE (type)); |
7880 | /* Must not write to an argument of a const-qualified type. */ |
7881 | if (outputs & (1 << x) && quals & TYPE_QUAL_CONST) |
7882 | { |
7883 | if (c_dialect_cxx ()) |
7884 | { |
7885 | if (complain) |
7886 | error_at (loc, |
7887 | "argument %d of %qE must not be a pointer to " |
7888 | "a %<const%> type", |
7889 | x + 1, function); |
7890 | return 0; |
7891 | } |
7892 | else |
7893 | pedwarn (loc, OPT_Wdiscarded_qualifiers, "argument %d " |
7894 | "of %qE discards %<const%> qualifier", x + 1, |
7895 | function); |
7896 | } |
7897 | /* Only the first argument is allowed to be volatile. */ |
7898 | if (x > 0 && quals & TYPE_QUAL_VOLATILE) |
7899 | { |
7900 | if (c_dialect_cxx ()) |
7901 | { |
7902 | if (complain) |
7903 | error_at (loc, |
7904 | "argument %d of %qE must not be a pointer to " |
7905 | "a %<volatile%> type", |
7906 | x + 1, function); |
7907 | return 0; |
7908 | } |
7909 | else |
7910 | pedwarn (loc, OPT_Wdiscarded_qualifiers, "argument %d " |
7911 | "of %qE discards %<volatile%> qualifier", x + 1, |
7912 | function); |
7913 | } |
7914 | } |
7915 | } |
7916 | |
7917 | /* Check memory model parameters for validity. */ |
7918 | for (x = n_param - n_model ; x < n_param; x++) |
7919 | { |
7920 | tree p = (*params)[x]; |
7921 | if (!INTEGRAL_TYPE_P (TREE_TYPE (p))) |
7922 | { |
7923 | if (complain) |
7924 | error_at (loc, "non-integer memory model argument %d of %qE", x + 1, |
7925 | function); |
7926 | return 0; |
7927 | } |
7928 | p = fold_for_warn (p); |
7929 | if (TREE_CODE (p) == INTEGER_CST) |
7930 | { |
7931 | /* memmodel_base masks the low 16 bits, thus ignore any bits above |
7932 | it by using TREE_INT_CST_LOW instead of tree_to_*hwi. Those high |
7933 | bits will be checked later during expansion in target specific |
7934 | way. */ |
7935 | if (memmodel_base (TREE_INT_CST_LOW (p)) >= MEMMODEL_LAST) |
7936 | { |
7937 | if (complain) |
7938 | warning_at (loc, OPT_Winvalid_memory_model, |
7939 | "invalid memory model argument %d of %qE", x + 1, |
7940 | function); |
7941 | else |
7942 | return 0; |
7943 | } |
7944 | } |
7945 | } |
7946 | |
7947 | return size_0; |
7948 | } |
7949 | |
7950 | |
7951 | /* This will take an __atomic_ generic FUNCTION call, and add a size parameter N |
7952 | at the beginning of the parameter list PARAMS representing the size of the |
7953 | objects. This is to match the library ABI requirement. LOC is the location |
7954 | of the function call. |
7955 | The new function is returned if it needed rebuilding, otherwise NULL_TREE is |
7956 | returned to allow the external call to be constructed. */ |
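/* Sketch: for a 32-byte object, __atomic_exchange (p, d, r, m) becomes the
   library-style call __atomic_exchange (32, p, d, r, m); the size 32 here
   is purely illustrative.  */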
7957 | |
7958 | static tree |
7959 | add_atomic_size_parameter (unsigned n, location_t loc, tree function, |
7960 | vec<tree, va_gc> *params) |
7961 | { |
7962 | tree size_node; |
7963 | |
7964 | /* Insert a SIZE_T parameter as the first param. If there isn't |
7965 | enough space, allocate a new vector and recursively re-build with that. */ |
7966 | if (!params->space (1)) |
7967 | { |
7968 | unsigned int z, len; |
7969 | vec<tree, va_gc> *v; |
7970 | tree f; |
7971 | |
7972 | len = params->length (); |
7973 | vec_alloc (v, len + 1); |
7974 | v->quick_push (build_int_cst (size_type_node, n)); |
7975 | for (z = 0; z < len; z++) |
7976 | v->quick_push ((*params)[z]); |
7977 | f = build_function_call_vec (loc, vNULL, function, v, NULL); |
7978 | vec_free (v); |
7979 | return f; |
7980 | } |
7981 | |
7982 | /* Add the size parameter and leave as a function call for processing. */ |
7983 | size_node = build_int_cst (size_type_node, n); |
7984 | params->quick_insert (0, size_node); |
7985 | return NULL_TREE; |
7986 | } |
7987 | |
7988 | |
7989 | /* Return whether atomic operations for naturally aligned N-byte |
7990 | arguments are supported, whether inline or through libatomic. */ |
7991 | static bool |
7992 | atomic_size_supported_p (int n) |
7993 | { |
7994 | switch (n) |
7995 | { |
7996 | case 1: |
7997 | case 2: |
7998 | case 4: |
7999 | case 8: |
8000 | return true; |
8001 | |
8002 | case 16: |
8003 | return targetm.scalar_mode_supported_p (TImode); |
8004 | |
8005 | default: |
8006 | return false; |
8007 | } |
8008 | } |
8009 | |
8010 | /* This will process an __atomic_exchange function call, determine whether it |
8011 | needs to be mapped to the _N variation, or turned into a library call. |
8012 | LOC is the location of the builtin call. |
8013 | FUNCTION is the DECL that has been invoked; |
8014 | PARAMS is the argument list for the call. |
8015 | TRUE is returned if the call is translated into the proper format for a |
8016 | call to the external library, and NEW_RETURN is set to the tree for that |
8017 | call.  FALSE is returned if processing for the _N variation is required, |
8018 | and NEW_RETURN is set to the return value the result is copied into. */ |
8019 | static bool |
8020 | resolve_overloaded_atomic_exchange (location_t loc, tree function, |
8021 | vec<tree, va_gc> *params, tree *new_return, |
8022 | bool complain) |
8023 | { |
8024 | tree p0, p1, p2, p3; |
8025 | tree I_type, I_type_ptr; |
8026 | int n = get_atomic_generic_size (loc, function, params, complain); |
8027 | |
8028 | /* Size of 0 is an error condition. */ |
8029 | if (n == 0) |
8030 | { |
8031 | *new_return = error_mark_node; |
8032 | return true; |
8033 | } |
8034 | |
8035 | /* If not a lock-free size, change to the library generic format. */ |
8036 | if (!atomic_size_supported_p (n)) |
8037 | { |
8038 | *new_return = add_atomic_size_parameter (n, loc, function, params); |
8039 | return true; |
8040 | } |
8041 | |
8042 | /* Otherwise there is a lockfree match, transform the call from: |
8043 | void fn(T* mem, T* desired, T* return, model) |
8044 | into |
8045 | *return = (T) (fn (In* mem, (In) *desired, model)) */ |
8046 | |
8047 | p0 = (*params)[0]; |
8048 | p1 = (*params)[1]; |
8049 | p2 = (*params)[2]; |
8050 | p3 = (*params)[3]; |
8051 | |
8052 | /* Create pointer to appropriate size. */ |
8053 | I_type = builtin_type_for_size (BITS_PER_UNIT * n, 1); |
8054 | I_type_ptr = build_pointer_type (I_type); |
8055 | |
8056 | /* Convert object pointer to required type. */ |
8057 | p0 = build1 (VIEW_CONVERT_EXPR, I_type_ptr, p0); |
8058 | (*params)[0] = p0; |
8059 | /* Convert new value to required type, and dereference it. |
8060 | If *p1 type can have padding or may involve floating point which |
8061 | could e.g. be promoted to wider precision and demoted afterwards, |
8062 | state of padding bits might not be preserved. */ |
8063 | build_indirect_ref (loc, p1, RO_UNARY_STAR); |
8064 | p1 = build2_loc (loc, MEM_REF, I_type, |
8065 | build1 (VIEW_CONVERT_EXPR, I_type_ptr, p1), |
8066 | build_zero_cst (TREE_TYPE (p1))); |
8067 | (*params)[1] = p1; |
8068 | |
8069 | /* Move memory model to the 3rd position, and end param list. */ |
8070 | (*params)[2] = p3; |
8071 | params->truncate (3); |
8072 | |
8073 | /* Convert return pointer and dereference it for later assignment. */ |
8074 | *new_return = build_indirect_ref (loc, p2, RO_UNARY_STAR); |
8075 | |
8076 | return false; |
8077 | } |
8078 | |
8079 | /* This will process an __atomic_compare_exchange function call, determine |
8080 | whether it needs to be mapped to the _N variation, or turned into a lib call. |
8081 | LOC is the location of the builtin call. |
8082 | FUNCTION is the DECL that has been invoked; |
8083 | PARAMS is the argument list for the call. |
8084 | TRUE is returned if the call is translated into the proper format for a |
8085 | call to the external library, and NEW_RETURN is set to the tree for that |
8086 | call.  FALSE is returned if processing for the _N variation is required. */ |
8087 | |
8088 | static bool |
8089 | resolve_overloaded_atomic_compare_exchange (location_t loc, tree function, |
8090 | vec<tree, va_gc> *params, |
8091 | tree *new_return, bool complain) |
8092 | { |
8093 | tree p0, p1, p2; |
8094 | tree I_type, I_type_ptr; |
8095 | int n = get_atomic_generic_size (loc, function, params, complain); |
8096 | |
8097 | /* Size of 0 is an error condition. */ |
8098 | if (n == 0) |
8099 | { |
8100 | *new_return = error_mark_node; |
8101 | return true; |
8102 | } |
8103 | |
8104 | /* If not a lock-free size, change to the library generic format. */ |
8105 | if (!atomic_size_supported_p (n)) |
8106 | { |
8107 | /* The library generic format does not have the weak parameter, so |
8108 | remove it from the param list. Since a parameter has been removed, |
8109 | we can be sure that there is room for the SIZE_T parameter, meaning |
8110 | there will not be a recursive rebuilding of the parameter list, so |
8111 | there is no danger this will be done twice. */ |
8112 | if (n > 0) |
8113 | { |
8114 | (*params)[3] = (*params)[4]; |
8115 | (*params)[4] = (*params)[5]; |
8116 | params->truncate (5); |
8117 | } |
8118 | *new_return = add_atomic_size_parameter (n, loc, function, params); |
8119 | return true; |
8120 | } |
8121 | |
8122 | /* Otherwise, there is a match, so the call needs to be transformed from: |
8123 | bool fn(T* mem, T* desired, T* return, weak, success, failure) |
8124 | into |
8125 | bool fn ((In *)mem, (In *)expected, (In) *desired, weak, succ, fail) */ |
8126 | |
8127 | p0 = (*params)[0]; |
8128 | p1 = (*params)[1]; |
8129 | p2 = (*params)[2]; |
8130 | |
8131 | /* Create pointer to appropriate size. */ |
8132 | I_type = builtin_type_for_size (BITS_PER_UNIT * n, 1); |
8133 | I_type_ptr = build_pointer_type (I_type); |
8134 | |
8135 | /* Convert object pointer to required type. */ |
8136 | p0 = build1 (VIEW_CONVERT_EXPR, I_type_ptr, p0); |
8137 | (*params)[0] = p0; |
8138 | |
8139 | /* Convert expected pointer to required type. */ |
8140 | p1 = build1 (VIEW_CONVERT_EXPR, I_type_ptr, p1); |
8141 | (*params)[1] = p1; |
8142 | |
8143 | /* Convert desired value to required type, and dereference it. |
8144 | If *p2 type can have padding or may involve floating point which |
8145 | could e.g. be promoted to wider precision and demoted afterwards, |
8146 | state of padding bits might not be preserved. */ |
8147 | build_indirect_ref (loc, p2, RO_UNARY_STAR); |
8148 | p2 = build2_loc (loc, MEM_REF, I_type, |
8149 | build1 (VIEW_CONVERT_EXPR, I_type_ptr, p2), |
8150 | build_zero_cst (TREE_TYPE (p2))); |
8151 | (*params)[2] = p2; |
8152 | |
8153 | /* The rest of the parameters are fine.  NULL means no special return value |
8154 | processing.  */ |
8155 | *new_return = NULL; |
8156 | return false; |
8157 | } |
8158 | |
8159 | /* This will process an __atomic_load function call, determine whether it |
8160 | needs to be mapped to the _N variation, or turned into a library call. |
8161 | LOC is the location of the builtin call. |
8162 | FUNCTION is the DECL that has been invoked; |
8163 | PARAMS is the argument list for the call. |
8164 | TRUE is returned if the call is translated into the proper format for a |
8165 | call to the external library, and NEW_RETURN is set to the tree for that |
8166 | call.  FALSE is returned if processing for the _N variation is required, |
8167 | and NEW_RETURN is set to the return value the result is copied into. */ |
8168 | |
8169 | static bool |
8170 | resolve_overloaded_atomic_load (location_t loc, tree function, |
8171 | vec<tree, va_gc> *params, tree *new_return, |
8172 | bool complain) |
8173 | { |
8174 | tree p0, p1, p2; |
8175 | tree I_type, I_type_ptr; |
8176 | int n = get_atomic_generic_size (loc, function, params, complain); |
8177 | |
8178 | /* Size of 0 is an error condition. */ |
8179 | if (n == 0) |
8180 | { |
8181 | *new_return = error_mark_node; |
8182 | return true; |
8183 | } |
8184 | |
8185 | /* If not a lock-free size, change to the library generic format. */ |
8186 | if (!atomic_size_supported_p (n)) |
8187 | { |
8188 | *new_return = add_atomic_size_parameter (n, loc, function, params); |
8189 | return true; |
8190 | } |
8191 | |
8192 | /* Otherwise, there is a match, so the call needs to be transformed from: |
8193 | void fn(T* mem, T* return, model) |
8194 | into |
8195 | *return = (T) (fn ((In *) mem, model)) */ |
8196 | |
8197 | p0 = (*params)[0]; |
8198 | p1 = (*params)[1]; |
8199 | p2 = (*params)[2]; |
8200 | |
8201 | /* Create pointer to appropriate size. */ |
8202 | I_type = builtin_type_for_size (BITS_PER_UNIT * n, 1); |
8203 | I_type_ptr = build_pointer_type (I_type); |
8204 | |
8205 | /* Convert object pointer to required type. */ |
8206 | p0 = build1 (VIEW_CONVERT_EXPR, I_type_ptr, p0); |
8207 | (*params)[0] = p0; |
8208 | |
8209 | /* Move memory model to the 2nd position, and end param list. */ |
8210 | (*params)[1] = p2; |
8211 | params->truncate (2); |
8212 | |
8213 | /* Convert return pointer and dereference it for later assignment. */ |
8214 | *new_return = build_indirect_ref (loc, p1, RO_UNARY_STAR); |
8215 | |
8216 | return false; |
8217 | } |
8218 | |
8219 | /* This will process an __atomic_store function call, determine whether it |
8220 | needs to be mapped to the _N variation, or turned into a library call. |
8221 | LOC is the location of the builtin call. |
8222 | FUNCTION is the DECL that has been invoked; |
8223 | PARAMS is the argument list for the call. |
8224 | TRUE is returned if the call is translated into the proper format for a |
8225 | call to the external library, and NEW_RETURN is set to the tree for that |
8226 | call.  FALSE is returned if processing for the _N variation is required; |
8227 | since the store itself returns void, NEW_RETURN is then set to NULL_TREE. */ |
8228 | |
8229 | static bool |
8230 | resolve_overloaded_atomic_store (location_t loc, tree function, |
8231 | vec<tree, va_gc> *params, tree *new_return, |
8232 | bool complain) |
8233 | { |
8234 | tree p0, p1; |
8235 | tree I_type, I_type_ptr; |
8236 | int n = get_atomic_generic_size (loc, function, params, complain); |
8237 | |
8238 | /* Size of 0 is an error condition. */ |
8239 | if (n == 0) |
8240 | { |
8241 | *new_return = error_mark_node; |
8242 | return true; |
8243 | } |
8244 | |
8245 | /* If not a lock-free size, change to the library generic format. */ |
8246 | if (!atomic_size_supported_p (n)) |
8247 | { |
8248 | *new_return = add_atomic_size_parameter (n, loc, function, params); |
8249 | return true; |
8250 | } |
8251 | |
8252 | /* Otherwise, there is a match, so the call needs to be transformed from: |
8253 | void fn(T* mem, T* value, model) |
8254 | into |
8255 | fn ((In *) mem, (In) *value, model) */ |
8256 | |
8257 | p0 = (*params)[0]; |
8258 | p1 = (*params)[1]; |
8259 | |
8260 | /* Create pointer to appropriate size. */ |
8261 | I_type = builtin_type_for_size (BITS_PER_UNIT * n, 1); |
8262 | I_type_ptr = build_pointer_type (I_type); |
8263 | |
8264 | /* Convert object pointer to required type. */ |
8265 | p0 = build1 (VIEW_CONVERT_EXPR, I_type_ptr, p0); |
8266 | (*params)[0] = p0; |
8267 | |
8268 | /* Convert new value to required type, and dereference it. */ |
8269 | p1 = build_indirect_ref (loc, p1, RO_UNARY_STAR); |
8270 | p1 = build1 (VIEW_CONVERT_EXPR, I_type, p1); |
8271 | (*params)[1] = p1; |
8272 | |
8273 | /* The memory model is in the right spot already. Return is void. */ |
8274 | *new_return = NULL_TREE; |
8275 | |
8276 | return false; |
8277 | } |
8278 | |
8279 | /* Emit __atomic*fetch* on _BitInt which doesn't have a size of |
8280 | 1, 2, 4, 8 or 16 bytes using __atomic_compare_exchange loop. |
8281 | ORIG_CODE is the DECL_FUNCTION_CODE of ORIG_FUNCTION and |
8282 | ORIG_PARAMS arguments of the call. */ |
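/* Rough shape of the emitted expansion (a sketch in C-like notation):
     __atomic_load (addr, &old, ...);
   loop:
     newval = old OP val;
     if (__atomic_compare_exchange (addr, &old, &newval, 0, model, model2))
       goto done;
     goto loop;
   done:
     result = return_old_p ? old : newval;  */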
8283 | |
8284 | static tree |
8285 | atomic_bitint_fetch_using_cas_loop (location_t loc, |
8286 | enum built_in_function orig_code, |
8287 | tree orig_function, |
8288 | vec<tree, va_gc> *orig_params) |
8289 | { |
8290 | enum tree_code code = ERROR_MARK; |
8291 | bool return_old_p = false; |
8292 | switch (orig_code) |
8293 | { |
8294 | case BUILT_IN_ATOMIC_ADD_FETCH_N: |
8295 | code = PLUS_EXPR; |
8296 | break; |
8297 | case BUILT_IN_ATOMIC_SUB_FETCH_N: |
8298 | code = MINUS_EXPR; |
8299 | break; |
8300 | case BUILT_IN_ATOMIC_AND_FETCH_N: |
8301 | code = BIT_AND_EXPR; |
8302 | break; |
8303 | case BUILT_IN_ATOMIC_NAND_FETCH_N: |
8304 | break; |
8305 | case BUILT_IN_ATOMIC_XOR_FETCH_N: |
8306 | code = BIT_XOR_EXPR; |
8307 | break; |
8308 | case BUILT_IN_ATOMIC_OR_FETCH_N: |
8309 | code = BIT_IOR_EXPR; |
8310 | break; |
8311 | case BUILT_IN_ATOMIC_FETCH_ADD_N: |
8312 | code = PLUS_EXPR; |
8313 | return_old_p = true; |
8314 | break; |
8315 | case BUILT_IN_ATOMIC_FETCH_SUB_N: |
8316 | code = MINUS_EXPR; |
8317 | return_old_p = true; |
8318 | break; |
8319 | case BUILT_IN_ATOMIC_FETCH_AND_N: |
8320 | code = BIT_AND_EXPR; |
8321 | return_old_p = true; |
8322 | break; |
8323 | case BUILT_IN_ATOMIC_FETCH_NAND_N: |
8324 | return_old_p = true; |
8325 | break; |
8326 | case BUILT_IN_ATOMIC_FETCH_XOR_N: |
8327 | code = BIT_XOR_EXPR; |
8328 | return_old_p = true; |
8329 | break; |
8330 | case BUILT_IN_ATOMIC_FETCH_OR_N: |
8331 | code = BIT_IOR_EXPR; |
8332 | return_old_p = true; |
8333 | break; |
8334 | default: |
8335 | gcc_unreachable (); |
8336 | } |
8337 | |
8338 | if (orig_params->length () != 3) |
8339 | { |
8340 | if (orig_params->length () < 3) |
8341 | error_at (loc, "too few arguments to function %qE", orig_function); |
8342 | else |
8343 | error_at (loc, "too many arguments to function %qE", orig_function); |
8344 | return error_mark_node; |
8345 | } |
8346 | |
8347 | tree stmts = push_stmt_list (); |
8348 | |
8349 | tree nonatomic_lhs_type = TREE_TYPE (TREE_TYPE ((*orig_params)[0])); |
8350 | nonatomic_lhs_type = TYPE_MAIN_VARIANT (nonatomic_lhs_type); |
8351 | gcc_assert (TREE_CODE (nonatomic_lhs_type) == BITINT_TYPE); |
8352 | |
8353 | tree lhs_addr = (*orig_params)[0]; |
8354 | tree val = convert (nonatomic_lhs_type, (*orig_params)[1]); |
8355 | tree model = convert (integer_type_node, (*orig_params)[2]); |
8356 | if (!c_dialect_cxx ()) |
8357 | { |
8358 | lhs_addr = c_fully_fold (lhs_addr, false, NULL); |
8359 | val = c_fully_fold (val, false, NULL); |
8360 | model = c_fully_fold (model, false, NULL); |
8361 | } |
8362 | if (TREE_SIDE_EFFECTS (lhs_addr)) |
8363 | { |
8364 | tree var = create_tmp_var_raw (TREE_TYPE (lhs_addr)); |
8365 | lhs_addr = build4 (TARGET_EXPR, TREE_TYPE (lhs_addr), var, lhs_addr, |
8366 | NULL_TREE, NULL_TREE); |
8367 | add_stmt (lhs_addr); |
8368 | } |
8369 | if (TREE_SIDE_EFFECTS (val)) |
8370 | { |
8371 | tree var = create_tmp_var_raw (nonatomic_lhs_type); |
8372 | val = build4 (TARGET_EXPR, nonatomic_lhs_type, var, val, NULL_TREE, |
8373 | NULL_TREE); |
8374 | add_stmt (val); |
8375 | } |
8376 | if (TREE_SIDE_EFFECTS (model)) |
8377 | { |
8378 | tree var = create_tmp_var_raw (integer_type_node); |
8379 | model = build4 (TARGET_EXPR, integer_type_node, var, model, NULL_TREE, |
8380 | NULL_TREE); |
8381 | add_stmt (model); |
8382 | } |
8383 | |
8384 | tree old = create_tmp_var_raw (nonatomic_lhs_type); |
8385 | tree old_addr = build_unary_op (loc, ADDR_EXPR, old, false); |
8386 | TREE_ADDRESSABLE (old) = 1; |
8387 | suppress_warning (old); |
8388 | |
8389 | tree newval = create_tmp_var_raw (nonatomic_lhs_type); |
8390 | tree newval_addr = build_unary_op (loc, ADDR_EXPR, newval, false); |
8391 | TREE_ADDRESSABLE (newval) = 1; |
8392 | suppress_warning (newval); |
8393 | |
8394 | tree loop_decl = create_artificial_label (loc); |
8395 | tree loop_label = build1 (LABEL_EXPR, void_type_node, loop_decl); |
8396 | |
8397 | tree done_decl = create_artificial_label (loc); |
8398 | tree done_label = build1 (LABEL_EXPR, void_type_node, done_decl); |
8399 | |
8400 | vec<tree, va_gc> *params; |
8401 | vec_alloc (params, 6); |
8402 | |
8403 | /* __atomic_load (addr, &old, SEQ_CST). */ |
8404 | tree fndecl = builtin_decl_explicit (BUILT_IN_ATOMIC_LOAD); |
8405 | params->quick_push (lhs_addr); |
8406 | params->quick_push (old_addr); |
8407 | params->quick_push (build_int_cst (integer_type_node, MEMMODEL_RELAXED)); |
8408 | tree func_call = resolve_overloaded_builtin (loc, fndecl, params); |
8409 | if (func_call == NULL_TREE) |
8410 | func_call = build_function_call_vec (loc, vNULL, fndecl, params, NULL); |
8411 | old = build4 (TARGET_EXPR, nonatomic_lhs_type, old, func_call, NULL_TREE, |
8412 | NULL_TREE); |
8413 | add_stmt (old); |
8414 | params->truncate (0); |
8415 | |
8416 | /* loop: */ |
8417 | add_stmt (loop_label); |
8418 | |
8419 | /* newval = old + val; */ |
8420 | tree rhs; |
8421 | switch (code) |
8422 | { |
8423 | case PLUS_EXPR: |
8424 | case MINUS_EXPR: |
8425 | if (!TYPE_OVERFLOW_WRAPS (nonatomic_lhs_type)) |
8426 | { |
8427 | tree utype |
8428 | = build_bitint_type (TYPE_PRECISION (nonatomic_lhs_type), 1); |
8429 | rhs = convert (nonatomic_lhs_type, |
8430 | build2_loc (loc, code, utype, |
8431 | convert (utype, old), |
8432 | convert (utype, val))); |
8433 | } |
8434 | else |
8435 | rhs = build2_loc (loc, code, nonatomic_lhs_type, old, val); |
8436 | break; |
8437 | case BIT_AND_EXPR: |
8438 | case BIT_IOR_EXPR: |
8439 | case BIT_XOR_EXPR: |
8440 | rhs = build2_loc (loc, code, nonatomic_lhs_type, old, val); |
8441 | break; |
8442 | case ERROR_MARK: |
8443 | rhs = build2_loc (loc, BIT_AND_EXPR, nonatomic_lhs_type, |
8444 | build1_loc (loc, BIT_NOT_EXPR, |
8445 | nonatomic_lhs_type, old), val); |
8446 | break; |
8447 | default: |
8448 | gcc_unreachable (); |
8449 | } |
8450 | rhs = build4 (TARGET_EXPR, nonatomic_lhs_type, newval, rhs, NULL_TREE, |
8451 | NULL_TREE); |
8452 | SET_EXPR_LOCATION (rhs, loc); |
8453 | add_stmt (rhs); |
8454 | |
8455 | /* if (__atomic_compare_exchange (addr, &old, &new, false, model, model)) |
8456 | goto done; */ |
8457 | fndecl = builtin_decl_explicit (BUILT_IN_ATOMIC_COMPARE_EXCHANGE); |
8458 | params->quick_push (lhs_addr); |
8459 | params->quick_push (old_addr); |
8460 | params->quick_push (newval_addr); |
8461 | params->quick_push (integer_zero_node); |
8462 | params->quick_push (model); |
8463 | if (tree_fits_uhwi_p (model) |
8464 | && (tree_to_uhwi (model) == MEMMODEL_RELEASE |
8465 | || tree_to_uhwi (model) == MEMMODEL_ACQ_REL)) |
8466 | params->quick_push (build_int_cst (integer_type_node, MEMMODEL_RELAXED)); |
8467 | else |
8468 | params->quick_push (model); |
8469 | func_call = resolve_overloaded_builtin (loc, fndecl, params); |
8470 | if (func_call == NULL_TREE) |
8471 | func_call = build_function_call_vec (loc, vNULL, fndecl, params, NULL); |
8472 | |
8473 | tree goto_stmt = build1 (GOTO_EXPR, void_type_node, done_decl); |
8474 | SET_EXPR_LOCATION (goto_stmt, loc); |
8475 | |
8476 | tree stmt |
8477 | = build3 (COND_EXPR, void_type_node, func_call, goto_stmt, NULL_TREE); |
8478 | SET_EXPR_LOCATION (stmt, loc); |
8479 | add_stmt (stmt); |
8480 | |
8481 | /* goto loop; */ |
8482 | goto_stmt = build1 (GOTO_EXPR, void_type_node, loop_decl); |
8483 | SET_EXPR_LOCATION (goto_stmt, loc); |
8484 | add_stmt (goto_stmt); |
8485 | |
8486 | /* done: */ |
8487 | add_stmt (done_label); |
8488 | |
8489 | tree ret = create_tmp_var_raw (nonatomic_lhs_type); |
8490 | stmt = build2_loc (loc, MODIFY_EXPR, void_type_node, ret, |
8491 | return_old_p ? old : newval); |
8492 | add_stmt (stmt); |
8493 | |
8494 | /* Finish the compound statement. */ |
8495 | stmts = pop_stmt_list (stmts); |
8496 | |
8497 | return build4 (TARGET_EXPR, nonatomic_lhs_type, ret, stmts, NULL_TREE, |
8498 | NULL_TREE); |
8499 | } |
8500 | |
8501 | |
8502 | /* Some builtin functions are placeholders for other expressions. This |
8503 | function should be called immediately after parsing the call expression |
8504 | before surrounding code has committed to the type of the expression. |
8505 | |
8506 | LOC is the location of the builtin call. |
8507 | |
8508 | FUNCTION is the DECL that has been invoked; it is known to be a builtin. |
8509 | PARAMS is the argument list for the call. The return value is non-null |
8510 | when expansion is complete, and null if normal processing should |
8511 | continue. */ |
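/* For instance (illustrative): with int x, y; the generic call
     __atomic_load (&x, &y, __ATOMIC_ACQUIRE);
   is resolved here roughly as
     y = __atomic_load_4 (&x, __ATOMIC_ACQUIRE);
   while a size with no lock-free support is rewritten into the generic
   library form with a leading size argument instead.  */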
8512 | |
8513 | tree |
8514 | resolve_overloaded_builtin (location_t loc, tree function, |
8515 | vec<tree, va_gc> *params, bool complain) |
8516 | { |
8517 | /* Is function one of the _FETCH_OP_ or _OP_FETCH_ built-ins? |
8518 | Those are not valid to call with a pointer to _Bool (or C++ bool) |
8519 | and so must be rejected. */ |
8520 | bool fetch_op = true; |
8521 | bool orig_format = true; |
8522 | tree new_return = NULL_TREE; |
8523 | |
8524 | switch (DECL_BUILT_IN_CLASS (function)) |
8525 | { |
8526 | case BUILT_IN_NORMAL: |
8527 | break; |
8528 | case BUILT_IN_MD: |
8529 | if (targetm.resolve_overloaded_builtin) |
8530 | return targetm.resolve_overloaded_builtin (loc, function, params, |
8531 | complain); |
8532 | else |
8533 | return NULL_TREE; |
8534 | default: |
8535 | return NULL_TREE; |
8536 | } |
8537 | |
8538 | /* Handle BUILT_IN_NORMAL here. */ |
8539 | enum built_in_function orig_code = DECL_FUNCTION_CODE (function); |
8540 | switch (orig_code) |
8541 | { |
8542 | case BUILT_IN_SPECULATION_SAFE_VALUE_N: |
8543 | { |
8544 | tree new_function, first_param, result; |
8545 | enum built_in_function fncode |
8546 | = speculation_safe_value_resolve_call (function, params, complain); |
8547 | |
8548 | if (fncode == BUILT_IN_NONE) |
8549 | return error_mark_node; |
8550 | |
8551 | first_param = (*params)[0]; |
8552 | if (!speculation_safe_value_resolve_params (loc, function, params, |
8553 | complain)) |
8554 | return error_mark_node; |
8555 | |
8556 | if (targetm.have_speculation_safe_value (true)) |
8557 | { |
8558 | new_function = builtin_decl_explicit (fncode); |
8559 | result = build_function_call_vec (loc, vNULL, new_function, params, |
8560 | NULL); |
8561 | |
8562 | if (result == error_mark_node) |
8563 | return result; |
8564 | |
8565 | return speculation_safe_value_resolve_return (first_param, result); |
8566 | } |
8567 | else |
8568 | { |
8569 | /* This target doesn't have, or doesn't need, active mitigation |
8570 | against incorrect speculative execution. Simply return the |
8571 | first parameter to the builtin. */ |
8572 | if (!targetm.have_speculation_safe_value (false)) |
8573 | { |
8574 | if (complain) |
8575 | /* The user has invoked __builtin_speculation_safe_value |
8576 | even though __HAVE_SPECULATION_SAFE_VALUE is not |
8577 | defined: emit a warning. */ |
8578 | warning_at ( |
8579 | input_location, 0, |
8580 | "this target does not define a speculation barrier; " |
8581 | "your program will still execute correctly, " |
8582 | "but incorrect speculation may not be " |
8583 | "restricted"); |
8584 | else |
8585 | return error_mark_node; |
8586 | } |
8587 | |
8588 | /* If the optional second argument is present, handle any side |
8589 | effects now. */ |
8590 | if (params->length () == 2 |
8591 | && TREE_SIDE_EFFECTS ((*params)[1])) |
8592 | return build2 (COMPOUND_EXPR, TREE_TYPE (first_param), |
8593 | (*params)[1], first_param); |
8594 | |
8595 | return first_param; |
8596 | } |
8597 | } |
8598 | |
8599 | case BUILT_IN_ATOMIC_EXCHANGE: |
8600 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE: |
8601 | case BUILT_IN_ATOMIC_LOAD: |
8602 | case BUILT_IN_ATOMIC_STORE: |
8603 | { |
8604 | /* Handle these 4 together so that they can fall through to the next |
8605 | case if the call is transformed to an _N variant. */ |
8606 | switch (orig_code) |
8607 | { |
8608 | case BUILT_IN_ATOMIC_EXCHANGE: |
8609 | { |
8610 | if (resolve_overloaded_atomic_exchange (loc, function, params, |
8611 | &new_return, complain)) |
8612 | return new_return; |
8613 | /* Change to the _N variant. */ |
8614 | orig_code = BUILT_IN_ATOMIC_EXCHANGE_N; |
8615 | break; |
8616 | } |
8617 | |
8618 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE: |
8619 | { |
8620 | if (resolve_overloaded_atomic_compare_exchange ( |
8621 | loc, function, params, &new_return, complain)) |
8622 | return new_return; |
8623 | /* Change to the _N variant. */ |
8624 | orig_code = BUILT_IN_ATOMIC_COMPARE_EXCHANGE_N; |
8625 | break; |
8626 | } |
8627 | case BUILT_IN_ATOMIC_LOAD: |
8628 | { |
8629 | if (resolve_overloaded_atomic_load (loc, function, params, |
8630 | &new_return, complain)) |
8631 | return new_return; |
8632 | /* Change to the _N variant. */ |
8633 | orig_code = BUILT_IN_ATOMIC_LOAD_N; |
8634 | break; |
8635 | } |
8636 | case BUILT_IN_ATOMIC_STORE: |
8637 | { |
8638 | if (resolve_overloaded_atomic_store (loc, function, params, |
8639 | &new_return, complain)) |
8640 | return new_return; |
8641 | /* Change to the _N variant. */ |
8642 | orig_code = BUILT_IN_ATOMIC_STORE_N; |
8643 | break; |
8644 | } |
8645 | default: |
8646 | gcc_unreachable (); |
8647 | } |
8648 | } |
8649 | /* FALLTHRU */ |
8650 | case BUILT_IN_ATOMIC_EXCHANGE_N: |
8651 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_N: |
8652 | case BUILT_IN_ATOMIC_LOAD_N: |
8653 | case BUILT_IN_ATOMIC_STORE_N: |
8654 | fetch_op = false; |
8655 | /* FALLTHRU */ |
8656 | case BUILT_IN_ATOMIC_ADD_FETCH_N: |
8657 | case BUILT_IN_ATOMIC_SUB_FETCH_N: |
8658 | case BUILT_IN_ATOMIC_AND_FETCH_N: |
8659 | case BUILT_IN_ATOMIC_NAND_FETCH_N: |
8660 | case BUILT_IN_ATOMIC_XOR_FETCH_N: |
8661 | case BUILT_IN_ATOMIC_OR_FETCH_N: |
8662 | case BUILT_IN_ATOMIC_FETCH_ADD_N: |
8663 | case BUILT_IN_ATOMIC_FETCH_SUB_N: |
8664 | case BUILT_IN_ATOMIC_FETCH_AND_N: |
8665 | case BUILT_IN_ATOMIC_FETCH_NAND_N: |
8666 | case BUILT_IN_ATOMIC_FETCH_XOR_N: |
8667 | case BUILT_IN_ATOMIC_FETCH_OR_N: |
8668 | orig_format = false; |
8669 | /* FALLTHRU */ |
8670 | case BUILT_IN_SYNC_FETCH_AND_ADD_N: |
8671 | case BUILT_IN_SYNC_FETCH_AND_SUB_N: |
8672 | case BUILT_IN_SYNC_FETCH_AND_OR_N: |
8673 | case BUILT_IN_SYNC_FETCH_AND_AND_N: |
8674 | case BUILT_IN_SYNC_FETCH_AND_XOR_N: |
8675 | case BUILT_IN_SYNC_FETCH_AND_NAND_N: |
8676 | case BUILT_IN_SYNC_ADD_AND_FETCH_N: |
8677 | case BUILT_IN_SYNC_SUB_AND_FETCH_N: |
8678 | case BUILT_IN_SYNC_OR_AND_FETCH_N: |
8679 | case BUILT_IN_SYNC_AND_AND_FETCH_N: |
8680 | case BUILT_IN_SYNC_XOR_AND_FETCH_N: |
8681 | case BUILT_IN_SYNC_NAND_AND_FETCH_N: |
8682 | case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_N: |
8683 | case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_N: |
8684 | case BUILT_IN_SYNC_LOCK_TEST_AND_SET_N: |
8685 | case BUILT_IN_SYNC_LOCK_RELEASE_N: |
8686 | { |
8687 | /* The following are not _FETCH_OPs and must be accepted with |
8688 | pointers to _Bool (or C++ bool). */ |
8689 | if (fetch_op) |
8690 | fetch_op = (orig_code != BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_N |
8691 | && orig_code != BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_N |
8692 | && orig_code != BUILT_IN_SYNC_LOCK_TEST_AND_SET_N |
8693 | && orig_code != BUILT_IN_SYNC_LOCK_RELEASE_N); |
8694 | |
8695 | int n = sync_resolve_size (function, params, fetch_op, orig_format, |
8696 | complain); |
8697 | tree new_function, first_param, result; |
8698 | enum built_in_function fncode; |
8699 | |
8700 | if (n == 0) |
8701 | return error_mark_node; |
8702 | |
8703 | if (n == -1) |
8704 | { |
8705 | /* COMPLAIN is related to SFINAE contexts. |
8706 | _BitInt is not defined in C++, so this clause cannot be reached |
8707 | with COMPLAIN unset.  Even so, the COMPLAIN abstraction still |
8708 | makes sense at this level (whether this function should report |
8709 | an error or not if anything is wrong). |
8710 | Since the error-avoiding path cannot be tested while COMPLAIN |
8711 | is false, we do not write that code and instead simply assert |
8712 | that COMPLAIN is set.  */ |
8713 | gcc_assert (complain); |
8714 | return atomic_bitint_fetch_using_cas_loop (loc, orig_code, function, |
8715 | params); |
8716 | } |
8717 | |
8718 | fncode = (enum built_in_function)((int)orig_code + exact_log2 (n) + 1); |
8719 | new_function = builtin_decl_explicit (fncode); |
8720 | if (!sync_resolve_params (loc, function, new_function, params, |
8721 | orig_format, complain)) |
8722 | return error_mark_node; |
8723 | |
8724 | first_param = (*params)[0]; |
8725 | result = build_function_call_vec (loc, vNULL, new_function, params, |
8726 | NULL); |
8727 | if (result == error_mark_node) |
8728 | return result; |
8729 | if (orig_code != BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_N |
8730 | && orig_code != BUILT_IN_SYNC_LOCK_RELEASE_N |
8731 | && orig_code != BUILT_IN_ATOMIC_STORE_N |
8732 | && orig_code != BUILT_IN_ATOMIC_COMPARE_EXCHANGE_N) |
8733 | result = sync_resolve_return (first_param, result, orig_format); |
8734 | |
8735 | if (fetch_op) |
8736 | /* Prevent -Wunused-value warning. */ |
8737 | TREE_USED (result) = true; |
8738 | |
8739 | /* If new_return is set, assign function to that expr and cast the |
8740 | result to void since the generic interface returned void. */ |
8741 | if (new_return) |
8742 | { |
8743 | /* Cast function result from I{1,2,4,8,16} to the required type. */ |
8744 | if (TREE_CODE (TREE_TYPE (new_return)) == BITINT_TYPE) |
8745 | { |
8746 | struct bitint_info info; |
8747 | unsigned prec = TYPE_PRECISION (TREE_TYPE (new_return)); |
8748 | targetm.c.bitint_type_info (prec, &info); |
8749 | if (!info.extended) |
8750 | /* For _BitInt which has the padding bits undefined |
8751 | convert to the _BitInt type rather than VCE to force |
8752 | zero or sign extension. */ |
8753 | result = build1 (NOP_EXPR, TREE_TYPE (new_return), result); |
8754 | } |
8755 | result |
8756 | = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (new_return), result); |
8757 | result = build2 (MODIFY_EXPR, TREE_TYPE (new_return), new_return, |
8758 | result); |
8759 | TREE_SIDE_EFFECTS (result) = 1; |
8760 | protected_set_expr_location (result, loc); |
8761 | result = convert (void_type_node, result); |
8762 | } |
8763 | return result; |
8764 | } |
8765 | |
8766 | default: |
8767 | return NULL_TREE; |
8768 | } |
8769 | } |
8770 | |
8771 | /* vector_types_compatible_elements_p is used in type checks of vector |
8772 | values used as operands of binary operators. Where it returns true, and |
8773 | the other checks of the caller succeed (being vector types in the first |
8774 | place, and matching number of elements), we can just treat the types |
8775 | as essentially the same. |
8776 | Contrast with vector_targets_convertible_p, which is used for vector |
8777 | pointer types, and vector_types_convertible_p, which will allow |
8778 | language-specific matches under the control of flag_lax_vector_conversions, |
8779 | and might still require a conversion. */ |
8780 | /* True if vector types T1 and T2 can be inputs to the same binary |
8781 | operator without conversion. |
8782 | We don't check the overall vector size here because some of our callers |
8783 | want to give different error messages when the vectors are compatible |
8784 | except for the element count. */ |
8785 | |
8786 | bool |
8787 | vector_types_compatible_elements_p (tree t1, tree t2) |
8788 | { |
8789 | bool opaque = TYPE_VECTOR_OPAQUE (t1) || TYPE_VECTOR_OPAQUE (t2); |
8790 | t1 = TREE_TYPE (t1); |
8791 | t2 = TREE_TYPE (t2); |
8792 | |
8793 | enum tree_code c1 = TREE_CODE (t1), c2 = TREE_CODE (t2); |
8794 | |
8795 | gcc_assert ((INTEGRAL_TYPE_P (t1) |
8796 | || c1 == REAL_TYPE |
8797 | || c1 == FIXED_POINT_TYPE) |
8798 | && (INTEGRAL_TYPE_P (t2) |
8799 | || c2 == REAL_TYPE |
8800 | || c2 == FIXED_POINT_TYPE)); |
8801 | |
8802 | t1 = c_common_signed_type (t1); |
8803 | t2 = c_common_signed_type (t2); |
8804 | /* Equality works here because c_common_signed_type uses |
8805 | TYPE_MAIN_VARIANT. */ |
8806 | if (t1 == t2) |
8807 | return true; |
8808 | if (opaque && c1 == c2 |
8809 | && (INTEGRAL_TYPE_P (t1) || c1 == REAL_TYPE) |
8810 | && TYPE_PRECISION (t1) == TYPE_PRECISION (t2)) |
8811 | return true; |
8812 | return false; |
8813 | } |
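| |
| /* A small illustrative sketch of the check above (the typedefs are |
| hypothetical GNU vector types, not declarations from this file): |
| |
| typedef int v4si __attribute__ ((vector_size (16))); |
| typedef unsigned int v4usi __attribute__ ((vector_size (16))); |
| typedef short v8hi __attribute__ ((vector_size (16))); |
| |
| v4si and v4usi have compatible elements, because c_common_signed_type |
| folds the signedness difference away; v4si and v8hi do not, because |
| int and short remain distinct types after that folding. */ |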
8814 | |
8815 | /* Check for missing format attributes on function pointers. LTYPE is |
8816 | the new type or left-hand side type. RTYPE is the old type or |
8817 | right-hand side type. Returns TRUE if LTYPE is missing the desired |
8818 | attribute. */ |
8819 | |
8820 | bool |
8821 | check_missing_format_attribute (tree ltype, tree rtype) |
8822 | { |
8823 | tree const ttr = TREE_TYPE (rtype), ttl = TREE_TYPE (ltype); |
8824 | tree ra; |
8825 | |
8826 | for (ra = TYPE_ATTRIBUTES (ttr); ra; ra = TREE_CHAIN (ra)) |
8827 | if (is_attribute_p ("format", get_attribute_name (ra))) |
8828 | break; |
8829 | if (ra) |
8830 | { |
8831 | tree la; |
8832 | for (la = TYPE_ATTRIBUTES (ttl); la; la = TREE_CHAIN (la)) |
8833 | if (is_attribute_p ("format", get_attribute_name (la))) |
8834 | break; |
8835 | return !la; |
8836 | } |
8837 | else |
8838 | return false; |
8839 | } |
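| |
| /* An illustrative sketch of what this detects; the declarations below |
| are hypothetical user code, not part of GCC: |
| |
| extern void my_log (const char *, ...) |
| __attribute__ ((format (printf, 1, 2))); |
| void (*fp) (const char *, ...) = my_log; |
| |
| Here RTYPE (the type of my_log) carries a "format" attribute but |
| LTYPE (the type of fp) does not, so this function returns true and |
| callers can suggest adding the attribute (e.g. via |
| -Wsuggest-attribute=format). */ |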
8840 | |
8841 | /* Set up a TYPE_DECL node as a typedef representation. |
8842 | |
8843 | X is a TYPE_DECL for a typedef statement. Create a brand new |
8844 | ..._TYPE node (which will be just a variant of the existing |
8845 | ..._TYPE node with identical properties) and then install X |
8846 | as the TYPE_NAME of this brand new (duplicate) ..._TYPE node. |
8847 | |
8848 | The whole point here is to end up with a situation where each |
8849 | and every ..._TYPE node the compiler creates will be uniquely |
8850 | associated with AT MOST one node representing a typedef name. |
8851 | This way, even though the compiler substitutes corresponding |
8852 | ..._TYPE nodes for TYPE_DECL (i.e. "typedef name") nodes very |
8853 | early on, later parts of the compiler can always do the reverse |
8854 | translation and get back the corresponding typedef name. For |
8855 | example, given: |
8856 | |
8857 | typedef struct S MY_TYPE; |
8858 | MY_TYPE object; |
8859 | |
8860 | Later parts of the compiler might only know that `object' was of |
8861 | type `struct S' if it were not for code just below. With this |
8862 | code however, later parts of the compiler see something like: |
8863 | |
8864 | struct S' == struct S |
8865 | typedef struct S' MY_TYPE; |
8866 | struct S' object; |
8867 | |
8868 | And they can then deduce (from the node for type struct S') that |
8869 | the original object declaration was: |
8870 | |
8871 | MY_TYPE object; |
8872 | |
8873 | Being able to do this is important for proper support of protoize, |
8874 | and also for generating precise symbolic debugging information |
8875 | which takes full account of the programmer's (typedef) vocabulary. |
8876 | |
8877 | Obviously, we don't want to generate a duplicate ..._TYPE node if |
8878 | the TYPE_DECL node that we are now processing really represents a |
8879 | standard built-in type. */ |
8880 | |
8881 | void |
8882 | set_underlying_type (tree x) |
8883 | { |
8884 | if (x == error_mark_node || TREE_TYPE (x) == error_mark_node) |
8885 | return; |
8886 | if (DECL_IS_UNDECLARED_BUILTIN (x) && TREE_CODE (TREE_TYPE (x)) != ARRAY_TYPE) |
8887 | { |
8888 | if (TYPE_NAME (TREE_TYPE (x)) == 0) |
8889 | TYPE_NAME (TREE_TYPE (x)) = x; |
8890 | } |
8891 | else if (DECL_ORIGINAL_TYPE (x)) |
8892 | gcc_checking_assert (TYPE_NAME (TREE_TYPE (x)) == x); |
8893 | else |
8894 | { |
8895 | tree tt = TREE_TYPE (x); |
8896 | DECL_ORIGINAL_TYPE (x) = tt; |
8897 | tt = build_variant_type_copy (tt); |
8898 | TYPE_STUB_DECL (tt) = TYPE_STUB_DECL (DECL_ORIGINAL_TYPE (x)); |
8899 | TYPE_NAME (tt) = x; |
8900 | |
8901 | /* Mark the type as used only when its type decl is decorated |
8902 | with attribute unused. */ |
8903 | if (lookup_attribute ("unused", DECL_ATTRIBUTES (x))) |
8904 | TREE_USED (tt) = 1; |
8905 | |
8906 | TREE_TYPE (x) = tt; |
8907 | } |
8908 | } |
8909 | |
8910 | /* Return true if it is worth exposing the DECL_ORIGINAL_TYPE of TYPE to |
8911 | the user in diagnostics, false if it would be better to use TYPE itself. |
8912 | TYPE is known to satisfy typedef_variant_p. */ |
8913 | |
8914 | bool |
8915 | user_facing_original_type_p (const_tree type) |
8916 | { |
8917 | gcc_assert (typedef_variant_p (type)); |
8918 | tree decl = TYPE_NAME (type); |
8919 | |
8920 | /* Look through any typedef in "user" code. */ |
8921 | if (!DECL_IN_SYSTEM_HEADER (decl) && !DECL_IS_UNDECLARED_BUILTIN (decl)) |
8922 | return true; |
8923 | |
8924 | /* If the original type is also named and is in the user namespace, |
8925 | assume it too is a user-facing type. */ |
8926 | tree orig_type = DECL_ORIGINAL_TYPE (decl); |
8927 | if (tree orig_id = TYPE_IDENTIFIER (orig_type)) |
8928 | if (!name_reserved_for_implementation_p (IDENTIFIER_POINTER (orig_id))) |
8929 | return true; |
8930 | |
8931 | switch (TREE_CODE (orig_type)) |
8932 | { |
8933 | /* Don't look through to an anonymous vector type, since the syntax |
8934 | we use for them in diagnostics isn't real C or C++ syntax. |
8935 | And if ORIG_TYPE is named but in the implementation namespace, |
8936 | TYPE is likely to be more meaningful to the user. */ |
8937 | case VECTOR_TYPE: |
8938 | return false; |
8939 | |
8940 | /* Don't expose anonymous tag types that are presumably meant to be |
8941 | known by their typedef name. Also don't expose tags that are in |
8942 | the implementation namespace, such as: |
8943 | |
8944 | typedef struct __foo foo; */ |
8945 | case RECORD_TYPE: |
8946 | case UNION_TYPE: |
8947 | case ENUMERAL_TYPE: |
8948 | return false; |
8949 | |
8950 | /* Look through to anything else. */ |
8951 | default: |
8952 | return true; |
8953 | } |
8954 | } |
8955 | |
8956 | /* Record the types used by the current global variable declaration |
8957 | being parsed, so that we can decide later to emit their debug info. |
8958 | Those types are in types_used_by_cur_var_decl, and we are going to |
8959 | store them in the types_used_by_vars_hash hash table. |
8960 | DECL is the declaration of the global variable that has been parsed. */ |
8961 | |
8962 | void |
8963 | record_types_used_by_current_var_decl (tree decl) |
8964 | { |
8965 | gcc_assert (decl && DECL_P (decl) && TREE_STATIC (decl)); |
8966 | |
8967 | while (types_used_by_cur_var_decl && !types_used_by_cur_var_decl->is_empty ()) |
8968 | { |
8969 | tree type = types_used_by_cur_var_decl->pop (); |
8970 | types_used_by_var_decl_insert (type, decl); |
8971 | } |
8972 | } |
8973 | |
8974 | /* The C and C++ parsers both use vectors to hold function arguments. |
8975 | For efficiency, we keep a cache of unused vectors. This is the |
8976 | cache. */ |
8977 | |
8978 | typedef vec<tree, va_gc> *tree_gc_vec; |
8979 | static GTY((deletable)) vec<tree_gc_vec, va_gc> *tree_vector_cache; |
8980 | |
8981 | /* Return a new vector from the cache. If the cache is empty, |
8982 | allocate a new vector. These vectors are GC'ed, so it is OK if the |
8983 | pointer is not released. */ |
8984 | |
8985 | vec<tree, va_gc> * |
8986 | make_tree_vector (void) |
8987 | { |
8988 | if (tree_vector_cache && !tree_vector_cache->is_empty ()) |
8989 | return tree_vector_cache->pop (); |
8990 | else |
8991 | { |
8992 | /* Passing 0 to vec::alloc returns NULL, and our callers require |
8993 | that we always return a non-NULL value. The vector code uses |
8994 | 4 when growing a NULL vector, so we do too. */ |
8995 | vec<tree, va_gc> *v; |
8996 | vec_alloc (v, 4); |
8997 | return v; |
8998 | } |
8999 | } |
9000 | |
9001 | /* Release a vector of trees back to the cache. */ |
9002 | |
9003 | void |
9004 | release_tree_vector (vec<tree, va_gc> *vec) |
9005 | { |
9006 | if (vec != NULL) |
9007 | { |
9008 | if (vec->allocated () >= 16) |
9009 | /* Don't cache vecs that have expanded more than once. On a p64 |
9010 | target, vecs double in alloc size with each power of 2 elements, e.g. |
9011 | at 16 elements the alloc increases from 128 to 256 bytes. */ |
9012 | vec_free (vec); |
9013 | else |
9014 | { |
9015 | vec->truncate (0); |
9016 | vec_safe_push (tree_vector_cache, vec); |
9017 | } |
9018 | } |
9019 | } |
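| |
| /* A minimal sketch of the intended usage pattern for this cache; ARG1 |
| and ARG2 are placeholders rather than code from this file: |
| |
| vec<tree, va_gc> *args = make_tree_vector (); |
| vec_safe_push (args, arg1); |
| vec_safe_push (args, arg2); |
| ... build a call or similar node from ARGS ... |
| release_tree_vector (args); |
| |
| Because the vectors are GC-allocated, a missing release only loses |
| the caching benefit; it does not leak the memory permanently. */ |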
9020 | |
9021 | /* Get a new tree vector holding a single tree. */ |
9022 | |
9023 | vec<tree, va_gc> * |
9024 | make_tree_vector_single (tree t) |
9025 | { |
9026 | vec<tree, va_gc> *ret = make_tree_vector (); |
9027 | ret->quick_push (t); |
9028 | return ret; |
9029 | } |
9030 | |
9031 | /* Get a new tree vector of the TREE_VALUEs of a TREE_LIST chain. */ |
9032 | |
9033 | vec<tree, va_gc> * |
9034 | make_tree_vector_from_list (tree list) |
9035 | { |
9036 | vec<tree, va_gc> *ret = make_tree_vector (); |
9037 | for (; list; list = TREE_CHAIN (list)) |
9038 | vec_safe_push (ret, TREE_VALUE (list)); |
9039 | return ret; |
9040 | } |
9041 | |
9042 | /* Append to a tree vector V the values of a CONSTRUCTOR CTOR |
9043 | and return the new possibly reallocated vector. */ |
9044 | |
9045 | vec<tree, va_gc> * |
9046 | append_ctor_to_tree_vector (vec<tree, va_gc> *v, tree ctor) |
9047 | { |
9048 | unsigned nelts = vec_safe_length (v) + CONSTRUCTOR_NELTS (ctor); |
9049 | vec_safe_reserve (v, CONSTRUCTOR_NELTS (ctor)); |
9050 | for (unsigned i = 0; i < CONSTRUCTOR_NELTS (ctor); ++i) |
9051 | if (TREE_CODE (CONSTRUCTOR_ELT (ctor, i)->value) == RAW_DATA_CST) |
9052 | { |
9053 | tree raw_data = CONSTRUCTOR_ELT (ctor, i)->value; |
9054 | nelts += RAW_DATA_LENGTH (raw_data) - 1; |
9055 | vec_safe_reserve (v, nelts - v->length ()); |
9056 | if (TYPE_PRECISION (TREE_TYPE (raw_data)) > CHAR_BIT |
9057 | || TYPE_UNSIGNED (TREE_TYPE (raw_data))) |
9058 | for (unsigned j = 0; j < (unsigned) RAW_DATA_LENGTH (raw_data); ++j) |
9059 | v->quick_push (build_int_cst (TREE_TYPE (raw_data), |
9060 | RAW_DATA_UCHAR_ELT (raw_data, j))); |
9061 | else |
9062 | for (unsigned j = 0; j < (unsigned) RAW_DATA_LENGTH (raw_data); ++j) |
9063 | v->quick_push (build_int_cst (TREE_TYPE (raw_data), |
9064 | RAW_DATA_SCHAR_ELT (raw_data, j))); |
9065 | } |
9066 | else |
9067 | v->quick_push (CONSTRUCTOR_ELT (ctor, i)->value); |
9068 | return v; |
9069 | } |
9070 | |
9071 | /* Get a new tree vector of the values of a CONSTRUCTOR. */ |
9072 | |
9073 | vec<tree, va_gc> * |
9074 | make_tree_vector_from_ctor (tree ctor) |
9075 | { |
9076 | vec<tree,va_gc> *ret |
9077 | = CONSTRUCTOR_NELTS (ctor) <= 16 ? make_tree_vector () : NULL; |
9078 | return append_ctor_to_tree_vector (ret, ctor); |
9079 | } |
9080 | |
9081 | /* Get a new tree vector which is a copy of an existing one. */ |
9082 | |
9083 | vec<tree, va_gc> * |
9084 | make_tree_vector_copy (const vec<tree, va_gc> *orig) |
9085 | { |
9086 | vec<tree, va_gc> *ret; |
9087 | unsigned int ix; |
9088 | tree t; |
9089 | |
9090 | ret = make_tree_vector (); |
9091 | vec_safe_reserve (ret, vec_safe_length (orig)); |
9092 | FOR_EACH_VEC_SAFE_ELT (orig, ix, t) |
9093 | ret->quick_push (t); |
9094 | return ret; |
9095 | } |
9096 | |
9097 | /* Return true if KEYWORD starts a type specifier. */ |
9098 | |
9099 | bool |
9100 | keyword_begins_type_specifier (enum rid keyword) |
9101 | { |
9102 | switch (keyword) |
9103 | { |
9104 | case RID_AUTO_TYPE: |
9105 | case RID_INT: |
9106 | case RID_CHAR: |
9107 | case RID_FLOAT: |
9108 | case RID_DOUBLE: |
9109 | case RID_VOID: |
9110 | case RID_UNSIGNED: |
9111 | case RID_LONG: |
9112 | case RID_SHORT: |
9113 | case RID_SIGNED: |
9114 | CASE_RID_FLOATN_NX: |
9115 | case RID_DFLOAT32: |
9116 | case RID_DFLOAT64: |
9117 | case RID_DFLOAT128: |
9118 | case RID_FRACT: |
9119 | case RID_ACCUM: |
9120 | case RID_BOOL: |
9121 | case RID_BITINT: |
9122 | case RID_WCHAR: |
9123 | case RID_CHAR8: |
9124 | case RID_CHAR16: |
9125 | case RID_CHAR32: |
9126 | case RID_SAT: |
9127 | case RID_COMPLEX: |
9128 | case RID_TYPEOF: |
9129 | case RID_STRUCT: |
9130 | case RID_CLASS: |
9131 | case RID_UNION: |
9132 | case RID_ENUM: |
9133 | return true; |
9134 | default: |
9135 | if (keyword >= RID_FIRST_INT_N |
9136 | && keyword < RID_FIRST_INT_N + NUM_INT_N_ENTS |
9137 | && int_n_enabled_p[keyword-RID_FIRST_INT_N]) |
9138 | return true; |
9139 | return false; |
9140 | } |
9141 | } |
9142 | |
9143 | /* Return true if KEYWORD names a type qualifier. */ |
9144 | |
9145 | bool |
9146 | keyword_is_type_qualifier (enum rid keyword) |
9147 | { |
9148 | switch (keyword) |
9149 | { |
9150 | case RID_CONST: |
9151 | case RID_VOLATILE: |
9152 | case RID_RESTRICT: |
9153 | case RID_ATOMIC: |
9154 | return true; |
9155 | default: |
9156 | return false; |
9157 | } |
9158 | } |
9159 | |
9160 | /* Return true if KEYWORD names a storage class specifier. |
9161 | |
9162 | RID_TYPEDEF is not included in this list despite `typedef' being |
9163 | listed in C99 6.7.1.1. 6.7.1.3 indicates that `typedef' is listed as |
9164 | such for syntactic convenience only. */ |
9165 | |
9166 | bool |
9167 | keyword_is_storage_class_specifier (enum rid keyword) |
9168 | { |
9169 | switch (keyword) |
9170 | { |
9171 | case RID_STATIC: |
9172 | case RID_EXTERN: |
9173 | case RID_REGISTER: |
9174 | case RID_AUTO: |
9175 | case RID_MUTABLE: |
9176 | case RID_THREAD: |
9177 | return true; |
9178 | default: |
9179 | return false; |
9180 | } |
9181 | } |
9182 | |
9183 | /* Return true if KEYWORD names a function-specifier [dcl.fct.spec]. */ |
9184 | |
9185 | static bool |
9186 | keyword_is_function_specifier (enum rid keyword) |
9187 | { |
9188 | switch (keyword) |
9189 | { |
9190 | case RID_INLINE: |
9191 | case RID_NORETURN: |
9192 | case RID_VIRTUAL: |
9193 | case RID_EXPLICIT: |
9194 | return true; |
9195 | default: |
9196 | return false; |
9197 | } |
9198 | } |
9199 | |
9200 | /* Return true if KEYWORD names a decl-specifier [dcl.spec] or a |
9201 | declaration-specifier (C99 6.7). */ |
9202 | |
9203 | bool |
9204 | keyword_is_decl_specifier (enum rid keyword) |
9205 | { |
9206 | if (keyword_is_storage_class_specifier (keyword) |
9207 | || keyword_is_type_qualifier (keyword) |
9208 | || keyword_is_function_specifier (keyword)) |
9209 | return true; |
9210 | |
9211 | switch (keyword) |
9212 | { |
9213 | case RID_TYPEDEF: |
9214 | case RID_FRIEND: |
9215 | case RID_CONSTEXPR: |
9216 | case RID_CONSTINIT: |
9217 | return true; |
9218 | default: |
9219 | return false; |
9220 | } |
9221 | } |
9222 | |
9223 | /* Initialize language-specific-bits of tree_contains_struct. */ |
9224 | |
9225 | void |
9226 | c_common_init_ts (void) |
9227 | { |
9228 | MARK_TS_EXP (SIZEOF_EXPR); |
9229 | MARK_TS_EXP (PAREN_SIZEOF_EXPR); |
9230 | MARK_TS_EXP (C_MAYBE_CONST_EXPR); |
9231 | MARK_TS_EXP (EXCESS_PRECISION_EXPR); |
9232 | MARK_TS_EXP (BREAK_STMT); |
9233 | MARK_TS_EXP (CONTINUE_STMT); |
9234 | MARK_TS_EXP (DO_STMT); |
9235 | MARK_TS_EXP (FOR_STMT); |
9236 | MARK_TS_EXP (SWITCH_STMT); |
9237 | MARK_TS_EXP (WHILE_STMT); |
9238 | |
9239 | MARK_TS_DECL_COMMON (CONCEPT_DECL); |
9240 | } |
9241 | |
9242 | /* Build a user-defined numeric literal out of an integer constant type VALUE |
9243 | with identifier SUFFIX. */ |
9244 | |
9245 | tree |
9246 | build_userdef_literal (tree suffix_id, tree value, |
9247 | enum overflow_type overflow, tree num_string) |
9248 | { |
9249 | tree literal = make_node (USERDEF_LITERAL); |
9250 | USERDEF_LITERAL_SUFFIX_ID (literal) = suffix_id; |
9251 | USERDEF_LITERAL_VALUE (literal) = value; |
9252 | USERDEF_LITERAL_OVERFLOW (literal) = overflow; |
9253 | USERDEF_LITERAL_NUM_STRING (literal) = num_string; |
9254 | return literal; |
9255 | } |
9256 | |
9257 | /* For vector[index], convert the vector to an array of the underlying type. |
9258 | Return true if the resulting ARRAY_REF should not be an lvalue. */ |
9259 | |
9260 | bool |
9261 | convert_vector_to_array_for_subscript (location_t loc, |
9262 | tree *vecp, tree index) |
9263 | { |
9264 | bool ret = false; |
9265 | if (gnu_vector_type_p (TREE_TYPE (*vecp))) |
9266 | { |
9267 | tree type = TREE_TYPE (*vecp); |
9268 | tree newitype; |
9269 | |
9270 | ret = !lvalue_p (*vecp); |
9271 | |
9272 | index = fold_for_warn (index); |
9273 | /* Warn out-of-bounds index for vectors only if known. */ |
9274 | if (poly_int_tree_p (index)) |
9275 | if (!tree_fits_poly_uint64_p (index) |
9276 | || known_ge (tree_to_poly_uint64 (index), |
9277 | TYPE_VECTOR_SUBPARTS (type))) |
9278 | warning_at (loc, OPT_Warray_bounds_, "index value is out of bound"); |
9279 | |
9280 | /* We are building an ARRAY_REF so mark the vector as addressable |
9281 | to not run into the gimplifier's premature setting of DECL_GIMPLE_REG_P |
9282 | for function parameters. */ |
9283 | c_common_mark_addressable_vec (*vecp); |
9284 | |
9285 | /* Make sure qualifiers are copied from the vector type to the new element |
9286 | of the array type. */ |
9287 | newitype = build_qualified_type (TREE_TYPE (type), TYPE_QUALS (type)); |
9288 | |
9289 | *vecp = build1 (VIEW_CONVERT_EXPR, |
9290 | build_array_type_nelts (newitype, |
9291 | TYPE_VECTOR_SUBPARTS (type)), |
9292 | *vecp); |
9293 | } |
9294 | return ret; |
9295 | } |
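| |
| /* For example (illustrative only), with |
| typedef int v4si __attribute__ ((vector_size (16))); |
| v4si v; |
| the subscript v[2] is rewritten here roughly as |
| VIEW_CONVERT_EXPR<int[4]>(v)[2] |
| and a constant index such as v[7] additionally triggers the |
| -Warray-bounds warning above, since 7 is outside the 4 subparts. */ |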
9296 | |
9297 | /* Determine which of the operands, if any, is a scalar that needs to be |
9298 | converted to a vector, for the range of operations. */ |
9299 | enum stv_conv |
9300 | scalar_to_vector (location_t loc, enum tree_code code, tree op0, tree op1, |
9301 | bool complain) |
9302 | { |
9303 | tree type0 = TREE_TYPE (op0); |
9304 | tree type1 = TREE_TYPE (op1); |
9305 | bool integer_only_op = false; |
9306 | enum stv_conv ret = stv_firstarg; |
9307 | |
9308 | gcc_assert (gnu_vector_type_p (type0) || gnu_vector_type_p (type1)); |
9309 | switch (code) |
9310 | { |
9311 | /* Most GENERIC binary expressions require homogeneous arguments. |
9312 | LSHIFT_EXPR and RSHIFT_EXPR are exceptions and accept a first |
9313 | argument that is a vector and a second one that is a scalar, so |
9314 | we never return stv_secondarg for them. */ |
9315 | case RSHIFT_EXPR: |
9316 | case LSHIFT_EXPR: |
9317 | if (TREE_CODE (type0) == INTEGER_TYPE |
9318 | && TREE_CODE (TREE_TYPE (type1)) == INTEGER_TYPE) |
9319 | { |
9320 | if (unsafe_conversion_p (TREE_TYPE (type1), op0, |
9321 | NULL_TREE, false)) |
9322 | { |
9323 | if (complain) |
9324 | error_at (loc, "conversion of scalar %qT to vector %qT " |
9325 | "involves truncation", type0, type1); |
9326 | return stv_error; |
9327 | } |
9328 | else |
9329 | return stv_firstarg; |
9330 | } |
9331 | break; |
9332 | |
9333 | case BIT_IOR_EXPR: |
9334 | case BIT_XOR_EXPR: |
9335 | case BIT_AND_EXPR: |
9336 | integer_only_op = true; |
9337 | /* fall through */ |
9338 | |
9339 | case VEC_COND_EXPR: |
9340 | |
9341 | case PLUS_EXPR: |
9342 | case MINUS_EXPR: |
9343 | case MULT_EXPR: |
9344 | case TRUNC_DIV_EXPR: |
9345 | case CEIL_DIV_EXPR: |
9346 | case FLOOR_DIV_EXPR: |
9347 | case ROUND_DIV_EXPR: |
9348 | case EXACT_DIV_EXPR: |
9349 | case TRUNC_MOD_EXPR: |
9350 | case FLOOR_MOD_EXPR: |
9351 | case RDIV_EXPR: |
9352 | case EQ_EXPR: |
9353 | case NE_EXPR: |
9354 | case LE_EXPR: |
9355 | case GE_EXPR: |
9356 | case LT_EXPR: |
9357 | case GT_EXPR: |
9358 | /* What about UNLT_EXPR? */ |
9359 | if (gnu_vector_type_p (type0)) |
9360 | { |
9361 | ret = stv_secondarg; |
9362 | std::swap (type0, type1); |
9363 | std::swap (op0, op1); |
9364 | } |
9365 | |
9366 | if (TREE_CODE (type0) == INTEGER_TYPE |
9367 | && TREE_CODE (TREE_TYPE (type1)) == INTEGER_TYPE) |
9368 | { |
9369 | if (unsafe_conversion_p (TREE_TYPE (type1), op0, |
9370 | NULL_TREE, false)) |
9371 | { |
9372 | if (complain) |
9373 | error_at (loc, "conversion of scalar %qT to vector %qT " |
9374 | "involves truncation", type0, type1); |
9375 | return stv_error; |
9376 | } |
9377 | return ret; |
9378 | } |
9379 | else if (!integer_only_op |
9380 | /* Allow integer --> real conversion if safe. */ |
9381 | && (SCALAR_FLOAT_TYPE_P (type0) |
9382 | || TREE_CODE (type0) == INTEGER_TYPE) |
9383 | && SCALAR_FLOAT_TYPE_P (TREE_TYPE (type1))) |
9384 | { |
9385 | if (unsafe_conversion_p (TREE_TYPE (type1), op0, |
9386 | NULL_TREE, false)) |
9387 | { |
9388 | if (complain) |
9389 | error_at (loc, "conversion of scalar %qT to vector %qT " |
9390 | "involves truncation", type0, type1); |
9391 | return stv_error; |
9392 | } |
9393 | return ret; |
9394 | } |
9395 | default: |
9396 | break; |
9397 | } |
9398 | |
9399 | return stv_nothing; |
9400 | } |
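| |
| /* Illustrative results (hypothetical user code), given |
| typedef int v4si __attribute__ ((vector_size (16))); |
| v4si v; |
| |
| 2 + v -> stv_firstarg (the scalar is the first operand) |
| v + 2 -> stv_secondarg (the operands are swapped internally) |
| v << 2 -> stv_nothing (a scalar shift count needs no conversion) |
| v + 10000000000LL -> stv_error, since converting the scalar to the |
| int element type would truncate it. */ |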
9401 | |
9402 | /* Return the alignment of std::max_align_t. |
9403 | |
9404 | [support.types.layout] The type max_align_t is a POD type whose alignment |
9405 | requirement is at least as great as that of every scalar type, and whose |
9406 | alignment requirement is supported in every context. */ |
9407 | |
9408 | unsigned |
9409 | max_align_t_align () |
9410 | { |
9411 | unsigned int max_align = MAX (TYPE_ALIGN (long_long_integer_type_node), |
9412 | TYPE_ALIGN (long_double_type_node)); |
9413 | if (float128_type_node != NULL_TREE) |
9414 | max_align = MAX (max_align, TYPE_ALIGN (float128_type_node)); |
9415 | return max_align; |
9416 | } |
9417 | |
9418 | /* Return true iff ALIGN is an integral constant that is a fundamental |
9419 | alignment, as defined by [basic.align] in the c++-11 |
9420 | specifications. |
9421 | |
9422 | That is: |
9423 | |
9424 | [A fundamental alignment is represented by an alignment less than or |
9425 | equal to the greatest alignment supported by the implementation |
9426 | in all contexts, which is equal to alignof(max_align_t)]. */ |
9427 | |
9428 | bool |
9429 | cxx_fundamental_alignment_p (unsigned align) |
9430 | { |
9431 | return (align <= max_align_t_align ()); |
9432 | } |
9433 | |
9434 | /* Return true if T is a pointer to a zero-sized aggregate. */ |
9435 | |
9436 | bool |
9437 | pointer_to_zero_sized_aggr_p (tree t) |
9438 | { |
9439 | if (!POINTER_TYPE_P (t)) |
9440 | return false; |
9441 | t = TREE_TYPE (t); |
9442 | return (TYPE_SIZE (t) && integer_zerop (TYPE_SIZE (t))); |
9443 | } |
9444 | |
9445 | /* For an EXPR of a FUNCTION_TYPE that references a GCC built-in function |
9446 | with no library fallback, or for an ADDR_EXPR whose operand is such an |
9447 | expression, issue an error pointing to the location LOC. |
9448 | Returns true when the expression has been diagnosed and false |
9449 | otherwise. */ |
9450 | |
9451 | bool |
9452 | reject_gcc_builtin (const_tree expr, location_t loc /* = UNKNOWN_LOCATION */) |
9453 | { |
9454 | if (TREE_CODE (expr) == ADDR_EXPR) |
9455 | expr = TREE_OPERAND (expr, 0); |
9456 | |
9457 | STRIP_ANY_LOCATION_WRAPPER (expr); |
9458 | |
9459 | if (TREE_TYPE (expr) |
9460 | && TREE_CODE (TREE_TYPE (expr)) == FUNCTION_TYPE |
9461 | && TREE_CODE (expr) == FUNCTION_DECL |
9462 | /* The intersection of DECL_BUILT_IN and DECL_IS_UNDECLARED_BUILTIN avoids |
9463 | false positives for user-declared built-ins such as abs or |
9464 | strlen, and for C++ operators new and delete. |
9465 | The c_decl_implicit() test avoids false positives for implicitly |
9466 | declared built-ins with library fallbacks (such as abs). */ |
9467 | && fndecl_built_in_p (expr) |
9468 | && DECL_IS_UNDECLARED_BUILTIN (expr) |
9469 | && !c_decl_implicit (expr) |
9470 | && !DECL_ASSEMBLER_NAME_SET_P (expr)) |
9471 | { |
9472 | if (loc == UNKNOWN_LOCATION) |
9473 | loc = EXPR_LOC_OR_LOC (expr, input_location); |
9474 | |
9475 | /* Reject arguments that are built-in functions with |
9476 | no library fallback. */ |
9477 | error_at (loc, "built-in function %qE must be directly called", expr); |
9478 | |
9479 | return true; |
9480 | } |
9481 | |
9482 | return false; |
9483 | } |
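| |
| /* For instance (an illustrative case, not an exhaustive rule), code |
| that tries to take the address of a fallback-less built-in such as |
| |
| void *p = (void *) __builtin_trap; |
| |
| is expected to be diagnosed here with "built-in function |
| '__builtin_trap' must be directly called", while built-ins that have |
| a library fallback, such as abs or strlen, are left alone. */ |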
9484 | |
9485 | /* Issue an ERROR for an invalid SIZE of array NAME which is null |
9486 | for unnamed arrays. */ |
9487 | |
9488 | void |
9489 | invalid_array_size_error (location_t loc, cst_size_error error, |
9490 | const_tree size, const_tree name) |
9491 | { |
9492 | tree maxsize = max_object_size (); |
9493 | switch (error) |
9494 | { |
9495 | case cst_size_not_constant: |
9496 | if (name) |
9497 | error_at (loc, "size of array %qE is not a constant expression", |
9498 | name); |
9499 | else |
9500 | error_at (loc, "size of array is not a constant expression"); |
9501 | break; |
9502 | case cst_size_negative: |
9503 | if (name) |
9504 | error_at (loc, "size %qE of array %qE is negative", |
9505 | size, name); |
9506 | else |
9507 | error_at (loc, "size %qE of array is negative", |
9508 | size); |
9509 | break; |
9510 | case cst_size_too_big: |
9511 | if (name) |
9512 | error_at (loc, "size %qE of array %qE exceeds maximum " |
9513 | "object size %qE", size, name, maxsize); |
9514 | else |
9515 | error_at (loc, "size %qE of array exceeds maximum " |
9516 | "object size %qE", size, maxsize); |
9517 | break; |
9518 | case cst_size_overflow: |
9519 | if (name) |
9520 | error_at (loc, "size of array %qE exceeds maximum " |
9521 | "object size %qE", name, maxsize); |
9522 | else |
9523 | error_at (loc, "size of array exceeds maximum " |
9524 | "object size %qE", maxsize); |
9525 | break; |
9526 | default: |
9527 | gcc_unreachable (); |
9528 | } |
9529 | } |
9530 | |
9531 | /* Check if array size calculations overflow or if the array covers more |
9532 | than half of the address space. Return true if the size of the array |
9533 | is valid, false otherwise. T is either the type of the array or its |
9534 | size, and NAME is the name of the array, or null for unnamed arrays. */ |
9535 | |
9536 | bool |
9537 | valid_array_size_p (location_t loc, const_tree t, tree name, bool complain) |
9538 | { |
9539 | if (t == error_mark_node) |
9540 | return true; |
9541 | |
9542 | const_tree size; |
9543 | if (TYPE_P (t)) |
9544 | { |
9545 | if (!COMPLETE_TYPE_P (t)) |
9546 | return true; |
9547 | size = TYPE_SIZE_UNIT (t); |
9548 | } |
9549 | else |
9550 | size = t; |
9551 | |
9552 | if (TREE_CODE (size) != INTEGER_CST) |
9553 | return true; |
9554 | |
9555 | cst_size_error error; |
9556 | if (valid_constant_size_p (size, &error)) |
9557 | return true; |
9558 | |
9559 | if (!complain) |
9560 | return false; |
9561 | |
9562 | if (TREE_CODE (TREE_TYPE (size)) == ENUMERAL_TYPE) |
9563 | /* Show the value of the enumerator rather than its name. */ |
9564 | size = convert (ssizetype, const_cast<tree> (size)); |
9565 | |
9566 | invalid_array_size_error (loc, error, size, name); |
9567 | return false; |
9568 | } |
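| |
| /* Illustrative examples of sizes rejected here (hypothetical code): |
| |
| char a[-1]; is reported as negative (cst_size_negative), |
| while a size whose byte count reaches beyond max_object_size () |
| (roughly half of the address space), e.g. char b[SIZE_MAX];, is |
| reported as exceeding the maximum object size. */ |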
9569 | |
9570 | /* Read SOURCE_DATE_EPOCH from environment to have a deterministic |
9571 | timestamp to replace embedded current dates to get reproducible |
9572 | results. Returns -1 if SOURCE_DATE_EPOCH is not defined. */ |
9573 | |
9574 | time_t |
9575 | cb_get_source_date_epoch (cpp_reader *pfile ATTRIBUTE_UNUSED) |
9576 | { |
9577 | char *source_date_epoch; |
9578 | int64_t epoch; |
9579 | char *endptr; |
9580 | |
9581 | source_date_epoch = getenv ("SOURCE_DATE_EPOCH"); |
9582 | if (!source_date_epoch) |
9583 | return (time_t) -1; |
9584 | |
9585 | errno = 0; |
9586 | #if defined(INT64_T_IS_LONG) |
9587 | epoch = strtol (source_date_epoch, &endptr, 10); |
9588 | #else |
9589 | epoch = strtoll (source_date_epoch, &endptr, 10); |
9590 | #endif |
9591 | if (errno != 0 || endptr == source_date_epoch || *endptr != '\0' |
9592 | || epoch < 0 || epoch > MAX_SOURCE_DATE_EPOCH) |
9593 | { |
9594 | error_at (input_location, "environment variable %qs must " |
9595 | "expand to a non-negative integer less than or equal to %wd", |
9596 | "SOURCE_DATE_EPOCH", MAX_SOURCE_DATE_EPOCH); |
9597 | return (time_t) -1; |
9598 | } |
9599 | |
9600 | return (time_t) epoch; |
9601 | } |
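| |
| /* For example (an illustrative shell invocation, not part of GCC): |
| |
| SOURCE_DATE_EPOCH=1700000000 gcc -c foo.c |
| |
| makes __DATE__ and __TIME__ expand as if the compilation happened at |
| that fixed epoch, which is what reproducible-build systems rely on. */ |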
9602 | |
9603 | /* Callback for libcpp for offering spelling suggestions for misspelled |
9604 | directives. GOAL is an unrecognized string; CANDIDATES is a |
9605 | NULL-terminated array of candidate strings. Return the closest |
9606 | match to GOAL within CANDIDATES, or NULL if none are good |
9607 | suggestions. */ |
9608 | |
9609 | const char * |
9610 | cb_get_suggestion (cpp_reader *, const char *goal, |
9611 | const char *const *candidates) |
9612 | { |
9613 | best_match<const char *, const char *> bm (goal); |
9614 | while (*candidates) |
9615 | bm.consider (*candidates++); |
9616 | return bm.get_best_meaningful_candidate (); |
9617 | } |
9618 | |
9619 | /* Return the lattice point which is the wider of the two FLT_EVAL_METHOD |
9620 | modes X, Y. This isn't just >, as the FLT_EVAL_METHOD values added |
9621 | by C TS 18661-3 for interchange types that are computed in their |
9622 | native precision are larger than the C11 values for evaluating in the |
9623 | precision of float/double/long double. If either mode is |
9624 | FLT_EVAL_METHOD_UNPREDICTABLE, return that. */ |
9625 | |
9626 | enum flt_eval_method |
9627 | excess_precision_mode_join (enum flt_eval_method x, |
9628 | enum flt_eval_method y) |
9629 | { |
9630 | if (x == FLT_EVAL_METHOD_UNPREDICTABLE |
9631 | || y == FLT_EVAL_METHOD_UNPREDICTABLE) |
9632 | return FLT_EVAL_METHOD_UNPREDICTABLE; |
9633 | |
9634 | /* GCC only supports one interchange type right now, _Float16. If |
9635 | we're evaluating _Float16 in 16-bit precision, then flt_eval_method |
9636 | will be FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16. */ |
9637 | if (x == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16) |
9638 | return y; |
9639 | if (y == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16) |
9640 | return x; |
9641 | |
9642 | /* Other values for flt_eval_method are directly comparable, and we want |
9643 | the maximum. */ |
9644 | return MAX (x, y); |
9645 | } |
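| |
| /* For instance, with the FLT_EVAL_METHOD_* prefix abbreviated, the |
| join described above behaves as: |
| |
| join (PROMOTE_TO_FLOAT16, PROMOTE_TO_FLOAT) == PROMOTE_TO_FLOAT |
| join (PROMOTE_TO_FLOAT, PROMOTE_TO_LONG_DOUBLE) == PROMOTE_TO_LONG_DOUBLE |
| join (UNPREDICTABLE, anything) == UNPREDICTABLE */ |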
9646 | |
9647 | /* Return the value that should be set for FLT_EVAL_METHOD in the |
9648 | context of ISO/IEC TS 18661-3. |
9649 | |
9650 | This relates to the effective excess precision seen by the user, |
9651 | which is the join point of the precision the target requests for |
9652 | -fexcess-precision={standard,fast,16} and the implicit excess precision |
9653 | the target uses. */ |
9654 | |
9655 | static enum flt_eval_method |
9656 | c_ts18661_flt_eval_method (void) |
9657 | { |
9658 | enum flt_eval_method implicit |
9659 | = targetm.c.excess_precision (EXCESS_PRECISION_TYPE_IMPLICIT); |
9660 | |
9661 | enum excess_precision_type flag_type |
9662 | = (flag_excess_precision == EXCESS_PRECISION_STANDARD |
9663 | ? EXCESS_PRECISION_TYPE_STANDARD |
9664 | : (flag_excess_precision == EXCESS_PRECISION_FLOAT16 |
9665 | ? EXCESS_PRECISION_TYPE_FLOAT16 |
9666 | : EXCESS_PRECISION_TYPE_FAST)); |
9667 | |
9668 | enum flt_eval_method requested |
9669 | = targetm.c.excess_precision (flag_type); |
9670 | |
9671 | return excess_precision_mode_join (implicit, requested); |
9672 | } |
9673 | |
9674 | /* As c_ts18661_flt_eval_method, but clamps the expected values to |
9675 | those that were permitted by C11. That is to say, eliminates |
9676 | FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16. */ |
9677 | |
9678 | static enum flt_eval_method |
9679 | c_c11_flt_eval_method (void) |
9680 | { |
9681 | return excess_precision_mode_join (c_ts18661_flt_eval_method (), |
9682 | FLT_EVAL_METHOD_PROMOTE_TO_FLOAT); |
9683 | } |
9684 | |
9685 | /* Return the value that should be set for FLT_EVAL_METHOD. |
9686 | MAYBE_C11_ONLY_P is TRUE if we should check |
9687 | FLAG_PERMITTED_EVAL_METHODS as to whether we should limit the possible |
9688 | values we can return to those from C99/C11, and FALSE otherwise. |
9689 | See the comments on c_ts18661_flt_eval_method for what value we choose |
9690 | to set here. */ |
9691 | |
9692 | int |
9693 | c_flt_eval_method (bool maybe_c11_only_p) |
9694 | { |
9695 | if (maybe_c11_only_p |
9696 | && flag_permitted_flt_eval_methods |
9697 | == PERMITTED_FLT_EVAL_METHODS_C11) |
9698 | return c_c11_flt_eval_method (); |
9699 | else |
9700 | return c_ts18661_flt_eval_method (); |
9701 | } |
9702 | |
9703 | /* An enum for get_missing_token_insertion_kind for describing the best |
9704 | place to insert a missing token, if there is one. */ |
9705 | |
9706 | enum missing_token_insertion_kind |
9707 | { |
9708 | MTIK_IMPOSSIBLE, |
9709 | MTIK_INSERT_BEFORE_NEXT, |
9710 | MTIK_INSERT_AFTER_PREV |
9711 | }; |
9712 | |
9713 | /* Given a missing token of TYPE, determine if it is reasonable to |
9714 | emit a fix-it hint suggesting the insertion of the token, and, |
9715 | if so, where the token should be inserted relative to other tokens. |
9716 | |
9717 | It only makes sense to do this for values of TYPE that are symbols. |
9718 | |
9719 | Some symbols should go before the next token, e.g. in: |
9720 | if flag) |
9721 | we want to insert the missing '(' immediately before "flag", |
9722 | giving: |
9723 | if (flag) |
9724 | rather than: |
9725 | if( flag) |
9726 | These use MTIK_INSERT_BEFORE_NEXT. |
9727 | |
9728 | Other symbols should go after the previous token, e.g. in: |
9729 | if (flag |
9730 | do_something (); |
9731 | we want to insert the missing ')' immediately after the "flag", |
9732 | giving: |
9733 | if (flag) |
9734 | do_something (); |
9735 | rather than: |
9736 | if (flag |
9737 | )do_something (); |
9738 | These use MTIK_INSERT_AFTER_PREV. */ |
9739 | |
9740 | static enum missing_token_insertion_kind |
9741 | get_missing_token_insertion_kind (enum cpp_ttype type) |
9742 | { |
9743 | switch (type) |
9744 | { |
9745 | /* Insert missing "opening" brackets immediately |
9746 | before the next token. */ |
9747 | case CPP_OPEN_SQUARE: |
9748 | case CPP_OPEN_PAREN: |
9749 | return MTIK_INSERT_BEFORE_NEXT; |
9750 | |
9751 | /* Insert other missing symbols immediately after |
9752 | the previous token. */ |
9753 | case CPP_CLOSE_PAREN: |
9754 | case CPP_CLOSE_SQUARE: |
9755 | case CPP_SEMICOLON: |
9756 | case CPP_COMMA: |
9757 | case CPP_COLON: |
9758 | return MTIK_INSERT_AFTER_PREV; |
9759 | |
9760 | /* Other kinds of token don't get fix-it hints. */ |
9761 | default: |
9762 | return MTIK_IMPOSSIBLE; |
9763 | } |
9764 | } |
9765 | |
9766 | /* Given RICHLOC, a location for a diagnostic describing a missing token |
9767 | of kind TOKEN_TYPE, potentially add a fix-it hint suggesting the |
9768 | insertion of the token. |
9769 | |
9770 | The location of the attempted fix-it hint depends on TOKEN_TYPE: |
9771 | it will either be: |
9772 | (a) immediately after PREV_TOKEN_LOC, or |
9773 | |
9774 | (b) immediately before the primary location within RICHLOC (taken to |
9775 | be that of the token following where the token was expected). |
9776 | |
9777 | If we manage to add a fix-it hint, then the location of the |
9778 | fix-it hint is likely to be more useful as the primary location |
9779 | of the diagnostic than that of the following token, so we swap |
9780 | these locations. |
9781 | |
9782 | For example, given this bogus code: |
9783 | 123456789012345678901234567890 |
9784 | 1 | int missing_semicolon (void) |
9785 | 2 | { |
9786 | 3 | return 42 |
9787 | 4 | } |
9788 | |
9789 | we will emit: |
9790 | |
9791 | "expected ';' before '}'" |
9792 | |
9793 | RICHLOC's primary location is at the closing brace, so before "swapping" |
9794 | we would emit the error at line 4 column 1: |
9795 | |
9796 | 123456789012345678901234567890 |
9797 | 3 | return 42 |< fix-it hint emitted for this line |
9798 | | ; | |
9799 | 4 | } |< "expected ';' before '}'" emitted at this line |
9800 | | ^ | |
9801 | |
9802 | It's more useful for the location of the diagnostic to be at the |
9803 | fix-it hint, so we swap the locations, so the primary location |
9804 | is at the fix-it hint, with the old primary location inserted |
9805 | as a secondary location, giving this, with the error at line 3 |
9806 | column 12: |
9807 | |
9808 | 123456789012345678901234567890 |
9809 | 3 | return 42 |< "expected ';' before '}'" emitted at this line, |
9810 | | ^ | with fix-it hint |
9811 | 4 | ; | |
9812 | | } |< secondary range emitted here |
9813 | | ~ |. */ |
9814 | |
9815 | void |
9816 | maybe_suggest_missing_token_insertion (rich_location *richloc, |
9817 | enum cpp_ttype token_type, |
9818 | location_t prev_token_loc) |
9819 | { |
9820 | gcc_assert (richloc); |
9821 | |
9822 | enum missing_token_insertion_kind mtik |
9823 | = get_missing_token_insertion_kind (token_type); |
9824 | |
9825 | switch (mtik) |
9826 | { |
9827 | default: |
9828 | gcc_unreachable (); |
9829 | break; |
9830 | |
9831 | case MTIK_IMPOSSIBLE: |
9832 | return; |
9833 | |
9834 | case MTIK_INSERT_BEFORE_NEXT: |
9835 | /* Attempt to add the fix-it hint before the primary location |
9836 | of RICHLOC. */ |
9837 | richloc->add_fixit_insert_before (cpp_type2name (token_type, 0)); |
9838 | break; |
9839 | |
9840 | case MTIK_INSERT_AFTER_PREV: |
9841 | /* Attempt to add the fix-it hint after PREV_TOKEN_LOC. */ |
9842 | richloc->add_fixit_insert_after (prev_token_loc, |
9843 | cpp_type2name (token_type, 0)); |
9844 | break; |
9845 | } |
9846 | |
9847 | /* If we were successful, use the fix-it hint's location as the |
9848 | primary location within RICHLOC, adding the old primary location |
9849 | back as a secondary location. */ |
9850 | if (!richloc->seen_impossible_fixit_p ()) |
9851 | { |
9852 | fixit_hint *hint = richloc->get_last_fixit_hint (); |
9853 | location_t hint_loc = hint->get_start_loc (); |
9854 | location_t old_loc = richloc->get_loc (); |
9855 | |
9856 | richloc->set_range (0, hint_loc, SHOW_RANGE_WITH_CARET); |
9857 | richloc->add_range (old_loc); |
9858 | } |
9859 | } |
9860 | |
9861 | /* Potentially emit a note about likely missing '&' or '*', |
9862 | depending on EXPR and EXPECTED_TYPE. */ |
9863 | |
9864 | void |
9865 | maybe_emit_indirection_note (location_t loc, |
9866 | tree expr, tree expected_type) |
9867 | { |
9868 | gcc_assert (expr); |
9869 | gcc_assert (expected_type); |
9870 | |
9871 | tree actual_type = TREE_TYPE (expr); |
9872 | |
9873 | /* Missing '&'. */ |
9874 | if (TREE_CODE (expected_type) == POINTER_TYPE |
9875 | && compatible_types_for_indirection_note_p (actual_type, |
9876 | TREE_TYPE (expected_type)) |
9877 | && lvalue_p (expr)) |
9878 | { |
9879 | gcc_rich_location richloc (loc); |
9880 | richloc.add_fixit_insert_before ("&"); |
9881 | inform (&richloc, "possible fix: take the address with %qs", "&"); |
9882 | } |
9883 | |
9884 | /* Missing '*'. */ |
9885 | if (TREE_CODE (actual_type) == POINTER_TYPE |
9886 | && compatible_types_for_indirection_note_p (TREE_TYPE (actual_type), |
9887 | expected_type)) |
9888 | { |
9889 | gcc_rich_location richloc (loc); |
9890 | richloc.add_fixit_insert_before ("*"); |
9891 | inform (&richloc, "possible fix: dereference with %qs", "*"); |
9892 | } |
9893 | } |
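| |
| /* Illustrative example (hypothetical user code): given |
| |
| void f (int *p); |
| int i; |
| f (i); |
| |
| the argument has type int where int * is expected and I is an |
| lvalue, so the first branch above emits |
| note: possible fix: take the address with '&' |
| together with a fix-it hint inserting the '&'. */ |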
9894 | |
9895 | #if CHECKING_P |
9896 | |
9897 | namespace selftest { |
9898 | |
9899 | /* Verify that fold_for_warn on error_mark_node is safe. */ |
9900 | |
9901 | static void |
9902 | test_fold_for_warn () |
9903 | { |
9904 | ASSERT_EQ (error_mark_node, fold_for_warn (error_mark_node)); |
9905 | } |
9906 | |
9907 | /* Run all of the selftests within this file. */ |
9908 | |
9909 | static void |
9910 | c_common_cc_tests () |
9911 | { |
9912 | test_fold_for_warn (); |
9913 | } |
9914 | |
9915 | /* Run all of the tests within c-family. */ |
9916 | |
9917 | void |
9918 | c_family_tests (void) |
9919 | { |
9920 | c_common_cc_tests (); |
9921 | c_format_cc_tests (); |
9922 | c_indentation_cc_tests (); |
9923 | c_pretty_print_cc_tests (); |
9924 | c_spellcheck_cc_tests (); |
9925 | c_diagnostic_cc_tests (); |
9926 | c_opt_problem_cc_tests (); |
9927 | } |
9928 | |
9929 | } // namespace selftest |
9930 | |
9931 | #endif /* #if CHECKING_P */ |
9932 | |
9933 | /* Attempt to locate a suitable location within FILE for a |
9934 | #include directive to be inserted before. |
9935 | LOC is the location of the relevant diagnostic. |
9936 | |
9937 | Attempt to return the location within FILE immediately |
9938 | after the last #include within that file, or the start of |
9939 | that file if it has no #include directives. |
9940 | |
9941 | Return UNKNOWN_LOCATION if no suitable location is found, |
9942 | or if an error occurs. */ |
9943 | |
9944 | static location_t |
9945 | try_to_locate_new_include_insertion_point (const char *file, location_t loc) |
9946 | { |
9947 | /* Locate the last ordinary map within FILE that ended with a #include. */ |
9948 | const line_map_ordinary *last_include_ord_map = NULL; |
9949 | |
9950 | /* ...and the next ordinary map within FILE after that one. */ |
9951 | const line_map_ordinary *last_ord_map_after_include = NULL; |
9952 | |
9953 | /* ...and the first ordinary map within FILE. */ |
9954 | const line_map_ordinary *first_ord_map_in_file = NULL; |
9955 | |
9956 | /* Get ordinary map containing LOC (or its expansion). */ |
9957 | const line_map_ordinary *ord_map_for_loc = NULL; |
9958 | linemap_resolve_location (line_table, loc, LRK_MACRO_EXPANSION_POINT, |
9959 | &ord_map_for_loc); |
9960 | gcc_assert (ord_map_for_loc); |
9961 | |
9962 | for (unsigned int i = 0; i < LINEMAPS_ORDINARY_USED (line_table); i++) |
9963 | { |
9964 | const line_map_ordinary *ord_map |
9965 | = LINEMAPS_ORDINARY_MAP_AT (line_table, i); |
9966 | |
9967 | if (const line_map_ordinary *from |
9968 | = linemap_included_from_linemap (line_table, ord_map)) |
9969 | /* We cannot use pointer equality, because with preprocessed |
9970 | input all filename strings are unique. */ |
9971 | if (0 == strcmp (from->to_file, file)) |
9972 | { |
9973 | last_include_ord_map = from; |
9974 | last_ord_map_after_include = NULL; |
9975 | } |
9976 | |
9977 | /* Likewise, use strcmp, and reject any line-zero introductory |
9978 | map. */ |
9979 | if (ord_map->to_line && 0 == strcmp (ord_map->to_file, file)) |
9980 | { |
9981 | if (!first_ord_map_in_file) |
9982 | first_ord_map_in_file = ord_map; |
9983 | if (last_include_ord_map && !last_ord_map_after_include) |
9984 | last_ord_map_after_include = ord_map; |
9985 | } |
9986 | |
9987 | /* Stop searching when reaching the ord_map containing LOC, |
9988 | as it makes no sense to provide fix-it hints that appear |
9989 | after the diagnostic in question. */ |
9990 | if (ord_map == ord_map_for_loc) |
9991 | break; |
9992 | } |
9993 | |
9994 | /* Determine where to insert the #include. */ |
9995 | const line_map_ordinary *ord_map_for_insertion; |
9996 | |
9997 | /* We want the next ordmap in the file after the last one that's a |
9998 | #include, but failing that, the start of the file. */ |
9999 | if (last_ord_map_after_include) |
10000 | ord_map_for_insertion = last_ord_map_after_include; |
10001 | else |
10002 | ord_map_for_insertion = first_ord_map_in_file; |
10003 | |
10004 | if (!ord_map_for_insertion) |
10005 | return UNKNOWN_LOCATION; |
10006 | |
10007 | /* The "start_location" is column 0, meaning "the whole line". |
10008 | rich_location and edit_context can't cope with this, so use |
10009 | column 1 instead. */ |
10010 | location_t col_0 = ord_map_for_insertion->start_location; |
10011 | return linemap_position_for_loc_and_offset (line_table, col_0, 1); |
10012 | } |
10013 | |
10014 | /* A map from filenames to sets of headers added to them, for |
10015 | ensuring idempotency within maybe_add_include_fixit. */ |
10016 | |
10017 | /* The values within the map. We need string comparison as there's |
10018 | no guarantee that two different diagnostics that are recommending |
10019 | adding e.g. "<stdio.h>" are using the same buffer. */ |
10020 | |
10021 | typedef hash_set <const char *, false, nofree_string_hash> per_file_includes_t; |
10022 | |
10023 | /* The map itself. We don't need string comparison for the filename keys, |
10024 | as they come from libcpp. */ |
10025 | |
10026 | typedef hash_map <const char *, per_file_includes_t *> added_includes_t; |
10027 | static added_includes_t *added_includes; |
10028 | |
10029 | /* Attempt to add a fix-it hint to RICHLOC, adding "#include HEADER\n" |
10030 | in a suitable location within the file of RICHLOC's primary |
10031 | location. |
10032 | |
10033 | This function is idempotent: a header will be added at most once to |
10034 | any given file. |
10035 | |
10036 | If OVERRIDE_LOCATION is true, then if a fix-it is added and will be |
10037 | printed, then RICHLOC's primary location will be replaced by that of |
10038 | the fix-it hint (for use by "inform" notes where the location of the |
10039 | issue has already been reported). */ |
10040 | |
10041 | void |
10042 | maybe_add_include_fixit (rich_location *richloc, const char *header, |
10043 | bool override_location) |
10044 | { |
10045 | location_t loc = richloc->get_loc (); |
10046 | const char *file = LOCATION_FILE (loc); |
10047 | if (!file) |
10048 | return; |
10049 | |
10050 | /* Idempotency: don't add the same header more than once to a given file. */ |
10051 | if (!added_includes) |
10052 | added_includes = new added_includes_t (); |
10053 | per_file_includes_t *&set = added_includes->get_or_insert (file); |
10054 | if (set) |
10055 | if (set->contains (header)) |
10056 | /* ...then we've already added HEADER to that file. */ |
10057 | return; |
10058 | if (!set) |
10059 | set = new per_file_includes_t (); |
10060 | set->add (header); |
10061 | |
10062 | /* Attempt to locate a suitable place for the new directive. */ |
10063 | location_t include_insert_loc |
10064 | = try_to_locate_new_include_insertion_point (file, loc); |
10065 | if (include_insert_loc == UNKNOWN_LOCATION) |
10066 | return; |
10067 | |
10068 | char *text = xasprintf ("#include %s\n", header); |
10069 | richloc->add_fixit_insert_before (include_insert_loc, text); |
10070 | free (text); |
10071 | |
10072 | if (override_location && global_dc->m_source_printing.enabled) |
10073 | { |
10074 | /* Replace the primary location with that of the insertion point for the |
10075 | fix-it hint. |
10076 | |
10077 | We use SHOW_LINES_WITHOUT_RANGE so that we don't meaninglessly print a |
10078 | caret for the insertion point (or colorize it). |
10079 | |
10080 | Hence we print e.g.: |
10081 | |
10082 | ../x86_64-pc-linux-gnu/libstdc++-v3/include/vector:74:1: note: msg 2 |
10083 | 73 | # include <debug/vector> |
10084 | +++ |+#include <vector> |
10085 | 74 | #endif |
10086 | |
10087 | rather than: |
10088 | |
10089 | ../x86_64-pc-linux-gnu/libstdc++-v3/include/vector:74:1: note: msg 2 |
10090 | 73 | # include <debug/vector> |
10091 | +++ |+#include <vector> |
10092 | 74 | #endif |
10093 | | ^ |
10094 | |
10095 | avoiding the caret on the first column of line 74. */ |
10096 | richloc->set_range (0, include_insert_loc, SHOW_LINES_WITHOUT_RANGE); |
10097 | } |
10098 | } |
10099 | |
10100 | /* Attempt to convert a braced array initializer list CTOR for array |
10101 | TYPE into a STRING_CST for convenience and efficiency. Return |
10102 | the converted string on success or the original ctor on failure. |
10103 | Also, for non-convertible CTORs which contain RAW_DATA_CST values |
10104 | among the elts try to extend the range of RAW_DATA_CSTs. */ |
10105 | |
10106 | static tree |
10107 | braced_list_to_string (tree type, tree ctor, bool member) |
10108 | { |
10109 | /* Ignore non-members with unknown size like arrays with unspecified |
10110 | bound. */ |
10111 | tree typesize = TYPE_SIZE_UNIT (type); |
10112 | if (!member && !tree_fits_uhwi_p (typesize)) |
10113 | return ctor; |
10114 | |
10115 | /* If the target char size differs from the host char size, we'd risk |
10116 | losing data and getting object sizes wrong by converting to |
10117 | host chars. */ |
10118 | if (TYPE_PRECISION (char_type_node) != CHAR_BIT) |
10119 | return ctor; |
10120 | |
10121 | /* STRING_CST doesn't support wide characters. */ |
10122 | gcc_checking_assert (TYPE_PRECISION (TREE_TYPE (type)) == CHAR_BIT); |
10123 | |
10124 | /* If the array has an explicit bound, use it to constrain the size |
10125 | of the string. If it doesn't, be sure to create a string that's |
10126 | as long as implied by the index of the last zero specified via |
10127 | a designator, as in: |
10128 | const char a[] = { [7] = 0 }; */ |
10129 | unsigned HOST_WIDE_INT maxelts; |
10130 | if (typesize) |
10131 | { |
10132 | maxelts = tree_to_uhwi (typesize); |
10133 | maxelts /= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (type))); |
10134 | } |
10135 | else |
10136 | maxelts = HOST_WIDE_INT_M1U; |
10137 | |
10138 | /* Avoid converting initializers for zero-length arrays (but do |
10139 | create them for flexible array members). */ |
10140 | if (!maxelts) |
10141 | return ctor; |
10142 | |
10143 | unsigned HOST_WIDE_INT nelts = CONSTRUCTOR_NELTS (ctor); |
10144 | |
10145 | auto_vec<char> str; |
10146 | str.reserve (nelts + 1); |
10147 | |
10148 | unsigned HOST_WIDE_INT i, j = HOST_WIDE_INT_M1U; |
10149 | tree index, value; |
10150 | bool check_raw_data = false; |
10151 | |
10152 | FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), i, index, value) |
10153 | { |
10154 | if (check_raw_data) |
10155 | { |
10156 | /* The preprocessor always surrounds CPP_EMBED tokens in between |
10157 | CPP_NUMBER and CPP_COMMA tokens. Try to undo that here now that |
10158 | the whole initializer is parsed. E.g. if we have |
10159 | [0] = 'T', [1] = "his is a #embed tex", [20] = 't' |
10160 | where the middle value is RAW_DATA_CST and in its owner this is |
10161 | surrounded by 'T' and 't' characters, we can create from it just |
10162 | [0] = "This is a #embed text" |
10163 | Similarly if a RAW_DATA_CST needs to be split into two parts |
10164 | because of designated init store but the stored value is actually |
10165 | the same as in the RAW_DATA_OWNER's memory we can merge multiple |
10166 | RAW_DATA_CSTs. */ |
10167 | if (TREE_CODE (value) == RAW_DATA_CST |
10168 | && index |
10169 | && tree_fits_uhwi_p (index)) |
10170 | { |
10171 | tree owner = RAW_DATA_OWNER (value); |
10172 | unsigned int start, end, k; |
10173 | if (TREE_CODE (owner) == STRING_CST) |
10174 | { |
10175 | start |
10176 | = RAW_DATA_POINTER (value) - TREE_STRING_POINTER (owner); |
10177 | end = TREE_STRING_LENGTH (owner) - RAW_DATA_LENGTH (value); |
10178 | } |
10179 | else |
10180 | { |
10181 | gcc_checking_assert (TREE_CODE (owner) == RAW_DATA_CST); |
10182 | start |
10183 | = RAW_DATA_POINTER (value) - RAW_DATA_POINTER (owner); |
10184 | end = RAW_DATA_LENGTH (owner) - RAW_DATA_LENGTH (value); |
10185 | } |
10186 | end -= start; |
10187 | unsigned HOST_WIDE_INT l = j == HOST_WIDE_INT_M1U ? i : j; |
10188 | for (k = 0; k < start && k < l; ++k) |
10189 | { |
10190 | constructor_elt *elt = CONSTRUCTOR_ELT (ctor, l - k - 1); |
10191 | if (elt->index == NULL_TREE |
10192 | || !tree_fits_uhwi_p (elt->index) |
10193 | || !tree_fits_shwi_p (elt->value) |
10194 | || wi::to_widest (index) != (wi::to_widest (elt->index) |
10195 | + (k + 1))) |
10196 | break; |
10197 | if (TYPE_UNSIGNED (TREE_TYPE (value))) |
10198 | { |
10199 | if (tree_to_shwi (elt->value) |
10200 | != *((const unsigned char *) |
10201 | RAW_DATA_POINTER (value) - k - 1)) |
10202 | break; |
10203 | } |
10204 | else if (tree_to_shwi (elt->value) |
10205 | != *((const signed char *) |
10206 | RAW_DATA_POINTER (value) - k - 1)) |
10207 | break; |
10208 | } |
10209 | start = k; |
10210 | l = 0; |
10211 | for (k = 0; k < end && k + 1 < CONSTRUCTOR_NELTS (ctor) - i; ++k) |
10212 | { |
10213 | constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i + k + 1); |
10214 | if (elt->index == NULL_TREE |
10215 | || !tree_fits_uhwi_p (elt->index) |
10216 | || (wi::to_widest (elt->index) |
10217 | != (wi::to_widest (index) |
10218 | + (RAW_DATA_LENGTH (value) + l)))) |
10219 | break; |
10220 | if (TREE_CODE (elt->value) == RAW_DATA_CST |
10221 | && RAW_DATA_OWNER (elt->value) == RAW_DATA_OWNER (value) |
10222 | && (RAW_DATA_POINTER (elt->value) |
10223 | == RAW_DATA_POINTER (value) + l)) |
10224 | { |
10225 | l += RAW_DATA_LENGTH (elt->value); |
10226 | end -= RAW_DATA_LENGTH (elt->value) - 1; |
10227 | continue; |
10228 | } |
10229 | if (!tree_fits_shwi_p (elt->value)) |
10230 | break; |
10231 | if (TYPE_UNSIGNED (TREE_TYPE (value))) |
10232 | { |
10233 | if (tree_to_shwi (elt->value) |
10234 | != *((const unsigned char *) |
10235 | RAW_DATA_POINTER (value) |
10236 | + RAW_DATA_LENGTH (value) + k)) |
10237 | break; |
10238 | } |
10239 | else if (tree_to_shwi (elt->value) |
10240 | != *((const signed char *) |
10241 | RAW_DATA_POINTER (value) |
10242 | + RAW_DATA_LENGTH (value) + k)) |
10243 | break; |
10244 | ++l; |
10245 | } |
10246 | end = k; |
10247 | if (start != 0 || end != 0) |
10248 | { |
10249 | if (j == HOST_WIDE_INT_M1U) |
10250 | j = i - start; |
10251 | else |
10252 | j -= start; |
10253 | RAW_DATA_POINTER (value) -= start; |
10254 | RAW_DATA_LENGTH (value) += start + end; |
10255 | i += end; |
10256 | if (start == 0) |
10257 | CONSTRUCTOR_ELT (ctor, j)->index = index; |
10258 | CONSTRUCTOR_ELT (ctor, j)->value = value; |
10259 | ++j; |
10260 | continue; |
10261 | } |
10262 | } |
10263 | if (j != HOST_WIDE_INT_M1U) |
10264 | { |
10265 | CONSTRUCTOR_ELT (ctor, j)->index = index; |
10266 | CONSTRUCTOR_ELT (ctor, j)->value = value; |
10267 | ++j; |
10268 | } |
10269 | continue; |
10270 | } |
10271 | |
10272 | unsigned HOST_WIDE_INT idx = i; |
10273 | if (index) |
10274 | { |
10275 | if (!tree_fits_uhwi_p (index)) |
10276 | { |
10277 | check_raw_data = true; |
10278 | continue; |
10279 | } |
10280 | idx = tree_to_uhwi (index); |
10281 | } |
10282 | |
10283 | /* auto_vec is limited to UINT_MAX elements. */ |
10284 | if (idx > UINT_MAX) |
10285 | { |
10286 | check_raw_data = true; |
10287 | continue; |
10288 | } |
10289 | |
10290 | /* Avoid non-constant initializers. */ |
10291 | if (!tree_fits_shwi_p (value)) |
10292 | { |
10293 | check_raw_data = true; |
10294 | --i; |
10295 | continue; |
10296 | } |
10297 | |
10298 | /* Skip over embedded nuls except the last one (initializer |
10299 | elements are in ascending order of indices). */ |
10300 | HOST_WIDE_INT val = tree_to_shwi (value); |
10301 | if (!val && i + 1 < nelts) |
10302 | continue; |
10303 | |
10304 | if (idx < str.length ()) |
10305 | { |
10306 | check_raw_data = true; |
10307 | continue; |
10308 | } |
10309 | |
10310 | /* Bail if the CTOR has a block of more than 256 embedded nuls |
10311 | due to implicitly initialized elements. */ |
10312 | unsigned nchars = (idx - str.length ()) + 1; |
10313 | if (nchars > 256) |
10314 | { |
10315 | check_raw_data = true; |
10316 | continue; |
10317 | } |
10318 | |
10319 | if (nchars > 1) |
10320 | { |
10321 | str.reserve (idx); |
10322 | str.quick_grow_cleared (idx); |
10323 | } |
10324 | |
10325 | if (idx >= maxelts) |
10326 | { |
10327 | check_raw_data = true; |
10328 | continue; |
10329 | } |
10330 | |
10331 | str.safe_insert (ix: idx, obj: val); |
10332 | } |
10333 | |
10334 | if (check_raw_data) |
10335 | { |
10336 | if (j != HOST_WIDE_INT_M1U) |
10337 | CONSTRUCTOR_ELTS (ctor)->truncate (size: j); |
10338 | return ctor; |
10339 | } |
10340 | |
10341 | /* Append a nul string termination. */ |
10342 | if (maxelts != HOST_WIDE_INT_M1U && str.length () < maxelts) |
10343 | str.safe_push (obj: 0); |
10344 | |
10345 | /* Build a STRING_CST with the same type as the array. */ |
10346 | tree res = build_string (str.length (), str.begin ()); |
10347 | TREE_TYPE (res) = type; |
10348 | return res; |
10349 | } |
10350 | |
/* Implementation of the two-argument braced_lists_to_strings below, with
   the same arguments plus MEMBER, which is set for struct members to
   allow initializers for flexible array members.  */

static tree
braced_lists_to_strings (tree type, tree ctor, bool member)
{
  if (TREE_CODE (ctor) != CONSTRUCTOR)
    return ctor;

  tree_code code = TREE_CODE (type);

  tree ttp;
  if (code == ARRAY_TYPE)
    ttp = TREE_TYPE (type);
  else if (code == RECORD_TYPE)
    {
      ttp = TREE_TYPE (ctor);
      if (TREE_CODE (ttp) == ARRAY_TYPE)
        {
          type = ttp;
          ttp = TREE_TYPE (ttp);
        }
    }
  else
    return ctor;

  if ((TREE_CODE (ttp) == ARRAY_TYPE || TREE_CODE (ttp) == INTEGER_TYPE)
      && TYPE_STRING_FLAG (ttp))
    return braced_list_to_string (type, ctor, member);

  code = TREE_CODE (ttp);
  if (code == ARRAY_TYPE || RECORD_OR_UNION_TYPE_P (ttp))
    {
      bool rec = RECORD_OR_UNION_TYPE_P (ttp);

      /* Handle array of arrays or struct member initializers.  */
      tree val;
      unsigned HOST_WIDE_INT idx;
      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), idx, val)
        {
          val = braced_lists_to_strings (ttp, val, rec);
          CONSTRUCTOR_ELT (ctor, idx)->value = val;
        }
    }

  return ctor;
}

/* Attempt to convert a CTOR containing braced array initializer lists
   for array TYPE into one containing STRING_CSTs, for convenience and
   efficiency.  Recurse for arrays of arrays and member initializers.
   Return the converted CTOR or STRING_CST on success or the original
   CTOR otherwise.  */
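/* Illustrative example (added note, not part of the original comment):
   given
     const char a[2][4] = { { 'f', 'o', 'o', 0 }, { 'b', 'a', 'r', 0 } };
   the two inner braced lists can be replaced by the STRING_CSTs "foo"
   and "bar", leaving the outer CTOR with two STRING_CST elements.  */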

tree
braced_lists_to_strings (tree type, tree ctor)
{
  return braced_lists_to_strings (type, ctor, false);
}


/* Emit debug for functions before finalizing early debug.  */

void
c_common_finalize_early_debug (void)
{
  /* Emit early debug for reachable functions, and by consequence,
     locally scoped symbols.  Also emit debug for extern declared
     functions that are still reachable at this point.  */
  struct cgraph_node *cnode;
  FOR_EACH_FUNCTION (cnode)
    if (!cnode->alias && !cnode->thunk
        && (cnode->has_gimple_body_p ()
            || !DECL_IS_UNDECLARED_BUILTIN (cnode->decl)))
      (*debug_hooks->early_global_decl) (cnode->decl);
}

/* Determine whether TYPE is an ISO C99 flexible array member type "[]".  */
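/* Added note: the member DATA below has such a type, i.e. an ARRAY_TYPE
   with no TYPE_SIZE whose TYPE_DOMAIN lacks a TYPE_MAX_VALUE:
     struct s { int n; char data[]; };  */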
bool
c_flexible_array_member_type_p (const_tree type)
{
  if (TREE_CODE (type) == ARRAY_TYPE
      && TYPE_SIZE (type) == NULL_TREE
      && TYPE_DOMAIN (type) != NULL_TREE
      && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE)
    return true;

  return false;
}

/* Return the strict_flex_array LEVEL for ARRAY_FIELD, based on the value
   of the strict_flex_array attribute on the field and on
   flag_strict_flex_arrays (-fstrict-flex-arrays).  */
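/* Added note, summarizing the strict_flex_array/-fstrict-flex-arrays
   documentation (see the GCC manual for the authoritative wording):
   level 0 treats any trailing array as a flexible array member, level 1
   accepts only "[]", "[0]" and "[1]", level 2 only "[]" and "[0]", and
   level 3 only "[]".  The attribute is written on the field itself, e.g.
     struct s {
       int n;
       int tail[1] __attribute__ ((strict_flex_array (3)));
     };  */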
unsigned int
c_strict_flex_array_level_of (tree array_field)
{
  gcc_assert (TREE_CODE (array_field) == FIELD_DECL);
  unsigned int strict_flex_array_level = flag_strict_flex_arrays;

  tree attr_strict_flex_array
    = lookup_attribute ("strict_flex_array", DECL_ATTRIBUTES (array_field));
  /* If there is a strict_flex_array attribute attached to the field,
     override the flag_strict_flex_arrays.  */
  if (attr_strict_flex_array)
    {
      /* Get the value of the level first from the attribute.  */
      unsigned HOST_WIDE_INT attr_strict_flex_array_level = 0;
      gcc_assert (TREE_VALUE (attr_strict_flex_array) != NULL_TREE);
      attr_strict_flex_array = TREE_VALUE (attr_strict_flex_array);
      gcc_assert (TREE_VALUE (attr_strict_flex_array) != NULL_TREE);
      attr_strict_flex_array = TREE_VALUE (attr_strict_flex_array);
      gcc_assert (tree_fits_uhwi_p (attr_strict_flex_array));
      attr_strict_flex_array_level = tree_to_uhwi (attr_strict_flex_array);

      /* The attribute has higher priority than flag_strict_flex_arrays.  */
      strict_flex_array_level = attr_strict_flex_array_level;
    }
  return strict_flex_array_level;
}

/* Map from identifiers to booleans.  Value is true for features, and
   false for extensions.  Used to implement __has_{feature,extension}.  */

using feature_map_t = hash_map <tree, bool>;
static feature_map_t *feature_map;

/* Register a feature for __has_{feature,extension}.  FEATURE_P is true
   if the feature identified by NAME is a feature (as opposed to an
   extension).  */

void
c_common_register_feature (const char *name, bool feature_p)
{
  bool dup = feature_map->put (get_identifier (name), feature_p);
  gcc_checking_assert (!dup);
}

/* Lazily initialize hash table for __has_{feature,extension},
   dispatching to the appropriate front end to register language-specific
   features.  */
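/* Added note: entries in has_feature_table whose flags include
   HF_FLAG_SANITIZE are registered only when the matching -fsanitize=
   bits in their mask are active, so sanitizer-related features (such as
   an address_sanitizer entry, assuming the table provides one) are
   reported by __has_feature only in instrumented compilations.  */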

static void
init_has_feature ()
{
  gcc_checking_assert (!feature_map);
  feature_map = new feature_map_t;

  for (unsigned i = 0; i < ARRAY_SIZE (has_feature_table); i++)
    {
      const hf_feature_info *info = has_feature_table + i;

      if ((info->flags & HF_FLAG_SANITIZE) && !(flag_sanitize & info->mask))
        continue;

      const bool feature_p = !(info->flags & HF_FLAG_EXT);
      c_common_register_feature (info->ident, feature_p);
    }

  /* Register language-specific features.  */
  c_family_register_lang_features ();
}

/* If STRICT_P is true, evaluate __has_feature (IDENT).
   Otherwise, evaluate __has_extension (IDENT).  */
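/* Added note: identifiers that were never registered make both forms
   evaluate to false; for registered identifiers, __has_extension accepts
   features as well as extensions, whereas __has_feature is true only for
   entries registered with FEATURE_P set.  */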

bool
has_feature_p (const char *ident, bool strict_p)
{
  if (!feature_map)
    init_has_feature ();

  tree name = canonicalize_attr_name (get_identifier (ident));
  bool *feat_p = feature_map->get (name);
  if (!feat_p)
    return false;

  return !strict_p || *feat_p;
}

/* This is the slow path of c-common.h's c_hardbool_type_attr.  */
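/* Added note: the lookups below rely on hardbool types being laid out as
   enumeral types whose first two TYPE_VALUES entries carry the
   representation values of false and true, respectively.  */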

tree
c_hardbool_type_attr_1 (tree type, tree *false_value, tree *true_value)
{
  tree attr = lookup_attribute ("hardbool", TYPE_ATTRIBUTES (type));
  if (!attr)
    return attr;

  if (false_value)
    *false_value = TREE_VALUE (TYPE_VALUES (type));

  if (true_value)
    *true_value = TREE_VALUE (TREE_CHAIN (TYPE_VALUES (type)));

  return attr;
}

#include "gt-c-family-c-common.h"