/* Build expressions with type checking for C compiler.
   Copyright (C) 1987-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
19 | |
20 | |
/* This file is part of the C front end.
   It contains routines to build C expressions given their operands,
   including computing the types of the result, C-specific error checks,
   and some optimization.  */
25 | |
26 | #include "config.h" |
27 | #include "system.h" |
28 | #include "coretypes.h" |
29 | #include "memmodel.h" |
30 | #include "target.h" |
31 | #include "function.h" |
32 | #include "bitmap.h" |
33 | #include "c-tree.h" |
34 | #include "gimple-expr.h" |
35 | #include "predict.h" |
36 | #include "stor-layout.h" |
37 | #include "trans-mem.h" |
38 | #include "varasm.h" |
39 | #include "stmt.h" |
40 | #include "langhooks.h" |
41 | #include "c-lang.h" |
42 | #include "intl.h" |
43 | #include "tree-iterator.h" |
44 | #include "gimplify.h" |
45 | #include "tree-inline.h" |
46 | #include "omp-general.h" |
47 | #include "c-family/c-objc.h" |
48 | #include "c-family/c-ubsan.h" |
49 | #include "gomp-constants.h" |
50 | #include "spellcheck-tree.h" |
51 | #include "gcc-rich-location.h" |
52 | #include "stringpool.h" |
53 | #include "attribs.h" |
54 | #include "asan.h" |
55 | #include "realmpfr.h" |
56 | |
/* Possible cases of implicit conversions.  Used to select diagnostic
   messages and control folding initializers in convert_for_assignment.  */
enum impl_conv {
  ic_argpass,    /* Converting an argument in a function call.  */
  ic_assign,     /* Converting the RHS of a simple assignment.  */
  ic_init,       /* Converting an initializer.  */
  ic_init_const, /* Converting an initializer in a context that requires
		    a constant (inferred from the name — the distinction
		    is applied in convert_for_assignment).  */
  ic_return      /* Converting a function return value.  */
};
66 | |
67 | /* The level of nesting inside "__alignof__". */ |
68 | int in_alignof; |
69 | |
70 | /* The level of nesting inside "sizeof". */ |
71 | int in_sizeof; |
72 | |
73 | /* The level of nesting inside "typeof". */ |
74 | int in_typeof; |
75 | |
76 | /* True when parsing OpenMP loop expressions. */ |
77 | bool c_in_omp_for; |
78 | |
79 | /* The argument of last parsed sizeof expression, only to be tested |
80 | if expr.original_code == SIZEOF_EXPR. */ |
81 | tree c_last_sizeof_arg; |
82 | location_t c_last_sizeof_loc; |
83 | |
84 | /* Nonzero if we might need to print a "missing braces around |
85 | initializer" message within this initializer. */ |
86 | static int found_missing_braces; |
87 | |
88 | static bool require_constant_value; |
89 | static bool require_constant_elements; |
90 | static bool require_constexpr_value; |
91 | |
92 | static tree qualify_type (tree, tree); |
93 | struct comptypes_data; |
94 | static bool tagged_types_tu_compatible_p (const_tree, const_tree, |
95 | struct comptypes_data *); |
96 | static bool comp_target_types (location_t, tree, tree); |
97 | static bool function_types_compatible_p (const_tree, const_tree, |
98 | struct comptypes_data *); |
99 | static bool type_lists_compatible_p (const_tree, const_tree, |
100 | struct comptypes_data *); |
101 | static tree lookup_field (tree, tree); |
102 | static int convert_arguments (location_t, vec<location_t>, tree, |
103 | vec<tree, va_gc> *, vec<tree, va_gc> *, tree, |
104 | tree); |
105 | static tree pointer_diff (location_t, tree, tree, tree *); |
106 | static tree convert_for_assignment (location_t, location_t, tree, tree, tree, |
107 | enum impl_conv, bool, tree, tree, int, |
108 | int = 0); |
109 | static tree valid_compound_expr_initializer (tree, tree); |
110 | static void push_string (const char *); |
111 | static void push_member_name (tree); |
112 | static int spelling_length (void); |
113 | static char *print_spelling (char *); |
114 | static void warning_init (location_t, int, const char *); |
115 | static tree digest_init (location_t, tree, tree, tree, bool, bool, bool, bool, |
116 | bool, bool); |
117 | static void output_init_element (location_t, tree, tree, bool, tree, tree, bool, |
118 | bool, struct obstack *); |
119 | static void output_pending_init_elements (int, struct obstack *); |
120 | static bool set_designator (location_t, bool, struct obstack *); |
121 | static void push_range_stack (tree, struct obstack *); |
122 | static void add_pending_init (location_t, tree, tree, tree, bool, |
123 | struct obstack *); |
124 | static void set_nonincremental_init (struct obstack *); |
125 | static void set_nonincremental_init_from_string (tree, struct obstack *); |
126 | static tree find_init_member (tree, struct obstack *); |
127 | static void readonly_warning (tree, enum lvalue_use); |
128 | static int lvalue_or_else (location_t, const_tree, enum lvalue_use); |
129 | static void record_maybe_used_decl (tree); |
130 | static bool comptypes_internal (const_tree, const_tree, |
131 | struct comptypes_data *data); |
132 | |
133 | /* Return true if EXP is a null pointer constant, false otherwise. */ |
134 | |
135 | bool |
136 | null_pointer_constant_p (const_tree expr) |
137 | { |
138 | /* This should really operate on c_expr structures, but they aren't |
139 | yet available everywhere required. */ |
140 | tree type = TREE_TYPE (expr); |
141 | |
142 | /* An integer constant expression with the value 0, such an expression |
143 | cast to type void*, or the predefined constant nullptr, are a null |
144 | pointer constant. */ |
145 | if (expr == nullptr_node) |
146 | return true; |
147 | |
148 | return (TREE_CODE (expr) == INTEGER_CST |
149 | && !TREE_OVERFLOW (expr) |
150 | && integer_zerop (expr) |
151 | && (INTEGRAL_TYPE_P (type) |
152 | || (TREE_CODE (type) == POINTER_TYPE |
153 | && VOID_TYPE_P (TREE_TYPE (type)) |
154 | && TYPE_QUALS (TREE_TYPE (type)) == TYPE_UNQUALIFIED))); |
155 | } |
156 | |
157 | /* EXPR may appear in an unevaluated part of an integer constant |
158 | expression, but not in an evaluated part. Wrap it in a |
159 | C_MAYBE_CONST_EXPR, or mark it with TREE_OVERFLOW if it is just an |
160 | INTEGER_CST and we cannot create a C_MAYBE_CONST_EXPR. */ |
161 | |
162 | static tree |
163 | note_integer_operands (tree expr) |
164 | { |
165 | tree ret; |
166 | if (TREE_CODE (expr) == INTEGER_CST && in_late_binary_op) |
167 | { |
168 | ret = copy_node (expr); |
169 | TREE_OVERFLOW (ret) = 1; |
170 | } |
171 | else |
172 | { |
173 | ret = build2 (C_MAYBE_CONST_EXPR, TREE_TYPE (expr), NULL_TREE, expr); |
174 | C_MAYBE_CONST_EXPR_INT_OPERANDS (ret) = 1; |
175 | } |
176 | return ret; |
177 | } |
178 | |
179 | /* Having checked whether EXPR may appear in an unevaluated part of an |
180 | integer constant expression and found that it may, remove any |
181 | C_MAYBE_CONST_EXPR noting this fact and return the resulting |
182 | expression. */ |
183 | |
184 | static inline tree |
185 | remove_c_maybe_const_expr (tree expr) |
186 | { |
187 | if (TREE_CODE (expr) == C_MAYBE_CONST_EXPR) |
188 | return C_MAYBE_CONST_EXPR_EXPR (expr); |
189 | else |
190 | return expr; |
191 | } |
192 | |
193 | /* This is a cache to hold if two types are compatible or not. */ |
194 | |
195 | struct tagged_tu_seen_cache { |
196 | const struct tagged_tu_seen_cache * next; |
197 | const_tree t1; |
198 | const_tree t2; |
199 | /* The return value of tagged_types_tu_compatible_p if we had seen |
200 | these two types already. */ |
201 | int val; |
202 | }; |
203 | |
204 | static const struct tagged_tu_seen_cache * tagged_tu_seen_base; |
205 | static void free_all_tagged_tu_seen_up_to (const struct tagged_tu_seen_cache *); |
206 | |
207 | /* Do `exp = require_complete_type (loc, exp);' to make sure exp |
208 | does not have an incomplete type. (That includes void types.) |
209 | LOC is the location of the use. */ |
210 | |
211 | tree |
212 | require_complete_type (location_t loc, tree value) |
213 | { |
214 | tree type = TREE_TYPE (value); |
215 | |
216 | if (error_operand_p (t: value)) |
217 | return error_mark_node; |
218 | |
219 | /* First, detect a valid value with a complete type. */ |
220 | if (COMPLETE_TYPE_P (type)) |
221 | return value; |
222 | |
223 | c_incomplete_type_error (loc, value, type); |
224 | return error_mark_node; |
225 | } |
226 | |
227 | /* Print an error message for invalid use of an incomplete type. |
228 | VALUE is the expression that was used (or 0 if that isn't known) |
229 | and TYPE is the type that was invalid. LOC is the location for |
230 | the error. */ |
231 | |
232 | void |
233 | c_incomplete_type_error (location_t loc, const_tree value, const_tree type) |
234 | { |
235 | /* Avoid duplicate error message. */ |
236 | if (TREE_CODE (type) == ERROR_MARK) |
237 | return; |
238 | |
239 | if (value != NULL_TREE && (VAR_P (value) || TREE_CODE (value) == PARM_DECL)) |
240 | error_at (loc, "%qD has an incomplete type %qT" , value, type); |
241 | else |
242 | { |
243 | retry: |
244 | /* We must print an error message. Be clever about what it says. */ |
245 | |
246 | switch (TREE_CODE (type)) |
247 | { |
248 | case RECORD_TYPE: |
249 | case UNION_TYPE: |
250 | case ENUMERAL_TYPE: |
251 | break; |
252 | |
253 | case VOID_TYPE: |
254 | error_at (loc, "invalid use of void expression" ); |
255 | return; |
256 | |
257 | case ARRAY_TYPE: |
258 | if (TYPE_DOMAIN (type)) |
259 | { |
260 | if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL) |
261 | { |
262 | error_at (loc, "invalid use of flexible array member" ); |
263 | return; |
264 | } |
265 | type = TREE_TYPE (type); |
266 | goto retry; |
267 | } |
268 | error_at (loc, "invalid use of array with unspecified bounds" ); |
269 | return; |
270 | |
271 | default: |
272 | gcc_unreachable (); |
273 | } |
274 | |
275 | if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE) |
276 | error_at (loc, "invalid use of undefined type %qT" , type); |
277 | else |
278 | /* If this type has a typedef-name, the TYPE_NAME is a TYPE_DECL. */ |
279 | error_at (loc, "invalid use of incomplete typedef %qT" , type); |
280 | } |
281 | } |
282 | |
283 | /* Given a type, apply default promotions wrt unnamed function |
284 | arguments and return the new type. */ |
285 | |
286 | tree |
287 | c_type_promotes_to (tree type) |
288 | { |
289 | tree ret = NULL_TREE; |
290 | |
291 | if (TYPE_MAIN_VARIANT (type) == float_type_node) |
292 | ret = double_type_node; |
293 | else if (c_promoting_integer_type_p (type)) |
294 | { |
295 | /* Preserve unsignedness if not really getting any wider. */ |
296 | if (TYPE_UNSIGNED (type) |
297 | && (TYPE_PRECISION (type) == TYPE_PRECISION (integer_type_node))) |
298 | ret = unsigned_type_node; |
299 | else |
300 | ret = integer_type_node; |
301 | } |
302 | |
303 | if (ret != NULL_TREE) |
304 | return (TYPE_ATOMIC (type) |
305 | ? c_build_qualified_type (ret, TYPE_QUAL_ATOMIC) |
306 | : ret); |
307 | |
308 | return type; |
309 | } |
310 | |
311 | /* Return true if between two named address spaces, whether there is a superset |
312 | named address space that encompasses both address spaces. If there is a |
313 | superset, return which address space is the superset. */ |
314 | |
315 | static bool |
316 | addr_space_superset (addr_space_t as1, addr_space_t as2, addr_space_t *common) |
317 | { |
318 | if (as1 == as2) |
319 | { |
320 | *common = as1; |
321 | return true; |
322 | } |
323 | else if (targetm.addr_space.subset_p (as1, as2)) |
324 | { |
325 | *common = as2; |
326 | return true; |
327 | } |
328 | else if (targetm.addr_space.subset_p (as2, as1)) |
329 | { |
330 | *common = as1; |
331 | return true; |
332 | } |
333 | else |
334 | return false; |
335 | } |
336 | |
337 | /* Return a variant of TYPE which has all the type qualifiers of LIKE |
338 | as well as those of TYPE. */ |
339 | |
340 | static tree |
341 | qualify_type (tree type, tree like) |
342 | { |
343 | addr_space_t as_type = TYPE_ADDR_SPACE (type); |
344 | addr_space_t as_like = TYPE_ADDR_SPACE (like); |
345 | addr_space_t as_common; |
346 | |
347 | /* If the two named address spaces are different, determine the common |
348 | superset address space. If there isn't one, raise an error. */ |
349 | if (!addr_space_superset (as1: as_type, as2: as_like, common: &as_common)) |
350 | { |
351 | as_common = as_type; |
352 | error ("%qT and %qT are in disjoint named address spaces" , |
353 | type, like); |
354 | } |
355 | |
356 | return c_build_qualified_type (type, |
357 | TYPE_QUALS_NO_ADDR_SPACE (type) |
358 | | TYPE_QUALS_NO_ADDR_SPACE_NO_ATOMIC (like) |
359 | | ENCODE_QUAL_ADDR_SPACE (as_common)); |
360 | } |
361 | |
362 | |
363 | /* If NTYPE is a type of a non-variadic function with a prototype |
364 | and OTYPE is a type of a function without a prototype and ATTRS |
365 | contains attribute format, diagnosess and removes it from ATTRS. |
366 | Returns the result of build_type_attribute_variant of NTYPE and |
367 | the (possibly) modified ATTRS. */ |
368 | |
369 | static tree |
370 | build_functype_attribute_variant (tree ntype, tree otype, tree attrs) |
371 | { |
372 | if (!prototype_p (otype) |
373 | && prototype_p (ntype) |
374 | && lookup_attribute (attr_name: "format" , list: attrs)) |
375 | { |
376 | warning_at (input_location, OPT_Wattributes, |
377 | "%qs attribute cannot be applied to a function that " |
378 | "does not take variable arguments" , "format" ); |
379 | attrs = remove_attribute ("format" , attrs); |
380 | } |
381 | return build_type_attribute_variant (ntype, attrs); |
382 | |
383 | } |
384 | /* Return the composite type of two compatible types. |
385 | |
386 | We assume that comptypes has already been done and returned |
387 | nonzero; if that isn't so, this may crash. In particular, we |
388 | assume that qualifiers match. */ |
389 | |
390 | tree |
391 | composite_type (tree t1, tree t2) |
392 | { |
393 | enum tree_code code1; |
394 | enum tree_code code2; |
395 | tree attributes; |
396 | |
397 | /* Save time if the two types are the same. */ |
398 | |
399 | if (t1 == t2) return t1; |
400 | |
401 | /* If one type is nonsense, use the other. */ |
402 | if (t1 == error_mark_node) |
403 | return t2; |
404 | if (t2 == error_mark_node) |
405 | return t1; |
406 | |
407 | code1 = TREE_CODE (t1); |
408 | code2 = TREE_CODE (t2); |
409 | |
410 | /* Merge the attributes. */ |
411 | attributes = targetm.merge_type_attributes (t1, t2); |
412 | |
413 | /* If one is an enumerated type and the other is the compatible |
414 | integer type, the composite type might be either of the two |
415 | (DR#013 question 3). For consistency, use the enumerated type as |
416 | the composite type. */ |
417 | |
418 | if (code1 == ENUMERAL_TYPE |
419 | && (code2 == INTEGER_TYPE |
420 | || code2 == BOOLEAN_TYPE)) |
421 | return t1; |
422 | if (code2 == ENUMERAL_TYPE |
423 | && (code1 == INTEGER_TYPE |
424 | || code1 == BOOLEAN_TYPE)) |
425 | return t2; |
426 | |
427 | gcc_assert (code1 == code2); |
428 | |
429 | switch (code1) |
430 | { |
431 | case POINTER_TYPE: |
432 | /* For two pointers, do this recursively on the target type. */ |
433 | { |
434 | tree pointed_to_1 = TREE_TYPE (t1); |
435 | tree pointed_to_2 = TREE_TYPE (t2); |
436 | tree target = composite_type (t1: pointed_to_1, t2: pointed_to_2); |
437 | t1 = build_pointer_type_for_mode (target, TYPE_MODE (t1), false); |
438 | t1 = build_type_attribute_variant (t1, attributes); |
439 | return qualify_type (type: t1, like: t2); |
440 | } |
441 | |
442 | case ARRAY_TYPE: |
443 | { |
444 | tree elt = composite_type (TREE_TYPE (t1), TREE_TYPE (t2)); |
445 | int quals; |
446 | tree unqual_elt; |
447 | tree d1 = TYPE_DOMAIN (t1); |
448 | tree d2 = TYPE_DOMAIN (t2); |
449 | bool d1_variable, d2_variable; |
450 | bool d1_zero, d2_zero; |
451 | bool t1_complete, t2_complete; |
452 | |
453 | /* We should not have any type quals on arrays at all. */ |
454 | gcc_assert (!TYPE_QUALS_NO_ADDR_SPACE (t1) |
455 | && !TYPE_QUALS_NO_ADDR_SPACE (t2)); |
456 | |
457 | t1_complete = COMPLETE_TYPE_P (t1); |
458 | t2_complete = COMPLETE_TYPE_P (t2); |
459 | |
460 | d1_zero = d1 == NULL_TREE || !TYPE_MAX_VALUE (d1); |
461 | d2_zero = d2 == NULL_TREE || !TYPE_MAX_VALUE (d2); |
462 | |
463 | d1_variable = (!d1_zero |
464 | && (TREE_CODE (TYPE_MIN_VALUE (d1)) != INTEGER_CST |
465 | || TREE_CODE (TYPE_MAX_VALUE (d1)) != INTEGER_CST)); |
466 | d2_variable = (!d2_zero |
467 | && (TREE_CODE (TYPE_MIN_VALUE (d2)) != INTEGER_CST |
468 | || TREE_CODE (TYPE_MAX_VALUE (d2)) != INTEGER_CST)); |
469 | d1_variable = d1_variable || (d1_zero && C_TYPE_VARIABLE_SIZE (t1)); |
470 | d2_variable = d2_variable || (d2_zero && C_TYPE_VARIABLE_SIZE (t2)); |
471 | |
472 | /* Save space: see if the result is identical to one of the args. */ |
473 | if (elt == TREE_TYPE (t1) && TYPE_DOMAIN (t1) |
474 | && (d2_variable || d2_zero || !d1_variable)) |
475 | return build_type_attribute_variant (t1, attributes); |
476 | if (elt == TREE_TYPE (t2) && TYPE_DOMAIN (t2) |
477 | && (d1_variable || d1_zero || !d2_variable)) |
478 | return build_type_attribute_variant (t2, attributes); |
479 | |
480 | if (elt == TREE_TYPE (t1) && !TYPE_DOMAIN (t2) && !TYPE_DOMAIN (t1)) |
481 | return build_type_attribute_variant (t1, attributes); |
482 | if (elt == TREE_TYPE (t2) && !TYPE_DOMAIN (t2) && !TYPE_DOMAIN (t1)) |
483 | return build_type_attribute_variant (t2, attributes); |
484 | |
485 | /* Merge the element types, and have a size if either arg has |
486 | one. We may have qualifiers on the element types. To set |
487 | up TYPE_MAIN_VARIANT correctly, we need to form the |
488 | composite of the unqualified types and add the qualifiers |
489 | back at the end. */ |
490 | quals = TYPE_QUALS (strip_array_types (elt)); |
491 | unqual_elt = c_build_qualified_type (elt, TYPE_UNQUALIFIED); |
492 | t1 = build_array_type (unqual_elt, |
493 | TYPE_DOMAIN ((TYPE_DOMAIN (t1) |
494 | && (d2_variable |
495 | || d2_zero |
496 | || !d1_variable)) |
497 | ? t1 |
498 | : t2)); |
499 | /* Ensure a composite type involving a zero-length array type |
500 | is a zero-length type not an incomplete type. */ |
501 | if (d1_zero && d2_zero |
502 | && (t1_complete || t2_complete) |
503 | && !COMPLETE_TYPE_P (t1)) |
504 | { |
505 | TYPE_SIZE (t1) = bitsize_zero_node; |
506 | TYPE_SIZE_UNIT (t1) = size_zero_node; |
507 | } |
508 | t1 = c_build_qualified_type (t1, quals); |
509 | return build_type_attribute_variant (t1, attributes); |
510 | } |
511 | |
512 | case ENUMERAL_TYPE: |
513 | case RECORD_TYPE: |
514 | case UNION_TYPE: |
515 | if (attributes != NULL) |
516 | { |
517 | /* Try harder not to create a new aggregate type. */ |
518 | if (attribute_list_equal (TYPE_ATTRIBUTES (t1), attributes)) |
519 | return t1; |
520 | if (attribute_list_equal (TYPE_ATTRIBUTES (t2), attributes)) |
521 | return t2; |
522 | } |
523 | return build_type_attribute_variant (t1, attributes); |
524 | |
525 | case FUNCTION_TYPE: |
526 | /* Function types: prefer the one that specified arg types. |
527 | If both do, merge the arg types. Also merge the return types. */ |
528 | { |
529 | tree valtype = composite_type (TREE_TYPE (t1), TREE_TYPE (t2)); |
530 | tree p1 = TYPE_ARG_TYPES (t1); |
531 | tree p2 = TYPE_ARG_TYPES (t2); |
532 | int len; |
533 | tree newargs, n; |
534 | int i; |
535 | |
536 | /* Save space: see if the result is identical to one of the args. */ |
537 | if (valtype == TREE_TYPE (t1) && !TYPE_ARG_TYPES (t2)) |
538 | return build_functype_attribute_variant (ntype: t1, otype: t2, attrs: attributes); |
539 | if (valtype == TREE_TYPE (t2) && !TYPE_ARG_TYPES (t1)) |
540 | return build_functype_attribute_variant (ntype: t2, otype: t1, attrs: attributes); |
541 | |
542 | /* Simple way if one arg fails to specify argument types. */ |
543 | if (TYPE_ARG_TYPES (t1) == NULL_TREE) |
544 | { |
545 | t1 = build_function_type (valtype, TYPE_ARG_TYPES (t2), |
546 | TYPE_NO_NAMED_ARGS_STDARG_P (t2)); |
547 | t1 = build_type_attribute_variant (t1, attributes); |
548 | return qualify_type (type: t1, like: t2); |
549 | } |
550 | if (TYPE_ARG_TYPES (t2) == NULL_TREE) |
551 | { |
552 | t1 = build_function_type (valtype, TYPE_ARG_TYPES (t1), |
553 | TYPE_NO_NAMED_ARGS_STDARG_P (t1)); |
554 | t1 = build_type_attribute_variant (t1, attributes); |
555 | return qualify_type (type: t1, like: t2); |
556 | } |
557 | |
558 | /* If both args specify argument types, we must merge the two |
559 | lists, argument by argument. */ |
560 | |
561 | for (len = 0, newargs = p1; |
562 | newargs && newargs != void_list_node; |
563 | len++, newargs = TREE_CHAIN (newargs)) |
564 | ; |
565 | |
566 | for (i = 0; i < len; i++) |
567 | newargs = tree_cons (NULL_TREE, NULL_TREE, newargs); |
568 | |
569 | n = newargs; |
570 | |
571 | for (; p1 && p1 != void_list_node; |
572 | p1 = TREE_CHAIN (p1), p2 = TREE_CHAIN (p2), n = TREE_CHAIN (n)) |
573 | { |
574 | /* A null type means arg type is not specified. |
575 | Take whatever the other function type has. */ |
576 | if (TREE_VALUE (p1) == NULL_TREE) |
577 | { |
578 | TREE_VALUE (n) = TREE_VALUE (p2); |
579 | goto parm_done; |
580 | } |
581 | if (TREE_VALUE (p2) == NULL_TREE) |
582 | { |
583 | TREE_VALUE (n) = TREE_VALUE (p1); |
584 | goto parm_done; |
585 | } |
586 | |
587 | /* Given wait (union {union wait *u; int *i} *) |
588 | and wait (union wait *), |
589 | prefer union wait * as type of parm. */ |
590 | if (TREE_CODE (TREE_VALUE (p1)) == UNION_TYPE |
591 | && TREE_VALUE (p1) != TREE_VALUE (p2)) |
592 | { |
593 | tree memb; |
594 | tree mv2 = TREE_VALUE (p2); |
595 | if (mv2 && mv2 != error_mark_node |
596 | && TREE_CODE (mv2) != ARRAY_TYPE) |
597 | mv2 = TYPE_MAIN_VARIANT (mv2); |
598 | for (memb = TYPE_FIELDS (TREE_VALUE (p1)); |
599 | memb; memb = DECL_CHAIN (memb)) |
600 | { |
601 | tree mv3 = TREE_TYPE (memb); |
602 | if (mv3 && mv3 != error_mark_node |
603 | && TREE_CODE (mv3) != ARRAY_TYPE) |
604 | mv3 = TYPE_MAIN_VARIANT (mv3); |
605 | if (comptypes (mv3, mv2)) |
606 | { |
607 | TREE_VALUE (n) = composite_type (TREE_TYPE (memb), |
608 | TREE_VALUE (p2)); |
609 | pedwarn (input_location, OPT_Wpedantic, |
610 | "function types not truly compatible in ISO C" ); |
611 | goto parm_done; |
612 | } |
613 | } |
614 | } |
615 | if (TREE_CODE (TREE_VALUE (p2)) == UNION_TYPE |
616 | && TREE_VALUE (p2) != TREE_VALUE (p1)) |
617 | { |
618 | tree memb; |
619 | tree mv1 = TREE_VALUE (p1); |
620 | if (mv1 && mv1 != error_mark_node |
621 | && TREE_CODE (mv1) != ARRAY_TYPE) |
622 | mv1 = TYPE_MAIN_VARIANT (mv1); |
623 | for (memb = TYPE_FIELDS (TREE_VALUE (p2)); |
624 | memb; memb = DECL_CHAIN (memb)) |
625 | { |
626 | tree mv3 = TREE_TYPE (memb); |
627 | if (mv3 && mv3 != error_mark_node |
628 | && TREE_CODE (mv3) != ARRAY_TYPE) |
629 | mv3 = TYPE_MAIN_VARIANT (mv3); |
630 | if (comptypes (mv3, mv1)) |
631 | { |
632 | TREE_VALUE (n) = composite_type (TREE_TYPE (memb), |
633 | TREE_VALUE (p1)); |
634 | pedwarn (input_location, OPT_Wpedantic, |
635 | "function types not truly compatible in ISO C" ); |
636 | goto parm_done; |
637 | } |
638 | } |
639 | } |
640 | TREE_VALUE (n) = composite_type (TREE_VALUE (p1), TREE_VALUE (p2)); |
641 | parm_done: ; |
642 | } |
643 | |
644 | t1 = build_function_type (valtype, newargs); |
645 | t1 = qualify_type (type: t1, like: t2); |
646 | } |
647 | /* FALLTHRU */ |
648 | |
649 | default: |
650 | return build_type_attribute_variant (t1, attributes); |
651 | } |
652 | |
653 | } |
654 | |
655 | /* Return the type of a conditional expression between pointers to |
656 | possibly differently qualified versions of compatible types. |
657 | |
658 | We assume that comp_target_types has already been done and returned |
659 | true; if that isn't so, this may crash. */ |
660 | |
661 | static tree |
662 | common_pointer_type (tree t1, tree t2) |
663 | { |
664 | tree attributes; |
665 | tree pointed_to_1, mv1; |
666 | tree pointed_to_2, mv2; |
667 | tree target; |
668 | unsigned target_quals; |
669 | addr_space_t as1, as2, as_common; |
670 | int quals1, quals2; |
671 | |
672 | /* Save time if the two types are the same. */ |
673 | |
674 | if (t1 == t2) return t1; |
675 | |
676 | /* If one type is nonsense, use the other. */ |
677 | if (t1 == error_mark_node) |
678 | return t2; |
679 | if (t2 == error_mark_node) |
680 | return t1; |
681 | |
682 | gcc_assert (TREE_CODE (t1) == POINTER_TYPE |
683 | && TREE_CODE (t2) == POINTER_TYPE); |
684 | |
685 | /* Merge the attributes. */ |
686 | attributes = targetm.merge_type_attributes (t1, t2); |
687 | |
688 | /* Find the composite type of the target types, and combine the |
689 | qualifiers of the two types' targets. Do not lose qualifiers on |
690 | array element types by taking the TYPE_MAIN_VARIANT. */ |
691 | mv1 = pointed_to_1 = TREE_TYPE (t1); |
692 | mv2 = pointed_to_2 = TREE_TYPE (t2); |
693 | if (TREE_CODE (mv1) != ARRAY_TYPE) |
694 | mv1 = TYPE_MAIN_VARIANT (pointed_to_1); |
695 | if (TREE_CODE (mv2) != ARRAY_TYPE) |
696 | mv2 = TYPE_MAIN_VARIANT (pointed_to_2); |
697 | target = composite_type (t1: mv1, t2: mv2); |
698 | |
699 | /* Strip array types to get correct qualifier for pointers to arrays */ |
700 | quals1 = TYPE_QUALS_NO_ADDR_SPACE (strip_array_types (pointed_to_1)); |
701 | quals2 = TYPE_QUALS_NO_ADDR_SPACE (strip_array_types (pointed_to_2)); |
702 | |
703 | /* For function types do not merge const qualifiers, but drop them |
704 | if used inconsistently. The middle-end uses these to mark const |
705 | and noreturn functions. */ |
706 | if (TREE_CODE (pointed_to_1) == FUNCTION_TYPE) |
707 | target_quals = (quals1 & quals2); |
708 | else |
709 | target_quals = (quals1 | quals2); |
710 | |
711 | /* If the two named address spaces are different, determine the common |
712 | superset address space. This is guaranteed to exist due to the |
713 | assumption that comp_target_type returned true. */ |
714 | as1 = TYPE_ADDR_SPACE (pointed_to_1); |
715 | as2 = TYPE_ADDR_SPACE (pointed_to_2); |
716 | if (!addr_space_superset (as1, as2, common: &as_common)) |
717 | gcc_unreachable (); |
718 | |
719 | target_quals |= ENCODE_QUAL_ADDR_SPACE (as_common); |
720 | |
721 | t1 = build_pointer_type (c_build_qualified_type (target, target_quals)); |
722 | return build_type_attribute_variant (t1, attributes); |
723 | } |
724 | |
725 | /* Return the common type for two arithmetic types under the usual |
726 | arithmetic conversions. The default conversions have already been |
727 | applied, and enumerated types converted to their compatible integer |
728 | types. The resulting type is unqualified and has no attributes. |
729 | |
730 | This is the type for the result of most arithmetic operations |
731 | if the operands have the given two types. */ |
732 | |
733 | static tree |
734 | c_common_type (tree t1, tree t2) |
735 | { |
736 | enum tree_code code1; |
737 | enum tree_code code2; |
738 | |
739 | /* If one type is nonsense, use the other. */ |
740 | if (t1 == error_mark_node) |
741 | return t2; |
742 | if (t2 == error_mark_node) |
743 | return t1; |
744 | |
745 | if (TYPE_QUALS (t1) != TYPE_UNQUALIFIED) |
746 | t1 = TYPE_MAIN_VARIANT (t1); |
747 | |
748 | if (TYPE_QUALS (t2) != TYPE_UNQUALIFIED) |
749 | t2 = TYPE_MAIN_VARIANT (t2); |
750 | |
751 | if (TYPE_ATTRIBUTES (t1) != NULL_TREE) |
752 | { |
753 | tree attrs = affects_type_identity_attributes (TYPE_ATTRIBUTES (t1)); |
754 | t1 = build_type_attribute_variant (t1, attrs); |
755 | } |
756 | |
757 | if (TYPE_ATTRIBUTES (t2) != NULL_TREE) |
758 | { |
759 | tree attrs = affects_type_identity_attributes (TYPE_ATTRIBUTES (t2)); |
760 | t2 = build_type_attribute_variant (t2, attrs); |
761 | } |
762 | |
763 | /* Save time if the two types are the same. */ |
764 | |
765 | if (t1 == t2) return t1; |
766 | |
767 | code1 = TREE_CODE (t1); |
768 | code2 = TREE_CODE (t2); |
769 | |
770 | gcc_assert (code1 == VECTOR_TYPE || code1 == COMPLEX_TYPE |
771 | || code1 == FIXED_POINT_TYPE || code1 == REAL_TYPE |
772 | || code1 == INTEGER_TYPE || code1 == BITINT_TYPE); |
773 | gcc_assert (code2 == VECTOR_TYPE || code2 == COMPLEX_TYPE |
774 | || code2 == FIXED_POINT_TYPE || code2 == REAL_TYPE |
775 | || code2 == INTEGER_TYPE || code2 == BITINT_TYPE); |
776 | |
777 | /* When one operand is a decimal float type, the other operand cannot be |
778 | a generic float type or a complex type. We also disallow vector types |
779 | here. */ |
780 | if ((DECIMAL_FLOAT_TYPE_P (t1) || DECIMAL_FLOAT_TYPE_P (t2)) |
781 | && !(DECIMAL_FLOAT_TYPE_P (t1) && DECIMAL_FLOAT_TYPE_P (t2))) |
782 | { |
783 | if (code1 == VECTOR_TYPE || code2 == VECTOR_TYPE) |
784 | { |
785 | error ("cannot mix operands of decimal floating and vector types" ); |
786 | return error_mark_node; |
787 | } |
788 | if (code1 == COMPLEX_TYPE || code2 == COMPLEX_TYPE) |
789 | { |
790 | error ("cannot mix operands of decimal floating and complex types" ); |
791 | return error_mark_node; |
792 | } |
793 | if (code1 == REAL_TYPE && code2 == REAL_TYPE) |
794 | { |
795 | error ("cannot mix operands of decimal floating " |
796 | "and other floating types" ); |
797 | return error_mark_node; |
798 | } |
799 | } |
800 | |
801 | /* If one type is a vector type, return that type. (How the usual |
802 | arithmetic conversions apply to the vector types extension is not |
803 | precisely specified.) */ |
804 | if (code1 == VECTOR_TYPE) |
805 | return t1; |
806 | |
807 | if (code2 == VECTOR_TYPE) |
808 | return t2; |
809 | |
810 | /* If one type is complex, form the common type of the non-complex |
811 | components, then make that complex. Use T1 or T2 if it is the |
812 | required type. */ |
813 | if (code1 == COMPLEX_TYPE || code2 == COMPLEX_TYPE) |
814 | { |
815 | tree subtype1 = code1 == COMPLEX_TYPE ? TREE_TYPE (t1) : t1; |
816 | tree subtype2 = code2 == COMPLEX_TYPE ? TREE_TYPE (t2) : t2; |
817 | tree subtype = c_common_type (t1: subtype1, t2: subtype2); |
818 | |
819 | if (code1 == COMPLEX_TYPE && TREE_TYPE (t1) == subtype) |
820 | return t1; |
821 | else if (code2 == COMPLEX_TYPE && TREE_TYPE (t2) == subtype) |
822 | return t2; |
823 | else if (TREE_CODE (subtype) == BITINT_TYPE) |
824 | { |
825 | sorry ("%<_Complex _BitInt(%d)%> unsupported" , |
826 | TYPE_PRECISION (subtype)); |
827 | return code1 == COMPLEX_TYPE ? t1 : t2; |
828 | } |
829 | else |
830 | return build_complex_type (subtype); |
831 | } |
832 | |
833 | /* If only one is real, use it as the result. */ |
834 | |
835 | if (code1 == REAL_TYPE && code2 != REAL_TYPE) |
836 | return t1; |
837 | |
838 | if (code2 == REAL_TYPE && code1 != REAL_TYPE) |
839 | return t2; |
840 | |
841 | /* If both are real and either are decimal floating point types, use |
842 | the decimal floating point type with the greater precision. */ |
843 | |
844 | if (code1 == REAL_TYPE && code2 == REAL_TYPE) |
845 | { |
846 | if (TYPE_MAIN_VARIANT (t1) == dfloat128_type_node |
847 | || TYPE_MAIN_VARIANT (t2) == dfloat128_type_node) |
848 | return dfloat128_type_node; |
849 | else if (TYPE_MAIN_VARIANT (t1) == dfloat64_type_node |
850 | || TYPE_MAIN_VARIANT (t2) == dfloat64_type_node) |
851 | return dfloat64_type_node; |
852 | else if (TYPE_MAIN_VARIANT (t1) == dfloat32_type_node |
853 | || TYPE_MAIN_VARIANT (t2) == dfloat32_type_node) |
854 | return dfloat32_type_node; |
855 | } |
856 | |
857 | /* Deal with fixed-point types. */ |
858 | if (code1 == FIXED_POINT_TYPE || code2 == FIXED_POINT_TYPE) |
859 | { |
860 | unsigned int unsignedp = 0, satp = 0; |
861 | scalar_mode m1, m2; |
862 | unsigned int fbit1, ibit1, fbit2, ibit2, max_fbit, max_ibit; |
863 | |
864 | m1 = SCALAR_TYPE_MODE (t1); |
865 | m2 = SCALAR_TYPE_MODE (t2); |
866 | |
867 | /* If one input type is saturating, the result type is saturating. */ |
868 | if (TYPE_SATURATING (t1) || TYPE_SATURATING (t2)) |
869 | satp = 1; |
870 | |
871 | /* If both fixed-point types are unsigned, the result type is unsigned. |
872 | When mixing fixed-point and integer types, follow the sign of the |
873 | fixed-point type. |
874 | Otherwise, the result type is signed. */ |
875 | if ((TYPE_UNSIGNED (t1) && TYPE_UNSIGNED (t2) |
876 | && code1 == FIXED_POINT_TYPE && code2 == FIXED_POINT_TYPE) |
877 | || (code1 == FIXED_POINT_TYPE && code2 != FIXED_POINT_TYPE |
878 | && TYPE_UNSIGNED (t1)) |
879 | || (code1 != FIXED_POINT_TYPE && code2 == FIXED_POINT_TYPE |
880 | && TYPE_UNSIGNED (t2))) |
881 | unsignedp = 1; |
882 | |
883 | /* The result type is signed. */ |
884 | if (unsignedp == 0) |
885 | { |
886 | /* If the input type is unsigned, we need to convert to the |
887 | signed type. */ |
888 | if (code1 == FIXED_POINT_TYPE && TYPE_UNSIGNED (t1)) |
889 | { |
890 | enum mode_class mclass = (enum mode_class) 0; |
891 | if (GET_MODE_CLASS (m1) == MODE_UFRACT) |
892 | mclass = MODE_FRACT; |
893 | else if (GET_MODE_CLASS (m1) == MODE_UACCUM) |
894 | mclass = MODE_ACCUM; |
895 | else |
896 | gcc_unreachable (); |
897 | m1 = as_a <scalar_mode> |
898 | (m: mode_for_size (GET_MODE_PRECISION (mode: m1), mclass, 0)); |
899 | } |
900 | if (code2 == FIXED_POINT_TYPE && TYPE_UNSIGNED (t2)) |
901 | { |
902 | enum mode_class mclass = (enum mode_class) 0; |
903 | if (GET_MODE_CLASS (m2) == MODE_UFRACT) |
904 | mclass = MODE_FRACT; |
905 | else if (GET_MODE_CLASS (m2) == MODE_UACCUM) |
906 | mclass = MODE_ACCUM; |
907 | else |
908 | gcc_unreachable (); |
909 | m2 = as_a <scalar_mode> |
910 | (m: mode_for_size (GET_MODE_PRECISION (mode: m2), mclass, 0)); |
911 | } |
912 | } |
913 | |
914 | if (code1 == FIXED_POINT_TYPE) |
915 | { |
916 | fbit1 = GET_MODE_FBIT (m1); |
917 | ibit1 = GET_MODE_IBIT (m1); |
918 | } |
919 | else |
920 | { |
921 | fbit1 = 0; |
922 | /* Signed integers need to subtract one sign bit. */ |
923 | ibit1 = TYPE_PRECISION (t1) - (!TYPE_UNSIGNED (t1)); |
924 | } |
925 | |
926 | if (code2 == FIXED_POINT_TYPE) |
927 | { |
928 | fbit2 = GET_MODE_FBIT (m2); |
929 | ibit2 = GET_MODE_IBIT (m2); |
930 | } |
931 | else |
932 | { |
933 | fbit2 = 0; |
934 | /* Signed integers need to subtract one sign bit. */ |
935 | ibit2 = TYPE_PRECISION (t2) - (!TYPE_UNSIGNED (t2)); |
936 | } |
937 | |
938 | max_ibit = ibit1 >= ibit2 ? ibit1 : ibit2; |
939 | max_fbit = fbit1 >= fbit2 ? fbit1 : fbit2; |
940 | return c_common_fixed_point_type_for_size (max_ibit, max_fbit, unsignedp, |
941 | satp); |
942 | } |
943 | |
944 | /* Both real or both integers; use the one with greater precision. */ |
945 | |
946 | if (TYPE_PRECISION (t1) > TYPE_PRECISION (t2)) |
947 | return t1; |
948 | else if (TYPE_PRECISION (t2) > TYPE_PRECISION (t1)) |
949 | return t2; |
950 | |
951 | /* Same precision. Prefer long longs to longs to ints when the |
952 | same precision, following the C99 rules on integer type rank |
953 | (which are equivalent to the C90 rules for C90 types). */ |
954 | |
955 | if (TYPE_MAIN_VARIANT (t1) == long_long_unsigned_type_node |
956 | || TYPE_MAIN_VARIANT (t2) == long_long_unsigned_type_node) |
957 | return long_long_unsigned_type_node; |
958 | |
959 | if (TYPE_MAIN_VARIANT (t1) == long_long_integer_type_node |
960 | || TYPE_MAIN_VARIANT (t2) == long_long_integer_type_node) |
961 | { |
962 | if (TYPE_UNSIGNED (t1) || TYPE_UNSIGNED (t2)) |
963 | return long_long_unsigned_type_node; |
964 | else |
965 | return long_long_integer_type_node; |
966 | } |
967 | |
968 | if (TYPE_MAIN_VARIANT (t1) == long_unsigned_type_node |
969 | || TYPE_MAIN_VARIANT (t2) == long_unsigned_type_node) |
970 | return long_unsigned_type_node; |
971 | |
972 | if (TYPE_MAIN_VARIANT (t1) == long_integer_type_node |
973 | || TYPE_MAIN_VARIANT (t2) == long_integer_type_node) |
974 | { |
975 | /* But preserve unsignedness from the other type, |
976 | since long cannot hold all the values of an unsigned int. */ |
977 | if (TYPE_UNSIGNED (t1) || TYPE_UNSIGNED (t2)) |
978 | return long_unsigned_type_node; |
979 | else |
980 | return long_integer_type_node; |
981 | } |
982 | |
983 | /* For floating types of the same TYPE_PRECISION (which we here |
984 | assume means either the same set of values, or sets of values |
985 | neither a subset of the other, with behavior being undefined in |
986 | the latter case), follow the rules from TS 18661-3: prefer |
987 | interchange types _FloatN, then standard types long double, |
988 | double, float, then extended types _FloatNx. For extended types, |
989 | check them starting with _Float128x as that seems most consistent |
990 | in spirit with preferring long double to double; for interchange |
991 | types, also check in that order for consistency although it's not |
992 | possible for more than one of them to have the same |
993 | precision. */ |
994 | tree mv1 = TYPE_MAIN_VARIANT (t1); |
995 | tree mv2 = TYPE_MAIN_VARIANT (t2); |
996 | |
997 | for (int i = NUM_FLOATN_TYPES - 1; i >= 0; i--) |
998 | if (mv1 == FLOATN_TYPE_NODE (i) || mv2 == FLOATN_TYPE_NODE (i)) |
999 | return FLOATN_TYPE_NODE (i); |
1000 | |
1001 | /* Likewise, prefer long double to double even if same size. */ |
1002 | if (mv1 == long_double_type_node || mv2 == long_double_type_node) |
1003 | return long_double_type_node; |
1004 | |
1005 | /* Likewise, prefer double to float even if same size. |
1006 | We got a couple of embedded targets with 32 bit doubles, and the |
1007 | pdp11 might have 64 bit floats. */ |
1008 | if (mv1 == double_type_node || mv2 == double_type_node) |
1009 | return double_type_node; |
1010 | |
1011 | if (mv1 == float_type_node || mv2 == float_type_node) |
1012 | return float_type_node; |
1013 | |
1014 | for (int i = NUM_FLOATNX_TYPES - 1; i >= 0; i--) |
1015 | if (mv1 == FLOATNX_TYPE_NODE (i) || mv2 == FLOATNX_TYPE_NODE (i)) |
1016 | return FLOATNX_TYPE_NODE (i); |
1017 | |
1018 | if ((code1 == BITINT_TYPE || code2 == BITINT_TYPE) && code1 != code2) |
1019 | { |
1020 | /* Prefer any other integral types over bit-precise integer types. */ |
1021 | if (TYPE_UNSIGNED (t1) == TYPE_UNSIGNED (t2)) |
1022 | return code1 == BITINT_TYPE ? t2 : t1; |
1023 | /* If BITINT_TYPE is unsigned and the other type is signed |
1024 | non-BITINT_TYPE with the same precision, the latter has higher rank. |
1025 | In that case: |
1026 | Otherwise, both operands are converted to the unsigned integer type |
1027 | corresponding to the type of the operand with signed integer type. */ |
1028 | if (TYPE_UNSIGNED (code1 == BITINT_TYPE ? t1 : t2)) |
1029 | return c_common_unsigned_type (code1 == BITINT_TYPE ? t2 : t1); |
1030 | } |
1031 | |
1032 | /* Otherwise prefer the unsigned one. */ |
1033 | |
1034 | if (TYPE_UNSIGNED (t1)) |
1035 | return t1; |
1036 | else |
1037 | return t2; |
1038 | } |
1039 | |
1040 | /* Wrapper around c_common_type that is used by c-common.cc and other |
1041 | front end optimizations that remove promotions. ENUMERAL_TYPEs |
1042 | are allowed here and are converted to their compatible integer types. |
1043 | BOOLEAN_TYPEs are allowed here and return either boolean_type_node or |
1044 | preferably a non-Boolean type as the common type. */ |
1045 | tree |
1046 | common_type (tree t1, tree t2) |
1047 | { |
1048 | if (TREE_CODE (t1) == ENUMERAL_TYPE) |
1049 | t1 = ENUM_UNDERLYING_TYPE (t1); |
1050 | if (TREE_CODE (t2) == ENUMERAL_TYPE) |
1051 | t2 = ENUM_UNDERLYING_TYPE (t2); |
1052 | |
1053 | /* If both types are BOOLEAN_TYPE, then return boolean_type_node. */ |
1054 | if (TREE_CODE (t1) == BOOLEAN_TYPE |
1055 | && TREE_CODE (t2) == BOOLEAN_TYPE) |
1056 | return boolean_type_node; |
1057 | |
1058 | /* If either type is BOOLEAN_TYPE, then return the other. */ |
1059 | if (TREE_CODE (t1) == BOOLEAN_TYPE) |
1060 | return t2; |
1061 | if (TREE_CODE (t2) == BOOLEAN_TYPE) |
1062 | return t1; |
1063 | |
1064 | return c_common_type (t1, t2); |
1065 | } |
1066 | |
struct comptypes_data {

  /* Set when an enum type was matched against a compatible integer
     type (never reset to false once set).  */
  bool enum_and_int_p;
  /* Set when the types are compatible but different enough not to be
     permitted in C11 typedef redeclarations.  */
  bool different_types_p;
  /* Set when the types are compatible but using them together may
     deserve a warning (comptypes then returns 2 instead of 1).  */
  bool warning_needed;
};
1073 | |
1074 | /* Return 1 if TYPE1 and TYPE2 are compatible types for assignment |
1075 | or various other operations. Return 2 if they are compatible |
1076 | but a warning may be needed if you use them together. */ |
1077 | |
1078 | int |
1079 | comptypes (tree type1, tree type2) |
1080 | { |
1081 | const struct tagged_tu_seen_cache * tagged_tu_seen_base1 = tagged_tu_seen_base; |
1082 | |
1083 | struct comptypes_data data = { }; |
1084 | bool ret = comptypes_internal (type1, type2, data: &data); |
1085 | |
1086 | free_all_tagged_tu_seen_up_to (tagged_tu_seen_base1); |
1087 | |
1088 | return ret ? (data.warning_needed ? 2 : 1) : 0; |
1089 | } |
1090 | |
1091 | /* Like comptypes, but if it returns non-zero because enum and int are |
1092 | compatible, it sets *ENUM_AND_INT_P to true. */ |
1093 | |
1094 | int |
1095 | comptypes_check_enum_int (tree type1, tree type2, bool *enum_and_int_p) |
1096 | { |
1097 | const struct tagged_tu_seen_cache * tagged_tu_seen_base1 = tagged_tu_seen_base; |
1098 | |
1099 | struct comptypes_data data = { }; |
1100 | bool ret = comptypes_internal (type1, type2, data: &data); |
1101 | *enum_and_int_p = data.enum_and_int_p; |
1102 | |
1103 | free_all_tagged_tu_seen_up_to (tagged_tu_seen_base1); |
1104 | |
1105 | return ret ? (data.warning_needed ? 2 : 1) : 0; |
1106 | } |
1107 | |
1108 | /* Like comptypes, but if it returns nonzero for different types, it |
1109 | sets *DIFFERENT_TYPES_P to true. */ |
1110 | |
1111 | int |
1112 | comptypes_check_different_types (tree type1, tree type2, |
1113 | bool *different_types_p) |
1114 | { |
1115 | const struct tagged_tu_seen_cache * tagged_tu_seen_base1 = tagged_tu_seen_base; |
1116 | |
1117 | struct comptypes_data data = { }; |
1118 | bool ret = comptypes_internal (type1, type2, data: &data); |
1119 | *different_types_p = data.different_types_p; |
1120 | |
1121 | free_all_tagged_tu_seen_up_to (tagged_tu_seen_base1); |
1122 | |
1123 | return ret ? (data.warning_needed ? 2 : 1) : 0; |
1124 | } |
1125 | |
1126 | /* Return true if TYPE1 and TYPE2 are compatible types for assignment |
1127 | or various other operations. If they are compatible but a warning may |
1128 | be needed if you use them together, 'warning_needed' in DATA is set. |
1129 | If one type is an enum and the other a compatible integer type, then |
1130 | this sets 'enum_and_int_p' in DATA to true (it is never set to |
1131 | false). If the types are compatible but different enough not to be |
1132 | permitted in C11 typedef redeclarations, then this sets |
1133 | 'different_types_p' in DATA to true; it is never set to |
1134 | false, but may or may not be set if the types are incompatible. |
1135 | This differs from comptypes, in that we don't free the seen |
1136 | types. */ |
1137 | |
1138 | static bool |
1139 | comptypes_internal (const_tree type1, const_tree type2, |
1140 | struct comptypes_data *data) |
1141 | { |
1142 | const_tree t1 = type1; |
1143 | const_tree t2 = type2; |
1144 | |
1145 | /* Suppress errors caused by previously reported errors. */ |
1146 | |
1147 | if (t1 == t2 || !t1 || !t2 |
1148 | || TREE_CODE (t1) == ERROR_MARK || TREE_CODE (t2) == ERROR_MARK) |
1149 | return true; |
1150 | |
1151 | /* Enumerated types are compatible with integer types, but this is |
1152 | not transitive: two enumerated types in the same translation unit |
1153 | are compatible with each other only if they are the same type. */ |
1154 | |
1155 | if (TREE_CODE (t1) == ENUMERAL_TYPE |
1156 | && COMPLETE_TYPE_P (t1) |
1157 | && TREE_CODE (t2) != ENUMERAL_TYPE) |
1158 | { |
1159 | t1 = ENUM_UNDERLYING_TYPE (t1); |
1160 | if (TREE_CODE (t2) != VOID_TYPE) |
1161 | { |
1162 | data->enum_and_int_p = true; |
1163 | data->different_types_p = true; |
1164 | } |
1165 | } |
1166 | else if (TREE_CODE (t2) == ENUMERAL_TYPE |
1167 | && COMPLETE_TYPE_P (t2) |
1168 | && TREE_CODE (t1) != ENUMERAL_TYPE) |
1169 | { |
1170 | t2 = ENUM_UNDERLYING_TYPE (t2); |
1171 | if (TREE_CODE (t1) != VOID_TYPE) |
1172 | { |
1173 | data->enum_and_int_p = true; |
1174 | data->different_types_p = true; |
1175 | } |
1176 | } |
1177 | |
1178 | if (t1 == t2) |
1179 | return true; |
1180 | |
1181 | /* Different classes of types can't be compatible. */ |
1182 | |
1183 | if (TREE_CODE (t1) != TREE_CODE (t2)) |
1184 | return false; |
1185 | |
1186 | /* Qualifiers must match. C99 6.7.3p9 */ |
1187 | |
1188 | if (TYPE_QUALS (t1) != TYPE_QUALS (t2)) |
1189 | return false; |
1190 | |
1191 | /* Allow for two different type nodes which have essentially the same |
1192 | definition. Note that we already checked for equality of the type |
1193 | qualifiers (just above). */ |
1194 | |
1195 | if (TREE_CODE (t1) != ARRAY_TYPE |
1196 | && TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2)) |
1197 | return true; |
1198 | |
1199 | int attrval; |
1200 | |
1201 | /* 1 if no need for warning yet, 2 if warning cause has been seen. */ |
1202 | if (!(attrval = comp_type_attributes (t1, t2))) |
1203 | return false; |
1204 | |
1205 | if (2 == attrval) |
1206 | data->warning_needed = true; |
1207 | |
1208 | switch (TREE_CODE (t1)) |
1209 | { |
1210 | case INTEGER_TYPE: |
1211 | case FIXED_POINT_TYPE: |
1212 | case REAL_TYPE: |
1213 | case BITINT_TYPE: |
1214 | /* With these nodes, we can't determine type equivalence by |
1215 | looking at what is stored in the nodes themselves, because |
1216 | two nodes might have different TYPE_MAIN_VARIANTs but still |
1217 | represent the same type. For example, wchar_t and int could |
1218 | have the same properties (TYPE_PRECISION, TYPE_MIN_VALUE, |
1219 | TYPE_MAX_VALUE, etc.), but have different TYPE_MAIN_VARIANTs |
1220 | and are distinct types. On the other hand, int and the |
1221 | following typedef |
1222 | |
1223 | typedef int INT __attribute((may_alias)); |
1224 | |
1225 | have identical properties, different TYPE_MAIN_VARIANTs, but |
1226 | represent the same type. The canonical type system keeps |
1227 | track of equivalence in this case, so we fall back on it. */ |
1228 | return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2); |
1229 | |
1230 | case POINTER_TYPE: |
1231 | /* Do not remove mode information. */ |
1232 | if (TYPE_MODE (t1) != TYPE_MODE (t2)) |
1233 | return false; |
1234 | return comptypes_internal (TREE_TYPE (t1), TREE_TYPE (t2), data); |
1235 | |
1236 | case FUNCTION_TYPE: |
1237 | return function_types_compatible_p (t1, t2, data); |
1238 | |
1239 | case ARRAY_TYPE: |
1240 | { |
1241 | tree d1 = TYPE_DOMAIN (t1); |
1242 | tree d2 = TYPE_DOMAIN (t2); |
1243 | bool d1_variable, d2_variable; |
1244 | bool d1_zero, d2_zero; |
1245 | |
1246 | /* Target types must match incl. qualifiers. */ |
1247 | if (!comptypes_internal (TREE_TYPE (t1), TREE_TYPE (t2), data)) |
1248 | return false; |
1249 | |
1250 | if ((d1 == NULL_TREE) != (d2 == NULL_TREE)) |
1251 | data->different_types_p = true; |
1252 | /* Sizes must match unless one is missing or variable. */ |
1253 | if (d1 == NULL_TREE || d2 == NULL_TREE || d1 == d2) |
1254 | return true; |
1255 | |
1256 | d1_zero = !TYPE_MAX_VALUE (d1); |
1257 | d2_zero = !TYPE_MAX_VALUE (d2); |
1258 | |
1259 | d1_variable = (!d1_zero |
1260 | && (TREE_CODE (TYPE_MIN_VALUE (d1)) != INTEGER_CST |
1261 | || TREE_CODE (TYPE_MAX_VALUE (d1)) != INTEGER_CST)); |
1262 | d2_variable = (!d2_zero |
1263 | && (TREE_CODE (TYPE_MIN_VALUE (d2)) != INTEGER_CST |
1264 | || TREE_CODE (TYPE_MAX_VALUE (d2)) != INTEGER_CST)); |
1265 | d1_variable = d1_variable || (d1_zero && C_TYPE_VARIABLE_SIZE (t1)); |
1266 | d2_variable = d2_variable || (d2_zero && C_TYPE_VARIABLE_SIZE (t2)); |
1267 | |
1268 | if (d1_variable != d2_variable) |
1269 | data->different_types_p = true; |
1270 | if (d1_variable || d2_variable) |
1271 | return true; |
1272 | if (d1_zero && d2_zero) |
1273 | return true; |
1274 | if (d1_zero || d2_zero |
1275 | || !tree_int_cst_equal (TYPE_MIN_VALUE (d1), TYPE_MIN_VALUE (d2)) |
1276 | || !tree_int_cst_equal (TYPE_MAX_VALUE (d1), TYPE_MAX_VALUE (d2))) |
1277 | return false; |
1278 | |
1279 | return true; |
1280 | } |
1281 | |
1282 | case ENUMERAL_TYPE: |
1283 | case RECORD_TYPE: |
1284 | case UNION_TYPE: |
1285 | if (false) |
1286 | { |
1287 | return tagged_types_tu_compatible_p (t1, t2, data); |
1288 | } |
1289 | return false; |
1290 | |
1291 | case VECTOR_TYPE: |
1292 | return known_eq (TYPE_VECTOR_SUBPARTS (t1), TYPE_VECTOR_SUBPARTS (t2)) |
1293 | && comptypes_internal (TREE_TYPE (t1), TREE_TYPE (t2), data); |
1294 | |
1295 | default: |
1296 | return false; |
1297 | } |
1298 | gcc_unreachable (); |
1299 | } |
1300 | |
1301 | /* Return true if TTL and TTR are pointers to types that are equivalent, ignoring |
1302 | their qualifiers, except for named address spaces. If the pointers point to |
1303 | different named addresses, then we must determine if one address space is a |
1304 | subset of the other. */ |
1305 | |
static bool
comp_target_types (location_t location, tree ttl, tree ttr)
{
  int val;
  int val_ped;
  /* The pointed-to types.  */
  tree mvl = TREE_TYPE (ttl);
  tree mvr = TREE_TYPE (ttr);
  addr_space_t asl = TYPE_ADDR_SPACE (mvl);
  addr_space_t asr = TYPE_ADDR_SPACE (mvr);
  addr_space_t as_common;
  bool enum_and_int_p;

  /* Fail if pointers point to incompatible address spaces.  */
  if (!addr_space_superset (as1: asl, as2: asr, common: &as_common))
    return 0;

  /* For pedantic record result of comptypes on arrays before losing
     qualifiers on the element type below.  */
  val_ped = 1;

  if (TREE_CODE (mvl) == ARRAY_TYPE
      && TREE_CODE (mvr) == ARRAY_TYPE)
    val_ped = comptypes (type1: mvl, type2: mvr);

  /* Qualifiers on element types of array types that are
     pointer targets are lost by taking their TYPE_MAIN_VARIANT.  */

  /* Atomicity must survive the main-variant stripping, so re-apply
     TYPE_QUAL_ATOMIC when the (innermost element) type was atomic.  */
  mvl = (TYPE_ATOMIC (strip_array_types (mvl))
	 ? c_build_qualified_type (TYPE_MAIN_VARIANT (mvl), TYPE_QUAL_ATOMIC)
	 : TYPE_MAIN_VARIANT (mvl));

  mvr = (TYPE_ATOMIC (strip_array_types (mvr))
	 ? c_build_qualified_type (TYPE_MAIN_VARIANT (mvr), TYPE_QUAL_ATOMIC)
	 : TYPE_MAIN_VARIANT (mvr));

  enum_and_int_p = false;
  val = comptypes_check_enum_int (type1: mvl, type2: mvr, enum_and_int_p: &enum_and_int_p);

  /* Array element types matched only after qualifier stripping:
     a constraint violation before C23.  */
  if (val == 1 && val_ped != 1)
    pedwarn_c11 (location, opt: OPT_Wpedantic, "invalid use of pointers to arrays with different qualifiers "
		 "in ISO C before C23" );

  /* val == 2 means compatible with a warning (see comptypes).  */
  if (val == 2)
    pedwarn (location, OPT_Wpedantic, "types are not quite compatible" );

  /* Enum/int equivalence is a C-only notion; warn under -Wc++-compat.  */
  if (val == 1 && enum_and_int_p && warn_cxx_compat)
    warning_at (location, OPT_Wc___compat,
		"pointer target types incompatible in C++" );

  return val;
}
1357 | |
1358 | /* Subroutines of `comptypes'. */ |
1359 | |
1360 | |
1361 | |
1362 | /* Allocate the seen two types, assuming that they are compatible. */ |
1363 | |
1364 | static struct tagged_tu_seen_cache * |
1365 | alloc_tagged_tu_seen_cache (const_tree t1, const_tree t2) |
1366 | { |
1367 | struct tagged_tu_seen_cache *tu = XNEW (struct tagged_tu_seen_cache); |
1368 | tu->next = tagged_tu_seen_base; |
1369 | tu->t1 = t1; |
1370 | tu->t2 = t2; |
1371 | |
1372 | tagged_tu_seen_base = tu; |
1373 | |
1374 | /* The C standard says that two structures in different translation |
1375 | units are compatible with each other only if the types of their |
1376 | fields are compatible (among other things). We assume that they |
1377 | are compatible until proven otherwise when building the cache. |
1378 | An example where this can occur is: |
1379 | struct a |
1380 | { |
1381 | struct a *next; |
1382 | }; |
1383 | If we are comparing this against a similar struct in another TU, |
1384 | and did not assume they were compatible, we end up with an infinite |
1385 | loop. */ |
1386 | tu->val = 1; |
1387 | return tu; |
1388 | } |
1389 | |
1390 | /* Free the seen types until we get to TU_TIL. */ |
1391 | |
1392 | static void |
1393 | free_all_tagged_tu_seen_up_to (const struct tagged_tu_seen_cache *tu_til) |
1394 | { |
1395 | const struct tagged_tu_seen_cache *tu = tagged_tu_seen_base; |
1396 | while (tu != tu_til) |
1397 | { |
1398 | const struct tagged_tu_seen_cache *const tu1 |
1399 | = (const struct tagged_tu_seen_cache *) tu; |
1400 | tu = tu1->next; |
1401 | XDELETE (CONST_CAST (struct tagged_tu_seen_cache *, tu1)); |
1402 | } |
1403 | tagged_tu_seen_base = tu_til; |
1404 | } |
1405 | |
1406 | /* Return true if two 'struct', 'union', or 'enum' types T1 and T2 are |
1407 | compatible. The two types are not the same (which has been |
1408 | checked earlier in comptypes_internal). */ |
1409 | |
static bool
tagged_types_tu_compatible_p (const_tree t1, const_tree t2,
			      struct comptypes_data *data)
{
  tree s1, s2;

  /* We have to verify that the tags of the types are the same.  This
     is harder than it looks because this may be a typedef, so we have
     to go look at the original type.  It may even be a typedef of a
     typedef...
     In the case of compiler-created builtin structs the TYPE_DECL
     may be a dummy, with no DECL_ORIGINAL_TYPE.  Don't fault.  */
  while (TYPE_NAME (t1)
	 && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
	 && DECL_ORIGINAL_TYPE (TYPE_NAME (t1)))
    t1 = DECL_ORIGINAL_TYPE (TYPE_NAME (t1));

  while (TYPE_NAME (t2)
	 && TREE_CODE (TYPE_NAME (t2)) == TYPE_DECL
	 && DECL_ORIGINAL_TYPE (TYPE_NAME (t2)))
    t2 = DECL_ORIGINAL_TYPE (TYPE_NAME (t2));

  /* C90 didn't have the requirement that the two tags be the same.  */
  if (flag_isoc99 && TYPE_NAME (t1) != TYPE_NAME (t2))
    return 0;

  /* C90 didn't say what happened if one or both of the types were
     incomplete; we choose to follow C99 rules here, which is that they
     are compatible.  */
  if (TYPE_SIZE (t1) == NULL
      || TYPE_SIZE (t2) == NULL)
    return 1;

  /* If this pair was already compared (possibly still in progress,
     in which case the provisional answer is "compatible"), reuse the
     cached result rather than recursing forever.  */
  {
    const struct tagged_tu_seen_cache * tts_i;
    for (tts_i = tagged_tu_seen_base; tts_i != NULL; tts_i = tts_i->next)
      if (tts_i->t1 == t1 && tts_i->t2 == t2)
	return tts_i->val;
  }

  switch (TREE_CODE (t1))
    {
    case ENUMERAL_TYPE:
      {
	struct tagged_tu_seen_cache *tu = alloc_tagged_tu_seen_cache (t1, t2);
	/* Speed up the case where the type values are in the same order.  */
	tree tv1 = TYPE_VALUES (t1);
	tree tv2 = TYPE_VALUES (t2);

	if (tv1 == tv2)
	  {
	    return 1;
	  }

	/* Pairwise walk: enumerators with the same names in the same
	   order must have equal values.  */
	for (;tv1 && tv2; tv1 = TREE_CHAIN (tv1), tv2 = TREE_CHAIN (tv2))
	  {
	    if (TREE_PURPOSE (tv1) != TREE_PURPOSE (tv2))
	      break;
	    if (simple_cst_equal (TREE_VALUE (tv1), TREE_VALUE (tv2)) != 1)
	      {
		tu->val = 0;
		return 0;
	      }
	  }

	/* Both lists exhausted in lock step: compatible.  */
	if (tv1 == NULL_TREE && tv2 == NULL_TREE)
	  {
	    return 1;
	  }
	/* One list is a proper prefix of the other: incompatible.  */
	if (tv1 == NULL_TREE || tv2 == NULL_TREE)
	  {
	    tu->val = 0;
	    return 0;
	  }

	/* Fall back to an order-independent comparison; the counts
	   must match first.  */
	if (list_length (TYPE_VALUES (t1)) != list_length (TYPE_VALUES (t2)))
	  {
	    tu->val = 0;
	    return 0;
	  }

	/* Every enumerator of T1 must appear in T2 with an equal value.  */
	for (s1 = TYPE_VALUES (t1); s1; s1 = TREE_CHAIN (s1))
	  {
	    s2 = purpose_member (TREE_PURPOSE (s1), TYPE_VALUES (t2));
	    if (s2 == NULL
		|| simple_cst_equal (TREE_VALUE (s1), TREE_VALUE (s2)) != 1)
	      {
		tu->val = 0;
		return 0;
	      }
	  }
	return 1;
      }

    case UNION_TYPE:
      {
	struct tagged_tu_seen_cache *tu = alloc_tagged_tu_seen_cache (t1, t2);

	if (list_length (TYPE_FIELDS (t1)) != list_length (TYPE_FIELDS (t2)))
	  {
	    tu->val = 0;
	    return 0;
	  }

	/* Speed up the common case where the fields are in the same order.  */
	for (s1 = TYPE_FIELDS (t1), s2 = TYPE_FIELDS (t2); s1 && s2;
	     s1 = DECL_CHAIN (s1), s2 = DECL_CHAIN (s2))
	  {
	    int result;

	    if (DECL_NAME (s1) != DECL_NAME (s2))
	      break;
	    result = comptypes_internal (TREE_TYPE (s1), TREE_TYPE (s2), data);

	    /* Anonymous fields that don't match here may still match
	       under the unordered search below.  */
	    if (result != 1 && !DECL_NAME (s1))
	      break;
	    if (result == 0)
	      {
		tu->val = 0;
		return 0;
	      }

	    /* Bit-field layout must agree as well.  */
	    if (TREE_CODE (s1) == FIELD_DECL
		&& simple_cst_equal (DECL_FIELD_BIT_OFFSET (s1),
				     DECL_FIELD_BIT_OFFSET (s2)) != 1)
	      {
		tu->val = 0;
		return 0;
	      }
	  }
	if (!s1 && !s2)
	  {
	    return tu->val;
	  }

	/* Unordered fallback: every field of T1 must have a matching
	   field (by name, type, and bit offset) somewhere in T2.  */
	for (s1 = TYPE_FIELDS (t1); s1; s1 = DECL_CHAIN (s1))
	  {
	    bool ok = false;

	    for (s2 = TYPE_FIELDS (t2); s2; s2 = DECL_CHAIN (s2))
	      if (DECL_NAME (s1) == DECL_NAME (s2))
		{
		  int result;

		  result = comptypes_internal (TREE_TYPE (s1), TREE_TYPE (s2),
					       data);

		  if (result != 1 && !DECL_NAME (s1))
		    continue;
		  if (result == 0)
		    {
		      tu->val = 0;
		      return 0;
		    }

		  if (TREE_CODE (s1) == FIELD_DECL
		      && simple_cst_equal (DECL_FIELD_BIT_OFFSET (s1),
					   DECL_FIELD_BIT_OFFSET (s2)) != 1)
		    break;

		  ok = true;
		  break;
		}
	    if (!ok)
	      {
		tu->val = 0;
		return 0;
	      }
	  }
	return tu->val;
      }

    case RECORD_TYPE:
      {
	struct tagged_tu_seen_cache *tu = alloc_tagged_tu_seen_cache (t1, t2);

	if (list_length (TYPE_FIELDS (t1)) != list_length (TYPE_FIELDS (t2)))
	  {
	    tu->val = 0;
	    return 0;
	  }

	/* Struct fields are ordered, so a single pairwise walk
	   comparing name, type, and bit offset suffices.  */
	for (s1 = TYPE_FIELDS (t1), s2 = TYPE_FIELDS (t2);
	     s1 && s2;
	     s1 = DECL_CHAIN (s1), s2 = DECL_CHAIN (s2))
	  {
	    int result;
	    if (TREE_CODE (s1) != TREE_CODE (s2)
		|| DECL_NAME (s1) != DECL_NAME (s2))
	      break;
	    result = comptypes_internal (TREE_TYPE (s1), TREE_TYPE (s2), data);
	    if (result == 0)
	      break;

	    if (TREE_CODE (s1) == FIELD_DECL
		&& simple_cst_equal (DECL_FIELD_BIT_OFFSET (s1),
				     DECL_FIELD_BIT_OFFSET (s2)) != 1)
	      break;
	  }
	/* The loop broke early iff some field pair failed to match.  */
	if (s1 && s2)
	  tu->val = 0;
	else
	  tu->val = 1;
	return tu->val;
      }

    default:
      gcc_unreachable ();
    }
}
1620 | |
1621 | /* Return true if two function types F1 and F2 are compatible. |
1622 | If either type specifies no argument types, |
1623 | the other must specify a fixed number of self-promoting arg types. |
1624 | Otherwise, if one type specifies only the number of arguments, |
1625 | the other must specify that number of self-promoting arg types. |
1626 | Otherwise, the argument types must match. */ |
1627 | |
static bool
function_types_compatible_p (const_tree f1, const_tree f2,
			     struct comptypes_data *data)
{
  tree args1, args2;
  /* 1 if no need for warning yet, 2 if warning cause has been seen.  */
  int val = 1;
  int val1;
  tree ret1, ret2;

  ret1 = TREE_TYPE (f1);
  ret2 = TREE_TYPE (f2);

  /* 'volatile' qualifiers on a function's return type used to mean
     the function is noreturn.  */
  if (TYPE_VOLATILE (ret1) != TYPE_VOLATILE (ret2))
    pedwarn (input_location, 0, "function return types not compatible due to %<volatile%>" );
  /* Strip 'volatile' from both return types before comparing them,
     preserving any other qualifiers.  */
  if (TYPE_VOLATILE (ret1))
    ret1 = build_qualified_type (TYPE_MAIN_VARIANT (ret1),
				 TYPE_QUALS (ret1) & ~TYPE_QUAL_VOLATILE);
  if (TYPE_VOLATILE (ret2))
    ret2 = build_qualified_type (TYPE_MAIN_VARIANT (ret2),
				 TYPE_QUALS (ret2) & ~TYPE_QUAL_VOLATILE);
  val = comptypes_internal (type1: ret1, type2: ret2, data);
  if (val == 0)
    return 0;

  args1 = TYPE_ARG_TYPES (f1);
  args2 = TYPE_ARG_TYPES (f2);

  if ((args1 == NULL_TREE) != (args2 == NULL_TREE))
    data->different_types_p = true;

  /* An unspecified parmlist matches any specified parmlist
     whose argument types don't need default promotions.  */

  if (args1 == NULL_TREE)
    {
      /* An (...)-only prototype is not an unprototyped function.  */
      if (TYPE_NO_NAMED_ARGS_STDARG_P (f1) != TYPE_NO_NAMED_ARGS_STDARG_P (f2))
	return 0;
      if (!self_promoting_args_p (args2))
	return 0;
      /* If one of these types comes from a non-prototype fn definition,
	 compare that with the other type's arglist.
	 If they don't match, ask for a warning (but no error).  */
      if (TYPE_ACTUAL_ARG_TYPES (f1)
	  && type_lists_compatible_p (args2, TYPE_ACTUAL_ARG_TYPES (f1),
				      data) != 1)
	{
	  val = 1;
	  data->warning_needed = true;
	}
      return val;
    }
  /* Mirror case: F2 is the unprototyped type.  */
  if (args2 == NULL_TREE)
    {
      if (TYPE_NO_NAMED_ARGS_STDARG_P (f1) != TYPE_NO_NAMED_ARGS_STDARG_P (f2))
	return 0;
      if (!self_promoting_args_p (args1))
	return 0;
      if (TYPE_ACTUAL_ARG_TYPES (f2)
	  && type_lists_compatible_p (args1, TYPE_ACTUAL_ARG_TYPES (f2),
				      data) != 1)
	{
	  val = 1;
	  data->warning_needed = true;
	}
      return val;
    }

  /* Both types have argument lists: compare them and propagate results.  */
  val1 = type_lists_compatible_p (args1, args2, data);
  return val1;
}
1702 | |
1703 | /* Check two lists of types for compatibility, returning false for |
1704 | incompatible, true for compatible. */ |
1705 | |
static bool
type_lists_compatible_p (const_tree args1, const_tree args2,
			 struct comptypes_data *data)
{
  /* Walk both parameter lists in parallel.  */
  while (1)
    {
      tree a1, mv1, a2, mv2;
      if (args1 == NULL_TREE && args2 == NULL_TREE)
	return true;
      /* If one list is shorter than the other,
	 they fail to match.  */
      if (args1 == NULL_TREE || args2 == NULL_TREE)
	return 0;
      mv1 = a1 = TREE_VALUE (args1);
      mv2 = a2 = TREE_VALUE (args2);
      /* Compare main variants, but keep atomicity; array parameter
	 types are left as-is.  */
      if (mv1 && mv1 != error_mark_node && TREE_CODE (mv1) != ARRAY_TYPE)
	mv1 = (TYPE_ATOMIC (mv1)
	       ? c_build_qualified_type (TYPE_MAIN_VARIANT (mv1),
					 TYPE_QUAL_ATOMIC)
	       : TYPE_MAIN_VARIANT (mv1));
      if (mv2 && mv2 != error_mark_node && TREE_CODE (mv2) != ARRAY_TYPE)
	mv2 = (TYPE_ATOMIC (mv2)
	       ? c_build_qualified_type (TYPE_MAIN_VARIANT (mv2),
					 TYPE_QUAL_ATOMIC)
	       : TYPE_MAIN_VARIANT (mv2));
      /* A null pointer instead of a type
	 means there is supposed to be an argument
	 but nothing is specified about what type it has.
	 So match anything that self-promotes.  */
      if ((a1 == NULL_TREE) != (a2 == NULL_TREE))
	data->different_types_p = true;
      if (a1 == NULL_TREE)
	{
	  if (c_type_promotes_to (type: a2) != a2)
	    return 0;
	}
      else if (a2 == NULL_TREE)
	{
	  if (c_type_promotes_to (type: a1) != a1)
	    return 0;
	}
      /* If one of the lists has an error marker, ignore this arg.  */
      else if (TREE_CODE (a1) == ERROR_MARK
	       || TREE_CODE (a2) == ERROR_MARK)
	;
      else if (!comptypes_internal (type1: mv1, type2: mv2, data))
	{
	  data->different_types_p = true;
	  /* Allow  wait (union {union wait *u; int *i} *)
	     and  wait (union wait *)  to be compatible.  */
	  if (TREE_CODE (a1) == UNION_TYPE
	      && (TYPE_NAME (a1) == NULL_TREE
		  || TYPE_TRANSPARENT_AGGR (a1))
	      && TREE_CODE (TYPE_SIZE (a1)) == INTEGER_CST
	      && tree_int_cst_equal (TYPE_SIZE (a1),
				     TYPE_SIZE (a2)))
	    {
	      /* The argument matches if it is compatible with any
		 member of the transparent union.  */
	      tree memb;
	      for (memb = TYPE_FIELDS (a1);
		   memb; memb = DECL_CHAIN (memb))
		{
		  tree mv3 = TREE_TYPE (memb);
		  if (mv3 && mv3 != error_mark_node
		      && TREE_CODE (mv3) != ARRAY_TYPE)
		    mv3 = (TYPE_ATOMIC (mv3)
			   ? c_build_qualified_type (TYPE_MAIN_VARIANT (mv3),
						     TYPE_QUAL_ATOMIC)
			   : TYPE_MAIN_VARIANT (mv3));
		  if (comptypes_internal (type1: mv3, type2: mv2, data))
		    break;
		}
	      if (memb == NULL_TREE)
		return 0;
	    }
	  /* Symmetric case: A2 is the transparent union.  */
	  else if (TREE_CODE (a2) == UNION_TYPE
		   && (TYPE_NAME (a2) == NULL_TREE
		       || TYPE_TRANSPARENT_AGGR (a2))
		   && TREE_CODE (TYPE_SIZE (a2)) == INTEGER_CST
		   && tree_int_cst_equal (TYPE_SIZE (a2),
					  TYPE_SIZE (a1)))
	    {
	      tree memb;
	      for (memb = TYPE_FIELDS (a2);
		   memb; memb = DECL_CHAIN (memb))
		{
		  tree mv3 = TREE_TYPE (memb);
		  if (mv3 && mv3 != error_mark_node
		      && TREE_CODE (mv3) != ARRAY_TYPE)
		    mv3 = (TYPE_ATOMIC (mv3)
			   ? c_build_qualified_type (TYPE_MAIN_VARIANT (mv3),
						     TYPE_QUAL_ATOMIC)
			   : TYPE_MAIN_VARIANT (mv3));
		  if (comptypes_internal (type1: mv3, type2: mv1, data))
		    break;
		}
	      if (memb == NULL_TREE)
		return 0;
	    }
	  else
	    return 0;
	}

      args1 = TREE_CHAIN (args1);
      args2 = TREE_CHAIN (args2);
    }
}
1812 | |
1813 | /* Compute the size to increment a pointer by. When a function type or void |
1814 | type or incomplete type is passed, size_one_node is returned. |
1815 | This function does not emit any diagnostics; the caller is responsible |
1816 | for that. */ |
1817 | |
1818 | static tree |
1819 | c_size_in_bytes (const_tree type) |
1820 | { |
1821 | enum tree_code code = TREE_CODE (type); |
1822 | |
1823 | if (code == FUNCTION_TYPE || code == VOID_TYPE || code == ERROR_MARK |
1824 | || !COMPLETE_TYPE_P (type)) |
1825 | return size_one_node; |
1826 | |
1827 | /* Convert in case a char is more than one unit. */ |
1828 | return size_binop_loc (input_location, CEIL_DIV_EXPR, TYPE_SIZE_UNIT (type), |
1829 | size_int (TYPE_PRECISION (char_type_node) |
1830 | / BITS_PER_UNIT)); |
1831 | } |
1832 | |
1833 | /* Return either DECL or its known constant value (if it has one). */ |
1834 | |
1835 | tree |
1836 | decl_constant_value_1 (tree decl, bool in_init) |
1837 | { |
1838 | if (/* Note that DECL_INITIAL isn't valid for a PARM_DECL. */ |
1839 | TREE_CODE (decl) != PARM_DECL |
1840 | && !TREE_THIS_VOLATILE (decl) |
1841 | && TREE_READONLY (decl) |
1842 | && DECL_INITIAL (decl) != NULL_TREE |
1843 | && !error_operand_p (DECL_INITIAL (decl)) |
1844 | /* This is invalid if initial value is not constant. |
1845 | If it has either a function call, a memory reference, |
1846 | or a variable, then re-evaluating it could give different results. */ |
1847 | && TREE_CONSTANT (DECL_INITIAL (decl)) |
1848 | /* Check for cases where this is sub-optimal, even though valid. */ |
1849 | && (in_init || TREE_CODE (DECL_INITIAL (decl)) != CONSTRUCTOR)) |
1850 | return DECL_INITIAL (decl); |
1851 | return decl; |
1852 | } |
1853 | |
1854 | /* Return either DECL or its known constant value (if it has one). |
1855 | Like the above, but always return decl outside of functions. */ |
1856 | |
1857 | tree |
1858 | decl_constant_value (tree decl) |
1859 | { |
1860 | /* Don't change a variable array bound or initial value to a constant |
1861 | in a place where a variable is invalid. */ |
1862 | return current_function_decl ? decl_constant_value_1 (decl, in_init: false) : decl; |
1863 | } |
1864 | |
1865 | /* Convert the array expression EXP to a pointer. */ |
1866 | static tree |
1867 | array_to_pointer_conversion (location_t loc, tree exp) |
1868 | { |
1869 | tree orig_exp = exp; |
1870 | tree type = TREE_TYPE (exp); |
1871 | tree adr; |
1872 | tree restype = TREE_TYPE (type); |
1873 | tree ptrtype; |
1874 | |
1875 | gcc_assert (TREE_CODE (type) == ARRAY_TYPE); |
1876 | |
1877 | STRIP_TYPE_NOPS (exp); |
1878 | |
1879 | copy_warning (exp, orig_exp); |
1880 | |
1881 | ptrtype = build_pointer_type (restype); |
1882 | |
1883 | if (INDIRECT_REF_P (exp)) |
1884 | return convert (ptrtype, TREE_OPERAND (exp, 0)); |
1885 | |
1886 | /* In C++ array compound literals are temporary objects unless they are |
1887 | const or appear in namespace scope, so they are destroyed too soon |
1888 | to use them for much of anything (c++/53220). */ |
1889 | if (warn_cxx_compat && TREE_CODE (exp) == COMPOUND_LITERAL_EXPR) |
1890 | { |
1891 | tree decl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); |
1892 | if (!TREE_READONLY (decl) && !TREE_STATIC (decl)) |
1893 | warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wc___compat, |
1894 | "converting an array compound literal to a pointer " |
1895 | "is ill-formed in C++" ); |
1896 | } |
1897 | |
1898 | adr = build_unary_op (loc, ADDR_EXPR, exp, true); |
1899 | return convert (ptrtype, adr); |
1900 | } |
1901 | |
1902 | /* Convert the function expression EXP to a pointer. */ |
1903 | static tree |
1904 | function_to_pointer_conversion (location_t loc, tree exp) |
1905 | { |
1906 | tree orig_exp = exp; |
1907 | |
1908 | gcc_assert (TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE); |
1909 | |
1910 | STRIP_TYPE_NOPS (exp); |
1911 | |
1912 | copy_warning (exp, orig_exp); |
1913 | |
1914 | return build_unary_op (loc, ADDR_EXPR, exp, false); |
1915 | } |
1916 | |
/* Mark EXP as read, not just set, for set but not used -Wunused
   warning purposes.  */

void
mark_exp_read (tree exp)
{
  switch (TREE_CODE (exp))
    {
    case VAR_DECL:
    case PARM_DECL:
      /* A direct use of a declaration: record that its value is read.  */
      DECL_READ_P (exp) = 1;
      break;
    case ARRAY_REF:
    case COMPONENT_REF:
    case MODIFY_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    CASE_CONVERT:
    case ADDR_EXPR:
    case VIEW_CONVERT_EXPR:
      /* These nodes wrap the object actually accessed; recurse into
	 operand 0 to reach the underlying declaration.  */
      mark_exp_read (TREE_OPERAND (exp, 0));
      break;
    case COMPOUND_EXPR:
      /* Pattern match what build_atomic_assign produces with modifycode
	 NOP_EXPR.  */
      if (VAR_P (TREE_OPERAND (exp, 1))
	  && DECL_ARTIFICIAL (TREE_OPERAND (exp, 1))
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == COMPOUND_EXPR)
	{
	  tree t1 = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  tree t2 = TREE_OPERAND (TREE_OPERAND (exp, 0), 1);
	  /* Expect (TARGET_EXPR, CALL_EXPR) where the target's slot is
	     the artificial temporary at operand 1.  */
	  if (TREE_CODE (t1) == TARGET_EXPR
	      && TARGET_EXPR_SLOT (t1) == TREE_OPERAND (exp, 1)
	      && TREE_CODE (t2) == CALL_EXPR)
	    {
	      tree fndecl = get_callee_fndecl (t2);
	      tree arg = NULL_TREE;
	      if (fndecl
		  && TREE_CODE (fndecl) == FUNCTION_DECL
		  && fndecl_built_in_p (node: fndecl, klass: BUILT_IN_NORMAL)
		  && call_expr_nargs (t2) >= 2)
		/* The argument carrying the interesting address differs
		   between the generic and the sized atomic-store forms.  */
		switch (DECL_FUNCTION_CODE (decl: fndecl))
		  {
		  case BUILT_IN_ATOMIC_STORE:
		    arg = CALL_EXPR_ARG (t2, 1);
		    break;
		  case BUILT_IN_ATOMIC_STORE_1:
		  case BUILT_IN_ATOMIC_STORE_2:
		  case BUILT_IN_ATOMIC_STORE_4:
		  case BUILT_IN_ATOMIC_STORE_8:
		  case BUILT_IN_ATOMIC_STORE_16:
		    arg = CALL_EXPR_ARG (t2, 0);
		    break;
		  default:
		    break;
		  }
	      if (arg)
		{
		  STRIP_NOPS (arg);
		  /* If the argument is the address of an _Atomic decl,
		     mark that decl as read.  */
		  if (TREE_CODE (arg) == ADDR_EXPR
		      && DECL_P (TREE_OPERAND (arg, 0))
		      && TYPE_ATOMIC (TREE_TYPE (TREE_OPERAND (arg, 0))))
		    mark_exp_read (TREE_OPERAND (arg, 0));
		}
	    }
	}
      /* FALLTHRU */
    case C_MAYBE_CONST_EXPR:
      /* The value of these nodes is their second operand.  */
      mark_exp_read (TREE_OPERAND (exp, 1));
      break;
    default:
      break;
    }
}
1991 | |
1992 | /* Perform the default conversion of arrays and functions to pointers. |
1993 | Return the result of converting EXP. For any other expression, just |
1994 | return EXP. |
1995 | |
1996 | LOC is the location of the expression. */ |
1997 | |
1998 | struct c_expr |
1999 | default_function_array_conversion (location_t loc, struct c_expr exp) |
2000 | { |
2001 | tree orig_exp = exp.value; |
2002 | tree type = TREE_TYPE (exp.value); |
2003 | enum tree_code code = TREE_CODE (type); |
2004 | |
2005 | switch (code) |
2006 | { |
2007 | case ARRAY_TYPE: |
2008 | { |
2009 | bool not_lvalue = false; |
2010 | bool lvalue_array_p; |
2011 | |
2012 | while ((TREE_CODE (exp.value) == NON_LVALUE_EXPR |
2013 | || CONVERT_EXPR_P (exp.value)) |
2014 | && TREE_TYPE (TREE_OPERAND (exp.value, 0)) == type) |
2015 | { |
2016 | if (TREE_CODE (exp.value) == NON_LVALUE_EXPR) |
2017 | not_lvalue = true; |
2018 | exp.value = TREE_OPERAND (exp.value, 0); |
2019 | } |
2020 | |
2021 | copy_warning (exp.value, orig_exp); |
2022 | |
2023 | lvalue_array_p = !not_lvalue && lvalue_p (exp.value); |
2024 | if (!flag_isoc99 && !lvalue_array_p) |
2025 | { |
2026 | /* Before C99, non-lvalue arrays do not decay to pointers. |
2027 | Normally, using such an array would be invalid; but it can |
2028 | be used correctly inside sizeof or as a statement expression. |
2029 | Thus, do not give an error here; an error will result later. */ |
2030 | return exp; |
2031 | } |
2032 | |
2033 | exp.value = array_to_pointer_conversion (loc, exp: exp.value); |
2034 | } |
2035 | break; |
2036 | case FUNCTION_TYPE: |
2037 | exp.value = function_to_pointer_conversion (loc, exp: exp.value); |
2038 | break; |
2039 | default: |
2040 | break; |
2041 | } |
2042 | |
2043 | return exp; |
2044 | } |
2045 | |
2046 | struct c_expr |
2047 | default_function_array_read_conversion (location_t loc, struct c_expr exp) |
2048 | { |
2049 | mark_exp_read (exp: exp.value); |
2050 | return default_function_array_conversion (loc, exp); |
2051 | } |
2052 | |
2053 | /* Return whether EXPR should be treated as an atomic lvalue for the |
2054 | purposes of load and store handling. */ |
2055 | |
2056 | static bool |
2057 | really_atomic_lvalue (tree expr) |
2058 | { |
2059 | if (error_operand_p (t: expr)) |
2060 | return false; |
2061 | if (!TYPE_ATOMIC (TREE_TYPE (expr))) |
2062 | return false; |
2063 | if (!lvalue_p (expr)) |
2064 | return false; |
2065 | |
2066 | /* Ignore _Atomic on register variables, since their addresses can't |
2067 | be taken so (a) atomicity is irrelevant and (b) the normal atomic |
2068 | sequences wouldn't work. Ignore _Atomic on structures containing |
2069 | bit-fields, since accessing elements of atomic structures or |
2070 | unions is undefined behavior (C11 6.5.2.3#5), but it's unclear if |
2071 | it's undefined at translation time or execution time, and the |
2072 | normal atomic sequences again wouldn't work. */ |
2073 | while (handled_component_p (t: expr)) |
2074 | { |
2075 | if (TREE_CODE (expr) == COMPONENT_REF |
2076 | && DECL_C_BIT_FIELD (TREE_OPERAND (expr, 1))) |
2077 | return false; |
2078 | expr = TREE_OPERAND (expr, 0); |
2079 | } |
2080 | if (DECL_P (expr) && C_DECL_REGISTER (expr)) |
2081 | return false; |
2082 | return true; |
2083 | } |
2084 | |
/* If EXPR is a named constant (C23) derived from a constexpr variable
   - that is, a reference to such a variable, or a member extracted by
   a sequence of structure and union (but not array) member accesses
   (where union member accesses must access the same member as
   initialized) - then return the corresponding initializer;
   otherwise, return NULL_TREE.  */

static tree
maybe_get_constexpr_init (tree expr)
{
  tree decl = NULL_TREE;
  if (TREE_CODE (expr) == VAR_DECL)
    decl = expr;
  else if (TREE_CODE (expr) == COMPOUND_LITERAL_EXPR)
    decl = COMPOUND_LITERAL_EXPR_DECL (expr);
  /* A constexpr variable or compound literal with a usable initializer:
     return that initializer directly.  */
  if (decl
      && C_DECL_DECLARED_CONSTEXPR (decl)
      && DECL_INITIAL (decl) != NULL_TREE
      && !error_operand_p (DECL_INITIAL (decl)))
    return DECL_INITIAL (decl);
  /* Beyond that, only structure/union member accesses qualify.  */
  if (TREE_CODE (expr) != COMPONENT_REF)
    return NULL_TREE;
  /* Recurse to get the initializer of the containing object.  */
  tree inner = maybe_get_constexpr_init (TREE_OPERAND (expr, 0));
  if (inner == NULL_TREE)
    return NULL_TREE;
  /* Strip conversions that do not change the main variant type.  */
  while ((CONVERT_EXPR_P (inner) || TREE_CODE (inner) == NON_LVALUE_EXPR)
	 && !error_operand_p (t: inner)
	 && (TYPE_MAIN_VARIANT (TREE_TYPE (inner))
	     == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (inner, 0)))))
    inner = TREE_OPERAND (inner, 0);
  if (TREE_CODE (inner) != CONSTRUCTOR)
    return NULL_TREE;
  /* Search the CONSTRUCTOR for the accessed member.  */
  tree field = TREE_OPERAND (expr, 1);
  unsigned HOST_WIDE_INT cidx;
  tree cfield, cvalue;
  bool have_other_init = false;
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (inner), cidx, cfield, cvalue)
    {
      if (cfield == field)
	return cvalue;
      have_other_init = true;
    }
  /* For a union, a member absent from the initializer has a known
     default value only when no other member was initialized and the
     accessed member is the first one.  */
  if (TREE_CODE (TREE_TYPE (inner)) == UNION_TYPE
      && (have_other_init || field != TYPE_FIELDS (TREE_TYPE (inner))))
    return NULL_TREE;
  /* Return a default initializer.  */
  if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (expr)))
    return build_constructor (TREE_TYPE (expr), NULL);
  return build_zero_cst (TREE_TYPE (expr));
}
2135 | |
/* Convert expression EXP (location LOC) from lvalue to rvalue,
   including converting functions and arrays to pointers if CONVERT_P.
   If READ_P, also mark the expression as having been read.  If
   FOR_INIT, constexpr expressions of structure and union type should
   be replaced by the corresponding CONSTRUCTOR; otherwise, only
   constexpr scalars (including elements of structures and unions) are
   replaced by their initializers.  */

struct c_expr
convert_lvalue_to_rvalue (location_t loc, struct c_expr exp,
			  bool convert_p, bool read_p, bool for_init)
{
  bool force_non_npc = false;
  if (read_p)
    mark_exp_read (exp: exp.value);
  if (convert_p)
    exp = default_function_array_conversion (loc, exp);
  if (!VOID_TYPE_P (TREE_TYPE (exp.value)))
    exp.value = require_complete_type (loc, value: exp.value);
  /* Substitute a constexpr initializer where the FOR_INIT contract
     above permits it.  */
  if (for_init || !RECORD_OR_UNION_TYPE_P (TREE_TYPE (exp.value)))
    {
      tree init = maybe_get_constexpr_init (expr: exp.value);
      if (init != NULL_TREE)
	{
	  /* A named constant of pointer type or type nullptr_t is not
	     a null pointer constant even if the initializer is
	     one.  */
	  if (TREE_CODE (init) == INTEGER_CST
	      && !INTEGRAL_TYPE_P (TREE_TYPE (init))
	      && integer_zerop (init))
	    force_non_npc = true;
	  exp.value = init;
	}
    }
  /* An _Atomic lvalue must be read with __atomic_load into a
     non-atomic temporary rather than accessed directly.  */
  if (really_atomic_lvalue (expr: exp.value))
    {
      vec<tree, va_gc> *params;
      tree nonatomic_type, tmp, tmp_addr, fndecl, func_call;
      tree expr_type = TREE_TYPE (exp.value);
      tree expr_addr = build_unary_op (loc, ADDR_EXPR, exp.value, false);
      tree seq_cst = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);

      gcc_assert (TYPE_ATOMIC (expr_type));

      /* Expansion of a generic atomic load may require an addition
	 element, so allocate enough to prevent a resize.  */
      vec_alloc (v&: params, nelems: 4);

      /* Remove the qualifiers for the rest of the expressions and
	 create the VAL temp variable to hold the RHS.  */
      nonatomic_type = build_qualified_type (expr_type, TYPE_UNQUALIFIED);
      tmp = create_tmp_var_raw (nonatomic_type);
      tmp_addr = build_unary_op (loc, ADDR_EXPR, tmp, false);
      TREE_ADDRESSABLE (tmp) = 1;
      /* Do not disable warnings for TMP even though it's artificial.
	 -Winvalid-memory-model depends on it.  */

      /* Issue __atomic_load (&expr, &tmp, SEQ_CST);  */
      fndecl = builtin_decl_explicit (fncode: BUILT_IN_ATOMIC_LOAD);
      params->quick_push (obj: expr_addr);
      params->quick_push (obj: tmp_addr);
      params->quick_push (obj: seq_cst);
      func_call = c_build_function_call_vec (loc, vNULL, fndecl, params, NULL);

      /* EXPR is always read.  */
      mark_exp_read (exp: exp.value);

      /* Return tmp which contains the value loaded.  */
      exp.value = build4 (TARGET_EXPR, nonatomic_type, tmp, func_call,
			  NULL_TREE, NULL_TREE);
    }
  /* The rvalue result has an unqualified type.  */
  if (convert_p && !error_operand_p (t: exp.value)
      && (TREE_CODE (TREE_TYPE (exp.value)) != ARRAY_TYPE))
    exp.value = convert (build_qualified_type (TREE_TYPE (exp.value), TYPE_UNQUALIFIED), exp.value);
  /* Wrap in a NOP_EXPR so the result does not count as a null pointer
     constant (see the named-constant note above).  */
  if (force_non_npc)
    exp.value = build1 (NOP_EXPR, TREE_TYPE (exp.value), exp.value);
  return exp;
}
2214 | |
/* EXP is an expression of integer type.  Apply the integer promotions
   to it and return the promoted value.  */

tree
perform_integral_promotions (tree exp)
{
  tree type = TREE_TYPE (exp);
  enum tree_code code = TREE_CODE (type);

  gcc_assert (INTEGRAL_TYPE_P (type));

  /* Convert enums to the result of applying the integer promotions to
     their underlying type.  */
  if (code == ENUMERAL_TYPE)
    {
      type = ENUM_UNDERLYING_TYPE (type);
      if (c_promoting_integer_type_p (type))
	{
	  /* Promote to unsigned int only when the underlying type is
	     unsigned and already as wide as int; otherwise to int.  */
	  if (TYPE_UNSIGNED (type)
	      && TYPE_PRECISION (type) == TYPE_PRECISION (integer_type_node))
	    type = unsigned_type_node;
	  else
	    type = integer_type_node;
	}

      return convert (type, exp);
    }

  /* ??? This should no longer be needed now bit-fields have their
     proper types.  */
  if (TREE_CODE (exp) == COMPONENT_REF
      && DECL_C_BIT_FIELD (TREE_OPERAND (exp, 1)))
    {
      /* A _BitInt bit-field promotes to its declared _BitInt type.  */
      if (TREE_CODE (DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1)))
	  == BITINT_TYPE)
	return convert (DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1)), exp);
      /* If it's thinner than an int, promote it like a
	 c_promoting_integer_type_p, otherwise leave it alone.  */
      if (compare_tree_int (DECL_SIZE (TREE_OPERAND (exp, 1)),
			    TYPE_PRECISION (integer_type_node)) < 0)
	return convert (integer_type_node, exp);
    }

  if (c_promoting_integer_type_p (type))
    {
      /* Preserve unsignedness if not really getting any wider.  */
      if (TYPE_UNSIGNED (type)
	  && TYPE_PRECISION (type) == TYPE_PRECISION (integer_type_node))
	return convert (unsigned_type_node, exp);

      return convert (integer_type_node, exp);
    }

  return exp;
}
2270 | |
2271 | |
2272 | /* Perform default promotions for C data used in expressions. |
2273 | Enumeral types or short or char are converted to int. |
2274 | In addition, manifest constants symbols are replaced by their values. */ |
2275 | |
2276 | tree |
2277 | default_conversion (tree exp) |
2278 | { |
2279 | tree orig_exp; |
2280 | tree type = TREE_TYPE (exp); |
2281 | enum tree_code code = TREE_CODE (type); |
2282 | tree promoted_type; |
2283 | |
2284 | mark_exp_read (exp); |
2285 | |
2286 | /* Functions and arrays have been converted during parsing. */ |
2287 | gcc_assert (code != FUNCTION_TYPE); |
2288 | if (code == ARRAY_TYPE) |
2289 | return exp; |
2290 | |
2291 | /* Constants can be used directly unless they're not loadable. */ |
2292 | if (TREE_CODE (exp) == CONST_DECL) |
2293 | exp = DECL_INITIAL (exp); |
2294 | |
2295 | /* Strip no-op conversions. */ |
2296 | orig_exp = exp; |
2297 | STRIP_TYPE_NOPS (exp); |
2298 | |
2299 | copy_warning (exp, orig_exp); |
2300 | |
2301 | if (code == VOID_TYPE) |
2302 | { |
2303 | error_at (EXPR_LOC_OR_LOC (exp, input_location), |
2304 | "void value not ignored as it ought to be" ); |
2305 | return error_mark_node; |
2306 | } |
2307 | |
2308 | exp = require_complete_type (EXPR_LOC_OR_LOC (exp, input_location), value: exp); |
2309 | if (exp == error_mark_node) |
2310 | return error_mark_node; |
2311 | |
2312 | promoted_type = targetm.promoted_type (type); |
2313 | if (promoted_type) |
2314 | return convert (promoted_type, exp); |
2315 | |
2316 | if (INTEGRAL_TYPE_P (type)) |
2317 | return perform_integral_promotions (exp); |
2318 | |
2319 | return exp; |
2320 | } |
2321 | |
/* Look up COMPONENT in a structure or union TYPE.

   If the component name is not found, returns NULL_TREE.  Otherwise,
   the return value is a TREE_LIST, with each TREE_VALUE a FIELD_DECL
   stepping down the chain to the component, which is in the last
   TREE_VALUE of the list.  Normally the list is of length one, but if
   the component is embedded within (nested) anonymous structures or
   unions, the list steps down the chain to the component.  */

static tree
lookup_field (tree type, tree component)
{
  tree field;

  /* If TYPE_LANG_SPECIFIC is set, then it is a sorted array of pointers
     to the field elements.  Use a binary search on this array to quickly
     find the element.  Otherwise, do a linear search.  TYPE_LANG_SPECIFIC
     will always be set for structures which have many elements.

     Duplicate field checking replaces duplicates with NULL_TREE so
     TYPE_LANG_SPECIFIC arrays are potentially no longer sorted.  In that
     case just iterate using DECL_CHAIN.  */

  if (TYPE_LANG_SPECIFIC (type) && TYPE_LANG_SPECIFIC (type)->s
      && !seen_error ())
    {
      int bot, top, half;
      tree *field_array = &TYPE_LANG_SPECIFIC (type)->s->elts[0];

      field = TYPE_FIELDS (type);
      bot = 0;
      top = TYPE_LANG_SPECIFIC (type)->s->len;
      while (top - bot > 1)
	{
	  half = (top - bot + 1) >> 1;
	  field = field_array[bot+half];

	  if (DECL_NAME (field) == NULL_TREE)
	    {
	      /* Step through all anon unions in linear fashion.  */
	      while (DECL_NAME (field_array[bot]) == NULL_TREE)
		{
		  field = field_array[bot++];
		  if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (field)))
		    {
		      tree anon = lookup_field (TREE_TYPE (field), component);

		      /* Found inside the anonymous aggregate: prepend
			 its FIELD_DECL to the access chain.  */
		      if (anon)
			return tree_cons (NULL_TREE, field, anon);

		      /* The Plan 9 compiler permits referring
			 directly to an anonymous struct/union field
			 using a typedef name.  */
		      if (flag_plan9_extensions
			  && TYPE_NAME (TREE_TYPE (field)) != NULL_TREE
			  && (TREE_CODE (TYPE_NAME (TREE_TYPE (field)))
			      == TYPE_DECL)
			  && (DECL_NAME (TYPE_NAME (TREE_TYPE (field)))
			      == component))
			break;
		    }
		}

	      /* Entire record is only anon unions.  */
	      if (bot > top)
		return NULL_TREE;

	      /* Restart the binary search, with new lower bound.  */
	      continue;
	    }

	  /* NOTE(review): identifier nodes are compared by pointer
	     value here, which presumes the field array is sorted by
	     the addresses of the DECL_NAME nodes.  */
	  if (DECL_NAME (field) == component)
	    break;
	  if (DECL_NAME (field) < component)
	    bot += half;
	  else
	    top = bot + half;
	}

      /* The search can end with either FIELD or FIELD_ARRAY[BOT] as
	 the candidate; accept whichever matches exactly.  */
      if (DECL_NAME (field_array[bot]) == component)
	field = field_array[bot];
      else if (DECL_NAME (field) != component)
	return NULL_TREE;
    }
  else
    {
      /* Linear search over the field chain.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	{
	  /* Recurse into anonymous struct/union members.  */
	  if (DECL_NAME (field) == NULL_TREE
	      && RECORD_OR_UNION_TYPE_P (TREE_TYPE (field)))
	    {
	      tree anon = lookup_field (TREE_TYPE (field), component);

	      if (anon)
		return tree_cons (NULL_TREE, field, anon);

	      /* The Plan 9 compiler permits referring directly to an
		 anonymous struct/union field using a typedef
		 name.  */
	      if (flag_plan9_extensions
		  && TYPE_NAME (TREE_TYPE (field)) != NULL_TREE
		  && TREE_CODE (TYPE_NAME (TREE_TYPE (field))) == TYPE_DECL
		  && (DECL_NAME (TYPE_NAME (TREE_TYPE (field)))
		      == component))
		break;
	    }

	  if (DECL_NAME (field) == component)
	    break;
	}

      if (field == NULL_TREE)
	return NULL_TREE;
    }

  return tree_cons (NULL_TREE, field, NULL_TREE);
}
2439 | |
2440 | /* Recursively append candidate IDENTIFIER_NODEs to CANDIDATES. */ |
2441 | |
2442 | static void |
2443 | lookup_field_fuzzy_find_candidates (tree type, tree component, |
2444 | vec<tree> *candidates) |
2445 | { |
2446 | tree field; |
2447 | for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) |
2448 | { |
2449 | if (DECL_NAME (field) == NULL_TREE |
2450 | && RECORD_OR_UNION_TYPE_P (TREE_TYPE (field))) |
2451 | lookup_field_fuzzy_find_candidates (TREE_TYPE (field), component, |
2452 | candidates); |
2453 | |
2454 | if (DECL_NAME (field)) |
2455 | candidates->safe_push (DECL_NAME (field)); |
2456 | } |
2457 | } |
2458 | |
2459 | /* Like "lookup_field", but find the closest matching IDENTIFIER_NODE, |
2460 | rather than returning a TREE_LIST for an exact match. */ |
2461 | |
2462 | static tree |
2463 | lookup_field_fuzzy (tree type, tree component) |
2464 | { |
2465 | gcc_assert (TREE_CODE (component) == IDENTIFIER_NODE); |
2466 | |
2467 | /* First, gather a list of candidates. */ |
2468 | auto_vec <tree> candidates; |
2469 | |
2470 | lookup_field_fuzzy_find_candidates (type, component, |
2471 | candidates: &candidates); |
2472 | |
2473 | return find_closest_identifier (target: component, candidates: &candidates); |
2474 | } |
2475 | |
2476 | /* Support function for build_component_ref's error-handling. |
2477 | |
2478 | Given DATUM_TYPE, and "DATUM.COMPONENT", where DATUM is *not* a |
2479 | struct or union, should we suggest "DATUM->COMPONENT" as a hint? */ |
2480 | |
2481 | static bool |
2482 | should_suggest_deref_p (tree datum_type) |
2483 | { |
2484 | /* We don't do it for Objective-C, since Objective-C 2.0 dot-syntax |
2485 | allows "." for ptrs; we could be handling a failed attempt |
2486 | to access a property. */ |
2487 | if (c_dialect_objc ()) |
2488 | return false; |
2489 | |
2490 | /* Only suggest it for pointers... */ |
2491 | if (TREE_CODE (datum_type) != POINTER_TYPE) |
2492 | return false; |
2493 | |
2494 | /* ...to structs/unions. */ |
2495 | tree underlying_type = TREE_TYPE (datum_type); |
2496 | enum tree_code code = TREE_CODE (underlying_type); |
2497 | if (code == RECORD_TYPE || code == UNION_TYPE) |
2498 | return true; |
2499 | else |
2500 | return false; |
2501 | } |
2502 | |
/* Make an expression to refer to the COMPONENT field of structure or
   union value DATUM.  COMPONENT is an IDENTIFIER_NODE.  LOC is the
   location of the COMPONENT_REF.  COMPONENT_LOC is the location
   of COMPONENT.  ARROW_LOC is the location of the first -> operand if
   it is from -> operator.  */

tree
build_component_ref (location_t loc, tree datum, tree component,
		     location_t component_loc, location_t arrow_loc)
{
  tree type = TREE_TYPE (datum);
  enum tree_code code = TREE_CODE (type);
  tree field = NULL;
  tree ref;
  bool datum_lvalue = lvalue_p (datum);

  if (!objc_is_public (datum, component))
    return error_mark_node;

  /* Detect Objective-C property syntax object.property.  */
  if (c_dialect_objc ()
      && (ref = objc_maybe_build_component_ref (datum, component)))
    return ref;

  /* See if there is a field or component with name COMPONENT.  */

  if (code == RECORD_TYPE || code == UNION_TYPE)
    {
      if (!COMPLETE_TYPE_P (type))
	{
	  c_incomplete_type_error (loc, NULL_TREE, type);
	  return error_mark_node;
	}

      field = lookup_field (type, component);

      if (!field)
	{
	  /* No such member: emit an error, with a spelling suggestion
	     if a close match exists.  */
	  tree guessed_id = lookup_field_fuzzy (type, component);
	  if (guessed_id)
	    {
	      /* Attempt to provide a fixit replacement hint, if
		 we have a valid range for the component.  */
	      location_t reported_loc
		= (component_loc != UNKNOWN_LOCATION) ? component_loc : loc;
	      gcc_rich_location rich_loc (reported_loc);
	      if (component_loc != UNKNOWN_LOCATION)
		rich_loc.add_fixit_misspelled_id (misspelled_token_loc: component_loc, hint_id: guessed_id);
	      error_at (&rich_loc,
			"%qT has no member named %qE; did you mean %qE?" ,
			type, component, guessed_id);
	    }
	  else
	    error_at (loc, "%qT has no member named %qE" , type, component);
	  return error_mark_node;
	}

      /* Accessing elements of atomic structures or unions is undefined
	 behavior (C11 6.5.2.3#5).  */
      if (TYPE_ATOMIC (type) && c_inhibit_evaluation_warnings == 0)
	{
	  if (code == RECORD_TYPE)
	    warning_at (loc, 0, "accessing a member %qE of an atomic "
			"structure %qE" , component, datum);
	  else
	    warning_at (loc, 0, "accessing a member %qE of an atomic "
			"union %qE" , component, datum);
	}

      /* Chain the COMPONENT_REFs if necessary down to the FIELD.
	 This might be better solved in future the way the C++ front
	 end does it - by giving the anonymous entities each a
	 separate name and type, and then have build_component_ref
	 recursively call itself.  We can't do that here.  */
      do
	{
	  tree subdatum = TREE_VALUE (field);
	  int quals;
	  tree subtype;
	  bool use_datum_quals;

	  if (TREE_TYPE (subdatum) == error_mark_node)
	    return error_mark_node;

	  /* If this is an rvalue, it does not have qualifiers in C
	     standard terms and we must avoid propagating such
	     qualifiers down to a non-lvalue array that is then
	     converted to a pointer.  */
	  use_datum_quals = (datum_lvalue
			     || TREE_CODE (TREE_TYPE (subdatum)) != ARRAY_TYPE);

	  /* The member's type carries the qualifiers of its own
	     declaration plus (usually) those of the containing object.  */
	  quals = TYPE_QUALS (strip_array_types (TREE_TYPE (subdatum)));
	  if (use_datum_quals)
	    quals |= TYPE_QUALS (TREE_TYPE (datum));
	  subtype = c_build_qualified_type (TREE_TYPE (subdatum), quals);

	  ref = build3 (COMPONENT_REF, subtype, datum, subdatum,
			NULL_TREE);
	  SET_EXPR_LOCATION (ref, loc);
	  if (TREE_READONLY (subdatum)
	      || (use_datum_quals && TREE_READONLY (datum)))
	    TREE_READONLY (ref) = 1;
	  if (TREE_THIS_VOLATILE (subdatum)
	      || (use_datum_quals && TREE_THIS_VOLATILE (datum)))
	    TREE_THIS_VOLATILE (ref) = 1;

	  if (TREE_UNAVAILABLE (subdatum))
	    error_unavailable_use (subdatum, NULL_TREE);
	  else if (TREE_DEPRECATED (subdatum))
	    warn_deprecated_use (subdatum, NULL_TREE);

	  /* Each anonymous-aggregate link becomes the DATUM of the
	     next COMPONENT_REF in the chain.  */
	  datum = ref;

	  field = TREE_CHAIN (field);
	}
      while (field);

      return ref;
    }
  else if (should_suggest_deref_p (datum_type: type))
    {
      /* Special-case the error message for "ptr.field" for the case
	 where the user has confused "." vs "->".  */
      rich_location richloc (line_table, loc);
      if (INDIRECT_REF_P (datum) && arrow_loc != UNKNOWN_LOCATION)
	{
	  richloc.add_fixit_insert_before (where: arrow_loc, new_content: "(*" );
	  richloc.add_fixit_insert_after (where: arrow_loc, new_content: ")" );
	  error_at (&richloc,
		    "%qE is a pointer to pointer; did you mean to dereference "
		    "it before applying %<->%> to it?" ,
		    TREE_OPERAND (datum, 0));
	}
      else
	{
	  /* "loc" should be the "." token.  */
	  richloc.add_fixit_replace (new_content: "->" );
	  error_at (&richloc,
		    "%qE is a pointer; did you mean to use %<->%>?" ,
		    datum);
	}
      return error_mark_node;
    }
  else if (code != ERROR_MARK)
    error_at (loc,
	      "request for member %qE in something not a structure or union" ,
	      component);

  return error_mark_node;
}
2653 | |
/* Given an expression PTR for a pointer, return an expression
   for the value pointed to.
   ERRORSTRING is the name of the operator to appear in error messages.

   LOC is the location to use for the generated tree.  */

tree
build_indirect_ref (location_t loc, tree ptr, ref_operator errstring)
{
  tree pointer = default_conversion (exp: ptr);
  tree type = TREE_TYPE (pointer);
  tree ref;

  if (TREE_CODE (type) == POINTER_TYPE)
    {
      if (CONVERT_EXPR_P (pointer)
	  || TREE_CODE (pointer) == VIEW_CONVERT_EXPR)
	{
	  /* If a warning is issued, mark it to avoid duplicates from
	     the backend.  This only needs to be done at
	     warn_strict_aliasing > 2.  */
	  if (warn_strict_aliasing > 2)
	    if (strict_aliasing_warning (EXPR_LOCATION (pointer),
					 type, TREE_OPERAND (pointer, 0)))
	      suppress_warning (pointer, OPT_Wstrict_aliasing_);
	}

      /* Fold *&E to E when the types match exactly, so that &* stays
	 a no-op and no INDIRECT_REF is built.  */
      if (TREE_CODE (pointer) == ADDR_EXPR
	  && (TREE_TYPE (TREE_OPERAND (pointer, 0))
	      == TREE_TYPE (type)))
	{
	  ref = TREE_OPERAND (pointer, 0);
	  protected_set_expr_location (ref, loc);
	  return ref;
	}
      else
	{
	  tree t = TREE_TYPE (type);

	  ref = build1 (INDIRECT_REF, t, pointer);

	  /* Warn unless evaluation warnings are inhibited (e.g. inside
	     sizeof or typeof).  */
	  if (VOID_TYPE_P (t) && c_inhibit_evaluation_warnings == 0)
	    warning_at (loc, 0, "dereferencing %<void *%> pointer" );

	  /* We *must* set TREE_READONLY when dereferencing a pointer to const,
	     so that we get the proper error message if the result is used
	     to assign to.  Also, &* is supposed to be a no-op.
	     And ANSI C seems to specify that the type of the result
	     should be the const type.  */
	  /* A de-reference of a pointer to const is not a const.  It is valid
	     to change it via some other pointer.  */
	  TREE_READONLY (ref) = TYPE_READONLY (t);
	  /* A volatile access counts as a side effect, as does any side
	     effect already present in the pointer expression itself.  */
	  TREE_SIDE_EFFECTS (ref)
	    = TYPE_VOLATILE (t) || TREE_SIDE_EFFECTS (pointer);
	  TREE_THIS_VOLATILE (ref) = TYPE_VOLATILE (t);
	  protected_set_expr_location (ref, loc);
	  return ref;
	}
    }
  else if (TREE_CODE (pointer) != ERROR_MARK)
    invalid_indirection_error (loc, type, errstring);

  return error_mark_node;
}
2718 | |
/* This handles expressions of the form "a[i]", which denotes
   an array reference.

   This is logically equivalent in C to *(a+i), but we may do it differently.
   If A is a variable or a member, we generate a primitive ARRAY_REF.
   This avoids forcing the array out of registers, and can work on
   arrays that are not lvalues (for example, members of structures returned
   by functions).

   For vector types, allow vector[i] but not i[vector], and create
   *(((type*)&vectortype) + i) for the expression.

   LOC is the location to use for the returned expression.  */

tree
build_array_ref (location_t loc, tree array, tree index)
{
  tree ret;
  bool swapped = false;
  if (TREE_TYPE (array) == error_mark_node
      || TREE_TYPE (index) == error_mark_node)
    return error_mark_node;

  /* C permits "i[a]" as well as "a[i]"; canonicalize so that ARRAY is
     the subscripted object and INDEX the subscript.  */
  if (TREE_CODE (TREE_TYPE (array)) != ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (array)) != POINTER_TYPE
      /* Allow vector[index] but not index[vector].  */
      && !gnu_vector_type_p (TREE_TYPE (array)))
    {
      if (TREE_CODE (TREE_TYPE (index)) != ARRAY_TYPE
	  && TREE_CODE (TREE_TYPE (index)) != POINTER_TYPE)
	{
	  error_at (loc,
		    "subscripted value is neither array nor pointer nor vector" );

	  return error_mark_node;
	}
      std::swap (a&: array, b&: index);
      swapped = true;
    }

  if (!INTEGRAL_TYPE_P (TREE_TYPE (index)))
    {
      error_at (loc, "array subscript is not an integer" );
      return error_mark_node;
    }

  if (TREE_CODE (TREE_TYPE (TREE_TYPE (array))) == FUNCTION_TYPE)
    {
      error_at (loc, "subscripted value is pointer to function" );
      return error_mark_node;
    }

  /* ??? Existing practice has been to warn only when the char
     index is syntactically the index, not for char[array].  */
  if (!swapped)
    warn_array_subscript_with_type_char (loc, index);

  /* Apply default promotions *after* noticing character types.  */
  index = default_conversion (exp: index);
  if (index == error_mark_node)
    return error_mark_node;

  gcc_assert (TREE_CODE (TREE_TYPE (index)) == INTEGER_TYPE
	      || TREE_CODE (TREE_TYPE (index)) == BITINT_TYPE);

  /* Remember whether ARRAY was originally a vector; the conversion
     below may rewrite it into an array for subscripting.  */
  bool was_vector = VECTOR_TYPE_P (TREE_TYPE (array));
  bool non_lvalue = convert_vector_to_array_for_subscript (loc, &array, index);

  if (TREE_CODE (TREE_TYPE (array)) == ARRAY_TYPE)
    {
      tree rval, type;

      /* An array that is indexed by a non-constant
	 cannot be stored in a register; we must be able to do
	 address arithmetic on its address.
	 Likewise an array of elements of variable size.  */
      if (TREE_CODE (index) != INTEGER_CST
	  || (COMPLETE_TYPE_P (TREE_TYPE (TREE_TYPE (array)))
	      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (array)))) != INTEGER_CST))
	{
	  if (!c_mark_addressable (array, true))
	    return error_mark_node;
	}
      /* An array that is indexed by a constant value which is not within
	 the array bounds cannot be stored in a register either; because we
	 would get a crash in store_bit_field/extract_bit_field when trying
	 to access a non-existent part of the register.  */
      if (TREE_CODE (index) == INTEGER_CST
	  && TYPE_DOMAIN (TREE_TYPE (array))
	  && !int_fits_type_p (index, TYPE_DOMAIN (TREE_TYPE (array))))
	{
	  if (!c_mark_addressable (array))
	    return error_mark_node;
	}

      /* Pedantic diagnostics do not apply to the array a vector was
	 rewritten into above.  */
      if ((pedantic || warn_c90_c99_compat)
	  && ! was_vector)
	{
	  /* Look through COMPONENT_REFs to the underlying object.  */
	  tree foo = array;
	  while (TREE_CODE (foo) == COMPONENT_REF)
	    foo = TREE_OPERAND (foo, 0);
	  if (VAR_P (foo) && C_DECL_REGISTER (foo))
	    pedwarn (loc, OPT_Wpedantic,
		     "ISO C forbids subscripting %<register%> array" );
	  else if (!lvalue_p (foo))
	    pedwarn_c90 (loc, opt: OPT_Wpedantic,
			 "ISO C90 forbids subscripting non-lvalue "
			 "array" );
	}

      type = TREE_TYPE (TREE_TYPE (array));
      rval = build4 (ARRAY_REF, type, array, index, NULL_TREE, NULL_TREE);
      /* Array ref is const/volatile if the array elements are
	 or if the array is.  */
      TREE_READONLY (rval)
	|= (TYPE_READONLY (TREE_TYPE (TREE_TYPE (array)))
	    | TREE_READONLY (array));
      TREE_SIDE_EFFECTS (rval)
	|= (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (array)))
	    | TREE_SIDE_EFFECTS (array));
      TREE_THIS_VOLATILE (rval)
	|= (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (array)))
	    /* This was added by rms on 16 Nov 91.
	       It fixes  vol struct foo *a;  a->elts[1]
	       in an inline function.
	       Hope it doesn't break something else.  */
	    | TREE_THIS_VOLATILE (array));
      ret = require_complete_type (loc, value: rval);
      protected_set_expr_location (ret, loc);
      if (non_lvalue)
	ret = non_lvalue_loc (loc, ret);
      return ret;
    }
  else
    {
      /* Pointer case: build *(ar + index) explicitly.  */
      tree ar = default_conversion (exp: array);

      if (ar == error_mark_node)
	return ar;

      gcc_assert (TREE_CODE (TREE_TYPE (ar)) == POINTER_TYPE);
      gcc_assert (TREE_CODE (TREE_TYPE (TREE_TYPE (ar))) != FUNCTION_TYPE);

      ret = build_indirect_ref (loc, ptr: build_binary_op (loc, PLUS_EXPR, ar,
						     index, false),
				errstring: RO_ARRAY_INDEXING);
      if (non_lvalue)
	ret = non_lvalue_loc (loc, ret);
      return ret;
    }
}
2870 | |
/* Build an external reference to identifier ID.  FUN indicates
   whether this will be used for a function call.  LOC is the source
   location of the identifier.  This sets *TYPE to the type of the
   identifier, which is not the same as the type of the returned value
   for CONST_DECLs defined as enum constants.  If the type of the
   identifier is not available, *TYPE is set to NULL.  */
tree
build_external_ref (location_t loc, tree id, bool fun, tree *type)
{
  tree ref;
  tree decl = lookup_name (id);

  /* In Objective-C, an instance variable (ivar) may be preferred to
     whatever lookup_name() found.  */
  decl = objc_lookup_ivar (decl, id);

  *type = NULL;
  if (decl && decl != error_mark_node)
    {
      ref = decl;
      *type = TREE_TYPE (ref);
      if (DECL_P (decl) && C_DECL_UNDERSPECIFIED (decl))
	error_at (loc, "underspecified %qD referenced in its initializer" ,
		  decl);
    }
  else if (fun)
    /* Implicit function declaration.  */
    ref = implicitly_declare (loc, id);
  else if (decl == error_mark_node)
    /* Don't complain about something that's already been
       complained about.  */
    return error_mark_node;
  else
    {
      undeclared_variable (loc, id);
      return error_mark_node;
    }

  if (TREE_TYPE (ref) == error_mark_node)
    return error_mark_node;

  /* Diagnose use of unavailable or deprecated decls.  */
  if (TREE_UNAVAILABLE (ref))
    error_unavailable_use (ref, NULL_TREE);
  else if (TREE_DEPRECATED (ref))
    warn_deprecated_use (ref, NULL_TREE);

  /* Recursive call does not count as usage.  */
  if (ref != current_function_decl)
    {
      TREE_USED (ref) = 1;
    }

  /* For functions, track "really used" separately: inside sizeof or
     typeof the decision is deferred (see record_maybe_used_decl),
     since the operand may never be evaluated.  */
  if (TREE_CODE (ref) == FUNCTION_DECL && !in_alignof)
    {
      if (!in_sizeof && !in_typeof)
	C_DECL_USED (ref) = 1;
      else if (DECL_INITIAL (ref) == NULL_TREE
	       && DECL_EXTERNAL (ref)
	       && !TREE_PUBLIC (ref))
	record_maybe_used_decl (ref);
    }

  if (TREE_CODE (ref) == CONST_DECL)
    {
      used_types_insert (TREE_TYPE (ref));

      if (warn_cxx_compat
	  && TREE_CODE (TREE_TYPE (ref)) == ENUMERAL_TYPE
	  && C_TYPE_DEFINED_IN_STRUCT (TREE_TYPE (ref)))
	{
	  warning_at (loc, OPT_Wc___compat,
		      ("enum constant defined in struct or union "
		       "is not visible in C++" ));
	  inform (DECL_SOURCE_LOCATION (ref), "enum constant defined here" );
	}

      /* An enumeration constant: return its value, not the decl.  */
      ref = DECL_INITIAL (ref);
      TREE_CONSTANT (ref) = 1;
    }
  else if (current_function_decl != NULL_TREE
	   && !DECL_FILE_SCOPE_P (current_function_decl)
	   && (VAR_OR_FUNCTION_DECL_P (ref)
	       || TREE_CODE (ref) == PARM_DECL))
    {
      /* Mark decls referenced across a nested-function boundary.  */
      tree context = decl_function_context (ref);

      if (context != NULL_TREE && context != current_function_decl)
	DECL_NONLOCAL (ref) = 1;
    }
  /* C99 6.7.4p3: An inline definition of a function with external
     linkage ... shall not contain a reference to an identifier with
     internal linkage.  */
  else if (current_function_decl != NULL_TREE
	   && DECL_DECLARED_INLINE_P (current_function_decl)
	   && DECL_EXTERNAL (current_function_decl)
	   && VAR_OR_FUNCTION_DECL_P (ref)
	   && (!VAR_P (ref) || TREE_STATIC (ref))
	   && ! TREE_PUBLIC (ref)
	   && DECL_CONTEXT (ref) != current_function_decl)
    record_inline_static (loc, current_function_decl, ref,
			  csi_internal);

  return ref;
}
2975 | |
/* Record details of decls possibly used inside sizeof or typeof.  */
struct maybe_used_decl
{
  /* The decl.  */
  tree decl;
  /* The level seen at (in_sizeof + in_typeof).  */
  int level;
  /* The next one at this level or above, or NULL.  */
  struct maybe_used_decl *next;
};

/* Head of the stack of such records; entries are pushed by
   record_maybe_used_decl and popped by pop_maybe_used.  */
static struct maybe_used_decl *maybe_used_decls;
2988 | |
2989 | /* Record that DECL, an undefined static function reference seen |
2990 | inside sizeof or typeof, might be used if the operand of sizeof is |
2991 | a VLA type or the operand of typeof is a variably modified |
2992 | type. */ |
2993 | |
2994 | static void |
2995 | record_maybe_used_decl (tree decl) |
2996 | { |
2997 | struct maybe_used_decl *t = XOBNEW (&parser_obstack, struct maybe_used_decl); |
2998 | t->decl = decl; |
2999 | t->level = in_sizeof + in_typeof; |
3000 | t->next = maybe_used_decls; |
3001 | maybe_used_decls = t; |
3002 | } |
3003 | |
3004 | /* Pop the stack of decls possibly used inside sizeof or typeof. If |
3005 | USED is false, just discard them. If it is true, mark them used |
3006 | (if no longer inside sizeof or typeof) or move them to the next |
3007 | level up (if still inside sizeof or typeof). */ |
3008 | |
3009 | void |
3010 | pop_maybe_used (bool used) |
3011 | { |
3012 | struct maybe_used_decl *p = maybe_used_decls; |
3013 | int cur_level = in_sizeof + in_typeof; |
3014 | while (p && p->level > cur_level) |
3015 | { |
3016 | if (used) |
3017 | { |
3018 | if (cur_level == 0) |
3019 | C_DECL_USED (p->decl) = 1; |
3020 | else |
3021 | p->level = cur_level; |
3022 | } |
3023 | p = p->next; |
3024 | } |
3025 | if (!used || cur_level == 0) |
3026 | maybe_used_decls = p; |
3027 | } |
3028 | |
/* Return the result of sizeof applied to EXPR.  */

struct c_expr
c_expr_sizeof_expr (location_t loc, struct c_expr expr)
{
  struct c_expr ret;
  if (expr.value == error_mark_node)
    {
      /* Propagate the error, but still balance the sizeof nesting
	 level tracked for maybe-used decls.  */
      ret.value = error_mark_node;
      ret.original_code = ERROR_MARK;
      ret.original_type = NULL;
      ret.m_decimal = 0;
      pop_maybe_used (used: false);
    }
  else
    {
      bool expr_const_operands = true;

      /* sizeof on an array parameter yields the size of the decayed
	 pointer type, which is rarely what was meant; warn and point
	 at the declaration.  */
      if (TREE_CODE (expr.value) == PARM_DECL
	  && C_ARRAY_PARAMETER (expr.value))
	{
	  auto_diagnostic_group d;
	  if (warning_at (loc, OPT_Wsizeof_array_argument,
			  "%<sizeof%> on array function parameter %qE will "
			  "return size of %qT" , expr.value,
			  TREE_TYPE (expr.value)))
	    inform (DECL_SOURCE_LOCATION (expr.value), "declared here" );
	}
      tree folded_expr = c_fully_fold (expr.value, require_constant_value,
				       &expr_const_operands);
      ret.value = c_sizeof (loc, TREE_TYPE (folded_expr));
      c_last_sizeof_arg = expr.value;
      c_last_sizeof_loc = loc;
      ret.original_code = SIZEOF_EXPR;
      ret.original_type = NULL;
      ret.m_decimal = 0;
      if (C_TYPE_VARIABLE_SIZE (TREE_TYPE (folded_expr)))
	{
	  /* sizeof is evaluated when given a vla (C99 6.5.3.4p2).  */
	  /* Wrap in C_MAYBE_CONST_EXPR so the operand is kept for
	     evaluation and constness is tracked.  */
	  ret.value = build2 (C_MAYBE_CONST_EXPR, TREE_TYPE (ret.value),
			      folded_expr, ret.value);
	  C_MAYBE_CONST_EXPR_NON_CONST (ret.value) = !expr_const_operands;
	  SET_EXPR_LOCATION (ret.value, loc);
	}
      pop_maybe_used (C_TYPE_VARIABLE_SIZE (TREE_TYPE (folded_expr)));
    }
  return ret;
}
3077 | |
/* Return the result of sizeof applied to T, a structure for the type
   name passed to sizeof (rather than the type itself).  LOC is the
   location of the original expression.  */

struct c_expr
c_expr_sizeof_type (location_t loc, struct c_type_name *t)
{
  tree type;
  struct c_expr ret;
  tree type_expr = NULL_TREE;
  bool type_expr_const = true;
  type = groktypename (t, &type_expr, &type_expr_const);
  ret.value = c_sizeof (loc, type);
  c_last_sizeof_arg = type;
  c_last_sizeof_loc = loc;
  ret.original_code = SIZEOF_EXPR;
  ret.original_type = NULL;
  ret.m_decimal = 0;
  if (type == error_mark_node)
    {
      ret.value = error_mark_node;
      ret.original_code = ERROR_MARK;
    }
  else
  if ((type_expr || TREE_CODE (ret.value) == INTEGER_CST)
      && C_TYPE_VARIABLE_SIZE (type))
    {
      /* If the type is a [*] array, it is a VLA but is represented as
	 having a size of zero.  In such a case we must ensure that
	 the result of sizeof does not get folded to a constant by
	 c_fully_fold, because if the size is evaluated the result is
	 not constant and so constraints on zero or negative size
	 arrays must not be applied when this sizeof call is inside
	 another array declarator.  */
      if (!type_expr)
	type_expr = integer_zero_node;
      ret.value = build2 (C_MAYBE_CONST_EXPR, TREE_TYPE (ret.value),
			  type_expr, ret.value);
      C_MAYBE_CONST_EXPR_NON_CONST (ret.value) = !type_expr_const;
    }
  /* Treat maybe-used decls as used only when the operand was a
     variable-size type (the VLA size expression gets evaluated).  */
  pop_maybe_used (used: type != error_mark_node
		  ? C_TYPE_VARIABLE_SIZE (type) : false);
  return ret;
}
3122 | |
3123 | /* Build a function call to function FUNCTION with parameters PARAMS. |
3124 | The function call is at LOC. |
3125 | PARAMS is a list--a chain of TREE_LIST nodes--in which the |
3126 | TREE_VALUE of each node is a parameter-expression. |
3127 | FUNCTION's data type may be a function type or a pointer-to-function. */ |
3128 | |
3129 | tree |
3130 | build_function_call (location_t loc, tree function, tree params) |
3131 | { |
3132 | vec<tree, va_gc> *v; |
3133 | tree ret; |
3134 | |
3135 | vec_alloc (v, nelems: list_length (params)); |
3136 | for (; params; params = TREE_CHAIN (params)) |
3137 | v->quick_push (TREE_VALUE (params)); |
3138 | ret = c_build_function_call_vec (loc, vNULL, function, v, NULL); |
3139 | vec_free (v); |
3140 | return ret; |
3141 | } |
3142 | |
3143 | /* Give a note about the location of the declaration of DECL. */ |
3144 | |
3145 | static void |
3146 | inform_declaration (tree decl) |
3147 | { |
3148 | if (decl && (TREE_CODE (decl) != FUNCTION_DECL |
3149 | || !DECL_IS_UNDECLARED_BUILTIN (decl))) |
3150 | inform (DECL_SOURCE_LOCATION (decl), "declared here" ); |
3151 | } |
3152 | |
/* Build a function call to function FUNCTION with parameters PARAMS.
   If FUNCTION is the result of resolving an overloaded target built-in,
   ORIG_FUNDECL is the original function decl, otherwise it is null.
   ORIGTYPES, if not NULL, is a vector of types; each element is
   either NULL or the original type of the corresponding element in
   PARAMS.  The original type may differ from TREE_TYPE of the
   parameter for enums.  FUNCTION's data type may be a function type
   or pointer-to-function.  This function changes the elements of
   PARAMS.  */

tree
build_function_call_vec (location_t loc, vec<location_t> arg_loc,
			 tree function, vec<tree, va_gc> *params,
			 vec<tree, va_gc> *origtypes, tree orig_fundecl)
{
  tree fntype, fundecl = NULL_TREE;
  tree name = NULL_TREE, result;
  tree tem;
  int nargs;
  tree *argarray;


  /* Strip NON_LVALUE_EXPRs, etc., since we aren't using as an lvalue.  */
  STRIP_TYPE_NOPS (function);

  /* Convert anything with function type to a pointer-to-function.  */
  if (TREE_CODE (function) == FUNCTION_DECL)
    {
      name = DECL_NAME (function);

      if (flag_tm)
	tm_malloc_replacement (function);
      fundecl = function;
      if (!orig_fundecl)
	orig_fundecl = fundecl;
      /* Atomic functions have type checking/casting already done.  They are
	 often rewritten and don't match the original parameter list.  */
      if (name && startswith (IDENTIFIER_POINTER (name), prefix: "__atomic_" ))
	origtypes = NULL;
    }
  if (TREE_CODE (TREE_TYPE (function)) == FUNCTION_TYPE)
    function = function_to_pointer_conversion (loc, exp: function);

  /* For Objective-C, convert any calls via a cast to OBJC_TYPE_REF
     expressions, like those used for ObjC messenger dispatches.  */
  if (params && !params->is_empty ())
    function = objc_rewrite_function_call (function, (*params)[0]);

  function = c_fully_fold (function, false, NULL);

  fntype = TREE_TYPE (function);

  if (TREE_CODE (fntype) == ERROR_MARK)
    return error_mark_node;

  /* After the conversions above, a valid callee must be a pointer to
     function; diagnose anything else.  */
  if (!(TREE_CODE (fntype) == POINTER_TYPE
	&& TREE_CODE (TREE_TYPE (fntype)) == FUNCTION_TYPE))
    {
      if (!flag_diagnostics_show_caret && !STATEMENT_CLASS_P (function))
	error_at (loc,
		  "called object %qE is not a function or function pointer" ,
		  function);
      else if (DECL_P (function))
	{
	  error_at (loc,
		    "called object %qD is not a function or function pointer" ,
		    function);
	  inform_declaration (decl: function);
	}
      else
	error_at (loc,
		  "called object is not a function or function pointer" );
      return error_mark_node;
    }

  /* TREE_THIS_VOLATILE on a FUNCTION_DECL means noreturn; such a call
     makes the current function return abnormally.  */
  if (fundecl && TREE_THIS_VOLATILE (fundecl))
    current_function_returns_abnormally = 1;

  /* fntype now gets the type of function pointed to.  */
  fntype = TREE_TYPE (fntype);
  tree return_type = TREE_TYPE (fntype);

  /* Convert the parameters to the types declared in the
     function prototype, or apply default promotions.  */

  nargs = convert_arguments (loc, arg_loc, TYPE_ARG_TYPES (fntype), params,
			     origtypes, function, fundecl);
  if (nargs < 0)
    return error_mark_node;

  /* Check that the function is called through a compatible prototype.
     If it is not, warn.  */
  if (CONVERT_EXPR_P (function)
      && TREE_CODE (tem = TREE_OPERAND (function, 0)) == ADDR_EXPR
      && TREE_CODE (tem = TREE_OPERAND (tem, 0)) == FUNCTION_DECL
      && !comptypes (type1: fntype, TREE_TYPE (tem)))
    {
      /* This situation leads to run-time undefined behavior.  We can't,
	 therefore, simply error unless we can prove that all possible
	 executions of the program must execute the code.  */
      warning_at (loc, 0, "function called through a non-compatible type" );

      if (VOID_TYPE_P (return_type)
	  && TYPE_QUALS (return_type) != TYPE_UNQUALIFIED)
	pedwarn (loc, 0,
		 "function with qualified void return type called" );
    }

  argarray = vec_safe_address (v: params);

  /* Check that arguments to builtin functions match the expectations.  */
  if (fundecl
      && fndecl_built_in_p (node: fundecl)
      && !check_builtin_function_arguments (loc, arg_loc, fundecl,
					    orig_fundecl, nargs, argarray))
    return error_mark_node;

  /* Check that the arguments to the function are valid.  */
  bool warned_p = check_function_arguments (loc, fundecl, fntype,
					    nargs, argarray, &arg_loc);

  /* The call expression itself is built with an unqualified version of
     a non-void return type.  */
  if (TYPE_QUALS (return_type) != TYPE_UNQUALIFIED
      && !VOID_TYPE_P (return_type))
    return_type = c_build_qualified_type (return_type, TYPE_UNQUALIFIED);
  /* Fold calls to built-ins so they can be evaluated at compile time
     where possible.  */
  if (name != NULL_TREE
      && startswith (IDENTIFIER_POINTER (name), prefix: "__builtin_" ))
    {
      if (require_constant_value)
	result
	  = fold_build_call_array_initializer_loc (loc, return_type,
						   function, nargs, argarray);
      else
	result = fold_build_call_array_loc (loc, return_type,
					    function, nargs, argarray);
      if (TREE_CODE (result) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (result, 0)) == INTEGER_CST)
	STRIP_TYPE_NOPS (result);
    }
  else
    result = build_call_array_loc (loc, return_type,
				   function, nargs, argarray);
  /* If -Wnonnull warning has been diagnosed, avoid diagnosing it again
     later.  */
  if (warned_p && TREE_CODE (result) == CALL_EXPR)
    suppress_warning (result, OPT_Wnonnull);

  /* In this improbable scenario, a nested function returns a VM type.
     Create a TARGET_EXPR so that the call always has a LHS, much as
     what the C++ FE does for functions returning non-PODs.  */
  if (C_TYPE_VARIABLY_MODIFIED (TREE_TYPE (fntype)))
    {
      tree tmp = create_tmp_var_raw (TREE_TYPE (fntype));
      result = build4 (TARGET_EXPR, TREE_TYPE (fntype), tmp, result,
		       NULL_TREE, NULL_TREE);
    }

  if (VOID_TYPE_P (TREE_TYPE (result)))
    {
      if (TYPE_QUALS (TREE_TYPE (result)) != TYPE_UNQUALIFIED)
	pedwarn (loc, 0,
		 "function with qualified void return type called" );
      return result;
    }
  return require_complete_type (loc, value: result);
}
3318 | |
3319 | /* Like build_function_call_vec, but call also resolve_overloaded_builtin. */ |
3320 | |
3321 | tree |
3322 | c_build_function_call_vec (location_t loc, const vec<location_t> &arg_loc, |
3323 | tree function, vec<tree, va_gc> *params, |
3324 | vec<tree, va_gc> *origtypes) |
3325 | { |
3326 | /* Strip NON_LVALUE_EXPRs, etc., since we aren't using as an lvalue. */ |
3327 | STRIP_TYPE_NOPS (function); |
3328 | |
3329 | /* Convert anything with function type to a pointer-to-function. */ |
3330 | if (TREE_CODE (function) == FUNCTION_DECL) |
3331 | { |
3332 | /* Implement type-directed function overloading for builtins. |
3333 | resolve_overloaded_builtin and targetm.resolve_overloaded_builtin |
3334 | handle all the type checking. The result is a complete expression |
3335 | that implements this function call. */ |
3336 | tree tem = resolve_overloaded_builtin (loc, function, params); |
3337 | if (tem) |
3338 | return tem; |
3339 | } |
3340 | return build_function_call_vec (loc, arg_loc, function, params, origtypes); |
3341 | } |
3342 | |
3343 | /* Helper for convert_arguments called to convert the VALue of argument |
3344 | number ARGNUM from ORIGTYPE to the corresponding parameter number |
3345 | PARMNUM and TYPE. |
3346 | PLOC is the location where the conversion is being performed. |
3347 | FUNCTION and FUNDECL are the same as in convert_arguments. |
3348 | VALTYPE is the original type of VAL before the conversion and, |
3349 | for EXCESS_PRECISION_EXPR, the operand of the expression. |
3350 | NPC is true if VAL represents the null pointer constant (VAL itself |
3351 | will have been folded to an integer constant). |
3352 | RNAME is the same as FUNCTION except in Objective C when it's |
3353 | the function selector. |
3354 | EXCESS_PRECISION is true when VAL was originally represented |
3355 | as EXCESS_PRECISION_EXPR. |
3356 | WARNOPT is the same as in convert_for_assignment. */ |
3357 | |
3358 | static tree |
3359 | convert_argument (location_t ploc, tree function, tree fundecl, |
3360 | tree type, tree origtype, tree val, tree valtype, |
3361 | bool npc, tree rname, int parmnum, int argnum, |
3362 | bool excess_precision, int warnopt) |
3363 | { |
3364 | /* Formal parm type is specified by a function prototype. */ |
3365 | |
3366 | if (type == error_mark_node || !COMPLETE_TYPE_P (type)) |
3367 | { |
3368 | error_at (ploc, "type of formal parameter %d is incomplete" , |
3369 | parmnum + 1); |
3370 | return error_mark_node; |
3371 | } |
3372 | |
3373 | /* Optionally warn about conversions that differ from the default |
3374 | conversions. */ |
3375 | if (warn_traditional_conversion || warn_traditional) |
3376 | { |
3377 | if (INTEGRAL_TYPE_P (type) |
3378 | && SCALAR_FLOAT_TYPE_P (valtype)) |
3379 | warning_at (ploc, OPT_Wtraditional_conversion, |
3380 | "passing argument %d of %qE as integer rather " |
3381 | "than floating due to prototype" , |
3382 | argnum, rname); |
3383 | if (INTEGRAL_TYPE_P (type) |
3384 | && TREE_CODE (valtype) == COMPLEX_TYPE) |
3385 | warning_at (ploc, OPT_Wtraditional_conversion, |
3386 | "passing argument %d of %qE as integer rather " |
3387 | "than complex due to prototype" , |
3388 | argnum, rname); |
3389 | else if (TREE_CODE (type) == COMPLEX_TYPE |
3390 | && SCALAR_FLOAT_TYPE_P (valtype)) |
3391 | warning_at (ploc, OPT_Wtraditional_conversion, |
3392 | "passing argument %d of %qE as complex rather " |
3393 | "than floating due to prototype" , |
3394 | argnum, rname); |
3395 | else if (SCALAR_FLOAT_TYPE_P (type) |
3396 | && INTEGRAL_TYPE_P (valtype)) |
3397 | warning_at (ploc, OPT_Wtraditional_conversion, |
3398 | "passing argument %d of %qE as floating rather " |
3399 | "than integer due to prototype" , |
3400 | argnum, rname); |
3401 | else if (TREE_CODE (type) == COMPLEX_TYPE |
3402 | && INTEGRAL_TYPE_P (valtype)) |
3403 | warning_at (ploc, OPT_Wtraditional_conversion, |
3404 | "passing argument %d of %qE as complex rather " |
3405 | "than integer due to prototype" , |
3406 | argnum, rname); |
3407 | else if (SCALAR_FLOAT_TYPE_P (type) |
3408 | && TREE_CODE (valtype) == COMPLEX_TYPE) |
3409 | warning_at (ploc, OPT_Wtraditional_conversion, |
3410 | "passing argument %d of %qE as floating rather " |
3411 | "than complex due to prototype" , |
3412 | argnum, rname); |
3413 | /* ??? At some point, messages should be written about |
3414 | conversions between complex types, but that's too messy |
3415 | to do now. */ |
3416 | else if (SCALAR_FLOAT_TYPE_P (type) |
3417 | && SCALAR_FLOAT_TYPE_P (valtype)) |
3418 | { |
3419 | unsigned int formal_prec = TYPE_PRECISION (type); |
3420 | |
3421 | /* Warn if any argument is passed as `float', |
3422 | since without a prototype it would be `double'. */ |
3423 | if (formal_prec == TYPE_PRECISION (float_type_node) |
3424 | && type != dfloat32_type_node) |
3425 | warning_at (ploc, 0, |
3426 | "passing argument %d of %qE as %<float%> " |
3427 | "rather than %<double%> due to prototype" , |
3428 | argnum, rname); |
3429 | |
3430 | /* Warn if mismatch between argument and prototype |
3431 | for decimal float types. Warn of conversions with |
3432 | binary float types and of precision narrowing due to |
3433 | prototype. */ |
3434 | else if (type != valtype |
3435 | && (type == dfloat32_type_node |
3436 | || type == dfloat64_type_node |
3437 | || type == dfloat128_type_node |
3438 | || valtype == dfloat32_type_node |
3439 | || valtype == dfloat64_type_node |
3440 | || valtype == dfloat128_type_node) |
3441 | && (formal_prec |
3442 | <= TYPE_PRECISION (valtype) |
3443 | || (type == dfloat128_type_node |
3444 | && (valtype |
3445 | != dfloat64_type_node |
3446 | && (valtype |
3447 | != dfloat32_type_node))) |
3448 | || (type == dfloat64_type_node |
3449 | && (valtype |
3450 | != dfloat32_type_node)))) |
3451 | warning_at (ploc, 0, |
3452 | "passing argument %d of %qE as %qT " |
3453 | "rather than %qT due to prototype" , |
3454 | argnum, rname, type, valtype); |
3455 | |
3456 | } |
3457 | /* Detect integer changing in width or signedness. |
3458 | These warnings are only activated with |
3459 | -Wtraditional-conversion, not with -Wtraditional. */ |
3460 | else if (warn_traditional_conversion |
3461 | && INTEGRAL_TYPE_P (type) |
3462 | && INTEGRAL_TYPE_P (valtype)) |
3463 | { |
3464 | unsigned int formal_prec = TYPE_PRECISION (type); |
3465 | tree would_have_been = default_conversion (exp: val); |
3466 | tree type1 = TREE_TYPE (would_have_been); |
3467 | |
3468 | if (val == error_mark_node) |
3469 | /* VAL could have been of incomplete type. */; |
3470 | else if (TREE_CODE (type) == ENUMERAL_TYPE |
3471 | && (TYPE_MAIN_VARIANT (type) |
3472 | == TYPE_MAIN_VARIANT (valtype))) |
3473 | /* No warning if function asks for enum |
3474 | and the actual arg is that enum type. */ |
3475 | ; |
3476 | else if (formal_prec != TYPE_PRECISION (type1)) |
3477 | warning_at (ploc, OPT_Wtraditional_conversion, |
3478 | "passing argument %d of %qE " |
3479 | "with different width due to prototype" , |
3480 | argnum, rname); |
3481 | else if (TYPE_UNSIGNED (type) == TYPE_UNSIGNED (type1)) |
3482 | ; |
3483 | /* Don't complain if the formal parameter type |
3484 | is an enum, because we can't tell now whether |
3485 | the value was an enum--even the same enum. */ |
3486 | else if (TREE_CODE (type) == ENUMERAL_TYPE) |
3487 | ; |
3488 | else if (TREE_CODE (val) == INTEGER_CST |
3489 | && int_fits_type_p (val, type)) |
3490 | /* Change in signedness doesn't matter |
3491 | if a constant value is unaffected. */ |
3492 | ; |
3493 | /* If the value is extended from a narrower |
3494 | unsigned type, it doesn't matter whether we |
3495 | pass it as signed or unsigned; the value |
3496 | certainly is the same either way. */ |
3497 | else if (TYPE_PRECISION (valtype) < TYPE_PRECISION (type) |
3498 | && TYPE_UNSIGNED (valtype)) |
3499 | ; |
3500 | else if (TYPE_UNSIGNED (type)) |
3501 | warning_at (ploc, OPT_Wtraditional_conversion, |
3502 | "passing argument %d of %qE " |
3503 | "as unsigned due to prototype" , |
3504 | argnum, rname); |
3505 | else |
3506 | warning_at (ploc, OPT_Wtraditional_conversion, |
3507 | "passing argument %d of %qE " |
3508 | "as signed due to prototype" , |
3509 | argnum, rname); |
3510 | } |
3511 | } |
3512 | |
3513 | /* Possibly restore an EXCESS_PRECISION_EXPR for the |
3514 | sake of better warnings from convert_and_check. */ |
3515 | if (excess_precision) |
3516 | val = build1 (EXCESS_PRECISION_EXPR, valtype, val); |
3517 | |
3518 | tree parmval = convert_for_assignment (ploc, ploc, type, |
3519 | val, origtype, ic_argpass, |
3520 | npc, fundecl, function, |
3521 | parmnum + 1, warnopt); |
3522 | |
3523 | if (targetm.calls.promote_prototypes (fundecl ? TREE_TYPE (fundecl) : 0) |
3524 | && INTEGRAL_TYPE_P (type) |
3525 | && (TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node))) |
3526 | parmval = default_conversion (exp: parmval); |
3527 | |
3528 | return parmval; |
3529 | } |
3530 | |
3531 | /* Convert the argument expressions in the vector VALUES |
3532 | to the types in the list TYPELIST. |
3533 | |
3534 | If TYPELIST is exhausted, or when an element has NULL as its type, |
3535 | perform the default conversions. |
3536 | |
3537 | ORIGTYPES is the original types of the expressions in VALUES. This |
3538 | holds the type of enum values which have been converted to integral |
3539 | types. It may be NULL. |
3540 | |
3541 | FUNCTION is a tree for the called function. It is used only for |
3542 | error messages, where it is formatted with %qE. |
3543 | |
3544 | This is also where warnings about wrong number of args are generated. |
3545 | |
3546 | ARG_LOC are locations of function arguments (if any). |
3547 | |
3548 | Returns the actual number of arguments processed (which may be less |
3549 | than the length of VALUES in some error situations), or -1 on |
3550 | failure. */ |
3551 | |
static int
convert_arguments (location_t loc, vec<location_t> arg_loc, tree typelist,
		   vec<tree, va_gc> *values, vec<tree, va_gc> *origtypes,
		   tree function, tree fundecl)
{
  /* Index of the argument currently being converted.  */
  unsigned int parmnum;
  /* Set once any argument fails to convert (error_mark_node).  */
  bool error_args = false;
  const bool type_generic = fundecl
    && lookup_attribute (attr_name: "type generic" , TYPE_ATTRIBUTES (TREE_TYPE (fundecl)));
  bool type_generic_remove_excess_precision = false;
  bool type_generic_overflow_p = false;
  tree selector;

  /* Change pointer to function to the function itself for
     diagnostics. */
  if (TREE_CODE (function) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (function, 0)) == FUNCTION_DECL)
    function = TREE_OPERAND (function, 0);

  /* Handle an ObjC selector specially for diagnostics. */
  selector = objc_message_selector ();

  /* For a call to a built-in function declared without a prototype,
     set to the built-in function's argument list. */
  tree builtin_typelist = NULL_TREE;

  /* For type-generic built-in functions, determine whether excess
     precision should be removed (classification) or not
     (comparison). */
  if (fundecl
      && fndecl_built_in_p (node: fundecl, klass: BUILT_IN_NORMAL))
    {
      built_in_function code = DECL_FUNCTION_CODE (decl: fundecl);
      if (C_DECL_BUILTIN_PROTOTYPE (fundecl))
	{
	  /* For a call to a built-in function declared without a prototype
	     use the types of the parameters of the internal built-in to
	     match those of the arguments to. */
	  if (tree bdecl = builtin_decl_explicit (fncode: code))
	    builtin_typelist = TYPE_ARG_TYPES (TREE_TYPE (bdecl));
	}

      /* For type-generic built-in functions, determine whether excess
	 precision should be removed (classification) or not
	 (comparison). */
      if (type_generic)
	switch (code)
	  {
	  case BUILT_IN_ISFINITE:
	  case BUILT_IN_ISINF:
	  case BUILT_IN_ISINF_SIGN:
	  case BUILT_IN_ISNAN:
	  case BUILT_IN_ISNORMAL:
	  case BUILT_IN_ISSIGNALING:
	  case BUILT_IN_FPCLASSIFY:
	    type_generic_remove_excess_precision = true;
	    break;

	  case BUILT_IN_ADD_OVERFLOW_P:
	  case BUILT_IN_SUB_OVERFLOW_P:
	  case BUILT_IN_MUL_OVERFLOW_P:
	    /* The last argument of these type-generic builtins
	       should not be promoted. */
	    type_generic_overflow_p = true;
	    break;

	  default:
	    break;
	  }
    }

  /* Scan the given expressions (VALUES) and types (TYPELIST), producing
     individual converted arguments. */

  tree typetail, builtin_typetail, val;
  for (typetail = typelist,
       builtin_typetail = builtin_typelist,
       parmnum = 0;
       values && values->iterate (ix: parmnum, ptr: &val);
       ++parmnum)
    {
      /* The type of the function parameter (if it was declared with one). */
      tree type = typetail ? TREE_VALUE (typetail) : NULL_TREE;
      /* The type of the built-in function parameter (if the function
	 is a built-in).  Used to detect type incompatibilities in
	 calls to built-ins declared without a prototype. */
      tree builtin_type = (builtin_typetail
			   ? TREE_VALUE (builtin_typetail) : NULL_TREE);
      /* The original type of the argument being passed to the function. */
      tree valtype = TREE_TYPE (val);
      /* The called function (or function selector in Objective C). */
      tree rname = function;
      int argnum = parmnum + 1;
      const char *invalid_func_diag;
      /* Set for EXCESS_PRECISION_EXPR arguments. */
      bool excess_precision = false;
      /* The value of the argument after conversion to the type
	 of the function parameter it is passed to. */
      tree parmval;
      /* Some __atomic_* builtins have additional hidden argument at
	 position 0. */
      location_t ploc
	= !arg_loc.is_empty () && values->length () == arg_loc.length ()
	  ? expansion_point_location_if_in_system_header (arg_loc[parmnum])
	  : input_location;

      /* A prototype's type list terminates with void_type_node;
	 reaching it here means more arguments than parameters.  */
      if (type == void_type_node)
	{
	  if (selector)
	    error_at (loc, "too many arguments to method %qE" , selector);
	  else
	    error_at (loc, "too many arguments to function %qE" , function);
	  inform_declaration (decl: fundecl);
	  return error_args ? -1 : (int) parmnum;
	}

      /* Same check against the internal built-in's parameter list, but
	 only a warning; stop consulting it after the mismatch.  */
      if (builtin_type == void_type_node)
	{
	  if (warning_at (loc, OPT_Wbuiltin_declaration_mismatch,
			  "too many arguments to built-in function %qE "
			  "expecting %d" , function, parmnum))
	    inform_declaration (decl: fundecl);
	  builtin_typetail = NULL_TREE;
	}

      /* For an ObjC message send, renumber diagnostics relative to the
	 selector; presumably the first two actual arguments are the
	 receiver and selector -- TODO confirm against the ObjC FE.  */
      if (selector && argnum > 2)
	{
	  rname = selector;
	  argnum -= 2;
	}

      /* Determine if VAL is a null pointer constant before folding it. */
      bool npc = null_pointer_constant_p (expr: val);

      /* If there is excess precision and a prototype, convert once to
	 the required type rather than converting via the semantic
	 type.  Likewise without a prototype a float value represented
	 as long double should be converted once to double.  But for
	 type-generic classification functions excess precision must
	 be removed here. */
      if (TREE_CODE (val) == EXCESS_PRECISION_EXPR
	  && (type || !type_generic || !type_generic_remove_excess_precision))
	{
	  val = TREE_OPERAND (val, 0);
	  excess_precision = true;
	}
      val = c_fully_fold (val, false, NULL);
      STRIP_TYPE_NOPS (val);

      val = require_complete_type (loc: ploc, value: val);

      /* Some floating-point arguments must be promoted to double when
	 no type is specified by a prototype.  This applies to
	 arguments of type float, and to architecture-specific types
	 (ARM __fp16), but not to _FloatN or _FloatNx types. */
      bool promote_float_arg = false;
      if (type == NULL_TREE
	  && TREE_CODE (valtype) == REAL_TYPE
	  && (TYPE_PRECISION (valtype)
	      <= TYPE_PRECISION (double_type_node))
	  && TYPE_MAIN_VARIANT (valtype) != double_type_node
	  && TYPE_MAIN_VARIANT (valtype) != long_double_type_node
	  && !DECIMAL_FLOAT_MODE_P (TYPE_MODE (valtype)))
	{
	  /* Promote this argument, unless it has a _FloatN or
	     _FloatNx type. */
	  promote_float_arg = true;
	  for (int i = 0; i < NUM_FLOATN_NX_TYPES; i++)
	    if (TYPE_MAIN_VARIANT (valtype) == FLOATN_NX_TYPE_NODE (i))
	      {
		promote_float_arg = false;
		break;
	      }
	  /* Don't promote __bf16 either. */
	  if (TYPE_MAIN_VARIANT (valtype) == bfloat16_type_node)
	    promote_float_arg = false;
	}

      if (type != NULL_TREE)
	{
	  /* Prototyped parameter: convert with full error checking.  */
	  tree origtype = (!origtypes) ? NULL_TREE : (*origtypes)[parmnum];
	  parmval = convert_argument (ploc, function, fundecl, type, origtype,
				      val, valtype, npc, rname, parmnum, argnum,
				      excess_precision, warnopt: 0);
	}
      else if (promote_float_arg)
	{
	  if (type_generic)
	    parmval = val;
	  else
	    {
	      /* Convert `float' to `double'. */
	      if (warn_double_promotion && !c_inhibit_evaluation_warnings)
		warning_at (ploc, OPT_Wdouble_promotion,
			    "implicit conversion from %qT to %qT when passing "
			    "argument to function" ,
			    valtype, double_type_node);
	      parmval = convert (double_type_node, val);
	    }
	}
      else if ((excess_precision && !type_generic)
	       || (type_generic_overflow_p && parmnum == 2))
	/* A "double" argument with excess precision being passed
	   without a prototype or in variable arguments.
	   The last argument of __builtin_*_overflow_p should not be
	   promoted. */
	parmval = convert (valtype, val);
      else if ((invalid_func_diag =
		targetm.calls.invalid_arg_for_unprototyped_fn (typelist, fundecl, val)))
	{
	  error (invalid_func_diag);
	  return -1;
	}
      else if (TREE_CODE (val) == ADDR_EXPR && reject_gcc_builtin (val))
	{
	  return -1;
	}
      else
	/* Convert `short' and `char' to full-size `int'. */
	parmval = default_conversion (exp: val);

      /* Record the converted argument and note any conversion failure.  */
      (*values)[parmnum] = parmval;
      if (parmval == error_mark_node)
	error_args = true;

      if (!type && builtin_type && TREE_CODE (builtin_type) != VOID_TYPE)
	{
	  /* For a call to a built-in function declared without a prototype,
	     perform the conversions from the argument to the expected type
	     but issue warnings rather than errors for any mismatches.
	     Ignore the converted argument and use the PARMVAL obtained
	     above by applying default conversions instead. */
	  tree origtype = (!origtypes) ? NULL_TREE : (*origtypes)[parmnum];
	  convert_argument (ploc, function, fundecl, type: builtin_type, origtype,
			    val, valtype, npc, rname, parmnum, argnum,
			    excess_precision,
			    warnopt: OPT_Wbuiltin_declaration_mismatch);
	}

      if (typetail)
	typetail = TREE_CHAIN (typetail);

      if (builtin_typetail)
	builtin_typetail = TREE_CHAIN (builtin_typetail);
    }

  /* Every element of VALUES must have been visited above.  */
  gcc_assert (parmnum == vec_safe_length (values));

  /* Unconsumed prototype parameters (other than the terminating void)
     mean the call supplied too few arguments.  */
  if (typetail != NULL_TREE && TREE_VALUE (typetail) != void_type_node)
    {
      error_at (loc, "too few arguments to function %qE" , function);
      inform_declaration (decl: fundecl);
      return -1;
    }

  /* Likewise for the internal built-in's parameter list, as a warning.  */
  if (builtin_typetail && TREE_VALUE (builtin_typetail) != void_type_node)
    {
      unsigned nargs = parmnum;
      for (tree t = builtin_typetail; t; t = TREE_CHAIN (t))
	++nargs;

      if (warning_at (loc, OPT_Wbuiltin_declaration_mismatch,
		      "too few arguments to built-in function %qE "
		      "expecting %u" , function, nargs - 1))
	inform_declaration (decl: fundecl);
    }

  return error_args ? -1 : (int) parmnum;
}
3821 | |
3822 | /* This is the entry point used by the parser to build unary operators |
3823 | in the input. CODE, a tree_code, specifies the unary operator, and |
3824 | ARG is the operand. For unary plus, the C parser currently uses |
3825 | CONVERT_EXPR for code. |
3826 | |
3827 | LOC is the location to use for the tree generated. |
3828 | */ |
3829 | |
3830 | struct c_expr |
3831 | parser_build_unary_op (location_t loc, enum tree_code code, struct c_expr arg) |
3832 | { |
3833 | struct c_expr result; |
3834 | |
3835 | result.original_code = code; |
3836 | result.original_type = NULL; |
3837 | result.m_decimal = 0; |
3838 | |
3839 | if (reject_gcc_builtin (arg.value)) |
3840 | { |
3841 | result.value = error_mark_node; |
3842 | } |
3843 | else |
3844 | { |
3845 | result.value = build_unary_op (loc, code, arg.value, false); |
3846 | |
3847 | if (TREE_OVERFLOW_P (result.value) && !TREE_OVERFLOW_P (arg.value)) |
3848 | overflow_warning (loc, result.value, arg.value); |
3849 | } |
3850 | |
3851 | /* We are typically called when parsing a prefix token at LOC acting on |
3852 | ARG. Reflect this by updating the source range of the result to |
3853 | start at LOC and end at the end of ARG. */ |
3854 | set_c_expr_source_range (expr: &result, |
3855 | start: loc, finish: arg.get_finish ()); |
3856 | |
3857 | return result; |
3858 | } |
3859 | |
3860 | /* Returns true if TYPE is a character type, *not* including wchar_t. */ |
3861 | |
3862 | bool |
3863 | char_type_p (tree type) |
3864 | { |
3865 | return (type == char_type_node |
3866 | || type == unsigned_char_type_node |
3867 | || type == signed_char_type_node |
3868 | || type == char16_type_node |
3869 | || type == char32_type_node); |
3870 | } |
3871 | |
3872 | /* This is the entry point used by the parser to build binary operators |
3873 | in the input. CODE, a tree_code, specifies the binary operator, and |
3874 | ARG1 and ARG2 are the operands. In addition to constructing the |
3875 | expression, we check for operands that were written with other binary |
3876 | operators in a way that is likely to confuse the user. |
3877 | |
3878 | LOCATION is the location of the binary operator. */ |
3879 | |
struct c_expr
parser_build_binary_op (location_t location, enum tree_code code,
			struct c_expr arg1, struct c_expr arg2)
{
  struct c_expr result;
  result.m_decimal = 0;

  /* The pre-folding codes/types of the operands, used below for the
     various "likely to confuse the user" warnings.  */
  enum tree_code code1 = arg1.original_code;
  enum tree_code code2 = arg2.original_code;
  tree type1 = (arg1.original_type
		? arg1.original_type
		: TREE_TYPE (arg1.value));
  tree type2 = (arg2.original_type
		? arg2.original_type
		: TREE_TYPE (arg2.value));

  result.value = build_binary_op (location, code,
				  arg1.value, arg2.value, true);
  result.original_code = code;
  result.original_type = NULL;
  /* (Redundant with the initialization above; kept as-is.)  */
  result.m_decimal = 0;

  /* On error, set the source range but skip all the warnings below.  */
  if (TREE_CODE (result.value) == ERROR_MARK)
    {
      set_c_expr_source_range (expr: &result,
			       start: arg1.get_start (),
			       finish: arg2.get_finish ());
      return result;
    }

  if (location != UNKNOWN_LOCATION)
    protected_set_expr_location (result.value, location);

  set_c_expr_source_range (expr: &result,
			   start: arg1.get_start (),
			   finish: arg2.get_finish ());

  /* Check for cases such as x+y<<z which users are likely
     to misinterpret. */
  if (warn_parentheses)
    warn_about_parentheses (location, code, code1, arg1.value, code2,
			    arg2.value);

  if (warn_logical_op)
    warn_logical_operator (location, code, TREE_TYPE (result.value),
			   code1, arg1.value, code2, arg2.value);

  if (warn_tautological_compare)
    {
      /* Look through C_MAYBE_CONST_EXPR wrappers, but give up on an
	 operand whose pre-evaluation part has side effects.  */
      tree lhs = arg1.value;
      tree rhs = arg2.value;
      if (TREE_CODE (lhs) == C_MAYBE_CONST_EXPR)
	{
	  if (C_MAYBE_CONST_EXPR_PRE (lhs) != NULL_TREE
	      && TREE_SIDE_EFFECTS (C_MAYBE_CONST_EXPR_PRE (lhs)))
	    lhs = NULL_TREE;
	  else
	    lhs = C_MAYBE_CONST_EXPR_EXPR (lhs);
	}
      if (TREE_CODE (rhs) == C_MAYBE_CONST_EXPR)
	{
	  if (C_MAYBE_CONST_EXPR_PRE (rhs) != NULL_TREE
	      && TREE_SIDE_EFFECTS (C_MAYBE_CONST_EXPR_PRE (rhs)))
	    rhs = NULL_TREE;
	  else
	    rhs = C_MAYBE_CONST_EXPR_EXPR (rhs);
	}
      if (lhs != NULL_TREE && rhs != NULL_TREE)
	warn_tautological_cmp (location, code, lhs, rhs);
    }

  if (warn_logical_not_paren
      && TREE_CODE_CLASS (code) == tcc_comparison
      && code1 == TRUTH_NOT_EXPR
      && code2 != TRUTH_NOT_EXPR
      /* Avoid warning for !!x == y. */
      && (TREE_CODE (arg1.value) != NE_EXPR
	  || !integer_zerop (TREE_OPERAND (arg1.value, 1))))
    {
      /* Avoid warning for !b == y where b has _Bool type. */
      tree t = integer_zero_node;
      if (TREE_CODE (arg1.value) == EQ_EXPR
	  && integer_zerop (TREE_OPERAND (arg1.value, 1))
	  && TREE_TYPE (TREE_OPERAND (arg1.value, 0)) == integer_type_node)
	{
	  /* Strip conversions and C_MAYBE_CONST_EXPR wrappers to find
	     the underlying operand of the '!'.  */
	  t = TREE_OPERAND (arg1.value, 0);
	  do
	    {
	      if (TREE_TYPE (t) != integer_type_node)
		break;
	      if (TREE_CODE (t) == C_MAYBE_CONST_EXPR)
		t = C_MAYBE_CONST_EXPR_EXPR (t);
	      else if (CONVERT_EXPR_P (t))
		t = TREE_OPERAND (t, 0);
	      else
		break;
	    }
	  while (1);
	}
      if (!C_BOOLEAN_TYPE_P (TREE_TYPE (t)))
	warn_logical_not_parentheses (location, code, arg1.value, arg2.value);
    }

  /* Warn about comparisons against string literals, with the exception
     of testing for equality or inequality of a string literal with NULL. */
  if (code == EQ_EXPR || code == NE_EXPR)
    {
      if ((code1 == STRING_CST
	   && !integer_zerop (tree_strip_nop_conversions (arg2.value)))
	  || (code2 == STRING_CST
	      && !integer_zerop (tree_strip_nop_conversions (arg1.value))))
	warning_at (location, OPT_Waddress,
		    "comparison with string literal results in unspecified behavior" );
      /* Warn for ptr == '\0', it's likely that it should've been ptr[0]. */
      if (POINTER_TYPE_P (type1)
	  && null_pointer_constant_p (expr: arg2.value)
	  && char_type_p (type: type2))
	{
	  auto_diagnostic_group d;
	  if (warning_at (location, OPT_Wpointer_compare,
			  "comparison between pointer and zero character "
			  "constant" ))
	    inform (arg1.get_start (),
		    "did you mean to dereference the pointer?" );
	}
      else if (POINTER_TYPE_P (type2)
	       && null_pointer_constant_p (expr: arg1.value)
	       && char_type_p (type: type1))
	{
	  auto_diagnostic_group d;
	  if (warning_at (location, OPT_Wpointer_compare,
			  "comparison between pointer and zero character "
			  "constant" ))
	    inform (arg2.get_start (),
		    "did you mean to dereference the pointer?" );
	}
    }
  else if (TREE_CODE_CLASS (code) == tcc_comparison
	   && (code1 == STRING_CST || code2 == STRING_CST))
    warning_at (location, OPT_Waddress,
		"comparison with string literal results in unspecified "
		"behavior" );

  if (warn_array_compare
      && TREE_CODE_CLASS (code) == tcc_comparison
      && TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    do_warn_array_compare (location, code, arg1.value, arg2.value);

  /* Warn only if the overflow was introduced by this operation, not
     inherited from an operand.  */
  if (TREE_OVERFLOW_P (result.value)
      && !TREE_OVERFLOW_P (arg1.value)
      && !TREE_OVERFLOW_P (arg2.value))
    overflow_warning (location, result.value);

  /* Warn about comparisons of different enum types. */
  if (warn_enum_compare
      && TREE_CODE_CLASS (code) == tcc_comparison
      && TREE_CODE (type1) == ENUMERAL_TYPE
      && TREE_CODE (type2) == ENUMERAL_TYPE
      && TYPE_MAIN_VARIANT (type1) != TYPE_MAIN_VARIANT (type2))
    warning_at (location, OPT_Wenum_compare,
		"comparison between %qT and %qT" ,
		type1, type2);

  /* Warn about 2^8 style mistakes, but only for decimal operands.  */
  if (warn_xor_used_as_pow
      && code == BIT_XOR_EXPR
      && arg1.m_decimal
      && arg2.m_decimal)
    check_for_xor_used_as_pow (lhs_loc: arg1.get_location (), lhs_val: arg1.value,
			       operator_loc: location,
			       rhs_loc: arg2.get_location (), rhs_val: arg2.value);

  return result;
}
4054 | |
4055 | /* Return a tree for the difference of pointers OP0 and OP1. |
4056 | The resulting tree has type ptrdiff_t. If POINTER_SUBTRACT sanitization is |
4057 | enabled, assign to INSTRUMENT_EXPR call to libsanitizer. */ |
4058 | |
static tree
pointer_diff (location_t loc, tree op0, tree op1, tree *instrument_expr)
{
  tree restype = ptrdiff_type_node;
  tree result, inttype;

  addr_space_t as0 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (op0)));
  addr_space_t as1 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (op1)));
  /* Pointed-to type; its size becomes the divisor below.  */
  tree target_type = TREE_TYPE (TREE_TYPE (op0));
  /* Keep the unconverted operands for the diagnostics near the end.  */
  tree orig_op0 = op0;
  tree orig_op1 = op1;

  /* If the operands point into different address spaces, we need to
     explicitly convert them to pointers into the common address space
     before we can subtract the numerical address values. */
  if (as0 != as1)
    {
      addr_space_t as_common;
      tree common_type;

      /* Determine the common superset address space.  This is guaranteed
	 to exist because the caller verified that comp_target_types
	 returned non-zero. */
      if (!addr_space_superset (as1: as0, as2: as1, common: &as_common))
	gcc_unreachable ();

      common_type = common_pointer_type (TREE_TYPE (op0), TREE_TYPE (op1));
      op0 = convert (common_type, op0);
      op1 = convert (common_type, op1);
    }

  /* Determine integer type result of the subtraction.  This will usually
     be the same as the result type (ptrdiff_t), but may need to be a wider
     type if pointers for the address space are wider than ptrdiff_t. */
  if (TYPE_PRECISION (restype) < TYPE_PRECISION (TREE_TYPE (op0)))
    inttype = c_common_type_for_size (TYPE_PRECISION (TREE_TYPE (op0)), 0);
  else
    inttype = restype;

  if (VOID_TYPE_P (target_type))
    pedwarn (loc, OPT_Wpointer_arith,
	     "pointer of type %<void *%> used in subtraction" );
  if (TREE_CODE (target_type) == FUNCTION_TYPE)
    pedwarn (loc, OPT_Wpointer_arith,
	     "pointer to a function used in subtraction" );

  if (current_function_decl != NULL_TREE
      && sanitize_flags_p (flag: SANITIZE_POINTER_SUBTRACT)
    {
      /* save_expr so each operand is evaluated only once, even though
	 it is used both in the subtraction and in the sanitizer call.  */
      op0 = save_expr (op0);
      op1 = save_expr (op1);

      tree tt = builtin_decl_explicit (fncode: BUILT_IN_ASAN_POINTER_SUBTRACT);
      *instrument_expr = build_call_expr_loc (loc, tt, 2, op0, op1);
    }

  /* First do the subtraction, then build the divide operator
     and only convert at the very end.
     Do not do default conversions in case restype is a short type. */

  /* POINTER_DIFF_EXPR requires a signed integer type of the same size as
     pointers.  If some platform cannot provide that, or has a larger
     ptrdiff_type to support differences larger than half the address
     space, cast the pointers to some larger integer type and do the
     computations in that type. */
  if (TYPE_PRECISION (inttype) > TYPE_PRECISION (TREE_TYPE (op0)))
    op0 = build_binary_op (loc, MINUS_EXPR, convert (inttype, op0),
			   convert (inttype, op1), false);
  else
    {
      /* Cast away qualifiers. */
      op0 = convert (c_common_type (TREE_TYPE (op0), TREE_TYPE (op0)), op0);
      op1 = convert (c_common_type (TREE_TYPE (op1), TREE_TYPE (op1)), op1);
      op0 = build2_loc (loc, code: POINTER_DIFF_EXPR, type: inttype, arg0: op0, arg1: op1);
    }

  /* This generates an error if op1 is pointer to incomplete type. */
  if (!COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (TREE_TYPE (orig_op1))))
    error_at (loc, "arithmetic on pointer to an incomplete type" );
  else if (verify_type_context (loc, TCTX_POINTER_ARITH,
				TREE_TYPE (TREE_TYPE (orig_op0))))
    verify_type_context (loc, TCTX_POINTER_ARITH,
			 TREE_TYPE (TREE_TYPE (orig_op1)));

  /* OP1 is reused to hold the element size, the divisor below.  */
  op1 = c_size_in_bytes (type: target_type);

  if (pointer_to_zero_sized_aggr_p (TREE_TYPE (orig_op1)))
    error_at (loc, "arithmetic on pointer to an empty aggregate" );

  /* Divide by the size, in easiest possible way. */
  result = fold_build2_loc (loc, EXACT_DIV_EXPR, inttype,
			    op0, convert (inttype, op1));

  /* Convert to final result type if necessary. */
  return convert (restype, result);
}
4155 | |
4156 | /* Expand atomic compound assignments into an appropriate sequence as |
4157 | specified by the C11 standard section 6.5.16.2. |
4158 | |
4159 | _Atomic T1 E1 |
4160 | T2 E2 |
4161 | E1 op= E2 |
4162 | |
4163 | This sequence is used for all types for which these operations are |
4164 | supported. |
4165 | |
4166 | In addition, built-in versions of the 'fe' prefixed routines may |
4167 | need to be invoked for floating point (real, complex or vector) when |
4168 | floating-point exceptions are supported. See 6.5.16.2 footnote 113. |
4169 | |
4170 | T1 newval; |
4171 | T1 old; |
4172 | T1 *addr |
4173 | T2 val |
4174 | fenv_t fenv |
4175 | |
4176 | addr = &E1; |
4177 | val = (E2); |
4178 | __atomic_load (addr, &old, SEQ_CST); |
4179 | feholdexcept (&fenv); |
4180 | loop: |
4181 | newval = old op val; |
4182 | if (__atomic_compare_exchange_strong (addr, &old, &newval, SEQ_CST, |
4183 | SEQ_CST)) |
4184 | goto done; |
4185 | feclearexcept (FE_ALL_EXCEPT); |
    goto loop;
4187 | done: |
4188 | feupdateenv (&fenv); |
4189 | |
4190 | The compiler will issue the __atomic_fetch_* built-in when possible, |
4191 | otherwise it will generate the generic form of the atomic operations. |
4192 | This requires temp(s) and has their address taken. The atomic processing |
4193 | is smart enough to figure out when the size of an object can utilize |
4194 | a lock-free version, and convert the built-in call to the appropriate |
4195 | lock-free routine. The optimizers will then dispose of any temps that |
4196 | are no longer required, and lock-free implementations are utilized as |
4197 | long as there is target support for the required size. |
4198 | |
4199 | If the operator is NOP_EXPR, then this is a simple assignment, and |
4200 | an __atomic_store is issued to perform the assignment rather than |
4201 | the above loop. */ |
4202 | |
/* Build an atomic assignment at LOC, expanding into the proper
   sequence to store LHS MODIFYCODE= RHS.  Return a value representing
   the result of the operation, unless RETURN_OLD_P, in which case
   return the old value of LHS (this is only for postincrement and
   postdecrement).

   Three expansion strategies are used, in order of preference:
     1. MODIFYCODE == NOP_EXPR (plain assignment): a single
	__atomic_store of the evaluated RHS.
     2. Integer/pointer LHS with a supported size and a fetch-style
	operation: a single __atomic_fetch_* / __atomic_*_fetch builtin.
     3. Everything else: a compare-and-swap loop (load; compute; CAS;
	retry on failure), optionally bracketed by floating-point
	environment hold/clear/update calls when -ftrapping-math.  */

static tree
build_atomic_assign (location_t loc, tree lhs, enum tree_code modifycode,
		     tree rhs, bool return_old_p)
{
  tree fndecl, func_call;
  vec<tree, va_gc> *params;
  tree val, nonatomic_lhs_type, nonatomic_rhs_type, newval, newval_addr;
  tree old, old_addr;
  tree compound_stmt = NULL_TREE;
  tree stmt, goto_stmt;
  tree loop_label, loop_decl, done_label, done_decl;

  tree lhs_type = TREE_TYPE (lhs);
  /* The __atomic_* builtins all operate through the object's address.  */
  tree lhs_addr = build_unary_op (loc, ADDR_EXPR, lhs, false);
  /* C11 atomic compound assignment uses sequentially-consistent
     ordering.  */
  tree seq_cst = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);
  tree rhs_semantic_type = TREE_TYPE (rhs);
  tree nonatomic_rhs_semantic_type;
  tree rhs_type;

  gcc_assert (TYPE_ATOMIC (lhs_type));

  /* RETURN_OLD_P is only for postincrement/postdecrement, which reach
     here as PLUS_EXPR/MINUS_EXPR.  */
  if (return_old_p)
    gcc_assert (modifycode == PLUS_EXPR || modifycode == MINUS_EXPR);

  /* Allocate enough vector items for a compare_exchange.  */
  vec_alloc (v&: params, nelems: 6);

  /* Create a compound statement to hold the sequence of statements
     with a loop.  */
  if (modifycode != NOP_EXPR)
    {
      compound_stmt = c_begin_compound_stmt (false);

      /* For consistency with build_modify_expr on non-_Atomic,
	 mark the lhs as read.  Also, it would be very hard to match
	 such expressions in mark_exp_read.  */
      mark_exp_read (exp: lhs);
    }

  /* Remove any excess precision (which is only present here in the
     case of compound assignments).  */
  if (TREE_CODE (rhs) == EXCESS_PRECISION_EXPR)
    {
      gcc_assert (modifycode != NOP_EXPR);
      rhs = TREE_OPERAND (rhs, 0);
    }
  rhs_type = TREE_TYPE (rhs);

  /* Fold the RHS if it hasn't already been folded.  */
  if (modifycode != NOP_EXPR)
    rhs = c_fully_fold (rhs, false, NULL);

  /* Remove the qualifiers for the rest of the expressions and create
     the VAL temp variable to hold the RHS.  */
  nonatomic_lhs_type = build_qualified_type (lhs_type, TYPE_UNQUALIFIED);
  nonatomic_rhs_type = build_qualified_type (rhs_type, TYPE_UNQUALIFIED);
  nonatomic_rhs_semantic_type = build_qualified_type (rhs_semantic_type,
						      TYPE_UNQUALIFIED);
  val = create_tmp_var_raw (nonatomic_rhs_type);
  /* VAL's address is passed to the atomic builtins below, so it must
     live in memory.  */
  TREE_ADDRESSABLE (val) = 1;
  suppress_warning (val);
  /* Wrap the RHS in a TARGET_EXPR so it is evaluated exactly once into
     VAL.  */
  rhs = build4 (TARGET_EXPR, nonatomic_rhs_type, val, rhs, NULL_TREE,
		NULL_TREE);
  TREE_SIDE_EFFECTS (rhs) = 1;
  SET_EXPR_LOCATION (rhs, loc);
  if (modifycode != NOP_EXPR)
    add_stmt (rhs);

  /* NOP_EXPR indicates it's a straight store of the RHS.  Simply issue
     an atomic_store.  */
  if (modifycode == NOP_EXPR)
    {
      compound_stmt = rhs;
      /* Build __atomic_store (&lhs, &val, SEQ_CST)  */
      rhs = build_unary_op (loc, ADDR_EXPR, val, false);
      fndecl = builtin_decl_explicit (fncode: BUILT_IN_ATOMIC_STORE);
      params->quick_push (obj: lhs_addr);
      params->quick_push (obj: rhs);
      params->quick_push (obj: seq_cst);
      func_call = c_build_function_call_vec (loc, arg_loc: vNULL, function: fndecl, params, NULL);

      compound_stmt = build2 (COMPOUND_EXPR, void_type_node,
			      compound_stmt, func_call);

      /* VAL is the value which was stored, return a COMPOUND_STMT of
	 the statement and that value.  */
      return build2 (COMPOUND_EXPR, nonatomic_lhs_type, compound_stmt, val);
    }

  /* Attempt to implement the atomic operation as an __atomic_fetch_* or
     __atomic_*_fetch built-in rather than a CAS loop.  atomic_bool type
     isn't applicable for such builtins.  ??? Do we want to handle enums?  */
  if ((TREE_CODE (lhs_type) == INTEGER_TYPE || POINTER_TYPE_P (lhs_type))
      && TREE_CODE (rhs_type) == INTEGER_TYPE)
    {
      built_in_function fncode;
      /* Whether the old (fetch-op) or the new (op-fetch) value is
	 returned depends on RETURN_OLD_P.  */
      switch (modifycode)
	{
	case PLUS_EXPR:
	case POINTER_PLUS_EXPR:
	  fncode = (return_old_p
		    ? BUILT_IN_ATOMIC_FETCH_ADD_N
		    : BUILT_IN_ATOMIC_ADD_FETCH_N);
	  break;
	case MINUS_EXPR:
	  fncode = (return_old_p
		    ? BUILT_IN_ATOMIC_FETCH_SUB_N
		    : BUILT_IN_ATOMIC_SUB_FETCH_N);
	  break;
	case BIT_AND_EXPR:
	  fncode = (return_old_p
		    ? BUILT_IN_ATOMIC_FETCH_AND_N
		    : BUILT_IN_ATOMIC_AND_FETCH_N);
	  break;
	case BIT_IOR_EXPR:
	  fncode = (return_old_p
		    ? BUILT_IN_ATOMIC_FETCH_OR_N
		    : BUILT_IN_ATOMIC_OR_FETCH_N);
	  break;
	case BIT_XOR_EXPR:
	  fncode = (return_old_p
		    ? BUILT_IN_ATOMIC_FETCH_XOR_N
		    : BUILT_IN_ATOMIC_XOR_FETCH_N);
	  break;
	default:
	  /* Other operations (e.g. multiplication, division, shifts)
	     have no fetch-style builtin; fall back to the CAS loop.  */
	  goto cas_loop;
	}

      /* We can only use "_1" through "_16" variants of the atomic fetch
	 built-ins.  */
      unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE_UNIT (lhs_type));
      if (size != 1 && size != 2 && size != 4 && size != 8 && size != 16)
	goto cas_loop;

      /* If this is a pointer type, we need to multiply by the size of
	 the pointer target type.  */
      if (POINTER_TYPE_P (lhs_type))
	{
	  if (!COMPLETE_TYPE_P (TREE_TYPE (lhs_type))
	      /* ??? This would introduce -Wdiscarded-qualifiers
		 warning: __atomic_fetch_* expect volatile void *
		 type as the first argument.  (Assignments between
		 atomic and non-atomic objects are OK.) */
	      || TYPE_RESTRICT (lhs_type))
	    goto cas_loop;
	  tree sz = TYPE_SIZE_UNIT (TREE_TYPE (lhs_type));
	  rhs = fold_build2_loc (loc, MULT_EXPR, ptrdiff_type_node,
				 convert (ptrdiff_type_node, rhs),
				 convert (ptrdiff_type_node, sz));
	}

      /* Build __atomic_fetch_* (&lhs, &val, SEQ_CST), or
	 __atomic_*_fetch (&lhs, &val, SEQ_CST).  */
      fndecl = builtin_decl_explicit (fncode);
      params->quick_push (obj: lhs_addr);
      params->quick_push (obj: rhs);
      params->quick_push (obj: seq_cst);
      func_call = c_build_function_call_vec (loc, arg_loc: vNULL, function: fndecl, params, NULL);

      /* Capture the builtin's result in NEWVAL via a TARGET_EXPR so it
	 can be returned after the compound statement.  */
      newval = create_tmp_var_raw (nonatomic_lhs_type);
      TREE_ADDRESSABLE (newval) = 1;
      suppress_warning (newval);
      rhs = build4 (TARGET_EXPR, nonatomic_lhs_type, newval, func_call,
		    NULL_TREE, NULL_TREE);
      SET_EXPR_LOCATION (rhs, loc);
      add_stmt (rhs);

      /* Finish the compound statement.  */
      compound_stmt = c_end_compound_stmt (loc, compound_stmt, false);

      /* NEWVAL is the value which was stored, return a COMPOUND_STMT of
	 the statement and that value.  */
      return build2 (COMPOUND_EXPR, nonatomic_lhs_type, compound_stmt, newval);
    }

cas_loop:
  /* Create the variables and labels required for the op= form.  */
  old = create_tmp_var_raw (nonatomic_lhs_type);
  old_addr = build_unary_op (loc, ADDR_EXPR, old, false);
  TREE_ADDRESSABLE (old) = 1;
  suppress_warning (old);

  newval = create_tmp_var_raw (nonatomic_lhs_type);
  newval_addr = build_unary_op (loc, ADDR_EXPR, newval, false);
  TREE_ADDRESSABLE (newval) = 1;
  suppress_warning (newval);

  loop_decl = create_artificial_label (loc);
  loop_label = build1 (LABEL_EXPR, void_type_node, loop_decl);

  done_decl = create_artificial_label (loc);
  done_label = build1 (LABEL_EXPR, void_type_node, done_decl);

  /* __atomic_load (addr, &old, SEQ_CST).  */
  fndecl = builtin_decl_explicit (fncode: BUILT_IN_ATOMIC_LOAD);
  params->quick_push (obj: lhs_addr);
  params->quick_push (obj: old_addr);
  params->quick_push (obj: seq_cst);
  func_call = c_build_function_call_vec (loc, arg_loc: vNULL, function: fndecl, params, NULL);
  old = build4 (TARGET_EXPR, nonatomic_lhs_type, old, func_call, NULL_TREE,
		NULL_TREE);
  add_stmt (old);
  /* Reuse the parameter vector for the compare_exchange call below.  */
  params->truncate (size: 0);

  /* Create the expressions for floating-point environment
     manipulation, if required.  */
  bool need_fenv = (flag_trapping_math
		    && (FLOAT_TYPE_P (lhs_type) || FLOAT_TYPE_P (rhs_type)));
  tree hold_call = NULL_TREE, clear_call = NULL_TREE, update_call = NULL_TREE;
  if (need_fenv)
    targetm.atomic_assign_expand_fenv (&hold_call, &clear_call, &update_call);

  if (hold_call)
    add_stmt (hold_call);

  /* loop:  */
  add_stmt (loop_label);

  /* newval = old + val;  */
  /* Reapply excess precision to VAL if it was stripped above, so the
     arithmetic is done in the semantic type.  */
  if (rhs_type != rhs_semantic_type)
    val = build1 (EXCESS_PRECISION_EXPR, nonatomic_rhs_semantic_type, val);
  rhs = build_binary_op (loc, modifycode, old, val, true);
  if (TREE_CODE (rhs) == EXCESS_PRECISION_EXPR)
    {
      /* Fold inside the EXCESS_PRECISION_EXPR wrapper, then rewrap.  */
      tree eptype = TREE_TYPE (rhs);
      rhs = c_fully_fold (TREE_OPERAND (rhs, 0), false, NULL);
      rhs = build1 (EXCESS_PRECISION_EXPR, eptype, rhs);
    }
  else
    rhs = c_fully_fold (rhs, false, NULL);
  rhs = convert_for_assignment (loc, UNKNOWN_LOCATION, nonatomic_lhs_type,
				rhs, NULL_TREE, ic_assign, false, NULL_TREE,
				NULL_TREE, 0);
  if (rhs != error_mark_node)
    {
      rhs = build4 (TARGET_EXPR, nonatomic_lhs_type, newval, rhs, NULL_TREE,
		    NULL_TREE);
      SET_EXPR_LOCATION (rhs, loc);
      add_stmt (rhs);
    }

  /* if (__atomic_compare_exchange (addr, &old, &new, false, SEQ_CST, SEQ_CST))
       goto done;  */
  fndecl = builtin_decl_explicit (fncode: BUILT_IN_ATOMIC_COMPARE_EXCHANGE);
  params->quick_push (obj: lhs_addr);
  params->quick_push (obj: old_addr);
  params->quick_push (obj: newval_addr);
  /* false here selects the strong variant of compare_exchange.  */
  params->quick_push (integer_zero_node);
  params->quick_push (obj: seq_cst);
  params->quick_push (obj: seq_cst);
  func_call = c_build_function_call_vec (loc, arg_loc: vNULL, function: fndecl, params, NULL);

  goto_stmt = build1 (GOTO_EXPR, void_type_node, done_decl);
  SET_EXPR_LOCATION (goto_stmt, loc);

  stmt = build3 (COND_EXPR, void_type_node, func_call, goto_stmt, NULL_TREE);
  SET_EXPR_LOCATION (stmt, loc);
  add_stmt (stmt);

  /* On CAS failure, discard any FP exceptions raised by the failed
     iteration before retrying.  */
  if (clear_call)
    add_stmt (clear_call);

  /* goto loop;  */
  goto_stmt = build1 (GOTO_EXPR, void_type_node, loop_decl);
  SET_EXPR_LOCATION (goto_stmt, loc);
  add_stmt (goto_stmt);

  /* done:  */
  add_stmt (done_label);

  if (update_call)
    add_stmt (update_call);

  /* Finish the compound statement.  */
  compound_stmt = c_end_compound_stmt (loc, compound_stmt, false);

  /* NEWVAL is the value that was successfully stored, return a
     COMPOUND_EXPR of the statement and the appropriate value.  */
  return build2 (COMPOUND_EXPR, nonatomic_lhs_type, compound_stmt,
		 return_old_p ? old : newval);
}
4490 | |
/* Construct and perhaps optimize a tree representation
   for a unary operation.  CODE, a tree_code, specifies the operation
   and XARG is the operand.
   For any CODE other than ADDR_EXPR, NOCONVERT suppresses the default
   promotions (such as from short to int).
   For ADDR_EXPR, the default promotions are not applied; NOCONVERT allows
   non-lvalues; this is only used to handle conversion of non-lvalue arrays
   to pointers in C99.

   LOCATION is the location of the operator.  */

tree
build_unary_op (location_t location, enum tree_code code, tree xarg,
		bool noconvert)
{
  /* No default_conversion here.  It causes trouble for ADDR_EXPR.  */
  tree arg = xarg;
  tree argtype = NULL_TREE;
  enum tree_code typecode;
  tree val;
  tree ret = error_mark_node;
  /* Excess precision type stripped from ARG, reapplied to RET at the
     end if non-null.  */
  tree eptype = NULL_TREE;
  const char *invalid_op_diag;
  bool int_operands;

  /* Remember whether XARG consists of integer constant operands so the
     result can be flagged the same way at the end.  */
  int_operands = EXPR_INT_CONST_OPERANDS (xarg);
  if (int_operands)
    arg = remove_c_maybe_const_expr (expr: arg);

  if (code != ADDR_EXPR)
    arg = require_complete_type (loc: location, value: arg);

  typecode = TREE_CODE (TREE_TYPE (arg));
  if (typecode == ERROR_MARK)
    return error_mark_node;
  /* Treat enums and _Bool as integers for the checks below.  */
  if (typecode == ENUMERAL_TYPE || typecode == BOOLEAN_TYPE)
    typecode = INTEGER_TYPE;

  /* Let the target veto the operation (e.g. for unsupported modes).  */
  if ((invalid_op_diag
       = targetm.invalid_unary_op (code, TREE_TYPE (xarg))))
    {
      error_at (location, invalid_op_diag);
      return error_mark_node;
    }

  /* Strip excess precision; it is reapplied via EPTYPE on return.  */
  if (TREE_CODE (arg) == EXCESS_PRECISION_EXPR)
    {
      eptype = TREE_TYPE (arg);
      arg = TREE_OPERAND (arg, 0);
    }

  switch (code)
    {
    case CONVERT_EXPR:
      /* This is used for unary plus, because a CONVERT_EXPR
	 is enough to prevent anybody from looking inside for
	 associativity, but won't generate any code.  */
      if (!(typecode == INTEGER_TYPE || typecode == REAL_TYPE
	    || typecode == FIXED_POINT_TYPE || typecode == COMPLEX_TYPE
	    || typecode == BITINT_TYPE
	    || gnu_vector_type_p (TREE_TYPE (arg))))
	{
	  error_at (location, "wrong type argument to unary plus");
	  return error_mark_node;
	}
      else if (!noconvert)
	arg = default_conversion (exp: arg);
      arg = non_lvalue_loc (location, arg);
      break;

    case NEGATE_EXPR:
      if (!(typecode == INTEGER_TYPE || typecode == REAL_TYPE
	    || typecode == FIXED_POINT_TYPE || typecode == COMPLEX_TYPE
	    || typecode == BITINT_TYPE
	    || gnu_vector_type_p (TREE_TYPE (arg))))
	{
	  error_at (location, "wrong type argument to unary minus");
	  return error_mark_node;
	}
      else if (!noconvert)
	arg = default_conversion (exp: arg);
      break;

    case BIT_NOT_EXPR:
      /* ~ works on integer types and non float vectors. */
      if (typecode == INTEGER_TYPE
	  || typecode == BITINT_TYPE
	  || (gnu_vector_type_p (TREE_TYPE (arg))
	      && !VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg))))
	{
	  tree e = arg;

	  /* Warn if the expression has boolean value.  */
	  while (TREE_CODE (e) == COMPOUND_EXPR)
	    e = TREE_OPERAND (e, 1);

	  if ((C_BOOLEAN_TYPE_P (TREE_TYPE (arg))
	       || truth_value_p (TREE_CODE (e))))
	    {
	      auto_diagnostic_group d;
	      if (warning_at (location, OPT_Wbool_operation,
			      "%<~%> on a boolean expression"))
		{
		  gcc_rich_location richloc (location);
		  richloc.add_fixit_insert_before (where: location, new_content: "!");
		  inform (&richloc, "did you mean to use logical not?");
		}
	    }
	  if (!noconvert)
	    arg = default_conversion (exp: arg);
	}
      else if (typecode == COMPLEX_TYPE)
	{
	  /* GNU extension: ~ on complex types means conjugation.  */
	  code = CONJ_EXPR;
	  pedwarn (location, OPT_Wpedantic,
		   "ISO C does not support %<~%> for complex conjugation");
	  if (!noconvert)
	    arg = default_conversion (exp: arg);
	}
      else
	{
	  error_at (location, "wrong type argument to bit-complement");
	  return error_mark_node;
	}
      break;

    case ABS_EXPR:
      if (!(typecode == INTEGER_TYPE || typecode == REAL_TYPE))
	{
	  error_at (location, "wrong type argument to abs");
	  return error_mark_node;
	}
      else if (!noconvert)
	arg = default_conversion (exp: arg);
      break;

    case ABSU_EXPR:
      if (!(typecode == INTEGER_TYPE))
	{
	  error_at (location, "wrong type argument to absu");
	  return error_mark_node;
	}
      else if (!noconvert)
	arg = default_conversion (exp: arg);
      break;

    case CONJ_EXPR:
      /* Conjugating a real value is a no-op, but allow it anyway.  */
      if (!(typecode == INTEGER_TYPE || typecode == REAL_TYPE
	    || typecode == COMPLEX_TYPE))
	{
	  error_at (location, "wrong type argument to conjugation");
	  return error_mark_node;
	}
      else if (!noconvert)
	arg = default_conversion (exp: arg);
      break;

    case TRUTH_NOT_EXPR:
      if (typecode != INTEGER_TYPE && typecode != FIXED_POINT_TYPE
	  && typecode != REAL_TYPE && typecode != POINTER_TYPE
	  && typecode != COMPLEX_TYPE && typecode != NULLPTR_TYPE
	  && typecode != BITINT_TYPE)
	{
	  error_at (location,
		    "wrong type argument to unary exclamation mark");
	  return error_mark_node;
	}
      if (int_operands)
	{
	  /* Use the original XARG so C_MAYBE_CONST_EXPR information is
	     preserved across the truth-value conversion.  */
	  arg = c_objc_common_truthvalue_conversion (location, xarg);
	  arg = remove_c_maybe_const_expr (expr: arg);
	}
      else
	arg = c_objc_common_truthvalue_conversion (location, arg);
      ret = invert_truthvalue_loc (location, arg);
      /* If the TRUTH_NOT_EXPR has been folded, reset the location.  */
      if (EXPR_P (ret) && EXPR_HAS_LOCATION (ret))
	location = EXPR_LOCATION (ret);
      goto return_build_unary_op;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      ret = build_real_imag_expr (location, code, arg);
      if (ret == error_mark_node)
	return error_mark_node;
      /* The result's excess precision type is the component type of the
	 complex excess precision type.  */
      if (eptype && TREE_CODE (eptype) == COMPLEX_TYPE)
	eptype = TREE_TYPE (eptype);
      goto return_build_unary_op;

    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:

      /* Recurse inside C_MAYBE_CONST_EXPR, rewrapping the result to
	 keep the pre-evaluation part and the non-constant flag.  */
      if (TREE_CODE (arg) == C_MAYBE_CONST_EXPR)
	{
	  tree inner = build_unary_op (location, code,
				       C_MAYBE_CONST_EXPR_EXPR (arg),
				       noconvert);
	  if (inner == error_mark_node)
	    return error_mark_node;
	  ret = build2 (C_MAYBE_CONST_EXPR, TREE_TYPE (inner),
			C_MAYBE_CONST_EXPR_PRE (arg), inner);
	  gcc_assert (!C_MAYBE_CONST_EXPR_INT_OPERANDS (arg));
	  C_MAYBE_CONST_EXPR_NON_CONST (ret) = 1;
	  goto return_build_unary_op;
	}

      /* Complain about anything that is not a true lvalue.  In
	 Objective-C, skip this check for property_refs.  */
      if (!objc_is_property_ref (arg)
	  && !lvalue_or_else (location,
			      arg, ((code == PREINCREMENT_EXPR
				     || code == POSTINCREMENT_EXPR)
				    ? lv_increment
				    : lv_decrement)))
	return error_mark_node;

      if (warn_cxx_compat && TREE_CODE (TREE_TYPE (arg)) == ENUMERAL_TYPE)
	{
	  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
	    warning_at (location, OPT_Wc___compat,
			"increment of enumeration value is invalid in C++");
	  else
	    warning_at (location, OPT_Wc___compat,
			"decrement of enumeration value is invalid in C++");
	}

      if (C_BOOLEAN_TYPE_P (TREE_TYPE (arg)))
	{
	  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
	    warning_at (location, OPT_Wbool_operation,
			"increment of a boolean expression");
	  else
	    warning_at (location, OPT_Wbool_operation,
			"decrement of a boolean expression");
	}

      /* Ensure the argument is fully folded inside any SAVE_EXPR.  */
      arg = c_fully_fold (arg, false, NULL, true);

      bool atomic_op;
      atomic_op = really_atomic_lvalue (expr: arg);

      /* Increment or decrement the real part of the value,
	 and don't change the imaginary part.  */
      if (typecode == COMPLEX_TYPE)
	{
	  tree real, imag;

	  pedwarn (location, OPT_Wpedantic,
		   "ISO C does not support %<++%> and %<--%> on complex types");

	  /* Atomic complex compound assignment falls through to the
	     generic atomic path below.  */
	  if (!atomic_op)
	    {
	      arg = stabilize_reference (arg);
	      real = build_unary_op (EXPR_LOCATION (arg), code: REALPART_EXPR, xarg: arg,
				     noconvert: true);
	      imag = build_unary_op (EXPR_LOCATION (arg), code: IMAGPART_EXPR, xarg: arg,
				     noconvert: true);
	      real = build_unary_op (EXPR_LOCATION (arg), code, xarg: real, noconvert: true);
	      if (real == error_mark_node || imag == error_mark_node)
		return error_mark_node;
	      ret = build2 (COMPLEX_EXPR, TREE_TYPE (arg),
			    real, imag);
	      goto return_build_unary_op;
	    }
	}

      /* Report invalid types.  */

      if (typecode != POINTER_TYPE && typecode != FIXED_POINT_TYPE
	  && typecode != INTEGER_TYPE && typecode != REAL_TYPE
	  && typecode != COMPLEX_TYPE && typecode != BITINT_TYPE
	  && !gnu_vector_type_p (TREE_TYPE (arg)))
	{
	  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
	    error_at (location, "wrong type argument to increment");
	  else
	    error_at (location, "wrong type argument to decrement");

	  return error_mark_node;
	}

      {
	tree inc;

	argtype = TREE_TYPE (arg);

	/* Compute the increment.  */

	if (typecode == POINTER_TYPE)
	  {
	    /* If pointer target is an incomplete type,
	       we just cannot know how to do the arithmetic.  */
	    if (!COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (argtype)))
	      {
		if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
		  error_at (location,
			    "increment of pointer to an incomplete type %qT",
			    TREE_TYPE (argtype));
		else
		  error_at (location,
			    "decrement of pointer to an incomplete type %qT",
			    TREE_TYPE (argtype));
	      }
	    else if (TREE_CODE (TREE_TYPE (argtype)) == FUNCTION_TYPE
		     || VOID_TYPE_P (TREE_TYPE (argtype)))
	      {
		/* Arithmetic on function/void pointers is a GNU
		   extension; pedwarn but continue.  */
		if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
		  pedwarn (location, OPT_Wpointer_arith,
			   "wrong type argument to increment");
		else
		  pedwarn (location, OPT_Wpointer_arith,
			   "wrong type argument to decrement");
	      }
	    else
	      verify_type_context (location, TCTX_POINTER_ARITH,
				   TREE_TYPE (argtype));

	    inc = c_size_in_bytes (TREE_TYPE (argtype));
	    inc = convert_to_ptrofftype_loc (loc: location, off: inc);
	  }
	else if (FRACT_MODE_P (TYPE_MODE (argtype)))
	  {
	    /* For signed fract types, we invert ++ to -- or
	       -- to ++, and change inc from 1 to -1, because
	       it is not possible to represent 1 in signed fract constants.
	       For unsigned fract types, the result always overflows and
	       we get an undefined (original) or the maximum value.  */
	    if (code == PREINCREMENT_EXPR)
	      code = PREDECREMENT_EXPR;
	    else if (code == PREDECREMENT_EXPR)
	      code = PREINCREMENT_EXPR;
	    else if (code == POSTINCREMENT_EXPR)
	      code = POSTDECREMENT_EXPR;
	    else /* code == POSTDECREMENT_EXPR  */
	      code = POSTINCREMENT_EXPR;

	    inc = integer_minus_one_node;
	    inc = convert (argtype, inc);
	  }
	else
	  {
	    inc = VECTOR_TYPE_P (argtype)
		  ? build_one_cst (argtype)
		  : integer_one_node;
	    inc = convert (argtype, inc);
	  }

	/* If 'arg' is an Objective-C PROPERTY_REF expression, then we
	   need to ask Objective-C to build the increment or decrement
	   expression for it.  */
	if (objc_is_property_ref (arg))
	  return objc_build_incr_expr_for_property_ref (location, code,
							arg, inc);

	/* Report a read-only lvalue.  */
	if (TYPE_READONLY (argtype))
	  {
	    readonly_error (location, arg,
			    ((code == PREINCREMENT_EXPR
			      || code == POSTINCREMENT_EXPR)
			     ? lv_increment : lv_decrement));
	    return error_mark_node;
	  }
	else if (TREE_READONLY (arg))
	  readonly_warning (arg,
			    ((code == PREINCREMENT_EXPR
			      || code == POSTINCREMENT_EXPR)
			     ? lv_increment : lv_decrement));

	/* If the argument is atomic, use the special code sequences for
	   atomic compound assignment.  */
	if (atomic_op)
	  {
	    arg = stabilize_reference (arg);
	    ret = build_atomic_assign (loc: location, lhs: arg,
				       modifycode: ((code == PREINCREMENT_EXPR
					      || code == POSTINCREMENT_EXPR)
					     ? PLUS_EXPR
					     : MINUS_EXPR),
				       rhs: (FRACT_MODE_P (TYPE_MODE (argtype))
					? inc
					: integer_one_node),
				       return_old_p: (code == POSTINCREMENT_EXPR
					|| code == POSTDECREMENT_EXPR));
	    goto return_build_unary_op;
	  }

	if (C_BOOLEAN_TYPE_P (TREE_TYPE (arg)))
	  val = boolean_increment (code, arg);
	else
	  val = build2 (code, TREE_TYPE (arg), arg, inc);
	TREE_SIDE_EFFECTS (val) = 1;
	/* The result of ++/-- is an rvalue and thus unqualified.  */
	if (TYPE_QUALS (TREE_TYPE (val)) != TYPE_UNQUALIFIED)
	  TREE_TYPE (val) = c_build_qualified_type (TREE_TYPE (val),
						    TYPE_UNQUALIFIED);
	ret = val;
	goto return_build_unary_op;
      }

    case ADDR_EXPR:
      /* Note that this operation never does default_conversion.  */

      /* The operand of unary '&' must be an lvalue (which excludes
	 expressions of type void), or, in C99, the result of a [] or
	 unary '*' operator.  */
      if (VOID_TYPE_P (TREE_TYPE (arg))
	  && TYPE_QUALS (TREE_TYPE (arg)) == TYPE_UNQUALIFIED
	  && (!INDIRECT_REF_P (arg) || !flag_isoc99))
	pedwarn (location, 0, "taking address of expression of type %<void%>");

      /* Let &* cancel out to simplify resulting code.  */
      if (INDIRECT_REF_P (arg))
	{
	  /* Don't let this be an lvalue.  */
	  if (lvalue_p (TREE_OPERAND (arg, 0)))
	    return non_lvalue_loc (location, TREE_OPERAND (arg, 0));
	  ret = TREE_OPERAND (arg, 0);
	  goto return_build_unary_op;
	}

      /* Anything not already handled and not a true memory reference
	 or a non-lvalue array is an error.  */
      if (typecode != FUNCTION_TYPE && !noconvert
	  && !lvalue_or_else (location, arg, lv_addressof))
	return error_mark_node;

      /* Move address operations inside C_MAYBE_CONST_EXPR to simplify
	 folding later.  */
      if (TREE_CODE (arg) == C_MAYBE_CONST_EXPR)
	{
	  tree inner = build_unary_op (location, code,
				       C_MAYBE_CONST_EXPR_EXPR (arg),
				       noconvert);
	  ret = build2 (C_MAYBE_CONST_EXPR, TREE_TYPE (inner),
			C_MAYBE_CONST_EXPR_PRE (arg), inner);
	  gcc_assert (!C_MAYBE_CONST_EXPR_INT_OPERANDS (arg));
	  C_MAYBE_CONST_EXPR_NON_CONST (ret)
	    = C_MAYBE_CONST_EXPR_NON_CONST (arg);
	  goto return_build_unary_op;
	}

      /* Ordinary case; arg is a COMPONENT_REF or a decl.  */
      argtype = TREE_TYPE (arg);

      /* If the lvalue is const or volatile, merge that into the type
	 to which the address will point.  This is only needed
	 for function types.  */
      if ((DECL_P (arg) || REFERENCE_CLASS_P (arg))
	  && (TREE_READONLY (arg) || TREE_THIS_VOLATILE (arg))
	  && TREE_CODE (argtype) == FUNCTION_TYPE)
	{
	  int orig_quals = TYPE_QUALS (strip_array_types (argtype));
	  int quals = orig_quals;

	  if (TREE_READONLY (arg))
	    quals |= TYPE_QUAL_CONST;
	  if (TREE_THIS_VOLATILE (arg))
	    quals |= TYPE_QUAL_VOLATILE;

	  argtype = c_build_qualified_type (argtype, quals);
	}

      switch (TREE_CODE (arg))
	{
	case COMPONENT_REF:
	  if (DECL_C_BIT_FIELD (TREE_OPERAND (arg, 1)))
	    {
	      error_at (location, "cannot take address of bit-field %qD",
			TREE_OPERAND (arg, 1));
	      return error_mark_node;
	    }

	  /* fall through */

	case ARRAY_REF:
	  if (TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_OPERAND (arg, 0))))
	    {
	      if (!AGGREGATE_TYPE_P (TREE_TYPE (arg))
		  && !POINTER_TYPE_P (TREE_TYPE (arg))
		  && !VECTOR_TYPE_P (TREE_TYPE (arg)))
		{
		  error_at (location, "cannot take address of scalar with "
			    "reverse storage order");
		  return error_mark_node;
		}

	      if (TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
		  && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (arg)))
		warning_at (location, OPT_Wscalar_storage_order,
			    "address of array with reverse scalar storage "
			    "order requested");
	    }

	default:
	  break;
	}

      if (!c_mark_addressable (arg))
	return error_mark_node;

      gcc_assert (TREE_CODE (arg) != COMPONENT_REF
		  || !DECL_C_BIT_FIELD (TREE_OPERAND (arg, 1)));

      argtype = build_pointer_type (argtype);

      /* ??? Cope with user tricks that amount to offsetof.  Delete this
	 when we have proper support for integer constant expressions.  */
      val = get_base_address (t: arg);
      if (val && INDIRECT_REF_P (val)
	  && TREE_CONSTANT (TREE_OPERAND (val, 0)))
	{
	  ret = fold_offsetof (arg, argtype);
	  goto return_build_unary_op;
	}

      val = build1 (ADDR_EXPR, argtype, arg);

      ret = val;
      goto return_build_unary_op;

    case PAREN_EXPR:
      ret = build1 (code, TREE_TYPE (arg), arg);
      goto return_build_unary_op;

    default:
      gcc_unreachable ();
    }

  /* Common exit for the simple arithmetic cases that fell out of the
     switch: fold constant operands, otherwise build the expression.  */
  if (argtype == NULL_TREE)
    argtype = TREE_TYPE (arg);
  if (TREE_CODE (arg) == INTEGER_CST)
    ret = (require_constant_value
	   ? fold_build1_initializer_loc (location, code, argtype, arg)
	   : fold_build1_loc (location, code, argtype, arg));
  else
    ret = build1 (code, argtype, arg);
 return_build_unary_op:
  gcc_assert (ret != error_mark_node);
  /* Wrap a constant result in a NOP_EXPR when the operand was not a
     clean integer constant, so it is not treated as one.  */
  if (TREE_CODE (ret) == INTEGER_CST && !TREE_OVERFLOW (ret)
      && !(TREE_CODE (xarg) == INTEGER_CST && !TREE_OVERFLOW (xarg)))
    ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
  else if (TREE_CODE (ret) != INTEGER_CST && int_operands)
    ret = note_integer_operands (expr: ret);
  /* Reapply any excess precision stripped from the operand.  */
  if (eptype)
    ret = build1 (EXCESS_PRECISION_EXPR, eptype, ret);
  protected_set_expr_location (ret, location);
  return ret;
}
5043 | |
5044 | /* Return nonzero if REF is an lvalue valid for this language. |
5045 | Lvalues can be assigned, unless their type has TYPE_READONLY. |
5046 | Lvalues can have their address taken, unless they have C_DECL_REGISTER. */ |
5047 | |
5048 | bool |
5049 | lvalue_p (const_tree ref) |
5050 | { |
5051 | const enum tree_code code = TREE_CODE (ref); |
5052 | |
5053 | switch (code) |
5054 | { |
5055 | case REALPART_EXPR: |
5056 | case IMAGPART_EXPR: |
5057 | case COMPONENT_REF: |
5058 | return lvalue_p (TREE_OPERAND (ref, 0)); |
5059 | |
5060 | case C_MAYBE_CONST_EXPR: |
5061 | return lvalue_p (TREE_OPERAND (ref, 1)); |
5062 | |
5063 | case COMPOUND_LITERAL_EXPR: |
5064 | case STRING_CST: |
5065 | return true; |
5066 | |
5067 | case MEM_REF: |
5068 | case TARGET_MEM_REF: |
5069 | /* MEM_REFs can appear from -fgimple parsing or folding, so allow them |
5070 | here as well. */ |
5071 | case INDIRECT_REF: |
5072 | case ARRAY_REF: |
5073 | case VAR_DECL: |
5074 | case PARM_DECL: |
5075 | case RESULT_DECL: |
5076 | case ERROR_MARK: |
5077 | return (TREE_CODE (TREE_TYPE (ref)) != FUNCTION_TYPE |
5078 | && TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE); |
5079 | |
5080 | case BIND_EXPR: |
5081 | return TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE; |
5082 | |
5083 | default: |
5084 | return false; |
5085 | } |
5086 | } |
5087 | |
5088 | /* Give a warning for storing in something that is read-only in GCC |
5089 | terms but not const in ISO C terms. */ |
5090 | |
5091 | static void |
5092 | readonly_warning (tree arg, enum lvalue_use use) |
5093 | { |
5094 | switch (use) |
5095 | { |
5096 | case lv_assign: |
5097 | warning (0, "assignment of read-only location %qE" , arg); |
5098 | break; |
5099 | case lv_increment: |
5100 | warning (0, "increment of read-only location %qE" , arg); |
5101 | break; |
5102 | case lv_decrement: |
5103 | warning (0, "decrement of read-only location %qE" , arg); |
5104 | break; |
5105 | default: |
5106 | gcc_unreachable (); |
5107 | } |
5108 | return; |
5109 | } |
5110 | |
5111 | |
5112 | /* Return nonzero if REF is an lvalue valid for this language; |
5113 | otherwise, print an error message and return zero. USE says |
5114 | how the lvalue is being used and so selects the error message. |
5115 | LOCATION is the location at which any error should be reported. */ |
5116 | |
5117 | static int |
5118 | lvalue_or_else (location_t loc, const_tree ref, enum lvalue_use use) |
5119 | { |
5120 | int win = lvalue_p (ref); |
5121 | |
5122 | if (!win) |
5123 | lvalue_error (loc, use); |
5124 | |
5125 | return win; |
5126 | } |
5127 | |
/* Mark EXP saying that we need to be able to take the
   address of it; it should not be allocated in a register.
   Returns true if successful.  ARRAY_REF_P is true if this
   is for ARRAY_REF construction - in that case we don't want
   to look through VIEW_CONVERT_EXPR from VECTOR_TYPE to ARRAY_TYPE,
   it is fine to use ARRAY_REFs for vector subscripts on vector
   register variables.  */

bool
c_mark_addressable (tree exp, bool array_ref_p)
{
  tree x = exp;

  /* Peel component references off EXP until we reach the underlying
     object, then mark that object addressable (or diagnose why we
     cannot).  */
  while (1)
    switch (TREE_CODE (x))
      {
      case VIEW_CONVERT_EXPR:
	/* For vector subscripting, stop at a VIEW_CONVERT_EXPR from
	   VECTOR_TYPE to ARRAY_TYPE: ARRAY_REFs are fine on vector
	   register variables, so nothing needs marking.  */
	if (array_ref_p
	    && TREE_CODE (TREE_TYPE (x)) == ARRAY_TYPE
	    && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))))
	  return true;
	x = TREE_OPERAND (x, 0);
	break;

      case COMPONENT_REF:
	/* Bit-fields have no address at all.  */
	if (DECL_C_BIT_FIELD (TREE_OPERAND (x, 1)))
	  {
	    error ("cannot take address of bit-field %qD",
		   TREE_OPERAND (x, 1));
	    return false;
	  }
	/* FALLTHRU */
      case ADDR_EXPR:
      case ARRAY_REF:
      case REALPART_EXPR:
      case IMAGPART_EXPR:
	/* Continue with the object these refer into.  */
	x = TREE_OPERAND (x, 0);
	break;

      case COMPOUND_LITERAL_EXPR:
	/* Compound literals carry an anonymous decl; the register
	   check and the addressable marking apply to that decl.  */
	if (C_DECL_REGISTER (COMPOUND_LITERAL_EXPR_DECL (x)))
	  {
	    error ("address of register compound literal requested");
	    return false;
	  }
	TREE_ADDRESSABLE (x) = 1;
	TREE_ADDRESSABLE (COMPOUND_LITERAL_EXPR_DECL (x)) = 1;
	return true;

      case CONSTRUCTOR:
	TREE_ADDRESSABLE (x) = 1;
	return true;

      case VAR_DECL:
      case CONST_DECL:
      case PARM_DECL:
      case RESULT_DECL:
	/* Taking the address of a register variable is an error
	   (a pedwarn only for non-global register variables used in
	   nested functions).  */
	if (C_DECL_REGISTER (x)
	    && DECL_NONLOCAL (x))
	  {
	    if (TREE_PUBLIC (x) || is_global_var (t: x))
	      {
		error
		  ("global register variable %qD used in nested function", x);
		return false;
	      }
	    pedwarn (input_location, 0, "register variable %qD used in nested function", x);
	  }
	else if (C_DECL_REGISTER (x))
	  {
	    if (TREE_PUBLIC (x) || is_global_var (t: x))
	      error ("address of global register variable %qD requested", x);
	    else
	      error ("address of register variable %qD requested", x);
	    return false;
	  }

	/* FALLTHRU */
      case FUNCTION_DECL:
	TREE_ADDRESSABLE (x) = 1;
	/* FALLTHRU */
      default:
	/* Anything else (e.g. an INDIRECT_REF) needs no marking.  */
	return true;
      }
}
5213 | |
5214 | /* Convert EXPR to TYPE, warning about conversion problems with |
5215 | constants. SEMANTIC_TYPE is the type this conversion would use |
5216 | without excess precision. If SEMANTIC_TYPE is NULL, this function |
5217 | is equivalent to convert_and_check. This function is a wrapper that |
5218 | handles conversions that may be different than |
5219 | the usual ones because of excess precision. */ |
5220 | |
5221 | static tree |
5222 | ep_convert_and_check (location_t loc, tree type, tree expr, |
5223 | tree semantic_type) |
5224 | { |
5225 | if (TREE_TYPE (expr) == type) |
5226 | return expr; |
5227 | |
5228 | /* For C11, integer conversions may have results with excess |
5229 | precision. */ |
5230 | if (flag_isoc11 || !semantic_type) |
5231 | return convert_and_check (loc, type, expr); |
5232 | |
5233 | if (TREE_CODE (TREE_TYPE (expr)) == INTEGER_TYPE |
5234 | && TREE_TYPE (expr) != semantic_type) |
5235 | { |
5236 | /* For integers, we need to check the real conversion, not |
5237 | the conversion to the excess precision type. */ |
5238 | expr = convert_and_check (loc, semantic_type, expr); |
5239 | } |
5240 | /* Result type is the excess precision type, which should be |
5241 | large enough, so do not check. */ |
5242 | return convert (type, expr); |
5243 | } |
5244 | |
5245 | /* If EXPR refers to a built-in declared without a prototype returns |
5246 | the actual type of the built-in and, if non-null, set *BLTIN to |
5247 | a pointer to the built-in. Otherwise return the type of EXPR |
5248 | and clear *BLTIN if non-null. */ |
5249 | |
5250 | static tree |
5251 | type_or_builtin_type (tree expr, tree *bltin = NULL) |
5252 | { |
5253 | tree dummy; |
5254 | if (!bltin) |
5255 | bltin = &dummy; |
5256 | |
5257 | *bltin = NULL_TREE; |
5258 | |
5259 | tree type = TREE_TYPE (expr); |
5260 | if (TREE_CODE (expr) != ADDR_EXPR) |
5261 | return type; |
5262 | |
5263 | tree oper = TREE_OPERAND (expr, 0); |
5264 | if (!DECL_P (oper) |
5265 | || TREE_CODE (oper) != FUNCTION_DECL |
5266 | || !fndecl_built_in_p (node: oper, klass: BUILT_IN_NORMAL)) |
5267 | return type; |
5268 | |
5269 | built_in_function code = DECL_FUNCTION_CODE (decl: oper); |
5270 | if (!C_DECL_BUILTIN_PROTOTYPE (oper)) |
5271 | return type; |
5272 | |
5273 | if ((*bltin = builtin_decl_implicit (fncode: code))) |
5274 | type = build_pointer_type (TREE_TYPE (*bltin)); |
5275 | |
5276 | return type; |
5277 | } |
5278 | |
/* Build and return a conditional expression IFEXP ? OP1 : OP2.  If
   IFEXP_BCP then the condition is a call to __builtin_constant_p, and
   if folded to an integer constant then the unselected half may
   contain arbitrary operations not normally permitted in constant
   expressions.  Set the location of the expression to COLON_LOC.
   OP1_ORIGINAL_TYPE and OP2_ORIGINAL_TYPE, if non-NULL, are the
   pre-conversion operand types used for the -Wc++-compat enum check;
   OP1_LOC and OP2_LOC locate the operands for diagnostics.
   Returns error_mark_node on any error.  */

tree
build_conditional_expr (location_t colon_loc, tree ifexp, bool ifexp_bcp,
			tree op1, tree op1_original_type, location_t op1_loc,
			tree op2, tree op2_original_type, location_t op2_loc)
{
  tree type1;
  tree type2;
  enum tree_code code1;
  enum tree_code code2;
  tree result_type = NULL;
  tree semantic_result_type = NULL;
  tree orig_op1 = op1, orig_op2 = op2;
  bool int_const, op1_int_operands, op2_int_operands, int_operands;
  bool ifexp_int_operands;
  tree ret;

  /* Remember which operands consist entirely of integer constant
     operands, and strip their C_MAYBE_CONST_EXPR wrappers so the
     code below sees the underlying expressions.  */
  op1_int_operands = EXPR_INT_CONST_OPERANDS (orig_op1);
  if (op1_int_operands)
    op1 = remove_c_maybe_const_expr (expr: op1);
  op2_int_operands = EXPR_INT_CONST_OPERANDS (orig_op2);
  if (op2_int_operands)
    op2 = remove_c_maybe_const_expr (expr: op2);
  ifexp_int_operands = EXPR_INT_CONST_OPERANDS (ifexp);
  if (ifexp_int_operands)
    ifexp = remove_c_maybe_const_expr (expr: ifexp);

  /* Promote both alternatives.  */

  if (TREE_CODE (TREE_TYPE (op1)) != VOID_TYPE)
    op1 = default_conversion (exp: op1);
  if (TREE_CODE (TREE_TYPE (op2)) != VOID_TYPE)
    op2 = default_conversion (exp: op2);

  if (TREE_CODE (ifexp) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (op1)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (op2)) == ERROR_MARK)
    return error_mark_node;

  /* For prototype-less built-ins, use the type of the built-in's
     implicit declaration rather than the operand's own type.  */
  tree bltin1 = NULL_TREE;
  tree bltin2 = NULL_TREE;
  type1 = type_or_builtin_type (expr: op1, bltin: &bltin1);
  code1 = TREE_CODE (type1);
  type2 = type_or_builtin_type (expr: op2, bltin: &bltin2);
  code2 = TREE_CODE (type2);

  if (code1 == POINTER_TYPE && reject_gcc_builtin (op1))
    return error_mark_node;

  if (code2 == POINTER_TYPE && reject_gcc_builtin (op2))
    return error_mark_node;

  /* C90 does not permit non-lvalue arrays in conditional expressions.
     In C99 they will be pointers by now.  */
  if (code1 == ARRAY_TYPE || code2 == ARRAY_TYPE)
    {
      error_at (colon_loc, "non-lvalue array in conditional expression");
      return error_mark_node;
    }

  /* If either arm carries excess precision, compute the semantic
     result type from the nominal types, then strip the wrappers so
     the arms are handled in their excess precision types.  */
  if ((TREE_CODE (op1) == EXCESS_PRECISION_EXPR
       || TREE_CODE (op2) == EXCESS_PRECISION_EXPR)
      && (code1 == INTEGER_TYPE || code1 == REAL_TYPE
	  || code1 == COMPLEX_TYPE || code1 == BITINT_TYPE)
      && (code2 == INTEGER_TYPE || code2 == REAL_TYPE
	  || code2 == COMPLEX_TYPE || code2 == BITINT_TYPE))
    {
      semantic_result_type = c_common_type (t1: type1, t2: type2);
      if (TREE_CODE (op1) == EXCESS_PRECISION_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 0);
	  type1 = TREE_TYPE (op1);
	  gcc_assert (TREE_CODE (type1) == code1);
	}
      if (TREE_CODE (op2) == EXCESS_PRECISION_EXPR)
	{
	  op2 = TREE_OPERAND (op2, 0);
	  type2 = TREE_TYPE (op2);
	  gcc_assert (TREE_CODE (type2) == code2);
	}
    }

  /* C++ rejects mixing distinct enum types in ?:; warn for C++
     compatibility using the pre-conversion types when available.  */
  if (warn_cxx_compat)
    {
      tree t1 = op1_original_type ? op1_original_type : TREE_TYPE (orig_op1);
      tree t2 = op2_original_type ? op2_original_type : TREE_TYPE (orig_op2);

      if (TREE_CODE (t1) == ENUMERAL_TYPE
	  && TREE_CODE (t2) == ENUMERAL_TYPE
	  && TYPE_MAIN_VARIANT (t1) != TYPE_MAIN_VARIANT (t2))
	warning_at (colon_loc, OPT_Wc___compat,
		    ("different enum types in conditional is "
		     "invalid in C++: %qT vs %qT"),
		    t1, t2);
    }

  /* Quickly detect the usual case where op1 and op2 have the same type
     after promotion.  */
  if (TYPE_MAIN_VARIANT (type1) == TYPE_MAIN_VARIANT (type2))
    {
      if (type1 == type2)
	result_type = type1;
      else
	result_type = TYPE_MAIN_VARIANT (type1);
    }
  else if ((code1 == INTEGER_TYPE || code1 == REAL_TYPE
	    || code1 == COMPLEX_TYPE || code1 == BITINT_TYPE)
	   && (code2 == INTEGER_TYPE || code2 == REAL_TYPE
	       || code2 == COMPLEX_TYPE || code2 == BITINT_TYPE))
    {
      /* In C11, a conditional expression between a floating-point
	 type and an integer type should convert the integer type to
	 the evaluation format of the floating-point type, with
	 possible excess precision.  */
      tree eptype1 = type1;
      tree eptype2 = type2;
      if (flag_isoc11)
	{
	  tree eptype;
	  if (ANY_INTEGRAL_TYPE_P (type1)
	      && (eptype = excess_precision_type (type2)) != NULL_TREE)
	    {
	      eptype2 = eptype;
	      if (!semantic_result_type)
		semantic_result_type = c_common_type (t1: type1, t2: type2);
	    }
	  else if (ANY_INTEGRAL_TYPE_P (type2)
		   && (eptype = excess_precision_type (type1)) != NULL_TREE)
	    {
	      eptype1 = eptype;
	      if (!semantic_result_type)
		semantic_result_type = c_common_type (t1: type1, t2: type2);
	    }
	}
      result_type = c_common_type (t1: eptype1, t2: eptype2);
      if (result_type == error_mark_node)
	return error_mark_node;
      do_warn_double_promotion (result_type, type1, type2,
				"implicit conversion from %qT to %qT to "
				"match other result of conditional",
				colon_loc);

      /* If -Wsign-compare, warn here if type1 and type2 have
	 different signedness.  We'll promote the signed to unsigned
	 and later code won't know it used to be different.
	 Do this check on the original types, so that explicit casts
	 will be considered, but default promotions won't.  */
      if (c_inhibit_evaluation_warnings == 0)
	{
	  int unsigned_op1 = TYPE_UNSIGNED (TREE_TYPE (orig_op1));
	  int unsigned_op2 = TYPE_UNSIGNED (TREE_TYPE (orig_op2));

	  if (unsigned_op1 ^ unsigned_op2)
	    {
	      bool ovf;

	      /* Do not warn if the result type is signed, since the
		 signed type will only be chosen if it can represent
		 all the values of the unsigned type.  */
	      if (!TYPE_UNSIGNED (result_type))
		/* OK */;
	      else
		{
		  bool op1_maybe_const = true;
		  bool op2_maybe_const = true;

		  /* Do not warn if the signed quantity is an
		     unsuffixed integer literal (or some static
		     constant expression involving such literals) and
		     it is non-negative.  This warning requires the
		     operands to be folded for best results, so do
		     that folding in this case even without
		     warn_sign_compare to avoid warning options
		     possibly affecting code generation.

		     Warnings for the arm that can never be evaluated
		     (condition statically true/false) are suppressed
		     via c_inhibit_evaluation_warnings around each
		     fold.  */
		  c_inhibit_evaluation_warnings
		    += (ifexp == truthvalue_false_node);
		  op1 = c_fully_fold (op1, require_constant_value,
				      &op1_maybe_const);
		  c_inhibit_evaluation_warnings
		    -= (ifexp == truthvalue_false_node);

		  c_inhibit_evaluation_warnings
		    += (ifexp == truthvalue_true_node);
		  op2 = c_fully_fold (op2, require_constant_value,
				      &op2_maybe_const);
		  c_inhibit_evaluation_warnings
		    -= (ifexp == truthvalue_true_node);

		  if (warn_sign_compare)
		    {
		      if ((unsigned_op2
			   && tree_expr_nonnegative_warnv_p (op1, &ovf))
			  || (unsigned_op1
			      && tree_expr_nonnegative_warnv_p (op2, &ovf)))
			/* OK */;
		      else if (unsigned_op2)
			warning_at (op1_loc, OPT_Wsign_compare,
				    "operand of %<?:%> changes signedness from "
				    "%qT to %qT due to unsignedness of other "
				    "operand", TREE_TYPE (orig_op1),
				    TREE_TYPE (orig_op2));
		      else
			warning_at (op2_loc, OPT_Wsign_compare,
				    "operand of %<?:%> changes signedness from "
				    "%qT to %qT due to unsignedness of other "
				    "operand", TREE_TYPE (orig_op2),
				    TREE_TYPE (orig_op1));
		    }
		  /* Re-wrap folded operands that are not integer
		     constants so later passes still see them as
		     possibly non-constant.  */
		  if (!op1_maybe_const || TREE_CODE (op1) != INTEGER_CST)
		    op1 = c_wrap_maybe_const (op1, !op1_maybe_const);
		  if (!op2_maybe_const || TREE_CODE (op2) != INTEGER_CST)
		    op2 = c_wrap_maybe_const (op2, !op2_maybe_const);
		}
	    }
	}
    }
  else if (code1 == VOID_TYPE || code2 == VOID_TYPE)
    {
      if (code1 != VOID_TYPE || code2 != VOID_TYPE)
	pedwarn (colon_loc, OPT_Wpedantic,
		 "ISO C forbids conditional expr with only one void side");
      result_type = void_type_node;
    }
  else if (code1 == POINTER_TYPE && code2 == POINTER_TYPE)
    {
      /* Both arms are pointers: look for a common pointer type,
	 checking address spaces, void * mixing, and null pointer
	 constants along the way.  */
      addr_space_t as1 = TYPE_ADDR_SPACE (TREE_TYPE (type1));
      addr_space_t as2 = TYPE_ADDR_SPACE (TREE_TYPE (type2));
      addr_space_t as_common;

      if (comp_target_types (location: colon_loc, ttl: type1, ttr: type2))
	result_type = common_pointer_type (t1: type1, t2: type2);
      else if (null_pointer_constant_p (expr: orig_op1))
	result_type = type2;
      else if (null_pointer_constant_p (expr: orig_op2))
	result_type = type1;
      else if (!addr_space_superset (as1, as2, common: &as_common))
	{
	  error_at (colon_loc, "pointers to disjoint address spaces "
		    "used in conditional expression");
	  return error_mark_node;
	}
      else if ((VOID_TYPE_P (TREE_TYPE (type1))
		&& !TYPE_ATOMIC (TREE_TYPE (type1)))
	       || (VOID_TYPE_P (TREE_TYPE (type2))
		   && !TYPE_ATOMIC (TREE_TYPE (type2))))
	{
	  /* One arm is a (non-atomic) void *; the result is a
	     suitably qualified void *.  Normalize so T1 is the void
	     side and T2 is the other.  */
	  tree t1 = TREE_TYPE (type1);
	  tree t2 = TREE_TYPE (type2);
	  if (!(VOID_TYPE_P (t1)
		&& !TYPE_ATOMIC (t1)))
	    {
	      /* roles are swapped */
	      t1 = t2;
	      t2 = TREE_TYPE (type1);
	    }
	  tree t2_stripped = strip_array_types (type: t2);
	  if ((TREE_CODE (t2) == ARRAY_TYPE)
	      && (TYPE_QUALS (t2_stripped) & ~TYPE_QUALS (t1)))
	    {
	      if (!flag_isoc23)
		warning_at (colon_loc, OPT_Wdiscarded_array_qualifiers,
			    "pointer to array loses qualifier "
			    "in conditional expression");
	      else if (warn_c11_c23_compat > 0)
		warning_at (colon_loc, OPT_Wc11_c23_compat,
			    "pointer to array loses qualifier "
			    "in conditional expression in ISO C before C23");
	    }
	  if (TREE_CODE (t2) == FUNCTION_TYPE)
	    pedwarn (colon_loc, OPT_Wpedantic,
		     "ISO C forbids conditional expr between "
		     "%<void *%> and function pointer");
	  /* for array, use qualifiers of element type */
	  if (flag_isoc23)
	    t2 = t2_stripped;
	  result_type = build_pointer_type (qualify_type (type: t1, like: t2));
	}
      /* Objective-C pointer comparisons are a bit more lenient.  */
      else if (objc_have_common_type (type1, type2, -3, NULL_TREE))
	result_type = objc_common_type (type1, type2);
      else
	{
	  /* Incompatible pointer types: diagnose (a plain warning
	     when both arms are built-ins, as their prototypes may
	     legitimately differ) and fall back to void * in the
	     common address space.  */
	  int qual = ENCODE_QUAL_ADDR_SPACE (as_common);
	  if (emit_diagnostic (bltin1 && bltin2 ? DK_WARNING : DK_PEDWARN,
			       colon_loc, OPT_Wincompatible_pointer_types,
			       "pointer type mismatch "
			       "in conditional expression"))
	    {
	      inform (op1_loc, "first expression has type %qT", type1);
	      inform (op2_loc, "second expression has type %qT", type2);
	    }
	  result_type = build_pointer_type
	    (build_qualified_type (void_type_node, qual));
	}
    }
  else if (code1 == POINTER_TYPE
	   && (code2 == INTEGER_TYPE || code2 == BITINT_TYPE))
    {
      /* Pointer vs integer: only a null pointer constant is valid.  */
      if (!null_pointer_constant_p (expr: orig_op2))
	pedwarn (colon_loc, OPT_Wint_conversion,
		 "pointer/integer type mismatch in conditional expression");
      else
	{
	  op2 = null_pointer_node;
	}
      result_type = type1;
    }
  else if (code2 == POINTER_TYPE
	   && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE))
    {
      if (!null_pointer_constant_p (expr: orig_op1))
	pedwarn (colon_loc, OPT_Wint_conversion,
		 "pointer/integer type mismatch in conditional expression");
      else
	{
	  op1 = null_pointer_node;
	}
      result_type = type2;
    }
  /* 6.5.15: "if one is a null pointer constant (other than a pointer) or has
     type nullptr_t and the other is a pointer, the result type is the pointer
     type." */
  else if (code1 == NULLPTR_TYPE && code2 == POINTER_TYPE)
    result_type = type2;
  else if (code1 == POINTER_TYPE && code2 == NULLPTR_TYPE)
    result_type = type1;

  if (!result_type)
    {
      if (flag_cond_mismatch)
	result_type = void_type_node;
      else
	{
	  error_at (colon_loc, "type mismatch in conditional expression");
	  return error_mark_node;
	}
    }

  /* Merge const and volatile flags of the incoming types.  */
  result_type
    = build_type_variant (result_type,
			  TYPE_READONLY (type1) || TYPE_READONLY (type2),
			  TYPE_VOLATILE (type1) || TYPE_VOLATILE (type2));

  op1 = ep_convert_and_check (loc: colon_loc, type: result_type, expr: op1,
			      semantic_type: semantic_result_type);
  op2 = ep_convert_and_check (loc: colon_loc, type: result_type, expr: op2,
			      semantic_type: semantic_result_type);

  /* When __builtin_constant_p folded the condition, fold the selected
     arm; the unselected arm is then irrelevant for constancy, so mark
     it as integer operands.  */
  if (ifexp_bcp && ifexp == truthvalue_true_node)
    {
      op2_int_operands = true;
      op1 = c_fully_fold (op1, require_constant_value, NULL);
    }
  if (ifexp_bcp && ifexp == truthvalue_false_node)
    {
      op1_int_operands = true;
      op2 = c_fully_fold (op2, require_constant_value, NULL);
    }
  int_const = int_operands = (ifexp_int_operands
			      && op1_int_operands
			      && op2_int_operands);
  if (int_operands)
    {
      /* The whole expression is an integer constant only if the
	 selected arm is a valid INTEGER_CST.  */
      int_const = ((ifexp == truthvalue_true_node
		    && TREE_CODE (orig_op1) == INTEGER_CST
		    && !TREE_OVERFLOW (orig_op1))
		   || (ifexp == truthvalue_false_node
		       && TREE_CODE (orig_op2) == INTEGER_CST
		       && !TREE_OVERFLOW (orig_op2)));
    }

  /* Need to convert condition operand into a vector mask.  */
  if (VECTOR_TYPE_P (TREE_TYPE (ifexp)))
    {
      tree vectype = TREE_TYPE (ifexp);
      tree elem_type = TREE_TYPE (vectype);
      tree zero = build_int_cst (elem_type, 0);
      tree zero_vec = build_vector_from_val (vectype, zero);
      tree cmp_type = truth_type_for (vectype);
      ifexp = build2 (NE_EXPR, cmp_type, ifexp, zero_vec);
    }

  if (int_const || (ifexp_bcp && TREE_CODE (ifexp) == INTEGER_CST))
    ret = fold_build3_loc (colon_loc, COND_EXPR, result_type, ifexp, op1, op2);
  else
    {
      if (int_operands)
	{
	  /* Use c_fully_fold here, since C_MAYBE_CONST_EXPR might be
	     nested inside of the expression.  */
	  op1 = c_fully_fold (op1, false, NULL);
	  op2 = c_fully_fold (op2, false, NULL);
	}
      ret = build3 (COND_EXPR, result_type, ifexp, op1, op2);
      if (int_operands)
	ret = note_integer_operands (expr: ret);
    }
  if (semantic_result_type)
    ret = build1 (EXCESS_PRECISION_EXPR, semantic_result_type, ret);

  protected_set_expr_location (ret, colon_loc);

  /* If the OP1 and OP2 are the same and don't have side-effects,
     warn here, because the COND_EXPR will be turned into OP1.  */
  if (warn_duplicated_branches
      && TREE_CODE (ret) == COND_EXPR
      && (op1 == op2 || operand_equal_p (op1, op2, flags: OEP_ADDRESS_OF_SAME_FIELD)))
    warning_at (EXPR_LOCATION (ret), OPT_Wduplicated_branches,
		"this condition has identical branches");

  return ret;
}
5697 | |
/* EXPR is an expression, location LOC, whose result is discarded.
   Warn if it is a call to a nodiscard function (or a COMPOUND_EXPR
   whose right-hand operand is such a call, possibly recursively).
   The attribute may be attached either to the function declaration
   itself or to its return type; the two cases produce different
   wordings.  */

static void
maybe_warn_nodiscard (location_t loc, tree expr)
{
  /* A void result cannot be "discarded".  */
  if (VOID_TYPE_P (TREE_TYPE (expr)))
    return;
  /* The value of a comma expression is its right-hand operand; walk
     down to it, tracking the best location for the warning.  */
  while (TREE_CODE (expr) == COMPOUND_EXPR)
    {
      expr = TREE_OPERAND (expr, 1);
      if (EXPR_HAS_LOCATION (expr))
	loc = EXPR_LOCATION (expr);
    }
  if (TREE_CODE (expr) != CALL_EXPR)
    return;
  tree fn = CALL_EXPR_FN (expr);
  if (!fn)
    return;
  tree attr;
  if (TREE_CODE (fn) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
      && (attr = lookup_attribute (attr_name: "nodiscard",
				   DECL_ATTRIBUTES (TREE_OPERAND (fn, 0)))))
    {
      /* Case 1: the called function itself is declared nodiscard,
	 possibly with a message argument.  */
      fn = TREE_OPERAND (fn, 0);
      tree args = TREE_VALUE (attr);
      if (args)
	args = TREE_VALUE (args);
      auto_diagnostic_group d;
      int warned;
      if (args)
	warned = warning_at (loc, OPT_Wunused_result,
			     "ignoring return value of %qD, declared with "
			     "attribute %<nodiscard%>: %E", fn, args);
      else
	warned = warning_at (loc, OPT_Wunused_result,
			     "ignoring return value of %qD, declared with "
			     "attribute %<nodiscard%>", fn);
      if (warned)
	inform (DECL_SOURCE_LOCATION (fn), "declared here");
    }
  else
    {
      /* Case 2: the nodiscard attribute is on the return type.  */
      tree rettype = TREE_TYPE (TREE_TYPE (TREE_TYPE (fn)));
      attr = lookup_attribute (attr_name: "nodiscard", TYPE_ATTRIBUTES (rettype));
      if (!attr)
	return;
      tree args = TREE_VALUE (attr);
      if (args)
	args = TREE_VALUE (args);
      auto_diagnostic_group d;
      int warned;
      if (args)
	warned = warning_at (loc, OPT_Wunused_result,
			     "ignoring return value of type %qT, declared "
			     "with attribute %<nodiscard%>: %E",
			     rettype, args);
      else
	warned = warning_at (loc, OPT_Wunused_result,
			     "ignoring return value of type %qT, declared "
			     "with attribute %<nodiscard%>", rettype);
      if (warned)
	{
	  /* Point at the callee when it is a direct call.  */
	  if (TREE_CODE (fn) == ADDR_EXPR)
	    {
	      fn = TREE_OPERAND (fn, 0);
	      if (TREE_CODE (fn) == FUNCTION_DECL)
		inform (DECL_SOURCE_LOCATION (fn),
			"in call to %qD, declared here", fn);
	    }
	}
    }
}
5773 | |
/* Return a compound expression that performs two expressions and
   returns the value of the second of them.

   Emits -Wunused-value warnings for useless left-hand (and nested
   right-hand) operands and -Wunused-result for discarded nodiscard
   calls in EXPR1.

   LOC is the location of the COMPOUND_EXPR.  */

tree
build_compound_expr (location_t loc, tree expr1, tree expr2)
{
  bool expr1_int_operands, expr2_int_operands;
  tree eptype = NULL_TREE;
  tree ret;

  /* Track whether both operands consist of integer constant operands
     and strip their C_MAYBE_CONST_EXPR wrappers.  */
  expr1_int_operands = EXPR_INT_CONST_OPERANDS (expr1);
  if (expr1_int_operands)
    expr1 = remove_c_maybe_const_expr (expr: expr1);
  expr2_int_operands = EXPR_INT_CONST_OPERANDS (expr2);
  if (expr2_int_operands)
    expr2 = remove_c_maybe_const_expr (expr: expr2);

  /* Excess precision of EXPR1 is irrelevant (its value is discarded);
     excess precision of EXPR2 is re-applied to the whole result.  */
  if (TREE_CODE (expr1) == EXCESS_PRECISION_EXPR)
    expr1 = TREE_OPERAND (expr1, 0);
  if (TREE_CODE (expr2) == EXCESS_PRECISION_EXPR)
    {
      eptype = TREE_TYPE (expr2);
      expr2 = TREE_OPERAND (expr2, 0);
    }

  if (!TREE_SIDE_EFFECTS (expr1))
    {
      /* The left-hand operand of a comma expression is like an expression
	 statement: with -Wunused, we should warn if it doesn't have
	 any side-effects, unless it was explicitly cast to (void).  */
      if (warn_unused_value)
	{
	  if (VOID_TYPE_P (TREE_TYPE (expr1))
	      && CONVERT_EXPR_P (expr1))
	    ; /* (void) a, b */
	  else if (VOID_TYPE_P (TREE_TYPE (expr1))
		   && TREE_CODE (expr1) == COMPOUND_EXPR
		   && CONVERT_EXPR_P (TREE_OPERAND (expr1, 1)))
	    ; /* (void) a, (void) b, c */
	  else
	    warning_at (loc, OPT_Wunused_value,
			"left-hand operand of comma expression has no effect");
	}
    }
  else if (TREE_CODE (expr1) == COMPOUND_EXPR
	   && warn_unused_value)
    {
      /* EXPR1 is itself a comma chain: warn about a useless final
	 operand of that chain, at the innermost location we have.  */
      tree r = expr1;
      location_t cloc = loc;
      while (TREE_CODE (r) == COMPOUND_EXPR)
	{
	  if (EXPR_HAS_LOCATION (r))
	    cloc = EXPR_LOCATION (r);
	  r = TREE_OPERAND (r, 1);
	}
      if (!TREE_SIDE_EFFECTS (r)
	  && !VOID_TYPE_P (TREE_TYPE (r))
	  && !CONVERT_EXPR_P (r))
	warning_at (cloc, OPT_Wunused_value,
		    "right-hand operand of comma expression has no effect");
    }

  /* With -Wunused, we should also warn if the left-hand operand does have
     side-effects, but computes a value which is not used.  For example, in
     `foo() + bar(), baz()' the result of the `+' operator is not used,
     so we should issue a warning.  */
  else if (warn_unused_value)
    warn_if_unused_value (expr1, loc);

  maybe_warn_nodiscard (loc, expr: expr1);

  if (expr2 == error_mark_node)
    return error_mark_node;

  ret = build2 (COMPOUND_EXPR, TREE_TYPE (expr2), expr1, expr2);

  /* In C99, a comma of integer constant operands is itself such an
     expression (though not an integer constant expression).  */
  if (flag_isoc99
      && expr1_int_operands
      && expr2_int_operands)
    ret = note_integer_operands (expr: ret);

  if (eptype)
    ret = build1 (EXCESS_PRECISION_EXPR, eptype, ret);

  protected_set_expr_location (ret, loc);
  return ret;
}
5863 | |
/* Issue -Wcast-qual warnings when appropriate.  TYPE is the type to
   which we are casting.  OTYPE is the type of the expression being
   cast.  Both TYPE and OTYPE are pointer types.  LOC is the location
   of the cast.  -Wcast-qual appeared on the command line.  Named
   address space qualifiers are not handled here, because they result
   in different warnings.  */

static void
handle_warn_cast_qual (location_t loc, tree type, tree otype)
{
  tree in_type = type;
  tree in_otype = otype;
  int added = 0;       /* Qualifiers added to a function type.  */
  int discarded = 0;   /* Qualifiers dropped from a pointer target.  */
  bool is_const;

  /* Check that the qualifiers on IN_TYPE are a superset of the
     qualifiers of IN_OTYPE.  The outermost level of POINTER_TYPE
     nodes is uninteresting and we stop as soon as we hit a
     non-POINTER_TYPE node on either type.  */
  do
    {
      in_otype = TREE_TYPE (in_otype);
      in_type = TREE_TYPE (in_type);

      /* GNU C allows cv-qualified function types.  'const' means the
	 function is very pure, 'volatile' means it can't return.  We
	 need to warn when such qualifiers are added, not when they're
	 taken away.  */
      if (TREE_CODE (in_otype) == FUNCTION_TYPE
	  && TREE_CODE (in_type) == FUNCTION_TYPE)
	added |= (TYPE_QUALS_NO_ADDR_SPACE (in_type)
		  & ~TYPE_QUALS_NO_ADDR_SPACE (in_otype));
      else
	discarded |= (TYPE_QUALS_NO_ADDR_SPACE (in_otype)
		      & ~TYPE_QUALS_NO_ADDR_SPACE (in_type));
    }
  while (TREE_CODE (in_type) == POINTER_TYPE
	 && TREE_CODE (in_otype) == POINTER_TYPE);

  if (added)
    warning_at (loc, OPT_Wcast_qual,
		"cast adds %q#v qualifier to function type", added);

  if (discarded)
    /* There are qualifiers present in IN_OTYPE that are not present
       in IN_TYPE.  */
    warning_at (loc, OPT_Wcast_qual,
		"cast discards %qv qualifier from pointer target type",
		discarded);

  if (added || discarded)
    return;

  /* A cast from **T to const **T is unsafe, because it can cause a
     const value to be changed with no additional warning.  We only
     issue this warning if T is the same on both sides, and we only
     issue the warning if there are the same number of pointers on
     both sides, as otherwise the cast is clearly unsafe anyhow.  A
     cast is unsafe when a qualifier is added at one level and const
     is not present at all outer levels.

     To issue this warning, we check at each level whether the cast
     adds new qualifiers not already seen.  We don't need to special
     case function types, as they won't have the same
     TYPE_MAIN_VARIANT.  */

  if (TYPE_MAIN_VARIANT (in_type) != TYPE_MAIN_VARIANT (in_otype))
    return;
  if (TREE_CODE (TREE_TYPE (type)) != POINTER_TYPE)
    return;

  /* Second pass: walk the pointer levels again from the top,
     tracking whether every outer level so far was const.  */
  in_type = type;
  in_otype = otype;
  is_const = TYPE_READONLY (TREE_TYPE (in_type));
  do
    {
      in_type = TREE_TYPE (in_type);
      in_otype = TREE_TYPE (in_otype);
      if ((TYPE_QUALS (in_type) &~ TYPE_QUALS (in_otype)) != 0
	  && !is_const)
	{
	  warning_at (loc, OPT_Wcast_qual,
		      "to be safe all intermediate pointers in cast from "
		      "%qT to %qT must be %<const%> qualified",
		      otype, type);
	  break;
	}
      if (is_const)
	is_const = TYPE_READONLY (in_type);
    }
  while (TREE_CODE (in_type) == POINTER_TYPE);
}
5957 | |
5958 | /* Heuristic check if two parameter types can be considered ABI-equivalent. */ |
5959 | |
5960 | static bool |
5961 | c_safe_arg_type_equiv_p (tree t1, tree t2) |
5962 | { |
5963 | if (error_operand_p (t: t1) || error_operand_p (t: t2)) |
5964 | return true; |
5965 | |
5966 | t1 = TYPE_MAIN_VARIANT (t1); |
5967 | t2 = TYPE_MAIN_VARIANT (t2); |
5968 | |
5969 | if (TREE_CODE (t1) == POINTER_TYPE |
5970 | && TREE_CODE (t2) == POINTER_TYPE) |
5971 | return true; |
5972 | |
5973 | /* The signedness of the parameter matters only when an integral |
5974 | type smaller than int is promoted to int, otherwise only the |
5975 | precision of the parameter matters. |
5976 | This check should make sure that the callee does not see |
5977 | undefined values in argument registers. */ |
5978 | if (INTEGRAL_TYPE_P (t1) |
5979 | && INTEGRAL_TYPE_P (t2) |
5980 | && TYPE_PRECISION (t1) == TYPE_PRECISION (t2) |
5981 | && (TYPE_UNSIGNED (t1) == TYPE_UNSIGNED (t2) |
5982 | || !targetm.calls.promote_prototypes (NULL_TREE) |
5983 | || TYPE_PRECISION (t1) >= TYPE_PRECISION (integer_type_node))) |
5984 | return true; |
5985 | |
5986 | return comptypes (type1: t1, type2: t2); |
5987 | } |
5988 | |
5989 | /* Check if a type cast between two function types can be considered safe. */ |
5990 | |
5991 | static bool |
5992 | c_safe_function_type_cast_p (tree t1, tree t2) |
5993 | { |
5994 | if (TREE_TYPE (t1) == void_type_node && |
5995 | TYPE_ARG_TYPES (t1) == void_list_node) |
5996 | return true; |
5997 | |
5998 | if (TREE_TYPE (t2) == void_type_node && |
5999 | TYPE_ARG_TYPES (t2) == void_list_node) |
6000 | return true; |
6001 | |
6002 | if (!c_safe_arg_type_equiv_p (TREE_TYPE (t1), TREE_TYPE (t2))) |
6003 | return false; |
6004 | |
6005 | for (t1 = TYPE_ARG_TYPES (t1), t2 = TYPE_ARG_TYPES (t2); |
6006 | t1 && t2; |
6007 | t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2)) |
6008 | if (!c_safe_arg_type_equiv_p (TREE_VALUE (t1), TREE_VALUE (t2))) |
6009 | return false; |
6010 | |
6011 | return true; |
6012 | } |
6013 | |
/* Build an expression representing a cast to type TYPE of expression EXPR.
   LOC is the location of the cast-- typically the open paren of the cast.

   Returns the cast expression, or error_mark_node on failure.  Performs
   all C-specific cast diagnostics (qualifier changes, disjoint address
   spaces, alignment increases, pointer/integer size mismatches,
   function-type incompatibility) before doing the conversion.  The
   result is never an lvalue.  */

tree
build_c_cast (location_t loc, tree type, tree expr)
{
  tree value;

  /* Record up front whether EXPR consists only of integer constant
     operands; used at the end so the result can still participate in
     unevaluated parts of integer constant expressions.  */
  bool int_operands = EXPR_INT_CONST_OPERANDS (expr);

  /* Strip an excess-precision wrapper; the cast imposes its own type.  */
  if (TREE_CODE (expr) == EXCESS_PRECISION_EXPR)
    expr = TREE_OPERAND (expr, 0);

  value = expr;
  if (int_operands)
    value = remove_c_maybe_const_expr (expr: value);

  if (type == error_mark_node || expr == error_mark_node)
    return error_mark_node;

  /* The ObjC front-end uses TYPE_MAIN_VARIANT to tie together types differing
     only in <protocol> qualifications.  But when constructing cast expressions,
     the protocols do matter and must be kept around.  */
  if (objc_is_object_ptr (type) && objc_is_object_ptr (TREE_TYPE (expr)))
    return build1 (NOP_EXPR, type, expr);

  type = TYPE_MAIN_VARIANT (type);

  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      error_at (loc, "cast specifies array type" );
      return error_mark_node;
    }

  if (TREE_CODE (type) == FUNCTION_TYPE)
    {
      error_at (loc, "cast specifies function type" );
      return error_mark_node;
    }

  /* A cast to anything other than void requires a complete operand.  */
  if (!VOID_TYPE_P (type))
    {
      value = require_complete_type (loc, value);
      if (value == error_mark_node)
	return error_mark_node;
    }

  if (type == TYPE_MAIN_VARIANT (TREE_TYPE (value)))
    {
      /* Cast to the operand's own type: at most diagnose, then convert
	 to strip qualifiers.  */
      if (RECORD_OR_UNION_TYPE_P (type)
	  && pedwarn (loc, OPT_Wpedantic,
		      "ISO C forbids casting nonscalar to the same type" ))
	;
      else if (warn_useless_cast)
	warning_at (loc, OPT_Wuseless_cast,
		    "useless cast to type %qT" , type);

      /* Convert to remove any qualifiers from VALUE's type.  */
      value = convert (type, value);
    }
  else if (TREE_CODE (type) == UNION_TYPE)
    {
      /* GNU extension: cast to union type, accepted when the operand's
	 type matches one of the union's members.  */
      tree field;

      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_TYPE (field) != error_mark_node
	    && comptypes (TYPE_MAIN_VARIANT (TREE_TYPE (field)),
			  TYPE_MAIN_VARIANT (TREE_TYPE (value))))
	  break;

      if (field)
	{
	  tree t;
	  bool maybe_const = true;

	  pedwarn (loc, OPT_Wpedantic, "ISO C forbids casts to union type" );
	  t = c_fully_fold (value, false, &maybe_const);
	  /* Represent the cast as a constructor initializing the
	     matching member.  */
	  t = build_constructor_single (type, field, t);
	  if (!maybe_const)
	    t = c_wrap_maybe_const (t, true);
	  t = digest_init (loc, type, t,
			   NULL_TREE, false, false, false, true, false, false);
	  TREE_CONSTANT (t) = TREE_CONSTANT (value);
	  return t;
	}
      error_at (loc, "cast to union type from type not present in union" );
      return error_mark_node;
    }
  else
    {
      tree otype, ovalue;

      if (type == void_type_node)
	{
	  tree t = build1 (CONVERT_EXPR, type, value);
	  SET_EXPR_LOCATION (t, loc);
	  return t;
	}

      otype = TREE_TYPE (value);

      /* Optionally warn about potentially worrisome casts.  */
      if (warn_cast_qual
	  && TREE_CODE (type) == POINTER_TYPE
	  && TREE_CODE (otype) == POINTER_TYPE)
	handle_warn_cast_qual (loc, type, otype);

      /* Warn about conversions between pointers to disjoint
	 address spaces.  */
      if (TREE_CODE (type) == POINTER_TYPE
	  && TREE_CODE (otype) == POINTER_TYPE
	  && !null_pointer_constant_p (expr: value))
	{
	  addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
	  addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (otype));
	  addr_space_t as_common;

	  if (!addr_space_superset (as1: as_to, as2: as_from, common: &as_common))
	    {
	      /* Pick the wording according to which side (if either)
		 is the generic address space.  */
	      if (ADDR_SPACE_GENERIC_P (as_from))
		warning_at (loc, 0, "cast to %qs address space pointer "
			    "from disjoint generic address space pointer" ,
			    c_addr_space_name (as: as_to));

	      else if (ADDR_SPACE_GENERIC_P (as_to))
		warning_at (loc, 0, "cast to generic address space pointer "
			    "from disjoint %qs address space pointer" ,
			    c_addr_space_name (as: as_from));

	      else
		warning_at (loc, 0, "cast to %qs address space pointer "
			    "from disjoint %qs address space pointer" ,
			    c_addr_space_name (as: as_to),
			    c_addr_space_name (as: as_from));
	    }
	}

      /* Warn about possible alignment problems.  */
      if ((STRICT_ALIGNMENT || warn_cast_align == 2)
	  && TREE_CODE (type) == POINTER_TYPE
	  && TREE_CODE (otype) == POINTER_TYPE
	  && TREE_CODE (TREE_TYPE (otype)) != VOID_TYPE
	  && TREE_CODE (TREE_TYPE (otype)) != FUNCTION_TYPE
	  /* Don't warn about opaque types, where the actual alignment
	     restriction is unknown.  */
	  && !(RECORD_OR_UNION_TYPE_P (TREE_TYPE (otype))
	       && TYPE_MODE (TREE_TYPE (otype)) == VOIDmode)
	  && min_align_of_type (TREE_TYPE (type))
	  > min_align_of_type (TREE_TYPE (otype)))
	warning_at (loc, OPT_Wcast_align,
		    "cast increases required alignment of target type" );

      if ((TREE_CODE (type) == INTEGER_TYPE
	   || TREE_CODE (type) == BITINT_TYPE)
	  && TREE_CODE (otype) == POINTER_TYPE
	  && TYPE_PRECISION (type) != TYPE_PRECISION (otype))
      /* Unlike conversion of integers to pointers, where the
	 warning is disabled for converting constants because
	 of cases such as SIG_*, warn about converting constant
	 pointers to integers.  In some cases it may cause unwanted
	 sign extension, and a warning is appropriate.  */
	warning_at (loc, OPT_Wpointer_to_int_cast,
		    "cast from pointer to integer of different size" );

      if (TREE_CODE (value) == CALL_EXPR
	  && TREE_CODE (type) != TREE_CODE (otype))
	warning_at (loc, OPT_Wbad_function_cast,
		    "cast from function call of type %qT "
		    "to non-matching type %qT" , otype, type);

      if (TREE_CODE (type) == POINTER_TYPE
	  && (TREE_CODE (otype) == INTEGER_TYPE
	      || TREE_CODE (otype) == BITINT_TYPE)
	  && TYPE_PRECISION (type) != TYPE_PRECISION (otype)
	  /* Don't warn about converting any constant.  */
	  && !TREE_CONSTANT (value))
	warning_at (loc,
		    OPT_Wint_to_pointer_cast, "cast to pointer from integer "
		    "of different size" );

      if (warn_strict_aliasing <= 2)
	strict_aliasing_warning (EXPR_LOCATION (value), type, expr);

      /* If pedantic, warn for conversions between function and object
	 pointer types, except for converting a null pointer constant
	 to function pointer type.  */
      if (pedantic
	  && TREE_CODE (type) == POINTER_TYPE
	  && TREE_CODE (otype) == POINTER_TYPE
	  && TREE_CODE (TREE_TYPE (otype)) == FUNCTION_TYPE
	  && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
	pedwarn (loc, OPT_Wpedantic, "ISO C forbids "
		 "conversion of function pointer to object pointer type" );

      if (pedantic
	  && TREE_CODE (type) == POINTER_TYPE
	  && TREE_CODE (otype) == POINTER_TYPE
	  && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE
	  && TREE_CODE (TREE_TYPE (otype)) != FUNCTION_TYPE
	  && !null_pointer_constant_p (expr: value))
	pedwarn (loc, OPT_Wpedantic, "ISO C forbids "
		 "conversion of object pointer to function pointer type" );

      /* Warn about casts between incompatible function pointer types.  */
      if (TREE_CODE (type) == POINTER_TYPE
	  && TREE_CODE (otype) == POINTER_TYPE
	  && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE
	  && TREE_CODE (TREE_TYPE (otype)) == FUNCTION_TYPE
	  && !c_safe_function_type_cast_p (TREE_TYPE (type),
					   TREE_TYPE (otype)))
	warning_at (loc, OPT_Wcast_function_type,
		    "cast between incompatible function types"
		    " from %qT to %qT" , otype, type);

      ovalue = value;
      value = convert (type, value);

      /* Ignore any integer overflow caused by the cast.  */
      if (TREE_CODE (value) == INTEGER_CST && !FLOAT_TYPE_P (otype))
	{
	  if (TREE_OVERFLOW_P (ovalue))
	    {
	      if (!TREE_OVERFLOW (value))
		{
		  /* Avoid clobbering a shared constant.  */
		  value = copy_node (value);
		  TREE_OVERFLOW (value) = TREE_OVERFLOW (ovalue);
		}
	    }
	  else if (TREE_OVERFLOW (value))
	    /* Reset VALUE's overflow flags, ensuring constant sharing.  */
	    value = wide_int_to_tree (TREE_TYPE (value), cst: wi::to_wide (t: value));
	}
    }

  /* Don't let a cast be an lvalue.  */
  if (lvalue_p (ref: value))
    value = non_lvalue_loc (loc, value);

  /* Don't allow the results of casting to floating-point or complex
     types be confused with actual constants, or casts involving
     integer and pointer types other than direct integer-to-integer
     and integer-to-pointer be confused with integer constant
     expressions and null pointer constants.  */
  if (TREE_CODE (value) == REAL_CST
      || TREE_CODE (value) == COMPLEX_CST
      || (TREE_CODE (value) == INTEGER_CST
	  && !((TREE_CODE (expr) == INTEGER_CST
		&& INTEGRAL_TYPE_P (TREE_TYPE (expr)))
	       || TREE_CODE (expr) == REAL_CST
	       || TREE_CODE (expr) == COMPLEX_CST)))
    value = build1 (NOP_EXPR, type, value);

  /* If the expression has integer operands and so can occur in an
     unevaluated part of an integer constant expression, ensure the
     return value reflects this.  */
  if (int_operands
      && INTEGRAL_TYPE_P (type)
      && value != error_mark_node
      && !EXPR_INT_CONST_OPERANDS (value))
    value = note_integer_operands (expr: value);

  protected_set_expr_location (value, loc);
  return value;
}
6278 | |
/* Interpret a cast of expression EXPR to type TYPE.  LOC is the
   location of the open paren of the cast, or the position of the cast
   expr.  TYPE_NAME is the parsed type-name of the cast.  Returns the
   resulting expression, or error_mark_node.  */
tree
c_cast_expr (location_t loc, struct c_type_name *type_name, tree expr)
{
  tree type;
  tree type_expr = NULL_TREE;
  bool type_expr_const = true;
  tree ret;
  /* Save so the suppression below can be undone after groktypename.  */
  int saved_wsp = warn_strict_prototypes;

  /* This avoids warnings about unprototyped casts on
     integers.  E.g. "#define SIG_DFL (void(*)())0".  */
  if (TREE_CODE (expr) == INTEGER_CST)
    warn_strict_prototypes = 0;
  type = groktypename (type_name, &type_expr, &type_expr_const);
  warn_strict_prototypes = saved_wsp;

  if (TREE_CODE (expr) == ADDR_EXPR && !VOID_TYPE_P (type)
      && reject_gcc_builtin (expr))
    return error_mark_node;

  ret = build_c_cast (loc, type, expr);
  /* TYPE_EXPR was filled in by groktypename when the type-name itself
     contains expressions to evaluate (NOTE(review): presumably for
     variably modified types -- confirm against groktypename); wrap the
     cast so those expressions are evaluated and const-ness tracked.  */
  if (type_expr)
    {
      bool inner_expr_const = true;
      ret = c_fully_fold (ret, require_constant_value, &inner_expr_const);
      ret = build2 (C_MAYBE_CONST_EXPR, TREE_TYPE (ret), type_expr, ret);
      C_MAYBE_CONST_EXPR_NON_CONST (ret) = !(type_expr_const
					     && inner_expr_const);
      SET_EXPR_LOCATION (ret, loc);
    }

  if (!EXPR_HAS_LOCATION (ret))
    protected_set_expr_location (ret, loc);

  /* C++ does not permit types to be defined in a cast, but it
     allows references to incomplete types.  */
  if (warn_cxx_compat && type_name->specs->typespec_kind == ctsk_tagdef)
    warning_at (loc, OPT_Wc___compat,
		"defining a type in a cast is invalid in C++" );

  return ret;
}
6324 | |
/* Build an assignment expression of lvalue LHS from value RHS.
   If LHS_ORIGTYPE is not NULL, it is the original type of LHS, which
   may differ from TREE_TYPE (LHS) for an enum bitfield.
   MODIFYCODE is the code for a binary operator that we use
   to combine the old value of LHS with RHS to get the new value.
   Or else MODIFYCODE is NOP_EXPR meaning do a simple assignment.
   If RHS_ORIGTYPE is not NULL_TREE, it is the original type of RHS,
   which may differ from TREE_TYPE (RHS) for an enum value.

   LOCATION is the location of the MODIFYCODE operator.
   RHS_LOC is the location of the RHS.

   Returns the assignment expression, or error_mark_node on failure.  */

tree
build_modify_expr (location_t location, tree lhs, tree lhs_origtype,
		   enum tree_code modifycode,
		   location_t rhs_loc, tree rhs, tree rhs_origtype)
{
  tree result;
  tree newrhs;
  /* Pre-evaluated copy of a side-effecting RHS, sequenced before the
     assignment via a COMPOUND_EXPR at the end.  */
  tree rhseval = NULL_TREE;
  tree lhstype = TREE_TYPE (lhs);
  tree olhstype = lhstype;
  bool npc;
  bool is_atomic_op;

  /* Types that aren't fully specified cannot be used in assignments.  */
  lhs = require_complete_type (loc: location, value: lhs);

  /* Avoid duplicate error messages from operands that had errors.  */
  if (TREE_CODE (lhs) == ERROR_MARK || TREE_CODE (rhs) == ERROR_MARK)
    return error_mark_node;

  /* Ensure an error for assigning a non-lvalue array to an array in
     C90.  */
  if (TREE_CODE (lhstype) == ARRAY_TYPE)
    {
      error_at (location, "assignment to expression with array type" );
      return error_mark_node;
    }

  /* For ObjC properties, defer this check.  */
  if (!objc_is_property_ref (lhs) && !lvalue_or_else (loc: location, ref: lhs, use: lv_assign))
    return error_mark_node;

  is_atomic_op = really_atomic_lvalue (expr: lhs);

  newrhs = rhs;

  /* An LHS wrapped in C_MAYBE_CONST_EXPR: recurse on the inner
     expression and re-wrap the result, preserving the pre-evaluation
     part and marking the whole as non-constant.  */
  if (TREE_CODE (lhs) == C_MAYBE_CONST_EXPR)
    {
      tree inner = build_modify_expr (location, C_MAYBE_CONST_EXPR_EXPR (lhs),
				      lhs_origtype, modifycode, rhs_loc, rhs,
				      rhs_origtype);
      if (inner == error_mark_node)
	return error_mark_node;
      result = build2 (C_MAYBE_CONST_EXPR, TREE_TYPE (inner),
		       C_MAYBE_CONST_EXPR_PRE (lhs), inner);
      gcc_assert (!C_MAYBE_CONST_EXPR_INT_OPERANDS (lhs));
      C_MAYBE_CONST_EXPR_NON_CONST (result) = 1;
      protected_set_expr_location (result, location);
      return result;
    }

  /* If a binary op has been requested, combine the old LHS value with the RHS
     producing the value we should actually store into the LHS.  */

  if (modifycode != NOP_EXPR)
    {
      lhs = c_fully_fold (lhs, false, NULL, true);
      lhs = stabilize_reference (lhs);

      /* Construct the RHS for any non-atomic compound assignment.  */
      if (!is_atomic_op)
	{
	  /* If in LHS op= RHS the RHS has side-effects, ensure they
	     are preevaluated before the rest of the assignment expression's
	     side-effects, because RHS could contain e.g. function calls
	     that modify LHS.  */
	  if (TREE_SIDE_EFFECTS (rhs))
	    {
	      if (TREE_CODE (rhs) == EXCESS_PRECISION_EXPR)
		newrhs = save_expr (TREE_OPERAND (rhs, 0));
	      else
		newrhs = save_expr (rhs);
	      rhseval = newrhs;
	      /* Re-apply the excess-precision wrapper around the
		 saved expression.  */
	      if (TREE_CODE (rhs) == EXCESS_PRECISION_EXPR)
		newrhs = build1 (EXCESS_PRECISION_EXPR, TREE_TYPE (rhs),
				 newrhs);
	    }
	  newrhs = build_binary_op (location,
				    modifycode, lhs, newrhs, true);

	  /* The original type of the right hand side is no longer
	     meaningful.  */
	  rhs_origtype = NULL_TREE;
	}
    }

  if (c_dialect_objc ())
    {
      /* Check if we are modifying an Objective-C property reference;
	 if so, we need to generate setter calls.  */
      if (TREE_CODE (newrhs) == EXCESS_PRECISION_EXPR)
	result = objc_maybe_build_modify_expr (lhs, TREE_OPERAND (newrhs, 0));
      else
	result = objc_maybe_build_modify_expr (lhs, newrhs);
      if (result)
	goto return_result;

      /* Else, do the check that we postponed for Objective-C.  */
      if (!lvalue_or_else (loc: location, ref: lhs, use: lv_assign))
	return error_mark_node;
    }

  /* Give an error for storing in something that is 'const'.  */

  if (TYPE_READONLY (lhstype)
      || (RECORD_OR_UNION_TYPE_P (lhstype)
	  && C_TYPE_FIELDS_READONLY (lhstype)))
    {
      readonly_error (location, lhs, lv_assign);
      return error_mark_node;
    }
  else if (TREE_READONLY (lhs))
    readonly_warning (arg: lhs, use: lv_assign);

  /* If storing into a structure or union member,
     it has probably been given type `int'.
     Compute the type that would go with
     the actual amount of storage the member occupies.  */

  if (TREE_CODE (lhs) == COMPONENT_REF
      && (TREE_CODE (lhstype) == INTEGER_TYPE
	  || TREE_CODE (lhstype) == BOOLEAN_TYPE
	  || SCALAR_FLOAT_TYPE_P (lhstype)
	  || TREE_CODE (lhstype) == ENUMERAL_TYPE))
    lhstype = TREE_TYPE (get_unwidened (lhs, 0));

  /* If storing in a field that is in actuality a short or narrower than one,
     we must store in the field in its actual type.  */

  if (lhstype != TREE_TYPE (lhs))
    {
      lhs = copy_node (lhs);
      TREE_TYPE (lhs) = lhstype;
    }

  /* Issue -Wc++-compat warnings about an assignment to an enum type
     when LHS does not have its original type.  This happens for,
     e.g., an enum bitfield in a struct.  */
  if (warn_cxx_compat
      && lhs_origtype != NULL_TREE
      && lhs_origtype != lhstype
      && TREE_CODE (lhs_origtype) == ENUMERAL_TYPE)
    {
      tree checktype = (rhs_origtype != NULL_TREE
			? rhs_origtype
			: TREE_TYPE (rhs));
      if (checktype != error_mark_node
	  && (TYPE_MAIN_VARIANT (checktype) != TYPE_MAIN_VARIANT (lhs_origtype)
	      || (is_atomic_op && modifycode != NOP_EXPR)))
	warning_at (location, OPT_Wc___compat,
		    "enum conversion in assignment is invalid in C++" );
    }

  /* Remove qualifiers.  */
  lhstype = build_qualified_type (lhstype, TYPE_UNQUALIFIED);
  olhstype = build_qualified_type (olhstype, TYPE_UNQUALIFIED);

  /* Convert new value to destination type.  Fold it first, then
     restore any excess precision information, for the sake of
     conversion warnings.  */

  if (!(is_atomic_op && modifycode != NOP_EXPR))
    {
      tree rhs_semantic_type = NULL_TREE;
      if (!c_in_omp_for)
	{
	  if (TREE_CODE (newrhs) == EXCESS_PRECISION_EXPR)
	    {
	      rhs_semantic_type = TREE_TYPE (newrhs);
	      newrhs = TREE_OPERAND (newrhs, 0);
	    }
	  npc = null_pointer_constant_p (expr: newrhs);
	  newrhs = c_fully_fold (newrhs, false, NULL);
	  if (rhs_semantic_type)
	    newrhs = build1 (EXCESS_PRECISION_EXPR, rhs_semantic_type, newrhs);
	}
      else
	npc = null_pointer_constant_p (expr: newrhs);
      newrhs = convert_for_assignment (location, rhs_loc, lhstype, newrhs,
				       rhs_origtype, ic_assign, npc,
				       NULL_TREE, NULL_TREE, 0);
      if (TREE_CODE (newrhs) == ERROR_MARK)
	return error_mark_node;
    }

  /* Emit ObjC write barrier, if necessary.  */
  if (c_dialect_objc () && flag_objc_gc)
    {
      result = objc_generate_write_barrier (lhs, modifycode, newrhs);
      if (result)
	{
	  protected_set_expr_location (result, location);
	  goto return_result;
	}
    }

  /* Scan operands.  */

  if (is_atomic_op)
    result = build_atomic_assign (loc: location, lhs, modifycode, rhs: newrhs, return_old_p: false);
  else
    {
      result = build2 (MODIFY_EXPR, lhstype, lhs, newrhs);
      TREE_SIDE_EFFECTS (result) = 1;
      protected_set_expr_location (result, location);
    }

  /* If we got the LHS in a different type for storing in,
     convert the result back to the nominal type of LHS
     so that the value we return always has the same type
     as the LHS argument.  */

  if (olhstype == TREE_TYPE (result))
    goto return_result;

  result = convert_for_assignment (location, rhs_loc, olhstype, result,
				   rhs_origtype, ic_assign, false, NULL_TREE,
				   NULL_TREE, 0);
  protected_set_expr_location (result, location);

return_result:
  /* Sequence any pre-evaluated RHS side effects before the result.  */
  if (rhseval)
    result = build2 (COMPOUND_EXPR, TREE_TYPE (result), rhseval, result);
  return result;
}
6562 | |
6563 | /* Return whether STRUCT_TYPE has an anonymous field with type TYPE. |
6564 | This is used to implement -fplan9-extensions. */ |
6565 | |
6566 | static bool |
6567 | find_anonymous_field_with_type (tree struct_type, tree type) |
6568 | { |
6569 | tree field; |
6570 | bool found; |
6571 | |
6572 | gcc_assert (RECORD_OR_UNION_TYPE_P (struct_type)); |
6573 | found = false; |
6574 | for (field = TYPE_FIELDS (struct_type); |
6575 | field != NULL_TREE; |
6576 | field = TREE_CHAIN (field)) |
6577 | { |
6578 | tree fieldtype = (TYPE_ATOMIC (TREE_TYPE (field)) |
6579 | ? c_build_qualified_type (TREE_TYPE (field), |
6580 | TYPE_QUAL_ATOMIC) |
6581 | : TYPE_MAIN_VARIANT (TREE_TYPE (field))); |
6582 | if (DECL_NAME (field) == NULL |
6583 | && comptypes (type1: type, type2: fieldtype)) |
6584 | { |
6585 | if (found) |
6586 | return false; |
6587 | found = true; |
6588 | } |
6589 | else if (DECL_NAME (field) == NULL |
6590 | && RECORD_OR_UNION_TYPE_P (TREE_TYPE (field)) |
6591 | && find_anonymous_field_with_type (TREE_TYPE (field), type)) |
6592 | { |
6593 | if (found) |
6594 | return false; |
6595 | found = true; |
6596 | } |
6597 | } |
6598 | return found; |
6599 | } |
6600 | |
/* RHS is an expression whose type is pointer to struct.  If there is
   an anonymous field in RHS with type TYPE, then return a pointer to
   that field in RHS.  This is used with -fplan9-extensions.  This
   returns NULL if no conversion could be found, including when the
   anonymous field is ambiguous (appears more than once).  LOCATION is
   used for the expressions built to access the field.  */

static tree
convert_to_anonymous_field (location_t location, tree type, tree rhs)
{
  tree rhs_struct_type, lhs_main_type;
  tree field, found_field;
  /* Whether the match was found in a nested anonymous aggregate rather
     than directly, requiring a recursive conversion below.  */
  bool found_sub_field;
  tree ret;

  gcc_assert (POINTER_TYPE_P (TREE_TYPE (rhs)));
  rhs_struct_type = TREE_TYPE (TREE_TYPE (rhs));
  gcc_assert (RECORD_OR_UNION_TYPE_P (rhs_struct_type));

  gcc_assert (POINTER_TYPE_P (type));
  /* Target type of the LHS pointer: main variant, keeping an atomic
     qualifier if present.  */
  lhs_main_type = (TYPE_ATOMIC (TREE_TYPE (type))
		   ? c_build_qualified_type (TREE_TYPE (type),
					     TYPE_QUAL_ATOMIC)
		   : TYPE_MAIN_VARIANT (TREE_TYPE (type)));

  found_field = NULL_TREE;
  found_sub_field = false;
  for (field = TYPE_FIELDS (rhs_struct_type);
       field != NULL_TREE;
       field = TREE_CHAIN (field))
    {
      /* Only anonymous struct/union members are candidates.  */
      if (DECL_NAME (field) != NULL_TREE
	  || !RECORD_OR_UNION_TYPE_P (TREE_TYPE (field)))
	continue;
      tree fieldtype = (TYPE_ATOMIC (TREE_TYPE (field))
			? c_build_qualified_type (TREE_TYPE (field),
						  TYPE_QUAL_ATOMIC)
			: TYPE_MAIN_VARIANT (TREE_TYPE (field)));
      if (comptypes (type1: lhs_main_type, type2: fieldtype))
	{
	  /* Two direct matches: ambiguous, give up.  */
	  if (found_field != NULL_TREE)
	    return NULL_TREE;
	  found_field = field;
	}
      else if (find_anonymous_field_with_type (TREE_TYPE (field),
					       type: lhs_main_type))
	{
	  /* A nested match also conflicts with any earlier match.  */
	  if (found_field != NULL_TREE)
	    return NULL_TREE;
	  found_field = field;
	  found_sub_field = true;
	}
    }

  if (found_field == NULL_TREE)
    return NULL_TREE;

  /* Build &(*rhs).found_field.  */
  ret = fold_build3_loc (location, COMPONENT_REF, TREE_TYPE (found_field),
			 build_fold_indirect_ref (rhs), found_field,
			 NULL_TREE);
  ret = build_fold_addr_expr_loc (location, ret);

  if (found_sub_field)
    {
      /* Descend into the nested anonymous aggregate; the helper above
	 guaranteed a unique match exists, so this cannot fail.  */
      ret = convert_to_anonymous_field (location, type, rhs: ret);
      gcc_assert (ret != NULL_TREE);
    }

  return ret;
}
6669 | |
6670 | /* Issue an error message for a bad initializer component. |
6671 | GMSGID identifies the message. |
6672 | The component name is taken from the spelling stack. */ |
6673 | |
6674 | static void ATTRIBUTE_GCC_DIAG (2,0) |
6675 | error_init (location_t loc, const char *gmsgid, ...) |
6676 | { |
6677 | char *ofwhat; |
6678 | |
6679 | auto_diagnostic_group d; |
6680 | |
6681 | /* The gmsgid may be a format string with %< and %>. */ |
6682 | va_list ap; |
6683 | va_start (ap, gmsgid); |
6684 | bool warned = emit_diagnostic_valist (DK_ERROR, loc, -1, gmsgid, &ap); |
6685 | va_end (ap); |
6686 | |
6687 | ofwhat = print_spelling ((char *) alloca (spelling_length () + 1)); |
6688 | if (*ofwhat && warned) |
6689 | inform (loc, "(near initialization for %qs)" , ofwhat); |
6690 | } |
6691 | |
6692 | /* Issue a pedantic warning for a bad initializer component. OPT is |
6693 | the option OPT_* (from options.h) controlling this warning or 0 if |
6694 | it is unconditionally given. GMSGID identifies the message. The |
6695 | component name is taken from the spelling stack. */ |
6696 | |
6697 | static void ATTRIBUTE_GCC_DIAG (3,0) |
6698 | pedwarn_init (location_t loc, int opt, const char *gmsgid, ...) |
6699 | { |
6700 | /* Use the location where a macro was expanded rather than where |
6701 | it was defined to make sure macros defined in system headers |
6702 | but used incorrectly elsewhere are diagnosed. */ |
6703 | location_t exploc = expansion_point_location_if_in_system_header (loc); |
6704 | auto_diagnostic_group d; |
6705 | va_list ap; |
6706 | va_start (ap, gmsgid); |
6707 | bool warned = emit_diagnostic_valist (DK_PEDWARN, exploc, opt, gmsgid, &ap); |
6708 | va_end (ap); |
6709 | char *ofwhat = print_spelling ((char *) alloca (spelling_length () + 1)); |
6710 | if (*ofwhat && warned) |
6711 | inform (exploc, "(near initialization for %qs)" , ofwhat); |
6712 | } |
6713 | |
6714 | /* Issue a warning for a bad initializer component. |
6715 | |
6716 | OPT is the OPT_W* value corresponding to the warning option that |
6717 | controls this warning. GMSGID identifies the message. The |
6718 | component name is taken from the spelling stack. */ |
6719 | |
6720 | static void |
6721 | warning_init (location_t loc, int opt, const char *gmsgid) |
6722 | { |
6723 | char *ofwhat; |
6724 | bool warned; |
6725 | |
6726 | auto_diagnostic_group d; |
6727 | |
6728 | /* Use the location where a macro was expanded rather than where |
6729 | it was defined to make sure macros defined in system headers |
6730 | but used incorrectly elsewhere are diagnosed. */ |
6731 | location_t exploc = expansion_point_location_if_in_system_header (loc); |
6732 | |
6733 | /* The gmsgid may be a format string with %< and %>. */ |
6734 | warned = warning_at (exploc, opt, gmsgid); |
6735 | ofwhat = print_spelling ((char *) alloca (spelling_length () + 1)); |
6736 | if (*ofwhat && warned) |
6737 | inform (exploc, "(near initialization for %qs)" , ofwhat); |
6738 | } |
6739 | |
6740 | /* If TYPE is an array type and EXPR is a parenthesized string |
6741 | constant, warn if pedantic that EXPR is being used to initialize an |
6742 | object of type TYPE. */ |
6743 | |
6744 | void |
6745 | maybe_warn_string_init (location_t loc, tree type, struct c_expr expr) |
6746 | { |
6747 | if (pedantic |
6748 | && TREE_CODE (type) == ARRAY_TYPE |
6749 | && TREE_CODE (expr.value) == STRING_CST |
6750 | && expr.original_code != STRING_CST) |
6751 | pedwarn_init (loc, opt: OPT_Wpedantic, |
6752 | gmsgid: "array initialized from parenthesized string constant" ); |
6753 | } |
6754 | |
6755 | /* Attempt to locate the parameter with the given index within FNDECL, |
6756 | returning DECL_SOURCE_LOCATION (FNDECL) if it can't be found. */ |
6757 | |
6758 | static location_t |
6759 | get_fndecl_argument_location (tree fndecl, int argnum) |
6760 | { |
6761 | int i; |
6762 | tree param; |
6763 | |
6764 | /* Locate param by index within DECL_ARGUMENTS (fndecl). */ |
6765 | for (i = 0, param = DECL_ARGUMENTS (fndecl); |
6766 | i < argnum && param; |
6767 | i++, param = TREE_CHAIN (param)) |
6768 | ; |
6769 | |
6770 | /* If something went wrong (e.g. if we have a builtin and thus no arguments), |
6771 | return DECL_SOURCE_LOCATION (FNDECL). */ |
6772 | if (param == NULL) |
6773 | return DECL_SOURCE_LOCATION (fndecl); |
6774 | |
6775 | return DECL_SOURCE_LOCATION (param); |
6776 | } |
6777 | |
6778 | /* Issue a note about a mismatching argument for parameter PARMNUM |
6779 | to FUNDECL, for types EXPECTED_TYPE and ACTUAL_TYPE. |
6780 | Attempt to issue the note at the pertinent parameter of the decl; |
6781 | failing that issue it at the location of FUNDECL; failing that |
6782 | issue it at PLOC. */ |
6783 | |
6784 | static void |
6785 | inform_for_arg (tree fundecl, location_t ploc, int parmnum, |
6786 | tree expected_type, tree actual_type) |
6787 | { |
6788 | location_t loc; |
6789 | if (fundecl && !DECL_IS_UNDECLARED_BUILTIN (fundecl)) |
6790 | loc = get_fndecl_argument_location (fndecl: fundecl, argnum: parmnum - 1); |
6791 | else |
6792 | loc = ploc; |
6793 | |
6794 | inform (loc, |
6795 | "expected %qT but argument is of type %qT" , |
6796 | expected_type, actual_type); |
6797 | } |
6798 | |
6799 | /* Issue a warning when an argument of ARGTYPE is passed to a built-in |
6800 | function FUNDECL declared without prototype to parameter PARMNUM of |
6801 | PARMTYPE when ARGTYPE does not promote to PARMTYPE. */ |
6802 | |
6803 | static void |
6804 | maybe_warn_builtin_no_proto_arg (location_t loc, tree fundecl, int parmnum, |
6805 | tree parmtype, tree argtype) |
6806 | { |
6807 | tree_code parmcode = TREE_CODE (parmtype); |
6808 | tree_code argcode = TREE_CODE (argtype); |
6809 | tree promoted = c_type_promotes_to (type: argtype); |
6810 | |
6811 | /* Avoid warning for enum arguments that promote to an integer type |
6812 | of the same size/mode. */ |
6813 | if (parmcode == INTEGER_TYPE |
6814 | && argcode == ENUMERAL_TYPE |
6815 | && TYPE_MODE (parmtype) == TYPE_MODE (argtype)) |
6816 | return; |
6817 | |
6818 | if ((parmcode == argcode |
6819 | || (parmcode == INTEGER_TYPE |
6820 | && argcode == ENUMERAL_TYPE)) |
6821 | && TYPE_MAIN_VARIANT (parmtype) == TYPE_MAIN_VARIANT (promoted)) |
6822 | return; |
6823 | |
6824 | /* This diagnoses even signed/unsigned mismatches. Those might be |
6825 | safe in many cases but GCC may emit suboptimal code for them so |
6826 | warning on those cases drives efficiency improvements. */ |
6827 | if (warning_at (loc, OPT_Wbuiltin_declaration_mismatch, |
6828 | TYPE_MAIN_VARIANT (promoted) == argtype |
6829 | ? G_("%qD argument %d type is %qT where %qT is expected " |
6830 | "in a call to built-in function declared without " |
6831 | "prototype" ) |
6832 | : G_("%qD argument %d promotes to %qT where %qT is expected " |
6833 | "in a call to built-in function declared without " |
6834 | "prototype" ), |
6835 | fundecl, parmnum, promoted, parmtype)) |
6836 | inform (DECL_SOURCE_LOCATION (fundecl), |
6837 | "built-in %qD declared here" , |
6838 | fundecl); |
6839 | } |
6840 | |
6841 | /* Convert value RHS to type TYPE as preparation for an assignment to |
6842 | an lvalue of type TYPE. If ORIGTYPE is not NULL_TREE, it is the |
6843 | original type of RHS; this differs from TREE_TYPE (RHS) for enum |
6844 | types. NULL_POINTER_CONSTANT says whether RHS was a null pointer |
6845 | constant before any folding. |
6846 | The real work of conversion is done by `convert'. |
6847 | The purpose of this function is to generate error messages |
6848 | for assignments that are not allowed in C. |
6849 | ERRTYPE says whether it is argument passing, assignment, |
6850 | initialization or return. |
6851 | |
6852 | In the following example, '~' denotes where EXPR_LOC and '^' where |
6853 | LOCATION point to: |
6854 | |
6855 | f (var); [ic_argpass] |
6856 | ^ ~~~ |
6857 | x = var; [ic_assign] |
6858 | ^ ~~~; |
6859 | int x = var; [ic_init] |
6860 | ^^^ |
6861 | return x; [ic_return] |
6862 | ^ |
6863 | |
6864 | FUNCTION is a tree for the function being called. |
6865 | PARMNUM is the number of the argument, for printing in error messages. |
6866 | WARNOPT may be set to a warning option to issue the corresponding warning |
6867 | rather than an error for invalid conversions. Used for calls to built-in |
6868 | functions declared without a prototype. */ |
6869 | |
6870 | static tree |
6871 | convert_for_assignment (location_t location, location_t expr_loc, tree type, |
6872 | tree rhs, tree origtype, enum impl_conv errtype, |
6873 | bool null_pointer_constant, tree fundecl, |
6874 | tree function, int parmnum, int warnopt /* = 0 */) |
6875 | { |
6876 | enum tree_code codel = TREE_CODE (type); |
6877 | tree orig_rhs = rhs; |
6878 | tree rhstype; |
6879 | enum tree_code coder; |
6880 | tree rname = NULL_TREE; |
6881 | bool objc_ok = false; |
6882 | |
6883 | /* Use the expansion point location to handle cases such as user's |
6884 | function returning a wrong-type macro defined in a system header. */ |
6885 | location = expansion_point_location_if_in_system_header (location); |
6886 | |
6887 | if (errtype == ic_argpass) |
6888 | { |
6889 | tree selector; |
6890 | /* Change pointer to function to the function itself for |
6891 | diagnostics. */ |
6892 | if (TREE_CODE (function) == ADDR_EXPR |
6893 | && TREE_CODE (TREE_OPERAND (function, 0)) == FUNCTION_DECL) |
6894 | function = TREE_OPERAND (function, 0); |
6895 | |
6896 | /* Handle an ObjC selector specially for diagnostics. */ |
6897 | selector = objc_message_selector (); |
6898 | rname = function; |
6899 | if (selector && parmnum > 2) |
6900 | { |
6901 | rname = selector; |
6902 | parmnum -= 2; |
6903 | } |
6904 | } |
6905 | |
6906 | /* This macro is used to emit diagnostics to ensure that all format |
6907 | strings are complete sentences, visible to gettext and checked at |
6908 | compile time. */ |
6909 | #define PEDWARN_FOR_ASSIGNMENT(LOCATION, PLOC, OPT, AR, AS, IN, RE) \ |
6910 | do { \ |
6911 | switch (errtype) \ |
6912 | { \ |
6913 | case ic_argpass: \ |
6914 | { \ |
6915 | auto_diagnostic_group d; \ |
6916 | if (pedwarn (PLOC, OPT, AR, parmnum, rname)) \ |
6917 | inform_for_arg (fundecl, (PLOC), parmnum, type, rhstype); \ |
6918 | } \ |
6919 | break; \ |
6920 | case ic_assign: \ |
6921 | pedwarn (LOCATION, OPT, AS); \ |
6922 | break; \ |
6923 | case ic_init: \ |
6924 | case ic_init_const: \ |
6925 | pedwarn_init (LOCATION, OPT, IN); \ |
6926 | break; \ |
6927 | case ic_return: \ |
6928 | pedwarn (LOCATION, OPT, RE); \ |
6929 | break; \ |
6930 | default: \ |
6931 | gcc_unreachable (); \ |
6932 | } \ |
6933 | } while (0) |
6934 | |
6935 | /* This macro is used to emit diagnostics to ensure that all format |
6936 | strings are complete sentences, visible to gettext and checked at |
6937 | compile time. It can be called with 'pedwarn' or 'warning_at'. */ |
6938 | #define WARNING_FOR_QUALIFIERS(PEDWARN, LOCATION, PLOC, OPT, AR, AS, IN, RE, QUALS) \ |
6939 | do { \ |
6940 | switch (errtype) \ |
6941 | { \ |
6942 | case ic_argpass: \ |
6943 | { \ |
6944 | auto_diagnostic_group d; \ |
6945 | if (PEDWARN) { \ |
6946 | if (pedwarn (PLOC, OPT, AR, parmnum, rname, QUALS)) \ |
6947 | inform_for_arg (fundecl, (PLOC), parmnum, type, rhstype); \ |
6948 | } else { \ |
6949 | if (warning_at (PLOC, OPT, AR, parmnum, rname, QUALS)) \ |
6950 | inform_for_arg (fundecl, (PLOC), parmnum, type, rhstype); \ |
6951 | } \ |
6952 | } \ |
6953 | break; \ |
6954 | case ic_assign: \ |
6955 | if (PEDWARN) \ |
6956 | pedwarn (LOCATION, OPT, AS, QUALS); \ |
6957 | else \ |
6958 | warning_at (LOCATION, OPT, AS, QUALS); \ |
6959 | break; \ |
6960 | case ic_init: \ |
6961 | case ic_init_const: \ |
6962 | if (PEDWARN) \ |
6963 | pedwarn (LOCATION, OPT, IN, QUALS); \ |
6964 | else \ |
6965 | warning_at (LOCATION, OPT, IN, QUALS); \ |
6966 | break; \ |
6967 | case ic_return: \ |
6968 | if (PEDWARN) \ |
6969 | pedwarn (LOCATION, OPT, RE, QUALS); \ |
6970 | else \ |
6971 | warning_at (LOCATION, OPT, RE, QUALS); \ |
6972 | break; \ |
6973 | default: \ |
6974 | gcc_unreachable (); \ |
6975 | } \ |
6976 | } while (0) |
6977 | |
6978 | /* This macro is used to emit diagnostics to ensure that all format |
6979 | strings are complete sentences, visible to gettext and checked at |
6980 | compile time. It is the same as PEDWARN_FOR_ASSIGNMENT but with an |
6981 | extra parameter to enumerate qualifiers. */ |
6982 | #define PEDWARN_FOR_QUALIFIERS(LOCATION, PLOC, OPT, AR, AS, IN, RE, QUALS) \ |
6983 | WARNING_FOR_QUALIFIERS (true, LOCATION, PLOC, OPT, AR, AS, IN, RE, QUALS) |
6984 | |
6985 | |
6986 | if (TREE_CODE (rhs) == EXCESS_PRECISION_EXPR) |
6987 | rhs = TREE_OPERAND (rhs, 0); |
6988 | |
6989 | rhstype = TREE_TYPE (rhs); |
6990 | coder = TREE_CODE (rhstype); |
6991 | |
6992 | if (coder == ERROR_MARK) |
6993 | return error_mark_node; |
6994 | |
6995 | if (c_dialect_objc ()) |
6996 | { |
6997 | int parmno; |
6998 | |
6999 | switch (errtype) |
7000 | { |
7001 | case ic_return: |
7002 | parmno = 0; |
7003 | break; |
7004 | |
7005 | case ic_assign: |
7006 | parmno = -1; |
7007 | break; |
7008 | |
7009 | case ic_init: |
7010 | case ic_init_const: |
7011 | parmno = -2; |
7012 | break; |
7013 | |
7014 | default: |
7015 | parmno = parmnum; |
7016 | break; |
7017 | } |
7018 | |
7019 | objc_ok = objc_compare_types (type, rhstype, parmno, rname); |
7020 | } |
7021 | |
7022 | if (warn_cxx_compat) |
7023 | { |
7024 | tree checktype = origtype != NULL_TREE ? origtype : rhstype; |
7025 | if (checktype != error_mark_node |
7026 | && TREE_CODE (type) == ENUMERAL_TYPE |
7027 | && TYPE_MAIN_VARIANT (checktype) != TYPE_MAIN_VARIANT (type)) |
7028 | switch (errtype) |
7029 | { |
7030 | case ic_argpass: |
7031 | if (pedwarn (expr_loc, OPT_Wc___compat, "enum conversion when " |
7032 | "passing argument %d of %qE is invalid in C++" , |
7033 | parmnum, rname)) |
7034 | inform ((fundecl && !DECL_IS_UNDECLARED_BUILTIN (fundecl)) |
7035 | ? DECL_SOURCE_LOCATION (fundecl) : expr_loc, |
7036 | "expected %qT but argument is of type %qT" , |
7037 | type, rhstype); |
7038 | break; |
7039 | case ic_assign: |
7040 | pedwarn (location, OPT_Wc___compat, "enum conversion from %qT to " |
7041 | "%qT in assignment is invalid in C++" , rhstype, type); |
7042 | break; |
7043 | case ic_init: |
7044 | case ic_init_const: |
7045 | pedwarn_init (loc: location, opt: OPT_Wc___compat, gmsgid: "enum conversion from " |
7046 | "%qT to %qT in initialization is invalid in C++" , |
7047 | rhstype, type); |
7048 | break; |
7049 | case ic_return: |
7050 | pedwarn (location, OPT_Wc___compat, "enum conversion from %qT to " |
7051 | "%qT in return is invalid in C++" , rhstype, type); |
7052 | break; |
7053 | default: |
7054 | gcc_unreachable (); |
7055 | } |
7056 | } |
7057 | |
7058 | if (warn_enum_conversion) |
7059 | { |
7060 | tree checktype = origtype != NULL_TREE ? origtype : rhstype; |
7061 | if (checktype != error_mark_node |
7062 | && TREE_CODE (checktype) == ENUMERAL_TYPE |
7063 | && TREE_CODE (type) == ENUMERAL_TYPE |
7064 | && TYPE_MAIN_VARIANT (checktype) != TYPE_MAIN_VARIANT (type)) |
7065 | { |
7066 | gcc_rich_location loc (location); |
7067 | warning_at (&loc, OPT_Wenum_conversion, |
7068 | "implicit conversion from %qT to %qT" , |
7069 | checktype, type); |
7070 | } |
7071 | } |
7072 | |
7073 | if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (rhstype)) |
7074 | { |
7075 | warn_for_address_or_pointer_of_packed_member (type, orig_rhs); |
7076 | return rhs; |
7077 | } |
7078 | |
7079 | if (coder == VOID_TYPE) |
7080 | { |
7081 | /* Except for passing an argument to an unprototyped function, |
7082 | this is a constraint violation. When passing an argument to |
7083 | an unprototyped function, it is compile-time undefined; |
7084 | making it a constraint in that case was rejected in |
7085 | DR#252. */ |
7086 | const char msg[] = "void value not ignored as it ought to be" ; |
7087 | if (warnopt) |
7088 | warning_at (location, warnopt, msg); |
7089 | else |
7090 | error_at (location, msg); |
7091 | return error_mark_node; |
7092 | } |
7093 | rhs = require_complete_type (loc: location, value: rhs); |
7094 | if (rhs == error_mark_node) |
7095 | return error_mark_node; |
7096 | |
7097 | if (coder == POINTER_TYPE && reject_gcc_builtin (rhs)) |
7098 | return error_mark_node; |
7099 | |
7100 | /* A non-reference type can convert to a reference. This handles |
7101 | va_start, va_copy and possibly port built-ins. */ |
7102 | if (codel == REFERENCE_TYPE && coder != REFERENCE_TYPE) |
7103 | { |
7104 | if (!lvalue_p (ref: rhs)) |
7105 | { |
7106 | const char msg[] = "cannot pass rvalue to reference parameter" ; |
7107 | if (warnopt) |
7108 | warning_at (location, warnopt, msg); |
7109 | else |
7110 | error_at (location, msg); |
7111 | return error_mark_node; |
7112 | } |
7113 | if (!c_mark_addressable (exp: rhs)) |
7114 | return error_mark_node; |
7115 | rhs = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (rhs)), rhs); |
7116 | SET_EXPR_LOCATION (rhs, location); |
7117 | |
7118 | rhs = convert_for_assignment (location, expr_loc, |
7119 | type: build_pointer_type (TREE_TYPE (type)), |
7120 | rhs, origtype, errtype, |
7121 | null_pointer_constant, fundecl, function, |
7122 | parmnum, warnopt); |
7123 | if (rhs == error_mark_node) |
7124 | return error_mark_node; |
7125 | |
7126 | rhs = build1 (NOP_EXPR, type, rhs); |
7127 | SET_EXPR_LOCATION (rhs, location); |
7128 | return rhs; |
7129 | } |
7130 | /* Some types can interconvert without explicit casts. */ |
7131 | else if (codel == VECTOR_TYPE && coder == VECTOR_TYPE |
7132 | && vector_types_convertible_p (t1: type, TREE_TYPE (rhs), emit_lax_note: true)) |
7133 | return convert (type, rhs); |
7134 | /* Arithmetic types all interconvert, and enum is treated like int. */ |
7135 | else if ((codel == INTEGER_TYPE || codel == REAL_TYPE |
7136 | || codel == FIXED_POINT_TYPE |
7137 | || codel == ENUMERAL_TYPE || codel == COMPLEX_TYPE |
7138 | || codel == BOOLEAN_TYPE || codel == BITINT_TYPE) |
7139 | && (coder == INTEGER_TYPE || coder == REAL_TYPE |
7140 | || coder == FIXED_POINT_TYPE |
7141 | || coder == ENUMERAL_TYPE || coder == COMPLEX_TYPE |
7142 | || coder == BOOLEAN_TYPE || coder == BITINT_TYPE)) |
7143 | { |
7144 | if (warnopt && errtype == ic_argpass) |
7145 | maybe_warn_builtin_no_proto_arg (loc: expr_loc, fundecl, parmnum, parmtype: type, |
7146 | argtype: rhstype); |
7147 | |
7148 | bool save = in_late_binary_op; |
7149 | if (C_BOOLEAN_TYPE_P (type) || codel == COMPLEX_TYPE |
7150 | || (coder == REAL_TYPE |
7151 | && (codel == INTEGER_TYPE || codel == ENUMERAL_TYPE) |
7152 | && sanitize_flags_p (flag: SANITIZE_FLOAT_CAST))) |
7153 | in_late_binary_op = true; |
7154 | tree ret = convert_and_check (expr_loc != UNKNOWN_LOCATION |
7155 | ? expr_loc : location, type, orig_rhs, |
7156 | errtype == ic_init_const); |
7157 | in_late_binary_op = save; |
7158 | return ret; |
7159 | } |
7160 | |
7161 | /* Aggregates in different TUs might need conversion. */ |
7162 | if ((codel == RECORD_TYPE || codel == UNION_TYPE) |
7163 | && codel == coder |
7164 | && comptypes (type1: type, type2: rhstype)) |
7165 | return convert_and_check (expr_loc != UNKNOWN_LOCATION |
7166 | ? expr_loc : location, type, rhs); |
7167 | |
7168 | /* Conversion to a transparent union or record from its member types. |
7169 | This applies only to function arguments. */ |
7170 | if (((codel == UNION_TYPE || codel == RECORD_TYPE) |
7171 | && TYPE_TRANSPARENT_AGGR (type)) |
7172 | && errtype == ic_argpass) |
7173 | { |
7174 | tree memb, marginal_memb = NULL_TREE; |
7175 | |
7176 | for (memb = TYPE_FIELDS (type); memb ; memb = DECL_CHAIN (memb)) |
7177 | { |
7178 | tree memb_type = TREE_TYPE (memb); |
7179 | |
7180 | if (comptypes (TYPE_MAIN_VARIANT (memb_type), |
7181 | TYPE_MAIN_VARIANT (rhstype))) |
7182 | break; |
7183 | |
7184 | if (TREE_CODE (memb_type) != POINTER_TYPE) |
7185 | continue; |
7186 | |
7187 | if (coder == POINTER_TYPE) |
7188 | { |
7189 | tree ttl = TREE_TYPE (memb_type); |
7190 | tree ttr = TREE_TYPE (rhstype); |
7191 | |
7192 | /* Any non-function converts to a [const][volatile] void * |
7193 | and vice versa; otherwise, targets must be the same. |
7194 | Meanwhile, the lhs target must have all the qualifiers of |
7195 | the rhs. */ |
7196 | if ((VOID_TYPE_P (ttl) && !TYPE_ATOMIC (ttl)) |
7197 | || (VOID_TYPE_P (ttr) && !TYPE_ATOMIC (ttr)) |
7198 | || comp_target_types (location, ttl: memb_type, ttr: rhstype)) |
7199 | { |
7200 | int lquals = TYPE_QUALS (ttl) & ~TYPE_QUAL_ATOMIC; |
7201 | int rquals = TYPE_QUALS (ttr) & ~TYPE_QUAL_ATOMIC; |
7202 | /* If this type won't generate any warnings, use it. */ |
7203 | if (lquals == rquals |
7204 | || ((TREE_CODE (ttr) == FUNCTION_TYPE |
7205 | && TREE_CODE (ttl) == FUNCTION_TYPE) |
7206 | ? ((lquals | rquals) == rquals) |
7207 | : ((lquals | rquals) == lquals))) |
7208 | break; |
7209 | |
7210 | /* Keep looking for a better type, but remember this one. */ |
7211 | if (!marginal_memb) |
7212 | marginal_memb = memb; |
7213 | } |
7214 | } |
7215 | |
7216 | /* Can convert integer zero to any pointer type. */ |
7217 | if (null_pointer_constant) |
7218 | { |
7219 | rhs = null_pointer_node; |
7220 | break; |
7221 | } |
7222 | } |
7223 | |
7224 | if (memb || marginal_memb) |
7225 | { |
7226 | if (!memb) |
7227 | { |
7228 | /* We have only a marginally acceptable member type; |
7229 | it needs a warning. */ |
7230 | tree ttl = TREE_TYPE (TREE_TYPE (marginal_memb)); |
7231 | tree ttr = TREE_TYPE (rhstype); |
7232 | |
7233 | /* Const and volatile mean something different for function |
7234 | types, so the usual warnings are not appropriate. */ |
7235 | if (TREE_CODE (ttr) == FUNCTION_TYPE |
7236 | && TREE_CODE (ttl) == FUNCTION_TYPE) |
7237 | { |
7238 | /* Because const and volatile on functions are |
7239 | restrictions that say the function will not do |
7240 | certain things, it is okay to use a const or volatile |
7241 | function where an ordinary one is wanted, but not |
7242 | vice-versa. */ |
7243 | if (TYPE_QUALS_NO_ADDR_SPACE (ttl) |
7244 | & ~TYPE_QUALS_NO_ADDR_SPACE (ttr)) |
7245 | PEDWARN_FOR_QUALIFIERS (location, expr_loc, |
7246 | OPT_Wdiscarded_qualifiers, |
7247 | G_("passing argument %d of %qE " |
7248 | "makes %q#v qualified function " |
7249 | "pointer from unqualified" ), |
7250 | G_("assignment makes %q#v qualified " |
7251 | "function pointer from " |
7252 | "unqualified" ), |
7253 | G_("initialization makes %q#v qualified " |
7254 | "function pointer from " |
7255 | "unqualified" ), |
7256 | G_("return makes %q#v qualified function " |
7257 | "pointer from unqualified" ), |
7258 | TYPE_QUALS (ttl) & ~TYPE_QUALS (ttr)); |
7259 | } |
7260 | else if (TYPE_QUALS_NO_ADDR_SPACE (ttr) |
7261 | & ~TYPE_QUALS_NO_ADDR_SPACE (ttl)) |
7262 | PEDWARN_FOR_QUALIFIERS (location, expr_loc, |
7263 | OPT_Wdiscarded_qualifiers, |
7264 | G_("passing argument %d of %qE discards " |
7265 | "%qv qualifier from pointer target type" ), |
7266 | G_("assignment discards %qv qualifier " |
7267 | "from pointer target type" ), |
7268 | G_("initialization discards %qv qualifier " |
7269 | "from pointer target type" ), |
7270 | G_("return discards %qv qualifier from " |
7271 | "pointer target type" ), |
7272 | TYPE_QUALS (ttr) & ~TYPE_QUALS (ttl)); |
7273 | |
7274 | memb = marginal_memb; |
7275 | } |
7276 | |
7277 | if (!fundecl || !DECL_IN_SYSTEM_HEADER (fundecl)) |
7278 | pedwarn (location, OPT_Wpedantic, |
7279 | "ISO C prohibits argument conversion to union type" ); |
7280 | |
7281 | rhs = fold_convert_loc (location, TREE_TYPE (memb), rhs); |
7282 | return build_constructor_single (type, memb, rhs); |
7283 | } |
7284 | } |
7285 | |
7286 | /* Conversions among pointers */ |
7287 | else if ((codel == POINTER_TYPE || codel == REFERENCE_TYPE) |
7288 | && (coder == codel)) |
7289 | { |
7290 | /* If RHS refers to a built-in declared without a prototype |
7291 | BLTIN is the declaration of the built-in with a prototype |
7292 | and RHSTYPE is set to the actual type of the built-in. */ |
7293 | tree bltin; |
7294 | rhstype = type_or_builtin_type (expr: rhs, bltin: &bltin); |
7295 | |
7296 | tree ttl = TREE_TYPE (type); |
7297 | tree ttr = TREE_TYPE (rhstype); |
7298 | tree mvl = ttl; |
7299 | tree mvr = ttr; |
7300 | bool is_opaque_pointer; |
7301 | bool target_cmp = false; /* Cache comp_target_types () result. */ |
7302 | addr_space_t asl; |
7303 | addr_space_t asr; |
7304 | |
7305 | if (TREE_CODE (mvl) != ARRAY_TYPE) |
7306 | mvl = (TYPE_ATOMIC (mvl) |
7307 | ? c_build_qualified_type (TYPE_MAIN_VARIANT (mvl), |
7308 | TYPE_QUAL_ATOMIC) |
7309 | : TYPE_MAIN_VARIANT (mvl)); |
7310 | if (TREE_CODE (mvr) != ARRAY_TYPE) |
7311 | mvr = (TYPE_ATOMIC (mvr) |
7312 | ? c_build_qualified_type (TYPE_MAIN_VARIANT (mvr), |
7313 | TYPE_QUAL_ATOMIC) |
7314 | : TYPE_MAIN_VARIANT (mvr)); |
7315 | /* Opaque pointers are treated like void pointers. */ |
7316 | is_opaque_pointer = vector_targets_convertible_p (t1: ttl, t2: ttr); |
7317 | |
7318 | /* The Plan 9 compiler permits a pointer to a struct to be |
7319 | automatically converted into a pointer to an anonymous field |
7320 | within the struct. */ |
7321 | if (flag_plan9_extensions |
7322 | && RECORD_OR_UNION_TYPE_P (mvl) |
7323 | && RECORD_OR_UNION_TYPE_P (mvr) |
7324 | && mvl != mvr) |
7325 | { |
7326 | tree new_rhs = convert_to_anonymous_field (location, type, rhs); |
7327 | if (new_rhs != NULL_TREE) |
7328 | { |
7329 | rhs = new_rhs; |
7330 | rhstype = TREE_TYPE (rhs); |
7331 | coder = TREE_CODE (rhstype); |
7332 | ttr = TREE_TYPE (rhstype); |
7333 | mvr = TYPE_MAIN_VARIANT (ttr); |
7334 | } |
7335 | } |
7336 | |
7337 | /* C++ does not allow the implicit conversion void* -> T*. However, |
7338 | for the purpose of reducing the number of false positives, we |
7339 | tolerate the special case of |
7340 | |
7341 | int *p = NULL; |
7342 | |
7343 | where NULL is typically defined in C to be '(void *) 0'. */ |
7344 | if (VOID_TYPE_P (ttr) && rhs != null_pointer_node && !VOID_TYPE_P (ttl)) |
7345 | warning_at (errtype == ic_argpass ? expr_loc : location, |
7346 | OPT_Wc___compat, |
7347 | "request for implicit conversion " |
7348 | "from %qT to %qT not permitted in C++" , rhstype, type); |
7349 | |
7350 | /* Warn of new allocations that are not big enough for the target |
7351 | type. */ |
7352 | tree fndecl; |
7353 | if (warn_alloc_size |
7354 | && TREE_CODE (rhs) == CALL_EXPR |
7355 | && (fndecl = get_callee_fndecl (rhs)) != NULL_TREE |
7356 | && DECL_IS_MALLOC (fndecl)) |
7357 | { |
7358 | tree fntype = TREE_TYPE (fndecl); |
7359 | tree fntypeattrs = TYPE_ATTRIBUTES (fntype); |
7360 | tree alloc_size = lookup_attribute (attr_name: "alloc_size" , list: fntypeattrs); |
7361 | if (alloc_size) |
7362 | { |
7363 | tree args = TREE_VALUE (alloc_size); |
7364 | int idx = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1; |
7365 | /* For calloc only use the second argument. */ |
7366 | if (TREE_CHAIN (args)) |
7367 | idx = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args))) - 1; |
7368 | tree arg = CALL_EXPR_ARG (rhs, idx); |
7369 | if (TREE_CODE (arg) == INTEGER_CST |
7370 | && !VOID_TYPE_P (ttl) && TYPE_SIZE_UNIT (ttl) |
7371 | && INTEGER_CST == TREE_CODE (TYPE_SIZE_UNIT (ttl)) |
7372 | && tree_int_cst_lt (t1: arg, TYPE_SIZE_UNIT (ttl))) |
7373 | warning_at (location, OPT_Walloc_size, "allocation of " |
7374 | "insufficient size %qE for type %qT with " |
7375 | "size %qE" , arg, ttl, TYPE_SIZE_UNIT (ttl)); |
7376 | } |
7377 | } |
7378 | |
7379 | /* See if the pointers point to incompatible address spaces. */ |
7380 | asl = TYPE_ADDR_SPACE (ttl); |
7381 | asr = TYPE_ADDR_SPACE (ttr); |
7382 | if (!null_pointer_constant_p (expr: rhs) |
7383 | && asr != asl && !targetm.addr_space.subset_p (asr, asl)) |
7384 | { |
7385 | auto_diagnostic_group d; |
7386 | bool diagnosed = true; |
7387 | switch (errtype) |
7388 | { |
7389 | case ic_argpass: |
7390 | { |
7391 | const char msg[] = G_("passing argument %d of %qE from " |
7392 | "pointer to non-enclosed address space" ); |
7393 | if (warnopt) |
7394 | diagnosed |
7395 | = warning_at (expr_loc, warnopt, msg, parmnum, rname); |
7396 | else |
7397 | error_at (expr_loc, msg, parmnum, rname); |
7398 | break; |
7399 | } |
7400 | case ic_assign: |
7401 | { |
7402 | const char msg[] = G_("assignment from pointer to " |
7403 | "non-enclosed address space" ); |
7404 | if (warnopt) |
7405 | diagnosed = warning_at (location, warnopt, msg); |
7406 | else |
7407 | error_at (location, msg); |
7408 | break; |
7409 | } |
7410 | case ic_init: |
7411 | case ic_init_const: |
7412 | { |
7413 | const char msg[] = G_("initialization from pointer to " |
7414 | "non-enclosed address space" ); |
7415 | if (warnopt) |
7416 | diagnosed = warning_at (location, warnopt, msg); |
7417 | else |
7418 | error_at (location, msg); |
7419 | break; |
7420 | } |
7421 | case ic_return: |
7422 | { |
7423 | const char msg[] = G_("return from pointer to " |
7424 | "non-enclosed address space" ); |
7425 | if (warnopt) |
7426 | diagnosed = warning_at (location, warnopt, msg); |
7427 | else |
7428 | error_at (location, msg); |
7429 | break; |
7430 | } |
7431 | default: |
7432 | gcc_unreachable (); |
7433 | } |
7434 | if (diagnosed) |
7435 | { |
7436 | if (errtype == ic_argpass) |
7437 | inform_for_arg (fundecl, ploc: expr_loc, parmnum, expected_type: type, actual_type: rhstype); |
7438 | else |
7439 | inform (location, "expected %qT but pointer is of type %qT" , |
7440 | type, rhstype); |
7441 | } |
7442 | return error_mark_node; |
7443 | } |
7444 | |
7445 | /* Check if the right-hand side has a format attribute but the |
7446 | left-hand side doesn't. */ |
7447 | if (warn_suggest_attribute_format |
7448 | && check_missing_format_attribute (type, rhstype)) |
7449 | { |
7450 | switch (errtype) |
7451 | { |
7452 | case ic_argpass: |
7453 | warning_at (expr_loc, OPT_Wsuggest_attribute_format, |
7454 | "argument %d of %qE might be " |
7455 | "a candidate for a format attribute" , |
7456 | parmnum, rname); |
7457 | break; |
7458 | case ic_assign: |
7459 | warning_at (location, OPT_Wsuggest_attribute_format, |
7460 | "assignment left-hand side might be " |
7461 | "a candidate for a format attribute" ); |
7462 | break; |
7463 | case ic_init: |
7464 | case ic_init_const: |
7465 | warning_at (location, OPT_Wsuggest_attribute_format, |
7466 | "initialization left-hand side might be " |
7467 | "a candidate for a format attribute" ); |
7468 | break; |
7469 | case ic_return: |
7470 | warning_at (location, OPT_Wsuggest_attribute_format, |
7471 | "return type might be " |
7472 | "a candidate for a format attribute" ); |
7473 | break; |
7474 | default: |
7475 | gcc_unreachable (); |
7476 | } |
7477 | } |
7478 | |
7479 | /* See if the pointers point to incompatible scalar storage orders. */ |
7480 | if (warn_scalar_storage_order |
7481 | && !null_pointer_constant_p (expr: rhs) |
7482 | && (AGGREGATE_TYPE_P (ttl) && TYPE_REVERSE_STORAGE_ORDER (ttl)) |
7483 | != (AGGREGATE_TYPE_P (ttr) && TYPE_REVERSE_STORAGE_ORDER (ttr))) |
7484 | { |
7485 | tree t; |
7486 | |
7487 | switch (errtype) |
7488 | { |
7489 | case ic_argpass: |
7490 | /* Do not warn for built-in functions, for example memcpy, since we |
7491 | control how they behave and they can be useful in this area. */ |
7492 | if (TREE_CODE (rname) != FUNCTION_DECL |
7493 | || !fndecl_built_in_p (node: rname)) |
7494 | warning_at (location, OPT_Wscalar_storage_order, |
7495 | "passing argument %d of %qE from incompatible " |
7496 | "scalar storage order" , parmnum, rname); |
7497 | break; |
7498 | case ic_assign: |
7499 | /* Do not warn if the RHS is a call to a function that returns a |
7500 | pointer that is not an alias. */ |
7501 | if (TREE_CODE (rhs) != CALL_EXPR |
7502 | || (t = get_callee_fndecl (rhs)) == NULL_TREE |
7503 | || !DECL_IS_MALLOC (t)) |
7504 | warning_at (location, OPT_Wscalar_storage_order, |
7505 | "assignment to %qT from pointer type %qT with " |
7506 | "incompatible scalar storage order" , type, rhstype); |
7507 | break; |
7508 | case ic_init: |
7509 | case ic_init_const: |
7510 | /* Likewise. */ |
7511 | if (TREE_CODE (rhs) != CALL_EXPR |
7512 | || (t = get_callee_fndecl (rhs)) == NULL_TREE |
7513 | || !DECL_IS_MALLOC (t)) |
7514 | warning_at (location, OPT_Wscalar_storage_order, |
7515 | "initialization of %qT from pointer type %qT with " |
7516 | "incompatible scalar storage order" , type, rhstype); |
7517 | break; |
7518 | case ic_return: |
7519 | warning_at (location, OPT_Wscalar_storage_order, |
7520 | "returning %qT from pointer type with incompatible " |
7521 | "scalar storage order %qT" , rhstype, type); |
7522 | break; |
7523 | default: |
7524 | gcc_unreachable (); |
7525 | } |
7526 | } |
7527 | |
7528 | /* Any non-function converts to a [const][volatile] void * |
7529 | and vice versa; otherwise, targets must be the same. |
7530 | Meanwhile, the lhs target must have all the qualifiers of the rhs. */ |
7531 | if ((VOID_TYPE_P (ttl) && !TYPE_ATOMIC (ttl)) |
7532 | || (VOID_TYPE_P (ttr) && !TYPE_ATOMIC (ttr)) |
7533 | || (target_cmp = comp_target_types (location, ttl: type, ttr: rhstype)) |
7534 | || is_opaque_pointer |
7535 | || ((c_common_unsigned_type (mvl) |
7536 | == c_common_unsigned_type (mvr)) |
7537 | && (c_common_signed_type (mvl) |
7538 | == c_common_signed_type (mvr)) |
7539 | && TYPE_ATOMIC (mvl) == TYPE_ATOMIC (mvr))) |
7540 | { |
	  /* Warn about loss of qualifiers from pointers to arrays with
	     qualifiers on the element type. */
7543 | if (TREE_CODE (ttr) == ARRAY_TYPE) |
7544 | { |
7545 | ttr = strip_array_types (type: ttr); |
7546 | ttl = strip_array_types (type: ttl); |
7547 | |
7548 | if (TYPE_QUALS_NO_ADDR_SPACE_NO_ATOMIC (ttr) |
7549 | & ~TYPE_QUALS_NO_ADDR_SPACE_NO_ATOMIC (ttl)) |
7550 | WARNING_FOR_QUALIFIERS (flag_isoc23, |
7551 | location, expr_loc, |
7552 | OPT_Wdiscarded_array_qualifiers, |
7553 | G_("passing argument %d of %qE discards " |
7554 | "%qv qualifier from pointer target type" ), |
7555 | G_("assignment discards %qv qualifier " |
7556 | "from pointer target type" ), |
7557 | G_("initialization discards %qv qualifier " |
7558 | "from pointer target type" ), |
7559 | G_("return discards %qv qualifier from " |
7560 | "pointer target type" ), |
7561 | TYPE_QUALS (ttr) & ~TYPE_QUALS (ttl)); |
7562 | } |
7563 | else if (pedantic |
7564 | && ((VOID_TYPE_P (ttl) && TREE_CODE (ttr) == FUNCTION_TYPE) |
7565 | || |
7566 | (VOID_TYPE_P (ttr) |
7567 | && !null_pointer_constant |
7568 | && TREE_CODE (ttl) == FUNCTION_TYPE))) |
7569 | PEDWARN_FOR_ASSIGNMENT (location, expr_loc, OPT_Wpedantic, |
7570 | G_("ISO C forbids passing argument %d of " |
7571 | "%qE between function pointer " |
7572 | "and %<void *%>" ), |
7573 | G_("ISO C forbids assignment between " |
7574 | "function pointer and %<void *%>" ), |
7575 | G_("ISO C forbids initialization between " |
7576 | "function pointer and %<void *%>" ), |
7577 | G_("ISO C forbids return between function " |
7578 | "pointer and %<void *%>" )); |
7579 | /* Const and volatile mean something different for function types, |
7580 | so the usual warnings are not appropriate. */ |
7581 | else if (TREE_CODE (ttr) != FUNCTION_TYPE |
7582 | && TREE_CODE (ttl) != FUNCTION_TYPE) |
7583 | { |
7584 | /* Assignments between atomic and non-atomic objects are OK. */ |
7585 | bool warn_quals_ped = TYPE_QUALS_NO_ADDR_SPACE_NO_ATOMIC (ttr) |
7586 | & ~TYPE_QUALS_NO_ADDR_SPACE_NO_ATOMIC (ttl); |
7587 | bool warn_quals = TYPE_QUALS_NO_ADDR_SPACE_NO_ATOMIC (ttr) |
7588 | & ~TYPE_QUALS_NO_ADDR_SPACE_NO_ATOMIC (strip_array_types (ttl)); |
7589 | |
7590 | /* Don't warn about loss of qualifier for conversions from |
7591 | qualified void* to pointers to arrays with corresponding |
7592 | qualifier on the element type (except for pedantic before C23). */ |
7593 | if (warn_quals || (warn_quals_ped && pedantic && !flag_isoc23)) |
7594 | PEDWARN_FOR_QUALIFIERS (location, expr_loc, |
7595 | OPT_Wdiscarded_qualifiers, |
7596 | G_("passing argument %d of %qE discards " |
7597 | "%qv qualifier from pointer target type" ), |
7598 | G_("assignment discards %qv qualifier " |
7599 | "from pointer target type" ), |
7600 | G_("initialization discards %qv qualifier " |
7601 | "from pointer target type" ), |
7602 | G_("return discards %qv qualifier from " |
7603 | "pointer target type" ), |
7604 | TYPE_QUALS (ttr) & ~TYPE_QUALS (ttl)); |
7605 | else if (warn_quals_ped) |
7606 | pedwarn_c11 (location, opt: OPT_Wc11_c23_compat, |
7607 | "array with qualifier on the element is not qualified before C23" ); |
7608 | |
7609 | /* If this is not a case of ignoring a mismatch in signedness, |
7610 | no warning. */ |
7611 | else if (VOID_TYPE_P (ttl) || VOID_TYPE_P (ttr) |
7612 | || target_cmp) |
7613 | ; |
7614 | /* If there is a mismatch, do warn. */ |
7615 | else if (warn_pointer_sign) |
7616 | switch (errtype) |
7617 | { |
7618 | case ic_argpass: |
7619 | { |
7620 | auto_diagnostic_group d; |
7621 | range_label_for_type_mismatch rhs_label (rhstype, type); |
7622 | gcc_rich_location richloc (expr_loc, &rhs_label); |
7623 | if (pedwarn (&richloc, OPT_Wpointer_sign, |
7624 | "pointer targets in passing argument %d of " |
7625 | "%qE differ in signedness" , parmnum, rname)) |
7626 | inform_for_arg (fundecl, ploc: expr_loc, parmnum, expected_type: type, |
7627 | actual_type: rhstype); |
7628 | } |
7629 | break; |
7630 | case ic_assign: |
7631 | pedwarn (location, OPT_Wpointer_sign, |
7632 | "pointer targets in assignment from %qT to %qT " |
7633 | "differ in signedness" , rhstype, type); |
7634 | break; |
7635 | case ic_init: |
7636 | case ic_init_const: |
7637 | pedwarn_init (loc: location, opt: OPT_Wpointer_sign, |
7638 | gmsgid: "pointer targets in initialization of %qT " |
7639 | "from %qT differ in signedness" , type, |
7640 | rhstype); |
7641 | break; |
7642 | case ic_return: |
7643 | pedwarn (location, OPT_Wpointer_sign, "pointer targets in " |
7644 | "returning %qT from a function with return type " |
7645 | "%qT differ in signedness" , rhstype, type); |
7646 | break; |
7647 | default: |
7648 | gcc_unreachable (); |
7649 | } |
7650 | } |
7651 | else if (TREE_CODE (ttl) == FUNCTION_TYPE |
7652 | && TREE_CODE (ttr) == FUNCTION_TYPE) |
7653 | { |
7654 | /* Because const and volatile on functions are restrictions |
7655 | that say the function will not do certain things, |
7656 | it is okay to use a const or volatile function |
7657 | where an ordinary one is wanted, but not vice-versa. */ |
7658 | if (TYPE_QUALS_NO_ADDR_SPACE (ttl) |
7659 | & ~TYPE_QUALS_NO_ADDR_SPACE (ttr)) |
7660 | PEDWARN_FOR_QUALIFIERS (location, expr_loc, |
7661 | OPT_Wdiscarded_qualifiers, |
7662 | G_("passing argument %d of %qE makes " |
7663 | "%q#v qualified function pointer " |
7664 | "from unqualified" ), |
7665 | G_("assignment makes %q#v qualified function " |
7666 | "pointer from unqualified" ), |
7667 | G_("initialization makes %q#v qualified " |
7668 | "function pointer from unqualified" ), |
7669 | G_("return makes %q#v qualified function " |
7670 | "pointer from unqualified" ), |
7671 | TYPE_QUALS (ttl) & ~TYPE_QUALS (ttr)); |
7672 | } |
7673 | } |
7674 | /* Avoid warning about the volatile ObjC EH puts on decls. */ |
7675 | else if (!objc_ok) |
7676 | { |
7677 | switch (errtype) |
7678 | { |
7679 | case ic_argpass: |
7680 | { |
7681 | auto_diagnostic_group d; |
7682 | range_label_for_type_mismatch rhs_label (rhstype, type); |
7683 | gcc_rich_location richloc (expr_loc, &rhs_label); |
7684 | if (pedwarn (&richloc, OPT_Wincompatible_pointer_types, |
7685 | "passing argument %d of %qE from incompatible " |
7686 | "pointer type" , parmnum, rname)) |
7687 | inform_for_arg (fundecl, ploc: expr_loc, parmnum, expected_type: type, actual_type: rhstype); |
7688 | } |
7689 | break; |
7690 | case ic_assign: |
7691 | if (bltin) |
7692 | pedwarn (location, OPT_Wincompatible_pointer_types, |
7693 | "assignment to %qT from pointer to " |
7694 | "%qD with incompatible type %qT" , |
7695 | type, bltin, rhstype); |
7696 | else |
7697 | pedwarn (location, OPT_Wincompatible_pointer_types, |
7698 | "assignment to %qT from incompatible pointer type %qT" , |
7699 | type, rhstype); |
7700 | break; |
7701 | case ic_init: |
7702 | case ic_init_const: |
7703 | if (bltin) |
7704 | pedwarn_init (loc: location, opt: OPT_Wincompatible_pointer_types, |
7705 | gmsgid: "initialization of %qT from pointer to " |
7706 | "%qD with incompatible type %qT" , |
7707 | type, bltin, rhstype); |
7708 | else |
7709 | pedwarn_init (loc: location, opt: OPT_Wincompatible_pointer_types, |
7710 | gmsgid: "initialization of %qT from incompatible " |
7711 | "pointer type %qT" , |
7712 | type, rhstype); |
7713 | break; |
7714 | case ic_return: |
7715 | if (bltin) |
7716 | pedwarn (location, OPT_Wincompatible_pointer_types, |
7717 | "returning pointer to %qD of type %qT from " |
7718 | "a function with incompatible type %qT" , |
7719 | bltin, rhstype, type); |
7720 | else |
7721 | pedwarn (location, OPT_Wincompatible_pointer_types, |
7722 | "returning %qT from a function with incompatible " |
7723 | "return type %qT" , rhstype, type); |
7724 | break; |
7725 | default: |
7726 | gcc_unreachable (); |
7727 | } |
7728 | } |
7729 | |
7730 | /* If RHS isn't an address, check pointer or array of packed |
7731 | struct or union. */ |
7732 | warn_for_address_or_pointer_of_packed_member (type, orig_rhs); |
7733 | |
7734 | return convert (type, rhs); |
7735 | } |
7736 | else if (codel == POINTER_TYPE && coder == ARRAY_TYPE) |
7737 | { |
7738 | /* ??? This should not be an error when inlining calls to |
7739 | unprototyped functions. */ |
7740 | const char msg[] = "invalid use of non-lvalue array" ; |
7741 | if (warnopt) |
7742 | warning_at (location, warnopt, msg); |
7743 | else |
7744 | error_at (location, msg); |
7745 | return error_mark_node; |
7746 | } |
7747 | else if (codel == POINTER_TYPE |
7748 | && (coder == INTEGER_TYPE |
7749 | || coder == NULLPTR_TYPE |
7750 | || coder == BITINT_TYPE)) |
7751 | { |
7752 | /* An explicit constant 0 or type nullptr_t can convert to a pointer, |
7753 | or one that results from arithmetic, even including a cast to |
7754 | integer type. */ |
7755 | if (!null_pointer_constant && coder != NULLPTR_TYPE) |
7756 | switch (errtype) |
7757 | { |
7758 | case ic_argpass: |
7759 | { |
7760 | auto_diagnostic_group d; |
7761 | range_label_for_type_mismatch rhs_label (rhstype, type); |
7762 | gcc_rich_location richloc (expr_loc, &rhs_label); |
7763 | if (pedwarn (&richloc, OPT_Wint_conversion, |
7764 | "passing argument %d of %qE makes pointer from " |
7765 | "integer without a cast" , parmnum, rname)) |
7766 | inform_for_arg (fundecl, ploc: expr_loc, parmnum, expected_type: type, actual_type: rhstype); |
7767 | } |
7768 | break; |
7769 | case ic_assign: |
7770 | pedwarn (location, OPT_Wint_conversion, |
7771 | "assignment to %qT from %qT makes pointer from integer " |
7772 | "without a cast" , type, rhstype); |
7773 | break; |
7774 | case ic_init: |
7775 | case ic_init_const: |
7776 | pedwarn_init (loc: location, opt: OPT_Wint_conversion, |
7777 | gmsgid: "initialization of %qT from %qT makes pointer from " |
7778 | "integer without a cast" , type, rhstype); |
7779 | break; |
7780 | case ic_return: |
7781 | pedwarn (location, OPT_Wint_conversion, "returning %qT from a " |
7782 | "function with return type %qT makes pointer from " |
7783 | "integer without a cast" , rhstype, type); |
7784 | break; |
7785 | default: |
7786 | gcc_unreachable (); |
7787 | } |
7788 | |
7789 | return convert (type, rhs); |
7790 | } |
7791 | else if ((codel == INTEGER_TYPE || codel == BITINT_TYPE) |
7792 | && coder == POINTER_TYPE) |
7793 | { |
7794 | switch (errtype) |
7795 | { |
7796 | case ic_argpass: |
7797 | { |
7798 | auto_diagnostic_group d; |
7799 | range_label_for_type_mismatch rhs_label (rhstype, type); |
7800 | gcc_rich_location richloc (expr_loc, &rhs_label); |
7801 | if (pedwarn (&richloc, OPT_Wint_conversion, |
7802 | "passing argument %d of %qE makes integer from " |
7803 | "pointer without a cast" , parmnum, rname)) |
7804 | inform_for_arg (fundecl, ploc: expr_loc, parmnum, expected_type: type, actual_type: rhstype); |
7805 | } |
7806 | break; |
7807 | case ic_assign: |
7808 | pedwarn (location, OPT_Wint_conversion, |
7809 | "assignment to %qT from %qT makes integer from pointer " |
7810 | "without a cast" , type, rhstype); |
7811 | break; |
7812 | case ic_init: |
7813 | case ic_init_const: |
7814 | pedwarn_init (loc: location, opt: OPT_Wint_conversion, |
7815 | gmsgid: "initialization of %qT from %qT makes integer from " |
7816 | "pointer without a cast" , type, rhstype); |
7817 | break; |
7818 | case ic_return: |
7819 | pedwarn (location, OPT_Wint_conversion, "returning %qT from a " |
7820 | "function with return type %qT makes integer from " |
7821 | "pointer without a cast" , rhstype, type); |
7822 | break; |
7823 | default: |
7824 | gcc_unreachable (); |
7825 | } |
7826 | |
7827 | return convert (type, rhs); |
7828 | } |
7829 | else if (C_BOOLEAN_TYPE_P (type) |
7830 | /* The type nullptr_t may be converted to bool. The |
7831 | result is false. */ |
7832 | && (coder == POINTER_TYPE || coder == NULLPTR_TYPE)) |
7833 | { |
7834 | tree ret; |
7835 | bool save = in_late_binary_op; |
7836 | in_late_binary_op = true; |
7837 | ret = convert (type, rhs); |
7838 | in_late_binary_op = save; |
7839 | return ret; |
7840 | } |
7841 | else if (codel == NULLPTR_TYPE && null_pointer_constant) |
7842 | return convert (type, rhs); |
7843 | |
7844 | switch (errtype) |
7845 | { |
7846 | case ic_argpass: |
7847 | { |
7848 | auto_diagnostic_group d; |
7849 | range_label_for_type_mismatch rhs_label (rhstype, type); |
7850 | gcc_rich_location richloc (expr_loc, &rhs_label); |
7851 | const char msg[] = G_("incompatible type for argument %d of %qE" ); |
7852 | if (warnopt) |
7853 | warning_at (expr_loc, warnopt, msg, parmnum, rname); |
7854 | else |
7855 | error_at (&richloc, msg, parmnum, rname); |
7856 | inform_for_arg (fundecl, ploc: expr_loc, parmnum, expected_type: type, actual_type: rhstype); |
7857 | } |
7858 | break; |
7859 | case ic_assign: |
7860 | { |
7861 | const char msg[] |
7862 | = G_("incompatible types when assigning to type %qT from type %qT" ); |
7863 | if (warnopt) |
7864 | warning_at (expr_loc, 0, msg, type, rhstype); |
7865 | else |
7866 | error_at (expr_loc, msg, type, rhstype); |
7867 | break; |
7868 | } |
7869 | case ic_init: |
7870 | case ic_init_const: |
7871 | { |
7872 | const char msg[] |
7873 | = G_("incompatible types when initializing type %qT using type %qT" ); |
7874 | if (warnopt) |
7875 | warning_at (location, 0, msg, type, rhstype); |
7876 | else |
7877 | error_at (location, msg, type, rhstype); |
7878 | break; |
7879 | } |
7880 | case ic_return: |
7881 | { |
7882 | const char msg[] |
7883 | = G_("incompatible types when returning type %qT but %qT was expected" ); |
7884 | if (warnopt) |
7885 | warning_at (location, 0, msg, rhstype, type); |
7886 | else |
7887 | error_at (location, msg, rhstype, type); |
7888 | break; |
7889 | } |
7890 | default: |
7891 | gcc_unreachable (); |
7892 | } |
7893 | |
7894 | return error_mark_node; |
7895 | } |
7896 | |
7897 | /* If VALUE is a compound expr all of whose expressions are constant, then |
7898 | return its value. Otherwise, return error_mark_node. |
7899 | |
7900 | This is for handling COMPOUND_EXPRs as initializer elements |
7901 | which is allowed with a warning when -pedantic is specified. */ |
7902 | |
7903 | static tree |
7904 | valid_compound_expr_initializer (tree value, tree endtype) |
7905 | { |
7906 | if (TREE_CODE (value) == COMPOUND_EXPR) |
7907 | { |
7908 | if (valid_compound_expr_initializer (TREE_OPERAND (value, 0), endtype) |
7909 | == error_mark_node) |
7910 | return error_mark_node; |
7911 | return valid_compound_expr_initializer (TREE_OPERAND (value, 1), |
7912 | endtype); |
7913 | } |
7914 | else if (!initializer_constant_valid_p (value, endtype)) |
7915 | return error_mark_node; |
7916 | else |
7917 | return value; |
7918 | } |
7919 | |
/* Perform appropriate conversions on the initial value of a variable,
   store it in the declaration DECL,
   and print any error messages that are appropriate.
   If ORIGTYPE is not NULL_TREE, it is the original type of INIT.
   If the init is invalid, store an ERROR_MARK.

   INIT_LOC is the location of the initial value.  */

void
store_init_value (location_t init_loc, tree decl, tree init, tree origtype)
{
  tree value, type;
  bool npc = false;
  bool int_const_expr = false;
  bool arith_const_expr = false;

  /* If variable's type was invalidly declared, just ignore it.  */

  type = TREE_TYPE (decl);
  if (TREE_CODE (type) == ERROR_MARK)
    return;

  /* Digest the specified initializer into an expression.  */

  if (init)
    {
      npc = null_pointer_constant_p (expr: init);
      /* An INTEGER_CST of integral type without overflow counts as an
	 integer constant expression for the constexpr checks below.  */
      int_const_expr = (TREE_CODE (init) == INTEGER_CST
			&& !TREE_OVERFLOW (init)
			&& INTEGRAL_TYPE_P (TREE_TYPE (init)));
      /* Not fully determined before folding.  */
      arith_const_expr = true;
    }
  /* constexpr objects get the stricter initializer requirements in
     digest_init even when they are not static.  */
  bool constexpr_p = (VAR_P (decl)
		      && C_DECL_DECLARED_CONSTEXPR (decl));
  value = digest_init (init_loc, type, init, origtype, npc, int_const_expr,
		       arith_const_expr, true,
		       TREE_STATIC (decl) || constexpr_p, constexpr_p);

  /* Store the expression if valid; else report error.  */

  /* -Wtraditional: pre-ISO compilers rejected initializing automatic
     aggregates; suppressed inside system headers.  */
  if (!in_system_header_at (loc: input_location)
      && AGGREGATE_TYPE_P (TREE_TYPE (decl)) && !TREE_STATIC (decl))
    warning (OPT_Wtraditional, "traditional C rejects automatic "
	     "aggregate initialization");

  if (value != error_mark_node || TREE_CODE (decl) != FUNCTION_DECL)
    DECL_INITIAL (decl) = value;

  /* ANSI wants warnings about out-of-range constant initializers.  */
  STRIP_TYPE_NOPS (value);
  if (TREE_STATIC (decl))
    constant_expression_warning (value);

  /* Check if we need to set array size from compound literal size.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      && TYPE_DOMAIN (type) == NULL_TREE
      && value != error_mark_node)
    {
      tree inside_init = init;

      STRIP_TYPE_NOPS (inside_init);
      inside_init = fold (inside_init);

      if (TREE_CODE (inside_init) == COMPOUND_LITERAL_EXPR)
	{
	  tree cldecl = COMPOUND_LITERAL_EXPR_DECL (inside_init);

	  if (TYPE_DOMAIN (TREE_TYPE (cldecl)))
	    {
	      /* For int foo[] = (int [3]){1}; we need to set array size
		 now since later on array initializer will be just the
		 brace enclosed list of the compound literal.  */
	      tree etype = strip_array_types (TREE_TYPE (decl));
	      type = build_distinct_type_copy (TYPE_MAIN_VARIANT (type));
	      TYPE_DOMAIN (type) = TYPE_DOMAIN (TREE_TYPE (cldecl));
	      layout_type (type);
	      layout_decl (cldecl, 0);
	      /* Reapply DECL's element qualifiers to the newly sized
		 array type.  */
	      TREE_TYPE (decl)
		= c_build_qualified_type (type, TYPE_QUALS (etype));
	    }
	}
    }
}
8004 | |
8005 | /* Methods for storing and printing names for error messages. */ |
8006 | |
/* Implement a spelling stack that allows components of a name to be pushed
   and popped.  Each element on the stack is this structure.  */

struct spelling
{
  /* One of the SPELLING_* constants below; selects the union member.  */
  int kind;
  union
  {
    /* SPELLING_BOUNDS: an array index, printed as "[i]".  */
    unsigned HOST_WIDE_INT i;
    /* SPELLING_STRING / SPELLING_MEMBER: text to print (not owned;
       must outlive the stack entry).  */
    const char *s;
  } u;
};

#define SPELLING_STRING 1
#define SPELLING_MEMBER 2
#define SPELLING_BOUNDS 3

static struct spelling *spelling;	/* Next stack element (unused).  */
static struct spelling *spelling_base;	/* Spelling stack base.  */
static int spelling_size;		/* Size of the spelling stack.  */
8027 | |
/* Macros to save and restore the spelling stack around push_... functions.
   Alternative to SAVE_SPELLING_STACK.  */

#define SPELLING_DEPTH() (spelling - spelling_base)
#define RESTORE_SPELLING_DEPTH(DEPTH) (spelling = spelling_base + (DEPTH))

/* Push an element on the spelling stack with type KIND and assign VALUE
   to MEMBER.  The stack grows by 10 elements at a time; XRESIZEVEC may
   move SPELLING_BASE, so the depth is saved before the reallocation and
   the SPELLING cursor is recomputed from it afterwards.  */

#define PUSH_SPELLING(KIND, VALUE, MEMBER)				\
{									\
  int depth = SPELLING_DEPTH ();					\
									\
  if (depth >= spelling_size)						\
    {									\
      spelling_size += 10;						\
      spelling_base = XRESIZEVEC (struct spelling, spelling_base,	\
				  spelling_size);			\
      RESTORE_SPELLING_DEPTH (depth);					\
    }									\
									\
  spelling->kind = (KIND);						\
  spelling->MEMBER = (VALUE);						\
  spelling++;								\
}
8053 | |
/* Push STRING on the stack.  Printed literally.  STRING is not copied;
   the caller must keep it live while the entry is on the stack.  */

static void
push_string (const char *string)
{
  PUSH_SPELLING (SPELLING_STRING, string, u.s);
}
8061 | |
8062 | /* Push a member name on the stack. Printed as '.' STRING. */ |
8063 | |
8064 | static void |
8065 | push_member_name (tree decl) |
8066 | { |
8067 | const char *const string |
8068 | = (DECL_NAME (decl) |
8069 | ? identifier_to_locale (IDENTIFIER_POINTER (DECL_NAME (decl))) |
8070 | : _("<anonymous>" )); |
8071 | PUSH_SPELLING (SPELLING_MEMBER, string, u.s); |
8072 | } |
8073 | |
/* Push an array bounds on the stack.  Printed as [BOUNDS].  */

static void
push_array_bounds (unsigned HOST_WIDE_INT bounds)
{
  PUSH_SPELLING (SPELLING_BOUNDS, bounds, u.i);
}
8081 | |
8082 | /* Compute the maximum size in bytes of the printed spelling. */ |
8083 | |
8084 | static int |
8085 | spelling_length (void) |
8086 | { |
8087 | int size = 0; |
8088 | struct spelling *p; |
8089 | |
8090 | for (p = spelling_base; p < spelling; p++) |
8091 | { |
8092 | if (p->kind == SPELLING_BOUNDS) |
8093 | size += 25; |
8094 | else |
8095 | size += strlen (s: p->u.s) + 1; |
8096 | } |
8097 | |
8098 | return size; |
8099 | } |
8100 | |
8101 | /* Print the spelling to BUFFER and return it. */ |
8102 | |
8103 | static char * |
8104 | print_spelling (char *buffer) |
8105 | { |
8106 | char *d = buffer; |
8107 | struct spelling *p; |
8108 | |
8109 | for (p = spelling_base; p < spelling; p++) |
8110 | if (p->kind == SPELLING_BOUNDS) |
8111 | { |
8112 | sprintf (s: d, format: "[" HOST_WIDE_INT_PRINT_UNSIGNED "]" , p->u.i); |
8113 | d += strlen (s: d); |
8114 | } |
8115 | else |
8116 | { |
8117 | const char *s; |
8118 | if (p->kind == SPELLING_MEMBER) |
8119 | *d++ = '.'; |
8120 | for (s = p->u.s; (*d = *s++); d++) |
8121 | ; |
8122 | } |
8123 | *d++ = '\0'; |
8124 | return buffer; |
8125 | } |
8126 | |
/* Check whether INIT, a floating or integer constant, is
   representable in TYPE, a real floating type with the same radix or
   a decimal floating type initialized with a binary floating
   constant.  Return true if OK, false if not.  */
static bool
constexpr_init_fits_real_type (tree type, tree init)
{
  gcc_assert (SCALAR_FLOAT_TYPE_P (type));
  gcc_assert (TREE_CODE (init) == INTEGER_CST || TREE_CODE (init) == REAL_CST);
  if (TREE_CODE (init) == REAL_CST
      && TYPE_MODE (TREE_TYPE (init)) == TYPE_MODE (type))
    {
      /* Same mode, no conversion required except for the case of
	 signaling NaNs if the types are incompatible (e.g. double and
	 long double with the same mode).  */
      if (REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (init))
	  && !comptypes (TYPE_MAIN_VARIANT (type),
			 TYPE_MAIN_VARIANT (TREE_TYPE (init))))
	return false;
      return true;
    }
  if (TREE_CODE (init) == INTEGER_CST)
    {
      /* Convert the integer to TYPE and back at the integer's own
	 precision; INIT fits exactly iff the round trip neither fails
	 nor changes the value.  */
      tree converted = build_real_from_int_cst (type, init);
      bool fail = false;
      wide_int w = real_to_integer (&TREE_REAL_CST (converted), &fail,
				    TYPE_PRECISION (TREE_TYPE (init)));
      return !fail && wi::eq_p (x: w, y: wi::to_wide (t: init));
    }
  /* From here on INIT is a REAL_CST in a different mode.  A signaling
     NaN is never considered to fit across modes.  */
  if (REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (init)))
    return false;
  /* Quiet NaN and infinity initializers are treated as preserved
     whenever the target mode can represent them at all.  */
  if ((REAL_VALUE_ISINF (TREE_REAL_CST (init))
       && MODE_HAS_INFINITIES (TYPE_MODE (type)))
      || (REAL_VALUE_ISNAN (TREE_REAL_CST (init))
	  && MODE_HAS_NANS (TYPE_MODE (type))))
    return true;
  if (DECIMAL_FLOAT_TYPE_P (type)
      && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (init)))
    {
      /* This is valid if the real number represented by the
	 initializer can be exactly represented in the decimal
	 type.  Compare the values using MPFR.  */
      REAL_VALUE_TYPE t;
      real_convert (&t, TYPE_MODE (type), &TREE_REAL_CST (init));
      mpfr_t bin_val, dec_val;
      /* Both MPFR values use the precision of the binary source
	 format, which is what must be reproduced exactly.  */
      mpfr_init2 (bin_val, REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (init)))->p);
      mpfr_init2 (dec_val, REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (init)))->p);
      mpfr_from_real (bin_val, &TREE_REAL_CST (init), MPFR_RNDN);
      /* Render the converted decimal value as a string and reparse it
	 with MPFR; an inexact parse or an unequal value means the
	 decimal type cannot hold the binary constant exactly.  */
      char string[256];
      real_to_decimal (string, &t, sizeof string, 0, 1);
      bool res = (mpfr_strtofr (dec_val, string, NULL, 10, MPFR_RNDN) == 0
		  && mpfr_equal_p (bin_val, dec_val));
      mpfr_clear (bin_val);
      mpfr_clear (dec_val);
      return res;
    }
  /* exact_real_truncate is not quite right here, since it doesn't
     allow even an exact conversion to subnormal values.  */
  REAL_VALUE_TYPE t;
  real_convert (&t, TYPE_MODE (type), &TREE_REAL_CST (init));
  return real_identical (&t, &TREE_REAL_CST (init));
}
8189 | |
/* Check whether INIT (location LOC) is valid as a 'constexpr'
   initializer for type TYPE, and give an error if not.  INIT has
   already been folded and verified to be constant.  INT_CONST_EXPR
   and ARITH_CONST_EXPR say whether it is an integer constant
   expression or arithmetic constant expression, respectively.  If
   TYPE is not a scalar type, this function does nothing.  */

static void
check_constexpr_init (location_t loc, tree type, tree init,
		      bool int_const_expr, bool arith_const_expr)
{
  if (POINTER_TYPE_P (type))
    {
      /* The initializer must be null.  */
      if (TREE_CODE (init) != INTEGER_CST || !integer_zerop (init))
	error_at (loc, "%<constexpr%> pointer initializer is not null");
      return;
    }
  if (INTEGRAL_TYPE_P (type))
    {
      /* The initializer must be an integer constant expression,
	 representable in the target type.  Note both errors can be
	 emitted for the same initializer.  */
      if (!int_const_expr)
	error_at (loc, "%<constexpr%> integer initializer is not an "
		  "integer constant expression");
      if (!int_fits_type_p (init, type))
	error_at (loc, "%<constexpr%> initializer not representable in "
		  "type of object");
      return;
    }
  /* We don't apply any extra checks to extension types such as vector
     or fixed-point types.  */
  if (TREE_CODE (type) != REAL_TYPE && TREE_CODE (type) != COMPLEX_TYPE)
    return;
  if (!arith_const_expr)
    {
      error_at (loc, "%<constexpr%> initializer is not an arithmetic "
		"constant expression");
      return;
    }
  /* We don't apply any extra checks to complex integers.  */
  if (TREE_CODE (type) == COMPLEX_TYPE
      && TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
    return;
  /* Following N3082, a real type cannot be initialized from a complex
     type and a binary type cannot be initialized from a decimal type
     (but initializing a decimal type from a binary type is OK).
     Signaling NaN initializers are OK only if the types are
     compatible (not just the same mode); all quiet NaN and infinity
     initializations are considered to preserve the value.  */
  if (TREE_CODE (TREE_TYPE (init)) == COMPLEX_TYPE
      && SCALAR_FLOAT_TYPE_P (type))
    {
      error_at (loc, "%<constexpr%> initializer for a real type is of "
		"complex type");
      return;
    }
  if (SCALAR_FLOAT_TYPE_P (type)
      && SCALAR_FLOAT_TYPE_P (TREE_TYPE (init))
      && DECIMAL_FLOAT_TYPE_P (TREE_TYPE (init))
      && !DECIMAL_FLOAT_TYPE_P (type))
    {
      error_at (loc, "%<constexpr%> initializer for a binary "
		"floating-point type is of decimal type");
      return;
    }
  bool fits;
  if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      /* For a complex target, check each part of the initializer
	 against the component real type; a scalar (integer or real)
	 initializer is checked against that component type directly.  */
      switch (TREE_CODE (init))
	{
	case INTEGER_CST:
	case REAL_CST:
	  fits = constexpr_init_fits_real_type (TREE_TYPE (type), init);
	  break;
	case COMPLEX_CST:
	  fits = (constexpr_init_fits_real_type (TREE_TYPE (type),
						 TREE_REALPART (init))
		  && constexpr_init_fits_real_type (TREE_TYPE (type),
						    TREE_IMAGPART (init)));
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  else
    fits = constexpr_init_fits_real_type (type, init);
  if (!fits)
    error_at (loc, "%<constexpr%> initializer not representable in "
	      "type of object");
}
8281 | |
8282 | /* Digest the parser output INIT as an initializer for type TYPE. |
8283 | Return a C expression of type TYPE to represent the initial value. |
8284 | |
8285 | If ORIGTYPE is not NULL_TREE, it is the original type of INIT. |
8286 | |
8287 | NULL_POINTER_CONSTANT is true if INIT is a null pointer constant, |
8288 | INT_CONST_EXPR is true if INIT is an integer constant expression, |
8289 | and ARITH_CONST_EXPR is true if INIT is, or might be, an arithmetic |
8290 | constant expression, false if it has already been determined in the |
8291 | caller that it is not (but folding may have made the value passed here |
8292 | indistinguishable from an arithmetic constant expression). |
8293 | |
8294 | If INIT is a string constant, STRICT_STRING is true if it is |
8295 | unparenthesized or we should not warn here for it being parenthesized. |
8296 | For other types of INIT, STRICT_STRING is not used. |
8297 | |
8298 | INIT_LOC is the location of the INIT. |
8299 | |
8300 | REQUIRE_CONSTANT requests an error if non-constant initializers or |
8301 | elements are seen. REQUIRE_CONSTEXPR means the stricter requirements |
8302 | on initializers for 'constexpr' objects apply. */ |
8303 | |
8304 | static tree |
8305 | digest_init (location_t init_loc, tree type, tree init, tree origtype, |
8306 | bool null_pointer_constant, bool int_const_expr, |
8307 | bool arith_const_expr, bool strict_string, |
8308 | bool require_constant, bool require_constexpr) |
8309 | { |
8310 | enum tree_code code = TREE_CODE (type); |
8311 | tree inside_init = init; |
8312 | tree semantic_type = NULL_TREE; |
8313 | bool maybe_const = true; |
8314 | |
8315 | if (type == error_mark_node |
8316 | || !init |
8317 | || error_operand_p (t: init)) |
8318 | return error_mark_node; |
8319 | |
8320 | STRIP_TYPE_NOPS (inside_init); |
8321 | |
8322 | if (!c_in_omp_for) |
8323 | { |
8324 | if (TREE_CODE (inside_init) == EXCESS_PRECISION_EXPR) |
8325 | { |
8326 | semantic_type = TREE_TYPE (inside_init); |
8327 | inside_init = TREE_OPERAND (inside_init, 0); |
8328 | } |
8329 | inside_init = c_fully_fold (inside_init, require_constant, &maybe_const); |
8330 | } |
8331 | /* TODO: this may not detect all cases of expressions folding to |
8332 | constants that are not arithmetic constant expressions. */ |
8333 | if (!maybe_const) |
8334 | arith_const_expr = false; |
8335 | else if (!INTEGRAL_TYPE_P (TREE_TYPE (inside_init)) |
8336 | && TREE_CODE (TREE_TYPE (inside_init)) != REAL_TYPE |
8337 | && TREE_CODE (TREE_TYPE (inside_init)) != COMPLEX_TYPE) |
8338 | arith_const_expr = false; |
8339 | else if (TREE_CODE (inside_init) != INTEGER_CST |
8340 | && TREE_CODE (inside_init) != REAL_CST |
8341 | && TREE_CODE (inside_init) != COMPLEX_CST) |
8342 | arith_const_expr = false; |
8343 | else if (TREE_OVERFLOW (inside_init)) |
8344 | arith_const_expr = false; |
8345 | |
8346 | /* Initialization of an array of chars from a string constant |
8347 | optionally enclosed in braces. */ |
8348 | |
8349 | if (code == ARRAY_TYPE && inside_init |
8350 | && TREE_CODE (inside_init) == STRING_CST) |
8351 | { |
8352 | tree typ1 |
8353 | = (TYPE_ATOMIC (TREE_TYPE (type)) |
8354 | ? c_build_qualified_type (TYPE_MAIN_VARIANT (TREE_TYPE (type)), |
8355 | TYPE_QUAL_ATOMIC) |
8356 | : TYPE_MAIN_VARIANT (TREE_TYPE (type))); |
8357 | /* Note that an array could be both an array of character type |
8358 | and an array of wchar_t if wchar_t is signed char or unsigned |
8359 | char. */ |
8360 | bool char_array = (typ1 == char_type_node |
8361 | || typ1 == signed_char_type_node |
8362 | || typ1 == unsigned_char_type_node); |
8363 | bool wchar_array = !!comptypes (type1: typ1, wchar_type_node); |
8364 | bool char16_array = !!comptypes (type1: typ1, char16_type_node); |
8365 | bool char32_array = !!comptypes (type1: typ1, char32_type_node); |
8366 | |
8367 | if (char_array || wchar_array || char16_array || char32_array) |
8368 | { |
8369 | struct c_expr expr; |
8370 | tree typ2 = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (inside_init))); |
8371 | bool incompat_string_cst = false; |
8372 | expr.value = inside_init; |
8373 | expr.original_code = (strict_string ? STRING_CST : ERROR_MARK); |
8374 | expr.original_type = NULL; |
8375 | expr.m_decimal = 0; |
8376 | maybe_warn_string_init (loc: init_loc, type, expr); |
8377 | |
8378 | if (TYPE_DOMAIN (type) && !TYPE_MAX_VALUE (TYPE_DOMAIN (type))) |
8379 | pedwarn_init (loc: init_loc, opt: OPT_Wpedantic, |
8380 | gmsgid: "initialization of a flexible array member" ); |
8381 | |
8382 | if (comptypes (TYPE_MAIN_VARIANT (TREE_TYPE (inside_init)), |
8383 | TYPE_MAIN_VARIANT (type))) |
8384 | return inside_init; |
8385 | |
8386 | if (char_array) |
8387 | { |
8388 | if (typ2 != char_type_node && typ2 != char8_type_node) |
8389 | incompat_string_cst = true; |
8390 | } |
8391 | else if (!comptypes (type1: typ1, type2: typ2)) |
8392 | incompat_string_cst = true; |
8393 | |
8394 | if (incompat_string_cst) |
8395 | { |
8396 | error_init (loc: init_loc, gmsgid: "cannot initialize array of %qT from " |
8397 | "a string literal with type array of %qT" , |
8398 | typ1, typ2); |
8399 | return error_mark_node; |
8400 | } |
8401 | |
8402 | if (require_constexpr |
8403 | && TYPE_UNSIGNED (typ1) != TYPE_UNSIGNED (typ2)) |
8404 | { |
8405 | /* Check if all characters of the string can be |
8406 | represented in the type of the constexpr object being |
8407 | initialized. */ |
8408 | unsigned HOST_WIDE_INT len = TREE_STRING_LENGTH (inside_init); |
8409 | const unsigned char *p = |
8410 | (const unsigned char *) TREE_STRING_POINTER (inside_init); |
8411 | gcc_assert (CHAR_TYPE_SIZE == 8 && CHAR_BIT == 8); |
8412 | for (unsigned i = 0; i < len; i++) |
8413 | if (p[i] > 127) |
8414 | { |
8415 | error_init (loc: init_loc, gmsgid: "%<constexpr%> initializer not " |
8416 | "representable in type of object" ); |
8417 | break; |
8418 | } |
8419 | } |
8420 | |
8421 | if (TYPE_DOMAIN (type) != NULL_TREE |
8422 | && TYPE_SIZE (type) != NULL_TREE |
8423 | && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST) |
8424 | { |
8425 | unsigned HOST_WIDE_INT len = TREE_STRING_LENGTH (inside_init); |
8426 | unsigned unit = TYPE_PRECISION (typ1) / BITS_PER_UNIT; |
8427 | |
8428 | /* Subtract the size of a single (possibly wide) character |
8429 | because it's ok to ignore the terminating null char |
8430 | that is counted in the length of the constant. */ |
8431 | if (compare_tree_int (TYPE_SIZE_UNIT (type), len - unit) < 0) |
8432 | pedwarn_init (loc: init_loc, opt: 0, |
8433 | gmsgid: ("initializer-string for array of %qT " |
8434 | "is too long" ), typ1); |
8435 | else if (warn_cxx_compat |
8436 | && compare_tree_int (TYPE_SIZE_UNIT (type), len) < 0) |
8437 | warning_at (init_loc, OPT_Wc___compat, |
8438 | ("initializer-string for array of %qT " |
8439 | "is too long for C++" ), typ1); |
8440 | if (compare_tree_int (TYPE_SIZE_UNIT (type), len) < 0) |
8441 | { |
8442 | unsigned HOST_WIDE_INT size |
8443 | = tree_to_uhwi (TYPE_SIZE_UNIT (type)); |
8444 | const char *p = TREE_STRING_POINTER (inside_init); |
8445 | |
8446 | inside_init = build_string (size, p); |
8447 | } |
8448 | } |
8449 | |
8450 | TREE_TYPE (inside_init) = type; |
8451 | return inside_init; |
8452 | } |
8453 | else if (INTEGRAL_TYPE_P (typ1)) |
8454 | { |
8455 | error_init (loc: init_loc, gmsgid: "array of inappropriate type initialized " |
8456 | "from string constant" ); |
8457 | return error_mark_node; |
8458 | } |
8459 | } |
8460 | |
8461 | /* Build a VECTOR_CST from a *constant* vector constructor. If the |
8462 | vector constructor is not constant (e.g. {1,2,3,foo()}) then punt |
8463 | below and handle as a constructor. */ |
8464 | if (code == VECTOR_TYPE |
8465 | && VECTOR_TYPE_P (TREE_TYPE (inside_init)) |
8466 | && vector_types_convertible_p (TREE_TYPE (inside_init), t2: type, emit_lax_note: true) |
8467 | && TREE_CONSTANT (inside_init)) |
8468 | { |
8469 | if (TREE_CODE (inside_init) == VECTOR_CST |
8470 | && comptypes (TYPE_MAIN_VARIANT (TREE_TYPE (inside_init)), |
8471 | TYPE_MAIN_VARIANT (type))) |
8472 | return inside_init; |
8473 | |
8474 | if (TREE_CODE (inside_init) == CONSTRUCTOR) |
8475 | { |
8476 | unsigned HOST_WIDE_INT ix; |
8477 | tree value; |
8478 | bool constant_p = true; |
8479 | |
8480 | /* Iterate through elements and check if all constructor |
8481 | elements are *_CSTs. */ |
8482 | FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (inside_init), ix, value) |
8483 | if (!CONSTANT_CLASS_P (value)) |
8484 | { |
8485 | constant_p = false; |
8486 | break; |
8487 | } |
8488 | |
8489 | if (constant_p) |
8490 | return build_vector_from_ctor (type, |
8491 | CONSTRUCTOR_ELTS (inside_init)); |
8492 | } |
8493 | } |
8494 | |
8495 | if (warn_sequence_point) |
8496 | verify_sequence_points (inside_init); |
8497 | |
8498 | /* Any type can be initialized |
8499 | from an expression of the same type, optionally with braces. */ |
8500 | |
8501 | if (inside_init && TREE_TYPE (inside_init) != NULL_TREE |
8502 | && (comptypes (TYPE_MAIN_VARIANT (TREE_TYPE (inside_init)), |
8503 | TYPE_MAIN_VARIANT (type)) |
8504 | || (code == ARRAY_TYPE |
8505 | && comptypes (TREE_TYPE (inside_init), type2: type)) |
8506 | || (gnu_vector_type_p (type) |
8507 | && comptypes (TREE_TYPE (inside_init), type2: type)) |
8508 | || (code == POINTER_TYPE |
8509 | && TREE_CODE (TREE_TYPE (inside_init)) == ARRAY_TYPE |
8510 | && comptypes (TREE_TYPE (TREE_TYPE (inside_init)), |
8511 | TREE_TYPE (type))))) |
8512 | { |
8513 | if (code == POINTER_TYPE) |
8514 | { |
8515 | if (TREE_CODE (TREE_TYPE (inside_init)) == ARRAY_TYPE) |
8516 | { |
8517 | if (TREE_CODE (inside_init) == STRING_CST |
8518 | || TREE_CODE (inside_init) == COMPOUND_LITERAL_EXPR) |
8519 | inside_init = array_to_pointer_conversion |
8520 | (loc: init_loc, exp: inside_init); |
8521 | else |
8522 | { |
8523 | error_init (loc: init_loc, gmsgid: "invalid use of non-lvalue array" ); |
8524 | return error_mark_node; |
8525 | } |
8526 | } |
8527 | } |
8528 | |
8529 | if (code == VECTOR_TYPE) |
8530 | /* Although the types are compatible, we may require a |
8531 | conversion. */ |
8532 | inside_init = convert (type, inside_init); |
8533 | |
8534 | if (require_constant |
8535 | && TREE_CODE (inside_init) == COMPOUND_LITERAL_EXPR) |
8536 | { |
8537 | /* As an extension, allow initializing objects with static storage |
8538 | duration with compound literals (which are then treated just as |
8539 | the brace enclosed list they contain). Also allow this for |
8540 | vectors, as we can only assign them with compound literals. */ |
8541 | if (flag_isoc99 && code != VECTOR_TYPE) |
8542 | pedwarn_init (loc: init_loc, opt: OPT_Wpedantic, gmsgid: "initializer element " |
8543 | "is not constant" ); |
8544 | tree decl = COMPOUND_LITERAL_EXPR_DECL (inside_init); |
8545 | inside_init = DECL_INITIAL (decl); |
8546 | } |
8547 | |
8548 | if (code == ARRAY_TYPE && TREE_CODE (inside_init) != STRING_CST |
8549 | && TREE_CODE (inside_init) != CONSTRUCTOR) |
8550 | { |
8551 | error_init (loc: init_loc, gmsgid: "array initialized from non-constant array " |
8552 | "expression" ); |
8553 | return error_mark_node; |
8554 | } |
8555 | |
/* Compound expressions can only occur here if -Wpedantic or
   -pedantic-errors is specified.  In the latter case, we always want
   an error.  In the former case, we simply want a warning.  */
8559 | if (require_constant && pedantic |
8560 | && TREE_CODE (inside_init) == COMPOUND_EXPR) |
8561 | { |
8562 | inside_init |
8563 | = valid_compound_expr_initializer (value: inside_init, |
8564 | TREE_TYPE (inside_init)); |
8565 | if (inside_init == error_mark_node) |
8566 | error_init (loc: init_loc, gmsgid: "initializer element is not constant" ); |
8567 | else |
8568 | pedwarn_init (loc: init_loc, opt: OPT_Wpedantic, |
8569 | gmsgid: "initializer element is not constant" ); |
8570 | if (flag_pedantic_errors) |
8571 | inside_init = error_mark_node; |
8572 | } |
8573 | else if (require_constant |
8574 | && !initializer_constant_valid_p (inside_init, |
8575 | TREE_TYPE (inside_init))) |
8576 | { |
8577 | error_init (loc: init_loc, gmsgid: "initializer element is not constant" ); |
8578 | inside_init = error_mark_node; |
8579 | } |
8580 | else if (require_constant && !maybe_const) |
8581 | pedwarn_init (loc: init_loc, opt: OPT_Wpedantic, |
8582 | gmsgid: "initializer element is not a constant expression" ); |
8583 | else if (require_constexpr) |
8584 | check_constexpr_init (loc: init_loc, type, init: inside_init, |
8585 | int_const_expr, arith_const_expr); |
8586 | |
8587 | /* Added to enable additional -Wsuggest-attribute=format warnings. */ |
8588 | if (TREE_CODE (TREE_TYPE (inside_init)) == POINTER_TYPE) |
8589 | inside_init = convert_for_assignment (location: init_loc, UNKNOWN_LOCATION, |
8590 | type, rhs: inside_init, origtype, |
8591 | errtype: (require_constant |
8592 | ? ic_init_const |
8593 | : ic_init), null_pointer_constant, |
8594 | NULL_TREE, NULL_TREE, parmnum: 0); |
8595 | return inside_init; |
8596 | } |
8597 | |
8598 | /* Handle scalar types, including conversions. */ |
8599 | |
8600 | if (code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE |
8601 | || code == POINTER_TYPE || code == ENUMERAL_TYPE || code == BOOLEAN_TYPE |
8602 | || code == COMPLEX_TYPE || code == VECTOR_TYPE || code == NULLPTR_TYPE |
8603 | || code == BITINT_TYPE) |
8604 | { |
8605 | tree unconverted_init = inside_init; |
8606 | if (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE |
8607 | && (TREE_CODE (init) == STRING_CST |
8608 | || TREE_CODE (init) == COMPOUND_LITERAL_EXPR)) |
8609 | inside_init = init = array_to_pointer_conversion (loc: init_loc, exp: init); |
8610 | if (semantic_type) |
8611 | inside_init = build1 (EXCESS_PRECISION_EXPR, semantic_type, |
8612 | inside_init); |
8613 | inside_init |
8614 | = convert_for_assignment (location: init_loc, UNKNOWN_LOCATION, type, |
8615 | rhs: inside_init, origtype, |
8616 | errtype: require_constant ? ic_init_const : ic_init, |
8617 | null_pointer_constant, NULL_TREE, NULL_TREE, |
8618 | parmnum: 0); |
8619 | |
8620 | /* Check to see if we have already given an error message. */ |
8621 | if (inside_init == error_mark_node) |
8622 | ; |
8623 | else if (require_constant && !TREE_CONSTANT (inside_init)) |
8624 | { |
8625 | error_init (loc: init_loc, gmsgid: "initializer element is not constant" ); |
8626 | inside_init = error_mark_node; |
8627 | } |
8628 | else if (require_constant |
8629 | && !initializer_constant_valid_p (inside_init, |
8630 | TREE_TYPE (inside_init))) |
8631 | { |
8632 | error_init (loc: init_loc, gmsgid: "initializer element is not computable at " |
8633 | "load time" ); |
8634 | inside_init = error_mark_node; |
8635 | } |
8636 | else if (require_constant && !maybe_const) |
8637 | pedwarn_init (loc: init_loc, opt: OPT_Wpedantic, |
8638 | gmsgid: "initializer element is not a constant expression" ); |
8639 | else if (require_constexpr) |
8640 | check_constexpr_init (loc: init_loc, type, init: unconverted_init, |
8641 | int_const_expr, arith_const_expr); |
8642 | |
8643 | return inside_init; |
8644 | } |
8645 | |
8646 | /* Come here only for records and arrays. */ |
8647 | |
8648 | if (COMPLETE_TYPE_P (type) && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST) |
8649 | { |
8650 | error_init (loc: init_loc, |
8651 | gmsgid: "variable-sized object may not be initialized except " |
8652 | "with an empty initializer" ); |
8653 | return error_mark_node; |
8654 | } |
8655 | |
8656 | error_init (loc: init_loc, gmsgid: "invalid initializer" ); |
8657 | return error_mark_node; |
8658 | } |
8659 | |
/* Handle initializers that use braces.  */

/* The following file-scope variables hold the parsing state for the
   innermost brace level of the initializer currently being processed.
   They are saved and restored through constructor_stack (below)
   whenever a nested brace level is entered or left.  */

/* Type of object we are accumulating a constructor for.
   This type is always a RECORD_TYPE, UNION_TYPE or ARRAY_TYPE.  */
static tree constructor_type;

/* For a RECORD_TYPE or UNION_TYPE, this is the chain of fields
   left to fill.  */
static tree constructor_fields;

/* For an ARRAY_TYPE, this is the specified index
   at which to store the next element we get.  */
static tree constructor_index;

/* For an ARRAY_TYPE, this is the maximum index.  */
static tree constructor_max_index;

/* For a RECORD_TYPE, this is the first field not yet written out.  */
static tree constructor_unfilled_fields;

/* For an ARRAY_TYPE, this is the index of the first element
   not yet written out.  */
static tree constructor_unfilled_index;

/* In a RECORD_TYPE, the byte index of the next consecutive field.
   This is so we can generate gaps between fields, when appropriate.  */
static tree constructor_bit_index;

/* If we are saving up the elements rather than allocating them,
   this is the list of elements so far (in reverse order,
   most recent first).  */
static vec<constructor_elt, va_gc> *constructor_elements;

/* 1 if constructor should be incrementally stored into a constructor chain,
   0 if all the elements should be kept in AVL tree.  */
static int constructor_incremental;

/* 1 if so far this constructor's elements are all compile-time constants.  */
static int constructor_constant;

/* 1 if so far this constructor's elements are all valid address constants.  */
static int constructor_simple;

/* 1 if this constructor has an element that cannot be part of a
   constant expression.  */
static int constructor_nonconst;

/* 1 if this constructor is erroneous so far.  */
static int constructor_erroneous;

/* 1 if this constructor is the universal zero initializer { 0 }.  */
static int constructor_zeroinit;
8712 | |
/* Structure for managing pending initializer elements, organized as an
   AVL tree.  */

struct init_node
{
  /* AVL tree links (children and parent) and the node's balance factor.  */
  struct init_node *left, *right;
  struct init_node *parent;
  int balance;
  /* Key of this element: the field or array index being initialized
     (presumably the same purpose/value convention as constructor_elt
     — confirm against the pending-element insertion code).  */
  tree purpose;
  /* The initializer value for that subobject.  */
  tree value;
  /* Original type of VALUE before conversion, kept for diagnostics.  */
  tree origtype;
};

/* Tree of pending elements at this constructor level.
   These are elements encountered out of order
   which belong at places we haven't reached yet in actually
   writing the output.
   Will never hold tree nodes across GC runs.  */
static struct init_node *constructor_pending_elts;
8732 | |
/* The SPELLING_DEPTH of this constructor, i.e. the depth of the
   diagnostic "spelling" stack when this level was entered.  */
static int constructor_depth;

/* DECL node for which an initializer is being read.
   0 means we are reading a constructor expression
   such as (struct foo) {...}.  */
static tree constructor_decl;

/* Nonzero if there were any member designators in this initializer.  */
static int constructor_designated;

/* Nesting depth of designator list.  */
static int designator_depth;

/* Nonzero if there were diagnosed errors in this designator list.  */
static int designator_erroneous;
8749 | |
8750 | |
/* This stack has a level for each implicit or explicit level of
   structuring in the initializer, including the outermost one.  It
   saves the values of most of the variables above.  */

struct constructor_range_stack;

struct constructor_stack
{
  struct constructor_stack *next;
  /* Saved copies of the corresponding constructor_* globals for the
     enclosing level; pushed by push_init_level and
     really_start_incremental_init, restored by pop_init_level.  */
  tree type;
  tree fields;
  tree index;
  tree max_index;
  tree unfilled_index;
  tree unfilled_fields;
  tree bit_index;
  vec<constructor_elt, va_gc> *elements;
  struct init_node *pending_elts;
  int offset;
  int depth;
  /* If value nonzero, this value should replace the entire
     constructor at this level.  */
  struct c_expr replacement_value;
  struct constructor_range_stack *range_stack;
  /* Saved per-level flags; see the similarly named globals above.  */
  char constant;
  char simple;
  char nonconst;
  /* Nonzero if this level was opened without an explicit brace
     (1, or 2 when pushed because of a designator list).  */
  char implicit;
  char erroneous;
  char outer;
  char incremental;
  char designated;
  int designator_depth;
};

static struct constructor_stack *constructor_stack;
8787 | |
/* This stack represents designators from some range designator up to
   the last designator in the list.  */

struct constructor_range_stack
{
  /* Doubly linked list of designator entries.  */
  struct constructor_range_stack *next, *prev;
  /* The constructor_stack level this designator entry belongs to.  */
  struct constructor_stack *stack;
  /* For an array range designator [lo ... hi]: first index, current
     index, and last index.  */
  tree range_start;
  tree index;
  tree range_end;
  /* For a member designator: the designated field.  */
  tree fields;
};

static struct constructor_range_stack *constructor_range_stack;
8802 | |
/* This stack records separate initializers that are nested.
   Nested initializers can't happen in ANSI C, but GNU C allows them
   in cases like { ... (struct foo) { ... } ... }.  */

struct initializer_stack
{
  struct initializer_stack *next;
  /* Saved copies of the corresponding file-scope variables, pushed by
     start_init and restored by finish_init.  */
  tree decl;
  struct constructor_stack *constructor_stack;
  struct constructor_range_stack *constructor_range_stack;
  vec<constructor_elt, va_gc> *elements;
  struct spelling *spelling;
  struct spelling *spelling_base;
  int spelling_size;
  char require_constant_value;
  char require_constant_elements;
  char require_constexpr_value;
  char designated;
  /* Collects fix-it hints for missing braces; may be null.  */
  rich_location *missing_brace_richloc;
};

static struct initializer_stack *initializer_stack;
8825 | |
8826 | /* Prepare to parse and output the initializer for variable DECL. */ |
8827 | |
8828 | void |
8829 | start_init (tree decl, tree asmspec_tree ATTRIBUTE_UNUSED, |
8830 | bool init_require_constant, bool init_require_constexpr, |
8831 | rich_location *richloc) |
8832 | { |
8833 | const char *locus; |
8834 | struct initializer_stack *p = XNEW (struct initializer_stack); |
8835 | |
8836 | p->decl = constructor_decl; |
8837 | p->require_constant_value = require_constant_value; |
8838 | p->require_constant_elements = require_constant_elements; |
8839 | p->require_constexpr_value = require_constexpr_value; |
8840 | p->constructor_stack = constructor_stack; |
8841 | p->constructor_range_stack = constructor_range_stack; |
8842 | p->elements = constructor_elements; |
8843 | p->spelling = spelling; |
8844 | p->spelling_base = spelling_base; |
8845 | p->spelling_size = spelling_size; |
8846 | p->next = initializer_stack; |
8847 | p->missing_brace_richloc = richloc; |
8848 | p->designated = constructor_designated; |
8849 | initializer_stack = p; |
8850 | |
8851 | constructor_decl = decl; |
8852 | constructor_designated = 0; |
8853 | |
8854 | require_constant_value = init_require_constant; |
8855 | require_constexpr_value = init_require_constexpr; |
8856 | if (decl != NULL_TREE && decl != error_mark_node) |
8857 | { |
8858 | require_constant_elements |
8859 | = ((init_require_constant || (pedantic && !flag_isoc99)) |
8860 | /* For a scalar, you can always use any value to initialize, |
8861 | even within braces. */ |
8862 | && AGGREGATE_TYPE_P (TREE_TYPE (decl))); |
8863 | locus = identifier_to_locale (IDENTIFIER_POINTER (DECL_NAME (decl))); |
8864 | } |
8865 | else |
8866 | { |
8867 | require_constant_elements = false; |
8868 | locus = _("(anonymous)" ); |
8869 | } |
8870 | |
8871 | constructor_stack = 0; |
8872 | constructor_range_stack = 0; |
8873 | |
8874 | found_missing_braces = 0; |
8875 | |
8876 | spelling_base = 0; |
8877 | spelling_size = 0; |
8878 | RESTORE_SPELLING_DEPTH (0); |
8879 | |
8880 | if (locus) |
8881 | push_string (string: locus); |
8882 | } |
8883 | |
8884 | void |
8885 | finish_init (void) |
8886 | { |
8887 | struct initializer_stack *p = initializer_stack; |
8888 | |
8889 | /* Free the whole constructor stack of this initializer. */ |
8890 | while (constructor_stack) |
8891 | { |
8892 | struct constructor_stack *q = constructor_stack; |
8893 | constructor_stack = q->next; |
8894 | XDELETE (q); |
8895 | } |
8896 | |
8897 | gcc_assert (!constructor_range_stack); |
8898 | |
8899 | /* Pop back to the data of the outer initializer (if any). */ |
8900 | XDELETE (spelling_base); |
8901 | |
8902 | constructor_decl = p->decl; |
8903 | require_constant_value = p->require_constant_value; |
8904 | require_constant_elements = p->require_constant_elements; |
8905 | require_constexpr_value = p->require_constexpr_value; |
8906 | constructor_stack = p->constructor_stack; |
8907 | constructor_designated = p->designated; |
8908 | constructor_range_stack = p->constructor_range_stack; |
8909 | constructor_elements = p->elements; |
8910 | spelling = p->spelling; |
8911 | spelling_base = p->spelling_base; |
8912 | spelling_size = p->spelling_size; |
8913 | initializer_stack = p->next; |
8914 | XDELETE (p); |
8915 | } |
8916 | |
/* Call here when we see the initializer is surrounded by braces.
   This is instead of a call to push_init_level;
   it is matched by a call to pop_init_level.

   TYPE is the type to initialize, for a constructor expression.
   For an initializer for a decl, TYPE is zero.  */

void
really_start_incremental_init (tree type)
{
  struct constructor_stack *p = XNEW (struct constructor_stack);

  if (type == NULL_TREE)
    type = TREE_TYPE (constructor_decl);

  if (VECTOR_TYPE_P (type)
      && TYPE_VECTOR_OPAQUE (type))
    error ("opaque vector types cannot be initialized" );

  /* Save the current constructor state so that pop_init_level can
     restore it when this brace level is closed.  */
  p->type = constructor_type;
  p->fields = constructor_fields;
  p->index = constructor_index;
  p->max_index = constructor_max_index;
  p->unfilled_index = constructor_unfilled_index;
  p->unfilled_fields = constructor_unfilled_fields;
  p->bit_index = constructor_bit_index;
  p->elements = constructor_elements;
  p->constant = constructor_constant;
  p->simple = constructor_simple;
  p->nonconst = constructor_nonconst;
  p->erroneous = constructor_erroneous;
  p->pending_elts = constructor_pending_elts;
  p->depth = constructor_depth;
  p->replacement_value.value = 0;
  p->replacement_value.original_code = ERROR_MARK;
  p->replacement_value.original_type = NULL;
  p->implicit = 0;
  p->range_stack = 0;
  p->outer = 0;
  p->incremental = constructor_incremental;
  p->designated = constructor_designated;
  p->designator_depth = designator_depth;
  p->next = 0;
  constructor_stack = p;

  /* Begin the outermost brace level with a clean state.  */
  constructor_constant = 1;
  constructor_simple = 1;
  constructor_nonconst = 0;
  constructor_depth = SPELLING_DEPTH ();
  constructor_elements = NULL;
  constructor_pending_elts = 0;
  constructor_type = type;
  constructor_incremental = 1;
  constructor_designated = 0;
  constructor_zeroinit = 1;
  designator_depth = 0;
  designator_erroneous = 0;

  /* Set up the per-type cursors (field chain, array index, ...) for
     the object being initialized.  */
  if (RECORD_OR_UNION_TYPE_P (constructor_type))
    {
      constructor_fields = TYPE_FIELDS (constructor_type);
      /* Skip any nameless bit fields at the beginning.  */
      while (constructor_fields != NULL_TREE
	     && DECL_UNNAMED_BIT_FIELD (constructor_fields))
	constructor_fields = DECL_CHAIN (constructor_fields);

      constructor_unfilled_fields = constructor_fields;
      constructor_bit_index = bitsize_zero_node;
    }
  else if (TREE_CODE (constructor_type) == ARRAY_TYPE)
    {
      if (TYPE_DOMAIN (constructor_type))
	{
	  constructor_max_index
	    = TYPE_MAX_VALUE (TYPE_DOMAIN (constructor_type));

	  /* Detect non-empty initializations of zero-length arrays.  */
	  if (constructor_max_index == NULL_TREE
	      && TYPE_SIZE (constructor_type))
	    constructor_max_index = integer_minus_one_node;

	  /* constructor_max_index needs to be an INTEGER_CST.  Attempts
	     to initialize VLAs with a nonempty initializer will cause a
	     proper error; avoid tree checking errors as well by setting a
	     safe value.  */
	  if (constructor_max_index
	      && TREE_CODE (constructor_max_index) != INTEGER_CST)
	    constructor_max_index = integer_minus_one_node;

	  constructor_index
	    = convert (bitsizetype,
		       TYPE_MIN_VALUE (TYPE_DOMAIN (constructor_type)));
	}
      else
	{
	  /* Incomplete array type: index from zero with no upper bound.  */
	  constructor_index = bitsize_zero_node;
	  constructor_max_index = NULL_TREE;
	}

      constructor_unfilled_index = constructor_index;
    }
  else if (gnu_vector_type_p (type: constructor_type))
    {
      /* Vectors are like simple fixed-size arrays.  */
      constructor_max_index =
	bitsize_int (TYPE_VECTOR_SUBPARTS (constructor_type) - 1);
      constructor_index = bitsize_zero_node;
      constructor_unfilled_index = constructor_index;
    }
  else
    {
      /* Handle the case of int x = {5}; */
      constructor_fields = constructor_type;
      constructor_unfilled_fields = constructor_type;
    }
}
9033 | |
9034 | extern location_t last_init_list_comma; |
9035 | |
9036 | /* Called when we see an open brace for a nested initializer. Finish |
9037 | off any pending levels with implicit braces. */ |
9038 | void |
9039 | finish_implicit_inits (location_t loc, struct obstack *braced_init_obstack) |
9040 | { |
9041 | while (constructor_stack->implicit) |
9042 | { |
9043 | if (RECORD_OR_UNION_TYPE_P (constructor_type) |
9044 | && constructor_fields == NULL_TREE) |
9045 | process_init_element (input_location, |
9046 | pop_init_level (loc, 1, braced_init_obstack, |
9047 | last_init_list_comma), |
9048 | true, braced_init_obstack); |
9049 | else if (TREE_CODE (constructor_type) == ARRAY_TYPE |
9050 | && constructor_max_index |
9051 | && tree_int_cst_lt (t1: constructor_max_index, |
9052 | t2: constructor_index)) |
9053 | process_init_element (input_location, |
9054 | pop_init_level (loc, 1, braced_init_obstack, |
9055 | last_init_list_comma), |
9056 | true, braced_init_obstack); |
9057 | else |
9058 | break; |
9059 | } |
9060 | } |
9061 | |
/* Push down into a subobject, for initialization.
   If this is for an explicit set of braces, IMPLICIT is 0.
   If it is because the next element belongs at a lower level,
   IMPLICIT is 1 (or 2 if the push is because of designator list).  */

void
push_init_level (location_t loc, int implicit,
		 struct obstack *braced_init_obstack)
{
  struct constructor_stack *p;
  tree value = NULL_TREE;

  /* Unless this is an explicit brace, we need to preserve previous
     content if any.  */
  if (implicit)
    {
      if (RECORD_OR_UNION_TYPE_P (constructor_type) && constructor_fields)
	value = find_init_member (constructor_fields, braced_init_obstack);
      else if (TREE_CODE (constructor_type) == ARRAY_TYPE)
	value = find_init_member (constructor_index, braced_init_obstack);
    }

  /* Save the state of the enclosing level so that pop_init_level can
     restore it.  */
  p = XNEW (struct constructor_stack);
  p->type = constructor_type;
  p->fields = constructor_fields;
  p->index = constructor_index;
  p->max_index = constructor_max_index;
  p->unfilled_index = constructor_unfilled_index;
  p->unfilled_fields = constructor_unfilled_fields;
  p->bit_index = constructor_bit_index;
  p->elements = constructor_elements;
  p->constant = constructor_constant;
  p->simple = constructor_simple;
  p->nonconst = constructor_nonconst;
  p->erroneous = constructor_erroneous;
  p->pending_elts = constructor_pending_elts;
  p->depth = constructor_depth;
  p->replacement_value.value = NULL_TREE;
  p->replacement_value.original_code = ERROR_MARK;
  p->replacement_value.original_type = NULL;
  p->implicit = implicit;
  p->outer = 0;
  p->incremental = constructor_incremental;
  p->designated = constructor_designated;
  p->designator_depth = designator_depth;
  p->next = constructor_stack;
  p->range_stack = 0;
  constructor_stack = p;

  /* Begin the new level with a clean state.  */
  constructor_constant = 1;
  constructor_simple = 1;
  constructor_nonconst = 0;
  constructor_depth = SPELLING_DEPTH ();
  constructor_elements = NULL;
  constructor_incremental = 1;
  /* If the upper initializer is designated, then mark this as
     designated too to prevent bogus warnings.  */
  constructor_designated = p->designated;
  constructor_pending_elts = 0;
  if (!implicit)
    {
      /* Explicit braces start a fresh designator context; the old one
	 is saved on this level and restored at the matching pop.  */
      p->range_stack = constructor_range_stack;
      constructor_range_stack = 0;
      designator_depth = 0;
      designator_erroneous = 0;
    }

  /* Descend from the enclosing type to the type of the subobject
     being initialized at this level.  */
  /* Don't die if an entire brace-pair level is superfluous
     in the containing level.  */
  if (constructor_type == NULL_TREE)
    ;
  else if (RECORD_OR_UNION_TYPE_P (constructor_type))
    {
      /* Don't die if there are extra init elts at the end.  */
      if (constructor_fields == NULL_TREE)
	constructor_type = NULL_TREE;
      else
	{
	  constructor_type = TREE_TYPE (constructor_fields);
	  push_member_name (decl: constructor_fields);
	  constructor_depth++;
	}
    }
  else if (TREE_CODE (constructor_type) == ARRAY_TYPE)
    {
      constructor_type = TREE_TYPE (constructor_type);
      push_array_bounds (bounds: tree_to_uhwi (constructor_index));
      constructor_depth++;
    }

  if (constructor_type == NULL_TREE)
    {
      error_init (loc, gmsgid: "extra brace group at end of initializer" );
      constructor_fields = NULL_TREE;
      constructor_unfilled_fields = NULL_TREE;
      return;
    }

  /* If the subobject already has a (partial) constructor from an
     earlier designator, resume from its contents.  */
  if (value && TREE_CODE (value) == CONSTRUCTOR)
    {
      constructor_constant = TREE_CONSTANT (value);
      constructor_simple = TREE_STATIC (value);
      constructor_nonconst = CONSTRUCTOR_NON_CONST (value);
      constructor_elements = CONSTRUCTOR_ELTS (value);
      if (!vec_safe_is_empty (v: constructor_elements)
	  && (TREE_CODE (constructor_type) == RECORD_TYPE
	      || TREE_CODE (constructor_type) == ARRAY_TYPE))
	set_nonincremental_init (braced_init_obstack);
    }

  if (implicit == 1)
    {
      /* Record the omitted "{" so a fix-it hint can be emitted.  */
      found_missing_braces = 1;
      if (initializer_stack->missing_brace_richloc)
	initializer_stack->missing_brace_richloc->add_fixit_insert_before
	  (where: loc, new_content: "{" );
    }

  /* Set up the per-type cursors for the new level, mirroring
     really_start_incremental_init.  */
  if (RECORD_OR_UNION_TYPE_P (constructor_type))
    {
      constructor_fields = TYPE_FIELDS (constructor_type);
      /* Skip any nameless bit fields at the beginning.  */
      while (constructor_fields != NULL_TREE
	     && DECL_UNNAMED_BIT_FIELD (constructor_fields))
	constructor_fields = DECL_CHAIN (constructor_fields);

      constructor_unfilled_fields = constructor_fields;
      constructor_bit_index = bitsize_zero_node;
    }
  else if (gnu_vector_type_p (type: constructor_type))
    {
      /* Vectors are like simple fixed-size arrays.  */
      constructor_max_index =
	bitsize_int (TYPE_VECTOR_SUBPARTS (constructor_type) - 1);
      constructor_index = bitsize_int (0);
      constructor_unfilled_index = constructor_index;
    }
  else if (TREE_CODE (constructor_type) == ARRAY_TYPE)
    {
      if (TYPE_DOMAIN (constructor_type))
	{
	  constructor_max_index
	    = TYPE_MAX_VALUE (TYPE_DOMAIN (constructor_type));

	  /* Detect non-empty initializations of zero-length arrays.  */
	  if (constructor_max_index == NULL_TREE
	      && TYPE_SIZE (constructor_type))
	    constructor_max_index = integer_minus_one_node;

	  /* constructor_max_index needs to be an INTEGER_CST.  Attempts
	     to initialize VLAs will cause a proper error; avoid tree
	     checking errors as well by setting a safe value.  */
	  if (constructor_max_index
	      && TREE_CODE (constructor_max_index) != INTEGER_CST)
	    constructor_max_index = integer_minus_one_node;

	  constructor_index
	    = convert (bitsizetype,
		       TYPE_MIN_VALUE (TYPE_DOMAIN (constructor_type)));
	}
      else
	constructor_index = bitsize_zero_node;

      constructor_unfilled_index = constructor_index;
      if (value && TREE_CODE (value) == STRING_CST)
	{
	  /* We need to split the char/wchar array into individual
	     characters, so that we don't have to special case it
	     everywhere.  */
	  set_nonincremental_init_from_string (value, braced_init_obstack);
	}
    }
  else
    {
      if (constructor_type != error_mark_node)
	warning_init (loc: input_location, opt: 0, gmsgid: "braces around scalar initializer" );
      constructor_fields = constructor_type;
      constructor_unfilled_fields = constructor_type;
    }
}
9242 | |
9243 | /* At the end of an implicit or explicit brace level, |
9244 | finish up that level of constructor. If a single expression |
9245 | with redundant braces initialized that level, return the |
9246 | c_expr structure for that expression. Otherwise, the original_code |
9247 | element is set to ERROR_MARK. |
9248 | If we were outputting the elements as they are read, return 0 as the value |
9249 | from inner levels (process_init_element ignores that), |
9250 | but return error_mark_node as the value from the outermost level |
9251 | (that's what we want to put in DECL_INITIAL). |
9252 | Otherwise, return a CONSTRUCTOR expression as the value. */ |
9253 | |
struct c_expr
pop_init_level (location_t loc, int implicit,
		struct obstack *braced_init_obstack,
		location_t insert_before)
{
  struct constructor_stack *p;
  struct c_expr ret;
  /* original_code == ERROR_MARK means "not a single expression with
     redundant braces"; see the function comment above.  */
  ret.value = NULL_TREE;
  ret.original_code = ERROR_MARK;
  ret.original_type = NULL;
  ret.m_decimal = 0;

  if (implicit == 0)
    {
      /* When we come to an explicit close brace,
	 pop any inner levels that didn't have explicit braces.  */
      while (constructor_stack->implicit)
	process_init_element (input_location,
			      pop_init_level (loc, implicit: 1, braced_init_obstack,
					      insert_before),
			      true, braced_init_obstack);
      gcc_assert (!constructor_range_stack);
    }
  else
    /* Popping an implicit level: record a fix-it hint inserting the
       close brace that the user omitted, for -Wmissing-braces.  */
    if (initializer_stack->missing_brace_richloc)
      initializer_stack->missing_brace_richloc->add_fixit_insert_before
	(where: insert_before, new_content: "}");

  /* Now output all pending elements.  */
  constructor_incremental = 1;
  output_pending_init_elements (1, braced_init_obstack);

  p = constructor_stack;

  /* Error for initializing a flexible array member, or a zero-length
     array member in an inappropriate context.  */
  if (constructor_type && constructor_fields
      && TREE_CODE (constructor_type) == ARRAY_TYPE
      && TYPE_DOMAIN (constructor_type)
      && !TYPE_MAX_VALUE (TYPE_DOMAIN (constructor_type)))
    {
      /* Silently discard empty initializations.  The parser will
	 already have pedwarned for empty brackets.  */
      if (integer_zerop (constructor_unfilled_index))
	constructor_type = NULL_TREE;
      else
	{
	  gcc_assert (!TYPE_SIZE (constructor_type));

	  if (constructor_depth > 2)
	    error_init (loc, gmsgid: "initialization of flexible array member in a nested context");
	  else
	    pedwarn_init (loc, opt: OPT_Wpedantic,
			  gmsgid: "initialization of a flexible array member");

	  /* We have already issued an error message for the existence
	     of a flexible array member not at the end of the structure.
	     Discard the initializer so that we do not die later.  */
	  if (DECL_CHAIN (constructor_fields) != NULL_TREE)
	    constructor_type = NULL_TREE;
	}
    }

  /* Classify this level as zero-initialization ({ } or { 0 }) so the
     missing-braces / missing-field warnings below can be suppressed.  */
  switch (vec_safe_length (v: constructor_elements))
    {
    case 0:
      /* Initialization with { } counts as zeroinit.  */
      constructor_zeroinit = 1;
      break;
    case 1:
      /* This might be zeroinit as well.  */
      if (integer_zerop ((*constructor_elements)[0].value))
	constructor_zeroinit = 1;
      break;
    default:
      /* If the constructor has more than one element, it can't be { 0 }.  */
      constructor_zeroinit = 0;
      break;
    }

  /* Warn when some structs are initialized with direct aggregation.  */
  if (!implicit && found_missing_braces && warn_missing_braces
      && !constructor_zeroinit)
    {
      gcc_assert (initializer_stack->missing_brace_richloc);
      warning_at (initializer_stack->missing_brace_richloc,
		  OPT_Wmissing_braces,
		  "missing braces around initializer");
    }

  /* Warn when some struct elements are implicitly initialized to zero.  */
  if (warn_missing_field_initializers
      && constructor_type
      && TREE_CODE (constructor_type) == RECORD_TYPE
      && constructor_unfilled_fields)
    {
      /* Do not warn for flexible array members or zero-length arrays.  */
      while (constructor_unfilled_fields
	     && (!DECL_SIZE (constructor_unfilled_fields)
		 || integer_zerop (DECL_SIZE (constructor_unfilled_fields))))
	constructor_unfilled_fields = DECL_CHAIN (constructor_unfilled_fields);

      if (constructor_unfilled_fields
	  /* Do not warn if this level of the initializer uses member
	     designators; it is likely to be deliberate.  */
	  && !constructor_designated
	  /* Do not warn about initializing with { 0 } or with { }.  */
	  && !constructor_zeroinit)
	{
	  if (warning_at (input_location, OPT_Wmissing_field_initializers,
			  "missing initializer for field %qD of %qT",
			  constructor_unfilled_fields,
			  constructor_type))
	    inform (DECL_SOURCE_LOCATION (constructor_unfilled_fields),
		    "%qD declared here", constructor_unfilled_fields);
	}
    }

  /* Pad out the end of the structure.  */
  if (p->replacement_value.value)
    /* If this closes a superfluous brace pair,
       just pass out the element between them.  */
    ret = p->replacement_value;
  else if (constructor_type == NULL_TREE)
    ;
  else if (!RECORD_OR_UNION_TYPE_P (constructor_type)
	   && TREE_CODE (constructor_type) != ARRAY_TYPE
	   && !gnu_vector_type_p (type: constructor_type))
    {
      /* A nonincremental scalar initializer--just return
	 the element, after verifying there is just one.
	 Empty scalar initializers are supported in C23.  */
      if (vec_safe_is_empty (v: constructor_elements))
	{
	  if (constructor_erroneous || constructor_type == error_mark_node)
	    ret.value = error_mark_node;
	  else if (TREE_CODE (constructor_type) == FUNCTION_TYPE)
	    {
	      error_init (loc, gmsgid: "invalid initializer");
	      ret.value = error_mark_node;
	    }
	  else if (TREE_CODE (constructor_type) == POINTER_TYPE)
	    /* Ensure this is a null pointer constant in the case of a
	       'constexpr' object initialized with {}.  */
	    ret.value = build_zero_cst (ptr_type_node);
	  else
	    ret.value = build_zero_cst (constructor_type);
	}
      else if (vec_safe_length (v: constructor_elements) != 1)
	{
	  error_init (loc, gmsgid: "extra elements in scalar initializer");
	  ret.value = (*constructor_elements)[0].value;
	}
      else
	ret.value = (*constructor_elements)[0].value;
    }
  else
    {
      /* Aggregate case: wrap the collected elements in a CONSTRUCTOR
	 and propagate constness/staticness flags onto it.  */
      if (constructor_erroneous)
	ret.value = error_mark_node;
      else
	{
	  ret.value = build_constructor (constructor_type,
					 constructor_elements);
	  if (constructor_constant)
	    TREE_CONSTANT (ret.value) = 1;
	  if (constructor_constant && constructor_simple)
	    TREE_STATIC (ret.value) = 1;
	  if (constructor_nonconst)
	    CONSTRUCTOR_NON_CONST (ret.value) = 1;
	}
    }

  if (ret.value && TREE_CODE (ret.value) != CONSTRUCTOR)
    {
      /* For a non-CONSTRUCTOR result, use original_code to carry the
	 "maybe non-constant" property to the caller.  */
      if (constructor_nonconst)
	ret.original_code = C_MAYBE_CONST_EXPR;
      else if (ret.original_code == C_MAYBE_CONST_EXPR)
	ret.original_code = ERROR_MARK;
    }

  /* Restore the outer level's state saved when this level was pushed.  */
  constructor_type = p->type;
  constructor_fields = p->fields;
  constructor_index = p->index;
  constructor_max_index = p->max_index;
  constructor_unfilled_index = p->unfilled_index;
  constructor_unfilled_fields = p->unfilled_fields;
  constructor_bit_index = p->bit_index;
  constructor_elements = p->elements;
  constructor_constant = p->constant;
  constructor_simple = p->simple;
  constructor_nonconst = p->nonconst;
  constructor_erroneous = p->erroneous;
  constructor_incremental = p->incremental;
  constructor_designated = p->designated;
  designator_depth = p->designator_depth;
  constructor_pending_elts = p->pending_elts;
  constructor_depth = p->depth;
  if (!p->implicit)
    constructor_range_stack = p->range_stack;
  RESTORE_SPELLING_DEPTH (constructor_depth);

  constructor_stack = p->next;
  XDELETE (p);

  /* The outermost level must yield something for DECL_INITIAL; use
     error_mark_node when nothing was produced.  */
  if (ret.value == NULL_TREE && constructor_stack == 0)
    ret.value = error_mark_node;
  return ret;
}
9463 | |
9464 | /* Common handling for both array range and field name designators. |
9465 | ARRAY argument is nonzero for array ranges. Returns false for success. */ |
9466 | |
static bool
set_designator (location_t loc, bool array,
		struct obstack *braced_init_obstack)
{
  tree subtype;
  enum tree_code subcode;

  /* Don't die if an entire brace-pair level is superfluous
     in the containing level, or for an erroneous type.  */
  if (constructor_type == NULL_TREE || constructor_type == error_mark_node)
    return true;

  /* If there were errors in this designator list already, bail out
     silently.  */
  if (designator_erroneous)
    return true;

  /* Likewise for an initializer for a variable-size type.  Those are
     diagnosed in the parser, except for empty initializer braces.  */
  if (COMPLETE_TYPE_P (constructor_type)
      && TREE_CODE (TYPE_SIZE (constructor_type)) != INTEGER_CST)
    return true;

  if (!designator_depth)
    {
      gcc_assert (!constructor_range_stack);

      /* Designator list starts at the level of closest explicit
	 braces.  */
      while (constructor_stack->implicit)
	process_init_element (input_location,
			      pop_init_level (loc, implicit: 1, braced_init_obstack,
					      insert_before: last_init_list_comma),
			      true, braced_init_obstack);
      constructor_designated = 1;
      return false;
    }

  /* designator_depth > 0: this designator continues an outer one
     (e.g. ".a.b" or ".a[0]"), so determine the type we descend into.  */
  switch (TREE_CODE (constructor_type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
      subtype = TREE_TYPE (constructor_fields);
      if (subtype != error_mark_node)
	subtype = TYPE_MAIN_VARIANT (subtype);
      break;
    case ARRAY_TYPE:
      subtype = TYPE_MAIN_VARIANT (TREE_TYPE (constructor_type));
      break;
    default:
      gcc_unreachable ();
    }

  /* Check that the designator kind matches the subobject's type.  */
  subcode = TREE_CODE (subtype);
  if (array && subcode != ARRAY_TYPE)
    {
      error_init (loc, gmsgid: "array index in non-array initializer");
      return true;
    }
  else if (!array && subcode != RECORD_TYPE && subcode != UNION_TYPE)
    {
      error_init (loc, gmsgid: "field name not in record or union initializer");
      return true;
    }

  constructor_designated = 1;
  finish_implicit_inits (loc, braced_init_obstack);
  /* Open an implicit level (implicit == 2 marks a designator level)
     for the subobject the designator selects.  */
  push_init_level (loc, implicit: 2, braced_init_obstack);
  return false;
}
9537 | |
9538 | /* If there are range designators in designator list, push a new designator |
9539 | to constructor_range_stack. RANGE_END is end of such stack range or |
9540 | NULL_TREE if there is no range designator at this level. */ |
9541 | |
9542 | static void |
9543 | push_range_stack (tree range_end, struct obstack * braced_init_obstack) |
9544 | { |
9545 | struct constructor_range_stack *p; |
9546 | |
9547 | p = (struct constructor_range_stack *) |
9548 | obstack_alloc (braced_init_obstack, |
9549 | sizeof (struct constructor_range_stack)); |
9550 | p->prev = constructor_range_stack; |
9551 | p->next = 0; |
9552 | p->fields = constructor_fields; |
9553 | p->range_start = constructor_index; |
9554 | p->index = constructor_index; |
9555 | p->stack = constructor_stack; |
9556 | p->range_end = range_end; |
9557 | if (constructor_range_stack) |
9558 | constructor_range_stack->next = p; |
9559 | constructor_range_stack = p; |
9560 | } |
9561 | |
9562 | /* Within an array initializer, specify the next index to be initialized. |
9563 | FIRST is that index. If LAST is nonzero, then initialize a range |
9564 | of indices, running from FIRST through LAST. */ |
9565 | |
void
set_init_index (location_t loc, tree first, tree last,
		struct obstack *braced_init_obstack)
{
  /* Pop/push implicit levels so the designator applies at the level of
     the closest explicit braces; bail out on prior designator errors.  */
  if (set_designator (loc, array: true, braced_init_obstack))
    return;

  /* Assume failure until all checks below pass.  */
  designator_erroneous = 1;

  if (!INTEGRAL_TYPE_P (TREE_TYPE (first))
      || (last && !INTEGRAL_TYPE_P (TREE_TYPE (last))))
    {
      error_init (loc, gmsgid: "array index in initializer not of integer type");
      return;
    }

  /* Fold indices that are constant expressions but not INTEGER_CSTs;
     pedwarn because ISO C requires integer constant expressions here.  */
  if (TREE_CODE (first) != INTEGER_CST)
    {
      first = c_fully_fold (first, false, NULL);
      if (TREE_CODE (first) == INTEGER_CST)
	pedwarn_init (loc, opt: OPT_Wpedantic,
		      gmsgid: "array index in initializer is not "
		      "an integer constant expression");
    }

  if (last && TREE_CODE (last) != INTEGER_CST)
    {
      last = c_fully_fold (last, false, NULL);
      if (TREE_CODE (last) == INTEGER_CST)
	pedwarn_init (loc, opt: OPT_Wpedantic,
		      gmsgid: "array index in initializer is not "
		      "an integer constant expression");
    }

  /* Diagnose nonconstant, misplaced or out-of-bounds indices.  */
  if (TREE_CODE (first) != INTEGER_CST)
    error_init (loc, gmsgid: "nonconstant array index in initializer");
  else if (last != NULL_TREE && TREE_CODE (last) != INTEGER_CST)
    error_init (loc, gmsgid: "nonconstant array index in initializer");
  else if (TREE_CODE (constructor_type) != ARRAY_TYPE)
    error_init (loc, gmsgid: "array index in non-array initializer");
  else if (tree_int_cst_sgn (first) == -1)
    error_init (loc, gmsgid: "array index in initializer exceeds array bounds");
  else if (constructor_max_index
	   && tree_int_cst_lt (t1: constructor_max_index, t2: first))
    error_init (loc, gmsgid: "array index in initializer exceeds array bounds");
  else
    {
      constant_expression_warning (first);
      if (last)
	constant_expression_warning (last);
      constructor_index = convert (bitsizetype, first);
      /* If the conversion to bitsizetype wrapped, mark the overflow on
	 a copy rather than on a possibly shared node.  */
      if (tree_int_cst_lt (t1: constructor_index, t2: first))
	{
	  constructor_index = copy_node (constructor_index);
	  TREE_OVERFLOW (constructor_index) = 1;
	}

      if (last)
	{
	  /* A degenerate range [i ... i] is treated as a plain index.  */
	  if (tree_int_cst_equal (first, last))
	    last = NULL_TREE;
	  else if (tree_int_cst_lt (t1: last, t2: first))
	    {
	      error_init (loc, gmsgid: "empty index range in initializer");
	      last = NULL_TREE;
	    }
	  else
	    {
	      last = convert (bitsizetype, last);
	      if (constructor_max_index != NULL_TREE
		  && tree_int_cst_lt (t1: constructor_max_index, t2: last))
		{
		  error_init (loc, gmsgid: "array index range in initializer exceeds "
			      "array bounds");
		  last = NULL_TREE;
		}
	    }
	}

      designator_depth++;
      designator_erroneous = 0;
      /* Ranges (and any designator nested inside a range) need a
	 constructor_range_stack entry so they can be replayed.  */
      if (constructor_range_stack || last)
	push_range_stack (range_end: last, braced_init_obstack);
    }
}
9651 | |
9652 | /* Within a struct initializer, specify the next field to be initialized. */ |
9653 | |
void
set_init_label (location_t loc, tree fieldname, location_t fieldname_loc,
		struct obstack *braced_init_obstack)
{
  tree field;

  /* Pop/push implicit levels so the designator applies at the level of
     the closest explicit braces; bail out on prior designator errors.  */
  if (set_designator (loc, array: false, braced_init_obstack))
    return;

  /* Assume failure until the field is found.  */
  designator_erroneous = 1;

  if (!RECORD_OR_UNION_TYPE_P (constructor_type))
    {
      error_init (loc, gmsgid: "field name not in record or union initializer");
      return;
    }

  field = lookup_field (type: constructor_type, component: fieldname);

  if (field == NULL_TREE)
    {
      /* Unknown member: offer a spelling-correction fix-it if a
	 plausible candidate exists.  */
      tree guessed_id = lookup_field_fuzzy (type: constructor_type, component: fieldname);
      if (guessed_id)
	{
	  gcc_rich_location rich_loc (fieldname_loc);
	  rich_loc.add_fixit_misspelled_id (misspelled_token_loc: fieldname_loc, hint_id: guessed_id);
	  error_at (&rich_loc,
		    "%qT has no member named %qE; did you mean %qE?",
		    constructor_type, fieldname, guessed_id);
	}
      else
	error_at (fieldname_loc, "%qT has no member named %qE",
		  constructor_type, fieldname);
    }
  else
    /* lookup_field returns a chain when the member is reached through
       anonymous structs/unions; descend one designator level per link.  */
    do
      {
	constructor_fields = TREE_VALUE (field);
	designator_depth++;
	designator_erroneous = 0;
	if (constructor_range_stack)
	  push_range_stack (NULL_TREE, braced_init_obstack);
	field = TREE_CHAIN (field);
	if (field)
	  {
	    if (set_designator (loc, array: false, braced_init_obstack))
	      return;
	  }
      }
    while (field != NULL_TREE);
}
9705 | |
9706 | /* Add a new initializer to the tree of pending initializers. PURPOSE |
9707 | identifies the initializer, either array index or field in a structure. |
9708 | VALUE is the value of that index or field. If ORIGTYPE is not |
9709 | NULL_TREE, it is the original type of VALUE. |
9710 | |
9711 | IMPLICIT is true if value comes from pop_init_level (1), |
9712 | the new initializer has been merged with the existing one |
9713 | and thus no warnings should be emitted about overriding an |
9714 | existing initializer. */ |
9715 | |
static void
add_pending_init (location_t loc, tree purpose, tree value, tree origtype,
		  bool implicit, struct obstack *braced_init_obstack)
{
  /* The pending elements form an AVL tree keyed on array index (for
     arrays) or bit position (for records).  Q walks down to the
     insertion slot; P trails behind as the prospective parent.  */
  struct init_node *p, **q, *r;

  q = &constructor_pending_elts;
  p = 0;

  if (TREE_CODE (constructor_type) == ARRAY_TYPE)
    {
      /* Search by array index.  */
      while (*q != 0)
	{
	  p = *q;
	  if (tree_int_cst_lt (t1: purpose, t2: p->purpose))
	    q = &p->left;
	  else if (tree_int_cst_lt (t1: p->purpose, t2: purpose))
	    q = &p->right;
	  else
	    {
	      /* Same index already initialized: overwrite in place.
		 IMPLICIT means the values were already merged, so
		 no override warnings then.  */
	      if (!implicit)
		{
		  if (TREE_SIDE_EFFECTS (p->value))
		    warning_init (loc, opt: OPT_Woverride_init_side_effects,
				  gmsgid: "initialized field with side-effects "
				  "overwritten");
		  else if (warn_override_init)
		    warning_init (loc, opt: OPT_Woverride_init,
				  gmsgid: "initialized field overwritten");
		}
	      p->value = value;
	      p->origtype = origtype;
	      return;
	    }
	}
    }
  else
    {
      /* Record/union: search by the field's bit position; distinct
	 fields with equal bit positions go to the right subtree.  */
      tree bitpos;

      bitpos = bit_position (purpose);
      while (*q != NULL)
	{
	  p = *q;
	  if (tree_int_cst_lt (t1: bitpos, t2: bit_position (p->purpose)))
	    q = &p->left;
	  else if (p->purpose != purpose)
	    q = &p->right;
	  else
	    {
	      /* Same field already initialized: overwrite in place.  */
	      if (!implicit)
		{
		  if (TREE_SIDE_EFFECTS (p->value))
		    warning_init (loc, opt: OPT_Woverride_init_side_effects,
				  gmsgid: "initialized field with side-effects "
				  "overwritten");
		  else if (warn_override_init)
		    warning_init (loc, opt: OPT_Woverride_init,
				  gmsgid: "initialized field overwritten");
		}
	      p->value = value;
	      p->origtype = origtype;
	      return;
	    }
	}
    }

  /* Not found: allocate a fresh leaf and hook it into the slot Q.  */
  r = (struct init_node *) obstack_alloc (braced_init_obstack,
					  sizeof (struct init_node));
  r->purpose = purpose;
  r->value = value;
  r->origtype = origtype;

  *q = r;
  r->parent = p;
  r->left = 0;
  r->right = 0;
  r->balance = 0;

  /* Walk back up from the new leaf, updating balance factors and
     performing at most one (single or double) rotation to restore the
     AVL invariant.  balance < 0 means left-heavy, > 0 right-heavy.  */
  while (p)
    {
      struct init_node *s;

      if (r == p->left)
	{
	  if (p->balance == 0)
	    p->balance = -1;
	  else if (p->balance < 0)
	    {
	      if (r->balance < 0)
		{
		  /* L rotation.  */
		  p->left = r->right;
		  if (p->left)
		    p->left->parent = p;
		  r->right = p;

		  p->balance = 0;
		  r->balance = 0;

		  s = p->parent;
		  p->parent = r;
		  r->parent = s;
		  if (s)
		    {
		      if (s->left == p)
			s->left = r;
		      else
			s->right = r;
		    }
		  else
		    constructor_pending_elts = r;
		}
	      else
		{
		  /* LR rotation.  */
		  struct init_node *t = r->right;

		  r->right = t->left;
		  if (r->right)
		    r->right->parent = r;
		  t->left = r;

		  p->left = t->right;
		  if (p->left)
		    p->left->parent = p;
		  t->right = p;

		  p->balance = t->balance < 0;
		  r->balance = -(t->balance > 0);
		  t->balance = 0;

		  s = p->parent;
		  p->parent = t;
		  r->parent = t;
		  t->parent = s;
		  if (s)
		    {
		      if (s->left == p)
			s->left = t;
		      else
			s->right = t;
		    }
		  else
		    constructor_pending_elts = t;
		}
	      /* A rotation restores the subtree's height; done.  */
	      break;
	    }
	  else
	    {
	      /* p->balance == +1; growth of left side balances the node.  */
	      p->balance = 0;
	      break;
	    }
	}
      else /* r == p->right */
	{
	  if (p->balance == 0)
	    /* Growth propagation from right side.  */
	    p->balance++;
	  else if (p->balance > 0)
	    {
	      if (r->balance > 0)
		{
		  /* R rotation.  */
		  p->right = r->left;
		  if (p->right)
		    p->right->parent = p;
		  r->left = p;

		  p->balance = 0;
		  r->balance = 0;

		  s = p->parent;
		  p->parent = r;
		  r->parent = s;
		  if (s)
		    {
		      if (s->left == p)
			s->left = r;
		      else
			s->right = r;
		    }
		  else
		    constructor_pending_elts = r;
		}
	      else /* r->balance == -1 */
		{
		  /* RL rotation */
		  struct init_node *t = r->left;

		  r->left = t->right;
		  if (r->left)
		    r->left->parent = r;
		  t->right = r;

		  p->right = t->left;
		  if (p->right)
		    p->right->parent = p;
		  t->left = p;

		  r->balance = (t->balance < 0);
		  p->balance = -(t->balance > 0);
		  t->balance = 0;

		  s = p->parent;
		  p->parent = t;
		  r->parent = t;
		  t->parent = s;
		  if (s)
		    {
		      if (s->left == p)
			s->left = t;
		      else
			s->right = t;
		    }
		  else
		    constructor_pending_elts = t;
		}
	      /* A rotation restores the subtree's height; done.  */
	      break;
	    }
	  else
	    {
	      /* p->balance == -1; growth of right side balances the node.  */
	      p->balance = 0;
	      break;
	    }
	}

      /* Height grew at P; continue rebalancing at its parent.  */
      r = p;
      p = p->parent;
    }
}
9949 | |
9950 | /* Build AVL tree from a sorted chain. */ |
9951 | |
static void
set_nonincremental_init (struct obstack * braced_init_obstack)
{
  unsigned HOST_WIDE_INT ix;
  tree index, value;

  /* Only arrays and records keep pending elements in an AVL tree.  */
  if (TREE_CODE (constructor_type) != RECORD_TYPE
      && TREE_CODE (constructor_type) != ARRAY_TYPE)
    return;

  /* Move every already-collected element into the pending-elements
     tree; IMPLICIT is true so no override warnings are issued.  */
  FOR_EACH_CONSTRUCTOR_ELT (constructor_elements, ix, index, value)
    add_pending_init (loc: input_location, purpose: index, value, NULL_TREE, implicit: true,
		      braced_init_obstack);
  constructor_elements = NULL;
  if (TREE_CODE (constructor_type) == RECORD_TYPE)
    {
      /* Reset the "next unfilled" position to the start of the type.  */
      constructor_unfilled_fields = TYPE_FIELDS (constructor_type);
      /* Skip any nameless bit fields at the beginning.  */
      while (constructor_unfilled_fields != NULL_TREE
	     && DECL_UNNAMED_BIT_FIELD (constructor_unfilled_fields))
	constructor_unfilled_fields = TREE_CHAIN (constructor_unfilled_fields);

    }
  else if (TREE_CODE (constructor_type) == ARRAY_TYPE)
    {
      if (TYPE_DOMAIN (constructor_type))
	constructor_unfilled_index
	  = convert (bitsizetype,
		     TYPE_MIN_VALUE (TYPE_DOMAIN (constructor_type)));
      else
	constructor_unfilled_index = bitsize_zero_node;
    }
  constructor_incremental = 0;
}
9986 | |
9987 | /* Build AVL tree from a string constant. */ |
9988 | |
static void
set_nonincremental_init_from_string (tree str,
				     struct obstack * braced_init_obstack)
{
  tree value, purpose, type;
  /* Two host words hold one (possibly wide) character's bits.  */
  HOST_WIDE_INT val[2];
  const char *p, *end;
  int byte, wchar_bytes, charwidth, bitpos;

  gcc_assert (TREE_CODE (constructor_type) == ARRAY_TYPE);

  /* Bytes per element of the string's character type (1 for plain
     char strings, more for wide strings).  */
  wchar_bytes = TYPE_PRECISION (TREE_TYPE (TREE_TYPE (str))) / BITS_PER_UNIT;
  charwidth = TYPE_PRECISION (char_type_node);
  gcc_assert ((size_t) wchar_bytes * charwidth
	      <= ARRAY_SIZE (val) * HOST_BITS_PER_WIDE_INT);
  type = TREE_TYPE (constructor_type);
  p = TREE_STRING_POINTER (str);
  end = p + TREE_STRING_LENGTH (str);

  /* Emit one pending init per character, stopping at the array bound
     if the target array is shorter than the string.  */
  for (purpose = bitsize_zero_node;
       p < end
       && !(constructor_max_index
	    && tree_int_cst_lt (t1: constructor_max_index, t2: purpose));
       purpose = size_binop (PLUS_EXPR, purpose, bitsize_one_node))
    {
      if (wchar_bytes == 1)
	{
	  val[0] = (unsigned char) *p++;
	  val[1] = 0;
	}
      else
	{
	  /* Assemble a wide character from its target-order bytes.  */
	  val[1] = 0;
	  val[0] = 0;
	  for (byte = 0; byte < wchar_bytes; byte++)
	    {
	      if (BYTES_BIG_ENDIAN)
		bitpos = (wchar_bytes - byte - 1) * charwidth;
	      else
		bitpos = byte * charwidth;
	      val[bitpos / HOST_BITS_PER_WIDE_INT]
		|= ((unsigned HOST_WIDE_INT) ((unsigned char) *p++))
		   << (bitpos % HOST_BITS_PER_WIDE_INT);
	    }
	}

      if (!TYPE_UNSIGNED (type))
	{
	  /* Sign-extend the value from the character's width to the
	     full two-word representation.  */
	  bitpos = ((wchar_bytes - 1) * charwidth) + HOST_BITS_PER_CHAR;
	  if (bitpos < HOST_BITS_PER_WIDE_INT)
	    {
	      if (val[0] & (HOST_WIDE_INT_1 << (bitpos - 1)))
		{
		  val[0] |= HOST_WIDE_INT_M1U << bitpos;
		  val[1] = -1;
		}
	    }
	  else if (bitpos == HOST_BITS_PER_WIDE_INT)
	    {
	      if (val[0] < 0)
		val[1] = -1;
	    }
	  else if (val[1] & (HOST_WIDE_INT_1
			     << (bitpos - 1 - HOST_BITS_PER_WIDE_INT)))
	    val[1] |= HOST_WIDE_INT_M1U << (bitpos - HOST_BITS_PER_WIDE_INT);
	}

      value = wide_int_to_tree (type,
				cst: wide_int::from_array (val, len: 2,
						       HOST_BITS_PER_WIDE_INT * 2));
      add_pending_init (loc: input_location, purpose, value, NULL_TREE, implicit: true,
			braced_init_obstack);
    }

  constructor_incremental = 0;
}
10065 | |
10066 | /* Return value of FIELD in pending initializer or NULL_TREE if the field was |
10067 | not initialized yet. */ |
10068 | |
10069 | static tree |
10070 | find_init_member (tree field, struct obstack * braced_init_obstack) |
10071 | { |
10072 | struct init_node *p; |
10073 | |
10074 | if (TREE_CODE (constructor_type) == ARRAY_TYPE) |
10075 | { |
10076 | if (constructor_incremental |
10077 | && tree_int_cst_lt (t1: field, t2: constructor_unfilled_index)) |
10078 | set_nonincremental_init (braced_init_obstack); |
10079 | |
10080 | p = constructor_pending_elts; |
10081 | while (p) |
10082 | { |
10083 | if (tree_int_cst_lt (t1: field, t2: p->purpose)) |
10084 | p = p->left; |
10085 | else if (tree_int_cst_lt (t1: p->purpose, t2: field)) |
10086 | p = p->right; |
10087 | else |
10088 | return p->value; |
10089 | } |
10090 | } |
10091 | else if (TREE_CODE (constructor_type) == RECORD_TYPE) |
10092 | { |
10093 | tree bitpos = bit_position (field); |
10094 | |
10095 | if (constructor_incremental |
10096 | && (!constructor_unfilled_fields |
10097 | || tree_int_cst_lt (t1: bitpos, |
10098 | t2: bit_position (constructor_unfilled_fields)))) |
10099 | set_nonincremental_init (braced_init_obstack); |
10100 | |
10101 | p = constructor_pending_elts; |
10102 | while (p) |
10103 | { |
10104 | if (field == p->purpose) |
10105 | return p->value; |
10106 | else if (tree_int_cst_lt (t1: bitpos, t2: bit_position (p->purpose))) |
10107 | p = p->left; |
10108 | else |
10109 | p = p->right; |
10110 | } |
10111 | } |
10112 | else if (TREE_CODE (constructor_type) == UNION_TYPE) |
10113 | { |
10114 | if (!vec_safe_is_empty (v: constructor_elements) |
10115 | && (constructor_elements->last ().index == field)) |
10116 | return constructor_elements->last ().value; |
10117 | } |
10118 | return NULL_TREE; |
10119 | } |
10120 | |
10121 | /* "Output" the next constructor element. |
10122 | At top level, really output it to assembler code now. |
10123 | Otherwise, collect it in a list from which we will make a CONSTRUCTOR. |
10124 | If ORIGTYPE is not NULL_TREE, it is the original type of VALUE. |
10125 | TYPE is the data type that the containing data type wants here. |
10126 | FIELD is the field (a FIELD_DECL) or the index that this element fills. |
10127 | If VALUE is a string constant, STRICT_STRING is true if it is |
10128 | unparenthesized or we should not warn here for it being parenthesized. |
10129 | For other types of VALUE, STRICT_STRING is not used. |
10130 | |
10131 | PENDING if true means output pending elements that belong |
10132 | right after this element. (PENDING is normally true; |
10133 | it is false while outputting pending elements, to avoid recursion.) |
10134 | |
10135 | IMPLICIT is true if value comes from pop_init_level (1), |
10136 | the new initializer has been merged with the existing one |
10137 | and thus no warnings should be emitted about overriding an |
10138 | existing initializer. */ |
10139 | |
10140 | static void |
10141 | output_init_element (location_t loc, tree value, tree origtype, |
10142 | bool strict_string, tree type, tree field, bool pending, |
10143 | bool implicit, struct obstack * braced_init_obstack) |
10144 | { |
10145 | tree semantic_type = NULL_TREE; |
10146 | bool maybe_const = true; |
10147 | bool npc, int_const_expr, arith_const_expr; |
10148 | |
10149 | if (type == error_mark_node || value == error_mark_node) |
10150 | { |
10151 | constructor_erroneous = 1; |
10152 | return; |
10153 | } |
10154 | if (TREE_CODE (TREE_TYPE (value)) == ARRAY_TYPE |
10155 | && (TREE_CODE (value) == STRING_CST |
10156 | || TREE_CODE (value) == COMPOUND_LITERAL_EXPR) |
10157 | && !(TREE_CODE (value) == STRING_CST |
10158 | && TREE_CODE (type) == ARRAY_TYPE |
10159 | && INTEGRAL_TYPE_P (TREE_TYPE (type))) |
10160 | && !comptypes (TYPE_MAIN_VARIANT (TREE_TYPE (value)), |
10161 | TYPE_MAIN_VARIANT (type))) |
10162 | value = array_to_pointer_conversion (loc: input_location, exp: value); |
10163 | |
10164 | if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR |
10165 | && require_constant_value && pending) |
10166 | { |
10167 | /* As an extension, allow initializing objects with static storage |
10168 | duration with compound literals (which are then treated just as |
10169 | the brace enclosed list they contain). */ |
10170 | if (flag_isoc99) |
10171 | pedwarn_init (loc, opt: OPT_Wpedantic, gmsgid: "initializer element is not " |
10172 | "constant" ); |
10173 | tree decl = COMPOUND_LITERAL_EXPR_DECL (value); |
10174 | value = DECL_INITIAL (decl); |
10175 | } |
10176 | |
10177 | npc = null_pointer_constant_p (expr: value); |
10178 | int_const_expr = (TREE_CODE (value) == INTEGER_CST |
10179 | && !TREE_OVERFLOW (value) |
10180 | && INTEGRAL_TYPE_P (TREE_TYPE (value))); |
10181 | /* Not fully determined before folding. */ |
10182 | arith_const_expr = true; |
10183 | if (TREE_CODE (value) == EXCESS_PRECISION_EXPR) |
10184 | { |
10185 | semantic_type = TREE_TYPE (value); |
10186 | value = TREE_OPERAND (value, 0); |
10187 | } |
10188 | value = c_fully_fold (value, require_constant_value, &maybe_const); |
10189 | /* TODO: this may not detect all cases of expressions folding to |
10190 | constants that are not arithmetic constant expressions. */ |
10191 | if (!maybe_const) |
10192 | arith_const_expr = false; |
10193 | else if (!INTEGRAL_TYPE_P (TREE_TYPE (value)) |
10194 | && TREE_CODE (TREE_TYPE (value)) != REAL_TYPE |
10195 | && TREE_CODE (TREE_TYPE (value)) != COMPLEX_TYPE) |
10196 | arith_const_expr = false; |
10197 | else if (TREE_CODE (value) != INTEGER_CST |
10198 | && TREE_CODE (value) != REAL_CST |
10199 | && TREE_CODE (value) != COMPLEX_CST) |
10200 | arith_const_expr = false; |
10201 | else if (TREE_OVERFLOW (value)) |
10202 | arith_const_expr = false; |
10203 | |
10204 | if (value == error_mark_node) |
10205 | constructor_erroneous = 1; |
10206 | else if (!TREE_CONSTANT (value)) |
10207 | constructor_constant = 0; |
10208 | else if (!initializer_constant_valid_p (value, |
10209 | TREE_TYPE (value), |
10210 | AGGREGATE_TYPE_P (constructor_type) |
10211 | && TYPE_REVERSE_STORAGE_ORDER |
10212 | (constructor_type)) |
10213 | || (RECORD_OR_UNION_TYPE_P (constructor_type) |
10214 | && DECL_C_BIT_FIELD (field) |
10215 | && TREE_CODE (value) != INTEGER_CST)) |
10216 | constructor_simple = 0; |
10217 | if (!maybe_const) |
10218 | constructor_nonconst = 1; |
10219 | |
10220 | /* Digest the initializer and issue any errors about incompatible |
10221 | types before issuing errors about non-constant initializers. */ |
10222 | tree new_value = value; |
10223 | if (semantic_type) |
10224 | new_value = build1 (EXCESS_PRECISION_EXPR, semantic_type, value); |
10225 | /* In the case of braces around a scalar initializer, the result of |
10226 | this initializer processing goes through digest_init again at the |
10227 | outer level. In the case of a constexpr initializer for a |
10228 | pointer, avoid converting a null pointer constant to something |
10229 | that is not a null pointer constant to avoid a spurious error |
10230 | from that second processing. */ |
10231 | if (!require_constexpr_value |
10232 | || !npc |
10233 | || TREE_CODE (constructor_type) != POINTER_TYPE) |
10234 | new_value = digest_init (init_loc: loc, type, init: new_value, origtype, null_pointer_constant: npc, |
10235 | int_const_expr, arith_const_expr, strict_string, |
10236 | require_constant: require_constant_value, require_constexpr: require_constexpr_value); |
10237 | if (new_value == error_mark_node) |
10238 | { |
10239 | constructor_erroneous = 1; |
10240 | return; |
10241 | } |
10242 | if (require_constant_value || require_constant_elements) |
10243 | constant_expression_warning (new_value); |
10244 | |
10245 | /* Proceed to check the constness of the original initializer. */ |
10246 | if (!initializer_constant_valid_p (value, TREE_TYPE (value))) |
10247 | { |
10248 | if (require_constant_value) |
10249 | { |
10250 | error_init (loc, gmsgid: "initializer element is not constant" ); |
10251 | value = error_mark_node; |
10252 | } |
10253 | else if (require_constant_elements) |
10254 | pedwarn (loc, OPT_Wpedantic, |
10255 | "initializer element is not computable at load time" ); |
10256 | } |
10257 | else if (!maybe_const |
10258 | && (require_constant_value || require_constant_elements)) |
10259 | pedwarn_init (loc, opt: OPT_Wpedantic, |
10260 | gmsgid: "initializer element is not a constant expression" ); |
10261 | /* digest_init has already carried out the additional checks |
10262 | required for 'constexpr' initializers (using the information |
10263 | passed to it about whether the original initializer was certain |
10264 | kinds of constant expression), so that check does not need to be |
10265 | repeated here. */ |
10266 | |
10267 | /* Issue -Wc++-compat warnings about initializing a bitfield with |
10268 | enum type. */ |
10269 | if (warn_cxx_compat |
10270 | && field != NULL_TREE |
10271 | && TREE_CODE (field) == FIELD_DECL |
10272 | && DECL_BIT_FIELD_TYPE (field) != NULL_TREE |
10273 | && (TYPE_MAIN_VARIANT (DECL_BIT_FIELD_TYPE (field)) |
10274 | != TYPE_MAIN_VARIANT (type)) |
10275 | && TREE_CODE (DECL_BIT_FIELD_TYPE (field)) == ENUMERAL_TYPE) |
10276 | { |
10277 | tree checktype = origtype != NULL_TREE ? origtype : TREE_TYPE (value); |
10278 | if (checktype != error_mark_node |
10279 | && (TYPE_MAIN_VARIANT (checktype) |
10280 | != TYPE_MAIN_VARIANT (DECL_BIT_FIELD_TYPE (field)))) |
10281 | warning_init (loc, opt: OPT_Wc___compat, |
10282 | gmsgid: "enum conversion in initialization is invalid in C++" ); |
10283 | } |
10284 | |
10285 | /* If this field is empty and does not have side effects (and is not at |
10286 | the end of structure), don't do anything other than checking the |
10287 | initializer. */ |
10288 | if (field |
10289 | && (TREE_TYPE (field) == error_mark_node |
10290 | || (COMPLETE_TYPE_P (TREE_TYPE (field)) |
10291 | && integer_zerop (TYPE_SIZE (TREE_TYPE (field))) |
10292 | && !TREE_SIDE_EFFECTS (new_value) |
10293 | && (TREE_CODE (constructor_type) == ARRAY_TYPE |
10294 | || DECL_CHAIN (field))))) |
10295 | return; |
10296 | |
10297 | /* Finally, set VALUE to the initializer value digested above. */ |
10298 | value = new_value; |
10299 | |
10300 | /* If this element doesn't come next in sequence, |
10301 | put it on constructor_pending_elts. */ |
10302 | if (TREE_CODE (constructor_type) == ARRAY_TYPE |
10303 | && (!constructor_incremental |
10304 | || !tree_int_cst_equal (field, constructor_unfilled_index))) |
10305 | { |
10306 | if (constructor_incremental |
10307 | && tree_int_cst_lt (t1: field, t2: constructor_unfilled_index)) |
10308 | set_nonincremental_init (braced_init_obstack); |
10309 | |
10310 | add_pending_init (loc, purpose: field, value, origtype, implicit, |
10311 | braced_init_obstack); |
10312 | return; |
10313 | } |
10314 | else if (TREE_CODE (constructor_type) == RECORD_TYPE |
10315 | && (!constructor_incremental |
10316 | || field != constructor_unfilled_fields)) |
10317 | { |
10318 | /* We do this for records but not for unions. In a union, |
10319 | no matter which field is specified, it can be initialized |
10320 | right away since it starts at the beginning of the union. */ |
10321 | if (constructor_incremental) |
10322 | { |
10323 | if (!constructor_unfilled_fields) |
10324 | set_nonincremental_init (braced_init_obstack); |
10325 | else |
10326 | { |
10327 | tree bitpos, unfillpos; |
10328 | |
10329 | bitpos = bit_position (field); |
10330 | unfillpos = bit_position (constructor_unfilled_fields); |
10331 | |
10332 | if (tree_int_cst_lt (t1: bitpos, t2: unfillpos)) |
10333 | set_nonincremental_init (braced_init_obstack); |
10334 | } |
10335 | } |
10336 | |
10337 | add_pending_init (loc, purpose: field, value, origtype, implicit, |
10338 | braced_init_obstack); |
10339 | return; |
10340 | } |
10341 | else if (TREE_CODE (constructor_type) == UNION_TYPE |
10342 | && !vec_safe_is_empty (v: constructor_elements)) |
10343 | { |
10344 | if (!implicit) |
10345 | { |
10346 | if (TREE_SIDE_EFFECTS (constructor_elements->last ().value)) |
10347 | warning_init (loc, opt: OPT_Woverride_init_side_effects, |
10348 | gmsgid: "initialized field with side-effects overwritten" ); |
10349 | else if (warn_override_init) |
10350 | warning_init (loc, opt: OPT_Woverride_init, |
10351 | gmsgid: "initialized field overwritten" ); |
10352 | } |
10353 | |
10354 | /* We can have just one union field set. */ |
10355 | constructor_elements = NULL; |
10356 | } |
10357 | |
10358 | /* Otherwise, output this element either to |
10359 | constructor_elements or to the assembler file. */ |
10360 | |
10361 | constructor_elt celt = {.index: field, .value: value}; |
10362 | vec_safe_push (v&: constructor_elements, obj: celt); |
10363 | |
10364 | /* Advance the variable that indicates sequential elements output. */ |
10365 | if (TREE_CODE (constructor_type) == ARRAY_TYPE) |
10366 | constructor_unfilled_index |
10367 | = size_binop_loc (input_location, PLUS_EXPR, constructor_unfilled_index, |
10368 | bitsize_one_node); |
10369 | else if (TREE_CODE (constructor_type) == RECORD_TYPE) |
10370 | { |
10371 | constructor_unfilled_fields |
10372 | = DECL_CHAIN (constructor_unfilled_fields); |
10373 | |
10374 | /* Skip any nameless bit fields. */ |
10375 | while (constructor_unfilled_fields != NULL_TREE |
10376 | && DECL_UNNAMED_BIT_FIELD (constructor_unfilled_fields)) |
10377 | constructor_unfilled_fields = |
10378 | DECL_CHAIN (constructor_unfilled_fields); |
10379 | } |
10380 | else if (TREE_CODE (constructor_type) == UNION_TYPE) |
10381 | constructor_unfilled_fields = NULL_TREE; |
10382 | |
10383 | /* Now output any pending elements which have become next. */ |
10384 | if (pending) |
10385 | output_pending_init_elements (0, braced_init_obstack); |
10386 | } |
10387 | |
10388 | /* For two FIELD_DECLs in the same chain, return -1 if field1 |
10389 | comes before field2, 1 if field1 comes after field2 and |
10390 | 0 if field1 == field2. */ |
10391 | |
10392 | static int |
10393 | init_field_decl_cmp (tree field1, tree field2) |
10394 | { |
10395 | if (field1 == field2) |
10396 | return 0; |
10397 | |
10398 | tree bitpos1 = bit_position (field1); |
10399 | tree bitpos2 = bit_position (field2); |
10400 | if (tree_int_cst_equal (bitpos1, bitpos2)) |
10401 | { |
10402 | /* If one of the fields has non-zero bitsize, then that |
10403 | field must be the last one in a sequence of zero |
10404 | sized fields, fields after it will have bigger |
10405 | bit_position. */ |
10406 | if (TREE_TYPE (field1) != error_mark_node |
10407 | && COMPLETE_TYPE_P (TREE_TYPE (field1)) |
10408 | && integer_nonzerop (TREE_TYPE (field1))) |
10409 | return 1; |
10410 | if (TREE_TYPE (field2) != error_mark_node |
10411 | && COMPLETE_TYPE_P (TREE_TYPE (field2)) |
10412 | && integer_nonzerop (TREE_TYPE (field2))) |
10413 | return -1; |
10414 | /* Otherwise, fallback to DECL_CHAIN walk to find out |
10415 | which field comes earlier. Walk chains of both |
10416 | fields, so that if field1 and field2 are close to each |
10417 | other in either order, it is found soon even for large |
10418 | sequences of zero sized fields. */ |
10419 | tree f1 = field1, f2 = field2; |
10420 | while (1) |
10421 | { |
10422 | f1 = DECL_CHAIN (f1); |
10423 | f2 = DECL_CHAIN (f2); |
10424 | if (f1 == NULL_TREE) |
10425 | { |
10426 | gcc_assert (f2); |
10427 | return 1; |
10428 | } |
10429 | if (f2 == NULL_TREE) |
10430 | return -1; |
10431 | if (f1 == field2) |
10432 | return -1; |
10433 | if (f2 == field1) |
10434 | return 1; |
10435 | if (!tree_int_cst_equal (bit_position (f1), bitpos1)) |
10436 | return 1; |
10437 | if (!tree_int_cst_equal (bit_position (f2), bitpos1)) |
10438 | return -1; |
10439 | } |
10440 | } |
10441 | else if (tree_int_cst_lt (t1: bitpos1, t2: bitpos2)) |
10442 | return -1; |
10443 | else |
10444 | return 1; |
10445 | } |
10446 | |
10447 | /* Output any pending elements which have become next. |
10448 | As we output elements, constructor_unfilled_{fields,index} |
10449 | advances, which may cause other elements to become next; |
10450 | if so, they too are output. |
10451 | |
10452 | If ALL is 0, we return when there are |
10453 | no more pending elements to output now. |
10454 | |
10455 | If ALL is 1, we output space as necessary so that |
10456 | we can output all the pending elements. */ |
static void
output_pending_init_elements (int all, struct obstack * braced_init_obstack)
{
  /* constructor_pending_elts is a binary search tree keyed on
     elt->purpose (array index or FIELD_DECL position).  */
  struct init_node *elt = constructor_pending_elts;
  tree next;

 retry:

  /* Look through the whole pending tree.
     If we find an element that should be output now,
     output it.  Otherwise, set NEXT to the element
     that comes first among those still pending.  */

  next = NULL_TREE;
  while (elt)
    {
      if (TREE_CODE (constructor_type) == ARRAY_TYPE)
	{
	  /* Element exactly at the unfilled index: emit it now.  */
	  if (tree_int_cst_equal (elt->purpose,
				  constructor_unfilled_index))
	    output_init_element (loc: input_location, value: elt->value, origtype: elt->origtype,
				 strict_string: true, TREE_TYPE (constructor_type),
				 field: constructor_unfilled_index, pending: false, implicit: false,
				 braced_init_obstack);
	  else if (tree_int_cst_lt (t1: constructor_unfilled_index,
				    t2: elt->purpose))
	    {
	      /* Advance to the next smaller node.  */
	      if (elt->left)
		elt = elt->left;
	      else
		{
		  /* We have reached the smallest node bigger than the
		     current unfilled index.  Fill the space first.  */
		  next = elt->purpose;
		  break;
		}
	    }
	  else
	    {
	      /* Advance to the next bigger node.  */
	      if (elt->right)
		elt = elt->right;
	      else
		{
		  /* We have reached the biggest node in a subtree.  Find
		     the parent of it, which is the next bigger node.  */
		  while (elt->parent && elt->parent->right == elt)
		    elt = elt->parent;
		  elt = elt->parent;
		  if (elt && tree_int_cst_lt (t1: constructor_unfilled_index,
					      t2: elt->purpose))
		    {
		      next = elt->purpose;
		      break;
		    }
		}
	    }
	}
      else if (RECORD_OR_UNION_TYPE_P (constructor_type))
	{
	  /* If the current record is complete we are done.  */
	  if (constructor_unfilled_fields == NULL_TREE)
	    break;

	  /* Same in-order walk as the array case, but ordering
	     fields via init_field_decl_cmp instead of index values.  */
	  int cmp = init_field_decl_cmp (field1: constructor_unfilled_fields,
					 field2: elt->purpose);
	  if (cmp == 0)
	    output_init_element (loc: input_location, value: elt->value, origtype: elt->origtype,
				 strict_string: true, TREE_TYPE (elt->purpose),
				 field: elt->purpose, pending: false, implicit: false,
				 braced_init_obstack);
	  else if (cmp < 0)
	    {
	      /* Advance to the next smaller node.  */
	      if (elt->left)
		elt = elt->left;
	      else
		{
		  /* We have reached the smallest node bigger than the
		     current unfilled field.  Fill the space first.  */
		  next = elt->purpose;
		  break;
		}
	    }
	  else
	    {
	      /* Advance to the next bigger node.  */
	      if (elt->right)
		elt = elt->right;
	      else
		{
		  /* We have reached the biggest node in a subtree.  Find
		     the parent of it, which is the next bigger node.  */
		  while (elt->parent && elt->parent->right == elt)
		    elt = elt->parent;
		  elt = elt->parent;
		  if (elt
		      && init_field_decl_cmp (field1: constructor_unfilled_fields,
					      field2: elt->purpose) < 0)
		    {
		      next = elt->purpose;
		      break;
		    }
		}
	    }
	}

      /* Ordinarily return, but not if we want to output all
	 and there are elements left.  */
      if (!(all && next != NULL_TREE))
	return;

      /* If it's not incremental, just skip over the gap, so that after
	 jumping to retry we will output the next successive element.  */
      if (RECORD_OR_UNION_TYPE_P (constructor_type))
	constructor_unfilled_fields = next;
      else if (TREE_CODE (constructor_type) == ARRAY_TYPE)
	constructor_unfilled_index = next;

      /* ELT now points to the node in the pending tree with the next
	 initializer to output.  */
      goto retry;
}
10582 | |
10583 | /* Expression VALUE coincides with the start of type TYPE in a braced |
10584 | initializer. Return true if we should treat VALUE as initializing |
10585 | the first element of TYPE, false if we should treat it as initializing |
10586 | TYPE as a whole. |
10587 | |
10588 | If the initializer is clearly invalid, the question becomes: |
10589 | which choice gives the best error message? */ |
10590 | |
10591 | static bool |
10592 | initialize_elementwise_p (tree type, tree value) |
10593 | { |
10594 | if (type == error_mark_node || value == error_mark_node) |
10595 | return false; |
10596 | |
10597 | gcc_checking_assert (TYPE_MAIN_VARIANT (type) == type); |
10598 | |
10599 | tree value_type = TREE_TYPE (value); |
10600 | if (value_type == error_mark_node) |
10601 | return false; |
10602 | |
10603 | /* GNU vectors can be initialized elementwise. However, treat any |
10604 | kind of vector value as initializing the vector type as a whole, |
10605 | regardless of whether the value is a GNU vector. Such initializers |
10606 | are valid if and only if they would have been valid in a non-braced |
10607 | initializer like: |
10608 | |
10609 | TYPE foo = VALUE; |
10610 | |
10611 | so recursing into the vector type would be at best confusing or at |
10612 | worst wrong. For example, when -flax-vector-conversions is in effect, |
10613 | it's possible to initialize a V8HI from a V4SI, even though the vectors |
10614 | have different element types and different numbers of elements. */ |
10615 | if (gnu_vector_type_p (type)) |
10616 | return !VECTOR_TYPE_P (value_type); |
10617 | |
10618 | if (AGGREGATE_TYPE_P (type)) |
10619 | return type != TYPE_MAIN_VARIANT (value_type); |
10620 | |
10621 | return false; |
10622 | } |
10623 | |
10624 | /* Add one non-braced element to the current constructor level. |
10625 | This adjusts the current position within the constructor's type. |
10626 | This may also start or terminate implicit levels |
10627 | to handle a partly-braced initializer. |
10628 | |
10629 | Once this has found the correct level for the new element, |
10630 | it calls output_init_element. |
10631 | |
10632 | IMPLICIT is true if value comes from pop_init_level (1), |
10633 | the new initializer has been merged with the existing one |
10634 | and thus no warnings should be emitted about overriding an |
10635 | existing initializer. */ |
10636 | |
10637 | void |
10638 | process_init_element (location_t loc, struct c_expr value, bool implicit, |
10639 | struct obstack * braced_init_obstack) |
10640 | { |
10641 | tree orig_value = value.value; |
10642 | int string_flag |
10643 | = (orig_value != NULL_TREE && TREE_CODE (orig_value) == STRING_CST); |
10644 | bool strict_string = value.original_code == STRING_CST; |
10645 | bool was_designated = designator_depth != 0; |
10646 | |
10647 | designator_depth = 0; |
10648 | designator_erroneous = 0; |
10649 | |
10650 | if (!implicit && value.value && !integer_zerop (value.value)) |
10651 | constructor_zeroinit = 0; |
10652 | |
10653 | /* Handle superfluous braces around string cst as in |
10654 | char x[] = {"foo"}; */ |
10655 | if (constructor_type |
10656 | && !was_designated |
10657 | && TREE_CODE (constructor_type) == ARRAY_TYPE |
10658 | && INTEGRAL_TYPE_P (TREE_TYPE (constructor_type)) |
10659 | && integer_zerop (constructor_unfilled_index)) |
10660 | { |
10661 | if (constructor_stack->replacement_value.value) |
10662 | { |
10663 | error_init (loc, gmsgid: "excess elements in %qT initializer" , constructor_type); |
10664 | return; |
10665 | } |
10666 | else if (string_flag) |
10667 | { |
10668 | constructor_stack->replacement_value = value; |
10669 | return; |
10670 | } |
10671 | } |
10672 | |
10673 | if (constructor_stack->replacement_value.value != NULL_TREE) |
10674 | { |
10675 | error_init (loc, gmsgid: "excess elements in struct initializer" ); |
10676 | return; |
10677 | } |
10678 | |
10679 | /* Ignore elements of a brace group if it is entirely superfluous |
10680 | and has already been diagnosed, or if the type is erroneous. */ |
10681 | if (constructor_type == NULL_TREE || constructor_type == error_mark_node) |
10682 | return; |
10683 | |
10684 | /* Ignore elements of an initializer for a variable-size type. |
10685 | Those are diagnosed in the parser (empty initializer braces are OK). */ |
10686 | if (COMPLETE_TYPE_P (constructor_type) |
10687 | && !poly_int_tree_p (TYPE_SIZE (constructor_type))) |
10688 | return; |
10689 | |
10690 | if (!implicit && warn_designated_init && !was_designated |
10691 | && TREE_CODE (constructor_type) == RECORD_TYPE |
10692 | && lookup_attribute (attr_name: "designated_init" , |
10693 | TYPE_ATTRIBUTES (constructor_type))) |
10694 | warning_init (loc, |
10695 | opt: OPT_Wdesignated_init, |
10696 | gmsgid: "positional initialization of field " |
10697 | "in %<struct%> declared with %<designated_init%> attribute" ); |
10698 | |
10699 | /* If we've exhausted any levels that didn't have braces, |
10700 | pop them now. */ |
10701 | while (constructor_stack->implicit) |
10702 | { |
10703 | if (RECORD_OR_UNION_TYPE_P (constructor_type) |
10704 | && constructor_fields == NULL_TREE) |
10705 | process_init_element (loc, |
10706 | value: pop_init_level (loc, implicit: 1, braced_init_obstack, |
10707 | insert_before: last_init_list_comma), |
10708 | implicit: true, braced_init_obstack); |
10709 | else if ((TREE_CODE (constructor_type) == ARRAY_TYPE |
10710 | || gnu_vector_type_p (type: constructor_type)) |
10711 | && constructor_max_index |
10712 | && tree_int_cst_lt (t1: constructor_max_index, |
10713 | t2: constructor_index)) |
10714 | process_init_element (loc, |
10715 | value: pop_init_level (loc, implicit: 1, braced_init_obstack, |
10716 | insert_before: last_init_list_comma), |
10717 | implicit: true, braced_init_obstack); |
10718 | else |
10719 | break; |
10720 | } |
10721 | |
10722 | /* In the case of [LO ... HI] = VALUE, only evaluate VALUE once. */ |
10723 | if (constructor_range_stack) |
10724 | { |
10725 | /* If value is a compound literal and we'll be just using its |
10726 | content, don't put it into a SAVE_EXPR. */ |
10727 | if (TREE_CODE (value.value) != COMPOUND_LITERAL_EXPR |
10728 | || !require_constant_value) |
10729 | { |
10730 | tree semantic_type = NULL_TREE; |
10731 | if (TREE_CODE (value.value) == EXCESS_PRECISION_EXPR) |
10732 | { |
10733 | semantic_type = TREE_TYPE (value.value); |
10734 | value.value = TREE_OPERAND (value.value, 0); |
10735 | } |
10736 | value.value = save_expr (value.value); |
10737 | if (semantic_type) |
10738 | value.value = build1 (EXCESS_PRECISION_EXPR, semantic_type, |
10739 | value.value); |
10740 | } |
10741 | } |
10742 | |
10743 | while (1) |
10744 | { |
10745 | if (TREE_CODE (constructor_type) == RECORD_TYPE) |
10746 | { |
10747 | tree fieldtype; |
10748 | enum tree_code fieldcode; |
10749 | |
10750 | if (constructor_fields == NULL_TREE) |
10751 | { |
10752 | pedwarn_init (loc, opt: 0, gmsgid: "excess elements in struct initializer" ); |
10753 | break; |
10754 | } |
10755 | |
10756 | fieldtype = TREE_TYPE (constructor_fields); |
10757 | if (fieldtype != error_mark_node) |
10758 | fieldtype = TYPE_MAIN_VARIANT (fieldtype); |
10759 | fieldcode = TREE_CODE (fieldtype); |
10760 | |
10761 | /* Error for non-static initialization of a flexible array member. */ |
10762 | if (fieldcode == ARRAY_TYPE |
10763 | && !require_constant_value |
10764 | && TYPE_SIZE (fieldtype) == NULL_TREE |
10765 | && DECL_CHAIN (constructor_fields) == NULL_TREE) |
10766 | { |
10767 | error_init (loc, gmsgid: "non-static initialization of a flexible " |
10768 | "array member" ); |
10769 | break; |
10770 | } |
10771 | |
10772 | /* Error for initialization of a flexible array member with |
10773 | a string constant if the structure is in an array. E.g.: |
10774 | struct S { int x; char y[]; }; |
10775 | struct S s[] = { { 1, "foo" } }; |
10776 | is invalid. */ |
10777 | if (string_flag |
10778 | && fieldcode == ARRAY_TYPE |
10779 | && constructor_depth > 1 |
10780 | && TYPE_SIZE (fieldtype) == NULL_TREE |
10781 | && DECL_CHAIN (constructor_fields) == NULL_TREE) |
10782 | { |
10783 | bool in_array_p = false; |
10784 | for (struct constructor_stack *p = constructor_stack; |
10785 | p && p->type; p = p->next) |
10786 | if (TREE_CODE (p->type) == ARRAY_TYPE) |
10787 | { |
10788 | in_array_p = true; |
10789 | break; |
10790 | } |
10791 | if (in_array_p) |
10792 | { |
10793 | error_init (loc, gmsgid: "initialization of flexible array " |
10794 | "member in a nested context" ); |
10795 | break; |
10796 | } |
10797 | } |
10798 | |
10799 | /* Accept a string constant to initialize a subarray. */ |
10800 | if (value.value != NULL_TREE |
10801 | && fieldcode == ARRAY_TYPE |
10802 | && INTEGRAL_TYPE_P (TREE_TYPE (fieldtype)) |
10803 | && string_flag) |
10804 | value.value = orig_value; |
10805 | /* Otherwise, if we have come to a subaggregate, |
10806 | and we don't have an element of its type, push into it. */ |
10807 | else if (value.value != NULL_TREE |
10808 | && initialize_elementwise_p (type: fieldtype, value: value.value)) |
10809 | { |
10810 | push_init_level (loc, implicit: 1, braced_init_obstack); |
10811 | continue; |
10812 | } |
10813 | |
10814 | if (value.value) |
10815 | { |
10816 | push_member_name (decl: constructor_fields); |
10817 | output_init_element (loc, value: value.value, origtype: value.original_type, |
10818 | strict_string, type: fieldtype, |
10819 | field: constructor_fields, pending: true, implicit, |
10820 | braced_init_obstack); |
10821 | RESTORE_SPELLING_DEPTH (constructor_depth); |
10822 | } |
10823 | else |
10824 | /* Do the bookkeeping for an element that was |
10825 | directly output as a constructor. */ |
10826 | { |
10827 | /* For a record, keep track of end position of last field. */ |
10828 | if (DECL_SIZE (constructor_fields)) |
10829 | constructor_bit_index |
10830 | = size_binop_loc (input_location, PLUS_EXPR, |
10831 | bit_position (constructor_fields), |
10832 | DECL_SIZE (constructor_fields)); |
10833 | |
10834 | /* If the current field was the first one not yet written out, |
10835 | it isn't now, so update. */ |
10836 | if (constructor_unfilled_fields == constructor_fields) |
10837 | { |
10838 | constructor_unfilled_fields = DECL_CHAIN (constructor_fields); |
10839 | /* Skip any nameless bit fields. */ |
10840 | while (constructor_unfilled_fields != 0 |
10841 | && (DECL_UNNAMED_BIT_FIELD |
10842 | (constructor_unfilled_fields))) |
10843 | constructor_unfilled_fields = |
10844 | DECL_CHAIN (constructor_unfilled_fields); |
10845 | } |
10846 | } |
10847 | |
10848 | constructor_fields = DECL_CHAIN (constructor_fields); |
10849 | /* Skip any nameless bit fields at the beginning. */ |
10850 | while (constructor_fields != NULL_TREE |
10851 | && DECL_UNNAMED_BIT_FIELD (constructor_fields)) |
10852 | constructor_fields = DECL_CHAIN (constructor_fields); |
10853 | } |
10854 | else if (TREE_CODE (constructor_type) == UNION_TYPE) |
10855 | { |
10856 | tree fieldtype; |
10857 | enum tree_code fieldcode; |
10858 | |
10859 | if (constructor_fields == NULL_TREE) |
10860 | { |
10861 | pedwarn_init (loc, opt: 0, |
10862 | gmsgid: "excess elements in union initializer" ); |
10863 | break; |
10864 | } |
10865 | |
10866 | fieldtype = TREE_TYPE (constructor_fields); |
10867 | if (fieldtype != error_mark_node) |
10868 | fieldtype = TYPE_MAIN_VARIANT (fieldtype); |
10869 | fieldcode = TREE_CODE (fieldtype); |
10870 | |
10871 | /* Warn that traditional C rejects initialization of unions. |
10872 | We skip the warning if the value is zero. This is done |
10873 | under the assumption that the zero initializer in user |
10874 | code appears conditioned on e.g. __STDC__ to avoid |
10875 | "missing initializer" warnings and relies on default |
10876 | initialization to zero in the traditional C case. |
10877 | We also skip the warning if the initializer is designated, |
10878 | again on the assumption that this must be conditional on |
10879 | __STDC__ anyway (and we've already complained about the |
10880 | member-designator already). */ |
10881 | if (!in_system_header_at (loc: input_location) && !constructor_designated |
10882 | && !(value.value && (integer_zerop (value.value) |
10883 | || real_zerop (value.value)))) |
10884 | warning (OPT_Wtraditional, "traditional C rejects initialization " |
10885 | "of unions" ); |
10886 | |
10887 | /* Accept a string constant to initialize a subarray. */ |
10888 | if (value.value != NULL_TREE |
10889 | && fieldcode == ARRAY_TYPE |
10890 | && INTEGRAL_TYPE_P (TREE_TYPE (fieldtype)) |
10891 | && string_flag) |
10892 | value.value = orig_value; |
10893 | /* Otherwise, if we have come to a subaggregate, |
10894 | and we don't have an element of its type, push into it. */ |
10895 | else if (value.value != NULL_TREE |
10896 | && initialize_elementwise_p (type: fieldtype, value: value.value)) |
10897 | { |
10898 | push_init_level (loc, implicit: 1, braced_init_obstack); |
10899 | continue; |
10900 | } |
10901 | |
10902 | if (value.value) |
10903 | { |
10904 | push_member_name (decl: constructor_fields); |
10905 | output_init_element (loc, value: value.value, origtype: value.original_type, |
10906 | strict_string, type: fieldtype, |
10907 | field: constructor_fields, pending: true, implicit, |
10908 | braced_init_obstack); |
10909 | RESTORE_SPELLING_DEPTH (constructor_depth); |
10910 | } |
10911 | else |
10912 | /* Do the bookkeeping for an element that was |
10913 | directly output as a constructor. */ |
10914 | { |
10915 | constructor_bit_index = DECL_SIZE (constructor_fields); |
10916 | constructor_unfilled_fields = DECL_CHAIN (constructor_fields); |
10917 | } |
10918 | |
10919 | constructor_fields = NULL_TREE; |
10920 | } |
10921 | else if (TREE_CODE (constructor_type) == ARRAY_TYPE) |
10922 | { |
10923 | tree elttype = TYPE_MAIN_VARIANT (TREE_TYPE (constructor_type)); |
10924 | enum tree_code eltcode = TREE_CODE (elttype); |
10925 | |
10926 | /* Accept a string constant to initialize a subarray. */ |
10927 | if (value.value != NULL_TREE |
10928 | && eltcode == ARRAY_TYPE |
10929 | && INTEGRAL_TYPE_P (TREE_TYPE (elttype)) |
10930 | && string_flag) |
10931 | value.value = orig_value; |
10932 | /* Otherwise, if we have come to a subaggregate, |
10933 | and we don't have an element of its type, push into it. */ |
10934 | else if (value.value != NULL_TREE |
10935 | && initialize_elementwise_p (type: elttype, value: value.value)) |
10936 | { |
10937 | push_init_level (loc, implicit: 1, braced_init_obstack); |
10938 | continue; |
10939 | } |
10940 | |
10941 | if (constructor_max_index != NULL_TREE |
10942 | && (tree_int_cst_lt (t1: constructor_max_index, t2: constructor_index) |
10943 | || integer_all_onesp (constructor_max_index))) |
10944 | { |
10945 | pedwarn_init (loc, opt: 0, |
10946 | gmsgid: "excess elements in array initializer" ); |
10947 | break; |
10948 | } |
10949 | |
10950 | /* Now output the actual element. */ |
10951 | if (value.value) |
10952 | { |
10953 | push_array_bounds (bounds: tree_to_uhwi (constructor_index)); |
10954 | output_init_element (loc, value: value.value, origtype: value.original_type, |
10955 | strict_string, type: elttype, |
10956 | field: constructor_index, pending: true, implicit, |
10957 | braced_init_obstack); |
10958 | RESTORE_SPELLING_DEPTH (constructor_depth); |
10959 | } |
10960 | |
10961 | constructor_index |
10962 | = size_binop_loc (input_location, PLUS_EXPR, |
10963 | constructor_index, bitsize_one_node); |
10964 | |
10965 | if (!value.value) |
10966 | /* If we are doing the bookkeeping for an element that was |
10967 | directly output as a constructor, we must update |
10968 | constructor_unfilled_index. */ |
10969 | constructor_unfilled_index = constructor_index; |
10970 | } |
10971 | else if (gnu_vector_type_p (type: constructor_type)) |
10972 | { |
10973 | tree elttype = TYPE_MAIN_VARIANT (TREE_TYPE (constructor_type)); |
10974 | |
10975 | /* Do a basic check of initializer size. Note that vectors |
10976 | always have a fixed size derived from their type. */ |
10977 | if (tree_int_cst_lt (t1: constructor_max_index, t2: constructor_index)) |
10978 | { |
10979 | pedwarn_init (loc, opt: 0, |
10980 | gmsgid: "excess elements in vector initializer" ); |
10981 | break; |
10982 | } |
10983 | |
10984 | /* Now output the actual element. */ |
10985 | if (value.value) |
10986 | { |
10987 | if (TREE_CODE (value.value) == VECTOR_CST) |
10988 | elttype = TYPE_MAIN_VARIANT (constructor_type); |
10989 | output_init_element (loc, value: value.value, origtype: value.original_type, |
10990 | strict_string, type: elttype, |
10991 | field: constructor_index, pending: true, implicit, |
10992 | braced_init_obstack); |
10993 | } |
10994 | |
10995 | constructor_index |
10996 | = size_binop_loc (input_location, |
10997 | PLUS_EXPR, constructor_index, bitsize_one_node); |
10998 | |
10999 | if (!value.value) |
11000 | /* If we are doing the bookkeeping for an element that was |
11001 | directly output as a constructor, we must update |
11002 | constructor_unfilled_index. */ |
11003 | constructor_unfilled_index = constructor_index; |
11004 | } |
11005 | |
11006 | /* Handle the sole element allowed in a braced initializer |
11007 | for a scalar variable. */ |
11008 | else if (constructor_type != error_mark_node |
11009 | && constructor_fields == NULL_TREE) |
11010 | { |
11011 | pedwarn_init (loc, opt: 0, |
11012 | gmsgid: "excess elements in scalar initializer" ); |
11013 | break; |
11014 | } |
11015 | else |
11016 | { |
11017 | if (value.value) |
11018 | output_init_element (loc, value: value.value, origtype: value.original_type, |
11019 | strict_string, type: constructor_type, |
11020 | NULL_TREE, pending: true, implicit, |
11021 | braced_init_obstack); |
11022 | constructor_fields = NULL_TREE; |
11023 | } |
11024 | |
11025 | /* Handle range initializers either at this level or anywhere higher |
11026 | in the designator stack. */ |
11027 | if (constructor_range_stack) |
11028 | { |
11029 | struct constructor_range_stack *p, *range_stack; |
11030 | int finish = 0; |
11031 | |
11032 | range_stack = constructor_range_stack; |
11033 | constructor_range_stack = 0; |
11034 | while (constructor_stack != range_stack->stack) |
11035 | { |
11036 | gcc_assert (constructor_stack->implicit); |
11037 | process_init_element (loc, |
11038 | value: pop_init_level (loc, implicit: 1, |
11039 | braced_init_obstack, |
11040 | insert_before: last_init_list_comma), |
11041 | implicit: true, braced_init_obstack); |
11042 | } |
11043 | for (p = range_stack; |
11044 | !p->range_end || tree_int_cst_equal (p->index, p->range_end); |
11045 | p = p->prev) |
11046 | { |
11047 | gcc_assert (constructor_stack->implicit); |
11048 | process_init_element (loc, |
11049 | value: pop_init_level (loc, implicit: 1, |
11050 | braced_init_obstack, |
11051 | insert_before: last_init_list_comma), |
11052 | implicit: true, braced_init_obstack); |
11053 | } |
11054 | |
11055 | p->index = size_binop_loc (input_location, |
11056 | PLUS_EXPR, p->index, bitsize_one_node); |
11057 | if (tree_int_cst_equal (p->index, p->range_end) && !p->prev) |
11058 | finish = 1; |
11059 | |
11060 | while (1) |
11061 | { |
11062 | constructor_index = p->index; |
11063 | constructor_fields = p->fields; |
11064 | if (finish && p->range_end && p->index == p->range_start) |
11065 | { |
11066 | finish = 0; |
11067 | p->prev = 0; |
11068 | } |
11069 | p = p->next; |
11070 | if (!p) |
11071 | break; |
11072 | finish_implicit_inits (loc, braced_init_obstack); |
11073 | push_init_level (loc, implicit: 2, braced_init_obstack); |
11074 | p->stack = constructor_stack; |
11075 | if (p->range_end && tree_int_cst_equal (p->index, p->range_end)) |
11076 | p->index = p->range_start; |
11077 | } |
11078 | |
11079 | if (!finish) |
11080 | constructor_range_stack = range_stack; |
11081 | continue; |
11082 | } |
11083 | |
11084 | break; |
11085 | } |
11086 | |
11087 | constructor_range_stack = 0; |
11088 | } |
11089 | |
/* Build a complete asm-statement, whose components are a CV_QUALIFIER
   (guaranteed to be 'volatile' or null) and ARGS (represented using
   an ASM_EXPR node).  IS_VOLATILE is true when the 'volatile' qualifier
   was present; marking the ASM_EXPR volatile keeps it from being
   removed or reordered by the optimizers.  Returns the statement as
   added to the current statement list.  */
tree
build_asm_stmt (bool is_volatile, tree args)
{
  if (is_volatile)
    ASM_VOLATILE_P (args) = 1;
  return add_stmt (args);
}
11100 | |
/* Build an asm-expr, whose components are a STRING, some OUTPUTS,
   some INPUTS, and some CLOBBERS.  The latter three may be NULL.
   SIMPLE indicates whether there was anything at all after the
   string in the asm expression -- asm("blah") and asm("blah" : )
   are subtly different.  We use a ASM_EXPR node to represent this.
   LOC is the location of the asm, and IS_INLINE says whether this
   is asm inline.

   Each operand list is a TREE_LIST chain whose TREE_PURPOSE holds the
   constraint string (itself wrapped in a TREE_LIST) and whose TREE_VALUE
   is the operand expression.  Invalid operands are replaced with
   error_mark_node in place rather than aborting, so that one bad operand
   does not suppress diagnostics for the others.  */
tree
build_asm_expr (location_t loc, tree string, tree outputs, tree inputs,
		tree clobbers, tree labels, bool simple, bool is_inline)
{
  tree tail;
  tree args;
  int i;
  const char *constraint;
  const char **oconstraints;
  bool allows_mem, allows_reg, is_inout;
  int ninputs, noutputs;

  ninputs = list_length (inputs);
  noutputs = list_length (outputs);
  /* Collected output constraints; parse_input_constraint needs them to
     resolve matching ("0", "1", ...) input constraints.  */
  oconstraints = (const char **) alloca (noutputs * sizeof (const char *));

  /* Rewrite symbolic operand names ("%[name]") into numeric form.  */
  string = resolve_asm_operand_names (string, outputs, inputs, labels);

  /* Remove output conversions that change the type but not the mode.  */
  for (i = 0, tail = outputs; tail; ++i, tail = TREE_CHAIN (tail))
    {
      tree output = TREE_VALUE (tail);

      output = c_fully_fold (output, false, NULL, true);

      /* ??? Really, this should not be here.  Users should be using a
	 proper lvalue, dammit.  But there's a long history of using casts
	 in the output operands.  In cases like longlong.h, this becomes a
	 primitive form of typechecking -- if the cast can be removed, then
	 the output operand had a type of the proper width; otherwise we'll
	 get an error.  Gross, but ... */
      STRIP_NOPS (output);

      if (!lvalue_or_else (loc, ref: output, use: lv_asm))
	output = error_mark_node;

      /* Writing through an asm output into a const-qualified object is
	 diagnosed but the operand is kept, matching ordinary assignment.  */
      if (output != error_mark_node
	  && (TREE_READONLY (output)
	      || TYPE_READONLY (TREE_TYPE (output))
	      || (RECORD_OR_UNION_TYPE_P (TREE_TYPE (output))
		  && C_TYPE_FIELDS_READONLY (TREE_TYPE (output)))))
	readonly_error (loc, output, lv_asm);

      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tail)));
      oconstraints[i] = constraint;

      if (parse_output_constraint (&constraint, i, ninputs, noutputs,
				   &allows_mem, &allows_reg, &is_inout))
	{
	  /* If the operand is going to end up in memory,
	     mark it addressable.  */
	  if (!allows_reg && !c_mark_addressable (exp: output))
	    output = error_mark_node;
	  /* A void-typed output is only tolerated for pure memory
	     operands; anywhere else it cannot yield a value.  */
	  if (!(!allows_reg && allows_mem)
	      && output != error_mark_node
	      && VOID_TYPE_P (TREE_TYPE (output)))
	    {
	      error_at (loc, "invalid use of void expression" );
	      output = error_mark_node;
	    }
	}
      else
	output = error_mark_node;

      TREE_VALUE (tail) = output;
    }

  for (i = 0, tail = inputs; tail; ++i, tail = TREE_CHAIN (tail))
    {
      tree input;

      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tail)));
      input = TREE_VALUE (tail);

      if (parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
				  oconstraints, &allows_mem, &allows_reg))
	{
	  /* If the operand is going to end up in memory,
	     mark it addressable.  */
	  if (!allows_reg && allows_mem)
	    {
	      input = c_fully_fold (input, false, NULL, true);

	      /* Strip the nops as we allow this case.  FIXME, this really
		 should be rejected or made deprecated.  */
	      STRIP_NOPS (input);
	      if (!c_mark_addressable (exp: input))
		input = error_mark_node;
	    }
	  else
	    {
	      /* Register-capable input: perform the usual lvalue-to-rvalue
		 conversion and fold the result.  */
	      struct c_expr expr;
	      memset (s: &expr, c: 0, n: sizeof (expr));
	      expr.value = input;
	      expr = convert_lvalue_to_rvalue (loc, exp: expr, convert_p: true, read_p: false);
	      input = c_fully_fold (expr.value, false, NULL);

	      if (input != error_mark_node && VOID_TYPE_P (TREE_TYPE (input)))
		{
		  error_at (loc, "invalid use of void expression" );
		  input = error_mark_node;
		}
	    }
	}
      else
	input = error_mark_node;

      TREE_VALUE (tail) = input;
    }

  args = build_stmt (loc, ASM_EXPR, string, outputs, inputs, clobbers, labels);

  /* asm statements without outputs, including simple ones, are treated
     as volatile.  */
  ASM_INPUT_P (args) = simple;
  ASM_VOLATILE_P (args) = (noutputs == 0);
  ASM_INLINE_P (args) = is_inline;

  return args;
}
11228 | |
11229 | /* Generate a goto statement to LABEL. LOC is the location of the |
11230 | GOTO. */ |
11231 | |
11232 | tree |
11233 | c_finish_goto_label (location_t loc, tree label) |
11234 | { |
11235 | tree decl = lookup_label_for_goto (loc, label); |
11236 | if (!decl) |
11237 | return NULL_TREE; |
11238 | TREE_USED (decl) = 1; |
11239 | { |
11240 | add_stmt (build_predict_expr (PRED_GOTO, NOT_TAKEN)); |
11241 | tree t = build1 (GOTO_EXPR, void_type_node, decl); |
11242 | SET_EXPR_LOCATION (t, loc); |
11243 | return add_stmt (t); |
11244 | } |
11245 | } |
11246 | |
11247 | /* Generate a computed goto statement to EXPR. LOC is the location of |
11248 | the GOTO. */ |
11249 | |
11250 | tree |
11251 | c_finish_goto_ptr (location_t loc, c_expr val) |
11252 | { |
11253 | tree expr = val.value; |
11254 | tree t; |
11255 | pedwarn (loc, OPT_Wpedantic, "ISO C forbids %<goto *expr;%>" ); |
11256 | if (expr != error_mark_node |
11257 | && !POINTER_TYPE_P (TREE_TYPE (expr)) |
11258 | && !null_pointer_constant_p (expr)) |
11259 | { |
11260 | error_at (val.get_location (), |
11261 | "computed goto must be pointer type" ); |
11262 | expr = build_zero_cst (ptr_type_node); |
11263 | } |
11264 | expr = c_fully_fold (expr, false, NULL); |
11265 | expr = convert (ptr_type_node, expr); |
11266 | t = build1 (GOTO_EXPR, void_type_node, expr); |
11267 | SET_EXPR_LOCATION (t, loc); |
11268 | return add_stmt (t); |
11269 | } |
11270 | |
/* Generate a C `return' statement.  RETVAL is the expression for what
   to return, or a null pointer for `return;' with no value.  LOC is
   the location of the return statement, or the location of the expression,
   if the statement has any.  If ORIGTYPE is not NULL_TREE, it
   is the original type of RETVAL.

   Returns the RETURN_EXPR statement added to the current statement
   list, or NULL_TREE if the value could not be converted to the
   function's return type.  */

tree
c_finish_return (location_t loc, tree retval, tree origtype)
{
  /* VALTYPE is the declared return type of the current function.  */
  tree valtype = TREE_TYPE (TREE_TYPE (current_function_decl)), ret_stmt;
  bool no_warning = false;
  bool npc = false;

  /* Use the expansion point to handle cases such as returning NULL
     in a function returning void.  */
  location_t xloc = expansion_point_location_if_in_system_header (loc);

  if (TREE_THIS_VOLATILE (current_function_decl))
    warning_at (xloc, 0,
		"function declared %<noreturn%> has a %<return%> statement" );

  if (retval)
    {
      tree semantic_type = NULL_TREE;
      /* Record null-pointer-constant-ness before folding, for
	 convert_for_assignment below.  */
      npc = null_pointer_constant_p (expr: retval);
      /* Strip an EXCESS_PRECISION_EXPR wrapper but remember its type so
	 the wrapper can be re-applied after folding.  */
      if (TREE_CODE (retval) == EXCESS_PRECISION_EXPR)
	{
	  semantic_type = TREE_TYPE (retval);
	  retval = TREE_OPERAND (retval, 0);
	}
      retval = c_fully_fold (retval, false, NULL);
      if (semantic_type
	  && valtype != NULL_TREE
	  && TREE_CODE (valtype) != VOID_TYPE)
	retval = build1 (EXCESS_PRECISION_EXPR, semantic_type, retval);
    }

  /* Case 1: `return;' with no value.  Diagnose in a non-void function.  */
  if (!retval)
    {
      current_function_returns_null = 1;
      if ((warn_return_type >= 0 || flag_isoc99)
	  && valtype != NULL_TREE && TREE_CODE (valtype) != VOID_TYPE)
	{
	  no_warning = true;
	  /* Pedwarn under C99 and later, plain warning otherwise.  */
	  if (emit_diagnostic (flag_isoc99 ? DK_PEDWARN : DK_WARNING,
			       loc, OPT_Wreturn_mismatch,
			       "%<return%> with no value,"
			       " in function returning non-void" ))
	    inform (DECL_SOURCE_LOCATION (current_function_decl),
		    "declared here" );
	}
    }
  /* Case 2: a value returned from a function declared to return void.  */
  else if (valtype == NULL_TREE || VOID_TYPE_P (valtype))
    {
      current_function_returns_null = 1;
      bool warned_here;
      if (TREE_CODE (TREE_TYPE (retval)) != VOID_TYPE)
	warned_here = pedwarn
	  (xloc, OPT_Wreturn_mismatch,
	   "%<return%> with a value, in function returning void" );
      else
	/* `return void_expr;' in a void function is only a pedantic
	   violation, not a type mismatch.  */
	warned_here = pedwarn
	  (xloc, OPT_Wpedantic, "ISO C forbids "
	   "%<return%> with expression, in function returning void" );
      if (warned_here)
	inform (DECL_SOURCE_LOCATION (current_function_decl),
		"declared here" );
    }
  /* Case 3: the normal case -- convert the value to the return type and
     build the assignment to DECL_RESULT.  */
  else
    {
      tree t = convert_for_assignment (location: loc, UNKNOWN_LOCATION, type: valtype,
				      rhs: retval, origtype, errtype: ic_return,
				      null_pointer_constant: npc, NULL_TREE, NULL_TREE, parmnum: 0);
      tree res = DECL_RESULT (current_function_decl);
      tree inner;
      bool save;

      current_function_returns_value = 1;
      if (t == error_mark_node)
	return NULL_TREE;

      /* The conversion below is a compiler-generated (late) operation;
	 temporarily set in_late_binary_op so it is not diagnosed as if
	 written by the user.  */
      save = in_late_binary_op;
      if (C_BOOLEAN_TYPE_P (TREE_TYPE (res))
	  || TREE_CODE (TREE_TYPE (res)) == COMPLEX_TYPE
	  || (SCALAR_FLOAT_TYPE_P (TREE_TYPE (t))
	      && (TREE_CODE (TREE_TYPE (res)) == INTEGER_TYPE
		  || TREE_CODE (TREE_TYPE (res)) == ENUMERAL_TYPE)
	      && sanitize_flags_p (flag: SANITIZE_FLOAT_CAST)))
	in_late_binary_op = true;
      inner = t = convert (TREE_TYPE (res), t);
      in_late_binary_op = save;

      /* Strip any conversions, additions, and subtractions, and see if
	 we are returning the address of a local variable.  Warn if so.  */
      while (1)
	{
	  switch (TREE_CODE (inner))
	    {
	    CASE_CONVERT:
	    case NON_LVALUE_EXPR:
	    case PLUS_EXPR:
	    case POINTER_PLUS_EXPR:
	      inner = TREE_OPERAND (inner, 0);
	      continue;

	    case MINUS_EXPR:
	      /* If the second operand of the MINUS_EXPR has a pointer
		 type (or is converted from it), this may be valid, so
		 don't give a warning.  */
	      {
		tree op1 = TREE_OPERAND (inner, 1);

		while (!POINTER_TYPE_P (TREE_TYPE (op1))
		       && (CONVERT_EXPR_P (op1)
			   || TREE_CODE (op1) == NON_LVALUE_EXPR))
		  op1 = TREE_OPERAND (op1, 0);

		if (POINTER_TYPE_P (TREE_TYPE (op1)))
		  break;

		inner = TREE_OPERAND (inner, 0);
		continue;
	      }

	    case ADDR_EXPR:
	      inner = TREE_OPERAND (inner, 0);

	      /* Walk down component references (a.b, a[i], ...) to the
		 base object, but stop at indirections -- those may point
		 anywhere.  */
	      while (REFERENCE_CLASS_P (inner)
		     && !INDIRECT_REF_P (inner))
		inner = TREE_OPERAND (inner, 0);

	      if (DECL_P (inner)
		  && !DECL_EXTERNAL (inner)
		  && !TREE_STATIC (inner)
		  && DECL_CONTEXT (inner) == current_function_decl
		  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
		{
		  if (TREE_CODE (inner) == LABEL_DECL)
		    warning_at (loc, OPT_Wreturn_local_addr,
				"function returns address of label" );
		  else
		    {
		      warning_at (loc, OPT_Wreturn_local_addr,
				  "function returns address of local variable" );
		      /* Replace the dangling address with a null pointer,
			 keeping the original expression for its side
			 effects.  */
		      tree zero = build_zero_cst (TREE_TYPE (res));
		      t = build2 (COMPOUND_EXPR, TREE_TYPE (res), t, zero);
		    }
		}
	      break;

	    default:
	      break;
	    }

	  break;
	}

      retval = build2 (MODIFY_EXPR, TREE_TYPE (res), res, t);
      SET_EXPR_LOCATION (retval, loc);

      if (warn_sequence_point)
	verify_sequence_points (retval);
    }

  ret_stmt = build_stmt (loc, RETURN_EXPR, retval);
  if (no_warning)
    suppress_warning (ret_stmt, OPT_Wreturn_type);
  return add_stmt (ret_stmt);
}
11440 | |
/* State for one switch statement being processed; kept on a stack so
   nested switches each have their own record.  */
struct c_switch {
  /* The SWITCH_STMT being built.  */
  tree switch_stmt;

  /* The original type of the testing expression, i.e. before the
     default conversion is applied.  */
  tree orig_type;

  /* A splay-tree mapping the low element of a case range to the high
     element, or NULL_TREE if there is no high element.  Used to
     determine whether or not a new case label duplicates an old case
     label.  We need a tree, rather than simply a hash table, because
     of the GNU case range extension.  */
  splay_tree cases;

  /* The bindings at the point of the switch.  This is used for
     warnings crossing decls when branching to a case label.  */
  struct c_spot_bindings *bindings;

  /* Whether the switch includes any break statements.  */
  bool break_stmt_seen_p;

  /* The next node on the stack.  */
  struct c_switch *next;

  /* Remember whether the controlling expression had boolean type
     before integer promotions for the sake of -Wswitch-bool.  */
  bool bool_cond_p;
};

/* A stack of the currently active switch statements.  The innermost
   switch statement is on the top of the stack.  There is no need to
   mark the stack for garbage collection because it is only active
   during the processing of the body of a function, and we never
   collect at that point.  */

struct c_switch *c_switch_stack;
11478 | |
11479 | /* Start a C switch statement, testing expression EXP. Return the new |
11480 | SWITCH_STMT. SWITCH_LOC is the location of the `switch'. |
11481 | SWITCH_COND_LOC is the location of the switch's condition. |
11482 | EXPLICIT_CAST_P is true if the expression EXP has an explicit cast. */ |
11483 | |
11484 | tree |
11485 | c_start_switch (location_t switch_loc, |
11486 | location_t switch_cond_loc, |
11487 | tree exp, bool explicit_cast_p) |
11488 | { |
11489 | tree orig_type = error_mark_node; |
11490 | bool bool_cond_p = false; |
11491 | struct c_switch *cs; |
11492 | |
11493 | if (exp != error_mark_node) |
11494 | { |
11495 | orig_type = TREE_TYPE (exp); |
11496 | |
11497 | if (!INTEGRAL_TYPE_P (orig_type)) |
11498 | { |
11499 | if (orig_type != error_mark_node) |
11500 | { |
11501 | error_at (switch_cond_loc, "switch quantity not an integer" ); |
11502 | orig_type = error_mark_node; |
11503 | } |
11504 | exp = integer_zero_node; |
11505 | } |
11506 | else |
11507 | { |
11508 | tree type = TYPE_MAIN_VARIANT (orig_type); |
11509 | tree e = exp; |
11510 | |
11511 | /* Warn if the condition has boolean value. */ |
11512 | while (TREE_CODE (e) == COMPOUND_EXPR) |
11513 | e = TREE_OPERAND (e, 1); |
11514 | |
11515 | if ((C_BOOLEAN_TYPE_P (type) |
11516 | || truth_value_p (TREE_CODE (e))) |
11517 | /* Explicit cast to int suppresses this warning. */ |
11518 | && !(TREE_CODE (type) == INTEGER_TYPE |
11519 | && explicit_cast_p)) |
11520 | bool_cond_p = true; |
11521 | |
11522 | if (!in_system_header_at (loc: input_location) |
11523 | && (type == long_integer_type_node |
11524 | || type == long_unsigned_type_node)) |
11525 | warning_at (switch_cond_loc, |
11526 | OPT_Wtraditional, "%<long%> switch expression not " |
11527 | "converted to %<int%> in ISO C" ); |
11528 | |
11529 | exp = c_fully_fold (exp, false, NULL); |
11530 | exp = default_conversion (exp); |
11531 | |
11532 | if (warn_sequence_point) |
11533 | verify_sequence_points (exp); |
11534 | } |
11535 | } |
11536 | |
11537 | /* Add this new SWITCH_STMT to the stack. */ |
11538 | cs = XNEW (struct c_switch); |
11539 | cs->switch_stmt = build_stmt (switch_loc, SWITCH_STMT, exp, |
11540 | NULL_TREE, orig_type, NULL_TREE); |
11541 | cs->orig_type = orig_type; |
11542 | cs->cases = splay_tree_new (case_compare, NULL, NULL); |
11543 | cs->bindings = c_get_switch_bindings (); |
11544 | cs->break_stmt_seen_p = false; |
11545 | cs->bool_cond_p = bool_cond_p; |
11546 | cs->next = c_switch_stack; |
11547 | c_switch_stack = cs; |
11548 | |
11549 | return add_stmt (cs->switch_stmt); |
11550 | } |
11551 | |
11552 | /* Process a case label at location LOC, with attributes ATTRS. */ |
11553 | |
11554 | tree |
11555 | do_case (location_t loc, tree low_value, tree high_value, tree attrs) |
11556 | { |
11557 | tree label = NULL_TREE; |
11558 | |
11559 | if (low_value && TREE_CODE (low_value) != INTEGER_CST) |
11560 | { |
11561 | low_value = c_fully_fold (low_value, false, NULL); |
11562 | if (TREE_CODE (low_value) == INTEGER_CST) |
11563 | pedwarn (loc, OPT_Wpedantic, |
11564 | "case label is not an integer constant expression" ); |
11565 | } |
11566 | |
11567 | if (high_value && TREE_CODE (high_value) != INTEGER_CST) |
11568 | { |
11569 | high_value = c_fully_fold (high_value, false, NULL); |
11570 | if (TREE_CODE (high_value) == INTEGER_CST) |
11571 | pedwarn (input_location, OPT_Wpedantic, |
11572 | "case label is not an integer constant expression" ); |
11573 | } |
11574 | |
11575 | if (c_switch_stack == NULL) |
11576 | { |
11577 | if (low_value) |
11578 | error_at (loc, "case label not within a switch statement" ); |
11579 | else |
11580 | error_at (loc, "%<default%> label not within a switch statement" ); |
11581 | return NULL_TREE; |
11582 | } |
11583 | |
11584 | if (c_check_switch_jump_warnings (c_switch_stack->bindings, |
11585 | EXPR_LOCATION (c_switch_stack->switch_stmt), |
11586 | loc)) |
11587 | return NULL_TREE; |
11588 | |
11589 | label = c_add_case_label (loc, c_switch_stack->cases, |
11590 | SWITCH_STMT_COND (c_switch_stack->switch_stmt), |
11591 | low_value, high_value, attrs); |
11592 | if (label == error_mark_node) |
11593 | label = NULL_TREE; |
11594 | return label; |
11595 | } |
11596 | |
11597 | /* Finish the switch statement. TYPE is the original type of the |
11598 | controlling expression of the switch, or NULL_TREE. */ |
11599 | |
11600 | void |
11601 | c_finish_switch (tree body, tree type) |
11602 | { |
11603 | struct c_switch *cs = c_switch_stack; |
11604 | location_t switch_location; |
11605 | |
11606 | SWITCH_STMT_BODY (cs->switch_stmt) = body; |
11607 | |
11608 | /* Emit warnings as needed. */ |
11609 | switch_location = EXPR_LOCATION (cs->switch_stmt); |
11610 | c_do_switch_warnings (cs->cases, switch_location, |
11611 | type ? type : SWITCH_STMT_TYPE (cs->switch_stmt), |
11612 | SWITCH_STMT_COND (cs->switch_stmt), cs->bool_cond_p); |
11613 | if (c_switch_covers_all_cases_p (cs->cases, |
11614 | SWITCH_STMT_TYPE (cs->switch_stmt))) |
11615 | SWITCH_STMT_ALL_CASES_P (cs->switch_stmt) = 1; |
11616 | SWITCH_STMT_NO_BREAK_P (cs->switch_stmt) = !cs->break_stmt_seen_p; |
11617 | |
11618 | /* Pop the stack. */ |
11619 | c_switch_stack = cs->next; |
11620 | splay_tree_delete (cs->cases); |
11621 | c_release_switch_bindings (cs->bindings); |
11622 | XDELETE (cs); |
11623 | } |
11624 | |
11625 | /* Emit an if statement. IF_LOCUS is the location of the 'if'. COND, |
11626 | THEN_BLOCK and ELSE_BLOCK are expressions to be used; ELSE_BLOCK |
11627 | may be null. */ |
11628 | |
11629 | void |
11630 | c_finish_if_stmt (location_t if_locus, tree cond, tree then_block, |
11631 | tree else_block) |
11632 | { |
11633 | tree stmt; |
11634 | |
11635 | stmt = build3 (COND_EXPR, void_type_node, cond, then_block, else_block); |
11636 | SET_EXPR_LOCATION (stmt, if_locus); |
11637 | add_stmt (stmt); |
11638 | } |
11639 | |
/* Emit a break (IS_BREAK true) or continue (IS_BREAK false) statement.
   LOC is its location; LABEL is the Objective-C foreach exit/continue
   label, used when the enclosing construct is a foreach loop.  Returns
   the added statement, NULL_TREE on error, or NULL_TREE when the
   statement is unreachable and therefore skipped.  */
tree
c_finish_bc_stmt (location_t loc, tree label, bool is_break)
{
  /* In switch statements break is sometimes stylistically used after
     a return statement.  This can lead to spurious warnings about
     control reaching the end of a non-void function when it is
     inlined.  Note that we are calling block_may_fallthru with
     language specific tree nodes; this works because
     block_may_fallthru returns true when given something it does not
     understand.  */
  bool skip = !block_may_fallthru (cur_stmt_list);

  /* Validate against the kind of construct we are inside, encoded as
     flag bits in IN_STATEMENT.  */
  if (is_break)
    switch (in_statement)
      {
      case 0:
	error_at (loc, "break statement not within loop or switch" );
	return NULL_TREE;
      case IN_OMP_BLOCK:
	error_at (loc, "invalid exit from OpenMP structured block" );
	return NULL_TREE;
      case IN_OMP_FOR:
	error_at (loc, "break statement used with OpenMP for loop" );
	return NULL_TREE;
      case IN_ITERATION_STMT:
      case IN_OBJC_FOREACH:
	break;
      default:
	gcc_assert (in_statement & IN_SWITCH_STMT);
	/* Record the break for SWITCH_STMT_NO_BREAK_P.  */
	c_switch_stack->break_stmt_seen_p = true;
	break;
      }
  else
    /* continue ignores an enclosing switch, so mask that bit out.  */
    switch (in_statement & ~IN_SWITCH_STMT)
      {
      case 0:
	error_at (loc, "continue statement not within a loop" );
	return NULL_TREE;
      case IN_OMP_BLOCK:
	error_at (loc, "invalid exit from OpenMP structured block" );
	return NULL_TREE;
      case IN_ITERATION_STMT:
      case IN_OMP_FOR:
      case IN_OBJC_FOREACH:
	break;
      default:
	gcc_unreachable ();
      }

  if (skip)
    return NULL_TREE;
  else if ((in_statement & IN_OBJC_FOREACH)
	   && !(is_break && (in_statement & IN_SWITCH_STMT)))
    {
      /* The foreach expander produces low-level code using gotos instead
	 of a structured loop construct.  */
      gcc_assert (label);
      return add_stmt (build_stmt (loc, GOTO_EXPR, label));
    }
  return add_stmt (build_stmt (loc, (is_break ? BREAK_STMT : CONTINUE_STMT)));
}
11701 | |
11702 | /* A helper routine for c_process_expr_stmt and c_finish_stmt_expr. */ |
11703 | |
11704 | static void |
11705 | emit_side_effect_warnings (location_t loc, tree expr) |
11706 | { |
11707 | maybe_warn_nodiscard (loc, expr); |
11708 | if (!warn_unused_value) |
11709 | return; |
11710 | if (expr == error_mark_node) |
11711 | ; |
11712 | else if (!TREE_SIDE_EFFECTS (expr)) |
11713 | { |
11714 | if (!VOID_TYPE_P (TREE_TYPE (expr)) |
11715 | && !warning_suppressed_p (expr, OPT_Wunused_value)) |
11716 | warning_at (loc, OPT_Wunused_value, "statement with no effect" ); |
11717 | } |
11718 | else if (TREE_CODE (expr) == COMPOUND_EXPR) |
11719 | { |
11720 | tree r = expr; |
11721 | location_t cloc = loc; |
11722 | while (TREE_CODE (r) == COMPOUND_EXPR) |
11723 | { |
11724 | if (EXPR_HAS_LOCATION (r)) |
11725 | cloc = EXPR_LOCATION (r); |
11726 | r = TREE_OPERAND (r, 1); |
11727 | } |
11728 | if (!TREE_SIDE_EFFECTS (r) |
11729 | && !VOID_TYPE_P (TREE_TYPE (r)) |
11730 | && !CONVERT_EXPR_P (r) |
11731 | && !warning_suppressed_p (r, OPT_Wunused_value) |
11732 | && !warning_suppressed_p (expr, OPT_Wunused_value)) |
11733 | warning_at (cloc, OPT_Wunused_value, |
11734 | "right-hand operand of comma expression has no effect" ); |
11735 | } |
11736 | else |
11737 | warn_if_unused_value (expr, loc); |
11738 | } |
11739 | |
11740 | /* Process an expression as if it were a complete statement. Emit |
11741 | diagnostics, but do not call ADD_STMT. LOC is the location of the |
11742 | statement. */ |
11743 | |
11744 | tree |
11745 | c_process_expr_stmt (location_t loc, tree expr) |
11746 | { |
11747 | tree exprv; |
11748 | |
11749 | if (!expr) |
11750 | return NULL_TREE; |
11751 | |
11752 | expr = c_fully_fold (expr, false, NULL); |
11753 | |
11754 | if (warn_sequence_point) |
11755 | verify_sequence_points (expr); |
11756 | |
11757 | if (TREE_TYPE (expr) != error_mark_node |
11758 | && !COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (expr)) |
11759 | && TREE_CODE (TREE_TYPE (expr)) != ARRAY_TYPE) |
11760 | error_at (loc, "expression statement has incomplete type" ); |
11761 | |
11762 | /* If we're not processing a statement expression, warn about unused values. |
11763 | Warnings for statement expressions will be emitted later, once we figure |
11764 | out which is the result. */ |
11765 | if (!STATEMENT_LIST_STMT_EXPR (cur_stmt_list) |
11766 | && (warn_unused_value || warn_unused_result)) |
11767 | emit_side_effect_warnings (EXPR_LOC_OR_LOC (expr, loc), expr); |
11768 | |
11769 | exprv = expr; |
11770 | while (TREE_CODE (exprv) == COMPOUND_EXPR) |
11771 | exprv = TREE_OPERAND (exprv, 1); |
11772 | while (CONVERT_EXPR_P (exprv)) |
11773 | exprv = TREE_OPERAND (exprv, 0); |
11774 | if (DECL_P (exprv) |
11775 | || handled_component_p (t: exprv) |
11776 | || TREE_CODE (exprv) == ADDR_EXPR) |
11777 | mark_exp_read (exp: exprv); |
11778 | |
11779 | /* If the expression is not of a type to which we cannot assign a line |
11780 | number, wrap the thing in a no-op NOP_EXPR. */ |
11781 | if (DECL_P (expr) || CONSTANT_CLASS_P (expr)) |
11782 | { |
11783 | expr = build1 (NOP_EXPR, TREE_TYPE (expr), expr); |
11784 | SET_EXPR_LOCATION (expr, loc); |
11785 | } |
11786 | |
11787 | return expr; |
11788 | } |
11789 | |
11790 | /* Emit an expression as a statement. LOC is the location of the |
11791 | expression. */ |
11792 | |
11793 | tree |
11794 | c_finish_expr_stmt (location_t loc, tree expr) |
11795 | { |
11796 | if (expr) |
11797 | return add_stmt (c_process_expr_stmt (loc, expr)); |
11798 | else |
11799 | return NULL; |
11800 | } |
11801 | |
11802 | /* Do the opposite and emit a statement as an expression. To begin, |
11803 | create a new binding level and return it. */ |
11804 | |
11805 | tree |
11806 | c_begin_stmt_expr (void) |
11807 | { |
11808 | tree ret; |
11809 | |
11810 | /* We must force a BLOCK for this level so that, if it is not expanded |
11811 | later, there is a way to turn off the entire subtree of blocks that |
11812 | are contained in it. */ |
11813 | keep_next_level (); |
11814 | ret = c_begin_compound_stmt (true); |
11815 | |
11816 | c_bindings_start_stmt_expr (c_switch_stack == NULL |
11817 | ? NULL |
11818 | : c_switch_stack->bindings); |
11819 | |
11820 | /* Mark the current statement list as belonging to a statement list. */ |
11821 | STATEMENT_LIST_STMT_EXPR (ret) = 1; |
11822 | |
11823 | return ret; |
11824 | } |
11825 | |
/* Finish a statement expression begun with c_begin_stmt_expr.  BODY is
   the statement list that was opened for it.  LOC is the location of
   the compound statement to which this body belongs -- usually the
   location of the opening brace.

   Returns a tree for the value of the statement expression: the value
   of its last statement, captured into a temporary and wrapped in a
   TARGET_EXPR, or the bare BIND_EXPR when the expression yields no
   value (or when an error was already reported).  */

tree
c_finish_stmt_expr (location_t loc, tree body)
{
  tree last, type, tmp, val;
  tree *last_p;

  /* Close the compound statement; this always produces a BIND_EXPR.  */
  body = c_end_compound_stmt (loc, body, true);

  /* Balance the c_bindings_start_stmt_expr call made when the
     statement expression was begun.  */
  c_bindings_end_stmt_expr (c_switch_stack == NULL
			    ? NULL
			    : c_switch_stack->bindings);

  /* Locate the last statement in BODY.  See c_end_compound_stmt
     about always returning a BIND_EXPR.  */
  last_p = &BIND_EXPR_BODY (body);
  last = BIND_EXPR_BODY (body);

 continue_searching:
  if (TREE_CODE (last) == STATEMENT_LIST)
    {
      /* Walk backwards past debug markers to the last real statement.  */
      tree_stmt_iterator l = tsi_last (t: last);

      while (!tsi_end_p (i: l) && TREE_CODE (tsi_stmt (l)) == DEBUG_BEGIN_STMT)
	tsi_prev (i: &l);

      /* This can happen with degenerate cases like ({ }).  No value.  */
      if (tsi_end_p (i: l))
	return body;

      /* If we're supposed to generate side effects warnings, process
	 all of the statements except the last.  */
      if (warn_unused_value || warn_unused_result)
	{
	  for (tree_stmt_iterator i = tsi_start (t: last);
	       tsi_stmt (i) != tsi_stmt (i: l); tsi_next (i: &i))
	    {
	      location_t tloc;
	      tree t = tsi_stmt (i);

	      tloc = EXPR_HAS_LOCATION (t) ? EXPR_LOCATION (t) : loc;
	      emit_side_effect_warnings (loc: tloc, expr: t);
	    }
	}
      last_p = tsi_stmt_ptr (i: l);
      last = *last_p;
    }

  /* If the end of the list is exception related, then the list was split
     by a call to push_cleanup.  Continue searching.  */
  if (TREE_CODE (last) == TRY_FINALLY_EXPR
      || TREE_CODE (last) == TRY_CATCH_EXPR)
    {
      last_p = &TREE_OPERAND (last, 0);
      last = *last_p;
      goto continue_searching;
    }

  if (last == error_mark_node)
    return last;

  /* In the case that the BIND_EXPR is not necessary, return the
     expression out from inside it.  */
  if ((last == BIND_EXPR_BODY (body)
       /* Skip nested debug stmts.  */
       || last == expr_first (BIND_EXPR_BODY (body)))
      && BIND_EXPR_VARS (body) == NULL)
    {
      /* Even if this looks constant, do not allow it in a constant
	 expression.  */
      last = c_wrap_maybe_const (last, true);
      /* Do not warn if the return value of a statement expression is
	 unused.  */
      suppress_warning (last, OPT_Wunused);
      return last;
    }

  /* Extract the type of said expression.  */
  type = TREE_TYPE (last);

  /* If we're not returning a value at all, then the BIND_EXPR that
     we already have is a fine expression to return.  */
  if (!type || VOID_TYPE_P (type))
    return body;

  /* Now that we've located the expression containing the value, it seems
     silly to make voidify_wrapper_expr repeat the process.  Create a
     temporary of the appropriate type and stick it in a TARGET_EXPR.  */
  tmp = create_tmp_var_raw (type);

  /* Unwrap a no-op NOP_EXPR as added by c_finish_expr_stmt.  This avoids
     tree_expr_nonnegative_p giving up immediately.  */
  val = last;
  if (TREE_CODE (val) == NOP_EXPR
      && TREE_TYPE (val) == TREE_TYPE (TREE_OPERAND (val, 0)))
    val = TREE_OPERAND (val, 0);

  /* Replace the last statement in place with an assignment of its
     value into the temporary ...  */
  *last_p = build2 (MODIFY_EXPR, void_type_node, tmp, val);
  SET_EXPR_LOCATION (*last_p, EXPR_LOCATION (last));

  /* ... and let a TARGET_EXPR carry the temporary's value out as the
     value of the whole statement expression.  */
  {
    tree t = build4 (TARGET_EXPR, type, tmp, body, NULL_TREE, NULL_TREE);
    SET_EXPR_LOCATION (t, loc);
    return t;
  }
}
11934 | |
11935 | /* Begin and end compound statements. This is as simple as pushing |
11936 | and popping new statement lists from the tree. */ |
11937 | |
11938 | tree |
11939 | c_begin_compound_stmt (bool do_scope) |
11940 | { |
11941 | tree stmt = push_stmt_list (); |
11942 | if (do_scope) |
11943 | push_scope (); |
11944 | return stmt; |
11945 | } |
11946 | |
11947 | /* End a compound statement. STMT is the statement. LOC is the |
11948 | location of the compound statement-- this is usually the location |
11949 | of the opening brace. */ |
11950 | |
11951 | tree |
11952 | c_end_compound_stmt (location_t loc, tree stmt, bool do_scope) |
11953 | { |
11954 | tree block = NULL; |
11955 | |
11956 | if (do_scope) |
11957 | { |
11958 | if (c_dialect_objc ()) |
11959 | objc_clear_super_receiver (); |
11960 | block = pop_scope (); |
11961 | } |
11962 | |
11963 | stmt = pop_stmt_list (stmt); |
11964 | stmt = c_build_bind_expr (loc, block, stmt); |
11965 | |
11966 | /* If this compound statement is nested immediately inside a statement |
11967 | expression, then force a BIND_EXPR to be created. Otherwise we'll |
11968 | do the wrong thing for ({ { 1; } }) or ({ 1; { } }). In particular, |
11969 | STATEMENT_LISTs merge, and thus we can lose track of what statement |
11970 | was really last. */ |
11971 | if (building_stmt_list_p () |
11972 | && STATEMENT_LIST_STMT_EXPR (cur_stmt_list) |
11973 | && TREE_CODE (stmt) != BIND_EXPR) |
11974 | { |
11975 | stmt = build3 (BIND_EXPR, void_type_node, NULL, stmt, NULL); |
11976 | TREE_SIDE_EFFECTS (stmt) = 1; |
11977 | SET_EXPR_LOCATION (stmt, loc); |
11978 | } |
11979 | |
11980 | return stmt; |
11981 | } |
11982 | |
11983 | /* Queue a cleanup. CLEANUP is an expression/statement to be executed |
11984 | when the current scope is exited. EH_ONLY is true when this is not |
11985 | meant to apply to normal control flow transfer. */ |
11986 | |
11987 | void |
11988 | push_cleanup (tree decl, tree cleanup, bool eh_only) |
11989 | { |
11990 | enum tree_code code; |
11991 | tree stmt, list; |
11992 | bool stmt_expr; |
11993 | |
11994 | code = eh_only ? TRY_CATCH_EXPR : TRY_FINALLY_EXPR; |
11995 | stmt = build_stmt (DECL_SOURCE_LOCATION (decl), code, NULL, cleanup); |
11996 | add_stmt (stmt); |
11997 | stmt_expr = STATEMENT_LIST_STMT_EXPR (cur_stmt_list); |
11998 | list = push_stmt_list (); |
11999 | TREE_OPERAND (stmt, 0) = list; |
12000 | STATEMENT_LIST_STMT_EXPR (list) = stmt_expr; |
12001 | } |
12002 | |
12003 | /* Build a vector comparison of ARG0 and ARG1 using CODE opcode |
12004 | into a value of TYPE type. Comparison is done via VEC_COND_EXPR. */ |
12005 | |
12006 | static tree |
12007 | build_vec_cmp (tree_code code, tree type, |
12008 | tree arg0, tree arg1) |
12009 | { |
12010 | tree zero_vec = build_zero_cst (type); |
12011 | tree minus_one_vec = build_minus_one_cst (type); |
12012 | tree cmp_type = truth_type_for (TREE_TYPE (arg0)); |
12013 | tree cmp = build2 (code, cmp_type, arg0, arg1); |
12014 | return build3 (VEC_COND_EXPR, type, cmp, minus_one_vec, zero_vec); |
12015 | } |
12016 | |
/* Possibly warn about an address of OP never being NULL in a comparison
   operation CODE involving null.  CODE == EQ_EXPR selects the "always
   evaluates as false" wording; any other code selects the "always
   evaluates as true" wording.  */

static void
maybe_warn_for_null_address (location_t loc, tree op, tree_code code)
{
  /* Prevent warnings issued for macro expansion.  */
  if (!warn_address
      || warning_suppressed_p (op, OPT_Waddress)
      || from_macro_expansion_at (loc))
    return;

  if (TREE_CODE (op) == NOP_EXPR)
    {
      /* Allow casts to intptr_t to suppress the warning.  */
      tree type = TREE_TYPE (op);
      if (TREE_CODE (type) == INTEGER_TYPE)
	return;
      /* Otherwise look through the conversion.  */
      op = TREE_OPERAND (op, 0);
    }

  if (TREE_CODE (op) == POINTER_PLUS_EXPR)
    {
      /* Allow a cast to void* to suppress the warning.  */
      tree type = TREE_TYPE (TREE_TYPE (op));
      if (VOID_TYPE_P (type))
	return;

      /* Adding any value to a null pointer, including zero, is undefined
	 in C.  This includes the expression &p[0] where p is the null
	 pointer, although &p[0] will have been folded to p by this point
	 and so not diagnosed.  */
      if (code == EQ_EXPR)
	warning_at (loc, OPT_Waddress,
		    "the comparison will always evaluate as %<false%> "
		    "for the pointer operand in %qE must not be NULL",
		    op);
      else
	warning_at (loc, OPT_Waddress,
		    "the comparison will always evaluate as %<true%> "
		    "for the pointer operand in %qE must not be NULL",
		    op);

      return;
    }

  /* Everything below concerns the address-of operator.  */
  if (TREE_CODE (op) != ADDR_EXPR)
    return;

  op = TREE_OPERAND (op, 0);

  if (TREE_CODE (op) == IMAGPART_EXPR
      || TREE_CODE (op) == REALPART_EXPR)
    {
      /* The address of either complex part may not be null.  */
      if (code == EQ_EXPR)
	warning_at (loc, OPT_Waddress,
		    "the comparison will always evaluate as %<false%> "
		    "for the address of %qE will never be NULL",
		    op);
      else
	warning_at (loc, OPT_Waddress,
		    "the comparison will always evaluate as %<true%> "
		    "for the address of %qE will never be NULL",
		    op);
      return;
    }

  /* Set to true in the loop below if OP dereferences its operand.
     In such a case the ultimate target need not be a decl for
     the null [in]equality test to be constant.  */
  bool deref = false;

  /* Get the outermost array or object, or member.  */
  while (handled_component_p (t: op))
    {
      if (TREE_CODE (op) == COMPONENT_REF)
	{
	  /* Get the member (its address is never null).  */
	  op = TREE_OPERAND (op, 1);
	  break;
	}

      /* Get the outer array/object to refer to in the warning.  */
      op = TREE_OPERAND (op, 0);
      deref = true;
    }

  /* NOTE(review): the from_macro_expansion_at check here repeats the
     one at function entry with the same LOC; it looks redundant but is
     harmless -- confirm before removing.  */
  if ((!deref && !decl_with_nonnull_addr_p (op))
      || from_macro_expansion_at (loc))
    return;

  bool w;
  if (code == EQ_EXPR)
    w = warning_at (loc, OPT_Waddress,
		    "the comparison will always evaluate as %<false%> "
		    "for the address of %qE will never be NULL",
		    op);
  else
    w = warning_at (loc, OPT_Waddress,
		    "the comparison will always evaluate as %<true%> "
		    "for the address of %qE will never be NULL",
		    op);

  /* Point at the declaration whose address was taken, when known.  */
  if (w && DECL_P (op))
    inform (DECL_SOURCE_LOCATION (op), "%qD declared here", op);
}
12124 | |
12125 | /* Build a binary-operation expression without default conversions. |
12126 | CODE is the kind of expression to build. |
12127 | LOCATION is the operator's location. |
12128 | This function differs from `build' in several ways: |
12129 | the data type of the result is computed and recorded in it, |
12130 | warnings are generated if arg data types are invalid, |
12131 | special handling for addition and subtraction of pointers is known, |
12132 | and some optimization is done (operations on narrow ints |
12133 | are done in the narrower type when that gives the same result). |
12134 | Constant folding is also done before the result is returned. |
12135 | |
12136 | Note that the operands will never have enumeral types, or function |
12137 | or array types, because either they will have the default conversions |
12138 | performed or they have both just been converted to some other type in which |
12139 | the arithmetic is to be done. */ |
12140 | |
12141 | tree |
12142 | build_binary_op (location_t location, enum tree_code code, |
12143 | tree orig_op0, tree orig_op1, bool convert_p) |
12144 | { |
12145 | tree type0, type1, orig_type0, orig_type1; |
12146 | tree eptype; |
12147 | enum tree_code code0, code1; |
12148 | tree op0, op1; |
12149 | tree ret = error_mark_node; |
12150 | const char *invalid_op_diag; |
12151 | bool op0_int_operands, op1_int_operands; |
12152 | bool int_const, int_const_or_overflow, int_operands; |
12153 | |
12154 | /* Expression code to give to the expression when it is built. |
12155 | Normally this is CODE, which is what the caller asked for, |
12156 | but in some special cases we change it. */ |
12157 | enum tree_code resultcode = code; |
12158 | |
12159 | /* Data type in which the computation is to be performed. |
12160 | In the simplest cases this is the common type of the arguments. */ |
12161 | tree result_type = NULL; |
12162 | |
12163 | /* When the computation is in excess precision, the type of the |
12164 | final EXCESS_PRECISION_EXPR. */ |
12165 | tree semantic_result_type = NULL; |
12166 | |
12167 | /* Nonzero means operands have already been type-converted |
12168 | in whatever way is necessary. |
12169 | Zero means they need to be converted to RESULT_TYPE. */ |
12170 | int converted = 0; |
12171 | |
12172 | /* Nonzero means create the expression with this type, rather than |
12173 | RESULT_TYPE. */ |
12174 | tree build_type = NULL_TREE; |
12175 | |
12176 | /* Nonzero means after finally constructing the expression |
12177 | convert it to this type. */ |
12178 | tree final_type = NULL_TREE; |
12179 | |
12180 | /* Nonzero if this is an operation like MIN or MAX which can |
12181 | safely be computed in short if both args are promoted shorts. |
12182 | Also implies COMMON. |
12183 | -1 indicates a bitwise operation; this makes a difference |
12184 | in the exact conditions for when it is safe to do the operation |
12185 | in a narrower mode. */ |
12186 | int shorten = 0; |
12187 | |
12188 | /* Nonzero if this is a comparison operation; |
12189 | if both args are promoted shorts, compare the original shorts. |
12190 | Also implies COMMON. */ |
12191 | int short_compare = 0; |
12192 | |
12193 | /* Nonzero if this is a right-shift operation, which can be computed on the |
12194 | original short and then promoted if the operand is a promoted short. */ |
12195 | int short_shift = 0; |
12196 | |
12197 | /* Nonzero means set RESULT_TYPE to the common type of the args. */ |
12198 | int common = 0; |
12199 | |
12200 | /* True means types are compatible as far as ObjC is concerned. */ |
12201 | bool objc_ok; |
12202 | |
12203 | /* True means this is an arithmetic operation that may need excess |
12204 | precision. */ |
12205 | bool may_need_excess_precision; |
12206 | |
12207 | /* True means this is a boolean operation that converts both its |
12208 | operands to truth-values. */ |
12209 | bool boolean_op = false; |
12210 | |
12211 | /* Remember whether we're doing / or %. */ |
12212 | bool doing_div_or_mod = false; |
12213 | |
12214 | /* Remember whether we're doing << or >>. */ |
12215 | bool doing_shift = false; |
12216 | |
12217 | /* Tree holding instrumentation expression. */ |
12218 | tree instrument_expr = NULL; |
12219 | |
12220 | if (location == UNKNOWN_LOCATION) |
12221 | location = input_location; |
12222 | |
12223 | op0 = orig_op0; |
12224 | op1 = orig_op1; |
12225 | |
12226 | op0_int_operands = EXPR_INT_CONST_OPERANDS (orig_op0); |
12227 | if (op0_int_operands) |
12228 | op0 = remove_c_maybe_const_expr (expr: op0); |
12229 | op1_int_operands = EXPR_INT_CONST_OPERANDS (orig_op1); |
12230 | if (op1_int_operands) |
12231 | op1 = remove_c_maybe_const_expr (expr: op1); |
12232 | int_operands = (op0_int_operands && op1_int_operands); |
12233 | if (int_operands) |
12234 | { |
12235 | int_const_or_overflow = (TREE_CODE (orig_op0) == INTEGER_CST |
12236 | && TREE_CODE (orig_op1) == INTEGER_CST); |
12237 | int_const = (int_const_or_overflow |
12238 | && !TREE_OVERFLOW (orig_op0) |
12239 | && !TREE_OVERFLOW (orig_op1)); |
12240 | } |
12241 | else |
12242 | int_const = int_const_or_overflow = false; |
12243 | |
12244 | /* Do not apply default conversion in mixed vector/scalar expression. */ |
12245 | if (convert_p |
12246 | && VECTOR_TYPE_P (TREE_TYPE (op0)) == VECTOR_TYPE_P (TREE_TYPE (op1))) |
12247 | { |
12248 | op0 = default_conversion (exp: op0); |
12249 | op1 = default_conversion (exp: op1); |
12250 | } |
12251 | |
12252 | orig_type0 = type0 = TREE_TYPE (op0); |
12253 | |
12254 | orig_type1 = type1 = TREE_TYPE (op1); |
12255 | |
12256 | /* The expression codes of the data types of the arguments tell us |
12257 | whether the arguments are integers, floating, pointers, etc. */ |
12258 | code0 = TREE_CODE (type0); |
12259 | code1 = TREE_CODE (type1); |
12260 | |
12261 | /* Strip NON_LVALUE_EXPRs, etc., since we aren't using as an lvalue. */ |
12262 | STRIP_TYPE_NOPS (op0); |
12263 | STRIP_TYPE_NOPS (op1); |
12264 | |
12265 | /* If an error was already reported for one of the arguments, |
12266 | avoid reporting another error. */ |
12267 | |
12268 | if (code0 == ERROR_MARK || code1 == ERROR_MARK) |
12269 | return error_mark_node; |
12270 | |
12271 | if (code0 == POINTER_TYPE |
12272 | && reject_gcc_builtin (op0, EXPR_LOCATION (orig_op0))) |
12273 | return error_mark_node; |
12274 | |
12275 | if (code1 == POINTER_TYPE |
12276 | && reject_gcc_builtin (op1, EXPR_LOCATION (orig_op1))) |
12277 | return error_mark_node; |
12278 | |
12279 | if ((invalid_op_diag |
12280 | = targetm.invalid_binary_op (code, type0, type1))) |
12281 | { |
12282 | error_at (location, invalid_op_diag); |
12283 | return error_mark_node; |
12284 | } |
12285 | |
12286 | switch (code) |
12287 | { |
12288 | case PLUS_EXPR: |
12289 | case MINUS_EXPR: |
12290 | case MULT_EXPR: |
12291 | case TRUNC_DIV_EXPR: |
12292 | case CEIL_DIV_EXPR: |
12293 | case FLOOR_DIV_EXPR: |
12294 | case ROUND_DIV_EXPR: |
12295 | case EXACT_DIV_EXPR: |
12296 | may_need_excess_precision = true; |
12297 | break; |
12298 | |
12299 | case EQ_EXPR: |
12300 | case NE_EXPR: |
12301 | case LE_EXPR: |
12302 | case GE_EXPR: |
12303 | case LT_EXPR: |
12304 | case GT_EXPR: |
12305 | /* Excess precision for implicit conversions of integers to |
12306 | floating point in C11 and later. */ |
12307 | may_need_excess_precision = (flag_isoc11 |
12308 | && (ANY_INTEGRAL_TYPE_P (type0) |
12309 | || ANY_INTEGRAL_TYPE_P (type1))); |
12310 | break; |
12311 | |
12312 | default: |
12313 | may_need_excess_precision = false; |
12314 | break; |
12315 | } |
12316 | if (TREE_CODE (op0) == EXCESS_PRECISION_EXPR) |
12317 | { |
12318 | op0 = TREE_OPERAND (op0, 0); |
12319 | type0 = TREE_TYPE (op0); |
12320 | } |
12321 | else if (may_need_excess_precision |
12322 | && (eptype = excess_precision_type (type0)) != NULL_TREE) |
12323 | { |
12324 | type0 = eptype; |
12325 | op0 = convert (eptype, op0); |
12326 | } |
12327 | if (TREE_CODE (op1) == EXCESS_PRECISION_EXPR) |
12328 | { |
12329 | op1 = TREE_OPERAND (op1, 0); |
12330 | type1 = TREE_TYPE (op1); |
12331 | } |
12332 | else if (may_need_excess_precision |
12333 | && (eptype = excess_precision_type (type1)) != NULL_TREE) |
12334 | { |
12335 | type1 = eptype; |
12336 | op1 = convert (eptype, op1); |
12337 | } |
12338 | |
12339 | objc_ok = objc_compare_types (type0, type1, -3, NULL_TREE); |
12340 | |
12341 | /* In case when one of the operands of the binary operation is |
12342 | a vector and another is a scalar -- convert scalar to vector. */ |
12343 | if ((gnu_vector_type_p (type: type0) && code1 != VECTOR_TYPE) |
12344 | || (gnu_vector_type_p (type: type1) && code0 != VECTOR_TYPE)) |
12345 | { |
12346 | enum stv_conv convert_flag = scalar_to_vector (loc: location, code, op0: orig_op0, |
12347 | op1: orig_op1, true); |
12348 | |
12349 | switch (convert_flag) |
12350 | { |
12351 | case stv_error: |
12352 | return error_mark_node; |
12353 | case stv_firstarg: |
12354 | { |
12355 | bool maybe_const = true; |
12356 | tree sc; |
12357 | sc = c_fully_fold (op0, false, &maybe_const); |
12358 | sc = save_expr (sc); |
12359 | sc = convert (TREE_TYPE (type1), sc); |
12360 | op0 = build_vector_from_val (type1, sc); |
12361 | if (!maybe_const) |
12362 | op0 = c_wrap_maybe_const (op0, true); |
12363 | orig_type0 = type0 = TREE_TYPE (op0); |
12364 | code0 = TREE_CODE (type0); |
12365 | converted = 1; |
12366 | break; |
12367 | } |
12368 | case stv_secondarg: |
12369 | { |
12370 | bool maybe_const = true; |
12371 | tree sc; |
12372 | sc = c_fully_fold (op1, false, &maybe_const); |
12373 | sc = save_expr (sc); |
12374 | sc = convert (TREE_TYPE (type0), sc); |
12375 | op1 = build_vector_from_val (type0, sc); |
12376 | if (!maybe_const) |
12377 | op1 = c_wrap_maybe_const (op1, true); |
12378 | orig_type1 = type1 = TREE_TYPE (op1); |
12379 | code1 = TREE_CODE (type1); |
12380 | converted = 1; |
12381 | break; |
12382 | } |
12383 | default: |
12384 | break; |
12385 | } |
12386 | } |
12387 | |
12388 | switch (code) |
12389 | { |
12390 | case PLUS_EXPR: |
12391 | /* Handle the pointer + int case. */ |
12392 | if (code0 == POINTER_TYPE |
12393 | && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE)) |
12394 | { |
12395 | ret = pointer_int_sum (location, PLUS_EXPR, op0, op1); |
12396 | goto return_build_binary_op; |
12397 | } |
12398 | else if (code1 == POINTER_TYPE |
12399 | && (code0 == INTEGER_TYPE || code0 == BITINT_TYPE)) |
12400 | { |
12401 | ret = pointer_int_sum (location, PLUS_EXPR, op1, op0); |
12402 | goto return_build_binary_op; |
12403 | } |
12404 | else |
12405 | common = 1; |
12406 | break; |
12407 | |
12408 | case MINUS_EXPR: |
12409 | /* Subtraction of two similar pointers. |
12410 | We must subtract them as integers, then divide by object size. */ |
12411 | if (code0 == POINTER_TYPE && code1 == POINTER_TYPE |
12412 | && comp_target_types (location, ttl: type0, ttr: type1)) |
12413 | { |
12414 | ret = pointer_diff (loc: location, op0, op1, instrument_expr: &instrument_expr); |
12415 | goto return_build_binary_op; |
12416 | } |
12417 | /* Handle pointer minus int. Just like pointer plus int. */ |
12418 | else if (code0 == POINTER_TYPE |
12419 | && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE)) |
12420 | { |
12421 | ret = pointer_int_sum (location, MINUS_EXPR, op0, op1); |
12422 | goto return_build_binary_op; |
12423 | } |
12424 | else |
12425 | common = 1; |
12426 | break; |
12427 | |
12428 | case MULT_EXPR: |
12429 | common = 1; |
12430 | break; |
12431 | |
12432 | case TRUNC_DIV_EXPR: |
12433 | case CEIL_DIV_EXPR: |
12434 | case FLOOR_DIV_EXPR: |
12435 | case ROUND_DIV_EXPR: |
12436 | case EXACT_DIV_EXPR: |
12437 | doing_div_or_mod = true; |
12438 | warn_for_div_by_zero (location, divisor: op1); |
12439 | |
12440 | if ((code0 == INTEGER_TYPE || code0 == REAL_TYPE |
12441 | || code0 == FIXED_POINT_TYPE || code0 == BITINT_TYPE |
12442 | || code0 == COMPLEX_TYPE |
12443 | || gnu_vector_type_p (type: type0)) |
12444 | && (code1 == INTEGER_TYPE || code1 == REAL_TYPE |
12445 | || code1 == FIXED_POINT_TYPE || code1 == BITINT_TYPE |
12446 | || code1 == COMPLEX_TYPE |
12447 | || gnu_vector_type_p (type: type1))) |
12448 | { |
12449 | enum tree_code tcode0 = code0, tcode1 = code1; |
12450 | |
12451 | if (code0 == COMPLEX_TYPE || code0 == VECTOR_TYPE) |
12452 | tcode0 = TREE_CODE (TREE_TYPE (TREE_TYPE (op0))); |
12453 | if (code1 == COMPLEX_TYPE || code1 == VECTOR_TYPE) |
12454 | tcode1 = TREE_CODE (TREE_TYPE (TREE_TYPE (op1))); |
12455 | |
12456 | if (!(((tcode0 == INTEGER_TYPE || tcode0 == BITINT_TYPE) |
12457 | && (tcode1 == INTEGER_TYPE || tcode1 == BITINT_TYPE)) |
12458 | || (tcode0 == FIXED_POINT_TYPE && tcode1 == FIXED_POINT_TYPE))) |
12459 | resultcode = RDIV_EXPR; |
12460 | else |
12461 | /* Although it would be tempting to shorten always here, that |
12462 | loses on some targets, since the modulo instruction is |
12463 | undefined if the quotient can't be represented in the |
12464 | computation mode. We shorten only if unsigned or if |
12465 | dividing by something we know != -1. */ |
12466 | shorten = may_shorten_divmod (op0, op1); |
12467 | common = 1; |
12468 | } |
12469 | break; |
12470 | |
12471 | case BIT_AND_EXPR: |
12472 | case BIT_IOR_EXPR: |
12473 | case BIT_XOR_EXPR: |
12474 | if ((code0 == INTEGER_TYPE || code0 == BITINT_TYPE) |
12475 | && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE)) |
12476 | shorten = -1; |
12477 | /* Allow vector types which are not floating point types. */ |
12478 | else if (gnu_vector_type_p (type: type0) |
12479 | && gnu_vector_type_p (type: type1) |
12480 | && !VECTOR_FLOAT_TYPE_P (type0) |
12481 | && !VECTOR_FLOAT_TYPE_P (type1)) |
12482 | common = 1; |
12483 | break; |
12484 | |
12485 | case TRUNC_MOD_EXPR: |
12486 | case FLOOR_MOD_EXPR: |
12487 | doing_div_or_mod = true; |
12488 | warn_for_div_by_zero (location, divisor: op1); |
12489 | |
12490 | if (gnu_vector_type_p (type: type0) |
12491 | && gnu_vector_type_p (type: type1) |
12492 | && TREE_CODE (TREE_TYPE (type0)) == INTEGER_TYPE |
12493 | && TREE_CODE (TREE_TYPE (type1)) == INTEGER_TYPE) |
12494 | common = 1; |
12495 | else if ((code0 == INTEGER_TYPE || code0 == BITINT_TYPE) |
12496 | && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE)) |
12497 | { |
12498 | /* Although it would be tempting to shorten always here, that loses |
12499 | on some targets, since the modulo instruction is undefined if the |
12500 | quotient can't be represented in the computation mode. We shorten |
12501 | only if unsigned or if dividing by something we know != -1. */ |
12502 | shorten = may_shorten_divmod (op0, op1); |
12503 | common = 1; |
12504 | } |
12505 | break; |
12506 | |
12507 | case TRUTH_ANDIF_EXPR: |
12508 | case TRUTH_ORIF_EXPR: |
12509 | case TRUTH_AND_EXPR: |
12510 | case TRUTH_OR_EXPR: |
12511 | case TRUTH_XOR_EXPR: |
12512 | if ((code0 == INTEGER_TYPE || code0 == POINTER_TYPE |
12513 | || code0 == REAL_TYPE || code0 == COMPLEX_TYPE |
12514 | || code0 == FIXED_POINT_TYPE || code0 == NULLPTR_TYPE |
12515 | || code0 == BITINT_TYPE) |
12516 | && (code1 == INTEGER_TYPE || code1 == POINTER_TYPE |
12517 | || code1 == REAL_TYPE || code1 == COMPLEX_TYPE |
12518 | || code1 == FIXED_POINT_TYPE || code1 == NULLPTR_TYPE |
12519 | || code1 == BITINT_TYPE)) |
12520 | { |
12521 | /* Result of these operations is always an int, |
12522 | but that does not mean the operands should be |
12523 | converted to ints! */ |
12524 | result_type = integer_type_node; |
12525 | if (op0_int_operands) |
12526 | { |
12527 | op0 = c_objc_common_truthvalue_conversion (location, orig_op0); |
12528 | op0 = remove_c_maybe_const_expr (expr: op0); |
12529 | } |
12530 | else |
12531 | op0 = c_objc_common_truthvalue_conversion (location, op0); |
12532 | if (op1_int_operands) |
12533 | { |
12534 | op1 = c_objc_common_truthvalue_conversion (location, orig_op1); |
12535 | op1 = remove_c_maybe_const_expr (expr: op1); |
12536 | } |
12537 | else |
12538 | op1 = c_objc_common_truthvalue_conversion (location, op1); |
12539 | converted = 1; |
12540 | boolean_op = true; |
12541 | } |
12542 | if (code == TRUTH_ANDIF_EXPR) |
12543 | { |
12544 | int_const_or_overflow = (int_operands |
12545 | && TREE_CODE (orig_op0) == INTEGER_CST |
12546 | && (op0 == truthvalue_false_node |
12547 | || TREE_CODE (orig_op1) == INTEGER_CST)); |
12548 | int_const = (int_const_or_overflow |
12549 | && !TREE_OVERFLOW (orig_op0) |
12550 | && (op0 == truthvalue_false_node |
12551 | || !TREE_OVERFLOW (orig_op1))); |
12552 | } |
12553 | else if (code == TRUTH_ORIF_EXPR) |
12554 | { |
12555 | int_const_or_overflow = (int_operands |
12556 | && TREE_CODE (orig_op0) == INTEGER_CST |
12557 | && (op0 == truthvalue_true_node |
12558 | || TREE_CODE (orig_op1) == INTEGER_CST)); |
12559 | int_const = (int_const_or_overflow |
12560 | && !TREE_OVERFLOW (orig_op0) |
12561 | && (op0 == truthvalue_true_node |
12562 | || !TREE_OVERFLOW (orig_op1))); |
12563 | } |
12564 | break; |
12565 | |
12566 | /* Shift operations: result has same type as first operand; |
12567 | always convert second operand to int. |
12568 | Also set SHORT_SHIFT if shifting rightward. */ |
12569 | |
12570 | case RSHIFT_EXPR: |
12571 | if (gnu_vector_type_p (type: type0) |
12572 | && gnu_vector_type_p (type: type1) |
12573 | && TREE_CODE (TREE_TYPE (type0)) == INTEGER_TYPE |
12574 | && TREE_CODE (TREE_TYPE (type1)) == INTEGER_TYPE |
12575 | && known_eq (TYPE_VECTOR_SUBPARTS (type0), |
12576 | TYPE_VECTOR_SUBPARTS (type1))) |
12577 | { |
12578 | result_type = type0; |
12579 | converted = 1; |
12580 | } |
12581 | else if ((code0 == INTEGER_TYPE || code0 == FIXED_POINT_TYPE |
12582 | || code0 == BITINT_TYPE |
12583 | || (gnu_vector_type_p (type: type0) |
12584 | && TREE_CODE (TREE_TYPE (type0)) == INTEGER_TYPE)) |
12585 | && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE)) |
12586 | { |
12587 | doing_shift = true; |
12588 | if (TREE_CODE (op1) == INTEGER_CST) |
12589 | { |
12590 | if (tree_int_cst_sgn (op1) < 0) |
12591 | { |
12592 | int_const = false; |
12593 | if (c_inhibit_evaluation_warnings == 0) |
12594 | warning_at (location, OPT_Wshift_count_negative, |
12595 | "right shift count is negative" ); |
12596 | } |
12597 | else if (code0 == VECTOR_TYPE) |
12598 | { |
12599 | if (compare_tree_int (op1, |
12600 | TYPE_PRECISION (TREE_TYPE (type0))) |
12601 | >= 0) |
12602 | { |
12603 | int_const = false; |
12604 | if (c_inhibit_evaluation_warnings == 0) |
12605 | warning_at (location, OPT_Wshift_count_overflow, |
12606 | "right shift count >= width of vector element" ); |
12607 | } |
12608 | } |
12609 | else |
12610 | { |
12611 | if (!integer_zerop (op1)) |
12612 | short_shift = 1; |
12613 | |
12614 | if (compare_tree_int (op1, TYPE_PRECISION (type0)) >= 0) |
12615 | { |
12616 | int_const = false; |
12617 | if (c_inhibit_evaluation_warnings == 0) |
12618 | warning_at (location, OPT_Wshift_count_overflow, |
12619 | "right shift count >= width of type" ); |
12620 | } |
12621 | } |
12622 | } |
12623 | |
12624 | /* Use the type of the value to be shifted. */ |
12625 | result_type = type0; |
12626 | /* Avoid converting op1 to result_type later. */ |
12627 | converted = 1; |
12628 | } |
12629 | break; |
12630 | |
12631 | case LSHIFT_EXPR: |
12632 | if (gnu_vector_type_p (type: type0) |
12633 | && gnu_vector_type_p (type: type1) |
12634 | && TREE_CODE (TREE_TYPE (type0)) == INTEGER_TYPE |
12635 | && TREE_CODE (TREE_TYPE (type1)) == INTEGER_TYPE |
12636 | && known_eq (TYPE_VECTOR_SUBPARTS (type0), |
12637 | TYPE_VECTOR_SUBPARTS (type1))) |
12638 | { |
12639 | result_type = type0; |
12640 | converted = 1; |
12641 | } |
12642 | else if ((code0 == INTEGER_TYPE || code0 == FIXED_POINT_TYPE |
12643 | || code0 == BITINT_TYPE |
12644 | || (gnu_vector_type_p (type: type0) |
12645 | && TREE_CODE (TREE_TYPE (type0)) == INTEGER_TYPE)) |
12646 | && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE)) |
12647 | { |
12648 | doing_shift = true; |
12649 | if (TREE_CODE (op0) == INTEGER_CST |
12650 | && tree_int_cst_sgn (op0) < 0 |
12651 | && !TYPE_OVERFLOW_WRAPS (type0)) |
12652 | { |
12653 | /* Don't reject a left shift of a negative value in a context |
12654 | where a constant expression is needed in C90. */ |
12655 | if (flag_isoc99) |
12656 | int_const = false; |
12657 | if (c_inhibit_evaluation_warnings == 0) |
12658 | warning_at (location, OPT_Wshift_negative_value, |
12659 | "left shift of negative value" ); |
12660 | } |
12661 | if (TREE_CODE (op1) == INTEGER_CST) |
12662 | { |
12663 | if (tree_int_cst_sgn (op1) < 0) |
12664 | { |
12665 | int_const = false; |
12666 | if (c_inhibit_evaluation_warnings == 0) |
12667 | warning_at (location, OPT_Wshift_count_negative, |
12668 | "left shift count is negative" ); |
12669 | } |
12670 | else if (code0 == VECTOR_TYPE) |
12671 | { |
12672 | if (compare_tree_int (op1, |
12673 | TYPE_PRECISION (TREE_TYPE (type0))) |
12674 | >= 0) |
12675 | { |
12676 | int_const = false; |
12677 | if (c_inhibit_evaluation_warnings == 0) |
12678 | warning_at (location, OPT_Wshift_count_overflow, |
12679 | "left shift count >= width of vector element" ); |
12680 | } |
12681 | } |
12682 | else if (compare_tree_int (op1, TYPE_PRECISION (type0)) >= 0) |
12683 | { |
12684 | int_const = false; |
12685 | if (c_inhibit_evaluation_warnings == 0) |
12686 | warning_at (location, OPT_Wshift_count_overflow, |
12687 | "left shift count >= width of type" ); |
12688 | } |
12689 | else if (TREE_CODE (op0) == INTEGER_CST |
12690 | && maybe_warn_shift_overflow (location, op0, op1) |
12691 | && flag_isoc99) |
12692 | int_const = false; |
12693 | } |
12694 | |
12695 | /* Use the type of the value to be shifted. */ |
12696 | result_type = type0; |
12697 | /* Avoid converting op1 to result_type later. */ |
12698 | converted = 1; |
12699 | } |
12700 | break; |
12701 | |
12702 | case EQ_EXPR: |
12703 | case NE_EXPR: |
12704 | if (gnu_vector_type_p (type: type0) && gnu_vector_type_p (type: type1)) |
12705 | { |
12706 | tree intt; |
12707 | if (!vector_types_compatible_elements_p (type0, type1)) |
12708 | { |
12709 | error_at (location, "comparing vectors with different " |
12710 | "element types" ); |
12711 | return error_mark_node; |
12712 | } |
12713 | |
12714 | if (maybe_ne (a: TYPE_VECTOR_SUBPARTS (node: type0), |
12715 | b: TYPE_VECTOR_SUBPARTS (node: type1))) |
12716 | { |
12717 | error_at (location, "comparing vectors with different " |
12718 | "number of elements" ); |
12719 | return error_mark_node; |
12720 | } |
12721 | |
12722 | /* It's not precisely specified how the usual arithmetic |
12723 | conversions apply to the vector types. Here, we use |
12724 | the unsigned type if one of the operands is signed and |
12725 | the other one is unsigned. */ |
12726 | if (TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)) |
12727 | { |
12728 | if (!TYPE_UNSIGNED (type0)) |
12729 | op0 = build1 (VIEW_CONVERT_EXPR, type1, op0); |
12730 | else |
12731 | op1 = build1 (VIEW_CONVERT_EXPR, type0, op1); |
12732 | warning_at (location, OPT_Wsign_compare, "comparison between " |
12733 | "types %qT and %qT" , type0, type1); |
12734 | } |
12735 | |
12736 | /* Always construct signed integer vector type. */ |
12737 | intt = c_common_type_for_size (GET_MODE_BITSIZE |
12738 | (SCALAR_TYPE_MODE |
12739 | (TREE_TYPE (type0))), 0); |
12740 | if (!intt) |
12741 | { |
12742 | error_at (location, "could not find an integer type " |
12743 | "of the same size as %qT" , |
12744 | TREE_TYPE (type0)); |
12745 | return error_mark_node; |
12746 | } |
12747 | result_type = build_opaque_vector_type (intt, |
12748 | TYPE_VECTOR_SUBPARTS (node: type0)); |
12749 | converted = 1; |
12750 | ret = build_vec_cmp (code: resultcode, type: result_type, arg0: op0, arg1: op1); |
12751 | goto return_build_binary_op; |
12752 | } |
12753 | if (FLOAT_TYPE_P (type0) || FLOAT_TYPE_P (type1)) |
12754 | warning_at (location, |
12755 | OPT_Wfloat_equal, |
12756 | "comparing floating-point with %<==%> or %<!=%> is unsafe" ); |
12757 | /* Result of comparison is always int, |
12758 | but don't convert the args to int! */ |
12759 | build_type = integer_type_node; |
12760 | if ((code0 == INTEGER_TYPE || code0 == REAL_TYPE || code0 == BITINT_TYPE |
12761 | || code0 == FIXED_POINT_TYPE || code0 == COMPLEX_TYPE) |
12762 | && (code1 == INTEGER_TYPE || code1 == REAL_TYPE |
12763 | || code1 == BITINT_TYPE |
12764 | || code1 == FIXED_POINT_TYPE || code1 == COMPLEX_TYPE)) |
12765 | short_compare = 1; |
12766 | else if (code0 == POINTER_TYPE |
12767 | && (code1 == NULLPTR_TYPE |
12768 | || null_pointer_constant_p (expr: orig_op1))) |
12769 | { |
12770 | maybe_warn_for_null_address (loc: location, op: op0, code); |
12771 | result_type = type0; |
12772 | } |
12773 | else if (code1 == POINTER_TYPE |
12774 | && (code0 == NULLPTR_TYPE |
12775 | || null_pointer_constant_p (expr: orig_op0))) |
12776 | { |
12777 | maybe_warn_for_null_address (loc: location, op: op1, code); |
12778 | result_type = type1; |
12779 | } |
12780 | else if (code0 == POINTER_TYPE && code1 == POINTER_TYPE) |
12781 | { |
12782 | tree tt0 = TREE_TYPE (type0); |
12783 | tree tt1 = TREE_TYPE (type1); |
12784 | addr_space_t as0 = TYPE_ADDR_SPACE (tt0); |
12785 | addr_space_t as1 = TYPE_ADDR_SPACE (tt1); |
12786 | addr_space_t as_common = ADDR_SPACE_GENERIC; |
12787 | |
12788 | /* Anything compares with void *. void * compares with anything. |
12789 | Otherwise, the targets must be compatible |
12790 | and both must be object or both incomplete. */ |
12791 | if (comp_target_types (location, ttl: type0, ttr: type1)) |
12792 | result_type = common_pointer_type (t1: type0, t2: type1); |
12793 | else if (!addr_space_superset (as1: as0, as2: as1, common: &as_common)) |
12794 | { |
12795 | error_at (location, "comparison of pointers to " |
12796 | "disjoint address spaces" ); |
12797 | return error_mark_node; |
12798 | } |
12799 | else if (VOID_TYPE_P (tt0) && !TYPE_ATOMIC (tt0)) |
12800 | { |
12801 | if (pedantic && TREE_CODE (tt1) == FUNCTION_TYPE) |
12802 | pedwarn (location, OPT_Wpedantic, "ISO C forbids " |
12803 | "comparison of %<void *%> with function pointer" ); |
12804 | } |
12805 | else if (VOID_TYPE_P (tt1) && !TYPE_ATOMIC (tt1)) |
12806 | { |
12807 | if (pedantic && TREE_CODE (tt0) == FUNCTION_TYPE) |
12808 | pedwarn (location, OPT_Wpedantic, "ISO C forbids " |
12809 | "comparison of %<void *%> with function pointer" ); |
12810 | } |
12811 | else |
12812 | /* Avoid warning about the volatile ObjC EH puts on decls. */ |
12813 | if (!objc_ok) |
12814 | pedwarn (location, OPT_Wcompare_distinct_pointer_types, |
12815 | "comparison of distinct pointer types lacks a cast" ); |
12816 | |
12817 | if (result_type == NULL_TREE) |
12818 | { |
12819 | int qual = ENCODE_QUAL_ADDR_SPACE (as_common); |
12820 | result_type = build_pointer_type |
12821 | (build_qualified_type (void_type_node, qual)); |
12822 | } |
12823 | } |
12824 | else if (code0 == POINTER_TYPE |
12825 | && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE)) |
12826 | { |
12827 | result_type = type0; |
12828 | pedwarn (location, 0, "comparison between pointer and integer" ); |
12829 | } |
12830 | else if ((code0 == INTEGER_TYPE || code0 == BITINT_TYPE) |
12831 | && code1 == POINTER_TYPE) |
12832 | { |
12833 | result_type = type1; |
12834 | pedwarn (location, 0, "comparison between pointer and integer" ); |
12835 | } |
12836 | /* 6.5.9: One of the following shall hold: |
12837 | -- both operands have type nullptr_t; */ |
12838 | else if (code0 == NULLPTR_TYPE && code1 == NULLPTR_TYPE) |
12839 | { |
12840 | result_type = nullptr_type_node; |
12841 | /* No need to convert the operands to result_type later. */ |
12842 | converted = 1; |
12843 | } |
12844 | /* -- one operand has type nullptr_t and the other is a null pointer |
12845 | constant. We will have to convert the former to the type of the |
12846 | latter, because during gimplification we can't have mismatching |
12847 | comparison operand type. We convert from nullptr_t to the other |
12848 | type, since only nullptr_t can be converted to nullptr_t. Also, |
12849 | even a constant 0 is a null pointer constant, so we may have to |
12850 | create a pointer type from its type. */ |
12851 | else if (code0 == NULLPTR_TYPE && null_pointer_constant_p (expr: orig_op1)) |
12852 | result_type = (INTEGRAL_TYPE_P (type1) |
12853 | ? build_pointer_type (type1) : type1); |
12854 | else if (code1 == NULLPTR_TYPE && null_pointer_constant_p (expr: orig_op0)) |
12855 | result_type = (INTEGRAL_TYPE_P (type0) |
12856 | ? build_pointer_type (type0) : type0); |
12857 | if ((C_BOOLEAN_TYPE_P (TREE_TYPE (orig_op0)) |
12858 | || truth_value_p (TREE_CODE (orig_op0))) |
12859 | ^ (C_BOOLEAN_TYPE_P (TREE_TYPE (orig_op1)) |
12860 | || truth_value_p (TREE_CODE (orig_op1)))) |
12861 | maybe_warn_bool_compare (location, code, orig_op0, orig_op1); |
12862 | break; |
12863 | |
12864 | case LE_EXPR: |
12865 | case GE_EXPR: |
12866 | case LT_EXPR: |
12867 | case GT_EXPR: |
12868 | if (gnu_vector_type_p (type: type0) && gnu_vector_type_p (type: type1)) |
12869 | { |
12870 | tree intt; |
12871 | if (!vector_types_compatible_elements_p (type0, type1)) |
12872 | { |
12873 | error_at (location, "comparing vectors with different " |
12874 | "element types" ); |
12875 | return error_mark_node; |
12876 | } |
12877 | |
12878 | if (maybe_ne (a: TYPE_VECTOR_SUBPARTS (node: type0), |
12879 | b: TYPE_VECTOR_SUBPARTS (node: type1))) |
12880 | { |
12881 | error_at (location, "comparing vectors with different " |
12882 | "number of elements" ); |
12883 | return error_mark_node; |
12884 | } |
12885 | |
12886 | /* It's not precisely specified how the usual arithmetic |
12887 | conversions apply to the vector types. Here, we use |
12888 | the unsigned type if one of the operands is signed and |
12889 | the other one is unsigned. */ |
12890 | if (TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)) |
12891 | { |
12892 | if (!TYPE_UNSIGNED (type0)) |
12893 | op0 = build1 (VIEW_CONVERT_EXPR, type1, op0); |
12894 | else |
12895 | op1 = build1 (VIEW_CONVERT_EXPR, type0, op1); |
12896 | warning_at (location, OPT_Wsign_compare, "comparison between " |
12897 | "types %qT and %qT" , type0, type1); |
12898 | } |
12899 | |
12900 | /* Always construct signed integer vector type. */ |
12901 | intt = c_common_type_for_size (GET_MODE_BITSIZE |
12902 | (SCALAR_TYPE_MODE |
12903 | (TREE_TYPE (type0))), 0); |
12904 | if (!intt) |
12905 | { |
12906 | error_at (location, "could not find an integer type " |
12907 | "of the same size as %qT" , |
12908 | TREE_TYPE (type0)); |
12909 | return error_mark_node; |
12910 | } |
12911 | result_type = build_opaque_vector_type (intt, |
12912 | TYPE_VECTOR_SUBPARTS (node: type0)); |
12913 | converted = 1; |
12914 | ret = build_vec_cmp (code: resultcode, type: result_type, arg0: op0, arg1: op1); |
12915 | goto return_build_binary_op; |
12916 | } |
12917 | build_type = integer_type_node; |
12918 | if ((code0 == INTEGER_TYPE || code0 == REAL_TYPE |
12919 | || code0 == BITINT_TYPE || code0 == FIXED_POINT_TYPE) |
12920 | && (code1 == INTEGER_TYPE || code1 == REAL_TYPE |
12921 | || code1 == BITINT_TYPE || code1 == FIXED_POINT_TYPE)) |
12922 | short_compare = 1; |
12923 | else if (code0 == POINTER_TYPE && code1 == POINTER_TYPE) |
12924 | { |
12925 | addr_space_t as0 = TYPE_ADDR_SPACE (TREE_TYPE (type0)); |
12926 | addr_space_t as1 = TYPE_ADDR_SPACE (TREE_TYPE (type1)); |
12927 | addr_space_t as_common; |
12928 | |
12929 | if (comp_target_types (location, ttl: type0, ttr: type1)) |
12930 | { |
12931 | result_type = common_pointer_type (t1: type0, t2: type1); |
12932 | if (!COMPLETE_TYPE_P (TREE_TYPE (type0)) |
12933 | != !COMPLETE_TYPE_P (TREE_TYPE (type1))) |
12934 | pedwarn_c99 (location, opt: OPT_Wpedantic, |
12935 | "comparison of complete and incomplete pointers" ); |
12936 | else if (TREE_CODE (TREE_TYPE (type0)) == FUNCTION_TYPE) |
12937 | pedwarn (location, OPT_Wpedantic, "ISO C forbids " |
12938 | "ordered comparisons of pointers to functions" ); |
12939 | else if (null_pointer_constant_p (expr: orig_op0) |
12940 | || null_pointer_constant_p (expr: orig_op1)) |
12941 | warning_at (location, OPT_Wextra, |
12942 | "ordered comparison of pointer with null pointer" ); |
12943 | |
12944 | } |
12945 | else if (!addr_space_superset (as1: as0, as2: as1, common: &as_common)) |
12946 | { |
12947 | error_at (location, "comparison of pointers to " |
12948 | "disjoint address spaces" ); |
12949 | return error_mark_node; |
12950 | } |
12951 | else |
12952 | { |
12953 | int qual = ENCODE_QUAL_ADDR_SPACE (as_common); |
12954 | result_type = build_pointer_type |
12955 | (build_qualified_type (void_type_node, qual)); |
12956 | pedwarn (location, OPT_Wcompare_distinct_pointer_types, |
12957 | "comparison of distinct pointer types lacks a cast" ); |
12958 | } |
12959 | } |
12960 | else if (code0 == POINTER_TYPE && null_pointer_constant_p (expr: orig_op1)) |
12961 | { |
12962 | result_type = type0; |
12963 | if (pedantic) |
12964 | pedwarn (location, OPT_Wpedantic, |
12965 | "ordered comparison of pointer with integer zero" ); |
12966 | else if (extra_warnings) |
12967 | warning_at (location, OPT_Wextra, |
12968 | "ordered comparison of pointer with integer zero" ); |
12969 | } |
12970 | else if (code1 == POINTER_TYPE && null_pointer_constant_p (expr: orig_op0)) |
12971 | { |
12972 | result_type = type1; |
12973 | if (pedantic) |
12974 | pedwarn (location, OPT_Wpedantic, |
12975 | "ordered comparison of pointer with integer zero" ); |
12976 | else if (extra_warnings) |
12977 | warning_at (location, OPT_Wextra, |
12978 | "ordered comparison of pointer with integer zero" ); |
12979 | } |
12980 | else if (code0 == POINTER_TYPE |
12981 | && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE)) |
12982 | { |
12983 | result_type = type0; |
12984 | pedwarn (location, 0, "comparison between pointer and integer" ); |
12985 | } |
12986 | else if ((code0 == INTEGER_TYPE || code0 == BITINT_TYPE) |
12987 | && code1 == POINTER_TYPE) |
12988 | { |
12989 | result_type = type1; |
12990 | pedwarn (location, 0, "comparison between pointer and integer" ); |
12991 | } |
12992 | |
12993 | if ((code0 == POINTER_TYPE || code1 == POINTER_TYPE) |
12994 | && current_function_decl != NULL_TREE |
12995 | && sanitize_flags_p (flag: SANITIZE_POINTER_COMPARE)) |
12996 | { |
12997 | op0 = save_expr (op0); |
12998 | op1 = save_expr (op1); |
12999 | |
13000 | tree tt = builtin_decl_explicit (fncode: BUILT_IN_ASAN_POINTER_COMPARE); |
13001 | instrument_expr = build_call_expr_loc (location, tt, 2, op0, op1); |
13002 | } |
13003 | |
13004 | if ((C_BOOLEAN_TYPE_P (TREE_TYPE (orig_op0)) |
13005 | || truth_value_p (TREE_CODE (orig_op0))) |
13006 | ^ (C_BOOLEAN_TYPE_P (TREE_TYPE (orig_op1)) |
13007 | || truth_value_p (TREE_CODE (orig_op1)))) |
13008 | maybe_warn_bool_compare (location, code, orig_op0, orig_op1); |
13009 | break; |
13010 | |
13011 | case MIN_EXPR: |
13012 | case MAX_EXPR: |
13013 | /* Used for OpenMP atomics. */ |
13014 | gcc_assert (flag_openmp); |
13015 | common = 1; |
13016 | break; |
13017 | |
13018 | default: |
13019 | gcc_unreachable (); |
13020 | } |
13021 | |
13022 | if (code0 == ERROR_MARK || code1 == ERROR_MARK) |
13023 | return error_mark_node; |
13024 | |
13025 | if (gnu_vector_type_p (type: type0) |
13026 | && gnu_vector_type_p (type: type1) |
13027 | && (!tree_int_cst_equal (TYPE_SIZE (type0), TYPE_SIZE (type1)) |
13028 | || !vector_types_compatible_elements_p (type0, type1))) |
13029 | { |
13030 | gcc_rich_location richloc (location); |
13031 | maybe_range_label_for_tree_type_mismatch |
13032 | label_for_op0 (orig_op0, orig_op1), |
13033 | label_for_op1 (orig_op1, orig_op0); |
13034 | richloc.maybe_add_expr (t: orig_op0, label: &label_for_op0); |
13035 | richloc.maybe_add_expr (t: orig_op1, label: &label_for_op1); |
13036 | binary_op_error (&richloc, code, type0, type1); |
13037 | return error_mark_node; |
13038 | } |
13039 | |
13040 | if ((code0 == INTEGER_TYPE || code0 == REAL_TYPE || code0 == COMPLEX_TYPE |
13041 | || code0 == FIXED_POINT_TYPE || code0 == BITINT_TYPE |
13042 | || gnu_vector_type_p (type: type0)) |
13043 | && (code1 == INTEGER_TYPE || code1 == REAL_TYPE || code1 == COMPLEX_TYPE |
13044 | || code1 == FIXED_POINT_TYPE || code1 == BITINT_TYPE |
13045 | || gnu_vector_type_p (type: type1))) |
13046 | { |
13047 | bool first_complex = (code0 == COMPLEX_TYPE); |
13048 | bool second_complex = (code1 == COMPLEX_TYPE); |
13049 | int none_complex = (!first_complex && !second_complex); |
13050 | |
13051 | if (shorten || common || short_compare) |
13052 | { |
13053 | result_type = c_common_type (t1: type0, t2: type1); |
13054 | do_warn_double_promotion (result_type, type0, type1, |
13055 | "implicit conversion from %qT to %qT " |
13056 | "to match other operand of binary " |
13057 | "expression" , |
13058 | location); |
13059 | if (result_type == error_mark_node) |
13060 | return error_mark_node; |
13061 | } |
13062 | |
13063 | if (first_complex != second_complex |
13064 | && (code == PLUS_EXPR |
13065 | || code == MINUS_EXPR |
13066 | || code == MULT_EXPR |
13067 | || (code == TRUNC_DIV_EXPR && first_complex)) |
13068 | && TREE_CODE (TREE_TYPE (result_type)) == REAL_TYPE |
13069 | && flag_signed_zeros) |
13070 | { |
13071 | /* An operation on mixed real/complex operands must be |
13072 | handled specially, but the language-independent code can |
13073 | more easily optimize the plain complex arithmetic if |
13074 | -fno-signed-zeros. */ |
13075 | tree real_type = TREE_TYPE (result_type); |
13076 | tree real, imag; |
13077 | if (type0 != orig_type0 || type1 != orig_type1) |
13078 | { |
13079 | gcc_assert (may_need_excess_precision && common); |
13080 | semantic_result_type = c_common_type (t1: orig_type0, t2: orig_type1); |
13081 | } |
13082 | if (first_complex) |
13083 | { |
13084 | if (TREE_TYPE (op0) != result_type) |
13085 | op0 = convert_and_check (location, result_type, op0); |
13086 | if (TREE_TYPE (op1) != real_type) |
13087 | op1 = convert_and_check (location, real_type, op1); |
13088 | } |
13089 | else |
13090 | { |
13091 | if (TREE_TYPE (op0) != real_type) |
13092 | op0 = convert_and_check (location, real_type, op0); |
13093 | if (TREE_TYPE (op1) != result_type) |
13094 | op1 = convert_and_check (location, result_type, op1); |
13095 | } |
13096 | if (TREE_CODE (op0) == ERROR_MARK || TREE_CODE (op1) == ERROR_MARK) |
13097 | return error_mark_node; |
13098 | if (first_complex) |
13099 | { |
13100 | op0 = save_expr (op0); |
13101 | real = build_unary_op (EXPR_LOCATION (orig_op0), code: REALPART_EXPR, |
13102 | xarg: op0, noconvert: true); |
13103 | imag = build_unary_op (EXPR_LOCATION (orig_op0), code: IMAGPART_EXPR, |
13104 | xarg: op0, noconvert: true); |
13105 | switch (code) |
13106 | { |
13107 | case MULT_EXPR: |
13108 | case TRUNC_DIV_EXPR: |
13109 | op1 = save_expr (op1); |
13110 | imag = build2 (resultcode, real_type, imag, op1); |
13111 | /* Fall through. */ |
13112 | case PLUS_EXPR: |
13113 | case MINUS_EXPR: |
13114 | real = build2 (resultcode, real_type, real, op1); |
13115 | break; |
13116 | default: |
13117 | gcc_unreachable(); |
13118 | } |
13119 | } |
13120 | else |
13121 | { |
13122 | op1 = save_expr (op1); |
13123 | real = build_unary_op (EXPR_LOCATION (orig_op1), code: REALPART_EXPR, |
13124 | xarg: op1, noconvert: true); |
13125 | imag = build_unary_op (EXPR_LOCATION (orig_op1), code: IMAGPART_EXPR, |
13126 | xarg: op1, noconvert: true); |
13127 | switch (code) |
13128 | { |
13129 | case MULT_EXPR: |
13130 | op0 = save_expr (op0); |
13131 | imag = build2 (resultcode, real_type, op0, imag); |
13132 | /* Fall through. */ |
13133 | case PLUS_EXPR: |
13134 | real = build2 (resultcode, real_type, op0, real); |
13135 | break; |
13136 | case MINUS_EXPR: |
13137 | real = build2 (resultcode, real_type, op0, real); |
13138 | imag = build1 (NEGATE_EXPR, real_type, imag); |
13139 | break; |
13140 | default: |
13141 | gcc_unreachable(); |
13142 | } |
13143 | } |
13144 | ret = build2 (COMPLEX_EXPR, result_type, real, imag); |
13145 | goto return_build_binary_op; |
13146 | } |
13147 | |
13148 | /* For certain operations (which identify themselves by shorten != 0) |
13149 | if both args were extended from the same smaller type, |
13150 | do the arithmetic in that type and then extend. |
13151 | |
13152 | shorten !=0 and !=1 indicates a bitwise operation. |
13153 | For them, this optimization is safe only if |
13154 | both args are zero-extended or both are sign-extended. |
13155 | Otherwise, we might change the result. |
13156 | Eg, (short)-1 | (unsigned short)-1 is (int)-1 |
13157 | but calculated in (unsigned short) it would be (unsigned short)-1. */ |
13158 | |
13159 | if (shorten && none_complex) |
13160 | { |
13161 | final_type = result_type; |
13162 | result_type = shorten_binary_op (result_type, op0, op1, |
13163 | bitwise: shorten == -1); |
13164 | } |
13165 | |
13166 | /* Shifts can be shortened if shifting right. */ |
13167 | |
13168 | if (short_shift) |
13169 | { |
13170 | int unsigned_arg; |
13171 | tree arg0 = get_narrower (op0, &unsigned_arg); |
13172 | |
13173 | final_type = result_type; |
13174 | |
13175 | if (arg0 == op0 && final_type == TREE_TYPE (op0)) |
13176 | unsigned_arg = TYPE_UNSIGNED (TREE_TYPE (op0)); |
13177 | |
13178 | if (TYPE_PRECISION (TREE_TYPE (arg0)) < TYPE_PRECISION (result_type) |
13179 | && tree_int_cst_sgn (op1) > 0 |
13180 | /* We can shorten only if the shift count is less than the |
13181 | number of bits in the smaller type size. */ |
13182 | && compare_tree_int (op1, TYPE_PRECISION (TREE_TYPE (arg0))) < 0 |
13183 | /* We cannot drop an unsigned shift after sign-extension. */ |
13184 | && (!TYPE_UNSIGNED (final_type) || unsigned_arg)) |
13185 | { |
13186 | /* Do an unsigned shift if the operand was zero-extended. */ |
13187 | result_type |
13188 | = c_common_signed_or_unsigned_type (unsigned_arg, |
13189 | TREE_TYPE (arg0)); |
13190 | /* Convert value-to-be-shifted to that type. */ |
13191 | if (TREE_TYPE (op0) != result_type) |
13192 | op0 = convert (result_type, op0); |
13193 | converted = 1; |
13194 | } |
13195 | } |
13196 | |
13197 | /* Comparison operations are shortened too but differently. |
13198 | They identify themselves by setting short_compare = 1. */ |
13199 | |
13200 | if (short_compare) |
13201 | { |
13202 | /* Don't write &op0, etc., because that would prevent op0 |
13203 | from being kept in a register. |
13204 | Instead, make copies of the our local variables and |
13205 | pass the copies by reference, then copy them back afterward. */ |
13206 | tree xop0 = op0, xop1 = op1, xresult_type = result_type; |
13207 | enum tree_code xresultcode = resultcode; |
13208 | tree val |
13209 | = shorten_compare (location, &xop0, &xop1, &xresult_type, |
13210 | &xresultcode); |
13211 | |
13212 | if (val != NULL_TREE) |
13213 | { |
13214 | ret = val; |
13215 | goto return_build_binary_op; |
13216 | } |
13217 | |
13218 | op0 = xop0, op1 = xop1; |
13219 | converted = 1; |
13220 | resultcode = xresultcode; |
13221 | |
13222 | if (c_inhibit_evaluation_warnings == 0 && !c_in_omp_for) |
13223 | { |
13224 | bool op0_maybe_const = true; |
13225 | bool op1_maybe_const = true; |
13226 | tree orig_op0_folded, orig_op1_folded; |
13227 | |
13228 | if (in_late_binary_op) |
13229 | { |
13230 | orig_op0_folded = orig_op0; |
13231 | orig_op1_folded = orig_op1; |
13232 | } |
13233 | else |
13234 | { |
13235 | /* Fold for the sake of possible warnings, as in |
13236 | build_conditional_expr. This requires the |
13237 | "original" values to be folded, not just op0 and |
13238 | op1. */ |
13239 | c_inhibit_evaluation_warnings++; |
13240 | op0 = c_fully_fold (op0, require_constant_value, |
13241 | &op0_maybe_const); |
13242 | op1 = c_fully_fold (op1, require_constant_value, |
13243 | &op1_maybe_const); |
13244 | c_inhibit_evaluation_warnings--; |
13245 | orig_op0_folded = c_fully_fold (orig_op0, |
13246 | require_constant_value, |
13247 | NULL); |
13248 | orig_op1_folded = c_fully_fold (orig_op1, |
13249 | require_constant_value, |
13250 | NULL); |
13251 | } |
13252 | |
13253 | if (warn_sign_compare) |
13254 | warn_for_sign_compare (location, orig_op0: orig_op0_folded, |
13255 | orig_op1: orig_op1_folded, op0, op1, |
13256 | result_type, resultcode); |
13257 | if (!in_late_binary_op && !int_operands) |
13258 | { |
13259 | if (!op0_maybe_const || TREE_CODE (op0) != INTEGER_CST) |
13260 | op0 = c_wrap_maybe_const (op0, !op0_maybe_const); |
13261 | if (!op1_maybe_const || TREE_CODE (op1) != INTEGER_CST) |
13262 | op1 = c_wrap_maybe_const (op1, !op1_maybe_const); |
13263 | } |
13264 | } |
13265 | } |
13266 | } |
13267 | |
13268 | /* At this point, RESULT_TYPE must be nonzero to avoid an error message. |
13269 | If CONVERTED is zero, both args will be converted to type RESULT_TYPE. |
13270 | Then the expression will be built. |
13271 | It will be given type FINAL_TYPE if that is nonzero; |
13272 | otherwise, it will be given type RESULT_TYPE. */ |
13273 | |
13274 | if (!result_type) |
13275 | { |
13276 | /* Favor showing any expression locations that are available. */ |
13277 | op_location_t oploc (location, UNKNOWN_LOCATION); |
13278 | binary_op_rich_location richloc (oploc, orig_op0, orig_op1, true); |
13279 | binary_op_error (&richloc, code, TREE_TYPE (op0), TREE_TYPE (op1)); |
13280 | return error_mark_node; |
13281 | } |
13282 | |
13283 | if (build_type == NULL_TREE) |
13284 | { |
13285 | build_type = result_type; |
13286 | if ((type0 != orig_type0 || type1 != orig_type1) |
13287 | && !boolean_op) |
13288 | { |
13289 | gcc_assert (may_need_excess_precision && common); |
13290 | semantic_result_type = c_common_type (t1: orig_type0, t2: orig_type1); |
13291 | } |
13292 | } |
13293 | |
13294 | if (!converted) |
13295 | { |
13296 | op0 = ep_convert_and_check (loc: location, type: result_type, expr: op0, |
13297 | semantic_type: semantic_result_type); |
13298 | op1 = ep_convert_and_check (loc: location, type: result_type, expr: op1, |
13299 | semantic_type: semantic_result_type); |
13300 | |
13301 | /* This can happen if one operand has a vector type, and the other |
13302 | has a different type. */ |
13303 | if (TREE_CODE (op0) == ERROR_MARK || TREE_CODE (op1) == ERROR_MARK) |
13304 | return error_mark_node; |
13305 | } |
13306 | |
13307 | if (sanitize_flags_p (flag: (SANITIZE_SHIFT |
13308 | | SANITIZE_DIVIDE |
13309 | | SANITIZE_FLOAT_DIVIDE |
13310 | | SANITIZE_SI_OVERFLOW)) |
13311 | && current_function_decl != NULL_TREE |
13312 | && (doing_div_or_mod || doing_shift) |
13313 | && !require_constant_value) |
13314 | { |
13315 | /* OP0 and/or OP1 might have side-effects. */ |
13316 | op0 = save_expr (op0); |
13317 | op1 = save_expr (op1); |
13318 | op0 = c_fully_fold (op0, false, NULL); |
13319 | op1 = c_fully_fold (op1, false, NULL); |
13320 | if (doing_div_or_mod && (sanitize_flags_p (flag: (SANITIZE_DIVIDE |
13321 | | SANITIZE_FLOAT_DIVIDE |
13322 | | SANITIZE_SI_OVERFLOW)))) |
13323 | instrument_expr = ubsan_instrument_division (location, op0, op1); |
13324 | else if (doing_shift && sanitize_flags_p (flag: SANITIZE_SHIFT)) |
13325 | instrument_expr = ubsan_instrument_shift (location, code, op0, op1); |
13326 | } |
13327 | |
13328 | /* Treat expressions in initializers specially as they can't trap. */ |
13329 | if (int_const_or_overflow) |
13330 | ret = (require_constant_value |
13331 | ? fold_build2_initializer_loc (location, resultcode, build_type, |
13332 | op0, op1) |
13333 | : fold_build2_loc (location, resultcode, build_type, op0, op1)); |
13334 | else |
13335 | ret = build2 (resultcode, build_type, op0, op1); |
13336 | if (final_type != NULL_TREE) |
13337 | ret = convert (final_type, ret); |
13338 | |
13339 | return_build_binary_op: |
13340 | gcc_assert (ret != error_mark_node); |
13341 | if (TREE_CODE (ret) == INTEGER_CST && !TREE_OVERFLOW (ret) && !int_const) |
13342 | ret = (int_operands |
13343 | ? note_integer_operands (expr: ret) |
13344 | : build1 (NOP_EXPR, TREE_TYPE (ret), ret)); |
13345 | else if (TREE_CODE (ret) != INTEGER_CST && int_operands |
13346 | && !in_late_binary_op) |
13347 | ret = note_integer_operands (expr: ret); |
13348 | protected_set_expr_location (ret, location); |
13349 | |
13350 | if (instrument_expr != NULL) |
13351 | ret = fold_build2 (COMPOUND_EXPR, TREE_TYPE (ret), |
13352 | instrument_expr, ret); |
13353 | |
13354 | if (semantic_result_type) |
13355 | ret = build1_loc (loc: location, code: EXCESS_PRECISION_EXPR, |
13356 | type: semantic_result_type, arg1: ret); |
13357 | |
13358 | return ret; |
13359 | } |
13360 | |
13361 | |
13362 | /* Convert EXPR to be a truth-value, validating its type for this |
13363 | purpose. LOCATION is the source location for the expression. */ |
13364 | |
13365 | tree |
13366 | c_objc_common_truthvalue_conversion (location_t location, tree expr) |
13367 | { |
13368 | bool int_const, int_operands; |
13369 | |
13370 | switch (TREE_CODE (TREE_TYPE (expr))) |
13371 | { |
13372 | case ARRAY_TYPE: |
13373 | error_at (location, "used array that cannot be converted to pointer where scalar is required" ); |
13374 | return error_mark_node; |
13375 | |
13376 | case RECORD_TYPE: |
13377 | error_at (location, "used struct type value where scalar is required" ); |
13378 | return error_mark_node; |
13379 | |
13380 | case UNION_TYPE: |
13381 | error_at (location, "used union type value where scalar is required" ); |
13382 | return error_mark_node; |
13383 | |
13384 | case VOID_TYPE: |
13385 | error_at (location, "void value not ignored as it ought to be" ); |
13386 | return error_mark_node; |
13387 | |
13388 | case POINTER_TYPE: |
13389 | if (reject_gcc_builtin (expr)) |
13390 | return error_mark_node; |
13391 | break; |
13392 | |
13393 | case FUNCTION_TYPE: |
13394 | gcc_unreachable (); |
13395 | |
13396 | case VECTOR_TYPE: |
13397 | error_at (location, "used vector type where scalar is required" ); |
13398 | return error_mark_node; |
13399 | |
13400 | default: |
13401 | break; |
13402 | } |
13403 | |
13404 | int_const = (TREE_CODE (expr) == INTEGER_CST && !TREE_OVERFLOW (expr)); |
13405 | int_operands = EXPR_INT_CONST_OPERANDS (expr); |
13406 | if (int_operands && TREE_CODE (expr) != INTEGER_CST) |
13407 | { |
13408 | expr = remove_c_maybe_const_expr (expr); |
13409 | expr = build2 (NE_EXPR, integer_type_node, expr, |
13410 | convert (TREE_TYPE (expr), integer_zero_node)); |
13411 | expr = note_integer_operands (expr); |
13412 | } |
13413 | else |
13414 | /* ??? Should we also give an error for vectors rather than leaving |
13415 | those to give errors later? */ |
13416 | expr = c_common_truthvalue_conversion (location, expr); |
13417 | |
13418 | if (TREE_CODE (expr) == INTEGER_CST && int_operands && !int_const) |
13419 | { |
13420 | if (TREE_OVERFLOW (expr)) |
13421 | return expr; |
13422 | else |
13423 | return note_integer_operands (expr); |
13424 | } |
13425 | if (TREE_CODE (expr) == INTEGER_CST && !int_const) |
13426 | return build1 (NOP_EXPR, TREE_TYPE (expr), expr); |
13427 | return expr; |
13428 | } |
13429 | |
13430 | |
13431 | /* Convert EXPR to a contained DECL, updating *TC, *TI and *SE as |
13432 | required. */ |
13433 | |
13434 | tree |
13435 | c_expr_to_decl (tree expr, bool *tc ATTRIBUTE_UNUSED, bool *se) |
13436 | { |
13437 | if (TREE_CODE (expr) == COMPOUND_LITERAL_EXPR) |
13438 | { |
13439 | tree decl = COMPOUND_LITERAL_EXPR_DECL (expr); |
13440 | /* Executing a compound literal inside a function reinitializes |
13441 | it. */ |
13442 | if (!TREE_STATIC (decl)) |
13443 | *se = true; |
13444 | return decl; |
13445 | } |
13446 | else |
13447 | return expr; |
13448 | } |
13449 | |
13450 | /* Generate OMP construct CODE, with BODY and CLAUSES as its compound |
13451 | statement. LOC is the location of the construct. */ |
13452 | |
13453 | tree |
13454 | c_finish_omp_construct (location_t loc, enum tree_code code, tree body, |
13455 | tree clauses) |
13456 | { |
13457 | body = c_end_compound_stmt (loc, stmt: body, do_scope: true); |
13458 | |
13459 | tree stmt = make_node (code); |
13460 | TREE_TYPE (stmt) = void_type_node; |
13461 | OMP_BODY (stmt) = body; |
13462 | OMP_CLAUSES (stmt) = clauses; |
13463 | SET_EXPR_LOCATION (stmt, loc); |
13464 | |
13465 | return add_stmt (stmt); |
13466 | } |
13467 | |
13468 | /* Generate OACC_DATA, with CLAUSES and BLOCK as its compound |
13469 | statement. LOC is the location of the OACC_DATA. */ |
13470 | |
13471 | tree |
13472 | c_finish_oacc_data (location_t loc, tree clauses, tree block) |
13473 | { |
13474 | tree stmt; |
13475 | |
13476 | block = c_end_compound_stmt (loc, stmt: block, do_scope: true); |
13477 | |
13478 | stmt = make_node (OACC_DATA); |
13479 | TREE_TYPE (stmt) = void_type_node; |
13480 | OACC_DATA_CLAUSES (stmt) = clauses; |
13481 | OACC_DATA_BODY (stmt) = block; |
13482 | SET_EXPR_LOCATION (stmt, loc); |
13483 | |
13484 | return add_stmt (stmt); |
13485 | } |
13486 | |
13487 | /* Generate OACC_HOST_DATA, with CLAUSES and BLOCK as its compound |
13488 | statement. LOC is the location of the OACC_HOST_DATA. */ |
13489 | |
13490 | tree |
13491 | c_finish_oacc_host_data (location_t loc, tree clauses, tree block) |
13492 | { |
13493 | tree stmt; |
13494 | |
13495 | block = c_end_compound_stmt (loc, stmt: block, do_scope: true); |
13496 | |
13497 | stmt = make_node (OACC_HOST_DATA); |
13498 | TREE_TYPE (stmt) = void_type_node; |
13499 | OACC_HOST_DATA_CLAUSES (stmt) = clauses; |
13500 | OACC_HOST_DATA_BODY (stmt) = block; |
13501 | SET_EXPR_LOCATION (stmt, loc); |
13502 | |
13503 | return add_stmt (stmt); |
13504 | } |
13505 | |
13506 | /* Like c_begin_compound_stmt, except force the retention of the BLOCK. */ |
13507 | |
13508 | tree |
13509 | c_begin_omp_parallel (void) |
13510 | { |
13511 | tree block; |
13512 | |
13513 | keep_next_level (); |
13514 | block = c_begin_compound_stmt (do_scope: true); |
13515 | |
13516 | return block; |
13517 | } |
13518 | |
13519 | /* Generate OMP_PARALLEL, with CLAUSES and BLOCK as its compound |
13520 | statement. LOC is the location of the OMP_PARALLEL. */ |
13521 | |
13522 | tree |
13523 | c_finish_omp_parallel (location_t loc, tree clauses, tree block) |
13524 | { |
13525 | tree stmt; |
13526 | |
13527 | block = c_end_compound_stmt (loc, stmt: block, do_scope: true); |
13528 | |
13529 | stmt = make_node (OMP_PARALLEL); |
13530 | TREE_TYPE (stmt) = void_type_node; |
13531 | OMP_PARALLEL_CLAUSES (stmt) = clauses; |
13532 | OMP_PARALLEL_BODY (stmt) = block; |
13533 | SET_EXPR_LOCATION (stmt, loc); |
13534 | |
13535 | return add_stmt (stmt); |
13536 | } |
13537 | |
13538 | /* Like c_begin_compound_stmt, except force the retention of the BLOCK. */ |
13539 | |
13540 | tree |
13541 | c_begin_omp_task (void) |
13542 | { |
13543 | tree block; |
13544 | |
13545 | keep_next_level (); |
13546 | block = c_begin_compound_stmt (do_scope: true); |
13547 | |
13548 | return block; |
13549 | } |
13550 | |
13551 | /* Generate OMP_TASK, with CLAUSES and BLOCK as its compound |
13552 | statement. LOC is the location of the #pragma. */ |
13553 | |
13554 | tree |
13555 | c_finish_omp_task (location_t loc, tree clauses, tree block) |
13556 | { |
13557 | tree stmt; |
13558 | |
13559 | block = c_end_compound_stmt (loc, stmt: block, do_scope: true); |
13560 | |
13561 | stmt = make_node (OMP_TASK); |
13562 | TREE_TYPE (stmt) = void_type_node; |
13563 | OMP_TASK_CLAUSES (stmt) = clauses; |
13564 | OMP_TASK_BODY (stmt) = block; |
13565 | SET_EXPR_LOCATION (stmt, loc); |
13566 | |
13567 | return add_stmt (stmt); |
13568 | } |
13569 | |
13570 | /* Generate GOMP_cancel call for #pragma omp cancel. */ |
13571 | |
13572 | void |
13573 | c_finish_omp_cancel (location_t loc, tree clauses) |
13574 | { |
13575 | tree fn = builtin_decl_explicit (fncode: BUILT_IN_GOMP_CANCEL); |
13576 | int mask = 0; |
13577 | if (omp_find_clause (clauses, kind: OMP_CLAUSE_PARALLEL)) |
13578 | mask = 1; |
13579 | else if (omp_find_clause (clauses, kind: OMP_CLAUSE_FOR)) |
13580 | mask = 2; |
13581 | else if (omp_find_clause (clauses, kind: OMP_CLAUSE_SECTIONS)) |
13582 | mask = 4; |
13583 | else if (omp_find_clause (clauses, kind: OMP_CLAUSE_TASKGROUP)) |
13584 | mask = 8; |
13585 | else |
13586 | { |
13587 | error_at (loc, "%<#pragma omp cancel%> must specify one of " |
13588 | "%<parallel%>, %<for%>, %<sections%> or %<taskgroup%> " |
13589 | "clauses" ); |
13590 | return; |
13591 | } |
13592 | tree ifc = omp_find_clause (clauses, kind: OMP_CLAUSE_IF); |
13593 | if (ifc != NULL_TREE) |
13594 | { |
13595 | if (OMP_CLAUSE_IF_MODIFIER (ifc) != ERROR_MARK |
13596 | && OMP_CLAUSE_IF_MODIFIER (ifc) != VOID_CST) |
13597 | error_at (OMP_CLAUSE_LOCATION (ifc), |
13598 | "expected %<cancel%> %<if%> clause modifier" ); |
13599 | else |
13600 | { |
13601 | tree ifc2 = omp_find_clause (OMP_CLAUSE_CHAIN (ifc), kind: OMP_CLAUSE_IF); |
13602 | if (ifc2 != NULL_TREE) |
13603 | { |
13604 | gcc_assert (OMP_CLAUSE_IF_MODIFIER (ifc) == VOID_CST |
13605 | && OMP_CLAUSE_IF_MODIFIER (ifc2) != ERROR_MARK |
13606 | && OMP_CLAUSE_IF_MODIFIER (ifc2) != VOID_CST); |
13607 | error_at (OMP_CLAUSE_LOCATION (ifc2), |
13608 | "expected %<cancel%> %<if%> clause modifier" ); |
13609 | } |
13610 | } |
13611 | |
13612 | tree type = TREE_TYPE (OMP_CLAUSE_IF_EXPR (ifc)); |
13613 | ifc = fold_build2_loc (OMP_CLAUSE_LOCATION (ifc), NE_EXPR, |
13614 | boolean_type_node, OMP_CLAUSE_IF_EXPR (ifc), |
13615 | build_zero_cst (type)); |
13616 | } |
13617 | else |
13618 | ifc = boolean_true_node; |
13619 | tree stmt = build_call_expr_loc (loc, fn, 2, |
13620 | build_int_cst (integer_type_node, mask), |
13621 | ifc); |
13622 | add_stmt (stmt); |
13623 | } |
13624 | |
13625 | /* Generate GOMP_cancellation_point call for |
13626 | #pragma omp cancellation point. */ |
13627 | |
13628 | void |
13629 | c_finish_omp_cancellation_point (location_t loc, tree clauses) |
13630 | { |
13631 | tree fn = builtin_decl_explicit (fncode: BUILT_IN_GOMP_CANCELLATION_POINT); |
13632 | int mask = 0; |
13633 | if (omp_find_clause (clauses, kind: OMP_CLAUSE_PARALLEL)) |
13634 | mask = 1; |
13635 | else if (omp_find_clause (clauses, kind: OMP_CLAUSE_FOR)) |
13636 | mask = 2; |
13637 | else if (omp_find_clause (clauses, kind: OMP_CLAUSE_SECTIONS)) |
13638 | mask = 4; |
13639 | else if (omp_find_clause (clauses, kind: OMP_CLAUSE_TASKGROUP)) |
13640 | mask = 8; |
13641 | else |
13642 | { |
13643 | error_at (loc, "%<#pragma omp cancellation point%> must specify one of " |
13644 | "%<parallel%>, %<for%>, %<sections%> or %<taskgroup%> " |
13645 | "clauses" ); |
13646 | return; |
13647 | } |
13648 | tree stmt = build_call_expr_loc (loc, fn, 1, |
13649 | build_int_cst (integer_type_node, mask)); |
13650 | add_stmt (stmt); |
13651 | } |
13652 | |
13653 | /* Helper function for handle_omp_array_sections. Called recursively |
13654 | to handle multiple array-section-subscripts. C is the clause, |
T is the current expression (initially OMP_CLAUSE_DECL), which is either
13656 | a TREE_LIST for array-section-subscript (TREE_PURPOSE is low-bound |
13657 | expression if specified, TREE_VALUE length expression if specified, |
13658 | TREE_CHAIN is what it has been specified after, or some decl. |
13659 | TYPES vector is populated with array section types, MAYBE_ZERO_LEN |
13660 | set to true if any of the array-section-subscript could have length |
13661 | of zero (explicit or implicit), FIRST_NON_ONE is the index of the |
13662 | first array-section-subscript which is known not to have length |
13663 | of one. Given say: |
13664 | map(a[:b][2:1][:c][:2][:d][e:f][2:5]) |
13665 | FIRST_NON_ONE will be 3, array-section-subscript [:b], [2:1] and [:c] |
13666 | all are or may have length of 1, array-section-subscript [:2] is the |
13667 | first one known not to have length 1. For array-section-subscript |
13668 | <= FIRST_NON_ONE we diagnose non-contiguous arrays if low bound isn't |
13669 | 0 or length isn't the array domain max + 1, for > FIRST_NON_ONE we |
13670 | can if MAYBE_ZERO_LEN is false. MAYBE_ZERO_LEN will be true in the above |
13671 | case though, as some lengths could be zero. */ |
13672 | |
static tree
handle_omp_array_sections_1 (tree c, tree t, vec<tree> &types,
			     bool &maybe_zero_len, unsigned int &first_non_one,
			     enum c_omp_region_type ort)
{
  tree ret, low_bound, length, type;
  /* Base case of the recursion: T is the base expression of the array
     section (a decl or reference) rather than a TREE_LIST level.
     Validate the base and return it.  */
  if (TREE_CODE (t) != TREE_LIST)
    {
      if (error_operand_p (t))
	return error_mark_node;
      ret = t;
      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_AFFINITY
	  && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	  && TYPE_ATOMIC (strip_array_types (TREE_TYPE (t))))
	{
	  error_at (OMP_CLAUSE_LOCATION (c), "%<_Atomic%> %qE in %qs clause",
		    t, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	  return error_mark_node;
	}
      /* Strip dereferences and compound expressions to reach the
	 underlying object for the checks below; RET keeps the
	 original expression.  */
      while (INDIRECT_REF_P (t))
	{
	  t = TREE_OPERAND (t, 0);
	  STRIP_NOPS (t);
	  if (TREE_CODE (t) == POINTER_PLUS_EXPR)
	    t = TREE_OPERAND (t, 0);
	}
      while (TREE_CODE (t) == COMPOUND_EXPR)
	{
	  t = TREE_OPERAND (t, 1);
	  STRIP_NOPS (t);
	}
      /* For map/to/from clauses a COMPONENT_REF base gets extra
	 member-access checks (no bit-fields, no union members).  */
      if (TREE_CODE (t) == COMPONENT_REF
	  && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO
	      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM))
	{
	  if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"bit-field %qE in %qs clause",
			t, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	      return error_mark_node;
	    }
	  while (TREE_CODE (t) == COMPONENT_REF)
	    {
	      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0))) == UNION_TYPE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%qE is a member of a union", t);
		  return error_mark_node;
		}
	      t = TREE_OPERAND (t, 0);
	      while (TREE_CODE (t) == MEM_REF
		     || INDIRECT_REF_P (t)
		     || TREE_CODE (t) == ARRAY_REF)
		{
		  t = TREE_OPERAND (t, 0);
		  STRIP_NOPS (t);
		  if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		    t = TREE_OPERAND (t, 0);
		}
	      /* OpenACC does not allow dereferencing a pointer at a
		 non-zero offset inside the base expression.  */
	      if (ort == C_ORT_ACC && TREE_CODE (t) == MEM_REF)
		{
		  if (maybe_ne (a: mem_ref_offset (t), b: 0))
		    error_at (OMP_CLAUSE_LOCATION (c),
			      "cannot dereference %qE in %qs clause", t,
			      omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		  else
		    t = TREE_OPERAND (t, 0);
		}
	    }
	}
      /* The stripped base must be a plain variable or parameter.  */
      if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL)
	{
	  if (DECL_P (t))
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%qD is not a variable in %qs clause", t,
		      omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	  else
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%qE is not a variable in %qs clause", t,
		      omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	  return error_mark_node;
	}
      else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_AFFINITY
	       && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	       && TYPE_ATOMIC (TREE_TYPE (t)))
	{
	  error_at (OMP_CLAUSE_LOCATION (c), "%<_Atomic%> %qD in %qs clause",
		    t, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	  return error_mark_node;
	}
      else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_AFFINITY
	       && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	       && VAR_P (t)
	       && DECL_THREAD_LOCAL_P (t))
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "%qD is threadprivate variable in %qs clause", t,
		    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	  return error_mark_node;
	}
      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY
	   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
	  && TYPE_ATOMIC (TREE_TYPE (t))
	  && POINTER_TYPE_P (TREE_TYPE (t)))
	{
	  /* If the array section is pointer based and the pointer
	     itself is _Atomic qualified, we need to atomically load
	     the pointer.  */
	  c_expr expr;
	  memset (s: &expr, c: 0, n: sizeof (expr));
	  expr.value = ret;
	  expr = convert_lvalue_to_rvalue (OMP_CLAUSE_LOCATION (c),
					   exp: expr, convert_p: false, read_p: false);
	  ret = expr.value;
	}
      return ret;
    }

  /* Recursive case: handle the outer array-section-subscripts first,
     then check this level's low bound and length against RET's type.  */
  ret = handle_omp_array_sections_1 (c, TREE_CHAIN (t), types,
				     maybe_zero_len, first_non_one, ort);
  if (ret == error_mark_node || ret == NULL_TREE)
    return ret;

  type = TREE_TYPE (ret);
  low_bound = TREE_PURPOSE (t);
  length = TREE_VALUE (t);

  if (low_bound == error_mark_node || length == error_mark_node)
    return error_mark_node;

  if (low_bound && !INTEGRAL_TYPE_P (TREE_TYPE (low_bound)))
    {
      error_at (OMP_CLAUSE_LOCATION (c),
		"low bound %qE of array section does not have integral type",
		low_bound);
      return error_mark_node;
    }
  if (length && !INTEGRAL_TYPE_P (TREE_TYPE (length)))
    {
      error_at (OMP_CLAUSE_LOCATION (c),
		"length %qE of array section does not have integral type",
		length);
      return error_mark_node;
    }
  /* Narrow over-wide constants to sizetype so the bound checks and
     size arithmetic below operate in a common precision.  */
  if (low_bound
      && TREE_CODE (low_bound) == INTEGER_CST
      && TYPE_PRECISION (TREE_TYPE (low_bound))
	 > TYPE_PRECISION (sizetype))
    low_bound = fold_convert (sizetype, low_bound);
  if (length
      && TREE_CODE (length) == INTEGER_CST
      && TYPE_PRECISION (TREE_TYPE (length))
	 > TYPE_PRECISION (sizetype))
    length = fold_convert (sizetype, length);
  /* An omitted low bound defaults to zero.  */
  if (low_bound == NULL_TREE)
    low_bound = integer_zero_node;
  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
	  || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH))
    {
      /* attach/detach operate on a single pointer, i.e. length 1.  */
      if (length != integer_one_node)
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "expected single pointer in %qs clause",
		    user_omp_clause_code_name (c, ort == C_ORT_ACC));
	  return error_mark_node;
	}
    }
  if (length != NULL_TREE)
    {
      if (!integer_nonzerop (length))
	{
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY
	      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
	      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
	    {
	      if (integer_zerop (length))
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "zero length array section in %qs clause",
			    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		  return error_mark_node;
		}
	    }
	  else
	    maybe_zero_len = true;
	}
      /* Track the index of the first dimension not known to have
	 length 1 (see the function comment above).  */
      if (first_non_one == types.length ()
	  && (TREE_CODE (length) != INTEGER_CST || integer_onep (length)))
	first_non_one++;
    }
  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      if (length == NULL_TREE
	  && (TYPE_DOMAIN (type) == NULL_TREE
	      || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE))
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "for unknown bound array type length expression must "
		    "be specified");
	  return error_mark_node;
	}
      if (TREE_CODE (low_bound) == INTEGER_CST
	  && tree_int_cst_sgn (low_bound) == -1)
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "negative low bound in array section in %qs clause",
		    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	  return error_mark_node;
	}
      if (length != NULL_TREE
	  && TREE_CODE (length) == INTEGER_CST
	  && tree_int_cst_sgn (length) == -1)
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "negative length in array section in %qs clause",
		    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	  return error_mark_node;
	}
      /* With a constant array bound, check low bound and length (and
	 their sum) against the array size, SIZE = max index + 1.  */
      if (TYPE_DOMAIN (type)
	  && TYPE_MAX_VALUE (TYPE_DOMAIN (type))
	  && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
	     == INTEGER_CST)
	{
	  tree size
	    = fold_convert (sizetype, TYPE_MAX_VALUE (TYPE_DOMAIN (type)));
	  size = size_binop (PLUS_EXPR, size, size_one_node);
	  if (TREE_CODE (low_bound) == INTEGER_CST)
	    {
	      if (tree_int_cst_lt (t1: size, t2: low_bound))
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "low bound %qE above array section size "
			    "in %qs clause", low_bound,
			    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		  return error_mark_node;
		}
	      /* low_bound == size means the section is empty.  */
	      if (tree_int_cst_equal (size, low_bound))
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY
		      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
		      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
		      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"zero length array section in %qs clause",
				omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		      return error_mark_node;
		    }
		  maybe_zero_len = true;
		}
	      else if (length == NULL_TREE
		       && first_non_one == types.length ()
		       && tree_int_cst_equal
			    (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
			     low_bound))
		first_non_one++;
	    }
	  else if (length == NULL_TREE)
	    {
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_AFFINITY
		  && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
		  && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION
		  && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TASK_REDUCTION)
		maybe_zero_len = true;
	      if (first_non_one == types.length ())
		first_non_one++;
	    }
	  if (length && TREE_CODE (length) == INTEGER_CST)
	    {
	      if (tree_int_cst_lt (t1: size, t2: length))
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "length %qE above array section size "
			    "in %qs clause", length,
			    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		  return error_mark_node;
		}
	      if (TREE_CODE (low_bound) == INTEGER_CST)
		{
		  tree lbpluslen
		    = size_binop (PLUS_EXPR,
				  fold_convert (sizetype, low_bound),
				  fold_convert (sizetype, length));
		  if (TREE_CODE (lbpluslen) == INTEGER_CST
		      && tree_int_cst_lt (t1: size, t2: lbpluslen))
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"high bound %qE above array section size "
				"in %qs clause", lbpluslen,
				omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		      return error_mark_node;
		    }
		}
	    }
	}
      else if (length == NULL_TREE)
	{
	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_AFFINITY
	      && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	      && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION
	      && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TASK_REDUCTION)
	    maybe_zero_len = true;
	  if (first_non_one == types.length ())
	    first_non_one++;
	}

      /* For [lb:] we will need to evaluate lb more than once.  */
      if (length == NULL_TREE && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	{
	  tree lb = save_expr (low_bound);
	  if (lb != low_bound)
	    {
	      TREE_PURPOSE (t) = lb;
	      low_bound = lb;
	    }
	}
    }
  else if (TREE_CODE (type) == POINTER_TYPE)
    {
      /* A pointer-based dimension has no known extent, so a length is
	 mandatory.  */
      if (length == NULL_TREE)
	{
	  if (TREE_CODE (ret) == PARM_DECL && C_ARRAY_PARAMETER (ret))
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "for array function parameter length expression "
		      "must be specified");
	  else
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "for pointer type length expression must be specified");
	  return error_mark_node;
	}
      if (length != NULL_TREE
	  && TREE_CODE (length) == INTEGER_CST
	  && tree_int_cst_sgn (length) == -1)
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "negative length in array section in %qs clause",
		    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	  return error_mark_node;
	}
      /* If there is a pointer type anywhere but in the very first
	 array-section-subscript, the array section could be non-contiguous.  */
      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	  && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_AFFINITY
	  && TREE_CODE (TREE_CHAIN (t)) == TREE_LIST)
	{
	  /* If any prior dimension has a non-one length, then deem this
	     array section as non-contiguous.  */
	  for (tree d = TREE_CHAIN (t); TREE_CODE (d) == TREE_LIST;
	       d = TREE_CHAIN (d))
	    {
	      tree d_length = TREE_VALUE (d);
	      if (d_length == NULL_TREE || !integer_onep (d_length))
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "array section is not contiguous in %qs clause",
			    omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		  return error_mark_node;
		}
	    }
	}
    }
  else
    {
      error_at (OMP_CLAUSE_LOCATION (c),
		"%qE does not have pointer or array type", ret);
      return error_mark_node;
    }
  /* Record this level's type; the caller indexes TYPES by dimension.  */
  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
    types.safe_push (TREE_TYPE (ret));
  /* We will need to evaluate lb more than once.  */
  tree lb = save_expr (low_bound);
  if (lb != low_bound)
    {
      TREE_PURPOSE (t) = lb;
      low_bound = lb;
    }
  /* Fold this dimension into RET as an ARRAY_REF at the low bound.  */
  ret = build_array_ref (OMP_CLAUSE_LOCATION (c), array: ret, index: low_bound);
  return ret;
}
14060 | |
14061 | /* Handle array sections for clause C. */ |
14062 | |
14063 | static bool |
14064 | handle_omp_array_sections (tree c, enum c_omp_region_type ort) |
14065 | { |
14066 | bool maybe_zero_len = false; |
14067 | unsigned int first_non_one = 0; |
14068 | auto_vec<tree, 10> types; |
14069 | tree *tp = &OMP_CLAUSE_DECL (c); |
14070 | if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND |
14071 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY) |
14072 | && TREE_CODE (*tp) == TREE_LIST |
14073 | && TREE_PURPOSE (*tp) |
14074 | && TREE_CODE (TREE_PURPOSE (*tp)) == TREE_VEC) |
14075 | tp = &TREE_VALUE (*tp); |
14076 | tree first = handle_omp_array_sections_1 (c, t: *tp, types, |
14077 | maybe_zero_len, first_non_one, |
14078 | ort); |
14079 | if (first == error_mark_node) |
14080 | return true; |
14081 | if (first == NULL_TREE) |
14082 | return false; |
14083 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND |
14084 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY) |
14085 | { |
14086 | tree t = *tp; |
14087 | tree tem = NULL_TREE; |
14088 | /* Need to evaluate side effects in the length expressions |
14089 | if any. */ |
14090 | while (TREE_CODE (t) == TREE_LIST) |
14091 | { |
14092 | if (TREE_VALUE (t) && TREE_SIDE_EFFECTS (TREE_VALUE (t))) |
14093 | { |
14094 | if (tem == NULL_TREE) |
14095 | tem = TREE_VALUE (t); |
14096 | else |
14097 | tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), |
14098 | TREE_VALUE (t), tem); |
14099 | } |
14100 | t = TREE_CHAIN (t); |
14101 | } |
14102 | if (tem) |
14103 | first = build2 (COMPOUND_EXPR, TREE_TYPE (first), tem, first); |
14104 | first = c_fully_fold (first, false, NULL, true); |
14105 | *tp = first; |
14106 | } |
14107 | else |
14108 | { |
14109 | unsigned int num = types.length (), i; |
14110 | tree t, side_effects = NULL_TREE, size = NULL_TREE; |
14111 | tree condition = NULL_TREE; |
14112 | |
14113 | if (int_size_in_bytes (TREE_TYPE (first)) <= 0) |
14114 | maybe_zero_len = true; |
14115 | |
14116 | for (i = num, t = OMP_CLAUSE_DECL (c); i > 0; |
14117 | t = TREE_CHAIN (t)) |
14118 | { |
14119 | tree low_bound = TREE_PURPOSE (t); |
14120 | tree length = TREE_VALUE (t); |
14121 | |
14122 | i--; |
14123 | if (low_bound |
14124 | && TREE_CODE (low_bound) == INTEGER_CST |
14125 | && TYPE_PRECISION (TREE_TYPE (low_bound)) |
14126 | > TYPE_PRECISION (sizetype)) |
14127 | low_bound = fold_convert (sizetype, low_bound); |
14128 | if (length |
14129 | && TREE_CODE (length) == INTEGER_CST |
14130 | && TYPE_PRECISION (TREE_TYPE (length)) |
14131 | > TYPE_PRECISION (sizetype)) |
14132 | length = fold_convert (sizetype, length); |
14133 | if (low_bound == NULL_TREE) |
14134 | low_bound = integer_zero_node; |
14135 | if (!maybe_zero_len && i > first_non_one) |
14136 | { |
14137 | if (integer_nonzerop (low_bound)) |
14138 | goto do_warn_noncontiguous; |
14139 | if (length != NULL_TREE |
14140 | && TREE_CODE (length) == INTEGER_CST |
14141 | && TYPE_DOMAIN (types[i]) |
14142 | && TYPE_MAX_VALUE (TYPE_DOMAIN (types[i])) |
14143 | && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (types[i]))) |
14144 | == INTEGER_CST) |
14145 | { |
14146 | tree size; |
14147 | size = size_binop (PLUS_EXPR, |
14148 | TYPE_MAX_VALUE (TYPE_DOMAIN (types[i])), |
14149 | size_one_node); |
14150 | if (!tree_int_cst_equal (length, size)) |
14151 | { |
14152 | do_warn_noncontiguous: |
14153 | error_at (OMP_CLAUSE_LOCATION (c), |
14154 | "array section is not contiguous in %qs " |
14155 | "clause" , |
14156 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
14157 | return true; |
14158 | } |
14159 | } |
14160 | if (length != NULL_TREE |
14161 | && TREE_SIDE_EFFECTS (length)) |
14162 | { |
14163 | if (side_effects == NULL_TREE) |
14164 | side_effects = length; |
14165 | else |
14166 | side_effects = build2 (COMPOUND_EXPR, |
14167 | TREE_TYPE (side_effects), |
14168 | length, side_effects); |
14169 | } |
14170 | } |
14171 | else |
14172 | { |
14173 | tree l; |
14174 | |
14175 | if (i > first_non_one |
14176 | && ((length && integer_nonzerop (length)) |
14177 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
14178 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION |
14179 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)) |
14180 | continue; |
14181 | if (length) |
14182 | l = fold_convert (sizetype, length); |
14183 | else |
14184 | { |
14185 | l = size_binop (PLUS_EXPR, |
14186 | TYPE_MAX_VALUE (TYPE_DOMAIN (types[i])), |
14187 | size_one_node); |
14188 | l = size_binop (MINUS_EXPR, l, |
14189 | fold_convert (sizetype, low_bound)); |
14190 | } |
14191 | if (i > first_non_one) |
14192 | { |
14193 | l = fold_build2 (NE_EXPR, boolean_type_node, l, |
14194 | size_zero_node); |
14195 | if (condition == NULL_TREE) |
14196 | condition = l; |
14197 | else |
14198 | condition = fold_build2 (BIT_AND_EXPR, boolean_type_node, |
14199 | l, condition); |
14200 | } |
14201 | else if (size == NULL_TREE) |
14202 | { |
14203 | size = size_in_bytes (TREE_TYPE (types[i])); |
14204 | tree eltype = TREE_TYPE (types[num - 1]); |
14205 | while (TREE_CODE (eltype) == ARRAY_TYPE) |
14206 | eltype = TREE_TYPE (eltype); |
14207 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
14208 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION |
14209 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION) |
14210 | { |
14211 | if (integer_zerop (size) |
14212 | || integer_zerop (size_in_bytes (t: eltype))) |
14213 | { |
14214 | error_at (OMP_CLAUSE_LOCATION (c), |
14215 | "zero length array section in %qs clause" , |
14216 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
14217 | return error_mark_node; |
14218 | } |
14219 | size = size_binop (EXACT_DIV_EXPR, size, |
14220 | size_in_bytes (eltype)); |
14221 | } |
14222 | size = size_binop (MULT_EXPR, size, l); |
14223 | if (condition) |
14224 | size = fold_build3 (COND_EXPR, sizetype, condition, |
14225 | size, size_zero_node); |
14226 | } |
14227 | else |
14228 | size = size_binop (MULT_EXPR, size, l); |
14229 | } |
14230 | } |
14231 | if (side_effects) |
14232 | size = build2 (COMPOUND_EXPR, sizetype, side_effects, size); |
14233 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
14234 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION |
14235 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION) |
14236 | { |
14237 | size = size_binop (MINUS_EXPR, size, size_one_node); |
14238 | size = c_fully_fold (size, false, NULL); |
14239 | size = save_expr (size); |
14240 | tree index_type = build_index_type (size); |
14241 | tree eltype = TREE_TYPE (first); |
14242 | while (TREE_CODE (eltype) == ARRAY_TYPE) |
14243 | eltype = TREE_TYPE (eltype); |
14244 | tree type = build_array_type (eltype, index_type); |
14245 | tree ptype = build_pointer_type (eltype); |
14246 | if (TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE) |
14247 | t = build_fold_addr_expr (t); |
14248 | tree t2 = build_fold_addr_expr (first); |
14249 | t2 = fold_convert_loc (OMP_CLAUSE_LOCATION (c), |
14250 | ptrdiff_type_node, t2); |
14251 | t2 = fold_build2_loc (OMP_CLAUSE_LOCATION (c), MINUS_EXPR, |
14252 | ptrdiff_type_node, t2, |
14253 | fold_convert_loc (OMP_CLAUSE_LOCATION (c), |
14254 | ptrdiff_type_node, t)); |
14255 | t2 = c_fully_fold (t2, false, NULL); |
14256 | if (tree_fits_shwi_p (t2)) |
14257 | t = build2 (MEM_REF, type, t, |
14258 | build_int_cst (ptype, tree_to_shwi (t2))); |
14259 | else |
14260 | { |
14261 | t2 = fold_convert_loc (OMP_CLAUSE_LOCATION (c), sizetype, t2); |
14262 | t = build2_loc (OMP_CLAUSE_LOCATION (c), code: POINTER_PLUS_EXPR, |
14263 | TREE_TYPE (t), arg0: t, arg1: t2); |
14264 | t = build2 (MEM_REF, type, t, build_int_cst (ptype, 0)); |
14265 | } |
14266 | OMP_CLAUSE_DECL (c) = t; |
14267 | return false; |
14268 | } |
14269 | first = c_fully_fold (first, false, NULL); |
14270 | OMP_CLAUSE_DECL (c) = first; |
14271 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR) |
14272 | return false; |
14273 | if (size) |
14274 | size = c_fully_fold (size, false, NULL); |
14275 | OMP_CLAUSE_SIZE (c) = size; |
14276 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP |
14277 | || (TREE_CODE (t) == COMPONENT_REF |
14278 | && TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE)) |
14279 | return false; |
14280 | gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR); |
14281 | switch (OMP_CLAUSE_MAP_KIND (c)) |
14282 | { |
14283 | case GOMP_MAP_ALLOC: |
14284 | case GOMP_MAP_IF_PRESENT: |
14285 | case GOMP_MAP_TO: |
14286 | case GOMP_MAP_FROM: |
14287 | case GOMP_MAP_TOFROM: |
14288 | case GOMP_MAP_ALWAYS_TO: |
14289 | case GOMP_MAP_ALWAYS_FROM: |
14290 | case GOMP_MAP_ALWAYS_TOFROM: |
14291 | case GOMP_MAP_RELEASE: |
14292 | case GOMP_MAP_DELETE: |
14293 | case GOMP_MAP_FORCE_TO: |
14294 | case GOMP_MAP_FORCE_FROM: |
14295 | case GOMP_MAP_FORCE_TOFROM: |
14296 | case GOMP_MAP_FORCE_PRESENT: |
14297 | OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c) = 1; |
14298 | break; |
14299 | default: |
14300 | break; |
14301 | } |
14302 | tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP); |
14303 | if (TREE_CODE (t) == COMPONENT_REF) |
14304 | OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ATTACH_DETACH); |
14305 | else |
14306 | OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_FIRSTPRIVATE_POINTER); |
14307 | OMP_CLAUSE_MAP_IMPLICIT (c2) = OMP_CLAUSE_MAP_IMPLICIT (c); |
14308 | if (OMP_CLAUSE_MAP_KIND (c2) != GOMP_MAP_FIRSTPRIVATE_POINTER |
14309 | && !c_mark_addressable (exp: t)) |
14310 | return false; |
14311 | OMP_CLAUSE_DECL (c2) = t; |
14312 | t = build_fold_addr_expr (first); |
14313 | t = fold_convert_loc (OMP_CLAUSE_LOCATION (c), ptrdiff_type_node, t); |
14314 | tree ptr = OMP_CLAUSE_DECL (c2); |
14315 | if (!POINTER_TYPE_P (TREE_TYPE (ptr))) |
14316 | ptr = build_fold_addr_expr (ptr); |
14317 | t = fold_build2_loc (OMP_CLAUSE_LOCATION (c), MINUS_EXPR, |
14318 | ptrdiff_type_node, t, |
14319 | fold_convert_loc (OMP_CLAUSE_LOCATION (c), |
14320 | ptrdiff_type_node, ptr)); |
14321 | t = c_fully_fold (t, false, NULL); |
14322 | OMP_CLAUSE_SIZE (c2) = t; |
14323 | OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (c); |
14324 | OMP_CLAUSE_CHAIN (c) = c2; |
14325 | } |
14326 | return false; |
14327 | } |
14328 | |
14329 | /* Helper function of finish_omp_clauses. Clone STMT as if we were making |
14330 | an inline call. But, remap |
14331 | the OMP_DECL1 VAR_DECL (omp_out resp. omp_orig) to PLACEHOLDER |
14332 | and OMP_DECL2 VAR_DECL (omp_in resp. omp_priv) to DECL. */ |
14333 | |
14334 | static tree |
14335 | c_clone_omp_udr (tree stmt, tree omp_decl1, tree omp_decl2, |
14336 | tree decl, tree placeholder) |
14337 | { |
14338 | copy_body_data id; |
14339 | hash_map<tree, tree> decl_map; |
14340 | |
14341 | decl_map.put (k: omp_decl1, v: placeholder); |
14342 | decl_map.put (k: omp_decl2, v: decl); |
14343 | memset (s: &id, c: 0, n: sizeof (id)); |
14344 | id.src_fn = DECL_CONTEXT (omp_decl1); |
14345 | id.dst_fn = current_function_decl; |
14346 | id.src_cfun = DECL_STRUCT_FUNCTION (id.src_fn); |
14347 | id.decl_map = &decl_map; |
14348 | |
14349 | id.copy_decl = copy_decl_no_change; |
14350 | id.transform_call_graph_edges = CB_CGE_DUPLICATE; |
14351 | id.transform_new_cfg = true; |
14352 | id.transform_return_to_modify = false; |
14353 | id.eh_lp_nr = 0; |
14354 | walk_tree (&stmt, copy_tree_body_r, &id, NULL); |
14355 | return stmt; |
14356 | } |
14357 | |
14358 | /* Helper function of c_finish_omp_clauses, called via walk_tree. |
14359 | Find OMP_CLAUSE_PLACEHOLDER (passed in DATA) in *TP. */ |
14360 | |
14361 | static tree |
14362 | c_find_omp_placeholder_r (tree *tp, int *, void *data) |
14363 | { |
14364 | if (*tp == (tree) data) |
14365 | return *tp; |
14366 | return NULL_TREE; |
14367 | } |
14368 | |
/* Similarly, but also walk aggregate fields.  */

/* Walk-tree callback data for c_find_omp_var_r: VAR is the decl being
   searched for, PSET the visited-node set shared across sub-walks.  */
struct c_find_omp_var_s { tree var; hash_set<tree> *pset; };
14372 | |
14373 | static tree |
14374 | c_find_omp_var_r (tree *tp, int *, void *data) |
14375 | { |
14376 | if (*tp == ((struct c_find_omp_var_s *) data)->var) |
14377 | return *tp; |
14378 | if (RECORD_OR_UNION_TYPE_P (*tp)) |
14379 | { |
14380 | tree field; |
14381 | hash_set<tree> *pset = ((struct c_find_omp_var_s *) data)->pset; |
14382 | |
14383 | for (field = TYPE_FIELDS (*tp); field; |
14384 | field = DECL_CHAIN (field)) |
14385 | if (TREE_CODE (field) == FIELD_DECL) |
14386 | { |
14387 | tree ret = walk_tree (&DECL_FIELD_OFFSET (field), |
14388 | c_find_omp_var_r, data, pset); |
14389 | if (ret) |
14390 | return ret; |
14391 | ret = walk_tree (&DECL_SIZE (field), c_find_omp_var_r, data, pset); |
14392 | if (ret) |
14393 | return ret; |
14394 | ret = walk_tree (&DECL_SIZE_UNIT (field), c_find_omp_var_r, data, |
14395 | pset); |
14396 | if (ret) |
14397 | return ret; |
14398 | ret = walk_tree (&TREE_TYPE (field), c_find_omp_var_r, data, pset); |
14399 | if (ret) |
14400 | return ret; |
14401 | } |
14402 | } |
14403 | else if (INTEGRAL_TYPE_P (*tp)) |
14404 | return walk_tree (&TYPE_MAX_VALUE (*tp), c_find_omp_var_r, data, |
14405 | ((struct c_find_omp_var_s *) data)->pset); |
14406 | return NULL_TREE; |
14407 | } |
14408 | |
/* Finish OpenMP iterators ITER.  Return true if they are erroneous
   and clauses containing them should be removed.  */
14411 | |
14412 | static bool |
14413 | c_omp_finish_iterators (tree iter) |
14414 | { |
14415 | bool ret = false; |
14416 | for (tree it = iter; it; it = TREE_CHAIN (it)) |
14417 | { |
14418 | tree var = TREE_VEC_ELT (it, 0); |
14419 | tree begin = TREE_VEC_ELT (it, 1); |
14420 | tree end = TREE_VEC_ELT (it, 2); |
14421 | tree step = TREE_VEC_ELT (it, 3); |
14422 | tree orig_step; |
14423 | tree type = TREE_TYPE (var); |
14424 | location_t loc = DECL_SOURCE_LOCATION (var); |
14425 | if (type == error_mark_node) |
14426 | { |
14427 | ret = true; |
14428 | continue; |
14429 | } |
14430 | if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type)) |
14431 | { |
14432 | error_at (loc, "iterator %qD has neither integral nor pointer type" , |
14433 | var); |
14434 | ret = true; |
14435 | continue; |
14436 | } |
14437 | else if (TYPE_ATOMIC (type)) |
14438 | { |
14439 | error_at (loc, "iterator %qD has %<_Atomic%> qualified type" , var); |
14440 | ret = true; |
14441 | continue; |
14442 | } |
14443 | else if (TYPE_READONLY (type)) |
14444 | { |
14445 | error_at (loc, "iterator %qD has const qualified type" , var); |
14446 | ret = true; |
14447 | continue; |
14448 | } |
14449 | else if (step == error_mark_node |
14450 | || TREE_TYPE (step) == error_mark_node) |
14451 | { |
14452 | ret = true; |
14453 | continue; |
14454 | } |
14455 | else if (!INTEGRAL_TYPE_P (TREE_TYPE (step))) |
14456 | { |
14457 | error_at (EXPR_LOC_OR_LOC (step, loc), |
14458 | "iterator step with non-integral type" ); |
14459 | ret = true; |
14460 | continue; |
14461 | } |
14462 | begin = c_fully_fold (build_c_cast (loc, type, expr: begin), false, NULL); |
14463 | end = c_fully_fold (build_c_cast (loc, type, expr: end), false, NULL); |
14464 | orig_step = save_expr (c_fully_fold (step, false, NULL)); |
14465 | tree stype = POINTER_TYPE_P (type) ? sizetype : type; |
14466 | step = c_fully_fold (build_c_cast (loc, type: stype, expr: orig_step), false, NULL); |
14467 | if (POINTER_TYPE_P (type)) |
14468 | { |
14469 | begin = save_expr (begin); |
14470 | step = pointer_int_sum (loc, PLUS_EXPR, begin, step); |
14471 | step = fold_build2_loc (loc, MINUS_EXPR, sizetype, |
14472 | fold_convert (sizetype, step), |
14473 | fold_convert (sizetype, begin)); |
14474 | step = fold_convert (ssizetype, step); |
14475 | } |
14476 | if (integer_zerop (step)) |
14477 | { |
14478 | error_at (loc, "iterator %qD has zero step" , var); |
14479 | ret = true; |
14480 | continue; |
14481 | } |
14482 | |
14483 | if (begin == error_mark_node |
14484 | || end == error_mark_node |
14485 | || step == error_mark_node |
14486 | || orig_step == error_mark_node) |
14487 | { |
14488 | ret = true; |
14489 | continue; |
14490 | } |
14491 | hash_set<tree> pset; |
14492 | tree it2; |
14493 | for (it2 = TREE_CHAIN (it); it2; it2 = TREE_CHAIN (it2)) |
14494 | { |
14495 | tree var2 = TREE_VEC_ELT (it2, 0); |
14496 | tree begin2 = TREE_VEC_ELT (it2, 1); |
14497 | tree end2 = TREE_VEC_ELT (it2, 2); |
14498 | tree step2 = TREE_VEC_ELT (it2, 3); |
14499 | tree type2 = TREE_TYPE (var2); |
14500 | location_t loc2 = DECL_SOURCE_LOCATION (var2); |
14501 | struct c_find_omp_var_s data = { .var: var, .pset: &pset }; |
14502 | if (walk_tree (&type2, c_find_omp_var_r, &data, &pset)) |
14503 | { |
14504 | error_at (loc2, |
14505 | "type of iterator %qD refers to outer iterator %qD" , |
14506 | var2, var); |
14507 | break; |
14508 | } |
14509 | else if (walk_tree (&begin2, c_find_omp_var_r, &data, &pset)) |
14510 | { |
14511 | error_at (EXPR_LOC_OR_LOC (begin2, loc2), |
14512 | "begin expression refers to outer iterator %qD" , var); |
14513 | break; |
14514 | } |
14515 | else if (walk_tree (&end2, c_find_omp_var_r, &data, &pset)) |
14516 | { |
14517 | error_at (EXPR_LOC_OR_LOC (end2, loc2), |
14518 | "end expression refers to outer iterator %qD" , var); |
14519 | break; |
14520 | } |
14521 | else if (walk_tree (&step2, c_find_omp_var_r, &data, &pset)) |
14522 | { |
14523 | error_at (EXPR_LOC_OR_LOC (step2, loc2), |
14524 | "step expression refers to outer iterator %qD" , var); |
14525 | break; |
14526 | } |
14527 | } |
14528 | if (it2) |
14529 | { |
14530 | ret = true; |
14531 | continue; |
14532 | } |
14533 | TREE_VEC_ELT (it, 1) = begin; |
14534 | TREE_VEC_ELT (it, 2) = end; |
14535 | TREE_VEC_ELT (it, 3) = step; |
14536 | TREE_VEC_ELT (it, 4) = orig_step; |
14537 | } |
14538 | return ret; |
14539 | } |
14540 | |
14541 | /* Ensure that pointers are used in OpenACC attach and detach clauses. |
14542 | Return true if an error has been detected. */ |
14543 | |
14544 | static bool |
14545 | c_oacc_check_attachments (tree c) |
14546 | { |
14547 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP) |
14548 | return false; |
14549 | |
14550 | /* OpenACC attach / detach clauses must be pointers. */ |
14551 | if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH |
14552 | || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH) |
14553 | { |
14554 | tree t = OMP_CLAUSE_DECL (c); |
14555 | |
14556 | while (TREE_CODE (t) == TREE_LIST) |
14557 | t = TREE_CHAIN (t); |
14558 | |
14559 | if (TREE_CODE (TREE_TYPE (t)) != POINTER_TYPE) |
14560 | { |
14561 | error_at (OMP_CLAUSE_LOCATION (c), "expected pointer in %qs clause" , |
14562 | user_omp_clause_code_name (c, true)); |
14563 | return true; |
14564 | } |
14565 | } |
14566 | |
14567 | return false; |
14568 | } |
14569 | |
14570 | /* For all elements of CLAUSES, validate them against their constraints. |
14571 | Remove any elements from the list that are invalid. */ |
14572 | |
14573 | tree |
14574 | c_finish_omp_clauses (tree clauses, enum c_omp_region_type ort) |
14575 | { |
14576 | bitmap_head generic_head, firstprivate_head, lastprivate_head; |
14577 | bitmap_head aligned_head, map_head, map_field_head, map_firstprivate_head; |
14578 | bitmap_head oacc_reduction_head, is_on_device_head; |
14579 | tree c, t, type, *pc; |
14580 | tree simdlen = NULL_TREE, safelen = NULL_TREE; |
14581 | bool branch_seen = false; |
14582 | bool copyprivate_seen = false; |
14583 | bool mergeable_seen = false; |
14584 | tree *detach_seen = NULL; |
14585 | bool linear_variable_step_check = false; |
14586 | tree *nowait_clause = NULL; |
14587 | tree ordered_clause = NULL_TREE; |
14588 | tree schedule_clause = NULL_TREE; |
14589 | bool oacc_async = false; |
14590 | bool indir_component_ref_p = false; |
14591 | tree last_iterators = NULL_TREE; |
14592 | bool last_iterators_remove = false; |
14593 | tree *nogroup_seen = NULL; |
14594 | tree *order_clause = NULL; |
14595 | /* 1 if normal/task reduction has been seen, -1 if inscan reduction |
14596 | has been seen, -2 if mixed inscan/normal reduction diagnosed. */ |
14597 | int reduction_seen = 0; |
14598 | bool allocate_seen = false; |
14599 | bool implicit_moved = false; |
14600 | bool target_in_reduction_seen = false; |
14601 | |
14602 | bitmap_obstack_initialize (NULL); |
14603 | bitmap_initialize (head: &generic_head, obstack: &bitmap_default_obstack); |
14604 | bitmap_initialize (head: &firstprivate_head, obstack: &bitmap_default_obstack); |
14605 | bitmap_initialize (head: &lastprivate_head, obstack: &bitmap_default_obstack); |
14606 | bitmap_initialize (head: &aligned_head, obstack: &bitmap_default_obstack); |
14607 | /* If ort == C_ORT_OMP_DECLARE_SIMD used as uniform_head instead. */ |
14608 | bitmap_initialize (head: &map_head, obstack: &bitmap_default_obstack); |
14609 | bitmap_initialize (head: &map_field_head, obstack: &bitmap_default_obstack); |
14610 | bitmap_initialize (head: &map_firstprivate_head, obstack: &bitmap_default_obstack); |
14611 | /* If ort == C_ORT_OMP used as nontemporal_head or use_device_xxx_head |
14612 | instead and for ort == C_ORT_OMP_TARGET used as in_reduction_head. */ |
14613 | bitmap_initialize (head: &oacc_reduction_head, obstack: &bitmap_default_obstack); |
14614 | bitmap_initialize (head: &is_on_device_head, obstack: &bitmap_default_obstack); |
14615 | |
14616 | if (ort & C_ORT_ACC) |
14617 | for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c)) |
14618 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ASYNC) |
14619 | { |
14620 | oacc_async = true; |
14621 | break; |
14622 | } |
14623 | |
14624 | tree *grp_start_p = NULL, grp_sentinel = NULL_TREE; |
14625 | |
14626 | for (pc = &clauses, c = clauses; c ; c = *pc) |
14627 | { |
14628 | bool remove = false; |
14629 | bool need_complete = false; |
14630 | bool need_implicitly_determined = false; |
14631 | |
14632 | /* We've reached the end of a list of expanded nodes. Reset the group |
14633 | start pointer. */ |
14634 | if (c == grp_sentinel) |
14635 | grp_start_p = NULL; |
14636 | |
14637 | switch (OMP_CLAUSE_CODE (c)) |
14638 | { |
14639 | case OMP_CLAUSE_SHARED: |
14640 | need_implicitly_determined = true; |
14641 | goto check_dup_generic; |
14642 | |
14643 | case OMP_CLAUSE_PRIVATE: |
14644 | need_complete = true; |
14645 | need_implicitly_determined = true; |
14646 | goto check_dup_generic; |
14647 | |
14648 | case OMP_CLAUSE_REDUCTION: |
14649 | if (reduction_seen == 0) |
14650 | reduction_seen = OMP_CLAUSE_REDUCTION_INSCAN (c) ? -1 : 1; |
14651 | else if (reduction_seen != -2 |
14652 | && reduction_seen != (OMP_CLAUSE_REDUCTION_INSCAN (c) |
14653 | ? -1 : 1)) |
14654 | { |
14655 | error_at (OMP_CLAUSE_LOCATION (c), |
14656 | "%<inscan%> and non-%<inscan%> %<reduction%> clauses " |
14657 | "on the same construct" ); |
14658 | reduction_seen = -2; |
14659 | } |
14660 | /* FALLTHRU */ |
14661 | case OMP_CLAUSE_IN_REDUCTION: |
14662 | case OMP_CLAUSE_TASK_REDUCTION: |
14663 | need_implicitly_determined = true; |
14664 | t = OMP_CLAUSE_DECL (c); |
14665 | if (TREE_CODE (t) == TREE_LIST) |
14666 | { |
14667 | if (handle_omp_array_sections (c, ort)) |
14668 | { |
14669 | remove = true; |
14670 | break; |
14671 | } |
14672 | |
14673 | t = OMP_CLAUSE_DECL (c); |
14674 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
14675 | && OMP_CLAUSE_REDUCTION_INSCAN (c)) |
14676 | { |
14677 | error_at (OMP_CLAUSE_LOCATION (c), |
14678 | "%<inscan%> %<reduction%> clause with array " |
14679 | "section" ); |
14680 | remove = true; |
14681 | break; |
14682 | } |
14683 | } |
14684 | t = require_complete_type (OMP_CLAUSE_LOCATION (c), value: t); |
14685 | if (t == error_mark_node) |
14686 | { |
14687 | remove = true; |
14688 | break; |
14689 | } |
14690 | if (oacc_async) |
14691 | c_mark_addressable (exp: t); |
14692 | type = TREE_TYPE (t); |
14693 | if (TREE_CODE (t) == MEM_REF) |
14694 | type = TREE_TYPE (type); |
14695 | if (TREE_CODE (type) == ARRAY_TYPE) |
14696 | { |
14697 | tree oatype = type; |
14698 | gcc_assert (TREE_CODE (t) != MEM_REF); |
14699 | while (TREE_CODE (type) == ARRAY_TYPE) |
14700 | type = TREE_TYPE (type); |
14701 | if (integer_zerop (TYPE_SIZE_UNIT (type))) |
14702 | { |
14703 | error_at (OMP_CLAUSE_LOCATION (c), |
14704 | "%qD in %<reduction%> clause is a zero size array" , |
14705 | t); |
14706 | remove = true; |
14707 | break; |
14708 | } |
14709 | tree size = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (oatype), |
14710 | TYPE_SIZE_UNIT (type)); |
14711 | if (integer_zerop (size)) |
14712 | { |
14713 | error_at (OMP_CLAUSE_LOCATION (c), |
14714 | "%qD in %<reduction%> clause is a zero size array" , |
14715 | t); |
14716 | remove = true; |
14717 | break; |
14718 | } |
14719 | size = size_binop (MINUS_EXPR, size, size_one_node); |
14720 | size = save_expr (size); |
14721 | tree index_type = build_index_type (size); |
14722 | tree atype = build_array_type (TYPE_MAIN_VARIANT (type), |
14723 | index_type); |
14724 | atype = c_build_qualified_type (atype, TYPE_QUALS (type)); |
14725 | tree ptype = build_pointer_type (type); |
14726 | if (TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE) |
14727 | t = build_fold_addr_expr (t); |
14728 | t = build2 (MEM_REF, atype, t, build_int_cst (ptype, 0)); |
14729 | OMP_CLAUSE_DECL (c) = t; |
14730 | } |
14731 | if (TYPE_ATOMIC (type)) |
14732 | { |
14733 | error_at (OMP_CLAUSE_LOCATION (c), |
14734 | "%<_Atomic%> %qE in %<reduction%> clause" , t); |
14735 | remove = true; |
14736 | break; |
14737 | } |
14738 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION |
14739 | || OMP_CLAUSE_REDUCTION_TASK (c)) |
14740 | { |
14741 | /* Disallow zero sized or potentially zero sized task |
14742 | reductions. */ |
14743 | if (integer_zerop (TYPE_SIZE_UNIT (type))) |
14744 | { |
14745 | error_at (OMP_CLAUSE_LOCATION (c), |
14746 | "zero sized type %qT in %qs clause" , type, |
14747 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
14748 | remove = true; |
14749 | break; |
14750 | } |
14751 | else if (TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST) |
14752 | { |
14753 | error_at (OMP_CLAUSE_LOCATION (c), |
14754 | "variable sized type %qT in %qs clause" , type, |
14755 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
14756 | remove = true; |
14757 | break; |
14758 | } |
14759 | } |
14760 | if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE |
14761 | && (FLOAT_TYPE_P (type) |
14762 | || TREE_CODE (type) == COMPLEX_TYPE)) |
14763 | { |
14764 | enum tree_code r_code = OMP_CLAUSE_REDUCTION_CODE (c); |
14765 | const char *r_name = NULL; |
14766 | |
14767 | switch (r_code) |
14768 | { |
14769 | case PLUS_EXPR: |
14770 | case MULT_EXPR: |
14771 | case MINUS_EXPR: |
14772 | case TRUTH_ANDIF_EXPR: |
14773 | case TRUTH_ORIF_EXPR: |
14774 | break; |
14775 | case MIN_EXPR: |
14776 | if (TREE_CODE (type) == COMPLEX_TYPE) |
14777 | r_name = "min" ; |
14778 | break; |
14779 | case MAX_EXPR: |
14780 | if (TREE_CODE (type) == COMPLEX_TYPE) |
14781 | r_name = "max" ; |
14782 | break; |
14783 | case BIT_AND_EXPR: |
14784 | r_name = "&" ; |
14785 | break; |
14786 | case BIT_XOR_EXPR: |
14787 | r_name = "^" ; |
14788 | break; |
14789 | case BIT_IOR_EXPR: |
14790 | r_name = "|" ; |
14791 | break; |
14792 | default: |
14793 | gcc_unreachable (); |
14794 | } |
14795 | if (r_name) |
14796 | { |
14797 | error_at (OMP_CLAUSE_LOCATION (c), |
14798 | "%qE has invalid type for %<reduction(%s)%>" , |
14799 | t, r_name); |
14800 | remove = true; |
14801 | break; |
14802 | } |
14803 | } |
14804 | else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == error_mark_node) |
14805 | { |
14806 | error_at (OMP_CLAUSE_LOCATION (c), |
14807 | "user defined reduction not found for %qE" , t); |
14808 | remove = true; |
14809 | break; |
14810 | } |
14811 | else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)) |
14812 | { |
14813 | tree list = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c); |
14814 | type = TYPE_MAIN_VARIANT (type); |
14815 | tree placeholder = build_decl (OMP_CLAUSE_LOCATION (c), |
14816 | VAR_DECL, NULL_TREE, type); |
14817 | tree decl_placeholder = NULL_TREE; |
14818 | OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = placeholder; |
14819 | DECL_ARTIFICIAL (placeholder) = 1; |
14820 | DECL_IGNORED_P (placeholder) = 1; |
14821 | if (TREE_CODE (t) == MEM_REF) |
14822 | { |
14823 | decl_placeholder = build_decl (OMP_CLAUSE_LOCATION (c), |
14824 | VAR_DECL, NULL_TREE, type); |
14825 | OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = decl_placeholder; |
14826 | DECL_ARTIFICIAL (decl_placeholder) = 1; |
14827 | DECL_IGNORED_P (decl_placeholder) = 1; |
14828 | } |
14829 | if (TREE_ADDRESSABLE (TREE_VEC_ELT (list, 0))) |
14830 | c_mark_addressable (exp: placeholder); |
14831 | if (TREE_ADDRESSABLE (TREE_VEC_ELT (list, 1))) |
14832 | c_mark_addressable (exp: decl_placeholder ? decl_placeholder |
14833 | : OMP_CLAUSE_DECL (c)); |
14834 | OMP_CLAUSE_REDUCTION_MERGE (c) |
14835 | = c_clone_omp_udr (TREE_VEC_ELT (list, 2), |
14836 | TREE_VEC_ELT (list, 0), |
14837 | TREE_VEC_ELT (list, 1), |
14838 | decl: decl_placeholder ? decl_placeholder |
14839 | : OMP_CLAUSE_DECL (c), placeholder); |
14840 | OMP_CLAUSE_REDUCTION_MERGE (c) |
14841 | = build3_loc (OMP_CLAUSE_LOCATION (c), code: BIND_EXPR, |
14842 | void_type_node, NULL_TREE, |
14843 | OMP_CLAUSE_REDUCTION_MERGE (c), NULL_TREE); |
14844 | TREE_SIDE_EFFECTS (OMP_CLAUSE_REDUCTION_MERGE (c)) = 1; |
14845 | if (TREE_VEC_LENGTH (list) == 6) |
14846 | { |
14847 | if (TREE_ADDRESSABLE (TREE_VEC_ELT (list, 3))) |
14848 | c_mark_addressable (exp: decl_placeholder ? decl_placeholder |
14849 | : OMP_CLAUSE_DECL (c)); |
14850 | if (TREE_ADDRESSABLE (TREE_VEC_ELT (list, 4))) |
14851 | c_mark_addressable (exp: placeholder); |
14852 | tree init = TREE_VEC_ELT (list, 5); |
14853 | if (init == error_mark_node) |
14854 | init = DECL_INITIAL (TREE_VEC_ELT (list, 3)); |
14855 | OMP_CLAUSE_REDUCTION_INIT (c) |
14856 | = c_clone_omp_udr (stmt: init, TREE_VEC_ELT (list, 4), |
14857 | TREE_VEC_ELT (list, 3), |
14858 | decl: decl_placeholder ? decl_placeholder |
14859 | : OMP_CLAUSE_DECL (c), placeholder); |
14860 | if (TREE_VEC_ELT (list, 5) == error_mark_node) |
14861 | { |
14862 | tree v = decl_placeholder ? decl_placeholder : t; |
14863 | OMP_CLAUSE_REDUCTION_INIT (c) |
14864 | = build2 (INIT_EXPR, TREE_TYPE (v), v, |
14865 | OMP_CLAUSE_REDUCTION_INIT (c)); |
14866 | } |
14867 | if (walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c), |
14868 | c_find_omp_placeholder_r, |
14869 | placeholder, NULL)) |
14870 | OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c) = 1; |
14871 | } |
14872 | else |
14873 | { |
14874 | tree init; |
14875 | tree v = decl_placeholder ? decl_placeholder : t; |
14876 | if (AGGREGATE_TYPE_P (TREE_TYPE (v))) |
14877 | init = build_constructor (TREE_TYPE (v), NULL); |
14878 | else |
14879 | init = fold_convert (TREE_TYPE (v), integer_zero_node); |
14880 | OMP_CLAUSE_REDUCTION_INIT (c) |
14881 | = build2 (INIT_EXPR, TREE_TYPE (v), v, init); |
14882 | } |
14883 | OMP_CLAUSE_REDUCTION_INIT (c) |
14884 | = build3_loc (OMP_CLAUSE_LOCATION (c), code: BIND_EXPR, |
14885 | void_type_node, NULL_TREE, |
14886 | OMP_CLAUSE_REDUCTION_INIT (c), NULL_TREE); |
14887 | TREE_SIDE_EFFECTS (OMP_CLAUSE_REDUCTION_INIT (c)) = 1; |
14888 | } |
14889 | if (TREE_CODE (t) == MEM_REF) |
14890 | { |
14891 | if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (t))) == NULL_TREE |
14892 | || TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (t)))) |
14893 | != INTEGER_CST) |
14894 | { |
14895 | sorry ("variable length element type in array " |
14896 | "%<reduction%> clause" ); |
14897 | remove = true; |
14898 | break; |
14899 | } |
14900 | t = TREE_OPERAND (t, 0); |
14901 | if (TREE_CODE (t) == POINTER_PLUS_EXPR) |
14902 | t = TREE_OPERAND (t, 0); |
14903 | if (TREE_CODE (t) == ADDR_EXPR) |
14904 | t = TREE_OPERAND (t, 0); |
14905 | } |
14906 | goto check_dup_generic_t; |
14907 | |
14908 | case OMP_CLAUSE_COPYPRIVATE: |
14909 | copyprivate_seen = true; |
14910 | if (nowait_clause) |
14911 | { |
14912 | error_at (OMP_CLAUSE_LOCATION (*nowait_clause), |
14913 | "%<nowait%> clause must not be used together " |
14914 | "with %<copyprivate%>" ); |
14915 | *nowait_clause = OMP_CLAUSE_CHAIN (*nowait_clause); |
14916 | nowait_clause = NULL; |
14917 | } |
14918 | goto check_dup_generic; |
14919 | |
14920 | case OMP_CLAUSE_COPYIN: |
14921 | t = OMP_CLAUSE_DECL (c); |
14922 | if (!VAR_P (t) || !DECL_THREAD_LOCAL_P (t)) |
14923 | { |
14924 | error_at (OMP_CLAUSE_LOCATION (c), |
14925 | "%qE must be %<threadprivate%> for %<copyin%>" , t); |
14926 | remove = true; |
14927 | break; |
14928 | } |
14929 | goto check_dup_generic; |
14930 | |
14931 | case OMP_CLAUSE_LINEAR: |
14932 | if (ort != C_ORT_OMP_DECLARE_SIMD) |
14933 | need_implicitly_determined = true; |
14934 | t = OMP_CLAUSE_DECL (c); |
14935 | if (ort != C_ORT_OMP_DECLARE_SIMD |
14936 | && OMP_CLAUSE_LINEAR_KIND (c) != OMP_CLAUSE_LINEAR_DEFAULT |
14937 | && OMP_CLAUSE_LINEAR_OLD_LINEAR_MODIFIER (c)) |
14938 | { |
14939 | error_at (OMP_CLAUSE_LOCATION (c), |
14940 | "modifier should not be specified in %<linear%> " |
14941 | "clause on %<simd%> or %<for%> constructs when not " |
14942 | "using OpenMP 5.2 modifiers" ); |
14943 | OMP_CLAUSE_LINEAR_KIND (c) = OMP_CLAUSE_LINEAR_DEFAULT; |
14944 | } |
14945 | if (!INTEGRAL_TYPE_P (TREE_TYPE (t)) |
14946 | && TREE_CODE (TREE_TYPE (t)) != POINTER_TYPE) |
14947 | { |
14948 | error_at (OMP_CLAUSE_LOCATION (c), |
14949 | "linear clause applied to non-integral non-pointer " |
14950 | "variable with type %qT" , TREE_TYPE (t)); |
14951 | remove = true; |
14952 | break; |
14953 | } |
14954 | if (TYPE_ATOMIC (TREE_TYPE (t))) |
14955 | { |
14956 | error_at (OMP_CLAUSE_LOCATION (c), |
14957 | "%<_Atomic%> %qD in %<linear%> clause" , t); |
14958 | remove = true; |
14959 | break; |
14960 | } |
14961 | if (ort == C_ORT_OMP_DECLARE_SIMD) |
14962 | { |
14963 | tree s = OMP_CLAUSE_LINEAR_STEP (c); |
14964 | if (TREE_CODE (s) == PARM_DECL) |
14965 | { |
14966 | OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (c) = 1; |
14967 | /* map_head bitmap is used as uniform_head if |
14968 | declare_simd. */ |
14969 | if (!bitmap_bit_p (&map_head, DECL_UID (s))) |
14970 | linear_variable_step_check = true; |
14971 | goto check_dup_generic; |
14972 | } |
14973 | if (TREE_CODE (s) != INTEGER_CST) |
14974 | { |
14975 | error_at (OMP_CLAUSE_LOCATION (c), |
14976 | "%<linear%> clause step %qE is neither constant " |
14977 | "nor a parameter" , s); |
14978 | remove = true; |
14979 | break; |
14980 | } |
14981 | } |
14982 | if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c))) == POINTER_TYPE) |
14983 | { |
14984 | tree s = OMP_CLAUSE_LINEAR_STEP (c); |
14985 | s = pointer_int_sum (OMP_CLAUSE_LOCATION (c), PLUS_EXPR, |
14986 | OMP_CLAUSE_DECL (c), s); |
14987 | s = fold_build2_loc (OMP_CLAUSE_LOCATION (c), MINUS_EXPR, |
14988 | sizetype, fold_convert (sizetype, s), |
14989 | fold_convert |
14990 | (sizetype, OMP_CLAUSE_DECL (c))); |
14991 | if (s == error_mark_node) |
14992 | s = size_one_node; |
14993 | OMP_CLAUSE_LINEAR_STEP (c) = s; |
14994 | } |
14995 | else |
14996 | OMP_CLAUSE_LINEAR_STEP (c) |
14997 | = fold_convert (TREE_TYPE (t), OMP_CLAUSE_LINEAR_STEP (c)); |
14998 | goto check_dup_generic; |
14999 | |
15000 | check_dup_generic: |
15001 | t = OMP_CLAUSE_DECL (c); |
15002 | check_dup_generic_t: |
15003 | if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL) |
15004 | { |
15005 | error_at (OMP_CLAUSE_LOCATION (c), |
15006 | "%qE is not a variable in clause %qs" , t, |
15007 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
15008 | remove = true; |
15009 | } |
15010 | else if ((ort == C_ORT_ACC |
15011 | && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION) |
15012 | || (ort == C_ORT_OMP |
15013 | && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR |
15014 | || (OMP_CLAUSE_CODE (c) |
15015 | == OMP_CLAUSE_USE_DEVICE_ADDR))) |
15016 | || (ort == C_ORT_OMP_TARGET |
15017 | && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)) |
15018 | { |
15019 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION |
15020 | && (bitmap_bit_p (&generic_head, DECL_UID (t)) |
15021 | || bitmap_bit_p (&firstprivate_head, DECL_UID (t)))) |
15022 | { |
15023 | error_at (OMP_CLAUSE_LOCATION (c), |
15024 | "%qD appears more than once in data-sharing " |
15025 | "clauses" , t); |
15026 | remove = true; |
15027 | break; |
15028 | } |
15029 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION) |
15030 | target_in_reduction_seen = true; |
15031 | if (bitmap_bit_p (&oacc_reduction_head, DECL_UID (t))) |
15032 | { |
15033 | error_at (OMP_CLAUSE_LOCATION (c), |
15034 | ort == C_ORT_ACC |
15035 | ? "%qD appears more than once in reduction clauses" |
15036 | : "%qD appears more than once in data clauses" , |
15037 | t); |
15038 | remove = true; |
15039 | } |
15040 | else |
15041 | bitmap_set_bit (&oacc_reduction_head, DECL_UID (t)); |
15042 | } |
15043 | else if (bitmap_bit_p (&generic_head, DECL_UID (t)) |
15044 | || bitmap_bit_p (&firstprivate_head, DECL_UID (t)) |
15045 | || bitmap_bit_p (&lastprivate_head, DECL_UID (t)) |
15046 | || bitmap_bit_p (&map_firstprivate_head, DECL_UID (t))) |
15047 | { |
15048 | error_at (OMP_CLAUSE_LOCATION (c), |
15049 | "%qE appears more than once in data clauses" , t); |
15050 | remove = true; |
15051 | } |
15052 | else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE |
15053 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR |
15054 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR) |
15055 | && bitmap_bit_p (&map_head, DECL_UID (t))) |
15056 | { |
15057 | if (ort == C_ORT_ACC) |
15058 | error_at (OMP_CLAUSE_LOCATION (c), |
15059 | "%qD appears more than once in data clauses" , t); |
15060 | else |
15061 | error_at (OMP_CLAUSE_LOCATION (c), |
15062 | "%qD appears both in data and map clauses" , t); |
15063 | remove = true; |
15064 | } |
15065 | else |
15066 | bitmap_set_bit (&generic_head, DECL_UID (t)); |
15067 | break; |
15068 | |
15069 | case OMP_CLAUSE_FIRSTPRIVATE: |
15070 | if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) && !implicit_moved) |
15071 | { |
15072 | move_implicit: |
15073 | implicit_moved = true; |
15074 | /* Move firstprivate and map clauses with |
15075 | OMP_CLAUSE_{FIRSTPRIVATE,MAP}_IMPLICIT set to the end of |
15076 | clauses chain. */ |
15077 | tree cl1 = NULL_TREE, cl2 = NULL_TREE; |
15078 | tree *pc1 = pc, *pc2 = &cl1, *pc3 = &cl2; |
15079 | while (*pc1) |
15080 | if (OMP_CLAUSE_CODE (*pc1) == OMP_CLAUSE_FIRSTPRIVATE |
15081 | && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (*pc1)) |
15082 | { |
15083 | *pc3 = *pc1; |
15084 | pc3 = &OMP_CLAUSE_CHAIN (*pc3); |
15085 | *pc1 = OMP_CLAUSE_CHAIN (*pc1); |
15086 | } |
15087 | else if (OMP_CLAUSE_CODE (*pc1) == OMP_CLAUSE_MAP |
15088 | && OMP_CLAUSE_MAP_IMPLICIT (*pc1)) |
15089 | { |
15090 | *pc2 = *pc1; |
15091 | pc2 = &OMP_CLAUSE_CHAIN (*pc2); |
15092 | *pc1 = OMP_CLAUSE_CHAIN (*pc1); |
15093 | } |
15094 | else |
15095 | pc1 = &OMP_CLAUSE_CHAIN (*pc1); |
15096 | *pc3 = NULL; |
15097 | *pc2 = cl2; |
15098 | *pc1 = cl1; |
15099 | continue; |
15100 | } |
15101 | t = OMP_CLAUSE_DECL (c); |
15102 | need_complete = true; |
15103 | need_implicitly_determined = true; |
15104 | if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL) |
15105 | { |
15106 | error_at (OMP_CLAUSE_LOCATION (c), |
15107 | "%qE is not a variable in clause %<firstprivate%>" , t); |
15108 | remove = true; |
15109 | } |
15110 | else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) |
15111 | && !OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c) |
15112 | && bitmap_bit_p (&map_firstprivate_head, DECL_UID (t))) |
15113 | remove = true; |
15114 | else if (bitmap_bit_p (&generic_head, DECL_UID (t)) |
15115 | || bitmap_bit_p (&firstprivate_head, DECL_UID (t)) |
15116 | || bitmap_bit_p (&map_firstprivate_head, DECL_UID (t))) |
15117 | { |
15118 | error_at (OMP_CLAUSE_LOCATION (c), |
15119 | "%qE appears more than once in data clauses" , t); |
15120 | remove = true; |
15121 | } |
15122 | else if (bitmap_bit_p (&map_head, DECL_UID (t))) |
15123 | { |
15124 | if (ort == C_ORT_ACC) |
15125 | error_at (OMP_CLAUSE_LOCATION (c), |
15126 | "%qD appears more than once in data clauses" , t); |
15127 | else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) |
15128 | && !OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c)) |
15129 | /* Silently drop the clause. */; |
15130 | else |
15131 | error_at (OMP_CLAUSE_LOCATION (c), |
15132 | "%qD appears both in data and map clauses" , t); |
15133 | remove = true; |
15134 | } |
15135 | else |
15136 | bitmap_set_bit (&firstprivate_head, DECL_UID (t)); |
15137 | break; |
15138 | |
15139 | case OMP_CLAUSE_LASTPRIVATE: |
15140 | t = OMP_CLAUSE_DECL (c); |
15141 | need_complete = true; |
15142 | need_implicitly_determined = true; |
15143 | if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL) |
15144 | { |
15145 | error_at (OMP_CLAUSE_LOCATION (c), |
15146 | "%qE is not a variable in clause %<lastprivate%>" , t); |
15147 | remove = true; |
15148 | } |
15149 | else if (bitmap_bit_p (&generic_head, DECL_UID (t)) |
15150 | || bitmap_bit_p (&lastprivate_head, DECL_UID (t))) |
15151 | { |
15152 | error_at (OMP_CLAUSE_LOCATION (c), |
15153 | "%qE appears more than once in data clauses" , t); |
15154 | remove = true; |
15155 | } |
15156 | else |
15157 | bitmap_set_bit (&lastprivate_head, DECL_UID (t)); |
15158 | break; |
15159 | |
15160 | case OMP_CLAUSE_ALIGNED: |
15161 | t = OMP_CLAUSE_DECL (c); |
15162 | if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL) |
15163 | { |
15164 | error_at (OMP_CLAUSE_LOCATION (c), |
15165 | "%qE is not a variable in %<aligned%> clause" , t); |
15166 | remove = true; |
15167 | } |
15168 | else if (!POINTER_TYPE_P (TREE_TYPE (t)) |
15169 | && TREE_CODE (TREE_TYPE (t)) != ARRAY_TYPE) |
15170 | { |
15171 | error_at (OMP_CLAUSE_LOCATION (c), |
15172 | "%qE in %<aligned%> clause is neither a pointer nor " |
15173 | "an array" , t); |
15174 | remove = true; |
15175 | } |
15176 | else if (TYPE_ATOMIC (TREE_TYPE (t))) |
15177 | { |
15178 | error_at (OMP_CLAUSE_LOCATION (c), |
15179 | "%<_Atomic%> %qD in %<aligned%> clause" , t); |
15180 | remove = true; |
15181 | break; |
15182 | } |
15183 | else if (bitmap_bit_p (&aligned_head, DECL_UID (t))) |
15184 | { |
15185 | error_at (OMP_CLAUSE_LOCATION (c), |
15186 | "%qE appears more than once in %<aligned%> clauses" , |
15187 | t); |
15188 | remove = true; |
15189 | } |
15190 | else |
15191 | bitmap_set_bit (&aligned_head, DECL_UID (t)); |
15192 | break; |
15193 | |
15194 | case OMP_CLAUSE_NONTEMPORAL: |
15195 | t = OMP_CLAUSE_DECL (c); |
15196 | if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL) |
15197 | { |
15198 | error_at (OMP_CLAUSE_LOCATION (c), |
15199 | "%qE is not a variable in %<nontemporal%> clause" , t); |
15200 | remove = true; |
15201 | } |
15202 | else if (bitmap_bit_p (&oacc_reduction_head, DECL_UID (t))) |
15203 | { |
15204 | error_at (OMP_CLAUSE_LOCATION (c), |
15205 | "%qE appears more than once in %<nontemporal%> " |
15206 | "clauses" , t); |
15207 | remove = true; |
15208 | } |
15209 | else |
15210 | bitmap_set_bit (&oacc_reduction_head, DECL_UID (t)); |
15211 | break; |
15212 | |
15213 | case OMP_CLAUSE_ALLOCATE: |
15214 | t = OMP_CLAUSE_DECL (c); |
15215 | if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL) |
15216 | { |
15217 | error_at (OMP_CLAUSE_LOCATION (c), |
15218 | "%qE is not a variable in %<allocate%> clause" , t); |
15219 | remove = true; |
15220 | } |
15221 | else if (bitmap_bit_p (&aligned_head, DECL_UID (t))) |
15222 | { |
15223 | warning_at (OMP_CLAUSE_LOCATION (c), 0, |
15224 | "%qE appears more than once in %<allocate%> clauses" , |
15225 | t); |
15226 | remove = true; |
15227 | } |
15228 | else |
15229 | { |
15230 | bitmap_set_bit (&aligned_head, DECL_UID (t)); |
15231 | if (!OMP_CLAUSE_ALLOCATE_COMBINED (c)) |
15232 | allocate_seen = true; |
15233 | } |
15234 | break; |
15235 | |
15236 | case OMP_CLAUSE_DOACROSS: |
15237 | t = OMP_CLAUSE_DECL (c); |
15238 | if (t == NULL_TREE) |
15239 | break; |
15240 | if (OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK) |
15241 | { |
15242 | gcc_assert (TREE_CODE (t) == TREE_LIST); |
15243 | for (; t; t = TREE_CHAIN (t)) |
15244 | { |
15245 | tree decl = TREE_VALUE (t); |
15246 | if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE) |
15247 | { |
15248 | tree offset = TREE_PURPOSE (t); |
15249 | bool neg = wi::neg_p (x: wi::to_wide (t: offset)); |
15250 | offset = fold_unary (ABS_EXPR, TREE_TYPE (offset), offset); |
15251 | tree t2 = pointer_int_sum (OMP_CLAUSE_LOCATION (c), |
15252 | neg ? MINUS_EXPR : PLUS_EXPR, |
15253 | decl, offset); |
15254 | t2 = fold_build2_loc (OMP_CLAUSE_LOCATION (c), MINUS_EXPR, |
15255 | sizetype, |
15256 | fold_convert (sizetype, t2), |
15257 | fold_convert (sizetype, decl)); |
15258 | if (t2 == error_mark_node) |
15259 | { |
15260 | remove = true; |
15261 | break; |
15262 | } |
15263 | TREE_PURPOSE (t) = t2; |
15264 | } |
15265 | } |
15266 | break; |
15267 | } |
15268 | gcc_unreachable (); |
15269 | case OMP_CLAUSE_DEPEND: |
15270 | case OMP_CLAUSE_AFFINITY: |
15271 | t = OMP_CLAUSE_DECL (c); |
15272 | if (TREE_CODE (t) == TREE_LIST |
15273 | && TREE_PURPOSE (t) |
15274 | && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC) |
15275 | { |
15276 | if (TREE_PURPOSE (t) != last_iterators) |
15277 | last_iterators_remove |
15278 | = c_omp_finish_iterators (TREE_PURPOSE (t)); |
15279 | last_iterators = TREE_PURPOSE (t); |
15280 | t = TREE_VALUE (t); |
15281 | if (last_iterators_remove) |
15282 | t = error_mark_node; |
15283 | } |
15284 | else |
15285 | last_iterators = NULL_TREE; |
15286 | if (TREE_CODE (t) == TREE_LIST) |
15287 | { |
15288 | if (handle_omp_array_sections (c, ort)) |
15289 | remove = true; |
15290 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND |
15291 | && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_DEPOBJ) |
15292 | { |
15293 | error_at (OMP_CLAUSE_LOCATION (c), |
15294 | "%<depend%> clause with %<depobj%> dependence " |
15295 | "type on array section" ); |
15296 | remove = true; |
15297 | } |
15298 | break; |
15299 | } |
15300 | if (t == error_mark_node) |
15301 | remove = true; |
15302 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND |
15303 | && t == ridpointers[RID_OMP_ALL_MEMORY]) |
15304 | { |
15305 | if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_OUT |
15306 | && OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_INOUT) |
15307 | { |
15308 | error_at (OMP_CLAUSE_LOCATION (c), |
15309 | "%<omp_all_memory%> used with %<depend%> kind " |
15310 | "other than %<out%> or %<inout%>" ); |
15311 | remove = true; |
15312 | } |
15313 | } |
15314 | else if (!lvalue_p (ref: t)) |
15315 | { |
15316 | error_at (OMP_CLAUSE_LOCATION (c), |
15317 | "%qE is not lvalue expression nor array section in " |
15318 | "%qs clause" , t, |
15319 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
15320 | remove = true; |
15321 | } |
15322 | else if (TREE_CODE (t) == COMPONENT_REF |
15323 | && DECL_C_BIT_FIELD (TREE_OPERAND (t, 1))) |
15324 | { |
15325 | gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND |
15326 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY); |
15327 | error_at (OMP_CLAUSE_LOCATION (c), |
15328 | "bit-field %qE in %qs clause" , t, |
15329 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
15330 | remove = true; |
15331 | } |
15332 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND |
15333 | && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_DEPOBJ) |
15334 | { |
15335 | if (!c_omp_depend_t_p (TREE_TYPE (t))) |
15336 | { |
15337 | error_at (OMP_CLAUSE_LOCATION (c), |
15338 | "%qE does not have %<omp_depend_t%> type in " |
15339 | "%<depend%> clause with %<depobj%> dependence " |
15340 | "type" , t); |
15341 | remove = true; |
15342 | } |
15343 | } |
15344 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND |
15345 | && c_omp_depend_t_p (TREE_TYPE (t))) |
15346 | { |
15347 | error_at (OMP_CLAUSE_LOCATION (c), |
15348 | "%qE should not have %<omp_depend_t%> type in " |
15349 | "%<depend%> clause with dependence type other than " |
15350 | "%<depobj%>" , t); |
15351 | remove = true; |
15352 | } |
15353 | if (!remove) |
15354 | { |
15355 | if (t == ridpointers[RID_OMP_ALL_MEMORY]) |
15356 | t = null_pointer_node; |
15357 | else |
15358 | { |
15359 | tree addr = build_unary_op (OMP_CLAUSE_LOCATION (c), |
15360 | code: ADDR_EXPR, xarg: t, noconvert: false); |
15361 | if (addr == error_mark_node) |
15362 | { |
15363 | remove = true; |
15364 | break; |
15365 | } |
15366 | t = build_indirect_ref (OMP_CLAUSE_LOCATION (c), ptr: addr, |
15367 | errstring: RO_UNARY_STAR); |
15368 | if (t == error_mark_node) |
15369 | { |
15370 | remove = true; |
15371 | break; |
15372 | } |
15373 | } |
15374 | if (TREE_CODE (OMP_CLAUSE_DECL (c)) == TREE_LIST |
15375 | && TREE_PURPOSE (OMP_CLAUSE_DECL (c)) |
15376 | && (TREE_CODE (TREE_PURPOSE (OMP_CLAUSE_DECL (c))) |
15377 | == TREE_VEC)) |
15378 | TREE_VALUE (OMP_CLAUSE_DECL (c)) = t; |
15379 | else |
15380 | OMP_CLAUSE_DECL (c) = t; |
15381 | } |
15382 | break; |
15383 | |
15384 | case OMP_CLAUSE_MAP: |
15385 | if (OMP_CLAUSE_MAP_IMPLICIT (c) && !implicit_moved) |
15386 | goto move_implicit; |
15387 | /* FALLTHRU */ |
15388 | case OMP_CLAUSE_TO: |
15389 | case OMP_CLAUSE_FROM: |
15390 | case OMP_CLAUSE__CACHE_: |
15391 | t = OMP_CLAUSE_DECL (c); |
15392 | if (TREE_CODE (t) == TREE_LIST) |
15393 | { |
15394 | grp_start_p = pc; |
15395 | grp_sentinel = OMP_CLAUSE_CHAIN (c); |
15396 | |
15397 | if (handle_omp_array_sections (c, ort)) |
15398 | remove = true; |
15399 | else |
15400 | { |
15401 | t = OMP_CLAUSE_DECL (c); |
15402 | if (!omp_mappable_type (TREE_TYPE (t))) |
15403 | { |
15404 | error_at (OMP_CLAUSE_LOCATION (c), |
15405 | "array section does not have mappable type " |
15406 | "in %qs clause" , |
15407 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
15408 | remove = true; |
15409 | } |
15410 | else if (TYPE_ATOMIC (TREE_TYPE (t))) |
15411 | { |
15412 | error_at (OMP_CLAUSE_LOCATION (c), |
15413 | "%<_Atomic%> %qE in %qs clause" , t, |
15414 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
15415 | remove = true; |
15416 | } |
15417 | while (TREE_CODE (t) == ARRAY_REF) |
15418 | t = TREE_OPERAND (t, 0); |
15419 | if (TREE_CODE (t) == COMPONENT_REF |
15420 | && TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE) |
15421 | { |
15422 | do |
15423 | { |
15424 | t = TREE_OPERAND (t, 0); |
15425 | if (TREE_CODE (t) == MEM_REF |
15426 | || INDIRECT_REF_P (t)) |
15427 | { |
15428 | t = TREE_OPERAND (t, 0); |
15429 | STRIP_NOPS (t); |
15430 | if (TREE_CODE (t) == POINTER_PLUS_EXPR) |
15431 | t = TREE_OPERAND (t, 0); |
15432 | } |
15433 | } |
15434 | while (TREE_CODE (t) == COMPONENT_REF |
15435 | || TREE_CODE (t) == ARRAY_REF); |
15436 | |
15437 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
15438 | && OMP_CLAUSE_MAP_IMPLICIT (c) |
15439 | && (bitmap_bit_p (&map_head, DECL_UID (t)) |
15440 | || bitmap_bit_p (&map_field_head, DECL_UID (t)) |
15441 | || bitmap_bit_p (&map_firstprivate_head, |
15442 | DECL_UID (t)))) |
15443 | { |
15444 | remove = true; |
15445 | break; |
15446 | } |
15447 | if (bitmap_bit_p (&map_field_head, DECL_UID (t))) |
15448 | break; |
15449 | if (bitmap_bit_p (&map_head, DECL_UID (t))) |
15450 | { |
15451 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP) |
15452 | error_at (OMP_CLAUSE_LOCATION (c), |
15453 | "%qD appears more than once in motion " |
15454 | "clauses" , t); |
15455 | else if (ort == C_ORT_ACC) |
15456 | error_at (OMP_CLAUSE_LOCATION (c), |
15457 | "%qD appears more than once in data " |
15458 | "clauses" , t); |
15459 | else |
15460 | error_at (OMP_CLAUSE_LOCATION (c), |
15461 | "%qD appears more than once in map " |
15462 | "clauses" , t); |
15463 | remove = true; |
15464 | } |
15465 | else |
15466 | { |
15467 | bitmap_set_bit (&map_head, DECL_UID (t)); |
15468 | bitmap_set_bit (&map_field_head, DECL_UID (t)); |
15469 | } |
15470 | } |
15471 | } |
15472 | if (c_oacc_check_attachments (c)) |
15473 | remove = true; |
15474 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
15475 | && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH |
15476 | || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)) |
15477 | /* In this case, we have a single array element which is a |
15478 | pointer, and we already set OMP_CLAUSE_SIZE in |
15479 | handle_omp_array_sections above. For attach/detach clauses, |
15480 | reset the OMP_CLAUSE_SIZE (representing a bias) to zero |
15481 | here. */ |
15482 | OMP_CLAUSE_SIZE (c) = size_zero_node; |
15483 | break; |
15484 | } |
15485 | if (t == error_mark_node) |
15486 | { |
15487 | remove = true; |
15488 | break; |
15489 | } |
15490 | /* OpenACC attach / detach clauses must be pointers. */ |
15491 | if (c_oacc_check_attachments (c)) |
15492 | { |
15493 | remove = true; |
15494 | break; |
15495 | } |
15496 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
15497 | && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH |
15498 | || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)) |
15499 | /* For attach/detach clauses, set OMP_CLAUSE_SIZE (representing a |
15500 | bias) to zero here, so it is not set erroneously to the pointer |
15501 | size later on in gimplify.cc. */ |
15502 | OMP_CLAUSE_SIZE (c) = size_zero_node; |
15503 | while (INDIRECT_REF_P (t) |
15504 | || TREE_CODE (t) == ARRAY_REF) |
15505 | { |
15506 | t = TREE_OPERAND (t, 0); |
15507 | STRIP_NOPS (t); |
15508 | if (TREE_CODE (t) == POINTER_PLUS_EXPR) |
15509 | t = TREE_OPERAND (t, 0); |
15510 | } |
15511 | while (TREE_CODE (t) == COMPOUND_EXPR) |
15512 | { |
15513 | t = TREE_OPERAND (t, 1); |
15514 | STRIP_NOPS (t); |
15515 | } |
15516 | indir_component_ref_p = false; |
15517 | if (TREE_CODE (t) == COMPONENT_REF |
15518 | && (TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF |
15519 | || INDIRECT_REF_P (TREE_OPERAND (t, 0)) |
15520 | || TREE_CODE (TREE_OPERAND (t, 0)) == ARRAY_REF)) |
15521 | { |
15522 | t = TREE_OPERAND (TREE_OPERAND (t, 0), 0); |
15523 | indir_component_ref_p = true; |
15524 | STRIP_NOPS (t); |
15525 | if (TREE_CODE (t) == POINTER_PLUS_EXPR) |
15526 | t = TREE_OPERAND (t, 0); |
15527 | } |
15528 | |
15529 | if (TREE_CODE (t) == COMPONENT_REF |
15530 | && OMP_CLAUSE_CODE (c) != OMP_CLAUSE__CACHE_) |
15531 | { |
15532 | if (DECL_BIT_FIELD (TREE_OPERAND (t, 1))) |
15533 | { |
15534 | error_at (OMP_CLAUSE_LOCATION (c), |
15535 | "bit-field %qE in %qs clause" , |
15536 | t, omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
15537 | remove = true; |
15538 | } |
15539 | else if (!omp_mappable_type (TREE_TYPE (t))) |
15540 | { |
15541 | error_at (OMP_CLAUSE_LOCATION (c), |
15542 | "%qE does not have a mappable type in %qs clause" , |
15543 | t, omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
15544 | remove = true; |
15545 | } |
15546 | else if (TYPE_ATOMIC (TREE_TYPE (t))) |
15547 | { |
15548 | error_at (OMP_CLAUSE_LOCATION (c), |
15549 | "%<_Atomic%> %qE in %qs clause" , t, |
15550 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
15551 | remove = true; |
15552 | } |
15553 | while (TREE_CODE (t) == COMPONENT_REF) |
15554 | { |
15555 | if (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0))) |
15556 | == UNION_TYPE) |
15557 | { |
15558 | error_at (OMP_CLAUSE_LOCATION (c), |
15559 | "%qE is a member of a union" , t); |
15560 | remove = true; |
15561 | break; |
15562 | } |
15563 | t = TREE_OPERAND (t, 0); |
15564 | if (TREE_CODE (t) == MEM_REF) |
15565 | { |
15566 | if (maybe_ne (a: mem_ref_offset (t), b: 0)) |
15567 | error_at (OMP_CLAUSE_LOCATION (c), |
15568 | "cannot dereference %qE in %qs clause" , t, |
15569 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
15570 | else |
15571 | t = TREE_OPERAND (t, 0); |
15572 | } |
15573 | while (TREE_CODE (t) == MEM_REF |
15574 | || INDIRECT_REF_P (t) |
15575 | || TREE_CODE (t) == ARRAY_REF) |
15576 | { |
15577 | t = TREE_OPERAND (t, 0); |
15578 | STRIP_NOPS (t); |
15579 | if (TREE_CODE (t) == POINTER_PLUS_EXPR) |
15580 | t = TREE_OPERAND (t, 0); |
15581 | } |
15582 | } |
15583 | if (remove) |
15584 | break; |
15585 | if (VAR_P (t) || TREE_CODE (t) == PARM_DECL) |
15586 | { |
15587 | if (bitmap_bit_p (&map_field_head, DECL_UID (t)) |
15588 | || (ort != C_ORT_ACC |
15589 | && bitmap_bit_p (&map_head, DECL_UID (t)))) |
15590 | break; |
15591 | } |
15592 | } |
15593 | if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL) |
15594 | { |
15595 | error_at (OMP_CLAUSE_LOCATION (c), |
15596 | "%qE is not a variable in %qs clause" , t, |
15597 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
15598 | remove = true; |
15599 | } |
15600 | else if (VAR_P (t) && DECL_THREAD_LOCAL_P (t)) |
15601 | { |
15602 | error_at (OMP_CLAUSE_LOCATION (c), |
15603 | "%qD is threadprivate variable in %qs clause" , t, |
15604 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
15605 | remove = true; |
15606 | } |
15607 | else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP |
15608 | || (OMP_CLAUSE_MAP_KIND (c) |
15609 | != GOMP_MAP_FIRSTPRIVATE_POINTER)) |
15610 | && !indir_component_ref_p |
15611 | && !c_mark_addressable (exp: t)) |
15612 | remove = true; |
15613 | else if (!(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
15614 | && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER |
15615 | || (OMP_CLAUSE_MAP_KIND (c) |
15616 | == GOMP_MAP_FIRSTPRIVATE_POINTER) |
15617 | || (OMP_CLAUSE_MAP_KIND (c) |
15618 | == GOMP_MAP_FORCE_DEVICEPTR))) |
15619 | && t == OMP_CLAUSE_DECL (c) |
15620 | && !omp_mappable_type (TREE_TYPE (t))) |
15621 | { |
15622 | error_at (OMP_CLAUSE_LOCATION (c), |
15623 | "%qD does not have a mappable type in %qs clause" , t, |
15624 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
15625 | remove = true; |
15626 | } |
15627 | else if (TREE_TYPE (t) == error_mark_node) |
15628 | remove = true; |
15629 | else if (TYPE_ATOMIC (strip_array_types (TREE_TYPE (t)))) |
15630 | { |
15631 | error_at (OMP_CLAUSE_LOCATION (c), |
15632 | "%<_Atomic%> %qE in %qs clause" , t, |
15633 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
15634 | remove = true; |
15635 | } |
15636 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
15637 | && OMP_CLAUSE_MAP_IMPLICIT (c) |
15638 | && (bitmap_bit_p (&map_head, DECL_UID (t)) |
15639 | || bitmap_bit_p (&map_field_head, DECL_UID (t)) |
15640 | || bitmap_bit_p (&map_firstprivate_head, DECL_UID (t)))) |
15641 | remove = true; |
15642 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
15643 | && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER) |
15644 | { |
15645 | if (bitmap_bit_p (&generic_head, DECL_UID (t)) |
15646 | || bitmap_bit_p (&firstprivate_head, DECL_UID (t)) |
15647 | || bitmap_bit_p (&map_firstprivate_head, DECL_UID (t))) |
15648 | { |
15649 | error_at (OMP_CLAUSE_LOCATION (c), |
15650 | "%qD appears more than once in data clauses" , t); |
15651 | remove = true; |
15652 | } |
15653 | else if (bitmap_bit_p (&map_head, DECL_UID (t)) |
15654 | && !bitmap_bit_p (&map_field_head, DECL_UID (t))) |
15655 | { |
15656 | if (ort == C_ORT_ACC) |
15657 | error_at (OMP_CLAUSE_LOCATION (c), |
15658 | "%qD appears more than once in data clauses" , t); |
15659 | else |
15660 | error_at (OMP_CLAUSE_LOCATION (c), |
15661 | "%qD appears both in data and map clauses" , t); |
15662 | remove = true; |
15663 | } |
15664 | else |
15665 | bitmap_set_bit (&map_firstprivate_head, DECL_UID (t)); |
15666 | } |
15667 | else if (bitmap_bit_p (&map_head, DECL_UID (t)) |
15668 | && !bitmap_bit_p (&map_field_head, DECL_UID (t))) |
15669 | { |
15670 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP) |
15671 | error_at (OMP_CLAUSE_LOCATION (c), |
15672 | "%qD appears more than once in motion clauses" , t); |
15673 | else if (ort == C_ORT_ACC) |
15674 | error_at (OMP_CLAUSE_LOCATION (c), |
15675 | "%qD appears more than once in data clauses" , t); |
15676 | else |
15677 | error_at (OMP_CLAUSE_LOCATION (c), |
15678 | "%qD appears more than once in map clauses" , t); |
15679 | remove = true; |
15680 | } |
15681 | else if (ort == C_ORT_ACC |
15682 | && bitmap_bit_p (&generic_head, DECL_UID (t))) |
15683 | { |
15684 | error_at (OMP_CLAUSE_LOCATION (c), |
15685 | "%qD appears more than once in data clauses" , t); |
15686 | remove = true; |
15687 | } |
15688 | else if (bitmap_bit_p (&firstprivate_head, DECL_UID (t)) |
15689 | || bitmap_bit_p (&is_on_device_head, DECL_UID (t))) |
15690 | { |
15691 | if (ort == C_ORT_ACC) |
15692 | error_at (OMP_CLAUSE_LOCATION (c), |
15693 | "%qD appears more than once in data clauses" , t); |
15694 | else |
15695 | error_at (OMP_CLAUSE_LOCATION (c), |
15696 | "%qD appears both in data and map clauses" , t); |
15697 | remove = true; |
15698 | } |
15699 | else |
15700 | { |
15701 | bitmap_set_bit (&map_head, DECL_UID (t)); |
15702 | if (t != OMP_CLAUSE_DECL (c) |
15703 | && TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF) |
15704 | bitmap_set_bit (&map_field_head, DECL_UID (t)); |
15705 | } |
15706 | break; |
15707 | |
15708 | case OMP_CLAUSE_ENTER: |
15709 | case OMP_CLAUSE_LINK: |
15710 | t = OMP_CLAUSE_DECL (c); |
15711 | const char *cname; |
15712 | cname = omp_clause_code_name[OMP_CLAUSE_CODE (c)]; |
15713 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ENTER |
15714 | && OMP_CLAUSE_ENTER_TO (c)) |
15715 | cname = "to" ; |
15716 | if (TREE_CODE (t) == FUNCTION_DECL |
15717 | && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ENTER) |
15718 | ; |
15719 | else if (!VAR_P (t)) |
15720 | { |
15721 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ENTER) |
15722 | error_at (OMP_CLAUSE_LOCATION (c), |
15723 | "%qE is neither a variable nor a function name in " |
15724 | "clause %qs" , t, cname); |
15725 | else |
15726 | error_at (OMP_CLAUSE_LOCATION (c), |
15727 | "%qE is not a variable in clause %qs" , t, cname); |
15728 | remove = true; |
15729 | } |
15730 | else if (DECL_THREAD_LOCAL_P (t)) |
15731 | { |
15732 | error_at (OMP_CLAUSE_LOCATION (c), |
15733 | "%qD is threadprivate variable in %qs clause" , t, |
15734 | cname); |
15735 | remove = true; |
15736 | } |
15737 | else if (!omp_mappable_type (TREE_TYPE (t))) |
15738 | { |
15739 | error_at (OMP_CLAUSE_LOCATION (c), |
15740 | "%qD does not have a mappable type in %qs clause" , t, |
15741 | cname); |
15742 | remove = true; |
15743 | } |
15744 | if (remove) |
15745 | break; |
15746 | if (bitmap_bit_p (&generic_head, DECL_UID (t))) |
15747 | { |
15748 | error_at (OMP_CLAUSE_LOCATION (c), |
15749 | "%qE appears more than once on the same " |
15750 | "%<declare target%> directive" , t); |
15751 | remove = true; |
15752 | } |
15753 | else |
15754 | bitmap_set_bit (&generic_head, DECL_UID (t)); |
15755 | break; |
15756 | |
15757 | case OMP_CLAUSE_UNIFORM: |
15758 | t = OMP_CLAUSE_DECL (c); |
15759 | if (TREE_CODE (t) != PARM_DECL) |
15760 | { |
15761 | if (DECL_P (t)) |
15762 | error_at (OMP_CLAUSE_LOCATION (c), |
15763 | "%qD is not an argument in %<uniform%> clause" , t); |
15764 | else |
15765 | error_at (OMP_CLAUSE_LOCATION (c), |
15766 | "%qE is not an argument in %<uniform%> clause" , t); |
15767 | remove = true; |
15768 | break; |
15769 | } |
15770 | /* map_head bitmap is used as uniform_head if declare_simd. */ |
15771 | bitmap_set_bit (&map_head, DECL_UID (t)); |
15772 | goto check_dup_generic; |
15773 | |
15774 | case OMP_CLAUSE_IS_DEVICE_PTR: |
15775 | case OMP_CLAUSE_USE_DEVICE_PTR: |
15776 | t = OMP_CLAUSE_DECL (c); |
15777 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR) |
15778 | bitmap_set_bit (&is_on_device_head, DECL_UID (t)); |
15779 | if (TREE_CODE (TREE_TYPE (t)) != POINTER_TYPE) |
15780 | { |
15781 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR |
15782 | && ort != C_ORT_ACC) |
15783 | { |
15784 | error_at (OMP_CLAUSE_LOCATION (c), |
15785 | "%qs variable is not a pointer" , |
15786 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
15787 | remove = true; |
15788 | } |
15789 | else if (TREE_CODE (TREE_TYPE (t)) != ARRAY_TYPE) |
15790 | { |
15791 | error_at (OMP_CLAUSE_LOCATION (c), |
15792 | "%qs variable is neither a pointer nor an array" , |
15793 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
15794 | remove = true; |
15795 | } |
15796 | } |
15797 | goto check_dup_generic; |
15798 | |
15799 | case OMP_CLAUSE_HAS_DEVICE_ADDR: |
15800 | t = OMP_CLAUSE_DECL (c); |
15801 | if (TREE_CODE (t) == TREE_LIST) |
15802 | { |
15803 | if (handle_omp_array_sections (c, ort)) |
15804 | remove = true; |
15805 | else |
15806 | { |
15807 | t = OMP_CLAUSE_DECL (c); |
15808 | while (TREE_CODE (t) == ARRAY_REF) |
15809 | t = TREE_OPERAND (t, 0); |
15810 | } |
15811 | } |
15812 | bitmap_set_bit (&is_on_device_head, DECL_UID (t)); |
15813 | if (VAR_P (t) || TREE_CODE (t) == PARM_DECL) |
15814 | c_mark_addressable (exp: t); |
15815 | goto check_dup_generic_t; |
15816 | |
15817 | case OMP_CLAUSE_USE_DEVICE_ADDR: |
15818 | t = OMP_CLAUSE_DECL (c); |
15819 | if (VAR_P (t) || TREE_CODE (t) == PARM_DECL) |
15820 | c_mark_addressable (exp: t); |
15821 | goto check_dup_generic; |
15822 | |
15823 | case OMP_CLAUSE_NOWAIT: |
15824 | if (copyprivate_seen) |
15825 | { |
15826 | error_at (OMP_CLAUSE_LOCATION (c), |
15827 | "%<nowait%> clause must not be used together " |
15828 | "with %<copyprivate%>" ); |
15829 | remove = true; |
15830 | break; |
15831 | } |
15832 | nowait_clause = pc; |
15833 | pc = &OMP_CLAUSE_CHAIN (c); |
15834 | continue; |
15835 | |
15836 | case OMP_CLAUSE_ORDER: |
15837 | if (ordered_clause) |
15838 | { |
15839 | error_at (OMP_CLAUSE_LOCATION (c), |
15840 | "%<order%> clause must not be used together " |
15841 | "with %<ordered%>" ); |
15842 | remove = true; |
15843 | break; |
15844 | } |
15845 | else if (order_clause) |
15846 | { |
15847 | /* Silently remove duplicates. */ |
15848 | remove = true; |
15849 | break; |
15850 | } |
15851 | order_clause = pc; |
15852 | pc = &OMP_CLAUSE_CHAIN (c); |
15853 | continue; |
15854 | |
15855 | case OMP_CLAUSE_DETACH: |
15856 | t = OMP_CLAUSE_DECL (c); |
15857 | if (detach_seen) |
15858 | { |
15859 | error_at (OMP_CLAUSE_LOCATION (c), |
15860 | "too many %qs clauses on a task construct" , |
15861 | "detach" ); |
15862 | remove = true; |
15863 | break; |
15864 | } |
15865 | detach_seen = pc; |
15866 | pc = &OMP_CLAUSE_CHAIN (c); |
15867 | c_mark_addressable (exp: t); |
15868 | continue; |
15869 | |
15870 | case OMP_CLAUSE_IF: |
15871 | case OMP_CLAUSE_SELF: |
15872 | case OMP_CLAUSE_NUM_THREADS: |
15873 | case OMP_CLAUSE_NUM_TEAMS: |
15874 | case OMP_CLAUSE_THREAD_LIMIT: |
15875 | case OMP_CLAUSE_DEFAULT: |
15876 | case OMP_CLAUSE_UNTIED: |
15877 | case OMP_CLAUSE_COLLAPSE: |
15878 | case OMP_CLAUSE_FINAL: |
15879 | case OMP_CLAUSE_DEVICE: |
15880 | case OMP_CLAUSE_DIST_SCHEDULE: |
15881 | case OMP_CLAUSE_PARALLEL: |
15882 | case OMP_CLAUSE_FOR: |
15883 | case OMP_CLAUSE_SECTIONS: |
15884 | case OMP_CLAUSE_TASKGROUP: |
15885 | case OMP_CLAUSE_PROC_BIND: |
15886 | case OMP_CLAUSE_DEVICE_TYPE: |
15887 | case OMP_CLAUSE_PRIORITY: |
15888 | case OMP_CLAUSE_GRAINSIZE: |
15889 | case OMP_CLAUSE_NUM_TASKS: |
15890 | case OMP_CLAUSE_THREADS: |
15891 | case OMP_CLAUSE_SIMD: |
15892 | case OMP_CLAUSE_HINT: |
15893 | case OMP_CLAUSE_FILTER: |
15894 | case OMP_CLAUSE_DEFAULTMAP: |
15895 | case OMP_CLAUSE_BIND: |
15896 | case OMP_CLAUSE_NUM_GANGS: |
15897 | case OMP_CLAUSE_NUM_WORKERS: |
15898 | case OMP_CLAUSE_VECTOR_LENGTH: |
15899 | case OMP_CLAUSE_ASYNC: |
15900 | case OMP_CLAUSE_WAIT: |
15901 | case OMP_CLAUSE_AUTO: |
15902 | case OMP_CLAUSE_INDEPENDENT: |
15903 | case OMP_CLAUSE_SEQ: |
15904 | case OMP_CLAUSE_GANG: |
15905 | case OMP_CLAUSE_WORKER: |
15906 | case OMP_CLAUSE_VECTOR: |
15907 | case OMP_CLAUSE_TILE: |
15908 | case OMP_CLAUSE_IF_PRESENT: |
15909 | case OMP_CLAUSE_FINALIZE: |
15910 | case OMP_CLAUSE_NOHOST: |
15911 | case OMP_CLAUSE_INDIRECT: |
15912 | pc = &OMP_CLAUSE_CHAIN (c); |
15913 | continue; |
15914 | |
15915 | case OMP_CLAUSE_MERGEABLE: |
15916 | mergeable_seen = true; |
15917 | pc = &OMP_CLAUSE_CHAIN (c); |
15918 | continue; |
15919 | |
15920 | case OMP_CLAUSE_NOGROUP: |
15921 | nogroup_seen = pc; |
15922 | pc = &OMP_CLAUSE_CHAIN (c); |
15923 | continue; |
15924 | |
15925 | case OMP_CLAUSE_SCHEDULE: |
15926 | schedule_clause = c; |
15927 | pc = &OMP_CLAUSE_CHAIN (c); |
15928 | continue; |
15929 | |
15930 | case OMP_CLAUSE_ORDERED: |
15931 | ordered_clause = c; |
15932 | if (order_clause) |
15933 | { |
15934 | error_at (OMP_CLAUSE_LOCATION (*order_clause), |
15935 | "%<order%> clause must not be used together " |
15936 | "with %<ordered%>" ); |
15937 | *order_clause = OMP_CLAUSE_CHAIN (*order_clause); |
15938 | order_clause = NULL; |
15939 | } |
15940 | pc = &OMP_CLAUSE_CHAIN (c); |
15941 | continue; |
15942 | |
15943 | case OMP_CLAUSE_SAFELEN: |
15944 | safelen = c; |
15945 | pc = &OMP_CLAUSE_CHAIN (c); |
15946 | continue; |
15947 | case OMP_CLAUSE_SIMDLEN: |
15948 | simdlen = c; |
15949 | pc = &OMP_CLAUSE_CHAIN (c); |
15950 | continue; |
15951 | |
15952 | case OMP_CLAUSE_INBRANCH: |
15953 | case OMP_CLAUSE_NOTINBRANCH: |
15954 | if (branch_seen) |
15955 | { |
15956 | error_at (OMP_CLAUSE_LOCATION (c), |
15957 | "%<inbranch%> clause is incompatible with " |
15958 | "%<notinbranch%>" ); |
15959 | remove = true; |
15960 | break; |
15961 | } |
15962 | branch_seen = true; |
15963 | pc = &OMP_CLAUSE_CHAIN (c); |
15964 | continue; |
15965 | |
15966 | case OMP_CLAUSE_INCLUSIVE: |
15967 | case OMP_CLAUSE_EXCLUSIVE: |
15968 | need_complete = true; |
15969 | need_implicitly_determined = true; |
15970 | t = OMP_CLAUSE_DECL (c); |
15971 | if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL) |
15972 | { |
15973 | error_at (OMP_CLAUSE_LOCATION (c), |
15974 | "%qE is not a variable in clause %qs" , t, |
15975 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
15976 | remove = true; |
15977 | } |
15978 | break; |
15979 | |
15980 | default: |
15981 | gcc_unreachable (); |
15982 | } |
15983 | |
15984 | if (!remove) |
15985 | { |
15986 | t = OMP_CLAUSE_DECL (c); |
15987 | |
15988 | if (need_complete) |
15989 | { |
15990 | t = require_complete_type (OMP_CLAUSE_LOCATION (c), value: t); |
15991 | if (t == error_mark_node) |
15992 | remove = true; |
15993 | } |
15994 | |
15995 | if (need_implicitly_determined) |
15996 | { |
15997 | const char *share_name = NULL; |
15998 | |
15999 | if (VAR_P (t) && DECL_THREAD_LOCAL_P (t)) |
16000 | share_name = "threadprivate" ; |
16001 | else switch (c_omp_predetermined_sharing (t)) |
16002 | { |
16003 | case OMP_CLAUSE_DEFAULT_UNSPECIFIED: |
16004 | break; |
16005 | case OMP_CLAUSE_DEFAULT_SHARED: |
16006 | if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED |
16007 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE) |
16008 | && c_omp_predefined_variable (t)) |
16009 | /* The __func__ variable and similar function-local |
16010 | predefined variables may be listed in a shared or |
16011 | firstprivate clause. */ |
16012 | break; |
16013 | share_name = "shared" ; |
16014 | break; |
16015 | case OMP_CLAUSE_DEFAULT_PRIVATE: |
16016 | share_name = "private" ; |
16017 | break; |
16018 | default: |
16019 | gcc_unreachable (); |
16020 | } |
16021 | if (share_name) |
16022 | { |
16023 | error_at (OMP_CLAUSE_LOCATION (c), |
16024 | "%qE is predetermined %qs for %qs" , |
16025 | t, share_name, |
16026 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
16027 | remove = true; |
16028 | } |
16029 | else if (TREE_READONLY (t) |
16030 | && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_SHARED |
16031 | && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE) |
16032 | { |
16033 | error_at (OMP_CLAUSE_LOCATION (c), |
16034 | "%<const%> qualified %qE may appear only in " |
16035 | "%<shared%> or %<firstprivate%> clauses" , t); |
16036 | remove = true; |
16037 | } |
16038 | } |
16039 | } |
16040 | |
16041 | if (remove) |
16042 | { |
16043 | if (grp_start_p) |
16044 | { |
16045 | /* If we found a clause to remove, we want to remove the whole |
16046 | expanded group, otherwise gimplify |
16047 | (omp_resolve_clause_dependencies) can get confused. */ |
16048 | *grp_start_p = grp_sentinel; |
16049 | pc = grp_start_p; |
16050 | grp_start_p = NULL; |
16051 | } |
16052 | else |
16053 | *pc = OMP_CLAUSE_CHAIN (c); |
16054 | } |
16055 | else |
16056 | pc = &OMP_CLAUSE_CHAIN (c); |
16057 | } |
16058 | |
16059 | if (simdlen |
16060 | && safelen |
16061 | && tree_int_cst_lt (OMP_CLAUSE_SAFELEN_EXPR (safelen), |
16062 | OMP_CLAUSE_SIMDLEN_EXPR (simdlen))) |
16063 | { |
16064 | error_at (OMP_CLAUSE_LOCATION (simdlen), |
16065 | "%<simdlen%> clause value is bigger than " |
16066 | "%<safelen%> clause value" ); |
16067 | OMP_CLAUSE_SIMDLEN_EXPR (simdlen) |
16068 | = OMP_CLAUSE_SAFELEN_EXPR (safelen); |
16069 | } |
16070 | |
16071 | if (ordered_clause |
16072 | && schedule_clause |
16073 | && (OMP_CLAUSE_SCHEDULE_KIND (schedule_clause) |
16074 | & OMP_CLAUSE_SCHEDULE_NONMONOTONIC)) |
16075 | { |
16076 | error_at (OMP_CLAUSE_LOCATION (schedule_clause), |
16077 | "%<nonmonotonic%> schedule modifier specified together " |
16078 | "with %<ordered%> clause" ); |
16079 | OMP_CLAUSE_SCHEDULE_KIND (schedule_clause) |
16080 | = (enum omp_clause_schedule_kind) |
16081 | (OMP_CLAUSE_SCHEDULE_KIND (schedule_clause) |
16082 | & ~OMP_CLAUSE_SCHEDULE_NONMONOTONIC); |
16083 | } |
16084 | |
16085 | if (reduction_seen < 0 && ordered_clause) |
16086 | { |
16087 | error_at (OMP_CLAUSE_LOCATION (ordered_clause), |
16088 | "%qs clause specified together with %<inscan%> " |
16089 | "%<reduction%> clause" , "ordered" ); |
16090 | reduction_seen = -2; |
16091 | } |
16092 | |
16093 | if (reduction_seen < 0 && schedule_clause) |
16094 | { |
16095 | error_at (OMP_CLAUSE_LOCATION (schedule_clause), |
16096 | "%qs clause specified together with %<inscan%> " |
16097 | "%<reduction%> clause" , "schedule" ); |
16098 | reduction_seen = -2; |
16099 | } |
16100 | |
16101 | if (linear_variable_step_check |
16102 | || reduction_seen == -2 |
16103 | || allocate_seen |
16104 | || target_in_reduction_seen) |
16105 | for (pc = &clauses, c = clauses; c ; c = *pc) |
16106 | { |
16107 | bool remove = false; |
16108 | if (allocate_seen) |
16109 | switch (OMP_CLAUSE_CODE (c)) |
16110 | { |
16111 | case OMP_CLAUSE_REDUCTION: |
16112 | case OMP_CLAUSE_IN_REDUCTION: |
16113 | case OMP_CLAUSE_TASK_REDUCTION: |
16114 | if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF) |
16115 | { |
16116 | t = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0); |
16117 | if (TREE_CODE (t) == POINTER_PLUS_EXPR) |
16118 | t = TREE_OPERAND (t, 0); |
16119 | if (TREE_CODE (t) == ADDR_EXPR |
16120 | || INDIRECT_REF_P (t)) |
16121 | t = TREE_OPERAND (t, 0); |
16122 | if (DECL_P (t)) |
16123 | bitmap_clear_bit (&aligned_head, DECL_UID (t)); |
16124 | break; |
16125 | } |
16126 | /* FALLTHRU */ |
16127 | case OMP_CLAUSE_PRIVATE: |
16128 | case OMP_CLAUSE_FIRSTPRIVATE: |
16129 | case OMP_CLAUSE_LASTPRIVATE: |
16130 | case OMP_CLAUSE_LINEAR: |
16131 | if (DECL_P (OMP_CLAUSE_DECL (c))) |
16132 | bitmap_clear_bit (&aligned_head, |
16133 | DECL_UID (OMP_CLAUSE_DECL (c))); |
16134 | break; |
16135 | default: |
16136 | break; |
16137 | } |
16138 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR |
16139 | && OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (c) |
16140 | && !bitmap_bit_p (&map_head, |
16141 | DECL_UID (OMP_CLAUSE_LINEAR_STEP (c)))) |
16142 | { |
16143 | error_at (OMP_CLAUSE_LOCATION (c), |
16144 | "%<linear%> clause step is a parameter %qD not " |
16145 | "specified in %<uniform%> clause" , |
16146 | OMP_CLAUSE_LINEAR_STEP (c)); |
16147 | remove = true; |
16148 | } |
16149 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
16150 | && reduction_seen == -2) |
16151 | OMP_CLAUSE_REDUCTION_INSCAN (c) = 0; |
16152 | if (target_in_reduction_seen |
16153 | && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP) |
16154 | { |
16155 | tree t = OMP_CLAUSE_DECL (c); |
16156 | while (handled_component_p (t) |
16157 | || INDIRECT_REF_P (t) |
16158 | || TREE_CODE (t) == ADDR_EXPR |
16159 | || TREE_CODE (t) == MEM_REF |
16160 | || TREE_CODE (t) == NON_LVALUE_EXPR) |
16161 | t = TREE_OPERAND (t, 0); |
16162 | if (DECL_P (t) |
16163 | && bitmap_bit_p (&oacc_reduction_head, DECL_UID (t))) |
16164 | OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1; |
16165 | } |
16166 | |
16167 | if (remove) |
16168 | *pc = OMP_CLAUSE_CHAIN (c); |
16169 | else |
16170 | pc = &OMP_CLAUSE_CHAIN (c); |
16171 | } |
16172 | |
16173 | if (allocate_seen) |
16174 | for (pc = &clauses, c = clauses; c ; c = *pc) |
16175 | { |
16176 | bool remove = false; |
16177 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE |
16178 | && !OMP_CLAUSE_ALLOCATE_COMBINED (c) |
16179 | && bitmap_bit_p (&aligned_head, DECL_UID (OMP_CLAUSE_DECL (c)))) |
16180 | { |
16181 | error_at (OMP_CLAUSE_LOCATION (c), |
16182 | "%qD specified in %<allocate%> clause but not in " |
16183 | "an explicit privatization clause" , OMP_CLAUSE_DECL (c)); |
16184 | remove = true; |
16185 | } |
16186 | if (remove) |
16187 | *pc = OMP_CLAUSE_CHAIN (c); |
16188 | else |
16189 | pc = &OMP_CLAUSE_CHAIN (c); |
16190 | } |
16191 | |
16192 | if (nogroup_seen && reduction_seen) |
16193 | { |
16194 | error_at (OMP_CLAUSE_LOCATION (*nogroup_seen), |
16195 | "%<nogroup%> clause must not be used together with " |
16196 | "%<reduction%> clause" ); |
16197 | *nogroup_seen = OMP_CLAUSE_CHAIN (*nogroup_seen); |
16198 | } |
16199 | |
16200 | if (detach_seen) |
16201 | { |
16202 | if (mergeable_seen) |
16203 | { |
16204 | error_at (OMP_CLAUSE_LOCATION (*detach_seen), |
16205 | "%<detach%> clause must not be used together with " |
16206 | "%<mergeable%> clause" ); |
16207 | *detach_seen = OMP_CLAUSE_CHAIN (*detach_seen); |
16208 | } |
16209 | else |
16210 | { |
16211 | tree detach_decl = OMP_CLAUSE_DECL (*detach_seen); |
16212 | |
16213 | for (pc = &clauses, c = clauses; c ; c = *pc) |
16214 | { |
16215 | bool remove = false; |
16216 | if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED |
16217 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE |
16218 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE |
16219 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE) |
16220 | && OMP_CLAUSE_DECL (c) == detach_decl) |
16221 | { |
16222 | error_at (OMP_CLAUSE_LOCATION (c), |
16223 | "the event handle of a %<detach%> clause " |
16224 | "should not be in a data-sharing clause" ); |
16225 | remove = true; |
16226 | } |
16227 | if (remove) |
16228 | *pc = OMP_CLAUSE_CHAIN (c); |
16229 | else |
16230 | pc = &OMP_CLAUSE_CHAIN (c); |
16231 | } |
16232 | } |
16233 | } |
16234 | |
16235 | bitmap_obstack_release (NULL); |
16236 | return clauses; |
16237 | } |
16238 | |
/* Return code to initialize DST with a copy constructor from SRC.
   C doesn't have copy constructors nor assignment operators, only for
   _Atomic vars we need to perform __atomic_load from src into a temporary
   followed by __atomic_store of the temporary to dst.  */

tree
c_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  /* If neither side is _Atomic, a plain assignment copies correctly.  */
  if (!really_atomic_lvalue (expr: dst) && !really_atomic_lvalue (expr: src))
    return build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  location_t loc = OMP_CLAUSE_LOCATION (clause);
  tree type = TREE_TYPE (dst);
  /* The temporary is unqualified so that it is not itself _Atomic.  */
  tree nonatomic_type = build_qualified_type (type, TYPE_UNQUALIFIED);
  tree tmp = create_tmp_var (nonatomic_type);
  tree tmp_addr = build_fold_addr_expr (tmp);
  TREE_ADDRESSABLE (tmp) = 1;
  suppress_warning (tmp);
  tree src_addr = build_fold_addr_expr (src);
  tree dst_addr = build_fold_addr_expr (dst);
  tree seq_cst = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);
  vec<tree, va_gc> *params;
  /* Expansion of a generic atomic load may require an additional
     element, so allocate enough to prevent a resize.  */
  vec_alloc (v&: params, nelems: 4);

  /* Build __atomic_load (&src, &tmp, SEQ_CST);  */
  tree fndecl = builtin_decl_explicit (fncode: BUILT_IN_ATOMIC_LOAD);
  params->quick_push (obj: src_addr);
  params->quick_push (obj: tmp_addr);
  params->quick_push (obj: seq_cst);
  tree load = c_build_function_call_vec (loc, arg_loc: vNULL, function: fndecl, params, NULL);

  /* Allocate a fresh argument vector for the store call.  */
  vec_alloc (v&: params, nelems: 4);

  /* Build __atomic_store (&dst, &tmp, SEQ_CST);  */
  fndecl = builtin_decl_explicit (fncode: BUILT_IN_ATOMIC_STORE);
  params->quick_push (obj: dst_addr);
  params->quick_push (obj: tmp_addr);
  params->quick_push (obj: seq_cst);
  tree store = c_build_function_call_vec (loc, arg_loc: vNULL, function: fndecl, params, NULL);
  /* Evaluate the load, then the store; the whole expression is void.  */
  return build2 (COMPOUND_EXPR, void_type_node, load, store);
}
16282 | |
16283 | /* Create a transaction node. */ |
16284 | |
16285 | tree |
16286 | c_finish_transaction (location_t loc, tree block, int flags) |
16287 | { |
16288 | tree stmt = build_stmt (loc, TRANSACTION_EXPR, block); |
16289 | if (flags & TM_STMT_ATTR_OUTER) |
16290 | TRANSACTION_EXPR_OUTER (stmt) = 1; |
16291 | if (flags & TM_STMT_ATTR_RELAXED) |
16292 | TRANSACTION_EXPR_RELAXED (stmt) = 1; |
16293 | return add_stmt (stmt); |
16294 | } |
16295 | |
/* Make a variant type in the proper way for C/C++, propagating qualifiers
   down to the element type of an array.  If ORIG_QUAL_TYPE is not
   NULL, then it should be used as the qualified type
   ORIG_QUAL_INDIRECT levels down in array type derivation (to
   preserve information about the typedef name from which an array
   type was derived).  */

tree
c_build_qualified_type (tree type, int type_quals, tree orig_qual_type,
			size_t orig_qual_indirect)
{
  if (type == error_mark_node)
    return type;

  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree t;
      /* Qualifiers on an array apply to the element type; recurse,
	 decrementing the typedef-derivation depth.  */
      tree element_type = c_build_qualified_type (TREE_TYPE (type),
						  type_quals, orig_qual_type,
						  orig_qual_indirect: orig_qual_indirect - 1);

      /* See if we already have an identically qualified type.  */
      if (orig_qual_type && orig_qual_indirect == 0)
	t = orig_qual_type;
      else
	for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
	  {
	    if (TYPE_QUALS (strip_array_types (t)) == type_quals
		&& TYPE_NAME (t) == TYPE_NAME (type)
		&& TYPE_CONTEXT (t) == TYPE_CONTEXT (type)
		&& attribute_list_equal (TYPE_ATTRIBUTES (t),
					 TYPE_ATTRIBUTES (type)))
	      break;
	  }
      if (!t)
	{
	  tree domain = TYPE_DOMAIN (type);

	  /* No matching variant exists yet: create one with the
	     qualified element type.  */
	  t = build_variant_type_copy (type);
	  TREE_TYPE (t) = element_type;

	  if (TYPE_STRUCTURAL_EQUALITY_P (element_type)
	      || (domain && TYPE_STRUCTURAL_EQUALITY_P (domain)))
	    SET_TYPE_STRUCTURAL_EQUALITY (t);
	  else if (TYPE_CANONICAL (element_type) != element_type
		   || (domain && TYPE_CANONICAL (domain) != domain))
	    {
	      /* Rebuild the canonical type from the canonical element
		 and domain types, carrying over any reverse storage
		 order from TYPE.  */
	      tree unqualified_canon
		= build_array_type (TYPE_CANONICAL (element_type),
				    domain? TYPE_CANONICAL (domain)
					  : NULL_TREE);
	      if (TYPE_REVERSE_STORAGE_ORDER (type))
		{
		  unqualified_canon
		    = build_distinct_type_copy (unqualified_canon);
		  TYPE_REVERSE_STORAGE_ORDER (unqualified_canon) = 1;
		}
	      TYPE_CANONICAL (t)
		= c_build_qualified_type (type: unqualified_canon, type_quals);
	    }
	  else
	    TYPE_CANONICAL (t) = t;
	}
      return t;
    }

  /* A restrict-qualified pointer type must be a pointer to object or
     incomplete type.  Note that the use of POINTER_TYPE_P also allows
     REFERENCE_TYPEs, which is appropriate for C++.  */
  if ((type_quals & TYPE_QUAL_RESTRICT)
      && (!POINTER_TYPE_P (type)
	  || !C_TYPE_OBJECT_OR_INCOMPLETE_P (TREE_TYPE (type))))
    {
      /* Diagnose and then drop the invalid qualifier.  */
      error ("invalid use of %<restrict%>" );
      type_quals &= ~TYPE_QUAL_RESTRICT;
    }

  tree var_type = (orig_qual_type && orig_qual_indirect == 0
		   ? orig_qual_type
		   : build_qualified_type (type, type_quals));
  /* A variant type does not inherit the list of incomplete vars from the
     type main variant.  */
  if ((RECORD_OR_UNION_TYPE_P (var_type)
       || TREE_CODE (var_type) == ENUMERAL_TYPE)
      && TYPE_MAIN_VARIANT (var_type) != var_type)
    C_TYPE_INCOMPLETE_VARS (var_type) = 0;
  return var_type;
}
16384 | |
16385 | /* Build a VA_ARG_EXPR for the C parser. */ |
16386 | |
16387 | tree |
16388 | c_build_va_arg (location_t loc1, tree expr, location_t loc2, tree type) |
16389 | { |
16390 | if (error_operand_p (t: type)) |
16391 | return error_mark_node; |
16392 | /* VA_ARG_EXPR cannot be used for a scalar va_list with reverse storage |
16393 | order because it takes the address of the expression. */ |
16394 | else if (handled_component_p (t: expr) |
16395 | && reverse_storage_order_for_component_p (t: expr)) |
16396 | { |
16397 | error_at (loc1, "cannot use %<va_arg%> with reverse storage order" ); |
16398 | return error_mark_node; |
16399 | } |
16400 | else if (!COMPLETE_TYPE_P (type)) |
16401 | { |
16402 | error_at (loc2, "second argument to %<va_arg%> is of incomplete " |
16403 | "type %qT" , type); |
16404 | return error_mark_node; |
16405 | } |
16406 | else if (TREE_CODE (type) == FUNCTION_TYPE) |
16407 | { |
16408 | error_at (loc2, "second argument to %<va_arg%> is a function type %qT" , |
16409 | type); |
16410 | return error_mark_node; |
16411 | } |
16412 | else if (warn_cxx_compat && TREE_CODE (type) == ENUMERAL_TYPE) |
16413 | warning_at (loc2, OPT_Wc___compat, |
16414 | "C++ requires promoted type, not enum type, in %<va_arg%>" ); |
16415 | return build_va_arg (loc2, expr, type); |
16416 | } |
16417 | |
/* Return truthvalue of whether T1 is the same tree structure as T2.
   Return true if they are the same.  Return false if they are different.  */

bool
c_tree_equal (tree t1, tree t2)
{
  enum tree_code code1, code2;

  /* Identical nodes are trivially equal; a null on either side alone
     is not.  */
  if (t1 == t2)
    return true;
  if (!t1 || !t2)
    return false;

  /* Strip NON_LVALUE_EXPR wrappers from both trees.  */
  for (code1 = TREE_CODE (t1); code1 == NON_LVALUE_EXPR;
       code1 = TREE_CODE (t1))
    t1 = TREE_OPERAND (t1, 0);
  for (code2 = TREE_CODE (t2); code2 == NON_LVALUE_EXPR;
       code2 = TREE_CODE (t2))
    t2 = TREE_OPERAND (t2, 0);

  /* They might have become equal now.  */
  if (t1 == t2)
    return true;

  if (code1 != code2)
    return false;

  /* Constants must agree in type as well as value.  */
  if (CONSTANT_CLASS_P (t1) && !comptypes (TREE_TYPE (t1), TREE_TYPE (t2)))
    return false;

  switch (code1)
    {
    case INTEGER_CST:
      return wi::to_wide (t: t1) == wi::to_wide (t: t2);

    case REAL_CST:
      return real_equal (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));

    case STRING_CST:
      return TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
	&& !memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
		    TREE_STRING_LENGTH (t1));

    case FIXED_CST:
      return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1),
				     TREE_FIXED_CST (t2));

    case COMPLEX_CST:
      return c_tree_equal (TREE_REALPART (t1), TREE_REALPART (t2))
	     && c_tree_equal (TREE_IMAGPART (t1), TREE_IMAGPART (t2));

    case VECTOR_CST:
      return operand_equal_p (t1, t2, flags: OEP_ONLY_CONST);

    case CONSTRUCTOR:
      /* We need to do this when determining whether or not two
	 non-type pointer to member function template arguments
	 are the same.  (NOTE(review): this rationale appears to be
	 inherited from the C++ front end's version of this function.)  */
      if (!comptypes (TREE_TYPE (t1), TREE_TYPE (t2))
	  || CONSTRUCTOR_NELTS (t1) != CONSTRUCTOR_NELTS (t2))
	return false;
      {
	tree field, value;
	unsigned int i;
	/* Compare corresponding elements pairwise: both index and value.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t1), i, field, value)
	  {
	    constructor_elt *elt2 = CONSTRUCTOR_ELT (t2, i);
	    if (!c_tree_equal (t1: field, t2: elt2->index)
		|| !c_tree_equal (t1: value, t2: elt2->value))
	      return false;
	  }
      }
      return true;

    case TREE_LIST:
      if (!c_tree_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2)))
	return false;
      if (!c_tree_equal (TREE_VALUE (t1), TREE_VALUE (t2)))
	return false;
      return c_tree_equal (TREE_CHAIN (t1), TREE_CHAIN (t2));

    case SAVE_EXPR:
      return c_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));

    case CALL_EXPR:
      {
	tree arg1, arg2;
	call_expr_arg_iterator iter1, iter2;
	/* The callees, all corresponding arguments, and the argument
	   counts (checked by the trailing arg1/arg2 test) must match.  */
	if (!c_tree_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2)))
	  return false;
	for (arg1 = first_call_expr_arg (exp: t1, iter: &iter1),
	       arg2 = first_call_expr_arg (exp: t2, iter: &iter2);
	     arg1 && arg2;
	     arg1 = next_call_expr_arg (iter: &iter1),
	       arg2 = next_call_expr_arg (iter: &iter2))
	  if (!c_tree_equal (t1: arg1, t2: arg2))
	    return false;
	if (arg1 || arg2)
	  return false;
	return true;
      }

    case TARGET_EXPR:
      {
	tree o1 = TREE_OPERAND (t1, 0);
	tree o2 = TREE_OPERAND (t2, 0);

	/* Special case: if either target is an unallocated VAR_DECL,
	   it means that it's going to be unified with whatever the
	   TARGET_EXPR is really supposed to initialize, so treat it
	   as being equivalent to anything.  */
	if (VAR_P (o1) && DECL_NAME (o1) == NULL_TREE
	    && !DECL_RTL_SET_P (o1))
	  /*Nop*/;
	else if (VAR_P (o2) && DECL_NAME (o2) == NULL_TREE
		 && !DECL_RTL_SET_P (o2))
	  /*Nop*/;
	else if (!c_tree_equal (t1: o1, t2: o2))
	  return false;

	return c_tree_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
      }

    case COMPONENT_REF:
      /* The field operands must be the identical node.  */
      if (TREE_OPERAND (t1, 1) != TREE_OPERAND (t2, 1))
	return false;
      return c_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));

    case PARM_DECL:
    case VAR_DECL:
    case CONST_DECL:
    case FIELD_DECL:
    case FUNCTION_DECL:
    case IDENTIFIER_NODE:
    case SSA_NAME:
      /* These are compared by identity; pointer equality was already
	 checked above, so reaching here means they differ.  */
      return false;

    case TREE_VEC:
      {
	unsigned ix;
	/* Vectors are equal iff they have the same length and all
	   corresponding elements compare equal.  */
	if (TREE_VEC_LENGTH (t1) != TREE_VEC_LENGTH (t2))
	  return false;
	for (ix = TREE_VEC_LENGTH (t1); ix--;)
	  if (!c_tree_equal (TREE_VEC_ELT (t1, ix),
			     TREE_VEC_ELT (t2, ix)))
	    return false;
	return true;
      }

    CASE_CONVERT:
      /* For conversions check the types here; the operands are compared
	 by the generic operand loop below.  */
      if (!comptypes (TREE_TYPE (t1), TREE_TYPE (t2)))
	return false;
      break;

    default:
      break;
    }

  switch (TREE_CODE_CLASS (code1))
    {
    case tcc_unary:
    case tcc_binary:
    case tcc_comparison:
    case tcc_expression:
    case tcc_vl_exp:
    case tcc_reference:
    case tcc_statement:
      {
	int i, n = TREE_OPERAND_LENGTH (t1);

	/* For some codes only the first few operands participate in
	   the comparison.  */
	switch (code1)
	  {
	  case PREINCREMENT_EXPR:
	  case PREDECREMENT_EXPR:
	  case POSTINCREMENT_EXPR:
	  case POSTDECREMENT_EXPR:
	    n = 1;
	    break;
	  case ARRAY_REF:
	    n = 2;
	    break;
	  default:
	    break;
	  }

	/* Variable-length expressions must have the same operand count.  */
	if (TREE_CODE_CLASS (code1) == tcc_vl_exp
	    && n != TREE_OPERAND_LENGTH (t2))
	  return false;

	for (i = 0; i < n; ++i)
	  if (!c_tree_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i)))
	    return false;

	return true;
      }

    case tcc_type:
      return comptypes (type1: t1, type2: t2);
    default:
      gcc_unreachable ();
    }
  /* Every case above returns; control never reaches here.  */
}
16620 | |
16621 | /* Returns true when the function declaration FNDECL is implicit, |
16622 | introduced as a result of a call to an otherwise undeclared |
16623 | function, and false otherwise. */ |
16624 | |
16625 | bool |
16626 | c_decl_implicit (const_tree fndecl) |
16627 | { |
16628 | return C_DECL_IMPLICIT (fndecl); |
16629 | } |
16630 | |