1 | /* Support for fully folding sub-trees of an expression for C compiler. |
2 | Copyright (C) 1992-2023 Free Software Foundation, Inc. |
3 | |
4 | This file is part of GCC. |
5 | |
6 | GCC is free software; you can redistribute it and/or modify it under |
7 | the terms of the GNU General Public License as published by the Free |
8 | Software Foundation; either version 3, or (at your option) any later |
9 | version. |
10 | |
11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
14 | for more details. |
15 | |
16 | You should have received a copy of the GNU General Public License |
17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ |
19 | |
20 | #include "config.h" |
21 | #include "system.h" |
22 | #include "coretypes.h" |
23 | #include "target.h" |
24 | #include "function.h" |
25 | #include "bitmap.h" |
26 | #include "c-tree.h" |
27 | #include "intl.h" |
28 | #include "gimplify.h" |
29 | |
/* Forward declaration: the recursive worker shared by c_fully_fold and
   c_fold_array_ref below; see the definition for parameter semantics.  */
static tree c_fully_fold_internal (tree expr, bool, bool *, bool *, bool,
				   bool);
32 | |
33 | /* If DISABLE is true, stop issuing warnings. This is used when |
34 | parsing code that we know will not be executed. This function may |
35 | be called multiple times, and works as a stack. */ |
36 | |
37 | static void |
38 | c_disable_warnings (bool disable) |
39 | { |
40 | if (disable) |
41 | { |
42 | ++c_inhibit_evaluation_warnings; |
43 | fold_defer_overflow_warnings (); |
44 | } |
45 | } |
46 | |
47 | /* If ENABLE is true, reenable issuing warnings. */ |
48 | |
49 | static void |
50 | c_enable_warnings (bool enable) |
51 | { |
52 | if (enable) |
53 | { |
54 | --c_inhibit_evaluation_warnings; |
55 | fold_undefer_and_ignore_overflow_warnings (); |
56 | } |
57 | } |
58 | |
59 | /* Try to fold ARRAY_REF ary[index] if possible and not handled by |
60 | normal fold, return NULL_TREE otherwise. */ |
61 | |
62 | static tree |
63 | c_fold_array_ref (tree type, tree ary, tree index) |
64 | { |
65 | if (TREE_CODE (ary) != STRING_CST |
66 | || TREE_CODE (index) != INTEGER_CST |
67 | || TREE_OVERFLOW (index) |
68 | || TREE_CODE (TREE_TYPE (ary)) != ARRAY_TYPE |
69 | || !tree_fits_uhwi_p (index)) |
70 | return NULL_TREE; |
71 | |
72 | tree elem_type = TREE_TYPE (TREE_TYPE (ary)); |
73 | unsigned elem_nchars = (TYPE_PRECISION (elem_type) |
74 | / TYPE_PRECISION (char_type_node)); |
75 | unsigned len = (unsigned) TREE_STRING_LENGTH (ary) / elem_nchars; |
76 | tree nelts = array_type_nelts (TREE_TYPE (ary)); |
77 | bool dummy1 = true, dummy2 = true; |
78 | nelts = c_fully_fold_internal (expr: nelts, true, &dummy1, &dummy2, false, false); |
79 | unsigned HOST_WIDE_INT i = tree_to_uhwi (index); |
80 | if (!tree_int_cst_le (t1: index, t2: nelts) |
81 | || i >= len |
82 | || i + elem_nchars > len) |
83 | return NULL_TREE; |
84 | |
85 | if (elem_nchars == 1) |
86 | return build_int_cst (type, TREE_STRING_POINTER (ary)[i]); |
87 | |
88 | const unsigned char *ptr |
89 | = ((const unsigned char *)TREE_STRING_POINTER (ary) + i * elem_nchars); |
90 | return native_interpret_expr (type, ptr, elem_nchars); |
91 | } |
92 | |
93 | /* Fully fold EXPR, an expression that was not folded (beyond integer |
94 | constant expressions and null pointer constants) when being built |
95 | up. If IN_INIT, this is in a static initializer and certain |
96 | changes are made to the folding done. Clear *MAYBE_CONST if |
97 | MAYBE_CONST is not NULL and EXPR is definitely not a constant |
98 | expression because it contains an evaluated operator (in C99) or an |
99 | operator outside of sizeof returning an integer constant (in C90) |
100 | not permitted in constant expressions, or because it contains an |
101 | evaluated arithmetic overflow. (*MAYBE_CONST should typically be |
102 | set to true by callers before calling this function.) Return the |
103 | folded expression. Function arguments have already been folded |
104 | before calling this function, as have the contents of SAVE_EXPR, |
105 | TARGET_EXPR, BIND_EXPR, VA_ARG_EXPR, OBJ_TYPE_REF and |
106 | C_MAYBE_CONST_EXPR. LVAL is true if it should be treated as an |
107 | lvalue. */ |
108 | |
109 | tree |
110 | c_fully_fold (tree expr, bool in_init, bool *maybe_const, bool lval) |
111 | { |
112 | tree ret; |
113 | tree eptype = NULL_TREE; |
114 | bool dummy = true; |
115 | bool maybe_const_itself = true; |
116 | location_t loc = EXPR_LOCATION (expr); |
117 | |
118 | if (!maybe_const) |
119 | maybe_const = &dummy; |
120 | if (TREE_CODE (expr) == EXCESS_PRECISION_EXPR) |
121 | { |
122 | eptype = TREE_TYPE (expr); |
123 | expr = TREE_OPERAND (expr, 0); |
124 | } |
125 | ret = c_fully_fold_internal (expr, in_init, maybe_const, |
126 | &maybe_const_itself, false, lval); |
127 | if (eptype) |
128 | ret = fold_convert_loc (loc, eptype, ret); |
129 | *maybe_const &= maybe_const_itself; |
130 | return ret; |
131 | } |
132 | |
133 | /* Internal helper for c_fully_fold. EXPR and IN_INIT are as for |
134 | c_fully_fold. *MAYBE_CONST_OPERANDS is cleared because of operands |
135 | not permitted, while *MAYBE_CONST_ITSELF is cleared because of |
136 | arithmetic overflow (for C90, *MAYBE_CONST_OPERANDS is carried from |
137 | both evaluated and unevaluated subexpressions while |
138 | *MAYBE_CONST_ITSELF is carried from only evaluated |
139 | subexpressions). FOR_INT_CONST indicates if EXPR is an expression |
140 | with integer constant operands, and if any of the operands doesn't |
141 | get folded to an integer constant, don't fold the expression itself. |
142 | LVAL indicates folding of lvalue, where we can't replace it with |
143 | an rvalue. */ |
144 | |
145 | static tree |
146 | c_fully_fold_internal (tree expr, bool in_init, bool *maybe_const_operands, |
147 | bool *maybe_const_itself, bool for_int_const, bool lval) |
148 | { |
149 | tree ret = expr; |
150 | enum tree_code code = TREE_CODE (expr); |
151 | enum tree_code_class kind = TREE_CODE_CLASS (code); |
152 | location_t loc = EXPR_LOCATION (expr); |
153 | tree op0, op1, op2, op3; |
154 | tree orig_op0, orig_op1, orig_op2; |
155 | bool op0_const = true, op1_const = true, op2_const = true; |
156 | bool op0_const_self = true, op1_const_self = true, op2_const_self = true; |
157 | bool nowarning = warning_suppressed_p (expr, OPT_Woverflow); |
158 | bool unused_p; |
159 | bool op0_lval = false; |
160 | source_range old_range; |
161 | |
162 | /* Constants, declarations, statements, errors, and anything else not |
163 | counted as an expression cannot usefully be folded further at this |
164 | point. */ |
165 | if (!IS_EXPR_CODE_CLASS (kind) || kind == tcc_statement) |
166 | { |
167 | /* Except for variables which we can optimize to its initializer. */ |
168 | if (VAR_P (expr) && !lval && (optimize || in_init)) |
169 | { |
170 | if (in_init) |
171 | ret = decl_constant_value_1 (expr, true); |
172 | else |
173 | { |
174 | ret = decl_constant_value (expr); |
175 | if (ret != expr |
176 | && (TYPE_MODE (TREE_TYPE (ret)) == BLKmode |
177 | || TREE_CODE (TREE_TYPE (ret)) == ARRAY_TYPE)) |
178 | return expr; |
179 | } |
180 | /* Avoid unwanted tree sharing between the initializer and current |
181 | function's body where the tree can be modified e.g. by the |
182 | gimplifier. */ |
183 | if (ret != expr && TREE_STATIC (expr)) |
184 | ret = unshare_expr (ret); |
185 | return ret; |
186 | } |
187 | return expr; |
188 | } |
189 | |
190 | if (IS_EXPR_CODE_CLASS (kind)) |
191 | old_range = EXPR_LOCATION_RANGE (expr); |
192 | |
193 | /* Operands of variable-length expressions (function calls) have |
194 | already been folded, as have __builtin_* function calls, and such |
195 | expressions cannot occur in constant expressions. */ |
196 | if (kind == tcc_vl_exp) |
197 | { |
198 | *maybe_const_operands = false; |
199 | ret = fold (expr); |
200 | goto out; |
201 | } |
202 | |
203 | if (code == C_MAYBE_CONST_EXPR) |
204 | { |
205 | tree pre = C_MAYBE_CONST_EXPR_PRE (expr); |
206 | tree inner = C_MAYBE_CONST_EXPR_EXPR (expr); |
207 | if (C_MAYBE_CONST_EXPR_NON_CONST (expr)) |
208 | *maybe_const_operands = false; |
209 | if (C_MAYBE_CONST_EXPR_INT_OPERANDS (expr)) |
210 | { |
211 | *maybe_const_itself = false; |
212 | inner = c_fully_fold_internal (expr: inner, in_init, maybe_const_operands, |
213 | maybe_const_itself, for_int_const: true, lval); |
214 | } |
215 | if (pre && !in_init) |
216 | ret = build2 (COMPOUND_EXPR, TREE_TYPE (expr), pre, inner); |
217 | else |
218 | ret = inner; |
219 | goto out; |
220 | } |
221 | |
222 | /* Assignment, increment, decrement, function call and comma |
223 | operators, and statement expressions, cannot occur in constant |
224 | expressions if evaluated / outside of sizeof. (Function calls |
225 | were handled above, though VA_ARG_EXPR is treated like a function |
226 | call here, and statement expressions are handled through |
227 | C_MAYBE_CONST_EXPR to avoid folding inside them.) */ |
228 | switch (code) |
229 | { |
230 | case MODIFY_EXPR: |
231 | case PREDECREMENT_EXPR: |
232 | case PREINCREMENT_EXPR: |
233 | case POSTDECREMENT_EXPR: |
234 | case POSTINCREMENT_EXPR: |
235 | case COMPOUND_EXPR: |
236 | *maybe_const_operands = false; |
237 | break; |
238 | |
239 | case VA_ARG_EXPR: |
240 | case TARGET_EXPR: |
241 | case BIND_EXPR: |
242 | case OBJ_TYPE_REF: |
243 | *maybe_const_operands = false; |
244 | ret = fold (expr); |
245 | goto out; |
246 | |
247 | default: |
248 | break; |
249 | } |
250 | |
251 | /* Fold individual tree codes as appropriate. */ |
252 | switch (code) |
253 | { |
254 | case COMPOUND_LITERAL_EXPR: |
255 | /* Any non-constancy will have been marked in a containing |
256 | C_MAYBE_CONST_EXPR; there is no more folding to do here. */ |
257 | goto out; |
258 | |
259 | case COMPONENT_REF: |
260 | orig_op0 = op0 = TREE_OPERAND (expr, 0); |
261 | op1 = TREE_OPERAND (expr, 1); |
262 | op2 = TREE_OPERAND (expr, 2); |
263 | op0 = c_fully_fold_internal (expr: op0, in_init, maybe_const_operands, |
264 | maybe_const_itself, for_int_const, lval); |
265 | STRIP_TYPE_NOPS (op0); |
266 | if (op0 != orig_op0) |
267 | ret = build3 (COMPONENT_REF, TREE_TYPE (expr), op0, op1, op2); |
268 | if (ret != expr) |
269 | { |
270 | TREE_READONLY (ret) = TREE_READONLY (expr); |
271 | TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr); |
272 | } |
273 | if (!lval) |
274 | ret = fold (ret); |
275 | goto out; |
276 | |
277 | case ARRAY_REF: |
278 | orig_op0 = op0 = TREE_OPERAND (expr, 0); |
279 | orig_op1 = op1 = TREE_OPERAND (expr, 1); |
280 | op2 = TREE_OPERAND (expr, 2); |
281 | op3 = TREE_OPERAND (expr, 3); |
282 | op0 = c_fully_fold_internal (expr: op0, in_init, maybe_const_operands, |
283 | maybe_const_itself, for_int_const, lval); |
284 | STRIP_TYPE_NOPS (op0); |
285 | op1 = c_fully_fold_internal (expr: op1, in_init, maybe_const_operands, |
286 | maybe_const_itself, for_int_const, lval: false); |
287 | STRIP_TYPE_NOPS (op1); |
288 | /* Fold "foo"[2] in initializers. */ |
289 | if (!lval && in_init) |
290 | { |
291 | ret = c_fold_array_ref (TREE_TYPE (expr), ary: op0, index: op1); |
292 | if (ret) |
293 | goto out; |
294 | ret = expr; |
295 | } |
296 | if (op0 != orig_op0 || op1 != orig_op1) |
297 | ret = build4 (ARRAY_REF, TREE_TYPE (expr), op0, op1, op2, op3); |
298 | if (ret != expr) |
299 | { |
300 | TREE_READONLY (ret) = TREE_READONLY (expr); |
301 | TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr); |
302 | TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr); |
303 | } |
304 | if (!lval) |
305 | ret = fold (ret); |
306 | goto out; |
307 | |
308 | case MODIFY_EXPR: |
309 | case PREDECREMENT_EXPR: |
310 | case PREINCREMENT_EXPR: |
311 | case POSTDECREMENT_EXPR: |
312 | case POSTINCREMENT_EXPR: |
313 | op0_lval = true; |
314 | /* FALLTHRU */ |
315 | case COMPOUND_EXPR: |
316 | case PLUS_EXPR: |
317 | case MINUS_EXPR: |
318 | case MULT_EXPR: |
319 | case POINTER_PLUS_EXPR: |
320 | case POINTER_DIFF_EXPR: |
321 | case TRUNC_DIV_EXPR: |
322 | case CEIL_DIV_EXPR: |
323 | case FLOOR_DIV_EXPR: |
324 | case TRUNC_MOD_EXPR: |
325 | case RDIV_EXPR: |
326 | case EXACT_DIV_EXPR: |
327 | case LSHIFT_EXPR: |
328 | case RSHIFT_EXPR: |
329 | case BIT_IOR_EXPR: |
330 | case BIT_XOR_EXPR: |
331 | case BIT_AND_EXPR: |
332 | case LT_EXPR: |
333 | case LE_EXPR: |
334 | case GT_EXPR: |
335 | case GE_EXPR: |
336 | case EQ_EXPR: |
337 | case NE_EXPR: |
338 | case COMPLEX_EXPR: |
339 | case TRUTH_AND_EXPR: |
340 | case TRUTH_OR_EXPR: |
341 | case TRUTH_XOR_EXPR: |
342 | case UNORDERED_EXPR: |
343 | case ORDERED_EXPR: |
344 | case UNLT_EXPR: |
345 | case UNLE_EXPR: |
346 | case UNGT_EXPR: |
347 | case UNGE_EXPR: |
348 | case UNEQ_EXPR: |
349 | case MEM_REF: |
350 | /* Binary operations evaluating both arguments (increment and |
351 | decrement are binary internally in GCC). */ |
352 | orig_op0 = op0 = TREE_OPERAND (expr, 0); |
353 | orig_op1 = op1 = TREE_OPERAND (expr, 1); |
354 | op0 = c_fully_fold_internal (expr: op0, in_init, maybe_const_operands, |
355 | maybe_const_itself, for_int_const, |
356 | lval: op0_lval); |
357 | STRIP_TYPE_NOPS (op0); |
358 | /* The RHS of a MODIFY_EXPR was fully folded when building that |
359 | expression for the sake of conversion warnings. */ |
360 | if (code != MODIFY_EXPR) |
361 | op1 = c_fully_fold_internal (expr: op1, in_init, maybe_const_operands, |
362 | maybe_const_itself, for_int_const, lval: false); |
363 | STRIP_TYPE_NOPS (op1); |
364 | |
365 | if (for_int_const && (TREE_CODE (op0) != INTEGER_CST |
366 | || TREE_CODE (op1) != INTEGER_CST)) |
367 | goto out; |
368 | |
369 | if (op0 != orig_op0 || op1 != orig_op1 || in_init) |
370 | ret = in_init |
371 | ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1) |
372 | : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1); |
373 | else |
374 | ret = fold (expr); |
375 | if (TREE_OVERFLOW_P (ret) |
376 | && !TREE_OVERFLOW_P (op0) |
377 | && !(BINARY_CLASS_P (op0) && TREE_OVERFLOW_P (TREE_OPERAND (op0, 1))) |
378 | && !TREE_OVERFLOW_P (op1)) |
379 | overflow_warning (EXPR_LOC_OR_LOC (expr, input_location), ret, expr); |
380 | if (code == LSHIFT_EXPR |
381 | && TREE_CODE (orig_op0) != INTEGER_CST |
382 | && (TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE |
383 | || TREE_CODE (TREE_TYPE (orig_op0)) == BITINT_TYPE) |
384 | && TREE_CODE (op0) == INTEGER_CST |
385 | && c_inhibit_evaluation_warnings == 0 |
386 | && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (orig_op0)) |
387 | && tree_int_cst_sgn (op0) < 0) |
388 | warning_at (loc, OPT_Wshift_negative_value, |
389 | "left shift of negative value" ); |
390 | if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR) |
391 | && TREE_CODE (orig_op1) != INTEGER_CST |
392 | && TREE_CODE (op1) == INTEGER_CST |
393 | && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE |
394 | && c_inhibit_evaluation_warnings == 0) |
395 | { |
396 | if (tree_int_cst_sgn (op1) < 0) |
397 | warning_at (loc, OPT_Wshift_count_negative, |
398 | (code == LSHIFT_EXPR |
399 | ? G_("left shift count is negative" ) |
400 | : G_("right shift count is negative" ))); |
401 | else if ((TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE |
402 | || TREE_CODE (TREE_TYPE (orig_op0)) == BITINT_TYPE |
403 | || TREE_CODE (TREE_TYPE (orig_op0)) == FIXED_POINT_TYPE) |
404 | && compare_tree_int (op1, |
405 | TYPE_PRECISION (TREE_TYPE (orig_op0))) |
406 | >= 0) |
407 | warning_at (loc, OPT_Wshift_count_overflow, |
408 | (code == LSHIFT_EXPR |
409 | ? G_("left shift count >= width of type" ) |
410 | : G_("right shift count >= width of type" ))); |
411 | else if (TREE_CODE (TREE_TYPE (orig_op0)) == VECTOR_TYPE |
412 | && compare_tree_int (op1, |
413 | TYPE_PRECISION (TREE_TYPE (TREE_TYPE (orig_op0)))) |
414 | >= 0) |
415 | warning_at (loc, OPT_Wshift_count_overflow, |
416 | code == LSHIFT_EXPR |
417 | ? G_("left shift count >= width of vector element" ) |
418 | : G_("right shift count >= width of vector element" )); |
419 | } |
420 | if (code == LSHIFT_EXPR |
421 | /* If either OP0 has been folded to INTEGER_CST... */ |
422 | && ((TREE_CODE (orig_op0) != INTEGER_CST |
423 | && (TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE |
424 | || TREE_CODE (TREE_TYPE (orig_op0)) == BITINT_TYPE) |
425 | && TREE_CODE (op0) == INTEGER_CST) |
426 | /* ...or if OP1 has been folded to INTEGER_CST... */ |
427 | || (TREE_CODE (orig_op1) != INTEGER_CST |
428 | && (TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE |
429 | || TREE_CODE (TREE_TYPE (orig_op1)) == BITINT_TYPE) |
430 | && TREE_CODE (op1) == INTEGER_CST)) |
431 | && c_inhibit_evaluation_warnings == 0) |
432 | /* ...then maybe we can detect an overflow. */ |
433 | maybe_warn_shift_overflow (loc, op0, op1); |
434 | if ((code == TRUNC_DIV_EXPR |
435 | || code == CEIL_DIV_EXPR |
436 | || code == FLOOR_DIV_EXPR |
437 | || code == EXACT_DIV_EXPR |
438 | || code == TRUNC_MOD_EXPR) |
439 | && TREE_CODE (orig_op1) != INTEGER_CST |
440 | && TREE_CODE (op1) == INTEGER_CST |
441 | && (TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE |
442 | || TREE_CODE (TREE_TYPE (orig_op0)) == BITINT_TYPE |
443 | || TREE_CODE (TREE_TYPE (orig_op0)) == FIXED_POINT_TYPE) |
444 | && (TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE |
445 | || TREE_CODE (TREE_TYPE (orig_op1)) == BITINT_TYPE)) |
446 | warn_for_div_by_zero (loc, divisor: op1); |
447 | if (code == MEM_REF |
448 | && ret != expr |
449 | && TREE_CODE (ret) == MEM_REF) |
450 | { |
451 | TREE_READONLY (ret) = TREE_READONLY (expr); |
452 | TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr); |
453 | TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr); |
454 | } |
455 | goto out; |
456 | |
457 | case ADDR_EXPR: |
458 | op0_lval = true; |
459 | goto unary; |
460 | case REALPART_EXPR: |
461 | case IMAGPART_EXPR: |
462 | case VIEW_CONVERT_EXPR: |
463 | op0_lval = lval; |
464 | /* FALLTHRU */ |
465 | case INDIRECT_REF: |
466 | case FIX_TRUNC_EXPR: |
467 | case FLOAT_EXPR: |
468 | CASE_CONVERT: |
469 | case ADDR_SPACE_CONVERT_EXPR: |
470 | case NON_LVALUE_EXPR: |
471 | case NEGATE_EXPR: |
472 | case BIT_NOT_EXPR: |
473 | case TRUTH_NOT_EXPR: |
474 | case CONJ_EXPR: |
475 | case PAREN_EXPR: |
476 | unary: |
477 | /* Unary operations. */ |
478 | orig_op0 = op0 = TREE_OPERAND (expr, 0); |
479 | op0 = c_fully_fold_internal (expr: op0, in_init, maybe_const_operands, |
480 | maybe_const_itself, for_int_const, |
481 | lval: op0_lval); |
482 | STRIP_TYPE_NOPS (op0); |
483 | |
484 | if (for_int_const && TREE_CODE (op0) != INTEGER_CST) |
485 | goto out; |
486 | |
487 | /* ??? Cope with user tricks that amount to offsetof. The middle-end is |
488 | not prepared to deal with them if they occur in initializers. */ |
489 | if (op0 != orig_op0 |
490 | && code == ADDR_EXPR |
491 | && (op1 = get_base_address (t: op0)) != NULL_TREE |
492 | && INDIRECT_REF_P (op1) |
493 | && TREE_CONSTANT (TREE_OPERAND (op1, 0))) |
494 | ret = fold_offsetof (op0, TREE_TYPE (expr)); |
495 | else if (op0 != orig_op0 || in_init) |
496 | ret = in_init |
497 | ? fold_build1_initializer_loc (loc, code, TREE_TYPE (expr), op0) |
498 | : fold_build1_loc (loc, code, TREE_TYPE (expr), op0); |
499 | else |
500 | ret = fold (expr); |
501 | if (code == INDIRECT_REF |
502 | && ret != expr |
503 | && INDIRECT_REF_P (ret)) |
504 | { |
505 | TREE_READONLY (ret) = TREE_READONLY (expr); |
506 | TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr); |
507 | TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr); |
508 | } |
509 | switch (code) |
510 | { |
511 | case FIX_TRUNC_EXPR: |
512 | case FLOAT_EXPR: |
513 | CASE_CONVERT: |
514 | /* Don't warn about explicit conversions. We will already |
515 | have warned about suspect implicit conversions. */ |
516 | break; |
517 | |
518 | default: |
519 | if (TREE_OVERFLOW_P (ret) && !TREE_OVERFLOW_P (op0)) |
520 | overflow_warning (EXPR_LOCATION (expr), ret, op0); |
521 | break; |
522 | } |
523 | goto out; |
524 | |
525 | case TRUTH_ANDIF_EXPR: |
526 | case TRUTH_ORIF_EXPR: |
527 | /* Binary operations not necessarily evaluating both |
528 | arguments. */ |
529 | orig_op0 = op0 = TREE_OPERAND (expr, 0); |
530 | orig_op1 = op1 = TREE_OPERAND (expr, 1); |
531 | op0 = c_fully_fold_internal (expr: op0, in_init, maybe_const_operands: &op0_const, maybe_const_itself: &op0_const_self, |
532 | for_int_const, lval: false); |
533 | STRIP_TYPE_NOPS (op0); |
534 | |
535 | unused_p = (op0 == (code == TRUTH_ANDIF_EXPR |
536 | ? truthvalue_false_node |
537 | : truthvalue_true_node)); |
538 | c_disable_warnings (disable: unused_p); |
539 | op1 = c_fully_fold_internal (expr: op1, in_init, maybe_const_operands: &op1_const, maybe_const_itself: &op1_const_self, |
540 | for_int_const, lval: false); |
541 | STRIP_TYPE_NOPS (op1); |
542 | c_enable_warnings (enable: unused_p); |
543 | |
544 | if (for_int_const |
545 | && (TREE_CODE (op0) != INTEGER_CST |
546 | /* Require OP1 be an INTEGER_CST only if it's evaluated. */ |
547 | || (!unused_p && TREE_CODE (op1) != INTEGER_CST))) |
548 | goto out; |
549 | |
550 | if (op0 != orig_op0 || op1 != orig_op1 || in_init) |
551 | ret = in_init |
552 | ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1) |
553 | : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1); |
554 | else |
555 | ret = fold (expr); |
556 | *maybe_const_operands &= op0_const; |
557 | *maybe_const_itself &= op0_const_self; |
558 | if (!(flag_isoc99 |
559 | && op0_const |
560 | && op0_const_self |
561 | && (code == TRUTH_ANDIF_EXPR |
562 | ? op0 == truthvalue_false_node |
563 | : op0 == truthvalue_true_node))) |
564 | *maybe_const_operands &= op1_const; |
565 | if (!(op0_const |
566 | && op0_const_self |
567 | && (code == TRUTH_ANDIF_EXPR |
568 | ? op0 == truthvalue_false_node |
569 | : op0 == truthvalue_true_node))) |
570 | *maybe_const_itself &= op1_const_self; |
571 | goto out; |
572 | |
573 | case COND_EXPR: |
574 | orig_op0 = op0 = TREE_OPERAND (expr, 0); |
575 | orig_op1 = op1 = TREE_OPERAND (expr, 1); |
576 | orig_op2 = op2 = TREE_OPERAND (expr, 2); |
577 | op0 = c_fully_fold_internal (expr: op0, in_init, maybe_const_operands: &op0_const, maybe_const_itself: &op0_const_self, |
578 | for_int_const, lval: false); |
579 | |
580 | STRIP_TYPE_NOPS (op0); |
581 | c_disable_warnings (disable: op0 == truthvalue_false_node); |
582 | op1 = c_fully_fold_internal (expr: op1, in_init, maybe_const_operands: &op1_const, maybe_const_itself: &op1_const_self, |
583 | for_int_const, lval: false); |
584 | STRIP_TYPE_NOPS (op1); |
585 | c_enable_warnings (enable: op0 == truthvalue_false_node); |
586 | |
587 | c_disable_warnings (disable: op0 == truthvalue_true_node); |
588 | op2 = c_fully_fold_internal (expr: op2, in_init, maybe_const_operands: &op2_const, maybe_const_itself: &op2_const_self, |
589 | for_int_const, lval: false); |
590 | STRIP_TYPE_NOPS (op2); |
591 | c_enable_warnings (enable: op0 == truthvalue_true_node); |
592 | |
593 | if (for_int_const |
594 | && (TREE_CODE (op0) != INTEGER_CST |
595 | /* Only the evaluated operand must be an INTEGER_CST. */ |
596 | || (op0 == truthvalue_true_node |
597 | ? TREE_CODE (op1) != INTEGER_CST |
598 | : TREE_CODE (op2) != INTEGER_CST))) |
599 | goto out; |
600 | |
601 | if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2) |
602 | ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2); |
603 | else |
604 | ret = fold (expr); |
605 | *maybe_const_operands &= op0_const; |
606 | *maybe_const_itself &= op0_const_self; |
607 | if (!(flag_isoc99 |
608 | && op0_const |
609 | && op0_const_self |
610 | && op0 == truthvalue_false_node)) |
611 | *maybe_const_operands &= op1_const; |
612 | if (!(op0_const |
613 | && op0_const_self |
614 | && op0 == truthvalue_false_node)) |
615 | *maybe_const_itself &= op1_const_self; |
616 | if (!(flag_isoc99 |
617 | && op0_const |
618 | && op0_const_self |
619 | && op0 == truthvalue_true_node)) |
620 | *maybe_const_operands &= op2_const; |
621 | if (!(op0_const |
622 | && op0_const_self |
623 | && op0 == truthvalue_true_node)) |
624 | *maybe_const_itself &= op2_const_self; |
625 | goto out; |
626 | |
627 | case VEC_COND_EXPR: |
628 | orig_op0 = op0 = TREE_OPERAND (expr, 0); |
629 | orig_op1 = op1 = TREE_OPERAND (expr, 1); |
630 | orig_op2 = op2 = TREE_OPERAND (expr, 2); |
631 | op0 = c_fully_fold_internal (expr: op0, in_init, maybe_const_operands, |
632 | maybe_const_itself, for_int_const, lval: false); |
633 | STRIP_TYPE_NOPS (op0); |
634 | op1 = c_fully_fold_internal (expr: op1, in_init, maybe_const_operands, |
635 | maybe_const_itself, for_int_const, lval: false); |
636 | STRIP_TYPE_NOPS (op1); |
637 | op2 = c_fully_fold_internal (expr: op2, in_init, maybe_const_operands, |
638 | maybe_const_itself, for_int_const, lval: false); |
639 | STRIP_TYPE_NOPS (op2); |
640 | |
641 | if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2) |
642 | ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2); |
643 | else |
644 | ret = fold (expr); |
645 | goto out; |
646 | |
647 | case EXCESS_PRECISION_EXPR: |
648 | /* Each case where an operand with excess precision may be |
649 | encountered must remove the EXCESS_PRECISION_EXPR around |
650 | inner operands and possibly put one around the whole |
651 | expression or possibly convert to the semantic type (which |
652 | c_fully_fold does); we cannot tell at this stage which is |
653 | appropriate in any particular case. */ |
654 | gcc_unreachable (); |
655 | |
656 | case SAVE_EXPR: |
657 | /* Make sure to fold the contents of a SAVE_EXPR exactly once. */ |
658 | op0 = TREE_OPERAND (expr, 0); |
659 | if (!SAVE_EXPR_FOLDED_P (expr)) |
660 | { |
661 | op0 = c_fully_fold_internal (expr: op0, in_init, maybe_const_operands, |
662 | maybe_const_itself, for_int_const, |
663 | lval: false); |
664 | TREE_OPERAND (expr, 0) = op0; |
665 | SAVE_EXPR_FOLDED_P (expr) = true; |
666 | } |
667 | /* Return the SAVE_EXPR operand if it is invariant. */ |
668 | if (tree_invariant_p (op0)) |
669 | ret = op0; |
670 | goto out; |
671 | |
672 | default: |
673 | /* Various codes may appear through folding built-in functions |
674 | and their arguments. */ |
675 | goto out; |
676 | } |
677 | |
678 | out: |
679 | /* Some folding may introduce NON_LVALUE_EXPRs; all lvalue checks |
680 | have been done by this point, so remove them again. */ |
681 | nowarning |= warning_suppressed_p (ret, OPT_Woverflow); |
682 | STRIP_TYPE_NOPS (ret); |
683 | if (nowarning && !warning_suppressed_p (ret, OPT_Woverflow)) |
684 | { |
685 | if (!CAN_HAVE_LOCATION_P (ret)) |
686 | ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret); |
687 | suppress_warning (ret, OPT_Woverflow); |
688 | } |
689 | if (ret != expr) |
690 | { |
691 | protected_set_expr_location (ret, loc); |
692 | if (IS_EXPR_CODE_CLASS (kind)) |
693 | set_source_range (expr: ret, start: old_range.m_start, finish: old_range.m_finish); |
694 | } |
695 | return ret; |
696 | } |
697 | |
698 | /* Fold X for consideration by one of the warning functions when checking |
699 | whether an expression has a constant value. */ |
700 | |
701 | tree |
702 | fold_for_warn (tree x) |
703 | { |
704 | /* The C front-end has already folded X appropriately. */ |
705 | return x; |
706 | } |
707 | |