1 | /* Gimple decl, type, and expression support functions. |
2 | |
3 | Copyright (C) 2007-2023 Free Software Foundation, Inc. |
4 | Contributed by Aldy Hernandez <aldyh@redhat.com> |
5 | |
6 | This file is part of GCC. |
7 | |
8 | GCC is free software; you can redistribute it and/or modify it under |
9 | the terms of the GNU General Public License as published by the Free |
10 | Software Foundation; either version 3, or (at your option) any later |
11 | version. |
12 | |
13 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
14 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
15 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
16 | for more details. |
17 | |
18 | You should have received a copy of the GNU General Public License |
19 | along with GCC; see the file COPYING3. If not see |
20 | <http://www.gnu.org/licenses/>. */ |
21 | |
22 | #include "config.h" |
23 | #include "system.h" |
24 | #include "coretypes.h" |
25 | #include "backend.h" |
26 | #include "tree.h" |
27 | #include "gimple.h" |
28 | #include "stringpool.h" |
29 | #include "gimple-ssa.h" |
30 | #include "fold-const.h" |
31 | #include "tree-eh.h" |
32 | #include "gimplify.h" |
33 | #include "stor-layout.h" |
34 | #include "demangle.h" |
35 | #include "hash-set.h" |
36 | #include "rtl.h" |
37 | #include "tree-pass.h" |
38 | #include "stringpool.h" |
39 | #include "attribs.h" |
40 | #include "target.h" |
41 | |
42 | /* ----- Type related ----- */ |
43 | |
44 | /* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a |
45 | useless type conversion, otherwise return false. |
46 | |
47 | This function implicitly defines the middle-end type system. With |
48 | the notion of 'a < b' meaning that useless_type_conversion_p (a, b) |
49 | holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds, |
50 | the following invariants shall be fulfilled: |
51 | |
52 | 1) useless_type_conversion_p is transitive. |
53 | If a < b and b < c then a < c. |
54 | |
55 | 2) useless_type_conversion_p is not symmetric. |
56 | From a < b does not follow a > b. |
57 | |
58 | 3) Types define the available set of operations applicable to values. |
59 | A type conversion is useless if the operations for the target type |
60 | is a subset of the operations for the source type. For example |
61 | casts to void* are useless, casts from void* are not (void* can't |
62 | be dereferenced or offsetted, but copied, hence its set of operations |
63 | is a strict subset of that of all other data pointer types). Casts |
64 | to const T* are useless (can't be written to), casts from const T* |
65 | to T* are not. */ |
66 | |
bool
useless_type_conversion_p (tree outer_type, tree inner_type)
{
  /* Do the following before stripping toplevel qualifiers.  */
  if (POINTER_TYPE_P (inner_type)
      && POINTER_TYPE_P (outer_type))
    {
      /* Do not lose casts between pointers to different address spaces.  */
      if (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
	  != TYPE_ADDR_SPACE (TREE_TYPE (inner_type)))
	return false;
      /* Do not lose casts to function pointer types.  */
      if (FUNC_OR_METHOD_TYPE_P (TREE_TYPE (outer_type))
	  && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (inner_type)))
	return false;
    }

  /* From now on qualifiers on value types do not matter.  */
  inner_type = TYPE_MAIN_VARIANT (inner_type);
  outer_type = TYPE_MAIN_VARIANT (outer_type);

  /* Identical main variants trivially convert uselessly.  */
  if (inner_type == outer_type)
    return true;

  /* Changes in machine mode are never useless conversions because the RTL
     middle-end expects explicit conversions between modes.  */
  if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type))
    return false;

  /* If both the inner and outer types are integral types, then the
     conversion is not necessary if they have the same mode and
     signedness and precision, and both or neither are boolean.  */
  if (INTEGRAL_TYPE_P (inner_type)
      && INTEGRAL_TYPE_P (outer_type))
    {
      /* Preserve changes in signedness or precision.  */
      if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
	  || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
	return false;

      /* Preserve conversions to/from BOOLEAN_TYPE if types are not
	 of precision one.  */
      if (((TREE_CODE (inner_type) == BOOLEAN_TYPE)
	   != (TREE_CODE (outer_type) == BOOLEAN_TYPE))
	  && TYPE_PRECISION (outer_type) != 1)
	return false;

      /* Preserve conversions to/from BITINT_TYPE.  While we don't
	 need to care that much about such conversions within a function's
	 body, we need to prevent changing BITINT_TYPE to INTEGER_TYPE
	 of the same precision or vice versa when passed to functions,
	 especially for varargs.  */
      if ((TREE_CODE (inner_type) == BITINT_TYPE)
	  != (TREE_CODE (outer_type) == BITINT_TYPE))
	return false;

      /* We don't need to preserve changes in the types minimum or
	 maximum value in general as these do not generate code
	 unless the types precisions are different.  */
      return true;
    }

  /* Scalar floating point types with the same mode are compatible.  */
  else if (SCALAR_FLOAT_TYPE_P (inner_type)
	   && SCALAR_FLOAT_TYPE_P (outer_type))
    return true;

  /* Fixed point types with the same mode are compatible.  */
  else if (FIXED_POINT_TYPE_P (inner_type)
	   && FIXED_POINT_TYPE_P (outer_type))
    return TYPE_SATURATING (inner_type) == TYPE_SATURATING (outer_type);

  /* We need to take special care recursing to pointed-to types.  */
  else if (POINTER_TYPE_P (inner_type)
	   && POINTER_TYPE_P (outer_type))
    {
      /* We do not care for const qualification of the pointed-to types
	 as const qualification has no semantic value to the middle-end.  */

      /* Otherwise pointers/references are equivalent.  Address-space and
	 function-pointer mismatches were already rejected above, before
	 qualifier stripping.  */
      return true;
    }

  /* Recurse for complex types.  */
  else if (TREE_CODE (inner_type) == COMPLEX_TYPE
	   && TREE_CODE (outer_type) == COMPLEX_TYPE)
    return useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type));

  /* Recurse for vector types with the same number of subparts.  */
  else if (VECTOR_TYPE_P (inner_type)
	   && VECTOR_TYPE_P (outer_type))
    return (known_eq (TYPE_VECTOR_SUBPARTS (inner_type),
		      TYPE_VECTOR_SUBPARTS (outer_type))
	    && useless_type_conversion_p (TREE_TYPE (outer_type),
					  TREE_TYPE (inner_type))
	    && targetm.compatible_vector_types_p (inner_type, outer_type));

  /* Arrays need compatible extents and element types.  */
  else if (TREE_CODE (inner_type) == ARRAY_TYPE
	   && TREE_CODE (outer_type) == ARRAY_TYPE)
    {
      /* Preserve various attributes.  */
      if (TYPE_REVERSE_STORAGE_ORDER (inner_type)
	  != TYPE_REVERSE_STORAGE_ORDER (outer_type))
	return false;
      if (TYPE_STRING_FLAG (inner_type) != TYPE_STRING_FLAG (outer_type))
	return false;

      /* Conversions from array types with unknown extent to
	 array types with known extent are not useless.  */
      if (!TYPE_DOMAIN (inner_type) && TYPE_DOMAIN (outer_type))
	return false;

      /* Nor are conversions from array types with non-constant size to
	 array types with constant size or to different size.  */
      if (TYPE_SIZE (outer_type)
	  && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST
	  && (!TYPE_SIZE (inner_type)
	      || TREE_CODE (TYPE_SIZE (inner_type)) != INTEGER_CST
	      || !tree_int_cst_equal (TYPE_SIZE (outer_type),
				      TYPE_SIZE (inner_type))))
	return false;

      /* Check conversions between arrays with partially known extents.
	 If the array min/max values are constant they have to match.
	 Otherwise allow conversions to unknown and variable extents.
	 In particular this declares conversions that may change the
	 mode to BLKmode as useless.  */
      if (TYPE_DOMAIN (inner_type)
	  && TYPE_DOMAIN (outer_type)
	  && TYPE_DOMAIN (inner_type) != TYPE_DOMAIN (outer_type))
	{
	  tree inner_min = TYPE_MIN_VALUE (TYPE_DOMAIN (inner_type));
	  tree outer_min = TYPE_MIN_VALUE (TYPE_DOMAIN (outer_type));
	  tree inner_max = TYPE_MAX_VALUE (TYPE_DOMAIN (inner_type));
	  tree outer_max = TYPE_MAX_VALUE (TYPE_DOMAIN (outer_type));

	  /* After gimplification a variable min/max value carries no
	     additional information compared to a NULL value.  All that
	     matters has been lowered to be part of the IL.  */
	  if (inner_min && TREE_CODE (inner_min) != INTEGER_CST)
	    inner_min = NULL_TREE;
	  if (outer_min && TREE_CODE (outer_min) != INTEGER_CST)
	    outer_min = NULL_TREE;
	  if (inner_max && TREE_CODE (inner_max) != INTEGER_CST)
	    inner_max = NULL_TREE;
	  if (outer_max && TREE_CODE (outer_max) != INTEGER_CST)
	    outer_max = NULL_TREE;

	  /* Conversions NULL / variable <- cst are useless, but not
	     the other way around.  */
	  if (outer_min
	      && (!inner_min
		  || !tree_int_cst_equal (inner_min, outer_min)))
	    return false;
	  if (outer_max
	      && (!inner_max
		  || !tree_int_cst_equal (inner_max, outer_max)))
	    return false;
	}

      /* Recurse on the element check.  */
      return useless_type_conversion_p (TREE_TYPE (outer_type),
					TREE_TYPE (inner_type));
    }

  /* Function and method types: return type, base class (for methods)
     and each argument type must be compatible.  */
  else if (FUNC_OR_METHOD_TYPE_P (inner_type)
	   && TREE_CODE (inner_type) == TREE_CODE (outer_type))
    {
      tree outer_parm, inner_parm;

      /* If the return types are not compatible bail out.  */
      if (!useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type)))
	return false;

      /* Method types should belong to a compatible base class.  */
      if (TREE_CODE (inner_type) == METHOD_TYPE
	  && !useless_type_conversion_p (TYPE_METHOD_BASETYPE (outer_type),
					 TYPE_METHOD_BASETYPE (inner_type)))
	return false;

      /* A conversion to an unprototyped argument list is ok.  */
      if (!prototype_p (outer_type))
	return true;

      /* If the unqualified argument types are compatible the conversion
	 is useless.  */
      if (TYPE_ARG_TYPES (outer_type) == TYPE_ARG_TYPES (inner_type))
	return true;

      for (outer_parm = TYPE_ARG_TYPES (outer_type),
	   inner_parm = TYPE_ARG_TYPES (inner_type);
	   outer_parm && inner_parm;
	   outer_parm = TREE_CHAIN (outer_parm),
	   inner_parm = TREE_CHAIN (inner_parm))
	if (!useless_type_conversion_p
	      (TYPE_MAIN_VARIANT (TREE_VALUE (outer_parm)),
	       TYPE_MAIN_VARIANT (TREE_VALUE (inner_parm))))
	  return false;

      /* If there is a mismatch in the number of arguments the functions
	 are not compatible.  */
      if (outer_parm || inner_parm)
	return false;

      /* Defer to the target if necessary.  */
      if (TYPE_ATTRIBUTES (inner_type) || TYPE_ATTRIBUTES (outer_type))
	return comp_type_attributes (outer_type, inner_type) != 0;

      return true;
    }

  /* For aggregates we rely on TYPE_CANONICAL exclusively and require
     explicit conversions for types involving to be structurally
     compared types.  */
  else if (AGGREGATE_TYPE_P (inner_type)
	   && TREE_CODE (inner_type) == TREE_CODE (outer_type))
    return TYPE_CANONICAL (inner_type)
	   && TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type);

  /* For OFFSET_TYPEs both the pointed-to type and the base type
     must be compatible.  */
  else if (TREE_CODE (inner_type) == OFFSET_TYPE
	   && TREE_CODE (outer_type) == OFFSET_TYPE)
    return useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type))
	   && useless_type_conversion_p
	        (TYPE_OFFSET_BASETYPE (outer_type),
		 TYPE_OFFSET_BASETYPE (inner_type));

  return false;
}
298 | |
299 | |
300 | /* ----- Decl related ----- */ |
301 | |
302 | /* Set sequence SEQ to be the GIMPLE body for function FN. */ |
303 | |
304 | void |
305 | gimple_set_body (tree fndecl, gimple_seq seq) |
306 | { |
307 | struct function *fn = DECL_STRUCT_FUNCTION (fndecl); |
308 | if (fn == NULL) |
309 | { |
310 | /* If FNDECL still does not have a function structure associated |
311 | with it, then it does not make sense for it to receive a |
312 | GIMPLE body. */ |
313 | gcc_assert (seq == NULL); |
314 | } |
315 | else |
316 | fn->gimple_body = seq; |
317 | } |
318 | |
319 | |
320 | /* Return the body of GIMPLE statements for function FN. After the |
321 | CFG pass, the function body doesn't exist anymore because it has |
322 | been split up into basic blocks. In this case, it returns |
323 | NULL. */ |
324 | |
325 | gimple_seq |
326 | gimple_body (tree fndecl) |
327 | { |
328 | struct function *fn = DECL_STRUCT_FUNCTION (fndecl); |
329 | return fn ? fn->gimple_body : NULL; |
330 | } |
331 | |
332 | /* Return true when FNDECL has Gimple body either in unlowered |
333 | or CFG form. */ |
334 | bool |
335 | gimple_has_body_p (tree fndecl) |
336 | { |
337 | struct function *fn = DECL_STRUCT_FUNCTION (fndecl); |
338 | return (gimple_body (fndecl) || (fn && fn->cfg && !(fn->curr_properties & PROP_rtl))); |
339 | } |
340 | |
341 | /* Return a printable name for symbol DECL. */ |
342 | |
343 | const char * |
344 | gimple_decl_printable_name (tree decl, int verbosity) |
345 | { |
346 | if (!DECL_NAME (decl)) |
347 | return NULL; |
348 | |
349 | if (HAS_DECL_ASSEMBLER_NAME_P (decl) && DECL_ASSEMBLER_NAME_SET_P (decl)) |
350 | { |
351 | int dmgl_opts = DMGL_NO_OPTS; |
352 | |
353 | if (verbosity >= 2) |
354 | { |
355 | dmgl_opts = DMGL_VERBOSE |
356 | | DMGL_ANSI |
357 | | DMGL_GNU_V3 |
358 | | DMGL_RET_POSTFIX; |
359 | if (TREE_CODE (decl) == FUNCTION_DECL) |
360 | dmgl_opts |= DMGL_PARAMS; |
361 | } |
362 | |
363 | const char *mangled_str |
364 | = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME_RAW (decl)); |
365 | const char *str = cplus_demangle_v3 (mangled: mangled_str, options: dmgl_opts); |
366 | return str ? str : mangled_str; |
367 | } |
368 | |
369 | return IDENTIFIER_POINTER (DECL_NAME (decl)); |
370 | } |
371 | |
372 | |
/* Create a new VAR_DECL named NAME of type TYPE and copy information
   from VAR to it.  VAR supplies the source location and the decl flags
   propagated below; NAME and TYPE are taken as given.  Returns the new
   decl.  */

tree
copy_var_decl (tree var, tree name, tree type)
{
  tree copy = build_decl (DECL_SOURCE_LOCATION (var), VAR_DECL, name, type);

  /* Propagate addressability, volatility and gimple-register status.  */
  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (var);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (var);
  DECL_NOT_GIMPLE_REG_P (copy) = DECL_NOT_GIMPLE_REG_P (var);
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (var);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (var);
  DECL_CONTEXT (copy) = DECL_CONTEXT (var);
  /* Mark the copy as used and already seen, and share VAR's attributes.  */
  TREE_USED (copy) = 1;
  DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
  DECL_ATTRIBUTES (copy) = DECL_ATTRIBUTES (var);
  /* Only carry over the alignment when the user requested it explicitly.  */
  if (DECL_USER_ALIGN (var))
    {
      SET_DECL_ALIGN (copy, DECL_ALIGN (var));
      DECL_USER_ALIGN (copy) = 1;
    }

  copy_warning (copy, var);
  return copy;
}
398 | |
/* Strip off a legitimate source ending from the input string NAME of
   length LEN.  Rather than having to know the names used by all of
   our front ends, we strip off an ending of a period followed by
   up to four characters.  (like ".cpp".)  */

static inline void
remove_suffix (char *name, int len)
{
  /* Look for a '.' among the last few characters, never at index 0,
     and truncate the string there if found.  */
  for (int pos = len - 2; pos >= 1 && pos >= len - 6; pos--)
    if (name[pos] == '.')
      {
	name[pos] = '\0';
	return;
      }
}
418 | |
419 | /* Create a new temporary name with PREFIX. Return an identifier. */ |
420 | |
421 | static GTY(()) unsigned int tmp_var_id_num; |
422 | |
423 | tree |
424 | create_tmp_var_name (const char *prefix) |
425 | { |
426 | char *tmp_name; |
427 | |
428 | if (prefix) |
429 | { |
430 | char *preftmp = ASTRDUP (prefix); |
431 | |
432 | remove_suffix (name: preftmp, len: strlen (s: preftmp)); |
433 | clean_symbol_name (preftmp); |
434 | |
435 | prefix = preftmp; |
436 | } |
437 | |
438 | ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T" , tmp_var_id_num++); |
439 | return get_identifier (tmp_name); |
440 | } |
441 | |
/* Create a new temporary variable declaration of type TYPE.
   Do NOT push it into the current binding.  PREFIX, if non-NULL, is
   used to derive the temporary's name; otherwise the decl is unnamed.
   Returns the new VAR_DECL.  */

tree
create_tmp_var_raw (tree type, const char *prefix)
{
  tree tmp_var;

  tmp_var = build_decl (input_location,
			VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
			type);

  /* The variable was declared by the compiler.  */
  DECL_ARTIFICIAL (tmp_var) = 1;
  /* And we don't want debug info for it.  */
  DECL_IGNORED_P (tmp_var) = 1;
  /* And we don't want even the fancy names of those printed in
     -fdump-final-insns= dumps.  */
  DECL_NAMELESS (tmp_var) = 1;

  /* Make the variable writable.  */
  TREE_READONLY (tmp_var) = 0;

  /* Temporaries have function-local scope and are always considered
     used.  */
  DECL_EXTERNAL (tmp_var) = 0;
  TREE_STATIC (tmp_var) = 0;
  TREE_USED (tmp_var) = 1;

  return tmp_var;
}
471 | |
472 | /* Create a new temporary variable declaration of type TYPE. DO push the |
473 | variable into the current binding. Further, assume that this is called |
474 | only from gimplification or optimization, at which point the creation of |
475 | certain types are bugs. */ |
476 | |
477 | tree |
478 | create_tmp_var (tree type, const char *prefix) |
479 | { |
480 | tree tmp_var; |
481 | |
482 | /* We don't allow types that are addressable (meaning we can't make copies), |
483 | or incomplete. We also used to reject every variable size objects here, |
484 | but now support those for which a constant upper bound can be obtained. |
485 | The processing for variable sizes is performed in gimple_add_tmp_var, |
486 | point at which it really matters and possibly reached via paths not going |
487 | through this function, e.g. after direct calls to create_tmp_var_raw. */ |
488 | gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type)); |
489 | |
490 | tmp_var = create_tmp_var_raw (type, prefix); |
491 | gimple_add_tmp_var (tmp_var); |
492 | return tmp_var; |
493 | } |
494 | |
495 | /* Create a new temporary variable declaration of type TYPE by calling |
496 | create_tmp_var and if TYPE is a vector or a complex number, mark the new |
497 | temporary as gimple register. */ |
498 | |
499 | tree |
500 | create_tmp_reg (tree type, const char *prefix) |
501 | { |
502 | return create_tmp_var (type, prefix); |
503 | } |
504 | |
505 | /* Create a new temporary variable declaration of type TYPE by calling |
506 | create_tmp_var and if TYPE is a vector or a complex number, mark the new |
507 | temporary as gimple register. */ |
508 | |
509 | tree |
510 | create_tmp_reg_fn (struct function *fn, tree type, const char *prefix) |
511 | { |
512 | tree tmp; |
513 | |
514 | tmp = create_tmp_var_raw (type, prefix); |
515 | gimple_add_tmp_var_fn (fn, tmp); |
516 | |
517 | return tmp; |
518 | } |
519 | |
520 | |
521 | /* ----- Expression related ----- */ |
522 | |
523 | /* Extract the operands and code for expression EXPR into *SUBCODE_P, |
524 | *OP1_P, *OP2_P and *OP3_P respectively. */ |
525 | |
526 | void |
527 | (tree expr, enum tree_code *subcode_p, tree *op1_p, |
528 | tree *op2_p, tree *op3_p) |
529 | { |
530 | *subcode_p = TREE_CODE (expr); |
531 | switch (get_gimple_rhs_class (code: *subcode_p)) |
532 | { |
533 | case GIMPLE_TERNARY_RHS: |
534 | { |
535 | *op1_p = TREE_OPERAND (expr, 0); |
536 | *op2_p = TREE_OPERAND (expr, 1); |
537 | *op3_p = TREE_OPERAND (expr, 2); |
538 | break; |
539 | } |
540 | case GIMPLE_BINARY_RHS: |
541 | { |
542 | *op1_p = TREE_OPERAND (expr, 0); |
543 | *op2_p = TREE_OPERAND (expr, 1); |
544 | *op3_p = NULL_TREE; |
545 | break; |
546 | } |
547 | case GIMPLE_UNARY_RHS: |
548 | { |
549 | *op1_p = TREE_OPERAND (expr, 0); |
550 | *op2_p = NULL_TREE; |
551 | *op3_p = NULL_TREE; |
552 | break; |
553 | } |
554 | case GIMPLE_SINGLE_RHS: |
555 | { |
556 | *op1_p = expr; |
557 | *op2_p = NULL_TREE; |
558 | *op3_p = NULL_TREE; |
559 | break; |
560 | } |
561 | default: |
562 | gcc_unreachable (); |
563 | } |
564 | } |
565 | |
566 | /* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND. */ |
567 | |
568 | void |
569 | gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p, |
570 | tree *lhs_p, tree *rhs_p) |
571 | { |
572 | gcc_assert (COMPARISON_CLASS_P (cond) |
573 | || TREE_CODE (cond) == TRUTH_NOT_EXPR |
574 | || is_gimple_min_invariant (cond) |
575 | || SSA_VAR_P (cond)); |
576 | gcc_checking_assert (!tree_could_throw_p (cond)); |
577 | |
578 | extract_ops_from_tree (expr: cond, code: code_p, op0: lhs_p, op1: rhs_p); |
579 | |
580 | /* Canonicalize conditionals of the form 'if (!VAL)'. */ |
581 | if (*code_p == TRUTH_NOT_EXPR) |
582 | { |
583 | *code_p = EQ_EXPR; |
584 | gcc_assert (*lhs_p && *rhs_p == NULL_TREE); |
585 | *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p)); |
586 | } |
587 | /* Canonicalize conditionals of the form 'if (VAL)' */ |
588 | else if (TREE_CODE_CLASS (*code_p) != tcc_comparison) |
589 | { |
590 | *code_p = NE_EXPR; |
591 | gcc_assert (*lhs_p && *rhs_p == NULL_TREE); |
592 | *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p)); |
593 | } |
594 | } |
595 | |
596 | /* Return true if T is a valid LHS for a GIMPLE assignment expression. */ |
597 | |
598 | bool |
599 | is_gimple_lvalue (tree t) |
600 | { |
601 | return (is_gimple_addressable (t) |
602 | || TREE_CODE (t) == WITH_SIZE_EXPR |
603 | /* These are complex lvalues, but don't have addresses, so they |
604 | go here. */ |
605 | || TREE_CODE (t) == BIT_FIELD_REF); |
606 | } |
607 | |
608 | /* Helper for is_gimple_condexpr and is_gimple_condexpr_for_cond. */ |
609 | |
610 | static bool |
611 | is_gimple_condexpr_1 (tree t, bool allow_traps, bool allow_cplx) |
612 | { |
613 | tree op0; |
614 | return (is_gimple_val (t) |
615 | || (COMPARISON_CLASS_P (t) |
616 | && (allow_traps || !tree_could_throw_p (t)) |
617 | && ((op0 = TREE_OPERAND (t, 0)), true) |
618 | && (allow_cplx || TREE_CODE (TREE_TYPE (op0)) != COMPLEX_TYPE) |
619 | && is_gimple_val (op0) |
620 | && is_gimple_val (TREE_OPERAND (t, 1)))); |
621 | } |
622 | |
623 | /* Like is_gimple_condexpr, but does not allow T to trap. */ |
624 | |
625 | bool |
626 | is_gimple_condexpr_for_cond (tree t) |
627 | { |
628 | return is_gimple_condexpr_1 (t, allow_traps: false, allow_cplx: true); |
629 | } |
630 | |
631 | /* Canonicalize a tree T for use in a COND_EXPR as conditional. Returns |
632 | a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if |
633 | we failed to create one. */ |
634 | |
635 | tree |
636 | canonicalize_cond_expr_cond (tree t) |
637 | { |
638 | /* Strip conversions around boolean operations. */ |
639 | if (CONVERT_EXPR_P (t) |
640 | && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0))) |
641 | || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0))) |
642 | == BOOLEAN_TYPE)) |
643 | t = TREE_OPERAND (t, 0); |
644 | |
645 | /* For !x use x == 0. */ |
646 | if (TREE_CODE (t) == TRUTH_NOT_EXPR) |
647 | { |
648 | tree top0 = TREE_OPERAND (t, 0); |
649 | t = build2 (EQ_EXPR, TREE_TYPE (t), |
650 | top0, build_int_cst (TREE_TYPE (top0), 0)); |
651 | } |
652 | /* For cmp ? 1 : 0 use cmp. */ |
653 | else if (TREE_CODE (t) == COND_EXPR |
654 | && COMPARISON_CLASS_P (TREE_OPERAND (t, 0)) |
655 | && integer_onep (TREE_OPERAND (t, 1)) |
656 | && integer_zerop (TREE_OPERAND (t, 2))) |
657 | { |
658 | tree top0 = TREE_OPERAND (t, 0); |
659 | t = build2 (TREE_CODE (top0), TREE_TYPE (t), |
660 | TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1)); |
661 | } |
662 | /* For x ^ y use x != y. */ |
663 | else if (TREE_CODE (t) == BIT_XOR_EXPR) |
664 | t = build2 (NE_EXPR, TREE_TYPE (t), |
665 | TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)); |
666 | |
667 | /* We don't know where this will be used so allow both traps and |
668 | _Complex. The caller is responsible for more precise checking. */ |
669 | if (is_gimple_condexpr_1 (t, allow_traps: true, allow_cplx: true)) |
670 | return t; |
671 | |
672 | return NULL_TREE; |
673 | } |
674 | |
675 | /* Return true if T is a gimple address. */ |
676 | |
677 | bool |
678 | is_gimple_address (const_tree t) |
679 | { |
680 | tree op; |
681 | |
682 | if (TREE_CODE (t) != ADDR_EXPR) |
683 | return false; |
684 | |
685 | op = TREE_OPERAND (t, 0); |
686 | while (handled_component_p (t: op)) |
687 | { |
688 | if ((TREE_CODE (op) == ARRAY_REF |
689 | || TREE_CODE (op) == ARRAY_RANGE_REF) |
690 | && !is_gimple_val (TREE_OPERAND (op, 1))) |
691 | return false; |
692 | |
693 | op = TREE_OPERAND (op, 0); |
694 | } |
695 | |
696 | if (CONSTANT_CLASS_P (op) |
697 | || TREE_CODE (op) == TARGET_MEM_REF |
698 | || TREE_CODE (op) == MEM_REF) |
699 | return true; |
700 | |
701 | switch (TREE_CODE (op)) |
702 | { |
703 | case PARM_DECL: |
704 | case RESULT_DECL: |
705 | case LABEL_DECL: |
706 | case FUNCTION_DECL: |
707 | case VAR_DECL: |
708 | case CONST_DECL: |
709 | return true; |
710 | |
711 | default: |
712 | return false; |
713 | } |
714 | } |
715 | |
716 | /* Return true if T is a gimple invariant address. */ |
717 | |
718 | bool |
719 | is_gimple_invariant_address (const_tree t) |
720 | { |
721 | const_tree op; |
722 | |
723 | if (TREE_CODE (t) != ADDR_EXPR) |
724 | return false; |
725 | |
726 | op = strip_invariant_refs (TREE_OPERAND (t, 0)); |
727 | if (!op) |
728 | return false; |
729 | |
730 | if (TREE_CODE (op) == MEM_REF) |
731 | { |
732 | const_tree op0 = TREE_OPERAND (op, 0); |
733 | return (TREE_CODE (op0) == ADDR_EXPR |
734 | && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0)) |
735 | || decl_address_invariant_p (TREE_OPERAND (op0, 0)))); |
736 | } |
737 | |
738 | return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op); |
739 | } |
740 | |
741 | /* Return true if T is a gimple invariant address at IPA level |
742 | (so addresses of variables on stack are not allowed). */ |
743 | |
744 | bool |
745 | is_gimple_ip_invariant_address (const_tree t) |
746 | { |
747 | const_tree op; |
748 | |
749 | if (TREE_CODE (t) != ADDR_EXPR) |
750 | return false; |
751 | |
752 | op = strip_invariant_refs (TREE_OPERAND (t, 0)); |
753 | if (!op) |
754 | return false; |
755 | |
756 | if (TREE_CODE (op) == MEM_REF) |
757 | { |
758 | const_tree op0 = TREE_OPERAND (op, 0); |
759 | return (TREE_CODE (op0) == ADDR_EXPR |
760 | && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0)) |
761 | || decl_address_ip_invariant_p (TREE_OPERAND (op0, 0)))); |
762 | } |
763 | |
764 | return CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op); |
765 | } |
766 | |
767 | /* Return true if T is a GIMPLE minimal invariant. It's a restricted |
768 | form of function invariant. */ |
769 | |
770 | bool |
771 | is_gimple_min_invariant (const_tree t) |
772 | { |
773 | if (TREE_CODE (t) == ADDR_EXPR) |
774 | return is_gimple_invariant_address (t); |
775 | |
776 | return is_gimple_constant (t); |
777 | } |
778 | |
779 | /* Return true if T is a GIMPLE interprocedural invariant. It's a restricted |
780 | form of gimple minimal invariant. */ |
781 | |
782 | bool |
783 | is_gimple_ip_invariant (const_tree t) |
784 | { |
785 | if (TREE_CODE (t) == ADDR_EXPR) |
786 | return is_gimple_ip_invariant_address (t); |
787 | |
788 | return is_gimple_constant (t); |
789 | } |
790 | |
791 | /* Return true if T is a non-aggregate register variable. */ |
792 | |
793 | bool |
794 | is_gimple_reg (tree t) |
795 | { |
796 | if (virtual_operand_p (op: t)) |
797 | return false; |
798 | |
799 | if (TREE_CODE (t) == SSA_NAME) |
800 | return true; |
801 | |
802 | if (!is_gimple_variable (t)) |
803 | return false; |
804 | |
805 | if (!is_gimple_reg_type (TREE_TYPE (t))) |
806 | return false; |
807 | |
808 | /* A volatile decl is not acceptable because we can't reuse it as |
809 | needed. We need to copy it into a temp first. */ |
810 | if (TREE_THIS_VOLATILE (t)) |
811 | return false; |
812 | |
813 | /* We define "registers" as things that can be renamed as needed, |
814 | which with our infrastructure does not apply to memory. */ |
815 | if (needs_to_live_in_memory (t)) |
816 | return false; |
817 | |
818 | /* Hard register variables are an interesting case. For those that |
819 | are call-clobbered, we don't know where all the calls are, since |
820 | we don't (want to) take into account which operations will turn |
821 | into libcalls at the rtl level. For those that are call-saved, |
822 | we don't currently model the fact that calls may in fact change |
823 | global hard registers, nor do we examine ASM_CLOBBERS at the tree |
824 | level, and so miss variable changes that might imply. All around, |
825 | it seems safest to not do too much optimization with these at the |
826 | tree level at all. We'll have to rely on the rtl optimizers to |
827 | clean this up, as there we've got all the appropriate bits exposed. */ |
828 | if (VAR_P (t) && DECL_HARD_REGISTER (t)) |
829 | return false; |
830 | |
831 | /* Variables can be marked as having partial definitions, avoid |
832 | putting them into SSA form. */ |
833 | return !DECL_NOT_GIMPLE_REG_P (t); |
834 | } |
835 | |
836 | |
837 | /* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */ |
838 | |
839 | bool |
840 | is_gimple_val (tree t) |
841 | { |
842 | /* Make loads from volatiles and memory vars explicit. */ |
843 | if (is_gimple_variable (t) |
844 | && is_gimple_reg_type (TREE_TYPE (t)) |
845 | && !is_gimple_reg (t)) |
846 | return false; |
847 | |
848 | return (is_gimple_variable (t) || is_gimple_min_invariant (t)); |
849 | } |
850 | |
851 | /* Similarly, but accept hard registers as inputs to asm statements. */ |
852 | |
853 | bool |
854 | is_gimple_asm_val (tree t) |
855 | { |
856 | if (VAR_P (t) && DECL_HARD_REGISTER (t)) |
857 | return true; |
858 | |
859 | return is_gimple_val (t); |
860 | } |
861 | |
862 | /* Return true if T is a GIMPLE minimal lvalue. */ |
863 | |
864 | bool |
865 | is_gimple_min_lval (tree t) |
866 | { |
867 | if (!(t = CONST_CAST_TREE (strip_invariant_refs (t)))) |
868 | return false; |
869 | return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF); |
870 | } |
871 | |
872 | /* Return true if T is a valid function operand of a CALL_EXPR. */ |
873 | |
874 | bool |
875 | is_gimple_call_addr (tree t) |
876 | { |
877 | return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t)); |
878 | } |
879 | |
880 | /* Return true if T is a valid address operand of a MEM_REF. */ |
881 | |
882 | bool |
883 | is_gimple_mem_ref_addr (tree t) |
884 | { |
885 | return (is_gimple_reg (t) |
886 | || TREE_CODE (t) == INTEGER_CST |
887 | || (TREE_CODE (t) == ADDR_EXPR |
888 | && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0)) |
889 | || decl_address_invariant_p (TREE_OPERAND (t, 0))))); |
890 | } |
891 | |
892 | /* Hold trees marked addressable during expand. */ |
893 | |
894 | static hash_set<tree> *mark_addressable_queue; |
895 | |
896 | /* Mark X as addressable or queue it up if called during expand. We |
897 | don't want to apply it immediately during expand because decls are |
898 | made addressable at that point due to RTL-only concerns, such as |
899 | uses of memcpy for block moves, and TREE_ADDRESSABLE changes |
900 | is_gimple_reg, which might make it seem like a variable that used |
901 | to be a gimple_reg shouldn't have been an SSA name. So we queue up |
902 | this flag setting and only apply it when we're done with GIMPLE and |
903 | only RTL issues matter. */ |
904 | |
905 | static void |
906 | mark_addressable_1 (tree x) |
907 | { |
908 | if (!currently_expanding_to_rtl) |
909 | { |
910 | TREE_ADDRESSABLE (x) = 1; |
911 | return; |
912 | } |
913 | |
914 | if (!mark_addressable_queue) |
915 | mark_addressable_queue = new hash_set<tree>(); |
916 | mark_addressable_queue->add (k: x); |
917 | } |
918 | |
/* Adaptor for mark_addressable_1 for use in hash_set traversal.
   The unused second argument and the constant false return exist only
   to satisfy the callback signature hash_set::traverse expects.  */

static bool
mark_addressable_2 (tree const &x, void * ATTRIBUTE_UNUSED = NULL)
{
  mark_addressable_1 (x);
  return false;
}
927 | |
928 | /* Mark all queued trees as addressable, and empty the queue. To be |
929 | called right after clearing CURRENTLY_EXPANDING_TO_RTL. */ |
930 | |
931 | void |
932 | flush_mark_addressable_queue () |
933 | { |
934 | gcc_assert (!currently_expanding_to_rtl); |
935 | if (mark_addressable_queue) |
936 | { |
937 | mark_addressable_queue->traverse<void*, mark_addressable_2> (NULL); |
938 | delete mark_addressable_queue; |
939 | mark_addressable_queue = NULL; |
940 | } |
941 | } |
942 | |
943 | /* Mark X addressable. Unlike the langhook we expect X to be in gimple |
944 | form and we don't do any syntax checking. */ |
945 | |
946 | void |
947 | mark_addressable (tree x) |
948 | { |
949 | if (TREE_CODE (x) == WITH_SIZE_EXPR) |
950 | x = TREE_OPERAND (x, 0); |
951 | while (handled_component_p (t: x)) |
952 | x = TREE_OPERAND (x, 0); |
953 | if ((TREE_CODE (x) == MEM_REF |
954 | || TREE_CODE (x) == TARGET_MEM_REF) |
955 | && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR) |
956 | x = TREE_OPERAND (TREE_OPERAND (x, 0), 0); |
957 | if (!VAR_P (x) |
958 | && TREE_CODE (x) != PARM_DECL |
959 | && TREE_CODE (x) != RESULT_DECL) |
960 | return; |
961 | mark_addressable_1 (x); |
962 | |
963 | /* Also mark the artificial SSA_NAME that points to the partition of X. */ |
964 | if (VAR_P (x) |
965 | && !DECL_EXTERNAL (x) |
966 | && !TREE_STATIC (x) |
967 | && cfun->gimple_df != NULL |
968 | && cfun->gimple_df->decls_to_pointers != NULL) |
969 | { |
970 | tree *namep = cfun->gimple_df->decls_to_pointers->get (k: x); |
971 | if (namep) |
972 | mark_addressable_1 (x: *namep); |
973 | } |
974 | } |
975 | |
976 | /* Returns true iff T is a valid RHS for an assignment to a renamed |
977 | user -- or front-end generated artificial -- variable. */ |
978 | |
979 | bool |
980 | is_gimple_reg_rhs (tree t) |
981 | { |
982 | return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS; |
983 | } |
984 | |
985 | #include "gt-gimple-expr.h" |
986 | |