/* SSA operands management for trees.
   Copyright (C) 2003-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "timevar.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "stmt.h"
#include "print-tree.h"
#include "dumpfile.h"
#include "value-query.h"


/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines, which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands: Defs, Uses, Virtual Uses, and Virtual May Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 4 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the new operand cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
   operand vector for VUSE, then the new vector will also be modified
   such that it contains 'a_5' rather than 'a'.  */
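
/* As an illustrative sketch (not code in this file): for a GIMPLE
   statement such as

       *p_1 = a_2 + b_3;

   the scanner records real USE operands pointing at the slots holding
   a_2 and b_3 inside the statement, and virtual operands on the single
   artificial .MEM variable created below (shown in dumps as
   .MEM_5 = VDEF <.MEM_4>) to represent the store through p_1.  */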


/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use 0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def (1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops (1 << 1)

/* Operand is in a place where address-taken does not imply addressable.  */
#define opf_non_addressable (1 << 3)

/* Operand is in a place where opf_non_addressable does not apply.  */
#define opf_not_non_addressable (1 << 4)

/* Operand is having its address taken.  */
#define opf_address_taken (1 << 5)

/* Class containing temporary per-stmt state.  */

class operands_scanner
{
public:
  operands_scanner (struct function *fun, gimple *statement)
  {
    build_vuse = NULL_TREE;
    build_vdef = NULL_TREE;
    fn = fun;
    stmt = statement;
  }

  /* Create an operands cache for STMT.  */
  void build_ssa_operands ();

  /* Verifies SSA statement operands.  */
  DEBUG_FUNCTION bool verify_ssa_operands ();

private:
  /* Disable copy and assign of this class, as it may have problems with
     build_uses vec.  */
  DISABLE_COPY_AND_ASSIGN (operands_scanner);

  /* Array for building all the use operands.  */
  auto_vec<tree *, 16> build_uses;

  /* The built VDEF operand.  */
  tree build_vdef;

  /* The built VUSE operand.  */
  tree build_vuse;

  /* Function which STMT belongs to.  */
  struct function *fn;

  /* Statement to work on.  */
  gimple *stmt;

  /* Takes elements from build_uses and turns them into use operands of
     STMT.  */
  void finalize_ssa_uses ();

  /* Clear the in_list bits and empty the build array for VDEFs and
     VUSEs.  */
  void cleanup_build_arrays ();

  /* Finalize all the build vectors, fill the new ones into INFO.  */
  void finalize_ssa_stmt_operands ();

  /* Start the process of building up operands vectors in INFO.  */
  void start_ssa_stmt_operands ();

  /* Add USE_P to the list of pointers to operands.  */
  void append_use (tree *use_p);

  /* Add VAR to the set of variables that require a VDEF operator.  */
  void append_vdef (tree var);

  /* Add VAR to the set of variables that require a VUSE operator.  */
  void append_vuse (tree var);

  /* Add virtual operands for STMT.  FLAGS is as in get_expr_operands.  */
  void add_virtual_operand (int flags);

  /* Add *VAR_P to the appropriate operand array for statement STMT.
     FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
     it will be added to the statement's real operands, otherwise it is
     added to virtual operands.  */
  void add_stmt_operand (tree *var_p, int flags);

  /* A subroutine of get_expr_operands to handle MEM_REF.

     STMT is the statement being processed, EXPR is the MEM_REF
     that got us here.

     FLAGS is as in get_expr_operands.  */
  void get_mem_ref_operands (tree expr, int flags);

  /* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */
  void get_tmr_operands (tree expr, int flags);

  /* If STMT is a call that may clobber globals and other symbols that
     escape, add them to the VDEF/VUSE lists for it.  */
  void maybe_add_call_vops (gcall *stmt);

  /* Scan operands in the ASM_EXPR stmt referred to in INFO.  */
  void get_asm_stmt_operands (gasm *stmt);

  /* Recursively scan the expression pointed to by EXPR_P in statement
     STMT.  FLAGS is one of the OPF_* constants modifying how to
     interpret the operands found.  */
  void get_expr_operands (tree *expr_p, int flags);

  /* Parse STMT looking for operands.  When finished, the various
     build_* operand vectors will have potential operands in them.  */
  void parse_ssa_operands ();

  /* Takes elements from build_defs and turns them into def operands of
     STMT.  TODO -- Make build_defs vec of tree *.  */
  void finalize_ssa_defs ();
};

/* Accessor to tree-ssa-operands.cc caches.  */
static inline struct ssa_operands *
gimple_ssa_operands (const struct function *fun)
{
  return &fun->gimple_df->ssa_operands;
}


/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (struct function *fun)
{
  if (fun == NULL)
    return false;

  return fun->gimple_df && gimple_ssa_operands (fun)->ops_active;
}
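
/* Caller-side sketch: the update_stmt wrapper (in gimple-ssa.h) guards
   on this predicate, roughly:

     if (ssa_operands_active (cfun))
       {
	 gimple_set_modified (stmt, true);
	 update_stmt_operands (cfun, stmt);
       }
*/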


/* Create the VOP variable, an artificial global variable to act as the
   representative of the FUD (factored use-def) chain of all virtual
   operands.  */

static void
create_vop_var (struct function *fn)
{
  tree global_var;

  gcc_assert (fn->gimple_df->vop == NULL_TREE);

  global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
			   get_identifier (".MEM"),
			   void_type_node);
  DECL_ARTIFICIAL (global_var) = 1;
  DECL_IGNORED_P (global_var) = 1;
  TREE_READONLY (global_var) = 0;
  DECL_EXTERNAL (global_var) = 1;
  TREE_STATIC (global_var) = 1;
  TREE_USED (global_var) = 1;
  DECL_CONTEXT (global_var) = NULL_TREE;
  TREE_THIS_VOLATILE (global_var) = 0;
  TREE_ADDRESSABLE (global_var) = 0;
  VAR_DECL_IS_VIRTUAL_OPERAND (global_var) = 1;

  fn->gimple_df->vop = global_var;
}

/* These are the sizes of the operand memory buffer in bytes which gets
   allocated each time more operands space is required.  The final value is
   the amount that is allocated every time after that.
   In 1k we can fit 25 use operands (or 63 def operands) on a host with
   8 byte pointers, that would be 10 statements each with 1 def and 2
   uses.  */

#define OP_SIZE_INIT 0
#define OP_SIZE_1 (1024 - sizeof (void *))
#define OP_SIZE_2 (1024 * 4 - sizeof (void *))
#define OP_SIZE_3 (1024 * 16 - sizeof (void *))
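
/* The allocation schedule therefore grows roughly 1k -> 4k -> 16k and
   then stays at 16k for every later buffer; see the switch in
   ssa_operand_alloc below.  */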

/* Initialize the operand cache routines.  */

void
init_ssa_operands (struct function *fn)
{
  gcc_assert (gimple_ssa_operands (fn)->operand_memory == NULL);
  gimple_ssa_operands (fn)->operand_memory_index
    = gimple_ssa_operands (fn)->ssa_operand_mem_size;
  gimple_ssa_operands (fn)->ops_active = true;
  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_INIT;
  create_vop_var (fn);
}


/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (struct function *fn)
{
  struct ssa_operand_memory_d *ptr;

  gimple_ssa_operands (fn)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (fn)->operand_memory) != NULL)
    {
      gimple_ssa_operands (fn)->operand_memory
	= gimple_ssa_operands (fn)->operand_memory->next;
      ggc_free (ptr);
    }

  gimple_ssa_operands (fn)->ops_active = false;

  fn->gimple_df->vop = NULL_TREE;
}


/* Return memory for an operand of size SIZE.  */

static inline void *
ssa_operand_alloc (struct function *fn, unsigned size)
{
  char *ptr;

  gcc_assert (size == sizeof (struct use_optype_d));

  if (gimple_ssa_operands (fn)->operand_memory_index + size
      >= gimple_ssa_operands (fn)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      switch (gimple_ssa_operands (fn)->ssa_operand_mem_size)
	{
	case OP_SIZE_INIT:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_1;
	  break;
	case OP_SIZE_1:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_2;
	  break;
	case OP_SIZE_2:
	case OP_SIZE_3:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_3;
	  break;
	default:
	  gcc_unreachable ();
	}

      ptr = (ssa_operand_memory_d *) ggc_internal_alloc
	(sizeof (void *) + gimple_ssa_operands (fn)->ssa_operand_mem_size);

      ptr->next = gimple_ssa_operands (fn)->operand_memory;
      gimple_ssa_operands (fn)->operand_memory = ptr;
      gimple_ssa_operands (fn)->operand_memory_index = 0;
    }

  ptr = &(gimple_ssa_operands (fn)->operand_memory
	  ->mem[gimple_ssa_operands (fn)->operand_memory_index]);
  gimple_ssa_operands (fn)->operand_memory_index += size;
  return ptr;
}


/* Allocate a USE operand.  */

static inline struct use_optype_d *
alloc_use (struct function *fn)
{
  struct use_optype_d *ret;
  if (gimple_ssa_operands (fn)->free_uses)
    {
      ret = gimple_ssa_operands (fn)->free_uses;
      gimple_ssa_operands (fn)->free_uses
	= gimple_ssa_operands (fn)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
	  ssa_operand_alloc (fn, sizeof (struct use_optype_d));
  return ret;
}


/* Adds OP to the list of uses of statement STMT after LAST.  */

static inline use_optype_p
add_use_op (struct function *fn, gimple *stmt, tree *op, use_optype_p last)
{
  use_optype_p new_use;

  new_use = alloc_use (fn);
  USE_OP_PTR (new_use)->use = op;
  link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
  last->next = new_use;
  new_use->next = NULL;
  return new_use;
}


/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs vec of tree *.  */

inline void
operands_scanner::finalize_ssa_defs ()
{
  /* Pre-pend the vdef we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      if (oldvdef
	  && TREE_CODE (oldvdef) == SSA_NAME)
	oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
	gimple_set_vdef (stmt, build_vdef);
    }

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name_fn (fn, gimple_vdef (stmt));
	}
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    {
      fn->gimple_df->rename_vops = 1;
      fn->gimple_df->ssa_renaming_needed = 1;
    }
}


/* Takes elements from build_uses and turns them into use operands of STMT.  */

inline void
operands_scanner::finalize_ssa_uses ()
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  /* Pre-pend the VUSE we may have built.  */
  if (build_vuse != NULL_TREE)
    {
      tree oldvuse = gimple_vuse (stmt);
      if (oldvuse
	  && TREE_CODE (oldvuse) == SSA_NAME)
	oldvuse = SSA_NAME_VAR (oldvuse);
      if (oldvuse != (build_vuse != NULL_TREE
		      ? build_vuse : build_vdef))
	gimple_set_vuse (stmt, NULL_TREE);
      build_uses.safe_insert (0, gimple_vuse_ptr (stmt));
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* Clear a no longer necessary VUSE.  */
  if (build_vuse == NULL_TREE
      && gimple_vuse (stmt) != NULL_TREE)
    gimple_set_vuse (stmt, NULL_TREE);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr->next; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      delink_imm_use (USE_OP_PTR (ptr));
      ptr->next = gimple_ssa_operands (fn)->free_uses;
      gimple_ssa_operands (fn)->free_uses = old_ops;
    }

  /* If we added a VUSE, make sure to set the operand if it is not already
     present and mark it for renaming.  */
  if (build_vuse != NULL_TREE
      && gimple_vuse (stmt) == NULL_TREE)
    {
      gimple_set_vuse (stmt, gimple_vop (fn));
      fn->gimple_df->rename_vops = 1;
      fn->gimple_df->ssa_renaming_needed = 1;
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < build_uses.length (); new_i++)
    {
      tree *op = build_uses[new_i];
      last = add_use_op (fn, stmt, op, last);
    }

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);
}


/* Clear the in_list bits and empty the build array for VDEFs and
   VUSEs.  */

inline void
operands_scanner::cleanup_build_arrays ()
{
  build_vdef = NULL_TREE;
  build_vuse = NULL_TREE;
  build_uses.truncate (0);
}


/* Finalize all the build vectors, fill the new ones into INFO.  */

inline void
operands_scanner::finalize_ssa_stmt_operands ()
{
  finalize_ssa_defs ();
  finalize_ssa_uses ();
  cleanup_build_arrays ();
}


/* Start the process of building up operands vectors in INFO.  */

inline void
operands_scanner::start_ssa_stmt_operands ()
{
  gcc_assert (build_uses.length () == 0);
  gcc_assert (build_vuse == NULL_TREE);
  gcc_assert (build_vdef == NULL_TREE);
}


/* Add USE_P to the list of pointers to operands.  */

inline void
operands_scanner::append_use (tree *use_p)
{
  build_uses.safe_push (use_p);
}


/* Add VAR to the set of variables that require a VDEF operator.  */

inline void
operands_scanner::append_vdef (tree var)
{
  gcc_assert ((build_vdef == NULL_TREE
	       || build_vdef == var)
	      && (build_vuse == NULL_TREE
		  || build_vuse == var));

  build_vdef = var;
  build_vuse = var;
}


/* Add VAR to the set of variables that require a VUSE operator.  */

inline void
operands_scanner::append_vuse (tree var)
{
  gcc_assert (build_vuse == NULL_TREE
	      || build_vuse == var);

  build_vuse = var;
}

/* Add virtual operands for STMT.  FLAGS is as in get_expr_operands.  */

void
operands_scanner::add_virtual_operand (int flags)
{
  /* Add virtual operands to the stmt, unless the caller has specifically
     requested not to do that (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  gcc_assert (!is_gimple_debug (stmt));

  if (flags & opf_def)
    append_vdef (gimple_vop (fn));
  else
    append_vuse (gimple_vop (fn));
}


/* Add *VAR_P to the appropriate operand array for statement STMT.
   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
   it will be added to the statement's real operands, otherwise it is
   added to virtual operands.  */

void
operands_scanner::add_stmt_operand (tree *var_p, int flags)
{
  tree var = *var_p;

  gcc_assert (SSA_VAR_P (*var_p)
	      || TREE_CODE (*var_p) == STRING_CST
	      || TREE_CODE (*var_p) == CONST_DECL);

  if (is_gimple_reg (var))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
	/* Real DEFs are not cached; they are pointers into the stmt
	   and are found by walking it directly, so there is nothing
	   to record here.  */
	;
      else
	append_use (var_p);
      if (DECL_P (*var_p))
	fn->gimple_df->ssa_renaming_needed = 1;
    }
  else
    {
      /* Mark statements with volatile operands.  */
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (var))
	gimple_set_has_volatile_ops (stmt, true);

      /* The variable is a memory access.  Add virtual operands.  */
      add_virtual_operand (flags);
    }
}

/* Mark the base address of REF as having its address taken.
   REF may be a single variable whose address has been taken or any
   other valid GIMPLE memory reference (structure reference, array,
   etc).  */

static void
mark_address_taken (tree ref)
{
  tree var;

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (VAR_P (var)
      || TREE_CODE (var) == RESULT_DECL
      || TREE_CODE (var) == PARM_DECL)
    TREE_ADDRESSABLE (var) = 1;
}


/* A subroutine of get_expr_operands to handle MEM_REF.

   STMT is the statement being processed, EXPR is the MEM_REF
   that got us here.

   FLAGS is as in get_expr_operands.  */

void
operands_scanner::get_mem_ref_operands (tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);

  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* Add the VOP.  */
  add_virtual_operand (flags);

  /* If requested, add a USE operand for the base pointer.  */
  get_expr_operands (pptr,
		     opf_non_addressable | opf_use
		     | (flags & (opf_no_vops|opf_not_non_addressable)));
}


/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

void
operands_scanner::get_tmr_operands (tree expr, int flags)
{
  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* First record the real operands.  */
  get_expr_operands (&TMR_BASE (expr),
		     opf_non_addressable | opf_use
		     | (flags & (opf_no_vops|opf_not_non_addressable)));
  get_expr_operands (&TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (&TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));

  add_virtual_operand (flags);
}
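
/* For instance (a sketch): a TARGET_MEM_REF describing the access
   base p_1 + index i_2 * step 4 yields real USEs of p_1 and i_2 plus
   a virtual operand for the memory access itself.  */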


/* If STMT is a call that may clobber globals and other symbols that
   escape, add them to the VDEF/VUSE lists for it.  */

void
operands_scanner::maybe_add_call_vops (gcall *stmt)
{
  int call_flags = gimple_call_flags (stmt);

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (!(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.  */
      if (!(call_flags & (ECF_PURE | ECF_CONST)))
	add_virtual_operand (opf_def);
      else if (!(call_flags & ECF_CONST))
	add_virtual_operand (opf_use);
    }
}
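
/* In other words (a sketch of the resulting operands): an ordinary call
   gets a VDEF (which implies a VUSE, see append_vdef), a pure call gets
   only a VUSE, and a const or ECF_NOVOPS call gets no virtual operands
   at all.  */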


/* Scan operands in the ASM_EXPR stmt referred to in INFO.  */

void
operands_scanner::get_asm_stmt_operands (gasm *stmt)
{
  size_t i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree link = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
			       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (&TREE_VALUE (link),
			 opf_def | opf_not_non_addressable);
    }

  /* Gather all input operands.  */
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree link = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
			      &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (&TREE_VALUE (link), opf_not_non_addressable);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  if (gimple_asm_clobbers_memory_p (stmt))
    add_virtual_operand (opf_def);
}


/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

void
operands_scanner::get_expr_operands (tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;
  int uflags = opf_use;

  if (expr == NULL)
    return;

  if (is_gimple_debug (stmt))
    uflags |= (flags & opf_no_vops);

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the statement takes its
	 address will be of interest to some passes (e.g. alias
	 resolution).  */
      if ((!(flags & opf_non_addressable)
	   || (flags & opf_not_non_addressable))
	  && !is_gimple_debug (stmt))
	mark_address_taken (TREE_OPERAND (expr, 0));

      /* Otherwise, there may be variables referenced inside but there
	 should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find
	 here are ARRAY_REF indices which will always be real operands
	 (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (&TREE_OPERAND (expr, 0),
			 flags | opf_not_non_addressable | opf_address_taken);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case STRING_CST:
    case CONST_DECL:
      if (!(flags & opf_address_taken))
	add_stmt_operand (expr_p, flags);
      return;

    case DEBUG_EXPR_DECL:
      gcc_assert (gimple_debug_bind_p (stmt));
      return;

    case MEM_REF:
      get_mem_ref_operands (expr, flags);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	get_expr_operands (&TREE_OPERAND (expr, 0), flags);

	if (code == COMPONENT_REF)
	  get_expr_operands (&TREE_OPERAND (expr, 2), uflags);
	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
	  {
	    get_expr_operands (&TREE_OPERAND (expr, 1), uflags);
	    get_expr_operands (&TREE_OPERAND (expr, 2), uflags);
	    get_expr_operands (&TREE_OPERAND (expr, 3), uflags);
	  }

	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (&TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (&TREE_OPERAND (expr, 0), flags);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
      get_expr_operands (&TREE_OPERAND (expr, 0), uflags);
      get_expr_operands (&TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (&TREE_OPERAND (expr, 2), uflags);
      return;

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	/* A volatile constructor is actually TREE_CLOBBER_P, transfer
	   the volatility to the statement, don't use TREE_CLOBBER_P for
	   mirroring the other uses of THIS_VOLATILE in this file.  */
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	for (idx = 0;
	     vec_safe_iterate (CONSTRUCTOR_ELTS (expr), idx, &ce);
	     idx++)
	  get_expr_operands (&ce->value, uflags);

	return;
      }

    case BIT_FIELD_REF:
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (expr))
	gimple_set_has_volatile_ops (stmt, true);
      /* FALLTHRU */

    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (&TREE_OPERAND (expr, 0), flags);
      return;

    case BIT_INSERT_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    do_binary:
      {
	get_expr_operands (&TREE_OPERAND (expr, 0), flags);
	get_expr_operands (&TREE_OPERAND (expr, 1), flags);
	return;
      }

    case DOT_PROD_EXPR:
    case SAD_EXPR:
    case REALIGN_LOAD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      {
	get_expr_operands (&TREE_OPERAND (expr, 0), flags);
	get_expr_operands (&TREE_OPERAND (expr, 1), flags);
	get_expr_operands (&TREE_OPERAND (expr, 2), flags);
	return;
      }

    case FUNCTION_DECL:
    case LABEL_DECL:
    case CASE_LABEL_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (codeclass == tcc_unary)
	goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
	goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
  if (flag_checking)
    {
      fprintf (stderr, "unhandled expression in get_expr_operands():\n");
      debug_tree (expr);
      fputs ("\n", stderr);
      gcc_unreachable ();
    }
}


/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

void
operands_scanner::parse_ssa_operands ()
{
  enum gimple_code code = gimple_code (stmt);
  size_t i, n, start = 0;

  switch (code)
    {
    case GIMPLE_ASM:
      get_asm_stmt_operands (as_a <gasm *> (stmt));
      break;

    case GIMPLE_TRANSACTION:
      /* The start of a transaction is a memory barrier.  */
      add_virtual_operand (opf_def | opf_use);
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt)
	  && gimple_debug_bind_has_value_p (stmt))
	get_expr_operands (gimple_debug_bind_get_value_ptr (stmt),
			   opf_use | opf_no_vops);
      break;

    case GIMPLE_RETURN:
      append_vuse (gimple_vop (fn));
      goto do_default;

    case GIMPLE_CALL:
      /* Add call-clobbered operands, if needed.  */
      maybe_add_call_vops (as_a <gcall *> (stmt));
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      get_expr_operands (gimple_op_ptr (stmt, 0), opf_def);
      start = 1;
      /* FALLTHRU */

    default:
    do_default:
      n = gimple_num_ops (stmt);
      for (i = start; i < n; i++)
	get_expr_operands (gimple_op_ptr (stmt, i), opf_use);
      break;
    }
}
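
/* For example (a sketch): for the call statement

       x_1 = foo (a_2);

   the GIMPLE_CALL case first lets maybe_add_call_vops add any virtual
   operands, then scans operand 0 (the LHS x_1) with opf_def and the
   remaining operands, including a_2, with opf_use.  */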


/* Create an operands cache for STMT.  */

void
operands_scanner::build_ssa_operands ()
{
  /* Initially assume that the statement has no volatile operands.  */
  gimple_set_has_volatile_ops (stmt, false);

  start_ssa_stmt_operands ();
  parse_ssa_operands ();
  finalize_ssa_stmt_operands ();
}

/* Verifies SSA statement operands.  */

DEBUG_FUNCTION bool
operands_scanner::verify_ssa_operands ()
{
  use_operand_p use_p;
  def_operand_p def_p;
  ssa_op_iter iter;
  unsigned i;
  tree def;
  bool volatile_p = gimple_has_volatile_ops (stmt);

  /* build_ssa_operands w/o finalizing them.  */
  gimple_set_has_volatile_ops (stmt, false);
  start_ssa_stmt_operands ();
  parse_ssa_operands ();

  /* Now verify the built operands are the same as present in STMT.  */
  def = gimple_vdef (stmt);
  if (def
      && TREE_CODE (def) == SSA_NAME)
    def = SSA_NAME_VAR (def);
  if (build_vdef != def)
    {
      error ("virtual definition of statement not up to date");
      return true;
    }
  if (gimple_vdef (stmt)
      && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P
	  || DEF_FROM_PTR (def_p) != gimple_vdef (stmt)))
    {
      error ("virtual def operand missing for statement");
      return true;
    }

  tree use = gimple_vuse (stmt);
  if (use
      && TREE_CODE (use) == SSA_NAME)
    use = SSA_NAME_VAR (use);
  if (build_vuse != use)
    {
      error ("virtual use of statement not up to date");
      return true;
    }
  if (gimple_vuse (stmt)
      && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P
	  || USE_FROM_PTR (use_p) != gimple_vuse (stmt)))
    {
      error ("virtual use operand missing for statement");
      return true;
    }

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      tree *op;
      FOR_EACH_VEC_ELT (build_uses, i, op)
	{
	  if (use_p->use == op)
	    {
	      build_uses[i] = NULL;
	      break;
	    }
	}
      if (i == build_uses.length ())
	{
	  error ("excess use operand for statement");
	  debug_generic_expr (USE_FROM_PTR (use_p));
	  return true;
	}
    }

  tree *op;
  FOR_EACH_VEC_ELT (build_uses, i, op)
    if (op != NULL)
      {
	error ("use operand missing for statement");
	debug_generic_expr (*op);
	return true;
      }

  if (gimple_has_volatile_ops (stmt) != volatile_p)
    {
      error ("statement volatile flag not up to date");
      return true;
    }

  cleanup_build_arrays ();
  return false;
}

/* Interface for external use.  */

DEBUG_FUNCTION bool
verify_ssa_operands (struct function *fn, gimple *stmt)
{
  return operands_scanner (fn, stmt).verify_ssa_operands ();
}


/* Releases the operands of STMT back to their freelists, and clears
   the stmt operand lists.  */

void
free_stmt_operands (struct function *fn, gimple *stmt)
{
  use_optype_p uses = gimple_use_ops (stmt), last_use;

  if (uses)
    {
      for (last_use = uses; last_use->next; last_use = last_use->next)
	delink_imm_use (USE_OP_PTR (last_use));
      delink_imm_use (USE_OP_PTR (last_use));
      last_use->next = gimple_ssa_operands (fn)->free_uses;
      gimple_ssa_operands (fn)->free_uses = uses;
      gimple_set_use_ops (stmt, NULL);
    }

  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vuse (stmt, NULL_TREE);
      gimple_set_vdef (stmt, NULL_TREE);
    }
}


/* Get the operands of statement STMT.  */

void
update_stmt_operands (struct function *fn, gimple *stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active (fn))
    return;

  timevar_push (TV_TREE_OPS);

  gcc_assert (gimple_modified_p (stmt));
  operands_scanner (fn, stmt).build_ssa_operands ();
  gimple_set_modified (stmt, false);
  // Inform the active range query an update has happened.
  get_range_query (fn)->update_stmt (stmt);

  timevar_pop (TV_TREE_OPS);
}
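
/* Passes normally reach this through the update_stmt wrapper after
   changing a statement in place, e.g. (a sketch, where new_rhs is a
   hypothetical replacement operand):

     gimple_assign_set_rhs1 (stmt, new_rhs);
     update_stmt (stmt);
*/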


/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
   to test the validity of the swap operation.  */

void
swap_ssa_operands (gimple *stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  if (op0 != op1)
    {
      /* Attempt to preserve the relative positions of these two operands in
	 their respective immediate use lists by adjusting their use pointer
	 to point to the new operand position.  */
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp0)
	  {
	    use0 = ptr;
	    break;
	  }

      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp1)
	  {
	    use1 = ptr;
	    break;
	  }

      /* And adjust their location to point to the new position of the
	 operand.  */
      if (use0)
	USE_OP_PTR (use0)->use = exp1;
      if (use1)
	USE_OP_PTR (use1)->use = exp0;

      /* Now swap the data.  */
      *exp0 = op1;
      *exp1 = op0;
    }
}
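
/* For example, a pass canonicalizing a commutative operation might do
   (a sketch):

     swap_ssa_operands (stmt,
			gimple_assign_rhs1_ptr (stmt),
			gimple_assign_rhs2_ptr (stmt));
*/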


/* Scan the immediate_use list for VAR making sure it is linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

DEBUG_FUNCTION bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  unsigned int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	{
	  fprintf (f, "prev != ptr->prev\n");
	  goto error;
	}

      if (ptr->use == NULL)
	{
	  fprintf (f, "ptr->use == NULL\n");
	  goto error; /* 2 roots, or SAFE guard node.  */
	}
      else if (*(ptr->use) != var)
	{
	  fprintf (f, "*(ptr->use) != var\n");
	  goto error;
	}

      prev = ptr;
      ptr = ptr->next;

      count++;
      if (count == 0)
	{
	  fprintf (f, "number of immediate uses doesn't fit unsigned int\n");
	  goto error;
	}
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	{
	  fprintf (f, "prev != ptr->next\n");
	  goto error;
	}
      prev = ptr;
      ptr = ptr->prev;
      if (count == 0)
	{
	  fprintf (f, "count-- < 0\n");
	  goto error;
	}
      count--;
    }

  if (count != 0)
    {
      fprintf (f, "count != 0\n");
      goto error;
    }

  return false;

 error:
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}


/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else if (has_single_use (var))
    fprintf (file, " single use.\n");
  else
    fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->loc.stmt == NULL && use_p->use == NULL)
	fprintf (file, "***end of stmt iterator marker***\n");
      else if (!is_gimple_reg (USE_FROM_PTR (use_p)))
	print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS);
      else
	print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
    }
  fprintf (file, "\n");
}


/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  FOR_EACH_SSA_NAME (x, var, cfun)
    {
      dump_immediate_uses_for (file, var);
    }
}


/* Dump def-use edges on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump def-use edges for VAR on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}


/* Unlink STMT's virtual definition from the IL by propagating its use.  */

void
unlink_stmt_vdef (gimple *stmt)
{
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple *use_stmt;
  tree vdef = gimple_vdef (stmt);
  tree vuse = gimple_vuse (stmt);

  if (!vdef
      || TREE_CODE (vdef) != SSA_NAME)
    return;

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	SET_USE (use_p, vuse);
    }

  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef))
    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
}
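
/* A typical caller removing a dead store first rewires the virtual
   use-def chain and then deletes the statement, e.g. (a sketch, with
   gsi a gimple_stmt_iterator positioned at stmt):

     unlink_stmt_vdef (stmt);
     gsi_remove (&gsi, true);
     release_defs (stmt);
*/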

/* Return true if the var whose chain of uses starts at HEAD has a
   single nondebug use.  Set USE_P and STMT to that single nondebug
   use, if so, or to NULL otherwise.  */
bool
single_imm_use_1 (const ssa_use_operand_t *head,
		  use_operand_p *use_p, gimple **stmt)
{
  ssa_use_operand_t *ptr, *single_use = 0;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (USE_STMT (ptr) && !is_gimple_debug (USE_STMT (ptr)))
      {
	if (single_use)
	  {
	    single_use = NULL;
	    break;
	  }
	single_use = ptr;
      }

  if (use_p)
    *use_p = single_use;

  if (stmt)
    *stmt = single_use ? single_use->loc.stmt : NULL;

  return single_use;
}
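
/* Callers normally use the single_imm_use wrapper from ssa-iterators.h
   rather than this helper, e.g. (a sketch):

     use_operand_p use_p;
     gimple *use_stmt;
     if (single_imm_use (name, &use_p, &use_stmt))
       ... NAME has exactly one nondebug use, in USE_STMT ...
*/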