1 | /* Tree based points-to analysis |
2 | Copyright (C) 2005-2025 Free Software Foundation, Inc. |
3 | Contributed by Daniel Berlin <dberlin@dberlin.org> |
4 | |
5 | This file is part of GCC. |
6 | |
7 | GCC is free software; you can redistribute it and/or modify |
8 | under the terms of the GNU General Public License as published by |
9 | the Free Software Foundation; either version 3 of the License, or |
10 | (at your option) any later version. |
11 | |
12 | GCC is distributed in the hope that it will be useful, |
13 | but WITHOUT ANY WARRANTY; without even the implied warranty of |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
15 | GNU General Public License for more details. |
16 | |
17 | You should have received a copy of the GNU General Public License |
18 | along with GCC; see the file COPYING3. If not see |
19 | <http://www.gnu.org/licenses/>. */ |
20 | |
21 | #include "config.h" |
22 | #include "system.h" |
23 | #include "coretypes.h" |
24 | #include "backend.h" |
25 | #include "rtl.h" |
26 | #include "tree.h" |
27 | #include "gimple.h" |
28 | #include "alloc-pool.h" |
29 | #include "tree-pass.h" |
30 | #include "ssa.h" |
31 | #include "cgraph.h" |
32 | #include "tree-pretty-print.h" |
33 | #include "diagnostic-core.h" |
34 | #include "fold-const.h" |
35 | #include "stor-layout.h" |
36 | #include "stmt.h" |
37 | #include "gimple-iterator.h" |
38 | #include "tree-into-ssa.h" |
39 | #include "tree-dfa.h" |
40 | #include "gimple-walk.h" |
41 | #include "varasm.h" |
42 | #include "stringpool.h" |
43 | #include "attribs.h" |
44 | #include "tree-ssa.h" |
45 | #include "tree-cfg.h" |
46 | #include "gimple-range.h" |
47 | #include "ipa-modref-tree.h" |
48 | #include "ipa-modref.h" |
49 | #include "attr-fnspec.h" |
50 | |
51 | /* The idea behind this analyzer is to generate set constraints from the |
52 | program, then solve the resulting constraints in order to generate the |
53 | points-to sets. |
54 | |
55 | Set constraints are a way of modeling program analysis problems that |
56 | involve sets. They consist of an inclusion constraint language, |
57 | describing the variables (each variable is a set) and operations that |
58 | are involved on the variables, and a set of rules that derive facts |
59 | from these operations. To solve a system of set constraints, you derive |
60 | all possible facts under the rules, which gives you the correct sets |
61 | as a consequence. |
62 | |
63 | See "Efficient Field-sensitive pointer analysis for C" by "David |
64 | J. Pearce and Paul H. J. Kelly and Chris Hankin", at |
65 | http://citeseer.ist.psu.edu/pearce04efficient.html |
66 | |
67 | Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines |
68 | of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at |
69 | http://citeseer.ist.psu.edu/heintze01ultrafast.html |
70 | |
71 | There are three types of real constraint expressions, DEREF, |
72 | ADDRESSOF, and SCALAR. Each constraint expression consists |
73 | of a constraint type, a variable, and an offset. |
74 | |
75 | SCALAR is a constraint expression type used to represent x, whether |
76 | it appears on the LHS or the RHS of a statement. |
77 | DEREF is a constraint expression type used to represent *x, whether |
78 | it appears on the LHS or the RHS of a statement. |
79 | ADDRESSOF is a constraint expression used to represent &x, whether |
80 | it appears on the LHS or the RHS of a statement. |
81 | |
82 | Each pointer variable in the program is assigned an integer id, and |
83 | each field of a structure variable is assigned an integer id as well. |
84 | |
85 | Structure variables are linked to their list of fields through a "next |
86 | field" in each variable that points to the next field in offset |
87 | order. |
88 | Each variable for a structure field has |
89 | |
90 | 1. "size", that tells the size in bits of that field. |
91 | 2. "fullsize", that tells the size in bits of the entire structure. |
92 | 3. "offset", that tells the offset in bits from the beginning of the |
93 | structure to this field. |
94 | |
95 | Thus, |
96 | struct f |
97 | { |
98 | int a; |
99 | int b; |
100 | } foo; |
101 | int *bar; |
102 | |
103 | looks like |
104 | |
105 | foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b |
106 | foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL |
107 | bar -> id 3, size 32, offset 0, fullsize 32, next NULL |
108 | |
109 | |
110 | In order to solve the system of set constraints, the following is |
111 | done: |
112 | |
113 | 1. Each constraint variable x has a solution set associated with it, |
114 | Sol(x). |
115 | |
116 | 2. Constraints are separated into direct, copy, and complex. |
117 | Direct constraints are ADDRESSOF constraints that require no extra |
118 | processing, such as P = &Q |
119 | Copy constraints are those of the form P = Q. |
120 | Complex constraints are all the constraints involving dereferences |
121 | and offsets (including offsetted copies). |
122 | |
123 | 3. All direct constraints of the form P = &Q are processed, such |
124 | that Q is added to Sol(P) |
125 | |
126 | 4. All complex constraints for a given constraint variable are stored in a |
127 | linked list attached to that variable's node. |
128 | |
129 | 5. A directed graph is built out of the copy constraints. Each |
130 | constraint variable is a node in the graph, and an edge from |
131 | Q to P is added for each copy constraint of the form P = Q |
132 | |
133 | 6. The graph is then walked, and solution sets are |
134 | propagated along the copy edges, such that an edge from Q to P |
135 | causes Sol(P) <- Sol(P) union Sol(Q). |
136 | |
137 | 7. As we visit each node, all complex constraints associated with |
138 | that node are processed by adding appropriate copy edges to the graph, or the |
139 | appropriate variables to the solution set. |
140 | |
141 | 8. The process of walking the graph is iterated until no solution |
142 | sets change. |
143 | |
144 | Prior to walking the graph in steps 6 and 7, We perform static |
145 | cycle elimination on the constraint graph, as well |
146 | as off-line variable substitution. |
147 | |
148 | TODO: Adding offsets to pointer-to-structures can be handled (IE not punted |
149 | on and turned into anything), but isn't. You can just see what offset |
150 | inside the pointed-to struct it's going to access. |
151 | |
152 | TODO: Constant bounded arrays can be handled as if they were structs of the |
153 | same number of elements. |
154 | |
155 | TODO: Modeling heap and incoming pointers becomes much better if we |
156 | add fields to them as we discover them, which we could do. |
157 | |
158 | TODO: We could handle unions, but to be honest, it's probably not |
159 | worth the pain or slowdown. */ |
160 | |
161 | /* IPA-PTA optimizations possible. |
162 | |
163 | When the indirect function called is ANYTHING we can add disambiguation |
164 | based on the function signatures (or simply the parameter count which |
165 | is the varinfo size). We also do not need to consider functions that |
166 | do not have their address taken. |
167 | |
168 | The is_global_var bit which marks escape points is overly conservative |
169 | in IPA mode. Split it to is_escape_point and is_global_var - only |
170 | externally visible globals are escape points in IPA mode. |
171 | There is now is_ipa_escape_point but this is only used in a few |
172 | selected places. |
173 | |
174 | The way we introduce DECL_PT_UID to avoid fixing up all points-to |
175 | sets in the translation unit when we copy a DECL during inlining |
176 | pessimizes precision. The advantage is that the DECL_PT_UID keeps |
177 | compile-time and memory usage overhead low - the points-to sets |
178 | do not grow or get unshared as they would during a fixup phase. |
179 | An alternative solution is to delay IPA PTA until after all |
180 | inlining transformations have been applied. |
181 | |
182 | The way we propagate clobber/use information isn't optimized. |
183 | It should use a new complex constraint that properly filters |
184 | out local variables of the callee (though that would make |
185 | the sets invalid after inlining). OTOH we might as well |
186 | admit defeat to WHOPR and simply do all the clobber/use analysis |
187 | and propagation after PTA finished but before we threw away |
188 | points-to information for memory variables. WHOPR and PTA |
189 | do not play along well anyway - the whole constraint solving |
190 | would need to be done in WPA phase and it will be very interesting |
191 | to apply the results to local SSA names during LTRANS phase. |
192 | |
193 | We probably should compute a per-function unit-ESCAPE solution |
194 | propagating it simply like the clobber / uses solutions. The |
195 | solution can go alongside the non-IPA escaped solution and be |
196 | used to query which vars escape the unit through a function. |
197 | This is also required to make the escaped-HEAP trick work in IPA mode. |
198 | |
199 | We never put function decls in points-to sets so we do not |
200 | keep the set of called functions for indirect calls. |
201 | |
202 | And probably more. */ |
203 | |
204 | static bool use_field_sensitive = true; |
205 | static int in_ipa_mode = 0; |
206 | |
207 | /* Used for predecessor bitmaps. */ |
208 | static bitmap_obstack predbitmap_obstack; |
209 | |
210 | /* Used for points-to sets. */ |
211 | static bitmap_obstack pta_obstack; |
212 | |
213 | /* Used for oldsolution members of variables. */ |
214 | static bitmap_obstack oldpta_obstack; |
215 | |
216 | /* Used for per-solver-iteration bitmaps. */ |
217 | static bitmap_obstack iteration_obstack; |
218 | |
219 | static unsigned int create_variable_info_for (tree, const char *, bool); |
220 | typedef struct constraint_graph *constraint_graph_t; |
221 | static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool); |
222 | |
223 | struct constraint; |
224 | typedef struct constraint *constraint_t; |
225 | |
226 | |
227 | #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \ |
228 | if (a) \ |
229 | EXECUTE_IF_SET_IN_BITMAP (a, b, c, d) |
230 | |
231 | static struct constraint_stats |
232 | { |
233 | unsigned int total_vars; |
234 | unsigned int nonpointer_vars; |
235 | unsigned int unified_vars_static; |
236 | unsigned int unified_vars_dynamic; |
237 | unsigned int iterations; |
238 | unsigned int num_edges; |
239 | unsigned int num_implicit_edges; |
240 | unsigned int num_avoided_edges; |
241 | unsigned int points_to_sets_created; |
242 | } stats; |
243 | |
244 | struct variable_info |
245 | { |
246 | /* ID of this variable */ |
247 | unsigned int id; |
248 | |
249 | /* True if this is a variable created by the constraint analysis, such as |
250 | heap variables and constraints we had to break up. */ |
251 | unsigned int is_artificial_var : 1; |
252 | |
253 | /* True if this is a special variable whose solution set should not be |
254 | changed. */ |
255 | unsigned int is_special_var : 1; |
256 | |
257 | /* True for variables whose size is not known or variable. */ |
258 | unsigned int is_unknown_size_var : 1; |
259 | |
260 | /* True for (sub-)fields that represent a whole variable. */ |
261 | unsigned int is_full_var : 1; |
262 | |
263 | /* True if this is a heap variable. */ |
264 | unsigned int is_heap_var : 1; |
265 | |
266 | /* True if this is a register variable. */ |
267 | unsigned int is_reg_var : 1; |
268 | |
269 | /* True if this field may contain pointers. */ |
270 | unsigned int may_have_pointers : 1; |
271 | |
272 | /* True if this field has only restrict qualified pointers. */ |
273 | unsigned int only_restrict_pointers : 1; |
274 | |
275 | /* True if this represents a heap var created for a restrict qualified |
276 | pointer. */ |
277 | unsigned int is_restrict_var : 1; |
278 | |
279 | /* True if this represents a global variable. */ |
280 | unsigned int is_global_var : 1; |
281 | |
282 | /* True if this represents a module escape point for IPA analysis. */ |
283 | unsigned int is_ipa_escape_point : 1; |
284 | |
285 | /* True if this represents a IPA function info. */ |
286 | unsigned int is_fn_info : 1; |
287 | |
288 | /* True if this appears as RHS in a ADDRESSOF constraint. */ |
289 | unsigned int address_taken : 1; |
290 | |
291 | /* ??? Store somewhere better. */ |
292 | unsigned short ruid; |
293 | |
294 | /* The ID of the variable for the next field in this structure |
295 | or zero for the last field in this structure. */ |
296 | unsigned next; |
297 | |
298 | /* The ID of the variable for the first field in this structure. */ |
299 | unsigned head; |
300 | |
301 | /* Offset of this variable, in bits, from the base variable */ |
302 | unsigned HOST_WIDE_INT offset; |
303 | |
304 | /* Size of the variable, in bits. */ |
305 | unsigned HOST_WIDE_INT size; |
306 | |
307 | /* Full size of the base variable, in bits. */ |
308 | unsigned HOST_WIDE_INT fullsize; |
309 | |
310 | /* In IPA mode the shadow UID in case the variable needs to be duplicated in |
311 | the final points-to solution because it reaches its containing |
312 | function recursively. Zero if none is needed. */ |
313 | unsigned int shadow_var_uid; |
314 | |
315 | /* Name of this variable */ |
316 | const char *name; |
317 | |
318 | /* Tree that this variable is associated with. */ |
319 | tree decl; |
320 | |
321 | /* Points-to set for this variable. */ |
322 | bitmap solution; |
323 | |
324 | /* Old points-to set for this variable. */ |
325 | bitmap oldsolution; |
326 | }; |
327 | typedef struct variable_info *varinfo_t; |
328 | |
329 | static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT); |
330 | static varinfo_t first_or_preceding_vi_for_offset (varinfo_t, |
331 | unsigned HOST_WIDE_INT); |
332 | static varinfo_t lookup_vi_for_tree (tree); |
333 | static inline bool type_can_have_subvars (const_tree); |
334 | static void make_param_constraints (varinfo_t); |
335 | |
336 | /* Pool of variable info structures. */ |
337 | static object_allocator<variable_info> variable_info_pool |
338 | ("Variable info pool" ); |
339 | |
340 | /* Map varinfo to final pt_solution. */ |
341 | static hash_map<varinfo_t, pt_solution *> *final_solutions; |
342 | struct obstack final_solutions_obstack; |
343 | |
344 | /* Table of variable info structures for constraint variables. |
345 | Indexed directly by variable info id. */ |
346 | static vec<varinfo_t> varmap; |
347 | |
348 | /* Return the varmap element N */ |
349 | |
350 | static inline varinfo_t |
351 | get_varinfo (unsigned int n) |
352 | { |
353 | return varmap[n]; |
354 | } |
355 | |
356 | /* Return the next variable in the list of sub-variables of VI |
357 | or NULL if VI is the last sub-variable. */ |
358 | |
359 | static inline varinfo_t |
360 | vi_next (varinfo_t vi) |
361 | { |
362 | return get_varinfo (n: vi->next); |
363 | } |
364 | |
365 | /* Static IDs for the special variables. Variable ID zero is unused |
366 | and used as terminator for the sub-variable chain. */ |
367 | enum { nothing_id = 1, anything_id = 2, string_id = 3, |
368 | escaped_id = 4, nonlocal_id = 5, escaped_return_id = 6, |
369 | storedanything_id = 7, integer_id = 8 }; |
370 | |
371 | /* Return a new variable info structure consisting for a variable |
372 | named NAME, and using constraint graph node NODE. Append it |
373 | to the vector of variable info structures. */ |
374 | |
375 | static varinfo_t |
376 | new_var_info (tree t, const char *name, bool add_id) |
377 | { |
378 | unsigned index = varmap.length (); |
379 | varinfo_t ret = variable_info_pool.allocate (); |
380 | |
381 | if (dump_file && add_id) |
382 | { |
383 | char *tempname = xasprintf ("%s(%d)" , name, index); |
384 | name = ggc_strdup (tempname); |
385 | free (ptr: tempname); |
386 | } |
387 | |
388 | ret->id = index; |
389 | ret->name = name; |
390 | ret->decl = t; |
391 | /* Vars without decl are artificial and do not have sub-variables. */ |
392 | ret->is_artificial_var = (t == NULL_TREE); |
393 | ret->is_special_var = false; |
394 | ret->is_unknown_size_var = false; |
395 | ret->is_full_var = (t == NULL_TREE); |
396 | ret->is_heap_var = false; |
397 | ret->may_have_pointers = true; |
398 | ret->only_restrict_pointers = false; |
399 | ret->is_restrict_var = false; |
400 | ret->ruid = 0; |
401 | ret->is_global_var = (t == NULL_TREE); |
402 | ret->is_ipa_escape_point = false; |
403 | ret->is_fn_info = false; |
404 | ret->address_taken = false; |
405 | if (t && DECL_P (t)) |
406 | ret->is_global_var = (is_global_var (t) |
407 | /* We have to treat even local register variables |
408 | as escape points. */ |
409 | || (VAR_P (t) && DECL_HARD_REGISTER (t))); |
410 | ret->is_reg_var = (t && TREE_CODE (t) == SSA_NAME); |
411 | ret->solution = BITMAP_ALLOC (obstack: &pta_obstack); |
412 | ret->oldsolution = NULL; |
413 | ret->next = 0; |
414 | ret->shadow_var_uid = 0; |
415 | ret->head = ret->id; |
416 | |
417 | stats.total_vars++; |
418 | |
419 | varmap.safe_push (obj: ret); |
420 | |
421 | return ret; |
422 | } |
423 | |
424 | /* A map mapping call statements to per-stmt variables for uses |
425 | and clobbers specific to the call. */ |
426 | static hash_map<gimple *, varinfo_t> *call_stmt_vars; |
427 | |
428 | /* Lookup or create the variable for the call statement CALL. */ |
429 | |
430 | static varinfo_t |
431 | get_call_vi (gcall *call) |
432 | { |
433 | varinfo_t vi, vi2; |
434 | |
435 | bool existed; |
436 | varinfo_t *slot_p = &call_stmt_vars->get_or_insert (k: call, existed: &existed); |
437 | if (existed) |
438 | return *slot_p; |
439 | |
440 | vi = new_var_info (NULL_TREE, name: "CALLUSED" , add_id: true); |
441 | vi->offset = 0; |
442 | vi->size = 1; |
443 | vi->fullsize = 2; |
444 | vi->is_full_var = true; |
445 | vi->is_reg_var = true; |
446 | |
447 | vi2 = new_var_info (NULL_TREE, name: "CALLCLOBBERED" , add_id: true); |
448 | vi2->offset = 1; |
449 | vi2->size = 1; |
450 | vi2->fullsize = 2; |
451 | vi2->is_full_var = true; |
452 | vi2->is_reg_var = true; |
453 | |
454 | vi->next = vi2->id; |
455 | |
456 | *slot_p = vi; |
457 | return vi; |
458 | } |
459 | |
460 | /* Lookup the variable for the call statement CALL representing |
461 | the uses. Returns NULL if there is nothing special about this call. */ |
462 | |
463 | static varinfo_t |
464 | lookup_call_use_vi (gcall *call) |
465 | { |
466 | varinfo_t *slot_p = call_stmt_vars->get (k: call); |
467 | if (slot_p) |
468 | return *slot_p; |
469 | |
470 | return NULL; |
471 | } |
472 | |
473 | /* Lookup the variable for the call statement CALL representing |
474 | the clobbers. Returns NULL if there is nothing special about this call. */ |
475 | |
476 | static varinfo_t |
477 | lookup_call_clobber_vi (gcall *call) |
478 | { |
479 | varinfo_t uses = lookup_call_use_vi (call); |
480 | if (!uses) |
481 | return NULL; |
482 | |
483 | return vi_next (vi: uses); |
484 | } |
485 | |
486 | /* Lookup or create the variable for the call statement CALL representing |
487 | the uses. */ |
488 | |
489 | static varinfo_t |
490 | get_call_use_vi (gcall *call) |
491 | { |
492 | return get_call_vi (call); |
493 | } |
494 | |
495 | /* Lookup or create the variable for the call statement CALL representing |
496 | the clobbers. */ |
497 | |
498 | static varinfo_t ATTRIBUTE_UNUSED |
499 | get_call_clobber_vi (gcall *call) |
500 | { |
501 | return vi_next (vi: get_call_vi (call)); |
502 | } |
503 | |
504 | |
505 | enum constraint_expr_type {SCALAR, DEREF, ADDRESSOF}; |
506 | |
507 | /* An expression that appears in a constraint. */ |
508 | |
509 | struct constraint_expr |
510 | { |
511 | /* Constraint type. */ |
512 | constraint_expr_type type; |
513 | |
514 | /* Variable we are referring to in the constraint. */ |
515 | unsigned int var; |
516 | |
517 | /* Offset, in bits, of this constraint from the beginning of |
518 | variables it ends up referring to. |
519 | |
520 | IOW, in a deref constraint, we would deref, get the result set, |
521 | then add OFFSET to each member. */ |
522 | HOST_WIDE_INT offset; |
523 | }; |
524 | |
525 | /* Use 0x8000... as special unknown offset. */ |
526 | #define UNKNOWN_OFFSET HOST_WIDE_INT_MIN |
527 | |
528 | typedef struct constraint_expr ce_s; |
529 | static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool); |
530 | static void get_constraint_for (tree, vec<ce_s> *); |
531 | static void get_constraint_for_rhs (tree, vec<ce_s> *); |
532 | static void do_deref (vec<ce_s> *); |
533 | |
534 | /* Our set constraints are made up of two constraint expressions, one |
535 | LHS, and one RHS. |
536 | |
537 | As described in the introduction, our set constraints each represent an |
538 | operation between set valued variables. |
539 | */ |
540 | struct constraint |
541 | { |
542 | struct constraint_expr lhs; |
543 | struct constraint_expr rhs; |
544 | }; |
545 | |
546 | /* List of constraints that we use to build the constraint graph from. */ |
547 | |
548 | static vec<constraint_t> constraints; |
549 | static object_allocator<constraint> constraint_pool ("Constraint pool" ); |
550 | |
551 | /* The constraint graph is represented as an array of bitmaps |
552 | containing successor nodes. */ |
553 | |
554 | struct constraint_graph |
555 | { |
556 | /* Size of this graph, which may be different than the number of |
557 | nodes in the variable map. */ |
558 | unsigned int size; |
559 | |
560 | /* Explicit successors of each node. */ |
561 | bitmap *succs; |
562 | |
563 | /* Implicit predecessors of each node (Used for variable |
564 | substitution). */ |
565 | bitmap *implicit_preds; |
566 | |
567 | /* Explicit predecessors of each node (Used for variable substitution). */ |
568 | bitmap *preds; |
569 | |
570 | /* Indirect cycle representatives, or -1 if the node has no indirect |
571 | cycles. */ |
572 | int *indirect_cycles; |
573 | |
574 | /* Representative node for a node. rep[a] == a unless the node has |
575 | been unified. */ |
576 | unsigned int *rep; |
577 | |
578 | /* Equivalence class representative for a label. This is used for |
579 | variable substitution. */ |
580 | int *eq_rep; |
581 | |
582 | /* Pointer equivalence label for a node. All nodes with the same |
583 | pointer equivalence label can be unified together at some point |
584 | (either during constraint optimization or after the constraint |
585 | graph is built). */ |
586 | unsigned int *pe; |
587 | |
588 | /* Pointer equivalence representative for a label. This is used to |
589 | handle nodes that are pointer equivalent but not location |
590 | equivalent. We can unite these once the addressof constraints |
591 | are transformed into initial points-to sets. */ |
592 | int *pe_rep; |
593 | |
594 | /* Pointer equivalence label for each node, used during variable |
595 | substitution. */ |
596 | unsigned int *pointer_label; |
597 | |
598 | /* Location equivalence label for each node, used during location |
599 | equivalence finding. */ |
600 | unsigned int *loc_label; |
601 | |
602 | /* Pointed-by set for each node, used during location equivalence |
603 | finding. This is pointed-by rather than pointed-to, because it |
604 | is constructed using the predecessor graph. */ |
605 | bitmap *pointed_by; |
606 | |
607 | /* Points to sets for pointer equivalence. This is *not* the actual |
608 | points-to sets for nodes. */ |
609 | bitmap *points_to; |
610 | |
611 | /* Bitmap of nodes where the bit is set if the node is a direct |
612 | node. Used for variable substitution. */ |
613 | sbitmap direct_nodes; |
614 | |
615 | /* Bitmap of nodes where the bit is set if the node is address |
616 | taken. Used for variable substitution. */ |
617 | bitmap address_taken; |
618 | |
619 | /* Vector of complex constraints for each graph node. Complex |
620 | constraints are those involving dereferences or offsets that are |
621 | not 0. */ |
622 | vec<constraint_t> *complex; |
623 | }; |
624 | |
625 | static constraint_graph_t graph; |
626 | |
627 | /* During variable substitution and the offline version of indirect |
628 | cycle finding, we create nodes to represent dereferences and |
629 | address taken constraints. These represent where these start and |
630 | end. */ |
631 | #define FIRST_REF_NODE (varmap).length () |
632 | #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1)) |
633 | |
634 | /* Return the representative node for NODE, if NODE has been unioned |
635 | with another NODE. |
636 | This function performs path compression along the way to finding |
637 | the representative. */ |
638 | |
639 | static unsigned int |
640 | find (unsigned int node) |
641 | { |
642 | gcc_checking_assert (node < graph->size); |
643 | if (graph->rep[node] != node) |
644 | return graph->rep[node] = find (node: graph->rep[node]); |
645 | return node; |
646 | } |
647 | |
648 | /* Union the TO and FROM nodes to the TO nodes. |
649 | Note that at some point in the future, we may want to do |
650 | union-by-rank, in which case we are going to have to return the |
651 | node we unified to. */ |
652 | |
653 | static bool |
654 | unite (unsigned int to, unsigned int from) |
655 | { |
656 | gcc_checking_assert (to < graph->size && from < graph->size); |
657 | if (to != from && graph->rep[from] != to) |
658 | { |
659 | graph->rep[from] = to; |
660 | return true; |
661 | } |
662 | return false; |
663 | } |
664 | |
665 | /* Create a new constraint consisting of LHS and RHS expressions. */ |
666 | |
667 | static constraint_t |
668 | new_constraint (const struct constraint_expr lhs, |
669 | const struct constraint_expr rhs) |
670 | { |
671 | constraint_t ret = constraint_pool.allocate (); |
672 | ret->lhs = lhs; |
673 | ret->rhs = rhs; |
674 | return ret; |
675 | } |
676 | |
677 | /* Print out constraint C to FILE. */ |
678 | |
679 | static void |
680 | dump_constraint (FILE *file, constraint_t c) |
681 | { |
682 | if (c->lhs.type == ADDRESSOF) |
683 | fprintf (stream: file, format: "&" ); |
684 | else if (c->lhs.type == DEREF) |
685 | fprintf (stream: file, format: "*" ); |
686 | if (dump_file) |
687 | fprintf (stream: file, format: "%s" , get_varinfo (n: c->lhs.var)->name); |
688 | else |
689 | fprintf (stream: file, format: "V%d" , c->lhs.var); |
690 | if (c->lhs.offset == UNKNOWN_OFFSET) |
691 | fprintf (stream: file, format: " + UNKNOWN" ); |
692 | else if (c->lhs.offset != 0) |
693 | fprintf (stream: file, format: " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset); |
694 | fprintf (stream: file, format: " = " ); |
695 | if (c->rhs.type == ADDRESSOF) |
696 | fprintf (stream: file, format: "&" ); |
697 | else if (c->rhs.type == DEREF) |
698 | fprintf (stream: file, format: "*" ); |
699 | if (dump_file) |
700 | fprintf (stream: file, format: "%s" , get_varinfo (n: c->rhs.var)->name); |
701 | else |
702 | fprintf (stream: file, format: "V%d" , c->rhs.var); |
703 | if (c->rhs.offset == UNKNOWN_OFFSET) |
704 | fprintf (stream: file, format: " + UNKNOWN" ); |
705 | else if (c->rhs.offset != 0) |
706 | fprintf (stream: file, format: " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset); |
707 | } |
708 | |
709 | |
710 | void debug_constraint (constraint_t); |
711 | void debug_constraints (void); |
712 | void debug_constraint_graph (void); |
713 | void debug_solution_for_var (unsigned int); |
714 | void debug_sa_points_to_info (void); |
715 | void debug_varinfo (varinfo_t); |
716 | void debug_varmap (void); |
717 | |
718 | /* Print out constraint C to stderr. */ |
719 | |
720 | DEBUG_FUNCTION void |
721 | debug_constraint (constraint_t c) |
722 | { |
723 | dump_constraint (stderr, c); |
724 | fprintf (stderr, format: "\n" ); |
725 | } |
726 | |
727 | /* Print out all constraints to FILE */ |
728 | |
729 | static void |
730 | dump_constraints (FILE *file, int from) |
731 | { |
732 | int i; |
733 | constraint_t c; |
734 | for (i = from; constraints.iterate (ix: i, ptr: &c); i++) |
735 | if (c) |
736 | { |
737 | dump_constraint (file, c); |
738 | fprintf (stream: file, format: "\n" ); |
739 | } |
740 | } |
741 | |
742 | /* Print out all constraints to stderr. */ |
743 | |
744 | DEBUG_FUNCTION void |
745 | debug_constraints (void) |
746 | { |
747 | dump_constraints (stderr, from: 0); |
748 | } |
749 | |
750 | /* Print the constraint graph in dot format. */ |
751 | |
752 | static void |
753 | dump_constraint_graph (FILE *file) |
754 | { |
755 | unsigned int i; |
756 | |
757 | /* Only print the graph if it has already been initialized: */ |
758 | if (!graph) |
759 | return; |
760 | |
761 | /* Prints the header of the dot file: */ |
762 | fprintf (stream: file, format: "strict digraph {\n" ); |
763 | fprintf (stream: file, format: " node [\n shape = box\n ]\n" ); |
764 | fprintf (stream: file, format: " edge [\n fontsize = \"12\"\n ]\n" ); |
765 | fprintf (stream: file, format: "\n // List of nodes and complex constraints in " |
766 | "the constraint graph:\n" ); |
767 | |
768 | /* The next lines print the nodes in the graph together with the |
769 | complex constraints attached to them. */ |
770 | for (i = 1; i < graph->size; i++) |
771 | { |
772 | if (i == FIRST_REF_NODE) |
773 | continue; |
774 | if (find (node: i) != i) |
775 | continue; |
776 | if (i < FIRST_REF_NODE) |
777 | fprintf (stream: file, format: "\"%s\"" , get_varinfo (n: i)->name); |
778 | else |
779 | fprintf (stream: file, format: "\"*%s\"" , get_varinfo (n: i - FIRST_REF_NODE)->name); |
780 | if (graph->complex[i].exists ()) |
781 | { |
782 | unsigned j; |
783 | constraint_t c; |
784 | fprintf (stream: file, format: " [label=\"\\N\\n" ); |
785 | for (j = 0; graph->complex[i].iterate (ix: j, ptr: &c); ++j) |
786 | { |
787 | dump_constraint (file, c); |
788 | fprintf (stream: file, format: "\\l" ); |
789 | } |
790 | fprintf (stream: file, format: "\"]" ); |
791 | } |
792 | fprintf (stream: file, format: ";\n" ); |
793 | } |
794 | |
795 | /* Go over the edges. */ |
796 | fprintf (stream: file, format: "\n // Edges in the constraint graph:\n" ); |
797 | for (i = 1; i < graph->size; i++) |
798 | { |
799 | unsigned j; |
800 | bitmap_iterator bi; |
801 | if (find (node: i) != i) |
802 | continue; |
803 | EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi) |
804 | { |
805 | unsigned to = find (node: j); |
806 | if (i == to) |
807 | continue; |
808 | if (i < FIRST_REF_NODE) |
809 | fprintf (stream: file, format: "\"%s\"" , get_varinfo (n: i)->name); |
810 | else |
811 | fprintf (stream: file, format: "\"*%s\"" , get_varinfo (n: i - FIRST_REF_NODE)->name); |
812 | fprintf (stream: file, format: " -> " ); |
813 | if (to < FIRST_REF_NODE) |
814 | fprintf (stream: file, format: "\"%s\"" , get_varinfo (n: to)->name); |
815 | else |
816 | fprintf (stream: file, format: "\"*%s\"" , get_varinfo (n: to - FIRST_REF_NODE)->name); |
817 | fprintf (stream: file, format: ";\n" ); |
818 | } |
819 | } |
820 | |
821 | /* Prints the tail of the dot file. */ |
822 | fprintf (stream: file, format: "}\n" ); |
823 | } |
824 | |
825 | /* Print out the constraint graph to stderr. */ |
826 | |
827 | DEBUG_FUNCTION void |
828 | debug_constraint_graph (void) |
829 | { |
830 | dump_constraint_graph (stderr); |
831 | } |
832 | |
833 | /* SOLVER FUNCTIONS |
834 | |
835 | The solver is a simple worklist solver, that works on the following |
836 | algorithm: |
837 | |
838 | sbitmap changed_nodes = all zeroes; |
839 | changed_count = 0; |
840 | For each node that is not already collapsed: |
841 | changed_count++; |
842 | set bit in changed nodes |
843 | |
844 | while (changed_count > 0) |
845 | { |
846 | compute topological ordering for constraint graph |
847 | |
848 | find and collapse cycles in the constraint graph (updating |
849 | changed if necessary) |
850 | |
851 | for each node (n) in the graph in topological order: |
852 | changed_count--; |
853 | |
854 | Process each complex constraint associated with the node, |
855 | updating changed if necessary. |
856 | |
857 | For each outgoing edge from n, propagate the solution from n to |
858 | the destination of the edge, updating changed as necessary. |
859 | |
860 | } */ |
861 | |
862 | /* Return true if two constraint expressions A and B are equal. */ |
863 | |
864 | static bool |
865 | constraint_expr_equal (struct constraint_expr a, struct constraint_expr b) |
866 | { |
867 | return a.type == b.type && a.var == b.var && a.offset == b.offset; |
868 | } |
869 | |
870 | /* Return true if constraint expression A is less than constraint expression |
871 | B. This is just arbitrary, but consistent, in order to give them an |
872 | ordering. */ |
873 | |
874 | static bool |
875 | constraint_expr_less (struct constraint_expr a, struct constraint_expr b) |
876 | { |
877 | if (a.type == b.type) |
878 | { |
879 | if (a.var == b.var) |
880 | return a.offset < b.offset; |
881 | else |
882 | return a.var < b.var; |
883 | } |
884 | else |
885 | return a.type < b.type; |
886 | } |
887 | |
888 | /* Return true if constraint A is less than constraint B. This is just |
889 | arbitrary, but consistent, in order to give them an ordering. */ |
890 | |
891 | static bool |
892 | constraint_less (const constraint_t &a, const constraint_t &b) |
893 | { |
894 | if (constraint_expr_less (a: a->lhs, b: b->lhs)) |
895 | return true; |
896 | else if (constraint_expr_less (a: b->lhs, b: a->lhs)) |
897 | return false; |
898 | else |
899 | return constraint_expr_less (a: a->rhs, b: b->rhs); |
900 | } |
901 | |
902 | /* Return true if two constraints A and B are equal. */ |
903 | |
904 | static bool |
905 | constraint_equal (const constraint &a, const constraint &b) |
906 | { |
907 | return constraint_expr_equal (a: a.lhs, b: b.lhs) |
908 | && constraint_expr_equal (a: a.rhs, b: b.rhs); |
909 | } |
910 | |
911 | |
912 | /* Find a constraint LOOKFOR in the sorted constraint vector VEC */ |
913 | |
914 | static constraint_t |
915 | constraint_vec_find (vec<constraint_t> vec, |
916 | constraint &lookfor) |
917 | { |
918 | unsigned int place; |
919 | constraint_t found; |
920 | |
921 | if (!vec.exists ()) |
922 | return NULL; |
923 | |
924 | place = vec.lower_bound (obj: &lookfor, lessthan: constraint_less); |
925 | if (place >= vec.length ()) |
926 | return NULL; |
927 | found = vec[place]; |
928 | if (!constraint_equal (a: *found, b: lookfor)) |
929 | return NULL; |
930 | return found; |
931 | } |
932 | |
933 | /* Union two constraint vectors, TO and FROM. Put the result in TO. |
934 | Returns true of TO set is changed. */ |
935 | |
936 | static bool |
937 | constraint_set_union (vec<constraint_t> *to, |
938 | vec<constraint_t> *from) |
939 | { |
940 | int i; |
941 | constraint_t c; |
942 | bool any_change = false; |
943 | |
944 | FOR_EACH_VEC_ELT (*from, i, c) |
945 | { |
946 | if (constraint_vec_find (vec: *to, lookfor&: *c) == NULL) |
947 | { |
948 | unsigned int place = to->lower_bound (obj: c, lessthan: constraint_less); |
949 | to->safe_insert (ix: place, obj: c); |
950 | any_change = true; |
951 | } |
952 | } |
953 | return any_change; |
954 | } |
955 | |
956 | /* Expands the solution in SET to all sub-fields of variables included. */ |
957 | |
958 | static bitmap |
959 | solution_set_expand (bitmap set, bitmap *expanded) |
960 | { |
961 | bitmap_iterator bi; |
962 | unsigned j; |
963 | |
964 | if (*expanded) |
965 | return *expanded; |
966 | |
967 | *expanded = BITMAP_ALLOC (obstack: &iteration_obstack); |
968 | |
969 | /* In a first pass expand variables, once for each head to avoid |
970 | quadratic behavior, to include all sub-fields. */ |
971 | unsigned prev_head = 0; |
972 | EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi) |
973 | { |
974 | varinfo_t v = get_varinfo (n: j); |
975 | if (v->is_artificial_var |
976 | || v->is_full_var) |
977 | continue; |
978 | if (v->head != prev_head) |
979 | { |
980 | varinfo_t head = get_varinfo (n: v->head); |
981 | unsigned num = 1; |
982 | for (varinfo_t n = vi_next (vi: head); n != NULL; n = vi_next (vi: n)) |
983 | { |
984 | if (n->id != head->id + num) |
985 | { |
986 | /* Usually sub variables are adjacent but since we |
987 | create pointed-to restrict representatives there |
988 | can be gaps as well. */ |
989 | bitmap_set_range (*expanded, head->id, num); |
990 | head = n; |
991 | num = 1; |
992 | } |
993 | else |
994 | num++; |
995 | } |
996 | |
997 | bitmap_set_range (*expanded, head->id, num); |
998 | prev_head = v->head; |
999 | } |
1000 | } |
1001 | |
1002 | /* And finally set the rest of the bits from SET in an efficient way. */ |
1003 | bitmap_ior_into (*expanded, set); |
1004 | |
1005 | return *expanded; |
1006 | } |
1007 | |
1008 | /* Union solution sets TO and DELTA, and add INC to each member of DELTA in the |
1009 | process. */ |
1010 | |
1011 | static bool |
1012 | set_union_with_increment (bitmap to, bitmap delta, HOST_WIDE_INT inc, |
1013 | bitmap *expanded_delta) |
1014 | { |
1015 | bool changed = false; |
1016 | bitmap_iterator bi; |
1017 | unsigned int i; |
1018 | |
1019 | /* If the solution of DELTA contains anything it is good enough to transfer |
1020 | this to TO. */ |
1021 | if (bitmap_bit_p (delta, anything_id)) |
1022 | return bitmap_set_bit (to, anything_id); |
1023 | |
1024 | /* If the offset is unknown we have to expand the solution to |
1025 | all subfields. */ |
1026 | if (inc == UNKNOWN_OFFSET) |
1027 | { |
1028 | delta = solution_set_expand (set: delta, expanded: expanded_delta); |
1029 | changed |= bitmap_ior_into (to, delta); |
1030 | return changed; |
1031 | } |
1032 | |
1033 | /* For non-zero offset union the offsetted solution into the destination. */ |
1034 | EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi) |
1035 | { |
1036 | varinfo_t vi = get_varinfo (n: i); |
1037 | |
1038 | /* If this is a variable with just one field just set its bit |
1039 | in the result. */ |
1040 | if (vi->is_artificial_var |
1041 | || vi->is_unknown_size_var |
1042 | || vi->is_full_var) |
1043 | changed |= bitmap_set_bit (to, i); |
1044 | else |
1045 | { |
1046 | HOST_WIDE_INT fieldoffset = vi->offset + inc; |
1047 | unsigned HOST_WIDE_INT size = vi->size; |
1048 | |
1049 | /* If the offset makes the pointer point to before the |
1050 | variable use offset zero for the field lookup. */ |
1051 | if (fieldoffset < 0) |
1052 | vi = get_varinfo (n: vi->head); |
1053 | else |
1054 | vi = first_or_preceding_vi_for_offset (vi, fieldoffset); |
1055 | |
1056 | do |
1057 | { |
1058 | changed |= bitmap_set_bit (to, vi->id); |
1059 | if (vi->is_full_var |
1060 | || vi->next == 0) |
1061 | break; |
1062 | |
1063 | /* We have to include all fields that overlap the current field |
1064 | shifted by inc. */ |
1065 | vi = vi_next (vi); |
1066 | } |
1067 | while (vi->offset < fieldoffset + size); |
1068 | } |
1069 | } |
1070 | |
1071 | return changed; |
1072 | } |
1073 | |
1074 | /* Insert constraint C into the list of complex constraints for graph |
1075 | node VAR. */ |
1076 | |
1077 | static void |
1078 | insert_into_complex (constraint_graph_t graph, |
1079 | unsigned int var, constraint_t c) |
1080 | { |
1081 | vec<constraint_t> complex = graph->complex[var]; |
1082 | unsigned int place = complex.lower_bound (obj: c, lessthan: constraint_less); |
1083 | |
1084 | /* Only insert constraints that do not already exist. */ |
1085 | if (place >= complex.length () |
1086 | || !constraint_equal (a: *c, b: *complex[place])) |
1087 | graph->complex[var].safe_insert (ix: place, obj: c); |
1088 | } |
1089 | |
1090 | |
1091 | /* Condense two variable nodes into a single variable node, by moving |
1092 | all associated info from FROM to TO. Returns true if TO node's |
1093 | constraint set changes after the merge. */ |
1094 | |
1095 | static bool |
1096 | merge_node_constraints (constraint_graph_t graph, unsigned int to, |
1097 | unsigned int from) |
1098 | { |
1099 | unsigned int i; |
1100 | constraint_t c; |
1101 | bool any_change = false; |
1102 | |
1103 | gcc_checking_assert (find (from) == to); |
1104 | |
1105 | /* Move all complex constraints from src node into to node */ |
1106 | FOR_EACH_VEC_ELT (graph->complex[from], i, c) |
1107 | { |
1108 | /* In complex constraints for node FROM, we may have either |
1109 | a = *FROM, and *FROM = a, or an offseted constraint which are |
1110 | always added to the rhs node's constraints. */ |
1111 | |
1112 | if (c->rhs.type == DEREF) |
1113 | c->rhs.var = to; |
1114 | else if (c->lhs.type == DEREF) |
1115 | c->lhs.var = to; |
1116 | else |
1117 | c->rhs.var = to; |
1118 | |
1119 | } |
1120 | any_change = constraint_set_union (to: &graph->complex[to], |
1121 | from: &graph->complex[from]); |
1122 | graph->complex[from].release (); |
1123 | return any_change; |
1124 | } |
1125 | |
1126 | |
1127 | /* Remove edges involving NODE from GRAPH. */ |
1128 | |
1129 | static void |
1130 | clear_edges_for_node (constraint_graph_t graph, unsigned int node) |
1131 | { |
1132 | if (graph->succs[node]) |
1133 | BITMAP_FREE (graph->succs[node]); |
1134 | } |
1135 | |
1136 | /* Merge GRAPH nodes FROM and TO into node TO. */ |
1137 | |
1138 | static void |
1139 | merge_graph_nodes (constraint_graph_t graph, unsigned int to, |
1140 | unsigned int from) |
1141 | { |
1142 | if (graph->indirect_cycles[from] != -1) |
1143 | { |
1144 | /* If we have indirect cycles with the from node, and we have |
1145 | none on the to node, the to node has indirect cycles from the |
1146 | from node now that they are unified. |
1147 | If indirect cycles exist on both, unify the nodes that they |
1148 | are in a cycle with, since we know they are in a cycle with |
1149 | each other. */ |
1150 | if (graph->indirect_cycles[to] == -1) |
1151 | graph->indirect_cycles[to] = graph->indirect_cycles[from]; |
1152 | } |
1153 | |
1154 | /* Merge all the successor edges. */ |
1155 | if (graph->succs[from]) |
1156 | { |
1157 | if (!graph->succs[to]) |
1158 | graph->succs[to] = BITMAP_ALLOC (obstack: &pta_obstack); |
1159 | bitmap_ior_into (graph->succs[to], |
1160 | graph->succs[from]); |
1161 | } |
1162 | |
1163 | clear_edges_for_node (graph, node: from); |
1164 | } |
1165 | |
1166 | |
1167 | /* Add an indirect graph edge to GRAPH, going from TO to FROM if |
1168 | it doesn't exist in the graph already. */ |
1169 | |
1170 | static void |
1171 | add_implicit_graph_edge (constraint_graph_t graph, unsigned int to, |
1172 | unsigned int from) |
1173 | { |
1174 | if (to == from) |
1175 | return; |
1176 | |
1177 | if (!graph->implicit_preds[to]) |
1178 | graph->implicit_preds[to] = BITMAP_ALLOC (obstack: &predbitmap_obstack); |
1179 | |
1180 | if (bitmap_set_bit (graph->implicit_preds[to], from)) |
1181 | stats.num_implicit_edges++; |
1182 | } |
1183 | |
1184 | /* Add a predecessor graph edge to GRAPH, going from TO to FROM if |
1185 | it doesn't exist in the graph already. |
1186 | Return false if the edge already existed, true otherwise. */ |
1187 | |
1188 | static void |
1189 | add_pred_graph_edge (constraint_graph_t graph, unsigned int to, |
1190 | unsigned int from) |
1191 | { |
1192 | if (!graph->preds[to]) |
1193 | graph->preds[to] = BITMAP_ALLOC (obstack: &predbitmap_obstack); |
1194 | bitmap_set_bit (graph->preds[to], from); |
1195 | } |
1196 | |
1197 | /* Add a graph edge to GRAPH, going from FROM to TO if |
1198 | it doesn't exist in the graph already. |
1199 | Return false if the edge already existed, true otherwise. */ |
1200 | |
1201 | static bool |
1202 | add_graph_edge (constraint_graph_t graph, unsigned int to, |
1203 | unsigned int from) |
1204 | { |
1205 | if (to == from) |
1206 | { |
1207 | return false; |
1208 | } |
1209 | else |
1210 | { |
1211 | bool r = false; |
1212 | |
1213 | if (!graph->succs[from]) |
1214 | graph->succs[from] = BITMAP_ALLOC (obstack: &pta_obstack); |
1215 | |
1216 | /* The graph solving process does not avoid "triangles", thus |
1217 | there can be multiple paths from a node to another involving |
1218 | intermediate other nodes. That causes extra copying which is |
1219 | most difficult to avoid when the intermediate node is ESCAPED |
1220 | because there are no edges added from ESCAPED. Avoid |
1221 | adding the direct edge FROM -> TO when we have FROM -> ESCAPED |
1222 | and TO contains ESCAPED. |
1223 | ??? Note this is only a heuristic, it does not prevent the |
1224 | situation from occuring. The heuristic helps PR38474 and |
1225 | PR99912 significantly. */ |
1226 | if (to < FIRST_REF_NODE |
1227 | && bitmap_bit_p (graph->succs[from], find (node: escaped_id)) |
1228 | && bitmap_bit_p (get_varinfo (n: find (node: to))->solution, escaped_id)) |
1229 | { |
1230 | stats.num_avoided_edges++; |
1231 | return false; |
1232 | } |
1233 | |
1234 | if (bitmap_set_bit (graph->succs[from], to)) |
1235 | { |
1236 | r = true; |
1237 | if (to < FIRST_REF_NODE && from < FIRST_REF_NODE) |
1238 | stats.num_edges++; |
1239 | } |
1240 | return r; |
1241 | } |
1242 | } |
1243 | |
1244 | |
1245 | /* Initialize the constraint graph structure to contain SIZE nodes. */ |
1246 | |
1247 | static void |
1248 | init_graph (unsigned int size) |
1249 | { |
1250 | unsigned int j; |
1251 | |
1252 | graph = XCNEW (struct constraint_graph); |
1253 | graph->size = size; |
1254 | graph->succs = XCNEWVEC (bitmap, graph->size); |
1255 | graph->indirect_cycles = XNEWVEC (int, graph->size); |
1256 | graph->rep = XNEWVEC (unsigned int, graph->size); |
1257 | /* ??? Macros do not support template types with multiple arguments, |
1258 | so we use a typedef to work around it. */ |
1259 | typedef vec<constraint_t> vec_constraint_t_heap; |
1260 | graph->complex = XCNEWVEC (vec_constraint_t_heap, size); |
1261 | graph->pe = XCNEWVEC (unsigned int, graph->size); |
1262 | graph->pe_rep = XNEWVEC (int, graph->size); |
1263 | |
1264 | for (j = 0; j < graph->size; j++) |
1265 | { |
1266 | graph->rep[j] = j; |
1267 | graph->pe_rep[j] = -1; |
1268 | graph->indirect_cycles[j] = -1; |
1269 | } |
1270 | } |
1271 | |
1272 | /* Build the constraint graph, adding only predecessor edges right now. */ |
1273 | |
1274 | static void |
1275 | build_pred_graph (void) |
1276 | { |
1277 | int i; |
1278 | constraint_t c; |
1279 | unsigned int j; |
1280 | |
1281 | graph->implicit_preds = XCNEWVEC (bitmap, graph->size); |
1282 | graph->preds = XCNEWVEC (bitmap, graph->size); |
1283 | graph->pointer_label = XCNEWVEC (unsigned int, graph->size); |
1284 | graph->loc_label = XCNEWVEC (unsigned int, graph->size); |
1285 | graph->pointed_by = XCNEWVEC (bitmap, graph->size); |
1286 | graph->points_to = XCNEWVEC (bitmap, graph->size); |
1287 | graph->eq_rep = XNEWVEC (int, graph->size); |
1288 | graph->direct_nodes = sbitmap_alloc (graph->size); |
1289 | graph->address_taken = BITMAP_ALLOC (obstack: &predbitmap_obstack); |
1290 | bitmap_clear (graph->direct_nodes); |
1291 | |
1292 | for (j = 1; j < FIRST_REF_NODE; j++) |
1293 | { |
1294 | if (!get_varinfo (n: j)->is_special_var) |
1295 | bitmap_set_bit (map: graph->direct_nodes, bitno: j); |
1296 | } |
1297 | |
1298 | for (j = 0; j < graph->size; j++) |
1299 | graph->eq_rep[j] = -1; |
1300 | |
1301 | for (j = 0; j < varmap.length (); j++) |
1302 | graph->indirect_cycles[j] = -1; |
1303 | |
1304 | FOR_EACH_VEC_ELT (constraints, i, c) |
1305 | { |
1306 | struct constraint_expr lhs = c->lhs; |
1307 | struct constraint_expr rhs = c->rhs; |
1308 | unsigned int lhsvar = lhs.var; |
1309 | unsigned int rhsvar = rhs.var; |
1310 | |
1311 | if (lhs.type == DEREF) |
1312 | { |
1313 | /* *x = y. */ |
1314 | if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR) |
1315 | { |
1316 | if (lhs.var == anything_id) |
1317 | add_pred_graph_edge (graph, to: storedanything_id, from: rhsvar); |
1318 | else |
1319 | add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, from: rhsvar); |
1320 | } |
1321 | } |
1322 | else if (rhs.type == DEREF) |
1323 | { |
1324 | /* x = *y */ |
1325 | if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR) |
1326 | add_pred_graph_edge (graph, to: lhsvar, FIRST_REF_NODE + rhsvar); |
1327 | else |
1328 | bitmap_clear_bit (map: graph->direct_nodes, bitno: lhsvar); |
1329 | } |
1330 | else if (rhs.type == ADDRESSOF) |
1331 | { |
1332 | varinfo_t v; |
1333 | |
1334 | /* x = &y */ |
1335 | if (graph->points_to[lhsvar] == NULL) |
1336 | graph->points_to[lhsvar] = BITMAP_ALLOC (obstack: &predbitmap_obstack); |
1337 | bitmap_set_bit (graph->points_to[lhsvar], rhsvar); |
1338 | |
1339 | if (graph->pointed_by[rhsvar] == NULL) |
1340 | graph->pointed_by[rhsvar] = BITMAP_ALLOC (obstack: &predbitmap_obstack); |
1341 | bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar); |
1342 | |
1343 | /* Implicitly, *x = y */ |
1344 | add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, from: rhsvar); |
1345 | |
1346 | /* All related variables are no longer direct nodes. */ |
1347 | bitmap_clear_bit (map: graph->direct_nodes, bitno: rhsvar); |
1348 | v = get_varinfo (n: rhsvar); |
1349 | if (!v->is_full_var) |
1350 | { |
1351 | v = get_varinfo (n: v->head); |
1352 | do |
1353 | { |
1354 | bitmap_clear_bit (map: graph->direct_nodes, bitno: v->id); |
1355 | v = vi_next (vi: v); |
1356 | } |
1357 | while (v != NULL); |
1358 | } |
1359 | bitmap_set_bit (graph->address_taken, rhsvar); |
1360 | } |
1361 | else if (lhsvar > anything_id |
1362 | && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0) |
1363 | { |
1364 | /* x = y */ |
1365 | add_pred_graph_edge (graph, to: lhsvar, from: rhsvar); |
1366 | /* Implicitly, *x = *y */ |
1367 | add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, |
1368 | FIRST_REF_NODE + rhsvar); |
1369 | } |
1370 | else if (lhs.offset != 0 || rhs.offset != 0) |
1371 | { |
1372 | if (rhs.offset != 0) |
1373 | bitmap_clear_bit (map: graph->direct_nodes, bitno: lhs.var); |
1374 | else if (lhs.offset != 0) |
1375 | bitmap_clear_bit (map: graph->direct_nodes, bitno: rhs.var); |
1376 | } |
1377 | } |
1378 | } |
1379 | |
1380 | /* Build the constraint graph, adding successor edges. */ |
1381 | |
1382 | static void |
1383 | build_succ_graph (void) |
1384 | { |
1385 | unsigned i, t; |
1386 | constraint_t c; |
1387 | |
1388 | FOR_EACH_VEC_ELT (constraints, i, c) |
1389 | { |
1390 | struct constraint_expr lhs; |
1391 | struct constraint_expr rhs; |
1392 | unsigned int lhsvar; |
1393 | unsigned int rhsvar; |
1394 | |
1395 | if (!c) |
1396 | continue; |
1397 | |
1398 | lhs = c->lhs; |
1399 | rhs = c->rhs; |
1400 | lhsvar = find (node: lhs.var); |
1401 | rhsvar = find (node: rhs.var); |
1402 | |
1403 | if (lhs.type == DEREF) |
1404 | { |
1405 | if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR) |
1406 | { |
1407 | if (lhs.var == anything_id) |
1408 | add_graph_edge (graph, to: storedanything_id, from: rhsvar); |
1409 | else |
1410 | add_graph_edge (graph, FIRST_REF_NODE + lhsvar, from: rhsvar); |
1411 | } |
1412 | } |
1413 | else if (rhs.type == DEREF) |
1414 | { |
1415 | if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR) |
1416 | add_graph_edge (graph, to: lhsvar, FIRST_REF_NODE + rhsvar); |
1417 | } |
1418 | else if (rhs.type == ADDRESSOF) |
1419 | { |
1420 | /* x = &y */ |
1421 | gcc_checking_assert (find (rhs.var) == rhs.var); |
1422 | bitmap_set_bit (get_varinfo (n: lhsvar)->solution, rhsvar); |
1423 | } |
1424 | else if (lhsvar > anything_id |
1425 | && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0) |
1426 | { |
1427 | add_graph_edge (graph, to: lhsvar, from: rhsvar); |
1428 | } |
1429 | } |
1430 | |
1431 | /* Add edges from STOREDANYTHING to all nodes that can receive pointers. */ |
1432 | t = find (node: storedanything_id); |
1433 | for (i = integer_id + 1; i < FIRST_REF_NODE; ++i) |
1434 | { |
1435 | if (get_varinfo (n: i)->may_have_pointers) |
1436 | add_graph_edge (graph, to: find (node: i), from: t); |
1437 | } |
1438 | |
1439 | /* Everything stored to ANYTHING also potentially escapes. */ |
1440 | add_graph_edge (graph, to: find (node: escaped_id), from: t); |
1441 | } |
1442 | |
1443 | |
1444 | /* Changed variables on the last iteration. */ |
1445 | static bitmap changed; |
1446 | |
1447 | /* Strongly Connected Component visitation info. */ |
1448 | |
1449 | class scc_info |
1450 | { |
1451 | public: |
1452 | scc_info (size_t size); |
1453 | ~scc_info (); |
1454 | |
1455 | auto_sbitmap visited; |
1456 | auto_sbitmap deleted; |
1457 | unsigned int *dfs; |
1458 | unsigned int *node_mapping; |
1459 | int current_index; |
1460 | auto_vec<unsigned> scc_stack; |
1461 | }; |
1462 | |
1463 | |
1464 | /* Recursive routine to find strongly connected components in GRAPH. |
1465 | SI is the SCC info to store the information in, and N is the id of current |
1466 | graph node we are processing. |
1467 | |
1468 | This is Tarjan's strongly connected component finding algorithm, as |
1469 | modified by Nuutila to keep only non-root nodes on the stack. |
1470 | The algorithm can be found in "On finding the strongly connected |
1471 | connected components in a directed graph" by Esko Nuutila and Eljas |
1472 | Soisalon-Soininen, in Information Processing Letters volume 49, |
1473 | number 1, pages 9-14. */ |
1474 | |
1475 | static void |
1476 | scc_visit (constraint_graph_t graph, class scc_info *si, unsigned int n) |
1477 | { |
1478 | unsigned int i; |
1479 | bitmap_iterator bi; |
1480 | unsigned int my_dfs; |
1481 | |
1482 | bitmap_set_bit (map: si->visited, bitno: n); |
1483 | si->dfs[n] = si->current_index ++; |
1484 | my_dfs = si->dfs[n]; |
1485 | |
1486 | /* Visit all the successors. */ |
1487 | EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi) |
1488 | { |
1489 | unsigned int w; |
1490 | |
1491 | if (i > LAST_REF_NODE) |
1492 | break; |
1493 | |
1494 | w = find (node: i); |
1495 | if (bitmap_bit_p (map: si->deleted, bitno: w)) |
1496 | continue; |
1497 | |
1498 | if (!bitmap_bit_p (map: si->visited, bitno: w)) |
1499 | scc_visit (graph, si, n: w); |
1500 | |
1501 | unsigned int t = find (node: w); |
1502 | gcc_checking_assert (find (n) == n); |
1503 | if (si->dfs[t] < si->dfs[n]) |
1504 | si->dfs[n] = si->dfs[t]; |
1505 | } |
1506 | |
1507 | /* See if any components have been identified. */ |
1508 | if (si->dfs[n] == my_dfs) |
1509 | { |
1510 | if (si->scc_stack.length () > 0 |
1511 | && si->dfs[si->scc_stack.last ()] >= my_dfs) |
1512 | { |
1513 | bitmap scc = BITMAP_ALLOC (NULL); |
1514 | unsigned int lowest_node; |
1515 | bitmap_iterator bi; |
1516 | |
1517 | bitmap_set_bit (scc, n); |
1518 | |
1519 | while (si->scc_stack.length () != 0 |
1520 | && si->dfs[si->scc_stack.last ()] >= my_dfs) |
1521 | { |
1522 | unsigned int w = si->scc_stack.pop (); |
1523 | |
1524 | bitmap_set_bit (scc, w); |
1525 | } |
1526 | |
1527 | lowest_node = bitmap_first_set_bit (scc); |
1528 | gcc_assert (lowest_node < FIRST_REF_NODE); |
1529 | |
1530 | /* Collapse the SCC nodes into a single node, and mark the |
1531 | indirect cycles. */ |
1532 | EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi) |
1533 | { |
1534 | if (i < FIRST_REF_NODE) |
1535 | { |
1536 | if (unite (to: lowest_node, from: i)) |
1537 | unify_nodes (graph, lowest_node, i, false); |
1538 | } |
1539 | else |
1540 | { |
1541 | unite (to: lowest_node, from: i); |
1542 | graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node; |
1543 | } |
1544 | } |
1545 | bitmap_set_bit (map: si->deleted, bitno: lowest_node); |
1546 | } |
1547 | else |
1548 | bitmap_set_bit (map: si->deleted, bitno: n); |
1549 | } |
1550 | else |
1551 | si->scc_stack.safe_push (obj: n); |
1552 | } |
1553 | |
1554 | /* Unify node FROM into node TO, updating the changed count if |
1555 | necessary when UPDATE_CHANGED is true. */ |
1556 | |
1557 | static void |
1558 | unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from, |
1559 | bool update_changed) |
1560 | { |
1561 | gcc_checking_assert (to != from && find (to) == to); |
1562 | |
1563 | if (dump_file && (dump_flags & TDF_DETAILS)) |
1564 | fprintf (stream: dump_file, format: "Unifying %s to %s\n" , |
1565 | get_varinfo (n: from)->name, |
1566 | get_varinfo (n: to)->name); |
1567 | |
1568 | if (update_changed) |
1569 | stats.unified_vars_dynamic++; |
1570 | else |
1571 | stats.unified_vars_static++; |
1572 | |
1573 | merge_graph_nodes (graph, to, from); |
1574 | if (merge_node_constraints (graph, to, from)) |
1575 | { |
1576 | if (update_changed) |
1577 | bitmap_set_bit (changed, to); |
1578 | } |
1579 | |
1580 | /* Mark TO as changed if FROM was changed. If TO was already marked |
1581 | as changed, decrease the changed count. */ |
1582 | |
1583 | if (update_changed |
1584 | && bitmap_clear_bit (changed, from)) |
1585 | bitmap_set_bit (changed, to); |
1586 | varinfo_t fromvi = get_varinfo (n: from); |
1587 | if (fromvi->solution) |
1588 | { |
1589 | /* If the solution changes because of the merging, we need to mark |
1590 | the variable as changed. */ |
1591 | varinfo_t tovi = get_varinfo (n: to); |
1592 | if (bitmap_ior_into (tovi->solution, fromvi->solution)) |
1593 | { |
1594 | if (update_changed) |
1595 | bitmap_set_bit (changed, to); |
1596 | } |
1597 | |
1598 | BITMAP_FREE (fromvi->solution); |
1599 | if (fromvi->oldsolution) |
1600 | BITMAP_FREE (fromvi->oldsolution); |
1601 | |
1602 | if (stats.iterations > 0 |
1603 | && tovi->oldsolution) |
1604 | BITMAP_FREE (tovi->oldsolution); |
1605 | } |
1606 | if (graph->succs[to]) |
1607 | bitmap_clear_bit (graph->succs[to], to); |
1608 | } |
1609 | |
1610 | /* Add a copy edge FROM -> TO, optimizing special cases. Returns TRUE |
1611 | if the solution of TO changed. */ |
1612 | |
1613 | static bool |
1614 | solve_add_graph_edge (constraint_graph_t graph, unsigned int to, |
1615 | unsigned int from) |
1616 | { |
1617 | /* Adding edges from the special vars is pointless. |
1618 | They don't have sets that can change. */ |
1619 | if (get_varinfo (n: from)->is_special_var) |
1620 | return bitmap_ior_into (get_varinfo (n: to)->solution, |
1621 | get_varinfo (n: from)->solution); |
1622 | /* Merging the solution from ESCAPED needlessly increases |
1623 | the set. Use ESCAPED as representative instead. */ |
1624 | else if (from == find (node: escaped_id)) |
1625 | return bitmap_set_bit (get_varinfo (n: to)->solution, escaped_id); |
1626 | else if (get_varinfo (n: from)->may_have_pointers |
1627 | && add_graph_edge (graph, to, from)) |
1628 | return bitmap_ior_into (get_varinfo (n: to)->solution, |
1629 | get_varinfo (n: from)->solution); |
1630 | return false; |
1631 | } |
1632 | |
1633 | /* Process a constraint C that represents x = *(y + off), using DELTA as the |
1634 | starting solution for y. */ |
1635 | |
1636 | static void |
1637 | do_sd_constraint (constraint_graph_t graph, constraint_t c, |
1638 | bitmap delta, bitmap *expanded_delta) |
1639 | { |
1640 | unsigned int lhs = c->lhs.var; |
1641 | bool flag = false; |
1642 | bitmap sol = get_varinfo (n: lhs)->solution; |
1643 | unsigned int j; |
1644 | bitmap_iterator bi; |
1645 | HOST_WIDE_INT roffset = c->rhs.offset; |
1646 | |
1647 | /* Our IL does not allow this. */ |
1648 | gcc_checking_assert (c->lhs.offset == 0); |
1649 | |
1650 | /* If the solution of Y contains anything it is good enough to transfer |
1651 | this to the LHS. */ |
1652 | if (bitmap_bit_p (delta, anything_id)) |
1653 | { |
1654 | flag |= bitmap_set_bit (sol, anything_id); |
1655 | goto done; |
1656 | } |
1657 | |
1658 | /* If we do not know at with offset the rhs is dereferenced compute |
1659 | the reachability set of DELTA, conservatively assuming it is |
1660 | dereferenced at all valid offsets. */ |
1661 | if (roffset == UNKNOWN_OFFSET) |
1662 | { |
1663 | delta = solution_set_expand (set: delta, expanded: expanded_delta); |
1664 | /* No further offset processing is necessary. */ |
1665 | roffset = 0; |
1666 | } |
1667 | |
1668 | /* For each variable j in delta (Sol(y)), add |
1669 | an edge in the graph from j to x, and union Sol(j) into Sol(x). */ |
1670 | EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi) |
1671 | { |
1672 | varinfo_t v = get_varinfo (n: j); |
1673 | HOST_WIDE_INT fieldoffset = v->offset + roffset; |
1674 | unsigned HOST_WIDE_INT size = v->size; |
1675 | unsigned int t; |
1676 | |
1677 | if (v->is_full_var) |
1678 | ; |
1679 | else if (roffset != 0) |
1680 | { |
1681 | if (fieldoffset < 0) |
1682 | v = get_varinfo (n: v->head); |
1683 | else |
1684 | v = first_or_preceding_vi_for_offset (v, fieldoffset); |
1685 | } |
1686 | |
1687 | /* We have to include all fields that overlap the current field |
1688 | shifted by roffset. */ |
1689 | do |
1690 | { |
1691 | t = find (node: v->id); |
1692 | |
1693 | flag |= solve_add_graph_edge (graph, to: lhs, from: t); |
1694 | |
1695 | if (v->is_full_var |
1696 | || v->next == 0) |
1697 | break; |
1698 | |
1699 | v = vi_next (vi: v); |
1700 | } |
1701 | while (v->offset < fieldoffset + size); |
1702 | } |
1703 | |
1704 | done: |
1705 | /* If the LHS solution changed, mark the var as changed. */ |
1706 | if (flag) |
1707 | bitmap_set_bit (changed, lhs); |
1708 | } |
1709 | |
1710 | /* Process a constraint C that represents *(x + off) = y using DELTA |
1711 | as the starting solution for x. */ |
1712 | |
1713 | static void |
1714 | do_ds_constraint (constraint_t c, bitmap delta, bitmap *expanded_delta) |
1715 | { |
1716 | unsigned int rhs = c->rhs.var; |
1717 | bitmap sol = get_varinfo (n: rhs)->solution; |
1718 | unsigned int j; |
1719 | bitmap_iterator bi; |
1720 | HOST_WIDE_INT loff = c->lhs.offset; |
1721 | bool escaped_p = false; |
1722 | |
1723 | /* Our IL does not allow this. */ |
1724 | gcc_checking_assert (c->rhs.offset == 0); |
1725 | |
1726 | /* If the solution of y contains ANYTHING simply use the ANYTHING |
1727 | solution. This avoids needlessly increasing the points-to sets. */ |
1728 | if (bitmap_bit_p (sol, anything_id)) |
1729 | sol = get_varinfo (n: find (node: anything_id))->solution; |
1730 | |
1731 | /* If the solution for x contains ANYTHING we have to merge the |
1732 | solution of y into all pointer variables which we do via |
1733 | STOREDANYTHING. */ |
1734 | if (bitmap_bit_p (delta, anything_id)) |
1735 | { |
1736 | unsigned t = find (node: storedanything_id); |
1737 | if (solve_add_graph_edge (graph, to: t, from: rhs)) |
1738 | bitmap_set_bit (changed, t); |
1739 | return; |
1740 | } |
1741 | |
1742 | /* If we do not know at with offset the rhs is dereferenced compute |
1743 | the reachability set of DELTA, conservatively assuming it is |
1744 | dereferenced at all valid offsets. */ |
1745 | if (loff == UNKNOWN_OFFSET) |
1746 | { |
1747 | delta = solution_set_expand (set: delta, expanded: expanded_delta); |
1748 | loff = 0; |
1749 | } |
1750 | |
1751 | /* For each member j of delta (Sol(x)), add an edge from y to j and |
1752 | union Sol(y) into Sol(j) */ |
1753 | EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi) |
1754 | { |
1755 | varinfo_t v = get_varinfo (n: j); |
1756 | unsigned int t; |
1757 | HOST_WIDE_INT fieldoffset = v->offset + loff; |
1758 | unsigned HOST_WIDE_INT size = v->size; |
1759 | |
1760 | if (v->is_full_var) |
1761 | ; |
1762 | else if (loff != 0) |
1763 | { |
1764 | if (fieldoffset < 0) |
1765 | v = get_varinfo (n: v->head); |
1766 | else |
1767 | v = first_or_preceding_vi_for_offset (v, fieldoffset); |
1768 | } |
1769 | |
1770 | /* We have to include all fields that overlap the current field |
1771 | shifted by loff. */ |
1772 | do |
1773 | { |
1774 | if (v->may_have_pointers) |
1775 | { |
1776 | /* If v is a global variable then this is an escape point. */ |
1777 | if (v->is_global_var |
1778 | && !escaped_p) |
1779 | { |
1780 | t = find (node: escaped_id); |
1781 | if (add_graph_edge (graph, to: t, from: rhs) |
1782 | && bitmap_ior_into (get_varinfo (n: t)->solution, sol)) |
1783 | bitmap_set_bit (changed, t); |
1784 | /* Enough to let rhs escape once. */ |
1785 | escaped_p = true; |
1786 | } |
1787 | |
1788 | if (v->is_special_var) |
1789 | break; |
1790 | |
1791 | t = find (node: v->id); |
1792 | |
1793 | if (solve_add_graph_edge (graph, to: t, from: rhs)) |
1794 | bitmap_set_bit (changed, t); |
1795 | } |
1796 | |
1797 | if (v->is_full_var |
1798 | || v->next == 0) |
1799 | break; |
1800 | |
1801 | v = vi_next (vi: v); |
1802 | } |
1803 | while (v->offset < fieldoffset + size); |
1804 | } |
1805 | } |
1806 | |
1807 | /* Handle a non-simple (simple meaning requires no iteration), |
1808 | constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */ |
1809 | |
1810 | static void |
1811 | do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta, |
1812 | bitmap *expanded_delta) |
1813 | { |
1814 | if (c->lhs.type == DEREF) |
1815 | { |
1816 | if (c->rhs.type == ADDRESSOF) |
1817 | { |
1818 | gcc_unreachable (); |
1819 | } |
1820 | else |
1821 | { |
1822 | /* *x = y */ |
1823 | do_ds_constraint (c, delta, expanded_delta); |
1824 | } |
1825 | } |
1826 | else if (c->rhs.type == DEREF) |
1827 | { |
1828 | /* x = *y */ |
1829 | if (!(get_varinfo (n: c->lhs.var)->is_special_var)) |
1830 | do_sd_constraint (graph, c, delta, expanded_delta); |
1831 | } |
1832 | else |
1833 | { |
1834 | bitmap tmp; |
1835 | bool flag = false; |
1836 | |
1837 | gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR |
1838 | && c->rhs.offset != 0 && c->lhs.offset == 0); |
1839 | tmp = get_varinfo (n: c->lhs.var)->solution; |
1840 | |
1841 | flag = set_union_with_increment (to: tmp, delta, inc: c->rhs.offset, |
1842 | expanded_delta); |
1843 | |
1844 | if (flag) |
1845 | bitmap_set_bit (changed, c->lhs.var); |
1846 | } |
1847 | } |
1848 | |
1849 | /* Initialize and return a new SCC info structure. */ |
1850 | |
1851 | scc_info::scc_info (size_t size) : |
1852 | visited (size), deleted (size), current_index (0), scc_stack (1) |
1853 | { |
1854 | bitmap_clear (visited); |
1855 | bitmap_clear (deleted); |
1856 | node_mapping = XNEWVEC (unsigned int, size); |
1857 | dfs = XCNEWVEC (unsigned int, size); |
1858 | |
1859 | for (size_t i = 0; i < size; i++) |
1860 | node_mapping[i] = i; |
1861 | } |
1862 | |
1863 | /* Free an SCC info structure pointed to by SI */ |
1864 | |
1865 | scc_info::~scc_info () |
1866 | { |
1867 | free (ptr: node_mapping); |
1868 | free (ptr: dfs); |
1869 | } |
1870 | |
1871 | |
1872 | /* Find indirect cycles in GRAPH that occur, using strongly connected |
1873 | components, and note them in the indirect cycles map. |
1874 | |
1875 | This technique comes from Ben Hardekopf and Calvin Lin, |
1876 | "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of |
1877 | Lines of Code", submitted to PLDI 2007. */ |
1878 | |
1879 | static void |
1880 | find_indirect_cycles (constraint_graph_t graph) |
1881 | { |
1882 | unsigned int i; |
1883 | unsigned int size = graph->size; |
1884 | scc_info si (size); |
1885 | |
1886 | for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ ) |
1887 | if (!bitmap_bit_p (map: si.visited, bitno: i) && find (node: i) == i) |
1888 | scc_visit (graph, si: &si, n: i); |
1889 | } |
1890 | |
1891 | /* Visit the graph in topological order starting at node N, and store the |
1892 | order in TOPO_ORDER using VISITED to indicate visited nodes. */ |
1893 | |
1894 | static void |
1895 | topo_visit (constraint_graph_t graph, vec<unsigned> &topo_order, |
1896 | sbitmap visited, unsigned int n) |
1897 | { |
1898 | bitmap_iterator bi; |
1899 | unsigned int j; |
1900 | |
1901 | bitmap_set_bit (map: visited, bitno: n); |
1902 | |
1903 | if (graph->succs[n]) |
1904 | EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi) |
1905 | { |
1906 | unsigned k = find (node: j); |
1907 | if (!bitmap_bit_p (map: visited, bitno: k)) |
1908 | topo_visit (graph, topo_order, visited, n: k); |
1909 | } |
1910 | |
1911 | /* Also consider copy with offset complex constraints as implicit edges. */ |
1912 | for (auto c : graph->complex[n]) |
1913 | { |
1914 | /* Constraints are ordered so that SCALAR = SCALAR appear first. */ |
1915 | if (c->lhs.type != SCALAR || c->rhs.type != SCALAR) |
1916 | break; |
1917 | gcc_checking_assert (c->rhs.var == n); |
1918 | unsigned k = find (node: c->lhs.var); |
1919 | if (!bitmap_bit_p (map: visited, bitno: k)) |
1920 | topo_visit (graph, topo_order, visited, n: k); |
1921 | } |
1922 | |
1923 | topo_order.quick_push (obj: n); |
1924 | } |
1925 | |
1926 | /* Compute a topological ordering for GRAPH, and return the result. */ |
1927 | |
1928 | static auto_vec<unsigned> |
1929 | compute_topo_order (constraint_graph_t graph) |
1930 | { |
1931 | unsigned int i; |
1932 | unsigned int size = graph->size; |
1933 | |
1934 | auto_sbitmap visited (size); |
1935 | bitmap_clear (visited); |
1936 | |
1937 | /* For the heuristic in add_graph_edge to work optimally make sure to |
1938 | first visit the connected component of the graph containing |
1939 | ESCAPED. Do this by extracting the connected component |
1940 | with ESCAPED and append that to all other components as solve_graph |
1941 | pops from the order. */ |
1942 | auto_vec<unsigned> tail (size); |
1943 | topo_visit (graph, topo_order&: tail, visited, n: find (node: escaped_id)); |
1944 | |
1945 | auto_vec<unsigned> topo_order (size); |
1946 | |
1947 | for (i = 0; i != size; ++i) |
1948 | if (!bitmap_bit_p (map: visited, bitno: i) && find (node: i) == i) |
1949 | topo_visit (graph, topo_order, visited, n: i); |
1950 | |
1951 | topo_order.splice (src: tail); |
1952 | return topo_order; |
1953 | } |
1954 | |
1955 | /* Structure used to for hash value numbering of pointer equivalence |
1956 | classes. */ |
1957 | |
1958 | typedef struct equiv_class_label |
1959 | { |
1960 | hashval_t hashcode; |
1961 | unsigned int equivalence_class; |
1962 | bitmap labels; |
1963 | } *equiv_class_label_t; |
1964 | typedef const struct equiv_class_label *const_equiv_class_label_t; |
1965 | |
1966 | /* Equiv_class_label hashtable helpers. */ |
1967 | |
1968 | struct equiv_class_hasher : nofree_ptr_hash <equiv_class_label> |
1969 | { |
1970 | static inline hashval_t hash (const equiv_class_label *); |
1971 | static inline bool equal (const equiv_class_label *, |
1972 | const equiv_class_label *); |
1973 | }; |
1974 | |
1975 | /* Hash function for a equiv_class_label_t */ |
1976 | |
1977 | inline hashval_t |
1978 | equiv_class_hasher::hash (const equiv_class_label *ecl) |
1979 | { |
1980 | return ecl->hashcode; |
1981 | } |
1982 | |
1983 | /* Equality function for two equiv_class_label_t's. */ |
1984 | |
1985 | inline bool |
1986 | equiv_class_hasher::equal (const equiv_class_label *eql1, |
1987 | const equiv_class_label *eql2) |
1988 | { |
1989 | return (eql1->hashcode == eql2->hashcode |
1990 | && bitmap_equal_p (eql1->labels, eql2->labels)); |
1991 | } |
1992 | |
1993 | /* A hashtable for mapping a bitmap of labels->pointer equivalence |
1994 | classes. */ |
1995 | static hash_table<equiv_class_hasher> *pointer_equiv_class_table; |
1996 | |
1997 | /* A hashtable for mapping a bitmap of labels->location equivalence |
1998 | classes. */ |
1999 | static hash_table<equiv_class_hasher> *location_equiv_class_table; |
2000 | |
2001 | struct obstack equiv_class_obstack; |
2002 | |
2003 | /* Lookup a equivalence class in TABLE by the bitmap of LABELS with |
2004 | hash HAS it contains. Sets *REF_LABELS to the bitmap LABELS |
2005 | is equivalent to. */ |
2006 | |
2007 | static equiv_class_label * |
2008 | equiv_class_lookup_or_add (hash_table<equiv_class_hasher> *table, |
2009 | bitmap labels) |
2010 | { |
2011 | equiv_class_label **slot; |
2012 | equiv_class_label ecl; |
2013 | |
2014 | ecl.labels = labels; |
2015 | ecl.hashcode = bitmap_hash (labels); |
2016 | slot = table->find_slot (value: &ecl, insert: INSERT); |
2017 | if (!*slot) |
2018 | { |
2019 | *slot = XOBNEW (&equiv_class_obstack, struct equiv_class_label); |
2020 | (*slot)->labels = labels; |
2021 | (*slot)->hashcode = ecl.hashcode; |
2022 | (*slot)->equivalence_class = 0; |
2023 | } |
2024 | |
2025 | return *slot; |
2026 | } |
2027 | |
2028 | /* Perform offline variable substitution. |
2029 | |
2030 | This is a worst case quadratic time way of identifying variables |
2031 | that must have equivalent points-to sets, including those caused by |
2032 | static cycles, and single entry subgraphs, in the constraint graph. |
2033 | |
2034 | The technique is described in "Exploiting Pointer and Location |
2035 | Equivalence to Optimize Pointer Analysis. In the 14th International |
2036 | Static Analysis Symposium (SAS), August 2007." It is known as the |
2037 | "HU" algorithm, and is equivalent to value numbering the collapsed |
2038 | constraint graph including evaluating unions. |
2039 | |
2040 | The general method of finding equivalence classes is as follows: |
2041 | Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints. |
2042 | Initialize all non-REF nodes to be direct nodes. |
2043 | For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh |
2044 | variable} |
2045 | For each constraint containing the dereference, we also do the same |
2046 | thing. |
2047 | |
2048 | We then compute SCC's in the graph and unify nodes in the same SCC, |
2049 | including pts sets. |
2050 | |
2051 | For each non-collapsed node x: |
2052 | Visit all unvisited explicit incoming edges. |
2053 | Ignoring all non-pointers, set pts(x) = Union of pts(a) for y |
2054 | where y->x. |
2055 | Lookup the equivalence class for pts(x). |
2056 | If we found one, equivalence_class(x) = found class. |
2057 | Otherwise, equivalence_class(x) = new class, and new_class is |
2058 | added to the lookup table. |
2059 | |
2060 | All direct nodes with the same equivalence class can be replaced |
2061 | with a single representative node. |
2062 | All unlabeled nodes (label == 0) are not pointers and all edges |
2063 | involving them can be eliminated. |
2064 | We perform these optimizations during rewrite_constraints |
2065 | |
2066 | In addition to pointer equivalence class finding, we also perform |
2067 | location equivalence class finding. This is the set of variables |
2068 | that always appear together in points-to sets. We use this to |
2069 | compress the size of the points-to sets. */ |
2070 | |
2071 | /* Current maximum pointer equivalence class id. */ |
2072 | static int pointer_equiv_class; |
2073 | |
2074 | /* Current maximum location equivalence class id. */ |
2075 | static int location_equiv_class; |
2076 | |
2077 | /* Recursive routine to find strongly connected components in GRAPH, |
2078 | and label it's nodes with DFS numbers. */ |
2079 | |
2080 | static void |
2081 | condense_visit (constraint_graph_t graph, class scc_info *si, unsigned int n) |
2082 | { |
2083 | unsigned int i; |
2084 | bitmap_iterator bi; |
2085 | unsigned int my_dfs; |
2086 | |
2087 | gcc_checking_assert (si->node_mapping[n] == n); |
2088 | bitmap_set_bit (map: si->visited, bitno: n); |
2089 | si->dfs[n] = si->current_index ++; |
2090 | my_dfs = si->dfs[n]; |
2091 | |
2092 | /* Visit all the successors. */ |
2093 | EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi) |
2094 | { |
2095 | unsigned int w = si->node_mapping[i]; |
2096 | |
2097 | if (bitmap_bit_p (map: si->deleted, bitno: w)) |
2098 | continue; |
2099 | |
2100 | if (!bitmap_bit_p (map: si->visited, bitno: w)) |
2101 | condense_visit (graph, si, n: w); |
2102 | |
2103 | unsigned int t = si->node_mapping[w]; |
2104 | gcc_checking_assert (si->node_mapping[n] == n); |
2105 | if (si->dfs[t] < si->dfs[n]) |
2106 | si->dfs[n] = si->dfs[t]; |
2107 | } |
2108 | |
2109 | /* Visit all the implicit predecessors. */ |
2110 | EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi) |
2111 | { |
2112 | unsigned int w = si->node_mapping[i]; |
2113 | |
2114 | if (bitmap_bit_p (map: si->deleted, bitno: w)) |
2115 | continue; |
2116 | |
2117 | if (!bitmap_bit_p (map: si->visited, bitno: w)) |
2118 | condense_visit (graph, si, n: w); |
2119 | |
2120 | unsigned int t = si->node_mapping[w]; |
2121 | gcc_assert (si->node_mapping[n] == n); |
2122 | if (si->dfs[t] < si->dfs[n]) |
2123 | si->dfs[n] = si->dfs[t]; |
2124 | } |
2125 | |
2126 | /* See if any components have been identified. */ |
2127 | if (si->dfs[n] == my_dfs) |
2128 | { |
2129 | if (si->scc_stack.length () != 0 |
2130 | && si->dfs[si->scc_stack.last ()] >= my_dfs) |
2131 | { |
2132 | /* Find the first node of the SCC and do non-bitmap work. */ |
2133 | bool direct_p = true; |
2134 | unsigned first = si->scc_stack.length (); |
2135 | do |
2136 | { |
2137 | --first; |
2138 | unsigned int w = si->scc_stack[first]; |
2139 | si->node_mapping[w] = n; |
2140 | if (!bitmap_bit_p (map: graph->direct_nodes, bitno: w)) |
2141 | direct_p = false; |
2142 | } |
2143 | while (first > 0 |
2144 | && si->dfs[si->scc_stack[first - 1]] >= my_dfs); |
2145 | if (!direct_p) |
2146 | bitmap_clear_bit (map: graph->direct_nodes, bitno: n); |
2147 | |
2148 | /* Want to reduce to node n, push that first. */ |
2149 | si->scc_stack.reserve (nelems: 1); |
2150 | si->scc_stack.quick_push (obj: si->scc_stack[first]); |
2151 | si->scc_stack[first] = n; |
2152 | |
2153 | unsigned scc_size = si->scc_stack.length () - first; |
2154 | unsigned split = scc_size / 2; |
2155 | unsigned carry = scc_size - split * 2; |
2156 | while (split > 0) |
2157 | { |
2158 | for (unsigned i = 0; i < split; ++i) |
2159 | { |
2160 | unsigned a = si->scc_stack[first + i]; |
2161 | unsigned b = si->scc_stack[first + split + carry + i]; |
2162 | |
2163 | /* Unify our nodes. */ |
2164 | if (graph->preds[b]) |
2165 | { |
2166 | if (!graph->preds[a]) |
2167 | std::swap (a&: graph->preds[a], b&: graph->preds[b]); |
2168 | else |
2169 | bitmap_ior_into_and_free (graph->preds[a], |
2170 | &graph->preds[b]); |
2171 | } |
2172 | if (graph->implicit_preds[b]) |
2173 | { |
2174 | if (!graph->implicit_preds[a]) |
2175 | std::swap (a&: graph->implicit_preds[a], |
2176 | b&: graph->implicit_preds[b]); |
2177 | else |
2178 | bitmap_ior_into_and_free (graph->implicit_preds[a], |
2179 | &graph->implicit_preds[b]); |
2180 | } |
2181 | if (graph->points_to[b]) |
2182 | { |
2183 | if (!graph->points_to[a]) |
2184 | std::swap (a&: graph->points_to[a], b&: graph->points_to[b]); |
2185 | else |
2186 | bitmap_ior_into_and_free (graph->points_to[a], |
2187 | &graph->points_to[b]); |
2188 | } |
2189 | } |
2190 | unsigned remain = split + carry; |
2191 | split = remain / 2; |
2192 | carry = remain - split * 2; |
2193 | } |
2194 | /* Actually pop the SCC. */ |
2195 | si->scc_stack.truncate (size: first); |
2196 | } |
2197 | bitmap_set_bit (map: si->deleted, bitno: n); |
2198 | } |
2199 | else |
2200 | si->scc_stack.safe_push (obj: n); |
2201 | } |
2202 | |
2203 | /* Label pointer equivalences. |
2204 | |
2205 | This performs a value numbering of the constraint graph to |
2206 | discover which variables will always have the same points-to sets |
2207 | under the current set of constraints. |
2208 | |
2209 | The way it value numbers is to store the set of points-to bits |
2210 | generated by the constraints and graph edges. This is just used as a |
2211 | hash and equality comparison. The *actual set of points-to bits* is |
2212 | completely irrelevant, in that we don't care about being able to |
2213 | extract them later. |
2214 | |
2215 | The equality values (currently bitmaps) just have to satisfy a few |
2216 | constraints, the main ones being: |
2217 | 1. The combining operation must be order independent. |
2218 | 2. The end result of a given set of operations must be unique iff the |
2219 | combination of input values is unique |
2220 | 3. Hashable. */ |
2221 | |
2222 | static void |
2223 | label_visit (constraint_graph_t graph, class scc_info *si, unsigned int n) |
2224 | { |
2225 | unsigned int i, first_pred; |
2226 | bitmap_iterator bi; |
2227 | |
2228 | bitmap_set_bit (map: si->visited, bitno: n); |
2229 | |
2230 | /* Label and union our incoming edges's points to sets. */ |
2231 | first_pred = -1U; |
2232 | EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi) |
2233 | { |
2234 | unsigned int w = si->node_mapping[i]; |
2235 | if (!bitmap_bit_p (map: si->visited, bitno: w)) |
2236 | label_visit (graph, si, n: w); |
2237 | |
2238 | /* Skip unused edges */ |
2239 | if (w == n || graph->pointer_label[w] == 0) |
2240 | continue; |
2241 | |
2242 | if (graph->points_to[w]) |
2243 | { |
2244 | if (!graph->points_to[n]) |
2245 | { |
2246 | if (first_pred == -1U) |
2247 | first_pred = w; |
2248 | else |
2249 | { |
2250 | graph->points_to[n] = BITMAP_ALLOC (obstack: &predbitmap_obstack); |
2251 | bitmap_ior (graph->points_to[n], |
2252 | graph->points_to[first_pred], |
2253 | graph->points_to[w]); |
2254 | } |
2255 | } |
2256 | else |
2257 | bitmap_ior_into (graph->points_to[n], graph->points_to[w]); |
2258 | } |
2259 | } |
2260 | |
2261 | /* Indirect nodes get fresh variables and a new pointer equiv class. */ |
2262 | if (!bitmap_bit_p (map: graph->direct_nodes, bitno: n)) |
2263 | { |
2264 | if (!graph->points_to[n]) |
2265 | { |
2266 | graph->points_to[n] = BITMAP_ALLOC (obstack: &predbitmap_obstack); |
2267 | if (first_pred != -1U) |
2268 | bitmap_copy (graph->points_to[n], graph->points_to[first_pred]); |
2269 | } |
2270 | bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n); |
2271 | graph->pointer_label[n] = pointer_equiv_class++; |
2272 | equiv_class_label_t ecl; |
2273 | ecl = equiv_class_lookup_or_add (table: pointer_equiv_class_table, |
2274 | labels: graph->points_to[n]); |
2275 | ecl->equivalence_class = graph->pointer_label[n]; |
2276 | return; |
2277 | } |
2278 | |
2279 | /* If there was only a single non-empty predecessor the pointer equiv |
2280 | class is the same. */ |
2281 | if (!graph->points_to[n]) |
2282 | { |
2283 | if (first_pred != -1U) |
2284 | { |
2285 | graph->pointer_label[n] = graph->pointer_label[first_pred]; |
2286 | graph->points_to[n] = graph->points_to[first_pred]; |
2287 | } |
2288 | return; |
2289 | } |
2290 | |
2291 | if (!bitmap_empty_p (map: graph->points_to[n])) |
2292 | { |
2293 | equiv_class_label_t ecl; |
2294 | ecl = equiv_class_lookup_or_add (table: pointer_equiv_class_table, |
2295 | labels: graph->points_to[n]); |
2296 | if (ecl->equivalence_class == 0) |
2297 | ecl->equivalence_class = pointer_equiv_class++; |
2298 | else |
2299 | { |
2300 | BITMAP_FREE (graph->points_to[n]); |
2301 | graph->points_to[n] = ecl->labels; |
2302 | } |
2303 | graph->pointer_label[n] = ecl->equivalence_class; |
2304 | } |
2305 | } |
2306 | |
2307 | /* Print the pred graph in dot format. */ |
2308 | |
2309 | static void |
2310 | dump_pred_graph (class scc_info *si, FILE *file) |
2311 | { |
2312 | unsigned int i; |
2313 | |
2314 | /* Only print the graph if it has already been initialized: */ |
2315 | if (!graph) |
2316 | return; |
2317 | |
2318 | /* Prints the header of the dot file: */ |
2319 | fprintf (stream: file, format: "strict digraph {\n" ); |
2320 | fprintf (stream: file, format: " node [\n shape = box\n ]\n" ); |
2321 | fprintf (stream: file, format: " edge [\n fontsize = \"12\"\n ]\n" ); |
2322 | fprintf (stream: file, format: "\n // List of nodes and complex constraints in " |
2323 | "the constraint graph:\n" ); |
2324 | |
2325 | /* The next lines print the nodes in the graph together with the |
2326 | complex constraints attached to them. */ |
2327 | for (i = 1; i < graph->size; i++) |
2328 | { |
2329 | if (i == FIRST_REF_NODE) |
2330 | continue; |
2331 | if (si->node_mapping[i] != i) |
2332 | continue; |
2333 | if (i < FIRST_REF_NODE) |
2334 | fprintf (stream: file, format: "\"%s\"" , get_varinfo (n: i)->name); |
2335 | else |
2336 | fprintf (stream: file, format: "\"*%s\"" , get_varinfo (n: i - FIRST_REF_NODE)->name); |
2337 | if (graph->points_to[i] |
2338 | && !bitmap_empty_p (map: graph->points_to[i])) |
2339 | { |
2340 | if (i < FIRST_REF_NODE) |
2341 | fprintf (stream: file, format: "[label=\"%s = {" , get_varinfo (n: i)->name); |
2342 | else |
2343 | fprintf (stream: file, format: "[label=\"*%s = {" , |
2344 | get_varinfo (n: i - FIRST_REF_NODE)->name); |
2345 | unsigned j; |
2346 | bitmap_iterator bi; |
2347 | EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi) |
2348 | fprintf (stream: file, format: " %d" , j); |
2349 | fprintf (stream: file, format: " }\"]" ); |
2350 | } |
2351 | fprintf (stream: file, format: ";\n" ); |
2352 | } |
2353 | |
2354 | /* Go over the edges. */ |
2355 | fprintf (stream: file, format: "\n // Edges in the constraint graph:\n" ); |
2356 | for (i = 1; i < graph->size; i++) |
2357 | { |
2358 | unsigned j; |
2359 | bitmap_iterator bi; |
2360 | if (si->node_mapping[i] != i) |
2361 | continue; |
2362 | EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi) |
2363 | { |
2364 | unsigned from = si->node_mapping[j]; |
2365 | if (from < FIRST_REF_NODE) |
2366 | fprintf (stream: file, format: "\"%s\"" , get_varinfo (n: from)->name); |
2367 | else |
2368 | fprintf (stream: file, format: "\"*%s\"" , get_varinfo (n: from - FIRST_REF_NODE)->name); |
2369 | fprintf (stream: file, format: " -> " ); |
2370 | if (i < FIRST_REF_NODE) |
2371 | fprintf (stream: file, format: "\"%s\"" , get_varinfo (n: i)->name); |
2372 | else |
2373 | fprintf (stream: file, format: "\"*%s\"" , get_varinfo (n: i - FIRST_REF_NODE)->name); |
2374 | fprintf (stream: file, format: ";\n" ); |
2375 | } |
2376 | } |
2377 | |
2378 | /* Prints the tail of the dot file. */ |
2379 | fprintf (stream: file, format: "}\n" ); |
2380 | } |
2381 | |
2382 | /* Perform offline variable substitution, discovering equivalence |
2383 | classes, and eliminating non-pointer variables. */ |
2384 | |
2385 | static class scc_info * |
2386 | perform_var_substitution (constraint_graph_t graph) |
2387 | { |
2388 | unsigned int i; |
2389 | unsigned int size = graph->size; |
2390 | scc_info *si = new scc_info (size); |
2391 | |
2392 | bitmap_obstack_initialize (&iteration_obstack); |
2393 | gcc_obstack_init (&equiv_class_obstack); |
2394 | pointer_equiv_class_table = new hash_table<equiv_class_hasher> (511); |
2395 | location_equiv_class_table |
2396 | = new hash_table<equiv_class_hasher> (511); |
2397 | pointer_equiv_class = 1; |
2398 | location_equiv_class = 1; |
2399 | |
2400 | /* Condense the nodes, which means to find SCC's, count incoming |
2401 | predecessors, and unite nodes in SCC's. */ |
2402 | for (i = 1; i < FIRST_REF_NODE; i++) |
2403 | if (!bitmap_bit_p (map: si->visited, bitno: si->node_mapping[i])) |
2404 | condense_visit (graph, si, n: si->node_mapping[i]); |
2405 | |
2406 | if (dump_file && (dump_flags & TDF_GRAPH)) |
2407 | { |
2408 | fprintf (stream: dump_file, format: "\n\n// The constraint graph before var-substitution " |
2409 | "in dot format:\n" ); |
2410 | dump_pred_graph (si, file: dump_file); |
2411 | fprintf (stream: dump_file, format: "\n\n" ); |
2412 | } |
2413 | |
2414 | bitmap_clear (si->visited); |
2415 | /* Actually the label the nodes for pointer equivalences */ |
2416 | for (i = 1; i < FIRST_REF_NODE; i++) |
2417 | if (!bitmap_bit_p (map: si->visited, bitno: si->node_mapping[i])) |
2418 | label_visit (graph, si, n: si->node_mapping[i]); |
2419 | |
2420 | /* Calculate location equivalence labels. */ |
2421 | for (i = 1; i < FIRST_REF_NODE; i++) |
2422 | { |
2423 | bitmap pointed_by; |
2424 | bitmap_iterator bi; |
2425 | unsigned int j; |
2426 | |
2427 | if (!graph->pointed_by[i]) |
2428 | continue; |
2429 | pointed_by = BITMAP_ALLOC (obstack: &iteration_obstack); |
2430 | |
2431 | /* Translate the pointed-by mapping for pointer equivalence |
2432 | labels. */ |
2433 | EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi) |
2434 | { |
2435 | bitmap_set_bit (pointed_by, |
2436 | graph->pointer_label[si->node_mapping[j]]); |
2437 | } |
2438 | /* The original pointed_by is now dead. */ |
2439 | BITMAP_FREE (graph->pointed_by[i]); |
2440 | |
2441 | /* Look up the location equivalence label if one exists, or make |
2442 | one otherwise. */ |
2443 | equiv_class_label_t ecl; |
2444 | ecl = equiv_class_lookup_or_add (table: location_equiv_class_table, labels: pointed_by); |
2445 | if (ecl->equivalence_class == 0) |
2446 | ecl->equivalence_class = location_equiv_class++; |
2447 | else |
2448 | { |
2449 | if (dump_file && (dump_flags & TDF_DETAILS)) |
2450 | fprintf (stream: dump_file, format: "Found location equivalence for node %s\n" , |
2451 | get_varinfo (n: i)->name); |
2452 | BITMAP_FREE (pointed_by); |
2453 | } |
2454 | graph->loc_label[i] = ecl->equivalence_class; |
2455 | |
2456 | } |
2457 | |
2458 | if (dump_file && (dump_flags & TDF_DETAILS)) |
2459 | for (i = 1; i < FIRST_REF_NODE; i++) |
2460 | { |
2461 | unsigned j = si->node_mapping[i]; |
2462 | if (j != i) |
2463 | { |
2464 | fprintf (stream: dump_file, format: "%s node id %d " , |
2465 | bitmap_bit_p (map: graph->direct_nodes, bitno: i) |
2466 | ? "Direct" : "Indirect" , i); |
2467 | if (i < FIRST_REF_NODE) |
2468 | fprintf (stream: dump_file, format: "\"%s\"" , get_varinfo (n: i)->name); |
2469 | else |
2470 | fprintf (stream: dump_file, format: "\"*%s\"" , |
2471 | get_varinfo (n: i - FIRST_REF_NODE)->name); |
2472 | fprintf (stream: dump_file, format: " mapped to SCC leader node id %d " , j); |
2473 | if (j < FIRST_REF_NODE) |
2474 | fprintf (stream: dump_file, format: "\"%s\"\n" , get_varinfo (n: j)->name); |
2475 | else |
2476 | fprintf (stream: dump_file, format: "\"*%s\"\n" , |
2477 | get_varinfo (n: j - FIRST_REF_NODE)->name); |
2478 | } |
2479 | else |
2480 | { |
2481 | fprintf (stream: dump_file, |
2482 | format: "Equivalence classes for %s node id %d " , |
2483 | bitmap_bit_p (map: graph->direct_nodes, bitno: i) |
2484 | ? "direct" : "indirect" , i); |
2485 | if (i < FIRST_REF_NODE) |
2486 | fprintf (stream: dump_file, format: "\"%s\"" , get_varinfo (n: i)->name); |
2487 | else |
2488 | fprintf (stream: dump_file, format: "\"*%s\"" , |
2489 | get_varinfo (n: i - FIRST_REF_NODE)->name); |
2490 | fprintf (stream: dump_file, |
2491 | format: ": pointer %d, location %d\n" , |
2492 | graph->pointer_label[i], graph->loc_label[i]); |
2493 | } |
2494 | } |
2495 | |
2496 | /* Quickly eliminate our non-pointer variables. */ |
2497 | |
2498 | for (i = 1; i < FIRST_REF_NODE; i++) |
2499 | { |
2500 | unsigned int node = si->node_mapping[i]; |
2501 | |
2502 | if (graph->pointer_label[node] == 0) |
2503 | { |
2504 | if (dump_file && (dump_flags & TDF_DETAILS)) |
2505 | fprintf (stream: dump_file, |
2506 | format: "%s is a non-pointer variable, eliminating edges.\n" , |
2507 | get_varinfo (n: node)->name); |
2508 | stats.nonpointer_vars++; |
2509 | clear_edges_for_node (graph, node); |
2510 | } |
2511 | } |
2512 | |
2513 | return si; |
2514 | } |
2515 | |
2516 | /* Free information that was only necessary for variable |
2517 | substitution. */ |
2518 | |
2519 | static void |
2520 | free_var_substitution_info (class scc_info *si) |
2521 | { |
2522 | delete si; |
2523 | free (ptr: graph->pointer_label); |
2524 | free (ptr: graph->loc_label); |
2525 | free (ptr: graph->pointed_by); |
2526 | free (ptr: graph->points_to); |
2527 | free (ptr: graph->eq_rep); |
2528 | sbitmap_free (map: graph->direct_nodes); |
2529 | delete pointer_equiv_class_table; |
2530 | pointer_equiv_class_table = NULL; |
2531 | delete location_equiv_class_table; |
2532 | location_equiv_class_table = NULL; |
2533 | obstack_free (&equiv_class_obstack, NULL); |
2534 | bitmap_obstack_release (&iteration_obstack); |
2535 | } |
2536 | |
2537 | /* Return an existing node that is equivalent to NODE, which has |
2538 | equivalence class LABEL, if one exists. Return NODE otherwise. */ |
2539 | |
2540 | static unsigned int |
2541 | find_equivalent_node (constraint_graph_t graph, |
2542 | unsigned int node, unsigned int label) |
2543 | { |
2544 | /* If the address version of this variable is unused, we can |
2545 | substitute it for anything else with the same label. |
2546 | Otherwise, we know the pointers are equivalent, but not the |
2547 | locations, and we can unite them later. */ |
2548 | |
2549 | if (!bitmap_bit_p (graph->address_taken, node)) |
2550 | { |
2551 | gcc_checking_assert (label < graph->size); |
2552 | |
2553 | if (graph->eq_rep[label] != -1) |
2554 | { |
2555 | /* Unify the two variables since we know they are equivalent. */ |
2556 | if (unite (to: graph->eq_rep[label], from: node)) |
2557 | unify_nodes (graph, to: graph->eq_rep[label], from: node, update_changed: false); |
2558 | return graph->eq_rep[label]; |
2559 | } |
2560 | else |
2561 | { |
2562 | graph->eq_rep[label] = node; |
2563 | graph->pe_rep[label] = node; |
2564 | } |
2565 | } |
2566 | else |
2567 | { |
2568 | gcc_checking_assert (label < graph->size); |
2569 | graph->pe[node] = label; |
2570 | if (graph->pe_rep[label] == -1) |
2571 | graph->pe_rep[label] = node; |
2572 | } |
2573 | |
2574 | return node; |
2575 | } |
2576 | |
2577 | /* Unite pointer equivalent but not location equivalent nodes in |
2578 | GRAPH. This may only be performed once variable substitution is |
2579 | finished. */ |
2580 | |
2581 | static void |
2582 | unite_pointer_equivalences (constraint_graph_t graph) |
2583 | { |
2584 | unsigned int i; |
2585 | |
2586 | /* Go through the pointer equivalences and unite them to their |
2587 | representative, if they aren't already. */ |
2588 | for (i = 1; i < FIRST_REF_NODE; i++) |
2589 | { |
2590 | unsigned int label = graph->pe[i]; |
2591 | if (label) |
2592 | { |
2593 | int label_rep = graph->pe_rep[label]; |
2594 | |
2595 | if (label_rep == -1) |
2596 | continue; |
2597 | |
2598 | label_rep = find (node: label_rep); |
2599 | if (label_rep >= 0 && unite (to: label_rep, from: find (node: i))) |
2600 | unify_nodes (graph, to: label_rep, from: i, update_changed: false); |
2601 | } |
2602 | } |
2603 | } |
2604 | |
2605 | /* Move complex constraints to the GRAPH nodes they belong to. */ |
2606 | |
2607 | static void |
2608 | move_complex_constraints (constraint_graph_t graph) |
2609 | { |
2610 | int i; |
2611 | constraint_t c; |
2612 | |
2613 | FOR_EACH_VEC_ELT (constraints, i, c) |
2614 | { |
2615 | if (c) |
2616 | { |
2617 | struct constraint_expr lhs = c->lhs; |
2618 | struct constraint_expr rhs = c->rhs; |
2619 | |
2620 | if (lhs.type == DEREF) |
2621 | { |
2622 | insert_into_complex (graph, var: lhs.var, c); |
2623 | } |
2624 | else if (rhs.type == DEREF) |
2625 | { |
2626 | if (!(get_varinfo (n: lhs.var)->is_special_var)) |
2627 | insert_into_complex (graph, var: rhs.var, c); |
2628 | } |
2629 | else if (rhs.type != ADDRESSOF && lhs.var > anything_id |
2630 | && (lhs.offset != 0 || rhs.offset != 0)) |
2631 | { |
2632 | insert_into_complex (graph, var: rhs.var, c); |
2633 | } |
2634 | } |
2635 | } |
2636 | } |
2637 | |
2638 | |
2639 | /* Optimize and rewrite complex constraints while performing |
2640 | collapsing of equivalent nodes. SI is the SCC_INFO that is the |
2641 | result of perform_variable_substitution. */ |
2642 | |
2643 | static void |
2644 | rewrite_constraints (constraint_graph_t graph, |
2645 | class scc_info *si) |
2646 | { |
2647 | int i; |
2648 | constraint_t c; |
2649 | |
2650 | if (flag_checking) |
2651 | { |
2652 | for (unsigned int j = 0; j < graph->size; j++) |
2653 | gcc_assert (find (j) == j); |
2654 | } |
2655 | |
2656 | FOR_EACH_VEC_ELT (constraints, i, c) |
2657 | { |
2658 | struct constraint_expr lhs = c->lhs; |
2659 | struct constraint_expr rhs = c->rhs; |
2660 | unsigned int lhsvar = find (node: lhs.var); |
2661 | unsigned int rhsvar = find (node: rhs.var); |
2662 | unsigned int lhsnode, rhsnode; |
2663 | unsigned int lhslabel, rhslabel; |
2664 | |
2665 | lhsnode = si->node_mapping[lhsvar]; |
2666 | rhsnode = si->node_mapping[rhsvar]; |
2667 | lhslabel = graph->pointer_label[lhsnode]; |
2668 | rhslabel = graph->pointer_label[rhsnode]; |
2669 | |
2670 | /* See if it is really a non-pointer variable, and if so, ignore |
2671 | the constraint. */ |
2672 | if (lhslabel == 0) |
2673 | { |
2674 | if (dump_file && (dump_flags & TDF_DETAILS)) |
2675 | { |
2676 | |
2677 | fprintf (stream: dump_file, format: "%s is a non-pointer variable, " |
2678 | "ignoring constraint:" , |
2679 | get_varinfo (n: lhs.var)->name); |
2680 | dump_constraint (file: dump_file, c); |
2681 | fprintf (stream: dump_file, format: "\n" ); |
2682 | } |
2683 | constraints[i] = NULL; |
2684 | continue; |
2685 | } |
2686 | |
2687 | if (rhslabel == 0) |
2688 | { |
2689 | if (dump_file && (dump_flags & TDF_DETAILS)) |
2690 | { |
2691 | |
2692 | fprintf (stream: dump_file, format: "%s is a non-pointer variable, " |
2693 | "ignoring constraint:" , |
2694 | get_varinfo (n: rhs.var)->name); |
2695 | dump_constraint (file: dump_file, c); |
2696 | fprintf (stream: dump_file, format: "\n" ); |
2697 | } |
2698 | constraints[i] = NULL; |
2699 | continue; |
2700 | } |
2701 | |
2702 | lhsvar = find_equivalent_node (graph, node: lhsvar, label: lhslabel); |
2703 | rhsvar = find_equivalent_node (graph, node: rhsvar, label: rhslabel); |
2704 | c->lhs.var = lhsvar; |
2705 | c->rhs.var = rhsvar; |
2706 | } |
2707 | } |
2708 | |
2709 | /* Eliminate indirect cycles involving NODE. Return true if NODE was |
2710 | part of an SCC, false otherwise. */ |
2711 | |
2712 | static bool |
2713 | eliminate_indirect_cycles (unsigned int node) |
2714 | { |
2715 | if (graph->indirect_cycles[node] != -1 |
2716 | && !bitmap_empty_p (map: get_varinfo (n: node)->solution)) |
2717 | { |
2718 | unsigned int i; |
2719 | auto_vec<unsigned> queue; |
2720 | int queuepos; |
2721 | unsigned int to = find (node: graph->indirect_cycles[node]); |
2722 | bitmap_iterator bi; |
2723 | |
2724 | /* We can't touch the solution set and call unify_nodes |
2725 | at the same time, because unify_nodes is going to do |
2726 | bitmap unions into it. */ |
2727 | |
2728 | EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi) |
2729 | { |
2730 | if (find (node: i) == i && i != to) |
2731 | { |
2732 | if (unite (to, from: i)) |
2733 | queue.safe_push (obj: i); |
2734 | } |
2735 | } |
2736 | |
2737 | for (queuepos = 0; |
2738 | queue.iterate (ix: queuepos, ptr: &i); |
2739 | queuepos++) |
2740 | { |
2741 | unify_nodes (graph, to, from: i, update_changed: true); |
2742 | } |
2743 | return true; |
2744 | } |
2745 | return false; |
2746 | } |
2747 | |
2748 | /* Solve the constraint graph GRAPH using our worklist solver. |
2749 | This is based on the PW* family of solvers from the "Efficient Field |
2750 | Sensitive Pointer Analysis for C" paper. |
2751 | It works by iterating over all the graph nodes, processing the complex |
2752 | constraints and propagating the copy constraints, until everything stops |
2753 | changed. This corresponds to steps 6-8 in the solving list given above. */ |
2754 | |
2755 | static void |
2756 | solve_graph (constraint_graph_t graph) |
2757 | { |
2758 | unsigned int size = graph->size; |
2759 | unsigned int i; |
2760 | bitmap pts; |
2761 | |
2762 | changed = BITMAP_ALLOC (NULL); |
2763 | |
2764 | /* Mark all initial non-collapsed nodes as changed. */ |
2765 | for (i = 1; i < size; i++) |
2766 | { |
2767 | varinfo_t ivi = get_varinfo (n: i); |
2768 | if (find (node: i) == i && !bitmap_empty_p (map: ivi->solution) |
2769 | && ((graph->succs[i] && !bitmap_empty_p (map: graph->succs[i])) |
2770 | || graph->complex[i].length () > 0)) |
2771 | bitmap_set_bit (changed, i); |
2772 | } |
2773 | |
2774 | /* Allocate a bitmap to be used to store the changed bits. */ |
2775 | pts = BITMAP_ALLOC (obstack: &pta_obstack); |
2776 | |
2777 | while (!bitmap_empty_p (map: changed)) |
2778 | { |
2779 | unsigned int i; |
2780 | stats.iterations++; |
2781 | |
2782 | bitmap_obstack_initialize (&iteration_obstack); |
2783 | |
2784 | auto_vec<unsigned> topo_order = compute_topo_order (graph); |
2785 | while (topo_order.length () != 0) |
2786 | { |
2787 | i = topo_order.pop (); |
2788 | |
2789 | /* If this variable is not a representative, skip it. */ |
2790 | if (find (node: i) != i) |
2791 | continue; |
2792 | |
2793 | /* In certain indirect cycle cases, we may merge this |
2794 | variable to another. */ |
2795 | if (eliminate_indirect_cycles (node: i) && find (node: i) != i) |
2796 | continue; |
2797 | |
2798 | /* If the node has changed, we need to process the |
2799 | complex constraints and outgoing edges again. For complex |
2800 | constraints that modify i itself, like the common group of |
2801 | callarg = callarg + UNKNOWN; |
2802 | callarg = *callarg + UNKNOWN; |
2803 | *callarg = callescape; |
2804 | make sure to iterate immediately because that maximizes |
2805 | cache reuse and expands the graph quickest, leading to |
2806 | better visitation order in the next iteration. */ |
2807 | while (bitmap_clear_bit (changed, i)) |
2808 | { |
2809 | bitmap solution; |
2810 | vec<constraint_t> &complex = graph->complex[i]; |
2811 | varinfo_t vi = get_varinfo (n: i); |
2812 | bool solution_empty; |
2813 | |
2814 | /* Compute the changed set of solution bits. If anything |
2815 | is in the solution just propagate that. */ |
2816 | if (bitmap_bit_p (vi->solution, anything_id)) |
2817 | { |
2818 | /* If anything is also in the old solution there is |
2819 | nothing to do. |
2820 | ??? But we shouldn't ended up with "changed" set ... */ |
2821 | if (vi->oldsolution |
2822 | && bitmap_bit_p (vi->oldsolution, anything_id)) |
2823 | break; |
2824 | bitmap_copy (pts, get_varinfo (n: find (node: anything_id))->solution); |
2825 | } |
2826 | else if (vi->oldsolution) |
2827 | bitmap_and_compl (pts, vi->solution, vi->oldsolution); |
2828 | else |
2829 | bitmap_copy (pts, vi->solution); |
2830 | |
2831 | if (bitmap_empty_p (map: pts)) |
2832 | break; |
2833 | |
2834 | if (vi->oldsolution) |
2835 | bitmap_ior_into (vi->oldsolution, pts); |
2836 | else |
2837 | { |
2838 | vi->oldsolution = BITMAP_ALLOC (obstack: &oldpta_obstack); |
2839 | bitmap_copy (vi->oldsolution, pts); |
2840 | } |
2841 | |
2842 | solution = vi->solution; |
2843 | solution_empty = bitmap_empty_p (map: solution); |
2844 | |
2845 | /* Process the complex constraints */ |
2846 | hash_set<constraint_t> *cvisited = nullptr; |
2847 | if (flag_checking) |
2848 | cvisited = new hash_set<constraint_t>; |
2849 | bitmap expanded_pts = NULL; |
2850 | for (unsigned j = 0; j < complex.length (); ++j) |
2851 | { |
2852 | constraint_t c = complex[j]; |
2853 | /* At unification time only the directly involved nodes |
2854 | will get their complex constraints updated. Update |
2855 | our complex constraints now but keep the constraint |
2856 | vector sorted and clear of duplicates. Also make |
2857 | sure to evaluate each prevailing constraint only once. */ |
2858 | unsigned int new_lhs = find (node: c->lhs.var); |
2859 | unsigned int new_rhs = find (node: c->rhs.var); |
2860 | if (c->lhs.var != new_lhs || c->rhs.var != new_rhs) |
2861 | { |
2862 | constraint tem = *c; |
2863 | tem.lhs.var = new_lhs; |
2864 | tem.rhs.var = new_rhs; |
2865 | unsigned int place |
2866 | = complex.lower_bound (obj: &tem, lessthan: constraint_less); |
2867 | c->lhs.var = new_lhs; |
2868 | c->rhs.var = new_rhs; |
2869 | if (place != j) |
2870 | { |
2871 | complex.ordered_remove (ix: j); |
2872 | if (j < place) |
2873 | --place; |
2874 | if (place < complex.length ()) |
2875 | { |
2876 | if (constraint_equal (a: *complex[place], b: *c)) |
2877 | { |
2878 | j--; |
2879 | continue; |
2880 | } |
2881 | else |
2882 | complex.safe_insert (ix: place, obj: c); |
2883 | } |
2884 | else |
2885 | complex.quick_push (obj: c); |
2886 | if (place > j) |
2887 | { |
2888 | j--; |
2889 | continue; |
2890 | } |
2891 | } |
2892 | } |
2893 | |
2894 | /* The only complex constraint that can change our |
2895 | solution to non-empty, given an empty solution, |
2896 | is a constraint where the lhs side is receiving |
2897 | some set from elsewhere. */ |
2898 | if (cvisited && cvisited->add (k: c)) |
2899 | gcc_unreachable (); |
2900 | if (!solution_empty || c->lhs.type != DEREF) |
2901 | do_complex_constraint (graph, c, delta: pts, expanded_delta: &expanded_pts); |
2902 | } |
2903 | if (cvisited) |
2904 | { |
2905 | /* When checking, verify the order of constraints is |
2906 | maintained and each constraint is evaluated exactly |
2907 | once. */ |
2908 | for (unsigned j = 1; j < complex.length (); ++j) |
2909 | gcc_assert (constraint_less (complex[j-1], complex[j])); |
2910 | gcc_assert (cvisited->elements () == complex.length ()); |
2911 | delete cvisited; |
2912 | } |
2913 | BITMAP_FREE (expanded_pts); |
2914 | |
2915 | solution_empty = bitmap_empty_p (map: solution); |
2916 | |
2917 | if (!solution_empty) |
2918 | { |
2919 | bitmap_iterator bi; |
2920 | unsigned eff_escaped_id = find (node: escaped_id); |
2921 | unsigned j; |
2922 | |
2923 | /* Propagate solution to all successors. */ |
2924 | unsigned to_remove = ~0U; |
2925 | EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], |
2926 | 0, j, bi) |
2927 | { |
2928 | if (to_remove != ~0U) |
2929 | { |
2930 | bitmap_clear_bit (graph->succs[i], to_remove); |
2931 | to_remove = ~0U; |
2932 | } |
2933 | unsigned int to = find (node: j); |
2934 | if (to != j) |
2935 | { |
2936 | /* Update the succ graph, avoiding duplicate |
2937 | work. */ |
2938 | to_remove = j; |
2939 | if (! bitmap_set_bit (graph->succs[i], to)) |
2940 | continue; |
2941 | /* We eventually end up processing 'to' twice |
2942 | as it is undefined whether bitmap iteration |
2943 | iterates over bits set during iteration. |
2944 | Play safe instead of doing tricks. */ |
2945 | } |
2946 | /* Don't try to propagate to ourselves. */ |
2947 | if (to == i) |
2948 | { |
2949 | to_remove = j; |
2950 | continue; |
2951 | } |
2952 | /* Early node unification can lead to edges from |
2953 | escaped - remove them. */ |
2954 | if (i == eff_escaped_id) |
2955 | { |
2956 | to_remove = j; |
2957 | if (bitmap_set_bit (get_varinfo (n: to)->solution, |
2958 | escaped_id)) |
2959 | bitmap_set_bit (changed, to); |
2960 | continue; |
2961 | } |
2962 | |
2963 | if (bitmap_ior_into (get_varinfo (n: to)->solution, pts)) |
2964 | bitmap_set_bit (changed, to); |
2965 | } |
2966 | if (to_remove != ~0U) |
2967 | bitmap_clear_bit (graph->succs[i], to_remove); |
2968 | } |
2969 | } |
2970 | } |
2971 | bitmap_obstack_release (&iteration_obstack); |
2972 | } |
2973 | |
2974 | BITMAP_FREE (pts); |
2975 | BITMAP_FREE (changed); |
2976 | bitmap_obstack_release (&oldpta_obstack); |
2977 | } |
2978 | |
2979 | /* Map from trees to variable infos. */ |
2980 | static hash_map<tree, varinfo_t> *vi_for_tree; |
2981 | |
2982 | |
2983 | /* Insert ID as the variable id for tree T in the vi_for_tree map. */ |
2984 | |
2985 | static void |
2986 | insert_vi_for_tree (tree t, varinfo_t vi) |
2987 | { |
2988 | gcc_assert (vi); |
2989 | bool existed = vi_for_tree->put (k: t, v: vi); |
2990 | gcc_assert (!existed); |
2991 | } |
2992 | |
2993 | /* Find the variable info for tree T in VI_FOR_TREE. If T does not |
2994 | exist in the map, return NULL, otherwise, return the varinfo we found. */ |
2995 | |
2996 | static varinfo_t |
2997 | lookup_vi_for_tree (tree t) |
2998 | { |
2999 | varinfo_t *slot = vi_for_tree->get (k: t); |
3000 | if (slot == NULL) |
3001 | return NULL; |
3002 | |
3003 | return *slot; |
3004 | } |
3005 | |
3006 | /* Return a printable name for DECL */ |
3007 | |
3008 | static const char * |
3009 | alias_get_name (tree decl) |
3010 | { |
3011 | const char *res = "NULL" ; |
3012 | if (dump_file) |
3013 | { |
3014 | char *temp = NULL; |
3015 | if (TREE_CODE (decl) == SSA_NAME) |
3016 | { |
3017 | res = get_name (decl); |
3018 | temp = xasprintf ("%s_%u" , res ? res : "" , SSA_NAME_VERSION (decl)); |
3019 | } |
3020 | else if (HAS_DECL_ASSEMBLER_NAME_P (decl) |
3021 | && DECL_ASSEMBLER_NAME_SET_P (decl)) |
3022 | res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME_RAW (decl)); |
3023 | else if (DECL_P (decl)) |
3024 | { |
3025 | res = get_name (decl); |
3026 | if (!res) |
3027 | temp = xasprintf ("D.%u" , DECL_UID (decl)); |
3028 | } |
3029 | |
3030 | if (temp) |
3031 | { |
3032 | res = ggc_strdup (temp); |
3033 | free (ptr: temp); |
3034 | } |
3035 | } |
3036 | |
3037 | return res; |
3038 | } |
3039 | |
3040 | /* Find the variable id for tree T in the map. |
3041 | If T doesn't exist in the map, create an entry for it and return it. */ |
3042 | |
3043 | static varinfo_t |
3044 | get_vi_for_tree (tree t) |
3045 | { |
3046 | varinfo_t *slot = vi_for_tree->get (k: t); |
3047 | if (slot == NULL) |
3048 | { |
3049 | unsigned int id = create_variable_info_for (t, alias_get_name (decl: t), false); |
3050 | return get_varinfo (n: id); |
3051 | } |
3052 | |
3053 | return *slot; |
3054 | } |
3055 | |
3056 | /* Get a scalar constraint expression for a new temporary variable. */ |
3057 | |
3058 | static struct constraint_expr |
3059 | new_scalar_tmp_constraint_exp (const char *name, bool add_id) |
3060 | { |
3061 | struct constraint_expr tmp; |
3062 | varinfo_t vi; |
3063 | |
3064 | vi = new_var_info (NULL_TREE, name, add_id); |
3065 | vi->offset = 0; |
3066 | vi->size = -1; |
3067 | vi->fullsize = -1; |
3068 | vi->is_full_var = 1; |
3069 | vi->is_reg_var = 1; |
3070 | |
3071 | tmp.var = vi->id; |
3072 | tmp.type = SCALAR; |
3073 | tmp.offset = 0; |
3074 | |
3075 | return tmp; |
3076 | } |
3077 | |
3078 | /* Get a constraint expression vector from an SSA_VAR_P node. |
3079 | If address_p is true, the result will be taken its address of. */ |
3080 | |
3081 | static void |
3082 | get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p) |
3083 | { |
3084 | struct constraint_expr cexpr; |
3085 | varinfo_t vi; |
3086 | |
3087 | /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */ |
3088 | gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t)); |
3089 | |
3090 | if (TREE_CODE (t) == SSA_NAME |
3091 | && SSA_NAME_IS_DEFAULT_DEF (t)) |
3092 | { |
3093 | /* For parameters, get at the points-to set for the actual parm |
3094 | decl. */ |
3095 | if (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL |
3096 | || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL) |
3097 | { |
3098 | get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p); |
3099 | return; |
3100 | } |
3101 | /* For undefined SSA names return nothing. */ |
3102 | else if (!ssa_defined_default_def_p (t)) |
3103 | { |
3104 | cexpr.var = nothing_id; |
3105 | cexpr.type = SCALAR; |
3106 | cexpr.offset = 0; |
3107 | results->safe_push (obj: cexpr); |
3108 | return; |
3109 | } |
3110 | } |
3111 | |
3112 | /* For global variables resort to the alias target. */ |
3113 | if (VAR_P (t) && (TREE_STATIC (t) || DECL_EXTERNAL (t))) |
3114 | { |
3115 | varpool_node *node = varpool_node::get (decl: t); |
3116 | if (node && node->alias && node->analyzed) |
3117 | { |
3118 | node = node->ultimate_alias_target (); |
3119 | /* Canonicalize the PT uid of all aliases to the ultimate target. |
3120 | ??? Hopefully the set of aliases can't change in a way that |
3121 | changes the ultimate alias target. */ |
3122 | gcc_assert ((! DECL_PT_UID_SET_P (node->decl) |
3123 | || DECL_PT_UID (node->decl) == DECL_UID (node->decl)) |
3124 | && (! DECL_PT_UID_SET_P (t) |
3125 | || DECL_PT_UID (t) == DECL_UID (node->decl))); |
3126 | DECL_PT_UID (t) = DECL_UID (node->decl); |
3127 | t = node->decl; |
3128 | } |
3129 | |
3130 | /* If this is decl may bind to NULL note that. */ |
3131 | if (address_p |
3132 | && (! node || ! node->nonzero_address ())) |
3133 | { |
3134 | cexpr.var = nothing_id; |
3135 | cexpr.type = SCALAR; |
3136 | cexpr.offset = 0; |
3137 | results->safe_push (obj: cexpr); |
3138 | } |
3139 | } |
3140 | |
3141 | vi = get_vi_for_tree (t); |
3142 | cexpr.var = vi->id; |
3143 | cexpr.type = SCALAR; |
3144 | cexpr.offset = 0; |
3145 | |
3146 | /* If we are not taking the address of the constraint expr, add all |
3147 | sub-fiels of the variable as well. */ |
3148 | if (!address_p |
3149 | && !vi->is_full_var) |
3150 | { |
3151 | for (; vi; vi = vi_next (vi)) |
3152 | { |
3153 | cexpr.var = vi->id; |
3154 | results->safe_push (obj: cexpr); |
3155 | } |
3156 | return; |
3157 | } |
3158 | |
3159 | results->safe_push (obj: cexpr); |
3160 | } |
3161 | |
3162 | /* Process constraint T, performing various simplifications and then |
3163 | adding it to our list of overall constraints. */ |
3164 | |
3165 | static void |
3166 | process_constraint (constraint_t t) |
3167 | { |
3168 | struct constraint_expr rhs = t->rhs; |
3169 | struct constraint_expr lhs = t->lhs; |
3170 | |
3171 | gcc_assert (rhs.var < varmap.length ()); |
3172 | gcc_assert (lhs.var < varmap.length ()); |
3173 | |
3174 | /* If we didn't get any useful constraint from the lhs we get |
3175 | &ANYTHING as fallback from get_constraint_for. Deal with |
3176 | it here by turning it into *ANYTHING. */ |
3177 | if (lhs.type == ADDRESSOF |
3178 | && lhs.var == anything_id) |
3179 | t->lhs.type = lhs.type = DEREF; |
3180 | |
3181 | /* ADDRESSOF on the lhs is invalid. */ |
3182 | gcc_assert (lhs.type != ADDRESSOF); |
3183 | |
3184 | /* We shouldn't add constraints from things that cannot have pointers. |
3185 | It's not completely trivial to avoid in the callers, so do it here. */ |
3186 | if (rhs.type != ADDRESSOF |
3187 | && !get_varinfo (n: rhs.var)->may_have_pointers) |
3188 | return; |
3189 | |
3190 | /* Likewise adding to the solution of a non-pointer var isn't useful. */ |
3191 | if (!get_varinfo (n: lhs.var)->may_have_pointers) |
3192 | return; |
3193 | |
3194 | /* This can happen in our IR with things like n->a = *p */ |
3195 | if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id) |
3196 | { |
3197 | /* Split into tmp = *rhs, *lhs = tmp */ |
3198 | struct constraint_expr tmplhs; |
3199 | tmplhs = new_scalar_tmp_constraint_exp (name: "doubledereftmp" , add_id: true); |
3200 | process_constraint (t: new_constraint (lhs: tmplhs, rhs)); |
3201 | process_constraint (t: new_constraint (lhs, rhs: tmplhs)); |
3202 | } |
3203 | else if ((rhs.type != SCALAR || rhs.offset != 0) && lhs.type == DEREF) |
3204 | { |
3205 | /* Split into tmp = &rhs, *lhs = tmp */ |
3206 | struct constraint_expr tmplhs; |
3207 | tmplhs = new_scalar_tmp_constraint_exp (name: "derefaddrtmp" , add_id: true); |
3208 | process_constraint (t: new_constraint (lhs: tmplhs, rhs)); |
3209 | process_constraint (t: new_constraint (lhs, rhs: tmplhs)); |
3210 | } |
3211 | else |
3212 | { |
3213 | gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0); |
3214 | if (rhs.type == ADDRESSOF) |
3215 | get_varinfo (n: get_varinfo (n: rhs.var)->head)->address_taken = true; |
3216 | constraints.safe_push (obj: t); |
3217 | } |
3218 | } |
3219 | |
3220 | |
3221 | /* Return the position, in bits, of FIELD_DECL from the beginning of its |
3222 | structure. */ |
3223 | |
3224 | static unsigned HOST_WIDE_INT |
3225 | bitpos_of_field (const tree fdecl) |
3226 | { |
3227 | if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fdecl)) |
3228 | || !tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fdecl))) |
3229 | return -1; |
3230 | |
3231 | return (tree_to_uhwi (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT |
3232 | + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fdecl))); |
3233 | } |
3234 | |
3235 | |
3236 | /* Get constraint expressions for offsetting PTR by OFFSET. Stores the |
3237 | resulting constraint expressions in *RESULTS. */ |
3238 | |
3239 | static void |
3240 | get_constraint_for_ptr_offset (tree ptr, tree offset, |
3241 | vec<ce_s> *results) |
3242 | { |
3243 | struct constraint_expr c; |
3244 | unsigned int j, n; |
3245 | HOST_WIDE_INT rhsoffset; |
3246 | |
3247 | /* If we do not do field-sensitive PTA adding offsets to pointers |
3248 | does not change the points-to solution. */ |
3249 | if (!use_field_sensitive) |
3250 | { |
3251 | get_constraint_for_rhs (ptr, results); |
3252 | return; |
3253 | } |
3254 | |
3255 | /* If the offset is not a non-negative integer constant that fits |
3256 | in a HOST_WIDE_INT, we have to fall back to a conservative |
3257 | solution which includes all sub-fields of all pointed-to |
3258 | variables of ptr. */ |
3259 | if (offset == NULL_TREE |
3260 | || TREE_CODE (offset) != INTEGER_CST) |
3261 | rhsoffset = UNKNOWN_OFFSET; |
3262 | else |
3263 | { |
3264 | /* Sign-extend the offset. */ |
3265 | offset_int soffset = offset_int::from (x: wi::to_wide (t: offset), sgn: SIGNED); |
3266 | if (!wi::fits_shwi_p (x: soffset)) |
3267 | rhsoffset = UNKNOWN_OFFSET; |
3268 | else |
3269 | { |
3270 | /* Make sure the bit-offset also fits. */ |
3271 | HOST_WIDE_INT rhsunitoffset = soffset.to_shwi (); |
3272 | rhsoffset = rhsunitoffset * (unsigned HOST_WIDE_INT) BITS_PER_UNIT; |
3273 | if (rhsunitoffset != rhsoffset / BITS_PER_UNIT) |
3274 | rhsoffset = UNKNOWN_OFFSET; |
3275 | } |
3276 | } |
3277 | |
3278 | get_constraint_for_rhs (ptr, results); |
3279 | if (rhsoffset == 0) |
3280 | return; |
3281 | |
3282 | /* As we are eventually appending to the solution do not use |
3283 | vec::iterate here. */ |
3284 | n = results->length (); |
3285 | for (j = 0; j < n; j++) |
3286 | { |
3287 | varinfo_t curr; |
3288 | c = (*results)[j]; |
3289 | curr = get_varinfo (n: c.var); |
3290 | |
3291 | if (c.type == ADDRESSOF |
3292 | /* If this varinfo represents a full variable just use it. */ |
3293 | && curr->is_full_var) |
3294 | ; |
3295 | else if (c.type == ADDRESSOF |
3296 | /* If we do not know the offset add all subfields. */ |
3297 | && rhsoffset == UNKNOWN_OFFSET) |
3298 | { |
3299 | varinfo_t temp = get_varinfo (n: curr->head); |
3300 | do |
3301 | { |
3302 | struct constraint_expr c2; |
3303 | c2.var = temp->id; |
3304 | c2.type = ADDRESSOF; |
3305 | c2.offset = 0; |
3306 | if (c2.var != c.var) |
3307 | results->safe_push (obj: c2); |
3308 | temp = vi_next (vi: temp); |
3309 | } |
3310 | while (temp); |
3311 | } |
3312 | else if (c.type == ADDRESSOF) |
3313 | { |
3314 | varinfo_t temp; |
3315 | unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset; |
3316 | |
3317 | /* If curr->offset + rhsoffset is less than zero adjust it. */ |
3318 | if (rhsoffset < 0 |
3319 | && curr->offset < offset) |
3320 | offset = 0; |
3321 | |
3322 | /* We have to include all fields that overlap the current |
3323 | field shifted by rhsoffset. And we include at least |
3324 | the last or the first field of the variable to represent |
3325 | reachability of off-bound addresses, in particular &object + 1, |
3326 | conservatively correct. */ |
3327 | temp = first_or_preceding_vi_for_offset (curr, offset); |
3328 | c.var = temp->id; |
3329 | c.offset = 0; |
3330 | temp = vi_next (vi: temp); |
3331 | while (temp |
3332 | && temp->offset < offset + curr->size) |
3333 | { |
3334 | struct constraint_expr c2; |
3335 | c2.var = temp->id; |
3336 | c2.type = ADDRESSOF; |
3337 | c2.offset = 0; |
3338 | results->safe_push (obj: c2); |
3339 | temp = vi_next (vi: temp); |
3340 | } |
3341 | } |
3342 | else if (c.type == SCALAR) |
3343 | { |
3344 | gcc_assert (c.offset == 0); |
3345 | c.offset = rhsoffset; |
3346 | } |
3347 | else |
3348 | /* We shouldn't get any DEREFs here. */ |
3349 | gcc_unreachable (); |
3350 | |
3351 | (*results)[j] = c; |
3352 | } |
3353 | } |
3354 | |
3355 | |
3356 | /* Given a COMPONENT_REF T, return the constraint_expr vector for it. |
3357 | If address_p is true the result will be taken its address of. |
3358 | If lhs_p is true then the constraint expression is assumed to be used |
3359 | as the lhs. */ |
3360 | |
3361 | static void |
3362 | get_constraint_for_component_ref (tree t, vec<ce_s> *results, |
3363 | bool address_p, bool lhs_p) |
3364 | { |
3365 | tree orig_t = t; |
3366 | poly_int64 bitsize = -1; |
3367 | poly_int64 bitmaxsize = -1; |
3368 | poly_int64 bitpos; |
3369 | bool reverse; |
3370 | tree forzero; |
3371 | |
3372 | /* Some people like to do cute things like take the address of |
3373 | &0->a.b */ |
3374 | forzero = t; |
3375 | while (handled_component_p (t: forzero) |
3376 | || INDIRECT_REF_P (forzero) |
3377 | || TREE_CODE (forzero) == MEM_REF) |
3378 | forzero = TREE_OPERAND (forzero, 0); |
3379 | |
3380 | if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero)) |
3381 | { |
3382 | struct constraint_expr temp; |
3383 | |
3384 | temp.offset = 0; |
3385 | temp.var = integer_id; |
3386 | temp.type = SCALAR; |
3387 | results->safe_push (obj: temp); |
3388 | return; |
3389 | } |
3390 | |
3391 | t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize, &reverse); |
3392 | |
3393 | /* We can end up here for component references on a |
3394 | VIEW_CONVERT_EXPR <>(&foobar) or things like a |
3395 | BIT_FIELD_REF <&MEM[(void *)&b + 4B], ...>. So for |
3396 | symbolic constants simply give up. */ |
3397 | if (TREE_CODE (t) == ADDR_EXPR) |
3398 | { |
3399 | constraint_expr result; |
3400 | result.type = SCALAR; |
3401 | result.var = anything_id; |
3402 | result.offset = 0; |
3403 | results->safe_push (obj: result); |
3404 | return; |
3405 | } |
3406 | |
3407 | /* Avoid creating pointer-offset constraints, so handle MEM_REF |
3408 | offsets directly. Pretend to take the address of the base, |
3409 | we'll take care of adding the required subset of sub-fields below. */ |
3410 | if (TREE_CODE (t) == MEM_REF |
3411 | && !integer_zerop (TREE_OPERAND (t, 0))) |
3412 | { |
3413 | poly_offset_int off = mem_ref_offset (t); |
3414 | off <<= LOG2_BITS_PER_UNIT; |
3415 | off += bitpos; |
3416 | poly_int64 off_hwi; |
3417 | if (off.to_shwi (r: &off_hwi)) |
3418 | bitpos = off_hwi; |
3419 | else |
3420 | { |
3421 | bitpos = 0; |
3422 | bitmaxsize = -1; |
3423 | } |
3424 | get_constraint_for_1 (TREE_OPERAND (t, 0), results, false, lhs_p); |
3425 | do_deref (results); |
3426 | } |
3427 | else |
3428 | get_constraint_for_1 (t, results, true, lhs_p); |
3429 | |
3430 | /* Strip off nothing_id. */ |
3431 | if (results->length () == 2) |
3432 | { |
3433 | gcc_assert ((*results)[0].var == nothing_id); |
3434 | results->unordered_remove (ix: 0); |
3435 | } |
3436 | gcc_assert (results->length () == 1); |
3437 | struct constraint_expr &result = results->last (); |
3438 | |
3439 | if (result.type == SCALAR |
3440 | && get_varinfo (n: result.var)->is_full_var) |
3441 | /* For single-field vars do not bother about the offset. */ |
3442 | result.offset = 0; |
3443 | else if (result.type == SCALAR) |
3444 | { |
3445 | /* In languages like C, you can access one past the end of an |
3446 | array. You aren't allowed to dereference it, so we can |
3447 | ignore this constraint. When we handle pointer subtraction, |
3448 | we may have to do something cute here. */ |
3449 | |
3450 | if (maybe_lt (a: poly_uint64 (bitpos), b: get_varinfo (n: result.var)->fullsize) |
3451 | && maybe_ne (a: bitmaxsize, b: 0)) |
3452 | { |
3453 | /* It's also not true that the constraint will actually start at the |
3454 | right offset, it may start in some padding. We only care about |
3455 | setting the constraint to the first actual field it touches, so |
3456 | walk to find it. */ |
3457 | struct constraint_expr cexpr = result; |
3458 | varinfo_t curr; |
3459 | results->pop (); |
3460 | cexpr.offset = 0; |
3461 | for (curr = get_varinfo (n: cexpr.var); curr; curr = vi_next (vi: curr)) |
3462 | { |
3463 | if (ranges_maybe_overlap_p (pos1: poly_int64 (curr->offset), |
3464 | size1: curr->size, pos2: bitpos, size2: bitmaxsize)) |
3465 | { |
3466 | cexpr.var = curr->id; |
3467 | results->safe_push (obj: cexpr); |
3468 | if (address_p) |
3469 | break; |
3470 | } |
3471 | } |
3472 | /* If we are going to take the address of this field then |
3473 | to be able to compute reachability correctly add at least |
3474 | the last field of the variable. */ |
3475 | if (address_p && results->length () == 0) |
3476 | { |
3477 | curr = get_varinfo (n: cexpr.var); |
3478 | while (curr->next != 0) |
3479 | curr = vi_next (vi: curr); |
3480 | cexpr.var = curr->id; |
3481 | results->safe_push (obj: cexpr); |
3482 | } |
3483 | else if (results->length () == 0) |
3484 | /* Assert that we found *some* field there. The user couldn't be |
3485 | accessing *only* padding. */ |
3486 | /* Still the user could access one past the end of an array |
3487 | embedded in a struct resulting in accessing *only* padding. */ |
3488 | /* Or accessing only padding via type-punning to a type |
3489 | that has a filed just in padding space. */ |
3490 | { |
3491 | cexpr.type = SCALAR; |
3492 | cexpr.var = anything_id; |
3493 | cexpr.offset = 0; |
3494 | results->safe_push (obj: cexpr); |
3495 | } |
3496 | } |
3497 | else if (known_eq (bitmaxsize, 0)) |
3498 | { |
3499 | if (dump_file && (dump_flags & TDF_DETAILS)) |
3500 | fprintf (stream: dump_file, format: "Access to zero-sized part of variable, " |
3501 | "ignoring\n" ); |
3502 | } |
3503 | else |
3504 | if (dump_file && (dump_flags & TDF_DETAILS)) |
3505 | fprintf (stream: dump_file, format: "Access to past the end of variable, ignoring\n" ); |
3506 | } |
3507 | else if (result.type == DEREF) |
3508 | { |
3509 | /* If we do not know exactly where the access goes say so. Note |
3510 | that only for non-structure accesses we know that we access |
3511 | at most one subfiled of any variable. */ |
3512 | HOST_WIDE_INT const_bitpos; |
3513 | if (!bitpos.is_constant (const_value: &const_bitpos) |
3514 | || const_bitpos == -1 |
3515 | || maybe_ne (a: bitsize, b: bitmaxsize) |
3516 | || AGGREGATE_TYPE_P (TREE_TYPE (orig_t)) |
3517 | || result.offset == UNKNOWN_OFFSET) |
3518 | result.offset = UNKNOWN_OFFSET; |
3519 | else |
3520 | result.offset += const_bitpos; |
3521 | } |
3522 | else if (result.type == ADDRESSOF) |
3523 | { |
3524 | /* We can end up here for component references on constants like |
3525 | VIEW_CONVERT_EXPR <>({ 0, 1, 2, 3 })[i]. */ |
3526 | result.type = SCALAR; |
3527 | result.var = anything_id; |
3528 | result.offset = 0; |
3529 | } |
3530 | else |
3531 | gcc_unreachable (); |
3532 | } |
3533 | |
3534 | |
3535 | /* Dereference the constraint expression CONS, and return the result. |
3536 | DEREF (ADDRESSOF) = SCALAR |
3537 | DEREF (SCALAR) = DEREF |
3538 | DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp)) |
3539 | This is needed so that we can handle dereferencing DEREF constraints. */ |
3540 | |
3541 | static void |
3542 | do_deref (vec<ce_s> *constraints) |
3543 | { |
3544 | struct constraint_expr *c; |
3545 | unsigned int i = 0; |
3546 | |
3547 | FOR_EACH_VEC_ELT (*constraints, i, c) |
3548 | { |
3549 | if (c->type == SCALAR) |
3550 | c->type = DEREF; |
3551 | else if (c->type == ADDRESSOF) |
3552 | c->type = SCALAR; |
3553 | else if (c->type == DEREF) |
3554 | { |
3555 | struct constraint_expr tmplhs; |
3556 | tmplhs = new_scalar_tmp_constraint_exp (name: "dereftmp" , add_id: true); |
3557 | process_constraint (t: new_constraint (lhs: tmplhs, rhs: *c)); |
3558 | c->var = tmplhs.var; |
3559 | } |
3560 | else |
3561 | gcc_unreachable (); |
3562 | } |
3563 | } |
3564 | |
3565 | /* Given a tree T, return the constraint expression for taking the |
3566 | address of it. */ |
3567 | |
3568 | static void |
3569 | get_constraint_for_address_of (tree t, vec<ce_s> *results) |
3570 | { |
3571 | struct constraint_expr *c; |
3572 | unsigned int i; |
3573 | |
3574 | get_constraint_for_1 (t, results, true, true); |
3575 | |
3576 | FOR_EACH_VEC_ELT (*results, i, c) |
3577 | { |
3578 | if (c->type == DEREF) |
3579 | c->type = SCALAR; |
3580 | else |
3581 | c->type = ADDRESSOF; |
3582 | } |
3583 | } |
3584 | |
3585 | /* Given a tree T, return the constraint expression for it. */ |
3586 | |
3587 | static void |
3588 | get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p, |
3589 | bool lhs_p) |
3590 | { |
3591 | struct constraint_expr temp; |
3592 | |
3593 | /* x = integer is all glommed to a single variable, which doesn't |
3594 | point to anything by itself. That is, of course, unless it is an |
3595 | integer constant being treated as a pointer, in which case, we |
3596 | will return that this is really the addressof anything. This |
3597 | happens below, since it will fall into the default case. The only |
3598 | case we know something about an integer treated like a pointer is |
3599 | when it is the NULL pointer, and then we just say it points to |
3600 | NULL. |
3601 | |
3602 | Do not do that if -fno-delete-null-pointer-checks though, because |
3603 | in that case *NULL does not fail, so it _should_ alias *anything. |
3604 | It is not worth adding a new option or renaming the existing one, |
3605 | since this case is relatively obscure. */ |
3606 | if ((TREE_CODE (t) == INTEGER_CST |
3607 | && integer_zerop (t)) |
3608 | /* The only valid CONSTRUCTORs in gimple with pointer typed |
3609 | elements are zero-initializer. But in IPA mode we also |
3610 | process global initializers, so verify at least. */ |
3611 | || (TREE_CODE (t) == CONSTRUCTOR |
3612 | && CONSTRUCTOR_NELTS (t) == 0)) |
3613 | { |
3614 | if (flag_delete_null_pointer_checks) |
3615 | temp.var = nothing_id; |
3616 | else |
3617 | temp.var = nonlocal_id; |
3618 | temp.type = ADDRESSOF; |
3619 | temp.offset = 0; |
3620 | results->safe_push (obj: temp); |
3621 | return; |
3622 | } |
3623 | |
3624 | /* String constants are read-only, ideally we'd have a CONST_DECL |
3625 | for those. */ |
3626 | if (TREE_CODE (t) == STRING_CST) |
3627 | { |
3628 | temp.var = string_id; |
3629 | temp.type = SCALAR; |
3630 | temp.offset = 0; |
3631 | results->safe_push (obj: temp); |
3632 | return; |
3633 | } |
3634 | |
3635 | switch (TREE_CODE_CLASS (TREE_CODE (t))) |
3636 | { |
3637 | case tcc_expression: |
3638 | { |
3639 | switch (TREE_CODE (t)) |
3640 | { |
3641 | case ADDR_EXPR: |
3642 | get_constraint_for_address_of (TREE_OPERAND (t, 0), results); |
3643 | return; |
3644 | default:; |
3645 | } |
3646 | break; |
3647 | } |
3648 | case tcc_reference: |
3649 | { |
3650 | if (!lhs_p && TREE_THIS_VOLATILE (t)) |
3651 | /* Fall back to anything. */ |
3652 | break; |
3653 | |
3654 | switch (TREE_CODE (t)) |
3655 | { |
3656 | case MEM_REF: |
3657 | { |
3658 | struct constraint_expr cs; |
3659 | varinfo_t vi, curr; |
3660 | get_constraint_for_ptr_offset (TREE_OPERAND (t, 0), |
3661 | TREE_OPERAND (t, 1), results); |
3662 | do_deref (constraints: results); |
3663 | |
3664 | /* If we are not taking the address then make sure to process |
3665 | all subvariables we might access. */ |
3666 | if (address_p) |
3667 | return; |
3668 | |
3669 | cs = results->last (); |
3670 | if (cs.type == DEREF |
3671 | && type_can_have_subvars (TREE_TYPE (t))) |
3672 | { |
3673 | /* For dereferences this means we have to defer it |
3674 | to solving time. */ |
3675 | results->last ().offset = UNKNOWN_OFFSET; |
3676 | return; |
3677 | } |
3678 | if (cs.type != SCALAR) |
3679 | return; |
3680 | |
3681 | vi = get_varinfo (n: cs.var); |
3682 | curr = vi_next (vi); |
3683 | if (!vi->is_full_var |
3684 | && curr) |
3685 | { |
3686 | unsigned HOST_WIDE_INT size; |
3687 | if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t)))) |
3688 | size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t))); |
3689 | else |
3690 | size = -1; |
3691 | for (; curr; curr = vi_next (vi: curr)) |
3692 | { |
3693 | if (curr->offset - vi->offset < size) |
3694 | { |
3695 | cs.var = curr->id; |
3696 | results->safe_push (obj: cs); |
3697 | } |
3698 | else |
3699 | break; |
3700 | } |
3701 | } |
3702 | return; |
3703 | } |
3704 | case ARRAY_REF: |
3705 | case ARRAY_RANGE_REF: |
3706 | case COMPONENT_REF: |
3707 | case IMAGPART_EXPR: |
3708 | case REALPART_EXPR: |
3709 | case BIT_FIELD_REF: |
3710 | get_constraint_for_component_ref (t, results, address_p, lhs_p); |
3711 | return; |
3712 | case VIEW_CONVERT_EXPR: |
3713 | get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p, |
3714 | lhs_p); |
3715 | return; |
3716 | /* We are missing handling for TARGET_MEM_REF here. */ |
3717 | default:; |
3718 | } |
3719 | break; |
3720 | } |
3721 | case tcc_exceptional: |
3722 | { |
3723 | switch (TREE_CODE (t)) |
3724 | { |
3725 | case SSA_NAME: |
3726 | { |
3727 | get_constraint_for_ssa_var (t, results, address_p); |
3728 | return; |
3729 | } |
3730 | case CONSTRUCTOR: |
3731 | { |
3732 | unsigned int i; |
3733 | tree val; |
3734 | auto_vec<ce_s> tmp; |
3735 | FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val) |
3736 | { |
3737 | struct constraint_expr *rhsp; |
3738 | unsigned j; |
3739 | get_constraint_for_1 (t: val, results: &tmp, address_p, lhs_p); |
3740 | FOR_EACH_VEC_ELT (tmp, j, rhsp) |
3741 | results->safe_push (obj: *rhsp); |
3742 | tmp.truncate (size: 0); |
3743 | } |
3744 | /* We do not know whether the constructor was complete, |
3745 | so technically we have to add &NOTHING or &ANYTHING |
3746 | like we do for an empty constructor as well. */ |
3747 | return; |
3748 | } |
3749 | default:; |
3750 | } |
3751 | break; |
3752 | } |
3753 | case tcc_declaration: |
3754 | { |
3755 | if (!lhs_p && VAR_P (t) && TREE_THIS_VOLATILE (t)) |
3756 | /* Fall back to anything. */ |
3757 | break; |
3758 | get_constraint_for_ssa_var (t, results, address_p); |
3759 | return; |
3760 | } |
3761 | case tcc_constant: |
3762 | { |
3763 | /* We cannot refer to automatic variables through constants. */ |
3764 | temp.type = ADDRESSOF; |
3765 | temp.var = nonlocal_id; |
3766 | temp.offset = 0; |
3767 | results->safe_push (obj: temp); |
3768 | return; |
3769 | } |
3770 | default:; |
3771 | } |
3772 | |
3773 | /* The default fallback is a constraint from anything. */ |
3774 | temp.type = ADDRESSOF; |
3775 | temp.var = anything_id; |
3776 | temp.offset = 0; |
3777 | results->safe_push (obj: temp); |
3778 | } |
3779 | |
3780 | /* Given a gimple tree T, return the constraint expression vector for it. */ |
3781 | |
3782 | static void |
3783 | get_constraint_for (tree t, vec<ce_s> *results) |
3784 | { |
3785 | gcc_assert (results->length () == 0); |
3786 | |
3787 | get_constraint_for_1 (t, results, address_p: false, lhs_p: true); |
3788 | } |
3789 | |
3790 | /* Given a gimple tree T, return the constraint expression vector for it |
3791 | to be used as the rhs of a constraint. */ |
3792 | |
3793 | static void |
3794 | get_constraint_for_rhs (tree t, vec<ce_s> *results) |
3795 | { |
3796 | gcc_assert (results->length () == 0); |
3797 | |
3798 | get_constraint_for_1 (t, results, address_p: false, lhs_p: false); |
3799 | } |
3800 | |
3801 | |
3802 | /* Efficiently generates constraints from all entries in *RHSC to all |
3803 | entries in *LHSC. */ |
3804 | |
3805 | static void |
3806 | process_all_all_constraints (const vec<ce_s> &lhsc, |
3807 | const vec<ce_s> &rhsc) |
3808 | { |
3809 | struct constraint_expr *lhsp, *rhsp; |
3810 | unsigned i, j; |
3811 | |
3812 | if (lhsc.length () <= 1 || rhsc.length () <= 1) |
3813 | { |
3814 | FOR_EACH_VEC_ELT (lhsc, i, lhsp) |
3815 | FOR_EACH_VEC_ELT (rhsc, j, rhsp) |
3816 | process_constraint (t: new_constraint (lhs: *lhsp, rhs: *rhsp)); |
3817 | } |
3818 | else |
3819 | { |
3820 | struct constraint_expr tmp; |
3821 | tmp = new_scalar_tmp_constraint_exp (name: "allalltmp" , add_id: true); |
3822 | FOR_EACH_VEC_ELT (rhsc, i, rhsp) |
3823 | process_constraint (t: new_constraint (lhs: tmp, rhs: *rhsp)); |
3824 | FOR_EACH_VEC_ELT (lhsc, i, lhsp) |
3825 | process_constraint (t: new_constraint (lhs: *lhsp, rhs: tmp)); |
3826 | } |
3827 | } |
3828 | |
3829 | /* Handle aggregate copies by expanding into copies of the respective |
3830 | fields of the structures. */ |
3831 | |
3832 | static void |
3833 | do_structure_copy (tree lhsop, tree rhsop) |
3834 | { |
3835 | struct constraint_expr *lhsp, *rhsp; |
3836 | auto_vec<ce_s> lhsc; |
3837 | auto_vec<ce_s> rhsc; |
3838 | unsigned j; |
3839 | |
3840 | get_constraint_for (t: lhsop, results: &lhsc); |
3841 | get_constraint_for_rhs (t: rhsop, results: &rhsc); |
3842 | lhsp = &lhsc[0]; |
3843 | rhsp = &rhsc[0]; |
3844 | if (lhsp->type == DEREF |
3845 | || (lhsp->type == ADDRESSOF && lhsp->var == anything_id) |
3846 | || rhsp->type == DEREF) |
3847 | { |
3848 | if (lhsp->type == DEREF) |
3849 | { |
3850 | gcc_assert (lhsc.length () == 1); |
3851 | lhsp->offset = UNKNOWN_OFFSET; |
3852 | } |
3853 | if (rhsp->type == DEREF) |
3854 | { |
3855 | gcc_assert (rhsc.length () == 1); |
3856 | rhsp->offset = UNKNOWN_OFFSET; |
3857 | } |
3858 | process_all_all_constraints (lhsc, rhsc); |
3859 | } |
3860 | else if (lhsp->type == SCALAR |
3861 | && (rhsp->type == SCALAR |
3862 | || rhsp->type == ADDRESSOF)) |
3863 | { |
3864 | HOST_WIDE_INT lhssize, lhsoffset; |
3865 | HOST_WIDE_INT rhssize, rhsoffset; |
3866 | bool reverse; |
3867 | unsigned k = 0; |
3868 | if (!get_ref_base_and_extent_hwi (lhsop, &lhsoffset, &lhssize, &reverse) |
3869 | || !get_ref_base_and_extent_hwi (rhsop, &rhsoffset, &rhssize, |
3870 | &reverse)) |
3871 | { |
3872 | process_all_all_constraints (lhsc, rhsc); |
3873 | return; |
3874 | } |
3875 | for (j = 0; lhsc.iterate (ix: j, ptr: &lhsp);) |
3876 | { |
3877 | varinfo_t lhsv, rhsv; |
3878 | rhsp = &rhsc[k]; |
3879 | lhsv = get_varinfo (n: lhsp->var); |
3880 | rhsv = get_varinfo (n: rhsp->var); |
3881 | if (lhsv->may_have_pointers |
3882 | && (lhsv->is_full_var |
3883 | || rhsv->is_full_var |
3884 | || ranges_overlap_p (pos1: lhsv->offset + rhsoffset, size1: lhsv->size, |
3885 | pos2: rhsv->offset + lhsoffset, size2: rhsv->size))) |
3886 | process_constraint (t: new_constraint (lhs: *lhsp, rhs: *rhsp)); |
3887 | if (!rhsv->is_full_var |
3888 | && (lhsv->is_full_var |
3889 | || (lhsv->offset + rhsoffset + lhsv->size |
3890 | > rhsv->offset + lhsoffset + rhsv->size))) |
3891 | { |
3892 | ++k; |
3893 | if (k >= rhsc.length ()) |
3894 | break; |
3895 | } |
3896 | else |
3897 | ++j; |
3898 | } |
3899 | } |
3900 | else |
3901 | gcc_unreachable (); |
3902 | } |
3903 | |
3904 | /* Create constraints ID = { rhsc }. */ |
3905 | |
3906 | static void |
3907 | make_constraints_to (unsigned id, const vec<ce_s> &rhsc) |
3908 | { |
3909 | struct constraint_expr *c; |
3910 | struct constraint_expr includes; |
3911 | unsigned int j; |
3912 | |
3913 | includes.var = id; |
3914 | includes.offset = 0; |
3915 | includes.type = SCALAR; |
3916 | |
3917 | FOR_EACH_VEC_ELT (rhsc, j, c) |
3918 | process_constraint (t: new_constraint (lhs: includes, rhs: *c)); |
3919 | } |
3920 | |
3921 | /* Create a constraint ID = OP. */ |
3922 | |
3923 | static void |
3924 | make_constraint_to (unsigned id, tree op) |
3925 | { |
3926 | auto_vec<ce_s> rhsc; |
3927 | get_constraint_for_rhs (t: op, results: &rhsc); |
3928 | make_constraints_to (id, rhsc); |
3929 | } |
3930 | |
3931 | /* Create a constraint ID = &FROM. */ |
3932 | |
3933 | static void |
3934 | make_constraint_from (varinfo_t vi, int from) |
3935 | { |
3936 | struct constraint_expr lhs, rhs; |
3937 | |
3938 | lhs.var = vi->id; |
3939 | lhs.offset = 0; |
3940 | lhs.type = SCALAR; |
3941 | |
3942 | rhs.var = from; |
3943 | rhs.offset = 0; |
3944 | rhs.type = ADDRESSOF; |
3945 | process_constraint (t: new_constraint (lhs, rhs)); |
3946 | } |
3947 | |
3948 | /* Create a constraint ID = FROM. */ |
3949 | |
3950 | static void |
3951 | make_copy_constraint (varinfo_t vi, int from) |
3952 | { |
3953 | struct constraint_expr lhs, rhs; |
3954 | |
3955 | lhs.var = vi->id; |
3956 | lhs.offset = 0; |
3957 | lhs.type = SCALAR; |
3958 | |
3959 | rhs.var = from; |
3960 | rhs.offset = 0; |
3961 | rhs.type = SCALAR; |
3962 | process_constraint (t: new_constraint (lhs, rhs)); |
3963 | } |
3964 | |
3965 | /* Make constraints necessary to make OP escape. */ |
3966 | |
3967 | static void |
3968 | make_escape_constraint (tree op) |
3969 | { |
3970 | make_constraint_to (id: escaped_id, op); |
3971 | } |
3972 | |
3973 | /* Make constraint necessary to make all indirect references |
3974 | from VI escape. */ |
3975 | |
3976 | static void |
3977 | make_indirect_escape_constraint (varinfo_t vi) |
3978 | { |
3979 | struct constraint_expr lhs, rhs; |
3980 | /* escaped = *(VAR + UNKNOWN); */ |
3981 | lhs.type = SCALAR; |
3982 | lhs.var = escaped_id; |
3983 | lhs.offset = 0; |
3984 | rhs.type = DEREF; |
3985 | rhs.var = vi->id; |
3986 | rhs.offset = UNKNOWN_OFFSET; |
3987 | process_constraint (t: new_constraint (lhs, rhs)); |
3988 | } |
3989 | |
3990 | /* Add constraints to that the solution of VI is transitively closed. */ |
3991 | |
3992 | static void |
3993 | make_transitive_closure_constraints (varinfo_t vi) |
3994 | { |
3995 | struct constraint_expr lhs, rhs; |
3996 | |
3997 | /* VAR = *(VAR + UNKNOWN); */ |
3998 | lhs.type = SCALAR; |
3999 | lhs.var = vi->id; |
4000 | lhs.offset = 0; |
4001 | rhs.type = DEREF; |
4002 | rhs.var = vi->id; |
4003 | rhs.offset = UNKNOWN_OFFSET; |
4004 | process_constraint (t: new_constraint (lhs, rhs)); |
4005 | } |
4006 | |
4007 | /* Add constraints to that the solution of VI has all subvariables added. */ |
4008 | |
4009 | static void |
4010 | make_any_offset_constraints (varinfo_t vi) |
4011 | { |
4012 | struct constraint_expr lhs, rhs; |
4013 | |
4014 | /* VAR = VAR + UNKNOWN; */ |
4015 | lhs.type = SCALAR; |
4016 | lhs.var = vi->id; |
4017 | lhs.offset = 0; |
4018 | rhs.type = SCALAR; |
4019 | rhs.var = vi->id; |
4020 | rhs.offset = UNKNOWN_OFFSET; |
4021 | process_constraint (t: new_constraint (lhs, rhs)); |
4022 | } |
4023 | |
4024 | /* Temporary storage for fake var decls. */ |
4025 | struct obstack fake_var_decl_obstack; |
4026 | |
4027 | /* Build a fake VAR_DECL acting as referrer to a DECL_UID. */ |
4028 | |
4029 | static tree |
4030 | build_fake_var_decl (tree type) |
4031 | { |
4032 | tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl); |
4033 | memset (s: decl, c: 0, n: sizeof (struct tree_var_decl)); |
4034 | TREE_SET_CODE (decl, VAR_DECL); |
4035 | TREE_TYPE (decl) = type; |
4036 | DECL_UID (decl) = allocate_decl_uid (); |
4037 | SET_DECL_PT_UID (decl, -1); |
4038 | layout_decl (decl, 0); |
4039 | return decl; |
4040 | } |
4041 | |
4042 | /* Create a new artificial heap variable with NAME. |
4043 | Return the created variable. */ |
4044 | |
4045 | static varinfo_t |
4046 | make_heapvar (const char *name, bool add_id) |
4047 | { |
4048 | varinfo_t vi; |
4049 | tree heapvar; |
4050 | |
4051 | heapvar = build_fake_var_decl (ptr_type_node); |
4052 | DECL_EXTERNAL (heapvar) = 1; |
4053 | |
4054 | vi = new_var_info (t: heapvar, name, add_id); |
4055 | vi->is_heap_var = true; |
4056 | vi->is_unknown_size_var = true; |
4057 | vi->offset = 0; |
4058 | vi->fullsize = ~0; |
4059 | vi->size = ~0; |
4060 | vi->is_full_var = true; |
4061 | insert_vi_for_tree (t: heapvar, vi); |
4062 | |
4063 | return vi; |
4064 | } |
4065 | |
4066 | /* Create a new artificial heap variable with NAME and make a |
4067 | constraint from it to LHS. Set flags according to a tag used |
4068 | for tracking restrict pointers. */ |
4069 | |
4070 | static varinfo_t |
4071 | make_constraint_from_restrict (varinfo_t lhs, const char *name, bool add_id) |
4072 | { |
4073 | varinfo_t vi = make_heapvar (name, add_id); |
4074 | vi->is_restrict_var = 1; |
4075 | vi->is_global_var = 1; |
4076 | vi->may_have_pointers = 1; |
4077 | make_constraint_from (vi: lhs, from: vi->id); |
4078 | return vi; |
4079 | } |
4080 | |
4081 | /* Create a new artificial heap variable with NAME and make a |
4082 | constraint from it to LHS. Set flags according to a tag used |
4083 | for tracking restrict pointers and make the artificial heap |
4084 | point to global memory. */ |
4085 | |
4086 | static varinfo_t |
4087 | make_constraint_from_global_restrict (varinfo_t lhs, const char *name, |
4088 | bool add_id) |
4089 | { |
4090 | varinfo_t vi = make_constraint_from_restrict (lhs, name, add_id); |
4091 | make_copy_constraint (vi, from: nonlocal_id); |
4092 | return vi; |
4093 | } |
4094 | |
4095 | /* In IPA mode there are varinfos for different aspects of reach |
4096 | function designator. One for the points-to set of the return |
4097 | value, one for the variables that are clobbered by the function, |
4098 | one for its uses and one for each parameter (including a single |
4099 | glob for remaining variadic arguments). */ |
4100 | |
4101 | enum { fi_clobbers = 1, fi_uses = 2, |
4102 | fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 }; |
4103 | |
4104 | /* Get a constraint for the requested part of a function designator FI |
4105 | when operating in IPA mode. */ |
4106 | |
4107 | static struct constraint_expr |
4108 | get_function_part_constraint (varinfo_t fi, unsigned part) |
4109 | { |
4110 | struct constraint_expr c; |
4111 | |
4112 | gcc_assert (in_ipa_mode); |
4113 | |
4114 | if (fi->id == anything_id) |
4115 | { |
4116 | /* ??? We probably should have a ANYFN special variable. */ |
4117 | c.var = anything_id; |
4118 | c.offset = 0; |
4119 | c.type = SCALAR; |
4120 | } |
4121 | else if (fi->decl && TREE_CODE (fi->decl) == FUNCTION_DECL) |
4122 | { |
4123 | varinfo_t ai = first_vi_for_offset (fi, part); |
4124 | if (ai) |
4125 | c.var = ai->id; |
4126 | else |
4127 | c.var = anything_id; |
4128 | c.offset = 0; |
4129 | c.type = SCALAR; |
4130 | } |
4131 | else |
4132 | { |
4133 | c.var = fi->id; |
4134 | c.offset = part; |
4135 | c.type = DEREF; |
4136 | } |
4137 | |
4138 | return c; |
4139 | } |
4140 | |
4141 | /* Produce constraints for argument ARG of call STMT with eaf flags |
4142 | FLAGS. RESULTS is array holding constraints for return value. |
4143 | CALLESCAPE_ID is variable where call loocal escapes are added. |
4144 | WRITES_GLOVEL_MEMORY is true if callee may write global memory. */ |
4145 | |
4146 | static void |
4147 | handle_call_arg (gcall *stmt, tree arg, vec<ce_s> *results, int flags, |
4148 | int callescape_id, bool writes_global_memory) |
4149 | { |
4150 | int relevant_indirect_flags = EAF_NO_INDIRECT_CLOBBER | EAF_NO_INDIRECT_READ |
4151 | | EAF_NO_INDIRECT_ESCAPE; |
4152 | int relevant_flags = relevant_indirect_flags |
4153 | | EAF_NO_DIRECT_CLOBBER |
4154 | | EAF_NO_DIRECT_READ |
4155 | | EAF_NO_DIRECT_ESCAPE; |
4156 | if (gimple_call_lhs (gs: stmt)) |
4157 | { |
4158 | relevant_flags |= EAF_NOT_RETURNED_DIRECTLY | EAF_NOT_RETURNED_INDIRECTLY; |
4159 | relevant_indirect_flags |= EAF_NOT_RETURNED_INDIRECTLY; |
4160 | |
4161 | /* If value is never read from it can not be returned indirectly |
4162 | (except through the escape solution). |
4163 | For all flags we get these implications right except for |
4164 | not_returned because we miss return functions in ipa-prop. */ |
4165 | |
4166 | if (flags & EAF_NO_DIRECT_READ) |
4167 | flags |= EAF_NOT_RETURNED_INDIRECTLY; |
4168 | } |
4169 | |
4170 | /* If the argument is not used we can ignore it. |
4171 | Similarly argument is invisile for us if it not clobbered, does not |
4172 | escape, is not read and can not be returned. */ |
4173 | if ((flags & EAF_UNUSED) || ((flags & relevant_flags) == relevant_flags)) |
4174 | return; |
4175 | |
4176 | /* Produce varinfo for direct accesses to ARG. */ |
4177 | varinfo_t tem = new_var_info (NULL_TREE, name: "callarg" , add_id: true); |
4178 | tem->is_reg_var = true; |
4179 | make_constraint_to (id: tem->id, op: arg); |
4180 | make_any_offset_constraints (vi: tem); |
4181 | |
4182 | bool callarg_transitive = false; |
4183 | |
4184 | /* As an compile time optimization if we make no difference between |
4185 | direct and indirect accesses make arg transitively closed. |
4186 | This avoids the need to build indir arg and do everything twice. */ |
4187 | if (((flags & EAF_NO_INDIRECT_CLOBBER) != 0) |
4188 | == ((flags & EAF_NO_DIRECT_CLOBBER) != 0) |
4189 | && (((flags & EAF_NO_INDIRECT_READ) != 0) |
4190 | == ((flags & EAF_NO_DIRECT_READ) != 0)) |
4191 | && (((flags & EAF_NO_INDIRECT_ESCAPE) != 0) |
4192 | == ((flags & EAF_NO_DIRECT_ESCAPE) != 0)) |
4193 | && (((flags & EAF_NOT_RETURNED_INDIRECTLY) != 0) |
4194 | == ((flags & EAF_NOT_RETURNED_DIRECTLY) != 0))) |
4195 | { |
4196 | make_transitive_closure_constraints (vi: tem); |
4197 | callarg_transitive = true; |
4198 | } |
4199 | |
4200 | /* If necessary, produce varinfo for indirect accesses to ARG. */ |
4201 | varinfo_t indir_tem = NULL; |
4202 | if (!callarg_transitive |
4203 | && (flags & relevant_indirect_flags) != relevant_indirect_flags) |
4204 | { |
4205 | struct constraint_expr lhs, rhs; |
4206 | indir_tem = new_var_info (NULL_TREE, name: "indircallarg" , add_id: true); |
4207 | indir_tem->is_reg_var = true; |
4208 | |
4209 | /* indir_term = *tem. */ |
4210 | lhs.type = SCALAR; |
4211 | lhs.var = indir_tem->id; |
4212 | lhs.offset = 0; |
4213 | |
4214 | rhs.type = DEREF; |
4215 | rhs.var = tem->id; |
4216 | rhs.offset = UNKNOWN_OFFSET; |
4217 | process_constraint (t: new_constraint (lhs, rhs)); |
4218 | |
4219 | make_any_offset_constraints (vi: indir_tem); |
4220 | |
4221 | /* If we do not read indirectly there is no need for transitive closure. |
4222 | We know there is only one level of indirection. */ |
4223 | if (!(flags & EAF_NO_INDIRECT_READ)) |
4224 | make_transitive_closure_constraints (vi: indir_tem); |
4225 | gcc_checking_assert (!(flags & EAF_NO_DIRECT_READ)); |
4226 | } |
4227 | |
4228 | if (gimple_call_lhs (gs: stmt)) |
4229 | { |
4230 | if (!(flags & EAF_NOT_RETURNED_DIRECTLY)) |
4231 | { |
4232 | struct constraint_expr cexpr; |
4233 | cexpr.var = tem->id; |
4234 | cexpr.type = SCALAR; |
4235 | cexpr.offset = 0; |
4236 | results->safe_push (obj: cexpr); |
4237 | } |
4238 | if (!callarg_transitive & !(flags & EAF_NOT_RETURNED_INDIRECTLY)) |
4239 | { |
4240 | struct constraint_expr cexpr; |
4241 | cexpr.var = indir_tem->id; |
4242 | cexpr.type = SCALAR; |
4243 | cexpr.offset = 0; |
4244 | results->safe_push (obj: cexpr); |
4245 | } |
4246 | } |
4247 | |
4248 | if (!(flags & EAF_NO_DIRECT_READ)) |
4249 | { |
4250 | varinfo_t uses = get_call_use_vi (call: stmt); |
4251 | make_copy_constraint (vi: uses, from: tem->id); |
4252 | if (!callarg_transitive & !(flags & EAF_NO_INDIRECT_READ)) |
4253 | make_copy_constraint (vi: uses, from: indir_tem->id); |
4254 | } |
4255 | else |
4256 | /* To read indirectly we need to read directly. */ |
4257 | gcc_checking_assert (flags & EAF_NO_INDIRECT_READ); |
4258 | |
4259 | if (!(flags & EAF_NO_DIRECT_CLOBBER)) |
4260 | { |
4261 | struct constraint_expr lhs, rhs; |
4262 | |
4263 | /* *arg = callescape. */ |
4264 | lhs.type = DEREF; |
4265 | lhs.var = tem->id; |
4266 | lhs.offset = 0; |
4267 | |
4268 | rhs.type = SCALAR; |
4269 | rhs.var = callescape_id; |
4270 | rhs.offset = 0; |
4271 | process_constraint (t: new_constraint (lhs, rhs)); |
4272 | |
4273 | /* callclobbered = arg. */ |
4274 | make_copy_constraint (vi: get_call_clobber_vi (call: stmt), from: tem->id); |
4275 | } |
4276 | if (!callarg_transitive & !(flags & EAF_NO_INDIRECT_CLOBBER)) |
4277 | { |
4278 | struct constraint_expr lhs, rhs; |
4279 | |
4280 | /* *indir_arg = callescape. */ |
4281 | lhs.type = DEREF; |
4282 | lhs.var = indir_tem->id; |
4283 | lhs.offset = 0; |
4284 | |
4285 | rhs.type = SCALAR; |
4286 | rhs.var = callescape_id; |
4287 | rhs.offset = 0; |
4288 | process_constraint (t: new_constraint (lhs, rhs)); |
4289 | |
4290 | /* callclobbered = indir_arg. */ |
4291 | make_copy_constraint (vi: get_call_clobber_vi (call: stmt), from: indir_tem->id); |
4292 | } |
4293 | |
4294 | if (!(flags & (EAF_NO_DIRECT_ESCAPE | EAF_NO_INDIRECT_ESCAPE))) |
4295 | { |
4296 | struct constraint_expr lhs, rhs; |
4297 | |
4298 | /* callescape = arg; */ |
4299 | lhs.var = callescape_id; |
4300 | lhs.offset = 0; |
4301 | lhs.type = SCALAR; |
4302 | |
4303 | rhs.var = tem->id; |
4304 | rhs.offset = 0; |
4305 | rhs.type = SCALAR; |
4306 | process_constraint (t: new_constraint (lhs, rhs)); |
4307 | |
4308 | if (writes_global_memory) |
4309 | make_escape_constraint (op: arg); |
4310 | } |
4311 | else if (!callarg_transitive & !(flags & EAF_NO_INDIRECT_ESCAPE)) |
4312 | { |
4313 | struct constraint_expr lhs, rhs; |
4314 | |
4315 | /* callescape = *(indir_arg + UNKNOWN); */ |
4316 | lhs.var = callescape_id; |
4317 | lhs.offset = 0; |
4318 | lhs.type = SCALAR; |
4319 | |
4320 | rhs.var = indir_tem->id; |
4321 | rhs.offset = 0; |
4322 | rhs.type = SCALAR; |
4323 | process_constraint (t: new_constraint (lhs, rhs)); |
4324 | |
4325 | if (writes_global_memory) |
4326 | make_indirect_escape_constraint (vi: tem); |
4327 | } |
4328 | } |
4329 | |
4330 | /* Determine global memory access of call STMT and update |
4331 | WRITES_GLOBAL_MEMORY, READS_GLOBAL_MEMORY and USES_GLOBAL_MEMORY. */ |
4332 | |
4333 | static void |
4334 | determine_global_memory_access (gcall *stmt, |
4335 | bool *writes_global_memory, |
4336 | bool *reads_global_memory, |
4337 | bool *uses_global_memory) |
4338 | { |
4339 | tree callee; |
4340 | cgraph_node *node; |
4341 | modref_summary *summary; |
4342 | |
4343 | /* We need to detrmine reads to set uses. */ |
4344 | gcc_assert (!uses_global_memory || reads_global_memory); |
4345 | |
4346 | if ((callee = gimple_call_fndecl (gs: stmt)) != NULL_TREE |
4347 | && (node = cgraph_node::get (decl: callee)) != NULL |
4348 | && (summary = get_modref_function_summary (func: node))) |
4349 | { |
4350 | if (writes_global_memory && *writes_global_memory) |
4351 | *writes_global_memory = summary->global_memory_written; |
4352 | if (reads_global_memory && *reads_global_memory) |
4353 | *reads_global_memory = summary->global_memory_read; |
4354 | if (reads_global_memory && uses_global_memory |
4355 | && !summary->calls_interposable |
4356 | && !*reads_global_memory && node->binds_to_current_def_p ()) |
4357 | *uses_global_memory = false; |
4358 | } |
4359 | if ((writes_global_memory && *writes_global_memory) |
4360 | || (uses_global_memory && *uses_global_memory) |
4361 | || (reads_global_memory && *reads_global_memory)) |
4362 | { |
4363 | attr_fnspec fnspec = gimple_call_fnspec (stmt); |
4364 | if (fnspec.known_p ()) |
4365 | { |
4366 | if (writes_global_memory |
4367 | && !fnspec.global_memory_written_p ()) |
4368 | *writes_global_memory = false; |
4369 | if (reads_global_memory && !fnspec.global_memory_read_p ()) |
4370 | { |
4371 | *reads_global_memory = false; |
4372 | if (uses_global_memory) |
4373 | *uses_global_memory = false; |
4374 | } |
4375 | } |
4376 | } |
4377 | } |
4378 | |
4379 | /* For non-IPA mode, generate constraints necessary for a call on the |
4380 | RHS and collect return value constraint to RESULTS to be used later in |
4381 | handle_lhs_call. |
4382 | |
4383 | IMPLICIT_EAF_FLAGS are added to each function argument. If |
4384 | WRITES_GLOBAL_MEMORY is true function is assumed to possibly write to global |
4385 | memory. Similar for READS_GLOBAL_MEMORY. */ |
4386 | |
4387 | static void |
4388 | handle_rhs_call (gcall *stmt, vec<ce_s> *results, |
4389 | int implicit_eaf_flags, |
4390 | bool writes_global_memory, |
4391 | bool reads_global_memory) |
4392 | { |
4393 | determine_global_memory_access (stmt, writes_global_memory: &writes_global_memory, |
4394 | reads_global_memory: &reads_global_memory, |
4395 | NULL); |
4396 | |
4397 | varinfo_t callescape = new_var_info (NULL_TREE, name: "callescape" , add_id: true); |
4398 | |
4399 | /* If function can use global memory, add it to callescape |
4400 | and to possible return values. If not we can still use/return addresses |
4401 | of global symbols. */ |
4402 | struct constraint_expr lhs, rhs; |
4403 | |
4404 | lhs.type = SCALAR; |
4405 | lhs.var = callescape->id; |
4406 | lhs.offset = 0; |
4407 | |
4408 | rhs.type = reads_global_memory ? SCALAR : ADDRESSOF; |
4409 | rhs.var = nonlocal_id; |
4410 | rhs.offset = 0; |
4411 | |
4412 | process_constraint (t: new_constraint (lhs, rhs)); |
4413 | results->safe_push (obj: rhs); |
4414 | |
4415 | varinfo_t uses = get_call_use_vi (call: stmt); |
4416 | make_copy_constraint (vi: uses, from: callescape->id); |
4417 | |
4418 | for (unsigned i = 0; i < gimple_call_num_args (gs: stmt); ++i) |
4419 | { |
4420 | tree arg = gimple_call_arg (gs: stmt, index: i); |
4421 | int flags = gimple_call_arg_flags (stmt, i); |
4422 | handle_call_arg (stmt, arg, results, |
4423 | flags: flags | implicit_eaf_flags, |
4424 | callescape_id: callescape->id, writes_global_memory); |
4425 | } |
4426 | |
4427 | /* The static chain escapes as well. */ |
4428 | if (gimple_call_chain (gs: stmt)) |
4429 | handle_call_arg (stmt, arg: gimple_call_chain (gs: stmt), results, |
4430 | flags: implicit_eaf_flags |
4431 | | gimple_call_static_chain_flags (stmt), |
4432 | callescape_id: callescape->id, writes_global_memory); |
4433 | |
4434 | /* And if we applied NRV the address of the return slot escapes as well. */ |
4435 | if (gimple_call_return_slot_opt_p (s: stmt) |
4436 | && gimple_call_lhs (gs: stmt) != NULL_TREE |
4437 | && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt)))) |
4438 | { |
4439 | int flags = gimple_call_retslot_flags (stmt); |
4440 | const int relevant_flags = EAF_NO_DIRECT_ESCAPE |
4441 | | EAF_NOT_RETURNED_DIRECTLY; |
4442 | |
4443 | if (!(flags & EAF_UNUSED) && (flags & relevant_flags) != relevant_flags) |
4444 | { |
4445 | auto_vec<ce_s> tmpc; |
4446 | |
4447 | get_constraint_for_address_of (t: gimple_call_lhs (gs: stmt), results: &tmpc); |
4448 | |
4449 | if (!(flags & EAF_NO_DIRECT_ESCAPE)) |
4450 | { |
4451 | make_constraints_to (id: callescape->id, rhsc: tmpc); |
4452 | if (writes_global_memory) |
4453 | make_constraints_to (id: escaped_id, rhsc: tmpc); |
4454 | } |
4455 | if (!(flags & EAF_NOT_RETURNED_DIRECTLY)) |
4456 | { |
4457 | struct constraint_expr *c; |
4458 | unsigned i; |
4459 | FOR_EACH_VEC_ELT (tmpc, i, c) |
4460 | results->safe_push (obj: *c); |
4461 | } |
4462 | } |
4463 | } |
4464 | } |
4465 | |
4466 | /* For non-IPA mode, generate constraints necessary for a call |
4467 | that returns a pointer and assigns it to LHS. This simply makes |
4468 | the LHS point to global and escaped variables. */ |
4469 | |
4470 | static void |
4471 | handle_lhs_call (gcall *stmt, tree lhs, int flags, vec<ce_s> &rhsc, |
4472 | tree fndecl) |
4473 | { |
4474 | auto_vec<ce_s> lhsc; |
4475 | |
4476 | get_constraint_for (t: lhs, results: &lhsc); |
4477 | /* If the store is to a global decl make sure to |
4478 | add proper escape constraints. */ |
4479 | lhs = get_base_address (t: lhs); |
4480 | if (lhs |
4481 | && DECL_P (lhs) |
4482 | && is_global_var (t: lhs)) |
4483 | { |
4484 | struct constraint_expr tmpc; |
4485 | tmpc.var = escaped_id; |
4486 | tmpc.offset = 0; |
4487 | tmpc.type = SCALAR; |
4488 | lhsc.safe_push (obj: tmpc); |
4489 | } |
4490 | |
4491 | /* If the call returns an argument unmodified override the rhs |
4492 | constraints. */ |
4493 | if (flags & ERF_RETURNS_ARG |
4494 | && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (gs: stmt)) |
4495 | { |
4496 | tree arg; |
4497 | rhsc.truncate (size: 0); |
4498 | arg = gimple_call_arg (gs: stmt, index: flags & ERF_RETURN_ARG_MASK); |
4499 | get_constraint_for (t: arg, results: &rhsc); |
4500 | process_all_all_constraints (lhsc, rhsc); |
4501 | rhsc.truncate (size: 0); |
4502 | } |
4503 | else if (flags & ERF_NOALIAS) |
4504 | { |
4505 | varinfo_t vi; |
4506 | struct constraint_expr tmpc; |
4507 | rhsc.truncate (size: 0); |
4508 | vi = make_heapvar (name: "HEAP" , add_id: true); |
4509 | /* We are marking allocated storage local, we deal with it becoming |
4510 | global by escaping and setting of vars_contains_escaped_heap. */ |
4511 | DECL_EXTERNAL (vi->decl) = 0; |
4512 | vi->is_global_var = 0; |
4513 | /* If this is not a real malloc call assume the memory was |
4514 | initialized and thus may point to global memory. All |
4515 | builtin functions with the malloc attribute behave in a sane way. */ |
4516 | if (!fndecl |
4517 | || !fndecl_built_in_p (node: fndecl, klass: BUILT_IN_NORMAL)) |
4518 | make_constraint_from (vi, from: nonlocal_id); |
4519 | tmpc.var = vi->id; |
4520 | tmpc.offset = 0; |
4521 | tmpc.type = ADDRESSOF; |
4522 | rhsc.safe_push (obj: tmpc); |
4523 | process_all_all_constraints (lhsc, rhsc); |
4524 | rhsc.truncate (size: 0); |
4525 | } |
4526 | else |
4527 | process_all_all_constraints (lhsc, rhsc); |
4528 | } |
4529 | |
4530 | |
4531 | /* Return the varinfo for the callee of CALL. */ |
4532 | |
4533 | static varinfo_t |
4534 | get_fi_for_callee (gcall *call) |
4535 | { |
4536 | tree decl, fn = gimple_call_fn (gs: call); |
4537 | |
4538 | if (fn && TREE_CODE (fn) == OBJ_TYPE_REF) |
4539 | fn = OBJ_TYPE_REF_EXPR (fn); |
4540 | |
4541 | /* If we can directly resolve the function being called, do so. |
4542 | Otherwise, it must be some sort of indirect expression that |
4543 | we should still be able to handle. */ |
4544 | decl = gimple_call_addr_fndecl (fn); |
4545 | if (decl) |
4546 | return get_vi_for_tree (t: decl); |
4547 | |
4548 | /* If the function is anything other than a SSA name pointer we have no |
4549 | clue and should be getting ANYFN (well, ANYTHING for now). */ |
4550 | if (!fn || TREE_CODE (fn) != SSA_NAME) |
4551 | return get_varinfo (n: anything_id); |
4552 | |
4553 | if (SSA_NAME_IS_DEFAULT_DEF (fn) |
4554 | && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL |
4555 | || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL)) |
4556 | fn = SSA_NAME_VAR (fn); |
4557 | |
4558 | return get_vi_for_tree (t: fn); |
4559 | } |
4560 | |
4561 | /* Create constraints for assigning call argument ARG to the incoming parameter |
4562 | INDEX of function FI. */ |
4563 | |
4564 | static void |
4565 | find_func_aliases_for_call_arg (varinfo_t fi, unsigned index, tree arg) |
4566 | { |
4567 | struct constraint_expr lhs; |
4568 | lhs = get_function_part_constraint (fi, part: fi_parm_base + index); |
4569 | |
4570 | auto_vec<ce_s, 2> rhsc; |
4571 | get_constraint_for_rhs (t: arg, results: &rhsc); |
4572 | |
4573 | unsigned j; |
4574 | struct constraint_expr *rhsp; |
4575 | FOR_EACH_VEC_ELT (rhsc, j, rhsp) |
4576 | process_constraint (t: new_constraint (lhs, rhs: *rhsp)); |
4577 | } |
4578 | |
4579 | /* Return true if FNDECL may be part of another lto partition. */ |
4580 | |
4581 | static bool |
4582 | fndecl_maybe_in_other_partition (tree fndecl) |
4583 | { |
4584 | cgraph_node *fn_node = cgraph_node::get (decl: fndecl); |
4585 | if (fn_node == NULL) |
4586 | return true; |
4587 | |
4588 | return fn_node->in_other_partition; |
4589 | } |
4590 | |
4591 | /* Create constraints for the builtin call T. Return true if the call |
4592 | was handled, otherwise false. */ |
4593 | |
4594 | static bool |
4595 | find_func_aliases_for_builtin_call (struct function *fn, gcall *t) |
4596 | { |
4597 | tree fndecl = gimple_call_fndecl (gs: t); |
4598 | auto_vec<ce_s, 2> lhsc; |
4599 | auto_vec<ce_s, 4> rhsc; |
4600 | varinfo_t fi; |
4601 | |
4602 | if (gimple_call_builtin_p (t, BUILT_IN_NORMAL)) |
4603 | /* ??? All builtins that are handled here need to be handled |
4604 | in the alias-oracle query functions explicitly! */ |
4605 | switch (DECL_FUNCTION_CODE (decl: fndecl)) |
4606 | { |
4607 | /* All the following functions return a pointer to the same object |
4608 | as their first argument points to. The functions do not add |
4609 | to the ESCAPED solution. The functions make the first argument |
4610 | pointed to memory point to what the second argument pointed to |
4611 | memory points to. */ |
4612 | case BUILT_IN_STRCPY: |
4613 | case BUILT_IN_STRNCPY: |
4614 | case BUILT_IN_BCOPY: |
4615 | case BUILT_IN_MEMCPY: |
4616 | case BUILT_IN_MEMMOVE: |
4617 | case BUILT_IN_MEMPCPY: |
4618 | case BUILT_IN_STPCPY: |
4619 | case BUILT_IN_STPNCPY: |
4620 | case BUILT_IN_STRCAT: |
4621 | case BUILT_IN_STRNCAT: |
4622 | case BUILT_IN_STRCPY_CHK: |
4623 | case BUILT_IN_STRNCPY_CHK: |
4624 | case BUILT_IN_MEMCPY_CHK: |
4625 | case BUILT_IN_MEMMOVE_CHK: |
4626 | case BUILT_IN_MEMPCPY_CHK: |
4627 | case BUILT_IN_STPCPY_CHK: |
4628 | case BUILT_IN_STPNCPY_CHK: |
4629 | case BUILT_IN_STRCAT_CHK: |
4630 | case BUILT_IN_STRNCAT_CHK: |
4631 | case BUILT_IN_TM_MEMCPY: |
4632 | case BUILT_IN_TM_MEMMOVE: |
4633 | { |
4634 | tree res = gimple_call_lhs (gs: t); |
4635 | tree dest = gimple_call_arg (gs: t, index: (DECL_FUNCTION_CODE (decl: fndecl) |
4636 | == BUILT_IN_BCOPY ? 1 : 0)); |
4637 | tree src = gimple_call_arg (gs: t, index: (DECL_FUNCTION_CODE (decl: fndecl) |
4638 | == BUILT_IN_BCOPY ? 0 : 1)); |
4639 | if (res != NULL_TREE) |
4640 | { |
4641 | get_constraint_for (t: res, results: &lhsc); |
4642 | if (DECL_FUNCTION_CODE (decl: fndecl) == BUILT_IN_MEMPCPY |
4643 | || DECL_FUNCTION_CODE (decl: fndecl) == BUILT_IN_STPCPY |
4644 | || DECL_FUNCTION_CODE (decl: fndecl) == BUILT_IN_STPNCPY |
4645 | || DECL_FUNCTION_CODE (decl: fndecl) == BUILT_IN_MEMPCPY_CHK |
4646 | || DECL_FUNCTION_CODE (decl: fndecl) == BUILT_IN_STPCPY_CHK |
4647 | || DECL_FUNCTION_CODE (decl: fndecl) == BUILT_IN_STPNCPY_CHK) |
4648 | get_constraint_for_ptr_offset (ptr: dest, NULL_TREE, results: &rhsc); |
4649 | else |
4650 | get_constraint_for (t: dest, results: &rhsc); |
4651 | process_all_all_constraints (lhsc, rhsc); |
4652 | lhsc.truncate (size: 0); |
4653 | rhsc.truncate (size: 0); |
4654 | } |
4655 | get_constraint_for_ptr_offset (ptr: dest, NULL_TREE, results: &lhsc); |
4656 | get_constraint_for_ptr_offset (ptr: src, NULL_TREE, results: &rhsc); |
4657 | do_deref (constraints: &lhsc); |
4658 | do_deref (constraints: &rhsc); |
4659 | process_all_all_constraints (lhsc, rhsc); |
4660 | return true; |
4661 | } |
4662 | case BUILT_IN_MEMSET: |
4663 | case BUILT_IN_MEMSET_CHK: |
4664 | case BUILT_IN_TM_MEMSET: |
4665 | { |
4666 | tree res = gimple_call_lhs (gs: t); |
4667 | tree dest = gimple_call_arg (gs: t, index: 0); |
4668 | unsigned i; |
4669 | ce_s *lhsp; |
4670 | struct constraint_expr ac; |
4671 | if (res != NULL_TREE) |
4672 | { |
4673 | get_constraint_for (t: res, results: &lhsc); |
4674 | get_constraint_for (t: dest, results: &rhsc); |
4675 | process_all_all_constraints (lhsc, rhsc); |
4676 | lhsc.truncate (size: 0); |
4677 | } |
4678 | get_constraint_for_ptr_offset (ptr: dest, NULL_TREE, results: &lhsc); |
4679 | do_deref (constraints: &lhsc); |
4680 | if (flag_delete_null_pointer_checks |
4681 | && integer_zerop (gimple_call_arg (gs: t, index: 1))) |
4682 | { |
4683 | ac.type = ADDRESSOF; |
4684 | ac.var = nothing_id; |
4685 | } |
4686 | else |
4687 | { |
4688 | ac.type = SCALAR; |
4689 | ac.var = integer_id; |
4690 | } |
4691 | ac.offset = 0; |
4692 | FOR_EACH_VEC_ELT (lhsc, i, lhsp) |
4693 | process_constraint (t: new_constraint (lhs: *lhsp, rhs: ac)); |
4694 | return true; |
4695 | } |
4696 | case BUILT_IN_STACK_SAVE: |
4697 | case BUILT_IN_STACK_RESTORE: |
4698 | /* Nothing interesting happens. */ |
4699 | return true; |
4700 | case BUILT_IN_ALLOCA: |
4701 | case BUILT_IN_ALLOCA_WITH_ALIGN: |
4702 | case BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX: |
4703 | { |
4704 | tree ptr = gimple_call_lhs (gs: t); |
4705 | if (ptr == NULL_TREE) |
4706 | return true; |
4707 | get_constraint_for (t: ptr, results: &lhsc); |
4708 | varinfo_t vi = make_heapvar (name: "HEAP" , add_id: true); |
4709 | /* Alloca storage is never global. To exempt it from escaped |
4710 | handling make it a non-heap var. */ |
4711 | DECL_EXTERNAL (vi->decl) = 0; |
4712 | vi->is_global_var = 0; |
4713 | vi->is_heap_var = 0; |
4714 | struct constraint_expr tmpc; |
4715 | tmpc.var = vi->id; |
4716 | tmpc.offset = 0; |
4717 | tmpc.type = ADDRESSOF; |
4718 | rhsc.safe_push (obj: tmpc); |
4719 | process_all_all_constraints (lhsc, rhsc); |
4720 | return true; |
4721 | } |
4722 | case BUILT_IN_POSIX_MEMALIGN: |
4723 | { |
4724 | tree ptrptr = gimple_call_arg (gs: t, index: 0); |
4725 | get_constraint_for (t: ptrptr, results: &lhsc); |
4726 | do_deref (constraints: &lhsc); |
4727 | varinfo_t vi = make_heapvar (name: "HEAP" , add_id: true); |
4728 | /* We are marking allocated storage local, we deal with it becoming |
4729 | global by escaping and setting of vars_contains_escaped_heap. */ |
4730 | DECL_EXTERNAL (vi->decl) = 0; |
4731 | vi->is_global_var = 0; |
4732 | struct constraint_expr tmpc; |
4733 | tmpc.var = vi->id; |
4734 | tmpc.offset = 0; |
4735 | tmpc.type = ADDRESSOF; |
4736 | rhsc.safe_push (obj: tmpc); |
4737 | process_all_all_constraints (lhsc, rhsc); |
4738 | return true; |
4739 | } |
4740 | case BUILT_IN_ASSUME_ALIGNED: |
4741 | { |
4742 | tree res = gimple_call_lhs (gs: t); |
4743 | tree dest = gimple_call_arg (gs: t, index: 0); |
4744 | if (res != NULL_TREE) |
4745 | { |
4746 | get_constraint_for (t: res, results: &lhsc); |
4747 | get_constraint_for (t: dest, results: &rhsc); |
4748 | process_all_all_constraints (lhsc, rhsc); |
4749 | } |
4750 | return true; |
4751 | } |
4752 | /* All the following functions do not return pointers, do not |
4753 | modify the points-to sets of memory reachable from their |
4754 | arguments and do not add to the ESCAPED solution. */ |
4755 | case BUILT_IN_SINCOS: |
4756 | case BUILT_IN_SINCOSF: |
4757 | case BUILT_IN_SINCOSL: |
4758 | case BUILT_IN_FREXP: |
4759 | case BUILT_IN_FREXPF: |
4760 | case BUILT_IN_FREXPL: |
4761 | case BUILT_IN_GAMMA_R: |
4762 | case BUILT_IN_GAMMAF_R: |
4763 | case BUILT_IN_GAMMAL_R: |
4764 | case BUILT_IN_LGAMMA_R: |
4765 | case BUILT_IN_LGAMMAF_R: |
4766 | case BUILT_IN_LGAMMAL_R: |
4767 | case BUILT_IN_MODF: |
4768 | case BUILT_IN_MODFF: |
4769 | case BUILT_IN_MODFL: |
4770 | case BUILT_IN_REMQUO: |
4771 | case BUILT_IN_REMQUOF: |
4772 | case BUILT_IN_REMQUOL: |
4773 | case BUILT_IN_FREE: |
4774 | return true; |
4775 | case BUILT_IN_STRDUP: |
4776 | case BUILT_IN_STRNDUP: |
4777 | case BUILT_IN_REALLOC: |
4778 | if (gimple_call_lhs (gs: t)) |
4779 | { |
4780 | auto_vec<ce_s> rhsc; |
4781 | handle_lhs_call (stmt: t, lhs: gimple_call_lhs (gs: t), |
4782 | flags: gimple_call_return_flags (t) | ERF_NOALIAS, |
4783 | rhsc, fndecl); |
4784 | get_constraint_for_ptr_offset (ptr: gimple_call_lhs (gs: t), |
4785 | NULL_TREE, results: &lhsc); |
4786 | get_constraint_for_ptr_offset (ptr: gimple_call_arg (gs: t, index: 0), |
4787 | NULL_TREE, results: &rhsc); |
4788 | do_deref (constraints: &lhsc); |
4789 | do_deref (constraints: &rhsc); |
4790 | process_all_all_constraints (lhsc, rhsc); |
4791 | lhsc.truncate (size: 0); |
4792 | rhsc.truncate (size: 0); |
4793 | /* For realloc the resulting pointer can be equal to the |
4794 | argument as well. But only doing this wouldn't be |
4795 | correct because with ptr == 0 realloc behaves like malloc. */ |
4796 | if (DECL_FUNCTION_CODE (decl: fndecl) == BUILT_IN_REALLOC) |
4797 | { |
4798 | get_constraint_for (t: gimple_call_lhs (gs: t), results: &lhsc); |
4799 | get_constraint_for (t: gimple_call_arg (gs: t, index: 0), results: &rhsc); |
4800 | process_all_all_constraints (lhsc, rhsc); |
4801 | } |
4802 | return true; |
4803 | } |
4804 | break; |
4805 | /* String / character search functions return a pointer into the |
4806 | source string or NULL. */ |
4807 | case BUILT_IN_INDEX: |
4808 | case BUILT_IN_STRCHR: |
4809 | case BUILT_IN_STRRCHR: |
4810 | case BUILT_IN_MEMCHR: |
4811 | case BUILT_IN_STRSTR: |
4812 | case BUILT_IN_STRPBRK: |
4813 | if (gimple_call_lhs (gs: t)) |
4814 | { |
4815 | tree src = gimple_call_arg (gs: t, index: 0); |
4816 | get_constraint_for_ptr_offset (ptr: src, NULL_TREE, results: &rhsc); |
4817 | constraint_expr nul; |
4818 | nul.var = nothing_id; |
4819 | nul.offset = 0; |
4820 | nul.type = ADDRESSOF; |
4821 | rhsc.safe_push (obj: nul); |
4822 | get_constraint_for (t: gimple_call_lhs (gs: t), results: &lhsc); |
4823 | process_all_all_constraints (lhsc, rhsc); |
4824 | } |
4825 | return true; |
4826 | /* Pure functions that return something not based on any object and |
4827 | that use the memory pointed to by their arguments (but not |
4828 | transitively). */ |
4829 | case BUILT_IN_STRCMP: |
4830 | case BUILT_IN_STRCMP_EQ: |
4831 | case BUILT_IN_STRNCMP: |
4832 | case BUILT_IN_STRNCMP_EQ: |
4833 | case BUILT_IN_STRCASECMP: |
4834 | case BUILT_IN_STRNCASECMP: |
4835 | case BUILT_IN_MEMCMP: |
4836 | case BUILT_IN_BCMP: |
4837 | case BUILT_IN_STRSPN: |
4838 | case BUILT_IN_STRCSPN: |
4839 | { |
4840 | varinfo_t uses = get_call_use_vi (call: t); |
4841 | make_any_offset_constraints (vi: uses); |
4842 | make_constraint_to (id: uses->id, op: gimple_call_arg (gs: t, index: 0)); |
4843 | make_constraint_to (id: uses->id, op: gimple_call_arg (gs: t, index: 1)); |
4844 | /* No constraints are necessary for the return value. */ |
4845 | return true; |
4846 | } |
4847 | case BUILT_IN_STRLEN: |
4848 | { |
4849 | varinfo_t uses = get_call_use_vi (call: t); |
4850 | make_any_offset_constraints (vi: uses); |
4851 | make_constraint_to (id: uses->id, op: gimple_call_arg (gs: t, index: 0)); |
4852 | /* No constraints are necessary for the return value. */ |
4853 | return true; |
4854 | } |
4855 | case BUILT_IN_OBJECT_SIZE: |
4856 | case BUILT_IN_CONSTANT_P: |
4857 | { |
4858 | /* No constraints are necessary for the return value or the |
4859 | arguments. */ |
4860 | return true; |
4861 | } |
4862 | /* Trampolines are special - they set up passing the static |
4863 | frame. */ |
4864 | case BUILT_IN_INIT_TRAMPOLINE: |
4865 | { |
4866 | tree tramp = gimple_call_arg (gs: t, index: 0); |
4867 | tree nfunc = gimple_call_arg (gs: t, index: 1); |
4868 | tree frame = gimple_call_arg (gs: t, index: 2); |
4869 | unsigned i; |
4870 | struct constraint_expr lhs, *rhsp; |
4871 | if (in_ipa_mode) |
4872 | { |
4873 | varinfo_t nfi = NULL; |
4874 | gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR); |
4875 | nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0)); |
4876 | if (nfi) |
4877 | { |
4878 | lhs = get_function_part_constraint (fi: nfi, part: fi_static_chain); |
4879 | get_constraint_for (t: frame, results: &rhsc); |
4880 | FOR_EACH_VEC_ELT (rhsc, i, rhsp) |
4881 | process_constraint (t: new_constraint (lhs, rhs: *rhsp)); |
4882 | rhsc.truncate (size: 0); |
4883 | |
4884 | /* Make the frame point to the function for |
4885 | the trampoline adjustment call. */ |
4886 | get_constraint_for (t: tramp, results: &lhsc); |
4887 | do_deref (constraints: &lhsc); |
4888 | get_constraint_for (t: nfunc, results: &rhsc); |
4889 | process_all_all_constraints (lhsc, rhsc); |
4890 | |
4891 | return true; |
4892 | } |
4893 | } |
4894 | /* Else fallthru to generic handling which will let |
4895 | the frame escape. */ |
4896 | break; |
4897 | } |
4898 | case BUILT_IN_ADJUST_TRAMPOLINE: |
4899 | { |
4900 | tree tramp = gimple_call_arg (gs: t, index: 0); |
4901 | tree res = gimple_call_lhs (gs: t); |
4902 | if (in_ipa_mode && res) |
4903 | { |
4904 | get_constraint_for (t: res, results: &lhsc); |
4905 | get_constraint_for (t: tramp, results: &rhsc); |
4906 | do_deref (constraints: &rhsc); |
4907 | process_all_all_constraints (lhsc, rhsc); |
4908 | } |
4909 | return true; |
4910 | } |
4911 | CASE_BUILT_IN_TM_STORE (1): |
4912 | CASE_BUILT_IN_TM_STORE (2): |
4913 | CASE_BUILT_IN_TM_STORE (4): |
4914 | CASE_BUILT_IN_TM_STORE (8): |
4915 | CASE_BUILT_IN_TM_STORE (FLOAT): |
4916 | CASE_BUILT_IN_TM_STORE (DOUBLE): |
4917 | CASE_BUILT_IN_TM_STORE (LDOUBLE): |
4918 | CASE_BUILT_IN_TM_STORE (M64): |
4919 | CASE_BUILT_IN_TM_STORE (M128): |
4920 | CASE_BUILT_IN_TM_STORE (M256): |
4921 | { |
4922 | tree addr = gimple_call_arg (gs: t, index: 0); |
4923 | tree src = gimple_call_arg (gs: t, index: 1); |
4924 | |
4925 | get_constraint_for (t: addr, results: &lhsc); |
4926 | do_deref (constraints: &lhsc); |
4927 | get_constraint_for (t: src, results: &rhsc); |
4928 | process_all_all_constraints (lhsc, rhsc); |
4929 | return true; |
4930 | } |
4931 | CASE_BUILT_IN_TM_LOAD (1): |
4932 | CASE_BUILT_IN_TM_LOAD (2): |
4933 | CASE_BUILT_IN_TM_LOAD (4): |
4934 | CASE_BUILT_IN_TM_LOAD (8): |
4935 | CASE_BUILT_IN_TM_LOAD (FLOAT): |
4936 | CASE_BUILT_IN_TM_LOAD (DOUBLE): |
4937 | CASE_BUILT_IN_TM_LOAD (LDOUBLE): |
4938 | CASE_BUILT_IN_TM_LOAD (M64): |
4939 | CASE_BUILT_IN_TM_LOAD (M128): |
4940 | CASE_BUILT_IN_TM_LOAD (M256): |
4941 | { |
4942 | tree dest = gimple_call_lhs (gs: t); |
4943 | tree addr = gimple_call_arg (gs: t, index: 0); |
4944 | |
4945 | get_constraint_for (t: dest, results: &lhsc); |
4946 | get_constraint_for (t: addr, results: &rhsc); |
4947 | do_deref (constraints: &rhsc); |
4948 | process_all_all_constraints (lhsc, rhsc); |
4949 | return true; |
4950 | } |
4951 | /* Variadic argument handling needs to be handled in IPA |
4952 | mode as well. */ |
4953 | case BUILT_IN_VA_START: |
4954 | { |
4955 | tree valist = gimple_call_arg (gs: t, index: 0); |
4956 | struct constraint_expr rhs, *lhsp; |
4957 | unsigned i; |
4958 | get_constraint_for_ptr_offset (ptr: valist, NULL_TREE, results: &lhsc); |
4959 | do_deref (constraints: &lhsc); |
4960 | /* The va_list gets access to pointers in variadic |
4961 | arguments. Which we know in the case of IPA analysis |
4962 | and otherwise are just all nonlocal variables. */ |
4963 | if (in_ipa_mode) |
4964 | { |
4965 | fi = lookup_vi_for_tree (t: fn->decl); |
4966 | rhs = get_function_part_constraint (fi, part: ~0); |
4967 | rhs.type = ADDRESSOF; |
4968 | } |
4969 | else |
4970 | { |
4971 | rhs.var = nonlocal_id; |
4972 | rhs.type = ADDRESSOF; |
4973 | rhs.offset = 0; |
4974 | } |
4975 | FOR_EACH_VEC_ELT (lhsc, i, lhsp) |
4976 | process_constraint (t: new_constraint (lhs: *lhsp, rhs)); |
4977 | /* va_list is clobbered. */ |
4978 | make_constraint_to (id: get_call_clobber_vi (call: t)->id, op: valist); |
4979 | return true; |
4980 | } |
4981 | /* va_end doesn't have any effect that matters. */ |
4982 | case BUILT_IN_VA_END: |
4983 | return true; |
4984 | /* Alternate return. Simply give up for now. */ |
4985 | case BUILT_IN_RETURN: |
4986 | { |
4987 | fi = NULL; |
4988 | if (!in_ipa_mode |
4989 | || !(fi = get_vi_for_tree (t: fn->decl))) |
4990 | make_constraint_from (vi: get_varinfo (n: escaped_id), from: anything_id); |
4991 | else if (in_ipa_mode |
4992 | && fi != NULL) |
4993 | { |
4994 | struct constraint_expr lhs, rhs; |
4995 | lhs = get_function_part_constraint (fi, part: fi_result); |
4996 | rhs.var = anything_id; |
4997 | rhs.offset = 0; |
4998 | rhs.type = SCALAR; |
4999 | process_constraint (t: new_constraint (lhs, rhs)); |
5000 | } |
5001 | return true; |
5002 | } |
5003 | case BUILT_IN_GOMP_PARALLEL: |
5004 | case BUILT_IN_GOACC_PARALLEL: |
5005 | { |
5006 | if (in_ipa_mode) |
5007 | { |
5008 | unsigned int fnpos, argpos; |
5009 | switch (DECL_FUNCTION_CODE (decl: fndecl)) |
5010 | { |
5011 | case BUILT_IN_GOMP_PARALLEL: |
5012 | /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */ |
5013 | fnpos = 0; |
5014 | argpos = 1; |
5015 | break; |
5016 | case BUILT_IN_GOACC_PARALLEL: |
5017 | /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs, |
5018 | sizes, kinds, ...). */ |
5019 | fnpos = 1; |
5020 | argpos = 3; |
5021 | break; |
5022 | default: |
5023 | gcc_unreachable (); |
5024 | } |
5025 | |
5026 | tree fnarg = gimple_call_arg (gs: t, index: fnpos); |
5027 | gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR); |
5028 | tree fndecl = TREE_OPERAND (fnarg, 0); |
5029 | if (fndecl_maybe_in_other_partition (fndecl)) |
5030 | /* Fallthru to general call handling. */ |
5031 | break; |
5032 | |
5033 | tree arg = gimple_call_arg (gs: t, index: argpos); |
5034 | |
5035 | varinfo_t fi = get_vi_for_tree (t: fndecl); |
5036 | find_func_aliases_for_call_arg (fi, index: 0, arg); |
5037 | return true; |
5038 | } |
5039 | /* Else fallthru to generic call handling. */ |
5040 | break; |
5041 | } |
5042 | /* printf-style functions may have hooks to set pointers to |
5043 | point to somewhere into the generated string. Leave them |
5044 | for a later exercise... */ |
5045 | default: |
5046 | /* Fallthru to general call handling. */; |
5047 | } |
5048 | |
5049 | return false; |
5050 | } |
5051 | |
5052 | /* Create constraints for the call T. */ |
5053 | |
5054 | static void |
5055 | find_func_aliases_for_call (struct function *fn, gcall *t) |
5056 | { |
5057 | tree fndecl = gimple_call_fndecl (gs: t); |
5058 | varinfo_t fi; |
5059 | |
5060 | if (fndecl != NULL_TREE |
5061 | && fndecl_built_in_p (node: fndecl) |
5062 | && find_func_aliases_for_builtin_call (fn, t)) |
5063 | return; |
5064 | |
5065 | if (gimple_call_internal_p (gs: t, fn: IFN_DEFERRED_INIT)) |
5066 | return; |
5067 | |
5068 | fi = get_fi_for_callee (call: t); |
5069 | if (!in_ipa_mode |
5070 | || (fi->decl && fndecl && !fi->is_fn_info)) |
5071 | { |
5072 | auto_vec<ce_s, 16> rhsc; |
5073 | int flags = gimple_call_flags (t); |
5074 | |
5075 | /* Const functions can return their arguments and addresses |
5076 | of global memory but not of escaped memory. */ |
5077 | if (flags & (ECF_CONST|ECF_NOVOPS)) |
5078 | { |
5079 | if (gimple_call_lhs (gs: t)) |
5080 | handle_rhs_call (stmt: t, results: &rhsc, implicit_eaf_flags: implicit_const_eaf_flags, writes_global_memory: false, reads_global_memory: false); |
5081 | } |
5082 | /* Pure functions can return addresses in and of memory |
5083 | reachable from their arguments, but they are not an escape |
5084 | point for reachable memory of their arguments. */ |
5085 | else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE)) |
5086 | handle_rhs_call (stmt: t, results: &rhsc, implicit_eaf_flags: implicit_pure_eaf_flags, writes_global_memory: false, reads_global_memory: true); |
5087 | /* If the call is to a replaceable operator delete and results |
5088 | from a delete expression as opposed to a direct call to |
5089 | such operator, then the effects for PTA (in particular |
5090 | the escaping of the pointer) can be ignored. */ |
5091 | else if (fndecl |
5092 | && DECL_IS_OPERATOR_DELETE_P (fndecl) |
5093 | && gimple_call_from_new_or_delete (s: t)) |
5094 | ; |
5095 | else |
5096 | handle_rhs_call (stmt: t, results: &rhsc, implicit_eaf_flags: 0, writes_global_memory: true, reads_global_memory: true); |
5097 | if (gimple_call_lhs (gs: t)) |
5098 | handle_lhs_call (stmt: t, lhs: gimple_call_lhs (gs: t), |
5099 | flags: gimple_call_return_flags (t), rhsc, fndecl); |
5100 | } |
5101 | else |
5102 | { |
5103 | auto_vec<ce_s, 2> rhsc; |
5104 | tree lhsop; |
5105 | unsigned j; |
5106 | |
5107 | /* Assign all the passed arguments to the appropriate incoming |
5108 | parameters of the function. */ |
5109 | for (j = 0; j < gimple_call_num_args (gs: t); j++) |
5110 | { |
5111 | tree arg = gimple_call_arg (gs: t, index: j); |
5112 | find_func_aliases_for_call_arg (fi, index: j, arg); |
5113 | } |
5114 | |
5115 | /* If we are returning a value, assign it to the result. */ |
5116 | lhsop = gimple_call_lhs (gs: t); |
5117 | if (lhsop) |
5118 | { |
5119 | auto_vec<ce_s, 2> lhsc; |
5120 | struct constraint_expr rhs; |
5121 | struct constraint_expr *lhsp; |
5122 | bool aggr_p = aggregate_value_p (lhsop, gimple_call_fntype (gs: t)); |
5123 | |
5124 | get_constraint_for (t: lhsop, results: &lhsc); |
5125 | rhs = get_function_part_constraint (fi, part: fi_result); |
5126 | if (aggr_p) |
5127 | { |
5128 | auto_vec<ce_s, 2> tem; |
5129 | tem.quick_push (obj: rhs); |
5130 | do_deref (constraints: &tem); |
5131 | gcc_checking_assert (tem.length () == 1); |
5132 | rhs = tem[0]; |
5133 | } |
5134 | FOR_EACH_VEC_ELT (lhsc, j, lhsp) |
5135 | process_constraint (t: new_constraint (lhs: *lhsp, rhs)); |
5136 | |
5137 | /* If we pass the result decl by reference, honor that. */ |
5138 | if (aggr_p) |
5139 | { |
5140 | struct constraint_expr lhs; |
5141 | struct constraint_expr *rhsp; |
5142 | |
5143 | get_constraint_for_address_of (t: lhsop, results: &rhsc); |
5144 | lhs = get_function_part_constraint (fi, part: fi_result); |
5145 | FOR_EACH_VEC_ELT (rhsc, j, rhsp) |
5146 | process_constraint (t: new_constraint (lhs, rhs: *rhsp)); |
5147 | rhsc.truncate (size: 0); |
5148 | } |
5149 | } |
5150 | |
5151 | /* If we use a static chain, pass it along. */ |
5152 | if (gimple_call_chain (gs: t)) |
5153 | { |
5154 | struct constraint_expr lhs; |
5155 | struct constraint_expr *rhsp; |
5156 | |
5157 | get_constraint_for (t: gimple_call_chain (gs: t), results: &rhsc); |
5158 | lhs = get_function_part_constraint (fi, part: fi_static_chain); |
5159 | FOR_EACH_VEC_ELT (rhsc, j, rhsp) |
5160 | process_constraint (t: new_constraint (lhs, rhs: *rhsp)); |
5161 | } |
5162 | } |
5163 | } |
5164 | |
5165 | /* Walk statement T setting up aliasing constraints according to the |
5166 | references found in T. This function is the main part of the |
5167 | constraint builder. AI points to auxiliary alias information used |
5168 | when building alias sets and computing alias grouping heuristics. */ |
5169 | |
5170 | static void |
5171 | find_func_aliases (struct function *fn, gimple *origt) |
5172 | { |
5173 | gimple *t = origt; |
5174 | auto_vec<ce_s, 16> lhsc; |
5175 | auto_vec<ce_s, 16> rhsc; |
5176 | varinfo_t fi; |
5177 | |
5178 | /* Now build constraints expressions. */ |
5179 | if (gimple_code (g: t) == GIMPLE_PHI) |
5180 | { |
5181 | /* For a phi node, assign all the arguments to |
5182 | the result. */ |
5183 | get_constraint_for (t: gimple_phi_result (gs: t), results: &lhsc); |
5184 | for (unsigned i = 0; i < gimple_phi_num_args (gs: t); i++) |
5185 | { |
5186 | get_constraint_for_rhs (t: gimple_phi_arg_def (gs: t, index: i), results: &rhsc); |
5187 | process_all_all_constraints (lhsc, rhsc); |
5188 | rhsc.truncate (size: 0); |
5189 | } |
5190 | } |
5191 | /* In IPA mode, we need to generate constraints to pass call |
5192 | arguments through their calls. There are two cases, |
5193 | either a GIMPLE_CALL returning a value, or just a plain |
5194 | GIMPLE_CALL when we are not. |
5195 | |
5196 | In non-ipa mode, we need to generate constraints for each |
5197 | pointer passed by address. */ |
5198 | else if (is_gimple_call (gs: t)) |
5199 | find_func_aliases_for_call (fn, t: as_a <gcall *> (p: t)); |
5200 | |
5201 | /* Otherwise, just a regular assignment statement. Only care about |
5202 | operations with pointer result, others are dealt with as escape |
5203 | points if they have pointer operands. */ |
5204 | else if (is_gimple_assign (gs: t)) |
5205 | { |
5206 | /* Otherwise, just a regular assignment statement. */ |
5207 | tree lhsop = gimple_assign_lhs (gs: t); |
5208 | tree rhsop = (gimple_num_ops (gs: t) == 2) ? gimple_assign_rhs1 (gs: t) : NULL; |
5209 | |
5210 | if (rhsop && TREE_CLOBBER_P (rhsop)) |
5211 | /* Ignore clobbers, they don't actually store anything into |
5212 | the LHS. */ |
5213 | ; |
5214 | else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop))) |
5215 | do_structure_copy (lhsop, rhsop); |
5216 | else |
5217 | { |
5218 | enum tree_code code = gimple_assign_rhs_code (gs: t); |
5219 | |
5220 | get_constraint_for (t: lhsop, results: &lhsc); |
5221 | |
5222 | if (code == POINTER_PLUS_EXPR) |
5223 | get_constraint_for_ptr_offset (ptr: gimple_assign_rhs1 (gs: t), |
5224 | offset: gimple_assign_rhs2 (gs: t), results: &rhsc); |
5225 | else if (code == POINTER_DIFF_EXPR) |
5226 | /* The result is not a pointer (part). */ |
5227 | ; |
5228 | else if (code == BIT_AND_EXPR |
5229 | && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST) |
5230 | { |
5231 | /* Aligning a pointer via a BIT_AND_EXPR is offsetting |
5232 | the pointer. Handle it by offsetting it by UNKNOWN. */ |
5233 | get_constraint_for_ptr_offset (ptr: gimple_assign_rhs1 (gs: t), |
5234 | NULL_TREE, results: &rhsc); |
5235 | } |
5236 | else if (code == TRUNC_DIV_EXPR |
5237 | || code == CEIL_DIV_EXPR |
5238 | || code == FLOOR_DIV_EXPR |
5239 | || code == ROUND_DIV_EXPR |
5240 | || code == EXACT_DIV_EXPR |
5241 | || code == TRUNC_MOD_EXPR |
5242 | || code == CEIL_MOD_EXPR |
5243 | || code == FLOOR_MOD_EXPR |
5244 | || code == ROUND_MOD_EXPR) |
5245 | /* Division and modulo transfer the pointer from the LHS. */ |
5246 | get_constraint_for_ptr_offset (ptr: gimple_assign_rhs1 (gs: t), |
5247 | NULL_TREE, results: &rhsc); |
5248 | else if (CONVERT_EXPR_CODE_P (code) |
5249 | || gimple_assign_single_p (gs: t)) |
5250 | /* See through conversions, single RHS are handled by |
5251 | get_constraint_for_rhs. */ |
5252 | get_constraint_for_rhs (t: rhsop, results: &rhsc); |
5253 | else if (code == COND_EXPR) |
5254 | { |
5255 | /* The result is a merge of both COND_EXPR arms. */ |
5256 | auto_vec<ce_s, 2> tmp; |
5257 | struct constraint_expr *rhsp; |
5258 | unsigned i; |
5259 | get_constraint_for_rhs (t: gimple_assign_rhs2 (gs: t), results: &rhsc); |
5260 | get_constraint_for_rhs (t: gimple_assign_rhs3 (gs: t), results: &tmp); |
5261 | FOR_EACH_VEC_ELT (tmp, i, rhsp) |
5262 | rhsc.safe_push (obj: *rhsp); |
5263 | } |
5264 | else if (truth_value_p (code)) |
5265 | /* Truth value results are not pointer (parts). Or at least |
5266 | very unreasonable obfuscation of a part. */ |
5267 | ; |
5268 | else |
5269 | { |
5270 | /* All other operations are possibly offsetting merges. */ |
5271 | auto_vec<ce_s, 4> tmp; |
5272 | struct constraint_expr *rhsp; |
5273 | unsigned i, j; |
5274 | get_constraint_for_ptr_offset (ptr: gimple_assign_rhs1 (gs: t), |
5275 | NULL_TREE, results: &rhsc); |
5276 | for (i = 2; i < gimple_num_ops (gs: t); ++i) |
5277 | { |
5278 | get_constraint_for_ptr_offset (ptr: gimple_op (gs: t, i), |
5279 | NULL_TREE, results: &tmp); |
5280 | FOR_EACH_VEC_ELT (tmp, j, rhsp) |
5281 | rhsc.safe_push (obj: *rhsp); |
5282 | tmp.truncate (size: 0); |
5283 | } |
5284 | } |
5285 | process_all_all_constraints (lhsc, rhsc); |
5286 | } |
5287 | /* If there is a store to a global variable the rhs escapes. */ |
5288 | if ((lhsop = get_base_address (t: lhsop)) != NULL_TREE |
5289 | && DECL_P (lhsop)) |
5290 | { |
5291 | varinfo_t vi = get_vi_for_tree (t: lhsop); |
5292 | if ((! in_ipa_mode && vi->is_global_var) |
5293 | || vi->is_ipa_escape_point) |
5294 | make_escape_constraint (op: rhsop); |
5295 | } |
5296 | } |
5297 | /* Handle escapes through return. */ |
5298 | else if (gimple_code (g: t) == GIMPLE_RETURN |
5299 | && gimple_return_retval (gs: as_a <greturn *> (p: t)) != NULL_TREE) |
5300 | { |
5301 | greturn *return_stmt = as_a <greturn *> (p: t); |
5302 | tree retval = gimple_return_retval (gs: return_stmt); |
5303 | if (!in_ipa_mode) |
5304 | make_constraint_to (id: escaped_return_id, op: retval); |
5305 | else |
5306 | { |
5307 | struct constraint_expr lhs ; |
5308 | struct constraint_expr *rhsp; |
5309 | unsigned i; |
5310 | |
5311 | fi = lookup_vi_for_tree (t: fn->decl); |
5312 | lhs = get_function_part_constraint (fi, part: fi_result); |
5313 | get_constraint_for_rhs (t: retval, results: &rhsc); |
5314 | FOR_EACH_VEC_ELT (rhsc, i, rhsp) |
5315 | process_constraint (t: new_constraint (lhs, rhs: *rhsp)); |
5316 | } |
5317 | } |
5318 | /* Handle asms conservatively by adding escape constraints to everything. */ |
5319 | else if (gasm *asm_stmt = dyn_cast <gasm *> (p: t)) |
5320 | { |
5321 | unsigned i, noutputs; |
5322 | const char **oconstraints; |
5323 | const char *constraint; |
5324 | bool allows_mem, allows_reg, is_inout; |
5325 | |
5326 | noutputs = gimple_asm_noutputs (asm_stmt); |
5327 | oconstraints = XALLOCAVEC (const char *, noutputs); |
5328 | |
5329 | for (i = 0; i < noutputs; ++i) |
5330 | { |
5331 | tree link = gimple_asm_output_op (asm_stmt, index: i); |
5332 | tree op = TREE_VALUE (link); |
5333 | |
5334 | constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); |
5335 | oconstraints[i] = constraint; |
5336 | parse_output_constraint (&constraint, i, 0, 0, &allows_mem, |
5337 | &allows_reg, &is_inout); |
5338 | |
5339 | /* A memory constraint makes the address of the operand escape. */ |
5340 | if (!allows_reg && allows_mem) |
5341 | { |
5342 | auto_vec<ce_s> tmpc; |
5343 | get_constraint_for_address_of (t: op, results: &tmpc); |
5344 | make_constraints_to (id: escaped_id, rhsc: tmpc); |
5345 | } |
5346 | |
5347 | /* The asm may read global memory, so outputs may point to |
5348 | any global memory. */ |
5349 | if (op) |
5350 | { |
5351 | auto_vec<ce_s, 2> lhsc; |
5352 | struct constraint_expr rhsc, *lhsp; |
5353 | unsigned j; |
5354 | get_constraint_for (t: op, results: &lhsc); |
5355 | rhsc.var = nonlocal_id; |
5356 | rhsc.offset = 0; |
5357 | rhsc.type = SCALAR; |
5358 | FOR_EACH_VEC_ELT (lhsc, j, lhsp) |
5359 | process_constraint (t: new_constraint (lhs: *lhsp, rhs: rhsc)); |
5360 | } |
5361 | } |
5362 | for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i) |
5363 | { |
5364 | tree link = gimple_asm_input_op (asm_stmt, index: i); |
5365 | tree op = TREE_VALUE (link); |
5366 | |
5367 | constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); |
5368 | |
5369 | parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints, |
5370 | &allows_mem, &allows_reg); |
5371 | |
5372 | /* A memory constraint makes the address of the operand escape. */ |
5373 | if (!allows_reg && allows_mem) |
5374 | { |
5375 | auto_vec<ce_s> tmpc; |
5376 | get_constraint_for_address_of (t: op, results: &tmpc); |
5377 | make_constraints_to (id: escaped_id, rhsc: tmpc); |
5378 | } |
5379 | /* Strictly we'd only need the constraint to ESCAPED if |
5380 | the asm clobbers memory, otherwise using something |
5381 | along the lines of per-call clobbers/uses would be enough. */ |
5382 | else if (op) |
5383 | make_escape_constraint (op); |
5384 | } |
5385 | } |
5386 | } |
5387 | |
5388 | |
5389 | /* Create a constraint adding to the clobber set of FI the memory |
5390 | pointed to by PTR. */ |
5391 | |
5392 | static void |
5393 | process_ipa_clobber (varinfo_t fi, tree ptr) |
5394 | { |
5395 | vec<ce_s> ptrc = vNULL; |
5396 | struct constraint_expr *c, lhs; |
5397 | unsigned i; |
5398 | get_constraint_for_rhs (t: ptr, results: &ptrc); |
5399 | lhs = get_function_part_constraint (fi, part: fi_clobbers); |
5400 | FOR_EACH_VEC_ELT (ptrc, i, c) |
5401 | process_constraint (t: new_constraint (lhs, rhs: *c)); |
5402 | ptrc.release (); |
5403 | } |
5404 | |
5405 | /* Walk statement T setting up clobber and use constraints according to the |
5406 | references found in T. This function is a main part of the |
5407 | IPA constraint builder. */ |
5408 | |
5409 | static void |
5410 | find_func_clobbers (struct function *fn, gimple *origt) |
5411 | { |
5412 | gimple *t = origt; |
5413 | auto_vec<ce_s, 16> lhsc; |
5414 | auto_vec<ce_s, 16> rhsc; |
5415 | varinfo_t fi; |
5416 | |
5417 | /* Add constraints for clobbered/used in IPA mode. |
5418 | We are not interested in what automatic variables are clobbered |
5419 | or used as we only use the information in the caller to which |
5420 | they do not escape. */ |
5421 | gcc_assert (in_ipa_mode); |
5422 | |
5423 | /* If the stmt refers to memory in any way it better had a VUSE. */ |
5424 | if (gimple_vuse (g: t) == NULL_TREE) |
5425 | return; |
5426 | |
5427 | /* We'd better have function information for the current function. */ |
5428 | fi = lookup_vi_for_tree (t: fn->decl); |
5429 | gcc_assert (fi != NULL); |
5430 | |
5431 | /* Account for stores in assignments and calls. */ |
5432 | if (gimple_vdef (g: t) != NULL_TREE |
5433 | && gimple_has_lhs (stmt: t)) |
5434 | { |
5435 | tree lhs = gimple_get_lhs (t); |
5436 | tree tem = lhs; |
5437 | while (handled_component_p (t: tem)) |
5438 | tem = TREE_OPERAND (tem, 0); |
5439 | if ((DECL_P (tem) |
5440 | && !auto_var_in_fn_p (tem, fn->decl)) |
5441 | || INDIRECT_REF_P (tem) |
5442 | || (TREE_CODE (tem) == MEM_REF |
5443 | && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR |
5444 | && auto_var_in_fn_p |
5445 | (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl)))) |
5446 | { |
5447 | struct constraint_expr lhsc, *rhsp; |
5448 | unsigned i; |
5449 | lhsc = get_function_part_constraint (fi, part: fi_clobbers); |
5450 | get_constraint_for_address_of (t: lhs, results: &rhsc); |
5451 | FOR_EACH_VEC_ELT (rhsc, i, rhsp) |
5452 | process_constraint (t: new_constraint (lhs: lhsc, rhs: *rhsp)); |
5453 | rhsc.truncate (size: 0); |
5454 | } |
5455 | } |
5456 | |
5457 | /* Account for uses in assigments and returns. */ |
5458 | if (gimple_assign_single_p (gs: t) |
5459 | || (gimple_code (g: t) == GIMPLE_RETURN |
5460 | && gimple_return_retval (gs: as_a <greturn *> (p: t)) != NULL_TREE)) |
5461 | { |
5462 | tree rhs = (gimple_assign_single_p (gs: t) |
5463 | ? gimple_assign_rhs1 (gs: t) |
5464 | : gimple_return_retval (gs: as_a <greturn *> (p: t))); |
5465 | tree tem = rhs; |
5466 | while (handled_component_p (t: tem)) |
5467 | tem = TREE_OPERAND (tem, 0); |
5468 | if ((DECL_P (tem) |
5469 | && !auto_var_in_fn_p (tem, fn->decl)) |
5470 | || INDIRECT_REF_P (tem) |
5471 | || (TREE_CODE (tem) == MEM_REF |
5472 | && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR |
5473 | && auto_var_in_fn_p |
5474 | (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl)))) |
5475 | { |
5476 | struct constraint_expr lhs, *rhsp; |
5477 | unsigned i; |
5478 | lhs = get_function_part_constraint (fi, part: fi_uses); |
5479 | get_constraint_for_address_of (t: rhs, results: &rhsc); |
5480 | FOR_EACH_VEC_ELT (rhsc, i, rhsp) |
5481 | process_constraint (t: new_constraint (lhs, rhs: *rhsp)); |
5482 | rhsc.truncate (size: 0); |
5483 | } |
5484 | } |
5485 | |
5486 | if (gcall *call_stmt = dyn_cast <gcall *> (p: t)) |
5487 | { |
5488 | varinfo_t cfi = NULL; |
5489 | tree decl = gimple_call_fndecl (gs: t); |
5490 | struct constraint_expr lhs, rhs; |
5491 | unsigned i, j; |
5492 | |
5493 | /* For builtins we do not have separate function info. For those |
5494 | we do not generate escapes for we have to generate clobbers/uses. */ |
5495 | if (gimple_call_builtin_p (t, BUILT_IN_NORMAL)) |
5496 | switch (DECL_FUNCTION_CODE (decl)) |
5497 | { |
5498 | /* The following functions use and clobber memory pointed to |
5499 | by their arguments. */ |
5500 | case BUILT_IN_STRCPY: |
5501 | case BUILT_IN_STRNCPY: |
5502 | case BUILT_IN_BCOPY: |
5503 | case BUILT_IN_MEMCPY: |
5504 | case BUILT_IN_MEMMOVE: |
5505 | case BUILT_IN_MEMPCPY: |
5506 | case BUILT_IN_STPCPY: |
5507 | case BUILT_IN_STPNCPY: |
5508 | case BUILT_IN_STRCAT: |
5509 | case BUILT_IN_STRNCAT: |
5510 | case BUILT_IN_STRCPY_CHK: |
5511 | case BUILT_IN_STRNCPY_CHK: |
5512 | case BUILT_IN_MEMCPY_CHK: |
5513 | case BUILT_IN_MEMMOVE_CHK: |
5514 | case BUILT_IN_MEMPCPY_CHK: |
5515 | case BUILT_IN_STPCPY_CHK: |
5516 | case BUILT_IN_STPNCPY_CHK: |
5517 | case BUILT_IN_STRCAT_CHK: |
5518 | case BUILT_IN_STRNCAT_CHK: |
5519 | { |
5520 | tree dest = gimple_call_arg (gs: t, index: (DECL_FUNCTION_CODE (decl) |
5521 | == BUILT_IN_BCOPY ? 1 : 0)); |
5522 | tree src = gimple_call_arg (gs: t, index: (DECL_FUNCTION_CODE (decl) |
5523 | == BUILT_IN_BCOPY ? 0 : 1)); |
5524 | unsigned i; |
5525 | struct constraint_expr *rhsp, *lhsp; |
5526 | get_constraint_for_ptr_offset (ptr: dest, NULL_TREE, results: &lhsc); |
5527 | lhs = get_function_part_constraint (fi, part: fi_clobbers); |
5528 | FOR_EACH_VEC_ELT (lhsc, i, lhsp) |
5529 | process_constraint (t: new_constraint (lhs, rhs: *lhsp)); |
5530 | get_constraint_for_ptr_offset (ptr: src, NULL_TREE, results: &rhsc); |
5531 | lhs = get_function_part_constraint (fi, part: fi_uses); |
5532 | FOR_EACH_VEC_ELT (rhsc, i, rhsp) |
5533 | process_constraint (t: new_constraint (lhs, rhs: *rhsp)); |
5534 | return; |
5535 | } |
5536 | /* The following function clobbers memory pointed to by |
5537 | its argument. */ |
5538 | case BUILT_IN_MEMSET: |
5539 | case BUILT_IN_MEMSET_CHK: |
5540 | case BUILT_IN_POSIX_MEMALIGN: |
5541 | { |
5542 | tree dest = gimple_call_arg (gs: t, index: 0); |
5543 | unsigned i; |
5544 | ce_s *lhsp; |
5545 | get_constraint_for_ptr_offset (ptr: dest, NULL_TREE, results: &lhsc); |
5546 | lhs = get_function_part_constraint (fi, part: fi_clobbers); |
5547 | FOR_EACH_VEC_ELT (lhsc, i, lhsp) |
5548 | process_constraint (t: new_constraint (lhs, rhs: *lhsp)); |
5549 | return; |
5550 | } |
5551 | /* The following functions clobber their second and third |
5552 | arguments. */ |
5553 | case BUILT_IN_SINCOS: |
5554 | case BUILT_IN_SINCOSF: |
5555 | case BUILT_IN_SINCOSL: |
5556 | { |
5557 | process_ipa_clobber (fi, ptr: gimple_call_arg (gs: t, index: 1)); |
5558 | process_ipa_clobber (fi, ptr: gimple_call_arg (gs: t, index: 2)); |
5559 | return; |
5560 | } |
5561 | /* The following functions clobber their second argument. */ |
5562 | case BUILT_IN_FREXP: |
5563 | case BUILT_IN_FREXPF: |
5564 | case BUILT_IN_FREXPL: |
5565 | case BUILT_IN_LGAMMA_R: |
5566 | case BUILT_IN_LGAMMAF_R: |
5567 | case BUILT_IN_LGAMMAL_R: |
5568 | case BUILT_IN_GAMMA_R: |
5569 | case BUILT_IN_GAMMAF_R: |
5570 | case BUILT_IN_GAMMAL_R: |
5571 | case BUILT_IN_MODF: |
5572 | case BUILT_IN_MODFF: |
5573 | case BUILT_IN_MODFL: |
5574 | { |
5575 | process_ipa_clobber (fi, ptr: gimple_call_arg (gs: t, index: 1)); |
5576 | return; |
5577 | } |
5578 | /* The following functions clobber their third argument. */ |
5579 | case BUILT_IN_REMQUO: |
5580 | case BUILT_IN_REMQUOF: |
5581 | case BUILT_IN_REMQUOL: |
5582 | { |
5583 | process_ipa_clobber (fi, ptr: gimple_call_arg (gs: t, index: 2)); |
5584 | return; |
5585 | } |
5586 | /* The following functions use what their first argument |
5587 | points to. */ |
5588 | case BUILT_IN_STRDUP: |
5589 | case BUILT_IN_STRNDUP: |
5590 | case BUILT_IN_REALLOC: |
5591 | case BUILT_IN_INDEX: |
5592 | case BUILT_IN_STRCHR: |
5593 | case BUILT_IN_STRRCHR: |
5594 | case BUILT_IN_MEMCHR: |
5595 | { |
5596 | tree src = gimple_call_arg (gs: t, index: 0); |
5597 | get_constraint_for_ptr_offset (ptr: src, NULL_TREE, results: &rhsc); |
5598 | lhs = get_function_part_constraint (fi, part: fi_uses); |
5599 | struct constraint_expr *rhsp; |
5600 | FOR_EACH_VEC_ELT (rhsc, i, rhsp) |
5601 | process_constraint (t: new_constraint (lhs, rhs: *rhsp)); |
5602 | return; |
5603 | } |
5604 | /* The following functions use what their first and second argument |
5605 | point to. */ |
5606 | case BUILT_IN_STRSTR: |
5607 | case BUILT_IN_STRPBRK: |
5608 | { |
5609 | tree src = gimple_call_arg (gs: t, index: 0); |
5610 | get_constraint_for_ptr_offset (ptr: src, NULL_TREE, results: &rhsc); |
5611 | lhs = get_function_part_constraint (fi, part: fi_uses); |
5612 | struct constraint_expr *rhsp; |
5613 | FOR_EACH_VEC_ELT (rhsc, i, rhsp) |
5614 | process_constraint (t: new_constraint (lhs, rhs: *rhsp)); |
5615 | rhsc.truncate (size: 0); |
5616 | src = gimple_call_arg (gs: t, index: 1); |
5617 | get_constraint_for_ptr_offset (ptr: src, NULL_TREE, results: &rhsc); |
5618 | FOR_EACH_VEC_ELT (rhsc, i, rhsp) |
5619 | process_constraint (t: new_constraint (lhs, rhs: *rhsp)); |
5620 | return; |
5621 | } |
5622 | /* The following functions neither read nor clobber memory. */ |
5623 | case BUILT_IN_ASSUME_ALIGNED: |
5624 | case BUILT_IN_FREE: |
5625 | return; |
5626 | /* Trampolines are of no interest to us. */ |
5627 | case BUILT_IN_INIT_TRAMPOLINE: |
5628 | case BUILT_IN_ADJUST_TRAMPOLINE: |
5629 | return; |
5630 | case BUILT_IN_VA_START: |
5631 | case BUILT_IN_VA_END: |
5632 | return; |
5633 | case BUILT_IN_GOMP_PARALLEL: |
5634 | case BUILT_IN_GOACC_PARALLEL: |
5635 | { |
5636 | unsigned int fnpos, argpos; |
5637 | unsigned int implicit_use_args[2]; |
5638 | unsigned int num_implicit_use_args = 0; |
5639 | switch (DECL_FUNCTION_CODE (decl)) |
5640 | { |
5641 | case BUILT_IN_GOMP_PARALLEL: |
5642 | /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */ |
5643 | fnpos = 0; |
5644 | argpos = 1; |
5645 | break; |
5646 | case BUILT_IN_GOACC_PARALLEL: |
5647 | /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs, |
5648 | sizes, kinds, ...). */ |
5649 | fnpos = 1; |
5650 | argpos = 3; |
5651 | implicit_use_args[num_implicit_use_args++] = 4; |
5652 | implicit_use_args[num_implicit_use_args++] = 5; |
5653 | break; |
5654 | default: |
5655 | gcc_unreachable (); |
5656 | } |
5657 | |
5658 | tree fnarg = gimple_call_arg (gs: t, index: fnpos); |
5659 | gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR); |
5660 | tree fndecl = TREE_OPERAND (fnarg, 0); |
5661 | if (fndecl_maybe_in_other_partition (fndecl)) |
5662 | /* Fallthru to general call handling. */ |
5663 | break; |
5664 | |
5665 | varinfo_t cfi = get_vi_for_tree (t: fndecl); |
5666 | |
5667 | tree arg = gimple_call_arg (gs: t, index: argpos); |
5668 | |
5669 | /* Parameter passed by value is used. */ |
5670 | lhs = get_function_part_constraint (fi, part: fi_uses); |
5671 | struct constraint_expr *rhsp; |
5672 | get_constraint_for (t: arg, results: &rhsc); |
5673 | FOR_EACH_VEC_ELT (rhsc, j, rhsp) |
5674 | process_constraint (t: new_constraint (lhs, rhs: *rhsp)); |
5675 | rhsc.truncate (size: 0); |
5676 | |
5677 | /* Handle parameters used by the call, but not used in cfi, as |
5678 | implicitly used by cfi. */ |
5679 | lhs = get_function_part_constraint (fi: cfi, part: fi_uses); |
5680 | for (unsigned i = 0; i < num_implicit_use_args; ++i) |
5681 | { |
5682 | tree arg = gimple_call_arg (gs: t, index: implicit_use_args[i]); |
5683 | get_constraint_for (t: arg, results: &rhsc); |
5684 | FOR_EACH_VEC_ELT (rhsc, j, rhsp) |
5685 | process_constraint (t: new_constraint (lhs, rhs: *rhsp)); |
5686 | rhsc.truncate (size: 0); |
5687 | } |
5688 | |
5689 | /* The caller clobbers what the callee does. */ |
5690 | lhs = get_function_part_constraint (fi, part: fi_clobbers); |
5691 | rhs = get_function_part_constraint (fi: cfi, part: fi_clobbers); |
5692 | process_constraint (t: new_constraint (lhs, rhs)); |
5693 | |
5694 | /* The caller uses what the callee does. */ |
5695 | lhs = get_function_part_constraint (fi, part: fi_uses); |
5696 | rhs = get_function_part_constraint (fi: cfi, part: fi_uses); |
5697 | process_constraint (t: new_constraint (lhs, rhs)); |
5698 | |
5699 | return; |
5700 | } |
5701 | /* printf-style functions may have hooks to set pointers to |
5702 | point to somewhere into the generated string. Leave them |
5703 | for a later exercise... */ |
5704 | default: |
5705 | /* Fallthru to general call handling. */; |
5706 | } |
5707 | |
5708 | /* Parameters passed by value are used. */ |
5709 | lhs = get_function_part_constraint (fi, part: fi_uses); |
5710 | for (i = 0; i < gimple_call_num_args (gs: t); i++) |
5711 | { |
5712 | struct constraint_expr *rhsp; |
5713 | tree arg = gimple_call_arg (gs: t, index: i); |
5714 | |
5715 | if (TREE_CODE (arg) == SSA_NAME |
5716 | || is_gimple_min_invariant (arg)) |
5717 | continue; |
5718 | |
5719 | get_constraint_for_address_of (t: arg, results: &rhsc); |
5720 | FOR_EACH_VEC_ELT (rhsc, j, rhsp) |
5721 | process_constraint (t: new_constraint (lhs, rhs: *rhsp)); |
5722 | rhsc.truncate (size: 0); |
5723 | } |
5724 | |
5725 | /* Build constraints for propagating clobbers/uses along the |
5726 | callgraph edges. */ |
5727 | cfi = get_fi_for_callee (call: call_stmt); |
5728 | if (cfi->id == anything_id) |
5729 | { |
5730 | if (gimple_vdef (g: t)) |
5731 | make_constraint_from (vi: first_vi_for_offset (fi, fi_clobbers), |
5732 | from: anything_id); |
5733 | make_constraint_from (vi: first_vi_for_offset (fi, fi_uses), |
5734 | from: anything_id); |
5735 | return; |
5736 | } |
5737 | |
5738 | /* For callees without function info (that's external functions), |
5739 | ESCAPED is clobbered and used. */ |
5740 | if (cfi->decl |
5741 | && TREE_CODE (cfi->decl) == FUNCTION_DECL |
5742 | && !cfi->is_fn_info) |
5743 | { |
5744 | varinfo_t vi; |
5745 | |
5746 | if (gimple_vdef (g: t)) |
5747 | make_copy_constraint (vi: first_vi_for_offset (fi, fi_clobbers), |
5748 | from: escaped_id); |
5749 | make_copy_constraint (vi: first_vi_for_offset (fi, fi_uses), from: escaped_id); |
5750 | |
5751 | /* Also honor the call statement use/clobber info. */ |
5752 | if ((vi = lookup_call_clobber_vi (call: call_stmt)) != NULL) |
5753 | make_copy_constraint (vi: first_vi_for_offset (fi, fi_clobbers), |
5754 | from: vi->id); |
5755 | if ((vi = lookup_call_use_vi (call: call_stmt)) != NULL) |
5756 | make_copy_constraint (vi: first_vi_for_offset (fi, fi_uses), |
5757 | from: vi->id); |
5758 | return; |
5759 | } |
5760 | |
5761 | /* Otherwise the caller clobbers and uses what the callee does. |
5762 | ??? This should use a new complex constraint that filters |
5763 | local variables of the callee. */ |
5764 | if (gimple_vdef (g: t)) |
5765 | { |
5766 | lhs = get_function_part_constraint (fi, part: fi_clobbers); |
5767 | rhs = get_function_part_constraint (fi: cfi, part: fi_clobbers); |
5768 | process_constraint (t: new_constraint (lhs, rhs)); |
5769 | } |
5770 | lhs = get_function_part_constraint (fi, part: fi_uses); |
5771 | rhs = get_function_part_constraint (fi: cfi, part: fi_uses); |
5772 | process_constraint (t: new_constraint (lhs, rhs)); |
5773 | } |
5774 | else if (gimple_code (g: t) == GIMPLE_ASM) |
5775 | { |
5776 | /* ??? Ick. We can do better. */ |
5777 | if (gimple_vdef (g: t)) |
5778 | make_constraint_from (vi: first_vi_for_offset (fi, fi_clobbers), |
5779 | from: anything_id); |
5780 | make_constraint_from (vi: first_vi_for_offset (fi, fi_uses), |
5781 | from: anything_id); |
5782 | } |
5783 | } |
5784 | |
5785 | |
5786 | /* Find the first varinfo in the same variable as START that overlaps with |
5787 | OFFSET. Return NULL if we can't find one. */ |
5788 | |
5789 | static varinfo_t |
5790 | first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset) |
5791 | { |
5792 | /* If the offset is outside of the variable, bail out. */ |
5793 | if (offset >= start->fullsize) |
5794 | return NULL; |
5795 | |
5796 | /* If we cannot reach offset from start, lookup the first field |
5797 | and start from there. */ |
5798 | if (start->offset > offset) |
5799 | start = get_varinfo (n: start->head); |
5800 | |
5801 | while (start) |
5802 | { |
5803 | /* We may not find a variable in the field list with the actual |
5804 | offset when we have glommed a structure to a variable. |
5805 | In that case, however, offset should still be within the size |
5806 | of the variable. */ |
5807 | if (offset >= start->offset |
5808 | && (offset - start->offset) < start->size) |
5809 | return start; |
5810 | |
5811 | start = vi_next (vi: start); |
5812 | } |
5813 | |
5814 | return NULL; |
5815 | } |
5816 | |
5817 | /* Find the first varinfo in the same variable as START that overlaps with |
5818 | OFFSET. If there is no such varinfo the varinfo directly preceding |
5819 | OFFSET is returned. */ |
5820 | |
5821 | static varinfo_t |
5822 | first_or_preceding_vi_for_offset (varinfo_t start, |
5823 | unsigned HOST_WIDE_INT offset) |
5824 | { |
5825 | /* If we cannot reach offset from start, lookup the first field |
5826 | and start from there. */ |
5827 | if (start->offset > offset) |
5828 | start = get_varinfo (n: start->head); |
5829 | |
5830 | /* We may not find a variable in the field list with the actual |
5831 | offset when we have glommed a structure to a variable. |
5832 | In that case, however, offset should still be within the size |
5833 | of the variable. |
5834 | If we got beyond the offset we look for return the field |
5835 | directly preceding offset which may be the last field. */ |
5836 | while (start->next |
5837 | && offset >= start->offset |
5838 | && !((offset - start->offset) < start->size)) |
5839 | start = vi_next (vi: start); |
5840 | |
5841 | return start; |
5842 | } |
5843 | |
5844 | |
5845 | /* This structure is used during pushing fields onto the fieldstack |
5846 | to track the offset of the field, since bitpos_of_field gives it |
5847 | relative to its immediate containing type, and we want it relative |
5848 | to the ultimate containing object. */ |
5849 | |
5850 | struct fieldoff |
5851 | { |
5852 | /* Offset from the base of the base containing object to this field. */ |
5853 | HOST_WIDE_INT offset; |
5854 | |
5855 | /* Size, in bits, of the field. */ |
5856 | unsigned HOST_WIDE_INT size; |
5857 | |
5858 | unsigned has_unknown_size : 1; |
5859 | |
5860 | unsigned must_have_pointers : 1; |
5861 | |
5862 | unsigned may_have_pointers : 1; |
5863 | |
5864 | unsigned only_restrict_pointers : 1; |
5865 | |
5866 | tree restrict_pointed_type; |
5867 | }; |
5868 | typedef struct fieldoff fieldoff_s; |
5869 | |
5870 | |
5871 | /* qsort comparison function for two fieldoff's PA and PB */ |
5872 | |
5873 | static int |
5874 | fieldoff_compare (const void *pa, const void *pb) |
5875 | { |
5876 | const fieldoff_s *foa = (const fieldoff_s *)pa; |
5877 | const fieldoff_s *fob = (const fieldoff_s *)pb; |
5878 | unsigned HOST_WIDE_INT foasize, fobsize; |
5879 | |
5880 | if (foa->offset < fob->offset) |
5881 | return -1; |
5882 | else if (foa->offset > fob->offset) |
5883 | return 1; |
5884 | |
5885 | foasize = foa->size; |
5886 | fobsize = fob->size; |
5887 | if (foasize < fobsize) |
5888 | return -1; |
5889 | else if (foasize > fobsize) |
5890 | return 1; |
5891 | return 0; |
5892 | } |
5893 | |
5894 | /* Sort a fieldstack according to the field offset and sizes. */ |
5895 | static void |
5896 | sort_fieldstack (vec<fieldoff_s> &fieldstack) |
5897 | { |
5898 | fieldstack.qsort (fieldoff_compare); |
5899 | } |
5900 | |
5901 | /* Return true if T is a type that can have subvars. */ |
5902 | |
5903 | static inline bool |
5904 | type_can_have_subvars (const_tree t) |
5905 | { |
5906 | /* Aggregates without overlapping fields can have subvars. */ |
5907 | return TREE_CODE (t) == RECORD_TYPE; |
5908 | } |
5909 | |
5910 | /* Return true if V is a tree that we can have subvars for. |
5911 | Normally, this is any aggregate type. Also complex |
5912 | types which are not gimple registers can have subvars. */ |
5913 | |
5914 | static inline bool |
5915 | var_can_have_subvars (const_tree v) |
5916 | { |
5917 | /* Volatile variables should never have subvars. */ |
5918 | if (TREE_THIS_VOLATILE (v)) |
5919 | return false; |
5920 | |
5921 | /* Non decls or memory tags can never have subvars. */ |
5922 | if (!DECL_P (v)) |
5923 | return false; |
5924 | |
5925 | return type_can_have_subvars (TREE_TYPE (v)); |
5926 | } |
5927 | |
5928 | /* Return true if T is a type that does contain pointers. */ |
5929 | |
5930 | static bool |
5931 | type_must_have_pointers (tree type) |
5932 | { |
5933 | if (POINTER_TYPE_P (type)) |
5934 | return true; |
5935 | |
5936 | if (TREE_CODE (type) == ARRAY_TYPE) |
5937 | return type_must_have_pointers (TREE_TYPE (type)); |
5938 | |
5939 | /* A function or method can have pointers as arguments, so track |
5940 | those separately. */ |
5941 | if (FUNC_OR_METHOD_TYPE_P (type)) |
5942 | return true; |
5943 | |
5944 | return false; |
5945 | } |
5946 | |
5947 | static bool |
5948 | field_must_have_pointers (tree t) |
5949 | { |
5950 | return type_must_have_pointers (TREE_TYPE (t)); |
5951 | } |
5952 | |
5953 | /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all |
5954 | the fields of TYPE onto fieldstack, recording their offsets along |
5955 | the way. |
5956 | |
5957 | OFFSET is used to keep track of the offset in this entire |
5958 | structure, rather than just the immediately containing structure. |
5959 | Returns false if the caller is supposed to handle the field we |
5960 | recursed for. */ |
5961 | |
5962 | static bool |
5963 | push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack, |
5964 | unsigned HOST_WIDE_INT offset) |
5965 | { |
5966 | tree field; |
5967 | bool empty_p = true; |
5968 | |
5969 | if (TREE_CODE (type) != RECORD_TYPE) |
5970 | return false; |
5971 | |
5972 | /* If the vector of fields is growing too big, bail out early. |
5973 | Callers check for vec::length <= param_max_fields_for_field_sensitive, make |
5974 | sure this fails. */ |
5975 | if (fieldstack->length () > (unsigned)param_max_fields_for_field_sensitive) |
5976 | return false; |
5977 | |
5978 | for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) |
5979 | if (TREE_CODE (field) == FIELD_DECL) |
5980 | { |
5981 | bool push = false; |
5982 | unsigned HOST_WIDE_INT foff = bitpos_of_field (fdecl: field); |
5983 | tree field_type = TREE_TYPE (field); |
5984 | |
5985 | if (!var_can_have_subvars (v: field) |
5986 | || TREE_CODE (field_type) == QUAL_UNION_TYPE |
5987 | || TREE_CODE (field_type) == UNION_TYPE) |
5988 | push = true; |
5989 | else if (!push_fields_onto_fieldstack |
5990 | (type: field_type, fieldstack, offset: offset + foff) |
5991 | && (DECL_SIZE (field) |
5992 | && !integer_zerop (DECL_SIZE (field)))) |
5993 | /* Empty structures may have actual size, like in C++. So |
5994 | see if we didn't push any subfields and the size is |
5995 | nonzero, push the field onto the stack. */ |
5996 | push = true; |
5997 | |
5998 | if (push) |
5999 | { |
6000 | fieldoff_s *pair = NULL; |
6001 | bool has_unknown_size = false; |
6002 | bool must_have_pointers_p; |
6003 | |
6004 | if (!fieldstack->is_empty ()) |
6005 | pair = &fieldstack->last (); |
6006 | |
6007 | /* If there isn't anything at offset zero, create sth. */ |
6008 | if (!pair |
6009 | && offset + foff != 0) |
6010 | { |
6011 | fieldoff_s e |
6012 | = {.offset: 0, .size: offset + foff, .has_unknown_size: false, .must_have_pointers: false, .may_have_pointers: true, .only_restrict_pointers: false, NULL_TREE}; |
6013 | pair = fieldstack->safe_push (obj: e); |
6014 | } |
6015 | |
6016 | if (!DECL_SIZE (field) |
6017 | || !tree_fits_uhwi_p (DECL_SIZE (field))) |
6018 | has_unknown_size = true; |
6019 | |
6020 | /* If adjacent fields do not contain pointers merge them. */ |
6021 | must_have_pointers_p = field_must_have_pointers (t: field); |
6022 | if (pair |
6023 | && !has_unknown_size |
6024 | && !must_have_pointers_p |
6025 | && !pair->must_have_pointers |
6026 | && !pair->has_unknown_size |
6027 | && pair->offset + pair->size == offset + foff) |
6028 | { |
6029 | pair->size += tree_to_uhwi (DECL_SIZE (field)); |
6030 | } |
6031 | else |
6032 | { |
6033 | fieldoff_s e; |
6034 | e.offset = offset + foff; |
6035 | e.has_unknown_size = has_unknown_size; |
6036 | if (!has_unknown_size) |
6037 | e.size = tree_to_uhwi (DECL_SIZE (field)); |
6038 | else |
6039 | e.size = -1; |
6040 | e.must_have_pointers = must_have_pointers_p; |
6041 | e.may_have_pointers = true; |
6042 | e.only_restrict_pointers |
6043 | = (!has_unknown_size |
6044 | && POINTER_TYPE_P (field_type) |
6045 | && TYPE_RESTRICT (field_type)); |
6046 | if (e.only_restrict_pointers) |
6047 | e.restrict_pointed_type = TREE_TYPE (field_type); |
6048 | fieldstack->safe_push (obj: e); |
6049 | } |
6050 | } |
6051 | |
6052 | empty_p = false; |
6053 | } |
6054 | |
6055 | return !empty_p; |
6056 | } |
6057 | |
6058 | /* Count the number of arguments DECL has, and set IS_VARARGS to true |
6059 | if it is a varargs function. */ |
6060 | |
6061 | static unsigned int |
6062 | count_num_arguments (tree decl, bool *is_varargs) |
6063 | { |
6064 | unsigned int num = 0; |
6065 | tree t; |
6066 | |
6067 | /* Capture named arguments for K&R functions. They do not |
6068 | have a prototype and thus no TYPE_ARG_TYPES. */ |
6069 | for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t)) |
6070 | ++num; |
6071 | |
6072 | /* Check if the function has variadic arguments. */ |
6073 | for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t)) |
6074 | if (TREE_VALUE (t) == void_type_node) |
6075 | break; |
6076 | if (!t) |
6077 | *is_varargs = true; |
6078 | |
6079 | return num; |
6080 | } |
6081 | |
6082 | /* Creation function node for DECL, using NAME, and return the index |
6083 | of the variable we've created for the function. If NONLOCAL_p, create |
6084 | initial constraints. */ |
6085 | |
6086 | static varinfo_t |
6087 | create_function_info_for (tree decl, const char *name, bool add_id, |
6088 | bool nonlocal_p) |
6089 | { |
6090 | struct function *fn = DECL_STRUCT_FUNCTION (decl); |
6091 | varinfo_t vi, prev_vi; |
6092 | tree arg; |
6093 | unsigned int i; |
6094 | bool is_varargs = false; |
6095 | unsigned int num_args = count_num_arguments (decl, is_varargs: &is_varargs); |
6096 | |
6097 | /* Create the variable info. */ |
6098 | |
6099 | vi = new_var_info (t: decl, name, add_id); |
6100 | vi->offset = 0; |
6101 | vi->size = 1; |
6102 | vi->fullsize = fi_parm_base + num_args; |
6103 | vi->is_fn_info = 1; |
6104 | vi->may_have_pointers = false; |
6105 | if (is_varargs) |
6106 | vi->fullsize = ~0; |
6107 | insert_vi_for_tree (t: vi->decl, vi); |
6108 | |
6109 | prev_vi = vi; |
6110 | |
6111 | /* Create a variable for things the function clobbers and one for |
6112 | things the function uses. */ |
6113 | { |
6114 | varinfo_t clobbervi, usevi; |
6115 | const char *newname; |
6116 | char *tempname; |
6117 | |
6118 | tempname = xasprintf ("%s.clobber" , name); |
6119 | newname = ggc_strdup (tempname); |
6120 | free (ptr: tempname); |
6121 | |
6122 | clobbervi = new_var_info (NULL, name: newname, add_id: false); |
6123 | clobbervi->offset = fi_clobbers; |
6124 | clobbervi->size = 1; |
6125 | clobbervi->fullsize = vi->fullsize; |
6126 | clobbervi->is_full_var = true; |
6127 | clobbervi->is_global_var = false; |
6128 | clobbervi->is_reg_var = true; |
6129 | |
6130 | gcc_assert (prev_vi->offset < clobbervi->offset); |
6131 | prev_vi->next = clobbervi->id; |
6132 | prev_vi = clobbervi; |
6133 | |
6134 | tempname = xasprintf ("%s.use" , name); |
6135 | newname = ggc_strdup (tempname); |
6136 | free (ptr: tempname); |
6137 | |
6138 | usevi = new_var_info (NULL, name: newname, add_id: false); |
6139 | usevi->offset = fi_uses; |
6140 | usevi->size = 1; |
6141 | usevi->fullsize = vi->fullsize; |
6142 | usevi->is_full_var = true; |
6143 | usevi->is_global_var = false; |
6144 | usevi->is_reg_var = true; |
6145 | |
6146 | gcc_assert (prev_vi->offset < usevi->offset); |
6147 | prev_vi->next = usevi->id; |
6148 | prev_vi = usevi; |
6149 | } |
6150 | |
6151 | /* And one for the static chain. */ |
6152 | if (fn->static_chain_decl != NULL_TREE) |
6153 | { |
6154 | varinfo_t chainvi; |
6155 | const char *newname; |
6156 | char *tempname; |
6157 | |
6158 | tempname = xasprintf ("%s.chain" , name); |
6159 | newname = ggc_strdup (tempname); |
6160 | free (ptr: tempname); |
6161 | |
6162 | chainvi = new_var_info (t: fn->static_chain_decl, name: newname, add_id: false); |
6163 | chainvi->offset = fi_static_chain; |
6164 | chainvi->size = 1; |
6165 | chainvi->fullsize = vi->fullsize; |
6166 | chainvi->is_full_var = true; |
6167 | chainvi->is_global_var = false; |
6168 | |
6169 | insert_vi_for_tree (t: fn->static_chain_decl, vi: chainvi); |
6170 | |
6171 | if (nonlocal_p |
6172 | && chainvi->may_have_pointers) |
6173 | make_constraint_from (vi: chainvi, from: nonlocal_id); |
6174 | |
6175 | gcc_assert (prev_vi->offset < chainvi->offset); |
6176 | prev_vi->next = chainvi->id; |
6177 | prev_vi = chainvi; |
6178 | } |
6179 | |
6180 | /* Create a variable for the return var. */ |
6181 | if (DECL_RESULT (decl) != NULL |
6182 | || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl)))) |
6183 | { |
6184 | varinfo_t resultvi; |
6185 | const char *newname; |
6186 | char *tempname; |
6187 | tree resultdecl = decl; |
6188 | |
6189 | if (DECL_RESULT (decl)) |
6190 | resultdecl = DECL_RESULT (decl); |
6191 | |
6192 | tempname = xasprintf ("%s.result" , name); |
6193 | newname = ggc_strdup (tempname); |
6194 | free (ptr: tempname); |
6195 | |
6196 | resultvi = new_var_info (t: resultdecl, name: newname, add_id: false); |
6197 | resultvi->offset = fi_result; |
6198 | resultvi->size = 1; |
6199 | resultvi->fullsize = vi->fullsize; |
6200 | resultvi->is_full_var = true; |
6201 | if (DECL_RESULT (decl)) |
6202 | resultvi->may_have_pointers = true; |
6203 | |
6204 | if (DECL_RESULT (decl)) |
6205 | insert_vi_for_tree (DECL_RESULT (decl), vi: resultvi); |
6206 | |
6207 | if (nonlocal_p |
6208 | && DECL_RESULT (decl) |
6209 | && DECL_BY_REFERENCE (DECL_RESULT (decl))) |
6210 | make_constraint_from (vi: resultvi, from: nonlocal_id); |
6211 | |
6212 | gcc_assert (prev_vi->offset < resultvi->offset); |
6213 | prev_vi->next = resultvi->id; |
6214 | prev_vi = resultvi; |
6215 | } |
6216 | |
6217 | /* We also need to make function return values escape. Nothing |
6218 | escapes by returning from main though. */ |
6219 | if (nonlocal_p |
6220 | && !MAIN_NAME_P (DECL_NAME (decl))) |
6221 | { |
6222 | varinfo_t fi, rvi; |
6223 | fi = lookup_vi_for_tree (t: decl); |
6224 | rvi = first_vi_for_offset (start: fi, offset: fi_result); |
6225 | if (rvi && rvi->offset == fi_result) |
6226 | make_copy_constraint (vi: get_varinfo (n: escaped_id), from: rvi->id); |
6227 | } |
6228 | |
6229 | /* Set up variables for each argument. */ |
6230 | arg = DECL_ARGUMENTS (decl); |
6231 | for (i = 0; i < num_args; i++) |
6232 | { |
6233 | varinfo_t argvi; |
6234 | const char *newname; |
6235 | char *tempname; |
6236 | tree argdecl = decl; |
6237 | |
6238 | if (arg) |
6239 | argdecl = arg; |
6240 | |
6241 | tempname = xasprintf ("%s.arg%d" , name, i); |
6242 | newname = ggc_strdup (tempname); |
6243 | free (ptr: tempname); |
6244 | |
6245 | argvi = new_var_info (t: argdecl, name: newname, add_id: false); |
6246 | argvi->offset = fi_parm_base + i; |
6247 | argvi->size = 1; |
6248 | argvi->is_full_var = true; |
6249 | argvi->fullsize = vi->fullsize; |
6250 | if (arg) |
6251 | argvi->may_have_pointers = true; |
6252 | |
6253 | if (arg) |
6254 | insert_vi_for_tree (t: arg, vi: argvi); |
6255 | |
6256 | if (nonlocal_p |
6257 | && argvi->may_have_pointers) |
6258 | make_constraint_from (vi: argvi, from: nonlocal_id); |
6259 | |
6260 | gcc_assert (prev_vi->offset < argvi->offset); |
6261 | prev_vi->next = argvi->id; |
6262 | prev_vi = argvi; |
6263 | if (arg) |
6264 | arg = DECL_CHAIN (arg); |
6265 | } |
6266 | |
6267 | /* Add one representative for all further args. */ |
6268 | if (is_varargs) |
6269 | { |
6270 | varinfo_t argvi; |
6271 | const char *newname; |
6272 | char *tempname; |
6273 | tree decl; |
6274 | |
6275 | tempname = xasprintf ("%s.varargs" , name); |
6276 | newname = ggc_strdup (tempname); |
6277 | free (ptr: tempname); |
6278 | |
6279 | /* We need sth that can be pointed to for va_start. */ |
6280 | decl = build_fake_var_decl (ptr_type_node); |
6281 | |
6282 | argvi = new_var_info (t: decl, name: newname, add_id: false); |
6283 | argvi->offset = fi_parm_base + num_args; |
6284 | argvi->size = ~0; |
6285 | argvi->is_full_var = true; |
6286 | argvi->is_heap_var = true; |
6287 | argvi->fullsize = vi->fullsize; |
6288 | |
6289 | if (nonlocal_p |
6290 | && argvi->may_have_pointers) |
6291 | make_constraint_from (vi: argvi, from: nonlocal_id); |
6292 | |
6293 | gcc_assert (prev_vi->offset < argvi->offset); |
6294 | prev_vi->next = argvi->id; |
6295 | } |
6296 | |
6297 | return vi; |
6298 | } |
6299 | |
6300 | |
6301 | /* Return true if FIELDSTACK contains fields that overlap. |
6302 | FIELDSTACK is assumed to be sorted by offset. */ |
6303 | |
6304 | static bool |
6305 | check_for_overlaps (const vec<fieldoff_s> &fieldstack) |
6306 | { |
6307 | fieldoff_s *fo = NULL; |
6308 | unsigned int i; |
6309 | HOST_WIDE_INT lastoffset = -1; |
6310 | |
6311 | FOR_EACH_VEC_ELT (fieldstack, i, fo) |
6312 | { |
6313 | if (fo->offset == lastoffset) |
6314 | return true; |
6315 | lastoffset = fo->offset; |
6316 | } |
6317 | return false; |
6318 | } |
6319 | |
6320 | /* Create a varinfo structure for NAME and DECL, and add it to VARMAP. |
6321 | This will also create any varinfo structures necessary for fields |
6322 | of DECL. DECL is a function parameter if HANDLE_PARAM is set. |
6323 | HANDLED_STRUCT_TYPE is used to register struct types reached by following |
6324 | restrict pointers. This is needed to prevent infinite recursion. |
6325 | If ADD_RESTRICT, pretend that the pointer NAME is restrict even if DECL |
6326 | does not advertise it. */ |
6327 | |
6328 | static varinfo_t |
6329 | create_variable_info_for_1 (tree decl, const char *name, bool add_id, |
6330 | bool handle_param, bitmap handled_struct_type, |
6331 | bool add_restrict = false) |
6332 | { |
6333 | varinfo_t vi, newvi; |
6334 | tree decl_type = TREE_TYPE (decl); |
6335 | tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type); |
6336 | auto_vec<fieldoff_s> fieldstack; |
6337 | fieldoff_s *fo; |
6338 | unsigned int i; |
6339 | |
6340 | if (!declsize |
6341 | || !tree_fits_uhwi_p (declsize)) |
6342 | { |
6343 | vi = new_var_info (t: decl, name, add_id); |
6344 | vi->offset = 0; |
6345 | vi->size = ~0; |
6346 | vi->fullsize = ~0; |
6347 | vi->is_unknown_size_var = true; |
6348 | vi->is_full_var = true; |
6349 | vi->may_have_pointers = true; |
6350 | return vi; |
6351 | } |
6352 | |
6353 | /* Collect field information. */ |
6354 | if (use_field_sensitive |
6355 | && var_can_have_subvars (v: decl) |
6356 | /* ??? Force us to not use subfields for globals in IPA mode. |
6357 | Else we'd have to parse arbitrary initializers. */ |
6358 | && !(in_ipa_mode |
6359 | && is_global_var (t: decl))) |
6360 | { |
6361 | fieldoff_s *fo = NULL; |
6362 | bool notokay = false; |
6363 | unsigned int i; |
6364 | |
6365 | push_fields_onto_fieldstack (type: decl_type, fieldstack: &fieldstack, offset: 0); |
6366 | |
6367 | for (i = 0; !notokay && fieldstack.iterate (ix: i, ptr: &fo); i++) |
6368 | if (fo->has_unknown_size |
6369 | || fo->offset < 0) |
6370 | { |
6371 | notokay = true; |
6372 | break; |
6373 | } |
6374 | |
6375 | /* We can't sort them if we have a field with a variable sized type, |
6376 | which will make notokay = true. In that case, we are going to return |
6377 | without creating varinfos for the fields anyway, so sorting them is a |
6378 | waste to boot. */ |
6379 | if (!notokay) |
6380 | { |
6381 | sort_fieldstack (fieldstack); |
6382 | /* Due to some C++ FE issues, like PR 22488, we might end up |
6383 | what appear to be overlapping fields even though they, |
6384 | in reality, do not overlap. Until the C++ FE is fixed, |
6385 | we will simply disable field-sensitivity for these cases. */ |
6386 | notokay = check_for_overlaps (fieldstack); |
6387 | } |
6388 | |
6389 | if (notokay) |
6390 | fieldstack.release (); |
6391 | } |
6392 | |
6393 | /* If we didn't end up collecting sub-variables create a full |
6394 | variable for the decl. */ |
6395 | if (fieldstack.length () == 0 |
6396 | || fieldstack.length () > (unsigned)param_max_fields_for_field_sensitive) |
6397 | { |
6398 | vi = new_var_info (t: decl, name, add_id); |
6399 | vi->offset = 0; |
6400 | vi->may_have_pointers = true; |
6401 | vi->fullsize = tree_to_uhwi (declsize); |
6402 | vi->size = vi->fullsize; |
6403 | vi->is_full_var = true; |
6404 | if (POINTER_TYPE_P (decl_type) |
6405 | && (TYPE_RESTRICT (decl_type) || add_restrict)) |
6406 | vi->only_restrict_pointers = 1; |
6407 | if (vi->only_restrict_pointers |
6408 | && !type_contains_placeholder_p (TREE_TYPE (decl_type)) |
6409 | && handle_param |
6410 | && !bitmap_bit_p (handled_struct_type, |
6411 | TYPE_UID (TREE_TYPE (decl_type)))) |
6412 | { |
6413 | varinfo_t rvi; |
6414 | tree heapvar = build_fake_var_decl (TREE_TYPE (decl_type)); |
6415 | DECL_EXTERNAL (heapvar) = 1; |
6416 | if (var_can_have_subvars (v: heapvar)) |
6417 | bitmap_set_bit (handled_struct_type, |
6418 | TYPE_UID (TREE_TYPE (decl_type))); |
6419 | rvi = create_variable_info_for_1 (decl: heapvar, name: "PARM_NOALIAS" , add_id: true, |
6420 | handle_param: true, handled_struct_type); |
6421 | if (var_can_have_subvars (v: heapvar)) |
6422 | bitmap_clear_bit (handled_struct_type, |
6423 | TYPE_UID (TREE_TYPE (decl_type))); |
6424 | rvi->is_restrict_var = 1; |
6425 | insert_vi_for_tree (t: heapvar, vi: rvi); |
6426 | make_constraint_from (vi, from: rvi->id); |
6427 | make_param_constraints (rvi); |
6428 | } |
6429 | fieldstack.release (); |
6430 | return vi; |
6431 | } |
6432 | |
6433 | vi = new_var_info (t: decl, name, add_id); |
6434 | vi->fullsize = tree_to_uhwi (declsize); |
6435 | if (fieldstack.length () == 1) |
6436 | vi->is_full_var = true; |
6437 | for (i = 0, newvi = vi; |
6438 | fieldstack.iterate (ix: i, ptr: &fo); |
6439 | ++i, newvi = vi_next (vi: newvi)) |
6440 | { |
6441 | const char *newname = NULL; |
6442 | char *tempname; |
6443 | |
6444 | if (dump_file) |
6445 | { |
6446 | if (fieldstack.length () != 1) |
6447 | { |
6448 | tempname |
6449 | = xasprintf ("%s." HOST_WIDE_INT_PRINT_DEC |
6450 | "+" HOST_WIDE_INT_PRINT_DEC, name, |
6451 | fo->offset, fo->size); |
6452 | newname = ggc_strdup (tempname); |
6453 | free (ptr: tempname); |
6454 | } |
6455 | } |
6456 | else |
6457 | newname = "NULL" ; |
6458 | |
6459 | if (newname) |
6460 | newvi->name = newname; |
6461 | newvi->offset = fo->offset; |
6462 | newvi->size = fo->size; |
6463 | newvi->fullsize = vi->fullsize; |
6464 | newvi->may_have_pointers = fo->may_have_pointers; |
6465 | newvi->only_restrict_pointers = fo->only_restrict_pointers; |
6466 | if (handle_param |
6467 | && newvi->only_restrict_pointers |
6468 | && !type_contains_placeholder_p (fo->restrict_pointed_type) |
6469 | && !bitmap_bit_p (handled_struct_type, |
6470 | TYPE_UID (fo->restrict_pointed_type))) |
6471 | { |
6472 | varinfo_t rvi; |
6473 | tree heapvar = build_fake_var_decl (type: fo->restrict_pointed_type); |
6474 | DECL_EXTERNAL (heapvar) = 1; |
6475 | if (var_can_have_subvars (v: heapvar)) |
6476 | bitmap_set_bit (handled_struct_type, |
6477 | TYPE_UID (fo->restrict_pointed_type)); |
6478 | rvi = create_variable_info_for_1 (decl: heapvar, name: "PARM_NOALIAS" , add_id: true, |
6479 | handle_param: true, handled_struct_type); |
6480 | if (var_can_have_subvars (v: heapvar)) |
6481 | bitmap_clear_bit (handled_struct_type, |
6482 | TYPE_UID (fo->restrict_pointed_type)); |
6483 | rvi->is_restrict_var = 1; |
6484 | insert_vi_for_tree (t: heapvar, vi: rvi); |
6485 | make_constraint_from (vi: newvi, from: rvi->id); |
6486 | make_param_constraints (rvi); |
6487 | } |
6488 | if (i + 1 < fieldstack.length ()) |
6489 | { |
6490 | varinfo_t tem = new_var_info (t: decl, name, add_id: false); |
6491 | newvi->next = tem->id; |
6492 | tem->head = vi->id; |
6493 | } |
6494 | } |
6495 | |
6496 | return vi; |
6497 | } |
6498 | |
6499 | static unsigned int |
6500 | create_variable_info_for (tree decl, const char *name, bool add_id) |
6501 | { |
6502 | /* First see if we are dealing with an ifunc resolver call and |
6503 | assiociate that with a call to the resolver function result. */ |
6504 | cgraph_node *node; |
6505 | if (in_ipa_mode |
6506 | && TREE_CODE (decl) == FUNCTION_DECL |
6507 | && (node = cgraph_node::get (decl)) |
6508 | && node->ifunc_resolver) |
6509 | { |
6510 | varinfo_t fi = get_vi_for_tree (t: node->get_alias_target ()->decl); |
6511 | constraint_expr rhs |
6512 | = get_function_part_constraint (fi, part: fi_result); |
6513 | fi = new_var_info (NULL_TREE, name: "ifuncres" , add_id: true); |
6514 | fi->is_reg_var = true; |
6515 | constraint_expr lhs; |
6516 | lhs.type = SCALAR; |
6517 | lhs.var = fi->id; |
6518 | lhs.offset = 0; |
6519 | process_constraint (t: new_constraint (lhs, rhs)); |
6520 | insert_vi_for_tree (t: decl, vi: fi); |
6521 | return fi->id; |
6522 | } |
6523 | |
6524 | varinfo_t vi = create_variable_info_for_1 (decl, name, add_id, handle_param: false, NULL); |
6525 | unsigned int id = vi->id; |
6526 | |
6527 | insert_vi_for_tree (t: decl, vi); |
6528 | |
6529 | if (!VAR_P (decl)) |
6530 | return id; |
6531 | |
6532 | /* Create initial constraints for globals. */ |
6533 | for (; vi; vi = vi_next (vi)) |
6534 | { |
6535 | if (!vi->may_have_pointers |
6536 | || !vi->is_global_var) |
6537 | continue; |
6538 | |
6539 | /* Mark global restrict qualified pointers. */ |
6540 | if ((POINTER_TYPE_P (TREE_TYPE (decl)) |
6541 | && TYPE_RESTRICT (TREE_TYPE (decl))) |
6542 | || vi->only_restrict_pointers) |
6543 | { |
6544 | varinfo_t rvi |
6545 | = make_constraint_from_global_restrict (lhs: vi, name: "GLOBAL_RESTRICT" , |
6546 | add_id: true); |
6547 | /* ??? For now exclude reads from globals as restrict sources |
6548 | if those are not (indirectly) from incoming parameters. */ |
6549 | rvi->is_restrict_var = false; |
6550 | continue; |
6551 | } |
6552 | |
6553 | /* In non-IPA mode the initializer from nonlocal is all we need. */ |
6554 | if (!in_ipa_mode |
6555 | || DECL_HARD_REGISTER (decl)) |
6556 | make_copy_constraint (vi, from: nonlocal_id); |
6557 | |
6558 | /* In IPA mode parse the initializer and generate proper constraints |
6559 | for it. */ |
6560 | else |
6561 | { |
6562 | varpool_node *vnode = varpool_node::get (decl); |
6563 | |
6564 | /* For escaped variables initialize them from nonlocal. */ |
6565 | if (!vnode || !vnode->all_refs_explicit_p ()) |
6566 | make_copy_constraint (vi, from: nonlocal_id); |
6567 | |
6568 | /* While we can in theory walk references for the varpool |
6569 | node that does not cover zero-initialization or references |
6570 | to the constant pool. */ |
6571 | if (DECL_INITIAL (decl)) |
6572 | { |
6573 | auto_vec<ce_s> rhsc; |
6574 | struct constraint_expr lhs, *rhsp; |
6575 | unsigned i; |
6576 | lhs.var = vi->id; |
6577 | lhs.offset = 0; |
6578 | lhs.type = SCALAR; |
6579 | get_constraint_for (DECL_INITIAL (decl), results: &rhsc); |
6580 | FOR_EACH_VEC_ELT (rhsc, i, rhsp) |
6581 | process_constraint (t: new_constraint (lhs, rhs: *rhsp)); |
6582 | /* If this is a variable that escapes from the unit |
6583 | the initializer escapes as well. */ |
6584 | if (!vnode || !vnode->all_refs_explicit_p ()) |
6585 | { |
6586 | lhs.var = escaped_id; |
6587 | lhs.offset = 0; |
6588 | lhs.type = SCALAR; |
6589 | FOR_EACH_VEC_ELT (rhsc, i, rhsp) |
6590 | process_constraint (t: new_constraint (lhs, rhs: *rhsp)); |
6591 | } |
6592 | } |
6593 | } |
6594 | } |
6595 | |
6596 | return id; |
6597 | } |
6598 | |
6599 | /* Print out the points-to solution for VAR to FILE. */ |
6600 | |
6601 | static void |
6602 | dump_solution_for_var (FILE *file, unsigned int var) |
6603 | { |
6604 | varinfo_t vi = get_varinfo (n: var); |
6605 | unsigned int i; |
6606 | bitmap_iterator bi; |
6607 | |
6608 | /* Dump the solution for unified vars anyway, this avoids difficulties |
6609 | in scanning dumps in the testsuite. */ |
6610 | fprintf (stream: file, format: "%s = { " , vi->name); |
6611 | vi = get_varinfo (n: find (node: var)); |
6612 | EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi) |
6613 | fprintf (stream: file, format: "%s " , get_varinfo (n: i)->name); |
6614 | fprintf (stream: file, format: "}" ); |
6615 | |
6616 | /* But note when the variable was unified. */ |
6617 | if (vi->id != var) |
6618 | fprintf (stream: file, format: " same as %s" , vi->name); |
6619 | |
6620 | fprintf (stream: file, format: "\n" ); |
6621 | } |
6622 | |
6623 | /* Print the points-to solution for VAR to stderr. */ |
6624 | |
6625 | DEBUG_FUNCTION void |
6626 | debug_solution_for_var (unsigned int var) |
6627 | { |
6628 | dump_solution_for_var (stderr, var); |
6629 | } |
6630 | |
6631 | /* Register the constraints for function parameter related VI. */ |
6632 | |
6633 | static void |
6634 | make_param_constraints (varinfo_t vi) |
6635 | { |
6636 | for (; vi; vi = vi_next (vi)) |
6637 | { |
6638 | if (vi->only_restrict_pointers) |
6639 | ; |
6640 | else if (vi->may_have_pointers) |
6641 | make_constraint_from (vi, from: nonlocal_id); |
6642 | |
6643 | if (vi->is_full_var) |
6644 | break; |
6645 | } |
6646 | } |
6647 | |
6648 | /* Create varinfo structures for all of the variables in the |
6649 | function for intraprocedural mode. */ |
6650 | |
6651 | static void |
6652 | intra_create_variable_infos (struct function *fn) |
6653 | { |
6654 | tree t; |
6655 | bitmap handled_struct_type = NULL; |
6656 | bool this_parm_in_ctor = DECL_CXX_CONSTRUCTOR_P (fn->decl); |
6657 | |
6658 | /* For each incoming pointer argument arg, create the constraint ARG |
6659 | = NONLOCAL or a dummy variable if it is a restrict qualified |
6660 | passed-by-reference argument. */ |
6661 | for (t = DECL_ARGUMENTS (fn->decl); t; t = DECL_CHAIN (t)) |
6662 | { |
6663 | if (handled_struct_type == NULL) |
6664 | handled_struct_type = BITMAP_ALLOC (NULL); |
6665 | |
6666 | varinfo_t p |
6667 | = create_variable_info_for_1 (decl: t, name: alias_get_name (decl: t), add_id: false, handle_param: true, |
6668 | handled_struct_type, add_restrict: this_parm_in_ctor); |
6669 | insert_vi_for_tree (t, vi: p); |
6670 | |
6671 | make_param_constraints (vi: p); |
6672 | |
6673 | this_parm_in_ctor = false; |
6674 | } |
6675 | |
6676 | if (handled_struct_type != NULL) |
6677 | BITMAP_FREE (handled_struct_type); |
6678 | |
6679 | /* Add a constraint for a result decl that is passed by reference. */ |
6680 | if (DECL_RESULT (fn->decl) |
6681 | && DECL_BY_REFERENCE (DECL_RESULT (fn->decl))) |
6682 | { |
6683 | varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (fn->decl)); |
6684 | |
6685 | for (p = result_vi; p; p = vi_next (vi: p)) |
6686 | make_constraint_from (vi: p, from: nonlocal_id); |
6687 | } |
6688 | |
6689 | /* Add a constraint for the incoming static chain parameter. */ |
6690 | if (fn->static_chain_decl != NULL_TREE) |
6691 | { |
6692 | varinfo_t p, chain_vi = get_vi_for_tree (t: fn->static_chain_decl); |
6693 | |
6694 | for (p = chain_vi; p; p = vi_next (vi: p)) |
6695 | make_constraint_from (vi: p, from: nonlocal_id); |
6696 | } |
6697 | } |
6698 | |
6699 | /* Structure used to put solution bitmaps in a hashtable so they can |
6700 | be shared among variables with the same points-to set. */ |
6701 | |
6702 | typedef struct shared_bitmap_info |
6703 | { |
6704 | bitmap pt_vars; |
6705 | hashval_t hashcode; |
6706 | } *shared_bitmap_info_t; |
6707 | typedef const struct shared_bitmap_info *const_shared_bitmap_info_t; |
6708 | |
6709 | /* Shared_bitmap hashtable helpers. */ |
6710 | |
6711 | struct shared_bitmap_hasher : free_ptr_hash <shared_bitmap_info> |
6712 | { |
6713 | static inline hashval_t hash (const shared_bitmap_info *); |
6714 | static inline bool equal (const shared_bitmap_info *, |
6715 | const shared_bitmap_info *); |
6716 | }; |
6717 | |
6718 | /* Hash function for a shared_bitmap_info_t */ |
6719 | |
6720 | inline hashval_t |
6721 | shared_bitmap_hasher::hash (const shared_bitmap_info *bi) |
6722 | { |
6723 | return bi->hashcode; |
6724 | } |
6725 | |
6726 | /* Equality function for two shared_bitmap_info_t's. */ |
6727 | |
6728 | inline bool |
6729 | shared_bitmap_hasher::equal (const shared_bitmap_info *sbi1, |
6730 | const shared_bitmap_info *sbi2) |
6731 | { |
6732 | return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars); |
6733 | } |
6734 | |
6735 | /* Shared_bitmap hashtable. */ |
6736 | |
6737 | static hash_table<shared_bitmap_hasher> *shared_bitmap_table; |
6738 | |
6739 | /* Lookup a bitmap in the shared bitmap hashtable, and return an already |
6740 | existing instance if there is one, NULL otherwise. */ |
6741 | |
6742 | static bitmap |
6743 | shared_bitmap_lookup (bitmap pt_vars) |
6744 | { |
6745 | shared_bitmap_info **slot; |
6746 | struct shared_bitmap_info sbi; |
6747 | |
6748 | sbi.pt_vars = pt_vars; |
6749 | sbi.hashcode = bitmap_hash (pt_vars); |
6750 | |
6751 | slot = shared_bitmap_table->find_slot (value: &sbi, insert: NO_INSERT); |
6752 | if (!slot) |
6753 | return NULL; |
6754 | else |
6755 | return (*slot)->pt_vars; |
6756 | } |
6757 | |
6758 | |
6759 | /* Add a bitmap to the shared bitmap hashtable. */ |
6760 | |
6761 | static void |
6762 | shared_bitmap_add (bitmap pt_vars) |
6763 | { |
6764 | shared_bitmap_info **slot; |
6765 | shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info); |
6766 | |
6767 | sbi->pt_vars = pt_vars; |
6768 | sbi->hashcode = bitmap_hash (pt_vars); |
6769 | |
6770 | slot = shared_bitmap_table->find_slot (value: sbi, insert: INSERT); |
6771 | gcc_assert (!*slot); |
6772 | *slot = sbi; |
6773 | } |
6774 | |
6775 | |
6776 | /* Set bits in INTO corresponding to the variable uids in solution set FROM. */ |
6777 | |
6778 | static void |
6779 | set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt, |
6780 | tree fndecl) |
6781 | { |
6782 | unsigned int i; |
6783 | bitmap_iterator bi; |
6784 | varinfo_t escaped_vi = get_varinfo (n: find (node: escaped_id)); |
6785 | varinfo_t escaped_return_vi = get_varinfo (n: find (node: escaped_return_id)); |
6786 | bool everything_escaped |
6787 | = escaped_vi->solution && bitmap_bit_p (escaped_vi->solution, anything_id); |
6788 | |
6789 | EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi) |
6790 | { |
6791 | varinfo_t vi = get_varinfo (n: i); |
6792 | |
6793 | if (vi->is_artificial_var) |
6794 | continue; |
6795 | |
6796 | if (everything_escaped |
6797 | || (escaped_vi->solution |
6798 | && bitmap_bit_p (escaped_vi->solution, i))) |
6799 | { |
6800 | pt->vars_contains_escaped = true; |
6801 | pt->vars_contains_escaped_heap |= vi->is_heap_var; |
6802 | } |
6803 | if (escaped_return_vi->solution |
6804 | && bitmap_bit_p (escaped_return_vi->solution, i)) |
6805 | pt->vars_contains_escaped_heap |= vi->is_heap_var; |
6806 | |
6807 | if (vi->is_restrict_var) |
6808 | pt->vars_contains_restrict = true; |
6809 | |
6810 | if (VAR_P (vi->decl) |
6811 | || TREE_CODE (vi->decl) == PARM_DECL |
6812 | || TREE_CODE (vi->decl) == RESULT_DECL) |
6813 | { |
6814 | /* If we are in IPA mode we will not recompute points-to |
6815 | sets after inlining so make sure they stay valid. */ |
6816 | if (in_ipa_mode |
6817 | && !DECL_PT_UID_SET_P (vi->decl)) |
6818 | SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl)); |
6819 | |
6820 | /* Add the decl to the points-to set. Note that the points-to |
6821 | set contains global variables. */ |
6822 | bitmap_set_bit (into, DECL_PT_UID (vi->decl)); |
6823 | if (vi->is_global_var |
6824 | /* In IPA mode the escaped_heap trick doesn't work as |
6825 | ESCAPED is escaped from the unit but |
6826 | pt_solution_includes_global needs to answer true for |
6827 | all variables not automatic within a function. |
6828 | For the same reason is_global_var is not the |
6829 | correct flag to track - local variables from other |
6830 | functions also need to be considered global. |
6831 | Conveniently all HEAP vars are not put in function |
6832 | scope. */ |
6833 | || (in_ipa_mode |
6834 | && fndecl |
6835 | && ! auto_var_in_fn_p (vi->decl, fndecl))) |
6836 | pt->vars_contains_nonlocal = true; |
6837 | |
6838 | /* If we have a variable that is interposable record that fact |
6839 | for pointer comparison simplification. */ |
6840 | if (VAR_P (vi->decl) |
6841 | && (TREE_STATIC (vi->decl) || DECL_EXTERNAL (vi->decl)) |
6842 | && ! decl_binds_to_current_def_p (vi->decl)) |
6843 | pt->vars_contains_interposable = true; |
6844 | |
6845 | /* If this is a local variable we can have overlapping lifetime |
6846 | of different function invocations through recursion duplicate |
6847 | it with its shadow variable. */ |
6848 | if (in_ipa_mode |
6849 | && vi->shadow_var_uid != 0) |
6850 | { |
6851 | bitmap_set_bit (into, vi->shadow_var_uid); |
6852 | pt->vars_contains_nonlocal = true; |
6853 | } |
6854 | } |
6855 | |
6856 | else if (TREE_CODE (vi->decl) == FUNCTION_DECL |
6857 | || TREE_CODE (vi->decl) == LABEL_DECL) |
6858 | { |
6859 | /* Nothing should read/write from/to code so we can |
6860 | save bits by not including them in the points-to bitmaps. |
6861 | Still mark the points-to set as containing global memory |
6862 | to make code-patching possible - see PR70128. */ |
6863 | pt->vars_contains_nonlocal = true; |
6864 | } |
6865 | } |
6866 | } |
6867 | |
6868 | |
6869 | /* Compute the points-to solution *PT for the variable VI. */ |
6870 | |
6871 | static struct pt_solution |
6872 | find_what_var_points_to (tree fndecl, varinfo_t orig_vi) |
6873 | { |
6874 | unsigned int i; |
6875 | bitmap_iterator bi; |
6876 | bitmap finished_solution; |
6877 | bitmap result; |
6878 | varinfo_t vi; |
6879 | struct pt_solution *pt; |
6880 | |
6881 | /* This variable may have been collapsed, let's get the real |
6882 | variable. */ |
6883 | vi = get_varinfo (n: find (node: orig_vi->id)); |
6884 | |
6885 | /* See if we have already computed the solution and return it. */ |
6886 | pt_solution **slot = &final_solutions->get_or_insert (k: vi); |
6887 | if (*slot != NULL) |
6888 | return **slot; |
6889 | |
6890 | *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution); |
6891 | memset (s: pt, c: 0, n: sizeof (struct pt_solution)); |
6892 | |
6893 | /* Translate artificial variables into SSA_NAME_PTR_INFO |
6894 | attributes. */ |
6895 | EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi) |
6896 | { |
6897 | varinfo_t vi = get_varinfo (n: i); |
6898 | |
6899 | if (vi->is_artificial_var) |
6900 | { |
6901 | if (vi->id == nothing_id) |
6902 | pt->null = 1; |
6903 | else if (vi->id == escaped_id) |
6904 | { |
6905 | if (in_ipa_mode) |
6906 | pt->ipa_escaped = 1; |
6907 | else |
6908 | pt->escaped = 1; |
6909 | /* Expand some special vars of ESCAPED in-place here. */ |
6910 | varinfo_t evi = get_varinfo (n: find (node: escaped_id)); |
6911 | if (bitmap_bit_p (evi->solution, nonlocal_id)) |
6912 | pt->nonlocal = 1; |
6913 | } |
6914 | else if (vi->id == nonlocal_id) |
6915 | pt->nonlocal = 1; |
6916 | else if (vi->id == string_id) |
6917 | pt->const_pool = 1; |
6918 | else if (vi->id == anything_id |
6919 | || vi->id == integer_id) |
6920 | pt->anything = 1; |
6921 | } |
6922 | } |
6923 | |
6924 | /* Instead of doing extra work, simply do not create |
6925 | elaborate points-to information for pt_anything pointers. */ |
6926 | if (pt->anything) |
6927 | return *pt; |
6928 | |
6929 | /* Share the final set of variables when possible. */ |
6930 | finished_solution = BITMAP_GGC_ALLOC (); |
6931 | stats.points_to_sets_created++; |
6932 | |
6933 | set_uids_in_ptset (into: finished_solution, from: vi->solution, pt, fndecl); |
6934 | result = shared_bitmap_lookup (pt_vars: finished_solution); |
6935 | if (!result) |
6936 | { |
6937 | shared_bitmap_add (pt_vars: finished_solution); |
6938 | pt->vars = finished_solution; |
6939 | } |
6940 | else |
6941 | { |
6942 | pt->vars = result; |
6943 | bitmap_clear (finished_solution); |
6944 | } |
6945 | |
6946 | return *pt; |
6947 | } |
6948 | |
6949 | /* Given a pointer variable P, fill in its points-to set. */ |
6950 | |
6951 | static void |
6952 | find_what_p_points_to (tree fndecl, tree p) |
6953 | { |
6954 | struct ptr_info_def *pi; |
6955 | tree lookup_p = p; |
6956 | varinfo_t vi; |
6957 | prange vr; |
6958 | get_range_query (DECL_STRUCT_FUNCTION (fndecl))->range_of_expr (r&: vr, expr: p); |
6959 | bool nonnull = vr.nonzero_p (); |
6960 | |
6961 | /* For parameters, get at the points-to set for the actual parm |
6962 | decl. */ |
6963 | if (TREE_CODE (p) == SSA_NAME |
6964 | && SSA_NAME_IS_DEFAULT_DEF (p) |
6965 | && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL |
6966 | || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL)) |
6967 | lookup_p = SSA_NAME_VAR (p); |
6968 | |
6969 | vi = lookup_vi_for_tree (t: lookup_p); |
6970 | if (!vi) |
6971 | return; |
6972 | |
6973 | pi = get_ptr_info (p); |
6974 | pi->pt = find_what_var_points_to (fndecl, orig_vi: vi); |
6975 | /* Conservatively set to NULL from PTA (to true). */ |
6976 | pi->pt.null = 1; |
6977 | /* Preserve pointer nonnull globally computed. */ |
6978 | if (nonnull) |
6979 | set_ptr_nonnull (p); |
6980 | } |
6981 | |
6982 | |
6983 | /* Query statistics for points-to solutions. */ |
6984 | |
6985 | static struct { |
6986 | unsigned HOST_WIDE_INT pt_solution_includes_may_alias; |
6987 | unsigned HOST_WIDE_INT pt_solution_includes_no_alias; |
6988 | unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias; |
6989 | unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias; |
6990 | } pta_stats; |
6991 | |
6992 | void |
6993 | dump_pta_stats (FILE *s) |
6994 | { |
6995 | fprintf (stream: s, format: "\nPTA query stats:\n" ); |
6996 | fprintf (stream: s, format: " pt_solution_includes: " |
6997 | HOST_WIDE_INT_PRINT_DEC" disambiguations, " |
6998 | HOST_WIDE_INT_PRINT_DEC" queries\n" , |
6999 | pta_stats.pt_solution_includes_no_alias, |
7000 | pta_stats.pt_solution_includes_no_alias |
7001 | + pta_stats.pt_solution_includes_may_alias); |
7002 | fprintf (stream: s, format: " pt_solutions_intersect: " |
7003 | HOST_WIDE_INT_PRINT_DEC" disambiguations, " |
7004 | HOST_WIDE_INT_PRINT_DEC" queries\n" , |
7005 | pta_stats.pt_solutions_intersect_no_alias, |
7006 | pta_stats.pt_solutions_intersect_no_alias |
7007 | + pta_stats.pt_solutions_intersect_may_alias); |
7008 | } |
7009 | |
7010 | |
7011 | /* Reset the points-to solution *PT to a conservative default |
7012 | (point to anything). */ |
7013 | |
7014 | void |
7015 | pt_solution_reset (struct pt_solution *pt) |
7016 | { |
7017 | memset (s: pt, c: 0, n: sizeof (struct pt_solution)); |
7018 | pt->anything = true; |
7019 | pt->null = true; |
7020 | } |
7021 | |
7022 | /* Set the points-to solution *PT to point only to the variables |
7023 | in VARS. VARS_CONTAINS_GLOBAL specifies whether that contains |
7024 | global variables and VARS_CONTAINS_RESTRICT specifies whether |
7025 | it contains restrict tag variables. */ |
7026 | |
7027 | void |
7028 | pt_solution_set (struct pt_solution *pt, bitmap vars, |
7029 | bool vars_contains_nonlocal) |
7030 | { |
7031 | memset (s: pt, c: 0, n: sizeof (struct pt_solution)); |
7032 | pt->vars = vars; |
7033 | pt->vars_contains_nonlocal = vars_contains_nonlocal; |
7034 | pt->vars_contains_escaped |
7035 | = (cfun->gimple_df->escaped.anything |
7036 | || bitmap_intersect_p (cfun->gimple_df->escaped.vars, vars)); |
7037 | } |
7038 | |
7039 | /* Set the points-to solution *PT to point only to the variable VAR. */ |
7040 | |
7041 | void |
7042 | pt_solution_set_var (struct pt_solution *pt, tree var) |
7043 | { |
7044 | memset (s: pt, c: 0, n: sizeof (struct pt_solution)); |
7045 | pt->vars = BITMAP_GGC_ALLOC (); |
7046 | bitmap_set_bit (pt->vars, DECL_PT_UID (var)); |
7047 | pt->vars_contains_nonlocal = is_global_var (t: var); |
7048 | pt->vars_contains_escaped |
7049 | = (cfun->gimple_df->escaped.anything |
7050 | || bitmap_bit_p (cfun->gimple_df->escaped.vars, DECL_PT_UID (var))); |
7051 | } |
7052 | |
7053 | /* Computes the union of the points-to solutions *DEST and *SRC and |
7054 | stores the result in *DEST. This changes the points-to bitmap |
7055 | of *DEST and thus may not be used if that might be shared. |
7056 | The points-to bitmap of *SRC and *DEST will not be shared after |
7057 | this function if they were not before. */ |
7058 | |
7059 | static void |
7060 | pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src) |
7061 | { |
7062 | dest->anything |= src->anything; |
7063 | if (dest->anything) |
7064 | { |
7065 | pt_solution_reset (pt: dest); |
7066 | return; |
7067 | } |
7068 | |
7069 | dest->nonlocal |= src->nonlocal; |
7070 | dest->escaped |= src->escaped; |
7071 | dest->ipa_escaped |= src->ipa_escaped; |
7072 | dest->null |= src->null; |
7073 | dest->const_pool |= src->const_pool ; |
7074 | dest->vars_contains_nonlocal |= src->vars_contains_nonlocal; |
7075 | dest->vars_contains_escaped |= src->vars_contains_escaped; |
7076 | dest->vars_contains_escaped_heap |= src->vars_contains_escaped_heap; |
7077 | if (!src->vars) |
7078 | return; |
7079 | |
7080 | if (!dest->vars) |
7081 | dest->vars = BITMAP_GGC_ALLOC (); |
7082 | bitmap_ior_into (dest->vars, src->vars); |
7083 | } |
7084 | |
7085 | /* Return true if the points-to solution *PT is empty. */ |
7086 | |
7087 | bool |
7088 | pt_solution_empty_p (const pt_solution *pt) |
7089 | { |
7090 | if (pt->anything |
7091 | || pt->nonlocal) |
7092 | return false; |
7093 | |
7094 | if (pt->vars |
7095 | && !bitmap_empty_p (map: pt->vars)) |
7096 | return false; |
7097 | |
7098 | /* If the solution includes ESCAPED, check if that is empty. */ |
7099 | if (pt->escaped |
7100 | && !pt_solution_empty_p (pt: &cfun->gimple_df->escaped)) |
7101 | return false; |
7102 | |
7103 | /* If the solution includes ESCAPED, check if that is empty. */ |
7104 | if (pt->ipa_escaped |
7105 | && !pt_solution_empty_p (pt: &ipa_escaped_pt)) |
7106 | return false; |
7107 | |
7108 | return true; |
7109 | } |
7110 | |
7111 | /* Return true if the points-to solution *PT only point to a single var, and |
7112 | return the var uid in *UID. */ |
7113 | |
7114 | bool |
7115 | pt_solution_singleton_or_null_p (struct pt_solution *pt, unsigned *uid) |
7116 | { |
7117 | if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped |
7118 | || pt->vars == NULL |
7119 | || !bitmap_single_bit_set_p (pt->vars)) |
7120 | return false; |
7121 | |
7122 | *uid = bitmap_first_set_bit (pt->vars); |
7123 | return true; |
7124 | } |
7125 | |
7126 | /* Return true if the points-to solution *PT includes global memory. |
7127 | If ESCAPED_LOCAL_P is true then escaped local variables are also |
7128 | considered global. */ |
7129 | |
7130 | bool |
7131 | pt_solution_includes_global (struct pt_solution *pt, bool escaped_local_p) |
7132 | { |
7133 | if (pt->anything |
7134 | || pt->nonlocal |
7135 | || pt->vars_contains_nonlocal |
7136 | /* The following is a hack to make the malloc escape hack work. |
7137 | In reality we'd need different sets for escaped-through-return |
7138 | and escaped-to-callees and passes would need to be updated. */ |
7139 | || pt->vars_contains_escaped_heap) |
7140 | return true; |
7141 | |
7142 | if (escaped_local_p && pt->vars_contains_escaped) |
7143 | return true; |
7144 | |
7145 | /* 'escaped' is also a placeholder so we have to look into it. */ |
7146 | if (pt->escaped) |
7147 | return pt_solution_includes_global (pt: &cfun->gimple_df->escaped, |
7148 | escaped_local_p); |
7149 | |
7150 | if (pt->ipa_escaped) |
7151 | return pt_solution_includes_global (pt: &ipa_escaped_pt, |
7152 | escaped_local_p); |
7153 | |
7154 | return false; |
7155 | } |
7156 | |
7157 | /* Return true if the points-to solution *PT includes the variable |
7158 | declaration DECL. */ |
7159 | |
7160 | static bool |
7161 | pt_solution_includes_1 (struct pt_solution *pt, const_tree decl) |
7162 | { |
7163 | if (pt->anything) |
7164 | return true; |
7165 | |
7166 | if (pt->nonlocal |
7167 | && is_global_var (t: decl)) |
7168 | return true; |
7169 | |
7170 | if (pt->vars |
7171 | && bitmap_bit_p (pt->vars, DECL_PT_UID (decl))) |
7172 | return true; |
7173 | |
7174 | /* If the solution includes ESCAPED, check it. */ |
7175 | if (pt->escaped |
7176 | && pt_solution_includes_1 (pt: &cfun->gimple_df->escaped, decl)) |
7177 | return true; |
7178 | |
7179 | /* If the solution includes ESCAPED, check it. */ |
7180 | if (pt->ipa_escaped |
7181 | && pt_solution_includes_1 (pt: &ipa_escaped_pt, decl)) |
7182 | return true; |
7183 | |
7184 | return false; |
7185 | } |
7186 | |
7187 | bool |
7188 | pt_solution_includes (struct pt_solution *pt, const_tree decl) |
7189 | { |
7190 | bool res = pt_solution_includes_1 (pt, decl); |
7191 | if (res) |
7192 | ++pta_stats.pt_solution_includes_may_alias; |
7193 | else |
7194 | ++pta_stats.pt_solution_includes_no_alias; |
7195 | return res; |
7196 | } |
7197 | |
7198 | /* Return true if the points-to solution *PT contains a reference to a |
7199 | constant pool entry. */ |
7200 | |
7201 | bool |
7202 | pt_solution_includes_const_pool (struct pt_solution *pt) |
7203 | { |
7204 | return (pt->const_pool |
7205 | || pt->nonlocal |
7206 | || (pt->escaped && (!cfun || cfun->gimple_df->escaped.const_pool)) |
7207 | || (pt->ipa_escaped && ipa_escaped_pt.const_pool)); |
7208 | } |
7209 | |
7210 | /* Return true if both points-to solutions PT1 and PT2 have a non-empty |
7211 | intersection. */ |
7212 | |
7213 | static bool |
7214 | pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2) |
7215 | { |
7216 | if (pt1->anything || pt2->anything) |
7217 | return true; |
7218 | |
7219 | /* If either points to unknown global memory and the other points to |
7220 | any global memory they alias. */ |
7221 | if ((pt1->nonlocal |
7222 | && (pt2->nonlocal |
7223 | || pt2->vars_contains_nonlocal)) |
7224 | || (pt2->nonlocal |
7225 | && pt1->vars_contains_nonlocal)) |
7226 | return true; |
7227 | |
7228 | /* If either points to all escaped memory and the other points to |
7229 | any escaped memory they alias. */ |
7230 | if ((pt1->escaped |
7231 | && (pt2->escaped |
7232 | || pt2->vars_contains_escaped)) |
7233 | || (pt2->escaped |
7234 | && pt1->vars_contains_escaped)) |
7235 | return true; |
7236 | |
7237 | /* Check the escaped solution if required. |
7238 | ??? Do we need to check the local against the IPA escaped sets? */ |
7239 | if ((pt1->ipa_escaped || pt2->ipa_escaped) |
7240 | && !pt_solution_empty_p (pt: &ipa_escaped_pt)) |
7241 | { |
7242 | /* If both point to escaped memory and that solution |
7243 | is not empty they alias. */ |
7244 | if (pt1->ipa_escaped && pt2->ipa_escaped) |
7245 | return true; |
7246 | |
7247 | /* If either points to escaped memory see if the escaped solution |
7248 | intersects with the other. */ |
7249 | if ((pt1->ipa_escaped |
7250 | && pt_solutions_intersect_1 (pt1: &ipa_escaped_pt, pt2)) |
7251 | || (pt2->ipa_escaped |
7252 | && pt_solutions_intersect_1 (pt1: &ipa_escaped_pt, pt2: pt1))) |
7253 | return true; |
7254 | } |
7255 | |
7256 | /* Now both pointers alias if their points-to solution intersects. */ |
7257 | return (pt1->vars |
7258 | && pt2->vars |
7259 | && bitmap_intersect_p (pt1->vars, pt2->vars)); |
7260 | } |
7261 | |
7262 | bool |
7263 | pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2) |
7264 | { |
7265 | bool res = pt_solutions_intersect_1 (pt1, pt2); |
7266 | if (res) |
7267 | ++pta_stats.pt_solutions_intersect_may_alias; |
7268 | else |
7269 | ++pta_stats.pt_solutions_intersect_no_alias; |
7270 | return res; |
7271 | } |
7272 | |
7273 | /* Dump stats information to OUTFILE. */ |
7274 | |
7275 | static void |
7276 | dump_sa_stats (FILE *outfile) |
7277 | { |
7278 | fprintf (stream: outfile, format: "Points-to Stats:\n" ); |
7279 | fprintf (stream: outfile, format: "Total vars: %d\n" , stats.total_vars); |
7280 | fprintf (stream: outfile, format: "Non-pointer vars: %d\n" , |
7281 | stats.nonpointer_vars); |
7282 | fprintf (stream: outfile, format: "Statically unified vars: %d\n" , |
7283 | stats.unified_vars_static); |
7284 | fprintf (stream: outfile, format: "Dynamically unified vars: %d\n" , |
7285 | stats.unified_vars_dynamic); |
7286 | fprintf (stream: outfile, format: "Iterations: %d\n" , stats.iterations); |
7287 | fprintf (stream: outfile, format: "Number of edges: %d\n" , stats.num_edges); |
7288 | fprintf (stream: outfile, format: "Number of implicit edges: %d\n" , |
7289 | stats.num_implicit_edges); |
7290 | fprintf (stream: outfile, format: "Number of avoided edges: %d\n" , |
7291 | stats.num_avoided_edges); |
7292 | } |
7293 | |
7294 | /* Dump points-to information to OUTFILE. */ |
7295 | |
7296 | static void |
7297 | dump_sa_points_to_info (FILE *outfile) |
7298 | { |
7299 | fprintf (stream: outfile, format: "\nPoints-to sets\n\n" ); |
7300 | |
7301 | for (unsigned i = 1; i < varmap.length (); i++) |
7302 | { |
7303 | varinfo_t vi = get_varinfo (n: i); |
7304 | if (!vi->may_have_pointers) |
7305 | continue; |
7306 | dump_solution_for_var (file: outfile, var: i); |
7307 | } |
7308 | } |
7309 | |
7310 | |
7311 | /* Debug points-to information to stderr. */ |
7312 | |
7313 | DEBUG_FUNCTION void |
7314 | debug_sa_points_to_info (void) |
7315 | { |
7316 | dump_sa_points_to_info (stderr); |
7317 | } |
7318 | |
7319 | |
7320 | /* Initialize the always-existing constraint variables for NULL |
7321 | ANYTHING, READONLY, and INTEGER */ |
7322 | |
7323 | static void |
7324 | init_base_vars (void) |
7325 | { |
7326 | struct constraint_expr lhs, rhs; |
7327 | varinfo_t var_anything; |
7328 | varinfo_t var_nothing; |
7329 | varinfo_t var_string; |
7330 | varinfo_t var_escaped; |
7331 | varinfo_t var_nonlocal; |
7332 | varinfo_t var_escaped_return; |
7333 | varinfo_t var_storedanything; |
7334 | varinfo_t var_integer; |
7335 | |
7336 | /* Variable ID zero is reserved and should be NULL. */ |
7337 | varmap.safe_push (NULL); |
7338 | |
7339 | /* Create the NULL variable, used to represent that a variable points |
7340 | to NULL. */ |
7341 | var_nothing = new_var_info (NULL_TREE, name: "NULL" , add_id: false); |
7342 | gcc_assert (var_nothing->id == nothing_id); |
7343 | var_nothing->is_artificial_var = 1; |
7344 | var_nothing->offset = 0; |
7345 | var_nothing->size = ~0; |
7346 | var_nothing->fullsize = ~0; |
7347 | var_nothing->is_special_var = 1; |
7348 | var_nothing->may_have_pointers = 0; |
7349 | var_nothing->is_global_var = 0; |
7350 | |
7351 | /* Create the ANYTHING variable, used to represent that a variable |
7352 | points to some unknown piece of memory. */ |
7353 | var_anything = new_var_info (NULL_TREE, name: "ANYTHING" , add_id: false); |
7354 | gcc_assert (var_anything->id == anything_id); |
7355 | var_anything->is_artificial_var = 1; |
7356 | var_anything->size = ~0; |
7357 | var_anything->offset = 0; |
7358 | var_anything->fullsize = ~0; |
7359 | var_anything->is_special_var = 1; |
7360 | |
7361 | /* Anything points to anything. This makes deref constraints just |
7362 | work in the presence of linked list and other p = *p type loops, |
7363 | by saying that *ANYTHING = ANYTHING. */ |
7364 | lhs.type = SCALAR; |
7365 | lhs.var = anything_id; |
7366 | lhs.offset = 0; |
7367 | rhs.type = ADDRESSOF; |
7368 | rhs.var = anything_id; |
7369 | rhs.offset = 0; |
7370 | |
7371 | /* This specifically does not use process_constraint because |
7372 | process_constraint ignores all anything = anything constraints, since all |
7373 | but this one are redundant. */ |
7374 | constraints.safe_push (obj: new_constraint (lhs, rhs)); |
7375 | |
7376 | /* Create the STRING variable, used to represent that a variable |
7377 | points to a string literal. String literals don't contain |
7378 | pointers so STRING doesn't point to anything. */ |
7379 | var_string = new_var_info (NULL_TREE, name: "STRING" , add_id: false); |
7380 | gcc_assert (var_string->id == string_id); |
7381 | var_string->is_artificial_var = 1; |
7382 | var_string->offset = 0; |
7383 | var_string->size = ~0; |
7384 | var_string->fullsize = ~0; |
7385 | var_string->is_special_var = 1; |
7386 | var_string->may_have_pointers = 0; |
7387 | |
7388 | /* Create the ESCAPED variable, used to represent the set of escaped |
7389 | memory. */ |
7390 | var_escaped = new_var_info (NULL_TREE, name: "ESCAPED" , add_id: false); |
7391 | gcc_assert (var_escaped->id == escaped_id); |
7392 | var_escaped->is_artificial_var = 1; |
7393 | var_escaped->offset = 0; |
7394 | var_escaped->size = ~0; |
7395 | var_escaped->fullsize = ~0; |
7396 | var_escaped->is_special_var = 0; |
7397 | |
7398 | /* Create the NONLOCAL variable, used to represent the set of nonlocal |
7399 | memory. */ |
7400 | var_nonlocal = new_var_info (NULL_TREE, name: "NONLOCAL" , add_id: false); |
7401 | gcc_assert (var_nonlocal->id == nonlocal_id); |
7402 | var_nonlocal->is_artificial_var = 1; |
7403 | var_nonlocal->offset = 0; |
7404 | var_nonlocal->size = ~0; |
7405 | var_nonlocal->fullsize = ~0; |
7406 | var_nonlocal->is_special_var = 1; |
7407 | |
7408 | /* Create the ESCAPED_RETURN variable, used to represent the set of escaped |
7409 | memory via a regular return stmt. */ |
7410 | var_escaped_return = new_var_info (NULL_TREE, name: "ESCAPED_RETURN" , add_id: false); |
7411 | gcc_assert (var_escaped_return->id == escaped_return_id); |
7412 | var_escaped_return->is_artificial_var = 1; |
7413 | var_escaped_return->offset = 0; |
7414 | var_escaped_return->size = ~0; |
7415 | var_escaped_return->fullsize = ~0; |
7416 | var_escaped_return->is_special_var = 0; |
7417 | |
7418 | /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */ |
7419 | lhs.type = SCALAR; |
7420 | lhs.var = escaped_id; |
7421 | lhs.offset = 0; |
7422 | rhs.type = DEREF; |
7423 | rhs.var = escaped_id; |
7424 | rhs.offset = 0; |
7425 | process_constraint (t: new_constraint (lhs, rhs)); |
7426 | |
7427 | /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the |
7428 | whole variable escapes. */ |
7429 | lhs.type = SCALAR; |
7430 | lhs.var = escaped_id; |
7431 | lhs.offset = 0; |
7432 | rhs.type = SCALAR; |
7433 | rhs.var = escaped_id; |
7434 | rhs.offset = UNKNOWN_OFFSET; |
7435 | process_constraint (t: new_constraint (lhs, rhs)); |
7436 | |
7437 | /* *ESCAPED = NONLOCAL. This is true because we have to assume |
7438 | everything pointed to by escaped points to what global memory can |
7439 | point to. */ |
7440 | lhs.type = DEREF; |
7441 | lhs.var = escaped_id; |
7442 | lhs.offset = 0; |
7443 | rhs.type = SCALAR; |
7444 | rhs.var = nonlocal_id; |
7445 | rhs.offset = 0; |
7446 | process_constraint (t: new_constraint (lhs, rhs)); |
7447 | |
7448 | /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because |
7449 | global memory may point to global memory and escaped memory. */ |
7450 | lhs.type = SCALAR; |
7451 | lhs.var = nonlocal_id; |
7452 | lhs.offset = 0; |
7453 | rhs.type = ADDRESSOF; |
7454 | rhs.var = nonlocal_id; |
7455 | rhs.offset = 0; |
7456 | process_constraint (t: new_constraint (lhs, rhs)); |
7457 | rhs.type = ADDRESSOF; |
7458 | rhs.var = escaped_id; |
7459 | rhs.offset = 0; |
7460 | process_constraint (t: new_constraint (lhs, rhs)); |
7461 | |
7462 | /* Transitively close ESCAPED_RETURN. |
7463 | ESCAPED_RETURN = ESCAPED_RETURN + UNKNOWN_OFFSET |
7464 | ESCAPED_RETURN = *ESCAPED_RETURN. */ |
7465 | lhs.type = SCALAR; |
7466 | lhs.var = escaped_return_id; |
7467 | lhs.offset = 0; |
7468 | rhs.type = SCALAR; |
7469 | rhs.var = escaped_return_id; |
7470 | rhs.offset = UNKNOWN_OFFSET; |
7471 | process_constraint (t: new_constraint (lhs, rhs)); |
7472 | lhs.type = SCALAR; |
7473 | lhs.var = escaped_return_id; |
7474 | lhs.offset = 0; |
7475 | rhs.type = DEREF; |
7476 | rhs.var = escaped_return_id; |
7477 | rhs.offset = 0; |
7478 | process_constraint (t: new_constraint (lhs, rhs)); |
7479 | |
7480 | /* Create the STOREDANYTHING variable, used to represent the set of |
7481 | variables stored to *ANYTHING. */ |
7482 | var_storedanything = new_var_info (NULL_TREE, name: "STOREDANYTHING" , add_id: false); |
7483 | gcc_assert (var_storedanything->id == storedanything_id); |
7484 | var_storedanything->is_artificial_var = 1; |
7485 | var_storedanything->offset = 0; |
7486 | var_storedanything->size = ~0; |
7487 | var_storedanything->fullsize = ~0; |
7488 | var_storedanything->is_special_var = 0; |
7489 | |
7490 | /* Create the INTEGER variable, used to represent that a variable points |
7491 | to what an INTEGER "points to". */ |
7492 | var_integer = new_var_info (NULL_TREE, name: "INTEGER" , add_id: false); |
7493 | gcc_assert (var_integer->id == integer_id); |
7494 | var_integer->is_artificial_var = 1; |
7495 | var_integer->size = ~0; |
7496 | var_integer->fullsize = ~0; |
7497 | var_integer->offset = 0; |
7498 | var_integer->is_special_var = 1; |
7499 | |
7500 | /* INTEGER = ANYTHING, because we don't know where a dereference of |
7501 | a random integer will point to. */ |
7502 | lhs.type = SCALAR; |
7503 | lhs.var = integer_id; |
7504 | lhs.offset = 0; |
7505 | rhs.type = ADDRESSOF; |
7506 | rhs.var = anything_id; |
7507 | rhs.offset = 0; |
7508 | process_constraint (t: new_constraint (lhs, rhs)); |
7509 | } |
7510 | |
7511 | /* Initialize things necessary to perform PTA */ |
7512 | |
7513 | static void |
7514 | init_alias_vars (void) |
7515 | { |
7516 | use_field_sensitive = (param_max_fields_for_field_sensitive > 1); |
7517 | |
7518 | bitmap_obstack_initialize (&pta_obstack); |
7519 | bitmap_obstack_initialize (&oldpta_obstack); |
7520 | bitmap_obstack_initialize (&predbitmap_obstack); |
7521 | |
7522 | constraints.create (nelems: 8); |
7523 | varmap.create (nelems: 8); |
7524 | vi_for_tree = new hash_map<tree, varinfo_t>; |
7525 | call_stmt_vars = new hash_map<gimple *, varinfo_t>; |
7526 | |
7527 | memset (s: &stats, c: 0, n: sizeof (stats)); |
7528 | shared_bitmap_table = new hash_table<shared_bitmap_hasher> (511); |
7529 | init_base_vars (); |
7530 | |
7531 | gcc_obstack_init (&fake_var_decl_obstack); |
7532 | |
7533 | final_solutions = new hash_map<varinfo_t, pt_solution *>; |
7534 | gcc_obstack_init (&final_solutions_obstack); |
7535 | } |
7536 | |
7537 | /* Remove the REF and ADDRESS edges from GRAPH, as well as all the |
7538 | predecessor edges. */ |
7539 | |
7540 | static void |
7541 | remove_preds_and_fake_succs (constraint_graph_t graph) |
7542 | { |
7543 | unsigned int i; |
7544 | |
7545 | /* Clear the implicit ref and address nodes from the successor |
7546 | lists. */ |
7547 | for (i = 1; i < FIRST_REF_NODE; i++) |
7548 | { |
7549 | if (graph->succs[i]) |
7550 | bitmap_clear_range (graph->succs[i], FIRST_REF_NODE, |
7551 | FIRST_REF_NODE * 2); |
7552 | } |
7553 | |
7554 | /* Free the successor list for the non-ref nodes. */ |
7555 | for (i = FIRST_REF_NODE + 1; i < graph->size; i++) |
7556 | { |
7557 | if (graph->succs[i]) |
7558 | BITMAP_FREE (graph->succs[i]); |
7559 | } |
7560 | |
7561 | /* Now reallocate the size of the successor list as, and blow away |
7562 | the predecessor bitmaps. */ |
7563 | graph->size = varmap.length (); |
7564 | graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size); |
7565 | |
7566 | free (ptr: graph->implicit_preds); |
7567 | graph->implicit_preds = NULL; |
7568 | free (ptr: graph->preds); |
7569 | graph->preds = NULL; |
7570 | bitmap_obstack_release (&predbitmap_obstack); |
7571 | } |
7572 | |
7573 | /* Solve the constraint set. */ |
7574 | |
7575 | static void |
7576 | solve_constraints (void) |
7577 | { |
7578 | class scc_info *si; |
7579 | |
7580 | /* Sort varinfos so that ones that cannot be pointed to are last. |
7581 | This makes bitmaps more efficient. */ |
7582 | unsigned int *map = XNEWVEC (unsigned int, varmap.length ()); |
7583 | for (unsigned i = 0; i < integer_id + 1; ++i) |
7584 | map[i] = i; |
7585 | /* Start with address-taken vars, followed by not address-taken vars |
7586 | to move vars never appearing in the points-to solution bitmaps last. */ |
7587 | unsigned j = integer_id + 1; |
7588 | for (unsigned i = integer_id + 1; i < varmap.length (); ++i) |
7589 | if (varmap[varmap[i]->head]->address_taken) |
7590 | map[i] = j++; |
7591 | for (unsigned i = integer_id + 1; i < varmap.length (); ++i) |
7592 | if (! varmap[varmap[i]->head]->address_taken) |
7593 | map[i] = j++; |
7594 | /* Shuffle varmap according to map. */ |
7595 | for (unsigned i = integer_id + 1; i < varmap.length (); ++i) |
7596 | { |
7597 | while (map[varmap[i]->id] != i) |
7598 | std::swap (a&: varmap[i], b&: varmap[map[varmap[i]->id]]); |
7599 | gcc_assert (bitmap_empty_p (varmap[i]->solution)); |
7600 | varmap[i]->id = i; |
7601 | varmap[i]->next = map[varmap[i]->next]; |
7602 | varmap[i]->head = map[varmap[i]->head]; |
7603 | } |
7604 | /* Finally rewrite constraints. */ |
7605 | for (unsigned i = 0; i < constraints.length (); ++i) |
7606 | { |
7607 | constraints[i]->lhs.var = map[constraints[i]->lhs.var]; |
7608 | constraints[i]->rhs.var = map[constraints[i]->rhs.var]; |
7609 | } |
7610 | free (ptr: map); |
7611 | |
7612 | if (dump_file) |
7613 | fprintf (stream: dump_file, |
7614 | format: "\nCollapsing static cycles and doing variable " |
7615 | "substitution\n" ); |
7616 | |
7617 | init_graph (size: varmap.length () * 2); |
7618 | |
7619 | if (dump_file) |
7620 | fprintf (stream: dump_file, format: "Building predecessor graph\n" ); |
7621 | build_pred_graph (); |
7622 | |
7623 | if (dump_file) |
7624 | fprintf (stream: dump_file, format: "Detecting pointer and location " |
7625 | "equivalences\n" ); |
7626 | si = perform_var_substitution (graph); |
7627 | |
7628 | if (dump_file) |
7629 | fprintf (stream: dump_file, format: "Rewriting constraints and unifying " |
7630 | "variables\n" ); |
7631 | rewrite_constraints (graph, si); |
7632 | |
7633 | build_succ_graph (); |
7634 | |
7635 | free_var_substitution_info (si); |
7636 | |
7637 | /* Attach complex constraints to graph nodes. */ |
7638 | move_complex_constraints (graph); |
7639 | |
7640 | if (dump_file) |
7641 | fprintf (stream: dump_file, format: "Uniting pointer but not location equivalent " |
7642 | "variables\n" ); |
7643 | unite_pointer_equivalences (graph); |
7644 | |
7645 | if (dump_file) |
7646 | fprintf (stream: dump_file, format: "Finding indirect cycles\n" ); |
7647 | find_indirect_cycles (graph); |
7648 | |
7649 | /* Implicit nodes and predecessors are no longer necessary at this |
7650 | point. */ |
7651 | remove_preds_and_fake_succs (graph); |
7652 | |
7653 | if (dump_file && (dump_flags & TDF_GRAPH)) |
7654 | { |
7655 | fprintf (stream: dump_file, format: "\n\n// The constraint graph before solve-graph " |
7656 | "in dot format:\n" ); |
7657 | dump_constraint_graph (file: dump_file); |
7658 | fprintf (stream: dump_file, format: "\n\n" ); |
7659 | } |
7660 | |
7661 | if (dump_file) |
7662 | fprintf (stream: dump_file, format: "Solving graph\n" ); |
7663 | |
7664 | solve_graph (graph); |
7665 | |
7666 | if (dump_file && (dump_flags & TDF_GRAPH)) |
7667 | { |
7668 | fprintf (stream: dump_file, format: "\n\n// The constraint graph after solve-graph " |
7669 | "in dot format:\n" ); |
7670 | dump_constraint_graph (file: dump_file); |
7671 | fprintf (stream: dump_file, format: "\n\n" ); |
7672 | } |
7673 | } |
7674 | |
7675 | /* Create points-to sets for the current function. See the comments |
7676 | at the start of the file for an algorithmic overview. */ |
7677 | |
7678 | static void |
7679 | compute_points_to_sets (void) |
7680 | { |
7681 | basic_block bb; |
7682 | varinfo_t vi; |
7683 | |
7684 | timevar_push (tv: TV_TREE_PTA); |
7685 | |
7686 | init_alias_vars (); |
7687 | |
7688 | intra_create_variable_infos (cfun); |
7689 | |
7690 | /* Now walk all statements and build the constraint set. */ |
7691 | FOR_EACH_BB_FN (bb, cfun) |
7692 | { |
7693 | for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (i: gsi); |
7694 | gsi_next (i: &gsi)) |
7695 | { |
7696 | gphi *phi = gsi.phi (); |
7697 | |
7698 | if (! virtual_operand_p (op: gimple_phi_result (gs: phi))) |
7699 | find_func_aliases (cfun, origt: phi); |
7700 | } |
7701 | |
7702 | for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (i: gsi); |
7703 | gsi_next (i: &gsi)) |
7704 | { |
7705 | gimple *stmt = gsi_stmt (i: gsi); |
7706 | |
7707 | find_func_aliases (cfun, origt: stmt); |
7708 | } |
7709 | } |
7710 | |
7711 | if (dump_file && (dump_flags & TDF_DETAILS)) |
7712 | { |
7713 | fprintf (stream: dump_file, format: "Points-to analysis\n\nConstraints:\n\n" ); |
7714 | dump_constraints (file: dump_file, from: 0); |
7715 | } |
7716 | |
7717 | /* From the constraints compute the points-to sets. */ |
7718 | solve_constraints (); |
7719 | |
7720 | if (dump_file && (dump_flags & TDF_STATS)) |
7721 | dump_sa_stats (outfile: dump_file); |
7722 | |
7723 | if (dump_file && (dump_flags & TDF_DETAILS)) |
7724 | dump_sa_points_to_info (outfile: dump_file); |
7725 | |
7726 | /* Compute the points-to set for ESCAPED used for call-clobber analysis. */ |
7727 | cfun->gimple_df->escaped = find_what_var_points_to (cfun->decl, |
7728 | orig_vi: get_varinfo (n: escaped_id)); |
7729 | |
7730 | /* Make sure the ESCAPED solution (which is used as placeholder in |
7731 | other solutions) does not reference itself. This simplifies |
7732 | points-to solution queries. */ |
7733 | cfun->gimple_df->escaped.escaped = 0; |
7734 | |
7735 | /* The ESCAPED_RETURN solution is what contains all memory that needs |
7736 | to be considered global. */ |
7737 | cfun->gimple_df->escaped_return |
7738 | = find_what_var_points_to (cfun->decl, orig_vi: get_varinfo (n: escaped_return_id)); |
7739 | cfun->gimple_df->escaped_return.escaped = 1; |
7740 | |
7741 | /* Compute the points-to sets for pointer SSA_NAMEs. */ |
7742 | unsigned i; |
7743 | tree ptr; |
7744 | |
7745 | FOR_EACH_SSA_NAME (i, ptr, cfun) |
7746 | { |
7747 | if (POINTER_TYPE_P (TREE_TYPE (ptr))) |
7748 | find_what_p_points_to (cfun->decl, p: ptr); |
7749 | } |
7750 | |
7751 | /* Compute the call-used/clobbered sets. */ |
7752 | FOR_EACH_BB_FN (bb, cfun) |
7753 | { |
7754 | gimple_stmt_iterator gsi; |
7755 | |
7756 | for (gsi = gsi_start_bb (bb); !gsi_end_p (i: gsi); gsi_next (i: &gsi)) |
7757 | { |
7758 | gcall *stmt; |
7759 | struct pt_solution *pt; |
7760 | |
7761 | stmt = dyn_cast <gcall *> (p: gsi_stmt (i: gsi)); |
7762 | if (!stmt) |
7763 | continue; |
7764 | |
7765 | pt = gimple_call_use_set (call_stmt: stmt); |
7766 | if (gimple_call_flags (stmt) & ECF_CONST) |
7767 | memset (s: pt, c: 0, n: sizeof (struct pt_solution)); |
7768 | else |
7769 | { |
7770 | bool uses_global_memory = true; |
7771 | bool reads_global_memory = true; |
7772 | |
7773 | determine_global_memory_access (stmt, NULL, |
7774 | reads_global_memory: &reads_global_memory, |
7775 | uses_global_memory: &uses_global_memory); |
7776 | if ((vi = lookup_call_use_vi (call: stmt)) != NULL) |
7777 | { |
7778 | *pt = find_what_var_points_to (cfun->decl, orig_vi: vi); |
7779 | /* Escaped (and thus nonlocal) variables are always |
7780 | implicitly used by calls. */ |
7781 | /* ??? ESCAPED can be empty even though NONLOCAL |
7782 | always escaped. */ |
7783 | if (uses_global_memory) |
7784 | { |
7785 | pt->nonlocal = 1; |
7786 | pt->escaped = 1; |
7787 | } |
7788 | } |
7789 | else if (uses_global_memory) |
7790 | { |
7791 | /* If there is nothing special about this call then |
7792 | we have made everything that is used also escape. */ |
7793 | *pt = cfun->gimple_df->escaped; |
7794 | pt->nonlocal = 1; |
7795 | } |
7796 | else |
7797 | memset (s: pt, c: 0, n: sizeof (struct pt_solution)); |
7798 | } |
7799 | |
7800 | pt = gimple_call_clobber_set (call_stmt: stmt); |
7801 | if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS)) |
7802 | memset (s: pt, c: 0, n: sizeof (struct pt_solution)); |
7803 | else |
7804 | { |
7805 | bool writes_global_memory = true; |
7806 | |
7807 | determine_global_memory_access (stmt, writes_global_memory: &writes_global_memory, |
7808 | NULL, NULL); |
7809 | |
7810 | if ((vi = lookup_call_clobber_vi (call: stmt)) != NULL) |
7811 | { |
7812 | *pt = find_what_var_points_to (cfun->decl, orig_vi: vi); |
7813 | /* Escaped (and thus nonlocal) variables are always |
7814 | implicitly clobbered by calls. */ |
7815 | /* ??? ESCAPED can be empty even though NONLOCAL |
7816 | always escaped. */ |
7817 | if (writes_global_memory) |
7818 | { |
7819 | pt->nonlocal = 1; |
7820 | pt->escaped = 1; |
7821 | } |
7822 | } |
7823 | else if (writes_global_memory) |
7824 | { |
7825 | /* If there is nothing special about this call then |
7826 | we have made everything that is used also escape. */ |
7827 | *pt = cfun->gimple_df->escaped; |
7828 | pt->nonlocal = 1; |
7829 | } |
7830 | else |
7831 | memset (s: pt, c: 0, n: sizeof (struct pt_solution)); |
7832 | } |
7833 | } |
7834 | } |
7835 | |
7836 | timevar_pop (tv: TV_TREE_PTA); |
7837 | } |
7838 | |
7839 | |
7840 | /* Delete created points-to sets. */ |
7841 | |
7842 | static void |
7843 | delete_points_to_sets (void) |
7844 | { |
7845 | unsigned int i; |
7846 | |
7847 | delete shared_bitmap_table; |
7848 | shared_bitmap_table = NULL; |
7849 | if (dump_file && (dump_flags & TDF_STATS)) |
7850 | fprintf (stream: dump_file, format: "Points to sets created:%d\n" , |
7851 | stats.points_to_sets_created); |
7852 | |
7853 | delete vi_for_tree; |
7854 | delete call_stmt_vars; |
7855 | bitmap_obstack_release (&pta_obstack); |
7856 | constraints.release (); |
7857 | |
7858 | for (i = 0; i < graph->size; i++) |
7859 | graph->complex[i].release (); |
7860 | free (ptr: graph->complex); |
7861 | |
7862 | free (ptr: graph->rep); |
7863 | free (ptr: graph->succs); |
7864 | free (ptr: graph->pe); |
7865 | free (ptr: graph->pe_rep); |
7866 | free (ptr: graph->indirect_cycles); |
7867 | free (ptr: graph); |
7868 | |
7869 | varmap.release (); |
7870 | variable_info_pool.release (); |
7871 | constraint_pool.release (); |
7872 | |
7873 | obstack_free (&fake_var_decl_obstack, NULL); |
7874 | |
7875 | delete final_solutions; |
7876 | obstack_free (&final_solutions_obstack, NULL); |
7877 | } |
7878 | |
7879 | struct vls_data |
7880 | { |
7881 | unsigned short clique; |
7882 | bool escaped_p; |
7883 | bitmap rvars; |
7884 | }; |
7885 | |
7886 | /* Mark "other" loads and stores as belonging to CLIQUE and with |
7887 | base zero. */ |
7888 | |
7889 | static bool |
7890 | visit_loadstore (gimple *, tree base, tree ref, void *data) |
7891 | { |
7892 | unsigned short clique = ((vls_data *) data)->clique; |
7893 | bitmap rvars = ((vls_data *) data)->rvars; |
7894 | bool escaped_p = ((vls_data *) data)->escaped_p; |
7895 | if (TREE_CODE (base) == MEM_REF |
7896 | || TREE_CODE (base) == TARGET_MEM_REF) |
7897 | { |
7898 | tree ptr = TREE_OPERAND (base, 0); |
7899 | if (TREE_CODE (ptr) == SSA_NAME) |
7900 | { |
7901 | /* For parameters, get at the points-to set for the actual parm |
7902 | decl. */ |
7903 | if (SSA_NAME_IS_DEFAULT_DEF (ptr) |
7904 | && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL |
7905 | || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL)) |
7906 | ptr = SSA_NAME_VAR (ptr); |
7907 | |
7908 | /* We need to make sure 'ptr' doesn't include any of |
7909 | the restrict tags we added bases for in its points-to set. */ |
7910 | varinfo_t vi = lookup_vi_for_tree (t: ptr); |
7911 | if (! vi) |
7912 | return false; |
7913 | |
7914 | vi = get_varinfo (n: find (node: vi->id)); |
7915 | if (bitmap_intersect_p (rvars, vi->solution) |
7916 | || (escaped_p && bitmap_bit_p (vi->solution, escaped_id))) |
7917 | return false; |
7918 | } |
7919 | |
7920 | /* Do not overwrite existing cliques (that includes clique, base |
7921 | pairs we just set). */ |
7922 | if (MR_DEPENDENCE_CLIQUE (base) == 0) |
7923 | { |
7924 | MR_DEPENDENCE_CLIQUE (base) = clique; |
7925 | MR_DEPENDENCE_BASE (base) = 0; |
7926 | } |
7927 | } |
7928 | |
7929 | /* For plain decl accesses see whether they are accesses to globals |
7930 | and rewrite them to MEM_REFs with { clique, 0 }. */ |
7931 | if (VAR_P (base) |
7932 | && is_global_var (t: base) |
7933 | /* ??? We can't rewrite a plain decl with the walk_stmt_load_store |
7934 | ops callback. */ |
7935 | && base != ref) |
7936 | { |
7937 | tree *basep = &ref; |
7938 | while (handled_component_p (t: *basep)) |
7939 | basep = &TREE_OPERAND (*basep, 0); |
7940 | gcc_assert (VAR_P (*basep)); |
7941 | tree ptr = build_fold_addr_expr (*basep); |
7942 | tree zero = build_int_cst (TREE_TYPE (ptr), 0); |
7943 | *basep = build2 (MEM_REF, TREE_TYPE (*basep), ptr, zero); |
7944 | MR_DEPENDENCE_CLIQUE (*basep) = clique; |
7945 | MR_DEPENDENCE_BASE (*basep) = 0; |
7946 | } |
7947 | |
7948 | return false; |
7949 | } |
7950 | |
7951 | struct msdi_data { |
7952 | tree ptr; |
7953 | unsigned short *clique; |
7954 | unsigned short *last_ruid; |
7955 | varinfo_t restrict_var; |
7956 | }; |
7957 | |
7958 | /* If BASE is a MEM_REF then assign a clique, base pair to it, updating |
7959 | CLIQUE, *RESTRICT_VAR and LAST_RUID as passed via DATA. |
7960 | Return whether dependence info was assigned to BASE. */ |
7961 | |
7962 | static bool |
7963 | maybe_set_dependence_info (gimple *, tree base, tree, void *data) |
7964 | { |
7965 | tree ptr = ((msdi_data *)data)->ptr; |
7966 | unsigned short &clique = *((msdi_data *)data)->clique; |
7967 | unsigned short &last_ruid = *((msdi_data *)data)->last_ruid; |
7968 | varinfo_t restrict_var = ((msdi_data *)data)->restrict_var; |
7969 | if ((TREE_CODE (base) == MEM_REF |
7970 | || TREE_CODE (base) == TARGET_MEM_REF) |
7971 | && TREE_OPERAND (base, 0) == ptr) |
7972 | { |
7973 | /* Do not overwrite existing cliques. This avoids overwriting dependence |
7974 | info inlined from a function with restrict parameters inlined |
7975 | into a function with restrict parameters. This usually means we |
7976 | prefer to be precise in innermost loops. */ |
7977 | if (MR_DEPENDENCE_CLIQUE (base) == 0) |
7978 | { |
7979 | if (clique == 0) |
7980 | { |
7981 | if (cfun->last_clique == 0) |
7982 | cfun->last_clique = 1; |
7983 | clique = 1; |
7984 | } |
7985 | if (restrict_var->ruid == 0) |
7986 | restrict_var->ruid = ++last_ruid; |
7987 | MR_DEPENDENCE_CLIQUE (base) = clique; |
7988 | MR_DEPENDENCE_BASE (base) = restrict_var->ruid; |
7989 | return true; |
7990 | } |
7991 | } |
7992 | return false; |
7993 | } |
7994 | |
7995 | /* Clear dependence info for the clique DATA. */ |
7996 | |
7997 | static bool |
7998 | clear_dependence_clique (gimple *, tree base, tree, void *data) |
7999 | { |
8000 | unsigned short clique = (uintptr_t)data; |
8001 | if ((TREE_CODE (base) == MEM_REF |
8002 | || TREE_CODE (base) == TARGET_MEM_REF) |
8003 | && MR_DEPENDENCE_CLIQUE (base) == clique) |
8004 | { |
8005 | MR_DEPENDENCE_CLIQUE (base) = 0; |
8006 | MR_DEPENDENCE_BASE (base) = 0; |
8007 | } |
8008 | |
8009 | return false; |
8010 | } |
8011 | |
8012 | /* Compute the set of independend memory references based on restrict |
8013 | tags and their conservative propagation to the points-to sets. */ |
8014 | |
8015 | static void |
8016 | compute_dependence_clique (void) |
8017 | { |
8018 | /* First clear the special "local" clique. */ |
8019 | basic_block bb; |
8020 | if (cfun->last_clique != 0) |
8021 | FOR_EACH_BB_FN (bb, cfun) |
8022 | for (gimple_stmt_iterator gsi = gsi_start_bb (bb); |
8023 | !gsi_end_p (i: gsi); gsi_next (i: &gsi)) |
8024 | { |
8025 | gimple *stmt = gsi_stmt (i: gsi); |
8026 | walk_stmt_load_store_ops (stmt, (void *)(uintptr_t) 1, |
8027 | clear_dependence_clique, |
8028 | clear_dependence_clique); |
8029 | } |
8030 | |
8031 | unsigned short clique = 0; |
8032 | unsigned short last_ruid = 0; |
8033 | bitmap rvars = BITMAP_ALLOC (NULL); |
8034 | bool escaped_p = false; |
8035 | for (unsigned i = 0; i < num_ssa_names; ++i) |
8036 | { |
8037 | tree ptr = ssa_name (i); |
8038 | if (!ptr || !POINTER_TYPE_P (TREE_TYPE (ptr))) |
8039 | continue; |
8040 | |
8041 | /* Avoid all this when ptr is not dereferenced? */ |
8042 | tree p = ptr; |
8043 | if (SSA_NAME_IS_DEFAULT_DEF (ptr) |
8044 | && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL |
8045 | || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL)) |
8046 | p = SSA_NAME_VAR (ptr); |
8047 | varinfo_t vi = lookup_vi_for_tree (t: p); |
8048 | if (!vi) |
8049 | continue; |
8050 | vi = get_varinfo (n: find (node: vi->id)); |
8051 | bitmap_iterator bi; |
8052 | unsigned j; |
8053 | varinfo_t restrict_var = NULL; |
8054 | EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi) |
8055 | { |
8056 | varinfo_t oi = get_varinfo (n: j); |
8057 | if (oi->head != j) |
8058 | oi = get_varinfo (n: oi->head); |
8059 | if (oi->is_restrict_var) |
8060 | { |
8061 | if (restrict_var |
8062 | && restrict_var != oi) |
8063 | { |
8064 | if (dump_file && (dump_flags & TDF_DETAILS)) |
8065 | { |
8066 | fprintf (stream: dump_file, format: "found restrict pointed-to " |
8067 | "for " ); |
8068 | print_generic_expr (dump_file, ptr); |
8069 | fprintf (stream: dump_file, format: " but not exclusively\n" ); |
8070 | } |
8071 | restrict_var = NULL; |
8072 | break; |
8073 | } |
8074 | restrict_var = oi; |
8075 | } |
8076 | /* NULL is the only other valid points-to entry. */ |
8077 | else if (oi->id != nothing_id) |
8078 | { |
8079 | restrict_var = NULL; |
8080 | break; |
8081 | } |
8082 | } |
8083 | /* Ok, found that ptr must(!) point to a single(!) restrict |
8084 | variable. */ |
8085 | /* ??? PTA isn't really a proper propagation engine to compute |
8086 | this property. |
8087 | ??? We could handle merging of two restricts by unifying them. */ |
8088 | if (restrict_var) |
8089 | { |
8090 | /* Now look at possible dereferences of ptr. */ |
8091 | imm_use_iterator ui; |
8092 | gimple *use_stmt; |
8093 | bool used = false; |
8094 | msdi_data data = { .ptr: ptr, .clique: &clique, .last_ruid: &last_ruid, .restrict_var: restrict_var }; |
8095 | FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr) |
8096 | used |= walk_stmt_load_store_ops (use_stmt, &data, |
8097 | maybe_set_dependence_info, |
8098 | maybe_set_dependence_info); |
8099 | if (used) |
8100 | { |
8101 | /* Add all subvars to the set of restrict pointed-to set. */ |
8102 | for (unsigned sv = restrict_var->head; sv != 0; |
8103 | sv = get_varinfo (n: sv)->next) |
8104 | bitmap_set_bit (rvars, sv); |
8105 | varinfo_t escaped = get_varinfo (n: find (node: escaped_id)); |
8106 | if (bitmap_bit_p (escaped->solution, restrict_var->id)) |
8107 | escaped_p = true; |
8108 | } |
8109 | } |
8110 | } |
8111 | |
8112 | if (clique != 0) |
8113 | { |
8114 | /* Assign the BASE id zero to all accesses not based on a restrict |
8115 | pointer. That way they get disambiguated against restrict |
8116 | accesses but not against each other. */ |
8117 | /* ??? For restricts derived from globals (thus not incoming |
8118 | parameters) we can't restrict scoping properly thus the following |
8119 | is too aggressive there. For now we have excluded those globals from |
8120 | getting into the MR_DEPENDENCE machinery. */ |
8121 | vls_data data = { .clique: clique, .escaped_p: escaped_p, .rvars: rvars }; |
8122 | basic_block bb; |
8123 | FOR_EACH_BB_FN (bb, cfun) |
8124 | for (gimple_stmt_iterator gsi = gsi_start_bb (bb); |
8125 | !gsi_end_p (i: gsi); gsi_next (i: &gsi)) |
8126 | { |
8127 | gimple *stmt = gsi_stmt (i: gsi); |
8128 | walk_stmt_load_store_ops (stmt, &data, |
8129 | visit_loadstore, visit_loadstore); |
8130 | } |
8131 | } |
8132 | |
8133 | BITMAP_FREE (rvars); |
8134 | } |
8135 | |
8136 | /* Compute points-to information for every SSA_NAME pointer in the |
8137 | current function and compute the transitive closure of escaped |
8138 | variables to re-initialize the call-clobber states of local variables. */ |
8139 | |
8140 | unsigned int |
8141 | compute_may_aliases (void) |
8142 | { |
8143 | if (cfun->gimple_df->ipa_pta) |
8144 | { |
8145 | if (dump_file) |
8146 | { |
8147 | fprintf (stream: dump_file, format: "\nNot re-computing points-to information " |
8148 | "because IPA points-to information is available.\n\n" ); |
8149 | |
8150 | /* But still dump what we have remaining it. */ |
8151 | if (dump_flags & (TDF_DETAILS|TDF_ALIAS)) |
8152 | dump_alias_info (dump_file); |
8153 | } |
8154 | |
8155 | return 0; |
8156 | } |
8157 | |
8158 | /* For each pointer P_i, determine the sets of variables that P_i may |
8159 | point-to. Compute the reachability set of escaped and call-used |
8160 | variables. */ |
8161 | compute_points_to_sets (); |
8162 | |
8163 | /* Debugging dumps. */ |
8164 | if (dump_file && (dump_flags & (TDF_DETAILS|TDF_ALIAS))) |
8165 | dump_alias_info (dump_file); |
8166 | |
8167 | /* Compute restrict-based memory disambiguations. */ |
8168 | compute_dependence_clique (); |
8169 | |
8170 | /* Deallocate memory used by aliasing data structures and the internal |
8171 | points-to solution. */ |
8172 | delete_points_to_sets (); |
8173 | |
8174 | gcc_assert (!need_ssa_update_p (cfun)); |
8175 | |
8176 | return 0; |
8177 | } |
8178 | |
8179 | /* A dummy pass to cause points-to information to be computed via |
8180 | TODO_rebuild_alias. */ |
8181 | |
8182 | namespace { |
8183 | |
8184 | const pass_data pass_data_build_alias = |
8185 | { |
8186 | .type: GIMPLE_PASS, /* type */ |
8187 | .name: "alias" , /* name */ |
8188 | .optinfo_flags: OPTGROUP_NONE, /* optinfo_flags */ |
8189 | .tv_id: TV_NONE, /* tv_id */ |
8190 | .properties_required: ( PROP_cfg | PROP_ssa ), /* properties_required */ |
8191 | .properties_provided: 0, /* properties_provided */ |
8192 | .properties_destroyed: 0, /* properties_destroyed */ |
8193 | .todo_flags_start: 0, /* todo_flags_start */ |
8194 | TODO_rebuild_alias, /* todo_flags_finish */ |
8195 | }; |
8196 | |
8197 | class pass_build_alias : public gimple_opt_pass |
8198 | { |
8199 | public: |
8200 | pass_build_alias (gcc::context *ctxt) |
8201 | : gimple_opt_pass (pass_data_build_alias, ctxt) |
8202 | {} |
8203 | |
8204 | /* opt_pass methods: */ |
8205 | bool gate (function *) final override { return flag_tree_pta; } |
8206 | |
8207 | }; // class pass_build_alias |
8208 | |
8209 | } // anon namespace |
8210 | |
8211 | gimple_opt_pass * |
8212 | make_pass_build_alias (gcc::context *ctxt) |
8213 | { |
8214 | return new pass_build_alias (ctxt); |
8215 | } |
8216 | |
8217 | /* A dummy pass to cause points-to information to be computed via |
8218 | TODO_rebuild_alias. */ |
8219 | |
8220 | namespace { |
8221 | |
8222 | const pass_data pass_data_build_ealias = |
8223 | { |
8224 | .type: GIMPLE_PASS, /* type */ |
8225 | .name: "ealias" , /* name */ |
8226 | .optinfo_flags: OPTGROUP_NONE, /* optinfo_flags */ |
8227 | .tv_id: TV_NONE, /* tv_id */ |
8228 | .properties_required: ( PROP_cfg | PROP_ssa ), /* properties_required */ |
8229 | .properties_provided: 0, /* properties_provided */ |
8230 | .properties_destroyed: 0, /* properties_destroyed */ |
8231 | .todo_flags_start: 0, /* todo_flags_start */ |
8232 | TODO_rebuild_alias, /* todo_flags_finish */ |
8233 | }; |
8234 | |
8235 | class pass_build_ealias : public gimple_opt_pass |
8236 | { |
8237 | public: |
8238 | pass_build_ealias (gcc::context *ctxt) |
8239 | : gimple_opt_pass (pass_data_build_ealias, ctxt) |
8240 | {} |
8241 | |
8242 | /* opt_pass methods: */ |
8243 | bool gate (function *) final override { return flag_tree_pta; } |
8244 | |
8245 | }; // class pass_build_ealias |
8246 | |
8247 | } // anon namespace |
8248 | |
8249 | gimple_opt_pass * |
8250 | make_pass_build_ealias (gcc::context *ctxt) |
8251 | { |
8252 | return new pass_build_ealias (ctxt); |
8253 | } |
8254 | |
8255 | |
8256 | /* IPA PTA solutions for ESCAPED. */ |
8257 | struct pt_solution ipa_escaped_pt |
8258 | = { .anything: true, .nonlocal: false, .escaped: false, .ipa_escaped: false, .null: false, .const_pool: false, |
8259 | .vars_contains_nonlocal: false, .vars_contains_escaped: false, .vars_contains_escaped_heap: false, .vars_contains_restrict: false, .vars_contains_interposable: false, NULL }; |
8260 | |
8261 | /* Associate node with varinfo DATA. Worker for |
8262 | cgraph_for_symbol_thunks_and_aliases. */ |
8263 | static bool |
8264 | associate_varinfo_to_alias (struct cgraph_node *node, void *data) |
8265 | { |
8266 | if ((node->alias |
8267 | || (node->thunk |
8268 | && ! node->inlined_to)) |
8269 | && node->analyzed |
8270 | && !node->ifunc_resolver) |
8271 | insert_vi_for_tree (t: node->decl, vi: (varinfo_t)data); |
8272 | return false; |
8273 | } |
8274 | |
8275 | /* Dump varinfo VI to FILE. */ |
8276 | |
8277 | static void |
8278 | dump_varinfo (FILE *file, varinfo_t vi) |
8279 | { |
8280 | if (vi == NULL) |
8281 | return; |
8282 | |
8283 | fprintf (stream: file, format: "%u: %s\n" , vi->id, vi->name); |
8284 | |
8285 | const char *sep = " " ; |
8286 | if (vi->is_artificial_var) |
8287 | fprintf (stream: file, format: "%sartificial" , sep); |
8288 | if (vi->is_special_var) |
8289 | fprintf (stream: file, format: "%sspecial" , sep); |
8290 | if (vi->is_unknown_size_var) |
8291 | fprintf (stream: file, format: "%sunknown-size" , sep); |
8292 | if (vi->is_full_var) |
8293 | fprintf (stream: file, format: "%sfull" , sep); |
8294 | if (vi->is_heap_var) |
8295 | fprintf (stream: file, format: "%sheap" , sep); |
8296 | if (vi->may_have_pointers) |
8297 | fprintf (stream: file, format: "%smay-have-pointers" , sep); |
8298 | if (vi->only_restrict_pointers) |
8299 | fprintf (stream: file, format: "%sonly-restrict-pointers" , sep); |
8300 | if (vi->is_restrict_var) |
8301 | fprintf (stream: file, format: "%sis-restrict-var" , sep); |
8302 | if (vi->is_global_var) |
8303 | fprintf (stream: file, format: "%sglobal" , sep); |
8304 | if (vi->is_ipa_escape_point) |
8305 | fprintf (stream: file, format: "%sipa-escape-point" , sep); |
8306 | if (vi->is_fn_info) |
8307 | fprintf (stream: file, format: "%sfn-info" , sep); |
8308 | if (vi->ruid) |
8309 | fprintf (stream: file, format: "%srestrict-uid:%u" , sep, vi->ruid); |
8310 | if (vi->next) |
8311 | fprintf (stream: file, format: "%snext:%u" , sep, vi->next); |
8312 | if (vi->head != vi->id) |
8313 | fprintf (stream: file, format: "%shead:%u" , sep, vi->head); |
8314 | if (vi->offset) |
8315 | fprintf (stream: file, format: "%soffset:" HOST_WIDE_INT_PRINT_DEC, sep, vi->offset); |
8316 | if (vi->size != ~HOST_WIDE_INT_0U) |
8317 | fprintf (stream: file, format: "%ssize:" HOST_WIDE_INT_PRINT_DEC, sep, vi->size); |
8318 | if (vi->fullsize != ~HOST_WIDE_INT_0U && vi->fullsize != vi->size) |
8319 | fprintf (stream: file, format: "%sfullsize:" HOST_WIDE_INT_PRINT_DEC, sep, |
8320 | vi->fullsize); |
8321 | fprintf (stream: file, format: "\n" ); |
8322 | |
8323 | if (vi->solution && !bitmap_empty_p (map: vi->solution)) |
8324 | { |
8325 | bitmap_iterator bi; |
8326 | unsigned i; |
8327 | fprintf (stream: file, format: " solution: {" ); |
8328 | EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi) |
8329 | fprintf (stream: file, format: " %u" , i); |
8330 | fprintf (stream: file, format: " }\n" ); |
8331 | } |
8332 | |
8333 | if (vi->oldsolution && !bitmap_empty_p (map: vi->oldsolution) |
8334 | && !bitmap_equal_p (vi->solution, vi->oldsolution)) |
8335 | { |
8336 | bitmap_iterator bi; |
8337 | unsigned i; |
8338 | fprintf (stream: file, format: " oldsolution: {" ); |
8339 | EXECUTE_IF_SET_IN_BITMAP (vi->oldsolution, 0, i, bi) |
8340 | fprintf (stream: file, format: " %u" , i); |
8341 | fprintf (stream: file, format: " }\n" ); |
8342 | } |
8343 | } |
8344 | |
8345 | /* Dump varinfo VI to stderr. */ |
8346 | |
8347 | DEBUG_FUNCTION void |
8348 | debug_varinfo (varinfo_t vi) |
8349 | { |
8350 | dump_varinfo (stderr, vi); |
8351 | } |
8352 | |
8353 | /* Dump varmap to FILE. */ |
8354 | |
8355 | static void |
8356 | dump_varmap (FILE *file) |
8357 | { |
8358 | if (varmap.length () == 0) |
8359 | return; |
8360 | |
8361 | fprintf (stream: file, format: "variables:\n" ); |
8362 | |
8363 | for (unsigned int i = 0; i < varmap.length (); ++i) |
8364 | { |
8365 | varinfo_t vi = get_varinfo (n: i); |
8366 | dump_varinfo (file, vi); |
8367 | } |
8368 | |
8369 | fprintf (stream: file, format: "\n" ); |
8370 | } |
8371 | |
8372 | /* Dump varmap to stderr. */ |
8373 | |
8374 | DEBUG_FUNCTION void |
8375 | debug_varmap (void) |
8376 | { |
8377 | dump_varmap (stderr); |
8378 | } |
8379 | |
8380 | /* Compute whether node is refered to non-locally. Worker for |
8381 | cgraph_for_symbol_thunks_and_aliases. */ |
8382 | static bool |
8383 | refered_from_nonlocal_fn (struct cgraph_node *node, void *data) |
8384 | { |
8385 | bool *nonlocal_p = (bool *)data; |
8386 | *nonlocal_p |= (node->used_from_other_partition |
8387 | || DECL_EXTERNAL (node->decl) |
8388 | || TREE_PUBLIC (node->decl) |
8389 | || node->force_output |
8390 | || lookup_attribute (attr_name: "noipa" , DECL_ATTRIBUTES (node->decl))); |
8391 | return false; |
8392 | } |
8393 | |
8394 | /* Same for varpool nodes. */ |
8395 | static bool |
8396 | refered_from_nonlocal_var (struct varpool_node *node, void *data) |
8397 | { |
8398 | bool *nonlocal_p = (bool *)data; |
8399 | *nonlocal_p |= (node->used_from_other_partition |
8400 | || DECL_EXTERNAL (node->decl) |
8401 | || TREE_PUBLIC (node->decl) |
8402 | || node->force_output); |
8403 | return false; |
8404 | } |
8405 | |
8406 | /* Execute the driver for IPA PTA. */ |
8407 | static unsigned int |
8408 | ipa_pta_execute (void) |
8409 | { |
8410 | struct cgraph_node *node; |
8411 | varpool_node *var; |
8412 | unsigned int from = 0; |
8413 | |
8414 | in_ipa_mode = 1; |
8415 | |
8416 | init_alias_vars (); |
8417 | |
8418 | if (dump_file && (dump_flags & TDF_DETAILS)) |
8419 | { |
8420 | symtab->dump (f: dump_file); |
8421 | fprintf (stream: dump_file, format: "\n" ); |
8422 | } |
8423 | |
8424 | if (dump_file && (dump_flags & TDF_DETAILS)) |
8425 | { |
8426 | fprintf (stream: dump_file, format: "Generating generic constraints\n\n" ); |
8427 | dump_constraints (file: dump_file, from); |
8428 | fprintf (stream: dump_file, format: "\n" ); |
8429 | from = constraints.length (); |
8430 | } |
8431 | |
8432 | /* Build the constraints. */ |
8433 | FOR_EACH_DEFINED_FUNCTION (node) |
8434 | { |
8435 | varinfo_t vi; |
8436 | /* Nodes without a body in this partition are not interesting. |
8437 | Especially do not visit clones at this point for now - we |
8438 | get duplicate decls there for inline clones at least. */ |
8439 | if (!node->has_gimple_body_p () |
8440 | || node->in_other_partition |
8441 | || node->inlined_to) |
8442 | continue; |
8443 | node->get_body (); |
8444 | |
8445 | gcc_assert (!node->clone_of); |
8446 | |
8447 | /* For externally visible or attribute used annotated functions use |
8448 | local constraints for their arguments. |
8449 | For local functions we see all callers and thus do not need initial |
8450 | constraints for parameters. */ |
8451 | bool nonlocal_p = (node->used_from_other_partition |
8452 | || DECL_EXTERNAL (node->decl) |
8453 | || TREE_PUBLIC (node->decl) |
8454 | || node->force_output |
8455 | || lookup_attribute (attr_name: "noipa" , |
8456 | DECL_ATTRIBUTES (node->decl))); |
8457 | node->call_for_symbol_thunks_and_aliases (callback: refered_from_nonlocal_fn, |
8458 | data: &nonlocal_p, include_overwritable: true); |
8459 | |
8460 | vi = create_function_info_for (decl: node->decl, |
8461 | name: alias_get_name (decl: node->decl), add_id: false, |
8462 | nonlocal_p); |
8463 | if (dump_file && (dump_flags & TDF_DETAILS) |
8464 | && from != constraints.length ()) |
8465 | { |
8466 | fprintf (stream: dump_file, |
8467 | format: "Generating initial constraints for %s" , |
8468 | node->dump_name ()); |
8469 | if (DECL_ASSEMBLER_NAME_SET_P (node->decl)) |
8470 | fprintf (stream: dump_file, format: " (%s)" , |
8471 | IDENTIFIER_POINTER |
8472 | (DECL_ASSEMBLER_NAME (node->decl))); |
8473 | fprintf (stream: dump_file, format: "\n\n" ); |
8474 | dump_constraints (file: dump_file, from); |
8475 | fprintf (stream: dump_file, format: "\n" ); |
8476 | |
8477 | from = constraints.length (); |
8478 | } |
8479 | |
8480 | node->call_for_symbol_thunks_and_aliases |
8481 | (callback: associate_varinfo_to_alias, data: vi, include_overwritable: true); |
8482 | } |
8483 | |
8484 | /* Create constraints for global variables and their initializers. */ |
8485 | FOR_EACH_VARIABLE (var) |
8486 | { |
8487 | if (var->alias && var->analyzed) |
8488 | continue; |
8489 | |
8490 | varinfo_t vi = get_vi_for_tree (t: var->decl); |
8491 | |
8492 | /* For the purpose of IPA PTA unit-local globals are not |
8493 | escape points. */ |
8494 | bool nonlocal_p = (DECL_EXTERNAL (var->decl) |
8495 | || TREE_PUBLIC (var->decl) |
8496 | || var->used_from_other_partition |
8497 | || var->force_output); |
8498 | var->call_for_symbol_and_aliases (callback: refered_from_nonlocal_var, |
8499 | data: &nonlocal_p, include_overwritable: true); |
8500 | if (nonlocal_p) |
8501 | vi->is_ipa_escape_point = true; |
8502 | } |
8503 | |
8504 | if (dump_file && (dump_flags & TDF_DETAILS) |
8505 | && from != constraints.length ()) |
8506 | { |
8507 | fprintf (stream: dump_file, |
8508 | format: "Generating constraints for global initializers\n\n" ); |
8509 | dump_constraints (file: dump_file, from); |
8510 | fprintf (stream: dump_file, format: "\n" ); |
8511 | from = constraints.length (); |
8512 | } |
8513 | |
8514 | FOR_EACH_DEFINED_FUNCTION (node) |
8515 | { |
8516 | struct function *func; |
8517 | basic_block bb; |
8518 | |
8519 | /* Nodes without a body in this partition are not interesting. */ |
8520 | if (!node->has_gimple_body_p () |
8521 | || node->in_other_partition |
8522 | || node->clone_of) |
8523 | continue; |
8524 | |
8525 | if (dump_file && (dump_flags & TDF_DETAILS)) |
8526 | { |
8527 | fprintf (stream: dump_file, |
8528 | format: "Generating constraints for %s" , node->dump_name ()); |
8529 | if (DECL_ASSEMBLER_NAME_SET_P (node->decl)) |
8530 | fprintf (stream: dump_file, format: " (%s)" , |
8531 | IDENTIFIER_POINTER |
8532 | (DECL_ASSEMBLER_NAME (node->decl))); |
8533 | fprintf (stream: dump_file, format: "\n" ); |
8534 | } |
8535 | |
8536 | func = DECL_STRUCT_FUNCTION (node->decl); |
8537 | gcc_assert (cfun == NULL); |
8538 | |
8539 | /* Build constriants for the function body. */ |
8540 | FOR_EACH_BB_FN (bb, func) |
8541 | { |
8542 | for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (i: gsi); |
8543 | gsi_next (i: &gsi)) |
8544 | { |
8545 | gphi *phi = gsi.phi (); |
8546 | |
8547 | if (! virtual_operand_p (op: gimple_phi_result (gs: phi))) |
8548 | find_func_aliases (fn: func, origt: phi); |
8549 | } |
8550 | |
8551 | for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (i: gsi); |
8552 | gsi_next (i: &gsi)) |
8553 | { |
8554 | gimple *stmt = gsi_stmt (i: gsi); |
8555 | |
8556 | find_func_aliases (fn: func, origt: stmt); |
8557 | find_func_clobbers (fn: func, origt: stmt); |
8558 | } |
8559 | } |
8560 | |
8561 | if (dump_file && (dump_flags & TDF_DETAILS)) |
8562 | { |
8563 | fprintf (stream: dump_file, format: "\n" ); |
8564 | dump_constraints (file: dump_file, from); |
8565 | fprintf (stream: dump_file, format: "\n" ); |
8566 | from = constraints.length (); |
8567 | } |
8568 | } |
8569 | |
8570 | /* From the constraints compute the points-to sets. */ |
8571 | solve_constraints (); |
8572 | |
8573 | if (dump_file && (dump_flags & TDF_STATS)) |
8574 | dump_sa_stats (outfile: dump_file); |
8575 | |
8576 | if (dump_file && (dump_flags & TDF_DETAILS)) |
8577 | dump_sa_points_to_info (outfile: dump_file); |
8578 | |
8579 | /* Now post-process solutions to handle locals from different |
8580 | runtime instantiations coming in through recursive invocations. */ |
8581 | unsigned shadow_var_cnt = 0; |
8582 | for (unsigned i = 1; i < varmap.length (); ++i) |
8583 | { |
8584 | varinfo_t fi = get_varinfo (n: i); |
8585 | if (fi->is_fn_info |
8586 | && fi->decl) |
8587 | /* Automatic variables pointed to by their containing functions |
8588 | parameters need this treatment. */ |
8589 | for (varinfo_t ai = first_vi_for_offset (start: fi, offset: fi_parm_base); |
8590 | ai; ai = vi_next (vi: ai)) |
8591 | { |
8592 | varinfo_t vi = get_varinfo (n: find (node: ai->id)); |
8593 | bitmap_iterator bi; |
8594 | unsigned j; |
8595 | EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi) |
8596 | { |
8597 | varinfo_t pt = get_varinfo (n: j); |
8598 | if (pt->shadow_var_uid == 0 |
8599 | && pt->decl |
8600 | && auto_var_in_fn_p (pt->decl, fi->decl)) |
8601 | { |
8602 | pt->shadow_var_uid = allocate_decl_uid (); |
8603 | shadow_var_cnt++; |
8604 | } |
8605 | } |
8606 | } |
8607 | /* As well as global variables which are another way of passing |
8608 | arguments to recursive invocations. */ |
8609 | else if (fi->is_global_var) |
8610 | { |
8611 | for (varinfo_t ai = fi; ai; ai = vi_next (vi: ai)) |
8612 | { |
8613 | varinfo_t vi = get_varinfo (n: find (node: ai->id)); |
8614 | bitmap_iterator bi; |
8615 | unsigned j; |
8616 | EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi) |
8617 | { |
8618 | varinfo_t pt = get_varinfo (n: j); |
8619 | if (pt->shadow_var_uid == 0 |
8620 | && pt->decl |
8621 | && auto_var_p (pt->decl)) |
8622 | { |
8623 | pt->shadow_var_uid = allocate_decl_uid (); |
8624 | shadow_var_cnt++; |
8625 | } |
8626 | } |
8627 | } |
8628 | } |
8629 | } |
8630 | if (shadow_var_cnt && dump_file && (dump_flags & TDF_DETAILS)) |
8631 | fprintf (stream: dump_file, format: "Allocated %u shadow variables for locals " |
8632 | "maybe leaking into recursive invocations of their containing " |
8633 | "functions\n" , shadow_var_cnt); |
8634 | |
8635 | /* Compute the global points-to sets for ESCAPED. |
8636 | ??? Note that the computed escape set is not correct |
8637 | for the whole unit as we fail to consider graph edges to |
8638 | externally visible functions. */ |
8639 | ipa_escaped_pt = find_what_var_points_to (NULL, orig_vi: get_varinfo (n: escaped_id)); |
8640 | |
8641 | /* Make sure the ESCAPED solution (which is used as placeholder in |
8642 | other solutions) does not reference itself. This simplifies |
8643 | points-to solution queries. */ |
8644 | ipa_escaped_pt.ipa_escaped = 0; |
8645 | |
8646 | /* Assign the points-to sets to the SSA names in the unit. */ |
8647 | FOR_EACH_DEFINED_FUNCTION (node) |
8648 | { |
8649 | tree ptr; |
8650 | struct function *fn; |
8651 | unsigned i; |
8652 | basic_block bb; |
8653 | |
8654 | /* Nodes without a body in this partition are not interesting. */ |
8655 | if (!node->has_gimple_body_p () |
8656 | || node->in_other_partition |
8657 | || node->clone_of) |
8658 | continue; |
8659 | |
8660 | fn = DECL_STRUCT_FUNCTION (node->decl); |
8661 | |
8662 | /* Compute the points-to sets for pointer SSA_NAMEs. */ |
8663 | FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr) |
8664 | { |
8665 | if (ptr |
8666 | && POINTER_TYPE_P (TREE_TYPE (ptr))) |
8667 | find_what_p_points_to (fndecl: node->decl, p: ptr); |
8668 | } |
8669 | |
8670 | /* Compute the call-use and call-clobber sets for indirect calls |
8671 | and calls to external functions. */ |
8672 | FOR_EACH_BB_FN (bb, fn) |
8673 | { |
8674 | gimple_stmt_iterator gsi; |
8675 | |
8676 | for (gsi = gsi_start_bb (bb); !gsi_end_p (i: gsi); gsi_next (i: &gsi)) |
8677 | { |
8678 | gcall *stmt; |
8679 | struct pt_solution *pt; |
8680 | varinfo_t vi, fi; |
8681 | tree decl; |
8682 | |
8683 | stmt = dyn_cast <gcall *> (p: gsi_stmt (i: gsi)); |
8684 | if (!stmt) |
8685 | continue; |
8686 | |
8687 | /* Handle direct calls to functions with body. */ |
8688 | decl = gimple_call_fndecl (gs: stmt); |
8689 | |
8690 | { |
8691 | tree called_decl = NULL_TREE; |
8692 | if (gimple_call_builtin_p (stmt, BUILT_IN_GOMP_PARALLEL)) |
8693 | called_decl = TREE_OPERAND (gimple_call_arg (stmt, 0), 0); |
8694 | else if (gimple_call_builtin_p (stmt, BUILT_IN_GOACC_PARALLEL)) |
8695 | called_decl = TREE_OPERAND (gimple_call_arg (stmt, 1), 0); |
8696 | |
8697 | if (called_decl != NULL_TREE |
8698 | && !fndecl_maybe_in_other_partition (fndecl: called_decl)) |
8699 | decl = called_decl; |
8700 | } |
8701 | |
8702 | if (decl |
8703 | && (fi = lookup_vi_for_tree (t: decl)) |
8704 | && fi->is_fn_info) |
8705 | { |
8706 | *gimple_call_clobber_set (call_stmt: stmt) |
8707 | = find_what_var_points_to |
8708 | (fndecl: node->decl, orig_vi: first_vi_for_offset (start: fi, offset: fi_clobbers)); |
8709 | *gimple_call_use_set (call_stmt: stmt) |
8710 | = find_what_var_points_to |
8711 | (fndecl: node->decl, orig_vi: first_vi_for_offset (start: fi, offset: fi_uses)); |
8712 | } |
8713 | /* Handle direct calls to external functions. */ |
8714 | else if (decl && (!fi || fi->decl)) |
8715 | { |
8716 | pt = gimple_call_use_set (call_stmt: stmt); |
8717 | if (gimple_call_flags (stmt) & ECF_CONST) |
8718 | memset (s: pt, c: 0, n: sizeof (struct pt_solution)); |
8719 | else if ((vi = lookup_call_use_vi (call: stmt)) != NULL) |
8720 | { |
8721 | *pt = find_what_var_points_to (fndecl: node->decl, orig_vi: vi); |
8722 | /* Escaped (and thus nonlocal) variables are always |
8723 | implicitly used by calls. */ |
8724 | /* ??? ESCAPED can be empty even though NONLOCAL |
8725 | always escaped. */ |
8726 | pt->nonlocal = 1; |
8727 | pt->ipa_escaped = 1; |
8728 | } |
8729 | else |
8730 | { |
8731 | /* If there is nothing special about this call then |
8732 | we have made everything that is used also escape. */ |
8733 | *pt = ipa_escaped_pt; |
8734 | pt->nonlocal = 1; |
8735 | } |
8736 | |
8737 | pt = gimple_call_clobber_set (call_stmt: stmt); |
8738 | if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS)) |
8739 | memset (s: pt, c: 0, n: sizeof (struct pt_solution)); |
8740 | else if ((vi = lookup_call_clobber_vi (call: stmt)) != NULL) |
8741 | { |
8742 | *pt = find_what_var_points_to (fndecl: node->decl, orig_vi: vi); |
8743 | /* Escaped (and thus nonlocal) variables are always |
8744 | implicitly clobbered by calls. */ |
8745 | /* ??? ESCAPED can be empty even though NONLOCAL |
8746 | always escaped. */ |
8747 | pt->nonlocal = 1; |
8748 | pt->ipa_escaped = 1; |
8749 | } |
8750 | else |
8751 | { |
8752 | /* If there is nothing special about this call then |
8753 | we have made everything that is used also escape. */ |
8754 | *pt = ipa_escaped_pt; |
8755 | pt->nonlocal = 1; |
8756 | } |
8757 | } |
8758 | /* Handle indirect calls. */ |
8759 | else if ((fi = get_fi_for_callee (call: stmt))) |
8760 | { |
8761 | /* We need to accumulate all clobbers/uses of all possible |
8762 | callees. */ |
8763 | fi = get_varinfo (n: find (node: fi->id)); |
8764 | /* If we cannot constrain the set of functions we'll end up |
8765 | calling we end up using/clobbering everything. */ |
8766 | if (bitmap_bit_p (fi->solution, anything_id) |
8767 | || bitmap_bit_p (fi->solution, nonlocal_id) |
8768 | || bitmap_bit_p (fi->solution, escaped_id)) |
8769 | { |
8770 | pt_solution_reset (pt: gimple_call_clobber_set (call_stmt: stmt)); |
8771 | pt_solution_reset (pt: gimple_call_use_set (call_stmt: stmt)); |
8772 | } |
8773 | else |
8774 | { |
8775 | bitmap_iterator bi; |
8776 | unsigned i; |
8777 | struct pt_solution *uses, *clobbers; |
8778 | |
8779 | uses = gimple_call_use_set (call_stmt: stmt); |
8780 | clobbers = gimple_call_clobber_set (call_stmt: stmt); |
8781 | memset (s: uses, c: 0, n: sizeof (struct pt_solution)); |
8782 | memset (s: clobbers, c: 0, n: sizeof (struct pt_solution)); |
8783 | EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi) |
8784 | { |
8785 | struct pt_solution sol; |
8786 | |
8787 | vi = get_varinfo (n: i); |
8788 | if (!vi->is_fn_info) |
8789 | { |
8790 | /* ??? We could be more precise here? */ |
8791 | uses->nonlocal = 1; |
8792 | uses->ipa_escaped = 1; |
8793 | clobbers->nonlocal = 1; |
8794 | clobbers->ipa_escaped = 1; |
8795 | continue; |
8796 | } |
8797 | |
8798 | if (!uses->anything) |
8799 | { |
8800 | sol = find_what_var_points_to |
8801 | (fndecl: node->decl, |
8802 | orig_vi: first_vi_for_offset (start: vi, offset: fi_uses)); |
8803 | pt_solution_ior_into (dest: uses, src: &sol); |
8804 | } |
8805 | if (!clobbers->anything) |
8806 | { |
8807 | sol = find_what_var_points_to |
8808 | (fndecl: node->decl, |
8809 | orig_vi: first_vi_for_offset (start: vi, offset: fi_clobbers)); |
8810 | pt_solution_ior_into (dest: clobbers, src: &sol); |
8811 | } |
8812 | } |
8813 | } |
8814 | } |
8815 | else |
8816 | gcc_unreachable (); |
8817 | } |
8818 | } |
8819 | |
8820 | fn->gimple_df->ipa_pta = true; |
8821 | |
8822 | /* We have to re-set the final-solution cache after each function |
8823 | because what is a "global" is dependent on function context. */ |
8824 | final_solutions->empty (); |
8825 | obstack_free (&final_solutions_obstack, NULL); |
8826 | gcc_obstack_init (&final_solutions_obstack); |
8827 | } |
8828 | |
8829 | delete_points_to_sets (); |
8830 | |
8831 | in_ipa_mode = 0; |
8832 | |
8833 | return 0; |
8834 | } |
8835 | |
8836 | namespace { |
8837 | |
8838 | const pass_data pass_data_ipa_pta = |
8839 | { |
8840 | .type: SIMPLE_IPA_PASS, /* type */ |
8841 | .name: "pta" , /* name */ |
8842 | .optinfo_flags: OPTGROUP_NONE, /* optinfo_flags */ |
8843 | .tv_id: TV_IPA_PTA, /* tv_id */ |
8844 | .properties_required: 0, /* properties_required */ |
8845 | .properties_provided: 0, /* properties_provided */ |
8846 | .properties_destroyed: 0, /* properties_destroyed */ |
8847 | .todo_flags_start: 0, /* todo_flags_start */ |
8848 | .todo_flags_finish: 0, /* todo_flags_finish */ |
8849 | }; |
8850 | |
8851 | class pass_ipa_pta : public simple_ipa_opt_pass |
8852 | { |
8853 | public: |
8854 | pass_ipa_pta (gcc::context *ctxt) |
8855 | : simple_ipa_opt_pass (pass_data_ipa_pta, ctxt) |
8856 | {} |
8857 | |
8858 | /* opt_pass methods: */ |
8859 | bool gate (function *) final override |
8860 | { |
8861 | return (optimize |
8862 | && flag_ipa_pta |
8863 | /* Don't bother doing anything if the program has errors. */ |
8864 | && !seen_error ()); |
8865 | } |
8866 | |
8867 | opt_pass * clone () final override { return new pass_ipa_pta (m_ctxt); } |
8868 | |
8869 | unsigned int execute (function *) final override |
8870 | { |
8871 | return ipa_pta_execute (); |
8872 | } |
8873 | |
8874 | }; // class pass_ipa_pta |
8875 | |
8876 | } // anon namespace |
8877 | |
8878 | simple_ipa_opt_pass * |
8879 | make_pass_ipa_pta (gcc::context *ctxt) |
8880 | { |
8881 | return new pass_ipa_pta (ctxt); |
8882 | } |
8883 | |