1 | /* Tree based points-to analysis |
2 | Copyright (C) 2005-2023 Free Software Foundation, Inc. |
3 | Contributed by Daniel Berlin <dberlin@dberlin.org> |
4 | |
5 | This file is part of GCC. |
6 | |
GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
9 | the Free Software Foundation; either version 3 of the License, or |
10 | (at your option) any later version. |
11 | |
12 | GCC is distributed in the hope that it will be useful, |
13 | but WITHOUT ANY WARRANTY; without even the implied warranty of |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
15 | GNU General Public License for more details. |
16 | |
17 | You should have received a copy of the GNU General Public License |
18 | along with GCC; see the file COPYING3. If not see |
19 | <http://www.gnu.org/licenses/>. */ |
20 | |
21 | #include "config.h" |
22 | #include "system.h" |
23 | #include "coretypes.h" |
24 | #include "backend.h" |
25 | #include "rtl.h" |
26 | #include "tree.h" |
27 | #include "gimple.h" |
28 | #include "alloc-pool.h" |
29 | #include "tree-pass.h" |
30 | #include "ssa.h" |
31 | #include "cgraph.h" |
32 | #include "tree-pretty-print.h" |
33 | #include "diagnostic-core.h" |
34 | #include "fold-const.h" |
35 | #include "stor-layout.h" |
36 | #include "stmt.h" |
37 | #include "gimple-iterator.h" |
38 | #include "tree-into-ssa.h" |
39 | #include "tree-dfa.h" |
40 | #include "gimple-walk.h" |
41 | #include "varasm.h" |
42 | #include "stringpool.h" |
43 | #include "attribs.h" |
44 | #include "tree-ssa.h" |
45 | #include "tree-cfg.h" |
46 | #include "gimple-range.h" |
47 | #include "ipa-modref-tree.h" |
48 | #include "ipa-modref.h" |
49 | #include "attr-fnspec.h" |
50 | |
51 | /* The idea behind this analyzer is to generate set constraints from the |
52 | program, then solve the resulting constraints in order to generate the |
53 | points-to sets. |
54 | |
55 | Set constraints are a way of modeling program analysis problems that |
56 | involve sets. They consist of an inclusion constraint language, |
57 | describing the variables (each variable is a set) and operations that |
58 | are involved on the variables, and a set of rules that derive facts |
59 | from these operations. To solve a system of set constraints, you derive |
60 | all possible facts under the rules, which gives you the correct sets |
61 | as a consequence. |
62 | |
63 | See "Efficient Field-sensitive pointer analysis for C" by "David |
64 | J. Pearce and Paul H. J. Kelly and Chris Hankin", at |
65 | http://citeseer.ist.psu.edu/pearce04efficient.html |
66 | |
67 | Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines |
68 | of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at |
69 | http://citeseer.ist.psu.edu/heintze01ultrafast.html |
70 | |
71 | There are three types of real constraint expressions, DEREF, |
72 | ADDRESSOF, and SCALAR. Each constraint expression consists |
73 | of a constraint type, a variable, and an offset. |
74 | |
75 | SCALAR is a constraint expression type used to represent x, whether |
76 | it appears on the LHS or the RHS of a statement. |
77 | DEREF is a constraint expression type used to represent *x, whether |
78 | it appears on the LHS or the RHS of a statement. |
79 | ADDRESSOF is a constraint expression used to represent &x, whether |
80 | it appears on the LHS or the RHS of a statement. |
81 | |
82 | Each pointer variable in the program is assigned an integer id, and |
83 | each field of a structure variable is assigned an integer id as well. |
84 | |
85 | Structure variables are linked to their list of fields through a "next |
86 | field" in each variable that points to the next field in offset |
87 | order. |
88 | Each variable for a structure field has |
89 | |
90 | 1. "size", that tells the size in bits of that field. |
91 | 2. "fullsize", that tells the size in bits of the entire structure. |
92 | 3. "offset", that tells the offset in bits from the beginning of the |
93 | structure to this field. |
94 | |
95 | Thus, |
96 | struct f |
97 | { |
98 | int a; |
99 | int b; |
100 | } foo; |
101 | int *bar; |
102 | |
103 | looks like |
104 | |
105 | foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b |
106 | foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL |
107 | bar -> id 3, size 32, offset 0, fullsize 32, next NULL |
108 | |
109 | |
110 | In order to solve the system of set constraints, the following is |
111 | done: |
112 | |
113 | 1. Each constraint variable x has a solution set associated with it, |
114 | Sol(x). |
115 | |
116 | 2. Constraints are separated into direct, copy, and complex. |
117 | Direct constraints are ADDRESSOF constraints that require no extra |
118 | processing, such as P = &Q |
119 | Copy constraints are those of the form P = Q. |
120 | Complex constraints are all the constraints involving dereferences |
121 | and offsets (including offsetted copies). |
122 | |
123 | 3. All direct constraints of the form P = &Q are processed, such |
124 | that Q is added to Sol(P) |
125 | |
126 | 4. All complex constraints for a given constraint variable are stored in a |
127 | linked list attached to that variable's node. |
128 | |
129 | 5. A directed graph is built out of the copy constraints. Each |
130 | constraint variable is a node in the graph, and an edge from |
131 | Q to P is added for each copy constraint of the form P = Q |
132 | |
133 | 6. The graph is then walked, and solution sets are |
134 | propagated along the copy edges, such that an edge from Q to P |
135 | causes Sol(P) <- Sol(P) union Sol(Q). |
136 | |
137 | 7. As we visit each node, all complex constraints associated with |
138 | that node are processed by adding appropriate copy edges to the graph, or the |
139 | appropriate variables to the solution set. |
140 | |
141 | 8. The process of walking the graph is iterated until no solution |
142 | sets change. |
143 | |
   Prior to walking the graph in steps 6 and 7, we perform static
145 | cycle elimination on the constraint graph, as well |
146 | as off-line variable substitution. |
147 | |
148 | TODO: Adding offsets to pointer-to-structures can be handled (IE not punted |
149 | on and turned into anything), but isn't. You can just see what offset |
150 | inside the pointed-to struct it's going to access. |
151 | |
152 | TODO: Constant bounded arrays can be handled as if they were structs of the |
153 | same number of elements. |
154 | |
155 | TODO: Modeling heap and incoming pointers becomes much better if we |
156 | add fields to them as we discover them, which we could do. |
157 | |
158 | TODO: We could handle unions, but to be honest, it's probably not |
159 | worth the pain or slowdown. */ |
160 | |
161 | /* IPA-PTA optimizations possible. |
162 | |
163 | When the indirect function called is ANYTHING we can add disambiguation |
164 | based on the function signatures (or simply the parameter count which |
165 | is the varinfo size). We also do not need to consider functions that |
166 | do not have their address taken. |
167 | |
168 | The is_global_var bit which marks escape points is overly conservative |
169 | in IPA mode. Split it to is_escape_point and is_global_var - only |
170 | externally visible globals are escape points in IPA mode. |
171 | There is now is_ipa_escape_point but this is only used in a few |
172 | selected places. |
173 | |
174 | The way we introduce DECL_PT_UID to avoid fixing up all points-to |
175 | sets in the translation unit when we copy a DECL during inlining |
176 | pessimizes precision. The advantage is that the DECL_PT_UID keeps |
177 | compile-time and memory usage overhead low - the points-to sets |
178 | do not grow or get unshared as they would during a fixup phase. |
179 | An alternative solution is to delay IPA PTA until after all |
180 | inlining transformations have been applied. |
181 | |
182 | The way we propagate clobber/use information isn't optimized. |
183 | It should use a new complex constraint that properly filters |
184 | out local variables of the callee (though that would make |
185 | the sets invalid after inlining). OTOH we might as well |
186 | admit defeat to WHOPR and simply do all the clobber/use analysis |
187 | and propagation after PTA finished but before we threw away |
188 | points-to information for memory variables. WHOPR and PTA |
189 | do not play along well anyway - the whole constraint solving |
190 | would need to be done in WPA phase and it will be very interesting |
191 | to apply the results to local SSA names during LTRANS phase. |
192 | |
193 | We probably should compute a per-function unit-ESCAPE solution |
194 | propagating it simply like the clobber / uses solutions. The |
195 | solution can go alongside the non-IPA escaped solution and be |
196 | used to query which vars escape the unit through a function. |
197 | This is also required to make the escaped-HEAP trick work in IPA mode. |
198 | |
199 | We never put function decls in points-to sets so we do not |
200 | keep the set of called functions for indirect calls. |
201 | |
202 | And probably more. */ |
203 | |
/* True when the analysis tracks individual fields of structures
   (field-sensitive mode).  */
static bool use_field_sensitive = true;
/* Nonzero when running the whole-program (IPA) flavor of the
   analysis.  */
static int in_ipa_mode = 0;

/* Used for predecessor bitmaps. */
static bitmap_obstack predbitmap_obstack;

/* Used for points-to sets. */
static bitmap_obstack pta_obstack;

/* Used for oldsolution members of variables. */
static bitmap_obstack oldpta_obstack;

/* Used for per-solver-iteration bitmaps. */
static bitmap_obstack iteration_obstack;

/* Forward declarations; the definitions appear later in this file.  */
static unsigned int create_variable_info_for (tree, const char *, bool);
typedef struct constraint_graph *constraint_graph_t;
static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);

struct constraint;
typedef struct constraint *constraint_t;


/* Iterate over the set bits of bitmap A like EXECUTE_IF_SET_IN_BITMAP,
   but do nothing when A is NULL.  NOTE: this expands to a bare `if'
   with no `else', so beware of dangling-else when using it inside
   another conditional.  */
#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
  if (a)						\
    EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
230 | |
/* Counters describing the work done by the constraint builder and
   solver; for statistics dumping.  */
static struct constraint_stats
{
  /* Number of constraint variables created (incremented by
     new_var_info).  */
  unsigned int total_vars;
  /* Variables found not to contain pointers.  */
  unsigned int nonpointer_vars;
  /* Variables unified before solving (offline substitution).  */
  unsigned int unified_vars_static;
  /* Variables unified during solving (online cycle elimination).  */
  unsigned int unified_vars_dynamic;
  /* Number of solver iterations.  */
  unsigned int iterations;
  /* Number of graph edges added.  */
  unsigned int num_edges;
  /* Number of implicit edges (used for variable substitution).  */
  unsigned int num_implicit_edges;
  /* Number of edges whose addition was avoided as redundant.  */
  unsigned int num_avoided_edges;
  /* Number of final points-to sets created.  */
  unsigned int points_to_sets_created;
} stats;
243 | |
/* Per constraint-variable information.  One of these exists for every
   constraint variable: SSA names and decls (or fields thereof) as
   well as the artificial variables the analysis creates.  Instances
   live in variable_info_pool and are indexed by ID in varmap.  */

struct variable_info
{
  /* ID of this variable; equals this variable's index in varmap.  */
  unsigned int id;

  /* True if this is a variable created by the constraint analysis, such as
     heap variables and constraints we had to break up. */
  unsigned int is_artificial_var : 1;

  /* True if this is a special variable whose solution set should not be
     changed. */
  unsigned int is_special_var : 1;

  /* True for variables whose size is not known or variable. */
  unsigned int is_unknown_size_var : 1;

  /* True for (sub-)fields that represent a whole variable. */
  unsigned int is_full_var : 1;

  /* True if this is a heap variable. */
  unsigned int is_heap_var : 1;

  /* True if this is a register variable. */
  unsigned int is_reg_var : 1;

  /* True if this field may contain pointers. */
  unsigned int may_have_pointers : 1;

  /* True if this field has only restrict qualified pointers. */
  unsigned int only_restrict_pointers : 1;

  /* True if this represents a heap var created for a restrict qualified
     pointer. */
  unsigned int is_restrict_var : 1;

  /* True if this represents a global variable. */
  unsigned int is_global_var : 1;

  /* True if this represents a module escape point for IPA analysis. */
  unsigned int is_ipa_escape_point : 1;

  /* True if this represents an IPA function info. */
  unsigned int is_fn_info : 1;

  /* True if this appears as RHS in a ADDRESSOF constraint. */
  unsigned int address_taken : 1;

  /* ??? Store somewhere better. */
  unsigned short ruid;

  /* The ID of the variable for the next field in this structure
     or zero for the last field in this structure.  (Variable ID
     zero is unused, so it can serve as the chain terminator.)  */
  unsigned next;

  /* The ID of the variable for the first field in this structure. */
  unsigned head;

  /* Offset of this variable, in bits, from the base variable */
  unsigned HOST_WIDE_INT offset;

  /* Size of the variable, in bits. */
  unsigned HOST_WIDE_INT size;

  /* Full size of the base variable, in bits. */
  unsigned HOST_WIDE_INT fullsize;

  /* In IPA mode the shadow UID in case the variable needs to be duplicated in
     the final points-to solution because it reaches its containing
     function recursively. Zero if none is needed. */
  unsigned int shadow_var_uid;

  /* Name of this variable */
  const char *name;

  /* Tree that this variable is associated with. */
  tree decl;

  /* Points-to set for this variable. */
  bitmap solution;

  /* Old points-to set for this variable. */
  bitmap oldsolution;
};
typedef struct variable_info *varinfo_t;
328 | |
/* Forward declarations of variable-info queries defined later.  */
static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
						   unsigned HOST_WIDE_INT);
static varinfo_t lookup_vi_for_tree (tree);
static inline bool type_can_have_subvars (const_tree);
static void make_param_constraints (varinfo_t);

/* Pool of variable info structures. */
static object_allocator<variable_info> variable_info_pool
  ("Variable info pool" );

/* Map varinfo to final pt_solution. */
static hash_map<varinfo_t, pt_solution *> *final_solutions;
/* Obstack the final pt_solutions are allocated on.  */
struct obstack final_solutions_obstack;

/* Table of variable info structures for constraint variables.
   Indexed directly by variable info id. */
static vec<varinfo_t> varmap;
347 | |
/* Return the variable info with id N, i.e. the varmap element N.  */

static inline varinfo_t
get_varinfo (unsigned int n)
{
  return varmap[n];
}
355 | |
356 | /* Return the next variable in the list of sub-variables of VI |
357 | or NULL if VI is the last sub-variable. */ |
358 | |
359 | static inline varinfo_t |
360 | vi_next (varinfo_t vi) |
361 | { |
362 | return get_varinfo (n: vi->next); |
363 | } |
364 | |
/* Static IDs for the special variables.  Variable ID zero is unused
   and used as terminator for the sub-variable chain.  These ids are
   ordinary varmap indices.  */
enum { nothing_id = 1, anything_id = 2, string_id = 3,
       escaped_id = 4, nonlocal_id = 5,
       storedanything_id = 6, integer_id = 7 };
370 | |
/* Return a new variable info structure for a variable associated with
   tree T (or no tree, if T is NULL_TREE) and named NAME, and append
   it to the vector of variable info structures.  When ADD_ID and
   dumping is enabled, make the name unique by appending the new id.  */

static varinfo_t
new_var_info (tree t, const char *name, bool add_id)
{
  /* The new variable's id is its position in varmap.  */
  unsigned index = varmap.length ();
  varinfo_t ret = variable_info_pool.allocate ();

  if (dump_file && add_id)
    {
      /* The name must outlive this function; duplicate it into GC
	 memory and release the temporary.  */
      char *tempname = xasprintf ("%s(%d)" , name, index);
      name = ggc_strdup (tempname);
      free (ptr: tempname);
    }

  ret->id = index;
  ret->name = name;
  ret->decl = t;
  /* Vars without decl are artificial and do not have sub-variables. */
  ret->is_artificial_var = (t == NULL_TREE);
  ret->is_special_var = false;
  ret->is_unknown_size_var = false;
  ret->is_full_var = (t == NULL_TREE);
  ret->is_heap_var = false;
  ret->may_have_pointers = true;
  ret->only_restrict_pointers = false;
  ret->is_restrict_var = false;
  ret->ruid = 0;
  /* Artificial vars default to global; for decls the flag is
     refined below.  */
  ret->is_global_var = (t == NULL_TREE);
  ret->is_ipa_escape_point = false;
  ret->is_fn_info = false;
  ret->address_taken = false;
  if (t && DECL_P (t))
    ret->is_global_var = (is_global_var (t)
			  /* We have to treat even local register variables
			     as escape points. */
			  || (VAR_P (t) && DECL_HARD_REGISTER (t)));
  ret->is_reg_var = (t && TREE_CODE (t) == SSA_NAME);
  ret->solution = BITMAP_ALLOC (obstack: &pta_obstack);
  ret->oldsolution = NULL;
  ret->next = 0;
  ret->shadow_var_uid = 0;
  /* A freshly created variable is the sole field of itself.  */
  ret->head = ret->id;

  stats.total_vars++;

  varmap.safe_push (obj: ret);

  return ret;
}
423 | |
/* A map mapping call statements to per-stmt variables for uses
   and clobbers specific to the call.  The mapped variable is the
   "uses" varinfo; the "clobbers" varinfo is chained behind it via
   its next field.  */
static hash_map<gimple *, varinfo_t> *call_stmt_vars;
427 | |
/* Lookup or create the variable for the call statement CALL.  Two
   varinfos are created on demand: one for the uses (returned) and one
   for the clobbers, chained to the first via its next field so that
   vi_next reaches it.  They are modeled as the two unit-sized fields
   of a fake fullsize-2 variable.  */

static varinfo_t
get_call_vi (gcall *call)
{
  varinfo_t vi, vi2;

  bool existed;
  varinfo_t *slot_p = &call_stmt_vars->get_or_insert (k: call, existed: &existed);
  if (existed)
    return *slot_p;

  /* The uses variable occupies "offset 0" of the fake variable.  */
  vi = new_var_info (NULL_TREE, name: "CALLUSED" , add_id: true);
  vi->offset = 0;
  vi->size = 1;
  vi->fullsize = 2;
  vi->is_full_var = true;
  vi->is_reg_var = true;

  /* The clobbers variable occupies "offset 1".  */
  vi2 = new_var_info (NULL_TREE, name: "CALLCLOBBERED" , add_id: true);
  vi2->offset = 1;
  vi2->size = 1;
  vi2->fullsize = 2;
  vi2->is_full_var = true;
  vi2->is_reg_var = true;

  /* Chain the clobbers varinfo behind the uses varinfo.  */
  vi->next = vi2->id;

  *slot_p = vi;
  return vi;
}
459 | |
460 | /* Lookup the variable for the call statement CALL representing |
461 | the uses. Returns NULL if there is nothing special about this call. */ |
462 | |
463 | static varinfo_t |
464 | lookup_call_use_vi (gcall *call) |
465 | { |
466 | varinfo_t *slot_p = call_stmt_vars->get (k: call); |
467 | if (slot_p) |
468 | return *slot_p; |
469 | |
470 | return NULL; |
471 | } |
472 | |
473 | /* Lookup the variable for the call statement CALL representing |
474 | the clobbers. Returns NULL if there is nothing special about this call. */ |
475 | |
476 | static varinfo_t |
477 | lookup_call_clobber_vi (gcall *call) |
478 | { |
479 | varinfo_t uses = lookup_call_use_vi (call); |
480 | if (!uses) |
481 | return NULL; |
482 | |
483 | return vi_next (vi: uses); |
484 | } |
485 | |
/* Lookup or create the variable for the call statement CALL representing
   the uses.  This is the first of the two chained call varinfos.  */

static varinfo_t
get_call_use_vi (gcall *call)
{
  return get_call_vi (call);
}
494 | |
/* Lookup or create the variable for the call statement CALL representing
   the clobbers.  This is the second of the two chained call varinfos.  */

static varinfo_t ATTRIBUTE_UNUSED
get_call_clobber_vi (gcall *call)
{
  return vi_next (vi: get_call_vi (call));
}
503 | |
504 | |
/* The three kinds of constraint expression: x (SCALAR), *x (DEREF)
   and &x (ADDRESSOF).  */
enum constraint_expr_type {SCALAR, DEREF, ADDRESSOF};

/* An expression that appears in a constraint. */

struct constraint_expr
{
  /* Constraint type. */
  constraint_expr_type type;

  /* Variable we are referring to in the constraint. */
  unsigned int var;

  /* Offset, in bits, of this constraint from the beginning of
     variables it ends up referring to.

     IOW, in a deref constraint, we would deref, get the result set,
     then add OFFSET to each member. */
  HOST_WIDE_INT offset;
};

/* Use 0x8000... as special unknown offset. */
#define UNKNOWN_OFFSET HOST_WIDE_INT_MIN

typedef struct constraint_expr ce_s;
/* Forward declarations of the constraint-building helpers defined
   later in this file.  */
static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
static void get_constraint_for (tree, vec<ce_s> *);
static void get_constraint_for_rhs (tree, vec<ce_s> *);
static void do_deref (vec<ce_s> *);
533 | |
/* Our set constraints are made up of two constraint expressions, one
   LHS, and one RHS.

   As described in the introduction, our set constraints each represent an
   operation between set valued variables.
*/
struct constraint
{
  struct constraint_expr lhs;
  struct constraint_expr rhs;
};

/* List of constraints that we use to build the constraint graph from. */

static vec<constraint_t> constraints;
/* Allocation pool all constraints are allocated from.  */
static object_allocator<constraint> constraint_pool ("Constraint pool" );
550 | |
/* The constraint graph is represented as an array of bitmaps
   containing successor nodes.  All per-node arrays below are indexed
   by node id and have SIZE elements.  */

struct constraint_graph
{
  /* Size of this graph, which may be different than the number of
     nodes in the variable map. */
  unsigned int size;

  /* Explicit successors of each node. */
  bitmap *succs;

  /* Implicit predecessors of each node (Used for variable
     substitution). */
  bitmap *implicit_preds;

  /* Explicit predecessors of each node (Used for variable substitution). */
  bitmap *preds;

  /* Indirect cycle representatives, or -1 if the node has no indirect
     cycles. */
  int *indirect_cycles;

  /* Representative node for a node.  rep[a] == a unless the node has
     been unified (union-find structure, see find and unite). */
  unsigned int *rep;

  /* Equivalence class representative for a label. This is used for
     variable substitution. */
  int *eq_rep;

  /* Pointer equivalence label for a node. All nodes with the same
     pointer equivalence label can be unified together at some point
     (either during constraint optimization or after the constraint
     graph is built). */
  unsigned int *pe;

  /* Pointer equivalence representative for a label. This is used to
     handle nodes that are pointer equivalent but not location
     equivalent. We can unite these once the addressof constraints
     are transformed into initial points-to sets. */
  int *pe_rep;

  /* Pointer equivalence label for each node, used during variable
     substitution. */
  unsigned int *pointer_label;

  /* Location equivalence label for each node, used during location
     equivalence finding. */
  unsigned int *loc_label;

  /* Pointed-by set for each node, used during location equivalence
     finding. This is pointed-by rather than pointed-to, because it
     is constructed using the predecessor graph. */
  bitmap *pointed_by;

  /* Points to sets for pointer equivalence. This is *not* the actual
     points-to sets for nodes. */
  bitmap *points_to;

  /* Bitmap of nodes where the bit is set if the node is a direct
     node. Used for variable substitution. */
  sbitmap direct_nodes;

  /* Bitmap of nodes where the bit is set if the node is address
     taken. Used for variable substitution. */
  bitmap address_taken;

  /* Vector of complex constraints for each graph node. Complex
     constraints are those involving dereferences or offsets that are
     not 0. */
  vec<constraint_t> *complex;
};

/* The single constraint graph the solver works on.  */
static constraint_graph_t graph;
626 | |
/* During variable substitution and the offline version of indirect
   cycle finding, we create nodes to represent dereferences and
   address taken constraints. These represent where these start and
   end.  Node id FIRST_REF_NODE + n stands for the dereference of
   variable n; ref node ids thus occupy [FIRST_REF_NODE, LAST_REF_NODE]. */
#define FIRST_REF_NODE (varmap).length ()
#define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
633 | |
634 | /* Return the representative node for NODE, if NODE has been unioned |
635 | with another NODE. |
636 | This function performs path compression along the way to finding |
637 | the representative. */ |
638 | |
639 | static unsigned int |
640 | find (unsigned int node) |
641 | { |
642 | gcc_checking_assert (node < graph->size); |
643 | if (graph->rep[node] != node) |
644 | return graph->rep[node] = find (node: graph->rep[node]); |
645 | return node; |
646 | } |
647 | |
648 | /* Union the TO and FROM nodes to the TO nodes. |
649 | Note that at some point in the future, we may want to do |
650 | union-by-rank, in which case we are going to have to return the |
651 | node we unified to. */ |
652 | |
653 | static bool |
654 | unite (unsigned int to, unsigned int from) |
655 | { |
656 | gcc_checking_assert (to < graph->size && from < graph->size); |
657 | if (to != from && graph->rep[from] != to) |
658 | { |
659 | graph->rep[from] = to; |
660 | return true; |
661 | } |
662 | return false; |
663 | } |
664 | |
665 | /* Create a new constraint consisting of LHS and RHS expressions. */ |
666 | |
667 | static constraint_t |
668 | new_constraint (const struct constraint_expr lhs, |
669 | const struct constraint_expr rhs) |
670 | { |
671 | constraint_t ret = constraint_pool.allocate (); |
672 | ret->lhs = lhs; |
673 | ret->rhs = rhs; |
674 | return ret; |
675 | } |
676 | |
677 | /* Print out constraint C to FILE. */ |
678 | |
679 | static void |
680 | dump_constraint (FILE *file, constraint_t c) |
681 | { |
682 | if (c->lhs.type == ADDRESSOF) |
683 | fprintf (stream: file, format: "&" ); |
684 | else if (c->lhs.type == DEREF) |
685 | fprintf (stream: file, format: "*" ); |
686 | if (dump_file) |
687 | fprintf (stream: file, format: "%s" , get_varinfo (n: c->lhs.var)->name); |
688 | else |
689 | fprintf (stream: file, format: "V%d" , c->lhs.var); |
690 | if (c->lhs.offset == UNKNOWN_OFFSET) |
691 | fprintf (stream: file, format: " + UNKNOWN" ); |
692 | else if (c->lhs.offset != 0) |
693 | fprintf (stream: file, format: " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset); |
694 | fprintf (stream: file, format: " = " ); |
695 | if (c->rhs.type == ADDRESSOF) |
696 | fprintf (stream: file, format: "&" ); |
697 | else if (c->rhs.type == DEREF) |
698 | fprintf (stream: file, format: "*" ); |
699 | if (dump_file) |
700 | fprintf (stream: file, format: "%s" , get_varinfo (n: c->rhs.var)->name); |
701 | else |
702 | fprintf (stream: file, format: "V%d" , c->rhs.var); |
703 | if (c->rhs.offset == UNKNOWN_OFFSET) |
704 | fprintf (stream: file, format: " + UNKNOWN" ); |
705 | else if (c->rhs.offset != 0) |
706 | fprintf (stream: file, format: " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset); |
707 | } |
708 | |
709 | |
710 | void debug_constraint (constraint_t); |
711 | void debug_constraints (void); |
712 | void debug_constraint_graph (void); |
713 | void debug_solution_for_var (unsigned int); |
714 | void debug_sa_points_to_info (void); |
715 | void debug_varinfo (varinfo_t); |
716 | void debug_varmap (void); |
717 | |
718 | /* Print out constraint C to stderr. */ |
719 | |
720 | DEBUG_FUNCTION void |
721 | debug_constraint (constraint_t c) |
722 | { |
723 | dump_constraint (stderr, c); |
724 | fprintf (stderr, format: "\n" ); |
725 | } |
726 | |
727 | /* Print out all constraints to FILE */ |
728 | |
729 | static void |
730 | dump_constraints (FILE *file, int from) |
731 | { |
732 | int i; |
733 | constraint_t c; |
734 | for (i = from; constraints.iterate (ix: i, ptr: &c); i++) |
735 | if (c) |
736 | { |
737 | dump_constraint (file, c); |
738 | fprintf (stream: file, format: "\n" ); |
739 | } |
740 | } |
741 | |
/* Print out all constraints to stderr, starting from the first.  */

DEBUG_FUNCTION void
debug_constraints (void)
{
  dump_constraints (stderr, from: 0);
}
749 | |
750 | /* Print the constraint graph in dot format. */ |
751 | |
752 | static void |
753 | dump_constraint_graph (FILE *file) |
754 | { |
755 | unsigned int i; |
756 | |
757 | /* Only print the graph if it has already been initialized: */ |
758 | if (!graph) |
759 | return; |
760 | |
761 | /* Prints the header of the dot file: */ |
762 | fprintf (stream: file, format: "strict digraph {\n" ); |
763 | fprintf (stream: file, format: " node [\n shape = box\n ]\n" ); |
764 | fprintf (stream: file, format: " edge [\n fontsize = \"12\"\n ]\n" ); |
765 | fprintf (stream: file, format: "\n // List of nodes and complex constraints in " |
766 | "the constraint graph:\n" ); |
767 | |
768 | /* The next lines print the nodes in the graph together with the |
769 | complex constraints attached to them. */ |
770 | for (i = 1; i < graph->size; i++) |
771 | { |
772 | if (i == FIRST_REF_NODE) |
773 | continue; |
774 | if (find (node: i) != i) |
775 | continue; |
776 | if (i < FIRST_REF_NODE) |
777 | fprintf (stream: file, format: "\"%s\"" , get_varinfo (n: i)->name); |
778 | else |
779 | fprintf (stream: file, format: "\"*%s\"" , get_varinfo (n: i - FIRST_REF_NODE)->name); |
780 | if (graph->complex[i].exists ()) |
781 | { |
782 | unsigned j; |
783 | constraint_t c; |
784 | fprintf (stream: file, format: " [label=\"\\N\\n" ); |
785 | for (j = 0; graph->complex[i].iterate (ix: j, ptr: &c); ++j) |
786 | { |
787 | dump_constraint (file, c); |
788 | fprintf (stream: file, format: "\\l" ); |
789 | } |
790 | fprintf (stream: file, format: "\"]" ); |
791 | } |
792 | fprintf (stream: file, format: ";\n" ); |
793 | } |
794 | |
795 | /* Go over the edges. */ |
796 | fprintf (stream: file, format: "\n // Edges in the constraint graph:\n" ); |
797 | for (i = 1; i < graph->size; i++) |
798 | { |
799 | unsigned j; |
800 | bitmap_iterator bi; |
801 | if (find (node: i) != i) |
802 | continue; |
803 | EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi) |
804 | { |
805 | unsigned to = find (node: j); |
806 | if (i == to) |
807 | continue; |
808 | if (i < FIRST_REF_NODE) |
809 | fprintf (stream: file, format: "\"%s\"" , get_varinfo (n: i)->name); |
810 | else |
811 | fprintf (stream: file, format: "\"*%s\"" , get_varinfo (n: i - FIRST_REF_NODE)->name); |
812 | fprintf (stream: file, format: " -> " ); |
813 | if (to < FIRST_REF_NODE) |
814 | fprintf (stream: file, format: "\"%s\"" , get_varinfo (n: to)->name); |
815 | else |
816 | fprintf (stream: file, format: "\"*%s\"" , get_varinfo (n: to - FIRST_REF_NODE)->name); |
817 | fprintf (stream: file, format: ";\n" ); |
818 | } |
819 | } |
820 | |
821 | /* Prints the tail of the dot file. */ |
822 | fprintf (stream: file, format: "}\n" ); |
823 | } |
824 | |
/* Print out the constraint graph (in dot format) to stderr.  */

DEBUG_FUNCTION void
debug_constraint_graph (void)
{
  dump_constraint_graph (stderr);
}
832 | |
833 | /* SOLVER FUNCTIONS |
834 | |
835 | The solver is a simple worklist solver, that works on the following |
836 | algorithm: |
837 | |
838 | sbitmap changed_nodes = all zeroes; |
839 | changed_count = 0; |
840 | For each node that is not already collapsed: |
841 | changed_count++; |
842 | set bit in changed nodes |
843 | |
844 | while (changed_count > 0) |
845 | { |
846 | compute topological ordering for constraint graph |
847 | |
848 | find and collapse cycles in the constraint graph (updating |
849 | changed if necessary) |
850 | |
851 | for each node (n) in the graph in topological order: |
852 | changed_count--; |
853 | |
854 | Process each complex constraint associated with the node, |
855 | updating changed if necessary. |
856 | |
857 | For each outgoing edge from n, propagate the solution from n to |
858 | the destination of the edge, updating changed as necessary. |
859 | |
860 | } */ |
861 | |
862 | /* Return true if two constraint expressions A and B are equal. */ |
863 | |
864 | static bool |
865 | constraint_expr_equal (struct constraint_expr a, struct constraint_expr b) |
866 | { |
867 | return a.type == b.type && a.var == b.var && a.offset == b.offset; |
868 | } |
869 | |
870 | /* Return true if constraint expression A is less than constraint expression |
871 | B. This is just arbitrary, but consistent, in order to give them an |
872 | ordering. */ |
873 | |
874 | static bool |
875 | constraint_expr_less (struct constraint_expr a, struct constraint_expr b) |
876 | { |
877 | if (a.type == b.type) |
878 | { |
879 | if (a.var == b.var) |
880 | return a.offset < b.offset; |
881 | else |
882 | return a.var < b.var; |
883 | } |
884 | else |
885 | return a.type < b.type; |
886 | } |
887 | |
888 | /* Return true if constraint A is less than constraint B. This is just |
889 | arbitrary, but consistent, in order to give them an ordering. */ |
890 | |
891 | static bool |
892 | constraint_less (const constraint_t &a, const constraint_t &b) |
893 | { |
894 | if (constraint_expr_less (a: a->lhs, b: b->lhs)) |
895 | return true; |
896 | else if (constraint_expr_less (a: b->lhs, b: a->lhs)) |
897 | return false; |
898 | else |
899 | return constraint_expr_less (a: a->rhs, b: b->rhs); |
900 | } |
901 | |
902 | /* Return true if two constraints A and B are equal. */ |
903 | |
904 | static bool |
905 | constraint_equal (struct constraint a, struct constraint b) |
906 | { |
907 | return constraint_expr_equal (a: a.lhs, b: b.lhs) |
908 | && constraint_expr_equal (a: a.rhs, b: b.rhs); |
909 | } |
910 | |
911 | |
912 | /* Find a constraint LOOKFOR in the sorted constraint vector VEC */ |
913 | |
914 | static constraint_t |
915 | constraint_vec_find (vec<constraint_t> vec, |
916 | struct constraint lookfor) |
917 | { |
918 | unsigned int place; |
919 | constraint_t found; |
920 | |
921 | if (!vec.exists ()) |
922 | return NULL; |
923 | |
924 | place = vec.lower_bound (obj: &lookfor, lessthan: constraint_less); |
925 | if (place >= vec.length ()) |
926 | return NULL; |
927 | found = vec[place]; |
928 | if (!constraint_equal (a: *found, b: lookfor)) |
929 | return NULL; |
930 | return found; |
931 | } |
932 | |
933 | /* Union two constraint vectors, TO and FROM. Put the result in TO. |
934 | Returns true of TO set is changed. */ |
935 | |
936 | static bool |
937 | constraint_set_union (vec<constraint_t> *to, |
938 | vec<constraint_t> *from) |
939 | { |
940 | int i; |
941 | constraint_t c; |
942 | bool any_change = false; |
943 | |
944 | FOR_EACH_VEC_ELT (*from, i, c) |
945 | { |
946 | if (constraint_vec_find (vec: *to, lookfor: *c) == NULL) |
947 | { |
948 | unsigned int place = to->lower_bound (obj: c, lessthan: constraint_less); |
949 | to->safe_insert (ix: place, obj: c); |
950 | any_change = true; |
951 | } |
952 | } |
953 | return any_change; |
954 | } |
955 | |
/* Expands the solution in SET to all sub-fields of variables included.
   The result is cached in *EXPANDED (allocated from iteration_obstack
   on first use) so repeated calls for the same solution are cheap;
   the cached bitmap is returned.  */

static bitmap
solution_set_expand (bitmap set, bitmap *expanded)
{
  bitmap_iterator bi;
  unsigned j;

  /* Re-use a previously computed expansion if there is one.  */
  if (*expanded)
    return *expanded;

  *expanded = BITMAP_ALLOC (obstack: &iteration_obstack);

  /* In a first pass expand variables, once for each head to avoid
     quadratic behavior, to include all sub-fields.  */
  unsigned prev_head = 0;
  EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
    {
      varinfo_t v = get_varinfo (n: j);
      /* Artificial and single-field variables have no sub-fields to
	 add.  */
      if (v->is_artificial_var
	  || v->is_full_var)
	continue;
      /* Only expand each field chain once, keyed on its head id.  */
      if (v->head != prev_head)
	{
	  varinfo_t head = get_varinfo (n: v->head);
	  unsigned num = 1;
	  /* Walk the sub-field chain, accumulating maximal runs of
	     consecutive variable ids so they can be set in one
	     bitmap_set_range call.  */
	  for (varinfo_t n = vi_next (vi: head); n != NULL; n = vi_next (vi: n))
	    {
	      if (n->id != head->id + num)
		{
		  /* Usually sub variables are adjacent but since we
		     create pointed-to restrict representatives there
		     can be gaps as well.  */
		  bitmap_set_range (*expanded, head->id, num);
		  head = n;
		  num = 1;
		}
	      else
		num++;
	    }

	  /* Flush the final run of the chain.  */
	  bitmap_set_range (*expanded, head->id, num);
	  prev_head = v->head;
	}
    }

  /* And finally set the rest of the bits from SET in an efficient way.  */
  bitmap_ior_into (*expanded, set);

  return *expanded;
}
1007 | |
/* Union solution sets TO and DELTA, and add INC to each member of DELTA in the
   process.  Returns true if the TO set changed.  *EXPANDED_DELTA caches
   the sub-field expansion of DELTA when INC is UNKNOWN_OFFSET.  */

static bool
set_union_with_increment (bitmap to, bitmap delta, HOST_WIDE_INT inc,
			  bitmap *expanded_delta)
{
  bool changed = false;
  bitmap_iterator bi;
  unsigned int i;

  /* If the solution of DELTA contains anything it is good enough to transfer
     this to TO.  */
  if (bitmap_bit_p (delta, anything_id))
    return bitmap_set_bit (to, anything_id);

  /* If the offset is unknown we have to expand the solution to
     all subfields.  */
  if (inc == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (set: delta, expanded: expanded_delta);
      changed |= bitmap_ior_into (to, delta);
      return changed;
    }

  /* For non-zero offset union the offsetted solution into the destination.  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (n: i);

      /* If this is a variable with just one field just set its bit
         in the result.  */
      if (vi->is_artificial_var
	  || vi->is_unknown_size_var
	  || vi->is_full_var)
	changed |= bitmap_set_bit (to, i);
      else
	{
	  HOST_WIDE_INT fieldoffset = vi->offset + inc;
	  unsigned HOST_WIDE_INT size = vi->size;

	  /* If the offset makes the pointer point to before the
	     variable use offset zero for the field lookup.  */
	  if (fieldoffset < 0)
	    vi = get_varinfo (n: vi->head);
	  else
	    vi = first_or_preceding_vi_for_offset (vi, fieldoffset);

	  do
	    {
	      changed |= bitmap_set_bit (to, vi->id);
	      /* Stop at the last sub-field of the variable.  */
	      if (vi->is_full_var
		  || vi->next == 0)
		break;

	      /* We have to include all fields that overlap the current field
		 shifted by inc.  */
	      vi = vi_next (vi);
	    }
	  while (vi->offset < fieldoffset + size);
	}
    }

  return changed;
}
1073 | |
1074 | /* Insert constraint C into the list of complex constraints for graph |
1075 | node VAR. */ |
1076 | |
1077 | static void |
1078 | insert_into_complex (constraint_graph_t graph, |
1079 | unsigned int var, constraint_t c) |
1080 | { |
1081 | vec<constraint_t> complex = graph->complex[var]; |
1082 | unsigned int place = complex.lower_bound (obj: c, lessthan: constraint_less); |
1083 | |
1084 | /* Only insert constraints that do not already exist. */ |
1085 | if (place >= complex.length () |
1086 | || !constraint_equal (a: *c, b: *complex[place])) |
1087 | graph->complex[var].safe_insert (ix: place, obj: c); |
1088 | } |
1089 | |
1090 | |
1091 | /* Condense two variable nodes into a single variable node, by moving |
1092 | all associated info from FROM to TO. Returns true if TO node's |
1093 | constraint set changes after the merge. */ |
1094 | |
1095 | static bool |
1096 | merge_node_constraints (constraint_graph_t graph, unsigned int to, |
1097 | unsigned int from) |
1098 | { |
1099 | unsigned int i; |
1100 | constraint_t c; |
1101 | bool any_change = false; |
1102 | |
1103 | gcc_checking_assert (find (from) == to); |
1104 | |
1105 | /* Move all complex constraints from src node into to node */ |
1106 | FOR_EACH_VEC_ELT (graph->complex[from], i, c) |
1107 | { |
1108 | /* In complex constraints for node FROM, we may have either |
1109 | a = *FROM, and *FROM = a, or an offseted constraint which are |
1110 | always added to the rhs node's constraints. */ |
1111 | |
1112 | if (c->rhs.type == DEREF) |
1113 | c->rhs.var = to; |
1114 | else if (c->lhs.type == DEREF) |
1115 | c->lhs.var = to; |
1116 | else |
1117 | c->rhs.var = to; |
1118 | |
1119 | } |
1120 | any_change = constraint_set_union (to: &graph->complex[to], |
1121 | from: &graph->complex[from]); |
1122 | graph->complex[from].release (); |
1123 | return any_change; |
1124 | } |
1125 | |
1126 | |
1127 | /* Remove edges involving NODE from GRAPH. */ |
1128 | |
1129 | static void |
1130 | clear_edges_for_node (constraint_graph_t graph, unsigned int node) |
1131 | { |
1132 | if (graph->succs[node]) |
1133 | BITMAP_FREE (graph->succs[node]); |
1134 | } |
1135 | |
/* Merge GRAPH nodes FROM and TO into node TO, transferring FROM's
   indirect-cycle information and successor edges, then clearing
   FROM's edges.  */

static void
merge_graph_nodes (constraint_graph_t graph, unsigned int to,
		   unsigned int from)
{
  if (graph->indirect_cycles[from] != -1)
    {
      /* If we have indirect cycles with the from node, and we have
	 none on the to node, the to node has indirect cycles from the
	 from node now that they are unified.
	 If indirect cycles exist on both, unify the nodes that they
	 are in a cycle with, since we know they are in a cycle with
	 each other.  */
      if (graph->indirect_cycles[to] == -1)
	graph->indirect_cycles[to] = graph->indirect_cycles[from];
    }

  /* Merge all the successor edges.  */
  if (graph->succs[from])
    {
      /* TO's successor bitmap is allocated lazily.  */
      if (!graph->succs[to])
	graph->succs[to] = BITMAP_ALLOC (obstack: &pta_obstack);
      bitmap_ior_into (graph->succs[to],
		       graph->succs[from]);
    }

  /* FROM is dead after the merge; drop its edges.  */
  clear_edges_for_node (graph, node: from);
}
1165 | |
1166 | |
1167 | /* Add an indirect graph edge to GRAPH, going from TO to FROM if |
1168 | it doesn't exist in the graph already. */ |
1169 | |
1170 | static void |
1171 | add_implicit_graph_edge (constraint_graph_t graph, unsigned int to, |
1172 | unsigned int from) |
1173 | { |
1174 | if (to == from) |
1175 | return; |
1176 | |
1177 | if (!graph->implicit_preds[to]) |
1178 | graph->implicit_preds[to] = BITMAP_ALLOC (obstack: &predbitmap_obstack); |
1179 | |
1180 | if (bitmap_set_bit (graph->implicit_preds[to], from)) |
1181 | stats.num_implicit_edges++; |
1182 | } |
1183 | |
/* Add a predecessor graph edge to GRAPH, going from TO to FROM if
   it doesn't exist in the graph already.  Unlike add_graph_edge this
   does not report whether the edge was new.  */

static void
add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
		     unsigned int from)
{
  /* Predecessor bitmaps are allocated lazily.  */
  if (!graph->preds[to])
    graph->preds[to] = BITMAP_ALLOC (obstack: &predbitmap_obstack);
  bitmap_set_bit (graph->preds[to], from);
}
1196 | |
/* Add a graph edge to GRAPH, going from FROM to TO if
   it doesn't exist in the graph already.
   Return false if the edge already existed (or was deliberately not
   added), true otherwise.  */

static bool
add_graph_edge (constraint_graph_t graph, unsigned int to,
		unsigned int from)
{
  if (to == from)
    {
      /* Self edges never change a solution; do not record them.  */
      return false;
    }
  else
    {
      bool r = false;

      /* Successor bitmaps are allocated lazily.  */
      if (!graph->succs[from])
	graph->succs[from] = BITMAP_ALLOC (obstack: &pta_obstack);

      /* The graph solving process does not avoid "triangles", thus
	 there can be multiple paths from a node to another involving
	 intermediate other nodes.  That causes extra copying which is
	 most difficult to avoid when the intermediate node is ESCAPED
	 because there are no edges added from ESCAPED.  Avoid
	 adding the direct edge FROM -> TO when we have FROM -> ESCAPED
	 and TO contains ESCAPED.
	 ???  Note this is only a heuristic, it does not prevent the
	 situation from occurring.  The heuristic helps PR38474 and
	 PR99912 significantly.  */
      if (to < FIRST_REF_NODE
	  && bitmap_bit_p (graph->succs[from], find (node: escaped_id))
	  && bitmap_bit_p (get_varinfo (n: find (node: to))->solution, escaped_id))
	{
	  stats.num_avoided_edges++;
	  return false;
	}

      /* bitmap_set_bit returns true only for a newly set bit, i.e. a
	 new edge.  */
      if (bitmap_set_bit (graph->succs[from], to))
	{
	  r = true;
	  /* Only edges between non-REF nodes are counted in the
	     statistics.  */
	  if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
	    stats.num_edges++;
	}
      return r;
    }
}
1243 | |
1244 | |
1245 | /* Initialize the constraint graph structure to contain SIZE nodes. */ |
1246 | |
1247 | static void |
1248 | init_graph (unsigned int size) |
1249 | { |
1250 | unsigned int j; |
1251 | |
1252 | graph = XCNEW (struct constraint_graph); |
1253 | graph->size = size; |
1254 | graph->succs = XCNEWVEC (bitmap, graph->size); |
1255 | graph->indirect_cycles = XNEWVEC (int, graph->size); |
1256 | graph->rep = XNEWVEC (unsigned int, graph->size); |
1257 | /* ??? Macros do not support template types with multiple arguments, |
1258 | so we use a typedef to work around it. */ |
1259 | typedef vec<constraint_t> vec_constraint_t_heap; |
1260 | graph->complex = XCNEWVEC (vec_constraint_t_heap, size); |
1261 | graph->pe = XCNEWVEC (unsigned int, graph->size); |
1262 | graph->pe_rep = XNEWVEC (int, graph->size); |
1263 | |
1264 | for (j = 0; j < graph->size; j++) |
1265 | { |
1266 | graph->rep[j] = j; |
1267 | graph->pe_rep[j] = -1; |
1268 | graph->indirect_cycles[j] = -1; |
1269 | } |
1270 | } |
1271 | |
/* Build the constraint graph, adding only predecessor edges right now.
   Also classifies nodes as direct/indirect and records address-taken
   variables, for use by the offline variable substitution phase.  */

static void
build_pred_graph (void)
{
  int i;
  constraint_t c;
  unsigned int j;

  graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
  graph->preds = XCNEWVEC (bitmap, graph->size);
  graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
  graph->loc_label = XCNEWVEC (unsigned int, graph->size);
  graph->pointed_by = XCNEWVEC (bitmap, graph->size);
  graph->points_to = XCNEWVEC (bitmap, graph->size);
  graph->eq_rep = XNEWVEC (int, graph->size);
  graph->direct_nodes = sbitmap_alloc (graph->size);
  graph->address_taken = BITMAP_ALLOC (obstack: &predbitmap_obstack);
  bitmap_clear (graph->direct_nodes);

  /* All non-special variables start out as direct nodes; the loops
     below knock out the ones that are not.  */
  for (j = 1; j < FIRST_REF_NODE; j++)
    {
      if (!get_varinfo (n: j)->is_special_var)
	bitmap_set_bit (map: graph->direct_nodes, bitno: j);
    }

  for (j = 0; j < graph->size; j++)
    graph->eq_rep[j] = -1;

  for (j = 0; j < varmap.length (); j++)
    graph->indirect_cycles[j] = -1;

  /* Classify each constraint and record the corresponding predecessor
     or implicit edges.  */
  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = lhs.var;
      unsigned int rhsvar = rhs.var;

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, from: rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_pred_graph_edge (graph, to: lhsvar, FIRST_REF_NODE + rhsvar);
	  else
	    /* An offsetted or non-scalar load makes LHS indirect.  */
	    bitmap_clear_bit (map: graph->direct_nodes, bitno: lhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  varinfo_t v;

	  /* x = &y */
	  if (graph->points_to[lhsvar] == NULL)
	    graph->points_to[lhsvar] = BITMAP_ALLOC (obstack: &predbitmap_obstack);
	  bitmap_set_bit (graph->points_to[lhsvar], rhsvar);

	  if (graph->pointed_by[rhsvar] == NULL)
	    graph->pointed_by[rhsvar] = BITMAP_ALLOC (obstack: &predbitmap_obstack);
	  bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);

	  /* Implicitly, *x = y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, from: rhsvar);

	  /* All related variables are no longer direct nodes.  */
	  bitmap_clear_bit (map: graph->direct_nodes, bitno: rhsvar);
	  v = get_varinfo (n: rhsvar);
	  if (!v->is_full_var)
	    {
	      /* Also mark every sub-field of the variable, starting
		 from its head.  */
	      v = get_varinfo (n: v->head);
	      do
		{
		  bitmap_clear_bit (map: graph->direct_nodes, bitno: v->id);
		  v = vi_next (vi: v);
		}
	      while (v != NULL);
	    }
	  bitmap_set_bit (graph->address_taken, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y */
	  add_pred_graph_edge (graph, to: lhsvar, from: rhsvar);
	  /* Implicitly, *x = *y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
				   FIRST_REF_NODE + rhsvar);
	}
      else if (lhs.offset != 0 || rhs.offset != 0)
	{
	  /* Offsetted copies make the offsetted side indirect.  */
	  if (rhs.offset != 0)
	    bitmap_clear_bit (map: graph->direct_nodes, bitno: lhs.var);
	  else if (lhs.offset != 0)
	    bitmap_clear_bit (map: graph->direct_nodes, bitno: rhs.var);
	}
    }
}
1374 | |
/* Build the constraint graph, adding successor edges.  Must run after
   variable substitution, hence the find () calls to map variables to
   their current representatives.  */

static void
build_succ_graph (void)
{
  unsigned i, t;
  constraint_t c;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs;
      struct constraint_expr rhs;
      unsigned int lhsvar;
      unsigned int rhsvar;

      /* Constraints may have been NULLed out by earlier phases.  */
      if (!c)
	continue;

      lhs = c->lhs;
      rhs = c->rhs;
      lhsvar = find (node: lhs.var);
      rhsvar = find (node: rhs.var);

      if (lhs.type == DEREF)
	{
	  /* *x = y: edge from y to the REF node of x.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_graph_edge (graph, FIRST_REF_NODE + lhsvar, from: rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y: edge from the REF node of y to x.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_graph_edge (graph, to: lhsvar, FIRST_REF_NODE + rhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y: no edge, just seed x's solution with y.  */
	  gcc_checking_assert (find (rhs.var) == rhs.var);
	  bitmap_set_bit (get_varinfo (n: lhsvar)->solution, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y: plain copy edge.  */
	  add_graph_edge (graph, to: lhsvar, from: rhsvar);
	}
    }

  /* Add edges from STOREDANYTHING to all non-direct nodes that can
     receive pointers.  */
  t = find (node: storedanything_id);
  for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
    {
      if (!bitmap_bit_p (map: graph->direct_nodes, bitno: i)
	  && get_varinfo (n: i)->may_have_pointers)
	add_graph_edge (graph, to: find (node: i), from: t);
    }

  /* Everything stored to ANYTHING also potentially escapes.  */
  add_graph_edge (graph, to: find (node: escaped_id), from: t);
}
1434 | |
1435 | |
1436 | /* Changed variables on the last iteration. */ |
1437 | static bitmap changed; |
1438 | |
/* Strongly Connected Component visitation info.  */

class scc_info
{
public:
  scc_info (size_t size);
  ~scc_info ();

  /* Nodes already visited by the DFS.  */
  auto_sbitmap visited;
  /* Nodes whose SCC has been completely processed.  */
  auto_sbitmap deleted;
  /* Per-node DFS number, used as the Tarjan lowlink value.  */
  unsigned int *dfs;
  /* Per-node representative mapping; initialized to the identity by
     the constructor.  */
  unsigned int *node_mapping;
  /* Next DFS number to assign.  */
  int current_index;
  /* Stack of visited nodes not yet assigned to a finished SCC
     (Nuutila's variant keeps only non-root nodes here).  */
  auto_vec<unsigned> scc_stack;
};
1454 | |
1455 | |
1456 | /* Recursive routine to find strongly connected components in GRAPH. |
1457 | SI is the SCC info to store the information in, and N is the id of current |
1458 | graph node we are processing. |
1459 | |
1460 | This is Tarjan's strongly connected component finding algorithm, as |
1461 | modified by Nuutila to keep only non-root nodes on the stack. |
1462 | The algorithm can be found in "On finding the strongly connected |
1463 | connected components in a directed graph" by Esko Nuutila and Eljas |
1464 | Soisalon-Soininen, in Information Processing Letters volume 49, |
1465 | number 1, pages 9-14. */ |
1466 | |
1467 | static void |
1468 | scc_visit (constraint_graph_t graph, class scc_info *si, unsigned int n) |
1469 | { |
1470 | unsigned int i; |
1471 | bitmap_iterator bi; |
1472 | unsigned int my_dfs; |
1473 | |
1474 | bitmap_set_bit (map: si->visited, bitno: n); |
1475 | si->dfs[n] = si->current_index ++; |
1476 | my_dfs = si->dfs[n]; |
1477 | |
1478 | /* Visit all the successors. */ |
1479 | EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi) |
1480 | { |
1481 | unsigned int w; |
1482 | |
1483 | if (i > LAST_REF_NODE) |
1484 | break; |
1485 | |
1486 | w = find (node: i); |
1487 | if (bitmap_bit_p (map: si->deleted, bitno: w)) |
1488 | continue; |
1489 | |
1490 | if (!bitmap_bit_p (map: si->visited, bitno: w)) |
1491 | scc_visit (graph, si, n: w); |
1492 | |
1493 | unsigned int t = find (node: w); |
1494 | gcc_checking_assert (find (n) == n); |
1495 | if (si->dfs[t] < si->dfs[n]) |
1496 | si->dfs[n] = si->dfs[t]; |
1497 | } |
1498 | |
1499 | /* See if any components have been identified. */ |
1500 | if (si->dfs[n] == my_dfs) |
1501 | { |
1502 | if (si->scc_stack.length () > 0 |
1503 | && si->dfs[si->scc_stack.last ()] >= my_dfs) |
1504 | { |
1505 | bitmap scc = BITMAP_ALLOC (NULL); |
1506 | unsigned int lowest_node; |
1507 | bitmap_iterator bi; |
1508 | |
1509 | bitmap_set_bit (scc, n); |
1510 | |
1511 | while (si->scc_stack.length () != 0 |
1512 | && si->dfs[si->scc_stack.last ()] >= my_dfs) |
1513 | { |
1514 | unsigned int w = si->scc_stack.pop (); |
1515 | |
1516 | bitmap_set_bit (scc, w); |
1517 | } |
1518 | |
1519 | lowest_node = bitmap_first_set_bit (scc); |
1520 | gcc_assert (lowest_node < FIRST_REF_NODE); |
1521 | |
1522 | /* Collapse the SCC nodes into a single node, and mark the |
1523 | indirect cycles. */ |
1524 | EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi) |
1525 | { |
1526 | if (i < FIRST_REF_NODE) |
1527 | { |
1528 | if (unite (to: lowest_node, from: i)) |
1529 | unify_nodes (graph, lowest_node, i, false); |
1530 | } |
1531 | else |
1532 | { |
1533 | unite (to: lowest_node, from: i); |
1534 | graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node; |
1535 | } |
1536 | } |
1537 | } |
1538 | bitmap_set_bit (map: si->deleted, bitno: n); |
1539 | } |
1540 | else |
1541 | si->scc_stack.safe_push (obj: n); |
1542 | } |
1543 | |
/* Unify node FROM into node TO, updating the changed count if
   necessary when UPDATE_CHANGED is true.  Merges graph edges,
   complex constraints, and solutions, and frees FROM's bitmaps.  */

static void
unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
	     bool update_changed)
{
  gcc_checking_assert (to != from && find (to) == to);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (stream: dump_file, format: "Unifying %s to %s\n" ,
	     get_varinfo (n: from)->name,
	     get_varinfo (n: to)->name);

  /* Dynamic unifications happen during solving, static ones before.  */
  if (update_changed)
    stats.unified_vars_dynamic++;
  else
    stats.unified_vars_static++;

  merge_graph_nodes (graph, to, from);
  if (merge_node_constraints (graph, to, from))
    {
      /* TO gained complex constraints; revisit it.  */
      if (update_changed)
	bitmap_set_bit (changed, to);
    }

  /* Mark TO as changed if FROM was changed.  If TO was already marked
     as changed, decrease the changed count.  */

  if (update_changed
      && bitmap_clear_bit (changed, from))
    bitmap_set_bit (changed, to);
  varinfo_t fromvi = get_varinfo (n: from);
  if (fromvi->solution)
    {
      /* If the solution changes because of the merging, we need to mark
	 the variable as changed.  */
      varinfo_t tovi = get_varinfo (n: to);
      if (bitmap_ior_into (tovi->solution, fromvi->solution))
	{
	  if (update_changed)
	    bitmap_set_bit (changed, to);
	}

      /* FROM's solutions are dead after the merge.  */
      BITMAP_FREE (fromvi->solution);
      if (fromvi->oldsolution)
	BITMAP_FREE (fromvi->oldsolution);

      /* Dropping TO's oldsolution forces a full (not incremental)
	 propagation on its next visit.  */
      if (stats.iterations > 0
	  && tovi->oldsolution)
	BITMAP_FREE (tovi->oldsolution);
    }
  /* Remove any self edge the merge may have created.  */
  if (graph->succs[to])
    bitmap_clear_bit (graph->succs[to], to);
}
1599 | |
/* Add a copy edge FROM -> TO, optimizing special cases.  Returns TRUE
   if the solution of TO changed.  */

static bool
solve_add_graph_edge (constraint_graph_t graph, unsigned int to,
		      unsigned int from)
{
  /* Adding edges from the special vars is pointless.
     They don't have sets that can change.  */
  if (get_varinfo (n: from)->is_special_var)
    return bitmap_ior_into (get_varinfo (n: to)->solution,
			    get_varinfo (n: from)->solution);
  /* Merging the solution from ESCAPED needlessly increases
     the set.  Use ESCAPED as representative instead.  */
  else if (from == find (node: escaped_id))
    return bitmap_set_bit (get_varinfo (n: to)->solution, escaped_id);
  /* Only add a real edge (and merge solutions) when FROM can actually
     contribute pointers.  */
  else if (get_varinfo (n: from)->may_have_pointers
	   && add_graph_edge (graph, to, from))
    return bitmap_ior_into (get_varinfo (n: to)->solution,
			    get_varinfo (n: from)->solution);
  return false;
}
1622 | |
/* Process a constraint C that represents x = *(y + off), using DELTA as the
   starting solution for y.  *EXPANDED_DELTA caches the sub-field
   expansion of DELTA for the UNKNOWN_OFFSET case.  */

static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
		  bitmap delta, bitmap *expanded_delta)
{
  unsigned int lhs = c->lhs.var;
  bool flag = false;
  bitmap sol = get_varinfo (n: lhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT roffset = c->rhs.offset;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->lhs.offset == 0);

  /* If the solution of Y contains anything it is good enough to transfer
     this to the LHS.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      flag |= bitmap_set_bit (sol, anything_id);
      goto done;
    }

  /* If we do not know at which offset the rhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (roffset == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (set: delta, expanded: expanded_delta);
      /* No further offset processing is necessary.  */
      roffset = 0;
    }

  /* For each variable j in delta (Sol(y)), add
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (n: j);
      HOST_WIDE_INT fieldoffset = v->offset + roffset;
      unsigned HOST_WIDE_INT size = v->size;
      unsigned int t;

      if (v->is_full_var)
	;
      else if (roffset != 0)
	{
	  /* Offsets before the variable start map to the first field.  */
	  if (fieldoffset < 0)
	    v = get_varinfo (n: v->head);
	  else
	    v = first_or_preceding_vi_for_offset (v, fieldoffset);
	}

      /* We have to include all fields that overlap the current field
	 shifted by roffset.  */
      do
	{
	  t = find (node: v->id);

	  flag |= solve_add_graph_edge (graph, to: lhs, from: t);

	  if (v->is_full_var
	      || v->next == 0)
	    break;

	  v = vi_next (vi: v);
	}
      while (v->offset < fieldoffset + size);
    }

done:
  /* If the LHS solution changed, mark the var as changed.  */
  if (flag)
    bitmap_set_bit (changed, lhs);
}
1699 | |
/* Process a constraint C that represents *(x + off) = y using DELTA
   as the starting solution for x.  *EXPANDED_DELTA caches the
   sub-field expansion of DELTA for the UNKNOWN_OFFSET case.  */

static void
do_ds_constraint (constraint_t c, bitmap delta, bitmap *expanded_delta)
{
  unsigned int rhs = c->rhs.var;
  bitmap sol = get_varinfo (n: rhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT loff = c->lhs.offset;
  bool escaped_p = false;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->rhs.offset == 0);

  /* If the solution of y contains ANYTHING simply use the ANYTHING
     solution.  This avoids needlessly increasing the points-to sets.  */
  if (bitmap_bit_p (sol, anything_id))
    sol = get_varinfo (n: find (node: anything_id))->solution;

  /* If the solution for x contains ANYTHING we have to merge the
     solution of y into all pointer variables which we do via
     STOREDANYTHING.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      unsigned t = find (node: storedanything_id);
      if (solve_add_graph_edge (graph, to: t, from: rhs))
	bitmap_set_bit (changed, t);
      return;
    }

  /* If we do not know at which offset the lhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (loff == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (set: delta, expanded: expanded_delta);
      loff = 0;
    }

  /* For each member j of delta (Sol(x)), add an edge from y to j and
     union Sol(y) into Sol(j) */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (n: j);
      unsigned int t;
      HOST_WIDE_INT fieldoffset = v->offset + loff;
      unsigned HOST_WIDE_INT size = v->size;

      if (v->is_full_var)
	;
      else if (loff != 0)
	{
	  /* Offsets before the variable start map to the first field.  */
	  if (fieldoffset < 0)
	    v = get_varinfo (n: v->head);
	  else
	    v = first_or_preceding_vi_for_offset (v, fieldoffset);
	}

      /* We have to include all fields that overlap the current field
	 shifted by loff.  */
      do
	{
	  if (v->may_have_pointers)
	    {
	      /* If v is a global variable then this is an escape point.  */
	      if (v->is_global_var
		  && !escaped_p)
		{
		  t = find (node: escaped_id);
		  if (add_graph_edge (graph, to: t, from: rhs)
		      && bitmap_ior_into (get_varinfo (n: t)->solution, sol))
		    bitmap_set_bit (changed, t);
		  /* Enough to let rhs escape once.  */
		  escaped_p = true;
		}

	      /* Special vars never receive stored values.  */
	      if (v->is_special_var)
		break;

	      t = find (node: v->id);

	      if (solve_add_graph_edge (graph, to: t, from: rhs))
		bitmap_set_bit (changed, t);
	    }

	  if (v->is_full_var
	      || v->next == 0)
	    break;

	  v = vi_next (vi: v);
	}
      while (v->offset < fieldoffset + size);
    }
}
1796 | |
/* Handle a non-simple (simple meaning requires no iteration)
   constraint, i.e. *x = &y, x = *y, *x = y, and x = y with offsets
   involved.  DELTA is the set of solution bits that changed for the
   constraint's pointed-to variable since the last iteration;
   *EXPANDED_DELTA caches the variable-expanded form of DELTA.  */

static void
do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta,
		       bitmap *expanded_delta)
{
  if (c->lhs.type == DEREF)
    {
      if (c->rhs.type == ADDRESSOF)
	{
	  /* *x = &y is never produced by the constraint builder, so it
	     should not reach the solver.  */
	  gcc_unreachable ();
	}
      else
	{
	  /* *x = y */
	  do_ds_constraint (c, delta, expanded_delta);
	}
    }
  else if (c->rhs.type == DEREF)
    {
      /* x = *y */
      /* Special vars (ANYTHING etc.) have fixed solutions; loading into
	 them is pointless.  */
      if (!(get_varinfo (n: c->lhs.var)->is_special_var))
	do_sd_constraint (graph, c, delta, expanded_delta);
    }
  else
    {
      bitmap tmp;
      bool flag = false;

      /* The remaining complex case is a scalar copy with a non-zero
	 RHS offset: x = y + off (a plain copy would be a graph edge,
	 not a complex constraint).  */
      gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR
			   && c->rhs.offset != 0 && c->lhs.offset == 0);
      tmp = get_varinfo (n: c->lhs.var)->solution;

      flag = set_union_with_increment (to: tmp, delta, inc: c->rhs.offset,
				       expanded_delta);

      /* Re-queue the LHS if its solution grew.  */
      if (flag)
	bitmap_set_bit (changed, c->lhs.var);
    }
}
1838 | |
1839 | /* Initialize and return a new SCC info structure. */ |
1840 | |
1841 | scc_info::scc_info (size_t size) : |
1842 | visited (size), deleted (size), current_index (0), scc_stack (1) |
1843 | { |
1844 | bitmap_clear (visited); |
1845 | bitmap_clear (deleted); |
1846 | node_mapping = XNEWVEC (unsigned int, size); |
1847 | dfs = XCNEWVEC (unsigned int, size); |
1848 | |
1849 | for (size_t i = 0; i < size; i++) |
1850 | node_mapping[i] = i; |
1851 | } |
1852 | |
1853 | /* Free an SCC info structure pointed to by SI */ |
1854 | |
1855 | scc_info::~scc_info () |
1856 | { |
1857 | free (ptr: node_mapping); |
1858 | free (ptr: dfs); |
1859 | } |
1860 | |
1861 | |
1862 | /* Find indirect cycles in GRAPH that occur, using strongly connected |
1863 | components, and note them in the indirect cycles map. |
1864 | |
1865 | This technique comes from Ben Hardekopf and Calvin Lin, |
1866 | "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of |
1867 | Lines of Code", submitted to PLDI 2007. */ |
1868 | |
1869 | static void |
1870 | find_indirect_cycles (constraint_graph_t graph) |
1871 | { |
1872 | unsigned int i; |
1873 | unsigned int size = graph->size; |
1874 | scc_info si (size); |
1875 | |
1876 | for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ ) |
1877 | if (!bitmap_bit_p (map: si.visited, bitno: i) && find (node: i) == i) |
1878 | scc_visit (graph, si: &si, n: i); |
1879 | } |
1880 | |
1881 | /* Visit the graph in topological order starting at node N, and store the |
1882 | order in TOPO_ORDER using VISITED to indicate visited nodes. */ |
1883 | |
1884 | static void |
1885 | topo_visit (constraint_graph_t graph, vec<unsigned> &topo_order, |
1886 | sbitmap visited, unsigned int n) |
1887 | { |
1888 | bitmap_iterator bi; |
1889 | unsigned int j; |
1890 | |
1891 | bitmap_set_bit (map: visited, bitno: n); |
1892 | |
1893 | if (graph->succs[n]) |
1894 | EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi) |
1895 | { |
1896 | unsigned k = find (node: j); |
1897 | if (!bitmap_bit_p (map: visited, bitno: k)) |
1898 | topo_visit (graph, topo_order, visited, n: k); |
1899 | } |
1900 | |
1901 | topo_order.quick_push (obj: n); |
1902 | } |
1903 | |
/* Compute a topological ordering for GRAPH, and return the result.
   The order is consumed by solve_graph, which pops from the end, so
   entries pushed last are processed first.  */

static auto_vec<unsigned>
compute_topo_order (constraint_graph_t graph)
{
  unsigned int i;
  unsigned int size = graph->size;

  auto_sbitmap visited (size);
  bitmap_clear (visited);

  /* For the heuristic in add_graph_edge to work optimally make sure to
     first visit the connected component of the graph containing
     ESCAPED.  Do this by extracting the connected component
     with ESCAPED and append that to all other components as solve_graph
     pops from the order.  */
  auto_vec<unsigned> tail (size);
  topo_visit (graph, topo_order&: tail, visited, n: find (node: escaped_id));

  auto_vec<unsigned> topo_order (size);

  /* Now order the remaining components; only unvisited representative
     nodes start a new walk.  */
  for (i = 0; i != size; ++i)
    if (!bitmap_bit_p (map: visited, bitno: i) && find (node: i) == i)
      topo_visit (graph, topo_order, visited, n: i);

  /* Put the ESCAPED component at the end so it is popped first.  */
  topo_order.splice (src: tail);
  return topo_order;
}
1932 | |
/* Structure used for hash value numbering of pointer equivalence
   classes.  */

typedef struct equiv_class_label
{
  /* Cached hash of LABELS, computed once at insertion.  */
  hashval_t hashcode;
  /* Assigned equivalence class id; 0 means not yet assigned.  */
  unsigned int equivalence_class;
  /* The set of labels this class stands for.  */
  bitmap labels;
} *equiv_class_label_t;
typedef const struct equiv_class_label *const_equiv_class_label_t;
1943 | |
/* Equiv_class_label hashtable helpers.  Entries are obstack-allocated
   (see equiv_class_lookup_or_add), hence the nofree base.  */

struct equiv_class_hasher : nofree_ptr_hash <equiv_class_label>
{
  static inline hashval_t hash (const equiv_class_label *);
  static inline bool equal (const equiv_class_label *,
			    const equiv_class_label *);
};
1952 | |
/* Hash function for a equiv_class_label_t.  Returns the hash cached at
   entry creation rather than rehashing the label bitmap.  */

inline hashval_t
equiv_class_hasher::hash (const equiv_class_label *ecl)
{
  return ecl->hashcode;
}
1960 | |
1961 | /* Equality function for two equiv_class_label_t's. */ |
1962 | |
1963 | inline bool |
1964 | equiv_class_hasher::equal (const equiv_class_label *eql1, |
1965 | const equiv_class_label *eql2) |
1966 | { |
1967 | return (eql1->hashcode == eql2->hashcode |
1968 | && bitmap_equal_p (eql1->labels, eql2->labels)); |
1969 | } |
1970 | |
/* A hashtable for mapping a bitmap of labels->pointer equivalence
   classes.  */
static hash_table<equiv_class_hasher> *pointer_equiv_class_table;

/* A hashtable for mapping a bitmap of labels->location equivalence
   classes.  */
static hash_table<equiv_class_hasher> *location_equiv_class_table;

/* Obstack the equiv_class_label entries of both tables are allocated
   from; released wholesale in free_var_substitution_info.  */
struct obstack equiv_class_obstack;
1980 | |
/* Look up the equivalence class entry for the bitmap LABELS in TABLE,
   inserting a fresh entry (with equivalence_class == 0, meaning
   unassigned) if none exists yet.  Returns the entry; the caller
   decides whether to assign it a class id.  */

static equiv_class_label *
equiv_class_lookup_or_add (hash_table<equiv_class_hasher> *table,
			   bitmap labels)
{
  equiv_class_label **slot;
  equiv_class_label ecl;

  /* Probe with a stack temporary; only allocate on a miss.  */
  ecl.labels = labels;
  ecl.hashcode = bitmap_hash (labels);
  slot = table->find_slot (value: &ecl, insert: INSERT);
  if (!*slot)
    {
      *slot = XOBNEW (&equiv_class_obstack, struct equiv_class_label);
      (*slot)->labels = labels;
      (*slot)->hashcode = ecl.hashcode;
      (*slot)->equivalence_class = 0;
    }

  return *slot;
}
2005 | |
2006 | /* Perform offline variable substitution. |
2007 | |
2008 | This is a worst case quadratic time way of identifying variables |
2009 | that must have equivalent points-to sets, including those caused by |
2010 | static cycles, and single entry subgraphs, in the constraint graph. |
2011 | |
2012 | The technique is described in "Exploiting Pointer and Location |
2013 | Equivalence to Optimize Pointer Analysis. In the 14th International |
2014 | Static Analysis Symposium (SAS), August 2007." It is known as the |
2015 | "HU" algorithm, and is equivalent to value numbering the collapsed |
2016 | constraint graph including evaluating unions. |
2017 | |
2018 | The general method of finding equivalence classes is as follows: |
2019 | Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints. |
2020 | Initialize all non-REF nodes to be direct nodes. |
2021 | For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh |
2022 | variable} |
2023 | For each constraint containing the dereference, we also do the same |
2024 | thing. |
2025 | |
2026 | We then compute SCC's in the graph and unify nodes in the same SCC, |
2027 | including pts sets. |
2028 | |
2029 | For each non-collapsed node x: |
2030 | Visit all unvisited explicit incoming edges. |
2031 | Ignoring all non-pointers, set pts(x) = Union of pts(a) for y |
2032 | where y->x. |
2033 | Lookup the equivalence class for pts(x). |
2034 | If we found one, equivalence_class(x) = found class. |
2035 | Otherwise, equivalence_class(x) = new class, and new_class is |
2036 | added to the lookup table. |
2037 | |
2038 | All direct nodes with the same equivalence class can be replaced |
2039 | with a single representative node. |
2040 | All unlabeled nodes (label == 0) are not pointers and all edges |
2041 | involving them can be eliminated. |
2042 | We perform these optimizations during rewrite_constraints |
2043 | |
2044 | In addition to pointer equivalence class finding, we also perform |
2045 | location equivalence class finding. This is the set of variables |
2046 | that always appear together in points-to sets. We use this to |
2047 | compress the size of the points-to sets. */ |
2048 | |
/* Current maximum pointer equivalence class id.  Ids start at 1;
   label 0 is reserved to mean "not a pointer".  */
static int pointer_equiv_class;

/* Current maximum location equivalence class id.  Also starts at 1.  */
static int location_equiv_class;
2054 | |
/* Recursive routine to find strongly connected components in GRAPH,
   and label its nodes with DFS numbers.  Works on the predecessor
   graph; nodes of an SCC are unified (node_mapping, direct flag, and
   edge/points-to bitmaps) into the component root N.  */

static void
condense_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  gcc_checking_assert (si->node_mapping[n] == n);
  bitmap_set_bit (map: si->visited, bitno: n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the explicit predecessor edges (the condensation is
     computed on the predecessor graph).  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      /* Nodes already assigned to a finished SCC are skipped.  */
      if (bitmap_bit_p (map: si->deleted, bitno: w))
	continue;

      if (!bitmap_bit_p (map: si->visited, bitno: w))
	condense_visit (graph, si, n: w);

      /* Propagate the lowest reachable DFS number (Tarjan lowlink).  */
      unsigned int t = si->node_mapping[w];
      gcc_checking_assert (si->node_mapping[n] == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* Visit all the implicit predecessors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (bitmap_bit_p (map: si->deleted, bitno: w))
	continue;

      if (!bitmap_bit_p (map: si->visited, bitno: w))
	condense_visit (graph, si, n: w);

      unsigned int t = si->node_mapping[w];
      gcc_assert (si->node_mapping[n] == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* See if any components have been identified.  N is an SCC root iff
     its DFS number was not lowered by any edge.  */
  if (si->dfs[n] == my_dfs)
    {
      if (si->scc_stack.length () != 0
	  && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  /* Find the first node of the SCC and do non-bitmap work.  */
	  bool direct_p = true;
	  unsigned first = si->scc_stack.length ();
	  do
	    {
	      --first;
	      unsigned int w = si->scc_stack[first];
	      si->node_mapping[w] = n;
	      /* One indirect member makes the whole SCC indirect.  */
	      if (!bitmap_bit_p (map: graph->direct_nodes, bitno: w))
		direct_p = false;
	    }
	  while (first > 0
		 && si->dfs[si->scc_stack[first - 1]] >= my_dfs);
	  if (!direct_p)
	    bitmap_clear_bit (map: graph->direct_nodes, bitno: n);

	  /* Want to reduce to node n, push that first.  */
	  si->scc_stack.reserve (nelems: 1);
	  si->scc_stack.quick_push (obj: si->scc_stack[first]);
	  si->scc_stack[first] = n;

	  /* Merge the members' bitmaps pairwise, halving the number of
	     live sets each round, so unions stay balanced instead of
	     accumulating everything into one growing bitmap.  */
	  unsigned scc_size = si->scc_stack.length () - first;
	  unsigned split = scc_size / 2;
	  unsigned carry = scc_size - split * 2;
	  while (split > 0)
	    {
	      for (unsigned i = 0; i < split; ++i)
		{
		  unsigned a = si->scc_stack[first + i];
		  unsigned b = si->scc_stack[first + split + carry + i];

		  /* Unify our nodes.  */
		  if (graph->preds[b])
		    {
		      if (!graph->preds[a])
			std::swap (a&: graph->preds[a], b&: graph->preds[b]);
		      else
			bitmap_ior_into_and_free (graph->preds[a],
						  &graph->preds[b]);
		    }
		  if (graph->implicit_preds[b])
		    {
		      if (!graph->implicit_preds[a])
			std::swap (a&: graph->implicit_preds[a],
				   b&: graph->implicit_preds[b]);
		      else
			bitmap_ior_into_and_free (graph->implicit_preds[a],
						  &graph->implicit_preds[b]);
		    }
		  if (graph->points_to[b])
		    {
		      if (!graph->points_to[a])
			std::swap (a&: graph->points_to[a], b&: graph->points_to[b]);
		      else
			bitmap_ior_into_and_free (graph->points_to[a],
						  &graph->points_to[b]);
		    }
		}
	      unsigned remain = split + carry;
	      split = remain / 2;
	      carry = remain - split * 2;
	    }
	  /* Actually pop the SCC.  */
	  si->scc_stack.truncate (size: first);
	}
      bitmap_set_bit (map: si->deleted, bitno: n);
    }
  else
    si->scc_stack.safe_push (obj: n);
}
2180 | |
/* Label pointer equivalences.

   This performs a value numbering of the constraint graph to
   discover which variables will always have the same points-to sets
   under the current set of constraints.

   The way it value numbers is to store the set of points-to bits
   generated by the constraints and graph edges.  This is just used as a
   hash and equality comparison.  The *actual set of points-to bits* is
   completely irrelevant, in that we don't care about being able to
   extract them later.

   The equality values (currently bitmaps) just have to satisfy a few
   constraints, the main ones being:
   1. The combining operation must be order independent.
   2. The end result of a given set of operations must be unique iff the
      combination of input values is unique
   3. Hashable.  */

static void
label_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
{
  unsigned int i, first_pred;
  bitmap_iterator bi;

  bitmap_set_bit (map: si->visited, bitno: n);

  /* Label and union our incoming edges's points to sets.  */
  first_pred = -1U;
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];
      /* Depth-first: predecessors must be labeled before N.  */
      if (!bitmap_bit_p (map: si->visited, bitno: w))
	label_visit (graph, si, n: w);

      /* Skip unused edges: self edges and edges from non-pointers
	 (label 0) contribute nothing.  */
      if (w == n || graph->pointer_label[w] == 0)
	continue;

      if (graph->points_to[w])
	{
	  if (!graph->points_to[n])
	    {
	      /* Remember the first contributing predecessor; only
		 allocate our own bitmap once a second one appears.  */
	      if (first_pred == -1U)
		first_pred = w;
	      else
		{
		  graph->points_to[n] = BITMAP_ALLOC (obstack: &predbitmap_obstack);
		  bitmap_ior (graph->points_to[n],
			      graph->points_to[first_pred],
			      graph->points_to[w]);
		}
	    }
	  else
	    bitmap_ior_into (graph->points_to[n], graph->points_to[w]);
	}
    }

  /* Indirect nodes get fresh variables and a new pointer equiv class.  */
  if (!bitmap_bit_p (map: graph->direct_nodes, bitno: n))
    {
      if (!graph->points_to[n])
	{
	  graph->points_to[n] = BITMAP_ALLOC (obstack: &predbitmap_obstack);
	  if (first_pred != -1U)
	    bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
	}
      /* The fresh-variable bit makes this node's set unique.  */
      bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
      graph->pointer_label[n] = pointer_equiv_class++;
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (table: pointer_equiv_class_table,
				       labels: graph->points_to[n]);
      ecl->equivalence_class = graph->pointer_label[n];
      return;
    }

  /* If there was only a single non-empty predecessor the pointer equiv
     class is the same.  */
  if (!graph->points_to[n])
    {
      if (first_pred != -1U)
	{
	  graph->pointer_label[n] = graph->pointer_label[first_pred];
	  graph->points_to[n] = graph->points_to[first_pred];
	}
      /* Otherwise N keeps label 0, i.e. it is not a pointer.  */
      return;
    }

  if (!bitmap_empty_p (map: graph->points_to[n]))
    {
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (table: pointer_equiv_class_table,
				       labels: graph->points_to[n]);
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = pointer_equiv_class++;
      else
	{
	  /* A class already exists for this set; share its bitmap.  */
	  BITMAP_FREE (graph->points_to[n]);
	  graph->points_to[n] = ecl->labels;
	}
      graph->pointer_label[n] = ecl->equivalence_class;
    }
}
2284 | |
/* Print the pred graph in dot format to FILE, using SI's node mapping
   so only SCC representative nodes appear.  Nodes >= FIRST_REF_NODE
   are the fake "*var" REF nodes.  */

static void
dump_pred_graph (class scc_info *si, FILE *file)
{
  unsigned int i;

  /* Only print the graph if it has already been initialized:  */
  if (!graph)
    return;

  /* Prints the header of the dot file:  */
  fprintf (stream: file, format: "strict digraph {\n" );
  fprintf (stream: file, format: "  node [\n    shape = box\n  ]\n" );
  fprintf (stream: file, format: "  edge [\n    fontsize = \"12\"\n  ]\n" );
  fprintf (stream: file, format: "\n  // List of nodes and complex constraints in "
	   "the constraint graph:\n" );

  /* The next lines print the nodes in the graph together with the
     complex constraints attached to them.  */
  for (i = 1; i < graph->size; i++)
    {
      if (i == FIRST_REF_NODE)
	continue;
      /* Skip nodes collapsed into another representative.  */
      if (si->node_mapping[i] != i)
	continue;
      if (i < FIRST_REF_NODE)
	fprintf (stream: file, format: "\"%s\"" , get_varinfo (n: i)->name);
      else
	fprintf (stream: file, format: "\"*%s\"" , get_varinfo (n: i - FIRST_REF_NODE)->name);
      if (graph->points_to[i]
	  && !bitmap_empty_p (map: graph->points_to[i]))
	{
	  if (i < FIRST_REF_NODE)
	    fprintf (stream: file, format: "[label=\"%s = {" , get_varinfo (n: i)->name);
	  else
	    fprintf (stream: file, format: "[label=\"*%s = {" ,
		     get_varinfo (n: i - FIRST_REF_NODE)->name);
	  unsigned j;
	  bitmap_iterator bi;
	  EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
	    fprintf (stream: file, format: " %d" , j);
	  fprintf (stream: file, format: " }\"]" );
	}
      fprintf (stream: file, format: ";\n" );
    }

  /* Go over the edges.  */
  fprintf (stream: file, format: "\n  // Edges in the constraint graph:\n" );
  for (i = 1; i < graph->size; i++)
    {
      unsigned j;
      bitmap_iterator bi;
      if (si->node_mapping[i] != i)
	continue;
      /* Edges are stored as predecessors, so print from -> i.  */
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
	{
	  unsigned from = si->node_mapping[j];
	  if (from < FIRST_REF_NODE)
	    fprintf (stream: file, format: "\"%s\"" , get_varinfo (n: from)->name);
	  else
	    fprintf (stream: file, format: "\"*%s\"" , get_varinfo (n: from - FIRST_REF_NODE)->name);
	  fprintf (stream: file, format: " -> " );
	  if (i < FIRST_REF_NODE)
	    fprintf (stream: file, format: "\"%s\"" , get_varinfo (n: i)->name);
	  else
	    fprintf (stream: file, format: "\"*%s\"" , get_varinfo (n: i - FIRST_REF_NODE)->name);
	  fprintf (stream: file, format: ";\n" );
	}
    }

  /* Prints the tail of the dot file.  */
  fprintf (stream: file, format: "}\n" );
}
2359 | |
/* Perform offline variable substitution, discovering equivalence
   classes, and eliminating non-pointer variables.  Returns the SCC
   info the caller later passes to rewrite_constraints and
   free_var_substitution_info.  */

static class scc_info *
perform_var_substitution (constraint_graph_t graph)
{
  unsigned int i;
  unsigned int size = graph->size;
  scc_info *si = new scc_info (size);

  bitmap_obstack_initialize (&iteration_obstack);
  gcc_obstack_init (&equiv_class_obstack);
  pointer_equiv_class_table = new hash_table<equiv_class_hasher> (511);
  location_equiv_class_table
    = new hash_table<equiv_class_hasher> (511);
  /* Class ids start at 1; 0 is reserved for "not a pointer".  */
  pointer_equiv_class = 1;
  location_equiv_class = 1;

  /* Condense the nodes, which means to find SCC's, count incoming
     predecessors, and unite nodes in SCC's.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (map: si->visited, bitno: si->node_mapping[i]))
      condense_visit (graph, si, n: si->node_mapping[i]);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (stream: dump_file, format: "\n\n// The constraint graph before var-substitution "
	       "in dot format:\n" );
      dump_pred_graph (si, file: dump_file);
      fprintf (stream: dump_file, format: "\n\n" );
    }

  /* The visited bitmap is reused for the labeling pass.  */
  bitmap_clear (si->visited);
  /* Actually the label the nodes for pointer equivalences  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (map: si->visited, bitno: si->node_mapping[i]))
      label_visit (graph, si, n: si->node_mapping[i]);

  /* Calculate location equivalence labels.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      bitmap pointed_by;
      bitmap_iterator bi;
      unsigned int j;

      if (!graph->pointed_by[i])
	continue;
      pointed_by = BITMAP_ALLOC (obstack: &iteration_obstack);

      /* Translate the pointed-by mapping for pointer equivalence
	 labels.  */
      EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
	{
	  bitmap_set_bit (pointed_by,
			  graph->pointer_label[si->node_mapping[j]]);
	}
      /* The original pointed_by is now dead.  */
      BITMAP_FREE (graph->pointed_by[i]);

      /* Look up the location equivalence label if one exists, or make
	 one otherwise.  */
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (table: location_equiv_class_table, labels: pointed_by);
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = location_equiv_class++;
      else
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (stream: dump_file, format: "Found location equivalence for node %s\n" ,
		     get_varinfo (n: i)->name);
	  /* An existing class already owns an equal bitmap.  */
	  BITMAP_FREE (pointed_by);
	}
      graph->loc_label[i] = ecl->equivalence_class;

    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    for (i = 1; i < FIRST_REF_NODE; i++)
      {
	unsigned j = si->node_mapping[i];
	if (j != i)
	  {
	    fprintf (stream: dump_file, format: "%s node id %d " ,
		     bitmap_bit_p (map: graph->direct_nodes, bitno: i)
		     ? "Direct" : "Indirect" , i);
	    if (i < FIRST_REF_NODE)
	      fprintf (stream: dump_file, format: "\"%s\"" , get_varinfo (n: i)->name);
	    else
	      fprintf (stream: dump_file, format: "\"*%s\"" ,
		       get_varinfo (n: i - FIRST_REF_NODE)->name);
	    fprintf (stream: dump_file, format: " mapped to SCC leader node id %d " , j);
	    if (j < FIRST_REF_NODE)
	      fprintf (stream: dump_file, format: "\"%s\"\n" , get_varinfo (n: j)->name);
	    else
	      fprintf (stream: dump_file, format: "\"*%s\"\n" ,
		       get_varinfo (n: j - FIRST_REF_NODE)->name);
	  }
	else
	  {
	    fprintf (stream: dump_file,
		     format: "Equivalence classes for %s node id %d " ,
		     bitmap_bit_p (map: graph->direct_nodes, bitno: i)
		     ? "direct" : "indirect" , i);
	    if (i < FIRST_REF_NODE)
	      fprintf (stream: dump_file, format: "\"%s\"" , get_varinfo (n: i)->name);
	    else
	      fprintf (stream: dump_file, format: "\"*%s\"" ,
		       get_varinfo (n: i - FIRST_REF_NODE)->name);
	    fprintf (stream: dump_file,
		     format: ": pointer %d, location %d\n" ,
		     graph->pointer_label[i], graph->loc_label[i]);
	  }
      }

  /* Quickly eliminate our non-pointer variables.  */

  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      unsigned int node = si->node_mapping[i];

      /* Pointer label 0 marks a variable proven to never hold a
	 pointer; its edges cannot contribute anything.  */
      if (graph->pointer_label[node] == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (stream: dump_file,
		     format: "%s is a non-pointer variable, eliminating edges.\n" ,
		     get_varinfo (n: node)->name);
	  stats.nonpointer_vars++;
	  clear_edges_for_node (graph, node);
	}
    }

  return si;
}
2493 | |
/* Free information that was only necessary for variable
   substitution.  */

static void
free_var_substitution_info (class scc_info *si)
{
  delete si;
  free (ptr: graph->pointer_label);
  free (ptr: graph->loc_label);
  free (ptr: graph->pointed_by);
  free (ptr: graph->points_to);
  free (ptr: graph->eq_rep);
  sbitmap_free (map: graph->direct_nodes);
  /* Delete the tables before releasing equiv_class_obstack, since
     their entries were allocated from it.  */
  delete pointer_equiv_class_table;
  pointer_equiv_class_table = NULL;
  delete location_equiv_class_table;
  location_equiv_class_table = NULL;
  obstack_free (&equiv_class_obstack, NULL);
  bitmap_obstack_release (&iteration_obstack);
}
2514 | |
/* Return an existing node that is equivalent to NODE, which has
   equivalence class LABEL, if one exists.  Return NODE otherwise.  */

static unsigned int
find_equivalent_node (constraint_graph_t graph,
		      unsigned int node, unsigned int label)
{
  /* If the address version of this variable is unused, we can
     substitute it for anything else with the same label.
     Otherwise, we know the pointers are equivalent, but not the
     locations, and we can unite them later.  */

  if (!bitmap_bit_p (graph->address_taken, node))
    {
      gcc_checking_assert (label < graph->size);

      if (graph->eq_rep[label] != -1)
	{
	  /* Unify the two variables since we know they are equivalent.  */
	  if (unite (to: graph->eq_rep[label], from: node))
	    unify_nodes (graph, to: graph->eq_rep[label], from: node, update_changed: false);
	  return graph->eq_rep[label];
	}
      else
	{
	  /* NODE becomes the representative for LABEL.  */
	  graph->eq_rep[label] = node;
	  graph->pe_rep[label] = node;
	}
    }
  else
    {
      /* Address-taken: record the pointer equivalence only; the
	 actual union happens in unite_pointer_equivalences.  */
      gcc_checking_assert (label < graph->size);
      graph->pe[node] = label;
      if (graph->pe_rep[label] == -1)
	graph->pe_rep[label] = node;
    }

  return node;
}
2554 | |
/* Unite pointer equivalent but not location equivalent nodes in
   GRAPH.  This may only be performed once variable substitution is
   finished.  */

static void
unite_pointer_equivalences (constraint_graph_t graph)
{
  unsigned int i;

  /* Go through the pointer equivalences and unite them to their
     representative, if they aren't already.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      unsigned int label = graph->pe[i];
      /* Label 0 means no pointer equivalence was recorded.  */
      if (label)
	{
	  int label_rep = graph->pe_rep[label];

	  if (label_rep == -1)
	    continue;

	  label_rep = find (node: label_rep);
	  if (label_rep >= 0 && unite (to: label_rep, from: find (node: i)))
	    unify_nodes (graph, to: label_rep, from: i, update_changed: false);
	}
    }
}
2582 | |
/* Move complex constraints to the GRAPH nodes they belong to.  A
   constraint is attached to the node whose solution drives it: the
   dereferenced variable for *x = y / x = *y, the RHS variable for
   offsetted copies.  */

static void
move_complex_constraints (constraint_graph_t graph)
{
  int i;
  constraint_t c;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      /* Entries may have been NULLed out by earlier rewriting.  */
      if (c)
	{
	  struct constraint_expr lhs = c->lhs;
	  struct constraint_expr rhs = c->rhs;

	  if (lhs.type == DEREF)
	    {
	      /* *x = y: driven by x's solution.  */
	      insert_into_complex (graph, var: lhs.var, c);
	    }
	  else if (rhs.type == DEREF)
	    {
	      /* x = *y: driven by y's solution; useless if the LHS is a
		 special var with a fixed solution.  */
	      if (!(get_varinfo (n: lhs.var)->is_special_var))
		insert_into_complex (graph, var: rhs.var, c);
	    }
	  else if (rhs.type != ADDRESSOF && lhs.var > anything_id
		   && (lhs.offset != 0 || rhs.offset != 0))
	    {
	      /* Offsetted scalar copy.  */
	      insert_into_complex (graph, var: rhs.var, c);
	    }
	}
    }
}
2615 | |
2616 | |
/* Optimize and rewrite complex constraints while performing
   collapsing of equivalent nodes.  SI is the SCC_INFO that is the
   result of perform_variable_substitution.  Constraints on provably
   non-pointer variables are dropped (NULLed in the vector).  */

static void
rewrite_constraints (constraint_graph_t graph,
		     class scc_info *si)
{
  int i;
  constraint_t c;

  if (flag_checking)
    {
      /* No unifications may have happened yet.  */
      for (unsigned int j = 0; j < graph->size; j++)
	gcc_assert (find (j) == j);
    }

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = find (node: lhs.var);
      unsigned int rhsvar = find (node: rhs.var);
      unsigned int lhsnode, rhsnode;
      unsigned int lhslabel, rhslabel;

      lhsnode = si->node_mapping[lhsvar];
      rhsnode = si->node_mapping[rhsvar];
      lhslabel = graph->pointer_label[lhsnode];
      rhslabel = graph->pointer_label[rhsnode];

      /* See if it is really a non-pointer variable, and if so, ignore
	 the constraint.  */
      if (lhslabel == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {

	      fprintf (stream: dump_file, format: "%s is a non-pointer variable, "
		       "ignoring constraint:" ,
		       get_varinfo (n: lhs.var)->name);
	      dump_constraint (file: dump_file, c);
	      fprintf (stream: dump_file, format: "\n" );
	    }
	  constraints[i] = NULL;
	  continue;
	}

      if (rhslabel == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {

	      fprintf (stream: dump_file, format: "%s is a non-pointer variable, "
		       "ignoring constraint:" ,
		       get_varinfo (n: rhs.var)->name);
	      dump_constraint (file: dump_file, c);
	      fprintf (stream: dump_file, format: "\n" );
	    }
	  constraints[i] = NULL;
	  continue;
	}

      /* Collapse onto equivalence-class representatives.  */
      lhsvar = find_equivalent_node (graph, node: lhsvar, label: lhslabel);
      rhsvar = find_equivalent_node (graph, node: rhsvar, label: rhslabel);
      c->lhs.var = lhsvar;
      c->rhs.var = rhsvar;
    }
}
2686 | |
/* Eliminate indirect cycles involving NODE.  Return true if NODE was
   part of an SCC, false otherwise.  */

static bool
eliminate_indirect_cycles (unsigned int node)
{
  if (graph->indirect_cycles[node] != -1
      && !bitmap_empty_p (map: get_varinfo (n: node)->solution))
    {
      unsigned int i;
      auto_vec<unsigned> queue;
      int queuepos;
      unsigned int to = find (node: graph->indirect_cycles[node]);
      bitmap_iterator bi;

      /* We can't touch the solution set and call unify_nodes
	 at the same time, because unify_nodes is going to do
	 bitmap unions into it.  */

      /* First pass: record the unions without merging solutions.  */
      EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
	{
	  if (find (node: i) == i && i != to)
	    {
	      if (unite (to, from: i))
		queue.safe_push (obj: i);
	    }
	}

      /* Second pass: merge the queued nodes' solutions into TO.  */
      for (queuepos = 0;
	   queue.iterate (ix: queuepos, ptr: &i);
	   queuepos++)
	{
	  unify_nodes (graph, to, from: i, update_changed: true);
	}
      return true;
    }
  return false;
}
2725 | |
/* Solve the constraint graph GRAPH using our worklist solver.
   This is based on the PW* family of solvers from the "Efficient Field
   Sensitive Pointer Analysis for C" paper.
   It works by iterating over all the graph nodes, processing the complex
   constraints and propagating the copy constraints, until everything stops
   changed.  This corresponds to steps 6-8 in the solving list given above.  */

static void
solve_graph (constraint_graph_t graph)
{
  unsigned int size = graph->size;
  unsigned int i;
  bitmap pts;

  /* Worklist of nodes whose solution changed; the fixpoint is reached
     when it drains empty.  */
  changed = BITMAP_ALLOC (NULL);

  /* Mark all initial non-collapsed nodes as changed.  Only
     representatives with a non-empty solution and something to
     propagate to (successor edges or complex constraints) need to be
     visited at all.  */
  for (i = 1; i < size; i++)
    {
      varinfo_t ivi = get_varinfo (n: i);
      if (find (node: i) == i && !bitmap_empty_p (map: ivi->solution)
	  && ((graph->succs[i] && !bitmap_empty_p (map: graph->succs[i]))
	      || graph->complex[i].length () > 0))
	bitmap_set_bit (changed, i);
    }

  /* Allocate a bitmap to be used to store the changed bits.  */
  pts = BITMAP_ALLOC (obstack: &pta_obstack);

  while (!bitmap_empty_p (map: changed))
    {
      unsigned int i;
      stats.iterations++;

      bitmap_obstack_initialize (&iteration_obstack);

      /* Visit nodes in topological order to maximize how far each
	 propagation travels within a single iteration.  */
      auto_vec<unsigned> topo_order = compute_topo_order (graph);
      while (topo_order.length () != 0)
	{
	  i = topo_order.pop ();

	  /* If this variable is not a representative, skip it.  */
	  if (find (node: i) != i)
	    continue;

	  /* In certain indirect cycle cases, we may merge this
	     variable to another.  */
	  if (eliminate_indirect_cycles (node: i) && find (node: i) != i)
	    continue;

	  /* If the node has changed, we need to process the
	     complex constraints and outgoing edges again.  For complex
	     constraints that modify i itself, like the common group of
	     callarg = callarg + UNKNOWN;
	     callarg = *callarg + UNKNOWN;
	     *callarg = callescape;
	     make sure to iterate immediately because that maximizes
	     cache reuse and expands the graph quickest, leading to
	     better visitation order in the next iteration.  */
	  while (bitmap_clear_bit (changed, i))
	    {
	      unsigned int j;
	      constraint_t c;
	      bitmap solution;
	      vec<constraint_t> complex = graph->complex[i];
	      varinfo_t vi = get_varinfo (n: i);
	      bool solution_empty;

	      /* Compute the changed set of solution bits.  If anything
	         is in the solution just propagate that.  */
	      if (bitmap_bit_p (vi->solution, anything_id))
		{
		  /* If anything is also in the old solution there is
		     nothing to do.
		     ??? But we shouldn't ended up with "changed" set ...  */
		  if (vi->oldsolution
		      && bitmap_bit_p (vi->oldsolution, anything_id))
		    break;
		  bitmap_copy (pts, get_varinfo (n: find (node: anything_id))->solution);
		}
	      else if (vi->oldsolution)
		/* PTS is the delta: bits new since the last visit.  */
		bitmap_and_compl (pts, vi->solution, vi->oldsolution);
	      else
		bitmap_copy (pts, vi->solution);

	      /* No new bits - nothing to propagate.  */
	      if (bitmap_empty_p (map: pts))
		break;

	      /* Remember what we have propagated so the next visit only
		 handles the delta.  The old solution is allocated lazily
		 since many nodes are visited just once.  */
	      if (vi->oldsolution)
		bitmap_ior_into (vi->oldsolution, pts);
	      else
		{
		  vi->oldsolution = BITMAP_ALLOC (obstack: &oldpta_obstack);
		  bitmap_copy (vi->oldsolution, pts);
		}

	      solution = vi->solution;
	      solution_empty = bitmap_empty_p (map: solution);

	      /* Process the complex constraints */
	      bitmap expanded_pts = NULL;
	      FOR_EACH_VEC_ELT (complex, j, c)
		{
		  /* XXX: This is going to unsort the constraints in
		     some cases, which will occasionally add duplicate
		     constraints during unification.  This does not
		     affect correctness.  */
		  c->lhs.var = find (node: c->lhs.var);
		  c->rhs.var = find (node: c->rhs.var);

		  /* The only complex constraint that can change our
		     solution to non-empty, given an empty solution,
		     is a constraint where the lhs side is receiving
		     some set from elsewhere.  */
		  if (!solution_empty || c->lhs.type != DEREF)
		    do_complex_constraint (graph, c, delta: pts, expanded_delta: &expanded_pts);
		}
	      BITMAP_FREE (expanded_pts);

	      solution_empty = bitmap_empty_p (map: solution);

	      if (!solution_empty)
		{
		  bitmap_iterator bi;
		  unsigned eff_escaped_id = find (node: escaped_id);

		  /* Propagate solution to all successors.  TO_REMOVE
		     holds a stale edge discovered in the previous
		     round of the loop; deleting it is deferred so the
		     bitmap is not modified mid-iteration.  */
		  unsigned to_remove = ~0U;
		  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
						0, j, bi)
		    {
		      if (to_remove != ~0U)
			{
			  bitmap_clear_bit (graph->succs[i], to_remove);
			  to_remove = ~0U;
			}
		      unsigned int to = find (node: j);
		      if (to != j)
			{
			  /* Update the succ graph, avoiding duplicate
			     work.  */
			  to_remove = j;
			  if (! bitmap_set_bit (graph->succs[i], to))
			    continue;
			  /* We eventually end up processing 'to' twice
			     as it is undefined whether bitmap iteration
			     iterates over bits set during iteration.
			     Play safe instead of doing tricks.  */
			}
		      /* Don't try to propagate to ourselves.  */
		      if (to == i)
			{
			  to_remove = j;
			  continue;
			}
		      /* Early node unification can lead to edges from
			 escaped - remove them.  */
		      if (i == eff_escaped_id)
			{
			  to_remove = j;
			  if (bitmap_set_bit (get_varinfo (n: to)->solution,
					      escaped_id))
			    bitmap_set_bit (changed, to);
			  continue;
			}

		      /* Re-queue the successor only if its solution grew.  */
		      if (bitmap_ior_into (get_varinfo (n: to)->solution, pts))
			bitmap_set_bit (changed, to);
		    }
		  if (to_remove != ~0U)
		    bitmap_clear_bit (graph->succs[i], to_remove);
		}
	    }
	}
      bitmap_obstack_release (&iteration_obstack);
    }

  BITMAP_FREE (pts);
  BITMAP_FREE (changed);
  bitmap_obstack_release (&oldpta_obstack);
}
2907 | |
2908 | /* Map from trees to variable infos. */ |
2909 | static hash_map<tree, varinfo_t> *vi_for_tree; |
2910 | |
2911 | |
2912 | /* Insert ID as the variable id for tree T in the vi_for_tree map. */ |
2913 | |
2914 | static void |
2915 | insert_vi_for_tree (tree t, varinfo_t vi) |
2916 | { |
2917 | gcc_assert (vi); |
2918 | gcc_assert (!vi_for_tree->put (t, vi)); |
2919 | } |
2920 | |
2921 | /* Find the variable info for tree T in VI_FOR_TREE. If T does not |
2922 | exist in the map, return NULL, otherwise, return the varinfo we found. */ |
2923 | |
2924 | static varinfo_t |
2925 | lookup_vi_for_tree (tree t) |
2926 | { |
2927 | varinfo_t *slot = vi_for_tree->get (k: t); |
2928 | if (slot == NULL) |
2929 | return NULL; |
2930 | |
2931 | return *slot; |
2932 | } |
2933 | |
2934 | /* Return a printable name for DECL */ |
2935 | |
2936 | static const char * |
2937 | alias_get_name (tree decl) |
2938 | { |
2939 | const char *res = "NULL" ; |
2940 | if (dump_file) |
2941 | { |
2942 | char *temp = NULL; |
2943 | if (TREE_CODE (decl) == SSA_NAME) |
2944 | { |
2945 | res = get_name (decl); |
2946 | temp = xasprintf ("%s_%u" , res ? res : "" , SSA_NAME_VERSION (decl)); |
2947 | } |
2948 | else if (HAS_DECL_ASSEMBLER_NAME_P (decl) |
2949 | && DECL_ASSEMBLER_NAME_SET_P (decl)) |
2950 | res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME_RAW (decl)); |
2951 | else if (DECL_P (decl)) |
2952 | { |
2953 | res = get_name (decl); |
2954 | if (!res) |
2955 | temp = xasprintf ("D.%u" , DECL_UID (decl)); |
2956 | } |
2957 | |
2958 | if (temp) |
2959 | { |
2960 | res = ggc_strdup (temp); |
2961 | free (ptr: temp); |
2962 | } |
2963 | } |
2964 | |
2965 | return res; |
2966 | } |
2967 | |
2968 | /* Find the variable id for tree T in the map. |
2969 | If T doesn't exist in the map, create an entry for it and return it. */ |
2970 | |
2971 | static varinfo_t |
2972 | get_vi_for_tree (tree t) |
2973 | { |
2974 | varinfo_t *slot = vi_for_tree->get (k: t); |
2975 | if (slot == NULL) |
2976 | { |
2977 | unsigned int id = create_variable_info_for (t, alias_get_name (decl: t), false); |
2978 | return get_varinfo (n: id); |
2979 | } |
2980 | |
2981 | return *slot; |
2982 | } |
2983 | |
2984 | /* Get a scalar constraint expression for a new temporary variable. */ |
2985 | |
2986 | static struct constraint_expr |
2987 | new_scalar_tmp_constraint_exp (const char *name, bool add_id) |
2988 | { |
2989 | struct constraint_expr tmp; |
2990 | varinfo_t vi; |
2991 | |
2992 | vi = new_var_info (NULL_TREE, name, add_id); |
2993 | vi->offset = 0; |
2994 | vi->size = -1; |
2995 | vi->fullsize = -1; |
2996 | vi->is_full_var = 1; |
2997 | vi->is_reg_var = 1; |
2998 | |
2999 | tmp.var = vi->id; |
3000 | tmp.type = SCALAR; |
3001 | tmp.offset = 0; |
3002 | |
3003 | return tmp; |
3004 | } |
3005 | |
/* Get a constraint expression vector from an SSA_VAR_P node T and
   append it to RESULTS.
   If address_p is true, the result will be taken its address of.  */

static void
get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
{
  struct constraint_expr cexpr;
  varinfo_t vi;

  /* We allow FUNCTION_DECLs here even though it doesn't make much sense.  */
  gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));

  if (TREE_CODE (t) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (t))
    {
      /* For parameters, get at the points-to set for the actual parm
	 decl.  Recurses with the underlying decl.  */
      if (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL)
	{
	  get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
	  return;
	}
      /* For undefined SSA names return nothing.  */
      else if (!ssa_defined_default_def_p (t))
	{
	  cexpr.var = nothing_id;
	  cexpr.type = SCALAR;
	  cexpr.offset = 0;
	  results->safe_push (obj: cexpr);
	  return;
	}
    }

  /* For global variables resort to the alias target.  */
  if (VAR_P (t) && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      varpool_node *node = varpool_node::get (decl: t);
      if (node && node->alias && node->analyzed)
	{
	  node = node->ultimate_alias_target ();
	  /* Canonicalize the PT uid of all aliases to the ultimate target.
	     ??? Hopefully the set of aliases can't change in a way that
	     changes the ultimate alias target.  */
	  gcc_assert ((! DECL_PT_UID_SET_P (node->decl)
		       || DECL_PT_UID (node->decl) == DECL_UID (node->decl))
		      && (! DECL_PT_UID_SET_P (t)
			  || DECL_PT_UID (t) == DECL_UID (node->decl)));
	  DECL_PT_UID (t) = DECL_UID (node->decl);
	  t = node->decl;
	}

      /* If this is decl may bind to NULL note that.  */
      if (address_p
	  && (! node || ! node->nonzero_address ()))
	{
	  cexpr.var = nothing_id;
	  cexpr.type = SCALAR;
	  cexpr.offset = 0;
	  results->safe_push (obj: cexpr);
	}
    }

  vi = get_vi_for_tree (t);
  cexpr.var = vi->id;
  cexpr.type = SCALAR;
  cexpr.offset = 0;

  /* If we are not taking the address of the constraint expr, add all
     sub-fields of the variable as well.  */
  if (!address_p
      && !vi->is_full_var)
    {
      for (; vi; vi = vi_next (vi))
	{
	  cexpr.var = vi->id;
	  results->safe_push (obj: cexpr);
	}
      return;
    }

  results->safe_push (obj: cexpr);
}
3089 | |
/* Process constraint T, performing various simplifications and then
   adding it to our list of overall constraints.  Constraints with two
   memory operands are split via a temporary; the recursion terminates
   because the introduced temporary is always a plain SCALAR.  */

static void
process_constraint (constraint_t t)
{
  struct constraint_expr rhs = t->rhs;
  struct constraint_expr lhs = t->lhs;

  gcc_assert (rhs.var < varmap.length ());
  gcc_assert (lhs.var < varmap.length ());

  /* If we didn't get any useful constraint from the lhs we get
     &ANYTHING as fallback from get_constraint_for.  Deal with
     it here by turning it into *ANYTHING.  */
  if (lhs.type == ADDRESSOF
      && lhs.var == anything_id)
    lhs.type = DEREF;

  /* ADDRESSOF on the lhs is invalid.  */
  gcc_assert (lhs.type != ADDRESSOF);

  /* We shouldn't add constraints from things that cannot have pointers.
     It's not completely trivial to avoid in the callers, so do it here.  */
  if (rhs.type != ADDRESSOF
      && !get_varinfo (n: rhs.var)->may_have_pointers)
    return;

  /* Likewise adding to the solution of a non-pointer var isn't useful.  */
  if (!get_varinfo (n: lhs.var)->may_have_pointers)
    return;

  /* This can happen in our IR with things like n->a = *p */
  if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
    {
      /* Split into tmp = *rhs, *lhs = tmp */
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp (name: "doubledereftmp" , add_id: true);
      process_constraint (t: new_constraint (lhs: tmplhs, rhs));
      process_constraint (t: new_constraint (lhs, rhs: tmplhs));
    }
  else if ((rhs.type != SCALAR || rhs.offset != 0) && lhs.type == DEREF)
    {
      /* Split into tmp = &rhs, *lhs = tmp */
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp (name: "derefaddrtmp" , add_id: true);
      process_constraint (t: new_constraint (lhs: tmplhs, rhs));
      process_constraint (t: new_constraint (lhs, rhs: tmplhs));
    }
  else
    {
      gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
      /* Record address-takenness on the first field of the variable.  */
      if (rhs.type == ADDRESSOF)
	get_varinfo (n: get_varinfo (n: rhs.var)->head)->address_taken = true;
      constraints.safe_push (obj: t);
    }
}
3147 | |
3148 | |
3149 | /* Return the position, in bits, of FIELD_DECL from the beginning of its |
3150 | structure. */ |
3151 | |
3152 | static HOST_WIDE_INT |
3153 | bitpos_of_field (const tree fdecl) |
3154 | { |
3155 | if (!tree_fits_shwi_p (DECL_FIELD_OFFSET (fdecl)) |
3156 | || !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl))) |
3157 | return -1; |
3158 | |
3159 | return (tree_to_shwi (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT |
3160 | + tree_to_shwi (DECL_FIELD_BIT_OFFSET (fdecl))); |
3161 | } |
3162 | |
3163 | |
/* Get constraint expressions for offsetting PTR by OFFSET.  Stores the
   resulting constraint expressions in *RESULTS.  OFFSET may be
   NULL_TREE or non-constant, in which case an unknown offset is
   assumed.  */

static void
get_constraint_for_ptr_offset (tree ptr, tree offset,
			       vec<ce_s> *results)
{
  struct constraint_expr c;
  unsigned int j, n;
  HOST_WIDE_INT rhsoffset;

  /* If we do not do field-sensitive PTA adding offsets to pointers
     does not change the points-to solution.  */
  if (!use_field_sensitive)
    {
      get_constraint_for_rhs (ptr, results);
      return;
    }

  /* If the offset is not a non-negative integer constant that fits
     in a HOST_WIDE_INT, we have to fall back to a conservative
     solution which includes all sub-fields of all pointed-to
     variables of ptr.  */
  if (offset == NULL_TREE
      || TREE_CODE (offset) != INTEGER_CST)
    rhsoffset = UNKNOWN_OFFSET;
  else
    {
      /* Sign-extend the offset.  */
      offset_int soffset = offset_int::from (x: wi::to_wide (t: offset), sgn: SIGNED);
      if (!wi::fits_shwi_p (x: soffset))
	rhsoffset = UNKNOWN_OFFSET;
      else
	{
	  /* Make sure the bit-offset also fits.  */
	  HOST_WIDE_INT rhsunitoffset = soffset.to_shwi ();
	  rhsoffset = rhsunitoffset * (unsigned HOST_WIDE_INT) BITS_PER_UNIT;
	  if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
	    rhsoffset = UNKNOWN_OFFSET;
	}
    }

  get_constraint_for_rhs (ptr, results);
  /* A zero offset leaves the base constraints unchanged.  */
  if (rhsoffset == 0)
    return;

  /* As we are eventually appending to the solution do not use
     vec::iterate here.  */
  n = results->length ();
  for (j = 0; j < n; j++)
    {
      varinfo_t curr;
      c = (*results)[j];
      curr = get_varinfo (n: c.var);

      if (c.type == ADDRESSOF
	  /* If this varinfo represents a full variable just use it.  */
	  && curr->is_full_var)
	;
      else if (c.type == ADDRESSOF
	       /* If we do not know the offset add all subfields.  */
	       && rhsoffset == UNKNOWN_OFFSET)
	{
	  varinfo_t temp = get_varinfo (n: curr->head);
	  do
	    {
	      struct constraint_expr c2;
	      c2.var = temp->id;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      /* The field already in the result is skipped; only its
		 siblings are appended.  */
	      if (c2.var != c.var)
		results->safe_push (obj: c2);
	      temp = vi_next (vi: temp);
	    }
	  while (temp);
	}
      else if (c.type == ADDRESSOF)
	{
	  varinfo_t temp;
	  unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;

	  /* If curr->offset + rhsoffset is less than zero adjust it.  */
	  if (rhsoffset < 0
	      && curr->offset < offset)
	    offset = 0;

	  /* We have to include all fields that overlap the current
	     field shifted by rhsoffset.  And we include at least
	     the last or the first field of the variable to represent
	     reachability of off-bound addresses, in particular &object + 1,
	     conservatively correct.  */
	  temp = first_or_preceding_vi_for_offset (curr, offset);
	  c.var = temp->id;
	  c.offset = 0;
	  temp = vi_next (vi: temp);
	  while (temp
		 && temp->offset < offset + curr->size)
	    {
	      struct constraint_expr c2;
	      c2.var = temp->id;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      results->safe_push (obj: c2);
	      temp = vi_next (vi: temp);
	    }
	}
      else if (c.type == SCALAR)
	{
	  /* For scalars the offset is recorded on the constraint and
	     resolved at solving time.  */
	  gcc_assert (c.offset == 0);
	  c.offset = rhsoffset;
	}
      else
	/* We shouldn't get any DEREFs here.  */
	gcc_unreachable ();

      (*results)[j] = c;
    }
}
3282 | |
3283 | |
/* Given a COMPONENT_REF T, return the constraint_expr vector for it.
   If address_p is true the result will be taken its address of.
   If lhs_p is true then the constraint expression is assumed to be used
   as the lhs.  */

static void
get_constraint_for_component_ref (tree t, vec<ce_s> *results,
				  bool address_p, bool lhs_p)
{
  tree orig_t = t;
  poly_int64 bitsize = -1;
  poly_int64 bitmaxsize = -1;
  poly_int64 bitpos;
  bool reverse;
  tree forzero;

  /* Some people like to do cute things like take the address of
     &0->a.b.  Strip down to the innermost base to detect a constant
     zero there.  */
  forzero = t;
  while (handled_component_p (t: forzero)
	 || INDIRECT_REF_P (forzero)
	 || TREE_CODE (forzero) == MEM_REF)
    forzero = TREE_OPERAND (forzero, 0);

  if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
    {
      struct constraint_expr temp;

      temp.offset = 0;
      temp.var = integer_id;
      temp.type = SCALAR;
      results->safe_push (obj: temp);
      return;
    }

  t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize, &reverse);

  /* We can end up here for component references on a
     VIEW_CONVERT_EXPR <>(&foobar) or things like a
     BIT_FIELD_REF <&MEM[(void *)&b + 4B], ...>.  So for
     symbolic constants simply give up.  */
  if (TREE_CODE (t) == ADDR_EXPR)
    {
      constraint_expr result;
      result.type = SCALAR;
      result.var = anything_id;
      result.offset = 0;
      results->safe_push (obj: result);
      return;
    }

  /* Avoid creating pointer-offset constraints, so handle MEM_REF
     offsets directly.  Pretend to take the address of the base,
     we'll take care of adding the required subset of sub-fields below.  */
  if (TREE_CODE (t) == MEM_REF
      && !integer_zerop (TREE_OPERAND (t, 0)))
    {
      poly_offset_int off = mem_ref_offset (t);
      off <<= LOG2_BITS_PER_UNIT;
      off += bitpos;
      poly_int64 off_hwi;
      if (off.to_shwi (r: &off_hwi))
	bitpos = off_hwi;
      else
	{
	  /* Offset overflowed; treat the access position as unknown.  */
	  bitpos = 0;
	  bitmaxsize = -1;
	}
      get_constraint_for_1 (TREE_OPERAND (t, 0), results, false, lhs_p);
      do_deref (results);
    }
  else
    get_constraint_for_1 (t, results, true, lhs_p);

  /* Strip off nothing_id.  */
  if (results->length () == 2)
    {
      gcc_assert ((*results)[0].var == nothing_id);
      results->unordered_remove (ix: 0);
    }
  gcc_assert (results->length () == 1);
  struct constraint_expr &result = results->last ();

  if (result.type == SCALAR
      && get_varinfo (n: result.var)->is_full_var)
    /* For single-field vars do not bother about the offset.  */
    result.offset = 0;
  else if (result.type == SCALAR)
    {
      /* In languages like C, you can access one past the end of an
	 array.  You aren't allowed to dereference it, so we can
	 ignore this constraint.  When we handle pointer subtraction,
	 we may have to do something cute here.  */

      if (maybe_lt (a: poly_uint64 (bitpos), b: get_varinfo (n: result.var)->fullsize)
	  && maybe_ne (a: bitmaxsize, b: 0))
	{
	  /* It's also not true that the constraint will actually start at the
	     right offset, it may start in some padding.  We only care about
	     setting the constraint to the first actual field it touches, so
	     walk to find it.  */
	  struct constraint_expr cexpr = result;
	  varinfo_t curr;
	  results->pop ();
	  cexpr.offset = 0;
	  /* Push one constraint per field the access overlaps; for an
	     address-of only the first overlapping field is needed.  */
	  for (curr = get_varinfo (n: cexpr.var); curr; curr = vi_next (vi: curr))
	    {
	      if (ranges_maybe_overlap_p (pos1: poly_int64 (curr->offset),
					  size1: curr->size, pos2: bitpos, size2: bitmaxsize))
		{
		  cexpr.var = curr->id;
		  results->safe_push (obj: cexpr);
		  if (address_p)
		    break;
		}
	    }
	  /* If we are going to take the address of this field then
	     to be able to compute reachability correctly add at least
	     the last field of the variable.  */
	  if (address_p && results->length () == 0)
	    {
	      curr = get_varinfo (n: cexpr.var);
	      while (curr->next != 0)
		curr = vi_next (vi: curr);
	      cexpr.var = curr->id;
	      results->safe_push (obj: cexpr);
	    }
	  else if (results->length () == 0)
	    /* Assert that we found *some* field there.  The user couldn't be
	       accessing *only* padding.  */
	    /* Still the user could access one past the end of an array
	       embedded in a struct resulting in accessing *only* padding.  */
	    /* Or accessing only padding via type-punning to a type
	       that has a field just in padding space.  */
	    {
	      cexpr.type = SCALAR;
	      cexpr.var = anything_id;
	      cexpr.offset = 0;
	      results->safe_push (obj: cexpr);
	    }
	}
      else if (known_eq (bitmaxsize, 0))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (stream: dump_file, format: "Access to zero-sized part of variable, "
		     "ignoring\n" );
	}
      else
	if (dump_file && (dump_flags & TDF_DETAILS))
	  fprintf (stream: dump_file, format: "Access to past the end of variable, ignoring\n" );
    }
  else if (result.type == DEREF)
    {
      /* If we do not know exactly where the access goes say so.  Note
	 that only for non-structure accesses we know that we access
	 at most one subfield of any variable.  */
      HOST_WIDE_INT const_bitpos;
      if (!bitpos.is_constant (const_value: &const_bitpos)
	  || const_bitpos == -1
	  || maybe_ne (a: bitsize, b: bitmaxsize)
	  || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
	  || result.offset == UNKNOWN_OFFSET)
	result.offset = UNKNOWN_OFFSET;
      else
	result.offset += const_bitpos;
    }
  else if (result.type == ADDRESSOF)
    {
      /* We can end up here for component references on constants like
	 VIEW_CONVERT_EXPR <>({ 0, 1, 2, 3 })[i].  */
      result.type = SCALAR;
      result.var = anything_id;
      result.offset = 0;
    }
  else
    gcc_unreachable ();
}
3461 | |
3462 | |
3463 | /* Dereference the constraint expression CONS, and return the result. |
3464 | DEREF (ADDRESSOF) = SCALAR |
3465 | DEREF (SCALAR) = DEREF |
3466 | DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp)) |
3467 | This is needed so that we can handle dereferencing DEREF constraints. */ |
3468 | |
3469 | static void |
3470 | do_deref (vec<ce_s> *constraints) |
3471 | { |
3472 | struct constraint_expr *c; |
3473 | unsigned int i = 0; |
3474 | |
3475 | FOR_EACH_VEC_ELT (*constraints, i, c) |
3476 | { |
3477 | if (c->type == SCALAR) |
3478 | c->type = DEREF; |
3479 | else if (c->type == ADDRESSOF) |
3480 | c->type = SCALAR; |
3481 | else if (c->type == DEREF) |
3482 | { |
3483 | struct constraint_expr tmplhs; |
3484 | tmplhs = new_scalar_tmp_constraint_exp (name: "dereftmp" , add_id: true); |
3485 | process_constraint (t: new_constraint (lhs: tmplhs, rhs: *c)); |
3486 | c->var = tmplhs.var; |
3487 | } |
3488 | else |
3489 | gcc_unreachable (); |
3490 | } |
3491 | } |
3492 | |
3493 | /* Given a tree T, return the constraint expression for taking the |
3494 | address of it. */ |
3495 | |
3496 | static void |
3497 | get_constraint_for_address_of (tree t, vec<ce_s> *results) |
3498 | { |
3499 | struct constraint_expr *c; |
3500 | unsigned int i; |
3501 | |
3502 | get_constraint_for_1 (t, results, true, true); |
3503 | |
3504 | FOR_EACH_VEC_ELT (*results, i, c) |
3505 | { |
3506 | if (c->type == DEREF) |
3507 | c->type = SCALAR; |
3508 | else |
3509 | c->type = ADDRESSOF; |
3510 | } |
3511 | } |
3512 | |
/* Given a tree T, return the constraint expression for it in *RESULTS.
   ADDRESS_P is true when the caller will take the address of the
   result; LHS_P is true when T is used as an lhs.  Falls back to
   &ANYTHING for trees it cannot analyze.  */

static void
get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
		      bool lhs_p)
{
  struct constraint_expr temp;

  /* x = integer is all glommed to a single variable, which doesn't
     point to anything by itself.  That is, of course, unless it is an
     integer constant being treated as a pointer, in which case, we
     will return that this is really the addressof anything.  This
     happens below, since it will fall into the default case.  The only
     case we know something about an integer treated like a pointer is
     when it is the NULL pointer, and then we just say it points to
     NULL.

     Do not do that if -fno-delete-null-pointer-checks though, because
     in that case *NULL does not fail, so it _should_ alias *anything.
     It is not worth adding a new option or renaming the existing one,
     since this case is relatively obscure.  */
  if ((TREE_CODE (t) == INTEGER_CST
       && integer_zerop (t))
      /* The only valid CONSTRUCTORs in gimple with pointer typed
	 elements are zero-initializer.  But in IPA mode we also
	 process global initializers, so verify at least.  */
      || (TREE_CODE (t) == CONSTRUCTOR
	  && CONSTRUCTOR_NELTS (t) == 0))
    {
      if (flag_delete_null_pointer_checks)
	temp.var = nothing_id;
      else
	temp.var = nonlocal_id;
      temp.type = ADDRESSOF;
      temp.offset = 0;
      results->safe_push (obj: temp);
      return;
    }

  /* String constants are read-only, ideally we'd have a CONST_DECL
     for those.  */
  if (TREE_CODE (t) == STRING_CST)
    {
      temp.var = string_id;
      temp.type = SCALAR;
      temp.offset = 0;
      results->safe_push (obj: temp);
      return;
    }

  /* Dispatch on the tree code class, then the tree code.  */
  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case tcc_expression:
      {
	switch (TREE_CODE (t))
	  {
	  case ADDR_EXPR:
	    get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
	    return;
	  default:;
	  }
	break;
      }
    case tcc_reference:
      {
	switch (TREE_CODE (t))
	  {
	  case MEM_REF:
	    {
	      struct constraint_expr cs;
	      varinfo_t vi, curr;
	      get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
					     TREE_OPERAND (t, 1), results);
	      do_deref (constraints: results);

	      /* If we are not taking the address then make sure to process
		 all subvariables we might access.  */
	      if (address_p)
		return;

	      cs = results->last ();
	      if (cs.type == DEREF
		  && type_can_have_subvars (TREE_TYPE (t)))
		{
		  /* For dereferences this means we have to defer it
		     to solving time.  */
		  results->last ().offset = UNKNOWN_OFFSET;
		  return;
		}
	      if (cs.type != SCALAR)
		return;

	      /* Append every sub-field the access covers, starting
		 from the field the base constraint refers to.  */
	      vi = get_varinfo (n: cs.var);
	      curr = vi_next (vi);
	      if (!vi->is_full_var
		  && curr)
		{
		  unsigned HOST_WIDE_INT size;
		  if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t))))
		    size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t)));
		  else
		    size = -1;
		  for (; curr; curr = vi_next (vi: curr))
		    {
		      if (curr->offset - vi->offset < size)
			{
			  cs.var = curr->id;
			  results->safe_push (obj: cs);
			}
		      else
			break;
		    }
		}
	      return;
	    }
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	  case COMPONENT_REF:
	  case IMAGPART_EXPR:
	  case REALPART_EXPR:
	  case BIT_FIELD_REF:
	    get_constraint_for_component_ref (t, results, address_p, lhs_p);
	    return;
	  case VIEW_CONVERT_EXPR:
	    get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
				  lhs_p);
	    return;
	  /* We are missing handling for TARGET_MEM_REF here.  */
	  default:;
	  }
	break;
      }
    case tcc_exceptional:
      {
	switch (TREE_CODE (t))
	  {
	  case SSA_NAME:
	    {
	      get_constraint_for_ssa_var (t, results, address_p);
	      return;
	    }
	  case CONSTRUCTOR:
	    {
	      unsigned int i;
	      tree val;
	      auto_vec<ce_s> tmp;
	      /* Collect the constraints of every initializer value.  */
	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
		{
		  struct constraint_expr *rhsp;
		  unsigned j;
		  get_constraint_for_1 (t: val, results: &tmp, address_p, lhs_p);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    results->safe_push (obj: *rhsp);
		  tmp.truncate (size: 0);
		}
	      /* We do not know whether the constructor was complete,
		 so technically we have to add &NOTHING or &ANYTHING
		 like we do for an empty constructor as well.  */
	      return;
	    }
	  default:;
	  }
	break;
      }
    case tcc_declaration:
      {
	get_constraint_for_ssa_var (t, results, address_p);
	return;
      }
    case tcc_constant:
      {
	/* We cannot refer to automatic variables through constants.  */
	temp.type = ADDRESSOF;
	temp.var = nonlocal_id;
	temp.offset = 0;
	results->safe_push (obj: temp);
	return;
      }
    default:;
    }

  /* The default fallback is a constraint from anything.  */
  temp.type = ADDRESSOF;
  temp.var = anything_id;
  temp.offset = 0;
  results->safe_push (obj: temp);
}
3700 | |
3701 | /* Given a gimple tree T, return the constraint expression vector for it. */ |
3702 | |
3703 | static void |
3704 | get_constraint_for (tree t, vec<ce_s> *results) |
3705 | { |
3706 | gcc_assert (results->length () == 0); |
3707 | |
3708 | get_constraint_for_1 (t, results, address_p: false, lhs_p: true); |
3709 | } |
3710 | |
3711 | /* Given a gimple tree T, return the constraint expression vector for it |
3712 | to be used as the rhs of a constraint. */ |
3713 | |
3714 | static void |
3715 | get_constraint_for_rhs (tree t, vec<ce_s> *results) |
3716 | { |
3717 | gcc_assert (results->length () == 0); |
3718 | |
3719 | get_constraint_for_1 (t, results, address_p: false, lhs_p: false); |
3720 | } |
3721 | |
3722 | |
3723 | /* Efficiently generates constraints from all entries in *RHSC to all |
3724 | entries in *LHSC. */ |
3725 | |
3726 | static void |
3727 | process_all_all_constraints (const vec<ce_s> &lhsc, |
3728 | const vec<ce_s> &rhsc) |
3729 | { |
3730 | struct constraint_expr *lhsp, *rhsp; |
3731 | unsigned i, j; |
3732 | |
3733 | if (lhsc.length () <= 1 || rhsc.length () <= 1) |
3734 | { |
3735 | FOR_EACH_VEC_ELT (lhsc, i, lhsp) |
3736 | FOR_EACH_VEC_ELT (rhsc, j, rhsp) |
3737 | process_constraint (t: new_constraint (lhs: *lhsp, rhs: *rhsp)); |
3738 | } |
3739 | else |
3740 | { |
3741 | struct constraint_expr tmp; |
3742 | tmp = new_scalar_tmp_constraint_exp (name: "allalltmp" , add_id: true); |
3743 | FOR_EACH_VEC_ELT (rhsc, i, rhsp) |
3744 | process_constraint (t: new_constraint (lhs: tmp, rhs: *rhsp)); |
3745 | FOR_EACH_VEC_ELT (lhsc, i, lhsp) |
3746 | process_constraint (t: new_constraint (lhs: *lhsp, rhs: tmp)); |
3747 | } |
3748 | } |
3749 | |
/* Handle aggregate copies by expanding into copies of the respective
   fields of the structures.  LHSOP and RHSOP are the gimple operands
   of the aggregate assignment.  */

static void
do_structure_copy (tree lhsop, tree rhsop)
{
  struct constraint_expr *lhsp, *rhsp;
  auto_vec<ce_s> lhsc;
  auto_vec<ce_s> rhsc;
  unsigned j;

  get_constraint_for (t: lhsop, results: &lhsc);
  get_constraint_for_rhs (t: rhsop, results: &rhsc);
  lhsp = &lhsc[0];
  rhsp = &rhsc[0];
  /* If either side is a dereference (or the LHS is completely unknown)
     we cannot match fields pairwise; fall back to an all-to-all copy
     with unknown offsets resolved at solving time.  */
  if (lhsp->type == DEREF
      || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
      || rhsp->type == DEREF)
    {
      if (lhsp->type == DEREF)
	{
	  gcc_assert (lhsc.length () == 1);
	  lhsp->offset = UNKNOWN_OFFSET;
	}
      if (rhsp->type == DEREF)
	{
	  gcc_assert (rhsc.length () == 1);
	  rhsp->offset = UNKNOWN_OFFSET;
	}
      process_all_all_constraints (lhsc, rhsc);
    }
  else if (lhsp->type == SCALAR
	   && (rhsp->type == SCALAR
	       || rhsp->type == ADDRESSOF))
    {
      HOST_WIDE_INT lhssize, lhsoffset;
      HOST_WIDE_INT rhssize, rhsoffset;
      bool reverse;
      unsigned k = 0;
      /* If we cannot determine the access extents of either side,
	 give up on field-wise matching.  */
      if (!get_ref_base_and_extent_hwi (lhsop, &lhsoffset, &lhssize, &reverse)
	  || !get_ref_base_and_extent_hwi (rhsop, &rhsoffset, &rhssize,
					   &reverse))
	{
	  process_all_all_constraints (lhsc, rhsc);
	  return;
	}
      /* Walk the LHS and RHS subfield constraint vectors in lockstep
	 (J indexes LHSC, K indexes RHSC), emitting a copy constraint
	 for each pair of fields whose ranges overlap.  */
      for (j = 0; lhsc.iterate (ix: j, ptr: &lhsp);)
	{
	  varinfo_t lhsv, rhsv;
	  rhsp = &rhsc[k];
	  lhsv = get_varinfo (n: lhsp->var);
	  rhsv = get_varinfo (n: rhsp->var);
	  if (lhsv->may_have_pointers
	      && (lhsv->is_full_var
		  || rhsv->is_full_var
		  || ranges_overlap_p (pos1: lhsv->offset + rhsoffset, size1: lhsv->size,
				       pos2: rhsv->offset + lhsoffset, size2: rhsv->size)))
	    process_constraint (t: new_constraint (lhs: *lhsp, rhs: *rhsp));
	  /* Advance whichever side ends first in the aggregate.  */
	  if (!rhsv->is_full_var
	      && (lhsv->is_full_var
		  || (lhsv->offset + rhsoffset + lhsv->size
		      > rhsv->offset + lhsoffset + rhsv->size)))
	    {
	      ++k;
	      if (k >= rhsc.length ())
		break;
	    }
	  else
	    ++j;
	}
    }
  else
    gcc_unreachable ();
}
3824 | |
3825 | /* Create constraints ID = { rhsc }. */ |
3826 | |
3827 | static void |
3828 | make_constraints_to (unsigned id, const vec<ce_s> &rhsc) |
3829 | { |
3830 | struct constraint_expr *c; |
3831 | struct constraint_expr includes; |
3832 | unsigned int j; |
3833 | |
3834 | includes.var = id; |
3835 | includes.offset = 0; |
3836 | includes.type = SCALAR; |
3837 | |
3838 | FOR_EACH_VEC_ELT (rhsc, j, c) |
3839 | process_constraint (t: new_constraint (lhs: includes, rhs: *c)); |
3840 | } |
3841 | |
/* Create a constraint ID = OP, i.e. make the variable with id ID
   include everything the operand OP may point to.  */

static void
make_constraint_to (unsigned id, tree op)
{
  auto_vec<ce_s> rhsc;
  get_constraint_for_rhs (t: op, results: &rhsc);
  make_constraints_to (id, rhsc);
}
3851 | |
3852 | /* Create a constraint ID = &FROM. */ |
3853 | |
3854 | static void |
3855 | make_constraint_from (varinfo_t vi, int from) |
3856 | { |
3857 | struct constraint_expr lhs, rhs; |
3858 | |
3859 | lhs.var = vi->id; |
3860 | lhs.offset = 0; |
3861 | lhs.type = SCALAR; |
3862 | |
3863 | rhs.var = from; |
3864 | rhs.offset = 0; |
3865 | rhs.type = ADDRESSOF; |
3866 | process_constraint (t: new_constraint (lhs, rhs)); |
3867 | } |
3868 | |
3869 | /* Create a constraint ID = FROM. */ |
3870 | |
3871 | static void |
3872 | make_copy_constraint (varinfo_t vi, int from) |
3873 | { |
3874 | struct constraint_expr lhs, rhs; |
3875 | |
3876 | lhs.var = vi->id; |
3877 | lhs.offset = 0; |
3878 | lhs.type = SCALAR; |
3879 | |
3880 | rhs.var = from; |
3881 | rhs.offset = 0; |
3882 | rhs.type = SCALAR; |
3883 | process_constraint (t: new_constraint (lhs, rhs)); |
3884 | } |
3885 | |
/* Make constraints necessary to make OP escape, i.e. add everything OP
   may point to to the ESCAPED solution.  */

static void
make_escape_constraint (tree op)
{
  make_constraint_to (id: escaped_id, op);
}
3893 | |
3894 | /* Make constraint necessary to make all indirect references |
3895 | from VI escape. */ |
3896 | |
3897 | static void |
3898 | make_indirect_escape_constraint (varinfo_t vi) |
3899 | { |
3900 | struct constraint_expr lhs, rhs; |
3901 | /* escaped = *(VAR + UNKNOWN); */ |
3902 | lhs.type = SCALAR; |
3903 | lhs.var = escaped_id; |
3904 | lhs.offset = 0; |
3905 | rhs.type = DEREF; |
3906 | rhs.var = vi->id; |
3907 | rhs.offset = UNKNOWN_OFFSET; |
3908 | process_constraint (t: new_constraint (lhs, rhs)); |
3909 | } |
3910 | |
3911 | /* Add constraints to that the solution of VI is transitively closed. */ |
3912 | |
3913 | static void |
3914 | make_transitive_closure_constraints (varinfo_t vi) |
3915 | { |
3916 | struct constraint_expr lhs, rhs; |
3917 | |
3918 | /* VAR = *(VAR + UNKNOWN); */ |
3919 | lhs.type = SCALAR; |
3920 | lhs.var = vi->id; |
3921 | lhs.offset = 0; |
3922 | rhs.type = DEREF; |
3923 | rhs.var = vi->id; |
3924 | rhs.offset = UNKNOWN_OFFSET; |
3925 | process_constraint (t: new_constraint (lhs, rhs)); |
3926 | } |
3927 | |
3928 | /* Add constraints to that the solution of VI has all subvariables added. */ |
3929 | |
3930 | static void |
3931 | make_any_offset_constraints (varinfo_t vi) |
3932 | { |
3933 | struct constraint_expr lhs, rhs; |
3934 | |
3935 | /* VAR = VAR + UNKNOWN; */ |
3936 | lhs.type = SCALAR; |
3937 | lhs.var = vi->id; |
3938 | lhs.offset = 0; |
3939 | rhs.type = SCALAR; |
3940 | rhs.var = vi->id; |
3941 | rhs.offset = UNKNOWN_OFFSET; |
3942 | process_constraint (t: new_constraint (lhs, rhs)); |
3943 | } |
3944 | |
/* Temporary storage for the fake VAR_DECLs created by
   build_fake_var_decl.  */
struct obstack fake_var_decl_obstack;
3947 | |
/* Build a fake VAR_DECL of type TYPE acting as referrer to a DECL_UID.
   The decl is carved out of fake_var_decl_obstack rather than GC
   memory and gets a fresh DECL_UID but no PT-UID (set to -1).  */

static tree
build_fake_var_decl (tree type)
{
  tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
  /* Zero the whole node before setting any fields.  */
  memset (s: decl, c: 0, n: sizeof (struct tree_var_decl));
  /* The tree code must be set before the accessor macros below are
     used on the node.  */
  TREE_SET_CODE (decl, VAR_DECL);
  TREE_TYPE (decl) = type;
  DECL_UID (decl) = allocate_decl_uid ();
  SET_DECL_PT_UID (decl, -1);
  layout_decl (decl, 0);
  return decl;
}
3962 | |
3963 | /* Create a new artificial heap variable with NAME. |
3964 | Return the created variable. */ |
3965 | |
3966 | static varinfo_t |
3967 | make_heapvar (const char *name, bool add_id) |
3968 | { |
3969 | varinfo_t vi; |
3970 | tree heapvar; |
3971 | |
3972 | heapvar = build_fake_var_decl (ptr_type_node); |
3973 | DECL_EXTERNAL (heapvar) = 1; |
3974 | |
3975 | vi = new_var_info (t: heapvar, name, add_id); |
3976 | vi->is_heap_var = true; |
3977 | vi->is_unknown_size_var = true; |
3978 | vi->offset = 0; |
3979 | vi->fullsize = ~0; |
3980 | vi->size = ~0; |
3981 | vi->is_full_var = true; |
3982 | insert_vi_for_tree (t: heapvar, vi); |
3983 | |
3984 | return vi; |
3985 | } |
3986 | |
3987 | /* Create a new artificial heap variable with NAME and make a |
3988 | constraint from it to LHS. Set flags according to a tag used |
3989 | for tracking restrict pointers. */ |
3990 | |
3991 | static varinfo_t |
3992 | make_constraint_from_restrict (varinfo_t lhs, const char *name, bool add_id) |
3993 | { |
3994 | varinfo_t vi = make_heapvar (name, add_id); |
3995 | vi->is_restrict_var = 1; |
3996 | vi->is_global_var = 1; |
3997 | vi->may_have_pointers = 1; |
3998 | make_constraint_from (vi: lhs, from: vi->id); |
3999 | return vi; |
4000 | } |
4001 | |
/* Create a new artificial heap variable with NAME and make a
   constraint from it to LHS.  Set flags according to a tag used
   for tracking restrict pointers and make the artificial heap
   point to global memory (via a copy from NONLOCAL).  */

static varinfo_t
make_constraint_from_global_restrict (varinfo_t lhs, const char *name,
				      bool add_id)
{
  varinfo_t vi = make_constraint_from_restrict (lhs, name, add_id);
  make_copy_constraint (vi, from: nonlocal_id);
  return vi;
}
4015 | |
/* In IPA mode there are varinfos for different aspects of each
   function designator: one for the points-to set of the return
   value, one for the variables that are clobbered by the function,
   one for its uses and one for each parameter (including a single
   glob for the remaining variadic arguments).  */
4021 | |
/* Offsets of the parts within a function varinfo, used with
   get_function_part_constraint; parameter I lives at
   fi_parm_base + I (see find_func_aliases_for_call_arg).  */
enum { fi_clobbers = 1, fi_uses = 2,
       fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
4024 | |
4025 | /* Get a constraint for the requested part of a function designator FI |
4026 | when operating in IPA mode. */ |
4027 | |
4028 | static struct constraint_expr |
4029 | get_function_part_constraint (varinfo_t fi, unsigned part) |
4030 | { |
4031 | struct constraint_expr c; |
4032 | |
4033 | gcc_assert (in_ipa_mode); |
4034 | |
4035 | if (fi->id == anything_id) |
4036 | { |
4037 | /* ??? We probably should have a ANYFN special variable. */ |
4038 | c.var = anything_id; |
4039 | c.offset = 0; |
4040 | c.type = SCALAR; |
4041 | } |
4042 | else if (fi->decl && TREE_CODE (fi->decl) == FUNCTION_DECL) |
4043 | { |
4044 | varinfo_t ai = first_vi_for_offset (fi, part); |
4045 | if (ai) |
4046 | c.var = ai->id; |
4047 | else |
4048 | c.var = anything_id; |
4049 | c.offset = 0; |
4050 | c.type = SCALAR; |
4051 | } |
4052 | else |
4053 | { |
4054 | c.var = fi->id; |
4055 | c.offset = part; |
4056 | c.type = DEREF; |
4057 | } |
4058 | |
4059 | return c; |
4060 | } |
4061 | |
4062 | /* Produce constraints for argument ARG of call STMT with eaf flags |
4063 | FLAGS. RESULTS is array holding constraints for return value. |
4064 | CALLESCAPE_ID is variable where call loocal escapes are added. |
4065 | WRITES_GLOVEL_MEMORY is true if callee may write global memory. */ |
4066 | |
4067 | static void |
4068 | handle_call_arg (gcall *stmt, tree arg, vec<ce_s> *results, int flags, |
4069 | int callescape_id, bool writes_global_memory) |
4070 | { |
4071 | int relevant_indirect_flags = EAF_NO_INDIRECT_CLOBBER | EAF_NO_INDIRECT_READ |
4072 | | EAF_NO_INDIRECT_ESCAPE; |
4073 | int relevant_flags = relevant_indirect_flags |
4074 | | EAF_NO_DIRECT_CLOBBER |
4075 | | EAF_NO_DIRECT_READ |
4076 | | EAF_NO_DIRECT_ESCAPE; |
4077 | if (gimple_call_lhs (gs: stmt)) |
4078 | { |
4079 | relevant_flags |= EAF_NOT_RETURNED_DIRECTLY | EAF_NOT_RETURNED_INDIRECTLY; |
4080 | relevant_indirect_flags |= EAF_NOT_RETURNED_INDIRECTLY; |
4081 | |
4082 | /* If value is never read from it can not be returned indirectly |
4083 | (except through the escape solution). |
4084 | For all flags we get these implications right except for |
4085 | not_returned because we miss return functions in ipa-prop. */ |
4086 | |
4087 | if (flags & EAF_NO_DIRECT_READ) |
4088 | flags |= EAF_NOT_RETURNED_INDIRECTLY; |
4089 | } |
4090 | |
4091 | /* If the argument is not used we can ignore it. |
4092 | Similarly argument is invisile for us if it not clobbered, does not |
4093 | escape, is not read and can not be returned. */ |
4094 | if ((flags & EAF_UNUSED) || ((flags & relevant_flags) == relevant_flags)) |
4095 | return; |
4096 | |
4097 | /* Produce varinfo for direct accesses to ARG. */ |
4098 | varinfo_t tem = new_var_info (NULL_TREE, name: "callarg" , add_id: true); |
4099 | tem->is_reg_var = true; |
4100 | make_constraint_to (id: tem->id, op: arg); |
4101 | make_any_offset_constraints (vi: tem); |
4102 | |
4103 | bool callarg_transitive = false; |
4104 | |
4105 | /* As an compile time optimization if we make no difference between |
4106 | direct and indirect accesses make arg transitively closed. |
4107 | This avoids the need to build indir arg and do everything twice. */ |
4108 | if (((flags & EAF_NO_INDIRECT_CLOBBER) != 0) |
4109 | == ((flags & EAF_NO_DIRECT_CLOBBER) != 0) |
4110 | && (((flags & EAF_NO_INDIRECT_READ) != 0) |
4111 | == ((flags & EAF_NO_DIRECT_READ) != 0)) |
4112 | && (((flags & EAF_NO_INDIRECT_ESCAPE) != 0) |
4113 | == ((flags & EAF_NO_DIRECT_ESCAPE) != 0)) |
4114 | && (((flags & EAF_NOT_RETURNED_INDIRECTLY) != 0) |
4115 | == ((flags & EAF_NOT_RETURNED_DIRECTLY) != 0))) |
4116 | { |
4117 | make_transitive_closure_constraints (vi: tem); |
4118 | callarg_transitive = true; |
4119 | gcc_checking_assert (!(flags & EAF_NO_DIRECT_READ)); |
4120 | } |
4121 | |
4122 | /* If necessary, produce varinfo for indirect accesses to ARG. */ |
4123 | varinfo_t indir_tem = NULL; |
4124 | if (!callarg_transitive |
4125 | && (flags & relevant_indirect_flags) != relevant_indirect_flags) |
4126 | { |
4127 | struct constraint_expr lhs, rhs; |
4128 | indir_tem = new_var_info (NULL_TREE, name: "indircallarg" , add_id: true); |
4129 | indir_tem->is_reg_var = true; |
4130 | |
4131 | /* indir_term = *tem. */ |
4132 | lhs.type = SCALAR; |
4133 | lhs.var = indir_tem->id; |
4134 | lhs.offset = 0; |
4135 | |
4136 | rhs.type = DEREF; |
4137 | rhs.var = tem->id; |
4138 | rhs.offset = UNKNOWN_OFFSET; |
4139 | process_constraint (t: new_constraint (lhs, rhs)); |
4140 | |
4141 | make_any_offset_constraints (vi: indir_tem); |
4142 | |
4143 | /* If we do not read indirectly there is no need for transitive closure. |
4144 | We know there is only one level of indirection. */ |
4145 | if (!(flags & EAF_NO_INDIRECT_READ)) |
4146 | make_transitive_closure_constraints (vi: indir_tem); |
4147 | gcc_checking_assert (!(flags & EAF_NO_DIRECT_READ)); |
4148 | } |
4149 | |
4150 | if (gimple_call_lhs (gs: stmt)) |
4151 | { |
4152 | if (!(flags & EAF_NOT_RETURNED_DIRECTLY)) |
4153 | { |
4154 | struct constraint_expr cexpr; |
4155 | cexpr.var = tem->id; |
4156 | cexpr.type = SCALAR; |
4157 | cexpr.offset = 0; |
4158 | results->safe_push (obj: cexpr); |
4159 | } |
4160 | if (!callarg_transitive & !(flags & EAF_NOT_RETURNED_INDIRECTLY)) |
4161 | { |
4162 | struct constraint_expr cexpr; |
4163 | cexpr.var = indir_tem->id; |
4164 | cexpr.type = SCALAR; |
4165 | cexpr.offset = 0; |
4166 | results->safe_push (obj: cexpr); |
4167 | } |
4168 | } |
4169 | |
4170 | if (!(flags & EAF_NO_DIRECT_READ)) |
4171 | { |
4172 | varinfo_t uses = get_call_use_vi (call: stmt); |
4173 | make_copy_constraint (vi: uses, from: tem->id); |
4174 | if (!callarg_transitive & !(flags & EAF_NO_INDIRECT_READ)) |
4175 | make_copy_constraint (vi: uses, from: indir_tem->id); |
4176 | } |
4177 | else |
4178 | /* To read indirectly we need to read directly. */ |
4179 | gcc_checking_assert (flags & EAF_NO_INDIRECT_READ); |
4180 | |
4181 | if (!(flags & EAF_NO_DIRECT_CLOBBER)) |
4182 | { |
4183 | struct constraint_expr lhs, rhs; |
4184 | |
4185 | /* *arg = callescape. */ |
4186 | lhs.type = DEREF; |
4187 | lhs.var = tem->id; |
4188 | lhs.offset = 0; |
4189 | |
4190 | rhs.type = SCALAR; |
4191 | rhs.var = callescape_id; |
4192 | rhs.offset = 0; |
4193 | process_constraint (t: new_constraint (lhs, rhs)); |
4194 | |
4195 | /* callclobbered = arg. */ |
4196 | make_copy_constraint (vi: get_call_clobber_vi (call: stmt), from: tem->id); |
4197 | } |
4198 | if (!callarg_transitive & !(flags & EAF_NO_INDIRECT_CLOBBER)) |
4199 | { |
4200 | struct constraint_expr lhs, rhs; |
4201 | |
4202 | /* *indir_arg = callescape. */ |
4203 | lhs.type = DEREF; |
4204 | lhs.var = indir_tem->id; |
4205 | lhs.offset = 0; |
4206 | |
4207 | rhs.type = SCALAR; |
4208 | rhs.var = callescape_id; |
4209 | rhs.offset = 0; |
4210 | process_constraint (t: new_constraint (lhs, rhs)); |
4211 | |
4212 | /* callclobbered = indir_arg. */ |
4213 | make_copy_constraint (vi: get_call_clobber_vi (call: stmt), from: indir_tem->id); |
4214 | } |
4215 | |
4216 | if (!(flags & (EAF_NO_DIRECT_ESCAPE | EAF_NO_INDIRECT_ESCAPE))) |
4217 | { |
4218 | struct constraint_expr lhs, rhs; |
4219 | |
4220 | /* callescape = arg; */ |
4221 | lhs.var = callescape_id; |
4222 | lhs.offset = 0; |
4223 | lhs.type = SCALAR; |
4224 | |
4225 | rhs.var = tem->id; |
4226 | rhs.offset = 0; |
4227 | rhs.type = SCALAR; |
4228 | process_constraint (t: new_constraint (lhs, rhs)); |
4229 | |
4230 | if (writes_global_memory) |
4231 | make_escape_constraint (op: arg); |
4232 | } |
4233 | else if (!callarg_transitive & !(flags & EAF_NO_INDIRECT_ESCAPE)) |
4234 | { |
4235 | struct constraint_expr lhs, rhs; |
4236 | |
4237 | /* callescape = *(indir_arg + UNKNOWN); */ |
4238 | lhs.var = callescape_id; |
4239 | lhs.offset = 0; |
4240 | lhs.type = SCALAR; |
4241 | |
4242 | rhs.var = indir_tem->id; |
4243 | rhs.offset = 0; |
4244 | rhs.type = SCALAR; |
4245 | process_constraint (t: new_constraint (lhs, rhs)); |
4246 | |
4247 | if (writes_global_memory) |
4248 | make_indirect_escape_constraint (vi: tem); |
4249 | } |
4250 | } |
4251 | |
/* Determine global memory access of call STMT and update
   WRITES_GLOBAL_MEMORY, READS_GLOBAL_MEMORY and USES_GLOBAL_MEMORY.
   Each pointer may be NULL when that kind of access is not of
   interest; the flags pointed to are only ever narrowed (set to
   false), first from the callee's modref summary and then from its
   fnspec, if available.  */

static void
determine_global_memory_access (gcall *stmt,
				bool *writes_global_memory,
				bool *reads_global_memory,
				bool *uses_global_memory)
{
  tree callee;
  cgraph_node *node;
  modref_summary *summary;

  /* We need to determine reads to set uses.  */
  gcc_assert (!uses_global_memory || reads_global_memory);

  if ((callee = gimple_call_fndecl (gs: stmt)) != NULL_TREE
      && (node = cgraph_node::get (decl: callee)) != NULL
      && (summary = get_modref_function_summary (func: node)))
    {
      if (writes_global_memory && *writes_global_memory)
	*writes_global_memory = summary->global_memory_written;
      if (reads_global_memory && *reads_global_memory)
	*reads_global_memory = summary->global_memory_read;
      /* Uses can only be dropped when the callee provably does not
	 read global memory and cannot be interposed.  */
      if (reads_global_memory && uses_global_memory
	  && !summary->calls_interposable
	  && !*reads_global_memory && node->binds_to_current_def_p ())
	*uses_global_memory = false;
    }
  if ((writes_global_memory && *writes_global_memory)
      || (uses_global_memory && *uses_global_memory)
      || (reads_global_memory && *reads_global_memory))
    {
      attr_fnspec fnspec = gimple_call_fnspec (stmt);
      if (fnspec.known_p ())
	{
	  if (writes_global_memory
	      && !fnspec.global_memory_written_p ())
	    *writes_global_memory = false;
	  if (reads_global_memory && !fnspec.global_memory_read_p ())
	    {
	      *reads_global_memory = false;
	      if (uses_global_memory)
		*uses_global_memory = false;
	    }
	}
    }
}
4300 | |
/* For non-IPA mode, generate constraints necessary for a call on the
   RHS and collect return value constraints to RESULTS to be used later
   in handle_lhs_call.

   IMPLICIT_EAF_FLAGS are added to each function argument.  If
   WRITES_GLOBAL_MEMORY is true the function is assumed to possibly
   write to global memory.  Similar for READS_GLOBAL_MEMORY.  */

static void
handle_rhs_call (gcall *stmt, vec<ce_s> *results,
		 int implicit_eaf_flags,
		 bool writes_global_memory,
		 bool reads_global_memory)
{
  /* Narrow the global memory flags using modref/fnspec info about
     the callee, if available.  */
  determine_global_memory_access (stmt, writes_global_memory: &writes_global_memory,
				  reads_global_memory: &reads_global_memory,
				  NULL);

  /* CALLESCAPE collects everything that escapes locally into this
     call; the individual argument handling below feeds it.  */
  varinfo_t callescape = new_var_info (NULL_TREE, name: "callescape" , add_id: true);

  /* If function can use global memory, add it to callescape
     and to possible return values.  If not we can still use/return addresses
     of global symbols.  */
  struct constraint_expr lhs, rhs;

  lhs.type = SCALAR;
  lhs.var = callescape->id;
  lhs.offset = 0;

  rhs.type = reads_global_memory ? SCALAR : ADDRESSOF;
  rhs.var = nonlocal_id;
  rhs.offset = 0;

  process_constraint (t: new_constraint (lhs, rhs));
  results->safe_push (obj: rhs);

  /* Everything that escapes into the call is also used by it.  */
  varinfo_t uses = get_call_use_vi (call: stmt);
  make_copy_constraint (vi: uses, from: callescape->id);

  /* Handle each actual argument according to its EAF flags.  */
  for (unsigned i = 0; i < gimple_call_num_args (gs: stmt); ++i)
    {
      tree arg = gimple_call_arg (gs: stmt, index: i);
      int flags = gimple_call_arg_flags (stmt, i);
      handle_call_arg (stmt, arg, results,
		       flags: flags | implicit_eaf_flags,
		       callescape_id: callescape->id, writes_global_memory);
    }

  /* The static chain escapes as well.  */
  if (gimple_call_chain (gs: stmt))
    handle_call_arg (stmt, arg: gimple_call_chain (gs: stmt), results,
		     flags: implicit_eaf_flags
		     | gimple_call_static_chain_flags (stmt),
		     callescape_id: callescape->id, writes_global_memory);

  /* And if we applied NRV the address of the return slot escapes as well.  */
  if (gimple_call_return_slot_opt_p (s: stmt)
      && gimple_call_lhs (gs: stmt) != NULL_TREE
      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
    {
      int flags = gimple_call_retslot_flags (stmt);
      const int relevant_flags = EAF_NO_DIRECT_ESCAPE
				 | EAF_NOT_RETURNED_DIRECTLY;

      if (!(flags & EAF_UNUSED) && (flags & relevant_flags) != relevant_flags)
	{
	  auto_vec<ce_s> tmpc;

	  get_constraint_for_address_of (t: gimple_call_lhs (gs: stmt), results: &tmpc);

	  if (!(flags & EAF_NO_DIRECT_ESCAPE))
	    {
	      make_constraints_to (id: callescape->id, rhsc: tmpc);
	      if (writes_global_memory)
		make_constraints_to (id: escaped_id, rhsc: tmpc);
	    }
	  if (!(flags & EAF_NOT_RETURNED_DIRECTLY))
	    {
	      struct constraint_expr *c;
	      unsigned i;
	      FOR_EACH_VEC_ELT (tmpc, i, c)
		results->safe_push (obj: *c);
	    }
	}
    }
}
4387 | |
/* For non-IPA mode, generate constraints necessary for a call
   that returns a pointer and assigns it to LHS.  This simply makes
   the LHS point to global and escaped variables.  FLAGS are the
   ERF_* return flags of the callee, RHSC the return value
   constraints collected by handle_rhs_call, and FNDECL the callee
   decl if known.  */

static void
handle_lhs_call (gcall *stmt, tree lhs, int flags, vec<ce_s> &rhsc,
		 tree fndecl)
{
  auto_vec<ce_s> lhsc;

  get_constraint_for (t: lhs, results: &lhsc);
  /* If the store is to a global decl make sure to
     add proper escape constraints.  */
  lhs = get_base_address (t: lhs);
  if (lhs
      && DECL_P (lhs)
      && is_global_var (t: lhs))
    {
      struct constraint_expr tmpc;
      tmpc.var = escaped_id;
      tmpc.offset = 0;
      tmpc.type = SCALAR;
      lhsc.safe_push (obj: tmpc);
    }

  /* If the call returns an argument unmodified override the rhs
     constraints.  */
  if (flags & ERF_RETURNS_ARG
      && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (gs: stmt))
    {
      tree arg;
      rhsc.truncate (size: 0);
      arg = gimple_call_arg (gs: stmt, index: flags & ERF_RETURN_ARG_MASK);
      get_constraint_for (t: arg, results: &rhsc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.truncate (size: 0);
    }
  else if (flags & ERF_NOALIAS)
    {
      /* A noalias return points to a fresh heap variable instead.  */
      varinfo_t vi;
      struct constraint_expr tmpc;
      rhsc.truncate (size: 0);
      vi = make_heapvar (name: "HEAP" , add_id: true);
      /* We are marking allocated storage local, we deal with it becoming
	 global by escaping and setting of vars_contains_escaped_heap.  */
      DECL_EXTERNAL (vi->decl) = 0;
      vi->is_global_var = 0;
      /* If this is not a real malloc call assume the memory was
	 initialized and thus may point to global memory.  All
	 builtin functions with the malloc attribute behave in a sane way.  */
      if (!fndecl
	  || !fndecl_built_in_p (node: fndecl, klass: BUILT_IN_NORMAL))
	make_constraint_from (vi, from: nonlocal_id);
      tmpc.var = vi->id;
      tmpc.offset = 0;
      tmpc.type = ADDRESSOF;
      rhsc.safe_push (obj: tmpc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.truncate (size: 0);
    }
  else
    process_all_all_constraints (lhsc, rhsc);
}
4451 | |
4452 | |
4453 | /* Return the varinfo for the callee of CALL. */ |
4454 | |
4455 | static varinfo_t |
4456 | get_fi_for_callee (gcall *call) |
4457 | { |
4458 | tree decl, fn = gimple_call_fn (gs: call); |
4459 | |
4460 | if (fn && TREE_CODE (fn) == OBJ_TYPE_REF) |
4461 | fn = OBJ_TYPE_REF_EXPR (fn); |
4462 | |
4463 | /* If we can directly resolve the function being called, do so. |
4464 | Otherwise, it must be some sort of indirect expression that |
4465 | we should still be able to handle. */ |
4466 | decl = gimple_call_addr_fndecl (fn); |
4467 | if (decl) |
4468 | return get_vi_for_tree (t: decl); |
4469 | |
4470 | /* If the function is anything other than a SSA name pointer we have no |
4471 | clue and should be getting ANYFN (well, ANYTHING for now). */ |
4472 | if (!fn || TREE_CODE (fn) != SSA_NAME) |
4473 | return get_varinfo (n: anything_id); |
4474 | |
4475 | if (SSA_NAME_IS_DEFAULT_DEF (fn) |
4476 | && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL |
4477 | || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL)) |
4478 | fn = SSA_NAME_VAR (fn); |
4479 | |
4480 | return get_vi_for_tree (t: fn); |
4481 | } |
4482 | |
4483 | /* Create constraints for assigning call argument ARG to the incoming parameter |
4484 | INDEX of function FI. */ |
4485 | |
4486 | static void |
4487 | find_func_aliases_for_call_arg (varinfo_t fi, unsigned index, tree arg) |
4488 | { |
4489 | struct constraint_expr lhs; |
4490 | lhs = get_function_part_constraint (fi, part: fi_parm_base + index); |
4491 | |
4492 | auto_vec<ce_s, 2> rhsc; |
4493 | get_constraint_for_rhs (t: arg, results: &rhsc); |
4494 | |
4495 | unsigned j; |
4496 | struct constraint_expr *rhsp; |
4497 | FOR_EACH_VEC_ELT (rhsc, j, rhsp) |
4498 | process_constraint (t: new_constraint (lhs, rhs: *rhsp)); |
4499 | } |
4500 | |
4501 | /* Return true if FNDECL may be part of another lto partition. */ |
4502 | |
4503 | static bool |
4504 | fndecl_maybe_in_other_partition (tree fndecl) |
4505 | { |
4506 | cgraph_node *fn_node = cgraph_node::get (decl: fndecl); |
4507 | if (fn_node == NULL) |
4508 | return true; |
4509 | |
4510 | return fn_node->in_other_partition; |
4511 | } |
4512 | |
4513 | /* Create constraints for the builtin call T. Return true if the call |
4514 | was handled, otherwise false. */ |
4515 | |
static bool
find_func_aliases_for_builtin_call (struct function *fn, gcall *t)
{
  tree fndecl = gimple_call_fndecl (gs: t);
  /* Scratch constraint-expression vectors shared by the individual
     cases below; cases that emit several constraint sets truncate
     them back to zero between uses.  */
  auto_vec<ce_s, 2> lhsc;
  auto_vec<ce_s, 4> rhsc;
  varinfo_t fi;

  if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
    /* ??? All builtins that are handled here need to be handled
       in the alias-oracle query functions explicitly!  */
    switch (DECL_FUNCTION_CODE (decl: fndecl))
      {
      /* All the following functions return a pointer to the same object
	 as their first argument points to.  The functions do not add
	 to the ESCAPED solution.  The functions make the first argument
	 pointed to memory point to what the second argument pointed to
	 memory points to.  */
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_BCOPY:
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMPCPY:
      case BUILT_IN_STPCPY:
      case BUILT_IN_STPNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMPCPY_CHK:
      case BUILT_IN_STPCPY_CHK:
      case BUILT_IN_STPNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
      case BUILT_IN_TM_MEMCPY:
      case BUILT_IN_TM_MEMMOVE:
	{
	  tree res = gimple_call_lhs (gs: t);
	  /* bcopy takes its arguments in (src, dest) order, the reverse
	     of the memcpy family.  */
	  tree dest = gimple_call_arg (gs: t, index: (DECL_FUNCTION_CODE (decl: fndecl)
					   == BUILT_IN_BCOPY ? 1 : 0));
	  tree src = gimple_call_arg (gs: t, index: (DECL_FUNCTION_CODE (decl: fndecl)
					  == BUILT_IN_BCOPY ? 0 : 1));
	  if (res != NULL_TREE)
	    {
	      get_constraint_for (t: res, results: &lhsc);
	      /* mempcpy / stpcpy / stpncpy (and their _chk variants)
		 return DEST plus some offset, so model the result as
		 DEST at an unknown offset; the others return DEST
		 itself.  */
	      if (DECL_FUNCTION_CODE (decl: fndecl) == BUILT_IN_MEMPCPY
		  || DECL_FUNCTION_CODE (decl: fndecl) == BUILT_IN_STPCPY
		  || DECL_FUNCTION_CODE (decl: fndecl) == BUILT_IN_STPNCPY
		  || DECL_FUNCTION_CODE (decl: fndecl) == BUILT_IN_MEMPCPY_CHK
		  || DECL_FUNCTION_CODE (decl: fndecl) == BUILT_IN_STPCPY_CHK
		  || DECL_FUNCTION_CODE (decl: fndecl) == BUILT_IN_STPNCPY_CHK)
		get_constraint_for_ptr_offset (ptr: dest, NULL_TREE, results: &rhsc);
	      else
		get_constraint_for (t: dest, results: &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	      lhsc.truncate (size: 0);
	      rhsc.truncate (size: 0);
	    }
	  /* *DEST = *SRC at arbitrary offsets on both sides.  */
	  get_constraint_for_ptr_offset (ptr: dest, NULL_TREE, results: &lhsc);
	  get_constraint_for_ptr_offset (ptr: src, NULL_TREE, results: &rhsc);
	  do_deref (constraints: &lhsc);
	  do_deref (constraints: &rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      case BUILT_IN_MEMSET:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_TM_MEMSET:
	{
	  tree res = gimple_call_lhs (gs: t);
	  tree dest = gimple_call_arg (gs: t, index: 0);
	  unsigned i;
	  ce_s *lhsp;
	  struct constraint_expr ac;
	  if (res != NULL_TREE)
	    {
	      /* The result aliases the destination.  */
	      get_constraint_for (t: res, results: &lhsc);
	      get_constraint_for (t: dest, results: &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	      lhsc.truncate (size: 0);
	    }
	  get_constraint_for_ptr_offset (ptr: dest, NULL_TREE, results: &lhsc);
	  do_deref (constraints: &lhsc);
	  /* A zero fill cannot produce valid pointers when NULL-pointer
	     checks may be deleted, so the destination then points to
	     nothing; otherwise conservatively treat the fill byte as an
	     arbitrary integer value.  */
	  if (flag_delete_null_pointer_checks
	      && integer_zerop (gimple_call_arg (gs: t, index: 1)))
	    {
	      ac.type = ADDRESSOF;
	      ac.var = nothing_id;
	    }
	  else
	    {
	      ac.type = SCALAR;
	      ac.var = integer_id;
	    }
	  ac.offset = 0;
	  FOR_EACH_VEC_ELT (lhsc, i, lhsp)
	    process_constraint (t: new_constraint (lhs: *lhsp, rhs: ac));
	  return true;
	}
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Nothing interesting happens.  */
	return true;
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX:
	{
	  tree ptr = gimple_call_lhs (gs: t);
	  if (ptr == NULL_TREE)
	    return true;
	  get_constraint_for (t: ptr, results: &lhsc);
	  varinfo_t vi = make_heapvar (name: "HEAP", add_id: true);
	  /* Alloca storage is never global.  To exempt it from escaped
	     handling make it a non-heap var.  */
	  DECL_EXTERNAL (vi->decl) = 0;
	  vi->is_global_var = 0;
	  vi->is_heap_var = 0;
	  struct constraint_expr tmpc;
	  tmpc.var = vi->id;
	  tmpc.offset = 0;
	  tmpc.type = ADDRESSOF;
	  rhsc.safe_push (obj: tmpc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      case BUILT_IN_POSIX_MEMALIGN:
	{
	  /* posix_memalign stores a pointer to fresh storage through
	     its first (pointer-to-pointer) argument.  */
	  tree ptrptr = gimple_call_arg (gs: t, index: 0);
	  get_constraint_for (t: ptrptr, results: &lhsc);
	  do_deref (constraints: &lhsc);
	  varinfo_t vi = make_heapvar (name: "HEAP", add_id: true);
	  /* We are marking allocated storage local, we deal with it becoming
	     global by escaping and setting of vars_contains_escaped_heap.  */
	  DECL_EXTERNAL (vi->decl) = 0;
	  vi->is_global_var = 0;
	  struct constraint_expr tmpc;
	  tmpc.var = vi->id;
	  tmpc.offset = 0;
	  tmpc.type = ADDRESSOF;
	  rhsc.safe_push (obj: tmpc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      case BUILT_IN_ASSUME_ALIGNED:
	{
	  /* The result simply aliases the first argument.  */
	  tree res = gimple_call_lhs (gs: t);
	  tree dest = gimple_call_arg (gs: t, index: 0);
	  if (res != NULL_TREE)
	    {
	      get_constraint_for (t: res, results: &lhsc);
	      get_constraint_for (t: dest, results: &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	    }
	  return true;
	}
      /* All the following functions do not return pointers, do not
	 modify the points-to sets of memory reachable from their
	 arguments and do not add to the ESCAPED solution.  */
      case BUILT_IN_SINCOS:
      case BUILT_IN_SINCOSF:
      case BUILT_IN_SINCOSL:
      case BUILT_IN_FREXP:
      case BUILT_IN_FREXPF:
      case BUILT_IN_FREXPL:
      case BUILT_IN_GAMMA_R:
      case BUILT_IN_GAMMAF_R:
      case BUILT_IN_GAMMAL_R:
      case BUILT_IN_LGAMMA_R:
      case BUILT_IN_LGAMMAF_R:
      case BUILT_IN_LGAMMAL_R:
      case BUILT_IN_MODF:
      case BUILT_IN_MODFF:
      case BUILT_IN_MODFL:
      case BUILT_IN_REMQUO:
      case BUILT_IN_REMQUOF:
      case BUILT_IN_REMQUOL:
      case BUILT_IN_FREE:
	return true;
      case BUILT_IN_STRDUP:
      case BUILT_IN_STRNDUP:
      case BUILT_IN_REALLOC:
	if (gimple_call_lhs (gs: t))
	  {
	    /* Model the fresh allocation for the result, then copy the
	       pointed-to contents from the source argument.  */
	    auto_vec<ce_s> rhsc;
	    handle_lhs_call (stmt: t, lhs: gimple_call_lhs (gs: t),
			     flags: gimple_call_return_flags (t) | ERF_NOALIAS,
			     rhsc, fndecl);
	    get_constraint_for_ptr_offset (ptr: gimple_call_lhs (gs: t),
					   NULL_TREE, results: &lhsc);
	    get_constraint_for_ptr_offset (ptr: gimple_call_arg (gs: t, index: 0),
					   NULL_TREE, results: &rhsc);
	    do_deref (constraints: &lhsc);
	    do_deref (constraints: &rhsc);
	    process_all_all_constraints (lhsc, rhsc);
	    lhsc.truncate (size: 0);
	    rhsc.truncate (size: 0);
	    /* For realloc the resulting pointer can be equal to the
	       argument as well.  But only doing this wouldn't be
	       correct because with ptr == 0 realloc behaves like malloc.  */
	    if (DECL_FUNCTION_CODE (decl: fndecl) == BUILT_IN_REALLOC)
	      {
		get_constraint_for (t: gimple_call_lhs (gs: t), results: &lhsc);
		get_constraint_for (t: gimple_call_arg (gs: t, index: 0), results: &rhsc);
		process_all_all_constraints (lhsc, rhsc);
	      }
	    return true;
	  }
	break;
      /* String / character search functions return a pointer into the
	 source string or NULL.  */
      case BUILT_IN_INDEX:
      case BUILT_IN_STRCHR:
      case BUILT_IN_STRRCHR:
      case BUILT_IN_MEMCHR:
      case BUILT_IN_STRSTR:
      case BUILT_IN_STRPBRK:
	if (gimple_call_lhs (gs: t))
	  {
	    tree src = gimple_call_arg (gs: t, index: 0);
	    get_constraint_for_ptr_offset (ptr: src, NULL_TREE, results: &rhsc);
	    /* Also allow the result to be NULL.  */
	    constraint_expr nul;
	    nul.var = nothing_id;
	    nul.offset = 0;
	    nul.type = ADDRESSOF;
	    rhsc.safe_push (obj: nul);
	    get_constraint_for (t: gimple_call_lhs (gs: t), results: &lhsc);
	    process_all_all_constraints (lhsc, rhsc);
	  }
	return true;
      /* Pure functions that return something not based on any object and
	 that use the memory pointed to by their arguments (but not
	 transitively).  */
      case BUILT_IN_STRCMP:
      case BUILT_IN_STRCMP_EQ:
      case BUILT_IN_STRNCMP:
      case BUILT_IN_STRNCMP_EQ:
      case BUILT_IN_STRCASECMP:
      case BUILT_IN_STRNCASECMP:
      case BUILT_IN_MEMCMP:
      case BUILT_IN_BCMP:
      case BUILT_IN_STRSPN:
      case BUILT_IN_STRCSPN:
	{
	  varinfo_t uses = get_call_use_vi (call: t);
	  make_any_offset_constraints (vi: uses);
	  make_constraint_to (id: uses->id, op: gimple_call_arg (gs: t, index: 0));
	  make_constraint_to (id: uses->id, op: gimple_call_arg (gs: t, index: 1));
	  /* No constraints are necessary for the return value.  */
	  return true;
	}
      case BUILT_IN_STRLEN:
	{
	  varinfo_t uses = get_call_use_vi (call: t);
	  make_any_offset_constraints (vi: uses);
	  make_constraint_to (id: uses->id, op: gimple_call_arg (gs: t, index: 0));
	  /* No constraints are necessary for the return value.  */
	  return true;
	}
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_CONSTANT_P:
	{
	  /* No constraints are necessary for the return value or the
	     arguments.  */
	  return true;
	}
      /* Trampolines are special - they set up passing the static
	 frame.  */
      case BUILT_IN_INIT_TRAMPOLINE:
	{
	  tree tramp = gimple_call_arg (gs: t, index: 0);
	  tree nfunc = gimple_call_arg (gs: t, index: 1);
	  tree frame = gimple_call_arg (gs: t, index: 2);
	  unsigned i;
	  struct constraint_expr lhs, *rhsp;
	  if (in_ipa_mode)
	    {
	      varinfo_t nfi = NULL;
	      gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
	      nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
	      if (nfi)
		{
		  /* The static chain of the nested function points to
		     the frame.  */
		  lhs = get_function_part_constraint (fi: nfi, part: fi_static_chain);
		  get_constraint_for (t: frame, results: &rhsc);
		  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		    process_constraint (t: new_constraint (lhs, rhs: *rhsp));
		  rhsc.truncate (size: 0);

		  /* Make the frame point to the function for
		     the trampoline adjustment call.  */
		  get_constraint_for (t: tramp, results: &lhsc);
		  do_deref (constraints: &lhsc);
		  get_constraint_for (t: nfunc, results: &rhsc);
		  process_all_all_constraints (lhsc, rhsc);

		  return true;
		}
	    }
	  /* Else fallthru to generic handling which will let
	     the frame escape.  */
	  break;
	}
      case BUILT_IN_ADJUST_TRAMPOLINE:
	{
	  tree tramp = gimple_call_arg (gs: t, index: 0);
	  tree res = gimple_call_lhs (gs: t);
	  if (in_ipa_mode && res)
	    {
	      /* The result points to the function the trampoline's
		 frame was made to point to above.  */
	      get_constraint_for (t: res, results: &lhsc);
	      get_constraint_for (t: tramp, results: &rhsc);
	      do_deref (constraints: &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	    }
	  return true;
	}
      CASE_BUILT_IN_TM_STORE (1):
      CASE_BUILT_IN_TM_STORE (2):
      CASE_BUILT_IN_TM_STORE (4):
      CASE_BUILT_IN_TM_STORE (8):
      CASE_BUILT_IN_TM_STORE (FLOAT):
      CASE_BUILT_IN_TM_STORE (DOUBLE):
      CASE_BUILT_IN_TM_STORE (LDOUBLE):
      CASE_BUILT_IN_TM_STORE (M64):
      CASE_BUILT_IN_TM_STORE (M128):
      CASE_BUILT_IN_TM_STORE (M256):
	{
	  /* TM stores are *ADDR = SRC.  */
	  tree addr = gimple_call_arg (gs: t, index: 0);
	  tree src = gimple_call_arg (gs: t, index: 1);

	  get_constraint_for (t: addr, results: &lhsc);
	  do_deref (constraints: &lhsc);
	  get_constraint_for (t: src, results: &rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      CASE_BUILT_IN_TM_LOAD (1):
      CASE_BUILT_IN_TM_LOAD (2):
      CASE_BUILT_IN_TM_LOAD (4):
      CASE_BUILT_IN_TM_LOAD (8):
      CASE_BUILT_IN_TM_LOAD (FLOAT):
      CASE_BUILT_IN_TM_LOAD (DOUBLE):
      CASE_BUILT_IN_TM_LOAD (LDOUBLE):
      CASE_BUILT_IN_TM_LOAD (M64):
      CASE_BUILT_IN_TM_LOAD (M128):
      CASE_BUILT_IN_TM_LOAD (M256):
	{
	  /* TM loads are DEST = *ADDR.  */
	  tree dest = gimple_call_lhs (gs: t);
	  tree addr = gimple_call_arg (gs: t, index: 0);

	  get_constraint_for (t: dest, results: &lhsc);
	  get_constraint_for (t: addr, results: &rhsc);
	  do_deref (constraints: &rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      /* Variadic argument handling needs to be handled in IPA
	 mode as well.  */
      case BUILT_IN_VA_START:
	{
	  tree valist = gimple_call_arg (gs: t, index: 0);
	  struct constraint_expr rhs, *lhsp;
	  unsigned i;
	  get_constraint_for_ptr_offset (ptr: valist, NULL_TREE, results: &lhsc);
	  do_deref (constraints: &lhsc);
	  /* The va_list gets access to pointers in variadic
	     arguments.  Which we know in the case of IPA analysis
	     and otherwise are just all nonlocal variables.  */
	  if (in_ipa_mode)
	    {
	      fi = lookup_vi_for_tree (t: fn->decl);
	      rhs = get_function_part_constraint (fi, part: ~0);
	      rhs.type = ADDRESSOF;
	    }
	  else
	    {
	      rhs.var = nonlocal_id;
	      rhs.type = ADDRESSOF;
	      rhs.offset = 0;
	    }
	  FOR_EACH_VEC_ELT (lhsc, i, lhsp)
	    process_constraint (t: new_constraint (lhs: *lhsp, rhs));
	  /* va_list is clobbered.  */
	  make_constraint_to (id: get_call_clobber_vi (call: t)->id, op: valist);
	  return true;
	}
      /* va_end doesn't have any effect that matters.  */
      case BUILT_IN_VA_END:
	return true;
      /* Alternate return.  Simply give up for now.  */
      case BUILT_IN_RETURN:
	{
	  fi = NULL;
	  if (!in_ipa_mode
	      || !(fi = get_vi_for_tree (t: fn->decl)))
	    make_constraint_from (vi: get_varinfo (n: escaped_id), from: anything_id);
	  else if (in_ipa_mode
		   && fi != NULL)
	    {
	      struct constraint_expr lhs, rhs;
	      lhs = get_function_part_constraint (fi, part: fi_result);
	      rhs.var = anything_id;
	      rhs.offset = 0;
	      rhs.type = SCALAR;
	      process_constraint (t: new_constraint (lhs, rhs));
	    }
	  return true;
	}
      case BUILT_IN_GOMP_PARALLEL:
      case BUILT_IN_GOACC_PARALLEL:
	{
	  if (in_ipa_mode)
	    {
	      unsigned int fnpos, argpos;
	      switch (DECL_FUNCTION_CODE (decl: fndecl))
		{
		case BUILT_IN_GOMP_PARALLEL:
		  /* __builtin_GOMP_parallel (fn, data, num_threads, flags).  */
		  fnpos = 0;
		  argpos = 1;
		  break;
		case BUILT_IN_GOACC_PARALLEL:
		  /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs,
					       sizes, kinds, ...).  */
		  fnpos = 1;
		  argpos = 3;
		  break;
		default:
		  gcc_unreachable ();
		}

	      tree fnarg = gimple_call_arg (gs: t, index: fnpos);
	      gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
	      tree fndecl = TREE_OPERAND (fnarg, 0);
	      if (fndecl_maybe_in_other_partition (fndecl))
		/* Fallthru to general call handling.  */
		break;

	      /* The data argument is passed as the first parameter of
		 the outlined function.  */
	      tree arg = gimple_call_arg (gs: t, index: argpos);

	      varinfo_t fi = get_vi_for_tree (t: fndecl);
	      find_func_aliases_for_call_arg (fi, index: 0, arg);
	      return true;
	    }
	  /* Else fallthru to generic call handling.  */
	  break;
	}
      /* printf-style functions may have hooks to set pointers to
	 point to somewhere into the generated string.  Leave them
	 for a later exercise...  */
      default:
	/* Fallthru to general call handling.  */;
      }

  /* Not a builtin we handle specially; the caller applies the generic
     call handling.  */
  return false;
}
4973 | |
4974 | /* Create constraints for the call T. */ |
4975 | |
static void
find_func_aliases_for_call (struct function *fn, gcall *t)
{
  tree fndecl = gimple_call_fndecl (gs: t);
  varinfo_t fi;

  /* First give the specialized builtin handler a chance; it returns
     true if it generated all necessary constraints.  */
  if (fndecl != NULL_TREE
      && fndecl_built_in_p (node: fndecl)
      && find_func_aliases_for_builtin_call (fn, t))
    return;

  /* IFN_DEFERRED_INIT calls generate no points-to constraints.  */
  if (gimple_call_internal_p (gs: t, fn: IFN_DEFERRED_INIT))
    return;

  /* Two modeling strategies: outside of IPA mode (or when we have no
     usable function info for the callee) model the call via its ECF
     flags; otherwise wire arguments and result to the callee's
     function info directly.  */
  fi = get_fi_for_callee (call: t);
  if (!in_ipa_mode
      || (fi->decl && fndecl && !fi->is_fn_info))
    {
      auto_vec<ce_s, 16> rhsc;
      int flags = gimple_call_flags (t);

      /* Const functions can return their arguments and addresses
	 of global memory but not of escaped memory.  */
      if (flags & (ECF_CONST|ECF_NOVOPS))
	{
	  if (gimple_call_lhs (gs: t))
	    handle_rhs_call (stmt: t, results: &rhsc, implicit_eaf_flags: implicit_const_eaf_flags, writes_global_memory: false, reads_global_memory: false);
	}
      /* Pure functions can return addresses in and of memory
	 reachable from their arguments, but they are not an escape
	 point for reachable memory of their arguments.  */
      else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
	handle_rhs_call (stmt: t, results: &rhsc, implicit_eaf_flags: implicit_pure_eaf_flags, writes_global_memory: false, reads_global_memory: true);
      /* If the call is to a replaceable operator delete and results
	 from a delete expression as opposed to a direct call to
	 such operator, then the effects for PTA (in particular
	 the escaping of the pointer) can be ignored.  */
      else if (fndecl
	       && DECL_IS_OPERATOR_DELETE_P (fndecl)
	       && gimple_call_from_new_or_delete (s: t))
	;
      else
	handle_rhs_call (stmt: t, results: &rhsc, implicit_eaf_flags: 0, writes_global_memory: true, reads_global_memory: true);
      if (gimple_call_lhs (gs: t))
	handle_lhs_call (stmt: t, lhs: gimple_call_lhs (gs: t),
			 flags: gimple_call_return_flags (t), rhsc, fndecl);
    }
  else
    {
      /* IPA mode with function info for the callee.  */
      auto_vec<ce_s, 2> rhsc;
      tree lhsop;
      unsigned j;

      /* Assign all the passed arguments to the appropriate incoming
	 parameters of the function.  */
      for (j = 0; j < gimple_call_num_args (gs: t); j++)
	{
	  tree arg = gimple_call_arg (gs: t, index: j);
	  find_func_aliases_for_call_arg (fi, index: j, arg);
	}

      /* If we are returning a value, assign it to the result.  */
      lhsop = gimple_call_lhs (gs: t);
      if (lhsop)
	{
	  auto_vec<ce_s, 2> lhsc;
	  struct constraint_expr rhs;
	  struct constraint_expr *lhsp;
	  bool aggr_p = aggregate_value_p (lhsop, gimple_call_fntype (gs: t));

	  get_constraint_for (t: lhsop, results: &lhsc);
	  rhs = get_function_part_constraint (fi, part: fi_result);
	  if (aggr_p)
	    {
	      /* The result slot is passed by reference, so dereference
		 the result constraint once.  */
	      auto_vec<ce_s, 2> tem;
	      tem.quick_push (obj: rhs);
	      do_deref (constraints: &tem);
	      gcc_checking_assert (tem.length () == 1);
	      rhs = tem[0];
	    }
	  FOR_EACH_VEC_ELT (lhsc, j, lhsp)
	    process_constraint (t: new_constraint (lhs: *lhsp, rhs));

	  /* If we pass the result decl by reference, honor that.  */
	  if (aggr_p)
	    {
	      struct constraint_expr lhs;
	      struct constraint_expr *rhsp;

	      get_constraint_for_address_of (t: lhsop, results: &rhsc);
	      lhs = get_function_part_constraint (fi, part: fi_result);
	      FOR_EACH_VEC_ELT (rhsc, j, rhsp)
		process_constraint (t: new_constraint (lhs, rhs: *rhsp));
	      rhsc.truncate (size: 0);
	    }
	}

      /* If we use a static chain, pass it along.  */
      if (gimple_call_chain (gs: t))
	{
	  struct constraint_expr lhs;
	  struct constraint_expr *rhsp;

	  get_constraint_for (t: gimple_call_chain (gs: t), results: &rhsc);
	  lhs = get_function_part_constraint (fi, part: fi_static_chain);
	  FOR_EACH_VEC_ELT (rhsc, j, rhsp)
	    process_constraint (t: new_constraint (lhs, rhs: *rhsp));
	}
    }
}
5086 | |
/* Walk statement ORIGT of function FN setting up aliasing constraints
   according to the references found in it.  This function is the main
   part of the constraint builder.  It dispatches on the statement kind:
   PHIs, calls, assignments, returns and asms.  */

static void
find_func_aliases (struct function *fn, gimple *origt)
{
  gimple *t = origt;
  auto_vec<ce_s, 16> lhsc;
  auto_vec<ce_s, 16> rhsc;
  varinfo_t fi;

  /* Now build constraints expressions.  */
  if (gimple_code (g: t) == GIMPLE_PHI)
    {
      /* For a phi node, assign all the arguments to
	 the result.  */
      get_constraint_for (t: gimple_phi_result (gs: t), results: &lhsc);
      for (unsigned i = 0; i < gimple_phi_num_args (gs: t); i++)
	{
	  get_constraint_for_rhs (t: gimple_phi_arg_def (gs: t, index: i), results: &rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  rhsc.truncate (size: 0);
	}
    }
  /* In IPA mode, we need to generate constraints to pass call
     arguments through their calls.   There are two cases,
     either a GIMPLE_CALL returning a value, or just a plain
     GIMPLE_CALL when we are not.

     In non-ipa mode, we need to generate constraints for each
     pointer passed by address.  */
  else if (is_gimple_call (gs: t))
    find_func_aliases_for_call (fn, t: as_a <gcall *> (p: t));

  /* Otherwise, just a regular assignment statement.  Only care about
     operations with pointer result, others are dealt with as escape
     points if they have pointer operands.  */
  else if (is_gimple_assign (gs: t))
    {
      /* Otherwise, just a regular assignment statement.  */
      tree lhsop = gimple_assign_lhs (gs: t);
      tree rhsop = (gimple_num_ops (gs: t) == 2) ? gimple_assign_rhs1 (gs: t) : NULL;

      if (rhsop && TREE_CLOBBER_P (rhsop))
	/* Ignore clobbers, they don't actually store anything into
	   the LHS.  */
	;
      else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
	do_structure_copy (lhsop, rhsop);
      else
	{
	  enum tree_code code = gimple_assign_rhs_code (gs: t);

	  get_constraint_for (t: lhsop, results: &lhsc);

	  if (code == POINTER_PLUS_EXPR)
	    get_constraint_for_ptr_offset (ptr: gimple_assign_rhs1 (gs: t),
					   offset: gimple_assign_rhs2 (gs: t), results: &rhsc);
	  else if (code == POINTER_DIFF_EXPR)
	    /* The result is not a pointer (part).  */
	    ;
	  else if (code == BIT_AND_EXPR
		   && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
	    {
	      /* Aligning a pointer via a BIT_AND_EXPR is offsetting
		 the pointer.  Handle it by offsetting it by UNKNOWN.  */
	      get_constraint_for_ptr_offset (ptr: gimple_assign_rhs1 (gs: t),
					     NULL_TREE, results: &rhsc);
	    }
	  else if (code == TRUNC_DIV_EXPR
		   || code == CEIL_DIV_EXPR
		   || code == FLOOR_DIV_EXPR
		   || code == ROUND_DIV_EXPR
		   || code == EXACT_DIV_EXPR
		   || code == TRUNC_MOD_EXPR
		   || code == CEIL_MOD_EXPR
		   || code == FLOOR_MOD_EXPR
		   || code == ROUND_MOD_EXPR)
	    /* Division and modulo transfer the pointer from the LHS.  */
	    get_constraint_for_ptr_offset (ptr: gimple_assign_rhs1 (gs: t),
					   NULL_TREE, results: &rhsc);
	  else if (CONVERT_EXPR_CODE_P (code)
		   || gimple_assign_single_p (gs: t))
	    /* See through conversions, single RHS are handled by
	       get_constraint_for_rhs.  */
	    get_constraint_for_rhs (t: rhsop, results: &rhsc);
	  else if (code == COND_EXPR)
	    {
	      /* The result is a merge of both COND_EXPR arms.  */
	      auto_vec<ce_s, 2> tmp;
	      struct constraint_expr *rhsp;
	      unsigned i;
	      get_constraint_for_rhs (t: gimple_assign_rhs2 (gs: t), results: &rhsc);
	      get_constraint_for_rhs (t: gimple_assign_rhs3 (gs: t), results: &tmp);
	      FOR_EACH_VEC_ELT (tmp, i, rhsp)
		rhsc.safe_push (obj: *rhsp);
	    }
	  else if (truth_value_p (code))
	    /* Truth value results are not pointer (parts).  Or at least
	       very unreasonable obfuscation of a part.  */
	    ;
	  else
	    {
	      /* All other operations are possibly offsetting merges.  */
	      auto_vec<ce_s, 4> tmp;
	      struct constraint_expr *rhsp;
	      unsigned i, j;
	      get_constraint_for_ptr_offset (ptr: gimple_assign_rhs1 (gs: t),
					     NULL_TREE, results: &rhsc);
	      for (i = 2; i < gimple_num_ops (gs: t); ++i)
		{
		  get_constraint_for_ptr_offset (ptr: gimple_op (gs: t, i),
						 NULL_TREE, results: &tmp);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    rhsc.safe_push (obj: *rhsp);
		  tmp.truncate (size: 0);
		}
	    }
	  process_all_all_constraints (lhsc, rhsc);
	}
      /* If there is a store to a global variable the rhs escapes.  */
      if ((lhsop = get_base_address (t: lhsop)) != NULL_TREE
	  && DECL_P (lhsop))
	{
	  varinfo_t vi = get_vi_for_tree (t: lhsop);
	  if ((! in_ipa_mode && vi->is_global_var)
	      || vi->is_ipa_escape_point)
	    make_escape_constraint (op: rhsop);
	}
    }
  /* Handle escapes through return.  */
  else if (gimple_code (g: t) == GIMPLE_RETURN
	   && gimple_return_retval (gs: as_a <greturn *> (p: t)) != NULL_TREE)
    {
      greturn *return_stmt = as_a <greturn *> (p: t);
      fi = NULL;
      if (!in_ipa_mode
	  && SSA_VAR_P (gimple_return_retval (return_stmt)))
	{
	  /* We handle simple returns by post-processing the solutions.  */
	  ;
	}
      if (!(fi = get_vi_for_tree (t: fn->decl)))
	make_escape_constraint (op: gimple_return_retval (gs: return_stmt));
      else if (in_ipa_mode)
	{
	  /* Assign the return value to the function's result part.  */
	  struct constraint_expr lhs ;
	  struct constraint_expr *rhsp;
	  unsigned i;

	  lhs = get_function_part_constraint (fi, part: fi_result);
	  get_constraint_for_rhs (t: gimple_return_retval (gs: return_stmt), results: &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (t: new_constraint (lhs, rhs: *rhsp));
	}
    }
  /* Handle asms conservatively by adding escape constraints to everything.  */
  else if (gasm *asm_stmt = dyn_cast <gasm *> (p: t))
    {
      unsigned i, noutputs;
      const char **oconstraints;
      const char *constraint;
      bool allows_mem, allows_reg, is_inout;

      noutputs = gimple_asm_noutputs (asm_stmt);
      oconstraints = XALLOCAVEC (const char *, noutputs);

      for (i = 0; i < noutputs; ++i)
	{
	  tree link = gimple_asm_output_op (asm_stmt, index: i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	  oconstraints[i] = constraint;
	  parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
				   &allows_reg, &is_inout);

	  /* A memory constraint makes the address of the operand escape.  */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));

	  /* The asm may read global memory, so outputs may point to
	     any global memory.  */
	  if (op)
	    {
	      auto_vec<ce_s, 2> lhsc;
	      struct constraint_expr rhsc, *lhsp;
	      unsigned j;
	      get_constraint_for (t: op, results: &lhsc);
	      rhsc.var = nonlocal_id;
	      rhsc.offset = 0;
	      rhsc.type = SCALAR;
	      FOR_EACH_VEC_ELT (lhsc, j, lhsp)
		process_constraint (t: new_constraint (lhs: *lhsp, rhs: rhsc));
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	{
	  tree link = gimple_asm_input_op (asm_stmt, index: i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));

	  parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
				  &allows_mem, &allows_reg);

	  /* A memory constraint makes the address of the operand escape.  */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));
	  /* Strictly we'd only need the constraint to ESCAPED if
	     the asm clobbers memory, otherwise using something
	     along the lines of per-call clobbers/uses would be enough.  */
	  else if (op)
	    make_escape_constraint (op);
	}
    }
}
5306 | |
5307 | |
5308 | /* Create a constraint adding to the clobber set of FI the memory |
5309 | pointed to by PTR. */ |
5310 | |
5311 | static void |
5312 | process_ipa_clobber (varinfo_t fi, tree ptr) |
5313 | { |
5314 | vec<ce_s> ptrc = vNULL; |
5315 | struct constraint_expr *c, lhs; |
5316 | unsigned i; |
5317 | get_constraint_for_rhs (t: ptr, results: &ptrc); |
5318 | lhs = get_function_part_constraint (fi, part: fi_clobbers); |
5319 | FOR_EACH_VEC_ELT (ptrc, i, c) |
5320 | process_constraint (t: new_constraint (lhs, rhs: *c)); |
5321 | ptrc.release (); |
5322 | } |
5323 | |
5324 | /* Walk statement T setting up clobber and use constraints according to the |
5325 | references found in T. This function is a main part of the |
5326 | IPA constraint builder. */ |
5327 | |
5328 | static void |
5329 | find_func_clobbers (struct function *fn, gimple *origt) |
5330 | { |
5331 | gimple *t = origt; |
5332 | auto_vec<ce_s, 16> lhsc; |
5333 | auto_vec<ce_s, 16> rhsc; |
5334 | varinfo_t fi; |
5335 | |
5336 | /* Add constraints for clobbered/used in IPA mode. |
5337 | We are not interested in what automatic variables are clobbered |
5338 | or used as we only use the information in the caller to which |
5339 | they do not escape. */ |
5340 | gcc_assert (in_ipa_mode); |
5341 | |
5342 | /* If the stmt refers to memory in any way it better had a VUSE. */ |
5343 | if (gimple_vuse (g: t) == NULL_TREE) |
5344 | return; |
5345 | |
5346 | /* We'd better have function information for the current function. */ |
5347 | fi = lookup_vi_for_tree (t: fn->decl); |
5348 | gcc_assert (fi != NULL); |
5349 | |
5350 | /* Account for stores in assignments and calls. */ |
5351 | if (gimple_vdef (g: t) != NULL_TREE |
5352 | && gimple_has_lhs (stmt: t)) |
5353 | { |
5354 | tree lhs = gimple_get_lhs (t); |
5355 | tree tem = lhs; |
5356 | while (handled_component_p (t: tem)) |
5357 | tem = TREE_OPERAND (tem, 0); |
5358 | if ((DECL_P (tem) |
5359 | && !auto_var_in_fn_p (tem, fn->decl)) |
5360 | || INDIRECT_REF_P (tem) |
5361 | || (TREE_CODE (tem) == MEM_REF |
5362 | && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR |
5363 | && auto_var_in_fn_p |
5364 | (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl)))) |
5365 | { |
5366 | struct constraint_expr lhsc, *rhsp; |
5367 | unsigned i; |
5368 | lhsc = get_function_part_constraint (fi, part: fi_clobbers); |
5369 | get_constraint_for_address_of (t: lhs, results: &rhsc); |
5370 | FOR_EACH_VEC_ELT (rhsc, i, rhsp) |
5371 | process_constraint (t: new_constraint (lhs: lhsc, rhs: *rhsp)); |
5372 | rhsc.truncate (size: 0); |
5373 | } |
5374 | } |
5375 | |
5376 | /* Account for uses in assigments and returns. */ |
5377 | if (gimple_assign_single_p (gs: t) |
5378 | || (gimple_code (g: t) == GIMPLE_RETURN |
5379 | && gimple_return_retval (gs: as_a <greturn *> (p: t)) != NULL_TREE)) |
5380 | { |
5381 | tree rhs = (gimple_assign_single_p (gs: t) |
5382 | ? gimple_assign_rhs1 (gs: t) |
5383 | : gimple_return_retval (gs: as_a <greturn *> (p: t))); |
5384 | tree tem = rhs; |
5385 | while (handled_component_p (t: tem)) |
5386 | tem = TREE_OPERAND (tem, 0); |
5387 | if ((DECL_P (tem) |
5388 | && !auto_var_in_fn_p (tem, fn->decl)) |
5389 | || INDIRECT_REF_P (tem) |
5390 | || (TREE_CODE (tem) == MEM_REF |
5391 | && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR |
5392 | && auto_var_in_fn_p |
5393 | (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl)))) |
5394 | { |
5395 | struct constraint_expr lhs, *rhsp; |
5396 | unsigned i; |
5397 | lhs = get_function_part_constraint (fi, part: fi_uses); |
5398 | get_constraint_for_address_of (t: rhs, results: &rhsc); |
5399 | FOR_EACH_VEC_ELT (rhsc, i, rhsp) |
5400 | process_constraint (t: new_constraint (lhs, rhs: *rhsp)); |
5401 | rhsc.truncate (size: 0); |
5402 | } |
5403 | } |
5404 | |
5405 | if (gcall *call_stmt = dyn_cast <gcall *> (p: t)) |
5406 | { |
5407 | varinfo_t cfi = NULL; |
5408 | tree decl = gimple_call_fndecl (gs: t); |
5409 | struct constraint_expr lhs, rhs; |
5410 | unsigned i, j; |
5411 | |
5412 | /* For builtins we do not have separate function info. For those |
5413 | we do not generate escapes for we have to generate clobbers/uses. */ |
5414 | if (gimple_call_builtin_p (t, BUILT_IN_NORMAL)) |
5415 | switch (DECL_FUNCTION_CODE (decl)) |
5416 | { |
5417 | /* The following functions use and clobber memory pointed to |
5418 | by their arguments. */ |
5419 | case BUILT_IN_STRCPY: |
5420 | case BUILT_IN_STRNCPY: |
5421 | case BUILT_IN_BCOPY: |
5422 | case BUILT_IN_MEMCPY: |
5423 | case BUILT_IN_MEMMOVE: |
5424 | case BUILT_IN_MEMPCPY: |
5425 | case BUILT_IN_STPCPY: |
5426 | case BUILT_IN_STPNCPY: |
5427 | case BUILT_IN_STRCAT: |
5428 | case BUILT_IN_STRNCAT: |
5429 | case BUILT_IN_STRCPY_CHK: |
5430 | case BUILT_IN_STRNCPY_CHK: |
5431 | case BUILT_IN_MEMCPY_CHK: |
5432 | case BUILT_IN_MEMMOVE_CHK: |
5433 | case BUILT_IN_MEMPCPY_CHK: |
5434 | case BUILT_IN_STPCPY_CHK: |
5435 | case BUILT_IN_STPNCPY_CHK: |
5436 | case BUILT_IN_STRCAT_CHK: |
5437 | case BUILT_IN_STRNCAT_CHK: |
5438 | { |
5439 | tree dest = gimple_call_arg (gs: t, index: (DECL_FUNCTION_CODE (decl) |
5440 | == BUILT_IN_BCOPY ? 1 : 0)); |
5441 | tree src = gimple_call_arg (gs: t, index: (DECL_FUNCTION_CODE (decl) |
5442 | == BUILT_IN_BCOPY ? 0 : 1)); |
5443 | unsigned i; |
5444 | struct constraint_expr *rhsp, *lhsp; |
5445 | get_constraint_for_ptr_offset (ptr: dest, NULL_TREE, results: &lhsc); |
5446 | lhs = get_function_part_constraint (fi, part: fi_clobbers); |
5447 | FOR_EACH_VEC_ELT (lhsc, i, lhsp) |
5448 | process_constraint (t: new_constraint (lhs, rhs: *lhsp)); |
5449 | get_constraint_for_ptr_offset (ptr: src, NULL_TREE, results: &rhsc); |
5450 | lhs = get_function_part_constraint (fi, part: fi_uses); |
5451 | FOR_EACH_VEC_ELT (rhsc, i, rhsp) |
5452 | process_constraint (t: new_constraint (lhs, rhs: *rhsp)); |
5453 | return; |
5454 | } |
5455 | /* The following function clobbers memory pointed to by |
5456 | its argument. */ |
5457 | case BUILT_IN_MEMSET: |
5458 | case BUILT_IN_MEMSET_CHK: |
5459 | case BUILT_IN_POSIX_MEMALIGN: |
5460 | { |
5461 | tree dest = gimple_call_arg (gs: t, index: 0); |
5462 | unsigned i; |
5463 | ce_s *lhsp; |
5464 | get_constraint_for_ptr_offset (ptr: dest, NULL_TREE, results: &lhsc); |
5465 | lhs = get_function_part_constraint (fi, part: fi_clobbers); |
5466 | FOR_EACH_VEC_ELT (lhsc, i, lhsp) |
5467 | process_constraint (t: new_constraint (lhs, rhs: *lhsp)); |
5468 | return; |
5469 | } |
5470 | /* The following functions clobber their second and third |
5471 | arguments. */ |
5472 | case BUILT_IN_SINCOS: |
5473 | case BUILT_IN_SINCOSF: |
5474 | case BUILT_IN_SINCOSL: |
5475 | { |
5476 | process_ipa_clobber (fi, ptr: gimple_call_arg (gs: t, index: 1)); |
5477 | process_ipa_clobber (fi, ptr: gimple_call_arg (gs: t, index: 2)); |
5478 | return; |
5479 | } |
5480 | /* The following functions clobber their second argument. */ |
5481 | case BUILT_IN_FREXP: |
5482 | case BUILT_IN_FREXPF: |
5483 | case BUILT_IN_FREXPL: |
5484 | case BUILT_IN_LGAMMA_R: |
5485 | case BUILT_IN_LGAMMAF_R: |
5486 | case BUILT_IN_LGAMMAL_R: |
5487 | case BUILT_IN_GAMMA_R: |
5488 | case BUILT_IN_GAMMAF_R: |
5489 | case BUILT_IN_GAMMAL_R: |
5490 | case BUILT_IN_MODF: |
5491 | case BUILT_IN_MODFF: |
5492 | case BUILT_IN_MODFL: |
5493 | { |
5494 | process_ipa_clobber (fi, ptr: gimple_call_arg (gs: t, index: 1)); |
5495 | return; |
5496 | } |
5497 | /* The following functions clobber their third argument. */ |
5498 | case BUILT_IN_REMQUO: |
5499 | case BUILT_IN_REMQUOF: |
5500 | case BUILT_IN_REMQUOL: |
5501 | { |
5502 | process_ipa_clobber (fi, ptr: gimple_call_arg (gs: t, index: 2)); |
5503 | return; |
5504 | } |
5505 | /* The following functions neither read nor clobber memory. */ |
5506 | case BUILT_IN_ASSUME_ALIGNED: |
5507 | case BUILT_IN_FREE: |
5508 | return; |
5509 | /* Trampolines are of no interest to us. */ |
5510 | case BUILT_IN_INIT_TRAMPOLINE: |
5511 | case BUILT_IN_ADJUST_TRAMPOLINE: |
5512 | return; |
5513 | case BUILT_IN_VA_START: |
5514 | case BUILT_IN_VA_END: |
5515 | return; |
5516 | case BUILT_IN_GOMP_PARALLEL: |
5517 | case BUILT_IN_GOACC_PARALLEL: |
5518 | { |
5519 | unsigned int fnpos, argpos; |
5520 | unsigned int implicit_use_args[2]; |
5521 | unsigned int num_implicit_use_args = 0; |
5522 | switch (DECL_FUNCTION_CODE (decl)) |
5523 | { |
5524 | case BUILT_IN_GOMP_PARALLEL: |
5525 | /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */ |
5526 | fnpos = 0; |
5527 | argpos = 1; |
5528 | break; |
5529 | case BUILT_IN_GOACC_PARALLEL: |
5530 | /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs, |
5531 | sizes, kinds, ...). */ |
5532 | fnpos = 1; |
5533 | argpos = 3; |
5534 | implicit_use_args[num_implicit_use_args++] = 4; |
5535 | implicit_use_args[num_implicit_use_args++] = 5; |
5536 | break; |
5537 | default: |
5538 | gcc_unreachable (); |
5539 | } |
5540 | |
5541 | tree fnarg = gimple_call_arg (gs: t, index: fnpos); |
5542 | gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR); |
5543 | tree fndecl = TREE_OPERAND (fnarg, 0); |
5544 | if (fndecl_maybe_in_other_partition (fndecl)) |
5545 | /* Fallthru to general call handling. */ |
5546 | break; |
5547 | |
5548 | varinfo_t cfi = get_vi_for_tree (t: fndecl); |
5549 | |
5550 | tree arg = gimple_call_arg (gs: t, index: argpos); |
5551 | |
5552 | /* Parameter passed by value is used. */ |
5553 | lhs = get_function_part_constraint (fi, part: fi_uses); |
5554 | struct constraint_expr *rhsp; |
5555 | get_constraint_for (t: arg, results: &rhsc); |
5556 | FOR_EACH_VEC_ELT (rhsc, j, rhsp) |
5557 | process_constraint (t: new_constraint (lhs, rhs: *rhsp)); |
5558 | rhsc.truncate (size: 0); |
5559 | |
5560 | /* Handle parameters used by the call, but not used in cfi, as |
5561 | implicitly used by cfi. */ |
5562 | lhs = get_function_part_constraint (fi: cfi, part: fi_uses); |
5563 | for (unsigned i = 0; i < num_implicit_use_args; ++i) |
5564 | { |
5565 | tree arg = gimple_call_arg (gs: t, index: implicit_use_args[i]); |
5566 | get_constraint_for (t: arg, results: &rhsc); |
5567 | FOR_EACH_VEC_ELT (rhsc, j, rhsp) |
5568 | process_constraint (t: new_constraint (lhs, rhs: *rhsp)); |
5569 | rhsc.truncate (size: 0); |
5570 | } |
5571 | |
5572 | /* The caller clobbers what the callee does. */ |
5573 | lhs = get_function_part_constraint (fi, part: fi_clobbers); |
5574 | rhs = get_function_part_constraint (fi: cfi, part: fi_clobbers); |
5575 | process_constraint (t: new_constraint (lhs, rhs)); |
5576 | |
5577 | /* The caller uses what the callee does. */ |
5578 | lhs = get_function_part_constraint (fi, part: fi_uses); |
5579 | rhs = get_function_part_constraint (fi: cfi, part: fi_uses); |
5580 | process_constraint (t: new_constraint (lhs, rhs)); |
5581 | |
5582 | return; |
5583 | } |
5584 | /* printf-style functions may have hooks to set pointers to |
5585 | point to somewhere into the generated string. Leave them |
5586 | for a later exercise... */ |
5587 | default: |
5588 | /* Fallthru to general call handling. */; |
5589 | } |
5590 | |
5591 | /* Parameters passed by value are used. */ |
5592 | lhs = get_function_part_constraint (fi, part: fi_uses); |
5593 | for (i = 0; i < gimple_call_num_args (gs: t); i++) |
5594 | { |
5595 | struct constraint_expr *rhsp; |
5596 | tree arg = gimple_call_arg (gs: t, index: i); |
5597 | |
5598 | if (TREE_CODE (arg) == SSA_NAME |
5599 | || is_gimple_min_invariant (arg)) |
5600 | continue; |
5601 | |
5602 | get_constraint_for_address_of (t: arg, results: &rhsc); |
5603 | FOR_EACH_VEC_ELT (rhsc, j, rhsp) |
5604 | process_constraint (t: new_constraint (lhs, rhs: *rhsp)); |
5605 | rhsc.truncate (size: 0); |
5606 | } |
5607 | |
5608 | /* Build constraints for propagating clobbers/uses along the |
5609 | callgraph edges. */ |
5610 | cfi = get_fi_for_callee (call: call_stmt); |
5611 | if (cfi->id == anything_id) |
5612 | { |
5613 | if (gimple_vdef (g: t)) |
5614 | make_constraint_from (vi: first_vi_for_offset (fi, fi_clobbers), |
5615 | from: anything_id); |
5616 | make_constraint_from (vi: first_vi_for_offset (fi, fi_uses), |
5617 | from: anything_id); |
5618 | return; |
5619 | } |
5620 | |
5621 | /* For callees without function info (that's external functions), |
5622 | ESCAPED is clobbered and used. */ |
5623 | if (cfi->decl |
5624 | && TREE_CODE (cfi->decl) == FUNCTION_DECL |
5625 | && !cfi->is_fn_info) |
5626 | { |
5627 | varinfo_t vi; |
5628 | |
5629 | if (gimple_vdef (g: t)) |
5630 | make_copy_constraint (vi: first_vi_for_offset (fi, fi_clobbers), |
5631 | from: escaped_id); |
5632 | make_copy_constraint (vi: first_vi_for_offset (fi, fi_uses), from: escaped_id); |
5633 | |
5634 | /* Also honor the call statement use/clobber info. */ |
5635 | if ((vi = lookup_call_clobber_vi (call: call_stmt)) != NULL) |
5636 | make_copy_constraint (vi: first_vi_for_offset (fi, fi_clobbers), |
5637 | from: vi->id); |
5638 | if ((vi = lookup_call_use_vi (call: call_stmt)) != NULL) |
5639 | make_copy_constraint (vi: first_vi_for_offset (fi, fi_uses), |
5640 | from: vi->id); |
5641 | return; |
5642 | } |
5643 | |
5644 | /* Otherwise the caller clobbers and uses what the callee does. |
5645 | ??? This should use a new complex constraint that filters |
5646 | local variables of the callee. */ |
5647 | if (gimple_vdef (g: t)) |
5648 | { |
5649 | lhs = get_function_part_constraint (fi, part: fi_clobbers); |
5650 | rhs = get_function_part_constraint (fi: cfi, part: fi_clobbers); |
5651 | process_constraint (t: new_constraint (lhs, rhs)); |
5652 | } |
5653 | lhs = get_function_part_constraint (fi, part: fi_uses); |
5654 | rhs = get_function_part_constraint (fi: cfi, part: fi_uses); |
5655 | process_constraint (t: new_constraint (lhs, rhs)); |
5656 | } |
5657 | else if (gimple_code (g: t) == GIMPLE_ASM) |
5658 | { |
5659 | /* ??? Ick. We can do better. */ |
5660 | if (gimple_vdef (g: t)) |
5661 | make_constraint_from (vi: first_vi_for_offset (fi, fi_clobbers), |
5662 | from: anything_id); |
5663 | make_constraint_from (vi: first_vi_for_offset (fi, fi_uses), |
5664 | from: anything_id); |
5665 | } |
5666 | } |
5667 | |
5668 | |
5669 | /* Find the first varinfo in the same variable as START that overlaps with |
5670 | OFFSET. Return NULL if we can't find one. */ |
5671 | |
5672 | static varinfo_t |
5673 | first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset) |
5674 | { |
5675 | /* If the offset is outside of the variable, bail out. */ |
5676 | if (offset >= start->fullsize) |
5677 | return NULL; |
5678 | |
5679 | /* If we cannot reach offset from start, lookup the first field |
5680 | and start from there. */ |
5681 | if (start->offset > offset) |
5682 | start = get_varinfo (n: start->head); |
5683 | |
5684 | while (start) |
5685 | { |
5686 | /* We may not find a variable in the field list with the actual |
5687 | offset when we have glommed a structure to a variable. |
5688 | In that case, however, offset should still be within the size |
5689 | of the variable. */ |
5690 | if (offset >= start->offset |
5691 | && (offset - start->offset) < start->size) |
5692 | return start; |
5693 | |
5694 | start = vi_next (vi: start); |
5695 | } |
5696 | |
5697 | return NULL; |
5698 | } |
5699 | |
5700 | /* Find the first varinfo in the same variable as START that overlaps with |
5701 | OFFSET. If there is no such varinfo the varinfo directly preceding |
5702 | OFFSET is returned. */ |
5703 | |
5704 | static varinfo_t |
5705 | first_or_preceding_vi_for_offset (varinfo_t start, |
5706 | unsigned HOST_WIDE_INT offset) |
5707 | { |
5708 | /* If we cannot reach offset from start, lookup the first field |
5709 | and start from there. */ |
5710 | if (start->offset > offset) |
5711 | start = get_varinfo (n: start->head); |
5712 | |
5713 | /* We may not find a variable in the field list with the actual |
5714 | offset when we have glommed a structure to a variable. |
5715 | In that case, however, offset should still be within the size |
5716 | of the variable. |
5717 | If we got beyond the offset we look for return the field |
5718 | directly preceding offset which may be the last field. */ |
5719 | while (start->next |
5720 | && offset >= start->offset |
5721 | && !((offset - start->offset) < start->size)) |
5722 | start = vi_next (vi: start); |
5723 | |
5724 | return start; |
5725 | } |
5726 | |
5727 | |
/* This structure is used during pushing fields onto the fieldstack
   to track the offset of the field, since bitpos_of_field gives it
   relative to its immediate containing type, and we want it relative
   to the ultimate containing object.  */

struct fieldoff
{
  /* Offset from the base of the base containing object to this field.  */
  HOST_WIDE_INT offset;

  /* Size, in bits, of the field.  */
  unsigned HOST_WIDE_INT size;

  /* True if the field has no DECL_SIZE or its DECL_SIZE does not fit
     an unsigned HOST_WIDE_INT; SIZE is then meaningless (-1).  */
  unsigned has_unknown_size : 1;

  /* True if the field's type is known to contain pointers (pointer,
     array of pointers, or function/method type).  */
  unsigned must_have_pointers : 1;

  /* True if the field may contain pointers.  */
  unsigned may_have_pointers : 1;

  /* True if the field is a restrict-qualified pointer of known size.  */
  unsigned only_restrict_pointers : 1;

  /* When ONLY_RESTRICT_POINTERS, the type the restrict pointer
     points to.  */
  tree restrict_pointed_type;
};
typedef struct fieldoff fieldoff_s;
5752 | |
5753 | |
5754 | /* qsort comparison function for two fieldoff's PA and PB */ |
5755 | |
5756 | static int |
5757 | fieldoff_compare (const void *pa, const void *pb) |
5758 | { |
5759 | const fieldoff_s *foa = (const fieldoff_s *)pa; |
5760 | const fieldoff_s *fob = (const fieldoff_s *)pb; |
5761 | unsigned HOST_WIDE_INT foasize, fobsize; |
5762 | |
5763 | if (foa->offset < fob->offset) |
5764 | return -1; |
5765 | else if (foa->offset > fob->offset) |
5766 | return 1; |
5767 | |
5768 | foasize = foa->size; |
5769 | fobsize = fob->size; |
5770 | if (foasize < fobsize) |
5771 | return -1; |
5772 | else if (foasize > fobsize) |
5773 | return 1; |
5774 | return 0; |
5775 | } |
5776 | |
/* Sort a fieldstack according to the field offset and sizes.  */
static void
sort_fieldstack (vec<fieldoff_s> &fieldstack)
{
  /* fieldoff_compare orders by offset first, then by size.  */
  fieldstack.qsort (fieldoff_compare);
}
5783 | |
5784 | /* Return true if T is a type that can have subvars. */ |
5785 | |
5786 | static inline bool |
5787 | type_can_have_subvars (const_tree t) |
5788 | { |
5789 | /* Aggregates without overlapping fields can have subvars. */ |
5790 | return TREE_CODE (t) == RECORD_TYPE; |
5791 | } |
5792 | |
5793 | /* Return true if V is a tree that we can have subvars for. |
5794 | Normally, this is any aggregate type. Also complex |
5795 | types which are not gimple registers can have subvars. */ |
5796 | |
5797 | static inline bool |
5798 | var_can_have_subvars (const_tree v) |
5799 | { |
5800 | /* Volatile variables should never have subvars. */ |
5801 | if (TREE_THIS_VOLATILE (v)) |
5802 | return false; |
5803 | |
5804 | /* Non decls or memory tags can never have subvars. */ |
5805 | if (!DECL_P (v)) |
5806 | return false; |
5807 | |
5808 | return type_can_have_subvars (TREE_TYPE (v)); |
5809 | } |
5810 | |
5811 | /* Return true if T is a type that does contain pointers. */ |
5812 | |
5813 | static bool |
5814 | type_must_have_pointers (tree type) |
5815 | { |
5816 | if (POINTER_TYPE_P (type)) |
5817 | return true; |
5818 | |
5819 | if (TREE_CODE (type) == ARRAY_TYPE) |
5820 | return type_must_have_pointers (TREE_TYPE (type)); |
5821 | |
5822 | /* A function or method can have pointers as arguments, so track |
5823 | those separately. */ |
5824 | if (FUNC_OR_METHOD_TYPE_P (type)) |
5825 | return true; |
5826 | |
5827 | return false; |
5828 | } |
5829 | |
5830 | static bool |
5831 | field_must_have_pointers (tree t) |
5832 | { |
5833 | return type_must_have_pointers (TREE_TYPE (t)); |
5834 | } |
5835 | |
/* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
   the fields of TYPE onto fieldstack, recording their offsets along
   the way.

   OFFSET is used to keep track of the offset in this entire
   structure, rather than just the immediately containing structure.
   Returns false if the caller is supposed to handle the field we
   recursed for.  */

static bool
push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
			     HOST_WIDE_INT offset)
{
  tree field;
  bool empty_p = true;

  /* Only RECORD_TYPEs can be decomposed; tell the caller to handle
     anything else itself.  */
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  /* If the vector of fields is growing too big, bail out early.
     Callers check for vec::length <= param_max_fields_for_field_sensitive, make
     sure this fails.  */
  if (fieldstack->length () > (unsigned)param_max_fields_for_field_sensitive)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      {
	bool push = false;
	/* Offset of this field relative to the ultimate containing
	   object, in bits.  */
	HOST_WIDE_INT foff = bitpos_of_field (fdecl: field);
	tree field_type = TREE_TYPE (field);

	/* Fields we cannot (or do not want to) decompose further are
	   pushed as a single entry; unions always end up here because
	   their members overlap.  */
	if (!var_can_have_subvars (v: field)
	    || TREE_CODE (field_type) == QUAL_UNION_TYPE
	    || TREE_CODE (field_type) == UNION_TYPE)
	  push = true;
	else if (!push_fields_onto_fieldstack
		   (type: field_type, fieldstack, offset: offset + foff)
		 && (DECL_SIZE (field)
		     && !integer_zerop (DECL_SIZE (field))))
	  /* Empty structures may have actual size, like in C++.  So
	     see if we didn't push any subfields and the size is
	     nonzero, push the field onto the stack.  */
	  push = true;

	if (push)
	  {
	    fieldoff_s *pair = NULL;
	    bool has_unknown_size = false;
	    bool must_have_pointers_p;

	    /* PAIR is the most recently pushed entry, the candidate
	       for merging with this field.  */
	    if (!fieldstack->is_empty ())
	      pair = &fieldstack->last ();

	    /* If there isn't anything at offset zero, create sth.  */
	    if (!pair
		&& offset + foff != 0)
	      {
		/* Synthesize a pointerless filler entry covering
		   [0, offset + foff) so the field list starts at 0.  */
		fieldoff_s e
		  = {.offset: 0, .size: offset + foff, .has_unknown_size: false, .must_have_pointers: false, .may_have_pointers: true, .only_restrict_pointers: false, NULL_TREE};
		pair = fieldstack->safe_push (obj: e);
	      }

	    if (!DECL_SIZE (field)
		|| !tree_fits_uhwi_p (DECL_SIZE (field)))
	      has_unknown_size = true;

	    /* If adjacent fields do not contain pointers merge them.  */
	    must_have_pointers_p = field_must_have_pointers (t: field);
	    if (pair
		&& !has_unknown_size
		&& !must_have_pointers_p
		&& !pair->must_have_pointers
		&& !pair->has_unknown_size
		&& pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
	      {
		/* This field starts exactly where the previous entry
		   ends; grow that entry instead of pushing a new one.  */
		pair->size += tree_to_uhwi (DECL_SIZE (field));
	      }
	    else
	      {
		fieldoff_s e;
		e.offset = offset + foff;
		e.has_unknown_size = has_unknown_size;
		if (!has_unknown_size)
		  e.size = tree_to_uhwi (DECL_SIZE (field));
		else
		  e.size = -1;
		e.must_have_pointers = must_have_pointers_p;
		e.may_have_pointers = true;
		e.only_restrict_pointers
		  = (!has_unknown_size
		     && POINTER_TYPE_P (field_type)
		     && TYPE_RESTRICT (field_type));
		if (e.only_restrict_pointers)
		  e.restrict_pointed_type = TREE_TYPE (field_type);
		fieldstack->safe_push (obj: e);
	      }
	  }

	/* Seeing any FIELD_DECL means TYPE was not empty, even when
	   the field itself was pushed by a recursive call.  */
	empty_p = false;
      }

  return !empty_p;
}
5940 | |
5941 | /* Count the number of arguments DECL has, and set IS_VARARGS to true |
5942 | if it is a varargs function. */ |
5943 | |
5944 | static unsigned int |
5945 | count_num_arguments (tree decl, bool *is_varargs) |
5946 | { |
5947 | unsigned int num = 0; |
5948 | tree t; |
5949 | |
5950 | /* Capture named arguments for K&R functions. They do not |
5951 | have a prototype and thus no TYPE_ARG_TYPES. */ |
5952 | for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t)) |
5953 | ++num; |
5954 | |
5955 | /* Check if the function has variadic arguments. */ |
5956 | for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t)) |
5957 | if (TREE_VALUE (t) == void_type_node) |
5958 | break; |
5959 | if (!t) |
5960 | *is_varargs = true; |
5961 | |
5962 | return num; |
5963 | } |
5964 | |
/* Creation function node for DECL, using NAME, and return the index
   of the variable we've created for the function.  If NONLOCAL_p, create
   initial constraints.  */

static varinfo_t
create_function_info_for (tree decl, const char *name, bool add_id,
			  bool nonlocal_p)
{
  struct function *fn = DECL_STRUCT_FUNCTION (decl);
  varinfo_t vi, prev_vi;
  tree arg;
  unsigned int i;
  bool is_varargs = false;
  unsigned int num_args = count_num_arguments (decl, is_varargs: &is_varargs);

  /* Create the variable info.  */

  vi = new_var_info (t: decl, name, add_id);
  vi->offset = 0;
  vi->size = 1;
  /* The function info is a sequence of sub-variables linked via
     prev_vi->next below, at offsets fi_clobbers, fi_uses,
     fi_static_chain, fi_result and fi_parm_base + arg number.  */
  vi->fullsize = fi_parm_base + num_args;
  vi->is_fn_info = 1;
  vi->may_have_pointers = false;
  if (is_varargs)
    vi->fullsize = ~0;
  insert_vi_for_tree (t: vi->decl, vi);

  prev_vi = vi;

  /* Create a variable for things the function clobbers and one for
     things the function uses.  */
  {
    varinfo_t clobbervi, usevi;
    const char *newname;
    char *tempname;

    tempname = xasprintf ("%s.clobber", name);
    newname = ggc_strdup (tempname);
    free (ptr: tempname);

    clobbervi = new_var_info (NULL, name: newname, add_id: false);
    clobbervi->offset = fi_clobbers;
    clobbervi->size = 1;
    clobbervi->fullsize = vi->fullsize;
    clobbervi->is_full_var = true;
    clobbervi->is_global_var = false;
    clobbervi->is_reg_var = true;

    /* Sub-variables must be chained in increasing offset order.  */
    gcc_assert (prev_vi->offset < clobbervi->offset);
    prev_vi->next = clobbervi->id;
    prev_vi = clobbervi;

    tempname = xasprintf ("%s.use", name);
    newname = ggc_strdup (tempname);
    free (ptr: tempname);

    usevi = new_var_info (NULL, name: newname, add_id: false);
    usevi->offset = fi_uses;
    usevi->size = 1;
    usevi->fullsize = vi->fullsize;
    usevi->is_full_var = true;
    usevi->is_global_var = false;
    usevi->is_reg_var = true;

    gcc_assert (prev_vi->offset < usevi->offset);
    prev_vi->next = usevi->id;
    prev_vi = usevi;
  }

  /* And one for the static chain.  */
  if (fn->static_chain_decl != NULL_TREE)
    {
      varinfo_t chainvi;
      const char *newname;
      char *tempname;

      tempname = xasprintf ("%s.chain", name);
      newname = ggc_strdup (tempname);
      free (ptr: tempname);

      chainvi = new_var_info (t: fn->static_chain_decl, name: newname, add_id: false);
      chainvi->offset = fi_static_chain;
      chainvi->size = 1;
      chainvi->fullsize = vi->fullsize;
      chainvi->is_full_var = true;
      chainvi->is_global_var = false;

      insert_vi_for_tree (t: fn->static_chain_decl, vi: chainvi);

      /* The static chain may point anywhere when callers are outside
	 the analyzed unit.  */
      if (nonlocal_p
	  && chainvi->may_have_pointers)
	make_constraint_from (vi: chainvi, from: nonlocal_id);

      gcc_assert (prev_vi->offset < chainvi->offset);
      prev_vi->next = chainvi->id;
      prev_vi = chainvi;
    }

  /* Create a variable for the return var.  */
  if (DECL_RESULT (decl) != NULL
      || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    {
      varinfo_t resultvi;
      const char *newname;
      char *tempname;
      tree resultdecl = decl;

      if (DECL_RESULT (decl))
	resultdecl = DECL_RESULT (decl);

      tempname = xasprintf ("%s.result", name);
      newname = ggc_strdup (tempname);
      free (ptr: tempname);

      resultvi = new_var_info (t: resultdecl, name: newname, add_id: false);
      resultvi->offset = fi_result;
      resultvi->size = 1;
      resultvi->fullsize = vi->fullsize;
      resultvi->is_full_var = true;
      if (DECL_RESULT (decl))
	resultvi->may_have_pointers = true;

      if (DECL_RESULT (decl))
	insert_vi_for_tree (DECL_RESULT (decl), vi: resultvi);

      /* A result passed by invisible reference may point to nonlocal
	 memory supplied by the caller.  */
      if (nonlocal_p
	  && DECL_RESULT (decl)
	  && DECL_BY_REFERENCE (DECL_RESULT (decl)))
	make_constraint_from (vi: resultvi, from: nonlocal_id);

      gcc_assert (prev_vi->offset < resultvi->offset);
      prev_vi->next = resultvi->id;
      prev_vi = resultvi;
    }

  /* We also need to make function return values escape.  Nothing
     escapes by returning from main though.  */
  if (nonlocal_p
      && !MAIN_NAME_P (DECL_NAME (decl)))
    {
      varinfo_t fi, rvi;
      fi = lookup_vi_for_tree (t: decl);
      rvi = first_vi_for_offset (start: fi, offset: fi_result);
      if (rvi && rvi->offset == fi_result)
	make_copy_constraint (vi: get_varinfo (n: escaped_id), from: rvi->id);
    }

  /* Set up variables for each argument.  */
  arg = DECL_ARGUMENTS (decl);
  for (i = 0; i < num_args; i++)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      /* For K&R functions ARG may run out; fall back to the function
	 decl itself then.  */
      tree argdecl = decl;

      if (arg)
	argdecl = arg;

      tempname = xasprintf ("%s.arg%d", name, i);
      newname = ggc_strdup (tempname);
      free (ptr: tempname);

      argvi = new_var_info (t: argdecl, name: newname, add_id: false);
      argvi->offset = fi_parm_base + i;
      argvi->size = 1;
      argvi->is_full_var = true;
      argvi->fullsize = vi->fullsize;
      if (arg)
	argvi->may_have_pointers = true;

      if (arg)
	insert_vi_for_tree (t: arg, vi: argvi);

      /* Incoming arguments may point to nonlocal memory when callers
	 are outside the analyzed unit.  */
      if (nonlocal_p
	  && argvi->may_have_pointers)
	make_constraint_from (vi: argvi, from: nonlocal_id);

      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi->id;
      prev_vi = argvi;
      if (arg)
	arg = DECL_CHAIN (arg);
    }

  /* Add one representative for all further args.  */
  if (is_varargs)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree decl;

      tempname = xasprintf ("%s.varargs", name);
      newname = ggc_strdup (tempname);
      free (ptr: tempname);

      /* We need sth that can be pointed to for va_start.  */
      decl = build_fake_var_decl (ptr_type_node);

      argvi = new_var_info (t: decl, name: newname, add_id: false);
      argvi->offset = fi_parm_base + num_args;
      argvi->size = ~0;
      argvi->is_full_var = true;
      argvi->is_heap_var = true;
      argvi->fullsize = vi->fullsize;

      if (nonlocal_p
	  && argvi->may_have_pointers)
	make_constraint_from (vi: argvi, from: nonlocal_id);

      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi->id;
    }

  return vi;
}
6182 | |
6183 | |
6184 | /* Return true if FIELDSTACK contains fields that overlap. |
6185 | FIELDSTACK is assumed to be sorted by offset. */ |
6186 | |
6187 | static bool |
6188 | check_for_overlaps (const vec<fieldoff_s> &fieldstack) |
6189 | { |
6190 | fieldoff_s *fo = NULL; |
6191 | unsigned int i; |
6192 | HOST_WIDE_INT lastoffset = -1; |
6193 | |
6194 | FOR_EACH_VEC_ELT (fieldstack, i, fo) |
6195 | { |
6196 | if (fo->offset == lastoffset) |
6197 | return true; |
6198 | lastoffset = fo->offset; |
6199 | } |
6200 | return false; |
6201 | } |
6202 | |
6203 | /* Create a varinfo structure for NAME and DECL, and add it to VARMAP. |
6204 | This will also create any varinfo structures necessary for fields |
6205 | of DECL. DECL is a function parameter if HANDLE_PARAM is set. |
6206 | HANDLED_STRUCT_TYPE is used to register struct types reached by following |
6207 | restrict pointers. This is needed to prevent infinite recursion. |
6208 | If ADD_RESTRICT, pretend that the pointer NAME is restrict even if DECL |
6209 | does not advertise it. */ |
6210 | |
6211 | static varinfo_t |
6212 | create_variable_info_for_1 (tree decl, const char *name, bool add_id, |
6213 | bool handle_param, bitmap handled_struct_type, |
6214 | bool add_restrict = false) |
6215 | { |
6216 | varinfo_t vi, newvi; |
6217 | tree decl_type = TREE_TYPE (decl); |
6218 | tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type); |
6219 | auto_vec<fieldoff_s> fieldstack; |
6220 | fieldoff_s *fo; |
6221 | unsigned int i; |
6222 | |
6223 | if (!declsize |
6224 | || !tree_fits_uhwi_p (declsize)) |
6225 | { |
6226 | vi = new_var_info (t: decl, name, add_id); |
6227 | vi->offset = 0; |
6228 | vi->size = ~0; |
6229 | vi->fullsize = ~0; |
6230 | vi->is_unknown_size_var = true; |
6231 | vi->is_full_var = true; |
6232 | vi->may_have_pointers = true; |
6233 | return vi; |
6234 | } |
6235 | |
6236 | /* Collect field information. */ |
6237 | if (use_field_sensitive |
6238 | && var_can_have_subvars (v: decl) |
6239 | /* ??? Force us to not use subfields for globals in IPA mode. |
6240 | Else we'd have to parse arbitrary initializers. */ |
6241 | && !(in_ipa_mode |
6242 | && is_global_var (t: decl))) |
6243 | { |
6244 | fieldoff_s *fo = NULL; |
6245 | bool notokay = false; |
6246 | unsigned int i; |
6247 | |
6248 | push_fields_onto_fieldstack (type: decl_type, fieldstack: &fieldstack, offset: 0); |
6249 | |
6250 | for (i = 0; !notokay && fieldstack.iterate (ix: i, ptr: &fo); i++) |
6251 | if (fo->has_unknown_size |
6252 | || fo->offset < 0) |
6253 | { |
6254 | notokay = true; |
6255 | break; |
6256 | } |
6257 | |
6258 | /* We can't sort them if we have a field with a variable sized type, |
6259 | which will make notokay = true. In that case, we are going to return |
6260 | without creating varinfos for the fields anyway, so sorting them is a |
6261 | waste to boot. */ |
6262 | if (!notokay) |
6263 | { |
6264 | sort_fieldstack (fieldstack); |
6265 | /* Due to some C++ FE issues, like PR 22488, we might end up |
6266 | what appear to be overlapping fields even though they, |
6267 | in reality, do not overlap. Until the C++ FE is fixed, |
6268 | we will simply disable field-sensitivity for these cases. */ |
6269 | notokay = check_for_overlaps (fieldstack); |
6270 | } |
6271 | |
6272 | if (notokay) |
6273 | fieldstack.release (); |
6274 | } |
6275 | |
6276 | /* If we didn't end up collecting sub-variables create a full |
6277 | variable for the decl. */ |
6278 | if (fieldstack.length () == 0 |
6279 | || fieldstack.length () > (unsigned)param_max_fields_for_field_sensitive) |
6280 | { |
6281 | vi = new_var_info (t: decl, name, add_id); |
6282 | vi->offset = 0; |
6283 | vi->may_have_pointers = true; |
6284 | vi->fullsize = tree_to_uhwi (declsize); |
6285 | vi->size = vi->fullsize; |
6286 | vi->is_full_var = true; |
6287 | if (POINTER_TYPE_P (decl_type) |
6288 | && (TYPE_RESTRICT (decl_type) || add_restrict)) |
6289 | vi->only_restrict_pointers = 1; |
6290 | if (vi->only_restrict_pointers |
6291 | && !type_contains_placeholder_p (TREE_TYPE (decl_type)) |
6292 | && handle_param |
6293 | && !bitmap_bit_p (handled_struct_type, |
6294 | TYPE_UID (TREE_TYPE (decl_type)))) |
6295 | { |
6296 | varinfo_t rvi; |
6297 | tree heapvar = build_fake_var_decl (TREE_TYPE (decl_type)); |
6298 | DECL_EXTERNAL (heapvar) = 1; |
6299 | if (var_can_have_subvars (v: heapvar)) |
6300 | bitmap_set_bit (handled_struct_type, |
6301 | TYPE_UID (TREE_TYPE (decl_type))); |
6302 | rvi = create_variable_info_for_1 (decl: heapvar, name: "PARM_NOALIAS" , add_id: true, |
6303 | handle_param: true, handled_struct_type); |
6304 | if (var_can_have_subvars (v: heapvar)) |
6305 | bitmap_clear_bit (handled_struct_type, |
6306 | TYPE_UID (TREE_TYPE (decl_type))); |
6307 | rvi->is_restrict_var = 1; |
6308 | insert_vi_for_tree (t: heapvar, vi: rvi); |
6309 | make_constraint_from (vi, from: rvi->id); |
6310 | make_param_constraints (rvi); |
6311 | } |
6312 | fieldstack.release (); |
6313 | return vi; |
6314 | } |
6315 | |
6316 | vi = new_var_info (t: decl, name, add_id); |
6317 | vi->fullsize = tree_to_uhwi (declsize); |
6318 | if (fieldstack.length () == 1) |
6319 | vi->is_full_var = true; |
6320 | for (i = 0, newvi = vi; |
6321 | fieldstack.iterate (ix: i, ptr: &fo); |
6322 | ++i, newvi = vi_next (vi: newvi)) |
6323 | { |
6324 | const char *newname = NULL; |
6325 | char *tempname; |
6326 | |
6327 | if (dump_file) |
6328 | { |
6329 | if (fieldstack.length () != 1) |
6330 | { |
6331 | tempname |
6332 | = xasprintf ("%s." HOST_WIDE_INT_PRINT_DEC |
6333 | "+" HOST_WIDE_INT_PRINT_DEC, name, |
6334 | fo->offset, fo->size); |
6335 | newname = ggc_strdup (tempname); |
6336 | free (ptr: tempname); |
6337 | } |
6338 | } |
6339 | else |
6340 | newname = "NULL" ; |
6341 | |
6342 | if (newname) |
6343 | newvi->name = newname; |
6344 | newvi->offset = fo->offset; |
6345 | newvi->size = fo->size; |
6346 | newvi->fullsize = vi->fullsize; |
6347 | newvi->may_have_pointers = fo->may_have_pointers; |
6348 | newvi->only_restrict_pointers = fo->only_restrict_pointers; |
6349 | if (handle_param |
6350 | && newvi->only_restrict_pointers |
6351 | && !type_contains_placeholder_p (fo->restrict_pointed_type) |
6352 | && !bitmap_bit_p (handled_struct_type, |
6353 | TYPE_UID (fo->restrict_pointed_type))) |
6354 | { |
6355 | varinfo_t rvi; |
6356 | tree heapvar = build_fake_var_decl (type: fo->restrict_pointed_type); |
6357 | DECL_EXTERNAL (heapvar) = 1; |
6358 | if (var_can_have_subvars (v: heapvar)) |
6359 | bitmap_set_bit (handled_struct_type, |
6360 | TYPE_UID (fo->restrict_pointed_type)); |
6361 | rvi = create_variable_info_for_1 (decl: heapvar, name: "PARM_NOALIAS" , add_id: true, |
6362 | handle_param: true, handled_struct_type); |
6363 | if (var_can_have_subvars (v: heapvar)) |
6364 | bitmap_clear_bit (handled_struct_type, |
6365 | TYPE_UID (fo->restrict_pointed_type)); |
6366 | rvi->is_restrict_var = 1; |
6367 | insert_vi_for_tree (t: heapvar, vi: rvi); |
6368 | make_constraint_from (vi: newvi, from: rvi->id); |
6369 | make_param_constraints (rvi); |
6370 | } |
6371 | if (i + 1 < fieldstack.length ()) |
6372 | { |
6373 | varinfo_t tem = new_var_info (t: decl, name, add_id: false); |
6374 | newvi->next = tem->id; |
6375 | tem->head = vi->id; |
6376 | } |
6377 | } |
6378 | |
6379 | return vi; |
6380 | } |
6381 | |
/* Create varinfo structures (one per field when field-sensitive) for
   DECL under the name NAME (with the varinfo id appended when ADD_ID),
   record them in the tree-to-varinfo map and, for global variables,
   generate the initial points-to constraints.  Returns the id of the
   main variable info.  */

static unsigned int
create_variable_info_for (tree decl, const char *name, bool add_id)
{
  /* First see if we are dealing with an ifunc resolver call and
     associate that with a call to the resolver function result.  */
  cgraph_node *node;
  if (in_ipa_mode
      && TREE_CODE (decl) == FUNCTION_DECL
      && (node = cgraph_node::get (decl))
      && node->ifunc_resolver)
    {
      varinfo_t fi = get_vi_for_tree (t: node->get_alias_target ()->decl);
      constraint_expr rhs
	= get_function_part_constraint (fi, part: fi_result);
      /* Model the resolver result with a separate register variable
	 so the ifunc decl itself points to what the resolved function
	 would return.  */
      fi = new_var_info (NULL_TREE, name: "ifuncres" , add_id: true);
      fi->is_reg_var = true;
      constraint_expr lhs;
      lhs.type = SCALAR;
      lhs.var = fi->id;
      lhs.offset = 0;
      process_constraint (t: new_constraint (lhs, rhs));
      insert_vi_for_tree (t: decl, vi: fi);
      return fi->id;
    }

  varinfo_t vi = create_variable_info_for_1 (decl, name, add_id, handle_param: false, NULL);
  unsigned int id = vi->id;

  insert_vi_for_tree (t: decl, vi);

  /* Only variables get the global-initializer constraints below.  */
  if (!VAR_P (decl))
    return id;

  /* Create initial constraints for globals.  */
  for (; vi; vi = vi_next (vi))
    {
      if (!vi->may_have_pointers
	  || !vi->is_global_var)
	continue;

      /* Mark global restrict qualified pointers.  */
      if ((POINTER_TYPE_P (TREE_TYPE (decl))
	   && TYPE_RESTRICT (TREE_TYPE (decl)))
	  || vi->only_restrict_pointers)
	{
	  varinfo_t rvi
	    = make_constraint_from_global_restrict (lhs: vi, name: "GLOBAL_RESTRICT" ,
						    add_id: true);
	  /* ??? For now exclude reads from globals as restrict sources
	     if those are not (indirectly) from incoming parameters.  */
	  rvi->is_restrict_var = false;
	  continue;
	}

      /* In non-IPA mode the initializer from nonlocal is all we need.  */
      if (!in_ipa_mode
	  || DECL_HARD_REGISTER (decl))
	make_copy_constraint (vi, from: nonlocal_id);

      /* In IPA mode parse the initializer and generate proper constraints
	 for it.  */
      else
	{
	  varpool_node *vnode = varpool_node::get (decl);

	  /* For escaped variables initialize them from nonlocal.  */
	  if (!vnode->all_refs_explicit_p ())
	    make_copy_constraint (vi, from: nonlocal_id);

	  /* If this is a global variable with an initializer and we are in
	     IPA mode generate constraints for it.  */
	  ipa_ref *ref;
	  for (unsigned idx = 0; vnode->iterate_reference (i: idx, ref); ++idx)
	    {
	      auto_vec<ce_s> rhsc;
	      struct constraint_expr lhs, *rhsp;
	      unsigned i;
	      get_constraint_for_address_of (t: ref->referred->decl, results: &rhsc);
	      lhs.var = vi->id;
	      lhs.offset = 0;
	      lhs.type = SCALAR;
	      FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		process_constraint (t: new_constraint (lhs, rhs: *rhsp));
	      /* If this is a variable that escapes from the unit
		 the initializer escapes as well.  */
	      if (!vnode->all_refs_explicit_p ())
		{
		  lhs.var = escaped_id;
		  lhs.offset = 0;
		  lhs.type = SCALAR;
		  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		    process_constraint (t: new_constraint (lhs, rhs: *rhsp));
		}
	    }
	}
    }

  return id;
}
6481 | |
/* Print out the points-to solution for VAR (a varinfo index) to FILE.
   The solution printed is that of VAR's unification representative;
   when VAR was unified away a note is appended.  */

static void
dump_solution_for_var (FILE *file, unsigned int var)
{
  varinfo_t vi = get_varinfo (n: var);
  unsigned int i;
  bitmap_iterator bi;

  /* Dump the solution for unified vars anyway, this avoids difficulties
     in scanning dumps in the testsuite.  */
  fprintf (stream: file, format: "%s = { " , vi->name);
  /* The solution bitmap lives at the unification representative.  */
  vi = get_varinfo (n: find (node: var));
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    fprintf (stream: file, format: "%s " , get_varinfo (n: i)->name);
  fprintf (stream: file, format: "}" );

  /* But note when the variable was unified.  */
  if (vi->id != var)
    fprintf (stream: file, format: " same as %s" , vi->name);

  fprintf (stream: file, format: "\n" );
}
6505 | |
/* Print the points-to solution for VAR to stderr.  Convenience
   wrapper around dump_solution_for_var for use from the debugger.  */

DEBUG_FUNCTION void
debug_solution_for_var (unsigned int var)
{
  dump_solution_for_var (stderr, var);
}
6513 | |
6514 | /* Register the constraints for function parameter related VI. */ |
6515 | |
6516 | static void |
6517 | make_param_constraints (varinfo_t vi) |
6518 | { |
6519 | for (; vi; vi = vi_next (vi)) |
6520 | { |
6521 | if (vi->only_restrict_pointers) |
6522 | ; |
6523 | else if (vi->may_have_pointers) |
6524 | make_constraint_from (vi, from: nonlocal_id); |
6525 | |
6526 | if (vi->is_full_var) |
6527 | break; |
6528 | } |
6529 | } |
6530 | |
/* Create varinfo structures for all of the variables in the
   function FN for intraprocedural mode: the incoming parameters,
   a by-reference result decl and the static chain.  */

static void
intra_create_variable_infos (struct function *fn)
{
  tree t;
  /* Lazily allocated set of struct type UIDs we already created a
     PARM_NOALIAS variable for; guards against unbounded recursion
     on self-referential restrict-pointer struct types.  */
  bitmap handled_struct_type = NULL;
  /* The first (this) parameter of a C++ constructor is treated as
     restrict qualified (passed as ADD_RESTRICT below).  */
  bool this_parm_in_ctor = DECL_CXX_CONSTRUCTOR_P (fn->decl);

  /* For each incoming pointer argument arg, create the constraint ARG
     = NONLOCAL or a dummy variable if it is a restrict qualified
     passed-by-reference argument.  */
  for (t = DECL_ARGUMENTS (fn->decl); t; t = DECL_CHAIN (t))
    {
      if (handled_struct_type == NULL)
	handled_struct_type = BITMAP_ALLOC (NULL);

      varinfo_t p
	= create_variable_info_for_1 (decl: t, name: alias_get_name (decl: t), add_id: false, handle_param: true,
				      handled_struct_type, add_restrict: this_parm_in_ctor);
      insert_vi_for_tree (t, vi: p);

      make_param_constraints (vi: p);

      /* Only the first parameter can be the constructor's this.  */
      this_parm_in_ctor = false;
    }

  if (handled_struct_type != NULL)
    BITMAP_FREE (handled_struct_type);

  /* Add a constraint for a result decl that is passed by reference.  */
  if (DECL_RESULT (fn->decl)
      && DECL_BY_REFERENCE (DECL_RESULT (fn->decl)))
    {
      varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (fn->decl));

      for (p = result_vi; p; p = vi_next (vi: p))
	make_constraint_from (vi: p, from: nonlocal_id);
    }

  /* Add a constraint for the incoming static chain parameter.  */
  if (fn->static_chain_decl != NULL_TREE)
    {
      varinfo_t p, chain_vi = get_vi_for_tree (t: fn->static_chain_decl);

      for (p = chain_vi; p; p = vi_next (vi: p))
	make_constraint_from (vi: p, from: nonlocal_id);
    }
}
6581 | |
/* Structure used to put solution bitmaps in a hashtable so they can
   be shared among variables with the same points-to set.  */

typedef struct shared_bitmap_info
{
  /* The canonical points-to bitmap.  */
  bitmap pt_vars;
  /* Cached bitmap_hash of PT_VARS, used for both lookup and insert.  */
  hashval_t hashcode;
} *shared_bitmap_info_t;
typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
6591 | |
/* Shared_bitmap hashtable helpers.  Hashes on the cached hashcode
   and compares by bitmap contents.  */

struct shared_bitmap_hasher : free_ptr_hash <shared_bitmap_info>
{
  static inline hashval_t hash (const shared_bitmap_info *);
  static inline bool equal (const shared_bitmap_info *,
			    const shared_bitmap_info *);
};
6600 | |
/* Hash function for a shared_bitmap_info_t.  Simply returns the
   hash cached at insertion time.  */

inline hashval_t
shared_bitmap_hasher::hash (const shared_bitmap_info *bi)
{
  return bi->hashcode;
}
6608 | |
/* Equality function for two shared_bitmap_info_t's.  Entries are
   equal when their points-to bitmaps contain exactly the same bits.  */

inline bool
shared_bitmap_hasher::equal (const shared_bitmap_info *sbi1,
			     const shared_bitmap_info *sbi2)
{
  return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
}
6617 | |
/* Shared_bitmap hashtable, mapping equal points-to bitmaps to one
   canonical shared instance.  */

static hash_table<shared_bitmap_hasher> *shared_bitmap_table;
6621 | |
6622 | /* Lookup a bitmap in the shared bitmap hashtable, and return an already |
6623 | existing instance if there is one, NULL otherwise. */ |
6624 | |
6625 | static bitmap |
6626 | shared_bitmap_lookup (bitmap pt_vars) |
6627 | { |
6628 | shared_bitmap_info **slot; |
6629 | struct shared_bitmap_info sbi; |
6630 | |
6631 | sbi.pt_vars = pt_vars; |
6632 | sbi.hashcode = bitmap_hash (pt_vars); |
6633 | |
6634 | slot = shared_bitmap_table->find_slot (value: &sbi, insert: NO_INSERT); |
6635 | if (!slot) |
6636 | return NULL; |
6637 | else |
6638 | return (*slot)->pt_vars; |
6639 | } |
6640 | |
6641 | |
6642 | /* Add a bitmap to the shared bitmap hashtable. */ |
6643 | |
6644 | static void |
6645 | shared_bitmap_add (bitmap pt_vars) |
6646 | { |
6647 | shared_bitmap_info **slot; |
6648 | shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info); |
6649 | |
6650 | sbi->pt_vars = pt_vars; |
6651 | sbi->hashcode = bitmap_hash (pt_vars); |
6652 | |
6653 | slot = shared_bitmap_table->find_slot (value: sbi, insert: INSERT); |
6654 | gcc_assert (!*slot); |
6655 | *slot = sbi; |
6656 | } |
6657 | |
6658 | |
/* Set bits in INTO corresponding to the variable uids in solution set
   FROM, updating the summary flags of PT (vars_contains_escaped,
   vars_contains_nonlocal, ...) along the way.  FNDECL, if non-NULL,
   is the function the points-to set is computed for; in IPA mode it
   is used to classify locals of other functions as nonlocal.  */

static void
set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt,
		   tree fndecl)
{
  unsigned int i;
  bitmap_iterator bi;
  varinfo_t escaped_vi = get_varinfo (n: find (node: escaped_id));
  bool everything_escaped
    = escaped_vi->solution && bitmap_bit_p (escaped_vi->solution, anything_id);

  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (n: i);

      /* Artificial vars (ESCAPED, NONLOCAL, ...) are expanded by the
	 caller; they do not enter the uid bitmap.  */
      if (vi->is_artificial_var)
	continue;

      /* Record whether this pointed-to variable is in the ESCAPED
	 solution itself.  */
      if (everything_escaped
	  || (escaped_vi->solution
	      && bitmap_bit_p (escaped_vi->solution, i)))
	{
	  pt->vars_contains_escaped = true;
	  pt->vars_contains_escaped_heap |= vi->is_heap_var;
	}

      if (vi->is_restrict_var)
	pt->vars_contains_restrict = true;

      if (VAR_P (vi->decl)
	  || TREE_CODE (vi->decl) == PARM_DECL
	  || TREE_CODE (vi->decl) == RESULT_DECL)
	{
	  /* If we are in IPA mode we will not recompute points-to
	     sets after inlining so make sure they stay valid.  */
	  if (in_ipa_mode
	      && !DECL_PT_UID_SET_P (vi->decl))
	    SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));

	  /* Add the decl to the points-to set.  Note that the points-to
	     set contains global variables.  */
	  bitmap_set_bit (into, DECL_PT_UID (vi->decl));
	  if (vi->is_global_var
	      /* In IPA mode the escaped_heap trick doesn't work as
		 ESCAPED is escaped from the unit but
		 pt_solution_includes_global needs to answer true for
		 all variables not automatic within a function.
		 For the same reason is_global_var is not the
		 correct flag to track - local variables from other
		 functions also need to be considered global.
		 Conveniently all HEAP vars are not put in function
		 scope.  */
	      || (in_ipa_mode
		  && fndecl
		  && ! auto_var_in_fn_p (vi->decl, fndecl)))
	    pt->vars_contains_nonlocal = true;

	  /* If we have a variable that is interposable record that fact
	     for pointer comparison simplification.  */
	  if (VAR_P (vi->decl)
	      && (TREE_STATIC (vi->decl) || DECL_EXTERNAL (vi->decl))
	      && ! decl_binds_to_current_def_p (vi->decl))
	    pt->vars_contains_interposable = true;

	  /* If this is a local variable we can have overlapping lifetime
	     of different function invocations through recursion duplicate
	     it with its shadow variable.  */
	  if (in_ipa_mode
	      && vi->shadow_var_uid != 0)
	    {
	      bitmap_set_bit (into, vi->shadow_var_uid);
	      pt->vars_contains_nonlocal = true;
	    }
	}

      else if (TREE_CODE (vi->decl) == FUNCTION_DECL
	       || TREE_CODE (vi->decl) == LABEL_DECL)
	{
	  /* Nothing should read/write from/to code so we can
	     save bits by not including them in the points-to bitmaps.
	     Still mark the points-to set as containing global memory
	     to make code-patching possible - see PR70128.  */
	  pt->vars_contains_nonlocal = true;
	}
    }
}
6746 | |
6747 | |
/* Compute and return the points-to solution for the variable ORIG_VI,
   caching the result in FINAL_SOLUTIONS.  FNDECL is the function the
   solution is computed for, or NULL.  */

static struct pt_solution
find_what_var_points_to (tree fndecl, varinfo_t orig_vi)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap finished_solution;
  bitmap result;
  varinfo_t vi;
  struct pt_solution *pt;

  /* This variable may have been collapsed, let's get the real
     variable.  */
  vi = get_varinfo (n: find (node: orig_vi->id));

  /* See if we have already computed the solution and return it.  */
  pt_solution **slot = &final_solutions->get_or_insert (k: vi);
  if (*slot != NULL)
    return **slot;

  *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
  memset (s: pt, c: 0, n: sizeof (struct pt_solution));

  /* Translate artificial variables into SSA_NAME_PTR_INFO
     attributes.  */
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (n: i);

      if (vi->is_artificial_var)
	{
	  if (vi->id == nothing_id)
	    pt->null = 1;
	  else if (vi->id == escaped_id)
	    {
	      if (in_ipa_mode)
		pt->ipa_escaped = 1;
	      else
		pt->escaped = 1;
	      /* Expand some special vars of ESCAPED in-place here.  */
	      varinfo_t evi = get_varinfo (n: find (node: escaped_id));
	      if (bitmap_bit_p (evi->solution, nonlocal_id))
		pt->nonlocal = 1;
	    }
	  else if (vi->id == nonlocal_id)
	    pt->nonlocal = 1;
	  else if (vi->id == string_id)
	    /* Nobody cares - STRING_CSTs are read-only entities.  */
	    ;
	  else if (vi->id == anything_id
		   || vi->id == integer_id)
	    pt->anything = 1;
	}
    }

  /* Instead of doing extra work, simply do not create
     elaborate points-to information for pt_anything pointers.  */
  if (pt->anything)
    return *pt;

  /* Share the final set of variables when possible.  */
  finished_solution = BITMAP_GGC_ALLOC ();
  stats.points_to_sets_created++;

  set_uids_in_ptset (into: finished_solution, from: vi->solution, pt, fndecl);
  result = shared_bitmap_lookup (pt_vars: finished_solution);
  if (!result)
    {
      shared_bitmap_add (pt_vars: finished_solution);
      pt->vars = finished_solution;
    }
  else
    {
      /* An equal set already exists; reuse it and drop ours.  */
      pt->vars = result;
      bitmap_clear (finished_solution);
    }

  return *pt;
}
6828 | |
/* Given a pointer variable P (an SSA name) in function FNDECL, fill
   in its SSA_NAME_PTR_INFO points-to set from the constraint
   solution.  */

static void
find_what_p_points_to (tree fndecl, tree p)
{
  struct ptr_info_def *pi;
  tree lookup_p = p;
  varinfo_t vi;
  /* Query the nonzero-ness of P up front; setting pi->pt below would
     otherwise lose previously computed nonnull information.  */
  value_range vr;
  get_range_query (DECL_STRUCT_FUNCTION (fndecl))->range_of_expr (r&: vr, expr: p);
  bool nonnull = vr.nonzero_p ();

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (p) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (p)
      && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
    lookup_p = SSA_NAME_VAR (p);

  vi = lookup_vi_for_tree (t: lookup_p);
  if (!vi)
    return;

  pi = get_ptr_info (p);
  pi->pt = find_what_var_points_to (fndecl, orig_vi: vi);
  /* Conservatively set to NULL from PTA (to true).  */
  pi->pt.null = 1;
  /* Preserve pointer nonnull globally computed.  */
  if (nonnull)
    set_ptr_nonnull (p);
}
6861 | |
6862 | |
/* Query statistics for points-to solutions.  */

static struct {
  /* Counts of pt_solution_includes queries that answered may-alias
     resp. no-alias.  */
  unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
  unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
  /* Likewise for pt_solutions_intersect queries.  */
  unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
  unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
} pta_stats;
6871 | |
/* Dump the accumulated points-to query statistics to file S.  */

void
dump_pta_stats (FILE *s)
{
  fprintf (stream: s, format: "\nPTA query stats:\n" );
  fprintf (stream: s, format: "  pt_solution_includes: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n" ,
	   pta_stats.pt_solution_includes_no_alias,
	   pta_stats.pt_solution_includes_no_alias
	   + pta_stats.pt_solution_includes_may_alias);
  fprintf (stream: s, format: "  pt_solutions_intersect: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n" ,
	   pta_stats.pt_solutions_intersect_no_alias,
	   pta_stats.pt_solutions_intersect_no_alias
	   + pta_stats.pt_solutions_intersect_may_alias);
}
6889 | |
6890 | |
6891 | /* Reset the points-to solution *PT to a conservative default |
6892 | (point to anything). */ |
6893 | |
6894 | void |
6895 | pt_solution_reset (struct pt_solution *pt) |
6896 | { |
6897 | memset (s: pt, c: 0, n: sizeof (struct pt_solution)); |
6898 | pt->anything = true; |
6899 | pt->null = true; |
6900 | } |
6901 | |
/* Set the points-to solution *PT to point only to the variables
   in VARS.  VARS_CONTAINS_NONLOCAL specifies whether that contains
   global variables.  The vars_contains_escaped flag is derived here
   from the current function's ESCAPED solution.  */

void
pt_solution_set (struct pt_solution *pt, bitmap vars,
		 bool vars_contains_nonlocal)
{
  memset (s: pt, c: 0, n: sizeof (struct pt_solution));
  pt->vars = vars;
  pt->vars_contains_nonlocal = vars_contains_nonlocal;
  pt->vars_contains_escaped
    = (cfun->gimple_df->escaped.anything
       || bitmap_intersect_p (cfun->gimple_df->escaped.vars, vars));
}
6918 | |
/* Set the points-to solution *PT to point only to the variable VAR,
   identified by its points-to UID.  */

void
pt_solution_set_var (struct pt_solution *pt, tree var)
{
  memset (s: pt, c: 0, n: sizeof (struct pt_solution));
  pt->vars = BITMAP_GGC_ALLOC ();
  bitmap_set_bit (pt->vars, DECL_PT_UID (var));
  pt->vars_contains_nonlocal = is_global_var (t: var);
  /* The variable escapes if it is part of the current function's
     ESCAPED solution.  */
  pt->vars_contains_escaped
    = (cfun->gimple_df->escaped.anything
       || bitmap_bit_p (cfun->gimple_df->escaped.vars, DECL_PT_UID (var)));
}
6932 | |
/* Computes the union of the points-to solutions *DEST and *SRC and
   stores the result in *DEST.  This changes the points-to bitmap
   of *DEST and thus may not be used if that might be shared.
   The points-to bitmap of *SRC and *DEST will not be shared after
   this function if they were not before.  */

static void
pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
{
  /* ANYTHING subsumes everything else; short-circuit to the
     conservative default.  */
  dest->anything |= src->anything;
  if (dest->anything)
    {
      pt_solution_reset (pt: dest);
      return;
    }

  dest->nonlocal |= src->nonlocal;
  dest->escaped |= src->escaped;
  dest->ipa_escaped |= src->ipa_escaped;
  dest->null |= src->null;
  dest->vars_contains_nonlocal |= src->vars_contains_nonlocal;
  dest->vars_contains_escaped |= src->vars_contains_escaped;
  dest->vars_contains_escaped_heap |= src->vars_contains_escaped_heap;
  if (!src->vars)
    return;

  if (!dest->vars)
    dest->vars = BITMAP_GGC_ALLOC ();
  bitmap_ior_into (dest->vars, src->vars);
}
6963 | |
6964 | /* Return true if the points-to solution *PT is empty. */ |
6965 | |
6966 | bool |
6967 | pt_solution_empty_p (const pt_solution *pt) |
6968 | { |
6969 | if (pt->anything |
6970 | || pt->nonlocal) |
6971 | return false; |
6972 | |
6973 | if (pt->vars |
6974 | && !bitmap_empty_p (map: pt->vars)) |
6975 | return false; |
6976 | |
6977 | /* If the solution includes ESCAPED, check if that is empty. */ |
6978 | if (pt->escaped |
6979 | && !pt_solution_empty_p (pt: &cfun->gimple_df->escaped)) |
6980 | return false; |
6981 | |
6982 | /* If the solution includes ESCAPED, check if that is empty. */ |
6983 | if (pt->ipa_escaped |
6984 | && !pt_solution_empty_p (pt: &ipa_escaped_pt)) |
6985 | return false; |
6986 | |
6987 | return true; |
6988 | } |
6989 | |
6990 | /* Return true if the points-to solution *PT only point to a single var, and |
6991 | return the var uid in *UID. */ |
6992 | |
6993 | bool |
6994 | pt_solution_singleton_or_null_p (struct pt_solution *pt, unsigned *uid) |
6995 | { |
6996 | if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped |
6997 | || pt->vars == NULL |
6998 | || !bitmap_single_bit_set_p (pt->vars)) |
6999 | return false; |
7000 | |
7001 | *uid = bitmap_first_set_bit (pt->vars); |
7002 | return true; |
7003 | } |
7004 | |
/* Return true if the points-to solution *PT includes global memory.
   If ESCAPED_LOCAL_P is true then escaped local variables are also
   considered global.  */

bool
pt_solution_includes_global (struct pt_solution *pt, bool escaped_local_p)
{
  if (pt->anything
      || pt->nonlocal
      || pt->vars_contains_nonlocal
      /* The following is a hack to make the malloc escape hack work.
	 In reality we'd need different sets for escaped-through-return
	 and escaped-to-callees and passes would need to be updated.  */
      || pt->vars_contains_escaped_heap)
    return true;

  if (escaped_local_p && pt->vars_contains_escaped)
    return true;

  /* 'escaped' is also a placeholder so we have to look into it.  */
  if (pt->escaped)
    return pt_solution_includes_global (pt: &cfun->gimple_df->escaped,
					escaped_local_p);

  /* Likewise for the IPA escape placeholder.  */
  if (pt->ipa_escaped)
    return pt_solution_includes_global (pt: &ipa_escaped_pt,
					escaped_local_p);

  return false;
}
7035 | |
7036 | /* Return true if the points-to solution *PT includes the variable |
7037 | declaration DECL. */ |
7038 | |
7039 | static bool |
7040 | pt_solution_includes_1 (struct pt_solution *pt, const_tree decl) |
7041 | { |
7042 | if (pt->anything) |
7043 | return true; |
7044 | |
7045 | if (pt->nonlocal |
7046 | && is_global_var (t: decl)) |
7047 | return true; |
7048 | |
7049 | if (pt->vars |
7050 | && bitmap_bit_p (pt->vars, DECL_PT_UID (decl))) |
7051 | return true; |
7052 | |
7053 | /* If the solution includes ESCAPED, check it. */ |
7054 | if (pt->escaped |
7055 | && pt_solution_includes_1 (pt: &cfun->gimple_df->escaped, decl)) |
7056 | return true; |
7057 | |
7058 | /* If the solution includes ESCAPED, check it. */ |
7059 | if (pt->ipa_escaped |
7060 | && pt_solution_includes_1 (pt: &ipa_escaped_pt, decl)) |
7061 | return true; |
7062 | |
7063 | return false; |
7064 | } |
7065 | |
7066 | bool |
7067 | pt_solution_includes (struct pt_solution *pt, const_tree decl) |
7068 | { |
7069 | bool res = pt_solution_includes_1 (pt, decl); |
7070 | if (res) |
7071 | ++pta_stats.pt_solution_includes_may_alias; |
7072 | else |
7073 | ++pta_stats.pt_solution_includes_no_alias; |
7074 | return res; |
7075 | } |
7076 | |
/* Return true if both points-to solutions PT1 and PT2 have a non-empty
   intersection, i.e. the pointers they describe may alias.  Worker
   without statistics bookkeeping.  */

static bool
pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
{
  if (pt1->anything || pt2->anything)
    return true;

  /* If either points to unknown global memory and the other points to
     any global memory they alias.  */
  if ((pt1->nonlocal
       && (pt2->nonlocal
	   || pt2->vars_contains_nonlocal))
      || (pt2->nonlocal
	  && pt1->vars_contains_nonlocal))
    return true;

  /* If either points to all escaped memory and the other points to
     any escaped memory they alias.  */
  if ((pt1->escaped
       && (pt2->escaped
	   || pt2->vars_contains_escaped))
      || (pt2->escaped
	  && pt1->vars_contains_escaped))
    return true;

  /* Check the escaped solution if required.
     ??? Do we need to check the local against the IPA escaped sets?  */
  if ((pt1->ipa_escaped || pt2->ipa_escaped)
      && !pt_solution_empty_p (pt: &ipa_escaped_pt))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias.  */
      if (pt1->ipa_escaped && pt2->ipa_escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other.  */
      if ((pt1->ipa_escaped
	   && pt_solutions_intersect_1 (pt1: &ipa_escaped_pt, pt2))
	  || (pt2->ipa_escaped
	      && pt_solutions_intersect_1 (pt1: &ipa_escaped_pt, pt2: pt1)))
	return true;
    }

  /* Now both pointers alias if their points-to solution intersects.  */
  return (pt1->vars
	  && pt2->vars
	  && bitmap_intersect_p (pt1->vars, pt2->vars));
}
7128 | |
7129 | bool |
7130 | pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2) |
7131 | { |
7132 | bool res = pt_solutions_intersect_1 (pt1, pt2); |
7133 | if (res) |
7134 | ++pta_stats.pt_solutions_intersect_may_alias; |
7135 | else |
7136 | ++pta_stats.pt_solutions_intersect_no_alias; |
7137 | return res; |
7138 | } |
7139 | |
/* Dump structalias solver statistics to OUTFILE.  */

static void
dump_sa_stats (FILE *outfile)
{
  fprintf (stream: outfile, format: "Points-to Stats:\n" );
  fprintf (stream: outfile, format: "Total vars:               %d\n" , stats.total_vars);
  fprintf (stream: outfile, format: "Non-pointer vars:          %d\n" ,
	   stats.nonpointer_vars);
  fprintf (stream: outfile, format: "Statically unified vars:  %d\n" ,
	   stats.unified_vars_static);
  fprintf (stream: outfile, format: "Dynamically unified vars: %d\n" ,
	   stats.unified_vars_dynamic);
  fprintf (stream: outfile, format: "Iterations:               %d\n" , stats.iterations);
  fprintf (stream: outfile, format: "Number of edges:          %d\n" , stats.num_edges);
  fprintf (stream: outfile, format: "Number of implicit edges: %d\n" ,
	   stats.num_implicit_edges);
  fprintf (stream: outfile, format: "Number of avoided edges: %d\n" ,
	   stats.num_avoided_edges);
}
7160 | |
7161 | /* Dump points-to information to OUTFILE. */ |
7162 | |
7163 | static void |
7164 | dump_sa_points_to_info (FILE *outfile) |
7165 | { |
7166 | fprintf (stream: outfile, format: "\nPoints-to sets\n\n" ); |
7167 | |
7168 | for (unsigned i = 1; i < varmap.length (); i++) |
7169 | { |
7170 | varinfo_t vi = get_varinfo (n: i); |
7171 | if (!vi->may_have_pointers) |
7172 | continue; |
7173 | dump_solution_for_var (file: outfile, var: i); |
7174 | } |
7175 | } |
7176 | |
7177 | |
7178 | /* Debug points-to information to stderr. */ |
7179 | |
7180 | DEBUG_FUNCTION void |
7181 | debug_sa_points_to_info (void) |
7182 | { |
7183 | dump_sa_points_to_info (stderr); |
7184 | } |
7185 | |
7186 | |
/* Initialize the always-existing constraint variables NULL, ANYTHING,
   STRING, ESCAPED, NONLOCAL, STOREDANYTHING and INTEGER, together with
   the constraints that inter-relate them.  The creation order here is
   significant: each variable's id must match its predefined *_id
   constant, which the gcc_asserts below verify.  */

static void
init_base_vars (void)
{
  struct constraint_expr lhs, rhs;
  varinfo_t var_anything;
  varinfo_t var_nothing;
  varinfo_t var_string;
  varinfo_t var_escaped;
  varinfo_t var_nonlocal;
  varinfo_t var_storedanything;
  varinfo_t var_integer;

  /* Variable ID zero is reserved and should be NULL.  */
  varmap.safe_push (NULL);

  /* Create the NULL variable, used to represent that a variable points
     to NULL.  */
  var_nothing = new_var_info (NULL_TREE, name: "NULL" , add_id: false);
  gcc_assert (var_nothing->id == nothing_id);
  var_nothing->is_artificial_var = 1;
  var_nothing->offset = 0;
  /* Artificial vars get ~0 size/fullsize; they have no real extent.  */
  var_nothing->size = ~0;
  var_nothing->fullsize = ~0;
  var_nothing->is_special_var = 1;
  var_nothing->may_have_pointers = 0;
  var_nothing->is_global_var = 0;

  /* Create the ANYTHING variable, used to represent that a variable
     points to some unknown piece of memory.  */
  var_anything = new_var_info (NULL_TREE, name: "ANYTHING" , add_id: false);
  gcc_assert (var_anything->id == anything_id);
  var_anything->is_artificial_var = 1;
  var_anything->size = ~0;
  var_anything->offset = 0;
  var_anything->fullsize = ~0;
  var_anything->is_special_var = 1;

  /* Anything points to anything.  This makes deref constraints just
     work in the presence of linked list and other p = *p type loops,
     by saying that *ANYTHING = ANYTHING. */
  lhs.type = SCALAR;
  lhs.var = anything_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;

  /* This specifically does not use process_constraint because
     process_constraint ignores all anything = anything constraints, since all
     but this one are redundant. */
  constraints.safe_push (obj: new_constraint (lhs, rhs));

  /* Create the STRING variable, used to represent that a variable
     points to a string literal.  String literals don't contain
     pointers so STRING doesn't point to anything.  */
  var_string = new_var_info (NULL_TREE, name: "STRING" , add_id: false);
  gcc_assert (var_string->id == string_id);
  var_string->is_artificial_var = 1;
  var_string->offset = 0;
  var_string->size = ~0;
  var_string->fullsize = ~0;
  var_string->is_special_var = 1;
  var_string->may_have_pointers = 0;

  /* Create the ESCAPED variable, used to represent the set of escaped
     memory.  */
  var_escaped = new_var_info (NULL_TREE, name: "ESCAPED" , add_id: false);
  gcc_assert (var_escaped->id == escaped_id);
  var_escaped->is_artificial_var = 1;
  var_escaped->offset = 0;
  var_escaped->size = ~0;
  var_escaped->fullsize = ~0;
  /* Unlike most artificial vars ESCAPED is not "special": a real
     points-to solution is computed and queried for it.  */
  var_escaped->is_special_var = 0;

  /* Create the NONLOCAL variable, used to represent the set of nonlocal
     memory.  */
  var_nonlocal = new_var_info (NULL_TREE, name: "NONLOCAL" , add_id: false);
  gcc_assert (var_nonlocal->id == nonlocal_id);
  var_nonlocal->is_artificial_var = 1;
  var_nonlocal->offset = 0;
  var_nonlocal->size = ~0;
  var_nonlocal->fullsize = ~0;
  var_nonlocal->is_special_var = 1;

  /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = DEREF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (t: new_constraint (lhs, rhs));

  /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
     whole variable escapes.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = escaped_id;
  rhs.offset = UNKNOWN_OFFSET;
  process_constraint (t: new_constraint (lhs, rhs));

  /* *ESCAPED = NONLOCAL.  This is true because we have to assume
     everything pointed to by escaped points to what global memory can
     point to.  */
  lhs.type = DEREF;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (t: new_constraint (lhs, rhs));

  /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED.  This is true because
     global memory may point to global memory and escaped memory.  */
  lhs.type = SCALAR;
  lhs.var = nonlocal_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (t: new_constraint (lhs, rhs));
  /* RHS is re-used here; only its var changes for the second constraint.  */
  rhs.type = ADDRESSOF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (t: new_constraint (lhs, rhs));

  /* Create the STOREDANYTHING variable, used to represent the set of
     variables stored to *ANYTHING.  */
  var_storedanything = new_var_info (NULL_TREE, name: "STOREDANYTHING" , add_id: false);
  gcc_assert (var_storedanything->id == storedanything_id);
  var_storedanything->is_artificial_var = 1;
  var_storedanything->offset = 0;
  var_storedanything->size = ~0;
  var_storedanything->fullsize = ~0;
  /* Like ESCAPED, STOREDANYTHING carries a real solution.  */
  var_storedanything->is_special_var = 0;

  /* Create the INTEGER variable, used to represent that a variable points
     to what an INTEGER "points to".  */
  var_integer = new_var_info (NULL_TREE, name: "INTEGER" , add_id: false);
  gcc_assert (var_integer->id == integer_id);
  var_integer->is_artificial_var = 1;
  var_integer->size = ~0;
  var_integer->fullsize = ~0;
  var_integer->offset = 0;
  var_integer->is_special_var = 1;

  /* INTEGER = ANYTHING, because we don't know where a dereference of
     a random integer will point to.  */
  lhs.type = SCALAR;
  lhs.var = integer_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;
  process_constraint (t: new_constraint (lhs, rhs));
}
7348 | |
/* Initialize things necessary to perform PTA: the bitmap obstacks, the
   constraint and varinfo vectors, the side tables, and the always-present
   base variables (via init_base_vars).  */

static void
init_alias_vars (void)
{
  /* Field-sensitivity only matters when a variable may be split into
     more than one field.  */
  use_field_sensitive = (param_max_fields_for_field_sensitive > 1);

  bitmap_obstack_initialize (&pta_obstack);
  bitmap_obstack_initialize (&oldpta_obstack);
  /* The predecessor-graph obstack is released early, in
     remove_preds_and_fake_succs.  */
  bitmap_obstack_initialize (&predbitmap_obstack);

  constraints.create (nelems: 8);
  varmap.create (nelems: 8);
  /* Map from trees / call statements to their variable infos.  */
  vi_for_tree = new hash_map<tree, varinfo_t>;
  call_stmt_vars = new hash_map<gimple *, varinfo_t>;

  memset (s: &stats, c: 0, n: sizeof (stats));
  shared_bitmap_table = new hash_table<shared_bitmap_hasher> (511);
  init_base_vars ();

  gcc_obstack_init (&fake_var_decl_obstack);

  /* Cache of finalized points-to solutions, keyed by varinfo.  */
  final_solutions = new hash_map<varinfo_t, pt_solution *>;
  gcc_obstack_init (&final_solutions_obstack);
}
7374 | |
7375 | /* Remove the REF and ADDRESS edges from GRAPH, as well as all the |
7376 | predecessor edges. */ |
7377 | |
7378 | static void |
7379 | remove_preds_and_fake_succs (constraint_graph_t graph) |
7380 | { |
7381 | unsigned int i; |
7382 | |
7383 | /* Clear the implicit ref and address nodes from the successor |
7384 | lists. */ |
7385 | for (i = 1; i < FIRST_REF_NODE; i++) |
7386 | { |
7387 | if (graph->succs[i]) |
7388 | bitmap_clear_range (graph->succs[i], FIRST_REF_NODE, |
7389 | FIRST_REF_NODE * 2); |
7390 | } |
7391 | |
7392 | /* Free the successor list for the non-ref nodes. */ |
7393 | for (i = FIRST_REF_NODE + 1; i < graph->size; i++) |
7394 | { |
7395 | if (graph->succs[i]) |
7396 | BITMAP_FREE (graph->succs[i]); |
7397 | } |
7398 | |
7399 | /* Now reallocate the size of the successor list as, and blow away |
7400 | the predecessor bitmaps. */ |
7401 | graph->size = varmap.length (); |
7402 | graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size); |
7403 | |
7404 | free (ptr: graph->implicit_preds); |
7405 | graph->implicit_preds = NULL; |
7406 | free (ptr: graph->preds); |
7407 | graph->preds = NULL; |
7408 | bitmap_obstack_release (&predbitmap_obstack); |
7409 | } |
7410 | |
/* Solve the constraint set.  Renumbers the variables for bitmap
   efficiency, builds and simplifies the constraint graph, and finally
   propagates the points-to solutions over it.  */

static void
solve_constraints (void)
{
  class scc_info *si;

  /* Sort varinfos so that ones that cannot be pointed to are last.
     This makes bitmaps more efficient.  */
  unsigned int *map = XNEWVEC (unsigned int, varmap.length ());
  /* The special vars (ids up to integer_id) keep their fixed slots.  */
  for (unsigned i = 0; i < integer_id + 1; ++i)
    map[i] = i;
  /* Start with address-taken vars, followed by not address-taken vars
     to move vars never appearing in the points-to solution bitmaps last.  */
  unsigned j = integer_id + 1;
  for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
    if (varmap[varmap[i]->head]->address_taken)
      map[i] = j++;
  for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
    if (! varmap[varmap[i]->head]->address_taken)
      map[i] = j++;
  /* Shuffle varmap according to map.  */
  for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
    {
      /* Apply the permutation in place: chase the swap cycle until
	 slot I holds the variable that maps to I.  */
      while (map[varmap[i]->id] != i)
	std::swap (a&: varmap[i], b&: varmap[map[varmap[i]->id]]);
      gcc_assert (bitmap_empty_p (varmap[i]->solution));
      varmap[i]->id = i;
      /* The field-list links have to be remapped as well.  */
      varmap[i]->next = map[varmap[i]->next];
      varmap[i]->head = map[varmap[i]->head];
    }
  /* Finally rewrite constraints.  */
  for (unsigned i = 0; i < constraints.length (); ++i)
    {
      constraints[i]->lhs.var = map[constraints[i]->lhs.var];
      constraints[i]->rhs.var = map[constraints[i]->rhs.var];
    }
  free (ptr: map);

  if (dump_file)
    fprintf (stream: dump_file,
	     format: "\nCollapsing static cycles and doing variable "
	     "substitution\n" );

  /* Doubled to make room for the implicit REF nodes of the predecessor
     graph (see FIRST_REF_NODE / remove_preds_and_fake_succs).  */
  init_graph (size: varmap.length () * 2);

  if (dump_file)
    fprintf (stream: dump_file, format: "Building predecessor graph\n" );
  build_pred_graph ();

  if (dump_file)
    fprintf (stream: dump_file, format: "Detecting pointer and location "
	     "equivalences\n" );
  si = perform_var_substitution (graph);

  if (dump_file)
    fprintf (stream: dump_file, format: "Rewriting constraints and unifying "
	     "variables\n" );
  rewrite_constraints (graph, si);

  build_succ_graph ();

  free_var_substitution_info (si);

  /* Attach complex constraints to graph nodes.  */
  move_complex_constraints (graph);

  if (dump_file)
    fprintf (stream: dump_file, format: "Uniting pointer but not location equivalent "
	     "variables\n" );
  unite_pointer_equivalences (graph);

  if (dump_file)
    fprintf (stream: dump_file, format: "Finding indirect cycles\n" );
  find_indirect_cycles (graph);

  /* Implicit nodes and predecessors are no longer necessary at this
     point. */
  remove_preds_and_fake_succs (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (stream: dump_file, format: "\n\n// The constraint graph before solve-graph "
	       "in dot format:\n" );
      dump_constraint_graph (file: dump_file);
      fprintf (stream: dump_file, format: "\n\n" );
    }

  if (dump_file)
    fprintf (stream: dump_file, format: "Solving graph\n" );

  solve_graph (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (stream: dump_file, format: "\n\n// The constraint graph after solve-graph "
	       "in dot format:\n" );
      dump_constraint_graph (file: dump_file);
      fprintf (stream: dump_file, format: "\n\n" );
    }
}
7512 | |
/* Create points-to sets for the current function.  See the comments
   at the start of the file for an algorithmic overview.  Builds the
   constraints from all statements, solves them, post-processes escapes
   through return statements, and finally fills in the per-SSA-name and
   per-call-site points-to solutions.  */

static void
compute_points_to_sets (void)
{
  basic_block bb;
  varinfo_t vi;

  timevar_push (tv: TV_TREE_PTA);

  init_alias_vars ();

  intra_create_variable_infos (cfun);

  /* Now walk all statements and build the constraint set.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      /* PHIs for virtual operands carry no pointer information.  */
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (i: gsi);
	   gsi_next (i: &gsi))
	{
	  gphi *phi = gsi.phi ();

	  if (! virtual_operand_p (op: gimple_phi_result (gs: phi)))
	    find_func_aliases (cfun, origt: phi);
	}

      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (i: gsi);
	   gsi_next (i: &gsi))
	{
	  gimple *stmt = gsi_stmt (i: gsi);

	  find_func_aliases (cfun, origt: stmt);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (stream: dump_file, format: "Points-to analysis\n\nConstraints:\n\n" );
      dump_constraints (file: dump_file, from: 0);
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  /* Post-process solutions for escapes through returns.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (greturn *ret = safe_dyn_cast <greturn *> (p: *gsi_last_bb (bb: e->src)))
      {
	tree val = gimple_return_retval (gs: ret);
	/* ???  Easy to handle simple indirections with some work.
	   Arbitrary references like foo.bar.baz are more difficult
	   (but conservatively easy enough with just looking at the base).
	   Mind to fixup find_func_aliases as well.  */
	if (!val || !SSA_VAR_P (val))
	  continue;
	/* returns happen last in non-IPA so they only influence
	   the ESCAPED solution and we can filter local variables.  */
	varinfo_t escaped_vi = get_varinfo (n: find (node: escaped_id));
	varinfo_t vi = lookup_vi_for_tree (t: val);
	bitmap delta = BITMAP_ALLOC (obstack: &pta_obstack);
	bitmap_iterator bi;
	unsigned i;
	/* Seed DELTA with the global/heap vars pointed to by the returned
	   value (and its sub-fields) that are not yet in ESCAPED.  */
	for (; vi; vi = vi_next (vi))
	  {
	    varinfo_t part_vi = get_varinfo (n: find (node: vi->id));
	    EXECUTE_IF_AND_COMPL_IN_BITMAP (part_vi->solution,
					    escaped_vi->solution, 0, i, bi)
	      {
		varinfo_t pointed_to_vi = get_varinfo (n: i);
		if (pointed_to_vi->is_global_var
		    /* We delay marking of heap memory as global.  */
		    || pointed_to_vi->is_heap_var)
		  bitmap_set_bit (delta, i);
	      }
	  }

	/* Now compute the transitive closure.  */
	bitmap_ior_into (escaped_vi->solution, delta);
	bitmap new_delta = BITMAP_ALLOC (obstack: &pta_obstack);
	/* Iterate: everything the newly escaped vars point to escapes as
	   well, until no new global/heap vars are discovered.  */
	while (!bitmap_empty_p (map: delta))
	  {
	    EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
	      {
		varinfo_t pointed_to_vi = get_varinfo (n: i);
		pointed_to_vi = get_varinfo (n: find (node: pointed_to_vi->id));
		unsigned j;
		bitmap_iterator bi2;
		EXECUTE_IF_AND_COMPL_IN_BITMAP (pointed_to_vi->solution,
						escaped_vi->solution,
						0, j, bi2)
		  {
		    varinfo_t pointed_to_vi2 = get_varinfo (n: j);
		    if (pointed_to_vi2->is_global_var
			/* We delay marking of heap memory as global.  */
			|| pointed_to_vi2->is_heap_var)
		      bitmap_set_bit (new_delta, j);
		  }
	      }
	    bitmap_ior_into (escaped_vi->solution, new_delta);
	    bitmap_clear (delta);
	    std::swap (a&: delta, b&: new_delta);
	  }
	BITMAP_FREE (delta);
	BITMAP_FREE (new_delta);
      }

  if (dump_file && (dump_flags & TDF_STATS))
    dump_sa_stats (outfile: dump_file);

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_sa_points_to_info (outfile: dump_file);

  /* Compute the points-to set for ESCAPED used for call-clobber analysis.  */
  cfun->gimple_df->escaped = find_what_var_points_to (cfun->decl,
						      orig_vi: get_varinfo (n: escaped_id));

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  cfun->gimple_df->escaped.escaped = 0;

  /* Compute the points-to sets for pointer SSA_NAMEs.  */
  unsigned i;
  tree ptr;

  FOR_EACH_SSA_NAME (i, ptr, cfun)
    {
      if (POINTER_TYPE_P (TREE_TYPE (ptr)))
	find_what_p_points_to (cfun->decl, p: ptr);
    }

  /* Compute the call-used/clobbered sets.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (i: gsi); gsi_next (i: &gsi))
	{
	  gcall *stmt;
	  struct pt_solution *pt;

	  stmt = dyn_cast <gcall *> (p: gsi_stmt (i: gsi));
	  if (!stmt)
	    continue;

	  /* First the use (read) set of the call ...  */
	  pt = gimple_call_use_set (call_stmt: stmt);
	  if (gimple_call_flags (stmt) & ECF_CONST)
	    /* Const calls use no memory at all.  */
	    memset (s: pt, c: 0, n: sizeof (struct pt_solution));
	  else
	    {
	      bool uses_global_memory = true;
	      bool reads_global_memory = true;

	      determine_global_memory_access (stmt, NULL,
					      reads_global_memory: &reads_global_memory,
					      uses_global_memory: &uses_global_memory);
	      if ((vi = lookup_call_use_vi (call: stmt)) != NULL)
		{
		  *pt = find_what_var_points_to (cfun->decl, orig_vi: vi);
		  /* Escaped (and thus nonlocal) variables are always
		     implicitly used by calls.  */
		  /* ???  ESCAPED can be empty even though NONLOCAL
		     always escaped.  */
		  if (uses_global_memory)
		    {
		      pt->nonlocal = 1;
		      pt->escaped = 1;
		    }
		}
	      else if (uses_global_memory)
		{
		  /* If there is nothing special about this call then
		     we have made everything that is used also escape.  */
		  *pt = cfun->gimple_df->escaped;
		  pt->nonlocal = 1;
		}
	      else
		memset (s: pt, c: 0, n: sizeof (struct pt_solution));
	    }

	  /* ... then the clobber (write) set, analogously.  */
	  pt = gimple_call_clobber_set (call_stmt: stmt);
	  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
	    /* Const/pure/novops calls clobber nothing.  */
	    memset (s: pt, c: 0, n: sizeof (struct pt_solution));
	  else
	    {
	      bool writes_global_memory = true;

	      determine_global_memory_access (stmt, writes_global_memory: &writes_global_memory,
					      NULL, NULL);

	      if ((vi = lookup_call_clobber_vi (call: stmt)) != NULL)
		{
		  *pt = find_what_var_points_to (cfun->decl, orig_vi: vi);
		  /* Escaped (and thus nonlocal) variables are always
		     implicitly clobbered by calls.  */
		  /* ???  ESCAPED can be empty even though NONLOCAL
		     always escaped.  */
		  if (writes_global_memory)
		    {
		      pt->nonlocal = 1;
		      pt->escaped = 1;
		    }
		}
	      else if (writes_global_memory)
		{
		  /* If there is nothing special about this call then
		     we have made everything that is used also escape.  */
		  *pt = cfun->gimple_df->escaped;
		  pt->nonlocal = 1;
		}
	      else
		memset (s: pt, c: 0, n: sizeof (struct pt_solution));
	    }
	}
    }

  timevar_pop (tv: TV_TREE_PTA);
}
7734 | |
7735 | |
7736 | /* Delete created points-to sets. */ |
7737 | |
7738 | static void |
7739 | delete_points_to_sets (void) |
7740 | { |
7741 | unsigned int i; |
7742 | |
7743 | delete shared_bitmap_table; |
7744 | shared_bitmap_table = NULL; |
7745 | if (dump_file && (dump_flags & TDF_STATS)) |
7746 | fprintf (stream: dump_file, format: "Points to sets created:%d\n" , |
7747 | stats.points_to_sets_created); |
7748 | |
7749 | delete vi_for_tree; |
7750 | delete call_stmt_vars; |
7751 | bitmap_obstack_release (&pta_obstack); |
7752 | constraints.release (); |
7753 | |
7754 | for (i = 0; i < graph->size; i++) |
7755 | graph->complex[i].release (); |
7756 | free (ptr: graph->complex); |
7757 | |
7758 | free (ptr: graph->rep); |
7759 | free (ptr: graph->succs); |
7760 | free (ptr: graph->pe); |
7761 | free (ptr: graph->pe_rep); |
7762 | free (ptr: graph->indirect_cycles); |
7763 | free (ptr: graph); |
7764 | |
7765 | varmap.release (); |
7766 | variable_info_pool.release (); |
7767 | constraint_pool.release (); |
7768 | |
7769 | obstack_free (&fake_var_decl_obstack, NULL); |
7770 | |
7771 | delete final_solutions; |
7772 | obstack_free (&final_solutions_obstack, NULL); |
7773 | } |
7774 | |
/* Data passed to the visit_loadstore callback.  */
struct vls_data
{
  /* The dependence clique to assign to accesses not based on a
     restrict pointer.  */
  unsigned short clique;
  /* Whether one of the restrict tags is in the ESCAPED solution.  */
  bool escaped_p;
  /* The vars (including subvars) pointed to by restrict pointers.  */
  bitmap rvars;
};
7781 | |
/* Mark "other" loads and stores as belonging to CLIQUE and with
   base zero.  Callback for walk_stmt_load_store_ops; DATA is a
   vls_data.  BASE is the access base, REF the full reference.
   Always returns false so the walk continues.  */

static bool
visit_loadstore (gimple *, tree base, tree ref, void *data)
{
  unsigned short clique = ((vls_data *) data)->clique;
  bitmap rvars = ((vls_data *) data)->rvars;
  bool escaped_p = ((vls_data *) data)->escaped_p;
  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    {
      tree ptr = TREE_OPERAND (base, 0);
      if (TREE_CODE (ptr) == SSA_NAME)
	{
	  /* For parameters, get at the points-to set for the actual parm
	     decl.  */
	  if (SSA_NAME_IS_DEFAULT_DEF (ptr)
	      && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
		  || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
	    ptr = SSA_NAME_VAR (ptr);

	  /* We need to make sure 'ptr' doesn't include any of
	     the restrict tags we added bases for in its points-to set.
	     If it does (or if it may point to escaped memory while a
	     restrict tag escaped), leave the access un-annotated.  */
	  varinfo_t vi = lookup_vi_for_tree (t: ptr);
	  if (! vi)
	    return false;

	  vi = get_varinfo (n: find (node: vi->id));
	  if (bitmap_intersect_p (rvars, vi->solution)
	      || (escaped_p && bitmap_bit_p (vi->solution, escaped_id)))
	    return false;
	}

      /* Do not overwrite existing cliques (that includes clique, base
	 pairs we just set).  */
      if (MR_DEPENDENCE_CLIQUE (base) == 0)
	{
	  MR_DEPENDENCE_CLIQUE (base) = clique;
	  MR_DEPENDENCE_BASE (base) = 0;
	}
    }

  /* For plain decl accesses see whether they are accesses to globals
     and rewrite them to MEM_REFs with { clique, 0 }.  */
  if (VAR_P (base)
      && is_global_var (t: base)
      /* ???  We can't rewrite a plain decl with the walk_stmt_load_store
	 ops callback.  */
      && base != ref)
    {
      /* Strip the handled components down to the decl and replace it
	 with an equivalent MEM_REF of its address carrying the
	 dependence info.  */
      tree *basep = &ref;
      while (handled_component_p (t: *basep))
	basep = &TREE_OPERAND (*basep, 0);
      gcc_assert (VAR_P (*basep));
      tree ptr = build_fold_addr_expr (*basep);
      tree zero = build_int_cst (TREE_TYPE (ptr), 0);
      *basep = build2 (MEM_REF, TREE_TYPE (*basep), ptr, zero);
      MR_DEPENDENCE_CLIQUE (*basep) = clique;
      MR_DEPENDENCE_BASE (*basep) = 0;
    }

  return false;
}
7846 | |
/* Data passed to the maybe_set_dependence_info callback.  */
struct msdi_data {
  /* The restrict-qualified SSA pointer whose dereferences we annotate.  */
  tree ptr;
  /* In/out: the clique to use; allocated lazily on first use.  */
  unsigned short *clique;
  /* In/out: the last restrict-var unique id handed out.  */
  unsigned short *last_ruid;
  /* The single restrict variable PTR points to.  */
  varinfo_t restrict_var;
};
7853 | |
7854 | /* If BASE is a MEM_REF then assign a clique, base pair to it, updating |
7855 | CLIQUE, *RESTRICT_VAR and LAST_RUID as passed via DATA. |
7856 | Return whether dependence info was assigned to BASE. */ |
7857 | |
7858 | static bool |
7859 | maybe_set_dependence_info (gimple *, tree base, tree, void *data) |
7860 | { |
7861 | tree ptr = ((msdi_data *)data)->ptr; |
7862 | unsigned short &clique = *((msdi_data *)data)->clique; |
7863 | unsigned short &last_ruid = *((msdi_data *)data)->last_ruid; |
7864 | varinfo_t restrict_var = ((msdi_data *)data)->restrict_var; |
7865 | if ((TREE_CODE (base) == MEM_REF |
7866 | || TREE_CODE (base) == TARGET_MEM_REF) |
7867 | && TREE_OPERAND (base, 0) == ptr) |
7868 | { |
7869 | /* Do not overwrite existing cliques. This avoids overwriting dependence |
7870 | info inlined from a function with restrict parameters inlined |
7871 | into a function with restrict parameters. This usually means we |
7872 | prefer to be precise in innermost loops. */ |
7873 | if (MR_DEPENDENCE_CLIQUE (base) == 0) |
7874 | { |
7875 | if (clique == 0) |
7876 | { |
7877 | if (cfun->last_clique == 0) |
7878 | cfun->last_clique = 1; |
7879 | clique = 1; |
7880 | } |
7881 | if (restrict_var->ruid == 0) |
7882 | restrict_var->ruid = ++last_ruid; |
7883 | MR_DEPENDENCE_CLIQUE (base) = clique; |
7884 | MR_DEPENDENCE_BASE (base) = restrict_var->ruid; |
7885 | return true; |
7886 | } |
7887 | } |
7888 | return false; |
7889 | } |
7890 | |
7891 | /* Clear dependence info for the clique DATA. */ |
7892 | |
7893 | static bool |
7894 | clear_dependence_clique (gimple *, tree base, tree, void *data) |
7895 | { |
7896 | unsigned short clique = (uintptr_t)data; |
7897 | if ((TREE_CODE (base) == MEM_REF |
7898 | || TREE_CODE (base) == TARGET_MEM_REF) |
7899 | && MR_DEPENDENCE_CLIQUE (base) == clique) |
7900 | { |
7901 | MR_DEPENDENCE_CLIQUE (base) = 0; |
7902 | MR_DEPENDENCE_BASE (base) = 0; |
7903 | } |
7904 | |
7905 | return false; |
7906 | } |
7907 | |
/* Compute the set of independent memory references based on restrict
   tags and their conservative propagation to the points-to sets.
   SSA pointers that provably point to a single restrict variable get
   their dereferences annotated with a { clique, base } pair; all other
   accesses get { clique, 0 } so they disambiguate against the restrict
   accesses but not against each other.  */

static void
compute_dependence_clique (void)
{
  /* First clear the special "local" clique.  */
  basic_block bb;
  if (cfun->last_clique != 0)
    FOR_EACH_BB_FN (bb, cfun)
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	   !gsi_end_p (i: gsi); gsi_next (i: &gsi))
	{
	  gimple *stmt = gsi_stmt (i: gsi);
	  walk_stmt_load_store_ops (stmt, (void *)(uintptr_t) 1,
				    clear_dependence_clique,
				    clear_dependence_clique);
	}

  unsigned short clique = 0;
  unsigned short last_ruid = 0;
  bitmap rvars = BITMAP_ALLOC (NULL);
  bool escaped_p = false;
  for (unsigned i = 0; i < num_ssa_names; ++i)
    {
      tree ptr = ssa_name (i);
      if (!ptr || !POINTER_TYPE_P (TREE_TYPE (ptr)))
	continue;

      /* Avoid all this when ptr is not dereferenced?  */
      tree p = ptr;
      /* For default defs of parameters the points-to info lives on the
	 decl, not the SSA name.  */
      if (SSA_NAME_IS_DEFAULT_DEF (ptr)
	  && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
	      || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
	p = SSA_NAME_VAR (ptr);
      varinfo_t vi = lookup_vi_for_tree (t: p);
      if (!vi)
	continue;
      vi = get_varinfo (n: find (node: vi->id));
      bitmap_iterator bi;
      unsigned j;
      varinfo_t restrict_var = NULL;
      /* Scan the points-to set: PTR qualifies only if everything it may
	 point to is a single restrict variable (or NULL).  */
      EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
	{
	  varinfo_t oi = get_varinfo (n: j);
	  /* Look at whole variables, not individual sub-fields.  */
	  if (oi->head != j)
	    oi = get_varinfo (n: oi->head);
	  if (oi->is_restrict_var)
	    {
	      if (restrict_var
		  && restrict_var != oi)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (stream: dump_file, format: "found restrict pointed-to "
			       "for " );
		      print_generic_expr (dump_file, ptr);
		      fprintf (stream: dump_file, format: " but not exclusively\n" );
		    }
		  restrict_var = NULL;
		  break;
		}
	      restrict_var = oi;
	    }
	  /* NULL is the only other valid points-to entry.  */
	  else if (oi->id != nothing_id)
	    {
	      restrict_var = NULL;
	      break;
	    }
	}
      /* Ok, found that ptr must(!) point to a single(!) restrict
	 variable.  */
      /* ???  PTA isn't really a proper propagation engine to compute
	 this property.
	 ???  We could handle merging of two restricts by unifying them.  */
      if (restrict_var)
	{
	  /* Now look at possible dereferences of ptr.  */
	  imm_use_iterator ui;
	  gimple *use_stmt;
	  bool used = false;
	  msdi_data data = { .ptr: ptr, .clique: &clique, .last_ruid: &last_ruid, .restrict_var: restrict_var };
	  FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
	    used |= walk_stmt_load_store_ops (use_stmt, &data,
					      maybe_set_dependence_info,
					      maybe_set_dependence_info);
	  if (used)
	    {
	      /* Add all subvars to the set of restrict pointed-to set. */
	      for (unsigned sv = restrict_var->head; sv != 0;
		   sv = get_varinfo (n: sv)->next)
		bitmap_set_bit (rvars, sv);
	      /* Remember if any annotated restrict tag escaped.  */
	      varinfo_t escaped = get_varinfo (n: find (node: escaped_id));
	      if (bitmap_bit_p (escaped->solution, restrict_var->id))
		escaped_p = true;
	    }
	}
    }

  if (clique != 0)
    {
      /* Assign the BASE id zero to all accesses not based on a restrict
	 pointer.  That way they get disambiguated against restrict
	 accesses but not against each other.  */
      /* ???  For restricts derived from globals (thus not incoming
	 parameters) we can't restrict scoping properly thus the following
	 is too aggressive there.  For now we have excluded those globals from
	 getting into the MR_DEPENDENCE machinery.  */
      vls_data data = { .clique: clique, .escaped_p: escaped_p, .rvars: rvars };
      basic_block bb;
      FOR_EACH_BB_FN (bb, cfun)
	for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	     !gsi_end_p (i: gsi); gsi_next (i: &gsi))
	  {
	    gimple *stmt = gsi_stmt (i: gsi);
	    walk_stmt_load_store_ops (stmt, &data,
				      visit_loadstore, visit_loadstore);
	  }
    }

  BITMAP_FREE (rvars);
}
8031 | |
/* Compute points-to information for every SSA_NAME pointer in the
   current function and compute the transitive closure of escaped
   variables to re-initialize the call-clobber states of local variables.
   Returns 0 (no TODO flags for the pass manager).  */

unsigned int
compute_may_aliases (void)
{
  /* If IPA PTA results have been stored for this function do not
     recompute them locally; the local solution would only be weaker.  */
  if (cfun->gimple_df->ipa_pta)
    {
      if (dump_file)
	{
	  fprintf (stream: dump_file, format: "\nNot re-computing points-to information "
		   "because IPA points-to information is available.\n\n" );

	  /* But still dump what points-to information we have remaining.  */
	  if (dump_flags & (TDF_DETAILS|TDF_ALIAS))
	    dump_alias_info (dump_file);
	}

      return 0;
    }

  /* For each pointer P_i, determine the sets of variables that P_i may
     point-to.  Compute the reachability set of escaped and call-used
     variables.  */
  compute_points_to_sets ();

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & (TDF_DETAILS|TDF_ALIAS)))
    dump_alias_info (dump_file);

  /* Compute restrict-based memory disambiguations.  */
  compute_dependence_clique ();

  /* Deallocate memory used by aliasing data structures and the internal
     points-to solution.  */
  delete_points_to_sets ();

  /* Points-to computation must not have left the function in need of
     an SSA update.  */
  gcc_assert (!need_ssa_update_p (cfun));

  return 0;
}
8074 | |
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  The pass body itself does nothing; all work
   happens in the pass manager's TODO processing.  */

namespace {

/* Pass descriptor for the "alias" pass.  */
const pass_data pass_data_build_alias =
{
  .type: GIMPLE_PASS, /* type */
  .name: "alias" , /* name */
  .optinfo_flags: OPTGROUP_NONE, /* optinfo_flags */
  .tv_id: TV_NONE, /* tv_id */
  .properties_required: ( PROP_cfg | PROP_ssa ), /* properties_required */
  .properties_provided: 0, /* properties_provided */
  .properties_destroyed: 0, /* properties_destroyed */
  .todo_flags_start: 0, /* todo_flags_start */
  TODO_rebuild_alias, /* todo_flags_finish */
};

class pass_build_alias : public gimple_opt_pass
{
public:
  pass_build_alias (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_alias, ctxt)
  {}

  /* opt_pass methods: */
  /* Run only when tree points-to analysis is enabled (-ftree-pta).  */
  bool gate (function *) final override { return flag_tree_pta; }

}; // class pass_build_alias

} // anon namespace
8106 | |
8107 | gimple_opt_pass * |
8108 | make_pass_build_alias (gcc::context *ctxt) |
8109 | { |
8110 | return new pass_build_alias (ctxt); |
8111 | } |
8112 | |
/* Like pass_build_alias, a dummy pass causing points-to information to
   be computed via TODO_rebuild_alias; this variant is registered under
   the name "ealias" so it can be scheduled separately in the pass
   pipeline.  */

namespace {

/* Pass descriptor for the "ealias" pass.  */
const pass_data pass_data_build_ealias =
{
  .type: GIMPLE_PASS, /* type */
  .name: "ealias" , /* name */
  .optinfo_flags: OPTGROUP_NONE, /* optinfo_flags */
  .tv_id: TV_NONE, /* tv_id */
  .properties_required: ( PROP_cfg | PROP_ssa ), /* properties_required */
  .properties_provided: 0, /* properties_provided */
  .properties_destroyed: 0, /* properties_destroyed */
  .todo_flags_start: 0, /* todo_flags_start */
  TODO_rebuild_alias, /* todo_flags_finish */
};

class pass_build_ealias : public gimple_opt_pass
{
public:
  pass_build_ealias (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_ealias, ctxt)
  {}

  /* opt_pass methods: */
  /* Run only when tree points-to analysis is enabled (-ftree-pta).  */
  bool gate (function *) final override { return flag_tree_pta; }

}; // class pass_build_ealias

} // anon namespace
8144 | |
8145 | gimple_opt_pass * |
8146 | make_pass_build_ealias (gcc::context *ctxt) |
8147 | { |
8148 | return new pass_build_ealias (ctxt); |
8149 | } |
8150 | |
8151 | |
/* IPA PTA solutions for ESCAPED.  Conservatively initialized to
   "points to anything"; ipa_pta_execute overwrites it with the
   computed unit-wide escape solution.  */
struct pt_solution ipa_escaped_pt
  = { .anything: true, .nonlocal: false, .escaped: false, .ipa_escaped: false, .null: false,
      .vars_contains_nonlocal: false, .vars_contains_escaped: false, .vars_contains_escaped_heap: false, .vars_contains_restrict: false, .vars_contains_interposable: false, NULL };
8156 | |
8157 | /* Associate node with varinfo DATA. Worker for |
8158 | cgraph_for_symbol_thunks_and_aliases. */ |
8159 | static bool |
8160 | associate_varinfo_to_alias (struct cgraph_node *node, void *data) |
8161 | { |
8162 | if ((node->alias |
8163 | || (node->thunk |
8164 | && ! node->inlined_to)) |
8165 | && node->analyzed |
8166 | && !node->ifunc_resolver) |
8167 | insert_vi_for_tree (t: node->decl, vi: (varinfo_t)data); |
8168 | return false; |
8169 | } |
8170 | |
8171 | /* Dump varinfo VI to FILE. */ |
8172 | |
8173 | static void |
8174 | dump_varinfo (FILE *file, varinfo_t vi) |
8175 | { |
8176 | if (vi == NULL) |
8177 | return; |
8178 | |
8179 | fprintf (stream: file, format: "%u: %s\n" , vi->id, vi->name); |
8180 | |
8181 | const char *sep = " " ; |
8182 | if (vi->is_artificial_var) |
8183 | fprintf (stream: file, format: "%sartificial" , sep); |
8184 | if (vi->is_special_var) |
8185 | fprintf (stream: file, format: "%sspecial" , sep); |
8186 | if (vi->is_unknown_size_var) |
8187 | fprintf (stream: file, format: "%sunknown-size" , sep); |
8188 | if (vi->is_full_var) |
8189 | fprintf (stream: file, format: "%sfull" , sep); |
8190 | if (vi->is_heap_var) |
8191 | fprintf (stream: file, format: "%sheap" , sep); |
8192 | if (vi->may_have_pointers) |
8193 | fprintf (stream: file, format: "%smay-have-pointers" , sep); |
8194 | if (vi->only_restrict_pointers) |
8195 | fprintf (stream: file, format: "%sonly-restrict-pointers" , sep); |
8196 | if (vi->is_restrict_var) |
8197 | fprintf (stream: file, format: "%sis-restrict-var" , sep); |
8198 | if (vi->is_global_var) |
8199 | fprintf (stream: file, format: "%sglobal" , sep); |
8200 | if (vi->is_ipa_escape_point) |
8201 | fprintf (stream: file, format: "%sipa-escape-point" , sep); |
8202 | if (vi->is_fn_info) |
8203 | fprintf (stream: file, format: "%sfn-info" , sep); |
8204 | if (vi->ruid) |
8205 | fprintf (stream: file, format: "%srestrict-uid:%u" , sep, vi->ruid); |
8206 | if (vi->next) |
8207 | fprintf (stream: file, format: "%snext:%u" , sep, vi->next); |
8208 | if (vi->head != vi->id) |
8209 | fprintf (stream: file, format: "%shead:%u" , sep, vi->head); |
8210 | if (vi->offset) |
8211 | fprintf (stream: file, format: "%soffset:" HOST_WIDE_INT_PRINT_DEC, sep, vi->offset); |
8212 | if (vi->size != ~(unsigned HOST_WIDE_INT)0) |
8213 | fprintf (stream: file, format: "%ssize:" HOST_WIDE_INT_PRINT_DEC, sep, vi->size); |
8214 | if (vi->fullsize != ~(unsigned HOST_WIDE_INT)0 |
8215 | && vi->fullsize != vi->size) |
8216 | fprintf (stream: file, format: "%sfullsize:" HOST_WIDE_INT_PRINT_DEC, sep, |
8217 | vi->fullsize); |
8218 | fprintf (stream: file, format: "\n" ); |
8219 | |
8220 | if (vi->solution && !bitmap_empty_p (map: vi->solution)) |
8221 | { |
8222 | bitmap_iterator bi; |
8223 | unsigned i; |
8224 | fprintf (stream: file, format: " solution: {" ); |
8225 | EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi) |
8226 | fprintf (stream: file, format: " %u" , i); |
8227 | fprintf (stream: file, format: " }\n" ); |
8228 | } |
8229 | |
8230 | if (vi->oldsolution && !bitmap_empty_p (map: vi->oldsolution) |
8231 | && !bitmap_equal_p (vi->solution, vi->oldsolution)) |
8232 | { |
8233 | bitmap_iterator bi; |
8234 | unsigned i; |
8235 | fprintf (stream: file, format: " oldsolution: {" ); |
8236 | EXECUTE_IF_SET_IN_BITMAP (vi->oldsolution, 0, i, bi) |
8237 | fprintf (stream: file, format: " %u" , i); |
8238 | fprintf (stream: file, format: " }\n" ); |
8239 | } |
8240 | } |
8241 | |
/* Dump varinfo VI to stderr.  Convenience wrapper around dump_varinfo
   intended to be called from the debugger.  */

DEBUG_FUNCTION void
debug_varinfo (varinfo_t vi)
{
  dump_varinfo (stderr, vi);
}
8249 | |
8250 | /* Dump varmap to FILE. */ |
8251 | |
8252 | static void |
8253 | dump_varmap (FILE *file) |
8254 | { |
8255 | if (varmap.length () == 0) |
8256 | return; |
8257 | |
8258 | fprintf (stream: file, format: "variables:\n" ); |
8259 | |
8260 | for (unsigned int i = 0; i < varmap.length (); ++i) |
8261 | { |
8262 | varinfo_t vi = get_varinfo (n: i); |
8263 | dump_varinfo (file, vi); |
8264 | } |
8265 | |
8266 | fprintf (stream: file, format: "\n" ); |
8267 | } |
8268 | |
/* Dump varmap to stderr.  Convenience wrapper around dump_varmap
   intended to be called from the debugger.  */

DEBUG_FUNCTION void
debug_varmap (void)
{
  dump_varmap (stderr);
}
8276 | |
8277 | /* Compute whether node is refered to non-locally. Worker for |
8278 | cgraph_for_symbol_thunks_and_aliases. */ |
8279 | static bool |
8280 | refered_from_nonlocal_fn (struct cgraph_node *node, void *data) |
8281 | { |
8282 | bool *nonlocal_p = (bool *)data; |
8283 | *nonlocal_p |= (node->used_from_other_partition |
8284 | || DECL_EXTERNAL (node->decl) |
8285 | || TREE_PUBLIC (node->decl) |
8286 | || node->force_output |
8287 | || lookup_attribute (attr_name: "noipa" , DECL_ATTRIBUTES (node->decl))); |
8288 | return false; |
8289 | } |
8290 | |
8291 | /* Same for varpool nodes. */ |
8292 | static bool |
8293 | refered_from_nonlocal_var (struct varpool_node *node, void *data) |
8294 | { |
8295 | bool *nonlocal_p = (bool *)data; |
8296 | *nonlocal_p |= (node->used_from_other_partition |
8297 | || DECL_EXTERNAL (node->decl) |
8298 | || TREE_PUBLIC (node->decl) |
8299 | || node->force_output); |
8300 | return false; |
8301 | } |
8302 | |
/* Execute the driver for IPA PTA.  Builds constraints for every
   function with a body and every global variable in the unit, solves
   them once, and stores the resulting points-to, call-use and
   call-clobber sets back into each function.  Returns 0 (no TODO
   flags).  */
static unsigned int
ipa_pta_execute (void)
{
  struct cgraph_node *node;
  varpool_node *var;
  /* Index of the first constraint not yet dumped; only used to limit
     dump-file output to newly added constraints.  */
  unsigned int from = 0;

  in_ipa_mode = 1;

  init_alias_vars ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      symtab->dump (f: dump_file);
      fprintf (stream: dump_file, format: "\n" );
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (stream: dump_file, format: "Generating generic constraints\n\n" );
      dump_constraints (file: dump_file, from);
      fprintf (stream: dump_file, format: "\n" );
      from = constraints.length ();
    }

  /* Build the constraints.  First create function infos for all
     defined functions so call handling can look them up.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      varinfo_t vi;
      /* Nodes without a body in this partition are not interesting.
	 Especially do not visit clones at this point for now - we
	 get duplicate decls there for inline clones at least.  */
      if (!node->has_gimple_body_p ()
	  || node->in_other_partition
	  || node->inlined_to)
	continue;
      /* Make sure the gimple body is available.  */
      node->get_body ();

      gcc_assert (!node->clone_of);

      /* For externally visible or attribute used annotated functions use
	 local constraints for their arguments.
	 For local functions we see all callers and thus do not need initial
	 constraints for parameters.  */
      bool nonlocal_p = (node->used_from_other_partition
			 || DECL_EXTERNAL (node->decl)
			 || TREE_PUBLIC (node->decl)
			 || node->force_output
			 || lookup_attribute (attr_name: "noipa" ,
					      DECL_ATTRIBUTES (node->decl)));
      /* Aliases and thunks of the node may also be reachable from
	 outside; fold their visibility into NONLOCAL_P.  */
      node->call_for_symbol_thunks_and_aliases (callback: refered_from_nonlocal_fn,
						data: &nonlocal_p, include_overwritable: true);

      vi = create_function_info_for (decl: node->decl,
				     name: alias_get_name (decl: node->decl), add_id: false,
				     nonlocal_p);
      if (dump_file && (dump_flags & TDF_DETAILS)
	  && from != constraints.length ())
	{
	  fprintf (stream: dump_file,
		   format: "Generating initial constraints for %s" ,
		   node->dump_name ());
	  if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
	    fprintf (stream: dump_file, format: " (%s)" ,
		     IDENTIFIER_POINTER
		     (DECL_ASSEMBLER_NAME (node->decl)));
	  fprintf (stream: dump_file, format: "\n\n" );
	  dump_constraints (file: dump_file, from);
	  fprintf (stream: dump_file, format: "\n" );

	  from = constraints.length ();
	}

      /* Let aliases and thunks share the function's varinfo.  */
      node->call_for_symbol_thunks_and_aliases
	(callback: associate_varinfo_to_alias, data: vi, include_overwritable: true);
    }

  /* Create constraints for global variables and their initializers.  */
  FOR_EACH_VARIABLE (var)
    {
      if (var->alias && var->analyzed)
	continue;

      varinfo_t vi = get_vi_for_tree (t: var->decl);

      /* For the purpose of IPA PTA unit-local globals are not
	 escape points.  */
      bool nonlocal_p = (DECL_EXTERNAL (var->decl)
			 || TREE_PUBLIC (var->decl)
			 || var->used_from_other_partition
			 || var->force_output);
      var->call_for_symbol_and_aliases (callback: refered_from_nonlocal_var,
					data: &nonlocal_p, include_overwritable: true);
      if (nonlocal_p)
	vi->is_ipa_escape_point = true;
    }

  if (dump_file && (dump_flags & TDF_DETAILS)
      && from != constraints.length ())
    {
      fprintf (stream: dump_file,
	       format: "Generating constraints for global initializers\n\n" );
      dump_constraints (file: dump_file, from);
      fprintf (stream: dump_file, format: "\n" );
      from = constraints.length ();
    }

  FOR_EACH_DEFINED_FUNCTION (node)
    {
      struct function *func;
      basic_block bb;

      /* Nodes without a body in this partition are not interesting.  */
      if (!node->has_gimple_body_p ()
	  || node->in_other_partition
	  || node->clone_of)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (stream: dump_file,
		   format: "Generating constraints for %s" , node->dump_name ());
	  if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
	    fprintf (stream: dump_file, format: " (%s)" ,
		     IDENTIFIER_POINTER
		     (DECL_ASSEMBLER_NAME (node->decl)));
	  fprintf (stream: dump_file, format: "\n" );
	}

      /* Constraint building works on FUNC directly without switching
	 into the function, hence CFUN must be unset here.  */
      func = DECL_STRUCT_FUNCTION (node->decl);
      gcc_assert (cfun == NULL);

      /* Build constraints for the function body.  */
      FOR_EACH_BB_FN (bb, func)
	{
	  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (i: gsi);
	       gsi_next (i: &gsi))
	    {
	      gphi *phi = gsi.phi ();

	      if (! virtual_operand_p (op: gimple_phi_result (gs: phi)))
		find_func_aliases (fn: func, origt: phi);
	    }

	  for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (i: gsi);
	       gsi_next (i: &gsi))
	    {
	      gimple *stmt = gsi_stmt (i: gsi);

	      find_func_aliases (fn: func, origt: stmt);
	      find_func_clobbers (fn: func, origt: stmt);
	    }
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (stream: dump_file, format: "\n" );
	  dump_constraints (file: dump_file, from);
	  fprintf (stream: dump_file, format: "\n" );
	  from = constraints.length ();
	}
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  if (dump_file && (dump_flags & TDF_STATS))
    dump_sa_stats (outfile: dump_file);

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_sa_points_to_info (outfile: dump_file);

  /* Now post-process solutions to handle locals from different
     runtime instantiations coming in through recursive invocations.
     Such locals get a shadow-variable UID so they can be told apart
     later.  */
  unsigned shadow_var_cnt = 0;
  for (unsigned i = 1; i < varmap.length (); ++i)
    {
      varinfo_t fi = get_varinfo (n: i);
      if (fi->is_fn_info
	  && fi->decl)
	/* Automatic variables pointed to by their containing functions
	   parameters need this treatment.  */
	for (varinfo_t ai = first_vi_for_offset (start: fi, offset: fi_parm_base);
	     ai; ai = vi_next (vi: ai))
	  {
	    varinfo_t vi = get_varinfo (n: find (node: ai->id));
	    bitmap_iterator bi;
	    unsigned j;
	    EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
	      {
		varinfo_t pt = get_varinfo (n: j);
		if (pt->shadow_var_uid == 0
		    && pt->decl
		    && auto_var_in_fn_p (pt->decl, fi->decl))
		  {
		    pt->shadow_var_uid = allocate_decl_uid ();
		    shadow_var_cnt++;
		  }
	      }
	  }
      /* As well as global variables which are another way of passing
	 arguments to recursive invocations.  */
      else if (fi->is_global_var)
	{
	  for (varinfo_t ai = fi; ai; ai = vi_next (vi: ai))
	    {
	      varinfo_t vi = get_varinfo (n: find (node: ai->id));
	      bitmap_iterator bi;
	      unsigned j;
	      EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
		{
		  varinfo_t pt = get_varinfo (n: j);
		  if (pt->shadow_var_uid == 0
		      && pt->decl
		      && auto_var_p (pt->decl))
		    {
		      pt->shadow_var_uid = allocate_decl_uid ();
		      shadow_var_cnt++;
		    }
		}
	    }
	}
    }
  if (shadow_var_cnt && dump_file && (dump_flags & TDF_DETAILS))
    fprintf (stream: dump_file, format: "Allocated %u shadow variables for locals "
	     "maybe leaking into recursive invocations of their containing "
	     "functions\n" , shadow_var_cnt);

  /* Compute the global points-to sets for ESCAPED.
     ??? Note that the computed escape set is not correct
     for the whole unit as we fail to consider graph edges to
     externally visible functions.  */
  ipa_escaped_pt = find_what_var_points_to (NULL, orig_vi: get_varinfo (n: escaped_id));

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  ipa_escaped_pt.ipa_escaped = 0;

  /* Assign the points-to sets to the SSA names in the unit.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      tree ptr;
      struct function *fn;
      unsigned i;
      basic_block bb;

      /* Nodes without a body in this partition are not interesting.  */
      if (!node->has_gimple_body_p ()
	  || node->in_other_partition
	  || node->clone_of)
	continue;

      fn = DECL_STRUCT_FUNCTION (node->decl);

      /* Compute the points-to sets for pointer SSA_NAMEs.  */
      FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
	{
	  if (ptr
	      && POINTER_TYPE_P (TREE_TYPE (ptr)))
	    find_what_p_points_to (fndecl: node->decl, p: ptr);
	}

      /* Compute the call-use and call-clobber sets for indirect calls
	 and calls to external functions.  */
      FOR_EACH_BB_FN (bb, fn)
	{
	  gimple_stmt_iterator gsi;

	  for (gsi = gsi_start_bb (bb); !gsi_end_p (i: gsi); gsi_next (i: &gsi))
	    {
	      gcall *stmt;
	      struct pt_solution *pt;
	      varinfo_t vi, fi;
	      tree decl;

	      stmt = dyn_cast <gcall *> (p: gsi_stmt (i: gsi));
	      if (!stmt)
		continue;

	      /* Handle direct calls to functions with body.  */
	      decl = gimple_call_fndecl (gs: stmt);

	      /* For GOMP/GOACC parallel launches the real callee is
		 passed as an argument; use it when it is known to be
		 in this partition.  */
	      {
		tree called_decl = NULL_TREE;
		if (gimple_call_builtin_p (stmt, BUILT_IN_GOMP_PARALLEL))
		  called_decl = TREE_OPERAND (gimple_call_arg (stmt, 0), 0);
		else if (gimple_call_builtin_p (stmt, BUILT_IN_GOACC_PARALLEL))
		  called_decl = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);

		if (called_decl != NULL_TREE
		    && !fndecl_maybe_in_other_partition (fndecl: called_decl))
		  decl = called_decl;
	      }

	      if (decl
		  && (fi = lookup_vi_for_tree (t: decl))
		  && fi->is_fn_info)
		{
		  *gimple_call_clobber_set (call_stmt: stmt)
		    = find_what_var_points_to
		    (fndecl: node->decl, orig_vi: first_vi_for_offset (start: fi, offset: fi_clobbers));
		  *gimple_call_use_set (call_stmt: stmt)
		    = find_what_var_points_to
		    (fndecl: node->decl, orig_vi: first_vi_for_offset (start: fi, offset: fi_uses));
		}
	      /* Handle direct calls to external functions.  */
	      else if (decl && (!fi || fi->decl))
		{
		  pt = gimple_call_use_set (call_stmt: stmt);
		  if (gimple_call_flags (stmt) & ECF_CONST)
		    memset (s: pt, c: 0, n: sizeof (struct pt_solution));
		  else if ((vi = lookup_call_use_vi (call: stmt)) != NULL)
		    {
		      *pt = find_what_var_points_to (fndecl: node->decl, orig_vi: vi);
		      /* Escaped (and thus nonlocal) variables are always
			 implicitly used by calls.  */
		      /* ??? ESCAPED can be empty even though NONLOCAL
			 always escaped.  */
		      pt->nonlocal = 1;
		      pt->ipa_escaped = 1;
		    }
		  else
		    {
		      /* If there is nothing special about this call then
			 we have made everything that is used also escape.  */
		      *pt = ipa_escaped_pt;
		      pt->nonlocal = 1;
		    }

		  pt = gimple_call_clobber_set (call_stmt: stmt);
		  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
		    memset (s: pt, c: 0, n: sizeof (struct pt_solution));
		  else if ((vi = lookup_call_clobber_vi (call: stmt)) != NULL)
		    {
		      *pt = find_what_var_points_to (fndecl: node->decl, orig_vi: vi);
		      /* Escaped (and thus nonlocal) variables are always
			 implicitly clobbered by calls.  */
		      /* ??? ESCAPED can be empty even though NONLOCAL
			 always escaped.  */
		      pt->nonlocal = 1;
		      pt->ipa_escaped = 1;
		    }
		  else
		    {
		      /* If there is nothing special about this call then
			 we have made everything that is used also escape.  */
		      *pt = ipa_escaped_pt;
		      pt->nonlocal = 1;
		    }
		}
	      /* Handle indirect calls.  */
	      else if ((fi = get_fi_for_callee (call: stmt)))
		{
		  /* We need to accumulate all clobbers/uses of all possible
		     callees.  */
		  fi = get_varinfo (n: find (node: fi->id));
		  /* If we cannot constrain the set of functions we'll end up
		     calling we end up using/clobbering everything.  */
		  if (bitmap_bit_p (fi->solution, anything_id)
		      || bitmap_bit_p (fi->solution, nonlocal_id)
		      || bitmap_bit_p (fi->solution, escaped_id))
		    {
		      pt_solution_reset (pt: gimple_call_clobber_set (call_stmt: stmt));
		      pt_solution_reset (pt: gimple_call_use_set (call_stmt: stmt));
		    }
		  else
		    {
		      bitmap_iterator bi;
		      unsigned i;
		      struct pt_solution *uses, *clobbers;

		      uses = gimple_call_use_set (call_stmt: stmt);
		      clobbers = gimple_call_clobber_set (call_stmt: stmt);
		      memset (s: uses, c: 0, n: sizeof (struct pt_solution));
		      memset (s: clobbers, c: 0, n: sizeof (struct pt_solution));
		      EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
			{
			  struct pt_solution sol;

			  vi = get_varinfo (n: i);
			  if (!vi->is_fn_info)
			    {
			      /* ??? We could be more precise here?  */
			      uses->nonlocal = 1;
			      uses->ipa_escaped = 1;
			      clobbers->nonlocal = 1;
			      clobbers->ipa_escaped = 1;
			      continue;
			    }

			  if (!uses->anything)
			    {
			      sol = find_what_var_points_to
				  (fndecl: node->decl,
				   orig_vi: first_vi_for_offset (start: vi, offset: fi_uses));
			      pt_solution_ior_into (dest: uses, src: &sol);
			    }
			  if (!clobbers->anything)
			    {
			      sol = find_what_var_points_to
				  (fndecl: node->decl,
				   orig_vi: first_vi_for_offset (start: vi, offset: fi_clobbers));
			      pt_solution_ior_into (dest: clobbers, src: &sol);
			    }
			}
		    }
		}
	      else
		gcc_unreachable ();
	    }
	}

      fn->gimple_df->ipa_pta = true;

      /* We have to re-set the final-solution cache after each function
	 because what is a "global" is dependent on function context.  */
      final_solutions->empty ();
      obstack_free (&final_solutions_obstack, NULL);
      gcc_obstack_init (&final_solutions_obstack);
    }

  delete_points_to_sets ();

  in_ipa_mode = 0;

  return 0;
}
8732 | |
namespace {

/* Pass descriptor for the IPA points-to analysis pass.  */
const pass_data pass_data_ipa_pta =
{
  .type: SIMPLE_IPA_PASS, /* type */
  .name: "pta" , /* name */
  .optinfo_flags: OPTGROUP_NONE, /* optinfo_flags */
  .tv_id: TV_IPA_PTA, /* tv_id */
  .properties_required: 0, /* properties_required */
  .properties_provided: 0, /* properties_provided */
  .properties_destroyed: 0, /* properties_destroyed */
  .todo_flags_start: 0, /* todo_flags_start */
  .todo_flags_finish: 0, /* todo_flags_finish */
};

class pass_ipa_pta : public simple_ipa_opt_pass
{
public:
  pass_ipa_pta (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_pta, ctxt)
  {}

  /* opt_pass methods: */
  /* Run only when optimizing with -fipa-pta on an error-free unit.  */
  bool gate (function *) final override
  {
    return (optimize
	    && flag_ipa_pta
	    /* Don't bother doing anything if the program has errors.  */
	    && !seen_error ());
  }

  /* clone lets the pass manager schedule this pass more than once.  */
  opt_pass * clone () final override { return new pass_ipa_pta (m_ctxt); }

  unsigned int execute (function *) final override
  {
    return ipa_pta_execute ();
  }

}; // class pass_ipa_pta

} // anon namespace
8774 | |
8775 | simple_ipa_opt_pass * |
8776 | make_pass_ipa_pta (gcc::context *ctxt) |
8777 | { |
8778 | return new pass_ipa_pta (ctxt); |
8779 | } |
8780 | |