1 | /* Full and partial redundancy elimination and code hoisting on SSA GIMPLE. |
2 | Copyright (C) 2001-2023 Free Software Foundation, Inc. |
3 | Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher |
4 | <stevenb@suse.de> |
5 | |
6 | This file is part of GCC. |
7 | |
8 | GCC is free software; you can redistribute it and/or modify |
9 | it under the terms of the GNU General Public License as published by |
10 | the Free Software Foundation; either version 3, or (at your option) |
11 | any later version. |
12 | |
13 | GCC is distributed in the hope that it will be useful, |
14 | but WITHOUT ANY WARRANTY; without even the implied warranty of |
15 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
16 | GNU General Public License for more details. |
17 | |
18 | You should have received a copy of the GNU General Public License |
19 | along with GCC; see the file COPYING3. If not see |
20 | <http://www.gnu.org/licenses/>. */ |
21 | |
22 | #include "config.h" |
23 | #include "system.h" |
24 | #include "coretypes.h" |
25 | #include "backend.h" |
26 | #include "rtl.h" |
27 | #include "tree.h" |
28 | #include "gimple.h" |
29 | #include "predict.h" |
30 | #include "alloc-pool.h" |
31 | #include "tree-pass.h" |
32 | #include "ssa.h" |
33 | #include "cgraph.h" |
34 | #include "gimple-pretty-print.h" |
35 | #include "fold-const.h" |
36 | #include "cfganal.h" |
37 | #include "gimple-iterator.h" |
38 | #include "gimple-fold.h" |
39 | #include "tree-eh.h" |
40 | #include "gimplify.h" |
41 | #include "tree-cfg.h" |
42 | #include "tree-into-ssa.h" |
43 | #include "tree-dfa.h" |
44 | #include "tree-ssa.h" |
45 | #include "cfgloop.h" |
46 | #include "tree-ssa-sccvn.h" |
47 | #include "tree-scalar-evolution.h" |
48 | #include "dbgcnt.h" |
49 | #include "domwalk.h" |
50 | #include "tree-ssa-propagate.h" |
51 | #include "tree-ssa-dce.h" |
52 | #include "tree-cfgcleanup.h" |
53 | #include "alias.h" |
54 | #include "gimple-range.h" |
55 | |
56 | /* Even though this file is called tree-ssa-pre.cc, we actually |
57 | implement a bit more than just PRE here. All of them piggy-back |
58 | on GVN which is implemented in tree-ssa-sccvn.cc. |
59 | |
60 | 1. Full Redundancy Elimination (FRE) |
61 | This is the elimination phase of GVN. |
62 | |
63 | 2. Partial Redundancy Elimination (PRE) |
     This adds computation of AVAIL_OUT and ANTIC_IN and
65 | doing expression insertion to form GVN-PRE. |
66 | |
67 | 3. Code hoisting |
68 | This optimization uses the ANTIC_IN sets computed for PRE |
69 | to move expressions further up than PRE would do, to make |
70 | multiple computations of the same value fully redundant. |
71 | This pass is explained below (after the explanation of the |
72 | basic algorithm for PRE). |
73 | */ |
74 | |
75 | /* TODO: |
76 | |
77 | 1. Avail sets can be shared by making an avail_find_leader that |
78 | walks up the dominator tree and looks in those avail sets. |
79 | This might affect code optimality, it's unclear right now. |
80 | Currently the AVAIL_OUT sets are the remaining quadraticness in |
81 | memory of GVN-PRE. |
82 | 2. Strength reduction can be performed by anticipating expressions |
83 | we can repair later on. |
84 | 3. We can do back-substitution or smarter value numbering to catch |
85 | commutative expressions split up over multiple statements. |
86 | */ |
87 | |
88 | /* For ease of terminology, "expression node" in the below refers to |
89 | every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs |
90 | represent the actual statement containing the expressions we care about, |
91 | and we cache the value number by putting it in the expression. */ |
92 | |
93 | /* Basic algorithm for Partial Redundancy Elimination: |
94 | |
95 | First we walk the statements to generate the AVAIL sets, the |
96 | EXP_GEN sets, and the tmp_gen sets. EXP_GEN sets represent the |
97 | generation of values/expressions by a given block. We use them |
98 | when computing the ANTIC sets. The AVAIL sets consist of |
99 | SSA_NAME's that represent values, so we know what values are |
100 | available in what blocks. AVAIL is a forward dataflow problem. In |
101 | SSA, values are never killed, so we don't need a kill set, or a |
102 | fixpoint iteration, in order to calculate the AVAIL sets. In |
103 | traditional parlance, AVAIL sets tell us the downsafety of the |
104 | expressions/values. |
105 | |
106 | Next, we generate the ANTIC sets. These sets represent the |
107 | anticipatable expressions. ANTIC is a backwards dataflow |
108 | problem. An expression is anticipatable in a given block if it could |
109 | be generated in that block. This means that if we had to perform |
110 | an insertion in that block, of the value of that expression, we |
111 | could. Calculating the ANTIC sets requires phi translation of |
112 | expressions, because the flow goes backwards through phis. We must |
113 | iterate to a fixpoint of the ANTIC sets, because we have a kill |
114 | set. Even in SSA form, values are not live over the entire |
115 | function, only from their definition point onwards. So we have to |
116 | remove values from the ANTIC set once we go past the definition |
117 | point of the leaders that make them up. |
118 | compute_antic/compute_antic_aux performs this computation. |
119 | |
120 | Third, we perform insertions to make partially redundant |
121 | expressions fully redundant. |
122 | |
123 | An expression is partially redundant (excluding partial |
124 | anticipation) if: |
125 | |
126 | 1. It is AVAIL in some, but not all, of the predecessors of a |
127 | given block. |
128 | 2. It is ANTIC in all the predecessors. |
129 | |
130 | In order to make it fully redundant, we insert the expression into |
131 | the predecessors where it is not available, but is ANTIC. |
132 | |
133 | When optimizing for size, we only eliminate the partial redundancy |
134 | if we need to insert in only one predecessor. This avoids almost |
135 | completely the code size increase that PRE usually causes. |
136 | |
137 | For the partial anticipation case, we only perform insertion if it |
138 | is partially anticipated in some block, and fully available in all |
139 | of the predecessors. |
140 | |
141 | do_pre_regular_insertion/do_pre_partial_partial_insertion |
142 | performs these steps, driven by insert/insert_aux. |
143 | |
144 | Fourth, we eliminate fully redundant expressions. |
145 | This is a simple statement walk that replaces redundant |
146 | calculations with the now available values. */ |
147 | |
148 | /* Basic algorithm for Code Hoisting: |
149 | |
150 | Code hoisting is: Moving value computations up in the control flow |
151 | graph to make multiple copies redundant. Typically this is a size |
152 | optimization, but there are cases where it also is helpful for speed. |
153 | |
154 | A simple code hoisting algorithm is implemented that piggy-backs on |
155 | the PRE infrastructure. For code hoisting, we have to know ANTIC_OUT |
156 | which is effectively ANTIC_IN - AVAIL_OUT. The latter two have to be |
157 | computed for PRE, and we can use them to perform a limited version of |
158 | code hoisting, too. |
159 | |
160 | For the purpose of this implementation, a value is hoistable to a basic |
161 | block B if the following properties are met: |
162 | |
163 | 1. The value is in ANTIC_IN(B) -- the value will be computed on all |
164 | paths from B to function exit and it can be computed in B); |
165 | |
166 | 2. The value is not in AVAIL_OUT(B) -- there would be no need to |
167 | compute the value again and make it available twice; |
168 | |
169 | 3. All successors of B are dominated by B -- makes sure that inserting |
170 | a computation of the value in B will make the remaining |
171 | computations fully redundant; |
172 | |
173 | 4. At least one successor has the value in AVAIL_OUT -- to avoid |
174 | hoisting values up too far; |
175 | |
176 | 5. There are at least two successors of B -- hoisting in straight |
177 | line code is pointless. |
178 | |
179 | The third condition is not strictly necessary, but it would complicate |
180 | the hoisting pass a lot. In fact, I don't know of any code hoisting |
181 | algorithm that does not have this requirement. Fortunately, experiments |
   have shown that most candidate hoistable values are in regions that meet
183 | this condition (e.g. diamond-shape regions). |
184 | |
   The fourth condition is necessary to avoid hoisting things up too far
186 | away from the uses of the value. Nothing else limits the algorithm |
187 | from hoisting everything up as far as ANTIC_IN allows. Experiments |
188 | with SPEC and CSiBE have shown that hoisting up too far results in more |
189 | spilling, less benefits for code size, and worse benchmark scores. |
190 | Fortunately, in practice most of the interesting hoisting opportunities |
191 | are caught despite this limitation. |
192 | |
193 | For hoistable values that meet all conditions, expressions are inserted |
194 | to make the calculation of the hoistable value fully redundant. We |
195 | perform code hoisting insertions after each round of PRE insertions, |
196 | because code hoisting never exposes new PRE opportunities, but PRE can |
197 | create new code hoisting opportunities. |
198 | |
199 | The code hoisting algorithm is implemented in do_hoist_insert, driven |
200 | by insert/insert_aux. */ |
201 | |
202 | /* Representations of value numbers: |
203 | |
204 | Value numbers are represented by a representative SSA_NAME. We |
205 | will create fake SSA_NAME's in situations where we need a |
206 | representative but do not have one (because it is a complex |
207 | expression). In order to facilitate storing the value numbers in |
208 | bitmaps, and keep the number of wasted SSA_NAME's down, we also |
209 | associate a value_id with each value number, and create full blown |
210 | ssa_name's only where we actually need them (IE in operands of |
211 | existing expressions). |
212 | |
213 | Theoretically you could replace all the value_id's with |
214 | SSA_NAME_VERSION, but this would allocate a large number of |
215 | SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number. |
216 | It would also require an additional indirection at each point we |
217 | use the value id. */ |
218 | |
219 | /* Representation of expressions on value numbers: |
220 | |
221 | Expressions consisting of value numbers are represented the same |
222 | way as our VN internally represents them, with an additional |
223 | "pre_expr" wrapping around them in order to facilitate storing all |
224 | of the expressions in the same sets. */ |
225 | |
226 | /* Representation of sets: |
227 | |
228 | The dataflow sets do not need to be sorted in any particular order |
   for the majority of their lifetime, and are simply represented as two
230 | bitmaps, one that keeps track of values present in the set, and one |
231 | that keeps track of expressions present in the set. |
232 | |
233 | When we need them in topological order, we produce it on demand by |
234 | transforming the bitmap into an array and sorting it into topo |
235 | order. */ |
236 | |
/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
  NAME,	      /* An SSA_NAME; u.name is valid.  */
  NARY,	      /* A VN n-ary operation; u.nary is valid.  */
  REFERENCE,  /* A VN memory reference; u.reference is valid.  */
  CONSTANT    /* A constant tree; u.constant is valid.  */
};
247 | |
/* Payload of a pre_expr; which member is valid is determined by the
   pre_expr_kind stored next to it in pre_expr_d.  */
union pre_expr_union
{
  tree name;		     /* Valid when kind == NAME.  */
  tree constant;	     /* Valid when kind == CONSTANT.  */
  vn_nary_op_t nary;	     /* Valid when kind == NARY.  */
  vn_reference_t reference;  /* Valid when kind == REFERENCE.  */
};
255 | |
/* An expression as handled by PRE: a kind tag, ids linking it into the
   bitmap sets, and the kind-specific payload.  Hashable so identical
   expressions are shared (see expression_to_id).  */
typedef struct pre_expr_d : nofree_ptr_hash <pre_expr_d>
{
  /* Which member of U is valid.  */
  enum pre_expr_kind kind;
  /* Unique expression id, assigned by alloc_expression_id.  */
  unsigned int id;
  /* The value id of the value this expression computes.  */
  unsigned value_id;
  /* Original source location of the expression, if known.  */
  location_t loc;
  /* The kind-dependent payload.  */
  pre_expr_union u;

  /* hash_table support. */
  static inline hashval_t hash (const pre_expr_d *);
  static inline int equal (const pre_expr_d *, const pre_expr_d *);
} *pre_expr;
268 | |
/* Accessors for the pre_expr_union payload.  Only use the accessor that
   matches the expression's kind.  */
#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant
273 | |
274 | /* Compare E1 and E1 for equality. */ |
275 | |
276 | inline int |
277 | pre_expr_d::equal (const pre_expr_d *e1, const pre_expr_d *e2) |
278 | { |
279 | if (e1->kind != e2->kind) |
280 | return false; |
281 | |
282 | switch (e1->kind) |
283 | { |
284 | case CONSTANT: |
285 | return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1), |
286 | PRE_EXPR_CONSTANT (e2)); |
287 | case NAME: |
288 | return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2); |
289 | case NARY: |
290 | return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2)); |
291 | case REFERENCE: |
292 | return vn_reference_eq (PRE_EXPR_REFERENCE (e1), |
293 | PRE_EXPR_REFERENCE (e2)); |
294 | default: |
295 | gcc_unreachable (); |
296 | } |
297 | } |
298 | |
299 | /* Hash E. */ |
300 | |
301 | inline hashval_t |
302 | pre_expr_d::hash (const pre_expr_d *e) |
303 | { |
304 | switch (e->kind) |
305 | { |
306 | case CONSTANT: |
307 | return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e)); |
308 | case NAME: |
309 | return SSA_NAME_VERSION (PRE_EXPR_NAME (e)); |
310 | case NARY: |
311 | return PRE_EXPR_NARY (e)->hashcode; |
312 | case REFERENCE: |
313 | return PRE_EXPR_REFERENCE (e)->hashcode; |
314 | default: |
315 | gcc_unreachable (); |
316 | } |
317 | } |
318 | |
/* Next global expression id number. */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets. */
static vec<pre_expr> expressions;
static hash_table<pre_expr_d> *expression_to_id;
/* Mapping from SSA name version to expression id (0 = not allocated),
   used instead of the hash table for NAME expressions.  */
static vec<unsigned> name_to_id;
/* Obstack holding copies of NARY expressions owned by PRE.  */
static obstack pre_expr_obstack;
327 | |
328 | /* Allocate an expression id for EXPR. */ |
329 | |
330 | static inline unsigned int |
331 | alloc_expression_id (pre_expr expr) |
332 | { |
333 | struct pre_expr_d **slot; |
334 | /* Make sure we won't overflow. */ |
335 | gcc_assert (next_expression_id + 1 > next_expression_id); |
336 | expr->id = next_expression_id++; |
337 | expressions.safe_push (obj: expr); |
338 | if (expr->kind == NAME) |
339 | { |
340 | unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr)); |
341 | /* vec::safe_grow_cleared allocates no headroom. Avoid frequent |
342 | re-allocations by using vec::reserve upfront. */ |
343 | unsigned old_len = name_to_id.length (); |
344 | name_to_id.reserve (num_ssa_names - old_len); |
345 | name_to_id.quick_grow_cleared (num_ssa_names); |
346 | gcc_assert (name_to_id[version] == 0); |
347 | name_to_id[version] = expr->id; |
348 | } |
349 | else |
350 | { |
351 | slot = expression_to_id->find_slot (value: expr, insert: INSERT); |
352 | gcc_assert (!*slot); |
353 | *slot = expr; |
354 | } |
355 | return next_expression_id - 1; |
356 | } |
357 | |
/* Return the expression id previously assigned to EXPR by
   alloc_expression_id.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}
365 | |
366 | static inline unsigned int |
367 | lookup_expression_id (const pre_expr expr) |
368 | { |
369 | struct pre_expr_d **slot; |
370 | |
371 | if (expr->kind == NAME) |
372 | { |
373 | unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr)); |
374 | if (name_to_id.length () <= version) |
375 | return 0; |
376 | return name_to_id[version]; |
377 | } |
378 | else |
379 | { |
380 | slot = expression_to_id->find_slot (value: expr, insert: NO_INSERT); |
381 | if (!slot) |
382 | return 0; |
383 | return ((pre_expr)*slot)->id; |
384 | } |
385 | } |
386 | |
/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return expressions[id];
}
394 | |
/* Allocation pool for pre_expr nodes.  */
static object_allocator<pre_expr_d> pre_expr_pool ("pre_expr nodes" );
396 | |
397 | /* Given an SSA_NAME NAME, get or create a pre_expr to represent it. */ |
398 | |
399 | static pre_expr |
400 | get_or_alloc_expr_for_name (tree name) |
401 | { |
402 | struct pre_expr_d expr; |
403 | pre_expr result; |
404 | unsigned int result_id; |
405 | |
406 | expr.kind = NAME; |
407 | expr.id = 0; |
408 | PRE_EXPR_NAME (&expr) = name; |
409 | result_id = lookup_expression_id (expr: &expr); |
410 | if (result_id != 0) |
411 | return expression_for_id (id: result_id); |
412 | |
413 | result = pre_expr_pool.allocate (); |
414 | result->kind = NAME; |
415 | result->loc = UNKNOWN_LOCATION; |
416 | result->value_id = VN_INFO (name)->value_id; |
417 | PRE_EXPR_NAME (result) = name; |
418 | alloc_expression_id (expr: result); |
419 | return result; |
420 | } |
421 | |
422 | /* Given an NARY, get or create a pre_expr to represent it. Assign |
423 | VALUE_ID to it or allocate a new value-id if it is zero. Record |
424 | LOC as the original location of the expression. */ |
425 | |
426 | static pre_expr |
427 | get_or_alloc_expr_for_nary (vn_nary_op_t nary, unsigned value_id, |
428 | location_t loc = UNKNOWN_LOCATION) |
429 | { |
430 | struct pre_expr_d expr; |
431 | pre_expr result; |
432 | unsigned int result_id; |
433 | |
434 | gcc_assert (value_id == 0 || !value_id_constant_p (value_id)); |
435 | |
436 | expr.kind = NARY; |
437 | expr.id = 0; |
438 | nary->hashcode = vn_nary_op_compute_hash (nary); |
439 | PRE_EXPR_NARY (&expr) = nary; |
440 | result_id = lookup_expression_id (expr: &expr); |
441 | if (result_id != 0) |
442 | return expression_for_id (id: result_id); |
443 | |
444 | result = pre_expr_pool.allocate (); |
445 | result->kind = NARY; |
446 | result->loc = loc; |
447 | result->value_id = value_id ? value_id : get_next_value_id (); |
448 | PRE_EXPR_NARY (result) |
449 | = alloc_vn_nary_op_noinit (nary->length, &pre_expr_obstack); |
450 | memcpy (PRE_EXPR_NARY (result), src: nary, n: sizeof_vn_nary_op (length: nary->length)); |
451 | alloc_expression_id (expr: result); |
452 | return result; |
453 | } |
454 | |
455 | /* Given an REFERENCE, get or create a pre_expr to represent it. */ |
456 | |
457 | static pre_expr |
458 | get_or_alloc_expr_for_reference (vn_reference_t reference, |
459 | location_t loc = UNKNOWN_LOCATION) |
460 | { |
461 | struct pre_expr_d expr; |
462 | pre_expr result; |
463 | unsigned int result_id; |
464 | |
465 | expr.kind = REFERENCE; |
466 | expr.id = 0; |
467 | PRE_EXPR_REFERENCE (&expr) = reference; |
468 | result_id = lookup_expression_id (expr: &expr); |
469 | if (result_id != 0) |
470 | return expression_for_id (id: result_id); |
471 | |
472 | result = pre_expr_pool.allocate (); |
473 | result->kind = REFERENCE; |
474 | result->loc = loc; |
475 | result->value_id = reference->value_id; |
476 | PRE_EXPR_REFERENCE (result) = reference; |
477 | alloc_expression_id (expr: result); |
478 | return result; |
479 | } |
480 | |

/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef class bitmap_set
{
public:
  /* Bitmap of expression ids in this set.  */
  bitmap_head expressions;
  /* Bitmap of value ids in this set.  */
  bitmap_head values;
} *bitmap_set_t;

/* Iterate over the expression ids contained in SET.  */
#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi)		\
  EXECUTE_IF_SET_IN_BITMAP (&(set)->expressions, 0, (id), (bi))

/* Iterate over the value ids contained in SET.  */
#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi)		\
  EXECUTE_IF_SET_IN_BITMAP (&(set)->values, 0, (id), (bi))

/* Mapping from value id to expressions with that value_id. */
static vec<bitmap> value_expressions;
/* We just record a single expression for each constant value,
   one of kind CONSTANT.  Indexed by the negated (constant) value id.  */
static vec<pre_expr> constant_value_expressions;
502 | |

/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of new expressions/temporaries generated by PRE. */
  int insertions;

  /* The number of inserts found due to partial anticipation */
  int pa_insert;

  /* The number of inserts made for code hoisting. */
  int hoist_insert;

  /* The number of new PHI nodes added by PRE. */
  int phis;
} pre_stats;
520 | |
/* Whether insertion for partially anticipated values (the "partial
   partial" step) should be performed.  NOTE(review): set elsewhere in
   this file -- confirm against the pass setup code.  */
static bool do_partial_partial;

/* Forward declarations of local routines defined later in this file.  */
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static bool bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
					 tree);
static tree find_or_generate_expression (basic_block, tree, gimple_seq *);
static unsigned int get_expr_value_id (pre_expr);
533 | |
/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them. */

static object_allocator<bitmap_set> bitmap_set_pool ("Bitmap sets" );
/* Obstack backing all the bitmaps used by the dataflow sets.  */
static bitmap_obstack grand_bitmap_obstack;
539 | |
/* A three tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  PRED is implicit: each basic block owns its own
   table (see PHI_TRANS_TABLE), so only E and V are stored here.  */

typedef struct expr_pred_trans_d : public typed_noop_remove <expr_pred_trans_d>
{
  typedef expr_pred_trans_d value_type;
  typedef expr_pred_trans_d compare_type;

  /* The expression ID.  Doubles as the hash; 0 and -1u are reserved
     for empty and deleted slots respectively.  */
  unsigned e;

  /* The value expression ID that resulted from the translation. */
  unsigned v;

  /* hash_table support. */
  static inline void mark_empty (expr_pred_trans_d &);
  static inline bool is_empty (const expr_pred_trans_d &);
  static inline void mark_deleted (expr_pred_trans_d &);
  static inline bool is_deleted (const expr_pred_trans_d &);
  static const bool empty_zero_p = true;
  static inline hashval_t hash (const expr_pred_trans_d &);
  static inline int equal (const expr_pred_trans_d &, const expr_pred_trans_d &);
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;
564 | |
/* hash_table support: a slot with expression id 0 is empty.  */

inline bool
expr_pred_trans_d::is_empty (const expr_pred_trans_d &e)
{
  return e.e == 0;
}

/* hash_table support: a slot with expression id -1 is deleted.  */

inline bool
expr_pred_trans_d::is_deleted (const expr_pred_trans_d &e)
{
  return e.e == -1u;
}

/* hash_table support: mark slot E empty.  */

inline void
expr_pred_trans_d::mark_empty (expr_pred_trans_d &e)
{
  e.e = 0;
}

/* hash_table support: mark slot E deleted.  */

inline void
expr_pred_trans_d::mark_deleted (expr_pred_trans_d &e)
{
  e.e = -1u;
}

/* hash_table support: the hash is simply the expression id.  */

inline hashval_t
expr_pred_trans_d::hash (const expr_pred_trans_d &e)
{
  return e.e;
}

/* hash_table support: entries are equal when their expression ids are.  */

inline int
expr_pred_trans_d::equal (const expr_pred_trans_d &ve1,
			  const expr_pred_trans_d &ve2)
{
  return ve1.e == ve2.e;
}
601 | |
/* Sets that we need to keep track of, per basic block.  An instance is
   hung off each block's ->aux field; see the accessor macros that
   follow this definition.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block. */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block. */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block. IE the LHS of an expression. */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block. */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block. */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block. */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration. */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x. */
  bitmap expr_dies;

  /* The live virtual operand on successor edges. */
  tree vop_on_exit;

  /* PHI translate cache for the single successor edge.  Allocated
     lazily by phi_trans_add.  */
  hash_table<expr_pred_trans_d> *phi_translate_table;

  /* True if we have visited this block during ANTIC calculation. */
  unsigned int visited : 1;

  /* True when the block contains a call that might not return. */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;
649 | |
/* Accessors for the per-basic-block sets stored in BB->aux (see
   struct bb_bitmap_sets above).  */
#define EXP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB)	((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB)	((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB)	((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB)	((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB)	((bb_value_sets_t) ((BB)->aux))->expr_dies
#define PHI_TRANS_TABLE(BB) ((bb_value_sets_t) ((BB)->aux))->phi_translate_table
#define BB_VISITED(BB)	((bb_value_sets_t) ((BB)->aux))->visited
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
#define BB_LIVE_VOP_ON_EXIT(BB) ((bb_value_sets_t) ((BB)->aux))->vop_on_exit
662 | |
663 | |
664 | /* Add the tuple mapping from {expression E, basic block PRED} to |
665 | the phi translation table and return whether it pre-existed. */ |
666 | |
667 | static inline bool |
668 | phi_trans_add (expr_pred_trans_t *entry, pre_expr e, basic_block pred) |
669 | { |
670 | if (!PHI_TRANS_TABLE (pred)) |
671 | PHI_TRANS_TABLE (pred) = new hash_table<expr_pred_trans_d> (11); |
672 | |
673 | expr_pred_trans_t slot; |
674 | expr_pred_trans_d tem; |
675 | unsigned id = get_expression_id (expr: e); |
676 | tem.e = id; |
677 | slot = PHI_TRANS_TABLE (pred)->find_slot_with_hash (comparable: tem, hash: id, insert: INSERT); |
678 | if (slot->e) |
679 | { |
680 | *entry = slot; |
681 | return true; |
682 | } |
683 | |
684 | *entry = slot; |
685 | slot->e = id; |
686 | return false; |
687 | } |
688 | |
689 | |
690 | /* Add expression E to the expression set of value id V. */ |
691 | |
692 | static void |
693 | add_to_value (unsigned int v, pre_expr e) |
694 | { |
695 | gcc_checking_assert (get_expr_value_id (e) == v); |
696 | |
697 | if (value_id_constant_p (v)) |
698 | { |
699 | if (e->kind != CONSTANT) |
700 | return; |
701 | |
702 | if (-v >= constant_value_expressions.length ()) |
703 | constant_value_expressions.safe_grow_cleared (len: -v + 1); |
704 | |
705 | pre_expr leader = constant_value_expressions[-v]; |
706 | if (!leader) |
707 | constant_value_expressions[-v] = e; |
708 | } |
709 | else |
710 | { |
711 | if (v >= value_expressions.length ()) |
712 | value_expressions.safe_grow_cleared (len: v + 1); |
713 | |
714 | bitmap set = value_expressions[v]; |
715 | if (!set) |
716 | { |
717 | set = BITMAP_ALLOC (obstack: &grand_bitmap_obstack); |
718 | value_expressions[v] = set; |
719 | } |
720 | bitmap_set_bit (set, get_expression_id (expr: e)); |
721 | } |
722 | } |
723 | |
724 | /* Create a new bitmap set and return it. */ |
725 | |
726 | static bitmap_set_t |
727 | bitmap_set_new (void) |
728 | { |
729 | bitmap_set_t ret = bitmap_set_pool.allocate (); |
730 | bitmap_initialize (head: &ret->expressions, obstack: &grand_bitmap_obstack); |
731 | bitmap_initialize (head: &ret->values, obstack: &grand_bitmap_obstack); |
732 | return ret; |
733 | } |
734 | |
/* Return the value id for a PRE expression EXPR. */

static unsigned int
get_expr_value_id (pre_expr expr)
{
  /* ??? We cannot assert that expr has a value-id (it can be 0), because
     we assign value-ids only to expressions that have a result
     in set_hashtable_value_ids. */
  return expr->value_id;
}
745 | |
746 | /* Return a VN valnum (SSA name or constant) for the PRE value-id VAL. */ |
747 | |
748 | static tree |
749 | vn_valnum_from_value_id (unsigned int val) |
750 | { |
751 | if (value_id_constant_p (v: val)) |
752 | { |
753 | pre_expr vexpr = constant_value_expressions[-val]; |
754 | if (vexpr) |
755 | return PRE_EXPR_CONSTANT (vexpr); |
756 | return NULL_TREE; |
757 | } |
758 | |
759 | bitmap exprset = value_expressions[val]; |
760 | bitmap_iterator bi; |
761 | unsigned int i; |
762 | EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi) |
763 | { |
764 | pre_expr vexpr = expression_for_id (id: i); |
765 | if (vexpr->kind == NAME) |
766 | return VN_INFO (PRE_EXPR_NAME (vexpr))->valnum; |
767 | } |
768 | return NULL_TREE; |
769 | } |
770 | |
771 | /* Insert an expression EXPR into a bitmapped set. */ |
772 | |
773 | static void |
774 | bitmap_insert_into_set (bitmap_set_t set, pre_expr expr) |
775 | { |
776 | unsigned int val = get_expr_value_id (expr); |
777 | if (! value_id_constant_p (v: val)) |
778 | { |
779 | /* Note this is the only function causing multiple expressions |
780 | for the same value to appear in a set. This is needed for |
781 | TMP_GEN, PHI_GEN and NEW_SETs. */ |
782 | bitmap_set_bit (&set->values, val); |
783 | bitmap_set_bit (&set->expressions, get_expression_id (expr)); |
784 | } |
785 | } |
786 | |
787 | /* Copy a bitmapped set ORIG, into bitmapped set DEST. */ |
788 | |
789 | static void |
790 | bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig) |
791 | { |
792 | bitmap_copy (&dest->expressions, &orig->expressions); |
793 | bitmap_copy (&dest->values, &orig->values); |
794 | } |
795 | |
796 | |
797 | /* Free memory used up by SET. */ |
798 | static void |
799 | bitmap_set_free (bitmap_set_t set) |
800 | { |
801 | bitmap_clear (&set->expressions); |
802 | bitmap_clear (&set->values); |
803 | } |
804 | |
/* Forward declaration; this overload and the value-id overload below
   are mutually recursive.  */
static void
pre_expr_DFS (pre_expr expr, bitmap_set_t set, bitmap val_visited,
	      vec<pre_expr> &post);
808 | |
809 | /* DFS walk leaders of VAL to their operands with leaders in SET, collecting |
810 | expressions in SET in postorder into POST. */ |
811 | |
812 | static void |
813 | pre_expr_DFS (unsigned val, bitmap_set_t set, bitmap val_visited, |
814 | vec<pre_expr> &post) |
815 | { |
816 | unsigned int i; |
817 | bitmap_iterator bi; |
818 | |
819 | /* Iterate over all leaders and DFS recurse. Borrowed from |
820 | bitmap_find_leader. */ |
821 | bitmap exprset = value_expressions[val]; |
822 | if (!exprset->first->next) |
823 | { |
824 | EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi) |
825 | if (bitmap_bit_p (&set->expressions, i)) |
826 | pre_expr_DFS (expr: expression_for_id (id: i), set, val_visited, post); |
827 | return; |
828 | } |
829 | |
830 | EXECUTE_IF_AND_IN_BITMAP (exprset, &set->expressions, 0, i, bi) |
831 | pre_expr_DFS (expr: expression_for_id (id: i), set, val_visited, post); |
832 | } |
833 | |
/* DFS walk EXPR to its operands with leaders in SET, collecting
   expressions in SET in postorder into POST.  Operands are visited
   before EXPR itself is pushed, yielding a topological order of SET.  */

static void
pre_expr_DFS (pre_expr expr, bitmap_set_t set, bitmap val_visited,
	      vec<pre_expr> &post)
{
  switch (expr->kind)
    {
    case NARY:
      {
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	for (unsigned i = 0; i < nary->length; i++)
	  {
	    if (TREE_CODE (nary->op[i]) != SSA_NAME)
	      continue;
	    unsigned int op_val_id = VN_INFO (nary->op[i])->value_id;
	    /* If we already found a leader for the value we've
	       recursed already.  Avoid the costly bitmap_find_leader.  */
	    if (bitmap_bit_p (&set->values, op_val_id)
		&& bitmap_set_bit (val_visited, op_val_id))
	      pre_expr_DFS (val: op_val_id, set, val_visited, post);
	  }
	break;
      }
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	vec<vn_reference_op_s> operands = ref->operands;
	vn_reference_op_t operand;
	for (unsigned i = 0; operands.iterate (ix: i, ptr: &operand); i++)
	  {
	    /* Each reference op carries up to three operand trees; recurse
	       into every one that is an SSA name.  */
	    tree op[3];
	    op[0] = operand->op0;
	    op[1] = operand->op1;
	    op[2] = operand->op2;
	    for (unsigned n = 0; n < 3; ++n)
	      {
		if (!op[n] || TREE_CODE (op[n]) != SSA_NAME)
		  continue;
		unsigned op_val_id = VN_INFO (op[n])->value_id;
		if (bitmap_bit_p (&set->values, op_val_id)
		    && bitmap_set_bit (val_visited, op_val_id))
		  pre_expr_DFS (val: op_val_id, set, val_visited, post);
	      }
	  }
	break;
      }
    default:;
    }
  /* All operands processed (postorder): record EXPR itself.  */
  post.quick_push (obj: expr);
}
886 | |
/* Generate a topologically-ordered array of bitmap set SET, so that
   expressions appear after the expressions their SSA operands' values
   are represented by.  The caller owns the returned vector.  */

static vec<pre_expr>
sorted_array_from_bitmap_set (bitmap_set_t set)
{
  unsigned int i;
  bitmap_iterator bi;
  vec<pre_expr> result;

  /* Pre-allocate enough space for the array.  */
  result.create (nelems: bitmap_count_bits (&set->expressions));

  /* Track value ids already expanded by the DFS; use tree view as
     membership tests dominate over iteration.  */
  auto_bitmap val_visited (&grand_bitmap_obstack);
  bitmap_tree_view (val_visited);
  FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
    if (bitmap_set_bit (val_visited, i))
      pre_expr_DFS (val: i, set, val_visited, post&: result);

  return result;
}
907 | |
/* Subtract all expressions contained in ORIG from DEST.  Returns a new
   bitmap set; DEST and ORIG are left unmodified.  The values bitmap of
   the result is recomputed from the surviving expressions.  */

static bitmap_set_t
bitmap_set_subtract_expressions (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_set_t result = bitmap_set_new ();
  bitmap_iterator bi;
  unsigned int i;

  bitmap_and_compl (&result->expressions, &dest->expressions,
		    &orig->expressions);

  /* Rebuild the value bitmap to match the remaining expressions.  */
  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
    {
      pre_expr expr = expression_for_id (id: i);
      unsigned int value_id = get_expr_value_id (expr);
      bitmap_set_bit (&result->values, value_id);
    }

  return result;
}
929 | |
/* Subtract all values in bitmap set B from bitmap set A, also removing
   from A any expression whose value is no longer present.  */

static void
bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
{
  unsigned int i;
  bitmap_iterator bi;
  /* Clearing the bit the iterator currently sits on is avoided by
     deferring each removal by one iteration; -1U means none pending.  */
  unsigned to_remove = -1U;
  bitmap_and_compl_into (&a->values, &b->values);
  FOR_EACH_EXPR_ID_IN_SET (a, i, bi)
    {
      /* Perform the removal queued on the previous iteration.  */
      if (to_remove != -1U)
	{
	  bitmap_clear_bit (&a->expressions, to_remove);
	  to_remove = -1U;
	}
      pre_expr expr = expression_for_id (id: i);
      if (! bitmap_bit_p (&a->values, get_expr_value_id (expr)))
	to_remove = i;
    }
  /* Flush a removal still pending after the final iteration.  */
  if (to_remove != -1U)
    bitmap_clear_bit (&a->expressions, to_remove);
}
953 | |
954 | |
955 | /* Return true if bitmapped set SET contains the value VALUE_ID. */ |
956 | |
957 | static bool |
958 | bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id) |
959 | { |
960 | if (value_id_constant_p (v: value_id)) |
961 | return true; |
962 | |
963 | return bitmap_bit_p (&set->values, value_id); |
964 | } |
965 | |
/* Return true if two bitmap sets are equal.  Only the value bitmaps are
   compared; sets holding the same values through different leader
   expressions compare equal.  */

static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
  return bitmap_equal_p (&a->values, &b->values);
}
973 | |
/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
   and add it otherwise.  Return true if any changes were made.  */

static bool
bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  /* Constant values are never tracked explicitly in SET.  */
  if (value_id_constant_p (v: val))
    return false;

  if (bitmap_set_contains_value (set, value_id: val))
    {
      /* The number of expressions having a given value is usually
	 significantly less than the total number of expressions in SET.
	 Thus, rather than check, for each expression in SET, whether it
	 has the value LOOKFOR, we walk the reverse mapping that tells us
	 what expressions have a given value, and see if any of those
	 expressions are in our set.  For large testcases, this is about
	 5-10x faster than walking the bitmap.  If this is somehow a
	 significant lose for some cases, we can choose which set to walk
	 based on the set size.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];
      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
	{
	  /* bitmap_clear_bit returns true iff the bit was set, i.e. this
	     is the expression currently representing VAL in SET.  */
	  if (bitmap_clear_bit (&set->expressions, i))
	    {
	      bitmap_set_bit (&set->expressions, get_expression_id (expr));
	      /* Report no change when EXPR already was the leader.  */
	      return i != get_expression_id (expr);
	    }
	}
      /* SET contains VAL, so some expression of VAL must be in SET.  */
      gcc_unreachable ();
    }

  bitmap_insert_into_set (set, expr);
  return true;
}
1012 | |
/* Insert EXPR into SET if EXPR's value is not already present in
   SET.  Unlike bitmap_value_replace_in_set, an existing leader for the
   value is kept.  */

static void
bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  gcc_checking_assert (expr->id == get_expression_id (expr));

  /* Constant values are always considered to be part of the set.  */
  if (value_id_constant_p (v: val))
    return;

  /* If the value membership changed, add the expression.  */
  if (bitmap_set_bit (&set->values, val))
    bitmap_set_bit (&set->expressions, expr->id);
}
1031 | |
/* Print out EXPR to outfile.  A NULL EXPR prints as "NULL"; all four
   pre_expr kinds are handled.  */

static void
print_pre_expr (FILE *outfile, const pre_expr expr)
{
  if (! expr)
    {
      fprintf (stream: outfile, format: "NULL");
      return;
    }
  switch (expr->kind)
    {
    case CONSTANT:
      print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr));
      break;
    case NAME:
      print_generic_expr (outfile, PRE_EXPR_NAME (expr));
      break;
    case NARY:
      {
	/* Printed as {opcode,op1,...,opN}.  */
	unsigned int i;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	fprintf (stream: outfile, format: "{%s,", get_tree_code_name (nary->opcode));
	for (i = 0; i < nary->length; i++)
	  {
	    print_generic_expr (outfile, nary->op[i]);
	    if (i != (unsigned) nary->length - 1)
	      fprintf (stream: outfile, format: ",");
	  }
	fprintf (stream: outfile, format: "}");
      }
      break;

    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	print_vn_reference_ops (outfile, ref->operands);
	/* Append the virtual use, if any, as @VUSE.  */
	if (ref->vuse)
	  {
	    fprintf (stream: outfile, format: "@");
	    print_generic_expr (outfile, ref->vuse);
	  }
      }
      break;
    }
}
void debug_pre_expr (pre_expr);

/* Like print_pre_expr but always prints to stderr.  */
DEBUG_FUNCTION void
debug_pre_expr (pre_expr e)
{
  print_pre_expr (stderr, expr: e);
  fprintf (stderr, format: "\n");
}
1087 | |
/* Print out SET to OUTFILE in the form
   "SETNAME[BLOCKINDEX] := { expr (value-id), ... }".
   A NULL SET prints as an empty set.  */

static void
print_bitmap_set (FILE *outfile, bitmap_set_t set,
		  const char *setname, int blockindex)
{
  fprintf (stream: outfile, format: "%s[%d] := { ", setname, blockindex);
  if (set)
    {
      bool first = true;
      unsigned i;
      bitmap_iterator bi;

      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
	{
	  const pre_expr expr = expression_for_id (id: i);

	  if (!first)
	    fprintf (stream: outfile, format: ", ");
	  first = false;
	  print_pre_expr (outfile, expr);

	  /* Annotate each expression with its value id.  */
	  fprintf (stream: outfile, format: " (%04d)", get_expr_value_id (expr));
	}
    }
  fprintf (stream: outfile, format: " }\n");
}
1115 | |
void debug_bitmap_set (bitmap_set_t);

/* Like print_bitmap_set but always prints SET to stderr.  */
DEBUG_FUNCTION void
debug_bitmap_set (bitmap_set_t set)
{
  print_bitmap_set (stderr, set, setname: "debug", blockindex: 0);
}
1123 | |
void debug_bitmap_sets_for (basic_block);

/* Dump all PRE data-flow sets computed for basic block BB to stderr.  */
DEBUG_FUNCTION void
debug_bitmap_sets_for (basic_block bb)
{
  print_bitmap_set (stderr, AVAIL_OUT (bb), setname: "avail_out", blockindex: bb->index);
  print_bitmap_set (stderr, EXP_GEN (bb), setname: "exp_gen", blockindex: bb->index);
  print_bitmap_set (stderr, PHI_GEN (bb), setname: "phi_gen", blockindex: bb->index);
  print_bitmap_set (stderr, TMP_GEN (bb), setname: "tmp_gen", blockindex: bb->index);
  print_bitmap_set (stderr, ANTIC_IN (bb), setname: "antic_in", blockindex: bb->index);
  /* PA_IN is only computed when partial-partial redundancy elimination
     is enabled.  */
  if (do_partial_partial)
    print_bitmap_set (stderr, PA_IN (bb), setname: "pa_in", blockindex: bb->index);
  print_bitmap_set (stderr, NEW_SETS (bb), setname: "new_sets", blockindex: bb->index);
}
1138 | |
/* Print out the expressions that have VAL to OUTFILE.  */

static void
print_value_expressions (FILE *outfile, unsigned int val)
{
  bitmap set = value_expressions[val];
  if (set)
    {
      /* Wrap the expression bitmap in a temporary bitmap_set so
	 print_bitmap_set can be reused; its values bitmap is left
	 untouched since print_bitmap_set does not read it.  */
      bitmap_set x;
      char s[10];
      sprintf (s: s, format: "%04d", val);
      x.expressions = *set;
      print_bitmap_set (outfile, set: &x, setname: s, blockindex: 0);
    }
}
1154 | |
1155 | |
/* Like print_value_expressions but always prints to stderr.  */
DEBUG_FUNCTION void
debug_value_expressions (unsigned int val)
{
  print_value_expressions (stderr, val);
}
1161 | |
/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
   represent it.  An already-registered expression for CONSTANT is
   reused if present.  */

static pre_expr
get_or_alloc_expr_for_constant (tree constant)
{
  unsigned int result_id;
  struct pre_expr_d expr;
  pre_expr newexpr;

  /* Probe the expression table with a stack-local key first.  */
  expr.kind = CONSTANT;
  PRE_EXPR_CONSTANT (&expr) = constant;
  result_id = lookup_expression_id (expr: &expr);
  if (result_id != 0)
    return expression_for_id (id: result_id);

  /* Not found: allocate, register and value-number a fresh one.  */
  newexpr = pre_expr_pool.allocate ();
  newexpr->kind = CONSTANT;
  newexpr->loc = UNKNOWN_LOCATION;
  PRE_EXPR_CONSTANT (newexpr) = constant;
  alloc_expression_id (expr: newexpr);
  newexpr->value_id = get_or_alloc_constant_value_id (constant);
  add_to_value (v: newexpr->value_id, e: newexpr);
  return newexpr;
}
1187 | |
/* Return the folded version of E if E, when folded, is a gimple
   min_invariant or an SSA name.  Otherwise, return E unchanged.  */

static pre_expr
fully_constant_expression (pre_expr e)
{
  switch (e->kind)
    {
    case CONSTANT:
      /* Already fully constant.  */
      return e;
    case NARY:
      {
	vn_nary_op_t nary = PRE_EXPR_NARY (e);
	tree res = vn_nary_simplify (nary);
	if (!res)
	  return e;
	if (is_gimple_min_invariant (res))
	  return get_or_alloc_expr_for_constant (constant: res);
	if (TREE_CODE (res) == SSA_NAME)
	  return get_or_alloc_expr_for_name (name: res);
	/* Simplified to something we cannot represent; keep E.  */
	return e;
      }
    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (e);
	tree folded;
	if ((folded = fully_constant_vn_reference_p (ref)))
	  return get_or_alloc_expr_for_constant (constant: folded);
	return e;
      }
    default:
      /* NAMEs and anything else fold to themselves.  */
      return e;
    }
}
1222 | |
/* Translate the VUSE backwards through phi nodes in E->dest, so that
   it has the value it would have in E->src.  Set *SAME_VALID to true
   in case the new vuse doesn't change the value id of the OPERANDS.
   SAME_VALID may be NULL when the caller does not care.  */

static tree
translate_vuse_through_block (vec<vn_reference_op_s> operands,
			      alias_set_type set, alias_set_type base_set,
			      tree type, tree vuse, edge e, bool *same_valid)
{
  basic_block phiblock = e->dest;
  gimple *phi = SSA_NAME_DEF_STMT (vuse);
  ao_ref ref;

  if (same_valid)
    *same_valid = true;

  /* If value-numbering provided a memory state for this
     that dominates PHIBLOCK we can just use that.  */
  if (gimple_nop_p (g: phi)
      || (gimple_bb (g: phi) != phiblock
	  && dominated_by_p (CDI_DOMINATORS, phiblock, gimple_bb (g: phi))))
    return vuse;

  /* We have pruned expressions that are killed in PHIBLOCK via
     prune_clobbered_mems but we have not rewritten the VUSE to the one
     live at the start of the block.  If there is no virtual PHI to translate
     through return the VUSE live at entry.  Otherwise the VUSE to translate
     is the def of the virtual PHI node.  */
  phi = get_virtual_phi (phiblock);
  if (!phi)
    return BB_LIVE_VOP_ON_EXIT
	     (get_immediate_dominator (CDI_DOMINATORS, phiblock));

  /* Only try the (bounded) alias walk when the caller asked whether the
     value stays the same and we can build an ao_ref for the access.  */
  if (same_valid
      && ao_ref_init_from_vn_reference (&ref, set, base_set, type, operands))
    {
      bitmap visited = NULL;
      /* Try to find a vuse that dominates this phi node by skipping
	 non-clobbering statements.  */
      unsigned int cnt = param_sccvn_max_alias_queries_per_access;
      vuse = get_continuation_for_phi (phi, &ref, true,
				       cnt, &visited, false, NULL, NULL);
      if (visited)
	BITMAP_FREE (visited);
    }
  else
    vuse = NULL_TREE;
  /* If we didn't find any, the value ID can't stay the same.  */
  if (!vuse && same_valid)
    *same_valid = false;

  /* ??? We would like to return vuse here as this is the canonical
     upmost vdef that this reference is associated with.  But during
     insertion of the references into the hash tables we only ever
     directly insert with their direct gimple_vuse, hence returning
     something else would make us not find the other expression.  */
  return PHI_ARG_DEF (phi, e->dest_idx);
}
1281 | |
1282 | /* Like bitmap_find_leader, but checks for the value existing in SET1 *or* |
1283 | SET2 *or* SET3. This is used to avoid making a set consisting of the union |
1284 | of PA_IN and ANTIC_IN during insert and phi-translation. */ |
1285 | |
1286 | static inline pre_expr |
1287 | find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2, |
1288 | bitmap_set_t set3 = NULL) |
1289 | { |
1290 | pre_expr result = NULL; |
1291 | |
1292 | if (set1) |
1293 | result = bitmap_find_leader (set1, val); |
1294 | if (!result && set2) |
1295 | result = bitmap_find_leader (set2, val); |
1296 | if (!result && set3) |
1297 | result = bitmap_find_leader (set3, val); |
1298 | return result; |
1299 | } |
1300 | |
/* Get the tree type for our PRE expression E.  */

static tree
get_expr_type (const pre_expr e)
{
  switch (e->kind)
    {
    case NAME:
      return TREE_TYPE (PRE_EXPR_NAME (e));
    case CONSTANT:
      return TREE_TYPE (PRE_EXPR_CONSTANT (e));
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->type;
    case NARY:
      return PRE_EXPR_NARY (e)->type;
    }
  /* All pre_expr kinds are handled above.  */
  gcc_unreachable ();
}
1319 | |
/* Get a representative SSA_NAME for a given expression that is available in B.
   Since all of our sub-expressions are treated as values, we require
   them to be SSA_NAME's for simplicity.
   Prior versions of GVNPRE used to use "value handles" here, so that
   an expression would be VH.11 + VH.10 instead of d_3 + e_6.  In
   either case, the operands are really values (IE we do not expect
   them to be usable without finding leaders).  B may be NULL, in which
   case any SSA_NAME of the value is acceptable.  */

static tree
get_representative_for (const pre_expr e, basic_block b = NULL)
{
  tree name, valnum = NULL_TREE;
  unsigned int value_id = get_expr_value_id (expr: e);

  switch (e->kind)
    {
    case NAME:
      return PRE_EXPR_NAME (e);
    case CONSTANT:
      return PRE_EXPR_CONSTANT (e);
    case NARY:
    case REFERENCE:
      {
	/* Go through all of the expressions representing this value
	   and pick out an SSA_NAME.  */
	unsigned int i;
	bitmap_iterator bi;
	bitmap exprs = value_expressions[value_id];
	EXECUTE_IF_SET_IN_BITMAP (exprs, 0, i, bi)
	  {
	    pre_expr rep = expression_for_id (id: i);
	    if (rep->kind == NAME)
	      {
		tree name = PRE_EXPR_NAME (rep);
		valnum = VN_INFO (name)->valnum;
		gimple *def = SSA_NAME_DEF_STMT (name);
		/* We have to return either a new representative or one
		   that can be used for expression simplification and thus
		   is available in B.  */
		if (! b
		    || gimple_nop_p (g: def)
		    || dominated_by_p (CDI_DOMINATORS, b, gimple_bb (g: def)))
		  return name;
	      }
	    else if (rep->kind == CONSTANT)
	      return PRE_EXPR_CONSTANT (rep);
	  }
      }
      break;
    }

  /* If we reached here we couldn't find an SSA_NAME.  This can
     happen when we've discovered a value that has never appeared in
     the program as set to an SSA_NAME, as the result of phi translation.
     Create one here.
     ??? We should be able to re-use this when we insert the statement
     to compute it.  */
  name = make_temp_ssa_name (type: get_expr_type (e), stmt: gimple_build_nop (), name: "pretmp");
  vn_ssa_aux_t vn_info = VN_INFO (name);
  vn_info->value_id = value_id;
  /* Reuse a valnum recorded from a non-dominating NAME above, if any.  */
  vn_info->valnum = valnum ? valnum : name;
  vn_info->visited = true;
  /* ??? For now mark this SSA name for release by VN.  */
  vn_info->needs_insertion = true;
  add_to_value (v: value_id, e: get_or_alloc_expr_for_name (name));
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (stream: dump_file, format: "Created SSA_NAME representative ");
      print_generic_expr (dump_file, name);
      fprintf (stream: dump_file, format: " for expression:");
      print_pre_expr (outfile: dump_file, expr: e);
      fprintf (stream: dump_file, format: " (%04d)\n", value_id);
    }

  return name;
}
1396 | |
1397 | |
static pre_expr
phi_translate (bitmap_set_t, pre_expr, bitmap_set_t, bitmap_set_t, edge);

/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  Return NULL if we can't find a leader for each part
   of the translated expression.  DEST is the ANTIC_OUT set computed so
   far; SET1/SET2 are the sets leaders are searched in.  */

static pre_expr
phi_translate_1 (bitmap_set_t dest,
		 pre_expr expr, bitmap_set_t set1, bitmap_set_t set2, edge e)
{
  basic_block pred = e->src;
  basic_block phiblock = e->dest;
  location_t expr_loc = expr->loc;
  switch (expr->kind)
    {
    case NARY:
      {
	unsigned int i;
	bool changed = false;
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	/* Translate into a stack copy so the original NARY stays
	   untouched.  */
	vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
					   sizeof_vn_nary_op (nary->length));
	memcpy (dest: newnary, src: nary, n: sizeof_vn_nary_op (length: nary->length));

	for (i = 0; i < newnary->length; i++)
	  {
	    if (TREE_CODE (newnary->op[i]) != SSA_NAME)
	      continue;
	    else
	      {
		pre_expr leader, result;
		unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
		leader = find_leader_in_sets (val: op_val_id, set1, set2);
		result = phi_translate (dest, leader, set1, set2, e);
		if (result && result != leader)
		  /* If op has a leader in the sets we translate make
		     sure to use the value of the translated expression.
		     We might need a new representative for that.  */
		  newnary->op[i] = get_representative_for (e: result, b: pred);
		else if (!result)
		  /* A leaderless operand makes the whole expression
		     untranslatable.  */
		  return NULL;

		changed |= newnary->op[i] != nary->op[i];
	      }
	  }
	if (changed)
	  {
	    pre_expr constant;
	    unsigned int new_val_id;

	    /* Temporarily swap in the translated operands to attempt
	       simplification of the whole expression.  */
	    PRE_EXPR_NARY (expr) = newnary;
	    constant = fully_constant_expression (e: expr);
	    PRE_EXPR_NARY (expr) = nary;
	    if (constant != expr)
	      {
		/* For non-CONSTANTs we have to make sure we can eventually
		   insert the expression.  Which means we need to have a
		   leader for it.  */
		if (constant->kind != CONSTANT)
		  {
		    /* Do not allow simplifications to non-constants over
		       backedges as this will likely result in a loop PHI node
		       to be inserted and increased register pressure.
		       See PR77498 - this avoids doing predcoms work in
		       a less efficient way.  */
		    if (e->flags & EDGE_DFS_BACK)
		      ;
		    else
		      {
			unsigned value_id = get_expr_value_id (expr: constant);
			/* We want a leader in ANTIC_OUT or AVAIL_OUT here.
			   dest has what we computed into ANTIC_OUT sofar
			   so pick from that - since topological sorting
			   by sorted_array_from_bitmap_set isn't perfect
			   we may lose some cases here.  */
			constant = find_leader_in_sets (val: value_id, set1: dest,
							AVAIL_OUT (pred));
			if (constant)
			  {
			    if (dump_file && (dump_flags & TDF_DETAILS))
			      {
				fprintf (stream: dump_file, format: "simplifying ");
				print_pre_expr (outfile: dump_file, expr);
				fprintf (stream: dump_file,
					 format: " translated %d -> %d to ",
					 phiblock->index, pred->index);
				PRE_EXPR_NARY (expr) = newnary;
				print_pre_expr (outfile: dump_file, expr);
				PRE_EXPR_NARY (expr) = nary;
				fprintf (stream: dump_file, format: " to ");
				print_pre_expr (outfile: dump_file, expr: constant);
				fprintf (stream: dump_file, format: "\n");
			      }
			    return constant;
			  }
		      }
		  }
		else
		  return constant;
	      }

	    /* Look up the translated expression in the VN tables to reuse
	       an existing value id where possible.  */
	    tree result = vn_nary_op_lookup_pieces (newnary->length,
						    newnary->opcode,
						    newnary->type,
						    &newnary->op[0],
						    &nary);
	    if (result && is_gimple_min_invariant (result))
	      return get_or_alloc_expr_for_constant (constant: result);

	    /* Value id 0 requests allocation of a fresh one.  */
	    if (!nary || nary->predicated_values)
	      new_val_id = 0;
	    else
	      new_val_id = nary->value_id;
	    expr = get_or_alloc_expr_for_nary (nary: newnary, value_id: new_val_id, loc: expr_loc);
	    add_to_value (v: get_expr_value_id (expr), e: expr);
	  }
	return expr;
      }
      break;

    case REFERENCE:
      {
	vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	vec<vn_reference_op_s> operands = ref->operands;
	tree vuse = ref->vuse;
	tree newvuse = vuse;
	vec<vn_reference_op_s> newoperands = vNULL;
	bool changed = false, same_valid = true;
	unsigned int i, n;
	vn_reference_op_t operand;
	vn_reference_t newref;

	for (i = 0; operands.iterate (ix: i, ptr: &operand); i++)
	  {
	    pre_expr opresult;
	    pre_expr leader;
	    tree op[3];
	    tree type = operand->type;
	    vn_reference_op_s newop = *operand;
	    op[0] = operand->op0;
	    op[1] = operand->op1;
	    op[2] = operand->op2;
	    for (n = 0; n < 3; ++n)
	      {
		unsigned int op_val_id;
		if (!op[n])
		  continue;
		if (TREE_CODE (op[n]) != SSA_NAME)
		  {
		    /* We can't possibly insert these.  */
		    if (n != 0
			&& !is_gimple_min_invariant (op[n]))
		      break;
		    continue;
		  }
		op_val_id = VN_INFO (op[n])->value_id;
		leader = find_leader_in_sets (val: op_val_id, set1, set2);
		opresult = phi_translate (dest, leader, set1, set2, e);
		if (opresult && opresult != leader)
		  {
		    tree name = get_representative_for (e: opresult);
		    changed |= name != op[n];
		    op[n] = name;
		  }
		else if (!opresult)
		  break;
	      }
	    /* N != 3 means some operand above failed to translate.  */
	    if (n != 3)
	      {
		newoperands.release ();
		return NULL;
	      }
	    /* When we translate a MEM_REF across a backedge and we have
	       restrict info that's not from our functions parameters
	       we have to remap it since we now may deal with a different
	       instance where the dependence info is no longer valid.
	       See PR102970.  Note instead of keeping a remapping table
	       per backedge we simply throw away restrict info.  */
	    if ((newop.opcode == MEM_REF
		 || newop.opcode == TARGET_MEM_REF)
		&& newop.clique > 1
		&& (e->flags & EDGE_DFS_BACK))
	      {
		newop.clique = 0;
		newop.base = 0;
		changed = true;
	      }
	    if (!changed)
	      continue;
	    /* Copy the operand vector lazily, on the first change.  */
	    if (!newoperands.exists ())
	      newoperands = operands.copy ();
	    /* We may have changed from an SSA_NAME to a constant */
	    if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
	      newop.opcode = TREE_CODE (op[0]);
	    newop.type = type;
	    newop.op0 = op[0];
	    newop.op1 = op[1];
	    newop.op2 = op[2];
	    newoperands[i] = newop;
	  }
	gcc_checking_assert (i == operands.length ());

	/* Translate the memory state through the PHIs in E->dest.  */
	if (vuse)
	  {
	    newvuse = translate_vuse_through_block (operands: newoperands.exists ()
						    ? newoperands : operands,
						    set: ref->set, base_set: ref->base_set,
						    type: ref->type, vuse, e,
						    same_valid: changed
						    ? NULL : &same_valid);
	    if (newvuse == NULL_TREE)
	      {
		newoperands.release ();
		return NULL;
	      }
	  }

	if (changed || newvuse != vuse)
	  {
	    unsigned int new_val_id;

	    tree result = vn_reference_lookup_pieces (newvuse, ref->set,
						      ref->base_set,
						      ref->type,
						      newoperands.exists ()
						      ? newoperands : operands,
						      &newref, VN_WALK);
	    if (result)
	      newoperands.release ();

	    /* We can always insert constants, so if we have a partial
	       redundant constant load of another type try to translate it
	       to a constant of appropriate type.  */
	    if (result && is_gimple_min_invariant (result))
	      {
		tree tem = result;
		if (!useless_type_conversion_p (ref->type, TREE_TYPE (result)))
		  {
		    tem = fold_unary (VIEW_CONVERT_EXPR, ref->type, result);
		    if (tem && !is_gimple_min_invariant (tem))
		      tem = NULL_TREE;
		  }
		if (tem)
		  return get_or_alloc_expr_for_constant (constant: tem);
	      }

	    /* If we'd have to convert things we would need to validate
	       if we can insert the translated expression.  So fail
	       here for now - we cannot insert an alias with a different
	       type in the VN tables either, as that would assert.  */
	    if (result
		&& !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
	      return NULL;
	    else if (!result && newref
		     && !useless_type_conversion_p (ref->type, newref->type))
	      {
		newoperands.release ();
		return NULL;
	      }

	    if (newref)
	      new_val_id = newref->value_id;
	    else
	      {
		/* No existing VN entry: allocate a fresh value id unless
		   the translation provably kept the value the same.  */
		if (changed || !same_valid)
		  new_val_id = get_next_value_id ();
		else
		  new_val_id = ref->value_id;
		if (!newoperands.exists ())
		  newoperands = operands.copy ();
		newref = vn_reference_insert_pieces (newvuse, ref->set,
						     ref->base_set, ref->type,
						     newoperands,
						     result, new_val_id);
		/* Ownership moved to the VN tables.  */
		newoperands = vNULL;
	      }
	    expr = get_or_alloc_expr_for_reference (reference: newref, loc: expr_loc);
	    add_to_value (v: new_val_id, e: expr);
	  }
	newoperands.release ();
	return expr;
      }
      break;

    case NAME:
      {
	tree name = PRE_EXPR_NAME (expr);
	gimple *def_stmt = SSA_NAME_DEF_STMT (name);
	/* If the SSA name is defined by a PHI node in this block,
	   translate it.  */
	if (gimple_code (g: def_stmt) == GIMPLE_PHI
	    && gimple_bb (g: def_stmt) == phiblock)
	  {
	    tree def = PHI_ARG_DEF (def_stmt, e->dest_idx);

	    /* Handle constant.  */
	    if (is_gimple_min_invariant (def))
	      return get_or_alloc_expr_for_constant (constant: def);

	    return get_or_alloc_expr_for_name (name: def);
	  }
	/* Otherwise return it unchanged - it will get removed if its
	   value is not available in PREDs AVAIL_OUT set of expressions
	   by the subtraction of TMP_GEN.  */
	return expr;
      }

    default:
      gcc_unreachable ();
    }
}
1709 | |
/* Wrapper around phi_translate_1 providing caching functionality.
   Results for non-NAME expressions are memoized per predecessor block
   in PHI_TRANS_TABLE; failed translations are removed from the cache
   again.  */

static pre_expr
phi_translate (bitmap_set_t dest, pre_expr expr,
	       bitmap_set_t set1, bitmap_set_t set2, edge e)
{
  expr_pred_trans_t slot = NULL;
  pre_expr phitrans;

  if (!expr)
    return NULL;

  /* Constants contain no values that need translation.  */
  if (expr->kind == CONSTANT)
    return expr;

  if (value_id_constant_p (v: get_expr_value_id (expr)))
    return expr;

  /* Don't add translations of NAMEs as those are cheap to translate.  */
  if (expr->kind != NAME)
    {
      /* A pre-existing cache entry means the translation was done (or is
	 in progress) already; return the recorded result.  */
      if (phi_trans_add (entry: &slot, e: expr, pred: e->src))
	return slot->v == 0 ? NULL : expression_for_id (id: slot->v);
      /* Store NULL for the value we want to return in the case of
	 recursing.  */
      slot->v = 0;
    }

  /* Translate.  */
  basic_block saved_valueize_bb = vn_context_bb;
  vn_context_bb = e->src;
  phitrans = phi_translate_1 (dest, expr, set1, set2, e);
  vn_context_bb = saved_valueize_bb;

  if (slot)
    {
      /* We may have reallocated.  */
      phi_trans_add (entry: &slot, e: expr, pred: e->src);
      if (phitrans)
	slot->v = get_expression_id (expr: phitrans);
      else
	/* Remove failed translations again, they cause insert
	   iteration to not pick up new opportunities reliably.  */
	PHI_TRANS_TABLE (e->src)->clear_slot (slot);
    }

  return phitrans;
}
1759 | |
1760 | |
1761 | /* For each expression in SET, translate the values through phi nodes |
1762 | in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting |
1763 | expressions in DEST. */ |
1764 | |
1765 | static void |
1766 | phi_translate_set (bitmap_set_t dest, bitmap_set_t set, edge e) |
1767 | { |
1768 | bitmap_iterator bi; |
1769 | unsigned int i; |
1770 | |
1771 | if (gimple_seq_empty_p (s: phi_nodes (bb: e->dest))) |
1772 | { |
1773 | bitmap_set_copy (dest, orig: set); |
1774 | return; |
1775 | } |
1776 | |
1777 | /* Allocate the phi-translation cache where we have an idea about |
1778 | its size. hash-table implementation internals tell us that |
1779 | allocating the table to fit twice the number of elements will |
1780 | make sure we do not usually re-allocate. */ |
1781 | if (!PHI_TRANS_TABLE (e->src)) |
1782 | PHI_TRANS_TABLE (e->src) = new hash_table<expr_pred_trans_d> |
1783 | (2 * bitmap_count_bits (&set->expressions)); |
1784 | FOR_EACH_EXPR_ID_IN_SET (set, i, bi) |
1785 | { |
1786 | pre_expr expr = expression_for_id (id: i); |
1787 | pre_expr translated = phi_translate (dest, expr, set1: set, NULL, e); |
1788 | if (!translated) |
1789 | continue; |
1790 | |
1791 | bitmap_insert_into_set (set: dest, expr: translated); |
1792 | } |
1793 | } |
1794 | |
1795 | /* Find the leader for a value (i.e., the name representing that |
1796 | value) in a given set, and return it. Return NULL if no leader |
1797 | is found. */ |
1798 | |
static pre_expr
bitmap_find_leader (bitmap_set_t set, unsigned int val)
{
  /* Constant value-ids are negative and index the global
     constant_value_expressions table; constants are leaders in
     every set.  */
  if (value_id_constant_p (v: val))
    return constant_value_expressions[-val];

  if (bitmap_set_contains_value (set, value_id: val))
    {
      /* Rather than walk the entire bitmap of expressions, and see
	 whether any of them has the value we are looking for, we look
	 at the reverse mapping, which tells us the set of expressions
	 that have a given value (IE value->expressions with that
	 value) and see if any of those expressions are in our set.
	 The number of expressions per value is usually significantly
	 less than the number of expressions in the set.  In fact, for
	 large testcases, doing it this way is roughly 5-10x faster
	 than walking the bitmap.
	 If this is somehow a significant lose for some cases, we can
	 choose which set to walk based on which set is smaller.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];

      /* Fast path: the value's expression bitmap fits a single bitmap
	 element, so test membership of each id directly instead of
	 doing the AND-iteration below.  */
      if (!exprset->first->next)
	EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
	  if (bitmap_bit_p (&set->expressions, i))
	    return expression_for_id (id: i);

      /* General case: the first id in the intersection of the value's
	 expressions and SET's expressions is the leader.  */
      EXECUTE_IF_AND_IN_BITMAP (exprset, &set->expressions, 0, i, bi)
	return expression_for_id (id: i);
    }
  return NULL;
}
1832 | |
1833 | /* Determine if EXPR, a memory expression, is ANTIC_IN at the top of |
1834 | BLOCK by seeing if it is not killed in the block. Note that we are |
1835 | only determining whether there is a store that kills it. Because |
1836 | of the order in which clean iterates over values, we are guaranteed |
1837 | that altered operands will have caused us to be eliminated from the |
1838 | ANTIC_IN set already. */ |
1839 | |
static bool
value_dies_in_block_x (pre_expr expr, basic_block block)
{
  tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
  vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
  gimple *def;
  gimple_stmt_iterator gsi;
  unsigned id = get_expression_id (expr);
  bool res = false;
  ao_ref ref;

  /* Without a virtual use the expression does not read memory and
     cannot be killed by a store.  */
  if (!vuse)
    return false;

  /* Lookup a previously calculated result.  The per-block cache uses
     two bits per expression id: bit 2*id marks that an answer was
     computed, bit 2*id+1 holds the cached answer.  */
  if (EXPR_DIES (block)
      && bitmap_bit_p (EXPR_DIES (block), id * 2))
    return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);

  /* A memory expression {e, VUSE} dies in the block if there is a
     statement that may clobber e.  If, starting statement walk from the
     top of the basic block, a statement uses VUSE there can be no kill
     inbetween that use and the original statement that loaded {e, VUSE},
     so we can stop walking.  */
  ref.base = NULL_TREE;
  for (gsi = gsi_start_bb (bb: block); !gsi_end_p (i: gsi); gsi_next (i: &gsi))
    {
      tree def_vuse, def_vdef;
      def = gsi_stmt (i: gsi);
      def_vuse = gimple_vuse (g: def);
      def_vdef = gimple_vdef (g: def);

      /* Not a memory statement.  */
      if (!def_vuse)
	continue;

      /* Not a may-def.  */
      if (!def_vdef)
	{
	  /* A load with the same VUSE, we're done.  */
	  if (def_vuse == vuse)
	    break;

	  continue;
	}

      /* Init ref only if we really need it, i.e. only once we reach
	 the first potentially clobbering statement.  */
      if (ref.base == NULL_TREE
	  && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->base_set,
					     refx->type, refx->operands))
	{
	  /* If we cannot build an alias reference, conservatively
	     treat the expression as killed here.  */
	  res = true;
	  break;
	}
      /* If the statement may clobber expr, it dies.  */
      if (stmt_may_clobber_ref_p_1 (def, &ref))
	{
	  res = true;
	  break;
	}
    }

  /* Remember the result in the two-bit cache described above.  */
  if (!EXPR_DIES (block))
    EXPR_DIES (block) = BITMAP_ALLOC (obstack: &grand_bitmap_obstack);
  bitmap_set_bit (EXPR_DIES (block), id * 2);
  if (res)
    bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);

  return res;
}
1911 | |
1912 | |
1913 | /* Determine if OP is valid in SET1 U SET2, which it is when the union |
1914 | contains its value-id. */ |
1915 | |
1916 | static bool |
1917 | op_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree op) |
1918 | { |
1919 | if (op && TREE_CODE (op) == SSA_NAME) |
1920 | { |
1921 | unsigned int value_id = VN_INFO (op)->value_id; |
1922 | if (!(bitmap_set_contains_value (set: set1, value_id) |
1923 | || (set2 && bitmap_set_contains_value (set: set2, value_id)))) |
1924 | return false; |
1925 | } |
1926 | return true; |
1927 | } |
1928 | |
1929 | /* Determine if the expression EXPR is valid in SET1 U SET2. |
1930 | ONLY SET2 CAN BE NULL. |
1931 | This means that we have a leader for each part of the expression |
1932 | (if it consists of values), or the expression is an SSA_NAME. |
1933 | For loads/calls, we also see if the vuse is killed in this block. */ |
1934 | |
1935 | static bool |
1936 | valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr) |
1937 | { |
1938 | switch (expr->kind) |
1939 | { |
1940 | case NAME: |
1941 | /* By construction all NAMEs are available. Non-available |
1942 | NAMEs are removed by subtracting TMP_GEN from the sets. */ |
1943 | return true; |
1944 | case NARY: |
1945 | { |
1946 | unsigned int i; |
1947 | vn_nary_op_t nary = PRE_EXPR_NARY (expr); |
1948 | for (i = 0; i < nary->length; i++) |
1949 | if (!op_valid_in_sets (set1, set2, op: nary->op[i])) |
1950 | return false; |
1951 | return true; |
1952 | } |
1953 | break; |
1954 | case REFERENCE: |
1955 | { |
1956 | vn_reference_t ref = PRE_EXPR_REFERENCE (expr); |
1957 | vn_reference_op_t vro; |
1958 | unsigned int i; |
1959 | |
1960 | FOR_EACH_VEC_ELT (ref->operands, i, vro) |
1961 | { |
1962 | if (!op_valid_in_sets (set1, set2, op: vro->op0) |
1963 | || !op_valid_in_sets (set1, set2, op: vro->op1) |
1964 | || !op_valid_in_sets (set1, set2, op: vro->op2)) |
1965 | return false; |
1966 | } |
1967 | return true; |
1968 | } |
1969 | default: |
1970 | gcc_unreachable (); |
1971 | } |
1972 | } |
1973 | |
1974 | /* Clean the set of expressions SET1 that are no longer valid in SET1 or SET2. |
1975 | This means expressions that are made up of values we have no leaders for |
1976 | in SET1 or SET2. */ |
1977 | |
static void
clean (bitmap_set_t set1, bitmap_set_t set2 = NULL)
{
  /* Work on a snapshot of SET1 so removals below do not disturb the
     iteration.  */
  vec<pre_expr> exprs = sorted_array_from_bitmap_set (set: set1);
  pre_expr expr;
  int i;

  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      if (!valid_in_sets (set1, set2, expr))
	{
	  unsigned int val = get_expr_value_id (expr);
	  bitmap_clear_bit (&set1->expressions, get_expression_id (expr));
	  /* We are entered with possibly multiple expressions for a value
	     so before removing a value from the set see if there's an
	     expression for it left.  */
	  if (! bitmap_find_leader (set: set1, val))
	    bitmap_clear_bit (&set1->values, val);
	}
    }
  exprs.release ();

  /* With checking enabled verify we reached a fixpoint: every
     surviving expression must now be valid.  */
  if (flag_checking)
    {
      unsigned j;
      bitmap_iterator bi;
      FOR_EACH_EXPR_ID_IN_SET (set1, j, bi)
	gcc_assert (valid_in_sets (set1, set2, expression_for_id (j)));
    }
}
2008 | |
2009 | /* Clean the set of expressions that are no longer valid in SET because |
2010 | they are clobbered in BLOCK or because they trap and may not be executed. */ |
2011 | |
static void
prune_clobbered_mems (bitmap_set_t set, basic_block block)
{
  bitmap_iterator bi;
  unsigned i;
  /* Removal is deferred by one iteration so we never clear the bit
     the iterator is currently positioned on.  */
  unsigned to_remove = -1U;
  bool any_removed = false;

  FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
    {
      /* Remove queued expr.  */
      if (to_remove != -1U)
	{
	  bitmap_clear_bit (&set->expressions, to_remove);
	  any_removed = true;
	  to_remove = -1U;
	}

      pre_expr expr = expression_for_id (id: i);
      if (expr->kind == REFERENCE)
	{
	  vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	  if (ref->vuse)
	    {
	      gimple *def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
	      if (!gimple_nop_p (g: def_stmt)
		  /* If value-numbering provided a memory state for this
		     that dominates BLOCK we're done, otherwise we have
		     to check if the value dies in BLOCK.  */
		  && !(gimple_bb (g: def_stmt) != block
		       && dominated_by_p (CDI_DOMINATORS,
					  block, gimple_bb (g: def_stmt)))
		  && value_dies_in_block_x (expr, block))
		to_remove = i;
	    }
	  /* If the REFERENCE may trap make sure the block does not contain
	     a possible exit point.
	     ???  This is overly conservative if we translate AVAIL_OUT
	     as the available expression might be after the exit point.  */
	  if (BB_MAY_NOTRETURN (block)
	      && vn_reference_may_trap (ref))
	    to_remove = i;
	}
      else if (expr->kind == NARY)
	{
	  vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	  /* If the NARY may trap make sure the block does not contain
	     a possible exit point.
	     ???  This is overly conservative if we translate AVAIL_OUT
	     as the available expression might be after the exit point.  */
	  if (BB_MAY_NOTRETURN (block)
	      && vn_nary_may_trap (nary))
	    to_remove = i;
	}
    }

  /* Remove queued expr left over from the final iteration.  */
  if (to_remove != -1U)
    {
      bitmap_clear_bit (&set->expressions, to_remove);
      any_removed = true;
    }

  /* Above we only removed expressions, now clean the set of values
     which no longer have any corresponding expression.  We cannot
     clear the value at the time we remove an expression since there
     may be multiple expressions per value.
     If we'd queue possibly to be removed values we could use
     the bitmap_find_leader way to see if there's still an expression
     for it.  For some ratio of to be removed values and number of
     values/expressions in the set this might be faster than rebuilding
     the value-set.  */
  if (any_removed)
    {
      bitmap_clear (&set->values);
      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
	{
	  pre_expr expr = expression_for_id (id: i);
	  unsigned int value_id = get_expr_value_id (expr);
	  bitmap_set_bit (&set->values, value_id);
	}
    }
}
2095 | |
2096 | /* Compute the ANTIC set for BLOCK. |
2097 | |
2098 | If succs(BLOCK) > 1 then |
2099 | ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK) |
2100 | else if succs(BLOCK) == 1 then |
2101 | ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)]) |
2102 | |
2103 | ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK]) |
2104 | |
2105 | Note that clean() is deferred until after the iteration. */ |
2106 | |
static bool
compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
{
  bitmap_set_t S, old, ANTIC_OUT;
  edge e;
  edge_iterator ei;

  /* A block not yet visited always reports a change — its ANTIC_IN
     implicitly starts out as the maximal set.  */
  bool was_visited = BB_VISITED (block);
  bool changed = ! BB_VISITED (block);
  BB_VISITED (block) = 1;
  old = ANTIC_OUT = S = NULL;

  /* If any edges from predecessors are abnormal, antic_in is empty,
     so do nothing.  */
  if (block_has_abnormal_pred_edge)
    goto maybe_dump_sets;

  old = ANTIC_IN (block);
  ANTIC_OUT = bitmap_set_new ();

  /* If the block has no successors, ANTIC_OUT is empty.  */
  if (EDGE_COUNT (block->succs) == 0)
    ;
  /* If we have one successor, we could have some phi nodes to
     translate through.  */
  else if (single_succ_p (bb: block))
    {
      e = single_succ_edge (bb: block);
      gcc_assert (BB_VISITED (e->dest));
      phi_translate_set (dest: ANTIC_OUT, ANTIC_IN (e->dest), e);
    }
  /* If we have multiple successors, we take the intersection of all of
     them.  Note that in the case of loop exit phi nodes, we may have
     phis to translate through.  */
  else
    {
      size_t i;
      edge first = NULL;

      auto_vec<edge> worklist (EDGE_COUNT (block->succs));
      FOR_EACH_EDGE (e, ei, block->succs)
	{
	  if (!first
	      && BB_VISITED (e->dest))
	    first = e;
	  else if (BB_VISITED (e->dest))
	    worklist.quick_push (obj: e);
	  else
	    {
	      /* Unvisited successors get their ANTIC_IN replaced by the
		 maximal set to arrive at a maximum ANTIC_IN solution.
		 We can ignore them in the intersection operation and thus
		 need not explicitely represent that maximum solution.  */
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (stream: dump_file, format: "ANTIC_IN is MAX on %d->%d\n",
			 e->src->index, e->dest->index);
	    }
	}

      /* Of multiple successors we have to have visited one already
	 which is guaranteed by iteration order.  */
      gcc_assert (first != NULL);

      /* Seed ANTIC_OUT with the first visited successor's translated
	 ANTIC_IN, then intersect the rest in below.  */
      phi_translate_set (dest: ANTIC_OUT, ANTIC_IN (first->dest), e: first);

      /* If we have multiple successors we need to intersect the ANTIC_OUT
	 sets.  For values that's a simple intersection but for
	 expressions it is a union.  Given we want to have a single
	 expression per value in our sets we have to canonicalize.
	 Avoid randomness and running into cycles like for PR82129 and
	 canonicalize the expression we choose to the one with the
	 lowest id.  This requires we actually compute the union first.  */
      FOR_EACH_VEC_ELT (worklist, i, e)
	{
	  if (!gimple_seq_empty_p (s: phi_nodes (bb: e->dest)))
	    {
	      bitmap_set_t tmp = bitmap_set_new ();
	      phi_translate_set (dest: tmp, ANTIC_IN (e->dest), e);
	      bitmap_and_into (&ANTIC_OUT->values, &tmp->values);
	      bitmap_ior_into (&ANTIC_OUT->expressions, &tmp->expressions);
	      bitmap_set_free (set: tmp);
	    }
	  else
	    {
	      bitmap_and_into (&ANTIC_OUT->values, &ANTIC_IN (e->dest)->values);
	      bitmap_ior_into (&ANTIC_OUT->expressions,
			       &ANTIC_IN (e->dest)->expressions);
	    }
	}
      if (! worklist.is_empty ())
	{
	  /* Prune expressions not in the value set.  Removal is deferred
	     by one iteration so we never clear the bit the iterator is
	     currently positioned on.  */
	  bitmap_iterator bi;
	  unsigned int i;
	  unsigned int to_clear = -1U;
	  FOR_EACH_EXPR_ID_IN_SET (ANTIC_OUT, i, bi)
	    {
	      if (to_clear != -1U)
		{
		  bitmap_clear_bit (&ANTIC_OUT->expressions, to_clear);
		  to_clear = -1U;
		}
	      pre_expr expr = expression_for_id (id: i);
	      unsigned int value_id = get_expr_value_id (expr);
	      if (!bitmap_bit_p (&ANTIC_OUT->values, value_id))
		to_clear = i;
	    }
	  if (to_clear != -1U)
	    bitmap_clear_bit (&ANTIC_OUT->expressions, to_clear);
	}
    }

  /* Dump ANTIC_OUT before it's pruned.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    print_bitmap_set (outfile: dump_file, set: ANTIC_OUT, setname: "ANTIC_OUT", blockindex: block->index);

  /* Prune expressions that are clobbered in block and thus become
     invalid if translated from ANTIC_OUT to ANTIC_IN.  */
  prune_clobbered_mems (set: ANTIC_OUT, block);

  /* Generate ANTIC_OUT - TMP_GEN.  */
  S = bitmap_set_subtract_expressions (dest: ANTIC_OUT, TMP_GEN (block));

  /* Start ANTIC_IN with EXP_GEN - TMP_GEN.  */
  ANTIC_IN (block) = bitmap_set_subtract_expressions (EXP_GEN (block),
						      TMP_GEN (block));

  /* Then union in the ANTIC_OUT - TMP_GEN values,
     to get ANTIC_OUT U EXP_GEN - TMP_GEN */
  bitmap_ior_into (&ANTIC_IN (block)->values, &S->values);
  bitmap_ior_into (&ANTIC_IN (block)->expressions, &S->expressions);

  /* clean (ANTIC_IN (block)) is defered to after the iteration converged
     because it can cause non-convergence, see for example PR81181.  */

  /* Intersect ANTIC_IN with the old ANTIC_IN.  This is required until
     we properly represent the maximum expression set, thus not prune
     values without expressions during the iteration.  */
  if (was_visited
      && bitmap_and_into (&ANTIC_IN (block)->values, &old->values))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (stream: dump_file, format: "warning: intersecting with old ANTIC_IN "
		 "shrinks the set\n");
      /* Prune expressions not in the value set, again with deferred
	 removal for iterator safety.  */
      bitmap_iterator bi;
      unsigned int i;
      unsigned int to_clear = -1U;
      FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (block), i, bi)
	{
	  if (to_clear != -1U)
	    {
	      bitmap_clear_bit (&ANTIC_IN (block)->expressions, to_clear);
	      to_clear = -1U;
	    }
	  pre_expr expr = expression_for_id (id: i);
	  unsigned int value_id = get_expr_value_id (expr);
	  if (!bitmap_bit_p (&ANTIC_IN (block)->values, value_id))
	    to_clear = i;
	}
      if (to_clear != -1U)
	bitmap_clear_bit (&ANTIC_IN (block)->expressions, to_clear);
    }

  if (!bitmap_set_equal (a: old, ANTIC_IN (block)))
    changed = true;

 maybe_dump_sets:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (changed)
	fprintf (stream: dump_file, format: "[changed] ");
      print_bitmap_set (outfile: dump_file, ANTIC_IN (block), setname: "ANTIC_IN",
			blockindex: block->index);

      if (S)
	print_bitmap_set (outfile: dump_file, set: S, setname: "S", blockindex: block->index);
    }
  if (old)
    bitmap_set_free (set: old);
  if (S)
    bitmap_set_free (set: S);
  if (ANTIC_OUT)
    bitmap_set_free (set: ANTIC_OUT);
  return changed;
}
2293 | |
2294 | /* Compute PARTIAL_ANTIC for BLOCK. |
2295 | |
2296 | If succs(BLOCK) > 1 then |
2297 | PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not |
2298 | in ANTIC_OUT for all succ(BLOCK) |
2299 | else if succs(BLOCK) == 1 then |
2300 | PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)]) |
2301 | |
2302 | PA_IN[BLOCK] = clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK] - ANTIC_IN[BLOCK]) |
2303 | |
2304 | */ |
static void
compute_partial_antic_aux (basic_block block,
			   bool block_has_abnormal_pred_edge)
{
  bitmap_set_t old_PA_IN;
  bitmap_set_t PA_OUT;
  edge e;
  edge_iterator ei;
  unsigned long max_pa = param_max_partial_antic_length;

  old_PA_IN = PA_OUT = NULL;

  /* If any edges from predecessors are abnormal, antic_in is empty,
     so do nothing.  */
  if (block_has_abnormal_pred_edge)
    goto maybe_dump_sets;

  /* If there are too many partially anticipatable values in the
     block, phi_translate_set can take an exponential time: stop
     before the translation starts.  */
  if (max_pa
      && single_succ_p (bb: block)
      && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
    goto maybe_dump_sets;

  old_PA_IN = PA_IN (block);
  PA_OUT = bitmap_set_new ();

  /* If the block has no successors, ANTIC_OUT is empty.  */
  if (EDGE_COUNT (block->succs) == 0)
    ;
  /* If we have one successor, we could have some phi nodes to
     translate through.  Note that we can't phi translate across DFS
     back edges in partial antic, because it uses a union operation on
     the successors.  For recurrences like IV's, we will end up
     generating a new value in the set on each go around (i + 3 (VH.1)
     VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever.  */
  else if (single_succ_p (bb: block))
    {
      e = single_succ_edge (bb: block);
      if (!(e->flags & EDGE_DFS_BACK))
	phi_translate_set (dest: PA_OUT, PA_IN (e->dest), e);
    }
  /* If we have multiple successors, we take the union of all of
     them.  */
  else
    {
      size_t i;

      auto_vec<edge> worklist (EDGE_COUNT (block->succs));
      FOR_EACH_EDGE (e, ei, block->succs)
	{
	  /* Ignore DFS back edges, see the comment above.  */
	  if (e->flags & EDGE_DFS_BACK)
	    continue;
	  worklist.quick_push (obj: e);
	}
      if (worklist.length () > 0)
	{
	  FOR_EACH_VEC_ELT (worklist, i, e)
	    {
	      /* NOTE(review): this inner I intentionally shadows the
		 vector index above; it is only used for the bitmap
		 iterations below.  */
	      unsigned int i;
	      bitmap_iterator bi;

	      if (!gimple_seq_empty_p (s: phi_nodes (bb: e->dest)))
		{
		  /* With PHIs in the successor, union in the translated
		     ANTIC_IN and PA_IN sets.  */
		  bitmap_set_t antic_in = bitmap_set_new ();
		  phi_translate_set (dest: antic_in, ANTIC_IN (e->dest), e);
		  FOR_EACH_EXPR_ID_IN_SET (antic_in, i, bi)
		    bitmap_value_insert_into_set (set: PA_OUT,
						  expr: expression_for_id (id: i));
		  bitmap_set_free (set: antic_in);
		  bitmap_set_t pa_in = bitmap_set_new ();
		  phi_translate_set (dest: pa_in, PA_IN (e->dest), e);
		  FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
		    bitmap_value_insert_into_set (set: PA_OUT,
						  expr: expression_for_id (id: i));
		  bitmap_set_free (set: pa_in);
		}
	      else
		{
		  /* Without PHIs no translation is needed; union the
		     sets in directly.  */
		  FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (e->dest), i, bi)
		    bitmap_value_insert_into_set (set: PA_OUT,
						  expr: expression_for_id (id: i));
		  FOR_EACH_EXPR_ID_IN_SET (PA_IN (e->dest), i, bi)
		    bitmap_value_insert_into_set (set: PA_OUT,
						  expr: expression_for_id (id: i));
		}
	    }
	}
    }

  /* Prune expressions that are clobbered in block and thus become
     invalid if translated from PA_OUT to PA_IN.  */
  prune_clobbered_mems (set: PA_OUT, block);

  /* PA_IN starts with PA_OUT - TMP_GEN.
     Then we subtract things from ANTIC_IN.  */
  PA_IN (block) = bitmap_set_subtract_expressions (dest: PA_OUT, TMP_GEN (block));

  /* For partial antic, we want to put back in the phi results, since
     we will properly avoid making them partially antic over backedges.  */
  bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
  bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);

  /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
  bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));

  clean (PA_IN (block), ANTIC_IN (block));

 maybe_dump_sets:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (PA_OUT)
	print_bitmap_set (outfile: dump_file, set: PA_OUT, setname: "PA_OUT", blockindex: block->index);

      print_bitmap_set (outfile: dump_file, PA_IN (block), setname: "PA_IN", blockindex: block->index);
    }
  if (old_PA_IN)
    bitmap_set_free (set: old_PA_IN);
  if (PA_OUT)
    bitmap_set_free (set: PA_OUT);
}
2427 | |
2428 | /* Compute ANTIC and partial ANTIC sets. */ |
2429 | |
static void
compute_antic (void)
{
  bool changed = true;
  int num_iterations = 0;
  basic_block block;
  int i;
  edge_iterator ei;
  edge e;

  /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
     We pre-build the map of blocks with incoming abnormal edges here.  */
  auto_sbitmap has_abnormal_preds (last_basic_block_for_fn (cfun));
  bitmap_clear (has_abnormal_preds);

  FOR_ALL_BB_FN (block, cfun)
    {
      BB_VISITED (block) = 0;

      FOR_EACH_EDGE (e, ei, block->preds)
	if (e->flags & EDGE_ABNORMAL)
	  {
	    bitmap_set_bit (map: has_abnormal_preds, bitno: block->index);
	    break;
	  }

      /* While we are here, give empty ANTIC_IN sets to each block.  */
      ANTIC_IN (block) = bitmap_set_new ();
      if (do_partial_partial)
	PA_IN (block) = bitmap_set_new ();
    }

  /* At the exit block we anticipate nothing.  */
  BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1;

  /* For ANTIC computation we need a postorder that also guarantees that
     a block with a single successor is visited after its successor.
     RPO on the inverted CFG has this property.  */
  int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  int n = inverted_rev_post_order_compute (cfun, rpo);

  /* Seed the worklist with the exit block's predecessors; a block is
     re-queued whenever one of its successors' ANTIC_IN changes.  */
  auto_sbitmap worklist (last_basic_block_for_fn (cfun) + 1);
  bitmap_clear (worklist);
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    bitmap_set_bit (map: worklist, bitno: e->src->index);
  while (changed)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (stream: dump_file, format: "Starting iteration %d\n", num_iterations);
      /* ???  We need to clear our PHI translation cache here as the
	 ANTIC sets shrink and we restrict valid translations to
	 those having operands with leaders in ANTIC.  Same below
	 for PA ANTIC computation.  */
      num_iterations++;
      changed = false;
      for (i = 0; i < n; ++i)
	{
	  if (bitmap_bit_p (map: worklist, bitno: rpo[i]))
	    {
	      basic_block block = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
	      bitmap_clear_bit (map: worklist, bitno: block->index);
	      /* A changed ANTIC_IN propagates to the block's
		 predecessors, so queue them.  */
	      if (compute_antic_aux (block,
				     block_has_abnormal_pred_edge: bitmap_bit_p (map: has_abnormal_preds,
							bitno: block->index)))
		{
		  FOR_EACH_EDGE (e, ei, block->preds)
		    bitmap_set_bit (map: worklist, bitno: e->src->index);
		  changed = true;
		}
	    }
	}
      /* Theoretically possible, but *highly* unlikely.  */
      gcc_checking_assert (num_iterations < 500);
    }

  /* We have to clean after the dataflow problem converged as cleaning
     can cause non-convergence because it is based on expressions
     rather than values.  */
  FOR_EACH_BB_FN (block, cfun)
    clean (ANTIC_IN (block));

  statistics_histogram_event (cfun, "compute_antic iterations",
			      num_iterations);

  if (do_partial_partial)
    {
      /* For partial antic we ignore backedges and thus we do not need
	 to perform any iteration when we process blocks in rpo.  */
      for (i = 0; i < n; ++i)
	{
	  basic_block block = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
	  compute_partial_antic_aux (block,
				     block_has_abnormal_pred_edge: bitmap_bit_p (map: has_abnormal_preds,
							bitno: block->index));
	}
    }

  free (ptr: rpo);
}
2529 | |
2530 | |
2531 | /* Inserted expressions are placed onto this worklist, which is used |
2532 | for performing quick dead code elimination of insertions we made |
2533 | that didn't turn out to be necessary. */ |
2534 | static bitmap inserted_exprs; |
2535 | |
2536 | /* The actual worker for create_component_ref_by_pieces. */ |
2537 | |
2538 | static tree |
2539 | create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref, |
2540 | unsigned int *operand, gimple_seq *stmts) |
2541 | { |
2542 | vn_reference_op_t currop = &ref->operands[*operand]; |
2543 | tree genop; |
2544 | ++*operand; |
2545 | switch (currop->opcode) |
2546 | { |
2547 | case CALL_EXPR: |
2548 | gcc_unreachable (); |
2549 | |
2550 | case MEM_REF: |
2551 | { |
2552 | tree baseop = create_component_ref_by_pieces_1 (block, ref, operand, |
2553 | stmts); |
2554 | if (!baseop) |
2555 | return NULL_TREE; |
2556 | tree offset = currop->op0; |
2557 | if (TREE_CODE (baseop) == ADDR_EXPR |
2558 | && handled_component_p (TREE_OPERAND (baseop, 0))) |
2559 | { |
2560 | poly_int64 off; |
2561 | tree base; |
2562 | base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0), |
2563 | &off); |
2564 | gcc_assert (base); |
2565 | offset = int_const_binop (PLUS_EXPR, offset, |
2566 | build_int_cst (TREE_TYPE (offset), |
2567 | off)); |
2568 | baseop = build_fold_addr_expr (base); |
2569 | } |
2570 | genop = build2 (MEM_REF, currop->type, baseop, offset); |
2571 | MR_DEPENDENCE_CLIQUE (genop) = currop->clique; |
2572 | MR_DEPENDENCE_BASE (genop) = currop->base; |
2573 | REF_REVERSE_STORAGE_ORDER (genop) = currop->reverse; |
2574 | return genop; |
2575 | } |
2576 | |
2577 | case TARGET_MEM_REF: |
2578 | { |
2579 | tree genop0 = NULL_TREE, genop1 = NULL_TREE; |
2580 | vn_reference_op_t nextop = &ref->operands[(*operand)++]; |
2581 | tree baseop = create_component_ref_by_pieces_1 (block, ref, operand, |
2582 | stmts); |
2583 | if (!baseop) |
2584 | return NULL_TREE; |
2585 | if (currop->op0) |
2586 | { |
2587 | genop0 = find_or_generate_expression (block, currop->op0, stmts); |
2588 | if (!genop0) |
2589 | return NULL_TREE; |
2590 | } |
2591 | if (nextop->op0) |
2592 | { |
2593 | genop1 = find_or_generate_expression (block, nextop->op0, stmts); |
2594 | if (!genop1) |
2595 | return NULL_TREE; |
2596 | } |
2597 | genop = build5 (TARGET_MEM_REF, currop->type, |
2598 | baseop, currop->op2, genop0, currop->op1, genop1); |
2599 | |
2600 | MR_DEPENDENCE_CLIQUE (genop) = currop->clique; |
2601 | MR_DEPENDENCE_BASE (genop) = currop->base; |
2602 | return genop; |
2603 | } |
2604 | |
2605 | case ADDR_EXPR: |
2606 | if (currop->op0) |
2607 | { |
2608 | gcc_assert (is_gimple_min_invariant (currop->op0)); |
2609 | return currop->op0; |
2610 | } |
2611 | /* Fallthrough. */ |
2612 | case REALPART_EXPR: |
2613 | case IMAGPART_EXPR: |
2614 | case VIEW_CONVERT_EXPR: |
2615 | { |
2616 | tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand, |
2617 | stmts); |
2618 | if (!genop0) |
2619 | return NULL_TREE; |
2620 | return fold_build1 (currop->opcode, currop->type, genop0); |
2621 | } |
2622 | |
2623 | case WITH_SIZE_EXPR: |
2624 | { |
2625 | tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand, |
2626 | stmts); |
2627 | if (!genop0) |
2628 | return NULL_TREE; |
2629 | tree genop1 = find_or_generate_expression (block, currop->op0, stmts); |
2630 | if (!genop1) |
2631 | return NULL_TREE; |
2632 | return fold_build2 (currop->opcode, currop->type, genop0, genop1); |
2633 | } |
2634 | |
2635 | case BIT_FIELD_REF: |
2636 | { |
2637 | tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand, |
2638 | stmts); |
2639 | if (!genop0) |
2640 | return NULL_TREE; |
2641 | tree op1 = currop->op0; |
2642 | tree op2 = currop->op1; |
2643 | tree t = build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2); |
2644 | REF_REVERSE_STORAGE_ORDER (t) = currop->reverse; |
2645 | return fold (t); |
2646 | } |
2647 | |
2648 | /* For array ref vn_reference_op's, operand 1 of the array ref |
2649 | is op0 of the reference op and operand 3 of the array ref is |
2650 | op1. */ |
2651 | case ARRAY_RANGE_REF: |
2652 | case ARRAY_REF: |
2653 | { |
2654 | tree genop0; |
2655 | tree genop1 = currop->op0; |
2656 | tree genop2 = currop->op1; |
2657 | tree genop3 = currop->op2; |
2658 | genop0 = create_component_ref_by_pieces_1 (block, ref, operand, |
2659 | stmts); |
2660 | if (!genop0) |
2661 | return NULL_TREE; |
2662 | genop1 = find_or_generate_expression (block, genop1, stmts); |
2663 | if (!genop1) |
2664 | return NULL_TREE; |
2665 | if (genop2) |
2666 | { |
2667 | tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0)); |
2668 | /* Drop zero minimum index if redundant. */ |
2669 | if (integer_zerop (genop2) |
2670 | && (!domain_type |
2671 | || integer_zerop (TYPE_MIN_VALUE (domain_type)))) |
2672 | genop2 = NULL_TREE; |
2673 | else |
2674 | { |
2675 | genop2 = find_or_generate_expression (block, genop2, stmts); |
2676 | if (!genop2) |
2677 | return NULL_TREE; |
2678 | } |
2679 | } |
2680 | if (genop3) |
2681 | { |
2682 | tree elmt_type = TREE_TYPE (TREE_TYPE (genop0)); |
2683 | /* We can't always put a size in units of the element alignment |
2684 | here as the element alignment may be not visible. See |
2685 | PR43783. Simply drop the element size for constant |
2686 | sizes. */ |
2687 | if (TREE_CODE (genop3) == INTEGER_CST |
2688 | && TREE_CODE (TYPE_SIZE_UNIT (elmt_type)) == INTEGER_CST |
2689 | && wi::eq_p (x: wi::to_offset (TYPE_SIZE_UNIT (elmt_type)), |
2690 | y: (wi::to_offset (t: genop3) |
2691 | * vn_ref_op_align_unit (op: currop)))) |
2692 | genop3 = NULL_TREE; |
2693 | else |
2694 | { |
2695 | genop3 = find_or_generate_expression (block, genop3, stmts); |
2696 | if (!genop3) |
2697 | return NULL_TREE; |
2698 | } |
2699 | } |
2700 | return build4 (currop->opcode, currop->type, genop0, genop1, |
2701 | genop2, genop3); |
2702 | } |
2703 | case COMPONENT_REF: |
2704 | { |
2705 | tree op0; |
2706 | tree op1; |
2707 | tree genop2 = currop->op1; |
2708 | op0 = create_component_ref_by_pieces_1 (block, ref, operand, stmts); |
2709 | if (!op0) |
2710 | return NULL_TREE; |
2711 | /* op1 should be a FIELD_DECL, which are represented by themselves. */ |
2712 | op1 = currop->op0; |
2713 | if (genop2) |
2714 | { |
2715 | genop2 = find_or_generate_expression (block, genop2, stmts); |
2716 | if (!genop2) |
2717 | return NULL_TREE; |
2718 | } |
2719 | return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1, genop2); |
2720 | } |
2721 | |
2722 | case SSA_NAME: |
2723 | { |
2724 | genop = find_or_generate_expression (block, currop->op0, stmts); |
2725 | return genop; |
2726 | } |
2727 | case STRING_CST: |
2728 | case INTEGER_CST: |
2729 | case POLY_INT_CST: |
2730 | case COMPLEX_CST: |
2731 | case VECTOR_CST: |
2732 | case REAL_CST: |
2733 | case CONSTRUCTOR: |
2734 | case VAR_DECL: |
2735 | case PARM_DECL: |
2736 | case CONST_DECL: |
2737 | case RESULT_DECL: |
2738 | case FUNCTION_DECL: |
2739 | return currop->op0; |
2740 | |
2741 | default: |
2742 | gcc_unreachable (); |
2743 | } |
2744 | } |
2745 | |
2746 | /* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the |
2747 | COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up with |
2748 | trying to rename aggregates into ssa form directly, which is a no no. |
2749 | |
2750 | Thus, this routine doesn't create temporaries, it just builds a |
2751 | single access expression for the array, calling |
2752 | find_or_generate_expression to build the innermost pieces. |
2753 | |
2754 | This function is a subroutine of create_expression_by_pieces, and |
   should not be called on its own unless you really know what you
2756 | are doing. */ |
2757 | |
2758 | static tree |
2759 | create_component_ref_by_pieces (basic_block block, vn_reference_t ref, |
2760 | gimple_seq *stmts) |
2761 | { |
2762 | unsigned int op = 0; |
2763 | return create_component_ref_by_pieces_1 (block, ref, operand: &op, stmts); |
2764 | } |
2765 | |
2766 | /* Find a simple leader for an expression, or generate one using |
2767 | create_expression_by_pieces from a NARY expression for the value. |
2768 | BLOCK is the basic_block we are looking for leaders in. |
2769 | OP is the tree expression to find a leader for or generate. |
2770 | Returns the leader or NULL_TREE on failure. */ |
2771 | |
2772 | static tree |
2773 | find_or_generate_expression (basic_block block, tree op, gimple_seq *stmts) |
2774 | { |
2775 | /* Constants are always leaders. */ |
2776 | if (is_gimple_min_invariant (op)) |
2777 | return op; |
2778 | |
2779 | gcc_assert (TREE_CODE (op) == SSA_NAME); |
2780 | vn_ssa_aux_t info = VN_INFO (op); |
2781 | unsigned int lookfor = info->value_id; |
2782 | if (value_id_constant_p (v: lookfor)) |
2783 | return info->valnum; |
2784 | |
2785 | pre_expr leader = bitmap_find_leader (AVAIL_OUT (block), val: lookfor); |
2786 | if (leader) |
2787 | { |
2788 | if (leader->kind == NAME) |
2789 | return PRE_EXPR_NAME (leader); |
2790 | else if (leader->kind == CONSTANT) |
2791 | return PRE_EXPR_CONSTANT (leader); |
2792 | |
2793 | /* Defer. */ |
2794 | return NULL_TREE; |
2795 | } |
2796 | gcc_assert (!value_id_constant_p (lookfor)); |
2797 | |
2798 | /* It must be a complex expression, so generate it recursively. Note |
2799 | that this is only necessary to handle gcc.dg/tree-ssa/ssa-pre28.c |
2800 | where the insert algorithm fails to insert a required expression. */ |
2801 | bitmap exprset = value_expressions[lookfor]; |
2802 | bitmap_iterator bi; |
2803 | unsigned int i; |
2804 | EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi) |
2805 | { |
2806 | pre_expr temp = expression_for_id (id: i); |
2807 | /* We cannot insert random REFERENCE expressions at arbitrary |
2808 | places. We can insert NARYs which eventually re-materializes |
2809 | its operand values. */ |
2810 | if (temp->kind == NARY) |
2811 | return create_expression_by_pieces (block, temp, stmts, |
2812 | TREE_TYPE (op)); |
2813 | } |
2814 | |
2815 | /* Defer. */ |
2816 | return NULL_TREE; |
2817 | } |
2818 | |
/* Create an expression in pieces, so that we can handle very complex
   expressions that may be ANTIC, but not necessary GIMPLE.
   BLOCK is the basic block the expression will be inserted into,
   EXPR is the expression to insert (in value form)
   STMTS is a statement list to append the necessary insertions into.
   TYPE, if non-NULL, overrides the type derived from EXPR for the
   final value.

   This function will die if we hit some value that shouldn't be
   ANTIC but is (IE there is no leader for it, or its components).
   The function returns NULL_TREE in case a different antic expression
   has to be inserted first.
   This function may also generate expressions that are themselves
   partially or fully redundant.  Those that are will be either made
   fully redundant during the next iteration of insert (for partially
   redundant ones), or eliminated by eliminate (for fully redundant
   ones).  */

static tree
create_expression_by_pieces (basic_block block, pre_expr expr,
			     gimple_seq *stmts, tree type)
{
  tree name;
  tree folded;
  /* Statements generated for sub-expressions; only appended to STMTS
     if the final result actually needs them.  */
  gimple_seq forced_stmts = NULL;
  unsigned int value_id;
  gimple_stmt_iterator gsi;
  tree exprtype = type ? type : get_expr_type (e: expr);
  pre_expr nameexpr;
  gassign *newstmt;

  switch (expr->kind)
    {
    /* We may hit the NAME/CONSTANT case if we have to convert types
       that value numbering saw through.  */
    case NAME:
      folded = PRE_EXPR_NAME (expr);
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (folded))
	return NULL_TREE;
      if (useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
	return folded;
      break;
    case CONSTANT:
      {
	folded = PRE_EXPR_CONSTANT (expr);
	tree tem = fold_convert (exprtype, folded);
	if (is_gimple_min_invariant (tem))
	  return tem;
	break;
      }
    case REFERENCE:
      if (PRE_EXPR_REFERENCE (expr)->operands[0].opcode == CALL_EXPR)
	{
	  /* Re-materialize a call: operand 0 describes the callee
	     (op0 is the function, op1 the static chain; an internal
	     function is identified via the clique field), operands
	     1 onward are the arguments.  */
	  vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
	  unsigned int operand = 1;
	  vn_reference_op_t currop = &ref->operands[0];
	  tree sc = NULL_TREE;
	  tree fn = NULL_TREE;
	  if (currop->op0)
	    {
	      fn = find_or_generate_expression (block, op: currop->op0, stmts);
	      if (!fn)
		return NULL_TREE;
	    }
	  if (currop->op1)
	    {
	      sc = find_or_generate_expression (block, op: currop->op1, stmts);
	      if (!sc)
		return NULL_TREE;
	    }
	  auto_vec<tree> args (ref->operands.length () - 1);
	  while (operand < ref->operands.length ())
	    {
	      tree arg = create_component_ref_by_pieces_1 (block, ref,
							   operand: &operand, stmts);
	      if (!arg)
		return NULL_TREE;
	      args.quick_push (obj: arg);
	    }
	  gcall *call;
	  if (currop->op0)
	    {
	      call = gimple_build_call_vec (fn, args);
	      gimple_call_set_fntype (call_stmt: call, fntype: currop->type);
	    }
	  else
	    call = gimple_build_call_internal_vec ((internal_fn)currop->clique,
						   args);
	  gimple_set_location (g: call, location: expr->loc);
	  if (sc)
	    gimple_call_set_chain (call_stmt: call, chain: sc);
	  tree forcedname = make_ssa_name (var: ref->type);
	  gimple_call_set_lhs (gs: call, lhs: forcedname);
	  /* There's no CCP pass after PRE which would re-compute alignment
	     information so make sure we re-materialize this here.  The
	     unsigned test below accepts exactly two or three arguments
	     (fewer wrap around and fail the comparison).  */
	  if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED)
	      && args.length () - 2 <= 1
	      && tree_fits_uhwi_p (args[1])
	      && (args.length () != 3 || tree_fits_uhwi_p (args[2])))
	    {
	      unsigned HOST_WIDE_INT halign = tree_to_uhwi (args[1]);
	      unsigned HOST_WIDE_INT hmisalign
		= args.length () == 3 ? tree_to_uhwi (args[2]) : 0;
	      /* Only record sane values: power-of-two alignment, a
		 misalignment within it, and non-zero when truncated.  */
	      if ((halign & (halign - 1)) == 0
		  && (hmisalign & ~(halign - 1)) == 0
		  && (unsigned int)halign != 0)
		set_ptr_info_alignment (get_ptr_info (forcedname),
					halign, hmisalign);
	    }
	  gimple_set_vuse (g: call, BB_LIVE_VOP_ON_EXIT (block));
	  gimple_seq_add_stmt_without_update (&forced_stmts, call);
	  folded = forcedname;
	}
      else
	{
	  /* A non-call reference: build the access expression and
	     assign it to a fresh temporary.  */
	  folded = create_component_ref_by_pieces (block,
						   PRE_EXPR_REFERENCE (expr),
						   stmts);
	  if (!folded)
	    return NULL_TREE;
	  name = make_temp_ssa_name (type: exprtype, NULL, name: "pretmp" );
	  newstmt = gimple_build_assign (name, folded);
	  gimple_set_location (g: newstmt, location: expr->loc);
	  gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
	  gimple_set_vuse (g: newstmt, BB_LIVE_VOP_ON_EXIT (block));
	  folded = name;
	}
      break;
    case NARY:
      {
	vn_nary_op_t nary = PRE_EXPR_NARY (expr);
	tree *genop = XALLOCAVEC (tree, nary->length);
	unsigned i;
	for (i = 0; i < nary->length; ++i)
	  {
	    genop[i] = find_or_generate_expression (block, op: nary->op[i], stmts);
	    if (!genop[i])
	      return NULL_TREE;
	    /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR.  It
	       may have conversions stripped.  */
	    if (nary->opcode == POINTER_PLUS_EXPR)
	      {
		if (i == 0)
		  genop[i] = gimple_convert (seq: &forced_stmts,
					     type: nary->type, op: genop[i]);
		else if (i == 1)
		  genop[i] = gimple_convert (seq: &forced_stmts,
					     sizetype, op: genop[i]);
	      }
	    else
	      genop[i] = gimple_convert (seq: &forced_stmts,
					 TREE_TYPE (nary->op[i]), op: genop[i]);
	  }
	if (nary->opcode == CONSTRUCTOR)
	  {
	    vec<constructor_elt, va_gc> *elts = NULL;
	    for (i = 0; i < nary->length; ++i)
	      CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
	    folded = build_constructor (nary->type, elts);
	    name = make_temp_ssa_name (type: exprtype, NULL, name: "pretmp" );
	    newstmt = gimple_build_assign (name, folded);
	    gimple_set_location (g: newstmt, location: expr->loc);
	    gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
	    folded = name;
	  }
	else
	  {
	    /* Build (and possibly simplify) the n-ary operation,
	       queueing any helper statements on FORCED_STMTS.  */
	    switch (nary->length)
	      {
	      case 1:
		folded = gimple_build (seq: &forced_stmts, loc: expr->loc,
				       code: nary->opcode, type: nary->type, ops: genop[0]);
		break;
	      case 2:
		folded = gimple_build (seq: &forced_stmts, loc: expr->loc, code: nary->opcode,
				       type: nary->type, ops: genop[0], ops: genop[1]);
		break;
	      case 3:
		folded = gimple_build (seq: &forced_stmts, loc: expr->loc, code: nary->opcode,
				       type: nary->type, ops: genop[0], ops: genop[1],
				       ops: genop[2]);
		break;
	      default:
		gcc_unreachable ();
	      }
	  }
      }
      break;
    default:
      gcc_unreachable ();
    }

  folded = gimple_convert (seq: &forced_stmts, type: exprtype, op: folded);

  /* If there is nothing to insert, return the simplified result.  */
  if (gimple_seq_empty_p (s: forced_stmts))
    return folded;
  /* If we simplified to a constant return it and discard eventually
     built stmts.  */
  if (is_gimple_min_invariant (folded))
    {
      gimple_seq_discard (forced_stmts);
      return folded;
    }
  /* Likewise if we simplified to sth not queued for insertion.  */
  bool found = false;
  gsi = gsi_last (seq&: forced_stmts);
  for (; !gsi_end_p (i: gsi); gsi_prev (i: &gsi))
    {
      gimple *stmt = gsi_stmt (i: gsi);
      tree forcedname = gimple_get_lhs (stmt);
      if (forcedname == folded)
	{
	  found = true;
	  break;
	}
    }
  if (! found)
    {
      gimple_seq_discard (forced_stmts);
      return folded;
    }
  gcc_assert (TREE_CODE (folded) == SSA_NAME);

  /* If we have any intermediate expressions to the value sets, add them
     to the value sets and chain them in the instruction stream.  */
  if (forced_stmts)
    {
      gsi = gsi_start (seq&: forced_stmts);
      for (; !gsi_end_p (i: gsi); gsi_next (i: &gsi))
	{
	  gimple *stmt = gsi_stmt (i: gsi);
	  tree forcedname = gimple_get_lhs (stmt);
	  pre_expr nameexpr;

	  if (forcedname != folded)
	    {
	      /* Intermediate results get a fresh value number of their
		 own and are made available at the end of BLOCK.  */
	      vn_ssa_aux_t vn_info = VN_INFO (forcedname);
	      vn_info->valnum = forcedname;
	      vn_info->value_id = get_next_value_id ();
	      nameexpr = get_or_alloc_expr_for_name (name: forcedname);
	      add_to_value (v: vn_info->value_id, e: nameexpr);
	      if (NEW_SETS (block))
		bitmap_value_replace_in_set (NEW_SETS (block), expr: nameexpr);
	      bitmap_value_replace_in_set (AVAIL_OUT (block), expr: nameexpr);
	    }

	  bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
	}
      gimple_seq_add_seq (stmts, forced_stmts);
    }

  name = folded;

  /* Fold the last statement.  */
  gsi = gsi_last (seq&: *stmts);
  if (fold_stmt_inplace (&gsi))
    update_stmt (s: gsi_stmt (i: gsi));

  /* Add a value number to the temporary.
     The value may already exist in either NEW_SETS, or AVAIL_OUT, because
     we are creating the expression by pieces, and this particular piece of
     the expression may have been represented.  There is no harm in replacing
     here.  */
  value_id = get_expr_value_id (expr);
  vn_ssa_aux_t vn_info = VN_INFO (name);
  vn_info->value_id = value_id;
  vn_info->valnum = vn_valnum_from_value_id (val: value_id);
  if (vn_info->valnum == NULL_TREE)
    vn_info->valnum = name;
  gcc_assert (vn_info->valnum != NULL_TREE);
  nameexpr = get_or_alloc_expr_for_name (name);
  add_to_value (v: value_id, e: nameexpr);
  if (NEW_SETS (block))
    bitmap_value_replace_in_set (NEW_SETS (block), expr: nameexpr);
  bitmap_value_replace_in_set (AVAIL_OUT (block), expr: nameexpr);

  pre_stats.insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (stream: dump_file, format: "Inserted " );
      print_gimple_stmt (dump_file, gsi_stmt (i: gsi_last (seq&: *stmts)), 0);
      fprintf (stream: dump_file, format: " in predecessor %d (%04d)\n" ,
	       block->index, value_id);
    }

  return name;
}
3105 | |
3106 | |
/* Insert the to-be-made-available values of expression EXPRNUM for each
   predecessor, stored in AVAIL, into the predecessors of BLOCK, and
   merge the result with a phi node, given the same value number as
   the expression.  Return true if we have inserted new stuff.  */

static bool
insert_into_preds_of_block (basic_block block, unsigned int exprnum,
			    vec<pre_expr> &avail)
{
  pre_expr expr = expression_for_id (id: exprnum);
  pre_expr newphi;
  unsigned int val = get_expr_value_id (expr);
  edge pred;
  bool insertions = false;
  /* Set when we decide (or discover) that no PHI should be created.  */
  bool nophi = false;
  basic_block bprime;
  pre_expr eprime;
  edge_iterator ei;
  tree type = get_expr_type (e: expr);
  tree temp;
  gphi *phi;

  /* Make sure we aren't creating an induction variable.  */
  if (bb_loop_depth (block) > 0 && EDGE_COUNT (block->preds) == 2)
    {
      bool firstinsideloop = false;
      bool secondinsideloop = false;
      firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
					       EDGE_PRED (block, 0)->src);
      secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
						EDGE_PRED (block, 1)->src);
      /* Induction variables only have one edge inside the loop.  */
      if ((firstinsideloop ^ secondinsideloop)
	  && expr->kind != REFERENCE)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (stream: dump_file, format: "Skipping insertion of phi for partial "
		     "redundancy: Looks like an induction variable\n" );
	  nophi = true;
	}
    }

  /* Make the necessary insertions.  */
  FOR_EACH_EDGE (pred, ei, block->preds)
    {
      /* When we are not inserting a PHI node do not bother inserting
	 into places that do not dominate the anticipated computations.  */
      if (nophi && !dominated_by_p (CDI_DOMINATORS, block, pred->src))
	continue;
      gimple_seq stmts = NULL;
      tree builtexpr;
      bprime = pred->src;
      eprime = avail[pred->dest_idx];
      builtexpr = create_expression_by_pieces (block: bprime, expr: eprime,
					       stmts: &stmts, type);
      gcc_assert (!(pred->flags & EDGE_ABNORMAL));
      if (!gimple_seq_empty_p (s: stmts))
	{
	  basic_block new_bb = gsi_insert_seq_on_edge_immediate (pred, stmts);
	  gcc_assert (! new_bb);
	  insertions = true;
	}
      if (!builtexpr)
	{
	  /* We cannot insert a PHI node if we failed to insert
	     on one edge.  */
	  nophi = true;
	  continue;
	}
      /* Record the (possibly just inserted) leader so the PHI
	 construction below can pick it up.  */
      if (is_gimple_min_invariant (builtexpr))
	avail[pred->dest_idx] = get_or_alloc_expr_for_constant (constant: builtexpr);
      else
	avail[pred->dest_idx] = get_or_alloc_expr_for_name (name: builtexpr);
    }
  /* If we didn't want a phi node, and we made insertions, we still have
     inserted new stuff, and thus return true.  If we didn't want a phi node,
     and didn't make insertions, we haven't added anything new, so return
     false.  */
  if (nophi && insertions)
    return true;
  else if (nophi && !insertions)
    return false;

  /* Now build a phi for the new variable.  */
  temp = make_temp_ssa_name (type, NULL, name: "prephitmp" );
  phi = create_phi_node (temp, block);

  /* The PHI carries the value number of the original expression.  */
  vn_ssa_aux_t vn_info = VN_INFO (temp);
  vn_info->value_id = val;
  vn_info->valnum = vn_valnum_from_value_id (val);
  if (vn_info->valnum == NULL_TREE)
    vn_info->valnum = temp;
  bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
  FOR_EACH_EDGE (pred, ei, block->preds)
    {
      pre_expr ae = avail[pred->dest_idx];
      gcc_assert (get_expr_type (ae) == type
		  || useless_type_conversion_p (type, get_expr_type (ae)));
      if (ae->kind == CONSTANT)
	add_phi_arg (phi, unshare_expr (PRE_EXPR_CONSTANT (ae)),
		     pred, UNKNOWN_LOCATION);
      else
	add_phi_arg (phi, PRE_EXPR_NAME (ae), pred, UNKNOWN_LOCATION);
    }

  newphi = get_or_alloc_expr_for_name (name: temp);
  add_to_value (v: val, e: newphi);

  /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
     this insertion, since we test for the existence of this value in PHI_GEN
     before proceeding with the partial redundancy checks in insert_aux.

     The value may exist in AVAIL_OUT, in particular, it could be represented
     by the expression we are trying to eliminate, in which case we want the
     replacement to occur.  If it's not existing in AVAIL_OUT, we want it
     inserted there.

     Similarly, to the PHI_GEN case, the value should not exist in NEW_SETS of
     this block, because if it did, it would have existed in our dominator's
     AVAIL_OUT, and would have been skipped due to the full redundancy check.
  */

  bitmap_insert_into_set (PHI_GEN (block), expr: newphi);
  bitmap_value_replace_in_set (AVAIL_OUT (block),
			       expr: newphi);
  if (NEW_SETS (block))
    bitmap_insert_into_set (NEW_SETS (block), expr: newphi);

  /* If we insert a PHI node for a conversion of another PHI node
     in the same basic-block try to preserve range information.
     This is important so that followup loop passes receive optimal
     number of iteration analysis results.  See PR61743.  */
  if (expr->kind == NARY
      && CONVERT_EXPR_CODE_P (expr->u.nary->opcode)
      && TREE_CODE (expr->u.nary->op[0]) == SSA_NAME
      && gimple_bb (SSA_NAME_DEF_STMT (expr->u.nary->op[0])) == block
      && INTEGRAL_TYPE_P (type)
      && INTEGRAL_TYPE_P (TREE_TYPE (expr->u.nary->op[0]))
      && (TYPE_PRECISION (type)
	  >= TYPE_PRECISION (TREE_TYPE (expr->u.nary->op[0])))
      && SSA_NAME_RANGE_INFO (expr->u.nary->op[0]))
    {
      value_range r;
      if (get_range_query (cfun)->range_of_expr (r, expr: expr->u.nary->op[0])
	  && !r.undefined_p ()
	  && !r.varying_p ()
	  && !wi::neg_p (x: r.lower_bound (), sgn: SIGNED)
	  && !wi::neg_p (x: r.upper_bound (), sgn: SIGNED))
	{
	  /* Just handle extension and sign-changes of all-positive ranges.  */
	  range_cast (r, type);
	  set_range_info (temp, r);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (stream: dump_file, format: "Created phi " );
      print_gimple_stmt (dump_file, phi, 0);
      fprintf (stream: dump_file, format: " in block %d (%04d)\n" , block->index, val);
    }
  pre_stats.phis++;
  return true;
}
3271 | |
3272 | |
3273 | |
3274 | /* Perform insertion of partially redundant or hoistable values. |
3275 | For BLOCK, do the following: |
3276 | 1. Propagate the NEW_SETS of the dominator into the current block. |
3277 | If the block has multiple predecessors, |
3278 | 2a. Iterate over the ANTIC expressions for the block to see if |
3279 | any of them are partially redundant. |
3280 | 2b. If so, insert them into the necessary predecessors to make |
3281 | the expression fully redundant. |
3282 | 2c. Insert a new PHI merging the values of the predecessors. |
3283 | 2d. Insert the new PHI, and the new expressions, into the |
3284 | NEW_SETS set. |
3285 | If the block has multiple successors, |
3286 | 3a. Iterate over the ANTIC values for the block to see if |
3287 | any of them are good candidates for hoisting. |
3288 | 3b. If so, insert expressions computing the values in BLOCK, |
3289 | and add the new expressions into the NEW_SETS set. |
3290 | 4. Recursively call ourselves on the dominator children of BLOCK. |
3291 | |
   Steps 1, 2a, and 4 are done by insert_aux.  2b, 2c and 2d are done by
   do_pre_regular_insertion and do_pre_partial_partial_insertion.  3a and 3b are
3294 | done in do_hoist_insertion. |
3295 | */ |
3296 | |
/* Perform regular (fully anticipatable) PHI insertion for BLOCK, whose
   immediate dominator is DOM, considering the ANTIC expressions EXPRS.
   For each expression that is partially redundant across BLOCK's
   predecessors, insert the missing computations in the predecessors and
   merge them with a PHI node.  Returns true if anything new was
   inserted.  */

static bool
do_pre_regular_insertion (basic_block block, basic_block dom,
			  vec<pre_expr> exprs)
{
  bool new_stuff = false;
  pre_expr expr;
  auto_vec<pre_expr, 2> avail;
  int i;

  /* AVAIL holds, per predecessor edge, the leader (or translated
     expression) for the value under consideration.  */
  avail.safe_grow (EDGE_COUNT (block->preds), exact: true);

  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      /* Only NARY and REFERENCE expressions compute something that can
	 be partially redundant.  */
      if (expr->kind == NARY
	  || expr->kind == REFERENCE)
	{
	  unsigned int val;
	  bool by_some = false;
	  bool cant_insert = false;
	  bool all_same = true;
	  unsigned num_inserts = 0;
	  unsigned num_const = 0;
	  pre_expr first_s = NULL;
	  edge pred;
	  basic_block bprime;
	  pre_expr eprime = NULL;
	  edge_iterator ei;
	  pre_expr edoubleprime = NULL;
	  bool do_insertion = false;

	  val = get_expr_value_id (expr);
	  /* Skip values for which we already inserted a PHI.  */
	  if (bitmap_set_contains_value (PHI_GEN (block), value_id: val))
	    continue;
	  /* Skip values that are fully redundant (available from the
	     dominator already).  */
	  if (bitmap_set_contains_value (AVAIL_OUT (dom), value_id: val))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (stream: dump_file, format: "Found fully redundant value: " );
		  print_pre_expr (outfile: dump_file, expr);
		  fprintf (stream: dump_file, format: "\n" );
		}
	      continue;
	    }

	  FOR_EACH_EDGE (pred, ei, block->preds)
	    {
	      unsigned int vprime;

	      /* We should never run insertion for the exit block
	         and so not come across fake pred edges.  */
	      gcc_assert (!(pred->flags & EDGE_FAKE));
	      bprime = pred->src;
	      /* We are looking at ANTIC_OUT of bprime.  */
	      eprime = phi_translate (NULL, expr, ANTIC_IN (block), NULL, e: pred);

	      /* eprime will generally only be NULL if the
		 value of the expression, translated
		 through the PHI for this predecessor, is
		 undefined.  If that is the case, we can't
		 make the expression fully redundant,
		 because its value is undefined along a
		 predecessor path.  We can thus break out
		 early because it doesn't matter what the
		 rest of the results are.  */
	      if (eprime == NULL)
		{
		  avail[pred->dest_idx] = NULL;
		  cant_insert = true;
		  break;
		}

	      vprime = get_expr_value_id (expr: eprime);
	      edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
						 val: vprime);
	      if (edoubleprime == NULL)
		{
		  /* Not available in this predecessor - would need an
		     insertion there.  */
		  avail[pred->dest_idx] = eprime;
		  all_same = false;
		  num_inserts++;
		}
	      else
		{
		  avail[pred->dest_idx] = edoubleprime;
		  by_some = true;
		  if (edoubleprime->kind == CONSTANT)
		    num_const++;
		  /* We want to perform insertions to remove a redundancy on
		     a path in the CFG we want to optimize for speed.  */
		  if (optimize_edge_for_speed_p (pred))
		    do_insertion = true;
		  if (first_s == NULL)
		    first_s = edoubleprime;
		  else if (!pre_expr_d::equal (e1: first_s, e2: edoubleprime))
		    all_same = false;
		}
	    }
	  /* If we can insert it, it's not the same value
	     already existing along every predecessor, and
	     it's defined by some predecessor, it is
	     partially redundant.  */
	  if (!cant_insert && !all_same && by_some)
	    {
	      /* If the expression is redundant on all edges and we need
	         to at most insert one copy from a constant do the PHI
		 insertion even when not optimizing a path that's to be
		 optimized for speed.  */
	      if (num_inserts == 0 && num_const <= 1)
		do_insertion = true;
	      if (!do_insertion)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (stream: dump_file, format: "Skipping partial redundancy for "
			       "expression " );
		      print_pre_expr (outfile: dump_file, expr);
		      fprintf (stream: dump_file, format: " (%04d), no redundancy on to be "
			       "optimized for speed edge\n" , val);
		    }
		}
	      else if (dbg_cnt (index: treepre_insert))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (stream: dump_file, format: "Found partial redundancy for "
			       "expression " );
		      print_pre_expr (outfile: dump_file, expr);
		      fprintf (stream: dump_file, format: " (%04d)\n" ,
			       get_expr_value_id (expr));
		    }
		  if (insert_into_preds_of_block (block,
						  exprnum: get_expression_id (expr),
						  avail))
		    new_stuff = true;
		}
	    }
	  /* If all edges produce the same value and that value is
	     an invariant, then the PHI has the same value on all
	     edges.  Note this.  */
	  else if (!cant_insert
		   && all_same
		   && (edoubleprime->kind != NAME
		       || !SSA_NAME_OCCURS_IN_ABNORMAL_PHI
			     (PRE_EXPR_NAME (edoubleprime))))
	    {
	      gcc_assert (edoubleprime->kind == CONSTANT
			  || edoubleprime->kind == NAME);

	      /* Materialize a plain copy of the common leader at the
		 start of BLOCK and register it with EXPR's value.  */
	      tree temp = make_temp_ssa_name (type: get_expr_type (e: expr),
					      NULL, name: "pretmp" );
	      gassign *assign
		= gimple_build_assign (temp,
				       edoubleprime->kind == CONSTANT ?
				       PRE_EXPR_CONSTANT (edoubleprime) :
				       PRE_EXPR_NAME (edoubleprime));
	      gimple_stmt_iterator gsi = gsi_after_labels (bb: block);
	      gsi_insert_before (&gsi, assign, GSI_NEW_STMT);

	      vn_ssa_aux_t vn_info = VN_INFO (temp);
	      vn_info->value_id = val;
	      vn_info->valnum = vn_valnum_from_value_id (val);
	      if (vn_info->valnum == NULL_TREE)
		vn_info->valnum = temp;
	      bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
	      pre_expr newe = get_or_alloc_expr_for_name (name: temp);
	      add_to_value (v: val, e: newe);
	      bitmap_value_replace_in_set (AVAIL_OUT (block), expr: newe);
	      bitmap_insert_into_set (NEW_SETS (block), expr: newe);
	      bitmap_insert_into_set (PHI_GEN (block), expr: newe);
	    }
	}
    }

  return new_stuff;
}
3471 | |
3472 | |
/* Perform insertion for partially anticipatable expressions.  There
   is only one case we will perform insertion for these.  This case is
   if the expression is partially anticipatable, and fully available.
   In this case, we know that putting it earlier will enable us to
   remove the later computation.

   BLOCK is the block to insert a PHI for, DOM its immediate dominator,
   and EXPRS the (approximately topologically sorted) candidate
   expressions taken from PA_IN (BLOCK).  Returns true if anything was
   inserted, so the caller knows to iterate.  */

static bool
do_pre_partial_partial_insertion (basic_block block, basic_block dom,
				  vec<pre_expr> exprs)
{
  bool new_stuff = false;
  pre_expr expr;
  /* Per-predecessor available leader, indexed by the predecessor
     edge's dest_idx.  */
  auto_vec<pre_expr, 2> avail;
  int i;

  avail.safe_grow (EDGE_COUNT (block->preds), exact: true);

  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      /* Only computing expressions can be made redundant by inserting
	 their computation into the predecessors.  */
      if (expr->kind == NARY
	  || expr->kind == REFERENCE)
	{
	  unsigned int val;
	  bool by_all = true;
	  bool cant_insert = false;
	  edge pred;
	  basic_block bprime;
	  pre_expr eprime = NULL;
	  edge_iterator ei;

	  val = get_expr_value_id (expr);
	  /* Skip values already merged by a PHI node in BLOCK, and
	     values fully available from the dominator — in both cases
	     there is no later computation to remove.  */
	  if (bitmap_set_contains_value (PHI_GEN (block), value_id: val))
	    continue;
	  if (bitmap_set_contains_value (AVAIL_OUT (dom), value_id: val))
	    continue;

	  FOR_EACH_EDGE (pred, ei, block->preds)
	    {
	      unsigned int vprime;
	      pre_expr edoubleprime;

	      /* We should never run insertion for the exit block
	         and so not come across fake pred edges.  */
	      gcc_assert (!(pred->flags & EDGE_FAKE));
	      bprime = pred->src;
	      eprime = phi_translate (NULL, expr, ANTIC_IN (block),
				      PA_IN (block), e: pred);

	      /* eprime will generally only be NULL if the
		 value of the expression, translated
		 through the PHI for this predecessor, is
		 undefined.  If that is the case, we can't
		 make the expression fully redundant,
		 because its value is undefined along a
		 predecessor path.  We can thus break out
		 early because it doesn't matter what the
		 rest of the results are.  */
	      if (eprime == NULL)
		{
		  avail[pred->dest_idx] = NULL;
		  cant_insert = true;
		  break;
		}

	      /* Partial-partial insertion requires the value to be
		 fully available in every predecessor, so a missing
		 leader ends the scan.  */
	      vprime = get_expr_value_id (expr: eprime);
	      edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime), val: vprime);
	      avail[pred->dest_idx] = edoubleprime;
	      if (edoubleprime == NULL)
		{
		  by_all = false;
		  break;
		}
	    }

	  /* If we can insert it, it's not the same value
	     already existing along every predecessor, and
	     it's defined by some predecessor, it is
	     partially redundant.  */
	  if (!cant_insert && by_all)
	    {
	      edge succ;
	      bool do_insertion = false;

	      /* Insert only if we can remove a later expression on a path
		 that we want to optimize for speed.
		 The phi node that we will be inserting in BLOCK is not free,
		 and inserting it for the sake of !optimize_for_speed successor
		 may cause regressions on the speed path.  */
	      FOR_EACH_EDGE (succ, ei, block->succs)
		{
		  if (bitmap_set_contains_value (PA_IN (succ->dest), value_id: val)
		      || bitmap_set_contains_value (ANTIC_IN (succ->dest), value_id: val))
		    {
		      if (optimize_edge_for_speed_p (succ))
			do_insertion = true;
		    }
		}

	      if (!do_insertion)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (stream: dump_file, format: "Skipping partial partial redundancy "
			       "for expression " );
		      print_pre_expr (outfile: dump_file, expr);
		      fprintf (stream: dump_file, format: " (%04d), not (partially) anticipated "
			       "on any to be optimized for speed edges\n" , val);
		    }
		}
	      else if (dbg_cnt (index: treepre_insert))
		{
		  pre_stats.pa_insert++;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (stream: dump_file, format: "Found partial partial redundancy "
			       "for expression " );
		      print_pre_expr (outfile: dump_file, expr);
		      fprintf (stream: dump_file, format: " (%04d)\n" ,
			       get_expr_value_id (expr));
		    }
		  if (insert_into_preds_of_block (block,
						  exprnum: get_expression_id (expr),
						  avail))
		    new_stuff = true;
		}
	    }
	}
    }

  return new_stuff;
}
3604 | |
/* Insert expressions in BLOCK to compute hoistable values up.
   Return TRUE if something was inserted, otherwise return FALSE.
   The caller has to make sure that BLOCK has at least two successors.  */

static bool
do_hoist_insertion (basic_block block)
{
  edge e;
  edge_iterator ei;
  bool new_stuff = false;
  unsigned i;
  gimple_stmt_iterator last;

  /* At least two successors, or else...  */
  gcc_assert (EDGE_COUNT (block->succs) >= 2);

  /* Check that all successors of BLOCK are dominated by block.
     We could use dominated_by_p() for this, but actually there is a much
     quicker check: any successor that is dominated by BLOCK can't have
     more than one predecessor edge.  */
  FOR_EACH_EDGE (e, ei, block->succs)
    if (! single_pred_p (bb: e->dest))
      return false;

  /* Determine the insertion point.  If we cannot safely insert before
     the last stmt if we'd have to, bail out.  */
  last = gsi_last_bb (bb: block);
  if (!gsi_end_p (i: last)
      && !is_ctrl_stmt (gsi_stmt (i: last))
      && stmt_ends_bb_p (gsi_stmt (i: last)))
    return false;

  /* We have multiple successors, compute ANTIC_OUT by taking the intersection
     of all of ANTIC_IN translating through PHI nodes.  Track the union
     of the expression sets so we can pick a representative that is
     fully generatable out of hoistable expressions.  */
  bitmap_set_t ANTIC_OUT = bitmap_set_new ();
  bool first = true;
  FOR_EACH_EDGE (e, ei, block->succs)
    {
      if (first)
	{
	  phi_translate_set (dest: ANTIC_OUT, ANTIC_IN (e->dest), e);
	  first = false;
	}
      else if (!gimple_seq_empty_p (s: phi_nodes (bb: e->dest)))
	{
	  /* PHI nodes in the successor require translating ANTIC_IN
	     through them before intersecting.  Note values are
	     intersected but expressions are unioned, as per the
	     comment above.  */
	  bitmap_set_t tmp = bitmap_set_new ();
	  phi_translate_set (dest: tmp, ANTIC_IN (e->dest), e);
	  bitmap_and_into (&ANTIC_OUT->values, &tmp->values);
	  bitmap_ior_into (&ANTIC_OUT->expressions, &tmp->expressions);
	  bitmap_set_free (set: tmp);
	}
      else
	{
	  /* No PHIs — ANTIC_IN can be combined directly.  */
	  bitmap_and_into (&ANTIC_OUT->values, &ANTIC_IN (e->dest)->values);
	  bitmap_ior_into (&ANTIC_OUT->expressions,
			   &ANTIC_IN (e->dest)->expressions);
	}
    }

  /* Compute the set of hoistable expressions from ANTIC_OUT.  First compute
     hoistable values.  */
  bitmap_set hoistable_set;

  /* A hoistable value must be in ANTIC_OUT(block)
     but not in AVAIL_OUT(BLOCK).  */
  bitmap_initialize (head: &hoistable_set.values, obstack: &grand_bitmap_obstack);
  bitmap_and_compl (&hoistable_set.values,
		    &ANTIC_OUT->values, &AVAIL_OUT (block)->values);

  /* Short-cut for a common case: hoistable_set is empty.  */
  if (bitmap_empty_p (map: &hoistable_set.values))
    {
      bitmap_set_free (set: ANTIC_OUT);
      return false;
    }

  /* Compute which of the hoistable values is in AVAIL_OUT of
     at least one of the successors of BLOCK.  */
  bitmap_head availout_in_some;
  bitmap_initialize (head: &availout_in_some, obstack: &grand_bitmap_obstack);
  FOR_EACH_EDGE (e, ei, block->succs)
    /* Do not consider expressions solely because their availability
       on loop exits.  They'd be ANTIC-IN throughout the whole loop
       and thus effectively hoisted across loops by combination of
       PRE and hoisting.  */
    if (! loop_exit_edge_p (block->loop_father, e))
      bitmap_ior_and_into (DST: &availout_in_some, B: &hoistable_set.values,
			   C: &AVAIL_OUT (e->dest)->values);
  bitmap_clear (&hoistable_set.values);

  /* Short-cut for a common case: availout_in_some is empty.  */
  if (bitmap_empty_p (map: &availout_in_some))
    {
      bitmap_set_free (set: ANTIC_OUT);
      return false;
    }

  /* Hack hoistable_set in-place so we can use sorted_array_from_bitmap_set.  */
  bitmap_move (&hoistable_set.values, &availout_in_some);
  /* NOTE(review): expressions is borrowed from ANTIC_OUT, not copied —
     hoistable_set must not be freed as a whole below, only its values.  */
  hoistable_set.expressions = ANTIC_OUT->expressions;

  /* Now finally construct the topological-ordered expression set.  */
  vec<pre_expr> exprs = sorted_array_from_bitmap_set (set: &hoistable_set);

  /* If there are candidate values for hoisting, insert expressions
     strategically to make the hoistable expressions fully redundant.  */
  pre_expr expr;
  FOR_EACH_VEC_ELT (exprs, i, expr)
    {
      /* While we try to sort expressions topologically above the
	 sorting doesn't work out perfectly.  Catch expressions we
	 already inserted.  */
      unsigned int value_id = get_expr_value_id (expr);
      if (bitmap_set_contains_value (AVAIL_OUT (block), value_id))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (stream: dump_file,
		       format: "Already inserted expression for " );
	      print_pre_expr (outfile: dump_file, expr);
	      fprintf (stream: dump_file, format: " (%04d)\n" , value_id);
	    }
	  continue;
	}

      /* If we end up with a punned expression representation and this
	 happens to be a float typed one give up - we can't know for
	 sure whether all paths perform the floating-point load we are
	 about to insert and on some targets this can cause correctness
	 issues.  See PR88240.  */
      if (expr->kind == REFERENCE
	  && PRE_EXPR_REFERENCE (expr)->punned
	  && FLOAT_TYPE_P (get_expr_type (expr)))
	continue;

      /* Only hoist if the full expression is available for hoisting.
	 This avoids hoisting values that are not common and for
	 example evaluate an expression that's not valid to evaluate
	 unconditionally (PR112310).  */
      if (!valid_in_sets (set1: &hoistable_set, AVAIL_OUT (block), expr))
	continue;

      /* OK, we should hoist this value.  Perform the transformation.  */
      pre_stats.hoist_insert++;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (stream: dump_file,
		   format: "Inserting expression in block %d for code hoisting: " ,
		   block->index);
	  print_pre_expr (outfile: dump_file, expr);
	  fprintf (stream: dump_file, format: " (%04d)\n" , value_id);
	}

      gimple_seq stmts = NULL;
      tree res = create_expression_by_pieces (block, expr, stmts: &stmts,
					      type: get_expr_type (e: expr));

      /* Do not return true if expression creation ultimately
	 did not insert any statements.  */
      if (gimple_seq_empty_p (s: stmts))
	res = NULL_TREE;
      else
	{
	  /* Insert before the last (control) statement; after the last
	     non-control statement otherwise.  */
	  if (gsi_end_p (i: last) || is_ctrl_stmt (gsi_stmt (i: last)))
	    gsi_insert_seq_before (&last, stmts, GSI_SAME_STMT);
	  else
	    gsi_insert_seq_after (&last, stmts, GSI_NEW_STMT);
	}

      /* Make sure to not return true if expression creation ultimately
	 failed but also make sure to insert any stmts produced as they
	 are tracked in inserted_exprs.  */
      if (! res)
	continue;

      new_stuff = true;
    }

  exprs.release ();
  bitmap_clear (&hoistable_set.values);
  bitmap_set_free (set: ANTIC_OUT);

  return new_stuff;
}
3791 | |
/* Perform insertion of partially redundant and hoistable values.

   Iterates PRE insertion over all blocks in reverse post-order until a
   fixpoint is reached, then performs a single non-iterated backward pass
   of code hoisting.  */

static void
insert (void)
{
  basic_block bb;

  FOR_ALL_BB_FN (bb, cfun)
    NEW_SETS (bb) = bitmap_set_new ();

  /* Compute a reverse post-order and its inverse mapping so we can
     tell whether a successor was already processed in this iteration.  */
  int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  int *bb_rpo = XNEWVEC (int, last_basic_block_for_fn (cfun) + 1);
  int rpo_num = pre_and_rev_post_order_compute (NULL, rpo, false);
  for (int i = 0; i < rpo_num; ++i)
    bb_rpo[rpo[i]] = i;

  int num_iterations = 0;
  bool changed;
  do
    {
      num_iterations++;
      if (dump_file && dump_flags & TDF_DETAILS)
	fprintf (stream: dump_file, format: "Starting insert iteration %d\n" , num_iterations);

      changed = false;
      for (int idx = 0; idx < rpo_num; ++idx)
	{
	  basic_block block = BASIC_BLOCK_FOR_FN (cfun, rpo[idx]);
	  basic_block dom = get_immediate_dominator (CDI_DOMINATORS, block);
	  if (dom)
	    {
	      unsigned i;
	      bitmap_iterator bi;
	      bitmap_set_t newset;

	      /* First, update the AVAIL_OUT set with anything we may have
		 inserted higher up in the dominator tree.  */
	      newset = NEW_SETS (dom);

	      /* Note that we need to value_replace both NEW_SETS, and
		 AVAIL_OUT. For both the case of NEW_SETS, the value may be
		 represented by some non-simple expression here that we want
		 to replace it with.  */
	      bool avail_out_changed = false;
	      FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
		{
		  pre_expr expr = expression_for_id (id: i);
		  bitmap_value_replace_in_set (NEW_SETS (block), expr);
		  avail_out_changed
		    |= bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
		}
	      /* We need to iterate if AVAIL_OUT of an already processed
		 block source changed.  */
	      if (avail_out_changed && !changed)
		{
		  edge_iterator ei;
		  edge e;
		  FOR_EACH_EDGE (e, ei, block->succs)
		    if (e->dest->index != EXIT_BLOCK
			&& bb_rpo[e->dest->index] < idx)
		      changed = true;
		}

	      /* Insert expressions for partial redundancies.  */
	      if (flag_tree_pre && !single_pred_p (bb: block))
		{
		  vec<pre_expr> exprs
		    = sorted_array_from_bitmap_set (ANTIC_IN (block));
		  /* Sorting is not perfect, iterate locally.  */
		  while (do_pre_regular_insertion (block, dom, exprs))
		    ;
		  exprs.release ();
		  if (do_partial_partial)
		    {
		      exprs = sorted_array_from_bitmap_set (PA_IN (block));
		      while (do_pre_partial_partial_insertion (block, dom,
							       exprs))
			;
		      exprs.release ();
		    }
		}
	    }
	}

      /* Clear the NEW sets before the next iteration.  We have already
	 fully propagated its contents.  */
      if (changed)
	FOR_ALL_BB_FN (bb, cfun)
	  bitmap_set_free (NEW_SETS (bb));
    }
  while (changed);

  statistics_histogram_event (cfun, "insert iterations" , num_iterations);

  /* AVAIL_OUT is not needed after insertion so we don't have to
     propagate NEW_SETS from hoist insertion.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      /* NOTE(review): bitmap_set_free apparently only clears the
	 contained bitmaps; returning the set object to the allocation
	 pool is the separate remove call below.  */
      bitmap_set_free (NEW_SETS (bb));
      bitmap_set_pool.remove (NEW_SETS (bb));
      NEW_SETS (bb) = NULL;
    }

  /* Insert expressions for hoisting.  Do a backward walk here since
     inserting into BLOCK exposes new opportunities in its predecessors.
     Since PRE and hoist insertions can cause back-to-back iteration
     and we are interested in PRE insertion exposed hoisting opportunities
     but not in hoisting exposed PRE ones do hoist insertion only after
     PRE insertion iteration finished and do not iterate it.  */
  if (flag_code_hoisting)
    for (int idx = rpo_num - 1; idx >= 0; --idx)
      {
	basic_block block = BASIC_BLOCK_FOR_FN (cfun, rpo[idx]);
	if (EDGE_COUNT (block->succs) >= 2)
	  changed |= do_hoist_insertion (block);
      }

  free (ptr: rpo);
  free (ptr: bb_rpo);
}
3912 | |
3913 | |
/* Compute the AVAIL set for all basic blocks.

   This function performs value numbering of the statements in each basic
   block.  The AVAIL sets are built from information we glean while doing
   this value numbering, since the AVAIL sets contain only one entry per
   value.

   AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
   AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK].

   As a side effect this also populates EXP_GEN, PHI_GEN and TMP_GEN for
   each block and caches BB_MAY_NOTRETURN and BB_LIVE_VOP_ON_EXIT.  */

static void
compute_avail (function *fun)
{

  basic_block block, son;
  basic_block *worklist;
  size_t sp = 0;
  unsigned i;
  tree name;

  /* We pretend that default definitions are defined in the entry block.
     This includes function arguments and the static chain decl.  */
  FOR_EACH_SSA_NAME (i, name, fun)
    {
      pre_expr e;
      if (!SSA_NAME_IS_DEFAULT_DEF (name)
	  || has_zero_uses (var: name)
	  || virtual_operand_p (op: name))
	continue;

      e = get_or_alloc_expr_for_name (name);
      add_to_value (v: get_expr_value_id (expr: e), e);
      bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (fun)), expr: e);
      bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (fun)),
				    expr: e);
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      print_bitmap_set (outfile: dump_file, TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (fun)),
			setname: "tmp_gen" , ENTRY_BLOCK);
      print_bitmap_set (outfile: dump_file, AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (fun)),
			setname: "avail_out" , ENTRY_BLOCK);
    }

  /* Allocate the worklist.  The walk below is a depth-first traversal
     of the dominator tree using an explicit stack.  */
  worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (fun));

  /* Seed the algorithm by putting the dominator children of the entry
     block on the worklist.  */
  for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR_FOR_FN (fun));
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    worklist[sp++] = son;

  BB_LIVE_VOP_ON_EXIT (ENTRY_BLOCK_PTR_FOR_FN (fun))
    = ssa_default_def (fun, gimple_vop (fun));

  /* Loop until the worklist is empty.  */
  while (sp)
    {
      gimple *stmt;
      basic_block dom;

      /* Pick a block from the worklist.  */
      block = worklist[--sp];
      vn_context_bb = block;

      /* Initially, the set of available values in BLOCK is that of
	 its immediate dominator.  */
      dom = get_immediate_dominator (CDI_DOMINATORS, block);
      if (dom)
	{
	  bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
	  BB_LIVE_VOP_ON_EXIT (block) = BB_LIVE_VOP_ON_EXIT (dom);
	}

      /* Generate values for PHI nodes.  */
      for (gphi_iterator gsi = gsi_start_phis (block); !gsi_end_p (i: gsi);
	   gsi_next (i: &gsi))
	{
	  tree result = gimple_phi_result (gs: gsi.phi ());

	  /* We have no need for virtual phis, as they don't represent
	     actual computations.  */
	  if (virtual_operand_p (op: result))
	    {
	      BB_LIVE_VOP_ON_EXIT (block) = result;
	      continue;
	    }

	  pre_expr e = get_or_alloc_expr_for_name (name: result);
	  add_to_value (v: get_expr_value_id (expr: e), e);
	  bitmap_value_insert_into_set (AVAIL_OUT (block), expr: e);
	  bitmap_insert_into_set (PHI_GEN (block), expr: e);
	}

      BB_MAY_NOTRETURN (block) = 0;

      /* Now compute value numbers and populate value sets with all
	 the expressions computed in BLOCK.  */
      bool set_bb_may_notreturn = false;
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb: block); !gsi_end_p (i: gsi);
	   gsi_next (i: &gsi))
	{
	  ssa_op_iter iter;
	  tree op;

	  stmt = gsi_stmt (i: gsi);

	  /* Flush the deferred may-notreturn flag from a previous call
	     before processing the following statement.  */
	  if (set_bb_may_notreturn)
	    {
	      BB_MAY_NOTRETURN (block) = 1;
	      set_bb_may_notreturn = false;
	    }

	  /* Cache whether the basic-block has any non-visible side-effect
	     or control flow.
	     If this isn't a call or it is the last stmt in the
	     basic-block then the CFG represents things correctly.  */
	  if (is_gimple_call (gs: stmt) && !stmt_ends_bb_p (stmt))
	    {
	      /* Non-looping const functions always return normally.
		 Otherwise the call might not return or have side-effects
		 that forbids hoisting possibly trapping expressions
		 before it.  */
	      int flags = gimple_call_flags (stmt);
	      if (!(flags & (ECF_CONST|ECF_PURE))
		  || (flags & ECF_LOOPING_CONST_OR_PURE)
		  || stmt_can_throw_external (fun, stmt))
		/* Defer setting of BB_MAY_NOTRETURN to avoid it
		   influencing the processing of the call itself.  */
		set_bb_may_notreturn = true;
	    }

	  /* Every SSA definition in BLOCK becomes available here.  */
	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
	    {
	      pre_expr e = get_or_alloc_expr_for_name (name: op);
	      add_to_value (v: get_expr_value_id (expr: e), e);
	      bitmap_insert_into_set (TMP_GEN (block), expr: e);
	      bitmap_value_insert_into_set (AVAIL_OUT (block), expr: e);
	    }

	  if (gimple_vdef (g: stmt))
	    BB_LIVE_VOP_ON_EXIT (block) = gimple_vdef (g: stmt);

	  /* Statements with side-effects, possible traps or debug
	     statements do not contribute anticipatable expressions.  */
	  if (gimple_has_side_effects (stmt)
	      || stmt_could_throw_p (fun, stmt)
	      || is_gimple_debug (gs: stmt))
	    continue;

	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
	    {
	      if (ssa_undefined_value_p (op))
		continue;
	      pre_expr e = get_or_alloc_expr_for_name (name: op);
	      bitmap_value_insert_into_set (EXP_GEN (block), expr: e);
	    }

	  switch (gimple_code (g: stmt))
	    {
	    case GIMPLE_RETURN:
	      continue;

	    case GIMPLE_CALL:
	      {
		vn_reference_t ref;
		vn_reference_s ref1;
		pre_expr result = NULL;

		vn_reference_lookup_call (as_a <gcall *> (p: stmt), &ref, &ref1);
		/* There is no point to PRE a call without a value.  */
		if (!ref || !ref->result)
		  continue;

		/* If the value of the call is not invalidated in
		   this block until it is computed, add the expression
		   to EXP_GEN.  */
		if ((!gimple_vuse (g: stmt)
		     || gimple_code
			  (SSA_NAME_DEF_STMT (gimple_vuse (stmt))) == GIMPLE_PHI
		     || gimple_bb (SSA_NAME_DEF_STMT
				     (gimple_vuse (stmt))) != block)
		    /* If the REFERENCE traps and there was a preceding
		       point in the block that might not return avoid
		       adding the reference to EXP_GEN.  */
		    && (!BB_MAY_NOTRETURN (block)
			|| !vn_reference_may_trap (ref)))
		  {
		    result = get_or_alloc_expr_for_reference
			       (reference: ref, loc: gimple_location (g: stmt));
		    add_to_value (v: get_expr_value_id (expr: result), e: result);
		    bitmap_value_insert_into_set (EXP_GEN (block), expr: result);
		  }
		continue;
	      }

	    case GIMPLE_ASSIGN:
	      {
		pre_expr result = NULL;
		switch (vn_get_stmt_kind (stmt))
		  {
		  case VN_NARY:
		    {
		      enum tree_code code = gimple_assign_rhs_code (gs: stmt);
		      vn_nary_op_t nary;

		      /* COND_EXPR is awkward in that it contains an
			 embedded complex expression.
			 Don't even try to shove it through PRE.  */
		      if (code == COND_EXPR)
			continue;

		      vn_nary_op_lookup_stmt (stmt, &nary);
		      if (!nary || nary->predicated_values)
			continue;

		      unsigned value_id = nary->value_id;
		      if (value_id_constant_p (v: value_id))
			continue;

		      /* Record the un-valueized expression for EXP_GEN.  */
		      nary = XALLOCAVAR (struct vn_nary_op_s,
					 sizeof_vn_nary_op
					   (vn_nary_length_from_stmt (stmt)));
		      init_vn_nary_op_from_stmt (nary, as_a <gassign *> (p: stmt));

		      /* If the NARY traps and there was a preceding
			 point in the block that might not return avoid
			 adding the nary to EXP_GEN.  */
		      if (BB_MAY_NOTRETURN (block)
			  && vn_nary_may_trap (nary))
			continue;

		      result = get_or_alloc_expr_for_nary
				 (nary, value_id, loc: gimple_location (g: stmt));
		      break;
		    }

		  case VN_REFERENCE:
		    {
		      tree rhs1 = gimple_assign_rhs1 (gs: stmt);
		      ao_ref rhs1_ref;
		      ao_ref_init (&rhs1_ref, rhs1);
		      alias_set_type set = ao_ref_alias_set (&rhs1_ref);
		      alias_set_type base_set
			= ao_ref_base_alias_set (&rhs1_ref);
		      vec<vn_reference_op_s> operands
			= vn_reference_operands_for_lookup (rhs1);
		      vn_reference_t ref;
		      vn_reference_lookup_pieces (gimple_vuse (g: stmt), set,
						  base_set, TREE_TYPE (rhs1),
						  operands, &ref, VN_WALK);
		      if (!ref)
			{
			  operands.release ();
			  continue;
			}

		      /* If the REFERENCE traps and there was a preceding
			 point in the block that might not return avoid
			 adding the reference to EXP_GEN.  */
		      if (BB_MAY_NOTRETURN (block)
			  && vn_reference_may_trap (ref))
			{
			  operands.release ();
			  continue;
			}

		      /* If the value of the reference is not invalidated in
			 this block until it is computed, add the expression
			 to EXP_GEN.  Walk the virtual use chain within the
			 block checking for clobbers.  */
		      if (gimple_vuse (g: stmt))
			{
			  gimple *def_stmt;
			  bool ok = true;
			  def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
			  while (!gimple_nop_p (g: def_stmt)
				 && gimple_code (g: def_stmt) != GIMPLE_PHI
				 && gimple_bb (g: def_stmt) == block)
			    {
			      if (stmt_may_clobber_ref_p
				    (def_stmt, gimple_assign_rhs1 (gs: stmt)))
				{
				  ok = false;
				  break;
				}
			      def_stmt
				= SSA_NAME_DEF_STMT (gimple_vuse (def_stmt));
			    }
			  if (!ok)
			    {
			      operands.release ();
			      continue;
			    }
			}

		      /* If the load was value-numbered to another
			 load make sure we do not use its expression
			 for insertion if it wouldn't be a valid
			 replacement.  */
		      /* At the moment we have a testcase
			 for hoist insertion of aligned vs. misaligned
			 variants in gcc.dg/torture/pr65270-1.c thus
			 with just alignment to be considered we can
			 simply replace the expression in the hashtable
			 with the most conservative one.  */
		      vn_reference_op_t ref1 = &ref->operands.last ();
		      while (ref1->opcode != TARGET_MEM_REF
			     && ref1->opcode != MEM_REF
			     && ref1 != &ref->operands[0])
			--ref1;
		      vn_reference_op_t ref2 = &operands.last ();
		      while (ref2->opcode != TARGET_MEM_REF
			     && ref2->opcode != MEM_REF
			     && ref2 != &operands[0])
			--ref2;
		      if ((ref1->opcode == TARGET_MEM_REF
			   || ref1->opcode == MEM_REF)
			  && (TYPE_ALIGN (ref1->type)
			      > TYPE_ALIGN (ref2->type)))
			ref1->type
			  = build_aligned_type (ref1->type,
						TYPE_ALIGN (ref2->type));
		      /* TBAA behavior is an obvious part so make sure
			 that the hashtable one covers this as well
			 by adjusting the ref alias set and its base.  */
		      if ((ref->set == set
			   || alias_set_subset_of (set, ref->set))
			  && (ref->base_set == base_set
			      || alias_set_subset_of (base_set, ref->base_set)))
			;
		      else if (ref1->opcode != ref2->opcode
			       || (ref1->opcode != MEM_REF
				   && ref1->opcode != TARGET_MEM_REF))
			{
			  /* With mismatching base opcodes or bases
			     other than MEM_REF or TARGET_MEM_REF we
			     can't do any easy TBAA adjustment.  */
			  operands.release ();
			  continue;
			}
		      else if (ref->set == set
			       || alias_set_subset_of (ref->set, set))
			{
			  /* The statement's alias set is the more
			     conservative one; narrow the hashtable
			     entry to it.  */
			  tree reft = reference_alias_ptr_type (rhs1);
			  ref->set = set;
			  ref->base_set = set;
			  if (ref1->opcode == MEM_REF)
			    ref1->op0
			      = wide_int_to_tree (type: reft,
						  cst: wi::to_wide (t: ref1->op0));
			  else
			    ref1->op2
			      = wide_int_to_tree (type: reft,
						  cst: wi::to_wide (t: ref1->op2));
			}
		      else
			{
			  /* Neither set is a subset of the other; fall
			     back to alias set zero (alias-everything).  */
			  ref->set = 0;
			  ref->base_set = 0;
			  if (ref1->opcode == MEM_REF)
			    ref1->op0
			      = wide_int_to_tree (ptr_type_node,
						  cst: wi::to_wide (t: ref1->op0));
			  else
			    ref1->op2
			      = wide_int_to_tree (ptr_type_node,
						  cst: wi::to_wide (t: ref1->op2));
			}
		      operands.release ();

		      result = get_or_alloc_expr_for_reference
				 (reference: ref, loc: gimple_location (g: stmt));
		      break;
		    }

		  default:
		    continue;
		  }

		add_to_value (v: get_expr_value_id (expr: result), e: result);
		bitmap_value_insert_into_set (EXP_GEN (block), expr: result);
		continue;
	      }
	    default:
	      break;
	    }
	}
      /* Flush a may-notreturn deferred from the block's last call.  */
      if (set_bb_may_notreturn)
	{
	  BB_MAY_NOTRETURN (block) = 1;
	  set_bb_may_notreturn = false;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  print_bitmap_set (outfile: dump_file, EXP_GEN (block),
			    setname: "exp_gen" , blockindex: block->index);
	  print_bitmap_set (outfile: dump_file, PHI_GEN (block),
			    setname: "phi_gen" , blockindex: block->index);
	  print_bitmap_set (outfile: dump_file, TMP_GEN (block),
			    setname: "tmp_gen" , blockindex: block->index);
	  print_bitmap_set (outfile: dump_file, AVAIL_OUT (block),
			    setname: "avail_out" , blockindex: block->index);
	}

      /* Put the dominator children of BLOCK on the worklist of blocks
	 to compute available sets for.  */
      for (son = first_dom_son (CDI_DOMINATORS, block);
	   son;
	   son = next_dom_son (CDI_DOMINATORS, son))
	worklist[sp++] = son;
    }
  vn_context_bb = NULL;

  free (ptr: worklist);
}
4332 | |
4333 | |
/* Initialize data structures used by PRE.  Allocates the expression
   and value tables, per-block bitmap sets and the bitmap obstack, and
   computes the dominance information the pass relies on.  */

static void
init_pre (void)
{
  basic_block bb;

  /* Expression id 0 is reserved; the table starts with a NULL entry.  */
  next_expression_id = 1;
  expressions.create (nelems: 0);
  expressions.safe_push (NULL);
  value_expressions.create (nelems: get_max_value_id () + 1);
  value_expressions.quick_grow_cleared (len: get_max_value_id () + 1);
  constant_value_expressions.create (nelems: get_max_constant_value_id () + 1);
  constant_value_expressions.quick_grow_cleared (len: get_max_constant_value_id () + 1);
  name_to_id.create (nelems: 0);
  gcc_obstack_init (&pre_expr_obstack);

  inserted_exprs = BITMAP_ALLOC (NULL);

  /* Give infinite loops a path to the exit block so the dataflow
     computations terminate.  */
  connect_infinite_loops_to_exit ();
  memset (s: &pre_stats, c: 0, n: sizeof (pre_stats));

  /* Attach the per-block bitmap-set structure to bb->aux.  */
  alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));

  calculate_dominance_info (CDI_DOMINATORS);

  bitmap_obstack_initialize (&grand_bitmap_obstack);
  expression_to_id = new hash_table<pre_expr_d> (num_ssa_names * 3);
  FOR_ALL_BB_FN (bb, cfun)
    {
      EXP_GEN (bb) = bitmap_set_new ();
      PHI_GEN (bb) = bitmap_set_new ();
      TMP_GEN (bb) = bitmap_set_new ();
      AVAIL_OUT (bb) = bitmap_set_new ();
      /* PHI translation caches are created lazily.  */
      PHI_TRANS_TABLE (bb) = NULL;
    }
}
4371 | |
4372 | |
4373 | /* Deallocate data structures used by PRE. */ |
4374 | |
4375 | static void |
4376 | fini_pre () |
4377 | { |
4378 | value_expressions.release (); |
4379 | constant_value_expressions.release (); |
4380 | expressions.release (); |
4381 | bitmap_obstack_release (&grand_bitmap_obstack); |
4382 | bitmap_set_pool.release (); |
4383 | pre_expr_pool.release (); |
4384 | delete expression_to_id; |
4385 | expression_to_id = NULL; |
4386 | name_to_id.release (); |
4387 | obstack_free (&pre_expr_obstack, NULL); |
4388 | |
4389 | basic_block bb; |
4390 | FOR_ALL_BB_FN (bb, cfun) |
4391 | if (bb->aux && PHI_TRANS_TABLE (bb)) |
4392 | delete PHI_TRANS_TABLE (bb); |
4393 | free_aux_for_blocks (); |
4394 | } |
4395 | |
4396 | namespace { |
4397 | |
4398 | const pass_data pass_data_pre = |
4399 | { |
4400 | .type: GIMPLE_PASS, /* type */ |
4401 | .name: "pre" , /* name */ |
4402 | .optinfo_flags: OPTGROUP_NONE, /* optinfo_flags */ |
4403 | .tv_id: TV_TREE_PRE, /* tv_id */ |
4404 | .properties_required: ( PROP_cfg | PROP_ssa ), /* properties_required */ |
4405 | .properties_provided: 0, /* properties_provided */ |
4406 | .properties_destroyed: 0, /* properties_destroyed */ |
4407 | TODO_rebuild_alias, /* todo_flags_start */ |
4408 | .todo_flags_finish: 0, /* todo_flags_finish */ |
4409 | }; |
4410 | |
4411 | class pass_pre : public gimple_opt_pass |
4412 | { |
4413 | public: |
4414 | pass_pre (gcc::context *ctxt) |
4415 | : gimple_opt_pass (pass_data_pre, ctxt) |
4416 | {} |
4417 | |
4418 | /* opt_pass methods: */ |
4419 | bool gate (function *) final override |
4420 | { return flag_tree_pre != 0 || flag_code_hoisting != 0; } |
4421 | unsigned int execute (function *) final override; |
4422 | |
4423 | }; // class pass_pre |
4424 | |
4425 | /* Valueization hook for RPO VN when we are calling back to it |
4426 | at ANTIC compute time. */ |
4427 | |
4428 | static tree |
4429 | pre_valueize (tree name) |
4430 | { |
4431 | if (TREE_CODE (name) == SSA_NAME) |
4432 | { |
4433 | tree tem = VN_INFO (name)->valnum; |
4434 | if (tem != VN_TOP && tem != name) |
4435 | { |
4436 | if (TREE_CODE (tem) != SSA_NAME |
4437 | || SSA_NAME_IS_DEFAULT_DEF (tem)) |
4438 | return tem; |
4439 | /* We create temporary SSA names for representatives that |
4440 | do not have a definition (yet) but are not default defs either |
4441 | assume they are fine to use. */ |
4442 | basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (tem)); |
4443 | if (! def_bb |
4444 | || dominated_by_p (CDI_DOMINATORS, vn_context_bb, def_bb)) |
4445 | return tem; |
4446 | /* ??? Now we could look for a leader. Ideally we'd somehow |
4447 | expose RPO VN leaders and get rid of AVAIL_OUT as well... */ |
4448 | } |
4449 | } |
4450 | return name; |
4451 | } |
4452 | |
/* Main driver of the PRE pass: run RPO value numbering, compute
   availability and anticipability, insert expressions and PHIs,
   eliminate redundancies, then clean up.  The ordering of the steps
   below is significant.  Returns the TODO flags for the pass manager.  */

unsigned int
pass_pre::execute (function *fun)
{
  unsigned int todo = 0;

  /* Partial-partial PRE only pays off when optimizing for speed.  */
  do_partial_partial =
    flag_tree_partial_pre && optimize_function_for_speed_p (fun);

  /* This has to happen before VN runs because
     loop_optimizer_init may create new phis, etc.  */
  loop_optimizer_init (LOOPS_NORMAL);
  split_edges_for_insertion ();
  scev_initialize ();
  calculate_dominance_info (CDI_DOMINATORS);

  run_rpo_vn (VN_WALK);

  init_pre ();

  /* Let ANTIC-time VN queries valueize through our availability info.  */
  vn_valueize = pre_valueize;

  /* Insert can get quite slow on an incredibly large number of basic
     blocks due to some quadratic behavior.  Until this behavior is
     fixed, don't run it when we have an incredibly large number of
     bb's.  If we aren't going to run insert, there is no point in
     computing ANTIC, either, even though it's plenty fast nor do
     we require AVAIL.  */
  if (n_basic_blocks_for_fn (fun) < 4000)
    {
      compute_avail (fun);
      compute_antic ();
      insert ();
    }

  /* Make sure to remove fake edges before committing our inserts.
     This makes sure we don't end up with extra critical edges that
     we would need to split.  */
  remove_fake_exit_edges ();
  gsi_commit_edge_inserts ();

  /* Eliminate folds statements which might (should not...) end up
     not keeping virtual operands up-to-date.  */
  gcc_assert (!need_ssa_update_p (fun));

  statistics_counter_event (fun, "Insertions" , pre_stats.insertions);
  statistics_counter_event (fun, "PA inserted" , pre_stats.pa_insert);
  statistics_counter_event (fun, "HOIST inserted" , pre_stats.hoist_insert);
  statistics_counter_event (fun, "New PHIs" , pre_stats.phis);

  /* Remove redundancies, replacing uses by their available leaders.  */
  todo |= eliminate_with_rpo_vn (inserted_exprs);

  vn_valueize = NULL;

  fini_pre ();

  scev_finalize ();
  loop_optimizer_finalize ();

  /* Perform a CFG cleanup before we run simple_dce_from_worklist since
     unreachable code regions will have not up-to-date SSA form which
     confuses it.  */
  bool need_crit_edge_split = false;
  if (todo & TODO_cleanup_cfg)
    {
      cleanup_tree_cfg ();
      need_crit_edge_split = true;
    }

  /* Because we don't follow exactly the standard PRE algorithm, and decide not
     to insert PHI nodes sometimes, and because value numbering of casts isn't
     perfect, we sometimes end up inserting dead code.   This simple DCE-like
     pass removes any insertions we made that weren't actually used.  */
  simple_dce_from_worklist (inserted_exprs);
  BITMAP_FREE (inserted_exprs);

  /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
     case we can merge the block with the remaining predecessor of the block.
     It should either:
     - call merge_blocks after each tail merge iteration
     - call merge_blocks after all tail merge iterations
     - mark TODO_cleanup_cfg when necessary.  */
  todo |= tail_merge_optimize (need_crit_edge_split);

  free_rpo_vn ();

  /* Tail merging invalidates the virtual SSA web, together with
     cfg-cleanup opportunities exposed by PRE this will wreck the
     SSA updating machinery.  So make sure to run update-ssa
     manually, before eventually scheduling cfg-cleanup as part of
     the todo.  */
  update_ssa (TODO_update_ssa_only_virtuals);

  return todo;
}
4547 | |
4548 | } // anon namespace |
4549 | |
4550 | gimple_opt_pass * |
4551 | make_pass_pre (gcc::context *ctxt) |
4552 | { |
4553 | return new pass_pre (ctxt); |
4554 | } |
4555 | |