1 | /* Basic block path solver. |
2 | Copyright (C) 2021-2023 Free Software Foundation, Inc. |
3 | Contributed by Aldy Hernandez <aldyh@redhat.com>. |
4 | |
5 | This file is part of GCC. |
6 | |
7 | GCC is free software; you can redistribute it and/or modify it under |
8 | the terms of the GNU General Public License as published by the Free |
9 | Software Foundation; either version 3, or (at your option) any later |
10 | version. |
11 | |
12 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
13 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
14 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
15 | for more details. |
16 | |
17 | You should have received a copy of the GNU General Public License |
18 | along with GCC; see the file COPYING3. If not see |
19 | <http://www.gnu.org/licenses/>. */ |
20 | |
21 | #include "config.h" |
22 | #include "system.h" |
23 | #include "coretypes.h" |
24 | #include "backend.h" |
25 | #include "tree.h" |
26 | #include "gimple.h" |
27 | #include "cfganal.h" |
28 | #include "value-range.h" |
29 | #include "gimple-range.h" |
30 | #include "tree-pretty-print.h" |
31 | #include "gimple-range-path.h" |
32 | #include "ssa.h" |
33 | #include "tree-cfg.h" |
34 | #include "gimple-iterator.h" |
35 | |
36 | // Internal construct to help facilitate debugging of solver. |
37 | #define DEBUG_SOLVER (dump_file && (param_threader_debug == THREADER_DEBUG_ALL)) |
38 | |
// Construct a path_range_query for PATH using RANGER, and solve the
// path immediately.  DEPENDENCIES, if non-NULL, is the set of SSA
// names that can affect the final conditional in PATH; otherwise it
// is computed from the path itself.  If RESOLVE is true, ranges of
// names defined outside the path are resolved on entry to the path.

path_range_query::path_range_query (gimple_ranger &ranger,
				    const vec<basic_block> &path,
				    const bitmap_head *dependencies,
				    bool resolve)
  : m_cache (),
    m_ranger (ranger),
    m_resolve (resolve)
{
  // Layer a path oracle over the ranger's oracle so relations
  // registered along the path do not pollute the underlying oracle.
  m_oracle = new path_oracle (m_ranger.oracle ());

  reset_path (path, dependencies);
}
51 | |
// Construct a path_range_query with no path yet; a path must be
// supplied later with reset_path before making any queries.

path_range_query::path_range_query (gimple_ranger &ranger, bool resolve)
  : m_cache (),
    m_ranger (ranger),
    m_resolve (resolve)
{
  m_oracle = new path_oracle (m_ranger.oracle ());
}
59 | |
path_range_query::~path_range_query ()
{
  // Only the path oracle allocated in the constructor is owned here;
  // the ranger's underlying oracle is left untouched.
  delete m_oracle;
}
64 | |
65 | // Return TRUE if NAME is an exit dependency for the path. |
66 | |
67 | bool |
68 | path_range_query::exit_dependency_p (tree name) |
69 | { |
70 | return (TREE_CODE (name) == SSA_NAME |
71 | && bitmap_bit_p (m_exit_dependencies, SSA_NAME_VERSION (name))); |
72 | } |
73 | |
74 | // If NAME has a cache entry, return it in R, and return TRUE. |
75 | |
76 | inline bool |
77 | path_range_query::get_cache (vrange &r, tree name) |
78 | { |
79 | if (!gimple_range_ssa_p (exp: name)) |
80 | return get_global_range_query ()->range_of_expr (r, expr: name); |
81 | |
82 | return m_cache.get_range (r, name); |
83 | } |
84 | |
// Dump the state of the solver to DUMP_FILE: the ranger's view of the
// path, the exit dependencies, and the range cache.

void
path_range_query::dump (FILE *dump_file)
{
  // Temporarily redirect dump output, dropping TDF_DETAILS to keep
  // the dump manageable; restored when SAVE goes out of scope.
  push_dump_file save (dump_file, dump_flags & ~TDF_DETAILS);

  if (m_path.is_empty ())
    return;

  unsigned i;
  bitmap_iterator bi;

  dump_ranger (dump_file, m_path);

  fprintf (dump_file, "Exit dependencies:\n");
  EXECUTE_IF_SET_IN_BITMAP (m_exit_dependencies, 0, i, bi)
    {
      tree name = ssa_name (i);
      print_generic_expr (dump_file, name, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  m_cache.dump (dump_file);
}
108 | |
// Convenience wrapper for calling from a debugger: dump to stderr.

void
path_range_query::debug ()
{
  dump (stderr);
}
114 | |
115 | // Return TRUE if NAME is defined outside the current path. |
116 | |
117 | bool |
118 | path_range_query::defined_outside_path (tree name) |
119 | { |
120 | gimple *def = SSA_NAME_DEF_STMT (name); |
121 | basic_block bb = gimple_bb (g: def); |
122 | |
123 | return !bb || !m_path.contains (search: bb); |
124 | } |
125 | |
// Return in R the range of NAME on entry to the path, as computed by
// the ranger at the path's entry block.  NAME must be defined outside
// the path.

void
path_range_query::range_on_path_entry (vrange &r, tree name)
{
  gcc_checking_assert (defined_outside_path (name));
  basic_block entry = entry_bb ();
  m_ranger.range_on_entry (r, entry, name);
}
135 | |
// Return the range of NAME at the end of the path being analyzed.
// Returns FALSE only when the type of NAME is unsupported.

bool
path_range_query::internal_range_of_expr (vrange &r, tree name, gimple *stmt)
{
  if (!r.supports_type_p (TREE_TYPE (name)))
    return false;

  // First consult the cache (or the global query for non-SSA names).
  if (get_cache (r, name))
    return true;

  // Names defined before the path get their range on entry to the
  // path, and the result is cached for subsequent queries.
  if (m_resolve && defined_outside_path (name))
    {
      range_on_path_entry (r, name);
      m_cache.set_range (name, r);
      return true;
    }

  if (stmt
      && range_defined_in_block (r, name, gimple_bb (stmt)))
    {
      // Refine the calculated range with the known global range.
      if (TREE_CODE (name) == SSA_NAME)
	{
	  Value_Range glob (TREE_TYPE (name));
	  gimple_range_global (glob, name);
	  r.intersect (glob);
	}

      m_cache.set_range (name, r);
      return true;
    }

  // Fall back to the global range for NAME.
  gimple_range_global (r, name);
  return true;
}
171 | |
172 | bool |
173 | path_range_query::range_of_expr (vrange &r, tree name, gimple *stmt) |
174 | { |
175 | if (internal_range_of_expr (r, name, stmt)) |
176 | { |
177 | if (r.undefined_p ()) |
178 | m_undefined_path = true; |
179 | |
180 | return true; |
181 | } |
182 | return false; |
183 | } |
184 | |
// Return TRUE if any range queried so far along the path came back
// undefined, meaning the path is unreachable.

bool
path_range_query::unreachable_path_p ()
{
  return m_undefined_path;
}
190 | |
// Reset the current path to PATH and recompute all ranges for it.
// PATH is stored in reverse order (entry block last), so the solver
// starts at index length-1.  DEPENDENCIES may be NULL, in which case
// the exit dependencies are computed from the path.

void
path_range_query::reset_path (const vec<basic_block> &path,
			      const bitmap_head *dependencies)
{
  // A path of a single block is not a path.
  gcc_checking_assert (path.length () > 1);
  m_path = path.copy ();
  m_pos = m_path.length () - 1;
  m_undefined_path = false;
  m_cache.clear ();

  compute_ranges (dependencies);
}
205 | |
206 | bool |
207 | path_range_query::ssa_defined_in_bb (tree name, basic_block bb) |
208 | { |
209 | return (TREE_CODE (name) == SSA_NAME |
210 | && SSA_NAME_DEF_STMT (name) |
211 | && gimple_bb (SSA_NAME_DEF_STMT (name)) == bb); |
212 | } |
213 | |
// Return the range of the result of PHI in R.
//
// Since PHIs are calculated in parallel at the beginning of the
// block, we must be careful to never save anything to the cache here.
// It is the caller's responsibility to adjust the cache.  Also,
// calculating the PHI's range must not trigger additional lookups.

void
path_range_query::ssa_range_in_phi (vrange &r, gphi *phi)
{
  tree name = gimple_phi_result (phi);

  if (at_entry ())
    {
      // At the path entry there is no incoming path edge, so let the
      // ranger resolve the PHI if possible.
      if (m_resolve && m_ranger.range_of_expr (r, name, phi))
	return;

      // Try to fold the phi exclusively with global values.
      // This will get things like PHI <5(99), 6(88)>.  We do this by
      // calling range_of_expr with no context.
      unsigned nargs = gimple_phi_num_args (phi);
      Value_Range arg_range (TREE_TYPE (name));
      r.set_undefined ();
      for (size_t i = 0; i < nargs; ++i)
	{
	  tree arg = gimple_phi_arg_def (phi, i);
	  if (m_ranger.range_of_expr (arg_range, arg, /*stmt=*/NULL))
	    r.union_ (arg_range);
	  else
	    {
	      // Any unresolvable argument makes the result VARYING.
	      r.set_varying (TREE_TYPE (name));
	      return;
	    }
	}
      return;
    }

  // Not at the entry: only the argument flowing in over the path's
  // incoming edge is relevant.
  basic_block bb = gimple_bb (phi);
  basic_block prev = prev_bb ();
  edge e_in = find_edge (prev, bb);
  tree arg = PHI_ARG_DEF_FROM_EDGE (phi, e_in);
  // Avoid using the cache for ARGs defined in this block, as
  // that could create an ordering problem.
  if (ssa_defined_in_bb (arg, bb) || !get_cache (r, arg))
    {
      if (m_resolve)
	{
	  Value_Range tmp (TREE_TYPE (name));
	  // Using both the range on entry to the path, and the
	  // range on this edge yields significantly better
	  // results.
	  if (TREE_CODE (arg) == SSA_NAME
	      && defined_outside_path (arg))
	    range_on_path_entry (r, arg);
	  else
	    r.set_varying (TREE_TYPE (name));
	  m_ranger.range_on_edge (tmp, e_in, arg);
	  r.intersect (tmp);
	  return;
	}
      r.set_varying (TREE_TYPE (name));
    }
}
277 | |
// If NAME is defined in BB, set R to the range of NAME, and return
// TRUE.  Otherwise, return FALSE.

bool
path_range_query::range_defined_in_block (vrange &r, tree name, basic_block bb)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (name);
  basic_block def_bb = gimple_bb (def_stmt);

  if (def_bb != bb)
    return false;

  if (get_cache (r, name))
    return true;

  if (gimple_code (def_stmt) == GIMPLE_PHI)
    // PHIs are handled specially; see ssa_range_in_phi.  Note that it
    // never updates the cache -- that is up to our callers.
    ssa_range_in_phi (r, as_a<gphi *> (def_stmt));
  else
    {
      // A new definition kills any prior relations involving NAME.
      if (name)
	get_path_oracle ()->killing_def (name);

      if (!range_of_stmt (r, def_stmt, name))
	r.set_varying (TREE_TYPE (name));
    }

  // Pointers may be refined with non-null information from uses in BB.
  if (bb && POINTER_TYPE_P (TREE_TYPE (name)))
    m_ranger.m_cache.m_exit.maybe_adjust_range (r, name, bb);

  if (DEBUG_SOLVER && (bb || !r.varying_p ()))
    {
      fprintf (dump_file, "range_defined_in_block (BB%d) for ",
	       bb ? bb->index : -1);
      print_generic_expr (dump_file, name, TDF_SLIM);
      fprintf (dump_file, " is ");
      r.dump (dump_file);
      fprintf (dump_file, "\n");
    }

  return true;
}
318 | |
319 | // Compute ranges defined in the PHIs in this block. |
320 | |
321 | void |
322 | path_range_query::compute_ranges_in_phis (basic_block bb) |
323 | { |
324 | // PHIs must be resolved simultaneously on entry to the block |
325 | // because any dependencies must be satisfied with values on entry. |
326 | // Thus, we calculate all PHIs first, and then update the cache at |
327 | // the end. |
328 | |
329 | for (auto iter = gsi_start_phis (bb); !gsi_end_p (i: iter); gsi_next (i: &iter)) |
330 | { |
331 | gphi *phi = iter.phi (); |
332 | tree name = gimple_phi_result (gs: phi); |
333 | |
334 | if (!exit_dependency_p (name)) |
335 | continue; |
336 | |
337 | Value_Range r (TREE_TYPE (name)); |
338 | if (range_defined_in_block (r, name, bb)) |
339 | m_cache.set_range (name, r); |
340 | } |
341 | } |
342 | |
343 | // Return TRUE if relations may be invalidated after crossing edge E. |
344 | |
345 | bool |
346 | path_range_query::relations_may_be_invalidated (edge e) |
347 | { |
348 | // As soon as the path crosses a back edge, we can encounter |
349 | // definitions of SSA_NAMEs that may have had a use in the path |
350 | // already, so this will then be a new definition. The relation |
351 | // code is all designed around seeing things in dominator order, and |
352 | // crossing a back edge in the path violates this assumption. |
353 | return (e->flags & EDGE_DFS_BACK); |
354 | } |
355 | |
// Compute ranges defined in the current block, or exported to the
// next block.

void
path_range_query::compute_ranges_in_block (basic_block bb)
{
  bitmap_iterator bi;
  unsigned i;

  if (m_resolve && !at_entry ())
    compute_phi_relations (bb, prev_bb ());

  // Force recalculation of any names in the cache that are defined in
  // this block.  This can happen on interdependent SSA/phis in loops.
  EXECUTE_IF_SET_IN_BITMAP (m_exit_dependencies, 0, i, bi)
    {
      tree name = ssa_name (i);
      if (ssa_defined_in_bb (name, bb))
	m_cache.clear_range (name);
    }

  // Solve dependencies defined in this block, starting with the PHIs...
  compute_ranges_in_phis (bb);
  // ...and then the rest of the dependencies.
  EXECUTE_IF_SET_IN_BITMAP (m_exit_dependencies, 0, i, bi)
    {
      tree name = ssa_name (i);
      Value_Range r (TREE_TYPE (name));

      // PHIs were already done above.
      if (gimple_code (SSA_NAME_DEF_STMT (name)) != GIMPLE_PHI
	  && range_defined_in_block (r, name, bb))
	m_cache.set_range (name, r);
    }

  // At the last block of the path there is no outgoing edge to refine.
  if (at_exit ())
    return;

  // Solve dependencies that are exported to the next block.
  basic_block next = next_bb ();
  edge e = find_edge (bb, next);

  if (m_resolve && relations_may_be_invalidated (e))
    {
      if (DEBUG_SOLVER)
	fprintf (dump_file,
		 "Resetting relations as they may be invalidated in %d->%d.\n",
		 e->src->index, e->dest->index);

      path_oracle *p = get_path_oracle ();
      // ?? Instead of nuking the root oracle altogether, we could
      // reset the path oracle to search for relations from the top of
      // the loop with the root oracle.  Something for future development.
      p->reset_path ();
    }

  // Refine any exit dependency that GORI can compute over edge E.
  gori_compute &g = m_ranger.gori ();
  bitmap exports = g.exports (bb);
  EXECUTE_IF_AND_IN_BITMAP (m_exit_dependencies, exports, 0, i, bi)
    {
      tree name = ssa_name (i);
      Value_Range r (TREE_TYPE (name));
      if (g.outgoing_edge_range_p (r, e, name, *this))
	{
	  // Intersect with any range already cached for NAME.
	  Value_Range cached_range (TREE_TYPE (name));
	  if (get_cache (cached_range, name))
	    r.intersect (cached_range);

	  m_cache.set_range (name, r);
	  if (DEBUG_SOLVER)
	    {
	      fprintf (dump_file, "outgoing_edge_range_p for ");
	      print_generic_expr (dump_file, name, TDF_SLIM);
	      fprintf (dump_file, " on edge %d->%d ",
		       e->src->index, e->dest->index);
	      fprintf (dump_file, "is ");
	      r.dump (dump_file);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  if (m_resolve)
    compute_outgoing_relations (bb, next);
}
440 | |
// Adjust all pointer exit dependencies in BB with non-null information.

void
path_range_query::adjust_for_non_null_uses (basic_block bb)
{
  int_range_max r;
  bitmap_iterator bi;
  unsigned i;

  EXECUTE_IF_SET_IN_BITMAP (m_exit_dependencies, 0, i, bi)
    {
      tree name = ssa_name (i);

      // Non-null only applies to pointers.
      if (!POINTER_TYPE_P (TREE_TYPE (name)))
	continue;

      if (get_cache (r, name))
	{
	  // Already known non-null; nothing to refine.
	  if (r.nonzero_p ())
	    continue;
	}
      else
	r.set_varying (TREE_TYPE (name));

      // If a use in BB implies NAME is non-null, record the refined
      // range in the cache.
      if (m_ranger.m_cache.m_exit.maybe_adjust_range (r, name, bb))
	m_cache.set_range (name, r);
    }
}
469 | |
470 | // If NAME is a supported SSA_NAME, add it to the bitmap in dependencies. |
471 | |
472 | bool |
473 | path_range_query::add_to_exit_dependencies (tree name, bitmap dependencies) |
474 | { |
475 | if (TREE_CODE (name) == SSA_NAME |
476 | && Value_Range::supports_type_p (TREE_TYPE (name))) |
477 | return bitmap_set_bit (dependencies, SSA_NAME_VERSION (name)); |
478 | return false; |
479 | } |
480 | |
// Compute the exit dependencies to PATH.  These are essentially the
// SSA names used to calculate the final conditional along the path.
// The result is stored in DEPENDENCIES.

void
path_range_query::compute_exit_dependencies (bitmap dependencies)
{
  // Start with the imports from the exit block...
  // (the path is stored in reverse order, so m_path[0] is the exit).
  basic_block exit = m_path[0];
  gori_compute &gori = m_ranger.gori ();
  bitmap_copy (dependencies, gori.imports (exit));

  // Seed the worklist with the imports.
  auto_vec<tree> worklist (bitmap_count_bits (dependencies));
  bitmap_iterator bi;
  unsigned i;
  EXECUTE_IF_SET_IN_BITMAP (dependencies, 0, i, bi)
    {
      tree name = ssa_name (i);
      worklist.quick_push (name);
    }

  // ...and add any operands used to define these imports, transitively,
  // as long as the definitions remain within the path.
  while (!worklist.is_empty ())
    {
      tree name = worklist.pop ();
      gimple *def_stmt = SSA_NAME_DEF_STMT (name);
      if (SSA_NAME_IS_DEFAULT_DEF (name)
	  || !m_path.contains (gimple_bb (def_stmt)))
	continue;

      if (gphi *phi = dyn_cast <gphi *> (def_stmt))
	{
	  // For PHIs, only follow arguments flowing in from blocks on
	  // the path.
	  for (size_t i = 0; i < gimple_phi_num_args (phi); ++i)
	    {
	      edge e = gimple_phi_arg_edge (phi, i);
	      tree arg = gimple_phi_arg (phi, i)->def;

	      if (TREE_CODE (arg) == SSA_NAME
		  && m_path.contains (e->src)
		  && bitmap_set_bit (dependencies, SSA_NAME_VERSION (arg)))
		worklist.safe_push (arg);
	    }
	}
      else if (gassign *ass = dyn_cast <gassign *> (def_stmt))
	{
	  // Follow the (up to 3) SSA operands of an assignment.
	  tree ssa[3];
	  unsigned count = gimple_range_ssa_names (ssa, 3, ass);
	  for (unsigned j = 0; j < count; ++j)
	    if (add_to_exit_dependencies (ssa[j], dependencies))
	      worklist.safe_push (ssa[j]);
	}
    }
  // Exported booleans along the path, may help conditionals.
  if (m_resolve)
    for (i = 0; i < m_path.length (); ++i)
      {
	basic_block bb = m_path[i];
	tree name;
	FOR_EACH_GORI_EXPORT_NAME (gori, bb, name)
	  if (TREE_CODE (TREE_TYPE (name)) == BOOLEAN_TYPE)
	    bitmap_set_bit (dependencies, SSA_NAME_VERSION (name));
      }
}
543 | |
// Compute the ranges for DEPENDENCIES along PATH.
//
// DEPENDENCIES are path exit dependencies.  They are the set of SSA
// names, any of which could potentially change the value of the final
// conditional in PATH.  If none is given, the exit dependencies are
// calculated from the final conditional in the path.

void
path_range_query::compute_ranges (const bitmap_head *dependencies)
{
  if (DEBUG_SOLVER)
    fprintf (dump_file, "\n==============================================\n");

  if (dependencies)
    bitmap_copy (m_exit_dependencies, dependencies);
  else
    compute_exit_dependencies (m_exit_dependencies);

  if (m_resolve)
    {
      // Start the path relations from a clean slate on top of the
      // ranger's oracle.
      path_oracle *p = get_path_oracle ();
      p->reset_path (m_ranger.oracle ());
    }

  if (DEBUG_SOLVER)
    {
      // Print the path in execution order (entry first), even though
      // it is stored in reverse.
      fprintf (dump_file, "path_range_query: compute_ranges for path: ");
      for (unsigned i = m_path.length (); i > 0; --i)
	{
	  basic_block bb = m_path[i - 1];
	  fprintf (dump_file, "%d", bb->index);
	  if (i > 1)
	    fprintf (dump_file, "->");
	}
      fprintf (dump_file, "\n");
    }

  // Walk the path from entry to exit, solving each block in turn.
  while (1)
    {
      basic_block bb = curr_bb ();

      compute_ranges_in_block (bb);
      adjust_for_non_null_uses (bb);

      if (at_exit ())
	break;

      move_next ();
    }

  if (DEBUG_SOLVER)
    {
      get_path_oracle ()->dump (dump_file);
      dump (dump_file);
    }
}
600 | |
// A folding aid used to register and query relations along a path.
// When queried, it returns relations as they would appear on exit to
// the path.
//
// Relations are registered on entry so the path_oracle knows which
// block to query the root oracle at when a relation lies outside the
// path.  However, when queried we return the relation on exit to the
// path, since the root_oracle ignores the registered.

class jt_fur_source : public fur_depend
{
public:
  jt_fur_source (gimple *s, path_range_query *, gori_compute *,
		 const vec<basic_block> &);
  relation_kind query_relation (tree op1, tree op2) override;
  void register_relation (gimple *, relation_kind, tree op1, tree op2) override;
  void register_relation (edge, relation_kind, tree op1, tree op2) override;
private:
  // Entry block of the path; all relations are registered and queried
  // relative to it.
  basic_block m_entry;
};
621 | |
622 | jt_fur_source::jt_fur_source (gimple *s, |
623 | path_range_query *query, |
624 | gori_compute *gori, |
625 | const vec<basic_block> &path) |
626 | : fur_depend (s, gori, query) |
627 | { |
628 | gcc_checking_assert (!path.is_empty ()); |
629 | |
630 | m_entry = path[path.length () - 1]; |
631 | |
632 | if (dom_info_available_p (CDI_DOMINATORS)) |
633 | m_oracle = query->oracle (); |
634 | else |
635 | m_oracle = NULL; |
636 | } |
637 | |
// Ignore statement and register relation on entry to path.

void
jt_fur_source::register_relation (gimple *, relation_kind k, tree op1, tree op2)
{
  if (m_oracle)
    m_oracle->register_relation (m_entry, k, op1, op2);
}
646 | |
// Ignore edge and register relation on entry to path.

void
jt_fur_source::register_relation (edge, relation_kind k, tree op1, tree op2)
{
  if (m_oracle)
    m_oracle->register_relation (m_entry, k, op1, op2);
}
655 | |
656 | relation_kind |
657 | jt_fur_source::query_relation (tree op1, tree op2) |
658 | { |
659 | if (!m_oracle) |
660 | return VREL_VARYING; |
661 | |
662 | if (TREE_CODE (op1) != SSA_NAME || TREE_CODE (op2) != SSA_NAME) |
663 | return VREL_VARYING; |
664 | |
665 | return m_oracle->query_relation (m_entry, op1, op2); |
666 | } |
667 | |
// Return the range of STMT at the end of the path being analyzed.
// Returns FALSE only when the statement's type is unsupported.

bool
path_range_query::range_of_stmt (vrange &r, gimple *stmt, tree)
{
  tree type = gimple_range_type (stmt);

  if (!type || !r.supports_type_p (type))
    return false;

  // If resolving unknowns, fold the statement making use of any
  // relations along the path.
  if (m_resolve)
    {
      fold_using_range f;
      jt_fur_source src (stmt, this, &m_ranger.gori (), m_path);
      if (!f.fold_stmt (r, stmt, src))
	r.set_varying (type);
    }
  // Otherwise, fold without relations.
  else if (!fold_range (r, stmt, this))
    r.set_varying (type);

  return true;
}
693 | |
// If possible, register the relation on the incoming edge E into PHI.

void
path_range_query::maybe_register_phi_relation (gphi *phi, edge e)
{
  tree arg = gimple_phi_arg_def (phi, e->dest_idx);

  // Only SSA names can participate in relations.
  if (!gimple_range_ssa_p (arg))
    return;

  // Past a back edge, relations are no longer trustworthy.
  if (relations_may_be_invalidated (e))
    return;

  basic_block bb = gimple_bb (phi);
  tree result = gimple_phi_result (phi);

  // Avoid recording the equivalence if the arg is defined in this
  // block, as that could create an ordering problem.
  if (ssa_defined_in_bb (arg, bb))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "maybe_register_phi_relation in bb%d:", bb->index);

  // The PHI defines RESULT anew, killing any prior relations, then
  // record RESULT == ARG on entry to the path.
  get_path_oracle ()->killing_def (result);
  m_oracle->register_relation (entry_bb (), VREL_EQ, arg, result);
}
721 | |
722 | // Compute relations for each PHI in BB. For example: |
723 | // |
724 | // x_5 = PHI<y_9(5),...> |
725 | // |
726 | // If the path flows through BB5, we can register that x_5 == y_9. |
727 | |
728 | void |
729 | path_range_query::compute_phi_relations (basic_block bb, basic_block prev) |
730 | { |
731 | if (prev == NULL) |
732 | return; |
733 | |
734 | edge e_in = find_edge (prev, bb); |
735 | |
736 | for (gphi_iterator iter = gsi_start_phis (bb); !gsi_end_p (i: iter); |
737 | gsi_next (i: &iter)) |
738 | { |
739 | gphi *phi = iter.phi (); |
740 | tree result = gimple_phi_result (gs: phi); |
741 | unsigned nargs = gimple_phi_num_args (gs: phi); |
742 | |
743 | if (!exit_dependency_p (name: result)) |
744 | continue; |
745 | |
746 | for (size_t i = 0; i < nargs; ++i) |
747 | if (e_in == gimple_phi_arg_edge (phi, i)) |
748 | { |
749 | maybe_register_phi_relation (phi, e: e_in); |
750 | break; |
751 | } |
752 | } |
753 | } |
754 | |
// Compute outgoing relations from BB to NEXT.

void
path_range_query::compute_outgoing_relations (basic_block bb, basic_block next)
{
  // Only conditionals at the end of BB generate edge relations.
  if (gcond *cond = safe_dyn_cast <gcond *> (*gsi_last_bb (bb)))
    {
      int_range<2> r;
      edge e0 = EDGE_SUCC (bb, 0);
      edge e1 = EDGE_SUCC (bb, 1);

      // Determine which successor edge the path takes, and the
      // corresponding range of the conditional on that edge.
      if (e0->dest == next)
	gcond_edge_range (r, e0);
      else if (e1->dest == next)
	gcond_edge_range (r, e1);
      else
	gcc_unreachable ();

      jt_fur_source src (NULL, this, &m_ranger.gori (), m_path);
      src.register_outgoing_edges (cond, r, e0, e1);
    }
}
777 | |