/* Gimple range inference implementation.
   Copyright (C) 2022-2023 Free Software Foundation, Inc.
   Contributed by Andrew MacLeod <amacleod@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20 | |
21 | #include "config.h" |
22 | #include "system.h" |
23 | #include "coretypes.h" |
24 | #include "backend.h" |
25 | #include "insn-codes.h" |
26 | #include "tree.h" |
27 | #include "gimple.h" |
28 | #include "ssa.h" |
29 | #include "gimple-pretty-print.h" |
30 | #include "gimple-range.h" |
31 | #include "value-range-storage.h" |
32 | #include "tree-cfg.h" |
33 | #include "target.h" |
34 | #include "attribs.h" |
35 | #include "gimple-iterator.h" |
36 | #include "gimple-walk.h" |
37 | #include "cfganal.h" |
38 | #include "tree-dfa.h" |
39 | |
40 | // Adapted from infer_nonnull_range_by_dereference and check_loadstore |
41 | // to process nonnull ssa_name OP in S. DATA contains a pointer to a |
42 | // stmt range inference instance. |
43 | |
44 | static bool |
45 | non_null_loadstore (gimple *, tree op, tree, void *data) |
46 | { |
47 | if (TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF) |
48 | { |
49 | /* Some address spaces may legitimately dereference zero. */ |
50 | addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (op)); |
51 | if (!targetm.addr_space.zero_address_valid (as)) |
52 | { |
53 | tree ssa = TREE_OPERAND (op, 0); |
54 | ((gimple_infer_range *)data)->add_nonzero (name: ssa); |
55 | } |
56 | } |
57 | return false; |
58 | } |
59 | |
60 | // Process an ASSUME call to see if there are any inferred ranges available. |
61 | |
62 | void |
63 | gimple_infer_range::check_assume_func (gcall *call) |
64 | { |
65 | tree arg; |
66 | unsigned i; |
67 | tree assume_id = TREE_OPERAND (gimple_call_arg (call, 0), 0); |
68 | if (!assume_id) |
69 | return; |
70 | struct function *fun = DECL_STRUCT_FUNCTION (assume_id); |
71 | if (!fun) |
72 | return; |
73 | // Loop over arguments, matching them to the assume parameters. |
74 | for (arg = DECL_ARGUMENTS (assume_id), i = 1; |
75 | arg && i < gimple_call_num_args (gs: call); |
76 | i++, arg = DECL_CHAIN (arg)) |
77 | { |
78 | tree op = gimple_call_arg (gs: call, index: i); |
79 | tree type = TREE_TYPE (op); |
80 | if (gimple_range_ssa_p (exp: op) && Value_Range::supports_type_p (type)) |
81 | { |
82 | tree default_def = ssa_default_def (fun, arg); |
83 | if (!default_def || type != TREE_TYPE (default_def)) |
84 | continue; |
85 | // Query the global range of the default def in the assume function. |
86 | Value_Range assume_range (type); |
87 | gimple_range_global (v&: assume_range, name: default_def, f: fun); |
88 | // If there is a non-varying result, add it as an inferred range. |
89 | if (!assume_range.varying_p ()) |
90 | { |
91 | add_range (name: op, range&: assume_range); |
92 | if (dump_file) |
93 | { |
94 | print_generic_expr (dump_file, assume_id, TDF_SLIM); |
95 | fprintf (stream: dump_file, format: " assume inferred range of " ); |
96 | print_generic_expr (dump_file, op, TDF_SLIM); |
97 | fprintf (stream: dump_file, format: " (param " ); |
98 | print_generic_expr (dump_file, arg, TDF_SLIM); |
99 | fprintf (stream: dump_file, format: ") = " ); |
100 | assume_range.dump (dump_file); |
101 | fputc (c: '\n', stream: dump_file); |
102 | } |
103 | } |
104 | } |
105 | } |
106 | } |
107 | |
108 | // Add NAME and RANGE to the range inference summary. |
109 | |
110 | void |
111 | gimple_infer_range::add_range (tree name, vrange &range) |
112 | { |
113 | m_names[num_args] = name; |
114 | m_ranges[num_args] = range; |
115 | if (num_args < size_limit - 1) |
116 | num_args++; |
117 | } |
118 | |
119 | // Add a nonzero range for NAME to the range inference summary. |
120 | |
121 | void |
122 | gimple_infer_range::add_nonzero (tree name) |
123 | { |
124 | if (!gimple_range_ssa_p (exp: name)) |
125 | return; |
126 | int_range<2> nz; |
127 | nz.set_nonzero (TREE_TYPE (name)); |
128 | add_range (name, range&: nz); |
129 | } |
130 | |
131 | // Process S for range inference and fill in the summary list. |
132 | // This is the routine where new inferred ranges should be added. |
133 | |
134 | gimple_infer_range::gimple_infer_range (gimple *s) |
135 | { |
136 | num_args = 0; |
137 | |
138 | if (is_a<gphi *> (p: s)) |
139 | return; |
140 | |
141 | if (is_a<gcall *> (p: s) && flag_delete_null_pointer_checks) |
142 | { |
143 | tree fntype = gimple_call_fntype (gs: s); |
144 | bitmap nonnullargs = get_nonnull_args (fntype); |
145 | // Process any non-null arguments |
146 | if (nonnullargs) |
147 | { |
148 | for (unsigned i = 0; i < gimple_call_num_args (gs: s); i++) |
149 | { |
150 | if (bitmap_empty_p (map: nonnullargs) |
151 | || bitmap_bit_p (nonnullargs, i)) |
152 | { |
153 | tree op = gimple_call_arg (gs: s, index: i); |
154 | if (POINTER_TYPE_P (TREE_TYPE (op))) |
155 | add_nonzero (name: op); |
156 | } |
157 | } |
158 | BITMAP_FREE (nonnullargs); |
159 | } |
160 | // Fallthru and walk load/store ops now. |
161 | } |
162 | |
163 | // Check for inferred ranges from ASSUME calls. |
164 | if (is_a<gcall *> (p: s) && gimple_call_internal_p (gs: s) |
165 | && gimple_call_internal_fn (gs: s) == IFN_ASSUME) |
166 | check_assume_func (call: as_a<gcall *> (p: s)); |
167 | |
168 | // Look for possible non-null values. |
169 | if (flag_delete_null_pointer_checks && gimple_code (g: s) != GIMPLE_ASM |
170 | && !gimple_clobber_p (s)) |
171 | walk_stmt_load_store_ops (s, (void *)this, non_null_loadstore, |
172 | non_null_loadstore); |
173 | |
174 | } |
175 | |
176 | // ------------------------------------------------------------------------- |
177 | |
178 | // This class is an element in the list of inferred ranges. |
179 | |
180 | class exit_range |
181 | { |
182 | public: |
183 | tree name; |
184 | vrange_storage *range; |
185 | exit_range *next; |
186 | }; |
187 | |
188 | // If there is an element which matches SSA, return a pointer to the element. |
189 | // Otherwise return NULL. |
190 | |
191 | exit_range * |
192 | infer_range_manager::exit_range_head::find_ptr (tree ssa) |
193 | { |
194 | // Return NULL if SSA is not in this list. |
195 | if (!m_names || !bitmap_bit_p (m_names, SSA_NAME_VERSION (ssa))) |
196 | return NULL; |
197 | for (exit_range *ptr = head; ptr != NULL; ptr = ptr->next) |
198 | if (ptr->name == ssa) |
199 | return ptr; |
200 | // Should be unreachable. |
201 | gcc_unreachable (); |
202 | return NULL; |
203 | } |
204 | |
205 | // Construct a range infer manager. DO_SEARCH indicates whether an immediate |
206 | // use scan should be made the first time a name is processed. This is for |
207 | // on-demand clients who may not visit every statement and may miss uses. |
208 | |
209 | infer_range_manager::infer_range_manager (bool do_search) |
210 | { |
211 | bitmap_obstack_initialize (&m_bitmaps); |
212 | m_on_exit.create (nelems: 0); |
213 | m_on_exit.safe_grow_cleared (last_basic_block_for_fn (cfun) + 1); |
214 | // m_seen == NULL indicates no scanning. Otherwise the bit indicates a |
215 | // scan has been performed on NAME. |
216 | if (do_search) |
217 | m_seen = BITMAP_ALLOC (obstack: &m_bitmaps); |
218 | else |
219 | m_seen = NULL; |
220 | obstack_init (&m_list_obstack); |
221 | // Non-zero elements are very common, so cache them for each ssa-name. |
222 | m_nonzero.create (nelems: 0); |
223 | m_nonzero.safe_grow_cleared (num_ssa_names + 1); |
224 | m_range_allocator = new vrange_allocator; |
225 | } |
226 | |
227 | // Destruct a range infer manager. |
228 | |
229 | infer_range_manager::~infer_range_manager () |
230 | { |
231 | m_nonzero.release (); |
232 | obstack_free (&m_list_obstack, NULL); |
233 | m_on_exit.release (); |
234 | bitmap_obstack_release (&m_bitmaps); |
235 | delete m_range_allocator; |
236 | } |
237 | |
238 | // Return a non-zero range value of the appropriate type for NAME from |
239 | // the cache, creating it if necessary. |
240 | |
241 | const vrange& |
242 | infer_range_manager::get_nonzero (tree name) |
243 | { |
244 | unsigned v = SSA_NAME_VERSION (name); |
245 | if (v >= m_nonzero.length ()) |
246 | m_nonzero.safe_grow_cleared (num_ssa_names + 20); |
247 | if (!m_nonzero[v]) |
248 | { |
249 | m_nonzero[v] |
250 | = (irange *) m_range_allocator->alloc (size: sizeof (int_range <2>)); |
251 | m_nonzero[v]->set_nonzero (TREE_TYPE (name)); |
252 | } |
253 | return *(m_nonzero[v]); |
254 | } |
255 | |
256 | // Return TRUE if there are any range inferences in block BB. |
257 | |
258 | bool |
259 | infer_range_manager::has_range_p (basic_block bb) |
260 | { |
261 | if (bb->index >= (int)m_on_exit.length ()) |
262 | return false; |
263 | bitmap b = m_on_exit[bb->index].m_names; |
264 | return b && !bitmap_empty_p (map: b); |
265 | } |
266 | |
267 | // Return TRUE if NAME has a range inference in block BB. |
268 | |
269 | bool |
270 | infer_range_manager::has_range_p (tree name, basic_block bb) |
271 | { |
272 | // Check if this is an immediate use search model. |
273 | if (m_seen && !bitmap_bit_p (m_seen, SSA_NAME_VERSION (name))) |
274 | register_all_uses (name); |
275 | |
276 | if (bb->index >= (int)m_on_exit.length ()) |
277 | return false; |
278 | if (!m_on_exit[bb->index].m_names) |
279 | return false; |
280 | if (!bitmap_bit_p (m_on_exit[bb->index].m_names, SSA_NAME_VERSION (name))) |
281 | return false; |
282 | return true; |
283 | } |
284 | |
285 | // Return TRUE if NAME has a range inference in block BB, and adjust range R |
286 | // to include it. |
287 | |
288 | bool |
289 | infer_range_manager::maybe_adjust_range (vrange &r, tree name, basic_block bb) |
290 | { |
291 | if (!has_range_p (name, bb)) |
292 | return false; |
293 | exit_range *ptr = m_on_exit[bb->index].find_ptr (ssa: name); |
294 | gcc_checking_assert (ptr); |
295 | // Return true if this exit range changes R, otherwise false. |
296 | tree type = TREE_TYPE (name); |
297 | Value_Range tmp (type); |
298 | ptr->range->get_vrange (r&: tmp, type); |
299 | return r.intersect (tmp); |
300 | } |
301 | |
302 | // Add range R as an inferred range for NAME in block BB. |
303 | |
304 | void |
305 | infer_range_manager::add_range (tree name, basic_block bb, const vrange &r) |
306 | { |
307 | if (bb->index >= (int)m_on_exit.length ()) |
308 | m_on_exit.safe_grow_cleared (last_basic_block_for_fn (cfun) + 1); |
309 | |
310 | // Create the summary list bitmap if it doesn't exist. |
311 | if (!m_on_exit[bb->index].m_names) |
312 | m_on_exit[bb->index].m_names = BITMAP_ALLOC (obstack: &m_bitmaps); |
313 | |
314 | if (dump_file && (dump_flags & TDF_DETAILS)) |
315 | { |
316 | fprintf (stream: dump_file, format: " on-exit update " ); |
317 | print_generic_expr (dump_file, name, TDF_SLIM); |
318 | fprintf (stream: dump_file, format: " in BB%d : " ,bb->index); |
319 | r.dump (dump_file); |
320 | fprintf (stream: dump_file, format: "\n" ); |
321 | } |
322 | |
323 | // If NAME already has a range, intersect them and done. |
324 | exit_range *ptr = m_on_exit[bb->index].find_ptr (ssa: name); |
325 | if (ptr) |
326 | { |
327 | tree type = TREE_TYPE (name); |
328 | Value_Range cur (r), name_range (type); |
329 | ptr->range->get_vrange (r&: name_range, type); |
330 | // If no new info is added, just return. |
331 | if (!cur.intersect (r: name_range)) |
332 | return; |
333 | if (ptr->range->fits_p (r: cur)) |
334 | ptr->range->set_vrange (cur); |
335 | else |
336 | ptr->range = m_range_allocator->clone (r: cur); |
337 | return; |
338 | } |
339 | |
340 | // Otherwise create a record. |
341 | bitmap_set_bit (m_on_exit[bb->index].m_names, SSA_NAME_VERSION (name)); |
342 | ptr = (exit_range *)obstack_alloc (&m_list_obstack, sizeof (exit_range)); |
343 | ptr->range = m_range_allocator->clone (r); |
344 | ptr->name = name; |
345 | ptr->next = m_on_exit[bb->index].head; |
346 | m_on_exit[bb->index].head = ptr; |
347 | } |
348 | |
349 | // Add a non-zero inferred range for NAME in block BB. |
350 | |
351 | void |
352 | infer_range_manager::add_nonzero (tree name, basic_block bb) |
353 | { |
354 | add_range (name, bb, r: get_nonzero (name)); |
355 | } |
356 | |
357 | // Follow immediate use chains and find all inferred ranges for NAME. |
358 | |
359 | void |
360 | infer_range_manager::register_all_uses (tree name) |
361 | { |
362 | gcc_checking_assert (m_seen); |
363 | |
364 | // Check if we've already processed this name. |
365 | unsigned v = SSA_NAME_VERSION (name); |
366 | if (bitmap_bit_p (m_seen, v)) |
367 | return; |
368 | bitmap_set_bit (m_seen, v); |
369 | |
370 | use_operand_p use_p; |
371 | imm_use_iterator iter; |
372 | |
373 | // Loop over each immediate use and see if it has an inferred range. |
374 | FOR_EACH_IMM_USE_FAST (use_p, iter, name) |
375 | { |
376 | gimple *s = USE_STMT (use_p); |
377 | gimple_infer_range infer (s); |
378 | for (unsigned x = 0; x < infer.num (); x++) |
379 | { |
380 | if (name == infer.name (index: x)) |
381 | add_range (name, bb: gimple_bb (g: s), r: infer.range (index: x)); |
382 | } |
383 | } |
384 | } |
385 | |