/* Consolidation of svalues and regions.
   Copyright (C) 2020-2024 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#define INCLUDE_MEMORY
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "diagnostic-core.h"
#include "gimple-pretty-print.h"
#include "function.h"
#include "basic-block.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "graphviz.h"
#include "options.h"
#include "cgraph.h"
#include "tree-dfa.h"
#include "stringpool.h"
#include "convert.h"
#include "target.h"
#include "fold-const.h"
#include "tree-pretty-print.h"
#include "bitmap.h"
#include "analyzer/analyzer.h"
#include "analyzer/analyzer-logging.h"
#include "ordered-hash-map.h"
#include "analyzer/supergraph.h"
#include "sbitmap.h"
#include "analyzer/call-string.h"
#include "analyzer/program-point.h"
#include "analyzer/store.h"
#include "analyzer/region-model.h"
#include "analyzer/constraint-manager.h"

#if ENABLE_ANALYZER

namespace ana {

/* class region_model_manager.  */

/* region_model_manager's ctor.  */

region_model_manager::region_model_manager (logger *logger)
: m_logger (logger),
  m_next_symbol_id (0),
  m_empty_call_string (),
  m_root_region (alloc_symbol_id ()),
  m_stack_region (alloc_symbol_id (), &m_root_region),
  m_heap_region (alloc_symbol_id (), &m_root_region),
  m_unknown_NULL (NULL),
  m_checking_feasibility (false),
  m_max_complexity (0, 0),
  m_code_region (alloc_symbol_id (), &m_root_region),
  m_fndecls_map (), m_labels_map (),
  m_globals_region (alloc_symbol_id (), &m_root_region),
  m_globals_map (),
  m_thread_local_region (alloc_symbol_id (), &m_root_region),
  m_errno_region (alloc_symbol_id (), &m_thread_local_region),
  m_store_mgr (this),
  m_range_mgr (new bounded_ranges_manager ()),
  m_known_fn_mgr (logger)
{
}

/* region_model_manager's dtor.  Delete all of the managed svalues
   and regions.  */

region_model_manager::~region_model_manager ()
{
  /* Delete consolidated svalues.  */
  for (constants_map_t::iterator iter = m_constants_map.begin ();
       iter != m_constants_map.end (); ++iter)
    delete (*iter).second;
  for (unknowns_map_t::iterator iter = m_unknowns_map.begin ();
       iter != m_unknowns_map.end (); ++iter)
    delete (*iter).second;
  delete m_unknown_NULL;
  for (poisoned_values_map_t::iterator iter = m_poisoned_values_map.begin ();
       iter != m_poisoned_values_map.end (); ++iter)
    delete (*iter).second;
  for (setjmp_values_map_t::iterator iter = m_setjmp_values_map.begin ();
       iter != m_setjmp_values_map.end (); ++iter)
    delete (*iter).second;
  for (initial_values_map_t::iterator iter = m_initial_values_map.begin ();
       iter != m_initial_values_map.end (); ++iter)
    delete (*iter).second;
  for (pointer_values_map_t::iterator iter = m_pointer_values_map.begin ();
       iter != m_pointer_values_map.end (); ++iter)
    delete (*iter).second;
  for (unaryop_values_map_t::iterator iter = m_unaryop_values_map.begin ();
       iter != m_unaryop_values_map.end (); ++iter)
    delete (*iter).second;
  for (binop_values_map_t::iterator iter = m_binop_values_map.begin ();
       iter != m_binop_values_map.end (); ++iter)
    delete (*iter).second;
  for (sub_values_map_t::iterator iter = m_sub_values_map.begin ();
       iter != m_sub_values_map.end (); ++iter)
    delete (*iter).second;
  for (auto iter : m_repeated_values_map)
    delete iter.second;
  for (auto iter : m_bits_within_values_map)
    delete iter.second;
  for (unmergeable_values_map_t::iterator iter
	 = m_unmergeable_values_map.begin ();
       iter != m_unmergeable_values_map.end (); ++iter)
    delete (*iter).second;
  for (widening_values_map_t::iterator iter = m_widening_values_map.begin ();
       iter != m_widening_values_map.end (); ++iter)
    delete (*iter).second;
  for (compound_values_map_t::iterator iter = m_compound_values_map.begin ();
       iter != m_compound_values_map.end (); ++iter)
    delete (*iter).second;
  for (conjured_values_map_t::iterator iter = m_conjured_values_map.begin ();
       iter != m_conjured_values_map.end (); ++iter)
    delete (*iter).second;
  for (auto iter : m_asm_output_values_map)
    delete iter.second;
  for (auto iter : m_const_fn_result_values_map)
    delete iter.second;

  /* Delete consolidated regions.  */
  for (fndecls_map_t::iterator iter = m_fndecls_map.begin ();
       iter != m_fndecls_map.end (); ++iter)
    delete (*iter).second;
  for (labels_map_t::iterator iter = m_labels_map.begin ();
       iter != m_labels_map.end (); ++iter)
    delete (*iter).second;
  for (globals_map_t::iterator iter = m_globals_map.begin ();
       iter != m_globals_map.end (); ++iter)
    delete (*iter).second;
  for (string_map_t::iterator iter = m_string_map.begin ();
       iter != m_string_map.end (); ++iter)
    delete (*iter).second;

  delete m_range_mgr;
}

/* Return true if C exceeds the complexity limit for svalues.  */

bool
region_model_manager::too_complex_p (const complexity &c) const
{
  if (c.m_max_depth > (unsigned)param_analyzer_max_svalue_depth)
    return true;
  return false;
}

/* If SVAL exceeds the complexity limit for svalues, delete it
   and return true.
   Otherwise update m_max_complexity and return false.  */

bool
region_model_manager::reject_if_too_complex (svalue *sval)
{
  if (m_checking_feasibility)
    return false;

  const complexity &c = sval->get_complexity ();
  if (!too_complex_p (c))
    {
      if (m_max_complexity.m_num_nodes < c.m_num_nodes)
	m_max_complexity.m_num_nodes = c.m_num_nodes;
      if (m_max_complexity.m_max_depth < c.m_max_depth)
	m_max_complexity.m_max_depth = c.m_max_depth;
      return false;
    }

  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  sval->dump_to_pp (&pp, true);
  if (warning_at (input_location, OPT_Wanalyzer_symbol_too_complex,
		  "symbol too complicated: %qs",
		  pp_formatted_text (&pp)))
    inform (input_location,
	    "max_depth %i exceeds --param=analyzer-max-svalue-depth=%i",
	    c.m_max_depth, param_analyzer_max_svalue_depth);

  delete sval;
  return true;
}

/* Macro for imposing a complexity limit on svalues, for use within
   region_model_manager member functions.

   If SVAL exceeds the complexity limit, delete it and return an UNKNOWN
   value of the same type.
   Otherwise update m_max_complexity and carry on.  */

#define RETURN_UNKNOWN_IF_TOO_COMPLEX(SVAL) \
  do {							\
    svalue *sval_ = (SVAL);				\
    tree type_ = sval_->get_type ();			\
    if (reject_if_too_complex (sval_))			\
      return get_or_create_unknown_svalue (type_);	\
  } while (0)
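
/* Typical use of the above macro within the get_or_create_* members
   below (an illustrative sketch of the recurring pattern; "foo" is a
   placeholder for the svalue kind in question):

     foo_svalue *foo_sval
       = new foo_svalue (alloc_symbol_id (), type, ...);
     RETURN_UNKNOWN_IF_TOO_COMPLEX (foo_sval);
     m_foo_values_map.put (key, foo_sval);
     return foo_sval;  */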

/* svalue consolidation.  */

/* Return the svalue * for a constant_svalue for CST_EXPR,
   creating it if necessary.
   The constant_svalue instances are reused, based on pointer equality
   of trees.  */

const svalue *
region_model_manager::get_or_create_constant_svalue (tree type, tree cst_expr)
{
  gcc_assert (cst_expr);
  gcc_assert (CONSTANT_CLASS_P (cst_expr));
  gcc_assert (type == TREE_TYPE (cst_expr) || type == NULL_TREE);

  constant_svalue::key_t key (type, cst_expr);
  constant_svalue **slot = m_constants_map.get (key);
  if (slot)
    return *slot;
  constant_svalue *cst_sval
    = new constant_svalue (alloc_symbol_id (), type, cst_expr);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (cst_sval);
  m_constants_map.put (key, cst_sval);
  return cst_sval;
}

const svalue *
region_model_manager::get_or_create_constant_svalue (tree cst_expr)
{
  return get_or_create_constant_svalue (TREE_TYPE (cst_expr), cst_expr);
}

/* Return the svalue * for a constant_svalue for the INTEGER_CST
   for CST of type TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_int_cst (tree type,
					     const poly_wide_int_ref &cst)
{
  tree effective_type = type;
  if (!type)
    effective_type = ptrdiff_type_node;
  gcc_assert (INTEGRAL_TYPE_P (effective_type)
	      || POINTER_TYPE_P (effective_type));
  tree tree_cst = wide_int_to_tree (effective_type, cst);
  return get_or_create_constant_svalue (type, tree_cst);
}

/* Return the svalue * for the constant_svalue for the NULL pointer
   of POINTER_TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_null_ptr (tree pointer_type)
{
  gcc_assert (pointer_type);
  gcc_assert (POINTER_TYPE_P (pointer_type));
  return get_or_create_int_cst (pointer_type, 0);
}
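
/* For example (illustrative), for "char *" this is equivalent to
   calling get_or_create_constant_svalue on the INTEGER_CST
   "(char *)0".  */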

/* Return the svalue * for an unknown_svalue for TYPE (which can be NULL),
   creating it if necessary.
   The unknown_svalue instances are reused, based on pointer equality
   of the types.  */

const svalue *
region_model_manager::get_or_create_unknown_svalue (tree type)
{
  /* Don't create unknown values when doing feasibility testing;
     instead, create a unique svalue.  */
  if (m_checking_feasibility)
    return create_unique_svalue (type);

  /* Special-case NULL, so that the hash_map can use NULL as the
     "empty" value.  */
  if (type == NULL_TREE)
    {
      if (!m_unknown_NULL)
	m_unknown_NULL = new unknown_svalue (alloc_symbol_id (), type);
      return m_unknown_NULL;
    }

  unknown_svalue **slot = m_unknowns_map.get (type);
  if (slot)
    return *slot;
  unknown_svalue *sval = new unknown_svalue (alloc_symbol_id (), type);
  m_unknowns_map.put (type, sval);
  return sval;
}

/* Return a freshly-allocated svalue of TYPE, owned by this manager.  */

const svalue *
region_model_manager::create_unique_svalue (tree type)
{
  svalue *sval = new placeholder_svalue (alloc_symbol_id (), type, "unique");
  m_managed_dynamic_svalues.safe_push (sval);
  return sval;
}

/* Return the svalue * for the initial value of REG, creating it if
   necessary.  */

const svalue *
region_model_manager::get_or_create_initial_value (const region *reg,
						   bool check_poisoned)
{
  if (!reg->can_have_initial_svalue_p () && check_poisoned)
    return get_or_create_poisoned_svalue (POISON_KIND_UNINIT,
					  reg->get_type ());

  /* The initial value of a cast is a cast of the initial value.  */
  if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
    {
      const region *original_reg = cast_reg->get_original_region ();
      return get_or_create_cast (cast_reg->get_type (),
				 get_or_create_initial_value (original_reg));
    }

  /* Simplify:
       INIT_VAL(ELEMENT_REG(STRING_REG), CONSTANT_SVAL)
     to:
       CONSTANT_SVAL(STRING[N]).  */
  if (const element_region *element_reg = reg->dyn_cast_element_region ())
    if (tree cst_idx = element_reg->get_index ()->maybe_get_constant ())
      if (const string_region *string_reg
	    = element_reg->get_parent_region ()->dyn_cast_string_region ())
	if (tree_fits_shwi_p (cst_idx))
	  {
	    HOST_WIDE_INT idx = tree_to_shwi (cst_idx);
	    tree string_cst = string_reg->get_string_cst ();
	    if (idx >= 0 && idx <= TREE_STRING_LENGTH (string_cst))
	      {
		int ch = TREE_STRING_POINTER (string_cst)[idx];
		return get_or_create_int_cst (reg->get_type (), ch);
	      }
	  }

  /* INIT_VAL (*UNKNOWN_PTR) -> UNKNOWN_VAL.  */
  if (reg->symbolic_for_unknown_ptr_p ())
    return get_or_create_unknown_svalue (reg->get_type ());

  if (initial_svalue **slot = m_initial_values_map.get (reg))
    return *slot;
  initial_svalue *initial_sval
    = new initial_svalue (alloc_symbol_id (), reg->get_type (), reg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (initial_sval);
  m_initial_values_map.put (reg, initial_sval);
  return initial_sval;
}
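
/* For example (illustrative), with the simplification above, the
   initial value of element 1 of the string literal "foo" is folded to
   the character constant 'o', rather than to a symbolic
   initial_svalue.  */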

/* Return the svalue * for R using type TYPE, creating it if
   necessary.  */

const svalue *
region_model_manager::get_or_create_setjmp_svalue (const setjmp_record &r,
						   tree type)
{
  setjmp_svalue::key_t key (r, type);
  if (setjmp_svalue **slot = m_setjmp_values_map.get (key))
    return *slot;
  setjmp_svalue *setjmp_sval = new setjmp_svalue (r, alloc_symbol_id (), type);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (setjmp_sval);
  m_setjmp_values_map.put (key, setjmp_sval);
  return setjmp_sval;
}

/* Return the svalue * for a poisoned value of KIND and TYPE, creating it if
   necessary.  */

const svalue *
region_model_manager::get_or_create_poisoned_svalue (enum poison_kind kind,
						     tree type)
{
  poisoned_svalue::key_t key (kind, type);
  if (poisoned_svalue **slot = m_poisoned_values_map.get (key))
    return *slot;
  poisoned_svalue *poisoned_sval
    = new poisoned_svalue (kind, alloc_symbol_id (), type);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (poisoned_sval);
  m_poisoned_values_map.put (key, poisoned_sval);
  return poisoned_sval;
}

/* Return the svalue * for a pointer to POINTEE of type PTR_TYPE,
   creating it if necessary.  */

const svalue *
region_model_manager::get_ptr_svalue (tree ptr_type, const region *pointee)
{
  /* If this is a symbolic region from dereferencing a pointer, and the types
     match, then return the original pointer.  */
  if (const symbolic_region *sym_reg = pointee->dyn_cast_symbolic_region ())
    if (ptr_type == sym_reg->get_pointer ()->get_type ())
      return sym_reg->get_pointer ();

  region_svalue::key_t key (ptr_type, pointee);
  if (region_svalue **slot = m_pointer_values_map.get (key))
    return *slot;
  region_svalue *sval
    = new region_svalue (alloc_symbol_id (), ptr_type, pointee);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (sval);
  m_pointer_values_map.put (key, sval);
  return sval;
}

/* Subroutine of region_model_manager::get_or_create_unaryop.
   Attempt to fold the inputs and return a simpler svalue *.
   Otherwise, return NULL.  */

const svalue *
region_model_manager::maybe_fold_unaryop (tree type, enum tree_code op,
					  const svalue *arg)
{
  /* Ops on "unknown" are also unknown.  */
  if (arg->get_kind () == SK_UNKNOWN)
    return get_or_create_unknown_svalue (type);
  /* Likewise for "poisoned".  */
  else if (const poisoned_svalue *poisoned_sval
	     = arg->dyn_cast_poisoned_svalue ())
    return get_or_create_poisoned_svalue (poisoned_sval->get_poison_kind (),
					  type);

  gcc_assert (arg->can_have_associated_state_p ());

  switch (op)
    {
    default: break;
    case VIEW_CONVERT_EXPR:
    case NOP_EXPR:
      {
	if (!type)
	  return nullptr;

	/* Handle redundant casts.  */
	if (arg->get_type ()
	    && useless_type_conversion_p (arg->get_type (), type))
	  return arg;

	/* Fold "cast<TYPE> (cast <INNER_TYPE> (innermost_arg))"
	   to "cast<TYPE> (innermost_arg)",
	   unless INNER_TYPE is narrower than TYPE.  */
	if (const svalue *innermost_arg = arg->maybe_undo_cast ())
	  {
	    if (tree inner_type = arg->get_type ())
	      if (TYPE_SIZE (type)
		  && TYPE_SIZE (inner_type)
		  && (fold_binary (LE_EXPR, boolean_type_node,
				   TYPE_SIZE (type), TYPE_SIZE (inner_type))
		      == boolean_true_node))
		return maybe_fold_unaryop (type, op, innermost_arg);
	  }
	/* Avoid creating symbolic regions for pointer casts by
	   simplifying (T*)(&REGION) to ((T*)&REGION).  */
	if (const region_svalue *region_sval = arg->dyn_cast_region_svalue ())
	  if (POINTER_TYPE_P (type)
	      && region_sval->get_type ()
	      && POINTER_TYPE_P (region_sval->get_type ()))
	    return get_ptr_svalue (type, region_sval->get_pointee ());

	/* Casting all zeroes should give all zeroes.  */
	if (type
	    && arg->all_zeroes_p ()
	    && (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)))
	  return get_or_create_int_cst (type, 0);
      }
      break;
    case TRUTH_NOT_EXPR:
      {
	/* Invert comparisons e.g. "!(x == y)" => "x != y".  */
	if (const binop_svalue *binop = arg->dyn_cast_binop_svalue ())
	  if (TREE_CODE_CLASS (binop->get_op ()) == tcc_comparison)
	    {
	      enum tree_code inv_op
		= invert_tree_comparison (binop->get_op (),
					  HONOR_NANS (binop->get_type ()));
	      if (inv_op != ERROR_MARK)
		return get_or_create_binop (binop->get_type (), inv_op,
					    binop->get_arg0 (),
					    binop->get_arg1 ());
	    }
      }
      break;
    case NEGATE_EXPR:
      {
	/* -(-(VAL)) is VAL, for integer types.  */
	if (const unaryop_svalue *unaryop = arg->dyn_cast_unaryop_svalue ())
	  if (unaryop->get_op () == NEGATE_EXPR
	      && type == unaryop->get_type ()
	      && type
	      && INTEGRAL_TYPE_P (type))
	    return unaryop->get_arg ();
      }
      break;
    }

  /* Constants.  */
  if (tree cst = arg->maybe_get_constant ())
    if (tree result = fold_unary (op, type, cst))
      {
	if (CONSTANT_CLASS_P (result))
	  return get_or_create_constant_svalue (result);

	/* fold_unary can return casts of constants; try to handle them.  */
	if (op != NOP_EXPR
	    && type
	    && TREE_CODE (result) == NOP_EXPR
	    && CONSTANT_CLASS_P (TREE_OPERAND (result, 0)))
	  {
	    const svalue *inner_cst
	      = get_or_create_constant_svalue (TREE_OPERAND (result, 0));
	    return get_or_create_cast (type,
				       get_or_create_cast (TREE_TYPE (result),
							   inner_cst));
	  }
      }

  return NULL;
}

/* Return the svalue * for a unary operation OP on ARG with a result of
   type TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_unaryop (tree type, enum tree_code op,
					     const svalue *arg)
{
  if (const svalue *folded = maybe_fold_unaryop (type, op, arg))
    return folded;
  unaryop_svalue::key_t key (type, op, arg);
  if (unaryop_svalue **slot = m_unaryop_values_map.get (key))
    return *slot;
  unaryop_svalue *unaryop_sval
    = new unaryop_svalue (alloc_symbol_id (), type, op, arg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (unaryop_sval);
  m_unaryop_values_map.put (key, unaryop_sval);
  return unaryop_sval;
}

/* Get a tree code for a cast to DST_TYPE from SRC_TYPE.
   Use NOP_EXPR if possible (e.g. to help fold_unary convert casts
   of 0 to (T*) to simple pointer constants), but use FIX_TRUNC_EXPR
   and VIEW_CONVERT_EXPR for cases that fold_unary would otherwise crash
   on.  */

static enum tree_code
get_code_for_cast (tree dst_type, tree src_type)
{
  if (!dst_type)
    return NOP_EXPR;
  if (!src_type)
    return NOP_EXPR;

  if (SCALAR_FLOAT_TYPE_P (src_type))
    {
      if (TREE_CODE (dst_type) == INTEGER_TYPE)
	return FIX_TRUNC_EXPR;
      else
	return VIEW_CONVERT_EXPR;
    }

  return NOP_EXPR;
}
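
/* For example, a cast from "float" to "int" uses FIX_TRUNC_EXPR, a
   cast from "float" to "double" uses VIEW_CONVERT_EXPR, and
   integer-to-integer and pointer casts use NOP_EXPR.  */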

/* Return the svalue * for a cast of ARG to type TYPE, creating it
   if necessary.  */

const svalue *
region_model_manager::get_or_create_cast (tree type, const svalue *arg)
{
  /* No-op if the types are the same.  */
  if (type == arg->get_type ())
    return arg;

  /* Don't attempt to handle casts involving vector types for now.  */
  if (type)
    if (VECTOR_TYPE_P (type)
	|| (arg->get_type ()
	    && VECTOR_TYPE_P (arg->get_type ())))
      return get_or_create_unknown_svalue (type);

  enum tree_code op = get_code_for_cast (type, arg->get_type ());
  return get_or_create_unaryop (type, op, arg);
}

/* Subroutine of region_model_manager::maybe_fold_binop for handling
   (TYPE)(COMPOUND_SVAL BIT_AND_EXPR CST) that may have been generated by
   optimize_bit_field_compare, where CST is from ARG1.

   Support masking out bits from a compound_svalue for comparing a bitfield
   against a value, as generated by optimize_bit_field_compare for
   BITFIELD == VALUE.

   If COMPOUND_SVAL has a value for the appropriate bits, return it,
   shifted accordingly.
   Otherwise return NULL.  */

const svalue *
region_model_manager::
maybe_undo_optimize_bit_field_compare (tree type,
				       const compound_svalue *compound_sval,
				       tree cst,
				       const svalue *arg1)
{
  if (!type)
    return nullptr;
  if (!INTEGRAL_TYPE_P (type))
    return NULL;

  const binding_map &map = compound_sval->get_map ();
  unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (cst);
  /* If "mask" is a contiguous range of set bits, see if the
     compound_sval has a value for those bits.  */
  bit_range bits (0, 0);
  if (!bit_range::from_mask (mask, &bits))
    return NULL;

  bit_range bound_bits (bits);
  if (BYTES_BIG_ENDIAN)
    bound_bits = bit_range (BITS_PER_UNIT - bits.get_next_bit_offset (),
			    bits.m_size_in_bits);
  const concrete_binding *conc
    = get_store_manager ()->get_concrete_binding (bound_bits);
  const svalue *sval = map.get (conc);
  if (!sval)
    return NULL;

  /* We have a value;
     shift it by the correct number of bits.  */
  const svalue *lhs = get_or_create_cast (type, sval);
  HOST_WIDE_INT bit_offset = bits.get_start_bit_offset ().to_shwi ();
  const svalue *shift_sval = get_or_create_int_cst (type, bit_offset);
  const svalue *shifted_sval = get_or_create_binop (type, LSHIFT_EXPR,
						    lhs, shift_sval);
  /* Reapply the mask (needed for negative
     signed bitfields).  */
  return get_or_create_binop (type, BIT_AND_EXPR,
			      shifted_sval, arg1);
}
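
/* For example (an illustrative sketch), given:
     struct s { unsigned char a : 1; unsigned char b : 7; };
   optimize_bit_field_compare can rewrite "x.b == val" as a masked
   comparison of the containing byte.  If the compound_svalue has a
   binding for bits 1-7, the function above returns that bound value,
   shifted left by 1 and re-masked, so that the comparison can still be
   carried out symbolically.  */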

/* Subroutine of region_model_manager::get_or_create_binop.
   Attempt to fold the inputs and return a simpler svalue *.
   Otherwise, return NULL.  */

const svalue *
region_model_manager::maybe_fold_binop (tree type, enum tree_code op,
					const svalue *arg0,
					const svalue *arg1)
{
  tree cst0 = arg0->maybe_get_constant ();
  tree cst1 = arg1->maybe_get_constant ();
  /* (CST OP CST).  */
  if (cst0 && cst1)
    {
      if (type)
	{
	  if (tree result = fold_binary (op, type, cst0, cst1))
	    if (CONSTANT_CLASS_P (result))
	      return get_or_create_constant_svalue (result);
	}
      else
	{
	  if (tree result = int_const_binop (op, cst0, cst1, -1))
	    return get_or_create_constant_svalue (NULL_TREE, result);
	}
    }

  if ((type && FLOAT_TYPE_P (type))
      || (arg0->get_type () && FLOAT_TYPE_P (arg0->get_type ()))
      || (arg1->get_type () && FLOAT_TYPE_P (arg1->get_type ())))
    return NULL;

  switch (op)
    {
    default:
      break;
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      /* (VAL + 0) -> VAL.  */
      if (cst1 && zerop (cst1))
	return get_or_create_cast (type, arg0);
      break;
    case MINUS_EXPR:
      /* (VAL - 0) -> VAL.  */
      if (cst1 && zerop (cst1))
	return get_or_create_cast (type, arg0);
      /* (0 - VAL) -> -VAL.  */
      if (cst0 && zerop (cst0))
	return get_or_create_unaryop (type, NEGATE_EXPR, arg1);
      /* (X + Y) - X -> Y.  */
      if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
	if (binop->get_op () == PLUS_EXPR)
	  if (binop->get_arg0 () == arg1)
	    return get_or_create_cast (type, binop->get_arg1 ());
      break;
    case MULT_EXPR:
      /* (VAL * 0) -> 0.  */
      if (cst1
	  && zerop (cst1)
	  && (type == NULL_TREE || INTEGRAL_TYPE_P (type)))
	return get_or_create_int_cst (type, 0);
      /* (VAL * 1) -> VAL.  */
      if (cst1 && integer_onep (cst1))
	return get_or_create_cast (type, arg0);
      break;
    case BIT_AND_EXPR:
      if (cst1)
	{
	  if (zerop (cst1)
	      && (type == NULL_TREE || INTEGRAL_TYPE_P (type)))
	    /* "(ARG0 & 0)" -> "0".  */
	    return get_or_create_int_cst (type, 0);

	  if (const compound_svalue *compound_sval
		= arg0->dyn_cast_compound_svalue ())
	    if (const svalue *sval
		  = maybe_undo_optimize_bit_field_compare (type,
							   compound_sval,
							   cst1, arg1))
	      return sval;
	}
      if (arg0->get_type () == boolean_type_node
	  && arg1->get_type () == boolean_type_node)
	{
	  /* If both operands are _Bool, then... */
	  /* ..."(1 & x) -> x".  */
	  if (cst0 && !zerop (cst0))
	    return get_or_create_cast (type, arg1);
	  /* ..."(x & 1) -> x".  */
	  if (cst1 && !zerop (cst1))
	    return get_or_create_cast (type, arg0);
	  /* ..."(0 & x) -> 0".  */
	  if (cst0 && zerop (cst0))
	    return get_or_create_int_cst (type, 0);
	  /* ..."(x & 0) -> 0".  */
	  if (cst1 && zerop (cst1))
	    return get_or_create_int_cst (type, 0);
	}
      break;
    case BIT_IOR_EXPR:
      if (arg0->get_type () == boolean_type_node
	  && arg1->get_type () == boolean_type_node)
	{
	  /* If both operands are _Bool, then... */
	  /* ..."(1 | x) -> 1".  */
	  if (cst0 && !zerop (cst0))
	    return get_or_create_int_cst (type, 1);
	  /* ..."(x | 1) -> 1".  */
	  if (cst1 && !zerop (cst1))
	    return get_or_create_int_cst (type, 1);
	  /* ..."(0 | x) -> x".  */
	  if (cst0 && zerop (cst0))
	    return get_or_create_cast (type, arg1);
	  /* ..."(x | 0) -> x".  */
	  if (cst1 && zerop (cst1))
	    return get_or_create_cast (type, arg0);
	}
      break;
    case TRUTH_ANDIF_EXPR:
    case TRUTH_AND_EXPR:
      if (cst1)
	{
	  if (zerop (cst1) && INTEGRAL_TYPE_P (type))
	    /* "(ARG0 && 0)" -> "0".  */
	    return get_or_create_constant_svalue (build_int_cst (type, 0));
	  else
	    /* "(ARG0 && nonzero-cst)" -> "ARG0".  */
	    return get_or_create_cast (type, arg0);
	}
      break;
    case TRUTH_ORIF_EXPR:
    case TRUTH_OR_EXPR:
      if (cst1)
	{
	  if (zerop (cst1))
	    /* "(ARG0 || 0)" -> "ARG0".  */
	    return get_or_create_cast (type, arg0);
	  else
	    /* "(ARG0 || nonzero-cst)" -> "nonzero-cst".  */
	    return get_or_create_cast (type, arg1);
	}
      break;
    }

  /* For associative ops, fold "((X op CST_A) op CST_B)" to
     "X op (CST_A op CST_B)".  */
  if (cst1 && associative_tree_code (op))
    if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
      if (binop->get_op () == op
	  && binop->get_arg1 ()->maybe_get_constant ())
	return get_or_create_binop
	  (type, op, binop->get_arg0 (),
	   get_or_create_binop (type, op,
				binop->get_arg1 (), arg1));

  /* associative_tree_code is false for POINTER_PLUS_EXPR, but we
     can fold:
       "((PTR ptr+ CST_A) ptr+ CST_B)" to "PTR ptr+ (CST_A ptr+ CST_B)"
     e.g. in data-model-1.c: test_4c.  */
  if (cst1 && op == POINTER_PLUS_EXPR)
    if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
      if (binop->get_op () == POINTER_PLUS_EXPR)
	if (binop->get_arg1 ()->maybe_get_constant ())
	  return get_or_create_binop
	    (type, op, binop->get_arg0 (),
	     get_or_create_binop (size_type_node, op,
				  binop->get_arg1 (), arg1));

  /* Distribute multiplication by a constant through addition/subtraction:
     (X + Y) * CST => (X * CST) + (Y * CST).  */
  if (cst1 && op == MULT_EXPR)
    if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
      if (binop->get_op () == PLUS_EXPR
	  || binop->get_op () == MINUS_EXPR)
	{
	  return get_or_create_binop
	    (type, binop->get_op (),
	     get_or_create_binop (type, op,
				  binop->get_arg0 (), arg1),
	     get_or_create_binop (type, op,
				  binop->get_arg1 (), arg1));
	}

  /* Typeless operations, assumed to be effectively arbitrary sized
     integers following normal arithmetic rules.  */
  if (!type)
    switch (op)
      {
      default:
	break;
      case MINUS_EXPR:
	{
	  /* (X - X) -> 0.  */
	  if (arg0 == arg1)
	    return get_or_create_int_cst (type, 0);

	  /* (X + A) - (X + B) -> (A - B).  */
	  if (const binop_svalue *binop0 = arg0->dyn_cast_binop_svalue ())
	    if (const binop_svalue *binop1 = arg1->dyn_cast_binop_svalue ())
	      if (binop0->get_op () == PLUS_EXPR
		  && binop1->get_op () == PLUS_EXPR
		  && binop0->get_arg0 () == binop1->get_arg0 ())
		return get_or_create_binop (NULL_TREE, op,
					    binop0->get_arg1 (),
					    binop1->get_arg1 ());
	}
	break;

      case EXACT_DIV_EXPR:
	{
	  if (const unaryop_svalue *unaryop0 = arg0->dyn_cast_unaryop_svalue ())
	    {
	      if (unaryop0->get_op () == NOP_EXPR)
		if (const svalue *sval = maybe_fold_binop (NULL_TREE, op,
							   unaryop0->get_arg (),
							   arg1))
		  return sval;
	    }
	  if (const binop_svalue *binop0 = arg0->dyn_cast_binop_svalue ())
	    {
	      switch (binop0->get_op ())
		{
		default:
		  break;

		case PLUS_EXPR:
		case MINUS_EXPR:
		  /* (A op B) / C -> (A / C) op (B / C).  */
		  {
		    if (const svalue *op_on_a
			  = maybe_fold_binop (NULL_TREE, op,
					      binop0->get_arg0 (), arg1))
		      if (const svalue *op_on_b
			    = maybe_fold_binop (NULL_TREE, op,
						binop0->get_arg1 (), arg1))
			return get_or_create_binop (NULL_TREE,
						    binop0->get_op (),
						    op_on_a, op_on_b);
		  }
		  break;

		case MULT_EXPR:
		  /* (A * B) / C -> A * (B / C) if C is a divisor of B.
		     In particular, this should also handle the case
		     (A * B) / B -> A.  */
		  if (const svalue *b_div_c
			= maybe_fold_binop (NULL_TREE, op,
					    binop0->get_arg1 (), arg1))
		    return get_or_create_binop (NULL_TREE, binop0->get_op (),
						binop0->get_arg0 (), b_div_c);
		}
	    }
	}
	break;
      }

  /* etc.  */

  return NULL;
}
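
/* Examples of the folds above (illustrative):
   - "(x + 1) + 2" becomes "x + 3" via the associativity rule;
   - "(p ptr+ 4) ptr+ 8" becomes "p ptr+ 12";
   - in the typeless case, "x - x" becomes the constant 0.  */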

/* Return the svalue * for a binary operation OP on ARG0 and ARG1
   with a result of type TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_binop (tree type, enum tree_code op,
					   const svalue *arg0,
					   const svalue *arg1)
{
  /* For commutative ops, put any constant on the RHS.  */
  if (arg0->maybe_get_constant () && commutative_tree_code (op))
    std::swap (arg0, arg1);

  if (const svalue *folded = maybe_fold_binop (type, op, arg0, arg1))
    return folded;

  /* Ops on "unknown"/"poisoned" are unknown (unless we were able to fold
     it via an identity in maybe_fold_binop).  */
  if (!arg0->can_have_associated_state_p ()
      || !arg1->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  binop_svalue::key_t key (type, op, arg0, arg1);
  if (binop_svalue **slot = m_binop_values_map.get (key))
    return *slot;
  binop_svalue *binop_sval
    = new binop_svalue (alloc_symbol_id (), type, op, arg0, arg1);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (binop_sval);
  m_binop_values_map.put (key, binop_sval);
  return binop_sval;
}

/* Subroutine of region_model_manager::get_or_create_sub_svalue.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::maybe_fold_sub_svalue (tree type,
					     const svalue *parent_svalue,
					     const region *subregion)
{
  /* Subvalues of "unknown"/"poisoned" are unknown.  */
  if (!parent_svalue->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  /* If we have a subregion of a zero-fill, it's zero.  */
  if (const unaryop_svalue *unary
	= parent_svalue->dyn_cast_unaryop_svalue ())
    {
      if (unary->get_op () == NOP_EXPR
	  || unary->get_op () == VIEW_CONVERT_EXPR)
	if (tree cst = unary->get_arg ()->maybe_get_constant ())
	  if (zerop (cst) && type)
	    {
	      const svalue *cst_sval
		= get_or_create_constant_svalue (cst);
	      return get_or_create_cast (type, cst_sval);
	    }
    }

  /* Handle getting individual chars from a STRING_CST.  */
  if (tree cst = parent_svalue->maybe_get_constant ())
    if (TREE_CODE (cst) == STRING_CST)
      {
	/* If we have a concrete 1-byte access within the parent region...  */
	byte_range subregion_bytes (0, 0);
	if (subregion->get_relative_concrete_byte_range (&subregion_bytes)
	    && subregion_bytes.m_size_in_bytes == 1
	    && type)
	  {
	    /* ...then attempt to get that char from the STRING_CST.  */
	    HOST_WIDE_INT hwi_start_byte
	      = subregion_bytes.m_start_byte_offset.to_shwi ();
	    tree cst_idx
	      = build_int_cst_type (size_type_node, hwi_start_byte);
	    if (const svalue *char_sval
		  = maybe_get_char_from_string_cst (cst, cst_idx))
	      return get_or_create_cast (type, char_sval);
	  }
      }

  if (const initial_svalue *init_sval
	= parent_svalue->dyn_cast_initial_svalue ())
    {
      /* SUB(INIT(r)).FIELD -> INIT(r.FIELD)
	 i.e.
	 Subvalue(InitialValue(R1), FieldRegion(R2, F))
	 -> InitialValue(FieldRegion(R1, F)).  */
      if (const field_region *field_reg = subregion->dyn_cast_field_region ())
	{
	  const region *field_reg_new
	    = get_field_region (init_sval->get_region (),
				field_reg->get_field ());
	  return get_or_create_initial_value (field_reg_new);
	}
      /* SUB(INIT(r))[ELEMENT] -> INIT(r[ELEMENT])
	 i.e.
	 Subvalue(InitialValue(R1), ElementRegion(R2, IDX))
	 -> InitialValue(ElementRegion(R1, IDX)).  */
      if (const element_region *element_reg = subregion->dyn_cast_element_region ())
	{
	  const region *element_reg_new
	    = get_element_region (init_sval->get_region (),
				  element_reg->get_type (),
				  element_reg->get_index ());
	  return get_or_create_initial_value (element_reg_new);
	}
    }

  if (const repeated_svalue *repeated_sval
	= parent_svalue->dyn_cast_repeated_svalue ())
    if (type)
      return get_or_create_cast (type, repeated_sval->get_inner_svalue ());

  return NULL;
}

/* Return the svalue * for extracting a subvalue of type TYPE from
   PARENT_SVALUE based on SUBREGION, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_sub_svalue (tree type,
						const svalue *parent_svalue,
						const region *subregion)
{
  if (const svalue *folded
	= maybe_fold_sub_svalue (type, parent_svalue, subregion))
    return folded;

  sub_svalue::key_t key (type, parent_svalue, subregion);
  if (sub_svalue **slot = m_sub_values_map.get (key))
    return *slot;
  sub_svalue *sub_sval
    = new sub_svalue (alloc_symbol_id (), type, parent_svalue, subregion);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (sub_sval);
  m_sub_values_map.put (key, sub_sval);
  return sub_sval;
}

/* Subroutine of region_model_manager::get_or_create_repeated_svalue.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::maybe_fold_repeated_svalue (tree type,
						  const svalue *outer_size,
						  const svalue *inner_svalue)
{
  /* Repeated "unknown"/"poisoned" is unknown.  */
  if (!outer_size->can_have_associated_state_p ()
      || !inner_svalue->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  /* If INNER_SVALUE is the same size as OUTER_SIZE,
     turn into simply a cast.  */
  if (tree cst_outer_num_bytes = outer_size->maybe_get_constant ())
    {
      HOST_WIDE_INT num_bytes_inner_svalue
	= int_size_in_bytes (inner_svalue->get_type ());
      if (num_bytes_inner_svalue != -1)
	if (num_bytes_inner_svalue
	      == (HOST_WIDE_INT)tree_to_uhwi (cst_outer_num_bytes))
	  {
	    if (type)
	      return get_or_create_cast (type, inner_svalue);
	    else
	      return inner_svalue;
	  }
    }

  /* Handle zero-fill of a specific type.  */
  if (tree cst = inner_svalue->maybe_get_constant ())
    if (zerop (cst) && type)
      return get_or_create_cast (type, inner_svalue);

  return NULL;
}
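
/* For example (illustrative), repeating the 1-byte constant 0 to fill
   a 4-byte "int" is folded above to the "int" constant 0 via the
   zero-fill case.  */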

/* Return the svalue * of type TYPE in which INNER_SVALUE is repeated
   enough times to be of size OUTER_SIZE, creating it if necessary.
   e.g. for filling buffers with a constant value.  */

const svalue *
region_model_manager::get_or_create_repeated_svalue (tree type,
						     const svalue *outer_size,
						     const svalue *inner_svalue)
{
  if (const svalue *folded
	= maybe_fold_repeated_svalue (type, outer_size, inner_svalue))
    return folded;

  repeated_svalue::key_t key (type, outer_size, inner_svalue);
  if (repeated_svalue **slot = m_repeated_values_map.get (key))
    return *slot;
  repeated_svalue *repeated_sval
    = new repeated_svalue (alloc_symbol_id (), type, outer_size, inner_svalue);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (repeated_sval);
  m_repeated_values_map.put (key, repeated_sval);
  return repeated_sval;
}

/* Attempt to get the bit_range for FIELD within a RECORD_TYPE.
   Return true and write the result to OUT if successful.
   Return false otherwise.  */

static bool
get_bit_range_for_field (tree field, bit_range *out)
{
  bit_size_t bit_size;
  if (!int_size_in_bits (TREE_TYPE (field), &bit_size))
    return false;
  int field_bit_offset = int_bit_position (field);
  *out = bit_range (field_bit_offset, bit_size);
  return true;
}
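
/* For example, given "struct foo { uint32 a; uint32 b; };", field "b"
   would have the bit_range (32, 32), assuming 8-bit units and no
   padding.  */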

/* Attempt to get the byte_range for FIELD within a RECORD_TYPE.
   Return true and write the result to OUT if successful.
   Return false otherwise.  */

static bool
get_byte_range_for_field (tree field, byte_range *out)
{
  bit_range field_bits (0, 0);
  if (!get_bit_range_for_field (field, &field_bits))
    return false;
  return field_bits.as_byte_range (out);
}

/* Attempt to determine if there is a specific field within RECORD_TYPE
   at BYTES.  If so, return it, and write the location of BYTES relative
   to the field to *OUT_RANGE_WITHIN_FIELD.
   Otherwise, return NULL_TREE.
   For example, given:
     struct foo { uint32 a; uint32 b; };
   and
     bytes = {bytes 6-7} (of foo)
   we have bytes 2-3 of field b.  */

static tree
get_field_at_byte_range (tree record_type, const byte_range &bytes,
			 byte_range *out_range_within_field)
{
  bit_offset_t bit_offset = bytes.m_start_byte_offset * BITS_PER_UNIT;

  tree field = get_field_at_bit_offset (record_type, bit_offset);
  if (!field)
    return NULL_TREE;

  byte_range field_bytes (0, 0);
  if (!get_byte_range_for_field (field, &field_bytes))
    return NULL_TREE;

  /* Is BYTES fully within field_bytes?  */
  byte_range bytes_within_field (0, 0);
  if (!field_bytes.contains_p (bytes, &bytes_within_field))
    return NULL_TREE;

  *out_range_within_field = bytes_within_field;
  return field;
}

/* Subroutine of region_model_manager::get_or_create_bits_within.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::maybe_fold_bits_within_svalue (tree type,
						     const bit_range &bits,
						     const svalue *inner_svalue)
{
  tree inner_type = inner_svalue->get_type ();
  /* Fold:
       BITS_WITHIN (0, sizeof (VAL), VAL)
     to:
       CAST(TYPE, VAL).  */
  if (bits.m_start_bit_offset == 0 && inner_type)
    {
      bit_size_t inner_type_size;
      if (int_size_in_bits (inner_type, &inner_type_size))
	if (inner_type_size == bits.m_size_in_bits)
	  {
	    if (type)
	      return get_or_create_cast (type, inner_svalue);
	    else
	      return inner_svalue;
	  }
    }

  /* Kind-specific folding.  */
  if (const svalue *sval
	= inner_svalue->maybe_fold_bits_within (type, bits, this))
    return sval;

  byte_range bytes (0, 0);
  if (bits.as_byte_range (&bytes) && inner_type)
    switch (TREE_CODE (inner_type))
      {
      default:
	break;
      case ARRAY_TYPE:
	{
	  /* Fold:
	       BITS_WITHIN (range, KIND(REG))
	     to:
	       BITS_WITHIN (range - offsetof(ELEMENT), KIND(REG.ELEMENT))
	     if RANGE is a byte-range fully within one ELEMENT.  */
	  tree element_type = TREE_TYPE (inner_type);
	  HOST_WIDE_INT element_byte_size
	    = int_size_in_bytes (element_type);
	  if (element_byte_size > 0)
	    {
	      HOST_WIDE_INT start_idx
		= (bytes.get_start_byte_offset ().to_shwi ()
		   / element_byte_size);
	      HOST_WIDE_INT last_idx
		= (bytes.get_last_byte_offset ().to_shwi ()
		   / element_byte_size);
	      if (start_idx == last_idx)
		{
		  if (const initial_svalue *initial_sval
			= inner_svalue->dyn_cast_initial_svalue ())
		    {
		      bit_offset_t start_of_element
			= start_idx * element_byte_size * BITS_PER_UNIT;
		      bit_range bits_within_element
			(bits.m_start_bit_offset - start_of_element,
			 bits.m_size_in_bits);
		      const svalue *idx_sval
			= get_or_create_int_cst (integer_type_node, start_idx);
		      const region *element_reg =
			get_element_region (initial_sval->get_region (),
					    element_type, idx_sval);
		      const svalue *element_reg_sval
			= get_or_create_initial_value (element_reg);
		      return get_or_create_bits_within (type,
							bits_within_element,
							element_reg_sval);
		    }
		}
	    }
	}
	break;
      case RECORD_TYPE:
	{
	  /* Fold:
	       BITS_WITHIN (range, KIND(REG))
	     to:
	       BITS_WITHIN (range - offsetof(FIELD), KIND(REG.FIELD))
	     if RANGE is fully within FIELD.  */
	  byte_range bytes_within_field (0, 0);
	  if (tree field = get_field_at_byte_range (inner_type, bytes,
						    &bytes_within_field))
	    {
	      if (const initial_svalue *initial_sval
		    = inner_svalue->dyn_cast_initial_svalue ())
		{
		  const region *field_reg =
		    get_field_region (initial_sval->get_region (), field);
		  const svalue *initial_reg_sval
		    = get_or_create_initial_value (field_reg);
		  return get_or_create_bits_within
		    (type,
		     bytes_within_field.as_bit_range (),
		     initial_reg_sval);
		}
	    }
	}
	break;
      }
  return NULL;
}

/* Return the svalue * of type TYPE for extracting BITS from INNER_SVALUE,
   creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_bits_within (tree type,
						 const bit_range &bits,
						 const svalue *inner_svalue)
{
  if (const svalue *folded
	= maybe_fold_bits_within_svalue (type, bits, inner_svalue))
    return folded;

  bits_within_svalue::key_t key (type, bits, inner_svalue);
  if (bits_within_svalue **slot = m_bits_within_values_map.get (key))
    return *slot;
  bits_within_svalue *bits_within_sval
    = new bits_within_svalue (alloc_symbol_id (), type, bits, inner_svalue);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (bits_within_sval);
  m_bits_within_values_map.put (key, bits_within_sval);
  return bits_within_sval;
}

/* Return the svalue * that decorates ARG as being unmergeable,
   creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_unmergeable (const svalue *arg)
{
  if (arg->get_kind () == SK_UNMERGEABLE)
    return arg;

  if (unmergeable_svalue **slot = m_unmergeable_values_map.get (arg))
    return *slot;
  unmergeable_svalue *unmergeable_sval
    = new unmergeable_svalue (alloc_symbol_id (), arg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (unmergeable_sval);
  m_unmergeable_values_map.put (arg, unmergeable_sval);
  return unmergeable_sval;
}

/* Return the svalue * of type TYPE for the merger of value BASE_SVAL
   and ITER_SVAL at POINT, creating it if necessary.  */

const svalue *
region_model_manager::
get_or_create_widening_svalue (tree type,
			       const function_point &point,
			       const svalue *base_sval,
			       const svalue *iter_sval)
{
  gcc_assert (base_sval->get_kind () != SK_WIDENING);
  gcc_assert (iter_sval->get_kind () != SK_WIDENING);
  widening_svalue::key_t key (type, point, base_sval, iter_sval);
  if (widening_svalue **slot = m_widening_values_map.get (key))
    return *slot;
  widening_svalue *widening_sval
    = new widening_svalue (alloc_symbol_id (), type, point, base_sval,
			   iter_sval);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (widening_sval);
  m_widening_values_map.put (key, widening_sval);
  return widening_sval;
}

/* Return the svalue * of type TYPE for the compound values in MAP,
   creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_compound_svalue (tree type,
						     const binding_map &map)
{
  compound_svalue::key_t tmp_key (type, &map);
  if (compound_svalue **slot = m_compound_values_map.get (tmp_key))
    return *slot;
  compound_svalue *compound_sval
    = new compound_svalue (alloc_symbol_id (), type, map);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (compound_sval);
  /* Use make_key rather than reusing the key, so that we use a
     ptr to compound_sval's binding_map, rather than the MAP param.  */
  m_compound_values_map.put (compound_sval->make_key (), compound_sval);
  return compound_sval;
}

/* class conjured_purge.  */

/* Purge state relating to SVAL.  */

void
conjured_purge::purge (const conjured_svalue *sval) const
{
  m_model->purge_state_involving (sval, m_ctxt);
}

/* Return the svalue * of type TYPE for the value conjured for ID_REG
   at STMT (using IDX for any further disambiguation),
   creating it if necessary.
   Use P to purge existing state from the svalue, for the case where a
   conjured_svalue would be reused along an execution path.  */

const svalue *
region_model_manager::get_or_create_conjured_svalue (tree type,
						     const gimple *stmt,
						     const region *id_reg,
						     const conjured_purge &p,
						     unsigned idx)
{
  conjured_svalue::key_t key (type, stmt, id_reg, idx);
  if (conjured_svalue **slot = m_conjured_values_map.get (key))
    {
      const conjured_svalue *sval = *slot;
      /* We're reusing an existing conjured_svalue, perhaps from a different
	 state within this analysis, or perhaps from an earlier state on this
	 execution path.  For the latter, purge any state involving the "new"
	 svalue from the current program_state.  */
      p.purge (sval);
      return sval;
    }
  conjured_svalue *conjured_sval
    = new conjured_svalue (alloc_symbol_id (), type, stmt, id_reg, idx);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (conjured_sval);
  m_conjured_values_map.put (key, conjured_sval);
  return conjured_sval;
}

/* Subroutine of region_model_manager::get_or_create_asm_output_svalue.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::
maybe_fold_asm_output_svalue (tree type,
			      const vec<const svalue *> &inputs)
{
  /* Unknown inputs should lead to unknown results.  */
  for (const auto &iter : inputs)
    if (iter->get_kind () == SK_UNKNOWN)
      return get_or_create_unknown_svalue (type);

  return NULL;
}

/* Return the svalue * of type TYPE for OUTPUT_IDX of the deterministic
   asm stmt ASM_STMT, given INPUTS as inputs.  */

const svalue *
region_model_manager::
get_or_create_asm_output_svalue (tree type,
				 const gasm *asm_stmt,
				 unsigned output_idx,
				 const vec<const svalue *> &inputs)
{
  gcc_assert (inputs.length () <= asm_output_svalue::MAX_INPUTS);

  if (const svalue *folded
	= maybe_fold_asm_output_svalue (type, inputs))
    return folded;

  const char *asm_string = gimple_asm_string (asm_stmt);
  const unsigned noutputs = gimple_asm_noutputs (asm_stmt);

  asm_output_svalue::key_t key (type, asm_string, output_idx, inputs);
  if (asm_output_svalue **slot = m_asm_output_values_map.get (key))
    return *slot;
  asm_output_svalue *asm_output_sval
    = new asm_output_svalue (alloc_symbol_id (), type, asm_string, output_idx,
			     noutputs, inputs);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (asm_output_sval);
  m_asm_output_values_map.put (key, asm_output_sval);
  return asm_output_sval;
}

/* Return the svalue * of type TYPE for OUTPUT_IDX of a deterministic
   asm stmt with string ASM_STRING with NUM_OUTPUTS outputs, given
   INPUTS as inputs.  */

const svalue *
region_model_manager::
get_or_create_asm_output_svalue (tree type,
				 const char *asm_string,
				 unsigned output_idx,
				 unsigned num_outputs,
				 const vec<const svalue *> &inputs)
{
  gcc_assert (inputs.length () <= asm_output_svalue::MAX_INPUTS);

  if (const svalue *folded
	= maybe_fold_asm_output_svalue (type, inputs))
    return folded;

  asm_output_svalue::key_t key (type, asm_string, output_idx, inputs);
  if (asm_output_svalue **slot = m_asm_output_values_map.get (key))
    return *slot;
  asm_output_svalue *asm_output_sval
    = new asm_output_svalue (alloc_symbol_id (), type, asm_string, output_idx,
			     num_outputs, inputs);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (asm_output_sval);
  m_asm_output_values_map.put (key, asm_output_sval);
  return asm_output_sval;
}
1483 | |
1484 | /* Return the svalue * of type TYPE for the result of a call to FNDECL |
1485 | with __attribute__((const)), given INPUTS as inputs. */ |
1486 | |
1487 | const svalue * |
1488 | region_model_manager:: |
1489 | get_or_create_const_fn_result_svalue (tree type, |
1490 | tree fndecl, |
1491 | const vec<const svalue *> &inputs) |
1492 | { |
1493 | gcc_assert (fndecl); |
1494 | gcc_assert (DECL_P (fndecl)); |
1495 | gcc_assert (TREE_READONLY (fndecl)); |
1496 | gcc_assert (inputs.length () <= const_fn_result_svalue::MAX_INPUTS); |
1497 | |
1498 | const_fn_result_svalue::key_t key (type, fndecl, inputs); |
1499 | if (const_fn_result_svalue **slot = m_const_fn_result_values_map.get (k: key)) |
1500 | return *slot; |
1501 | const_fn_result_svalue *const_fn_result_sval |
1502 | = new const_fn_result_svalue (alloc_symbol_id (), type, fndecl, inputs); |
1503 | RETURN_UNKNOWN_IF_TOO_COMPLEX (const_fn_result_sval); |
  m_const_fn_result_values_map.put (key, const_fn_result_sval);
1505 | return const_fn_result_sval; |
1506 | } |
1507 | |
1508 | /* Get a tree for the size of STRING_CST, or NULL_TREE. |
1509 | Note that this may be larger than TREE_STRING_LENGTH (implying |
1510 | a run of trailing zero bytes from TREE_STRING_LENGTH up to this |
1511 | higher limit). */ |
1512 | |
1513 | tree |
1514 | get_string_cst_size (const_tree string_cst) |
1515 | { |
1516 | gcc_assert (TREE_CODE (string_cst) == STRING_CST); |
1517 | gcc_assert (TREE_CODE (TREE_TYPE (string_cst)) == ARRAY_TYPE); |
1518 | |
1519 | return TYPE_SIZE_UNIT (TREE_TYPE (string_cst)); |
1520 | } |
1521 | |
/* Given STRING_CST, a STRING_CST, and BYTE_OFFSET_CST, a constant
   byte offset, attempt to get the character at that offset, returning
1524 | the svalue for the character constant, or NULL if unsuccessful. */ |
1525 | |
1526 | const svalue * |
1527 | region_model_manager::maybe_get_char_from_string_cst (tree string_cst, |
1528 | tree byte_offset_cst) |
1529 | { |
1530 | gcc_assert (TREE_CODE (string_cst) == STRING_CST); |
1531 | |
1532 | /* Adapted from fold_read_from_constant_string. */ |
1533 | scalar_int_mode char_mode; |
1534 | if (TREE_CODE (byte_offset_cst) == INTEGER_CST |
1535 | && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string_cst))), |
		      &char_mode)
      && GET_MODE_SIZE (char_mode) == 1)
    {
      /* If we're beyond the string_cst, the read is unsuccessful.  */
      if (compare_constants (byte_offset_cst,
			     GE_EXPR,
			     get_string_cst_size (string_cst)).is_true ())
1543 | return NULL; |
1544 | |
1545 | int char_val; |
1546 | if (compare_tree_int (byte_offset_cst, |
1547 | TREE_STRING_LENGTH (string_cst)) < 0) |
1548 | /* We're within the area defined by TREE_STRING_POINTER. */ |
1549 | char_val = (TREE_STRING_POINTER (string_cst) |
1550 | [TREE_INT_CST_LOW (byte_offset_cst)]); |
1551 | else |
1552 | /* We're in the padding area of trailing zeroes. */ |
1553 | char_val = 0; |
1554 | tree char_cst |
1555 | = build_int_cst_type (TREE_TYPE (TREE_TYPE (string_cst)), char_val); |
      return get_or_create_constant_svalue (char_cst);
1557 | } |
1558 | return NULL; |
1559 | } |
1560 | |
1561 | /* region consolidation. */ |
1562 | |
1563 | /* Return the region for FNDECL, creating it if necessary. */ |
1564 | |
1565 | const function_region * |
1566 | region_model_manager::get_region_for_fndecl (tree fndecl) |
1567 | { |
1568 | gcc_assert (TREE_CODE (fndecl) == FUNCTION_DECL); |
1569 | |
  function_region **slot = m_fndecls_map.get (fndecl);
1571 | if (slot) |
1572 | return *slot; |
1573 | function_region *reg |
1574 | = new function_region (alloc_symbol_id (), &m_code_region, fndecl); |
  m_fndecls_map.put (fndecl, reg);
1576 | return reg; |
1577 | } |
1578 | |
1579 | /* Return the region for LABEL, creating it if necessary. */ |
1580 | |
1581 | const label_region * |
1582 | region_model_manager::get_region_for_label (tree label) |
1583 | { |
1584 | gcc_assert (TREE_CODE (label) == LABEL_DECL); |
1585 | |
  label_region **slot = m_labels_map.get (label);
1587 | if (slot) |
1588 | return *slot; |
1589 | |
1590 | tree fndecl = DECL_CONTEXT (label); |
1591 | gcc_assert (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL); |
1592 | |
1593 | const function_region *func_reg = get_region_for_fndecl (fndecl); |
1594 | label_region *reg |
1595 | = new label_region (alloc_symbol_id (), func_reg, label); |
  m_labels_map.put (label, reg);
1597 | return reg; |
1598 | } |
1599 | |
1600 | /* Return the region for EXPR, creating it if necessary. */ |
1601 | |
1602 | const decl_region * |
1603 | region_model_manager::get_region_for_global (tree expr) |
1604 | { |
1605 | gcc_assert (VAR_P (expr)); |
1606 | |
  decl_region **slot = m_globals_map.get (expr);
1608 | if (slot) |
1609 | return *slot; |
1610 | decl_region *reg |
1611 | = new decl_region (alloc_symbol_id (), &m_globals_region, expr); |
  m_globals_map.put (expr, reg);
1613 | return reg; |
1614 | } |
1615 | |
1616 | /* Return the region for an unknown access of type REGION_TYPE, |
1617 | creating it if necessary. |
1618 | This is a symbolic_region, where the pointer is an unknown_svalue |
   of type &REGION_TYPE.  */
1620 | |
1621 | const region * |
1622 | region_model_manager::get_unknown_symbolic_region (tree region_type) |
1623 | { |
1624 | tree ptr_type = region_type ? build_pointer_type (region_type) : NULL_TREE; |
  const svalue *unknown_ptr = get_or_create_unknown_svalue (ptr_type);
  return get_symbolic_region (unknown_ptr);
1627 | } |
1628 | |
1629 | /* Return the region that describes accessing field FIELD of PARENT, |
1630 | creating it if necessary. */ |
1631 | |
1632 | const region * |
1633 | region_model_manager::get_field_region (const region *parent, tree field) |
1634 | { |
1635 | gcc_assert (TREE_CODE (field) == FIELD_DECL); |
1636 | |
1637 | /* (*UNKNOWN_PTR).field is (*UNKNOWN_PTR_OF_&FIELD_TYPE). */ |
1638 | if (parent->symbolic_for_unknown_ptr_p ()) |
1639 | return get_unknown_symbolic_region (TREE_TYPE (field)); |
1640 | |
1641 | field_region::key_t key (parent, field); |
  if (field_region *reg = m_field_regions.get (key))
1643 | return reg; |
1644 | |
1645 | field_region *field_reg |
1646 | = new field_region (alloc_symbol_id (), parent, field); |
  m_field_regions.put (key, field_reg);
1648 | return field_reg; |
1649 | } |
1650 | |
1651 | /* Return the region that describes accessing the element of type |
1652 | ELEMENT_TYPE at index INDEX of PARENT, creating it if necessary. */ |
1653 | |
1654 | const region * |
1655 | region_model_manager::get_element_region (const region *parent, |
1656 | tree element_type, |
1657 | const svalue *index) |
1658 | { |
1659 | /* (UNKNOWN_PTR[IDX]) is (UNKNOWN_PTR). */ |
1660 | if (parent->symbolic_for_unknown_ptr_p ()) |
    return get_unknown_symbolic_region (element_type);
1662 | |
1663 | element_region::key_t key (parent, element_type, index); |
  if (element_region *reg = m_element_regions.get (key))
1665 | return reg; |
1666 | |
1667 | element_region *element_reg |
1668 | = new element_region (alloc_symbol_id (), parent, element_type, index); |
  m_element_regions.put (key, element_reg);
1670 | return element_reg; |
1671 | } |
1672 | |
1673 | /* Return the region that describes accessing the subregion of type |
1674 | ELEMENT_TYPE at offset BYTE_OFFSET within PARENT, creating it if |
1675 | necessary. */ |
1676 | |
1677 | const region * |
1678 | region_model_manager::get_offset_region (const region *parent, |
1679 | tree type, |
1680 | const svalue *byte_offset) |
1681 | { |
1682 | /* (UNKNOWN_PTR + OFFSET) is (UNKNOWN_PTR). */ |
1683 | if (parent->symbolic_for_unknown_ptr_p ()) |
    return get_unknown_symbolic_region (type);
1685 | |
  /* If BYTE_OFFSET is zero, return PARENT, cast to TYPE.  */
  if (tree cst_offset = byte_offset->maybe_get_constant ())
    if (zerop (cst_offset))
      return get_cast_region (parent, type);
1690 | |
1691 | /* Fold OFFSET_REGION(OFFSET_REGION(REG, X), Y) |
1692 | to OFFSET_REGION(REG, (X + Y)). */ |
1693 | if (const offset_region *parent_offset_reg |
1694 | = parent->dyn_cast_offset_region ()) |
1695 | { |
1696 | const svalue *sval_x = parent_offset_reg->get_byte_offset (); |
1697 | const svalue *sval_sum |
	= get_or_create_binop (byte_offset->get_type (),
			       PLUS_EXPR, sval_x, byte_offset);
      return get_offset_region (parent->get_parent_region (), type, sval_sum);
1701 | } |
1702 | |
1703 | offset_region::key_t key (parent, type, byte_offset); |
  if (offset_region *reg = m_offset_regions.get (key))
1705 | return reg; |
1706 | |
1707 | offset_region *offset_reg |
1708 | = new offset_region (alloc_symbol_id (), parent, type, byte_offset); |
  m_offset_regions.put (key, offset_reg);
1710 | return offset_reg; |
1711 | } |
1712 | |
1713 | /* Return the region that describes accessing the subregion of type |
1714 | TYPE of size BYTE_SIZE_SVAL within PARENT, creating it if necessary. */ |
1715 | |
1716 | const region * |
1717 | region_model_manager::get_sized_region (const region *parent, |
1718 | tree type, |
1719 | const svalue *byte_size_sval) |
1720 | { |
1721 | if (parent->symbolic_for_unknown_ptr_p ()) |
    return get_unknown_symbolic_region (type);
1723 | |
1724 | if (byte_size_sval->get_type () != size_type_node) |
    byte_size_sval = get_or_create_cast (size_type_node, byte_size_sval);
1726 | |
1727 | /* If PARENT is already that size, return it. */ |
  const svalue *parent_byte_size_sval = parent->get_byte_size_sval (this);
1729 | if (tree parent_size_cst = parent_byte_size_sval->maybe_get_constant ()) |
1730 | if (tree size_cst = byte_size_sval->maybe_get_constant ()) |
1731 | { |
1732 | tree comparison |
1733 | = fold_binary (EQ_EXPR, boolean_type_node, parent_size_cst, size_cst); |
1734 | if (comparison == boolean_true_node) |
1735 | return parent; |
1736 | } |
1737 | |
1738 | sized_region::key_t key (parent, type, byte_size_sval); |
  if (sized_region *reg = m_sized_regions.get (key))
1740 | return reg; |
1741 | |
1742 | sized_region *sized_reg |
1743 | = new sized_region (alloc_symbol_id (), parent, type, byte_size_sval); |
  m_sized_regions.put (key, sized_reg);
1745 | return sized_reg; |
1746 | } |
1747 | |
/* Return the region that describes accessing ORIGINAL_REGION as if
   it were of type TYPE, creating it if necessary.  */
1750 | |
1751 | const region * |
1752 | region_model_manager::get_cast_region (const region *original_region, |
1753 | tree type) |
1754 | { |
1755 | /* If types match, return ORIGINAL_REGION. */ |
1756 | if (type == original_region->get_type ()) |
1757 | return original_region; |
1758 | |
1759 | if (original_region->symbolic_for_unknown_ptr_p ()) |
    return get_unknown_symbolic_region (type);
1761 | |
1762 | cast_region::key_t key (original_region, type); |
  if (cast_region *reg = m_cast_regions.get (key))
1764 | return reg; |
1765 | |
1766 | cast_region *cast_reg |
1767 | = new cast_region (alloc_symbol_id (), original_region, type); |
  m_cast_regions.put (key, cast_reg);
1769 | return cast_reg; |
1770 | } |
1771 | |
1772 | /* Return the frame_region for call to FUN from CALLING_FRAME, creating it |
1773 | if necessary. CALLING_FRAME may be NULL. */ |
1774 | |
1775 | const frame_region * |
1776 | region_model_manager::get_frame_region (const frame_region *calling_frame, |
1777 | const function &fun) |
1778 | { |
1779 | int index = calling_frame ? calling_frame->get_index () + 1 : 0; |
1780 | |
1781 | frame_region::key_t key (calling_frame, fun); |
  if (frame_region *reg = m_frame_regions.get (key))
1783 | return reg; |
1784 | |
1785 | frame_region *frame_reg |
1786 | = new frame_region (alloc_symbol_id (), &m_stack_region, calling_frame, |
1787 | fun, index); |
  m_frame_regions.put (key, frame_reg);
1789 | return frame_reg; |
1790 | } |
1791 | |
1792 | /* Return the region that describes dereferencing SVAL, creating it |
1793 | if necessary. */ |
1794 | |
1795 | const region * |
1796 | region_model_manager::get_symbolic_region (const svalue *sval) |
1797 | { |
1798 | symbolic_region::key_t key (&m_root_region, sval); |
  if (symbolic_region *reg = m_symbolic_regions.get (key))
1800 | return reg; |
1801 | |
1802 | symbolic_region *symbolic_reg |
1803 | = new symbolic_region (alloc_symbol_id (), &m_root_region, sval); |
  m_symbolic_regions.put (key, symbolic_reg);
1805 | return symbolic_reg; |
1806 | } |
1807 | |
1808 | /* Return the region that describes accessing STRING_CST, creating it |
1809 | if necessary. */ |
1810 | |
1811 | const string_region * |
1812 | region_model_manager::get_region_for_string (tree string_cst) |
1813 | { |
1814 | gcc_assert (TREE_CODE (string_cst) == STRING_CST); |
1815 | |
  string_region **slot = m_string_map.get (string_cst);
1817 | if (slot) |
1818 | return *slot; |
1819 | string_region *reg |
1820 | = new string_region (alloc_symbol_id (), &m_root_region, string_cst); |
  m_string_map.put (string_cst, reg);
1822 | return reg; |
1823 | } |
1824 | |
1825 | /* Return the region that describes accessing BITS within PARENT as TYPE, |
1826 | creating it if necessary. */ |
1827 | |
1828 | const region * |
1829 | region_model_manager::get_bit_range (const region *parent, tree type, |
1830 | const bit_range &bits) |
1831 | { |
1832 | gcc_assert (parent); |
1833 | |
1834 | if (parent->symbolic_for_unknown_ptr_p ()) |
    return get_unknown_symbolic_region (type);
1836 | |
1837 | bit_range_region::key_t key (parent, type, bits); |
  if (bit_range_region *reg = m_bit_range_regions.get (key))
1839 | return reg; |
1840 | |
1841 | bit_range_region *bit_range_reg |
1842 | = new bit_range_region (alloc_symbol_id (), parent, type, bits); |
  m_bit_range_regions.put (key, bit_range_reg);
1844 | return bit_range_reg; |
1845 | } |
1846 | |
1847 | /* Return the region that describes accessing the IDX-th variadic argument |
1848 | within PARENT_FRAME, creating it if necessary. */ |
1849 | |
1850 | const var_arg_region * |
1851 | region_model_manager::get_var_arg_region (const frame_region *parent_frame, |
1852 | unsigned idx) |
1853 | { |
1854 | gcc_assert (parent_frame); |
1855 | |
1856 | var_arg_region::key_t key (parent_frame, idx); |
  if (var_arg_region *reg = m_var_arg_regions.get (key))
1858 | return reg; |
1859 | |
1860 | var_arg_region *var_arg_reg |
1861 | = new var_arg_region (alloc_symbol_id (), parent_frame, idx); |
  m_var_arg_regions.put (key, var_arg_reg);
1863 | return var_arg_reg; |
1864 | } |
1865 | |
1866 | /* If we see a tree code we don't know how to handle, rather than |
1867 | ICE or generate bogus results, create a dummy region, and notify |
1868 | CTXT so that it can mark the new state as being not properly |
1869 | modelled. The exploded graph can then stop exploring that path, |
1870 | since any diagnostics we might issue will have questionable |
1871 | validity. */ |
1872 | |
1873 | const region * |
1874 | region_model_manager:: |
1875 | get_region_for_unexpected_tree_code (region_model_context *ctxt, |
1876 | tree t, |
1877 | const dump_location_t &loc) |
1878 | { |
1879 | tree type = TYPE_P (t) ? t : TREE_TYPE (t); |
1880 | region *new_reg |
1881 | = new unknown_region (alloc_symbol_id (), &m_root_region, type); |
1882 | if (ctxt) |
1883 | ctxt->on_unexpected_tree_code (t, loc); |
1884 | return new_reg; |
1885 | } |
1886 | |
/* Return a region describing a heap-allocated block of memory.
   Reuse an existing heap_allocated_region if its id is not within
   BASE_REGS_IN_USE.  */
1890 | |
1891 | const region * |
1892 | region_model_manager:: |
1893 | get_or_create_region_for_heap_alloc (const bitmap &base_regs_in_use) |
1894 | { |
1895 | /* Try to reuse an existing region, if it's unreferenced in the |
1896 | client state. */ |
1897 | for (auto existing_reg : m_managed_dynamic_regions) |
1898 | if (!bitmap_bit_p (base_regs_in_use, existing_reg->get_id ())) |
1899 | if (existing_reg->get_kind () == RK_HEAP_ALLOCATED) |
1900 | return existing_reg; |
1901 | |
1902 | /* All existing ones (if any) are in use; create a new one. */ |
1903 | region *reg |
1904 | = new heap_allocated_region (alloc_symbol_id (), &m_heap_region); |
  m_managed_dynamic_regions.safe_push (reg);
1906 | return reg; |
1907 | } |
1908 | |
1909 | /* Return a new region describing a block of memory allocated within FRAME. */ |
1910 | |
1911 | const region * |
1912 | region_model_manager::create_region_for_alloca (const frame_region *frame) |
1913 | { |
1914 | gcc_assert (frame); |
1915 | region *reg = new alloca_region (alloc_symbol_id (), frame); |
  m_managed_dynamic_regions.safe_push (reg);
1917 | return reg; |
1918 | } |
1919 | |
1920 | /* Log OBJ to LOGGER. */ |
1921 | |
1922 | template <typename T> |
1923 | static void |
1924 | log_managed_object (logger *logger, const T *obj) |
1925 | { |
1926 | logger->start_log_line (); |
1927 | pretty_printer *pp = logger->get_printer (); |
1928 | pp_string (pp, " " ); |
1929 | obj->dump_to_pp (pp, true); |
1930 | logger->end_log_line (); |
1931 | } |
1932 | |
1933 | /* Specialization for frame_region, which also logs the count of locals |
1934 | managed by the frame_region. */ |
1935 | |
1936 | template <> |
1937 | void |
1938 | log_managed_object (logger *logger, const frame_region *obj) |
1939 | { |
1940 | logger->start_log_line (); |
1941 | pretty_printer *pp = logger->get_printer (); |
1942 | pp_string (pp, " " ); |
1943 | obj->dump_to_pp (pp, simple: true); |
1944 | pp_printf (pp, " [with %i region(s) for locals]" , obj->get_num_locals ()); |
1945 | logger->end_log_line (); |
1946 | } |
1947 | |
1948 | /* Dump the number of objects that were managed by UNIQ_MAP to LOGGER. |
1949 | If SHOW_OBJS is true, also dump the objects themselves. */ |
1950 | |
1951 | template <typename K, typename T> |
1952 | static void |
1953 | log_uniq_map (logger *logger, bool show_objs, const char *title, |
1954 | const hash_map<K, T*> &uniq_map) |
1955 | { |
1956 | logger->log (fmt: " # %s: %li" , title, (long)uniq_map.elements ()); |
1957 | if (!show_objs) |
1958 | return; |
1959 | auto_vec<const T *> vec_objs (uniq_map.elements ()); |
1960 | for (typename hash_map<K, T*>::iterator iter = uniq_map.begin (); |
1961 | iter != uniq_map.end (); ++iter) |
1962 | vec_objs.quick_push ((*iter).second); |
1963 | |
1964 | vec_objs.qsort (T::cmp_ptr_ptr); |
1965 | |
1966 | unsigned i; |
1967 | const T *obj; |
1968 | FOR_EACH_VEC_ELT (vec_objs, i, obj) |
1969 | log_managed_object<T> (logger, obj); |
1970 | } |
1971 | |
1972 | /* Dump the number of objects that were managed by MAP to LOGGER. |
1973 | If SHOW_OBJS is true, also dump the objects themselves. */ |
1974 | |
1975 | template <typename T> |
1976 | static void |
1977 | log_uniq_map (logger *logger, bool show_objs, const char *title, |
1978 | const consolidation_map<T> &map) |
1979 | { |
1980 | logger->log (fmt: " # %s: %li" , title, (long)map.elements ()); |
1981 | if (!show_objs) |
1982 | return; |
1983 | |
1984 | auto_vec<const T *> vec_objs (map.elements ()); |
1985 | for (typename consolidation_map<T>::iterator iter = map.begin (); |
1986 | iter != map.end (); ++iter) |
1987 | vec_objs.quick_push ((*iter).second); |
1988 | |
1989 | vec_objs.qsort (T::cmp_ptr_ptr); |
1990 | |
1991 | unsigned i; |
1992 | const T *obj; |
1993 | FOR_EACH_VEC_ELT (vec_objs, i, obj) |
1994 | log_managed_object<T> (logger, obj); |
1995 | } |
1996 | |
1997 | /* Dump the number of objects of each class that were managed by this |
1998 | manager to LOGGER. |
1999 | If SHOW_OBJS is true, also dump the objects themselves. */ |
2000 | |
2001 | void |
2002 | region_model_manager::log_stats (logger *logger, bool show_objs) const |
2003 | { |
2004 | LOG_SCOPE (logger); |
2005 | logger->log (fmt: "call string consolidation" ); |
2006 | m_empty_call_string.recursive_log (logger); |
2007 | logger->log (fmt: "next symbol id: %i" , m_next_symbol_id); |
2008 | logger->log (fmt: "svalue consolidation" ); |
2009 | log_uniq_map (logger, show_objs, title: "constant_svalue" , uniq_map: m_constants_map); |
2010 | log_uniq_map (logger, show_objs, title: "unknown_svalue" , uniq_map: m_unknowns_map); |
2011 | if (m_unknown_NULL) |
2012 | log_managed_object (logger, obj: m_unknown_NULL); |
2013 | log_uniq_map (logger, show_objs, title: "poisoned_svalue" , uniq_map: m_poisoned_values_map); |
2014 | log_uniq_map (logger, show_objs, title: "setjmp_svalue" , uniq_map: m_setjmp_values_map); |
2015 | log_uniq_map (logger, show_objs, title: "initial_svalue" , uniq_map: m_initial_values_map); |
2016 | log_uniq_map (logger, show_objs, title: "region_svalue" , uniq_map: m_pointer_values_map); |
2017 | log_uniq_map (logger, show_objs, title: "unaryop_svalue" , uniq_map: m_unaryop_values_map); |
2018 | log_uniq_map (logger, show_objs, title: "binop_svalue" , uniq_map: m_binop_values_map); |
2019 | log_uniq_map (logger, show_objs, title: "sub_svalue" , uniq_map: m_sub_values_map); |
2020 | log_uniq_map (logger, show_objs, title: "repeated_svalue" , uniq_map: m_repeated_values_map); |
2021 | log_uniq_map (logger, show_objs, title: "bits_within_svalue" , |
2022 | uniq_map: m_bits_within_values_map); |
2023 | log_uniq_map (logger, show_objs, title: "unmergeable_svalue" , |
2024 | uniq_map: m_unmergeable_values_map); |
2025 | log_uniq_map (logger, show_objs, title: "widening_svalue" , uniq_map: m_widening_values_map); |
2026 | log_uniq_map (logger, show_objs, title: "compound_svalue" , uniq_map: m_compound_values_map); |
2027 | log_uniq_map (logger, show_objs, title: "conjured_svalue" , uniq_map: m_conjured_values_map); |
2028 | log_uniq_map (logger, show_objs, title: "asm_output_svalue" , |
2029 | uniq_map: m_asm_output_values_map); |
2030 | log_uniq_map (logger, show_objs, title: "const_fn_result_svalue" , |
2031 | uniq_map: m_const_fn_result_values_map); |
2032 | |
2033 | logger->log (fmt: "max accepted svalue num_nodes: %i" , |
2034 | m_max_complexity.m_num_nodes); |
2035 | logger->log (fmt: "max accepted svalue max_depth: %i" , |
2036 | m_max_complexity.m_max_depth); |
2037 | |
2038 | logger->log (fmt: "region consolidation" ); |
2039 | log_uniq_map (logger, show_objs, title: "function_region" , uniq_map: m_fndecls_map); |
2040 | log_uniq_map (logger, show_objs, title: "label_region" , uniq_map: m_labels_map); |
2041 | log_uniq_map (logger, show_objs, title: "decl_region for globals" , uniq_map: m_globals_map); |
2042 | log_uniq_map (logger, show_objs, title: "field_region" , map: m_field_regions); |
2043 | log_uniq_map (logger, show_objs, title: "element_region" , map: m_element_regions); |
2044 | log_uniq_map (logger, show_objs, title: "offset_region" , map: m_offset_regions); |
2045 | log_uniq_map (logger, show_objs, title: "sized_region" , map: m_sized_regions); |
2046 | log_uniq_map (logger, show_objs, title: "cast_region" , map: m_cast_regions); |
2047 | log_uniq_map (logger, show_objs, title: "frame_region" , map: m_frame_regions); |
2048 | log_uniq_map (logger, show_objs, title: "symbolic_region" , map: m_symbolic_regions); |
2049 | log_uniq_map (logger, show_objs, title: "string_region" , uniq_map: m_string_map); |
2050 | log_uniq_map (logger, show_objs, title: "bit_range_region" , map: m_bit_range_regions); |
2051 | log_uniq_map (logger, show_objs, title: "var_arg_region" , map: m_var_arg_regions); |
2052 | logger->log (fmt: " # managed dynamic regions: %i" , |
2053 | m_managed_dynamic_regions.length ()); |
2054 | m_store_mgr.log_stats (logger, show_objs); |
2055 | m_range_mgr->log_stats (logger, show_objs); |
2056 | } |
2057 | |
2058 | /* Dump the number of objects of each class that were managed by this |
2059 | manager to LOGGER. |
2060 | If SHOW_OBJS is true, also dump the objects themselves. |
2061 | This is here so it can use log_uniq_map. */ |
2062 | |
2063 | void |
2064 | store_manager::log_stats (logger *logger, bool show_objs) const |
2065 | { |
2066 | LOG_SCOPE (logger); |
2067 | log_uniq_map (logger, show_objs, title: "concrete_binding" , |
2068 | map: m_concrete_binding_key_mgr); |
2069 | log_uniq_map (logger, show_objs, title: "symbolic_binding" , |
2070 | map: m_symbolic_binding_key_mgr); |
2071 | } |
2072 | |
2073 | /* Emit a warning showing DECL_REG->tracked_p () for use in DejaGnu tests |
2074 | (using -fdump-analyzer-untracked). */ |
2075 | |
2076 | static void |
2077 | dump_untracked_region (const decl_region *decl_reg) |
2078 | { |
2079 | tree decl = decl_reg->get_decl (); |
2080 | if (TREE_CODE (decl) != VAR_DECL) |
2081 | return; |
2082 | /* For now, don't emit the status of decls in the constant pool, to avoid |
2083 | differences in DejaGnu test results between targets that use these vs |
2084 | those that don't. |
2085 | (Eventually these decls should probably be untracked and we should test |
2086 | for that, but that's not stage 4 material). */ |
2087 | if (DECL_IN_CONSTANT_POOL (decl)) |
2088 | return; |
  warning_at (DECL_SOURCE_LOCATION (decl), 0,
	      "track %qD: %s",
	      decl, (decl_reg->tracked_p () ? "yes" : "no"));
2092 | } |
2093 | |
2094 | /* Implementation of -fdump-analyzer-untracked. */ |
2095 | |
2096 | void |
2097 | region_model_manager::dump_untracked_regions () const |
2098 | { |
2099 | for (auto iter : m_globals_map) |
2100 | { |
2101 | const decl_region *decl_reg = iter.second; |
2102 | dump_untracked_region (decl_reg); |
2103 | } |
2104 | for (auto frame_iter : m_frame_regions) |
2105 | { |
2106 | const frame_region *frame_reg = frame_iter.second; |
2107 | frame_reg->dump_untracked_regions (); |
2108 | } |
2109 | } |
2110 | |
2111 | void |
2112 | frame_region::dump_untracked_regions () const |
2113 | { |
2114 | for (auto iter : m_locals) |
2115 | { |
2116 | const decl_region *decl_reg = iter.second; |
2117 | dump_untracked_region (decl_reg); |
2118 | } |
2119 | } |
2120 | |
2121 | } // namespace ana |
2122 | |
2123 | #endif /* #if ENABLE_ANALYZER */ |
2124 | |