1 /* Consolidation of svalues and regions.
2 Copyright (C) 2020-2022 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "diagnostic-core.h"
26 #include "gimple-pretty-print.h"
27 #include "function.h"
28 #include "basic-block.h"
29 #include "gimple.h"
30 #include "gimple-iterator.h"
31 #include "diagnostic-core.h"
32 #include "graphviz.h"
33 #include "options.h"
34 #include "cgraph.h"
35 #include "tree-dfa.h"
36 #include "stringpool.h"
37 #include "convert.h"
38 #include "target.h"
39 #include "fold-const.h"
40 #include "tree-pretty-print.h"
41 #include "tristate.h"
42 #include "bitmap.h"
43 #include "selftest.h"
44 #include "function.h"
45 #include "json.h"
46 #include "analyzer/analyzer.h"
47 #include "analyzer/analyzer-logging.h"
48 #include "ordered-hash-map.h"
49 #include "options.h"
50 #include "cgraph.h"
51 #include "cfg.h"
52 #include "digraph.h"
53 #include "analyzer/supergraph.h"
54 #include "sbitmap.h"
55 #include "analyzer/call-string.h"
56 #include "analyzer/program-point.h"
57 #include "analyzer/store.h"
58 #include "analyzer/region-model.h"
59 #include "analyzer/constraint-manager.h"
60
61 #if ENABLE_ANALYZER
62
63 namespace ana {
64
65 /* class region_model_manager. */
66
67 /* region_model_manager's ctor. */
68
69 region_model_manager::region_model_manager (logger *logger)
70 : m_logger (logger),
71 m_next_region_id (0),
72 m_root_region (alloc_region_id ()),
73 m_stack_region (alloc_region_id (), &m_root_region),
74 m_heap_region (alloc_region_id (), &m_root_region),
75 m_unknown_NULL (NULL),
76 m_checking_feasibility (false),
77 m_max_complexity (0, 0),
78 m_code_region (alloc_region_id (), &m_root_region),
79 m_fndecls_map (), m_labels_map (),
80 m_globals_region (alloc_region_id (), &m_root_region),
81 m_globals_map (),
82 m_store_mgr (this),
83 m_range_mgr (new bounded_ranges_manager ())
84 {
85 }
86
87 /* region_model_manager's dtor. Delete all of the managed svalues
88 and regions. */
89
90 region_model_manager::~region_model_manager ()
91 {
92 /* Delete consolidated svalues. */
93 for (constants_map_t::iterator iter = m_constants_map.begin ();
94 iter != m_constants_map.end (); ++iter)
95 delete (*iter).second;
96 for (unknowns_map_t::iterator iter = m_unknowns_map.begin ();
97 iter != m_unknowns_map.end (); ++iter)
98 delete (*iter).second;
99 delete m_unknown_NULL;
100 for (poisoned_values_map_t::iterator iter = m_poisoned_values_map.begin ();
101 iter != m_poisoned_values_map.end (); ++iter)
102 delete (*iter).second;
103 for (setjmp_values_map_t::iterator iter = m_setjmp_values_map.begin ();
104 iter != m_setjmp_values_map.end (); ++iter)
105 delete (*iter).second;
106 for (initial_values_map_t::iterator iter = m_initial_values_map.begin ();
107 iter != m_initial_values_map.end (); ++iter)
108 delete (*iter).second;
109 for (pointer_values_map_t::iterator iter = m_pointer_values_map.begin ();
110 iter != m_pointer_values_map.end (); ++iter)
111 delete (*iter).second;
112 for (unaryop_values_map_t::iterator iter = m_unaryop_values_map.begin ();
113 iter != m_unaryop_values_map.end (); ++iter)
114 delete (*iter).second;
115 for (binop_values_map_t::iterator iter = m_binop_values_map.begin ();
116 iter != m_binop_values_map.end (); ++iter)
117 delete (*iter).second;
118 for (sub_values_map_t::iterator iter = m_sub_values_map.begin ();
119 iter != m_sub_values_map.end (); ++iter)
120 delete (*iter).second;
121 for (auto iter : m_repeated_values_map)
122 delete iter.second;
123 for (auto iter : m_bits_within_values_map)
124 delete iter.second;
125 for (unmergeable_values_map_t::iterator iter
126 = m_unmergeable_values_map.begin ();
127 iter != m_unmergeable_values_map.end (); ++iter)
128 delete (*iter).second;
129 for (widening_values_map_t::iterator iter = m_widening_values_map.begin ();
130 iter != m_widening_values_map.end (); ++iter)
131 delete (*iter).second;
132 for (compound_values_map_t::iterator iter = m_compound_values_map.begin ();
133 iter != m_compound_values_map.end (); ++iter)
134 delete (*iter).second;
135 for (conjured_values_map_t::iterator iter = m_conjured_values_map.begin ();
136 iter != m_conjured_values_map.end (); ++iter)
137 delete (*iter).second;
138 for (auto iter : m_asm_output_values_map)
139 delete iter.second;
140 for (auto iter : m_const_fn_result_values_map)
141 delete iter.second;
142
143 /* Delete consolidated regions. */
144 for (fndecls_map_t::iterator iter = m_fndecls_map.begin ();
145 iter != m_fndecls_map.end (); ++iter)
146 delete (*iter).second;
147 for (labels_map_t::iterator iter = m_labels_map.begin ();
148 iter != m_labels_map.end (); ++iter)
149 delete (*iter).second;
150 for (globals_map_t::iterator iter = m_globals_map.begin ();
151 iter != m_globals_map.end (); ++iter)
152 delete (*iter).second;
153 for (string_map_t::iterator iter = m_string_map.begin ();
154 iter != m_string_map.end (); ++iter)
155 delete (*iter).second;
156
157 delete m_range_mgr;
158 }
159
160 /* Return true if C exceeds the complexity limit for svalues. */
161
162 bool
163 region_model_manager::too_complex_p (const complexity &c) const
164 {
165 if (c.m_max_depth > (unsigned)param_analyzer_max_svalue_depth)
166 return true;
167 return false;
168 }
169
170 /* If SVAL exceeds the complexity limit for svalues, delete it
171 and return true.
172 Otherwise update m_max_complexity and return false. */
173
174 bool
175 region_model_manager::reject_if_too_complex (svalue *sval)
176 {
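/* Note: the complexity limit is skipped when checking feasibility,
   presumably so that svalues along the path being checked are
   modelled precisely.  */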
177 if (m_checking_feasibility)
178 return false;
179
180 const complexity &c = sval->get_complexity ();
181 if (!too_complex_p (c))
182 {
183 if (m_max_complexity.m_num_nodes < c.m_num_nodes)
184 m_max_complexity.m_num_nodes = c.m_num_nodes;
185 if (m_max_complexity.m_max_depth < c.m_max_depth)
186 m_max_complexity.m_max_depth = c.m_max_depth;
187 return false;
188 }
189
190 delete sval;
191 return true;
192 }
193
194 /* Macro for imposing a complexity limit on svalues, for use within
195 region_model_manager member functions.
196
197 If SVAL exceeds the complexity limit, delete it and return an UNKNOWN
198 value of the same type.
199 Otherwise update m_max_complexity and carry on. */
200
201 #define RETURN_UNKNOWN_IF_TOO_COMPLEX(SVAL) \
202 do { \
203 svalue *sval_ = (SVAL); \
204 tree type_ = sval_->get_type (); \
205 if (reject_if_too_complex (sval_)) \
206 return get_or_create_unknown_svalue (type_); \
207 } while (0)
208
209 /* svalue consolidation. */
210
211 /* Return the svalue * for a constant_svalue for CST_EXPR,
212 creating it if necessary.
213 The constant_svalue instances are reused, based on pointer equality
214 of trees. */
215
216 const svalue *
217 region_model_manager::get_or_create_constant_svalue (tree cst_expr)
218 {
219 gcc_assert (cst_expr);
220 gcc_assert (CONSTANT_CLASS_P (cst_expr));
221
222 constant_svalue **slot = m_constants_map.get (cst_expr);
223 if (slot)
224 return *slot;
225 constant_svalue *cst_sval = new constant_svalue (cst_expr);
226 RETURN_UNKNOWN_IF_TOO_COMPLEX (cst_sval);
227 m_constants_map.put (cst_expr, cst_sval);
228 return cst_sval;
229 }
230
231 /* Return the svalue * for a constant_svalue for the INTEGER_CST
232 for VAL of type TYPE, creating it if necessary. */
233
234 const svalue *
235 region_model_manager::get_or_create_int_cst (tree type, poly_int64 val)
236 {
237 gcc_assert (type);
238 tree tree_cst = build_int_cst (type, val);
239 return get_or_create_constant_svalue (tree_cst);
240 }
241
242 /* Return the svalue * for the constant_svalue for the NULL pointer
243 of POINTER_TYPE, creating it if necessary. */
244
245 const svalue *
246 region_model_manager::get_or_create_null_ptr (tree pointer_type)
247 {
248 gcc_assert (pointer_type);
249 gcc_assert (POINTER_TYPE_P (pointer_type));
250 return get_or_create_int_cst (pointer_type, 0);
251 }
252
253 /* Return the svalue * for an unknown_svalue for TYPE (which can be NULL),
254 creating it if necessary.
255 The unknown_svalue instances are reused, based on pointer equality
256 of the types. */
257
258 const svalue *
259 region_model_manager::get_or_create_unknown_svalue (tree type)
260 {
261 /* Don't create unknown values when doing feasibility testing;
262 instead, create a unique svalue. */
263 if (m_checking_feasibility)
264 return create_unique_svalue (type);
265
266 /* Special-case NULL, so that the hash_map can use NULL as the
267 "empty" value. */
268 if (type == NULL_TREE)
269 {
270 if (!m_unknown_NULL)
271 m_unknown_NULL = new unknown_svalue (type);
272 return m_unknown_NULL;
273 }
274
275 unknown_svalue **slot = m_unknowns_map.get (type);
276 if (slot)
277 return *slot;
278 unknown_svalue *sval = new unknown_svalue (type);
279 m_unknowns_map.put (type, sval);
280 return sval;
281 }
282
283 /* Return a freshly-allocated svalue of TYPE, owned by this manager. */
284
285 const svalue *
286 region_model_manager::create_unique_svalue (tree type)
287 {
288 svalue *sval = new placeholder_svalue (type, "unique");
289 m_managed_dynamic_svalues.safe_push (sval);
290 return sval;
291 }
292
293 /* Return the svalue * for the initial value of REG, creating it if
294 necessary. */
295
296 const svalue *
297 region_model_manager::get_or_create_initial_value (const region *reg)
298 {
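/* Regions that can't have an initial svalue (e.g. locals within frames
   pushed during the analysis) are treated as uninitialized.  */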
299 if (!reg->can_have_initial_svalue_p ())
300 return get_or_create_poisoned_svalue (POISON_KIND_UNINIT,
301 reg->get_type ());
302
303 /* The initial value of a cast is a cast of the initial value. */
304 if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
305 {
306 const region *original_reg = cast_reg->get_original_region ();
307 return get_or_create_cast (cast_reg->get_type (),
308 get_or_create_initial_value (original_reg));
309 }
310
311 /* INIT_VAL (*UNKNOWN_PTR) -> UNKNOWN_VAL. */
312 if (reg->symbolic_for_unknown_ptr_p ())
313 return get_or_create_unknown_svalue (reg->get_type ());
314
315 if (initial_svalue **slot = m_initial_values_map.get (reg))
316 return *slot;
317 initial_svalue *initial_sval = new initial_svalue (reg->get_type (), reg);
318 RETURN_UNKNOWN_IF_TOO_COMPLEX (initial_sval);
319 m_initial_values_map.put (reg, initial_sval);
320 return initial_sval;
321 }
322
323 /* Return the svalue * for R using type TYPE, creating it if
324 necessary. */
325
326 const svalue *
327 region_model_manager::get_or_create_setjmp_svalue (const setjmp_record &r,
328 tree type)
329 {
330 setjmp_svalue::key_t key (r, type);
331 if (setjmp_svalue **slot = m_setjmp_values_map.get (key))
332 return *slot;
333 setjmp_svalue *setjmp_sval = new setjmp_svalue (r, type);
334 RETURN_UNKNOWN_IF_TOO_COMPLEX (setjmp_sval);
335 m_setjmp_values_map.put (key, setjmp_sval);
336 return setjmp_sval;
337 }
338
339 /* Return the svalue * for a poisoned value of KIND and TYPE, creating it if
340 necessary. */
341
342 const svalue *
343 region_model_manager::get_or_create_poisoned_svalue (enum poison_kind kind,
344 tree type)
345 {
346 poisoned_svalue::key_t key (kind, type);
347 if (poisoned_svalue **slot = m_poisoned_values_map.get (key))
348 return *slot;
349 poisoned_svalue *poisoned_sval = new poisoned_svalue (kind, type);
350 RETURN_UNKNOWN_IF_TOO_COMPLEX (poisoned_sval);
351 m_poisoned_values_map.put (key, poisoned_sval);
352 return poisoned_sval;
353 }
354
355 /* Return the svalue * for a pointer to POINTEE of type PTR_TYPE,
356 creating it if necessary. */
357
358 const svalue *
359 region_model_manager::get_ptr_svalue (tree ptr_type, const region *pointee)
360 {
361 /* If this is a symbolic region from dereferencing a pointer, and the types
362 match, then return the original pointer. */
363 if (const symbolic_region *sym_reg = pointee->dyn_cast_symbolic_region ())
364 if (ptr_type == sym_reg->get_pointer ()->get_type ())
365 return sym_reg->get_pointer ();
366
367 region_svalue::key_t key (ptr_type, pointee);
368 if (region_svalue **slot = m_pointer_values_map.get (key))
369 return *slot;
370 region_svalue *sval = new region_svalue (ptr_type, pointee);
371 RETURN_UNKNOWN_IF_TOO_COMPLEX (sval);
372 m_pointer_values_map.put (key, sval);
373 return sval;
374 }
375
376 /* Subroutine of region_model_manager::get_or_create_unaryop.
377 Attempt to fold the inputs and return a simpler svalue *.
378 Otherwise, return NULL. */
379
380 const svalue *
381 region_model_manager::maybe_fold_unaryop (tree type, enum tree_code op,
382 const svalue *arg)
383 {
384 /* Ops on "unknown" are also unknown. */
385 if (arg->get_kind () == SK_UNKNOWN)
386 return get_or_create_unknown_svalue (type);
387 /* Likewise for "poisoned". */
388 else if (const poisoned_svalue *poisoned_sval
389 = arg->dyn_cast_poisoned_svalue ())
390 return get_or_create_poisoned_svalue (poisoned_sval->get_poison_kind (),
391 type);
392
393 gcc_assert (arg->can_have_associated_state_p ());
394
395 switch (op)
396 {
397 default: break;
398 case VIEW_CONVERT_EXPR:
399 case NOP_EXPR:
400 {
401 /* Handle redundant casts. */
402 if (arg->get_type ()
403 && useless_type_conversion_p (arg->get_type (), type))
404 return arg;
405
406 /* Fold "cast<TYPE> (cast <INNER_TYPE> (innermost_arg))
407 => "cast<TYPE> (innermost_arg)",
408 unless INNER_TYPE is narrower than TYPE. */
409 if (const svalue *innermost_arg = arg->maybe_undo_cast ())
410 {
411 tree inner_type = arg->get_type ();
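/* Only elide the intermediate cast if TYPE is no wider than
   INNER_TYPE; otherwise the intermediate cast truncates bits and
   eliding it would change the value.  */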
412 if (TYPE_SIZE (type)
413 && TYPE_SIZE (inner_type)
414 && (fold_binary (LE_EXPR, boolean_type_node,
415 TYPE_SIZE (type), TYPE_SIZE (inner_type))
416 == boolean_true_node))
417 return maybe_fold_unaryop (type, op, innermost_arg);
418 }
419 /* Avoid creating symbolic regions for pointer casts by
420 simplifying (T*)(&REGION) to ((T*)&REGION). */
421 if (const region_svalue *region_sval = arg->dyn_cast_region_svalue ())
422 if (POINTER_TYPE_P (type)
423 && region_sval->get_type ()
424 && POINTER_TYPE_P (region_sval->get_type ()))
425 return get_ptr_svalue (type, region_sval->get_pointee ());
426 }
427 break;
428 case TRUTH_NOT_EXPR:
429 {
430 /* Invert comparisons e.g. "!(x == y)" => "x != y". */
431 if (const binop_svalue *binop = arg->dyn_cast_binop_svalue ())
432 if (TREE_CODE_CLASS (binop->get_op ()) == tcc_comparison)
433 {
434 enum tree_code inv_op
435 = invert_tree_comparison (binop->get_op (),
436 HONOR_NANS (binop->get_type ()));
437 if (inv_op != ERROR_MARK)
438 return get_or_create_binop (binop->get_type (), inv_op,
439 binop->get_arg0 (),
440 binop->get_arg1 ());
441 }
442 }
443 break;
444 case NEGATE_EXPR:
445 {
446 /* -(-(VAL)) is VAL, for integer types. */
447 if (const unaryop_svalue *unaryop = arg->dyn_cast_unaryop_svalue ())
448 if (unaryop->get_op () == NEGATE_EXPR
449 && type == unaryop->get_type ()
450 && type
451 && INTEGRAL_TYPE_P (type))
452 return unaryop->get_arg ();
453 }
454 break;
455 }
456
457 /* Constants. */
458 if (tree cst = arg->maybe_get_constant ())
459 if (tree result = fold_unary (op, type, cst))
460 {
461 if (CONSTANT_CLASS_P (result))
462 return get_or_create_constant_svalue (result);
463
464 /* fold_unary can return casts of constants; try to handle them. */
465 if (op != NOP_EXPR
466 && type
467 && TREE_CODE (result) == NOP_EXPR
468 && CONSTANT_CLASS_P (TREE_OPERAND (result, 0)))
469 {
470 const svalue *inner_cst
471 = get_or_create_constant_svalue (TREE_OPERAND (result, 0));
472 return get_or_create_cast (type,
473 get_or_create_cast (TREE_TYPE (result),
474 inner_cst));
475 }
476 }
477
478 return NULL;
479 }
480
481 /* Return the svalue * for a unary operation OP on ARG with a result of
482 type TYPE, creating it if necessary. */
483
484 const svalue *
485 region_model_manager::get_or_create_unaryop (tree type, enum tree_code op,
486 const svalue *arg)
487 {
488 if (const svalue *folded = maybe_fold_unaryop (type, op, arg))
489 return folded;
490 unaryop_svalue::key_t key (type, op, arg);
491 if (unaryop_svalue **slot = m_unaryop_values_map.get (key))
492 return *slot;
493 unaryop_svalue *unaryop_sval = new unaryop_svalue (type, op, arg);
494 RETURN_UNKNOWN_IF_TOO_COMPLEX (unaryop_sval);
495 m_unaryop_values_map.put (key, unaryop_sval);
496 return unaryop_sval;
497 }
498
499 /* Get a tree code for a cast to DST_TYPE from SRC_TYPE.
500 Use NOP_EXPR if possible (e.g. to help fold_unary convert casts
501 of 0 to (T*) to simple pointer constants), but use FIX_TRUNC_EXPR
502 and VIEW_CONVERT_EXPR for cases that fold_unary would otherwise crash
503 on. */
504
505 static enum tree_code
506 get_code_for_cast (tree dst_type, tree src_type)
507 {
508 gcc_assert (dst_type);
509 if (!src_type)
510 return NOP_EXPR;
511
512 if (TREE_CODE (src_type) == REAL_TYPE)
513 {
514 if (TREE_CODE (dst_type) == INTEGER_TYPE)
515 return FIX_TRUNC_EXPR;
516 else
517 return VIEW_CONVERT_EXPR;
518 }
519
520 return NOP_EXPR;
521 }
522
523 /* Return the svalue * for a cast of ARG to type TYPE, creating it
524 if necessary. */
525
526 const svalue *
527 region_model_manager::get_or_create_cast (tree type, const svalue *arg)
528 {
529 gcc_assert (type);
530
531 /* No-op if the types are the same. */
532 if (type == arg->get_type ())
533 return arg;
534
535 /* Don't attempt to handle casts involving vector types for now. */
536 if (TREE_CODE (type) == VECTOR_TYPE
537 || (arg->get_type ()
538 && TREE_CODE (arg->get_type ()) == VECTOR_TYPE))
539 return get_or_create_unknown_svalue (type);
540
541 enum tree_code op = get_code_for_cast (type, arg->get_type ());
542 return get_or_create_unaryop (type, op, arg);
543 }
544
545 /* Subroutine of region_model_manager::maybe_fold_binop for handling
546 (TYPE)(COMPOUND_SVAL BIT_AND_EXPR CST) that may have been generated by
547 optimize_bit_field_compare, where CST is from ARG1.
548
549 Support masking out bits from a compound_svalue for comparing a bitfield
550 against a value, as generated by optimize_bit_field_compare for
551 BITFIELD == VALUE.
552
553 If COMPOUND_SVAL has a value for the appropriate bits, return it,
554 shifted accordingly.
555 Otherwise return NULL. */
556
557 const svalue *
558 region_model_manager::
559 maybe_undo_optimize_bit_field_compare (tree type,
560 const compound_svalue *compound_sval,
561 tree cst,
562 const svalue *arg1)
563 {
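/* Only handle the case where the result type is unsigned char
   for now.  */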
564 if (type != unsigned_char_type_node)
565 return NULL;
566
567 const binding_map &map = compound_sval->get_map ();
568 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (cst);
569 /* If "mask" is a contiguous range of set bits, see if the
570 compound_sval has a value for those bits. */
571 bit_range bits (0, 0);
572 if (!bit_range::from_mask (mask, &bits))
573 return NULL;
574
575 bit_range bound_bits (bits);
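/* On big-endian targets the bits within the byte are numbered from
   the other end, so mirror the range within the byte.  */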
576 if (BYTES_BIG_ENDIAN)
577 bound_bits = bit_range (BITS_PER_UNIT - bits.get_next_bit_offset (),
578 bits.m_size_in_bits);
579 const concrete_binding *conc
580 = get_store_manager ()->get_concrete_binding (bound_bits);
581 const svalue *sval = map.get (conc);
582 if (!sval)
583 return NULL;
584
585 /* We have a value;
586 shift it by the correct number of bits. */
587 const svalue *lhs = get_or_create_cast (type, sval);
588 HOST_WIDE_INT bit_offset = bits.get_start_bit_offset ().to_shwi ();
589 const svalue *shift_sval = get_or_create_int_cst (type, bit_offset);
590 const svalue *shifted_sval = get_or_create_binop (type, LSHIFT_EXPR,
591 lhs, shift_sval);
592 /* Reapply the mask (needed for negative
593 signed bitfields). */
594 return get_or_create_binop (type, BIT_AND_EXPR,
595 shifted_sval, arg1);
596 }
597
598 /* Subroutine of region_model_manager::get_or_create_binop.
599 Attempt to fold the inputs and return a simpler svalue *.
600 Otherwise, return NULL. */
601
602 const svalue *
603 region_model_manager::maybe_fold_binop (tree type, enum tree_code op,
604 const svalue *arg0,
605 const svalue *arg1)
606 {
607 tree cst0 = arg0->maybe_get_constant ();
608 tree cst1 = arg1->maybe_get_constant ();
609 /* (CST OP CST). */
610 if (cst0 && cst1)
611 {
612 if (tree result = fold_binary (op, type, cst0, cst1))
613 if (CONSTANT_CLASS_P (result))
614 return get_or_create_constant_svalue (result);
615 }
616
617 if (FLOAT_TYPE_P (type)
618 || (arg0->get_type () && FLOAT_TYPE_P (arg0->get_type ()))
619 || (arg1->get_type () && FLOAT_TYPE_P (arg1->get_type ())))
620 return NULL;
621
622 switch (op)
623 {
624 default:
625 break;
626 case POINTER_PLUS_EXPR:
627 case PLUS_EXPR:
628 /* (VAL + 0) -> VAL. */
629 if (cst1 && zerop (cst1))
630 return get_or_create_cast (type, arg0);
631 break;
632 case MINUS_EXPR:
633 /* (VAL - 0) -> VAL. */
634 if (cst1 && zerop (cst1))
635 return get_or_create_cast (type, arg0);
636 /* (0 - VAL) -> -VAL. */
637 if (cst0 && zerop (cst0))
638 return get_or_create_unaryop (type, NEGATE_EXPR, arg1);
639 break;
640 case MULT_EXPR:
641 /* (VAL * 0) -> 0. */
642 if (cst1 && zerop (cst1) && INTEGRAL_TYPE_P (type))
643 return get_or_create_constant_svalue (build_int_cst (type, 0));
644 /* (VAL * 1) -> VAL. */
645 if (cst1 && integer_onep (cst1))
646 return arg0;
647 break;
648 case BIT_AND_EXPR:
649 if (cst1)
650 {
651 if (zerop (cst1) && INTEGRAL_TYPE_P (type))
652 /* "(ARG0 & 0)" -> "0". */
653 return get_or_create_constant_svalue (build_int_cst (type, 0));
654
655 if (const compound_svalue *compound_sval
656 = arg0->dyn_cast_compound_svalue ())
657 if (const svalue *sval
658 = maybe_undo_optimize_bit_field_compare (type,
659 compound_sval,
660 cst1, arg1))
661 return sval;
662 }
663 if (arg0->get_type () == boolean_type_node
664 && arg1->get_type () == boolean_type_node)
665 {
666 /* If both arguments are _Bool, then... */
667 /* ..."(1 & x) -> x". */
668 if (cst0 && !zerop (cst0))
669 return get_or_create_cast (type, arg1);
670 /* ..."(x & 1) -> x". */
671 if (cst1 && !zerop (cst1))
672 return get_or_create_cast (type, arg0);
673 /* ..."(0 & x) -> 0". */
674 if (cst0 && zerop (cst0))
675 return get_or_create_int_cst (type, 0);
676 /* ..."(x & 0) -> 0". */
677 if (cst1 && zerop (cst1))
678 return get_or_create_int_cst (type, 0);
679 }
680 break;
681 case BIT_IOR_EXPR:
682 if (arg0->get_type () == boolean_type_node
683 && arg1->get_type () == boolean_type_node)
684 {
685 /* If both arguments are _Bool, then... */
686 /* ..."(1 | x) -> 1". */
687 if (cst0 && !zerop (cst0))
688 return get_or_create_int_cst (type, 1);
689 /* ..."(x | 1) -> 1". */
690 if (cst1 && !zerop (cst1))
691 return get_or_create_int_cst (type, 1);
692 /* ..."(0 | x) -> x". */
693 if (cst0 && zerop (cst0))
694 return get_or_create_cast (type, arg1);
695 /* ..."(x | 0) -> x". */
696 if (cst1 && zerop (cst1))
697 return get_or_create_cast (type, arg0);
698 }
699 break;
700 case TRUTH_ANDIF_EXPR:
701 case TRUTH_AND_EXPR:
702 if (cst1)
703 {
704 if (zerop (cst1) && INTEGRAL_TYPE_P (type))
705 /* "(ARG0 && 0)" -> "0". */
706 return get_or_create_constant_svalue (build_int_cst (type, 0));
707 else
708 /* "(ARG0 && nonzero-cst)" -> "ARG0". */
709 return get_or_create_cast (type, arg0);
710 }
711 break;
712 case TRUTH_ORIF_EXPR:
713 case TRUTH_OR_EXPR:
714 if (cst1)
715 {
716 if (zerop (cst1))
717 /* "(ARG0 || 0)" -> "ARG0". */
718 return get_or_create_cast (type, arg0);
719 else
720 /* "(ARG0 && nonzero-cst)" -> "nonzero-cst". */
721 return get_or_create_cast (type, arg1);
722 }
723 break;
724 }
725
726 /* For associative ops, fold "((X op CST_A) op CST_B)" to
727 "X op (CST_A op CST_B)". */
728 if (cst1 && associative_tree_code (op))
729 if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
730 if (binop->get_op () == op
731 && binop->get_arg1 ()->maybe_get_constant ()
732 && type == binop->get_type ()
733 && type == binop->get_arg0 ()->get_type ()
734 && type == binop->get_arg1 ()->get_type ())
735 return get_or_create_binop
736 (type, op, binop->get_arg0 (),
737 get_or_create_binop (type, op,
738 binop->get_arg1 (), arg1));
739
740 /* associative_tree_code is false for POINTER_PLUS_EXPR, but we
741 can fold:
742 "(PTR ptr+ CST_A) ptr+ CST_B)" to "PTR ptr+ (CST_A ptr+ CST_B)"
743 e.g. in data-model-1.c: test_4c. */
744 if (cst1 && op == POINTER_PLUS_EXPR)
745 if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
746 if (binop->get_op () == POINTER_PLUS_EXPR)
747 if (binop->get_arg1 ()->maybe_get_constant ())
748 return get_or_create_binop
749 (type, op, binop->get_arg0 (),
750 get_or_create_binop (size_type_node, op,
751 binop->get_arg1 (), arg1));
752
753 /* etc. */
754
755 return NULL;
756 }
757
758 /* Return the svalue * for a binary operation OP on ARG0 and ARG1
759 with a result of type TYPE, creating it if necessary. */
760
761 const svalue *
762 region_model_manager::get_or_create_binop (tree type, enum tree_code op,
763 const svalue *arg0,
764 const svalue *arg1)
765 {
766 /* For commutative ops, put any constant on the RHS. */
767 if (arg0->maybe_get_constant () && commutative_tree_code (op))
768 std::swap (arg0, arg1);
769
770 if (const svalue *folded = maybe_fold_binop (type, op, arg0, arg1))
771 return folded;
772
773 /* Ops on "unknown"/"poisoned" are unknown (unless we were able to fold
774 it via an identity in maybe_fold_binop). */
775 if (!arg0->can_have_associated_state_p ()
776 || !arg1->can_have_associated_state_p ())
777 return get_or_create_unknown_svalue (type);
778
779 binop_svalue::key_t key (type, op, arg0, arg1);
780 if (binop_svalue **slot = m_binop_values_map.get (key))
781 return *slot;
782 binop_svalue *binop_sval = new binop_svalue (type, op, arg0, arg1);
783 RETURN_UNKNOWN_IF_TOO_COMPLEX (binop_sval);
784 m_binop_values_map.put (key, binop_sval);
785 return binop_sval;
786 }
787
788 /* Subroutine of region_model_manager::get_or_create_sub_svalue.
789 Return a folded svalue, or NULL. */
790
791 const svalue *
792 region_model_manager::maybe_fold_sub_svalue (tree type,
793 const svalue *parent_svalue,
794 const region *subregion)
795 {
796 /* Subvalues of "unknown"/"poisoned" are unknown. */
797 if (!parent_svalue->can_have_associated_state_p ())
798 return get_or_create_unknown_svalue (type);
799
800 /* If we have a subregion of a zero-fill, it's zero. */
801 if (const unaryop_svalue *unary
802 = parent_svalue->dyn_cast_unaryop_svalue ())
803 {
804 if (unary->get_op () == NOP_EXPR
805 || unary->get_op () == VIEW_CONVERT_EXPR)
806 if (tree cst = unary->get_arg ()->maybe_get_constant ())
807 if (zerop (cst) && type)
808 {
809 const svalue *cst_sval
810 = get_or_create_constant_svalue (cst);
811 return get_or_create_cast (type, cst_sval);
812 }
813 }
814
815 /* Handle getting individual chars from a STRING_CST. */
816 if (tree cst = parent_svalue->maybe_get_constant ())
817 if (TREE_CODE (cst) == STRING_CST)
818 {
819 /* If we have a concrete 1-byte access within the parent region... */
820 byte_range subregion_bytes (0, 0);
821 if (subregion->get_relative_concrete_byte_range (&subregion_bytes)
822 && subregion_bytes.m_size_in_bytes == 1
823 && type)
824 {
825 /* ...then attempt to get that char from the STRING_CST. */
826 HOST_WIDE_INT hwi_start_byte
827 = subregion_bytes.m_start_byte_offset.to_shwi ();
828 tree cst_idx
829 = build_int_cst_type (size_type_node, hwi_start_byte);
830 if (const svalue *char_sval
831 = maybe_get_char_from_string_cst (cst, cst_idx))
832 return get_or_create_cast (type, char_sval);
833 }
834 }
835
836 if (const initial_svalue *init_sval
837 = parent_svalue->dyn_cast_initial_svalue ())
838 {
839 /* SUB(INIT(r)).FIELD -> INIT(r.FIELD)
840 i.e.
841 Subvalue(InitialValue(R1), FieldRegion(R2, F))
842 -> InitialValue(FieldRegion(R1, F)). */
843 if (const field_region *field_reg = subregion->dyn_cast_field_region ())
844 {
845 const region *field_reg_new
846 = get_field_region (init_sval->get_region (),
847 field_reg->get_field ());
848 return get_or_create_initial_value (field_reg_new);
849 }
850 /* SUB(INIT(r)[ELEMENT]) -> INIT(r[ELEMENT])
851 i.e.
852 Subvalue(InitialValue(R1), ElementRegion(R2, IDX))
853 -> InitialValue(ElementRegion(R1, IDX)). */
854 if (const element_region *element_reg = subregion->dyn_cast_element_region ())
855 {
856 const region *element_reg_new
857 = get_element_region (init_sval->get_region (),
858 element_reg->get_type (),
859 element_reg->get_index ());
860 return get_or_create_initial_value (element_reg_new);
861 }
862 }
863
864 if (const repeated_svalue *repeated_sval
865 = parent_svalue->dyn_cast_repeated_svalue ())
866 if (type)
867 return get_or_create_cast (type, repeated_sval->get_inner_svalue ());
868
869 return NULL;
870 }
871
872 /* Return the svalue * for extracting a subvalue of type TYPE from
873 PARENT_SVALUE based on SUBREGION, creating it if necessary. */
874
875 const svalue *
876 region_model_manager::get_or_create_sub_svalue (tree type,
877 const svalue *parent_svalue,
878 const region *subregion)
879 {
880 if (const svalue *folded
881 = maybe_fold_sub_svalue (type, parent_svalue, subregion))
882 return folded;
883
884 sub_svalue::key_t key (type, parent_svalue, subregion);
885 if (sub_svalue **slot = m_sub_values_map.get (key))
886 return *slot;
887 sub_svalue *sub_sval
888 = new sub_svalue (type, parent_svalue, subregion);
889 RETURN_UNKNOWN_IF_TOO_COMPLEX (sub_sval);
890 m_sub_values_map.put (key, sub_sval);
891 return sub_sval;
892 }
893
894 /* Subroutine of region_model_manager::get_or_create_repeated_svalue.
895 Return a folded svalue, or NULL. */
896
897 const svalue *
898 region_model_manager::maybe_fold_repeated_svalue (tree type,
899 const svalue *outer_size,
900 const svalue *inner_svalue)
901 {
902 /* Repeated "unknown"/"poisoned" is unknown. */
903 if (!outer_size->can_have_associated_state_p ()
904 || !inner_svalue->can_have_associated_state_p ())
905 return get_or_create_unknown_svalue (type);
906
907 /* If INNER_SVALUE is the same size as OUTER_SIZE,
908 turn into simply a cast. */
909 if (tree cst_outer_num_bytes = outer_size->maybe_get_constant ())
910 {
911 HOST_WIDE_INT num_bytes_inner_svalue
912 = int_size_in_bytes (inner_svalue->get_type ());
913 if (num_bytes_inner_svalue != -1)
914 if (num_bytes_inner_svalue
915 == (HOST_WIDE_INT)tree_to_uhwi (cst_outer_num_bytes))
916 {
917 if (type)
918 return get_or_create_cast (type, inner_svalue);
919 else
920 return inner_svalue;
921 }
922 }
923
924 /* Handle zero-fill of a specific type. */
925 if (tree cst = inner_svalue->maybe_get_constant ())
926 if (zerop (cst) && type)
927 return get_or_create_cast (type, inner_svalue);
928
929 return NULL;
930 }
931
932 /* Return the svalue * of type TYPE in which INNER_SVALUE is repeated
933 enough times to be of size OUTER_SIZE, creating it if necessary.
934 e.g. for filling buffers with a constant value. */
935
936 const svalue *
937 region_model_manager::get_or_create_repeated_svalue (tree type,
938 const svalue *outer_size,
939 const svalue *inner_svalue)
940 {
941 if (const svalue *folded
942 = maybe_fold_repeated_svalue (type, outer_size, inner_svalue))
943 return folded;
944
945 repeated_svalue::key_t key (type, outer_size, inner_svalue);
946 if (repeated_svalue **slot = m_repeated_values_map.get (key))
947 return *slot;
948 repeated_svalue *repeated_sval
949 = new repeated_svalue (type, outer_size, inner_svalue);
950 RETURN_UNKNOWN_IF_TOO_COMPLEX (repeated_sval);
951 m_repeated_values_map.put (key, repeated_sval);
952 return repeated_sval;
953 }
954
955 /* Attempt to get the bit_range for FIELD within a RECORD_TYPE.
956 Return true and write the result to OUT if successful.
957 Return false otherwise. */
958
959 static bool
960 get_bit_range_for_field (tree field, bit_range *out)
961 {
962 bit_size_t bit_size;
963 if (!int_size_in_bits (TREE_TYPE (field), &bit_size))
964 return false;
965 int field_bit_offset = int_bit_position (field);
966 *out = bit_range (field_bit_offset, bit_size);
967 return true;
968 }
969
970 /* Attempt to get the byte_range for FIELD within a RECORD_TYPE.
971 Return true and write the result to OUT if successful.
972 Return false otherwise. */
973
974 static bool
975 get_byte_range_for_field (tree field, byte_range *out)
976 {
977 bit_range field_bits (0, 0);
978 if (!get_bit_range_for_field (field, &field_bits))
979 return false;
980 return field_bits.as_byte_range (out);
981 }
982
983 /* Attempt to determine if there is a specific field within RECORD_TYPE
984 at BYTES. If so, return it, and write the location of BYTES relative
985 to the field to *OUT_RANGE_WITHIN_FIELD.
986 Otherwise, return NULL_TREE.
987 For example, given:
988 struct foo { uint32 a; uint32 b; };
989 and
990 bytes = {bytes 6-7} (of foo)
991 we have bytes 2-3 of field b. */
992
993 static tree
994 get_field_at_byte_range (tree record_type, const byte_range &bytes,
995 byte_range *out_range_within_field)
996 {
997 bit_offset_t bit_offset = bytes.m_start_byte_offset * BITS_PER_UNIT;
998
999 tree field = get_field_at_bit_offset (record_type, bit_offset);
1000 if (!field)
1001 return NULL_TREE;
1002
1003 byte_range field_bytes (0,0);
1004 if (!get_byte_range_for_field (field, &field_bytes))
1005 return NULL_TREE;
1006
1007 /* Is BYTES fully within field_bytes? */
1008 byte_range bytes_within_field (0,0);
1009 if (!field_bytes.contains_p (bytes, &bytes_within_field))
1010 return NULL_TREE;
1011
1012 *out_range_within_field = bytes_within_field;
1013 return field;
1014 }
1015
1016 /* Subroutine of region_model_manager::get_or_create_bits_within.
1017 Return a folded svalue, or NULL. */
1018
1019 const svalue *
1020 region_model_manager::maybe_fold_bits_within_svalue (tree type,
1021 const bit_range &bits,
1022 const svalue *inner_svalue)
1023 {
1024 tree inner_type = inner_svalue->get_type ();
1025 /* Fold:
1026 BITS_WITHIN ((0, sizeof (VAL)), VAL)
1027 to:
1028 CAST(TYPE, VAL). */
1029 if (bits.m_start_bit_offset == 0 && inner_type)
1030 {
1031 bit_size_t inner_type_size;
1032 if (int_size_in_bits (inner_type, &inner_type_size))
1033 if (inner_type_size == bits.m_size_in_bits)
1034 {
1035 if (type)
1036 return get_or_create_cast (type, inner_svalue);
1037 else
1038 return inner_svalue;
1039 }
1040 }
1041
1042 /* Kind-specific folding. */
1043 if (const svalue *sval
1044 = inner_svalue->maybe_fold_bits_within (type, bits, this))
1045 return sval;
1046
1047 byte_range bytes (0,0);
1048 if (bits.as_byte_range (&bytes) && inner_type)
1049 switch (TREE_CODE (inner_type))
1050 {
1051 default:
1052 break;
1053 case ARRAY_TYPE:
1054 {
1055 /* Fold:
1056 BITS_WITHIN (range, KIND(REG))
1057 to:
1058 BITS_WITHIN (range - offsetof(ELEMENT), KIND(REG.ELEMENT))
1059 if the range is a byte-range fully within one ELEMENT. */
1060 tree element_type = TREE_TYPE (inner_type);
1061 HOST_WIDE_INT element_byte_size
1062 = int_size_in_bytes (element_type);
1063 if (element_byte_size > 0)
1064 {
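/* Determine which element the first and last bytes of the range
   fall in; we can only fold if the range lies within a single
   element.  */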
1065 HOST_WIDE_INT start_idx
1066 = (bytes.get_start_byte_offset ().to_shwi ()
1067 / element_byte_size);
1068 HOST_WIDE_INT last_idx
1069 = (bytes.get_last_byte_offset ().to_shwi ()
1070 / element_byte_size);
1071 if (start_idx == last_idx)
1072 {
1073 if (const initial_svalue *initial_sval
1074 = inner_svalue->dyn_cast_initial_svalue ())
1075 {
1076 bit_offset_t start_of_element
1077 = start_idx * element_byte_size * BITS_PER_UNIT;
1078 bit_range bits_within_element
1079 (bits.m_start_bit_offset - start_of_element,
1080 bits.m_size_in_bits);
1081 const svalue *idx_sval
1082 = get_or_create_int_cst (integer_type_node, start_idx);
1083 const region *element_reg =
1084 get_element_region (initial_sval->get_region (),
1085 element_type, idx_sval);
1086 const svalue *element_reg_sval
1087 = get_or_create_initial_value (element_reg);
1088 return get_or_create_bits_within (type,
1089 bits_within_element,
1090 element_reg_sval);
1091 }
1092 }
1093 }
1094 }
1095 break;
1096 case RECORD_TYPE:
1097 {
1098 /* Fold:
1099 BYTES_WITHIN (range, KIND(REG))
1100 to:
1101 BYTES_WITHIN (range - offsetof(FIELD), KIND(REG.FIELD))
1102 if the range is fully within FIELD. */
1103 byte_range bytes_within_field (0, 0);
1104 if (tree field = get_field_at_byte_range (inner_type, bytes,
1105 &bytes_within_field))
1106 {
1107 if (const initial_svalue *initial_sval
1108 = inner_svalue->dyn_cast_initial_svalue ())
1109 {
1110 const region *field_reg =
1111 get_field_region (initial_sval->get_region (), field);
1112 const svalue *initial_reg_sval
1113 = get_or_create_initial_value (field_reg);
1114 return get_or_create_bits_within
1115 (type,
1116 bytes_within_field.as_bit_range (),
1117 initial_reg_sval);
1118 }
1119 }
1120 }
1121 break;
1122 }
1123 return NULL;
1124 }
1125
1126 /* Return the svalue * of type TYPE for extracting BITS from INNER_SVALUE,
1127 creating it if necessary. */
1128
1129 const svalue *
1130 region_model_manager::get_or_create_bits_within (tree type,
1131 const bit_range &bits,
1132 const svalue *inner_svalue)
1133 {
1134 if (const svalue *folded
1135 = maybe_fold_bits_within_svalue (type, bits, inner_svalue))
1136 return folded;
1137
1138 bits_within_svalue::key_t key (type, bits, inner_svalue);
1139 if (bits_within_svalue **slot = m_bits_within_values_map.get (key))
1140 return *slot;
1141 bits_within_svalue *bits_within_sval
1142 = new bits_within_svalue (type, bits, inner_svalue);
1143 RETURN_UNKNOWN_IF_TOO_COMPLEX (bits_within_sval);
1144 m_bits_within_values_map.put (key, bits_within_sval);
1145 return bits_within_sval;
1146 }
1147
1148 /* Return the svalue * that decorates ARG as being unmergeable,
1149 creating it if necessary. */
1150
1151 const svalue *
1152 region_model_manager::get_or_create_unmergeable (const svalue *arg)
1153 {
1154 if (arg->get_kind () == SK_UNMERGEABLE)
1155 return arg;
1156
1157 if (unmergeable_svalue **slot = m_unmergeable_values_map.get (arg))
1158 return *slot;
1159 unmergeable_svalue *unmergeable_sval = new unmergeable_svalue (arg);
1160 RETURN_UNKNOWN_IF_TOO_COMPLEX (unmergeable_sval);
1161 m_unmergeable_values_map.put (arg, unmergeable_sval);
1162 return unmergeable_sval;
1163 }
1164
1165 /* Return the svalue * of type TYPE for the merger of value BASE_SVAL
1166 and ITER_SVAL at POINT, creating it if necessary. */
1167
1168 const svalue *
1169 region_model_manager::get_or_create_widening_svalue (tree type,
1170 const program_point &point,
1171 const svalue *base_sval,
1172 const svalue *iter_sval)
1173 {
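/* Widening svalues are not nested: neither input should itself be a
   widening svalue.  */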
1174 gcc_assert (base_sval->get_kind () != SK_WIDENING);
1175 gcc_assert (iter_sval->get_kind () != SK_WIDENING);
1176 widening_svalue::key_t key (type, point, base_sval, iter_sval);
1177 if (widening_svalue **slot = m_widening_values_map.get (key))
1178 return *slot;
1179 widening_svalue *widening_sval
1180 = new widening_svalue (type, point, base_sval, iter_sval);
1181 RETURN_UNKNOWN_IF_TOO_COMPLEX (widening_sval);
1182 m_widening_values_map.put (key, widening_sval);
1183 return widening_sval;
1184 }
1185
1186 /* Return the svalue * of type TYPE for the compound values in MAP,
1187 creating it if necessary. */
1188
1189 const svalue *
1190 region_model_manager::get_or_create_compound_svalue (tree type,
1191 const binding_map &map)
1192 {
1193 compound_svalue::key_t tmp_key (type, &map);
1194 if (compound_svalue **slot = m_compound_values_map.get (tmp_key))
1195 return *slot;
1196 compound_svalue *compound_sval
1197 = new compound_svalue (type, map);
1198 RETURN_UNKNOWN_IF_TOO_COMPLEX (compound_sval);
1199 /* Use make_key rather than reusing the key, so that we use a
1200 ptr to compound_sval's binding_map, rather than the MAP param. */
1201 m_compound_values_map.put (compound_sval->make_key (), compound_sval);
1202 return compound_sval;
1203 }
1204
1205 /* class conjured_purge. */
1206
1207 /* Purge state relating to SVAL. */
1208
1209 void
1210 conjured_purge::purge (const conjured_svalue *sval) const
1211 {
1212 m_model->purge_state_involving (sval, m_ctxt);
1213 }
1214
1215 /* Return the svalue * of type TYPE for the value conjured for ID_REG
1216 at STMT, creating it if necessary.
1217 Use P to purge existing state from the svalue, for the case where a
1218 conjured_svalue would be reused along an execution path. */
1219
1220 const svalue *
1221 region_model_manager::get_or_create_conjured_svalue (tree type,
1222 const gimple *stmt,
1223 const region *id_reg,
1224 const conjured_purge &p)
1225 {
1226 conjured_svalue::key_t key (type, stmt, id_reg);
1227 if (conjured_svalue **slot = m_conjured_values_map.get (key))
1228 {
1229 const conjured_svalue *sval = *slot;
1230 /* We're reusing an existing conjured_svalue, perhaps from a different
1231 state within this analysis, or perhaps from an earlier state on this
1232 execution path. For the latter, purge any state involving the "new"
1233 svalue from the current program_state. */
1234 p.purge (sval);
1235 return sval;
1236 }
1237 conjured_svalue *conjured_sval
1238 = new conjured_svalue (type, stmt, id_reg);
1239 RETURN_UNKNOWN_IF_TOO_COMPLEX (conjured_sval);
1240 m_conjured_values_map.put (key, conjured_sval);
1241 return conjured_sval;
1242 }
1243
1244 /* Subroutine of region_model_manager::get_or_create_asm_output_svalue.
1245 Return a folded svalue, or NULL. */
1246
1247 const svalue *
1248 region_model_manager::
1249 maybe_fold_asm_output_svalue (tree type,
1250 const vec<const svalue *> &inputs)
1251 {
1252 /* Unknown inputs should lead to unknown results. */
1253 for (const auto &iter : inputs)
1254 if (iter->get_kind () == SK_UNKNOWN)
1255 return get_or_create_unknown_svalue (type);
1256
1257 return NULL;
1258 }
1259
1260 /* Return the svalue * of type TYPE for OUTPUT_IDX of the deterministic
1261 asm stmt ASM_STMT, given INPUTS as inputs. */
1262
1263 const svalue *
1264 region_model_manager::
1265 get_or_create_asm_output_svalue (tree type,
1266 const gasm *asm_stmt,
1267 unsigned output_idx,
1268 const vec<const svalue *> &inputs)
1269 {
1270 gcc_assert (inputs.length () <= asm_output_svalue::MAX_INPUTS);
1271
1272 if (const svalue *folded
1273 = maybe_fold_asm_output_svalue (type, inputs))
1274 return folded;
1275
1276 const char *asm_string = gimple_asm_string (asm_stmt);
1277 const unsigned noutputs = gimple_asm_noutputs (asm_stmt);
1278
1279 asm_output_svalue::key_t key (type, asm_string, output_idx, inputs);
1280 if (asm_output_svalue **slot = m_asm_output_values_map.get (key))
1281 return *slot;
1282 asm_output_svalue *asm_output_sval
1283 = new asm_output_svalue (type, asm_string, output_idx, noutputs, inputs);
1284 RETURN_UNKNOWN_IF_TOO_COMPLEX (asm_output_sval);
1285 m_asm_output_values_map.put (key, asm_output_sval);
1286 return asm_output_sval;
1287 }
1288
1289
1290 /* Return the svalue * of type TYPE for the result of a call to FNDECL
1291 with __attribute__((const)), given INPUTS as inputs. */
1292
1293 const svalue *
1294 region_model_manager::
1295 get_or_create_const_fn_result_svalue (tree type,
1296 tree fndecl,
1297 const vec<const svalue *> &inputs)
1298 {
1299 gcc_assert (type);
1300 gcc_assert (fndecl);
1301 gcc_assert (DECL_P (fndecl));
1302 gcc_assert (TREE_READONLY (fndecl));
1303 gcc_assert (inputs.length () <= const_fn_result_svalue::MAX_INPUTS);
1304
1305 const_fn_result_svalue::key_t key (type, fndecl, inputs);
1306 if (const_fn_result_svalue **slot = m_const_fn_result_values_map.get (key))
1307 return *slot;
1308 const_fn_result_svalue *const_fn_result_sval
1309 = new const_fn_result_svalue (type, fndecl, inputs);
1310 RETURN_UNKNOWN_IF_TOO_COMPLEX (const_fn_result_sval);
1311 m_const_fn_result_values_map.put (key, const_fn_result_sval);
1312 return const_fn_result_sval;
1313 }
1314
1315 /* Given STRING_CST (a STRING_CST) and BYTE_OFFSET_CST (a constant byte offset),
1316 attempt to get the character at that offset, returning either
1317 the svalue for the character constant, or NULL if unsuccessful. */
1318
1319 const svalue *
1320 region_model_manager::maybe_get_char_from_string_cst (tree string_cst,
1321 tree byte_offset_cst)
1322 {
1323 gcc_assert (TREE_CODE (string_cst) == STRING_CST);
1324
1325 /* Adapted from fold_read_from_constant_string. */
1326 scalar_int_mode char_mode;
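/* The offset must be a constant lying within the string, and the
   string's element type must be a single-byte integer mode.  */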
1327 if (TREE_CODE (byte_offset_cst) == INTEGER_CST
1328 && compare_tree_int (byte_offset_cst,
1329 TREE_STRING_LENGTH (string_cst)) < 0
1330 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string_cst))),
1331 &char_mode)
1332 && GET_MODE_SIZE (char_mode) == 1)
1333 {
1334 tree char_cst
1335 = build_int_cst_type (TREE_TYPE (TREE_TYPE (string_cst)),
1336 (TREE_STRING_POINTER (string_cst)
1337 [TREE_INT_CST_LOW (byte_offset_cst)]));
1338 return get_or_create_constant_svalue (char_cst);
1339 }
1340 return NULL;
1341 }
1342
1343 /* region consolidation. */
1344
1345 /* Return the region for FNDECL, creating it if necessary. */
1346
1347 const function_region *
1348 region_model_manager::get_region_for_fndecl (tree fndecl)
1349 {
1350 gcc_assert (TREE_CODE (fndecl) == FUNCTION_DECL);
1351
1352 function_region **slot = m_fndecls_map.get (fndecl);
1353 if (slot)
1354 return *slot;
1355 function_region *reg
1356 = new function_region (alloc_region_id (), &m_code_region, fndecl);
1357 m_fndecls_map.put (fndecl, reg);
1358 return reg;
1359 }
1360
1361 /* Return the region for LABEL, creating it if necessary. */
1362
1363 const label_region *
1364 region_model_manager::get_region_for_label (tree label)
1365 {
1366 gcc_assert (TREE_CODE (label) == LABEL_DECL);
1367
1368 label_region **slot = m_labels_map.get (label);
1369 if (slot)
1370 return *slot;
1371
1372 tree fndecl = DECL_CONTEXT (label);
1373 gcc_assert (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL);
1374
1375 const function_region *func_reg = get_region_for_fndecl (fndecl);
1376 label_region *reg
1377 = new label_region (alloc_region_id (), func_reg, label);
1378 m_labels_map.put (label, reg);
1379 return reg;
1380 }
1381
1382 /* Return the region for EXPR, creating it if necessary. */
1383
1384 const decl_region *
1385 region_model_manager::get_region_for_global (tree expr)
1386 {
1387 gcc_assert (TREE_CODE (expr) == VAR_DECL);
1388
1389 decl_region **slot = m_globals_map.get (expr);
1390 if (slot)
1391 return *slot;
1392 decl_region *reg
1393 = new decl_region (alloc_region_id (), &m_globals_region, expr);
1394 m_globals_map.put (expr, reg);
1395 return reg;
1396 }
1397
1398 /* Return the region for an unknown access of type REGION_TYPE,
1399 creating it if necessary.
1400 This is a symbolic_region, where the pointer is an unknown_svalue
1401 of type &REGION_TYPE. */
1402
1403 const region *
1404 region_model_manager::get_unknown_symbolic_region (tree region_type)
1405 {
1406 tree ptr_type = region_type ? build_pointer_type (region_type) : NULL_TREE;
1407 const svalue *unknown_ptr = get_or_create_unknown_svalue (ptr_type);
1408 return get_symbolic_region (unknown_ptr);
1409 }
1410
1411 /* Return the region that describes accessing field FIELD of PARENT,
1412 creating it if necessary. */
1413
1414 const region *
1415 region_model_manager::get_field_region (const region *parent, tree field)
1416 {
1417 gcc_assert (TREE_CODE (field) == FIELD_DECL);
1418
1419 /* (*UNKNOWN_PTR).field is (*UNKNOWN_PTR_OF_&FIELD_TYPE). */
1420 if (parent->symbolic_for_unknown_ptr_p ())
1421 return get_unknown_symbolic_region (TREE_TYPE (field));
1422
1423 field_region::key_t key (parent, field);
1424 if (field_region *reg = m_field_regions.get (key))
1425 return reg;
1426
1427 field_region *field_reg
1428 = new field_region (alloc_region_id (), parent, field);
1429 m_field_regions.put (key, field_reg);
1430 return field_reg;
1431 }
1432
1433 /* Return the region that describes accessing the element of type
1434 ELEMENT_TYPE at index INDEX of PARENT, creating it if necessary. */
1435
1436 const region *
1437 region_model_manager::get_element_region (const region *parent,
1438 tree element_type,
1439 const svalue *index)
1440 {
1441 /* (UNKNOWN_PTR[IDX]) is (UNKNOWN_PTR). */
1442 if (parent->symbolic_for_unknown_ptr_p ())
1443 return get_unknown_symbolic_region (element_type);
1444
1445 element_region::key_t key (parent, element_type, index);
1446 if (element_region *reg = m_element_regions.get (key))
1447 return reg;
1448
1449 element_region *element_reg
1450 = new element_region (alloc_region_id (), parent, element_type, index);
1451 m_element_regions.put (key, element_reg);
1452 return element_reg;
1453 }
1454
1455 /* Return the region that describes accessing the subregion of type
1456 TYPE at offset BYTE_OFFSET within PARENT, creating it if
1457 necessary. */
1458
1459 const region *
1460 region_model_manager::get_offset_region (const region *parent,
1461 tree type,
1462 const svalue *byte_offset)
1463 {
1464 /* (UNKNOWN_PTR + OFFSET) is (UNKNOWN_PTR). */
1465 if (parent->symbolic_for_unknown_ptr_p ())
1466 return get_unknown_symbolic_region (type);
1467
1468 /* If BYTE_OFFSET is zero, return PARENT, cast to TYPE if necessary. */
1469 if (tree cst_offset = byte_offset->maybe_get_constant ())
1470 if (zerop (cst_offset))
1471 return get_cast_region (parent, type);
1472
1473 /* Fold OFFSET_REGION(OFFSET_REGION(REG, X), Y)
1474 to OFFSET_REGION(REG, (X + Y)). */
1475 if (const offset_region *parent_offset_reg
1476 = parent->dyn_cast_offset_region ())
1477 {
1478 const svalue *sval_x = parent_offset_reg->get_byte_offset ();
1479 const svalue *sval_sum
1480 = get_or_create_binop (byte_offset->get_type (),
1481 PLUS_EXPR, sval_x, byte_offset);
1482 return get_offset_region (parent->get_parent_region (), type, sval_sum);
1483 }
1484
1485 offset_region::key_t key (parent, type, byte_offset);
1486 if (offset_region *reg = m_offset_regions.get (key))
1487 return reg;
1488
1489 offset_region *offset_reg
1490 = new offset_region (alloc_region_id (), parent, type, byte_offset);
1491 m_offset_regions.put (key, offset_reg);
1492 return offset_reg;
1493 }
1494
1495 /* Return the region that describes accessing the subregion of type
1496 TYPE of size BYTE_SIZE_SVAL within PARENT, creating it if necessary. */
1497
1498 const region *
1499 region_model_manager::get_sized_region (const region *parent,
1500 tree type,
1501 const svalue *byte_size_sval)
1502 {
1503 if (parent->symbolic_for_unknown_ptr_p ())
1504 return get_unknown_symbolic_region (type);
1505
1506 if (byte_size_sval->get_type () != size_type_node)
1507 byte_size_sval = get_or_create_cast (size_type_node, byte_size_sval);
1508
1509 /* If PARENT is already that size, return it. */
1510 const svalue *parent_byte_size_sval = parent->get_byte_size_sval (this);
1511 if (tree parent_size_cst = parent_byte_size_sval->maybe_get_constant ())
1512 if (tree size_cst = byte_size_sval->maybe_get_constant ())
1513 {
1514 tree comparison
1515 = fold_binary (EQ_EXPR, boolean_type_node, parent_size_cst, size_cst);
1516 if (comparison == boolean_true_node)
1517 return parent;
1518 }
1519
1520 sized_region::key_t key (parent, type, byte_size_sval);
1521 if (sized_region *reg = m_sized_regions.get (key))
1522 return reg;
1523
1524 sized_region *sized_reg
1525 = new sized_region (alloc_region_id (), parent, type, byte_size_sval);
1526 m_sized_regions.put (key, sized_reg);
1527 return sized_reg;
1528 }
1529
1530 /* Return the region that describes accessing ORIGINAL_REGION as if
1531 it were of type TYPE, creating it if necessary. */
1532
1533 const region *
1534 region_model_manager::get_cast_region (const region *original_region,
1535 tree type)
1536 {
1537 /* If types match, return ORIGINAL_REGION. */
1538 if (type == original_region->get_type ())
1539 return original_region;
1540
1541 if (original_region->symbolic_for_unknown_ptr_p ())
1542 return get_unknown_symbolic_region (type);
1543
1544 cast_region::key_t key (original_region, type);
1545 if (cast_region *reg = m_cast_regions.get (key))
1546 return reg;
1547
1548 cast_region *cast_reg
1549 = new cast_region (alloc_region_id (), original_region, type);
1550 m_cast_regions.put (key, cast_reg);
1551 return cast_reg;
1552 }
1553
1554 /* Return the frame_region for call to FUN from CALLING_FRAME, creating it
1555 if necessary. CALLING_FRAME may be NULL. */
1556
1557 const frame_region *
1558 region_model_manager::get_frame_region (const frame_region *calling_frame,
1559 function *fun)
1560 {
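/* The new frame is one level deeper than its calling frame, or at
   depth 0 for the outermost frame.  */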
1561 int index = calling_frame ? calling_frame->get_index () + 1 : 0;
1562
1563 frame_region::key_t key (calling_frame, fun);
1564 if (frame_region *reg = m_frame_regions.get (key))
1565 return reg;
1566
1567 frame_region *frame_reg
1568 = new frame_region (alloc_region_id (), &m_stack_region, calling_frame,
1569 fun, index);
1570 m_frame_regions.put (key, frame_reg);
1571 return frame_reg;
1572 }
1573
1574 /* Return the region that describes dereferencing SVAL, creating it
1575 if necessary. */
1576
1577 const region *
1578 region_model_manager::get_symbolic_region (const svalue *sval)
1579 {
1580 symbolic_region::key_t key (&m_root_region, sval);
1581 if (symbolic_region *reg = m_symbolic_regions.get (key))
1582 return reg;
1583
1584 symbolic_region *symbolic_reg
1585 = new symbolic_region (alloc_region_id (), &m_root_region, sval);
1586 m_symbolic_regions.put (key, symbolic_reg);
1587 return symbolic_reg;
1588 }
1589
1590 /* Return the region that describes accessing STRING_CST, creating it
1591 if necessary. */
1592
1593 const string_region *
1594 region_model_manager::get_region_for_string (tree string_cst)
1595 {
1596 gcc_assert (TREE_CODE (string_cst) == STRING_CST);
1597
1598 string_region **slot = m_string_map.get (string_cst);
1599 if (slot)
1600 return *slot;
1601 string_region *reg
1602 = new string_region (alloc_region_id (), &m_root_region, string_cst);
1603 m_string_map.put (string_cst, reg);
1604 return reg;
1605 }
1606
1607 /* Return the region that describes accessing BITS within PARENT as TYPE,
1608 creating it if necessary. */
1609
1610 const region *
get_bit_range(const region * parent,tree type,const bit_range & bits)1611 region_model_manager::get_bit_range (const region *parent, tree type,
1612 const bit_range &bits)
1613 {
1614 gcc_assert (parent);
1615
1616 if (parent->symbolic_for_unknown_ptr_p ())
1617 return get_unknown_symbolic_region (type);
1618
1619 bit_range_region::key_t key (parent, type, bits);
1620 if (bit_range_region *reg = m_bit_range_regions.get (key))
1621 return reg;
1622
1623 bit_range_region *bit_range_reg
1624 = new bit_range_region (alloc_region_id (), parent, type, bits);
1625 m_bit_range_regions.put (key, bit_range_reg);
1626 return bit_range_reg;
1627 }
1628
1629 /* If we see a tree code we don't know how to handle, rather than
1630 ICE or generate bogus results, create a dummy region, and notify
1631 CTXT so that it can mark the new state as being not properly
1632 modelled. The exploded graph can then stop exploring that path,
1633 since any diagnostics we might issue will have questionable
1634 validity. */
1635
1636 const region *
1637 region_model_manager::
get_region_for_unexpected_tree_code(region_model_context * ctxt,tree t,const dump_location_t & loc)1638 get_region_for_unexpected_tree_code (region_model_context *ctxt,
1639 tree t,
1640 const dump_location_t &loc)
1641 {
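  /* T may be either a type or an expression; either way, model it with a
     fresh unknown_region of that type (these are not consolidated).  */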
  tree type = TYPE_P (t) ? t : TREE_TYPE (t);
  region *new_reg
    = new unknown_region (alloc_region_id (), &m_root_region, type);
  if (ctxt)
    ctxt->on_unexpected_tree_code (t, loc);
  return new_reg;
}

/* Return a new region describing a heap-allocated block of memory.  */

const region *
region_model_manager::create_region_for_heap_alloc ()
{
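  /* Each call creates a distinct region; these aren't consolidated, but are
     recorded in m_managed_dynamic_regions so that this manager retains
     ownership of them.  */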
  region *reg
    = new heap_allocated_region (alloc_region_id (), &m_heap_region);
  m_managed_dynamic_regions.safe_push (reg);
  return reg;
}

/* Return a new region describing a block of memory allocated within FRAME.  */

const region *
region_model_manager::create_region_for_alloca (const frame_region *frame)
{
  gcc_assert (frame);
  region *reg = new alloca_region (alloc_region_id (), frame);
  m_managed_dynamic_regions.safe_push (reg);
  return reg;
}

/* Log OBJ to LOGGER.  */

template <typename T>
static void
log_managed_object (logger *logger, const T *obj)
{
  logger->start_log_line ();
  pretty_printer *pp = logger->get_printer ();
  pp_string (pp, " ");
  obj->dump_to_pp (pp, true);
  logger->end_log_line ();
}

/* Specialization for frame_region, which also logs the count of locals
   managed by the frame_region.  */

template <>
void
log_managed_object (logger *logger, const frame_region *obj)
{
  logger->start_log_line ();
  pretty_printer *pp = logger->get_printer ();
  pp_string (pp, " ");
  obj->dump_to_pp (pp, true);
  pp_printf (pp, " [with %i region(s) for locals]", obj->get_num_locals ());
  logger->end_log_line ();
}

/* Dump the number of objects that were managed by UNIQ_MAP to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

template <typename K, typename T>
static void
log_uniq_map (logger *logger, bool show_objs, const char *title,
              const hash_map<K, T*> &uniq_map)
{
  logger->log (" # %s: %li", title, (long)uniq_map.elements ());
  if (!show_objs)
    return;
  auto_vec<const T *> vec_objs (uniq_map.elements ());
  for (typename hash_map<K, T*>::iterator iter = uniq_map.begin ();
       iter != uniq_map.end (); ++iter)
    vec_objs.quick_push ((*iter).second);

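  /* Sort the objects (via T::cmp_ptr_ptr) so that the dump order is
     stable.  */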
  vec_objs.qsort (T::cmp_ptr_ptr);

  unsigned i;
  const T *obj;
  FOR_EACH_VEC_ELT (vec_objs, i, obj)
    log_managed_object<T> (logger, obj);
}

/* Dump the number of objects that were managed by MAP to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

template <typename T>
static void
log_uniq_map (logger *logger, bool show_objs, const char *title,
              const consolidation_map<T> &map)
{
  logger->log (" # %s: %li", title, (long)map.elements ());
  if (!show_objs)
    return;

  auto_vec<const T *> vec_objs (map.elements ());
  for (typename consolidation_map<T>::iterator iter = map.begin ();
       iter != map.end (); ++iter)
    vec_objs.quick_push ((*iter).second);

  vec_objs.qsort (T::cmp_ptr_ptr);

  unsigned i;
  const T *obj;
  FOR_EACH_VEC_ELT (vec_objs, i, obj)
    log_managed_object<T> (logger, obj);
}

/* Dump the number of objects of each class that were managed by this
   manager to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

void
region_model_manager::log_stats (logger *logger, bool show_objs) const
{
  LOG_SCOPE (logger);
  logger->log ("svalue consolidation");
  log_uniq_map (logger, show_objs, "constant_svalue", m_constants_map);
  log_uniq_map (logger, show_objs, "unknown_svalue", m_unknowns_map);
  if (m_unknown_NULL)
    log_managed_object (logger, m_unknown_NULL);
  log_uniq_map (logger, show_objs, "poisoned_svalue", m_poisoned_values_map);
  log_uniq_map (logger, show_objs, "setjmp_svalue", m_setjmp_values_map);
  log_uniq_map (logger, show_objs, "initial_svalue", m_initial_values_map);
  log_uniq_map (logger, show_objs, "region_svalue", m_pointer_values_map);
  log_uniq_map (logger, show_objs, "unaryop_svalue", m_unaryop_values_map);
  log_uniq_map (logger, show_objs, "binop_svalue", m_binop_values_map);
  log_uniq_map (logger, show_objs, "sub_svalue", m_sub_values_map);
  log_uniq_map (logger, show_objs, "repeated_svalue", m_repeated_values_map);
  log_uniq_map (logger, show_objs, "bits_within_svalue",
                m_bits_within_values_map);
  log_uniq_map (logger, show_objs, "unmergeable_svalue",
                m_unmergeable_values_map);
  log_uniq_map (logger, show_objs, "widening_svalue", m_widening_values_map);
  log_uniq_map (logger, show_objs, "compound_svalue", m_compound_values_map);
  log_uniq_map (logger, show_objs, "conjured_svalue", m_conjured_values_map);
  log_uniq_map (logger, show_objs, "asm_output_svalue",
                m_asm_output_values_map);
  log_uniq_map (logger, show_objs, "const_fn_result_svalue",
                m_const_fn_result_values_map);

  logger->log ("max accepted svalue num_nodes: %i",
               m_max_complexity.m_num_nodes);
  logger->log ("max accepted svalue max_depth: %i",
               m_max_complexity.m_max_depth);

  logger->log ("region consolidation");
  logger->log (" next region id: %i", m_next_region_id);
  log_uniq_map (logger, show_objs, "function_region", m_fndecls_map);
  log_uniq_map (logger, show_objs, "label_region", m_labels_map);
  log_uniq_map (logger, show_objs, "decl_region for globals", m_globals_map);
  log_uniq_map (logger, show_objs, "field_region", m_field_regions);
  log_uniq_map (logger, show_objs, "element_region", m_element_regions);
  log_uniq_map (logger, show_objs, "offset_region", m_offset_regions);
  log_uniq_map (logger, show_objs, "sized_region", m_sized_regions);
  log_uniq_map (logger, show_objs, "cast_region", m_cast_regions);
  log_uniq_map (logger, show_objs, "frame_region", m_frame_regions);
  log_uniq_map (logger, show_objs, "symbolic_region", m_symbolic_regions);
  log_uniq_map (logger, show_objs, "string_region", m_string_map);
  log_uniq_map (logger, show_objs, "bit_range_region", m_bit_range_regions);
  logger->log (" # managed dynamic regions: %i",
               m_managed_dynamic_regions.length ());
  m_store_mgr.log_stats (logger, show_objs);
  m_range_mgr->log_stats (logger, show_objs);
}

/* Dump the number of objects of each class that were managed by this
   manager to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.
   This is here so it can use log_uniq_map.  */

void
store_manager::log_stats (logger *logger, bool show_objs) const
{
  LOG_SCOPE (logger);
  log_uniq_map (logger, show_objs, "concrete_binding",
                m_concrete_binding_key_mgr);
  log_uniq_map (logger, show_objs, "symbolic_binding",
                m_symbolic_binding_key_mgr);
}

/* Emit a warning showing DECL_REG->tracked_p () for use in DejaGnu tests
   (using -fdump-analyzer-untracked).  */

static void
dump_untracked_region (const decl_region *decl_reg)
{
  tree decl = decl_reg->get_decl ();
  if (TREE_CODE (decl) != VAR_DECL)
    return;
  /* For now, don't emit the status of decls in the constant pool, to avoid
     differences in DejaGnu test results between targets that use these vs
     those that don't.
     (Eventually these decls should probably be untracked and we should test
     for that, but that's not stage 4 material).  */
  if (DECL_IN_CONSTANT_POOL (decl))
    return;
  warning_at (DECL_SOURCE_LOCATION (decl), 0,
              "track %qD: %s",
              decl, (decl_reg->tracked_p () ? "yes" : "no"));
}

/* Implementation of -fdump-analyzer-untracked.  */

void
region_model_manager::dump_untracked_regions () const
{
  for (auto iter : m_globals_map)
    {
      const decl_region *decl_reg = iter.second;
      dump_untracked_region (decl_reg);
    }
  for (auto frame_iter : m_frame_regions)
    {
      const frame_region *frame_reg = frame_iter.second;
      frame_reg->dump_untracked_regions ();
    }
}

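/* Subroutine of region_model_manager::dump_untracked_regions: dump the
   tracked_p () status of each local decl_region within this frame.  */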
void
frame_region::dump_untracked_regions () const
{
  for (auto iter : m_locals)
    {
      const decl_region *decl_reg = iter.second;
      dump_untracked_region (decl_reg);
    }
}

} // namespace ana

#endif /* #if ENABLE_ANALYZER */