/* Classes for modeling the state of memory.
   Copyright (C) 2019-2020 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "function.h"
#include "basic-block.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "diagnostic-core.h"
#include "graphviz.h"
#include "options.h"
#include "cgraph.h"
#include "tree-dfa.h"
#include "stringpool.h"
#include "convert.h"
#include "target.h"
#include "fold-const.h"
#include "tree-pretty-print.h"
#include "diagnostic-color.h"
#include "diagnostic-metadata.h"
#include "tristate.h"
#include "bitmap.h"
#include "selftest.h"
#include "analyzer/analyzer.h"
#include "analyzer/analyzer-logging.h"
#include "ordered-hash-map.h"
#include "cfg.h"
#include "digraph.h"
#include "analyzer/supergraph.h"
#include "sbitmap.h"
#include "analyzer/region-model.h"
#include "analyzer/constraint-manager.h"
#include "diagnostic-event-id.h"
#include "analyzer/sm.h"
#include "analyzer/pending-diagnostic.h"
#include "analyzer/analyzer-selftests.h"
#include "stor-layout.h"

#if ENABLE_ANALYZER

namespace ana {

/* Dump T to PP in language-independent form, for debugging/logging/dumping
   purposes.  */

static void
dump_tree (pretty_printer *pp, tree t)
{
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
}

/* Dump T to PP in language-independent form in quotes, for
   debugging/logging/dumping purposes.  */

void
dump_quoted_tree (pretty_printer *pp, tree t)
{
  pp_begin_quote (pp, pp_show_color (pp));
  dump_tree (pp, t);
  pp_end_quote (pp, pp_show_color (pp));
}

/* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
   calls within other pp_printf calls.

   default_tree_printer handles 'T' and some other codes by calling
     dump_generic_node (pp, t, 0, TDF_SLIM, 0);
   dump_generic_node calls pp_printf in various places, leading to
   garbled output.

   Ideally pp_printf could be made to be reentrant, but in the meantime
   this function provides a workaround.  */

static void
print_quoted_type (pretty_printer *pp, tree t)
{
  pp_begin_quote (pp, pp_show_color (pp));
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
  pp_end_quote (pp, pp_show_color (pp));
}
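/* Illustrative usage sketch (an editorial addition; variable names are
   hypothetical): rather than nesting a type inside an outer pp_printf
   call, callers in this file such as svalue::print split the output:

     pp_string (pp, "type: ");
     print_quoted_type (pp, type);

   so that dump_generic_node's own pp_printf calls cannot interleave
   with an in-progress pp_printf.  */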
/* Dump this path_var to PP (which must support %E for trees).

   Express the stack depth using an "@DEPTH" suffix, so e.g. given
     void foo (int j);
     void bar (int i)
     {
       foo (i);
     }
   then:
   - the "i" in "bar" would be "(i @ 0)"
   - the "j" in "foo" would be "(j @ 1)".  */

void
path_var::dump (pretty_printer *pp) const
{
  if (m_tree == NULL_TREE)
    pp_string (pp, "NULL");
  else if (CONSTANT_CLASS_P (m_tree))
    pp_printf (pp, "%qE", m_tree);
  else
    pp_printf (pp, "(%qE @ %i)", m_tree, m_stack_depth);
}

/* For use in printing a comma-separated list.  */

static void
dump_separator (pretty_printer *pp, bool *is_first)
{
  if (!*is_first)
    pp_string (pp, ", ");
  *is_first = false;
}

/* Concrete subclass of constraint_manager that wires it up to a region_model
   (whilst allowing the constraint_manager and region_model to be somewhat
   at arms length).
   TODO: revisit this; maybe put the region_model * into the
   constraint_manager base class.  */

class impl_constraint_manager : public constraint_manager
{
 public:
  impl_constraint_manager (region_model *model)
  : constraint_manager (),
    m_model (model)
  {}

  impl_constraint_manager (const impl_constraint_manager &other,
                           region_model *model)
  : constraint_manager (other),
    m_model (model)
  {}

  constraint_manager *clone (region_model *model) const
  {
    return new impl_constraint_manager (*this, model);
  }

  tree maybe_get_constant (svalue_id sid) const FINAL OVERRIDE
  {
    svalue *svalue = m_model->get_svalue (sid);
    return svalue->maybe_get_constant ();
  }

  svalue_id get_sid_for_constant (tree cst) const FINAL OVERRIDE
  {
    gcc_assert (CONSTANT_CLASS_P (cst));
    return m_model->get_rvalue (cst, NULL);
  }

  int get_num_svalues () const FINAL OVERRIDE
  {
    return m_model->get_num_svalues ();
  }

 private:
  region_model *m_model;
};

/* class svalue_id.  */

/* Print this svalue_id to PP.  */

void
svalue_id::print (pretty_printer *pp) const
{
  if (null_p ())
    pp_printf (pp, "null");
  else
    pp_printf (pp, "sv%i", m_idx);
}

/* Print this svalue_id in .dot format to PP.  */

void
svalue_id::dump_node_name_to_pp (pretty_printer *pp) const
{
  gcc_assert (!null_p ());
  pp_printf (pp, "svalue_%i", m_idx);
}

/* Assert that this object is valid (w.r.t. MODEL).  */

void
svalue_id::validate (const region_model &model) const
{
  gcc_assert (null_p () || m_idx < (int)model.get_num_svalues ());
}

/* class region_id.  */

/* Print this region_id to PP.  */

void
region_id::print (pretty_printer *pp) const
{
  if (null_p ())
    pp_printf (pp, "null");
  else
    pp_printf (pp, "r%i", m_idx);
}

/* Print this region_id in .dot format to PP.  */

void
region_id::dump_node_name_to_pp (pretty_printer *pp) const
{
  gcc_assert (!null_p ());
  pp_printf (pp, "region_%i", m_idx);
}

/* Assert that this object is valid (w.r.t. MODEL).  */

void
region_id::validate (const region_model &model) const
{
  gcc_assert (null_p () || m_idx < (int)model.get_num_regions ());
}

/* class region_id_set.  */

region_id_set::region_id_set (const region_model *model)
: m_bitmap (model->get_num_regions ())
{
  bitmap_clear (m_bitmap);
}

/* class svalue_id_set.  */

svalue_id_set::svalue_id_set ()
: m_bitmap (NULL)
{
  bitmap_clear (m_bitmap);
}
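/* Illustrative usage sketch (an editorial addition; PARENT_RID is a
   hypothetical name): region_id_set is a bitmap of region indices, and
   the pattern used later in this file (e.g. when deactivating views) is
   roughly:

     region_id_set below (&model);
     model.get_descendents (parent_rid, &below, region_id::null ());
     for (unsigned i = 0; i < model.get_num_regions (); i++)
       if (below.region_p (region_id::from_int (i)))
         ...clear or update that region...

   i.e. compute the set once, then test membership cheaply per region.  */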
Most of the work is done by the 267 a "compare_fields" implementation on each subclass. */ 268 269 bool 270 svalue::operator== (const svalue &other) const 271 { 272 enum svalue_kind this_kind = get_kind (); 273 enum svalue_kind other_kind = other.get_kind (); 274 if (this_kind != other_kind) 275 return false; 276 277 if (m_type != other.m_type) 278 return false; 279 280 switch (this_kind) 281 { 282 default: 283 gcc_unreachable (); 284 case SK_REGION: 285 { 286 const region_svalue &this_sub 287 = (const region_svalue &)*this; 288 const region_svalue &other_sub 289 = (const region_svalue &)other; 290 return this_sub.compare_fields (other_sub); 291 } 292 break; 293 case SK_CONSTANT: 294 { 295 const constant_svalue &this_sub 296 = (const constant_svalue &)*this; 297 const constant_svalue &other_sub 298 = (const constant_svalue &)other; 299 return this_sub.compare_fields (other_sub); 300 } 301 break; 302 case SK_UNKNOWN: 303 { 304 const unknown_svalue &this_sub 305 = (const unknown_svalue &)*this; 306 const unknown_svalue &other_sub 307 = (const unknown_svalue &)other; 308 return this_sub.compare_fields (other_sub); 309 } 310 break; 311 case SK_POISONED: 312 { 313 const poisoned_svalue &this_sub 314 = (const poisoned_svalue &)*this; 315 const poisoned_svalue &other_sub 316 = (const poisoned_svalue &)other; 317 return this_sub.compare_fields (other_sub); 318 } 319 break; 320 case SK_SETJMP: 321 { 322 const setjmp_svalue &this_sub 323 = (const setjmp_svalue &)*this; 324 const setjmp_svalue &other_sub 325 = (const setjmp_svalue &)other; 326 return this_sub.compare_fields (other_sub); 327 } 328 break; 329 } 330 } 331 332 /* Generate a hash value for this svalue. Most of the work is done by the 333 add_to_hash vfunc. */ 334 335 hashval_t 336 svalue::hash () const 337 { 338 inchash::hash hstate; 339 if (m_type) 340 hstate.add_int (TYPE_UID (m_type)); 341 add_to_hash (hstate); 342 return hstate.end (); 343 } 344 345 /* Print this svalue and its ID to PP. */ 346 347 void 348 svalue::print (const region_model &model, 349 svalue_id this_sid, 350 pretty_printer *pp) const 351 { 352 this_sid.print (pp); 353 pp_string (pp, ": {"); 354 355 if (m_type) 356 { 357 gcc_assert (TYPE_P (m_type)); 358 pp_string (pp, "type: "); 359 print_quoted_type (pp, m_type); 360 pp_string (pp, ", "); 361 } 362 363 /* vfunc. */ 364 print_details (model, this_sid, pp); 365 366 pp_string (pp, "}"); 367 } 368 369 /* Dump this svalue in the form of a .dot record to PP. */ 370 371 void 372 svalue::dump_dot_to_pp (const region_model &model, 373 svalue_id this_sid, 374 pretty_printer *pp) const 375 { 376 this_sid.dump_node_name_to_pp (pp); 377 pp_printf (pp, " [label=\""); 378 pp_write_text_to_stream (pp); 379 this_sid.print (pp); 380 pp_string (pp, ": {"); 381 print (model, this_sid, pp); 382 pp_write_text_as_dot_label_to_stream (pp, /*for_record=*/false); 383 pp_string (pp, "}\"];"); 384 pp_newline (pp); 385 } 386 387 /* Base implementation of svalue::remap_region_ids vfunc. */ 388 389 void 390 svalue::remap_region_ids (const region_id_map &) 391 { 392 /* Empty. */ 393 } 394 395 /* Base implementation of svalue::walk_for_canonicalization vfunc. */ 396 397 void 398 svalue::walk_for_canonicalization (canonicalization *) const 399 { 400 /* Empty. */ 401 } 402 403 /* Base implementation of svalue::get_child_sid vfunc. 
/* class region_svalue : public svalue.  */

/* Compare the fields of this region_svalue with OTHER, returning true
   if they are equal.
   For use by svalue::operator==.  */

bool
region_svalue::compare_fields (const region_svalue &other) const
{
  return m_rid == other.m_rid;
}

/* Implementation of svalue::add_to_hash vfunc for region_svalue.  */

void
region_svalue::add_to_hash (inchash::hash &hstate) const
{
  inchash::add (m_rid, hstate);
}

/* Implementation of svalue::print_details vfunc for region_svalue.  */

void
region_svalue::print_details (const region_model &model ATTRIBUTE_UNUSED,
                              svalue_id this_sid ATTRIBUTE_UNUSED,
                              pretty_printer *pp) const
{
  if (m_rid.null_p ())
    pp_string (pp, "NULL");
  else
    {
      pp_string (pp, "&");
      m_rid.print (pp);
    }
}

/* Implementation of svalue::dump_dot_to_pp for region_svalue.  */

void
region_svalue::dump_dot_to_pp (const region_model &model,
                               svalue_id this_sid,
                               pretty_printer *pp) const
{
  svalue::dump_dot_to_pp (model, this_sid, pp);

  /* If non-NULL, add an edge to the pointed-to region.  */
  if (!m_rid.null_p ())
    {
      this_sid.dump_node_name_to_pp (pp);
      pp_string (pp, " -> ");
      m_rid.dump_node_name_to_pp (pp);
      pp_string (pp, ";");
      pp_newline (pp);
    }
}

/* Implementation of svalue::remap_region_ids vfunc for region_svalue.  */

void
region_svalue::remap_region_ids (const region_id_map &map)
{
  map.update (&m_rid);
}

/* Merge REGION_SVAL_A and REGION_SVAL_B using MERGER, writing the result
   into *MERGED_SID.  */

void
region_svalue::merge_values (const region_svalue &region_sval_a,
                             const region_svalue &region_sval_b,
                             svalue_id *merged_sid,
                             tree type,
                             model_merger *merger)
{
  region_id a_rid = region_sval_a.get_pointee ();
  region_id b_rid = region_sval_b.get_pointee ();

  /* Both are non-NULL.  */
  gcc_assert (!a_rid.null_p () && !b_rid.null_p ());

  /* Have these ptr-values already been merged?  */

  region_id a_rid_in_m
    = merger->m_map_regions_from_a_to_m.get_dst_for_src (a_rid);
  region_id b_rid_in_m
    = merger->m_map_regions_from_b_to_m.get_dst_for_src (b_rid);

  /* "null_p" here means "we haven't seen this ptr-value before".
     If we've seen one but not the other, or we have different
     regions, then the merged ptr has to be "unknown".  */
  if (a_rid_in_m != b_rid_in_m)
    {
      svalue *merged_sval = new unknown_svalue (type);
      *merged_sid = merger->m_merged_model->add_svalue (merged_sval);
      return;
    }

  /* Have we seen this yet?  If so, reuse the value.
*/ 528 if (!a_rid_in_m.null_p ()) 529 { 530 *merged_sid 531 = merger->m_merged_model->get_or_create_ptr_svalue (type, a_rid_in_m); 532 return; 533 } 534 535 /* Otherwise we have A/B regions that haven't been referenced yet. */ 536 537 /* Are the regions the "same", when seen from the tree point-of-view. 538 If so, create a merged pointer to it. */ 539 path_var pv_a = merger->m_model_a->get_representative_path_var (a_rid); 540 path_var pv_b = merger->m_model_b->get_representative_path_var (b_rid); 541 if (pv_a.m_tree 542 && pv_a == pv_b) 543 { 544 region_id merged_pointee_rid 545 = merger->m_merged_model->get_lvalue (pv_a, NULL); 546 *merged_sid 547 = merger->m_merged_model->get_or_create_ptr_svalue (type, 548 merged_pointee_rid); 549 merger->record_regions (a_rid, b_rid, merged_pointee_rid); 550 return; 551 } 552 553 /* Handle an A/B pair of ptrs that both point at heap regions. 554 If they both have a heap region in the merger model, merge them. */ 555 region *region_a = merger->m_model_a->get_region (a_rid); 556 region *region_b = merger->m_model_b->get_region (b_rid); 557 region_id a_parent_rid = region_a->get_parent (); 558 region_id b_parent_rid = region_b->get_parent (); 559 region *parent_region_a = merger->m_model_a->get_region (a_parent_rid); 560 region *parent_region_b = merger->m_model_b->get_region (b_parent_rid); 561 if (parent_region_a 562 && parent_region_b 563 && parent_region_a->get_kind () == RK_HEAP 564 && parent_region_b->get_kind () == RK_HEAP) 565 { 566 /* We have an A/B pair of ptrs that both point at heap regions. */ 567 /* presumably we want to see if each A/B heap region already 568 has a merged region, and, if so, is it the same one. */ 569 // This check is above 570 571 region_id merged_pointee_rid 572 = merger->m_merged_model->add_new_malloc_region (); 573 *merged_sid 574 = merger->m_merged_model->get_or_create_ptr_svalue 575 (type, merged_pointee_rid); 576 merger->record_regions (a_rid, b_rid, merged_pointee_rid); 577 return; 578 } 579 580 /* Two different non-NULL pointers? Merge to unknown. */ 581 svalue *merged_sval = new unknown_svalue (type); 582 *merged_sid = merger->m_merged_model->add_svalue (merged_sval); 583 return; 584 } 585 586 /* Implementation of svalue::walk_for_canonicalization vfunc for 587 region_svalue. */ 588 589 void 590 region_svalue::walk_for_canonicalization (canonicalization *c) const 591 { 592 c->walk_rid (m_rid); 593 } 594 595 /* Evaluate the condition LHS OP RHS. 596 Subroutine of region_model::eval_condition for when we have a pair of 597 pointers. */ 598 599 tristate 600 region_svalue::eval_condition (region_svalue *lhs, 601 enum tree_code op, 602 region_svalue *rhs) 603 { 604 /* See if they point to the same region. */ 605 /* TODO: what about child regions where the child is the first child 606 (or descendent)? */ 607 region_id lhs_rid = lhs->get_pointee (); 608 region_id rhs_rid = rhs->get_pointee (); 609 switch (op) 610 { 611 default: 612 gcc_unreachable (); 613 614 case EQ_EXPR: 615 if (lhs_rid == rhs_rid) 616 return tristate::TS_TRUE; 617 else 618 return tristate::TS_FALSE; 619 break; 620 621 case NE_EXPR: 622 if (lhs_rid != rhs_rid) 623 return tristate::TS_TRUE; 624 else 625 return tristate::TS_FALSE; 626 break; 627 628 case GE_EXPR: 629 case LE_EXPR: 630 if (lhs_rid == rhs_rid) 631 return tristate::TS_TRUE; 632 break; 633 634 case GT_EXPR: 635 case LT_EXPR: 636 if (lhs_rid == rhs_rid) 637 return tristate::TS_FALSE; 638 break; 639 } 640 641 return tristate::TS_UNKNOWN; 642 } 643 644 /* class constant_svalue : public svalue. 
*/ 645 646 /* Compare the fields of this constant_svalue with OTHER, returning true 647 if they are equal. 648 For use by svalue::operator==. */ 649 650 bool 651 constant_svalue::compare_fields (const constant_svalue &other) const 652 { 653 return m_cst_expr == other.m_cst_expr; 654 } 655 656 /* Implementation of svalue::add_to_hash vfunc for constant_svalue. */ 657 658 void 659 constant_svalue::add_to_hash (inchash::hash &hstate) const 660 { 661 inchash::add_expr (m_cst_expr, hstate); 662 } 663 664 /* Merge the CST_SVAL_A and CST_SVAL_B using MERGER, writing the id of 665 the resulting svalue into *MERGED_SID. */ 666 667 void 668 constant_svalue::merge_values (const constant_svalue &cst_sval_a, 669 const constant_svalue &cst_sval_b, 670 svalue_id *merged_sid, 671 model_merger *merger) 672 { 673 tree cst_a = cst_sval_a.get_constant (); 674 tree cst_b = cst_sval_b.get_constant (); 675 svalue *merged_sval; 676 if (cst_a == cst_b) 677 { 678 /* If they are the same constant, merge as that constant value. */ 679 merged_sval = new constant_svalue (cst_a); 680 } 681 else 682 { 683 /* Otherwise, we have two different constant values. 684 Merge as an unknown value. 685 TODO: impose constraints on the value? 686 (maybe just based on A, to avoid infinite chains) */ 687 merged_sval = new unknown_svalue (TREE_TYPE (cst_a)); 688 } 689 *merged_sid = merger->m_merged_model->add_svalue (merged_sval); 690 } 691 692 /* Evaluate the condition LHS OP RHS. 693 Subroutine of region_model::eval_condition for when we have a pair of 694 constants. */ 695 696 tristate 697 constant_svalue::eval_condition (constant_svalue *lhs, 698 enum tree_code op, 699 constant_svalue *rhs) 700 { 701 tree lhs_const = lhs->get_constant (); 702 tree rhs_const = rhs->get_constant (); 703 704 gcc_assert (CONSTANT_CLASS_P (lhs_const)); 705 gcc_assert (CONSTANT_CLASS_P (rhs_const)); 706 707 /* Check for comparable types. */ 708 if (types_compatible_p (TREE_TYPE (lhs_const), TREE_TYPE (rhs_const))) 709 { 710 tree comparison 711 = fold_binary (op, boolean_type_node, lhs_const, rhs_const); 712 if (comparison == boolean_true_node) 713 return tristate (tristate::TS_TRUE); 714 if (comparison == boolean_false_node) 715 return tristate (tristate::TS_FALSE); 716 } 717 return tristate::TS_UNKNOWN; 718 } 719 720 /* Implementation of svalue::print_details vfunc for constant_svalue. */ 721 722 void 723 constant_svalue::print_details (const region_model &model ATTRIBUTE_UNUSED, 724 svalue_id this_sid ATTRIBUTE_UNUSED, 725 pretty_printer *pp) const 726 { 727 pp_printf (pp, "%qE", m_cst_expr); 728 } 729 730 /* Implementation of svalue::get_child_sid vfunc for constant_svalue. */ 731 732 svalue_id 733 constant_svalue::get_child_sid (region *parent ATTRIBUTE_UNUSED, 734 region *child, 735 region_model &model, 736 region_model_context *ctxt ATTRIBUTE_UNUSED) 737 { 738 /* TODO: handle the all-zeroes case by returning an all-zeroes of the 739 child type. */ 740 741 /* Otherwise, we don't have a good way to get a child value out of a 742 constant. 743 744 Handle this case by using an unknown value. */ 745 svalue *unknown_sval = new unknown_svalue (child->get_type ()); 746 return model.add_svalue (unknown_sval); 747 } 748 749 /* class unknown_svalue : public svalue. */ 750 751 /* Compare the fields of this unknown_svalue with OTHER, returning true 752 if they are equal. 753 For use by svalue::operator==. 
*/ 754 755 bool 756 unknown_svalue::compare_fields (const unknown_svalue &) const 757 { 758 /* I *think* we want to return true here, in that when comparing 759 two region models, we want two peer unknown_svalue instances 760 to be the "same". */ 761 return true; 762 } 763 764 /* Implementation of svalue::add_to_hash vfunc for unknown_svalue. */ 765 766 void 767 unknown_svalue::add_to_hash (inchash::hash &) const 768 { 769 /* Empty. */ 770 } 771 772 /* Implementation of svalue::print_details vfunc for unknown_svalue. */ 773 774 void 775 unknown_svalue::print_details (const region_model &model ATTRIBUTE_UNUSED, 776 svalue_id this_sid ATTRIBUTE_UNUSED, 777 pretty_printer *pp) const 778 { 779 pp_string (pp, "unknown"); 780 } 781 782 /* Get a string for KIND for use in debug dumps. */ 783 784 const char * 785 poison_kind_to_str (enum poison_kind kind) 786 { 787 switch (kind) 788 { 789 default: 790 gcc_unreachable (); 791 case POISON_KIND_FREED: 792 return "freed"; 793 case POISON_KIND_POPPED_STACK: 794 return "popped stack"; 795 } 796 } 797 798 /* class poisoned_svalue : public svalue. */ 799 800 /* Compare the fields of this poisoned_svalue with OTHER, returning true 801 if they are equal. 802 For use by svalue::operator==. */ 803 804 bool 805 poisoned_svalue::compare_fields (const poisoned_svalue &other) const 806 { 807 return m_kind == other.m_kind; 808 } 809 810 /* Implementation of svalue::add_to_hash vfunc for poisoned_svalue. */ 811 812 void 813 poisoned_svalue::add_to_hash (inchash::hash &hstate) const 814 { 815 hstate.add_int (m_kind); 816 } 817 818 /* Implementation of svalue::print_details vfunc for poisoned_svalue. */ 819 820 void 821 poisoned_svalue::print_details (const region_model &model ATTRIBUTE_UNUSED, 822 svalue_id this_sid ATTRIBUTE_UNUSED, 823 pretty_printer *pp) const 824 { 825 pp_printf (pp, "poisoned: %s", poison_kind_to_str (m_kind)); 826 } 827 828 /* class setjmp_svalue's implementation is in engine.cc, so that it can use 829 the declaration of exploded_node. */ 830 831 /* class region and its various subclasses. */ 832 833 /* Get a string for KIND for use in debug dumps. */ 834 835 const char * 836 region_kind_to_str (enum region_kind kind) 837 { 838 switch (kind) 839 { 840 default: 841 gcc_unreachable (); 842 case RK_PRIMITIVE: 843 return "primitive"; 844 case RK_STRUCT: 845 return "struct"; 846 case RK_UNION: 847 return "union"; 848 case RK_ARRAY: 849 return "array"; 850 case RK_FRAME: 851 return "frame"; 852 case RK_GLOBALS: 853 return "globals"; 854 case RK_CODE: 855 return "code"; 856 case RK_FUNCTION: 857 return "function"; 858 case RK_STACK: 859 return "stack"; 860 case RK_HEAP: 861 return "heap"; 862 case RK_ROOT: 863 return "root"; 864 case RK_SYMBOLIC: 865 return "symbolic"; 866 } 867 } 868 869 /* class region. */ 870 871 /* Equality operator for region. 872 After comparing base class fields and kind, the rest of the 873 comparison is handled off to a "compare_fields" member function 874 specific to the appropriate subclass. */ 875 876 bool 877 region::operator== (const region &other) const 878 { 879 if (m_parent_rid != other.m_parent_rid) 880 return false; 881 if (m_sval_id != other.m_sval_id) 882 return false; 883 if (m_type != other.m_type) 884 return false; 885 886 enum region_kind this_kind = get_kind (); 887 enum region_kind other_kind = other.get_kind (); 888 if (this_kind != other_kind) 889 return false; 890 891 /* Compare views. 
*/ 892 if (m_view_rids.length () != other.m_view_rids.length ()) 893 return false; 894 int i; 895 region_id *rid; 896 FOR_EACH_VEC_ELT (m_view_rids, i, rid) 897 if (! (*rid == other.m_view_rids[i])) 898 return false; 899 900 switch (this_kind) 901 { 902 default: 903 gcc_unreachable (); 904 case RK_PRIMITIVE: 905 { 906 #if 1 907 return true; 908 #else 909 const primitive_region &this_sub 910 = (const primitive_region &)*this; 911 const primitive_region &other_sub 912 = (const primitive_region &)other; 913 return this_sub.compare_fields (other_sub); 914 #endif 915 } 916 case RK_STRUCT: 917 { 918 const struct_region &this_sub 919 = (const struct_region &)*this; 920 const struct_region &other_sub 921 = (const struct_region &)other; 922 return this_sub.compare_fields (other_sub); 923 } 924 case RK_UNION: 925 { 926 const union_region &this_sub 927 = (const union_region &)*this; 928 const union_region &other_sub 929 = (const union_region &)other; 930 return this_sub.compare_fields (other_sub); 931 } 932 case RK_ARRAY: 933 { 934 const array_region &this_sub 935 = (const array_region &)*this; 936 const array_region &other_sub 937 = (const array_region &)other; 938 return this_sub.compare_fields (other_sub); 939 } 940 case RK_FRAME: 941 { 942 const frame_region &this_sub 943 = (const frame_region &)*this; 944 const frame_region &other_sub 945 = (const frame_region &)other; 946 return this_sub.compare_fields (other_sub); 947 } 948 case RK_GLOBALS: 949 { 950 const globals_region &this_sub 951 = (const globals_region &)*this; 952 const globals_region &other_sub 953 = (const globals_region &)other; 954 return this_sub.compare_fields (other_sub); 955 } 956 case RK_CODE: 957 { 958 const code_region &this_sub 959 = (const code_region &)*this; 960 const code_region &other_sub 961 = (const code_region &)other; 962 return this_sub.compare_fields (other_sub); 963 } 964 case RK_FUNCTION: 965 { 966 const function_region &this_sub 967 = (const function_region &)*this; 968 const function_region &other_sub 969 = (const function_region &)other; 970 return this_sub.compare_fields (other_sub); 971 } 972 case RK_STACK: 973 { 974 const stack_region &this_sub 975 = (const stack_region &)*this; 976 const stack_region &other_sub 977 = (const stack_region &)other; 978 return this_sub.compare_fields (other_sub); 979 } 980 case RK_ROOT: 981 { 982 const root_region &this_sub 983 = (const root_region &)*this; 984 const root_region &other_sub 985 = (const root_region &)other; 986 return this_sub.compare_fields (other_sub); 987 } 988 case RK_SYMBOLIC: 989 { 990 const symbolic_region &this_sub 991 = (const symbolic_region &)*this; 992 const symbolic_region &other_sub 993 = (const symbolic_region &)other; 994 return this_sub.compare_fields (other_sub); 995 } 996 case RK_HEAP: 997 { 998 const heap_region &this_sub 999 = (const heap_region &)*this; 1000 const heap_region &other_sub 1001 = (const heap_region &)other; 1002 return this_sub.compare_fields (other_sub); 1003 } 1004 } 1005 } 1006 1007 /* Get the parent region of this region. */ 1008 1009 region * 1010 region::get_parent_region (const region_model &model) const 1011 { 1012 return model.get_region (m_parent_rid); 1013 } 1014 1015 /* Set this region's value to RHS_SID (or potentially a variant of it, 1016 for some kinds of casts). */ 1017 1018 void 1019 region::set_value (region_model &model, region_id this_rid, svalue_id rhs_sid, 1020 region_model_context *ctxt) 1021 { 1022 /* Handle some kinds of casting. 
*/ 1023 if (m_type) 1024 { 1025 svalue *sval = model.get_svalue (rhs_sid); 1026 if (sval->get_type ()) 1027 rhs_sid = model.maybe_cast (m_type, rhs_sid, ctxt); 1028 1029 sval = model.get_svalue (rhs_sid); 1030 if (sval->get_type ()) 1031 gcc_assert (m_type == sval->get_type ()); 1032 } 1033 1034 m_sval_id = rhs_sid; 1035 1036 /* Update views. 1037 If this is a view, it becomes its parent's active view. 1038 If there was already an active views, invalidate its value; otherwise 1039 if the parent itself had a value, invalidate it. 1040 If it's not a view, then deactivate any view that is active on this 1041 region. */ 1042 { 1043 if (m_is_view) 1044 become_active_view (model, this_rid); 1045 else 1046 { 1047 deactivate_any_active_view (model); 1048 gcc_assert (m_active_view_rid.null_p ()); 1049 } 1050 } 1051 } 1052 1053 /* Make this region (with id THIS_RID) the "active" view of its parent. 1054 Any other active view has its value set to "unknown" and descendent values 1055 cleared. 1056 If there wasn't an active view, then set the parent's value to unknown, and 1057 clear its descendent values (apart from this view). */ 1058 1059 void 1060 region::become_active_view (region_model &model, region_id this_rid) 1061 { 1062 gcc_assert (m_is_view); 1063 1064 region *parent_reg = model.get_region (m_parent_rid); 1065 gcc_assert (parent_reg); 1066 1067 region_id old_active_view_rid = parent_reg->m_active_view_rid; 1068 1069 if (old_active_view_rid == this_rid) 1070 { 1071 /* Already the active view: do nothing. */ 1072 return; 1073 } 1074 1075 /* We have a change of active view. */ 1076 parent_reg->m_active_view_rid = this_rid; 1077 1078 if (old_active_view_rid.null_p ()) 1079 { 1080 /* No previous active view, but the parent and its other children 1081 might have values. 1082 If so, invalidate those values - but not that of the new view. */ 1083 region_id_set below_region (&model); 1084 model.get_descendents (m_parent_rid, &below_region, this_rid); 1085 for (unsigned i = 0; i < model.get_num_regions (); i++) 1086 { 1087 region_id rid (region_id::from_int (i)); 1088 if (below_region.region_p (rid)) 1089 { 1090 region *other_reg = model.get_region (rid); 1091 other_reg->m_sval_id = svalue_id::null (); 1092 } 1093 } 1094 region *parent = model.get_region (m_parent_rid); 1095 parent->m_sval_id 1096 = model.add_svalue (new unknown_svalue (parent->get_type ())); 1097 } 1098 else 1099 { 1100 /* If there was an active view, invalidate it. */ 1101 region *old_active_view = model.get_region (old_active_view_rid); 1102 old_active_view->deactivate_view (model, old_active_view_rid); 1103 } 1104 } 1105 1106 /* If this region (with id THIS_RID) has an active view, deactivate it, 1107 clearing m_active_view_rid. */ 1108 1109 void 1110 region::deactivate_any_active_view (region_model &model) 1111 { 1112 if (m_active_view_rid.null_p ()) 1113 return; 1114 region *view = model.get_region (m_active_view_rid); 1115 view->deactivate_view (model, m_active_view_rid); 1116 m_active_view_rid = region_id::null (); 1117 } 1118 1119 /* Clear any values for regions below THIS_RID. 1120 Set the view's value to unknown. */ 1121 1122 void 1123 region::deactivate_view (region_model &model, region_id this_view_rid) 1124 { 1125 gcc_assert (is_view_p ()); 1126 1127 /* Purge values from old_active_this_view_rid and all its 1128 descendents. Potentially we could use a poison value 1129 for this, but let's use unknown for now. 
*/ 1130 region_id_set below_view (&model); 1131 model.get_descendents (this_view_rid, &below_view, region_id::null ()); 1132 1133 for (unsigned i = 0; i < model.get_num_regions (); i++) 1134 { 1135 region_id rid (region_id::from_int (i)); 1136 if (below_view.region_p (rid)) 1137 { 1138 region *other_reg = model.get_region (rid); 1139 other_reg->m_sval_id = svalue_id::null (); 1140 } 1141 } 1142 1143 m_sval_id = model.add_svalue (new unknown_svalue (get_type ())); 1144 } 1145 1146 /* Get a value for this region, either its value if it has one, 1147 or, failing that, "inherit" a value from first ancestor with a 1148 non-null value. 1149 1150 For example, when getting the value for a local variable within 1151 a stack frame that doesn't have one, the frame doesn't have a value 1152 either, but the stack as a whole will have an "uninitialized" poison 1153 value, so inherit that. */ 1154 1155 svalue_id 1156 region::get_value (region_model &model, bool non_null, 1157 region_model_context *ctxt) 1158 { 1159 /* If this region has a value, use it. */ 1160 if (!m_sval_id.null_p ()) 1161 return m_sval_id; 1162 1163 /* Otherwise, "inherit" value from first ancestor with a 1164 non-null value. */ 1165 1166 region *parent = model.get_region (m_parent_rid); 1167 if (parent) 1168 { 1169 svalue_id inherited_sid 1170 = parent->get_inherited_child_sid (this, model, ctxt); 1171 if (!inherited_sid.null_p ()) 1172 return inherited_sid; 1173 } 1174 1175 /* If a non-null value has been requested, then generate 1176 a new unknown value. Store it, so that repeated reads from this 1177 region will yield the same unknown value. */ 1178 if (non_null) 1179 { 1180 svalue_id unknown_sid = model.add_svalue (new unknown_svalue (m_type)); 1181 m_sval_id = unknown_sid; 1182 return unknown_sid; 1183 } 1184 1185 return svalue_id::null (); 1186 } 1187 1188 /* Get a value for CHILD, inheriting from this region. 1189 1190 Recurse, so this region will inherit a value if it doesn't already 1191 have one. */ 1192 1193 svalue_id 1194 region::get_inherited_child_sid (region *child, 1195 region_model &model, 1196 region_model_context *ctxt) 1197 { 1198 if (m_sval_id.null_p ()) 1199 { 1200 /* Recurse. */ 1201 if (!m_parent_rid.null_p ()) 1202 { 1203 region *parent = model.get_region (m_parent_rid); 1204 m_sval_id = parent->get_inherited_child_sid (this, model, ctxt); 1205 } 1206 } 1207 1208 if (!m_sval_id.null_p ()) 1209 { 1210 /* Clone the parent's value, so that attempts to update it 1211 (e.g giving a specific value to an inherited "uninitialized" 1212 value) touch the child, and not the parent. */ 1213 svalue *this_value = model.get_svalue (m_sval_id); 1214 svalue_id new_child_sid 1215 = this_value->get_child_sid (this, child, model, ctxt); 1216 if (ctxt) 1217 ctxt->on_inherited_svalue (m_sval_id, new_child_sid); 1218 child->m_sval_id = new_child_sid; 1219 return new_child_sid; 1220 } 1221 1222 return svalue_id::null (); 1223 } 1224 1225 /* Copy from SRC_RID to DST_RID, using CTXT for any issues that occur. 1226 Copy across any value for the region, and handle structs, unions 1227 and arrays recursively. */ 1228 1229 void 1230 region_model::copy_region (region_id dst_rid, region_id src_rid, 1231 region_model_context *ctxt) 1232 { 1233 gcc_assert (!dst_rid.null_p ()); 1234 gcc_assert (!src_rid.null_p ()); 1235 if (dst_rid == src_rid) 1236 return; 1237 region *dst_reg = get_region (dst_rid); 1238 region *src_reg = get_region (src_rid); 1239 1240 /* Copy across any value for the src region itself. 
*/ 1241 svalue_id sid = src_reg->get_value (*this, true, ctxt); 1242 set_value (dst_rid, sid, ctxt); 1243 1244 if (dst_reg->get_kind () != src_reg->get_kind ()) 1245 return; 1246 1247 /* Copy across child regions for structs, unions, and arrays. */ 1248 switch (dst_reg->get_kind ()) 1249 { 1250 case RK_PRIMITIVE: 1251 return; 1252 case RK_STRUCT: 1253 { 1254 struct_region *dst_sub = as_a <struct_region *> (dst_reg); 1255 struct_region *src_sub = as_a <struct_region *> (src_reg); 1256 copy_struct_region (dst_rid, dst_sub, src_sub, ctxt); 1257 } 1258 return; 1259 case RK_UNION: 1260 { 1261 union_region *src_sub = as_a <union_region *> (src_reg); 1262 copy_union_region (dst_rid, src_sub, ctxt); 1263 } 1264 return; 1265 case RK_FRAME: 1266 case RK_GLOBALS: 1267 case RK_CODE: 1268 case RK_FUNCTION: 1269 return; 1270 case RK_ARRAY: 1271 { 1272 array_region *dst_sub = as_a <array_region *> (dst_reg); 1273 array_region *src_sub = as_a <array_region *> (src_reg); 1274 copy_array_region (dst_rid, dst_sub, src_sub, ctxt); 1275 } 1276 return; 1277 case RK_STACK: 1278 case RK_HEAP: 1279 case RK_ROOT: 1280 case RK_SYMBOLIC: 1281 return; 1282 } 1283 } 1284 1285 /* Subroutine of region_model::copy_region for copying the child 1286 regions for a struct. */ 1287 1288 void 1289 region_model::copy_struct_region (region_id dst_rid, 1290 struct_region *dst_reg, 1291 struct_region *src_reg, 1292 region_model_context *ctxt) 1293 { 1294 for (map_region::iterator_t iter = src_reg->begin (); 1295 iter != src_reg->end (); ++iter) 1296 { 1297 tree src_key = (*iter).first; 1298 region_id src_field_rid = (*iter).second; 1299 region *src_field_reg = get_region (src_field_rid); 1300 region_id dst_field_rid 1301 = dst_reg->get_or_create (this, dst_rid, src_key, 1302 src_field_reg->get_type (), ctxt); 1303 copy_region (dst_field_rid, src_field_rid, ctxt); 1304 } 1305 } 1306 1307 /* Subroutine of region_model::copy_region for copying the active 1308 child region for a union. */ 1309 1310 void 1311 region_model::copy_union_region (region_id dst_rid, 1312 union_region *src_reg, 1313 region_model_context *ctxt) 1314 { 1315 region_id src_active_view_rid = src_reg->get_active_view (); 1316 if (src_active_view_rid.null_p ()) 1317 return; 1318 region *src_active_view = get_region (src_active_view_rid); 1319 tree type = src_active_view->get_type (); 1320 region_id dst_active_view_rid = get_or_create_view (dst_rid, type, ctxt); 1321 copy_region (dst_active_view_rid, src_active_view_rid, ctxt); 1322 } 1323 1324 /* Subroutine of region_model::copy_region for copying the child 1325 regions for an array. */ 1326 1327 void 1328 region_model::copy_array_region (region_id dst_rid, 1329 array_region *dst_reg, 1330 array_region *src_reg, 1331 region_model_context *ctxt) 1332 { 1333 for (array_region::iterator_t iter = src_reg->begin (); 1334 iter != src_reg->end (); ++iter) 1335 { 1336 array_region::key_t src_key = (*iter).first; 1337 region_id src_field_rid = (*iter).second; 1338 region *src_field_reg = get_region (src_field_rid); 1339 region_id dst_field_rid 1340 = dst_reg->get_or_create (this, dst_rid, src_key, 1341 src_field_reg->get_type (), ctxt); 1342 copy_region (dst_field_rid, src_field_rid, ctxt); 1343 } 1344 } 1345 1346 /* Generate a hash value for this region. The work is done by the 1347 add_to_hash vfunc. 
*/ 1348 1349 hashval_t 1350 region::hash () const 1351 { 1352 inchash::hash hstate; 1353 add_to_hash (hstate); 1354 return hstate.end (); 1355 } 1356 1357 /* Print a one-liner representation of this region to PP, assuming 1358 that this region is within MODEL and its id is THIS_RID. */ 1359 1360 void 1361 region::print (const region_model &model, 1362 region_id this_rid, 1363 pretty_printer *pp) const 1364 { 1365 this_rid.print (pp); 1366 pp_string (pp, ": {"); 1367 1368 /* vfunc. */ 1369 print_fields (model, this_rid, pp); 1370 1371 pp_string (pp, "}"); 1372 } 1373 1374 /* Base class implementation of region::dump_dot_to_pp vfunc. */ 1375 1376 void 1377 region::dump_dot_to_pp (const region_model &model, 1378 region_id this_rid, 1379 pretty_printer *pp) const 1380 { 1381 this_rid.dump_node_name_to_pp (pp); 1382 pp_printf (pp, " [shape=none,margin=0,style=filled,fillcolor=%s,label=\"", 1383 "lightgrey"); 1384 pp_write_text_to_stream (pp); 1385 print (model, this_rid, pp); 1386 pp_write_text_as_dot_label_to_stream (pp, /*for_record=*/false); 1387 pp_string (pp, "\"];"); 1388 pp_newline (pp); 1389 1390 /* Add edge to svalue. */ 1391 if (!m_sval_id.null_p ()) 1392 { 1393 this_rid.dump_node_name_to_pp (pp); 1394 pp_string (pp, " -> "); 1395 m_sval_id.dump_node_name_to_pp (pp); 1396 pp_string (pp, ";"); 1397 pp_newline (pp); 1398 } 1399 1400 /* Add edge to parent. */ 1401 if (!m_parent_rid.null_p ()) 1402 { 1403 this_rid.dump_node_name_to_pp (pp); 1404 pp_string (pp, " -> "); 1405 m_parent_rid.dump_node_name_to_pp (pp); 1406 pp_string (pp, ";"); 1407 pp_newline (pp); 1408 } 1409 } 1410 1411 /* Dump a tree-like ASCII-art representation of this region to PP. */ 1412 1413 void 1414 region::dump_to_pp (const region_model &model, 1415 region_id this_rid, 1416 pretty_printer *pp, 1417 const char *prefix, 1418 bool is_last_child) const 1419 { 1420 print (model, this_rid, pp); 1421 pp_newline (pp); 1422 1423 const char *new_prefix; 1424 if (!m_parent_rid.null_p ()) 1425 new_prefix = ACONCAT ((prefix, is_last_child ? " " : "| ", NULL)); 1426 else 1427 new_prefix = prefix; 1428 1429 const char *begin_color = colorize_start (pp_show_color (pp), "note"); 1430 const char *end_color = colorize_stop (pp_show_color (pp)); 1431 char *field_prefix 1432 = ACONCAT ((begin_color, new_prefix, "|:", end_color, NULL)); 1433 1434 if (!m_sval_id.null_p ()) 1435 { 1436 pp_printf (pp, "%s sval: ", field_prefix); 1437 model.get_svalue (m_sval_id)->print (model, m_sval_id, pp); 1438 pp_newline (pp); 1439 } 1440 if (m_type) 1441 { 1442 pp_printf (pp, "%s type: ", field_prefix); 1443 print_quoted_type (pp, m_type); 1444 pp_newline (pp); 1445 } 1446 1447 /* Find the children. */ 1448 1449 auto_vec<region_id> child_rids; 1450 unsigned i; 1451 for (unsigned i = 0; i < model.get_num_regions (); ++i) 1452 { 1453 region_id rid = region_id::from_int (i); 1454 region *child = model.get_region (rid); 1455 if (child->m_parent_rid == this_rid) 1456 child_rids.safe_push (rid); 1457 } 1458 1459 /* Print the children, using dump_child_label to label them. */ 1460 1461 region_id *child_rid; 1462 FOR_EACH_VEC_ELT (child_rids, i, child_rid) 1463 { 1464 is_last_child = (i == child_rids.length () - 1); 1465 if (!this_rid.null_p ()) 1466 { 1467 const char *tail = is_last_child ? 
"`-" : "|-"; 1468 pp_printf (pp, "%r%s%s%R", "note", new_prefix, tail); 1469 } 1470 dump_child_label (model, this_rid, *child_rid, pp); 1471 model.get_region (*child_rid)->dump_to_pp (model, *child_rid, pp, 1472 new_prefix, 1473 is_last_child); 1474 } 1475 } 1476 1477 /* Base implementation of region::dump_child_label vfunc. */ 1478 1479 void 1480 region::dump_child_label (const region_model &model, 1481 region_id this_rid ATTRIBUTE_UNUSED, 1482 region_id child_rid, 1483 pretty_printer *pp) const 1484 { 1485 region *child = model.get_region (child_rid); 1486 if (child->m_is_view) 1487 { 1488 gcc_assert (TYPE_P (child->get_type ())); 1489 if (m_active_view_rid == child_rid) 1490 pp_string (pp, "active "); 1491 else 1492 pp_string (pp, "inactive "); 1493 pp_string (pp, "view as "); 1494 print_quoted_type (pp, child->get_type ()); 1495 pp_string (pp, ": "); 1496 } 1497 } 1498 1499 /* Base implementation of region::validate vfunc. 1500 Assert that the fields of "region" are valid; subclasses should 1501 chain up their implementation to this one. */ 1502 1503 void 1504 region::validate (const region_model &model) const 1505 { 1506 m_parent_rid.validate (model); 1507 m_sval_id.validate (model); 1508 unsigned i; 1509 region_id *view_rid; 1510 FOR_EACH_VEC_ELT (m_view_rids, i, view_rid) 1511 { 1512 gcc_assert (!view_rid->null_p ()); 1513 view_rid->validate (model); 1514 } 1515 m_active_view_rid.validate (model); 1516 } 1517 1518 /* Apply MAP to svalue_ids to this region. This updates the value 1519 for the region (if any). */ 1520 1521 void 1522 region::remap_svalue_ids (const svalue_id_map &map) 1523 { 1524 map.update (&m_sval_id); 1525 } 1526 1527 /* Base implementation of region::remap_region_ids vfunc; subclasses should 1528 chain up to this, updating any region_id data. */ 1529 1530 void 1531 region::remap_region_ids (const region_id_map &map) 1532 { 1533 map.update (&m_parent_rid); 1534 unsigned i; 1535 region_id *view_rid; 1536 FOR_EACH_VEC_ELT (m_view_rids, i, view_rid) 1537 map.update (view_rid); 1538 map.update (&m_active_view_rid); 1539 } 1540 1541 /* Add a new region with id VIEW_RID as a view of this region. */ 1542 1543 void 1544 region::add_view (region_id view_rid, region_model *model) 1545 { 1546 gcc_assert (!view_rid.null_p ()); 1547 region *new_view = model->get_region (view_rid); 1548 new_view->m_is_view = true; 1549 gcc_assert (!new_view->m_parent_rid.null_p ()); 1550 gcc_assert (new_view->m_sval_id.null_p ()); 1551 1552 //gcc_assert (new_view->get_type () != NULL_TREE); 1553 // TODO: this can sometimes be NULL, when viewing through a (void *) 1554 1555 // TODO: the type ought to not be present yet 1556 1557 m_view_rids.safe_push (view_rid); 1558 } 1559 1560 /* Look for a view of type TYPE of this region, returning its id if found, 1561 or null otherwise. */ 1562 1563 region_id 1564 region::get_view (tree type, region_model *model) const 1565 { 1566 unsigned i; 1567 region_id *view_rid; 1568 FOR_EACH_VEC_ELT (m_view_rids, i, view_rid) 1569 { 1570 region *view = model->get_region (*view_rid); 1571 gcc_assert (view->m_is_view); 1572 if (view->get_type () == type) 1573 return *view_rid; 1574 } 1575 return region_id::null (); 1576 } 1577 1578 /* region's ctor. 
*/ 1579 1580 region::region (region_id parent_rid, svalue_id sval_id, tree type) 1581 : m_parent_rid (parent_rid), m_sval_id (sval_id), m_type (type), 1582 m_view_rids (), m_is_view (false), m_active_view_rid (region_id::null ()) 1583 { 1584 gcc_assert (type == NULL_TREE || TYPE_P (type)); 1585 } 1586 1587 /* region's copy ctor. */ 1588 1589 region::region (const region &other) 1590 : m_parent_rid (other.m_parent_rid), m_sval_id (other.m_sval_id), 1591 m_type (other.m_type), m_view_rids (other.m_view_rids.length ()), 1592 m_is_view (other.m_is_view), m_active_view_rid (other.m_active_view_rid) 1593 { 1594 int i; 1595 region_id *rid; 1596 FOR_EACH_VEC_ELT (other.m_view_rids, i, rid) 1597 m_view_rids.quick_push (*rid); 1598 } 1599 1600 /* Base implementation of region::add_to_hash vfunc; subclasses should 1601 chain up to this. */ 1602 1603 void 1604 region::add_to_hash (inchash::hash &hstate) const 1605 { 1606 inchash::add (m_parent_rid, hstate); 1607 inchash::add (m_sval_id, hstate); 1608 hstate.add_ptr (m_type); 1609 // TODO: views 1610 } 1611 1612 /* Base implementation of region::print_fields vfunc. */ 1613 1614 void 1615 region::print_fields (const region_model &model ATTRIBUTE_UNUSED, 1616 region_id this_rid ATTRIBUTE_UNUSED, 1617 pretty_printer *pp) const 1618 { 1619 pp_printf (pp, "kind: %qs", region_kind_to_str (get_kind ())); 1620 1621 pp_string (pp, ", parent: "); 1622 m_parent_rid.print (pp); 1623 1624 pp_printf (pp, ", sval: "); 1625 m_sval_id.print (pp); 1626 1627 if (m_type) 1628 { 1629 pp_printf (pp, ", type: "); 1630 print_quoted_type (pp, m_type); 1631 } 1632 } 1633 1634 /* Determine if a pointer to this region must be non-NULL. 1635 1636 Generally, pointers to regions must be non-NULL, but pointers 1637 to symbolic_regions might, in fact, be NULL. 1638 1639 This allows us to simulate functions like malloc and calloc with: 1640 - only one "outcome" from each statement, 1641 - the idea that the pointer is on the heap if non-NULL 1642 - the possibility that the pointer could be NULL 1643 - the idea that successive values returned from malloc are non-equal 1644 - to be able to zero-fill for calloc. */ 1645 1646 bool 1647 region::non_null_p (const region_model &model) const 1648 { 1649 /* Look through views to get at the underlying region. */ 1650 if (is_view_p ()) 1651 return model.get_region (m_parent_rid)->non_null_p (model); 1652 1653 /* Are we within a symbolic_region? If so, it could be NULL. */ 1654 if (const symbolic_region *sym_reg = dyn_cast_symbolic_region ()) 1655 { 1656 if (sym_reg->m_possibly_null) 1657 return false; 1658 } 1659 1660 return true; 1661 } 1662 1663 /* class primitive_region : public region. */ 1664 1665 /* Implementation of region::clone vfunc for primitive_region. */ 1666 1667 region * 1668 primitive_region::clone () const 1669 { 1670 return new primitive_region (*this); 1671 } 1672 1673 /* Implementation of region::walk_for_canonicalization vfunc for 1674 primitive_region. */ 1675 1676 void 1677 primitive_region::walk_for_canonicalization (canonicalization *) const 1678 { 1679 /* Empty. */ 1680 } 1681 1682 /* class map_region : public region. */ 1683 1684 /* map_region's copy ctor. */ 1685 1686 map_region::map_region (const map_region &other) 1687 : region (other), 1688 m_map (other.m_map) 1689 { 1690 } 1691 1692 /* Compare the fields of this map_region with OTHER, returning true 1693 if they are equal. 1694 For use by region::operator==. 
*/ 1695 1696 bool 1697 map_region::compare_fields (const map_region &other) const 1698 { 1699 if (m_map.elements () != other.m_map.elements ()) 1700 return false; 1701 1702 for (map_t::iterator iter = m_map.begin (); 1703 iter != m_map.end (); 1704 ++iter) 1705 { 1706 tree key = (*iter).first; 1707 region_id e = (*iter).second; 1708 region_id *other_slot = const_cast <map_t &> (other.m_map).get (key); 1709 if (other_slot == NULL) 1710 return false; 1711 if (e != *other_slot) 1712 return false; 1713 } 1714 return true; 1715 } 1716 1717 /* Implementation of region::print_fields vfunc for map_region. */ 1718 1719 void 1720 map_region::print_fields (const region_model &model, 1721 region_id this_rid, 1722 pretty_printer *pp) const 1723 { 1724 region::print_fields (model, this_rid, pp); 1725 pp_string (pp, ", map: {"); 1726 for (map_t::iterator iter = m_map.begin (); 1727 iter != m_map.end (); 1728 ++iter) 1729 { 1730 if (iter != m_map.begin ()) 1731 pp_string (pp, ", "); 1732 tree expr = (*iter).first; 1733 region_id child_rid = (*iter).second; 1734 dump_quoted_tree (pp, expr); 1735 pp_string (pp, ": "); 1736 child_rid.print (pp); 1737 } 1738 pp_string (pp, "}"); 1739 } 1740 1741 /* Implementation of region::validate vfunc for map_region. */ 1742 1743 void 1744 map_region::validate (const region_model &model) const 1745 { 1746 region::validate (model); 1747 for (map_t::iterator iter = m_map.begin (); 1748 iter != m_map.end (); 1749 ++iter) 1750 { 1751 region_id child_rid = (*iter).second; 1752 child_rid.validate (model); 1753 } 1754 } 1755 1756 /* Implementation of region::dump_dot_to_pp vfunc for map_region. */ 1757 1758 void 1759 map_region::dump_dot_to_pp (const region_model &model, 1760 region_id this_rid, 1761 pretty_printer *pp) const 1762 { 1763 region::dump_dot_to_pp (model, this_rid, pp); 1764 for (map_t::iterator iter = m_map.begin (); 1765 iter != m_map.end (); 1766 ++iter) 1767 { 1768 // TODO: add nodes/edges to label things 1769 1770 tree expr = (*iter).first; 1771 region_id child_rid = (*iter).second; 1772 1773 pp_printf (pp, "rid_label_%i [label=\"", child_rid.as_int ()); 1774 pp_write_text_to_stream (pp); 1775 pp_printf (pp, "%qE", expr); 1776 pp_write_text_as_dot_label_to_stream (pp, /*for_record=*/false); 1777 pp_string (pp, "\"];"); 1778 pp_newline (pp); 1779 1780 pp_printf (pp, "rid_label_%i", child_rid.as_int ()); 1781 pp_string (pp, " -> "); 1782 child_rid.dump_node_name_to_pp (pp); 1783 pp_string (pp, ";"); 1784 pp_newline (pp); 1785 } 1786 } 1787 1788 /* Implementation of region::dump_child_label vfunc for map_region. */ 1789 1790 void 1791 map_region::dump_child_label (const region_model &model, 1792 region_id this_rid, 1793 region_id child_rid, 1794 pretty_printer *pp) const 1795 { 1796 region::dump_child_label (model, this_rid, child_rid, pp); 1797 1798 for (map_t::iterator iter = m_map.begin (); 1799 iter != m_map.end (); 1800 ++iter) 1801 { 1802 if (child_rid == (*iter).second) 1803 { 1804 tree key = (*iter).first; 1805 dump_quoted_tree (pp, key); 1806 pp_string (pp, ": "); 1807 } 1808 } 1809 } 1810 1811 /* Look for a child region for KEY within this map_region. 1812 If it doesn't already exist, create a child map_region, using TYPE for 1813 its type. 1814 Return the region_id of the child (whether pre-existing, or 1815 newly-created). 1816 Notify CTXT if we don't know how to handle TYPE. 
*/ 1817 1818 region_id 1819 map_region::get_or_create (region_model *model, 1820 region_id this_rid, 1821 tree key, 1822 tree type, 1823 region_model_context *ctxt) 1824 { 1825 gcc_assert (key); 1826 gcc_assert (valid_key_p (key)); 1827 region_id *slot = m_map.get (key); 1828 if (slot) 1829 return *slot; 1830 region_id child_rid = model->add_region_for_type (this_rid, type, ctxt); 1831 m_map.put (key, child_rid); 1832 return child_rid; 1833 } 1834 1835 /* Get the region_id for the child region for KEY within this 1836 MAP_REGION, or NULL if there is no such child region. */ 1837 1838 region_id * 1839 map_region::get (tree key) 1840 { 1841 gcc_assert (key); 1842 gcc_assert (valid_key_p (key)); 1843 region_id *slot = m_map.get (key); 1844 return slot; 1845 } 1846 1847 /* Implementation of region::add_to_hash vfunc for map_region. */ 1848 1849 void 1850 map_region::add_to_hash (inchash::hash &hstate) const 1851 { 1852 region::add_to_hash (hstate); 1853 // TODO 1854 } 1855 1856 /* Implementation of region::remap_region_ids vfunc for map_region. */ 1857 1858 void 1859 map_region::remap_region_ids (const region_id_map &map) 1860 { 1861 region::remap_region_ids (map); 1862 1863 /* Remap the region ids within the map entries. */ 1864 for (map_t::iterator iter = m_map.begin (); 1865 iter != m_map.end (); ++iter) 1866 map.update (&(*iter).second); 1867 } 1868 1869 /* Remove the binding of KEY to its child region (but not the 1870 child region itself). 1871 For use when purging unneeded SSA names. */ 1872 1873 void 1874 map_region::unbind (tree key) 1875 { 1876 gcc_assert (key); 1877 gcc_assert (valid_key_p (key)); 1878 m_map.remove (key); 1879 } 1880 1881 /* Look for a child region with id CHILD_RID within this map_region. 1882 If one is found, return its tree key, otherwise return NULL_TREE. */ 1883 1884 tree 1885 map_region::get_tree_for_child_region (region_id child_rid) const 1886 { 1887 // TODO: do we want to store an inverse map? 1888 for (map_t::iterator iter = m_map.begin (); 1889 iter != m_map.end (); 1890 ++iter) 1891 { 1892 tree key = (*iter).first; 1893 region_id r = (*iter).second; 1894 if (r == child_rid) 1895 return key; 1896 } 1897 1898 return NULL_TREE; 1899 } 1900 1901 /* Look for a child region CHILD within this map_region. 1902 If one is found, return its tree key, otherwise return NULL_TREE. */ 1903 1904 tree 1905 map_region::get_tree_for_child_region (region *child, 1906 const region_model &model) const 1907 { 1908 // TODO: do we want to store an inverse map? 1909 for (map_t::iterator iter = m_map.begin (); 1910 iter != m_map.end (); 1911 ++iter) 1912 { 1913 tree key = (*iter).first; 1914 region_id r = (*iter).second; 1915 if (model.get_region (r) == child) 1916 return key; 1917 } 1918 1919 return NULL_TREE; 1920 } 1921 1922 /* Comparator for trees to impose a deterministic ordering on 1923 T1 and T2. */ 1924 1925 static int 1926 tree_cmp (const_tree t1, const_tree t2) 1927 { 1928 gcc_assert (t1); 1929 gcc_assert (t2); 1930 1931 /* Test tree codes first. */ 1932 if (TREE_CODE (t1) != TREE_CODE (t2)) 1933 return TREE_CODE (t1) - TREE_CODE (t2); 1934 1935 /* From this point on, we know T1 and T2 have the same tree code. 
*/ 1936 1937 if (DECL_P (t1)) 1938 { 1939 if (DECL_NAME (t1) && DECL_NAME (t2)) 1940 return strcmp (IDENTIFIER_POINTER (DECL_NAME (t1)), 1941 IDENTIFIER_POINTER (DECL_NAME (t2))); 1942 else 1943 { 1944 if (DECL_NAME (t1)) 1945 return -1; 1946 else if (DECL_NAME (t2)) 1947 return 1; 1948 else 1949 return DECL_UID (t1) - DECL_UID (t2); 1950 } 1951 } 1952 1953 switch (TREE_CODE (t1)) 1954 { 1955 case SSA_NAME: 1956 { 1957 if (SSA_NAME_VAR (t1) && SSA_NAME_VAR (t2)) 1958 { 1959 int var_cmp = tree_cmp (SSA_NAME_VAR (t1), SSA_NAME_VAR (t2)); 1960 if (var_cmp) 1961 return var_cmp; 1962 return SSA_NAME_VERSION (t1) - SSA_NAME_VERSION (t2); 1963 } 1964 else 1965 { 1966 if (SSA_NAME_VAR (t1)) 1967 return -1; 1968 else if (SSA_NAME_VAR (t2)) 1969 return 1; 1970 else 1971 return SSA_NAME_VERSION (t1) - SSA_NAME_VERSION (t2); 1972 } 1973 } 1974 break; 1975 1976 case INTEGER_CST: 1977 return tree_int_cst_compare (t1, t2); 1978 1979 case REAL_CST: 1980 { 1981 const real_value *rv1 = TREE_REAL_CST_PTR (t1); 1982 const real_value *rv2 = TREE_REAL_CST_PTR (t2); 1983 if (real_compare (UNORDERED_EXPR, rv1, rv2)) 1984 { 1985 /* Impose an arbitrary order on NaNs relative to other NaNs 1986 and to non-NaNs. */ 1987 if (int cmp_isnan = real_isnan (rv1) - real_isnan (rv2)) 1988 return cmp_isnan; 1989 if (int cmp_issignaling_nan 1990 = real_issignaling_nan (rv1) - real_issignaling_nan (rv2)) 1991 return cmp_issignaling_nan; 1992 return real_isneg (rv1) - real_isneg (rv2); 1993 } 1994 if (real_compare (LT_EXPR, rv1, rv2)) 1995 return -1; 1996 if (real_compare (GT_EXPR, rv1, rv2)) 1997 return 1; 1998 return 0; 1999 } 2000 2001 case STRING_CST: 2002 return strcmp (TREE_STRING_POINTER (t1), 2003 TREE_STRING_POINTER (t2)); 2004 2005 default: 2006 gcc_unreachable (); 2007 break; 2008 } 2009 2010 gcc_unreachable (); 2011 2012 return 0; 2013 } 2014 2015 /* qsort comparator for trees to impose a deterministic ordering on 2016 P1 and P2. */ 2017 2018 static int 2019 tree_cmp (const void *p1, const void *p2) 2020 { 2021 const_tree t1 = *(const_tree const *)p1; 2022 const_tree t2 = *(const_tree const *)p2; 2023 2024 return tree_cmp (t1, t2); 2025 } 2026 2027 /* Attempt to merge MAP_REGION_A and MAP_REGION_B into MERGED_MAP_REGION, 2028 which has region_id MERGED_RID, using MERGER. 2029 Return true if the merger is possible, false otherwise. */ 2030 2031 bool 2032 map_region::can_merge_p (const map_region *map_region_a, 2033 const map_region *map_region_b, 2034 map_region *merged_map_region, 2035 region_id merged_rid, 2036 model_merger *merger) 2037 { 2038 for (map_t::iterator iter = map_region_a->m_map.begin (); 2039 iter != map_region_a->m_map.end (); 2040 ++iter) 2041 { 2042 tree key_a = (*iter).first; 2043 region_id rid_a = (*iter).second; 2044 2045 if (const region_id *slot_b 2046 = const_cast<map_region *>(map_region_b)->m_map.get (key_a)) 2047 { 2048 region_id rid_b = *slot_b; 2049 2050 region *child_region_a = merger->get_region_a <region> (rid_a); 2051 region *child_region_b = merger->get_region_b <region> (rid_b); 2052 2053 gcc_assert (child_region_a->get_type () 2054 == child_region_b->get_type ()); 2055 2056 gcc_assert (child_region_a->get_kind () 2057 == child_region_b->get_kind ()); 2058 2059 region_id child_merged_rid 2060 = merged_map_region->get_or_create (merger->m_merged_model, 2061 merged_rid, 2062 key_a, 2063 child_region_a->get_type (), 2064 NULL); 2065 2066 region *child_merged_region 2067 = merger->m_merged_model->get_region (child_merged_rid); 2068 2069 /* Consider values. 
*/ 2070 svalue_id child_a_sid = child_region_a->get_value_direct (); 2071 svalue_id child_b_sid = child_region_b->get_value_direct (); 2072 svalue_id child_merged_sid; 2073 if (!merger->can_merge_values_p (child_a_sid, child_b_sid, 2074 &child_merged_sid)) 2075 return false; 2076 if (!child_merged_sid.null_p ()) 2077 child_merged_region->set_value (*merger->m_merged_model, 2078 child_merged_rid, 2079 child_merged_sid, 2080 NULL); 2081 2082 if (map_region *map_region_a = child_region_a->dyn_cast_map_region ()) 2083 { 2084 /* Recurse. */ 2085 if (!can_merge_p (map_region_a, 2086 as_a <map_region *> (child_region_b), 2087 as_a <map_region *> (child_merged_region), 2088 child_merged_rid, 2089 merger)) 2090 return false; 2091 } 2092 2093 } 2094 else 2095 { 2096 /* TODO: region is present in A, but absent in B. */ 2097 } 2098 } 2099 2100 /* TODO: check for keys in B that aren't in A. */ 2101 2102 return true; 2103 } 2104 2105 2106 /* Implementation of region::walk_for_canonicalization vfunc for 2107 map_region. */ 2108 2109 void 2110 map_region::walk_for_canonicalization (canonicalization *c) const 2111 { 2112 auto_vec<tree> keys (m_map.elements ()); 2113 for (map_t::iterator iter = m_map.begin (); 2114 iter != m_map.end (); 2115 ++iter) 2116 { 2117 tree key_a = (*iter).first; 2118 keys.quick_push (key_a); 2119 } 2120 keys.qsort (tree_cmp); 2121 2122 unsigned i; 2123 tree key; 2124 FOR_EACH_VEC_ELT (keys, i, key) 2125 { 2126 region_id rid = *const_cast<map_region *>(this)->m_map.get (key); 2127 c->walk_rid (rid); 2128 } 2129 } 2130 2131 /* For debugging purposes: look for a child region for a decl named 2132 IDENTIFIER (or an SSA_NAME for such a decl), returning its value, 2133 or svalue_id::null if none are found. */ 2134 2135 svalue_id 2136 map_region::get_value_by_name (tree identifier, 2137 const region_model &model) const 2138 { 2139 for (map_t::iterator iter = m_map.begin (); 2140 iter != m_map.end (); 2141 ++iter) 2142 { 2143 tree key = (*iter).first; 2144 if (TREE_CODE (key) == SSA_NAME) 2145 if (SSA_NAME_VAR (key)) 2146 key = SSA_NAME_VAR (key); 2147 if (DECL_P (key)) 2148 if (DECL_NAME (key) == identifier) 2149 { 2150 region_id rid = (*iter).second; 2151 region *region = model.get_region (rid); 2152 return region->get_value (const_cast<region_model &>(model), 2153 false, NULL); 2154 } 2155 } 2156 return svalue_id::null (); 2157 } 2158 2159 /* class struct_or_union_region : public map_region. */ 2160 2161 /* Implementation of map_region::valid_key_p vfunc for 2162 struct_or_union_region. */ 2163 2164 bool 2165 struct_or_union_region::valid_key_p (tree key) const 2166 { 2167 return TREE_CODE (key) == FIELD_DECL; 2168 } 2169 2170 /* Compare the fields of this struct_or_union_region with OTHER, returning 2171 true if they are equal. 2172 For use by region::operator==. */ 2173 2174 bool 2175 struct_or_union_region::compare_fields (const struct_or_union_region &other) 2176 const 2177 { 2178 return map_region::compare_fields (other); 2179 } 2180 2181 /* class struct_region : public struct_or_union_region. */ 2182 2183 /* Implementation of region::clone vfunc for struct_region. */ 2184 2185 region * 2186 struct_region::clone () const 2187 { 2188 return new struct_region (*this); 2189 } 2190 2191 /* Compare the fields of this struct_region with OTHER, returning true 2192 if they are equal. 2193 For use by region::operator==. 
*/ 2194 2195 bool 2196 struct_region::compare_fields (const struct_region &other) const 2197 { 2198 return struct_or_union_region::compare_fields (other); 2199 } 2200 2201 /* class union_region : public struct_or_union_region. */ 2202 2203 /* Implementation of region::clone vfunc for union_region. */ 2204 2205 region * 2206 union_region::clone () const 2207 { 2208 return new union_region (*this); 2209 } 2210 2211 /* Compare the fields of this union_region with OTHER, returning true 2212 if they are equal. 2213 For use by region::operator==. */ 2214 2215 bool 2216 union_region::compare_fields (const union_region &other) const 2217 { 2218 return struct_or_union_region::compare_fields (other); 2219 } 2220 2221 /* class frame_region : public map_region. */ 2222 2223 /* Compare the fields of this frame_region with OTHER, returning true 2224 if they are equal. 2225 For use by region::operator==. */ 2226 2227 bool 2228 frame_region::compare_fields (const frame_region &other) const 2229 { 2230 if (!map_region::compare_fields (other)) 2231 return false; 2232 if (m_fun != other.m_fun) 2233 return false; 2234 if (m_depth != other.m_depth) 2235 return false; 2236 return true; 2237 } 2238 2239 /* Implementation of region::clone vfunc for frame_region. */ 2240 2241 region * 2242 frame_region::clone () const 2243 { 2244 return new frame_region (*this); 2245 } 2246 2247 /* Implementation of map_region::valid_key_p vfunc for frame_region. */ 2248 2249 bool 2250 frame_region::valid_key_p (tree key) const 2251 { 2252 // TODO: could also check that VAR_DECLs are locals 2253 return (TREE_CODE (key) == PARM_DECL 2254 || TREE_CODE (key) == VAR_DECL 2255 || TREE_CODE (key) == SSA_NAME 2256 || TREE_CODE (key) == RESULT_DECL); 2257 } 2258 2259 /* Implementation of region::print_fields vfunc for frame_region. */ 2260 2261 void 2262 frame_region::print_fields (const region_model &model, 2263 region_id this_rid, 2264 pretty_printer *pp) const 2265 { 2266 map_region::print_fields (model, this_rid, pp); 2267 pp_printf (pp, ", function: %qs, depth: %i", function_name (m_fun), m_depth); 2268 } 2269 2270 /* Implementation of region::add_to_hash vfunc for frame_region. */ 2271 2272 void 2273 frame_region::add_to_hash (inchash::hash &hstate) const 2274 { 2275 map_region::add_to_hash (hstate); 2276 hstate.add_ptr (m_fun); 2277 hstate.add_int (m_depth); 2278 } 2279 2280 /* class globals_region : public scope_region. */ 2281 2282 /* Compare the fields of this globals_region with OTHER, returning true 2283 if they are equal. 2284 For use by region::operator==. */ 2285 2286 bool 2287 globals_region::compare_fields (const globals_region &other) const 2288 { 2289 return map_region::compare_fields (other); 2290 } 2291 2292 /* Implementation of region::clone vfunc for globals_region. */ 2293 2294 region * 2295 globals_region::clone () const 2296 { 2297 return new globals_region (*this); 2298 } 2299 2300 /* Implementation of map_region::valid_key_p vfunc for globals_region. */ 2301 2302 bool 2303 globals_region::valid_key_p (tree key) const 2304 { 2305 return TREE_CODE (key) == VAR_DECL; 2306 } 2307 2308 /* class code_region : public map_region. */ 2309 2310 /* Compare the fields of this code_region with OTHER, returning true 2311 if they are equal. 2312 For use by region::operator==. */ 2313 2314 bool 2315 code_region::compare_fields (const code_region &other) const 2316 { 2317 return map_region::compare_fields (other); 2318 } 2319 2320 /* Implementation of region::clone vfunc for code_region. 
*/ 2321 2322 region * 2323 code_region::clone () const 2324 { 2325 return new code_region (*this); 2326 } 2327 2328 /* Implementation of map_region::valid_key_p vfunc for code_region. */ 2329 2330 bool 2331 code_region::valid_key_p (tree key) const 2332 { 2333 return TREE_CODE (key) == FUNCTION_DECL; 2334 } 2335 2336 /* class array_region : public region. */ 2337 2338 /* array_region's copy ctor. */ 2339 2340 array_region::array_region (const array_region &other) 2341 : region (other), 2342 m_map (other.m_map) 2343 { 2344 } 2345 2346 /* Get a child region for the element with index INDEX_SID. */ 2347 2348 region_id 2349 array_region::get_element (region_model *model, 2350 region_id this_rid, 2351 svalue_id index_sid, 2352 region_model_context *ctxt) 2353 { 2354 tree element_type = TREE_TYPE (get_type ()); 2355 svalue *index_sval = model->get_svalue (index_sid); 2356 if (tree cst_index = index_sval->maybe_get_constant ()) 2357 { 2358 key_t key = key_from_constant (cst_index); 2359 region_id element_rid 2360 = get_or_create (model, this_rid, key, element_type, ctxt); 2361 return element_rid; 2362 } 2363 2364 return model->get_or_create_view (this_rid, element_type, ctxt); 2365 } 2366 2367 /* Implementation of region::clone vfunc for array_region. */ 2368 2369 region * 2370 array_region::clone () const 2371 { 2372 return new array_region (*this); 2373 } 2374 2375 /* Compare the fields of this array_region with OTHER, returning true 2376 if they are equal. 2377 For use by region::operator==. */ 2378 2379 bool 2380 array_region::compare_fields (const array_region &other) const 2381 { 2382 if (m_map.elements () != other.m_map.elements ()) 2383 return false; 2384 2385 for (map_t::iterator iter = m_map.begin (); 2386 iter != m_map.end (); 2387 ++iter) 2388 { 2389 int key = (*iter).first; 2390 region_id e = (*iter).second; 2391 region_id *other_slot = const_cast <map_t &> (other.m_map).get (key); 2392 if (other_slot == NULL) 2393 return false; 2394 if (e != *other_slot) 2395 return false; 2396 } 2397 return true; 2398 } 2399 2400 /* Implementation of region::print_fields vfunc for array_region. */ 2401 2402 void 2403 array_region::print_fields (const region_model &model, 2404 region_id this_rid, 2405 pretty_printer *pp) const 2406 { 2407 region::print_fields (model, this_rid, pp); 2408 pp_string (pp, ", array: {"); 2409 for (map_t::iterator iter = m_map.begin (); 2410 iter != m_map.end (); 2411 ++iter) 2412 { 2413 if (iter != m_map.begin ()) 2414 pp_string (pp, ", "); 2415 int key = (*iter).first; 2416 region_id child_rid = (*iter).second; 2417 pp_printf (pp, "[%i]: ", key); 2418 child_rid.print (pp); 2419 } 2420 pp_string (pp, "}"); 2421 } 2422 2423 /* Implementation of region::validate vfunc for array_region. */ 2424 2425 void 2426 array_region::validate (const region_model &model) const 2427 { 2428 region::validate (model); 2429 for (map_t::iterator iter = m_map.begin (); 2430 iter != m_map.end (); 2431 ++iter) 2432 { 2433 region_id child_rid = (*iter).second; 2434 child_rid.validate (model); 2435 } 2436 } 2437 2438 /* Implementation of region::dump_dot_to_pp vfunc for array_region. 
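   In addition to the base class's output, this emits a label node per
   element holding the element's index, linked to the child region's
   node; roughly of the form:
     rid_label_7 [label="3"];
     rid_label_7 -> region_7;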
*/ 2439 2440 void 2441 array_region::dump_dot_to_pp (const region_model &model, 2442 region_id this_rid, 2443 pretty_printer *pp) const 2444 { 2445 region::dump_dot_to_pp (model, this_rid, pp); 2446 for (map_t::iterator iter = m_map.begin (); 2447 iter != m_map.end (); 2448 ++iter) 2449 { 2450 // TODO: add nodes/edges to label things 2451 2452 int key = (*iter).first; 2453 region_id child_rid = (*iter).second; 2454 2455 pp_printf (pp, "rid_label_%i [label=\"", child_rid.as_int ()); 2456 pp_write_text_to_stream (pp); 2457 pp_printf (pp, "%qi", key); 2458 pp_write_text_as_dot_label_to_stream (pp, /*for_record=*/false); 2459 pp_string (pp, "\"];"); 2460 pp_newline (pp); 2461 2462 pp_printf (pp, "rid_label_%i", child_rid.as_int ()); 2463 pp_string (pp, " -> "); 2464 child_rid.dump_node_name_to_pp (pp); 2465 pp_string (pp, ";"); 2466 pp_newline (pp); 2467 } 2468 } 2469 2470 /* Implementation of region::dump_child_label vfunc for array_region. */ 2471 2472 void 2473 array_region::dump_child_label (const region_model &model, 2474 region_id this_rid, 2475 region_id child_rid, 2476 pretty_printer *pp) const 2477 { 2478 region::dump_child_label (model, this_rid, child_rid, pp); 2479 2480 for (map_t::iterator iter = m_map.begin (); 2481 iter != m_map.end (); 2482 ++iter) 2483 { 2484 if (child_rid == (*iter).second) 2485 { 2486 int key = (*iter).first; 2487 pp_printf (pp, "[%i]: ", key); 2488 } 2489 } 2490 } 2491 2492 /* Look for a child region for KEY within this array_region. 2493 If it doesn't already exist, create a child array_region, using TYPE for 2494 its type. 2495 Return the region_id of the child (whether pre-existing, or 2496 newly-created). 2497 Notify CTXT if we don't know how to handle TYPE. */ 2498 2499 region_id 2500 array_region::get_or_create (region_model *model, 2501 region_id this_rid, 2502 key_t key, 2503 tree type, 2504 region_model_context *ctxt) 2505 { 2506 region_id *slot = m_map.get (key); 2507 if (slot) 2508 return *slot; 2509 region_id child_rid = model->add_region_for_type (this_rid, type, ctxt); 2510 m_map.put (key, child_rid); 2511 return child_rid; 2512 } 2513 2514 /* Get the region_id for the child region for KEY within this 2515 ARRAY_REGION, or NULL if there is no such child region. */ 2516 2517 region_id * 2518 array_region::get (key_t key) 2519 { 2520 region_id *slot = m_map.get (key); 2521 return slot; 2522 } 2523 2524 /* Implementation of region::add_to_hash vfunc for array_region. */ 2525 2526 void 2527 array_region::add_to_hash (inchash::hash &hstate) const 2528 { 2529 region::add_to_hash (hstate); 2530 // TODO 2531 } 2532 2533 /* Implementation of region::remap_region_ids vfunc for array_region. */ 2534 2535 void 2536 array_region::remap_region_ids (const region_id_map &map) 2537 { 2538 region::remap_region_ids (map); 2539 2540 /* Remap the region ids within the map entries. */ 2541 for (map_t::iterator iter = m_map.begin (); 2542 iter != m_map.end (); ++iter) 2543 map.update (&(*iter).second); 2544 } 2545 2546 /* Look for a child region with id CHILD_RID within this array_region. 2547 If one is found, write its key to *OUT and return true, 2548 otherwise return false. */ 2549 2550 bool 2551 array_region::get_key_for_child_region (region_id child_rid, key_t *out) const 2552 { 2553 // TODO: do we want to store an inverse map? 
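  /* For now, fall back to a linear search of the key/region_id pairs,
     looking for an entry whose value is CHILD_RID.  */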
2554 for (map_t::iterator iter = m_map.begin (); 2555 iter != m_map.end (); 2556 ++iter) 2557 { 2558 key_t key = (*iter).first; 2559 region_id r = (*iter).second; 2560 if (r == child_rid) 2561 { 2562 *out = key; 2563 return true; 2564 } 2565 } 2566 2567 return false; 2568 } 2569 2570 /* qsort comparator for array_region's keys. */ 2571 2572 int 2573 array_region::key_cmp (const void *p1, const void *p2) 2574 { 2575 key_t i1 = *(const key_t *)p1; 2576 key_t i2 = *(const key_t *)p2; 2577 2578 if (i1 > i2) 2579 return 1; 2580 else if (i1 < i2) 2581 return -1; 2582 else 2583 return 0; 2584 } 2585 2586 /* Implementation of region::walk_for_canonicalization vfunc for 2587 array_region. */ 2588 2589 void 2590 array_region::walk_for_canonicalization (canonicalization *c) const 2591 { 2592 auto_vec<int> keys (m_map.elements ()); 2593 for (map_t::iterator iter = m_map.begin (); 2594 iter != m_map.end (); 2595 ++iter) 2596 { 2597 int key_a = (*iter).first; 2598 keys.quick_push (key_a); 2599 } 2600 keys.qsort (key_cmp); 2601 2602 unsigned i; 2603 int key; 2604 FOR_EACH_VEC_ELT (keys, i, key) 2605 { 2606 region_id rid = *const_cast<array_region *>(this)->m_map.get (key); 2607 c->walk_rid (rid); 2608 } 2609 } 2610 2611 /* Convert constant CST into an array_region::key_t. */ 2612 2613 array_region::key_t 2614 array_region::key_from_constant (tree cst) 2615 { 2616 gcc_assert (CONSTANT_CLASS_P (cst)); 2617 wide_int w = wi::to_wide (cst); 2618 key_t result = w.to_shwi (); 2619 return result; 2620 } 2621 2622 /* Convert array_region::key_t KEY into a tree constant. */ 2623 2624 tree 2625 array_region::constant_from_key (key_t key) 2626 { 2627 tree array_type = get_type (); 2628 tree index_type = TYPE_DOMAIN (array_type); 2629 return build_int_cst (index_type, key); 2630 } 2631 2632 /* class function_region : public map_region. */ 2633 2634 /* Compare the fields of this function_region with OTHER, returning true 2635 if they are equal. 2636 For use by region::operator==. */ 2637 2638 bool 2639 function_region::compare_fields (const function_region &other) const 2640 { 2641 return map_region::compare_fields (other); 2642 } 2643 2644 /* Implementation of region::clone vfunc for function_region. */ 2645 2646 region * 2647 function_region::clone () const 2648 { 2649 return new function_region (*this); 2650 } 2651 2652 /* Implementation of map_region::valid_key_p vfunc for function_region. */ 2653 2654 bool 2655 function_region::valid_key_p (tree key) const 2656 { 2657 return TREE_CODE (key) == LABEL_DECL; 2658 } 2659 2660 /* class stack_region : public region. */ 2661 2662 /* stack_region's copy ctor. */ 2663 2664 stack_region::stack_region (const stack_region &other) 2665 : region (other), 2666 m_frame_rids (other.m_frame_rids.length ()) 2667 { 2668 int i; 2669 region_id *frame_rid; 2670 FOR_EACH_VEC_ELT (other.m_frame_rids, i, frame_rid) 2671 m_frame_rids.quick_push (*frame_rid); 2672 } 2673 2674 /* Compare the fields of this stack_region with OTHER, returning true 2675 if they are equal. 2676 For use by region::operator==. */ 2677 2678 bool 2679 stack_region::compare_fields (const stack_region &other) const 2680 { 2681 if (m_frame_rids.length () != other.m_frame_rids.length ()) 2682 return false; 2683 2684 int i; 2685 region_id *frame_rid; 2686 FOR_EACH_VEC_ELT (m_frame_rids, i, frame_rid) 2687 if (m_frame_rids[i] != other.m_frame_rids[i]) 2688 return false; 2689 2690 return true; 2691 } 2692 2693 /* Implementation of region::clone vfunc for stack_region. 
*/ 2694 2695 region * 2696 stack_region::clone () const 2697 { 2698 return new stack_region (*this); 2699 } 2700 2701 /* Implementation of region::print_fields vfunc for stack_region. */ 2702 2703 void 2704 stack_region::print_fields (const region_model &model, 2705 region_id this_rid, 2706 pretty_printer *pp) const 2707 { 2708 region::print_fields (model, this_rid, pp); 2709 // TODO 2710 } 2711 2712 /* Implementation of region::dump_child_label vfunc for stack_region. */ 2713 2714 void 2715 stack_region::dump_child_label (const region_model &model, 2716 region_id this_rid ATTRIBUTE_UNUSED, 2717 region_id child_rid, 2718 pretty_printer *pp) const 2719 { 2720 function *fun = model.get_region<frame_region> (child_rid)->get_function (); 2721 pp_printf (pp, "frame for %qs: ", function_name (fun)); 2722 } 2723 2724 /* Implementation of region::validate vfunc for stack_region. */ 2725 2726 void 2727 stack_region::validate (const region_model &model) const 2728 { 2729 region::validate (model); 2730 int i; 2731 region_id *frame_rid; 2732 FOR_EACH_VEC_ELT (m_frame_rids, i, frame_rid) 2733 m_frame_rids[i].validate (model); 2734 } 2735 2736 /* Push FRAME_RID (for a frame_region) onto this stack. */ 2737 2738 void 2739 stack_region::push_frame (region_id frame_rid) 2740 { 2741 m_frame_rids.safe_push (frame_rid); 2742 } 2743 2744 /* Get the region_id of the top-most frame in this stack, if any. */ 2745 2746 region_id 2747 stack_region::get_current_frame_id () const 2748 { 2749 if (m_frame_rids.length () > 0) 2750 return m_frame_rids[m_frame_rids.length () - 1]; 2751 else 2752 return region_id::null (); 2753 } 2754 2755 /* Pop the topmost frame_region from this stack. 2756 2757 If RESULT_DST_RID is non-null, copy any return value from the frame 2758 into RESULT_DST_RID's region. 2759 2760 Purge the frame region and all its descendent regions. 2761 Convert any pointers that point into such regions into 2762 POISON_KIND_POPPED_STACK svalues. 2763 2764 If PURGE, then purge all unused svalues, with the exception of any 2765 returned values. 2766 2767 Accumulate stats on purged entities into STATS. */ 2768 2769 void 2770 stack_region::pop_frame (region_model *model, region_id result_dst_rid, 2771 bool purge, purge_stats *stats, 2772 region_model_context *ctxt) 2773 { 2774 gcc_assert (m_frame_rids.length () > 0); 2775 2776 region_id frame_rid = get_current_frame_id (); 2777 frame_region *frame = model->get_region<frame_region> (frame_rid); 2778 2779 /* Evaluate the result, within the callee frame. */ 2780 svalue_id_set returned_sids; 2781 tree fndecl = frame->get_function ()->decl; 2782 tree result = DECL_RESULT (fndecl); 2783 if (result && TREE_TYPE (result) != void_type_node) 2784 { 2785 if (!result_dst_rid.null_p ()) 2786 { 2787 /* Copy the result to RESULT_DST_RID. */ 2788 model->copy_region (result_dst_rid, model->get_lvalue (result, ctxt), 2789 ctxt); 2790 } 2791 if (purge) 2792 { 2793 /* Populate returned_sids, to avoid purging them. */ 2794 region_id return_rid = model->get_lvalue (result, NULL); 2795 region_id_set returned_rids (model); 2796 model->get_descendents (return_rid, &returned_rids, 2797 region_id::null ()); 2798 for (unsigned i = 0; i < model->get_num_regions (); i++) 2799 { 2800 region_id rid = region_id::from_int (i); 2801 if (returned_rids.region_p (rid)) 2802 { 2803 svalue_id sid = model->get_region (rid)->get_value_direct (); 2804 returned_sids.add_svalue (sid); 2805 } 2806 } 2807 } 2808 } 2809 2810 /* Pop the frame RID. 
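     Below, this deletes the frame region and all of its descendents; any
     pointers that still point into those regions are converted to
     POISON_KIND_POPPED_STACK values, so that later uses of them can be
     reported.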
*/ 2811 m_frame_rids.pop (); 2812 2813 model->delete_region_and_descendents (frame_rid, 2814 POISON_KIND_POPPED_STACK, 2815 stats, 2816 ctxt ? ctxt->get_logger () : NULL); 2817 2818 /* Delete unused svalues, but don't delete the return value(s). */ 2819 if (purge) 2820 model->purge_unused_svalues (stats, ctxt, &returned_sids); 2821 2822 model->validate (); 2823 } 2824 2825 /* Implementation of region::add_to_hash vfunc for stack_region. */ 2826 2827 void 2828 stack_region::add_to_hash (inchash::hash &hstate) const 2829 { 2830 region::add_to_hash (hstate); 2831 2832 int i; 2833 region_id *frame_rid; 2834 FOR_EACH_VEC_ELT (m_frame_rids, i, frame_rid) 2835 inchash::add (*frame_rid, hstate); 2836 } 2837 2838 /* Implementation of region::remap_region_ids vfunc for stack_region. */ 2839 2840 void 2841 stack_region::remap_region_ids (const region_id_map &map) 2842 { 2843 region::remap_region_ids (map); 2844 int i; 2845 region_id *frame_rid; 2846 FOR_EACH_VEC_ELT (m_frame_rids, i, frame_rid) 2847 map.update (&m_frame_rids[i]); 2848 } 2849 2850 /* Attempt to merge STACK_REGION_A and STACK_REGION_B using MERGER. 2851 Return true if the merger is possible, false otherwise. */ 2852 2853 bool 2854 stack_region::can_merge_p (const stack_region *stack_region_a, 2855 const stack_region *stack_region_b, 2856 model_merger *merger) 2857 { 2858 if (stack_region_a->get_num_frames () 2859 != stack_region_b->get_num_frames ()) 2860 return false; 2861 2862 region_model *merged_model = merger->m_merged_model; 2863 2864 region_id rid_merged_stack 2865 = merged_model->get_root_region ()->ensure_stack_region (merged_model); 2866 2867 stack_region *merged_stack 2868 = merged_model->get_region <stack_region> (rid_merged_stack); 2869 2870 /* First, create all frames in the merged model, without populating them. 2871 The merging code assumes that all frames in the merged model already exist, 2872 so we have to do this first to handle the case in which a local in an 2873 older frame points at a local in a more recent frame. */ 2874 for (unsigned i = 0; i < stack_region_a->get_num_frames (); i++) 2875 { 2876 region_id rid_a = stack_region_a->get_frame_rid (i); 2877 frame_region *frame_a = merger->get_region_a <frame_region> (rid_a); 2878 2879 region_id rid_b = stack_region_b->get_frame_rid (i); 2880 frame_region *frame_b = merger->get_region_b <frame_region> (rid_b); 2881 2882 if (frame_a->get_function () != frame_b->get_function ()) 2883 return false; 2884 2885 frame_region *merged_frame = new frame_region (rid_merged_stack, 2886 frame_a->get_function (), 2887 frame_a->get_depth ()); 2888 region_id rid_merged_frame = merged_model->add_region (merged_frame); 2889 merged_stack->push_frame (rid_merged_frame); 2890 } 2891 2892 /* Now populate the frames we created. */ 2893 for (unsigned i = 0; i < stack_region_a->get_num_frames (); i++) 2894 { 2895 region_id rid_a = stack_region_a->get_frame_rid (i); 2896 frame_region *frame_a = merger->get_region_a <frame_region> (rid_a); 2897 2898 region_id rid_b = stack_region_b->get_frame_rid (i); 2899 frame_region *frame_b = merger->get_region_b <frame_region> (rid_b); 2900 2901 region_id rid_merged_frame = merged_stack->get_frame_rid (i); 2902 frame_region *merged_frame 2903 = merged_model->get_region <frame_region> (rid_merged_frame); 2904 if (!map_region::can_merge_p (frame_a, frame_b, 2905 merged_frame, rid_merged_frame, 2906 merger)) 2907 return false; 2908 } 2909 2910 return true; 2911 } 2912 2913 /* Implementation of region::walk_for_canonicalization vfunc for 2914 stack_region. 
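   The frames are visited in the order in which they were pushed, i.e.
   from the outermost frame to the innermost.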
*/ 2915 2916 void 2917 stack_region::walk_for_canonicalization (canonicalization *c) const 2918 { 2919 int i; 2920 region_id *frame_rid; 2921 FOR_EACH_VEC_ELT (m_frame_rids, i, frame_rid) 2922 c->walk_rid (*frame_rid); 2923 } 2924 2925 /* For debugging purposes: look for a grandchild region within one of 2926 the child frame regions, where the grandchild is for a decl named 2927 IDENTIFIER (or an SSA_NAME for such a decl): 2928 2929 stack_region 2930 `-frame_region 2931 `-region for decl named IDENTIFIER 2932 2933 returning its value, or svalue_id::null if none are found. */ 2934 2935 svalue_id 2936 stack_region::get_value_by_name (tree identifier, 2937 const region_model &model) const 2938 { 2939 int i; 2940 region_id *frame_rid; 2941 FOR_EACH_VEC_ELT (m_frame_rids, i, frame_rid) 2942 { 2943 frame_region *frame = model.get_region<frame_region> (*frame_rid); 2944 svalue_id sid = frame->get_value_by_name (identifier, model); 2945 if (!sid.null_p ()) 2946 return sid; 2947 } 2948 2949 return svalue_id::null (); 2950 } 2951 2952 /* class heap_region : public region. */ 2953 2954 /* heap_region's copy ctor. */ 2955 2956 heap_region::heap_region (const heap_region &other) 2957 : region (other) 2958 { 2959 } 2960 2961 /* Compare the fields of this heap_region with OTHER, returning true 2962 if they are equal. 2963 For use by region::operator==. */ 2964 2965 bool 2966 heap_region::compare_fields (const heap_region &) const 2967 { 2968 /* Empty. */ 2969 return true; 2970 } 2971 2972 /* Implementation of region::clone vfunc for heap_region. */ 2973 2974 region * 2975 heap_region::clone () const 2976 { 2977 return new heap_region (*this); 2978 } 2979 2980 /* Implementation of region::walk_for_canonicalization vfunc for 2981 heap_region. */ 2982 2983 void 2984 heap_region::walk_for_canonicalization (canonicalization *) const 2985 { 2986 /* Empty. */ 2987 } 2988 2989 /* class root_region : public region. */ 2990 2991 /* root_region's default ctor. */ 2992 2993 root_region::root_region () 2994 : region (region_id::null (), 2995 svalue_id::null (), 2996 NULL_TREE) 2997 { 2998 } 2999 3000 /* root_region's copy ctor. */ 3001 3002 root_region::root_region (const root_region &other) 3003 : region (other), 3004 m_stack_rid (other.m_stack_rid), 3005 m_globals_rid (other.m_globals_rid), 3006 m_code_rid (other.m_code_rid), 3007 m_heap_rid (other.m_heap_rid) 3008 { 3009 } 3010 3011 /* Compare the fields of this root_region with OTHER, returning true 3012 if they are equal. 3013 For use by region::operator==. */ 3014 3015 bool 3016 root_region::compare_fields (const root_region &other) const 3017 { 3018 if (m_stack_rid != other.m_stack_rid) 3019 return false; 3020 if (m_globals_rid != other.m_globals_rid) 3021 return false; 3022 if (m_code_rid != other.m_code_rid) 3023 return false; 3024 if (m_heap_rid != other.m_heap_rid) 3025 return false; 3026 return true; 3027 } 3028 3029 /* Implementation of region::clone vfunc for root_region. */ 3030 3031 region * 3032 root_region::clone () const 3033 { 3034 return new root_region (*this); 3035 } 3036 3037 /* Implementation of region::print_fields vfunc for root_region. */ 3038 3039 void 3040 root_region::print_fields (const region_model &model, 3041 region_id this_rid, 3042 pretty_printer *pp) const 3043 { 3044 region::print_fields (model, this_rid, pp); 3045 // TODO 3046 } 3047 3048 /* Implementation of region::validate vfunc for root_region. 
*/ 3049 3050 void 3051 root_region::validate (const region_model &model) const 3052 { 3053 region::validate (model); 3054 m_stack_rid.validate (model); 3055 m_globals_rid.validate (model); 3056 m_code_rid.validate (model); 3057 m_heap_rid.validate (model); 3058 } 3059 3060 /* Implementation of region::dump_child_label vfunc for root_region. */ 3061 3062 void 3063 root_region::dump_child_label (const region_model &model ATTRIBUTE_UNUSED, 3064 region_id this_rid ATTRIBUTE_UNUSED, 3065 region_id child_rid, 3066 pretty_printer *pp) const 3067 { 3068 if (child_rid == m_stack_rid) 3069 pp_printf (pp, "stack: "); 3070 else if (child_rid == m_globals_rid) 3071 pp_printf (pp, "globals: "); 3072 else if (child_rid == m_code_rid) 3073 pp_printf (pp, "code: "); 3074 else if (child_rid == m_heap_rid) 3075 pp_printf (pp, "heap: "); 3076 } 3077 3078 /* Create a new frame_region for a call to FUN and push it onto 3079 the stack. 3080 3081 If ARG_SIDS is non-NULL, use it to populate the parameters 3082 in the new frame. 3083 Otherwise, populate them with unknown values. 3084 3085 Return the region_id of the new frame. */ 3086 3087 region_id 3088 root_region::push_frame (region_model *model, function *fun, 3089 vec<svalue_id> *arg_sids, 3090 region_model_context *ctxt) 3091 { 3092 gcc_assert (fun); 3093 /* arg_sids can be NULL. */ 3094 3095 ensure_stack_region (model); 3096 stack_region *stack = model->get_region <stack_region> (m_stack_rid); 3097 3098 frame_region *region = new frame_region (m_stack_rid, fun, 3099 stack->get_num_frames ()); 3100 region_id frame_rid = model->add_region (region); 3101 3102 // TODO: unify these cases by building a vec of unknown? 3103 3104 if (arg_sids) 3105 { 3106 /* Arguments supplied from a caller frame. */ 3107 3108 tree fndecl = fun->decl; 3109 unsigned idx = 0; 3110 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm; 3111 iter_parm = DECL_CHAIN (iter_parm), ++idx) 3112 { 3113 /* If there's a mismatching declaration, the call stmt might 3114 not have enough args. Handle this case by leaving the 3115 rest of the params as uninitialized. */ 3116 if (idx >= arg_sids->length ()) 3117 break; 3118 svalue_id arg_sid = (*arg_sids)[idx]; 3119 region_id parm_rid 3120 = region->get_or_create (model, frame_rid, iter_parm, 3121 TREE_TYPE (iter_parm), ctxt); 3122 model->set_value (parm_rid, arg_sid, ctxt); 3123 3124 /* Also do it for default SSA name (sharing the same unknown 3125 value). */ 3126 tree parm_default_ssa = ssa_default_def (fun, iter_parm); 3127 if (parm_default_ssa) 3128 { 3129 region_id defssa_rid 3130 = region->get_or_create (model, frame_rid, parm_default_ssa, 3131 TREE_TYPE (iter_parm), ctxt); 3132 model->set_value (defssa_rid, arg_sid, ctxt); 3133 } 3134 } 3135 } 3136 else 3137 { 3138 /* No known arguments (a top-level call within the analysis). */ 3139 3140 /* Params have a defined, unknown value; they should not inherit 3141 from the poisoned uninit value. */ 3142 tree fndecl = fun->decl; 3143 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm; 3144 iter_parm = DECL_CHAIN (iter_parm)) 3145 { 3146 region_id parm_rid 3147 = region->get_or_create (model, frame_rid, iter_parm, 3148 TREE_TYPE (iter_parm), ctxt); 3149 svalue_id parm_sid 3150 = model->set_to_new_unknown_value (parm_rid, TREE_TYPE (iter_parm), 3151 ctxt); 3152 3153 /* Also do it for default SSA name (sharing the same unknown 3154 value). 
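	     For example, for a parameter "int i", both the PARM_DECL "i"
	     and its default-def SSA name (e.g. "i_1(D)" in gimple dumps)
	     end up bound to the same fresh unknown svalue.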
*/ 3155 tree parm_default_ssa = ssa_default_def (fun, iter_parm); 3156 if (parm_default_ssa) 3157 { 3158 region_id defssa_rid 3159 = region->get_or_create (model, frame_rid, parm_default_ssa, 3160 TREE_TYPE (iter_parm), ctxt); 3161 model->get_region (defssa_rid)->set_value (*model, defssa_rid, 3162 parm_sid, ctxt); 3163 } 3164 } 3165 } 3166 3167 stack->push_frame (frame_rid); 3168 3169 return frame_rid; 3170 } 3171 3172 /* Get the region_id of the top-most frame in this root_region's stack, 3173 if any. */ 3174 3175 region_id 3176 root_region::get_current_frame_id (const region_model &model) const 3177 { 3178 stack_region *stack = model.get_region <stack_region> (m_stack_rid); 3179 if (stack) 3180 return stack->get_current_frame_id (); 3181 else 3182 return region_id::null (); 3183 } 3184 3185 /* Pop the topmost frame_region from this root_region's stack; 3186 see the comment for stack_region::pop_frame. */ 3187 3188 void 3189 root_region::pop_frame (region_model *model, region_id result_dst_rid, 3190 bool purge, purge_stats *out, 3191 region_model_context *ctxt) 3192 { 3193 stack_region *stack = model->get_region <stack_region> (m_stack_rid); 3194 stack->pop_frame (model, result_dst_rid, purge, out, ctxt); 3195 } 3196 3197 /* Return the region_id of the stack region, creating it if doesn't 3198 already exist. */ 3199 3200 region_id 3201 root_region::ensure_stack_region (region_model *model) 3202 { 3203 if (m_stack_rid.null_p ()) 3204 { 3205 m_stack_rid 3206 = model->add_region (new stack_region (model->get_root_rid (), 3207 svalue_id::null ())); 3208 } 3209 return m_stack_rid; 3210 } 3211 3212 /* Return the stack region (which could be NULL). */ 3213 3214 stack_region * 3215 root_region::get_stack_region (const region_model *model) const 3216 { 3217 return model->get_region <stack_region> (m_stack_rid); 3218 } 3219 3220 /* Return the region_id of the globals region, creating it if doesn't 3221 already exist. */ 3222 3223 region_id 3224 root_region::ensure_globals_region (region_model *model) 3225 { 3226 if (m_globals_rid.null_p ()) 3227 m_globals_rid 3228 = model->add_region (new globals_region (model->get_root_rid ())); 3229 return m_globals_rid; 3230 } 3231 3232 /* Return the code region (which could be NULL). */ 3233 3234 code_region * 3235 root_region::get_code_region (const region_model *model) const 3236 { 3237 return model->get_region <code_region> (m_code_rid); 3238 } 3239 3240 /* Return the region_id of the code region, creating it if doesn't 3241 already exist. */ 3242 3243 region_id 3244 root_region::ensure_code_region (region_model *model) 3245 { 3246 if (m_code_rid.null_p ()) 3247 m_code_rid 3248 = model->add_region (new code_region (model->get_root_rid ())); 3249 return m_code_rid; 3250 } 3251 3252 /* Return the globals region (which could be NULL). */ 3253 3254 globals_region * 3255 root_region::get_globals_region (const region_model *model) const 3256 { 3257 return model->get_region <globals_region> (m_globals_rid); 3258 } 3259 3260 /* Return the region_id of the heap region, creating it if doesn't 3261 already exist. */ 3262 3263 region_id 3264 root_region::ensure_heap_region (region_model *model) 3265 { 3266 if (m_heap_rid.null_p ()) 3267 { 3268 m_heap_rid 3269 = model->add_region (new heap_region (model->get_root_rid (), 3270 svalue_id::null ())); 3271 } 3272 return m_heap_rid; 3273 } 3274 3275 /* Return the heap region (which could be NULL). 
*/ 3276 3277 heap_region * 3278 root_region::get_heap_region (const region_model *model) const 3279 { 3280 return model->get_region <heap_region> (m_heap_rid); 3281 } 3282 3283 /* Implementation of region::remap_region_ids vfunc for root_region. */ 3284 3285 void 3286 root_region::remap_region_ids (const region_id_map &map) 3287 { 3288 map.update (&m_stack_rid); 3289 map.update (&m_globals_rid); 3290 map.update (&m_code_rid); 3291 map.update (&m_heap_rid); 3292 } 3293 3294 /* Attempt to merge ROOT_REGION_A and ROOT_REGION_B into 3295 MERGED_ROOT_REGION using MERGER. 3296 Return true if the merger is possible, false otherwise. */ 3297 3298 bool 3299 root_region::can_merge_p (const root_region *root_region_a, 3300 const root_region *root_region_b, 3301 root_region *merged_root_region, 3302 model_merger *merger) 3303 { 3304 /* We can only merge if the stacks are sufficiently similar. */ 3305 stack_region *stack_a = root_region_a->get_stack_region (merger->m_model_a); 3306 stack_region *stack_b = root_region_b->get_stack_region (merger->m_model_b); 3307 if (stack_a && stack_b) 3308 { 3309 /* If the two models both have a stack, attempt to merge them. */ 3310 merged_root_region->ensure_stack_region (merger->m_merged_model); 3311 if (!stack_region::can_merge_p (stack_a, stack_b, merger)) 3312 return false; 3313 } 3314 else if (stack_a || stack_b) 3315 /* Don't attempt to merge if one model has a stack and the other 3316 doesn't. */ 3317 return false; 3318 3319 map_region *globals_a = root_region_a->get_globals_region (merger->m_model_a); 3320 map_region *globals_b = root_region_b->get_globals_region (merger->m_model_b); 3321 if (globals_a && globals_b) 3322 { 3323 /* If both models have globals regions, attempt to merge them. */ 3324 region_id merged_globals_rid 3325 = merged_root_region->ensure_globals_region (merger->m_merged_model); 3326 map_region *merged_globals 3327 = merged_root_region->get_globals_region (merger->m_merged_model); 3328 if (!map_region::can_merge_p (globals_a, globals_b, 3329 merged_globals, merged_globals_rid, 3330 merger)) 3331 return false; 3332 } 3333 /* otherwise, merge as "no globals". */ 3334 3335 map_region *code_a = root_region_a->get_code_region (merger->m_model_a); 3336 map_region *code_b = root_region_b->get_code_region (merger->m_model_b); 3337 if (code_a && code_b) 3338 { 3339 /* If both models have code regions, attempt to merge them. */ 3340 region_id merged_code_rid 3341 = merged_root_region->ensure_code_region (merger->m_merged_model); 3342 map_region *merged_code 3343 = merged_root_region->get_code_region (merger->m_merged_model); 3344 if (!map_region::can_merge_p (code_a, code_b, 3345 merged_code, merged_code_rid, 3346 merger)) 3347 return false; 3348 } 3349 /* otherwise, merge as "no code". */ 3350 3351 heap_region *heap_a = root_region_a->get_heap_region (merger->m_model_a); 3352 heap_region *heap_b = root_region_b->get_heap_region (merger->m_model_b); 3353 if (heap_a && heap_b) 3354 { 3355 /* If both have a heap, create a "merged" heap. 3356 Actually merging the heap contents happens via the region_svalue 3357 instances, as needed, when seeing pairs of region_svalue instances. */ 3358 merged_root_region->ensure_heap_region (merger->m_merged_model); 3359 } 3360 /* otherwise, merge as "no heap". */ 3361 3362 return true; 3363 } 3364 3365 /* Implementation of region::add_to_hash vfunc for root_region. 
*/ 3366 3367 void 3368 root_region::add_to_hash (inchash::hash &hstate) const 3369 { 3370 region::add_to_hash (hstate); 3371 inchash::add (m_stack_rid, hstate); 3372 inchash::add (m_globals_rid, hstate); 3373 inchash::add (m_code_rid, hstate); 3374 inchash::add (m_heap_rid, hstate); 3375 } 3376 3377 /* Implementation of region::walk_for_canonicalization vfunc for 3378 root_region. */ 3379 3380 void 3381 root_region::walk_for_canonicalization (canonicalization *c) const 3382 { 3383 c->walk_rid (m_stack_rid); 3384 c->walk_rid (m_globals_rid); 3385 c->walk_rid (m_code_rid); 3386 c->walk_rid (m_heap_rid); 3387 } 3388 3389 /* For debugging purposes: look for a descendant region for a local 3390 or global decl named IDENTIFIER (or an SSA_NAME for such a decl), 3391 returning its value, or svalue_id::null if none are found. */ 3392 3393 svalue_id 3394 root_region::get_value_by_name (tree identifier, 3395 const region_model &model) const 3396 { 3397 if (stack_region *stack = get_stack_region (&model)) 3398 { 3399 svalue_id sid = stack->get_value_by_name (identifier, model); 3400 if (!sid.null_p ()) 3401 return sid; 3402 } 3403 if (map_region *globals = get_globals_region (&model)) 3404 { 3405 svalue_id sid = globals->get_value_by_name (identifier, model); 3406 if (!sid.null_p ()) 3407 return sid; 3408 } 3409 return svalue_id::null (); 3410 } 3411 3412 /* class symbolic_region : public region. */ 3413 3414 /* symbolic_region's copy ctor. */ 3415 3416 symbolic_region::symbolic_region (const symbolic_region &other) 3417 : region (other), 3418 m_possibly_null (other.m_possibly_null) 3419 { 3420 } 3421 3422 /* Compare the fields of this symbolic_region with OTHER, returning true 3423 if they are equal. 3424 For use by region::operator==. */ 3425 3426 bool 3427 symbolic_region::compare_fields (const symbolic_region &other) const 3428 { 3429 return m_possibly_null == other.m_possibly_null; 3430 } 3431 3432 /* Implementation of region::clone vfunc for symbolic_region. */ 3433 3434 region * 3435 symbolic_region::clone () const 3436 { 3437 return new symbolic_region (*this); 3438 } 3439 3440 /* Implementation of region::walk_for_canonicalization vfunc for 3441 symbolic_region. */ 3442 3443 void 3444 symbolic_region::walk_for_canonicalization (canonicalization *) const 3445 { 3446 /* Empty. */ 3447 } 3448 3449 /* Implementation of region::print_fields vfunc for symbolic_region. */ 3450 3451 void 3452 symbolic_region::print_fields (const region_model &model, 3453 region_id this_rid, 3454 pretty_printer *pp) const 3455 { 3456 region::print_fields (model, this_rid, pp); 3457 pp_printf (pp, ", possibly_null: %s", m_possibly_null ? "true" : "false"); 3458 } 3459 3460 /* class region_model. */ 3461 3462 /* region_model's default ctor. */ 3463 3464 region_model::region_model () 3465 { 3466 m_root_rid = add_region (new root_region ()); 3467 m_constraints = new impl_constraint_manager (this); 3468 // TODO 3469 } 3470 3471 /* region_model's copy ctor. */ 3472 3473 region_model::region_model (const region_model &other) 3474 : m_svalues (other.m_svalues.length ()), 3475 m_regions (other.m_regions.length ()), 3476 m_root_rid (other.m_root_rid) 3477 { 3478 /* Clone the svalues and regions.
*/ 3479 int i; 3480 3481 svalue *svalue; 3482 FOR_EACH_VEC_ELT (other.m_svalues, i, svalue) 3483 m_svalues.quick_push (svalue->clone ()); 3484 3485 region *region; 3486 FOR_EACH_VEC_ELT (other.m_regions, i, region) 3487 m_regions.quick_push (region->clone ()); 3488 3489 m_constraints = other.m_constraints->clone (this); 3490 } 3491 3492 /* region_model's dtor. */ 3493 3494 region_model::~region_model () 3495 { 3496 delete m_constraints; 3497 } 3498 3499 /* region_model's assignment operator. */ 3500 3501 region_model & 3502 region_model::operator= (const region_model &other) 3503 { 3504 unsigned i; 3505 svalue *svalue; 3506 region *region; 3507 3508 /* Delete existing content. */ 3509 FOR_EACH_VEC_ELT (m_svalues, i, svalue) 3510 delete svalue; 3511 m_svalues.truncate (0); 3512 3513 FOR_EACH_VEC_ELT (m_regions, i, region) 3514 delete region; 3515 m_regions.truncate (0); 3516 3517 delete m_constraints; 3518 3519 /* Clone the svalues and regions. */ 3520 m_svalues.reserve (other.m_svalues.length (), true); 3521 FOR_EACH_VEC_ELT (other.m_svalues, i, svalue) 3522 m_svalues.quick_push (svalue->clone ()); 3523 3524 m_regions.reserve (other.m_regions.length (), true); 3525 FOR_EACH_VEC_ELT (other.m_regions, i, region) 3526 m_regions.quick_push (region->clone ()); 3527 3528 m_root_rid = other.m_root_rid; 3529 3530 m_constraints = other.m_constraints->clone (this); 3531 3532 return *this; 3533 } 3534 3535 /* Equality operator for region_model. 3536 3537 Amongst other things this directly compares the svalue and region 3538 vectors and so for this to be meaningful both this and OTHER should 3539 have been canonicalized. */ 3540 3541 bool 3542 region_model::operator== (const region_model &other) const 3543 { 3544 if (m_root_rid != other.m_root_rid) 3545 return false; 3546 3547 if (m_svalues.length () != other.m_svalues.length ()) 3548 return false; 3549 3550 if (m_regions.length () != other.m_regions.length ()) 3551 return false; 3552 3553 if (*m_constraints != *other.m_constraints) 3554 return false; 3555 3556 unsigned i; 3557 svalue *svalue; 3558 FOR_EACH_VEC_ELT (other.m_svalues, i, svalue) 3559 if (!(*m_svalues[i] == *other.m_svalues[i])) 3560 return false; 3561 3562 region *region; 3563 FOR_EACH_VEC_ELT (other.m_regions, i, region) 3564 if (!(*m_regions[i] == *other.m_regions[i])) 3565 return false; 3566 3567 gcc_checking_assert (hash () == other.hash ()); 3568 3569 return true; 3570 } 3571 3572 /* Generate a hash value for this region_model. */ 3573 3574 hashval_t 3575 region_model::hash () const 3576 { 3577 hashval_t result = 0; 3578 int i; 3579 3580 svalue *svalue; 3581 FOR_EACH_VEC_ELT (m_svalues, i, svalue) 3582 result ^= svalue->hash (); 3583 3584 region *region; 3585 FOR_EACH_VEC_ELT (m_regions, i, region) 3586 result ^= region->hash (); 3587 3588 result ^= m_constraints->hash (); 3589 3590 return result; 3591 } 3592 3593 /* Print an all-on-one-line representation of this region_model to PP, 3594 which must support %E for trees. 
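   The output is of the form:
     svalues: [...], regions: [...], constraints: ...
   where the elements of the two lists come from the individual svalue
   and region "print" vfuncs.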
*/ 3595 3596 void 3597 region_model::print (pretty_printer *pp) const 3598 { 3599 int i; 3600 3601 pp_string (pp, "svalues: ["); 3602 svalue *svalue; 3603 FOR_EACH_VEC_ELT (m_svalues, i, svalue) 3604 { 3605 if (i > 0) 3606 pp_string (pp, ", "); 3607 print_svalue (svalue_id::from_int (i), pp); 3608 } 3609 3610 pp_string (pp, "], regions: ["); 3611 3612 region *region; 3613 FOR_EACH_VEC_ELT (m_regions, i, region) 3614 { 3615 if (i > 0) 3616 pp_string (pp, ", "); 3617 region->print (*this, region_id::from_int (i), pp); 3618 } 3619 3620 pp_string (pp, "], constraints: "); 3621 3622 m_constraints->print (pp); 3623 } 3624 3625 /* Print the svalue with id SID to PP. */ 3626 3627 void 3628 region_model::print_svalue (svalue_id sid, pretty_printer *pp) const 3629 { 3630 get_svalue (sid)->print (*this, sid, pp); 3631 } 3632 3633 /* Dump a .dot representation of this region_model to PP, showing 3634 the values and the hierarchy of regions. */ 3635 3636 void 3637 region_model::dump_dot_to_pp (pretty_printer *pp) const 3638 { 3639 graphviz_out gv (pp); 3640 3641 pp_string (pp, "digraph \""); 3642 pp_write_text_to_stream (pp); 3643 pp_write_text_as_dot_label_to_stream (pp, /*for_record=*/false); 3644 pp_string (pp, "\" {\n"); 3645 3646 gv.indent (); 3647 3648 pp_string (pp, "overlap=false;\n"); 3649 pp_string (pp, "compound=true;\n"); 3650 3651 int i; 3652 3653 svalue *svalue; 3654 FOR_EACH_VEC_ELT (m_svalues, i, svalue) 3655 svalue->dump_dot_to_pp (*this, svalue_id::from_int (i), pp); 3656 3657 region *region; 3658 FOR_EACH_VEC_ELT (m_regions, i, region) 3659 region->dump_dot_to_pp (*this, region_id::from_int (i), pp); 3660 3661 /* TODO: constraints. */ 3662 3663 /* Terminate "digraph" */ 3664 gv.outdent (); 3665 pp_string (pp, "}"); 3666 pp_newline (pp); 3667 } 3668 3669 /* Dump a .dot representation of this region_model to FP. */ 3670 3671 void 3672 region_model::dump_dot_to_file (FILE *fp) const 3673 { 3674 pretty_printer pp; 3675 pp_format_decoder (&pp) = default_tree_printer; 3676 pp.buffer->stream = fp; 3677 dump_dot_to_pp (&pp); 3678 pp_flush (&pp); 3679 } 3680 3681 /* Dump a .dot representation of this region_model to PATH. */ 3682 3683 void 3684 region_model::dump_dot (const char *path) const 3685 { 3686 FILE *fp = fopen (path, "w"); 3687 dump_dot_to_file (fp); 3688 fclose (fp); 3689 } 3690 3691 /* Dump a multiline representation of this model to PP, showing the 3692 region hierarchy, the svalues, and any constraints. 3693 3694 If SUMMARIZE is true, show only the most pertinent information, 3695 in a form that attempts to be less verbose. 3696 Otherwise, show all information. */ 3697 3698 void 3699 region_model::dump_to_pp (pretty_printer *pp, bool summarize) const 3700 { 3701 if (summarize) 3702 { 3703 auto_vec<path_var> rep_path_vars; 3704 3705 unsigned i; 3706 region *reg; 3707 FOR_EACH_VEC_ELT (m_regions, i, reg) 3708 { 3709 region_id rid = region_id::from_int (i); 3710 path_var pv = get_representative_path_var (rid); 3711 if (pv.m_tree) 3712 rep_path_vars.safe_push (pv); 3713 } 3714 bool is_first = true; 3715 3716 /* Work with a copy in case the get_lvalue calls change anything 3717 (they shouldn't). 
*/ 3718 region_model copy (*this); 3719 copy.dump_summary_of_rep_path_vars (pp, &rep_path_vars, &is_first); 3720 3721 equiv_class *ec; 3722 FOR_EACH_VEC_ELT (m_constraints->m_equiv_classes, i, ec) 3723 { 3724 for (unsigned j = 0; j < ec->m_vars.length (); j++) 3725 { 3726 svalue_id lhs_sid = ec->m_vars[j]; 3727 tree lhs_tree = get_representative_tree (lhs_sid); 3728 if (lhs_tree == NULL_TREE) 3729 continue; 3730 for (unsigned k = j + 1; k < ec->m_vars.length (); k++) 3731 { 3732 svalue_id rhs_sid = ec->m_vars[k]; 3733 tree rhs_tree = get_representative_tree (rhs_sid); 3734 if (rhs_tree 3735 && !(CONSTANT_CLASS_P (lhs_tree) 3736 && CONSTANT_CLASS_P (rhs_tree))) 3737 { 3738 dump_separator (pp, &is_first); 3739 dump_tree (pp, lhs_tree); 3740 pp_string (pp, " == "); 3741 dump_tree (pp, rhs_tree); 3742 } 3743 } 3744 } 3745 } 3746 3747 constraint *c; 3748 FOR_EACH_VEC_ELT (m_constraints->m_constraints, i, c) 3749 { 3750 const equiv_class &lhs = c->m_lhs.get_obj (*m_constraints); 3751 const equiv_class &rhs = c->m_rhs.get_obj (*m_constraints); 3752 svalue_id lhs_sid = lhs.get_representative (); 3753 svalue_id rhs_sid = rhs.get_representative (); 3754 tree lhs_tree = get_representative_tree (lhs_sid); 3755 tree rhs_tree = get_representative_tree (rhs_sid); 3756 if (lhs_tree && rhs_tree 3757 && !(CONSTANT_CLASS_P (lhs_tree) && CONSTANT_CLASS_P (rhs_tree))) 3758 { 3759 dump_separator (pp, &is_first); 3760 dump_tree (pp, lhs_tree); 3761 pp_printf (pp, " %s ", constraint_op_code (c->m_op)); 3762 dump_tree (pp, rhs_tree); 3763 } 3764 } 3765 3766 return; 3767 } 3768 3769 get_region (m_root_rid)->dump_to_pp (*this, m_root_rid, pp, "", true); 3770 3771 pp_string (pp, "svalues:"); 3772 pp_newline (pp); 3773 int i; 3774 svalue *svalue; 3775 FOR_EACH_VEC_ELT (m_svalues, i, svalue) 3776 { 3777 pp_string (pp, " "); 3778 svalue_id sid = svalue_id::from_int (i); 3779 print_svalue (sid, pp); 3780 pp_newline (pp); 3781 } 3782 3783 pp_string (pp, "constraint manager:"); 3784 pp_newline (pp); 3785 m_constraints->dump_to_pp (pp); 3786 } 3787 3788 /* Dump a multiline representation of this model to FILE. */ 3789 3790 void 3791 region_model::dump (FILE *fp, bool summarize) const 3792 { 3793 pretty_printer pp; 3794 pp_format_decoder (&pp) = default_tree_printer; 3795 pp_show_color (&pp) = pp_show_color (global_dc->printer); 3796 pp.buffer->stream = fp; 3797 dump_to_pp (&pp, summarize); 3798 pp_flush (&pp); 3799 } 3800 3801 /* Dump a multiline representation of this model to stderr. */ 3802 3803 DEBUG_FUNCTION void 3804 region_model::dump (bool summarize) const 3805 { 3806 dump (stderr, summarize); 3807 } 3808 3809 /* Dump RMODEL fully to stderr (i.e. without summarization). */ 3810 3811 DEBUG_FUNCTION void 3812 region_model::debug () const 3813 { 3814 dump (false); 3815 } 3816 3817 /* Dump VEC to PP, in the form "{VEC elements}: LABEL". */ 3818 3819 static void 3820 dump_vec_of_tree (pretty_printer *pp, 3821 bool *is_first, 3822 const auto_vec<tree> &vec, 3823 const char *label) 3824 { 3825 if (vec.length () == 0) 3826 return; 3827 3828 dump_separator (pp, is_first); 3829 pp_printf (pp, "{"); 3830 unsigned i; 3831 tree key; 3832 FOR_EACH_VEC_ELT (vec, i, key) 3833 { 3834 if (i > 0) 3835 pp_string (pp, ", "); 3836 dump_tree (pp, key); 3837 } 3838 pp_printf (pp, "}: %s", label); 3839 } 3840 3841 /* Dump all *REP_PATH_VARS to PP in compact form, updating *IS_FIRST. 3842 Subroutine of region_model::dump_to_pp. 
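   The summary output looks something like:
     p: &buf, i: 42, {x, y}: unknown
   with pointers, constants and poisoned values printed individually and
   unknown values consolidated into a single "{...}: unknown" entry
   (the names above are purely illustrative).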
*/ 3843 3844 void 3845 region_model::dump_summary_of_rep_path_vars (pretty_printer *pp, 3846 auto_vec<path_var> *rep_path_vars, 3847 bool *is_first) 3848 { 3849 /* Print pointers, constants, and poisoned values that aren't "uninit"; 3850 gather keys for unknown and uninit values. */ 3851 unsigned i; 3852 path_var *pv; 3853 auto_vec<tree> unknown_trees; 3854 FOR_EACH_VEC_ELT (*rep_path_vars, i, pv) 3855 { 3856 if (TREE_CODE (pv->m_tree) == STRING_CST) 3857 continue; 3858 tentative_region_model_context ctxt; 3859 region_id child_rid = get_lvalue (*pv, &ctxt); 3860 if (ctxt.had_errors_p ()) 3861 continue; 3862 region *child_region = get_region (child_rid); 3863 if (!child_region) 3864 continue; 3865 svalue_id sid = child_region->get_value_direct (); 3866 if (sid.null_p ()) 3867 continue; 3868 svalue *sval = get_svalue (sid); 3869 switch (sval->get_kind ()) 3870 { 3871 default: 3872 gcc_unreachable (); 3873 case SK_REGION: 3874 { 3875 region_svalue *region_sval = as_a <region_svalue *> (sval); 3876 region_id pointee_rid = region_sval->get_pointee (); 3877 gcc_assert (!pointee_rid.null_p ()); 3878 tree pointee = get_representative_path_var (pointee_rid).m_tree; 3879 dump_separator (pp, is_first); 3880 dump_tree (pp, pv->m_tree); 3881 pp_string (pp, ": "); 3882 pp_character (pp, '&'); 3883 if (pointee) 3884 dump_tree (pp, pointee); 3885 else 3886 pointee_rid.print (pp); 3887 } 3888 break; 3889 case SK_CONSTANT: 3890 dump_separator (pp, is_first); 3891 dump_tree (pp, pv->m_tree); 3892 pp_string (pp, ": "); 3893 dump_tree (pp, sval->dyn_cast_constant_svalue ()->get_constant ()); 3894 break; 3895 case SK_UNKNOWN: 3896 unknown_trees.safe_push (pv->m_tree); 3897 break; 3898 case SK_POISONED: 3899 { 3900 poisoned_svalue *poisoned_sval = as_a <poisoned_svalue *> (sval); 3901 enum poison_kind pkind = poisoned_sval->get_poison_kind (); 3902 dump_separator (pp, is_first); 3903 dump_tree (pp, pv->m_tree); 3904 pp_printf (pp, ": %s", poison_kind_to_str (pkind)); 3905 } 3906 break; 3907 case SK_SETJMP: 3908 dump_separator (pp, is_first); 3909 pp_printf (pp, "setjmp: EN: %i", 3910 sval->dyn_cast_setjmp_svalue ()->get_enode_index ()); 3911 break; 3912 } 3913 } 3914 3915 /* Print unknown and uninitialized values in consolidated form. */ 3916 dump_vec_of_tree (pp, is_first, unknown_trees, "unknown"); 3917 } 3918 3919 /* Assert that this object is valid. */ 3920 3921 void 3922 region_model::validate () const 3923 { 3924 /* Skip this in a release build. */ 3925 #if !CHECKING_P 3926 return; 3927 #endif 3928 3929 m_constraints->validate (); 3930 3931 unsigned i; 3932 region *r; 3933 FOR_EACH_VEC_ELT (m_regions, i, r) 3934 r->validate (*this); 3935 3936 // TODO: anything else? 3937 } 3938 3939 /* Global data for use by svalue_id_cmp_by_constant_svalue. */ 3940 3941 static region_model *svalue_id_cmp_by_constant_svalue_model = NULL; 3942 3943 /* Comparator for use by region_model::canonicalize. 
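   Orders two svalue_ids that are known to reference constant svalues by
   comparing their constants via tree_cmp.  qsort callbacks can't be
   passed a context argument, hence the file-scope
   svalue_id_cmp_by_constant_svalue_model variable above.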
*/ 3944 3945 static int 3946 svalue_id_cmp_by_constant_svalue (const void *p1, const void *p2) 3947 { 3948 const svalue_id *sid1 = (const svalue_id *)p1; 3949 const svalue_id *sid2 = (const svalue_id *)p2; 3950 gcc_assert (!sid1->null_p ()); 3951 gcc_assert (!sid2->null_p ()); 3952 gcc_assert (svalue_id_cmp_by_constant_svalue_model); 3953 const svalue &sval1 3954 = *svalue_id_cmp_by_constant_svalue_model->get_svalue (*sid1); 3955 const svalue &sval2 3956 = *svalue_id_cmp_by_constant_svalue_model->get_svalue (*sid2); 3957 gcc_assert (sval1.get_kind () == SK_CONSTANT); 3958 gcc_assert (sval2.get_kind () == SK_CONSTANT); 3959 3960 tree cst1 = ((const constant_svalue &)sval1).get_constant (); 3961 tree cst2 = ((const constant_svalue &)sval2).get_constant (); 3962 return tree_cmp (cst1, cst2); 3963 } 3964 3965 /* Reorder the regions and svalues into a deterministic "canonical" order, 3966 to maximize the chance of equality. 3967 If non-NULL, notify CTXT about the svalue id remapping. */ 3968 3969 void 3970 region_model::canonicalize (region_model_context *ctxt) 3971 { 3972 /* Walk all regions and values in a deterministic order, visiting 3973 rids and sids, generating a rid and sid map. */ 3974 canonicalization c (*this); 3975 3976 /* (1): Walk all svalues, putting constants first, sorting the constants 3977 (thus imposing an ordering on any constants that are purely referenced 3978 by constraints). 3979 Ignore other svalues for now. */ 3980 { 3981 unsigned i; 3982 auto_vec<svalue_id> sids; 3983 svalue *sval; 3984 FOR_EACH_VEC_ELT (m_svalues, i, sval) 3985 { 3986 if (sval->get_kind () == SK_CONSTANT) 3987 sids.safe_push (svalue_id::from_int (i)); 3988 } 3989 svalue_id_cmp_by_constant_svalue_model = this; 3990 sids.qsort (svalue_id_cmp_by_constant_svalue); 3991 svalue_id_cmp_by_constant_svalue_model = NULL; 3992 svalue_id *sid; 3993 FOR_EACH_VEC_ELT (sids, i, sid) 3994 c.walk_sid (*sid); 3995 } 3996 3997 /* (2): Walk all regions (and thus their values) in a deterministic 3998 order. */ 3999 c.walk_rid (m_root_rid); 4000 4001 /* (3): Ensure we've visited everything, as we don't want to purge 4002 at this stage. Anything we visit for the first time here has 4003 arbitrary order. */ 4004 { 4005 unsigned i; 4006 region *region; 4007 FOR_EACH_VEC_ELT (m_regions, i, region) 4008 c.walk_rid (region_id::from_int (i)); 4009 svalue *sval; 4010 FOR_EACH_VEC_ELT (m_svalues, i, sval) 4011 c.walk_sid (svalue_id::from_int (i)); 4012 } 4013 4014 /* (4): We now have a reordering of the regions and values. 4015 Apply it. */ 4016 remap_svalue_ids (c.m_sid_map); 4017 remap_region_ids (c.m_rid_map); 4018 if (ctxt) 4019 ctxt->remap_svalue_ids (c.m_sid_map); 4020 4021 /* (5): Canonicalize the constraint_manager (it has already had its 4022 svalue_ids remapped above). This makes use of the new svalue_id 4023 values, and so must happen last. */ 4024 m_constraints->canonicalize (get_num_svalues ()); 4025 4026 validate (); 4027 } 4028 4029 /* Return true if this region_model is in canonical form. */ 4030 4031 bool 4032 region_model::canonicalized_p () const 4033 { 4034 region_model copy (*this); 4035 copy.canonicalize (NULL); 4036 return *this == copy; 4037 } 4038 4039 /* A subclass of pending_diagnostic for complaining about uses of 4040 poisoned values. 
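   As a purely illustrative sketch (the names are arbitrary), the
   POISON_KIND_POPPED_STACK case is aimed at code along the lines of:

     int *p;
     void store (void) { int local = 42; p = &local; }
     int test (void) { store (); return *p; }

   where "p" still points into the already-popped frame of "store" when
   it is read within "test".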
*/ 4041 4042 class poisoned_value_diagnostic 4043 : public pending_diagnostic_subclass<poisoned_value_diagnostic> 4044 { 4045 public: 4046 poisoned_value_diagnostic (tree expr, enum poison_kind pkind) 4047 : m_expr (expr), m_pkind (pkind) 4048 {} 4049 4050 const char *get_kind () const FINAL OVERRIDE { return "poisoned_value_diagnostic"; } 4051 4052 bool operator== (const poisoned_value_diagnostic &other) const 4053 { 4054 return m_expr == other.m_expr; 4055 } 4056 4057 bool emit (rich_location *rich_loc) FINAL OVERRIDE 4058 { 4059 switch (m_pkind) 4060 { 4061 default: 4062 gcc_unreachable (); 4063 case POISON_KIND_FREED: 4064 { 4065 diagnostic_metadata m; 4066 m.add_cwe (416); /* "CWE-416: Use After Free". */ 4067 return warning_meta (rich_loc, m, 4068 OPT_Wanalyzer_use_after_free, 4069 "use after %<free%> of %qE", 4070 m_expr); 4071 } 4072 break; 4073 case POISON_KIND_POPPED_STACK: 4074 { 4075 /* TODO: which CWE? */ 4076 return warning_at (rich_loc, 4077 OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame, 4078 "use of pointer %qE within stale stack frame", 4079 m_expr); 4080 } 4081 break; 4082 } 4083 } 4084 4085 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE 4086 { 4087 switch (m_pkind) 4088 { 4089 default: 4090 gcc_unreachable (); 4091 case POISON_KIND_FREED: 4092 return ev.formatted_print ("use after %<free%> of %qE here", 4093 m_expr); 4094 case POISON_KIND_POPPED_STACK: 4095 return ev.formatted_print 4096 ("use of pointer %qE within stale stack frame here", 4097 m_expr); 4098 } 4099 } 4100 4101 private: 4102 tree m_expr; 4103 enum poison_kind m_pkind; 4104 }; 4105 4106 /* Determine if EXPR is poisoned, and if so, queue a diagnostic to CTXT. */ 4107 4108 void 4109 region_model::check_for_poison (tree expr, region_model_context *ctxt) 4110 { 4111 if (!ctxt) 4112 return; 4113 4114 // TODO: this is disabled for now (too many false positives) 4115 return; 4116 4117 svalue_id expr_sid = get_rvalue (expr, ctxt); 4118 gcc_assert (!expr_sid.null_p ()); 4119 svalue *expr_svalue = get_svalue (expr_sid); 4120 gcc_assert (expr_svalue); 4121 if (const poisoned_svalue *poisoned_sval 4122 = expr_svalue->dyn_cast_poisoned_svalue ()) 4123 { 4124 enum poison_kind pkind = poisoned_sval->get_poison_kind (); 4125 ctxt->warn (new poisoned_value_diagnostic (expr, pkind)); 4126 } 4127 } 4128 4129 /* Update this model for the ASSIGN stmt, using CTXT to report any 4130 diagnostics. */ 4131 4132 void 4133 region_model::on_assignment (const gassign *assign, region_model_context *ctxt) 4134 { 4135 tree lhs = gimple_assign_lhs (assign); 4136 tree rhs1 = gimple_assign_rhs1 (assign); 4137 4138 region_id lhs_rid = get_lvalue (lhs, ctxt); 4139 4140 /* Check for uses of poisoned values. 
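     Each operand on the RHS is checked; the cases below intentionally
     fall through, so that a GIMPLE_TERNARY_RHS has operands 3, 2 and 1
     checked, a GIMPLE_BINARY_RHS operands 2 and 1, and so on.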
*/ 4141 switch (get_gimple_rhs_class (gimple_expr_code (assign))) 4142 { 4143 case GIMPLE_INVALID_RHS: 4144 gcc_unreachable (); 4145 break; 4146 case GIMPLE_TERNARY_RHS: 4147 check_for_poison (gimple_assign_rhs3 (assign), ctxt); 4148 /* Fallthru */ 4149 case GIMPLE_BINARY_RHS: 4150 check_for_poison (gimple_assign_rhs2 (assign), ctxt); 4151 /* Fallthru */ 4152 case GIMPLE_UNARY_RHS: 4153 case GIMPLE_SINGLE_RHS: 4154 check_for_poison (gimple_assign_rhs1 (assign), ctxt); 4155 } 4156 4157 if (lhs_rid.null_p ()) 4158 return; 4159 // TODO: issue a warning for this case 4160 4161 enum tree_code op = gimple_assign_rhs_code (assign); 4162 switch (op) 4163 { 4164 default: 4165 { 4166 if (0) 4167 sorry_at (assign->location, "unhandled assignment op: %qs", 4168 get_tree_code_name (op)); 4169 set_to_new_unknown_value (lhs_rid, TREE_TYPE (lhs), ctxt); 4170 } 4171 break; 4172 4173 case BIT_FIELD_REF: 4174 { 4175 // TODO 4176 } 4177 break; 4178 4179 case CONSTRUCTOR: 4180 { 4181 /* e.g. "x ={v} {CLOBBER};" */ 4182 // TODO 4183 } 4184 break; 4185 4186 case POINTER_PLUS_EXPR: 4187 { 4188 /* e.g. "_1 = a_10(D) + 12;" */ 4189 tree ptr = rhs1; 4190 tree offset = gimple_assign_rhs2 (assign); 4191 4192 svalue_id ptr_sid = get_rvalue (ptr, ctxt); 4193 svalue_id offset_sid = get_rvalue (offset, ctxt); 4194 region_id element_rid 4195 = get_or_create_pointer_plus_expr (TREE_TYPE (TREE_TYPE (ptr)), 4196 ptr_sid, offset_sid, 4197 ctxt); 4198 svalue_id element_ptr_sid 4199 = get_or_create_ptr_svalue (TREE_TYPE (ptr), element_rid); 4200 set_value (lhs_rid, element_ptr_sid, ctxt); 4201 } 4202 break; 4203 4204 case POINTER_DIFF_EXPR: 4205 { 4206 /* e.g. "_1 = p_2(D) - q_3(D);". */ 4207 4208 /* TODO. */ 4209 4210 set_to_new_unknown_value (lhs_rid, TREE_TYPE (lhs), ctxt); 4211 } 4212 break; 4213 4214 case ADDR_EXPR: 4215 { 4216 /* LHS = &RHS; */ 4217 svalue_id ptr_sid = get_rvalue (rhs1, ctxt); 4218 set_value (lhs_rid, ptr_sid, ctxt); 4219 } 4220 break; 4221 4222 case MEM_REF: 4223 { 4224 region_id rhs_rid = get_lvalue (rhs1, ctxt); 4225 svalue_id rhs_sid 4226 = get_region (rhs_rid)->get_value (*this, true, ctxt); 4227 set_value (lhs_rid, rhs_sid, ctxt); 4228 } 4229 break; 4230 4231 case REAL_CST: 4232 case INTEGER_CST: 4233 case ARRAY_REF: 4234 { 4235 /* LHS = RHS; */ 4236 svalue_id cst_sid = get_rvalue (rhs1, ctxt); 4237 set_value (lhs_rid, cst_sid, ctxt); 4238 } 4239 break; 4240 4241 case FIX_TRUNC_EXPR: 4242 case FLOAT_EXPR: 4243 case NOP_EXPR: 4244 // cast: TODO 4245 // fall though for now 4246 case SSA_NAME: 4247 case VAR_DECL: 4248 case PARM_DECL: 4249 { 4250 /* LHS = VAR; */ 4251 region_id rhs_rid = get_lvalue (rhs1, ctxt); 4252 copy_region (lhs_rid, rhs_rid, ctxt); 4253 } 4254 break; 4255 4256 case EQ_EXPR: 4257 case GE_EXPR: 4258 case LE_EXPR: 4259 case NE_EXPR: 4260 case GT_EXPR: 4261 case LT_EXPR: 4262 { 4263 tree rhs2 = gimple_assign_rhs2 (assign); 4264 4265 // TODO: constraints between svalues 4266 svalue_id rhs1_sid = get_rvalue (rhs1, ctxt); 4267 svalue_id rhs2_sid = get_rvalue (rhs2, ctxt); 4268 4269 tristate t = eval_condition (rhs1_sid, op, rhs2_sid); 4270 if (t.is_known ()) 4271 set_value (lhs_rid, 4272 get_rvalue (t.is_true () 4273 ? boolean_true_node 4274 : boolean_false_node, 4275 ctxt), 4276 ctxt); 4277 else 4278 set_to_new_unknown_value (lhs_rid, TREE_TYPE (lhs), ctxt); 4279 } 4280 break; 4281 4282 case NEGATE_EXPR: 4283 case BIT_NOT_EXPR: 4284 { 4285 // TODO: unary ops 4286 4287 // TODO: constant? 
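        /* e.g. "_1 = -j_3;" or "_1 = ~j_3;".
           For now, treat these unary ops conservatively, by giving the
           LHS a new unknown value.  */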
4288 4289 set_to_new_unknown_value (lhs_rid, TREE_TYPE (lhs), ctxt); 4290 } 4291 break; 4292 4293 case PLUS_EXPR: 4294 case MINUS_EXPR: 4295 case MULT_EXPR: 4296 case TRUNC_DIV_EXPR: 4297 case TRUNC_MOD_EXPR: 4298 case LSHIFT_EXPR: 4299 case RSHIFT_EXPR: 4300 case BIT_IOR_EXPR: 4301 case BIT_XOR_EXPR: 4302 case BIT_AND_EXPR: 4303 case MIN_EXPR: 4304 case MAX_EXPR: 4305 { 4306 /* Binary ops. */ 4307 tree rhs2 = gimple_assign_rhs2 (assign); 4308 4309 svalue_id rhs1_sid = get_rvalue (rhs1, ctxt); 4310 svalue_id rhs2_sid = get_rvalue (rhs2, ctxt); 4311 4312 if (tree rhs1_cst = maybe_get_constant (rhs1_sid)) 4313 if (tree rhs2_cst = maybe_get_constant (rhs2_sid)) 4314 { 4315 tree result = fold_binary (op, TREE_TYPE (lhs), 4316 rhs1_cst, rhs2_cst); 4317 if (result && CONSTANT_CLASS_P (result)) 4318 { 4319 svalue_id result_sid 4320 = get_or_create_constant_svalue (result); 4321 set_value (lhs_rid, result_sid, ctxt); 4322 return; 4323 } 4324 } 4325 set_to_new_unknown_value (lhs_rid, TREE_TYPE (lhs), ctxt); 4326 } 4327 break; 4328 4329 case COMPONENT_REF: 4330 { 4331 /* LHS = op0.op1; */ 4332 region_id child_rid = get_lvalue (rhs1, ctxt); 4333 svalue_id child_sid 4334 = get_region (child_rid)->get_value (*this, true, ctxt); 4335 set_value (lhs_rid, child_sid, ctxt); 4336 } 4337 break; 4338 } 4339 } 4340 4341 /* Update this model for the CALL stmt, using CTXT to report any 4342 diagnostics - the first half. 4343 4344 Updates to the region_model that should be made *before* sm-states 4345 are updated are done here; other updates to the region_model are done 4346 in region_model::on_call_post. 4347 4348 Return true if the function call has unknown side effects (it wasn't 4349 recognized and we don't have a body for it, or are unable to tell which 4350 fndecl it is). */ 4351 4352 bool 4353 region_model::on_call_pre (const gcall *call, region_model_context *ctxt) 4354 { 4355 region_id lhs_rid; 4356 tree lhs_type = NULL_TREE; 4357 if (tree lhs = gimple_call_lhs (call)) 4358 { 4359 lhs_rid = get_lvalue (lhs, ctxt); 4360 lhs_type = TREE_TYPE (lhs); 4361 } 4362 4363 /* Check for uses of poisoned values. 4364 For now, special-case "free", to avoid warning about "use-after-free" 4365 when "double free" would be more precise. */ 4366 if (!is_special_named_call_p (call, "free", 1)) 4367 for (unsigned i = 0; i < gimple_call_num_args (call); i++) 4368 check_for_poison (gimple_call_arg (call, i), ctxt); 4369 4370 bool unknown_side_effects = false; 4371 4372 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt)) 4373 { 4374 if (is_named_call_p (callee_fndecl, "malloc", call, 1)) 4375 { 4376 // TODO: capture size as a svalue? 
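          /* Model the allocation by creating a new region within the heap
             and, if the result of the call is used, pointing the LHS at it.
             The requested size is not yet captured (see the TODO above).  */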
4377 region_id new_rid = add_new_malloc_region (); 4378 if (!lhs_rid.null_p ()) 4379 { 4380 svalue_id ptr_sid 4381 = get_or_create_ptr_svalue (lhs_type, new_rid); 4382 set_value (lhs_rid, ptr_sid, ctxt); 4383 } 4384 return false; 4385 } 4386 else if (is_named_call_p (callee_fndecl, "__builtin_alloca", call, 1)) 4387 { 4388 region_id frame_rid = get_current_frame_id (); 4389 region_id new_rid 4390 = add_region (new symbolic_region (frame_rid, NULL_TREE, false)); 4391 if (!lhs_rid.null_p ()) 4392 { 4393 svalue_id ptr_sid 4394 = get_or_create_ptr_svalue (lhs_type, new_rid); 4395 set_value (lhs_rid, ptr_sid, ctxt); 4396 } 4397 return false; 4398 } 4399 else if (gimple_call_builtin_p (call, BUILT_IN_EXPECT) 4400 || gimple_call_builtin_p (call, BUILT_IN_EXPECT_WITH_PROBABILITY) 4401 || gimple_call_internal_p (call, IFN_BUILTIN_EXPECT)) 4402 { 4403 /* __builtin_expect's return value is its initial argument. */ 4404 if (!lhs_rid.null_p ()) 4405 { 4406 tree initial_arg = gimple_call_arg (call, 0); 4407 svalue_id sid = get_rvalue (initial_arg, ctxt); 4408 set_value (lhs_rid, sid, ctxt); 4409 } 4410 return false; 4411 } 4412 else if (is_named_call_p (callee_fndecl, "strlen", call, 1)) 4413 { 4414 region_id buf_rid = deref_rvalue (gimple_call_arg (call, 0), ctxt); 4415 svalue_id buf_sid 4416 = get_region (buf_rid)->get_value (*this, true, ctxt); 4417 if (tree cst_expr = maybe_get_constant (buf_sid)) 4418 { 4419 if (TREE_CODE (cst_expr) == STRING_CST 4420 && !lhs_rid.null_p ()) 4421 { 4422 /* TREE_STRING_LENGTH is sizeof, not strlen. */ 4423 int sizeof_cst = TREE_STRING_LENGTH (cst_expr); 4424 int strlen_cst = sizeof_cst - 1; 4425 tree t_cst = build_int_cst (lhs_type, strlen_cst); 4426 svalue_id result_sid 4427 = get_or_create_constant_svalue (t_cst); 4428 set_value (lhs_rid, result_sid, ctxt); 4429 return false; 4430 } 4431 } 4432 /* Otherwise an unknown value. */ 4433 } 4434 else if (is_named_call_p (callee_fndecl, 4435 "__analyzer_dump_num_heap_regions", call, 0)) 4436 { 4437 /* Handle the builtin "__analyzer_dump_num_heap_regions" by emitting 4438 a warning (for use in DejaGnu tests). */ 4439 int num_heap_regions = 0; 4440 region_id heap_rid = get_root_region ()->ensure_heap_region (this); 4441 unsigned i; 4442 region *region; 4443 FOR_EACH_VEC_ELT (m_regions, i, region) 4444 if (region->get_parent () == heap_rid) 4445 num_heap_regions++; 4446 /* Use quotes to ensure the output isn't truncated. */ 4447 warning_at (call->location, 0, 4448 "num heap regions: %qi", num_heap_regions); 4449 return false; 4450 } 4451 else if (!fndecl_has_gimple_body_p (callee_fndecl) 4452 && !DECL_PURE_P (callee_fndecl)) 4453 unknown_side_effects = true; 4454 } 4455 else 4456 unknown_side_effects = true; 4457 4458 /* Unknown return value. */ 4459 if (!lhs_rid.null_p ()) 4460 set_to_new_unknown_value (lhs_rid, lhs_type, ctxt); 4461 4462 return unknown_side_effects; 4463 } 4464 4465 /* Update this model for the CALL stmt, using CTXT to report any 4466 diagnostics - the second half. 4467 4468 Updates to the region_model that should be made *after* sm-states 4469 are updated are done here; other updates to the region_model are done 4470 in region_model::on_call_pre. 4471 4472 If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call 4473 to purge state. */ 4474 4475 void 4476 region_model::on_call_post (const gcall *call, 4477 bool unknown_side_effects, 4478 region_model_context *ctxt) 4479 { 4480 /* Update for "free" here, after sm-handling. 
4481 4482 If the ptr points to an underlying heap region, delete the region, 4483 poisoning pointers to it and regions within it. 4484 4485 We delay this until after sm-state has been updated so that the 4486 sm-handling can transition all of the various casts of the pointer 4487 to a "freed" state *before* we delete the related region here. 4488 4489 This has to be done here so that the sm-handling can use the fact 4490 that they point to the same region to establish that they are equal 4491 (in region_model::eval_condition_without_cm), and thus transition 4492 all pointers to the region to the "freed" state together, regardless 4493 of casts. */ 4494 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt)) 4495 if (is_named_call_p (callee_fndecl, "free", call, 1)) 4496 { 4497 tree ptr = gimple_call_arg (call, 0); 4498 svalue_id ptr_sid = get_rvalue (ptr, ctxt); 4499 svalue *ptr_sval = get_svalue (ptr_sid); 4500 if (region_svalue *ptr_to_region_sval 4501 = ptr_sval->dyn_cast_region_svalue ()) 4502 { 4503 /* If the ptr points to an underlying heap region, delete it, 4504 poisoning pointers. */ 4505 region_id pointee_rid = ptr_to_region_sval->get_pointee (); 4506 region_id heap_rid = get_root_region ()->ensure_heap_region (this); 4507 if (!pointee_rid.null_p () 4508 && get_region (pointee_rid)->get_parent () == heap_rid) 4509 { 4510 purge_stats stats; 4511 delete_region_and_descendents (pointee_rid, 4512 POISON_KIND_FREED, 4513 &stats, ctxt->get_logger ()); 4514 purge_unused_svalues (&stats, ctxt); 4515 validate (); 4516 // TODO: do anything with stats? 4517 } 4518 } 4519 return; 4520 } 4521 4522 if (unknown_side_effects) 4523 handle_unrecognized_call (call, ctxt); 4524 } 4525 4526 /* Helper class for region_model::handle_unrecognized_call, for keeping 4527 track of all regions that are reachable, and, of those, which are 4528 mutable. */ 4529 4530 class reachable_regions 4531 { 4532 public: 4533 reachable_regions (region_model *model) 4534 : m_model (model), m_reachable_rids (), m_mutable_rids () 4535 {} 4536 4537 /* Lazily mark RID as being reachable, recursively adding regions 4538 reachable from RID. */ 4539 void add (region_id rid, bool is_mutable) 4540 { 4541 gcc_assert (!rid.null_p ()); 4542 4543 unsigned idx = rid.as_int (); 4544 /* Bail out if this region is already in the sets at the IS_MUTABLE 4545 level of mutability. */ 4546 if (!is_mutable && bitmap_bit_p (m_reachable_rids, idx)) 4547 return; 4548 bitmap_set_bit (m_reachable_rids, idx); 4549 4550 if (is_mutable) 4551 { 4552 if (bitmap_bit_p (m_mutable_rids, idx)) 4553 return; 4554 else 4555 bitmap_set_bit (m_mutable_rids, idx); 4556 } 4557 4558 /* If this region's value is a pointer, add the pointee. */ 4559 region *reg = m_model->get_region (rid); 4560 svalue_id sid = reg->get_value_direct (); 4561 svalue *sval = m_model->get_svalue (sid); 4562 if (sval) 4563 if (region_svalue *ptr = sval->dyn_cast_region_svalue ()) 4564 { 4565 region_id pointee_rid = ptr->get_pointee (); 4566 /* Use const-ness of pointer type to affect mutability. */ 4567 bool ptr_is_mutable = true; 4568 if (ptr->get_type () 4569 && TREE_CODE (ptr->get_type ()) == POINTER_TYPE 4570 && TYPE_READONLY (TREE_TYPE (ptr->get_type ()))) 4571 ptr_is_mutable = false; 4572 add (pointee_rid, ptr_is_mutable); 4573 } 4574 4575 /* Add descendents of this region. 
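       These include e.g. the fields of a struct or the elements of an
       array, and are added with the same mutability as this region.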
*/ 4576 region_id_set descendents (m_model); 4577 m_model->get_descendents (rid, &descendents, region_id::null ()); 4578 for (unsigned i = 0; i < m_model->get_num_regions (); i++) 4579 { 4580 region_id iter_rid = region_id::from_int (i); 4581 if (descendents.region_p (iter_rid)) 4582 add (iter_rid, is_mutable); 4583 } 4584 } 4585 4586 bool mutable_p (region_id rid) 4587 { 4588 gcc_assert (!rid.null_p ()); 4589 return bitmap_bit_p (m_mutable_rids, rid.as_int ()); 4590 } 4591 4592 private: 4593 region_model *m_model; 4594 4595 /* The region ids already seen. This has to be an auto_bitmap rather than 4596 an auto_sbitmap as new regions can be created within the model during 4597 the traversal. */ 4598 auto_bitmap m_reachable_rids; 4599 4600 /* The region_ids that can be changed (accessed via non-const pointers). */ 4601 auto_bitmap m_mutable_rids; 4602 }; 4603 4604 /* Handle a call CALL to a function with unknown behavior. 4605 4606 Traverse the regions in this model, determining what regions are 4607 reachable from pointer arguments to CALL and from global variables, 4608 recursively. 4609 4610 Set all reachable regions to new unknown values and purge sm-state 4611 from their values, and from values that point to them. */ 4612 4613 void 4614 region_model::handle_unrecognized_call (const gcall *call, 4615 region_model_context *ctxt) 4616 { 4617 tree fndecl = get_fndecl_for_call (call, ctxt); 4618 4619 reachable_regions reachable_regions (this); 4620 4621 /* Determine the reachable regions and their mutability. */ 4622 { 4623 /* Globals. */ 4624 region_id globals_rid = get_globals_region_id (); 4625 if (!globals_rid.null_p ()) 4626 reachable_regions.add (globals_rid, true); 4627 4628 /* Params that are pointers. */ 4629 tree iter_param_types = NULL_TREE; 4630 if (fndecl) 4631 iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl)); 4632 for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++) 4633 { 4634 /* Track expected param type, where available. */ 4635 tree param_type = NULL_TREE; 4636 if (iter_param_types) 4637 { 4638 param_type = TREE_VALUE (iter_param_types); 4639 gcc_assert (param_type); 4640 iter_param_types = TREE_CHAIN (iter_param_types); 4641 } 4642 4643 tree parm = gimple_call_arg (call, arg_idx); 4644 svalue_id parm_sid = get_rvalue (parm, ctxt); 4645 svalue *parm_sval = get_svalue (parm_sid); 4646 if (parm_sval) 4647 if (region_svalue *parm_ptr = parm_sval->dyn_cast_region_svalue ()) 4648 { 4649 region_id pointee_rid = parm_ptr->get_pointee (); 4650 bool is_mutable = true; 4651 if (param_type 4652 && TREE_CODE (param_type) == POINTER_TYPE 4653 && TYPE_READONLY (TREE_TYPE (param_type))) 4654 is_mutable = false; 4655 reachable_regions.add (pointee_rid, is_mutable); 4656 } 4657 // FIXME: what about compound parms that contain ptrs? 4658 } 4659 } 4660 4661 /* OK: we now have all reachable regions. 4662 Set them all to new unknown values. */ 4663 for (unsigned i = 0; i < get_num_regions (); i++) 4664 { 4665 region_id iter_rid = region_id::from_int (i); 4666 if (reachable_regions.mutable_p (iter_rid)) 4667 { 4668 region *reg = get_region (iter_rid); 4669 4670 /* Purge any sm-state for any underlying svalue. */ 4671 svalue_id curr_sid = reg->get_value_direct (); 4672 if (!curr_sid.null_p ()) 4673 ctxt->on_unknown_change (curr_sid); 4674 4675 set_to_new_unknown_value (iter_rid, 4676 reg->get_type (), 4677 ctxt); 4678 } 4679 } 4680 4681 /* Purge sm-state for any remaining svalues that point to regions that 4682 were reachable. 
This helps suppress leak false-positives. 4683 4684 For example, if we had a malloc call that was cast to a "foo *" type, 4685 we could have a temporary void * for the result of malloc which has its 4686 own svalue, not reachable from the function call, but for which the 4687 "foo *" svalue was reachable. If we don't purge it, the temporary will 4688 be reported as a leak. */ 4689 int i; 4690 svalue *svalue; 4691 FOR_EACH_VEC_ELT (m_svalues, i, svalue) 4692 if (region_svalue *ptr = svalue->dyn_cast_region_svalue ()) 4693 { 4694 region_id pointee_rid = ptr->get_pointee (); 4695 if (reachable_regions.mutable_p (pointee_rid)) 4696 ctxt->on_unknown_change (svalue_id::from_int (i)); 4697 } 4698 4699 validate (); 4700 } 4701 4702 /* Update this model for the RETURN_STMT, using CTXT to report any 4703 diagnostics. */ 4704 4705 void 4706 region_model::on_return (const greturn *return_stmt, region_model_context *ctxt) 4707 { 4708 tree callee = get_current_function ()->decl; 4709 tree lhs = DECL_RESULT (callee); 4710 tree rhs = gimple_return_retval (return_stmt); 4711 4712 if (lhs && rhs) 4713 copy_region (get_lvalue (lhs, ctxt), get_lvalue (rhs, ctxt), ctxt); 4714 } 4715 4716 /* Update this model for a call and return of setjmp/sigsetjmp at CALL within 4717 ENODE, using CTXT to report any diagnostics. 4718 4719 This is for the initial direct invocation of setjmp/sigsetjmp (which returns 4720 0), as opposed to any second return due to longjmp/sigsetjmp. */ 4721 4722 void 4723 region_model::on_setjmp (const gcall *call, const exploded_node *enode, 4724 region_model_context *ctxt) 4725 { 4726 region_id buf_rid = deref_rvalue (gimple_call_arg (call, 0), ctxt); 4727 region *buf = get_region (buf_rid); 4728 4729 /* Create a setjmp_svalue for this call and store it in BUF_RID's region. */ 4730 if (buf) 4731 { 4732 setjmp_record r (enode, call); 4733 svalue *sval = new setjmp_svalue (r, buf->get_type ()); 4734 svalue_id new_sid = add_svalue (sval); 4735 set_value (buf_rid, new_sid, ctxt); 4736 } 4737 4738 /* Direct calls to setjmp return 0. */ 4739 if (tree lhs = gimple_call_lhs (call)) 4740 { 4741 tree zero = build_int_cst (TREE_TYPE (lhs), 0); 4742 svalue_id new_sid = get_or_create_constant_svalue (zero); 4743 region_id lhs_rid = get_lvalue (lhs, ctxt); 4744 set_value (lhs_rid, new_sid, ctxt); 4745 } 4746 } 4747 4748 /* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL 4749 to a "setjmp" at SETJMP_CALL where the final stack depth should be 4750 SETJMP_STACK_DEPTH. Purge any stack frames, potentially reporting on 4751 leaks to CTXT. */ 4752 4753 void 4754 region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call, 4755 int setjmp_stack_depth, 4756 region_model_context *ctxt) 4757 { 4758 /* Evaluate the val, using the frame of the "longjmp". */ 4759 tree fake_retval = gimple_call_arg (longjmp_call, 1); 4760 svalue_id fake_retval_sid = get_rvalue (fake_retval, ctxt); 4761 4762 /* Pop any frames until we reach the stack depth of the function where 4763 setjmp was called. */ 4764 gcc_assert (get_stack_depth () >= setjmp_stack_depth); 4765 while (get_stack_depth () > setjmp_stack_depth) 4766 { 4767 /* Don't purge unused svalues yet, as we're using fake_retval_sid. */ 4768 pop_frame (region_id::null (), false, NULL, ctxt); 4769 } 4770 4771 gcc_assert (get_stack_depth () == setjmp_stack_depth); 4772 4773 /* Assign to LHS of "setjmp" in new_state. 
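     The value assigned is the "val" argument that was passed to "longjmp",
     except that a zero value is replaced by 1, since "setjmp" returns a
     nonzero value when returning via "longjmp"; otherwise a constraint is
     recorded that the value is nonzero.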
*/ 4774 if (tree lhs = gimple_call_lhs (setjmp_call)) 4775 { 4776 /* Passing 0 as the val to longjmp leads to setjmp returning 1. */ 4777 tree t_zero = build_int_cst (TREE_TYPE (fake_retval), 0); 4778 svalue_id zero_sid = get_or_create_constant_svalue (t_zero); 4779 tristate eq_zero = eval_condition (fake_retval_sid, EQ_EXPR, zero_sid); 4780 /* If we have 0, use 1. */ 4781 if (eq_zero.is_true ()) 4782 { 4783 tree t_one = build_int_cst (TREE_TYPE (fake_retval), 1); 4784 svalue_id one_sid = get_or_create_constant_svalue (t_one); 4785 fake_retval_sid = one_sid; 4786 } 4787 else 4788 { 4789 /* Otherwise note that the value is nonzero. */ 4790 m_constraints->add_constraint (fake_retval_sid, NE_EXPR, zero_sid); 4791 } 4792 4793 region_id lhs_rid = get_lvalue (lhs, ctxt); 4794 set_value (lhs_rid, fake_retval_sid, ctxt); 4795 } 4796 4797 /* Now that we've assigned the fake_retval, we can purge the unused 4798 svalues, which could detect leaks. */ 4799 purge_unused_svalues (NULL, ctxt, NULL); 4800 validate (); 4801 } 4802 4803 /* Update this region_model for a phi stmt of the form 4804 LHS = PHI <...RHS...>. 4805 where RHS is for the appropriate edge. */ 4806 4807 void 4808 region_model::handle_phi (const gphi *phi, 4809 tree lhs, tree rhs, bool is_back_edge, 4810 region_model_context *ctxt) 4811 { 4812 /* For now, don't bother tracking the .MEM SSA names. */ 4813 if (tree var = SSA_NAME_VAR (lhs)) 4814 if (TREE_CODE (var) == VAR_DECL) 4815 if (VAR_DECL_IS_VIRTUAL_OPERAND (var)) 4816 return; 4817 4818 svalue_id rhs_sid = get_rvalue (rhs, ctxt); 4819 4820 if (is_back_edge && get_svalue (rhs_sid)->get_kind () != SK_UNKNOWN) 4821 { 4822 /* If we have a back edge, we probably have a loop. 4823 Use an unknown value, to avoid effectively unrolling the 4824 loop. 4825 To terminate, we need to avoid generating a series of 4826 models with an unbounded monotonically increasing number of 4827 redundant unknown values; hence we need to purge svalues 4828 before inserting the state into the exploded graph, to 4829 collect unused svalues. */ 4830 set_to_new_unknown_value (get_lvalue (lhs, ctxt), TREE_TYPE (lhs), ctxt); 4831 } 4832 else 4833 set_value (get_lvalue (lhs, ctxt), rhs_sid, ctxt); 4834 4835 if (ctxt) 4836 ctxt->on_phi (phi, rhs); 4837 } 4838 4839 /* Implementation of region_model::get_lvalue; the latter adds type-checking. 4840 4841 Get the id of the region for PV within this region_model, 4842 emitting any diagnostics to CTXT. */ 4843 4844 region_id 4845 region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt) 4846 { 4847 tree expr = pv.m_tree; 4848 4849 gcc_assert (expr); 4850 4851 switch (TREE_CODE (expr)) 4852 { 4853 default: 4854 return make_region_for_unexpected_tree_code (ctxt, expr, 4855 dump_location_t ()); 4856 4857 case ARRAY_REF: 4858 { 4859 tree array = TREE_OPERAND (expr, 0); 4860 tree index = TREE_OPERAND (expr, 1); 4861 #if 0 4862 // TODO: operands 2 and 3, if present: 4863 gcc_assert (TREE_OPERAND (expr, 2) == NULL_TREE); 4864 gcc_assert (TREE_OPERAND (expr, 3) == NULL_TREE); 4865 #endif 4866 4867 region_id array_rid = get_lvalue (array, ctxt); 4868 svalue_id index_sid = get_rvalue (index, ctxt); 4869 region *base_array_reg = get_region (array_rid); 4870 array_region *array_reg = base_array_reg->dyn_cast_array_region (); 4871 if (!array_reg) 4872 { 4873 /* Normally, array_rid ought to refer to an array_region, since 4874 array's type will be ARRAY_TYPE. However, if we have an 4875 unexpected tree code for array, we could have a 4876 symbolic_region here. 
If so, we're in error-handling. */ 4877 gcc_assert (base_array_reg->get_type () == NULL_TREE); 4878 return make_region_for_unexpected_tree_code (ctxt, expr, 4879 dump_location_t ()); 4880 } 4881 return array_reg->get_element (this, array_rid, index_sid, ctxt); 4882 } 4883 break; 4884 4885 case BIT_FIELD_REF: 4886 { 4887 /* For now, create a view, as if a cast, ignoring the bit positions. */ 4888 tree obj = TREE_OPERAND (expr, 0); 4889 return get_or_create_view (get_lvalue (obj, ctxt), TREE_TYPE (expr), 4890 ctxt); 4891 }; 4892 break; 4893 4894 case MEM_REF: 4895 { 4896 tree ptr = TREE_OPERAND (expr, 0); 4897 tree offset = TREE_OPERAND (expr, 1); 4898 svalue_id ptr_sid = get_rvalue (ptr, ctxt); 4899 svalue_id offset_sid = get_rvalue (offset, ctxt); 4900 return get_or_create_mem_ref (TREE_TYPE (expr), ptr_sid, 4901 offset_sid, ctxt); 4902 } 4903 break; 4904 4905 case VAR_DECL: 4906 /* Handle globals. */ 4907 if (is_global_var (expr)) 4908 { 4909 region_id globals_rid 4910 = get_root_region ()->ensure_globals_region (this); 4911 map_region *globals = get_region<map_region> (globals_rid); 4912 region_id var_rid = globals->get_or_create (this, globals_rid, expr, 4913 TREE_TYPE (expr), ctxt); 4914 return var_rid; 4915 } 4916 4917 /* Fall through. */ 4918 4919 case SSA_NAME: 4920 case PARM_DECL: 4921 case RESULT_DECL: 4922 { 4923 gcc_assert (TREE_CODE (expr) == SSA_NAME 4924 || TREE_CODE (expr) == PARM_DECL 4925 || TREE_CODE (expr) == VAR_DECL 4926 || TREE_CODE (expr) == RESULT_DECL); 4927 4928 int stack_depth = pv.m_stack_depth; 4929 stack_region *stack = get_root_region ()->get_stack_region (this); 4930 gcc_assert (stack); 4931 region_id frame_rid = stack->get_frame_rid (stack_depth); 4932 frame_region *frame = get_region <frame_region> (frame_rid); 4933 gcc_assert (frame); 4934 region_id child_rid = frame->get_or_create (this, frame_rid, expr, 4935 TREE_TYPE (expr), ctxt); 4936 return child_rid; 4937 } 4938 4939 case COMPONENT_REF: 4940 { 4941 /* obj.field */ 4942 tree obj = TREE_OPERAND (expr, 0); 4943 tree field = TREE_OPERAND (expr, 1); 4944 tree obj_type = TREE_TYPE (obj); 4945 if (TREE_CODE (obj_type) != RECORD_TYPE 4946 && TREE_CODE (obj_type) != UNION_TYPE) 4947 return make_region_for_unexpected_tree_code (ctxt, obj_type, 4948 dump_location_t ()); 4949 region_id obj_rid = get_lvalue (obj, ctxt); 4950 region_id struct_or_union_rid 4951 = get_or_create_view (obj_rid, TREE_TYPE (obj), ctxt); 4952 return get_field_region (struct_or_union_rid, field, ctxt); 4953 } 4954 break; 4955 4956 case CONST_DECL: 4957 { 4958 tree cst_type = TREE_TYPE (expr); 4959 region_id cst_rid = add_region_for_type (m_root_rid, cst_type, ctxt); 4960 if (tree value = DECL_INITIAL (expr)) 4961 { 4962 svalue_id sid = get_rvalue (value, ctxt); 4963 get_region (cst_rid)->set_value (*this, cst_rid, sid, ctxt); 4964 } 4965 return cst_rid; 4966 } 4967 break; 4968 4969 case STRING_CST: 4970 { 4971 tree cst_type = TREE_TYPE (expr); 4972 array_region *cst_region = new array_region (m_root_rid, cst_type); 4973 region_id cst_rid = add_region (cst_region); 4974 svalue_id cst_sid = get_or_create_constant_svalue (expr); 4975 cst_region->set_value (*this, cst_rid, cst_sid, ctxt); 4976 return cst_rid; 4977 } 4978 break; 4979 4980 case NOP_EXPR: 4981 case VIEW_CONVERT_EXPR: 4982 { 4983 tree obj = TREE_OPERAND (expr, 0); 4984 return get_or_create_view (get_lvalue (obj, ctxt), TREE_TYPE (expr), 4985 ctxt); 4986 }; 4987 break; 4988 } 4989 } 4990 4991 /* If we see a tree code we don't know how to handle, rather than 4992 ICE or generate 
bogus results, create a dummy region, and notify 4993 CTXT so that it can mark the new state as being not properly 4994 modelled. The exploded graph can then stop exploring that path, 4995 since any diagnostics we might issue will have questionable 4996 validity. */ 4997 4998 region_id 4999 region_model::make_region_for_unexpected_tree_code (region_model_context *ctxt, 5000 tree t, 5001 const dump_location_t &loc) 5002 { 5003 gcc_assert (ctxt); 5004 region_id new_rid 5005 = add_region (new symbolic_region (m_root_rid, NULL_TREE, false)); 5006 ctxt->on_unexpected_tree_code (t, loc); 5007 return new_rid; 5008 } 5009 5010 /* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. */ 5011 5012 static void 5013 assert_compat_types (tree src_type, tree dst_type) 5014 { 5015 if (src_type && dst_type && !VOID_TYPE_P (dst_type)) 5016 gcc_checking_assert (useless_type_conversion_p (src_type, dst_type)); 5017 } 5018 5019 /* Get the id of the region for PV within this region_model, 5020 emitting any diagnostics to CTXT. */ 5021 5022 region_id 5023 region_model::get_lvalue (path_var pv, region_model_context *ctxt) 5024 { 5025 if (pv.m_tree == NULL_TREE) 5026 return region_id::null (); 5027 5028 region_id result_rid = get_lvalue_1 (pv, ctxt); 5029 assert_compat_types (get_region (result_rid)->get_type (), 5030 TREE_TYPE (pv.m_tree)); 5031 return result_rid; 5032 } 5033 5034 /* Get the region_id for EXPR within this region_model (assuming the most 5035 recent stack frame if it's a local). */ 5036 5037 region_id 5038 region_model::get_lvalue (tree expr, region_model_context *ctxt) 5039 { 5040 return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt); 5041 } 5042 5043 /* Implementation of region_model::get_rvalue; the latter adds type-checking. 5044 5045 Get the value of PV within this region_model, 5046 emitting any diagnostics to CTXT. */ 5047 5048 svalue_id 5049 region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt) 5050 { 5051 gcc_assert (pv.m_tree); 5052 5053 switch (TREE_CODE (pv.m_tree)) 5054 { 5055 default: 5056 { 5057 svalue *unknown_sval = new unknown_svalue (TREE_TYPE (pv.m_tree)); 5058 return add_svalue (unknown_sval); 5059 } 5060 break; 5061 5062 case ADDR_EXPR: 5063 { 5064 /* "&EXPR". */ 5065 tree expr = pv.m_tree; 5066 tree op0 = TREE_OPERAND (expr, 0); 5067 if (TREE_CODE (op0) == FUNCTION_DECL) 5068 return get_svalue_for_fndecl (TREE_TYPE (expr), op0, ctxt); 5069 else if (TREE_CODE (op0) == LABEL_DECL) 5070 return get_svalue_for_label (TREE_TYPE (expr), op0, ctxt); 5071 region_id expr_rid = get_lvalue (op0, ctxt); 5072 return get_or_create_ptr_svalue (TREE_TYPE (expr), expr_rid); 5073 } 5074 break; 5075 5076 case ARRAY_REF: 5077 { 5078 region_id element_rid = get_lvalue (pv, ctxt); 5079 return get_region (element_rid)->get_value (*this, true, ctxt); 5080 } 5081 5082 case INTEGER_CST: 5083 case REAL_CST: 5084 case STRING_CST: 5085 return get_or_create_constant_svalue (pv.m_tree); 5086 5087 case COMPONENT_REF: 5088 case MEM_REF: 5089 case SSA_NAME: 5090 case VAR_DECL: 5091 case PARM_DECL: 5092 case RESULT_DECL: 5093 { 5094 region_id var_rid = get_lvalue (pv, ctxt); 5095 return get_region (var_rid)->get_value (*this, true, ctxt); 5096 } 5097 } 5098 } 5099 5100 /* Get the value of PV within this region_model, 5101 emitting any diagnostics to CTXT. 
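   The type of the resulting svalue is checked for compatibility with the
   type of PV's tree.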
*/ 5102 5103 svalue_id 5104 region_model::get_rvalue (path_var pv, region_model_context *ctxt) 5105 { 5106 if (pv.m_tree == NULL_TREE) 5107 return svalue_id::null (); 5108 svalue_id result_sid = get_rvalue_1 (pv, ctxt); 5109 5110 assert_compat_types (get_svalue (result_sid)->get_type (), 5111 TREE_TYPE (pv.m_tree)); 5112 5113 return result_sid; 5114 } 5115 5116 /* Get the value of EXPR within this region_model (assuming the most 5117 recent stack frame if it's a local). */ 5118 5119 svalue_id 5120 region_model::get_rvalue (tree expr, region_model_context *ctxt) 5121 { 5122 return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt); 5123 } 5124 5125 /* Return an svalue_id for a pointer to RID of type PTR_TYPE, reusing 5126 existing pointer values if one is available. */ 5127 5128 svalue_id 5129 region_model::get_or_create_ptr_svalue (tree ptr_type, region_id rid) 5130 { 5131 /* Reuse existing region_svalue, if one of the right type is 5132 available. */ 5133 /* In theory we could stash a svalue_id in "region", but differing 5134 pointer types muddles things. 5135 For now, just do a linear search through all existing svalues. */ 5136 int i; 5137 svalue *svalue; 5138 FOR_EACH_VEC_ELT (m_svalues, i, svalue) 5139 if (region_svalue *ptr_svalue = svalue->dyn_cast_region_svalue ()) 5140 if (ptr_svalue->get_pointee () == rid 5141 && ptr_svalue->get_type () == ptr_type) 5142 return svalue_id::from_int (i); 5143 5144 return add_svalue (new region_svalue (ptr_type, rid)); 5145 } 5146 5147 /* Return an svalue_id for a constant_svalue for CST_EXPR, 5148 creating the constant_svalue if necessary. 5149 The constant_svalue instances are reused, based on pointer equality 5150 of trees */ 5151 5152 svalue_id 5153 region_model::get_or_create_constant_svalue (tree cst_expr) 5154 { 5155 gcc_assert (cst_expr); 5156 5157 /* Reuse one if it already exists. */ 5158 // TODO: maybe store a map, rather than do linear search? 5159 int i; 5160 svalue *svalue; 5161 FOR_EACH_VEC_ELT (m_svalues, i, svalue) 5162 if (svalue->maybe_get_constant () == cst_expr) 5163 return svalue_id::from_int (i); 5164 5165 svalue_id cst_sid = add_svalue (new constant_svalue (cst_expr)); 5166 return cst_sid; 5167 } 5168 5169 /* Return an svalue_id for a region_svalue for FNDECL, 5170 creating the function_region if necessary. */ 5171 5172 svalue_id 5173 region_model::get_svalue_for_fndecl (tree ptr_type, tree fndecl, 5174 region_model_context *ctxt) 5175 { 5176 gcc_assert (TREE_CODE (fndecl) == FUNCTION_DECL); 5177 region_id function_rid = get_region_for_fndecl (fndecl, ctxt); 5178 return get_or_create_ptr_svalue (ptr_type, function_rid); 5179 } 5180 5181 /* Return a region_id for a function_region for FNDECL, 5182 creating it if necessary. */ 5183 5184 region_id 5185 region_model::get_region_for_fndecl (tree fndecl, 5186 region_model_context *ctxt) 5187 { 5188 gcc_assert (TREE_CODE (fndecl) == FUNCTION_DECL); 5189 5190 region_id code_rid = get_root_region ()->ensure_code_region (this); 5191 code_region *code = get_root_region ()->get_code_region (this); 5192 5193 return code->get_or_create (this, code_rid, fndecl, TREE_TYPE (fndecl), 5194 ctxt); 5195 } 5196 5197 /* Return an svalue_id for a region_svalue for LABEL, 5198 creating the label_region if necessary. 
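   Label addresses arise e.g. via GCC's labels-as-values extension
   ("&&label"), used for computed gotos.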
*/ 5199 5200 svalue_id 5201 region_model::get_svalue_for_label (tree ptr_type, tree label, 5202 region_model_context *ctxt) 5203 { 5204 gcc_assert (TREE_CODE (label) == LABEL_DECL); 5205 region_id label_rid = get_region_for_label (label, ctxt); 5206 return get_or_create_ptr_svalue (ptr_type, label_rid); 5207 } 5208 5209 /* Return a region_id for a label_region for LABEL, 5210 creating it if necessary. */ 5211 5212 region_id 5213 region_model::get_region_for_label (tree label, 5214 region_model_context *ctxt) 5215 { 5216 gcc_assert (TREE_CODE (label) == LABEL_DECL); 5217 5218 tree fndecl = DECL_CONTEXT (label); 5219 gcc_assert (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL); 5220 5221 region_id func_rid = get_region_for_fndecl (fndecl, ctxt); 5222 function_region *func_reg = get_region <function_region> (func_rid); 5223 return func_reg->get_or_create (this, func_rid, label, TREE_TYPE (label), 5224 ctxt); 5225 } 5226 5227 /* Build a cast of SRC_EXPR to DST_TYPE, or return NULL_TREE. 5228 5229 Adapted from gcc::jit::playback::context::build_cast, which in turn is 5230 adapted from 5231 - c/c-typeck.c:build_c_cast 5232 - c/c-convert.c: convert 5233 - convert.h 5234 Only some kinds of cast are currently supported here. */ 5235 5236 static tree 5237 build_cast (tree dst_type, tree src_expr) 5238 { 5239 tree result = targetm.convert_to_type (dst_type, src_expr); 5240 if (result) 5241 return result; 5242 enum tree_code dst_code = TREE_CODE (dst_type); 5243 switch (dst_code) 5244 { 5245 case INTEGER_TYPE: 5246 case ENUMERAL_TYPE: 5247 result = convert_to_integer (dst_type, src_expr); 5248 goto maybe_fold; 5249 5250 case BOOLEAN_TYPE: 5251 /* Compare with c_objc_common_truthvalue_conversion and 5252 c_common_truthvalue_conversion. */ 5253 /* For now, convert to: (src_expr != 0) */ 5254 result = build2 (NE_EXPR, dst_type, 5255 src_expr, 5256 build_int_cst (TREE_TYPE (src_expr), 0)); 5257 goto maybe_fold; 5258 5259 case REAL_TYPE: 5260 result = convert_to_real (dst_type, src_expr); 5261 goto maybe_fold; 5262 5263 case POINTER_TYPE: 5264 result = build1 (NOP_EXPR, dst_type, src_expr); 5265 goto maybe_fold; 5266 5267 default: 5268 return NULL_TREE; 5269 5270 maybe_fold: 5271 if (TREE_CODE (result) != C_MAYBE_CONST_EXPR) 5272 result = fold (result); 5273 return result; 5274 } 5275 } 5276 5277 /* If the type of SID's underlying value is DST_TYPE, return SID. 5278 Otherwise, attempt to create (or reuse) an svalue representing an access 5279 of SID as a DST_TYPE and return that value's svalue_id. */ 5280 5281 svalue_id 5282 region_model::maybe_cast_1 (tree dst_type, svalue_id sid) 5283 { 5284 svalue *sval = get_svalue (sid); 5285 tree src_type = sval->get_type (); 5286 if (src_type == dst_type) 5287 return sid; 5288 5289 if (POINTER_TYPE_P (dst_type) 5290 || POINTER_TYPE_P (src_type)) 5291 { 5292 /* Pointer to region. */ 5293 if (region_svalue *ptr_sval = sval->dyn_cast_region_svalue ()) 5294 return get_or_create_ptr_svalue (dst_type, ptr_sval->get_pointee ()); 5295 5296 /* Unknown pointer? Get or create a new unknown pointer of the 5297 correct type, preserving the equality between the pointers. */ 5298 if (sval->dyn_cast_unknown_svalue ()) 5299 { 5300 equiv_class &ec = m_constraints->get_equiv_class (sid); 5301 5302 /* Look for an existing pointer of the correct type within the EC. 
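         This way, repeated casts of the same unknown pointer to a given
         type reuse a single svalue per type, rather than creating a fresh
         svalue for each cast.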
*/ 5303 int i; 5304 svalue_id *equiv_sid; 5305 FOR_EACH_VEC_ELT (ec.m_vars, i, equiv_sid) 5306 { 5307 svalue *equiv_val = get_svalue (*equiv_sid); 5308 if (equiv_val->get_type () == dst_type) 5309 return *equiv_sid; 5310 } 5311 5312 /* Otherwise, create a new unknown pointer of the correct type. */ 5313 svalue *unknown_sval = new unknown_svalue (dst_type); 5314 svalue_id new_ptr_sid = add_svalue (unknown_sval); 5315 m_constraints->add_constraint (sid, EQ_EXPR, new_ptr_sid); 5316 return new_ptr_sid; 5317 } 5318 } 5319 5320 /* Attempt to cast constants. */ 5321 if (tree src_cst = sval->maybe_get_constant ()) 5322 { 5323 if (tree dst = build_cast (dst_type, src_cst)) 5324 if (CONSTANT_CLASS_P (dst)) 5325 return get_or_create_constant_svalue (dst); 5326 } 5327 5328 /* Otherwise, return a new unknown value. */ 5329 svalue *unknown_sval = new unknown_svalue (dst_type); 5330 return add_svalue (unknown_sval); 5331 } 5332 5333 /* If the type of SID's underlying value is DST_TYPE, return SID. 5334 Otherwise, attempt to create (or reuse) an svalue representing an access 5335 of SID as a DST_TYPE and return that value's svalue_id. 5336 5337 If the result != SID, then call CTXT's on_cast vfunc (if CTXT is non-NULL), 5338 so that sm-state can be propagated from SID to the result. */ 5339 5340 svalue_id 5341 region_model::maybe_cast (tree dst_type, svalue_id sid, 5342 region_model_context *ctxt) 5343 { 5344 svalue_id result = maybe_cast_1 (dst_type, sid); 5345 if (result != sid) 5346 if (ctxt) 5347 { 5348 /* Notify ctxt about a cast, so any sm-state can be copied. */ 5349 ctxt->on_cast (sid, result); 5350 } 5351 return result; 5352 } 5353 5354 /* Ensure that the region for OBJ_RID has a child region for FIELD; 5355 return the child region's region_id. */ 5356 5357 region_id 5358 region_model::get_field_region (region_id struct_or_union_rid, tree field, 5359 region_model_context *ctxt) 5360 { 5361 struct_or_union_region *sou_reg 5362 = get_region<struct_or_union_region> (struct_or_union_rid); 5363 5364 /* Inherit constness from parent type. */ 5365 const int qual_mask = TYPE_QUAL_CONST; 5366 int sou_quals = TYPE_QUALS (sou_reg->get_type ()) & qual_mask; 5367 tree field_type = TREE_TYPE (field); 5368 tree field_type_with_quals = build_qualified_type (field_type, sou_quals); 5369 5370 // TODO: maybe convert to a vfunc? 5371 if (sou_reg->get_kind () == RK_UNION) 5372 { 5373 /* Union. 5374 Get a view of the union as a whole, with the type of the field. */ 5375 region_id view_rid 5376 = get_or_create_view (struct_or_union_rid, field_type_with_quals, ctxt); 5377 return view_rid; 5378 } 5379 else 5380 { 5381 /* Struct. */ 5382 region_id child_rid 5383 = sou_reg->get_or_create (this, struct_or_union_rid, field, 5384 field_type_with_quals, ctxt); 5385 return child_rid; 5386 } 5387 } 5388 5389 /* Get a region_id for referencing PTR_SID, creating a region if need be, and 5390 potentially generating warnings via CTXT. 
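   In particular, if PTR_SID is a poisoned value (e.g. a pointer within a
   freed region), a poisoned_value_diagnostic is reported to CTXT, and a
   new symbolic region is used for the pointee.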
*/ 5391 5392 region_id 5393 region_model::deref_rvalue (svalue_id ptr_sid, region_model_context *ctxt) 5394 { 5395 gcc_assert (!ptr_sid.null_p ()); 5396 svalue *ptr_svalue = get_svalue (ptr_sid); 5397 gcc_assert (ptr_svalue); 5398 5399 switch (ptr_svalue->get_kind ()) 5400 { 5401 case SK_REGION: 5402 { 5403 region_svalue *region_sval = as_a <region_svalue *> (ptr_svalue); 5404 return region_sval->get_pointee (); 5405 } 5406 5407 case SK_CONSTANT: 5408 goto create_symbolic_region; 5409 5410 case SK_POISONED: 5411 { 5412 if (ctxt) 5413 if (tree ptr = get_representative_tree (ptr_sid)) 5414 { 5415 poisoned_svalue *poisoned_sval 5416 = as_a <poisoned_svalue *> (ptr_svalue); 5417 enum poison_kind pkind = poisoned_sval->get_poison_kind (); 5418 ctxt->warn (new poisoned_value_diagnostic (ptr, pkind)); 5419 } 5420 goto create_symbolic_region; 5421 } 5422 5423 case SK_UNKNOWN: 5424 { 5425 create_symbolic_region: 5426 /* We need a symbolic_region to represent this unknown region. 5427 We don't know if it on the heap, stack, or a global, 5428 so use the root region as parent. */ 5429 region_id new_rid 5430 = add_region (new symbolic_region (m_root_rid, NULL_TREE, false)); 5431 5432 /* We need to write the region back into the pointer, 5433 or we'll get a new, different region each time. 5434 We do this by changing the meaning of ptr_sid, replacing 5435 the unknown value with the ptr to the new region. 5436 We replace the meaning of the ID rather than simply writing 5437 to PTR's lvalue since there could be several places sharing 5438 the same unknown ptr value. */ 5439 svalue *ptr_val 5440 = new region_svalue (ptr_svalue->get_type (), new_rid); 5441 replace_svalue (ptr_sid, ptr_val); 5442 5443 return new_rid; 5444 } 5445 5446 case SK_SETJMP: 5447 goto create_symbolic_region; 5448 } 5449 5450 gcc_unreachable (); 5451 } 5452 5453 /* Get a region_id for referencing PTR, creating a region if need be, and 5454 potentially generating warnings via CTXT. */ 5455 5456 region_id 5457 region_model::deref_rvalue (tree ptr, region_model_context *ctxt) 5458 { 5459 svalue_id ptr_sid = get_rvalue (ptr, ctxt); 5460 return deref_rvalue (ptr_sid, ctxt); 5461 } 5462 5463 /* Set the value of the region given by LHS_RID to the value given 5464 by RHS_SID. */ 5465 5466 void 5467 region_model::set_value (region_id lhs_rid, svalue_id rhs_sid, 5468 region_model_context *ctxt) 5469 { 5470 gcc_assert (!lhs_rid.null_p ()); 5471 gcc_assert (!rhs_sid.null_p ()); 5472 get_region (lhs_rid)->set_value (*this, lhs_rid, rhs_sid, ctxt); 5473 } 5474 5475 /* Set the value of the region given by LHS to the value given 5476 by RHS. */ 5477 5478 void 5479 region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt) 5480 { 5481 region_id lhs_rid = get_lvalue (lhs, ctxt); 5482 svalue_id rhs_sid = get_rvalue (rhs, ctxt); 5483 gcc_assert (!lhs_rid.null_p ()); 5484 gcc_assert (!rhs_sid.null_p ()); 5485 set_value (lhs_rid, rhs_sid, ctxt); 5486 } 5487 5488 /* Determine what is known about the condition "LHS_SID OP RHS_SID" within 5489 this model. */ 5490 5491 tristate 5492 region_model::eval_condition (svalue_id lhs_sid, 5493 enum tree_code op, 5494 svalue_id rhs_sid) const 5495 { 5496 svalue *lhs = get_svalue (lhs_sid); 5497 svalue *rhs = get_svalue (rhs_sid); 5498 5499 /* For now, make no attempt to capture constraints on floating-point 5500 values. 
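     Comparisons involving NaN would need special handling; see the note
     about NaN-handling in eval_condition_without_cm below.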
*/ 5501 if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ())) 5502 || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ()))) 5503 return tristate::unknown (); 5504 5505 tristate ts = eval_condition_without_cm (lhs_sid, op, rhs_sid); 5506 5507 if (ts.is_known ()) 5508 return ts; 5509 5510 /* Otherwise, try constraints. */ 5511 return m_constraints->eval_condition (lhs_sid, op, rhs_sid); 5512 } 5513 5514 /* Determine what is known about the condition "LHS_SID OP RHS_SID" within 5515 this model, without resorting to the constraint_manager. 5516 5517 This is exposed so that impl_region_model_context::on_state_leak can 5518 check for equality part-way through region_model::purge_unused_svalues 5519 without risking creating new ECs. */ 5520 5521 tristate 5522 region_model::eval_condition_without_cm (svalue_id lhs_sid, 5523 enum tree_code op, 5524 svalue_id rhs_sid) const 5525 { 5526 svalue *lhs = get_svalue (lhs_sid); 5527 svalue *rhs = get_svalue (rhs_sid); 5528 gcc_assert (lhs); 5529 gcc_assert (rhs); 5530 5531 /* See what we know based on the values. */ 5532 if (lhs && rhs) 5533 { 5534 /* For now, make no attempt to capture constraints on floating-point 5535 values. */ 5536 if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ())) 5537 || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ()))) 5538 return tristate::unknown (); 5539 5540 if (lhs == rhs) 5541 { 5542 /* If we have the same svalue, then we have equality 5543 (apart from NaN-handling). 5544 TODO: should this definitely be the case for poisoned values? */ 5545 switch (op) 5546 { 5547 case EQ_EXPR: 5548 case GE_EXPR: 5549 case LE_EXPR: 5550 return tristate::TS_TRUE; 5551 5552 case NE_EXPR: 5553 case GT_EXPR: 5554 case LT_EXPR: 5555 return tristate::TS_FALSE; 5556 5557 default: 5558 /* For other ops, use the logic below. */ 5559 break; 5560 } 5561 } 5562 5563 /* If we have a pair of region_svalues, compare them. */ 5564 if (region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ()) 5565 if (region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ()) 5566 { 5567 tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr); 5568 if (res.is_known ()) 5569 return res; 5570 /* Otherwise, only known through constraints. */ 5571 } 5572 5573 /* If we have a pair of constants, compare them. */ 5574 if (constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ()) 5575 if (constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ()) 5576 return constant_svalue::eval_condition (cst_lhs, op, cst_rhs); 5577 5578 /* Handle comparison of a region_svalue against zero. */ 5579 if (region_svalue *ptr = lhs->dyn_cast_region_svalue ()) 5580 if (constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ()) 5581 if (zerop (cst_rhs->get_constant ())) 5582 { 5583 /* A region_svalue is a non-NULL pointer, except in certain 5584 special cases (see the comment for region::non_null_p. */ 5585 region *pointee = get_region (ptr->get_pointee ()); 5586 if (pointee->non_null_p (*this)) 5587 { 5588 switch (op) 5589 { 5590 default: 5591 gcc_unreachable (); 5592 5593 case EQ_EXPR: 5594 case GE_EXPR: 5595 case LE_EXPR: 5596 return tristate::TS_FALSE; 5597 5598 case NE_EXPR: 5599 case GT_EXPR: 5600 case LT_EXPR: 5601 return tristate::TS_TRUE; 5602 } 5603 } 5604 } 5605 } 5606 5607 return tristate::TS_UNKNOWN; 5608 } 5609 5610 /* Attempt to add the constraint "LHS OP RHS" to this region_model. 5611 If it is consistent with existing constraints, add it, and return true. 5612 Return false if it contradicts existing constraints. 
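   Adding a constraint on an SSA_NAME may also add further constraints
   implied by its def stmt; see add_any_constraints_from_ssa_def_stmt.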
5613 Use CTXT for reporting any diagnostics associated with the accesses. */ 5614 5615 bool 5616 region_model::add_constraint (tree lhs, enum tree_code op, tree rhs, 5617 region_model_context *ctxt) 5618 { 5619 /* For now, make no attempt to capture constraints on floating-point 5620 values. */ 5621 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs))) 5622 return true; 5623 5624 svalue_id lhs_sid = get_rvalue (lhs, ctxt); 5625 svalue_id rhs_sid = get_rvalue (rhs, ctxt); 5626 5627 tristate t_cond = eval_condition (lhs_sid, op, rhs_sid); 5628 5629 /* If we already have the condition, do nothing. */ 5630 if (t_cond.is_true ()) 5631 return true; 5632 5633 /* Reject a constraint that would contradict existing knowledge, as 5634 unsatisfiable. */ 5635 if (t_cond.is_false ()) 5636 return false; 5637 5638 /* Store the constraint. */ 5639 m_constraints->add_constraint (lhs_sid, op, rhs_sid); 5640 5641 add_any_constraints_from_ssa_def_stmt (lhs, op, rhs, ctxt); 5642 5643 /* If we now know a symbolic_region is non-NULL, clear its 5644 m_possibly_null. */ 5645 if (zerop (rhs) && op == NE_EXPR) 5646 if (region_svalue *ptr = get_svalue (lhs_sid)->dyn_cast_region_svalue ()) 5647 { 5648 region *pointee = get_region (ptr->get_pointee ()); 5649 if (symbolic_region *sym_reg = pointee->dyn_cast_symbolic_region ()) 5650 sym_reg->m_possibly_null = false; 5651 } 5652 5653 /* Notify the context, if any. This exists so that the state machines 5654 in a program_state can be notified about the condition, and so can 5655 set sm-state for e.g. unchecked->checked, both for cfg-edges, and 5656 when synthesizing constraints as above. */ 5657 if (ctxt) 5658 ctxt->on_condition (lhs, op, rhs); 5659 5660 return true; 5661 } 5662 5663 /* Subroutine of region_model::add_constraint for handling optimized 5664 && and || conditionals. 5665 5666 If we have an SSA_NAME for a boolean compared against 0, 5667 look at anything implied by the def stmt and call add_constraint 5668 for it (which could recurse). 5669 5670 For example, if we have 5671 _1 = p_6 == 0B; 5672 _2 = p_8 == 0B 5673 _3 = _1 | _2 5674 and add the constraint 5675 (_3 == 0), 5676 then the def stmt for _3 implies that _1 and _2 are both false, 5677 and hence we can add the constraints: 5678 p_6 != 0B 5679 p_8 != 0B. */ 5680 5681 void 5682 region_model::add_any_constraints_from_ssa_def_stmt (tree lhs, 5683 enum tree_code op, 5684 tree rhs, 5685 region_model_context *ctxt) 5686 { 5687 if (TREE_CODE (lhs) != SSA_NAME) 5688 return; 5689 5690 if (!zerop (rhs)) 5691 return; 5692 5693 if (op != NE_EXPR && op != EQ_EXPR) 5694 return; 5695 5696 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs); 5697 if (const gassign *assign = dyn_cast<gassign *> (def_stmt)) 5698 add_any_constraints_from_gassign (op, rhs, assign, ctxt); 5699 else if (gcall *call = dyn_cast<gcall *> (def_stmt)) 5700 add_any_constraints_from_gcall (op, rhs, call, ctxt); 5701 } 5702 5703 /* Add any constraints for an SSA_NAME defined by ASSIGN 5704 where the result OP RHS. */ 5705 5706 void 5707 region_model::add_any_constraints_from_gassign (enum tree_code op, 5708 tree rhs, 5709 const gassign *assign, 5710 region_model_context *ctxt) 5711 { 5712 /* We have either 5713 - "LHS != false" (i.e. LHS is true), or 5714 - "LHS == false" (i.e. LHS is false). 
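     Propagate the known truth value of LHS to constraints on the operands
     of ASSIGN, according to its rhs_code.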
*/ 5715 bool is_true = op == NE_EXPR; 5716 5717 enum tree_code rhs_code = gimple_assign_rhs_code (assign); 5718 5719 switch (rhs_code) 5720 { 5721 default: 5722 break; 5723 5724 case NOP_EXPR: 5725 { 5726 add_constraint (gimple_assign_rhs1 (assign), op, rhs, ctxt); 5727 } 5728 break; 5729 5730 case BIT_AND_EXPR: 5731 { 5732 if (is_true) 5733 { 5734 /* ...and "LHS == (rhs1 & rhs2) i.e. "(rhs1 & rhs2)" is true 5735 then both rhs1 and rhs2 must be true. */ 5736 tree rhs1 = gimple_assign_rhs1 (assign); 5737 tree rhs2 = gimple_assign_rhs2 (assign); 5738 add_constraint (rhs1, NE_EXPR, boolean_false_node, ctxt); 5739 add_constraint (rhs2, NE_EXPR, boolean_false_node, ctxt); 5740 } 5741 } 5742 break; 5743 5744 case BIT_IOR_EXPR: 5745 { 5746 if (!is_true) 5747 { 5748 /* ...and "LHS == (rhs1 | rhs2) 5749 i.e. "(rhs1 | rhs2)" is false 5750 then both rhs1 and rhs2 must be false. */ 5751 tree rhs1 = gimple_assign_rhs1 (assign); 5752 tree rhs2 = gimple_assign_rhs2 (assign); 5753 add_constraint (rhs1, EQ_EXPR, boolean_false_node, ctxt); 5754 add_constraint (rhs2, EQ_EXPR, boolean_false_node, ctxt); 5755 } 5756 } 5757 break; 5758 5759 case EQ_EXPR: 5760 case NE_EXPR: 5761 { 5762 /* ...and "LHS == (rhs1 OP rhs2)" 5763 then rhs1 OP rhs2 must have the same logical value as LHS. */ 5764 tree rhs1 = gimple_assign_rhs1 (assign); 5765 tree rhs2 = gimple_assign_rhs2 (assign); 5766 if (!is_true) 5767 rhs_code 5768 = invert_tree_comparison (rhs_code, false /* honor_nans */); 5769 add_constraint (rhs1, rhs_code, rhs2, ctxt); 5770 } 5771 break; 5772 } 5773 } 5774 5775 /* Add any constraints for an SSA_NAME defined by CALL 5776 where the result OP RHS. */ 5777 5778 void 5779 region_model::add_any_constraints_from_gcall (enum tree_code op, 5780 tree rhs, 5781 const gcall *call, 5782 region_model_context *ctxt) 5783 { 5784 if (gimple_call_builtin_p (call, BUILT_IN_EXPECT) 5785 || gimple_call_builtin_p (call, BUILT_IN_EXPECT_WITH_PROBABILITY) 5786 || gimple_call_internal_p (call, IFN_BUILTIN_EXPECT)) 5787 { 5788 /* __builtin_expect's return value is its initial argument. */ 5789 add_constraint (gimple_call_arg (call, 0), op, rhs, ctxt); 5790 } 5791 } 5792 5793 /* Determine what is known about the condition "LHS OP RHS" within 5794 this model. 5795 Use CTXT for reporting any diagnostics associated with the accesses. */ 5796 5797 tristate 5798 region_model::eval_condition (tree lhs, 5799 enum tree_code op, 5800 tree rhs, 5801 region_model_context *ctxt) 5802 { 5803 /* For now, make no attempt to model constraints on floating-point 5804 values. */ 5805 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs))) 5806 return tristate::unknown (); 5807 5808 return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt)); 5809 } 5810 5811 /* If SID is a constant value, return the underlying tree constant. 5812 Otherwise, return NULL_TREE. */ 5813 5814 tree 5815 region_model::maybe_get_constant (svalue_id sid) const 5816 { 5817 gcc_assert (!sid.null_p ()); 5818 svalue *sval = get_svalue (sid); 5819 return sval->maybe_get_constant (); 5820 } 5821 5822 /* Create a new child region of the heap (creating the heap region if 5823 necessary). 5824 Return the region_id of the new child region. */ 5825 5826 region_id 5827 region_model::add_new_malloc_region () 5828 { 5829 region_id heap_rid 5830 = get_root_region ()->ensure_heap_region (this); 5831 return add_region (new symbolic_region (heap_rid, NULL_TREE, true)); 5832 } 5833 5834 /* Attempt to return a tree that represents SID, or return NULL_TREE. 
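   This is used when building diagnostics, to give the user a source-level
   expression (e.g. a local variable, or the address of a string literal)
   that refers to the value.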
*/ 5835 5836 tree 5837 region_model::get_representative_tree (svalue_id sid) const 5838 { 5839 if (sid.null_p ()) 5840 return NULL_TREE; 5841 5842 /* Find the first region that stores the value (e.g. a local) and 5843 generate a representative tree for it. */ 5844 unsigned i; 5845 region *region; 5846 FOR_EACH_VEC_ELT (m_regions, i, region) 5847 if (sid == region->get_value_direct ()) 5848 { 5849 path_var pv = get_representative_path_var (region_id::from_int (i)); 5850 if (pv.m_tree) 5851 return pv.m_tree; 5852 } 5853 5854 /* Handle string literals and various other pointers. */ 5855 svalue *sval = get_svalue (sid); 5856 if (region_svalue *ptr_sval = sval->dyn_cast_region_svalue ()) 5857 { 5858 region_id rid = ptr_sval->get_pointee (); 5859 path_var pv = get_representative_path_var (rid); 5860 if (pv.m_tree) 5861 return build1 (ADDR_EXPR, 5862 TREE_TYPE (sval->get_type ()), 5863 pv.m_tree); 5864 } 5865 5866 return maybe_get_constant (sid); 5867 } 5868 5869 /* Attempt to return a path_var that represents the region, or return 5870 the NULL path_var. 5871 For example, a region for a field of a local would be a path_var 5872 wrapping a COMPONENT_REF. */ 5873 5874 path_var 5875 region_model::get_representative_path_var (region_id rid) const 5876 { 5877 region *reg = get_region (rid); 5878 region *parent_reg = get_region (reg->get_parent ()); 5879 region_id stack_rid = get_stack_region_id (); 5880 if (!stack_rid.null_p ()) 5881 if (parent_reg && parent_reg->get_parent () == stack_rid) 5882 { 5883 frame_region *parent_frame = (frame_region *)parent_reg; 5884 tree t = parent_frame->get_tree_for_child_region (rid); 5885 return path_var (t, parent_frame->get_depth ()); 5886 } 5887 if (reg->get_parent () == get_globals_region_id ()) 5888 { 5889 map_region *globals = get_root_region ()->get_globals_region (this); 5890 if (globals) 5891 return path_var (globals->get_tree_for_child_region (rid), -1); 5892 } 5893 5894 /* Handle e.g. fields of a local by recursing. */ 5895 region_id parent_rid = reg->get_parent (); 5896 if (parent_reg) 5897 { 5898 if (reg->is_view_p ()) 5899 { 5900 path_var parent_pv = get_representative_path_var (parent_rid); 5901 if (parent_pv.m_tree && reg->get_type ()) 5902 return path_var (build1 (NOP_EXPR, 5903 reg->get_type (), 5904 parent_pv.m_tree), 5905 parent_pv.m_stack_depth); 5906 } 5907 5908 if (parent_reg->get_kind () == RK_STRUCT) 5909 { 5910 map_region *parent_map_region = (map_region *)parent_reg; 5911 /* This can fail if we have a view, rather than a field. */ 5912 if (tree child_key 5913 = parent_map_region->get_tree_for_child_region (rid)) 5914 { 5915 path_var parent_pv = get_representative_path_var (parent_rid); 5916 if (parent_pv.m_tree && TREE_CODE (child_key) == FIELD_DECL) 5917 return path_var (build3 (COMPONENT_REF, 5918 TREE_TYPE (child_key), 5919 parent_pv.m_tree, child_key, 5920 NULL_TREE), 5921 parent_pv.m_stack_depth); 5922 } 5923 } 5924 5925 /* Handle elements within an array. */ 5926 if (array_region *array_reg = parent_reg->dyn_cast_array_region ()) 5927 { 5928 array_region::key_t key; 5929 if (array_reg->get_key_for_child_region (rid, &key)) 5930 { 5931 path_var parent_pv = get_representative_path_var (parent_rid); 5932 if (parent_pv.m_tree && reg->get_type ()) 5933 { 5934 tree index = array_reg->constant_from_key (key); 5935 return path_var (build4 (ARRAY_REF, 5936 reg->get_type (), 5937 parent_pv.m_tree, index, 5938 NULL_TREE, NULL_TREE), 5939 parent_pv.m_stack_depth); 5940 } 5941 } 5942 } 5943 } 5944 5945 /* Handle string literals. 
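     If the region's value is directly a STRING_CST, use that constant
     itself as the representative.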
*/ 5946 svalue_id sid = reg->get_value_direct (); 5947 if (svalue *sval = get_svalue (sid)) 5948 if (tree cst = sval->maybe_get_constant ()) 5949 if (TREE_CODE (cst) == STRING_CST) 5950 return path_var (cst, 0); 5951 5952 return path_var (NULL_TREE, 0); 5953 } 5954 5955 /* Locate all regions that directly have value SID and append representative 5956 path_var instances for them into *OUT. */ 5957 5958 void 5959 region_model::get_path_vars_for_svalue (svalue_id sid, vec<path_var> *out) const 5960 { 5961 unsigned i; 5962 region *region; 5963 FOR_EACH_VEC_ELT (m_regions, i, region) 5964 if (sid == region->get_value_direct ()) 5965 { 5966 path_var pv = get_representative_path_var (region_id::from_int (i)); 5967 if (pv.m_tree) 5968 out->safe_push (pv); 5969 } 5970 } 5971 5972 /* Set DST_RID value to be a new unknown value of type TYPE. */ 5973 5974 svalue_id 5975 region_model::set_to_new_unknown_value (region_id dst_rid, tree type, 5976 region_model_context *ctxt) 5977 { 5978 gcc_assert (!dst_rid.null_p ()); 5979 svalue_id new_sid = add_svalue (new unknown_svalue (type)); 5980 set_value (dst_rid, new_sid, ctxt); 5981 5982 // TODO: presumably purge all child regions too (but do this in set_value?) 5983 5984 return new_sid; 5985 } 5986 5987 /* Update this model for any phis in SNODE, assuming we came from 5988 LAST_CFG_SUPEREDGE. */ 5989 5990 void 5991 region_model::update_for_phis (const supernode *snode, 5992 const cfg_superedge *last_cfg_superedge, 5993 region_model_context *ctxt) 5994 { 5995 gcc_assert (last_cfg_superedge); 5996 5997 for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis (); 5998 !gsi_end_p (gpi); gsi_next (&gpi)) 5999 { 6000 gphi *phi = gpi.phi (); 6001 6002 tree src = last_cfg_superedge->get_phi_arg (phi); 6003 tree lhs = gimple_phi_result (phi); 6004 6005 /* Update next_state based on phi. */ 6006 bool is_back_edge = last_cfg_superedge->back_edge_p (); 6007 handle_phi (phi, lhs, src, is_back_edge, ctxt); 6008 } 6009 } 6010 6011 /* Attempt to update this model for taking EDGE (where the last statement 6012 was LAST_STMT), returning true if the edge can be taken, false 6013 otherwise. 6014 6015 For CFG superedges where LAST_STMT is a conditional or a switch 6016 statement, attempt to add the relevant conditions for EDGE to this 6017 model, returning true if they are feasible, or false if they are 6018 impossible. 6019 6020 For call superedges, push frame information and store arguments 6021 into parameters. 6022 6023 For return superedges, pop frame information and store return 6024 values into any lhs. 6025 6026 Rejection of call/return superedges happens elsewhere, in 6027 program_point::on_edge (i.e. based on program point, rather 6028 than program state). */ 6029 6030 bool 6031 region_model::maybe_update_for_edge (const superedge &edge, 6032 const gimple *last_stmt, 6033 region_model_context *ctxt) 6034 { 6035 /* Handle frame updates for interprocedural edges. 
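Call superedges push a new frame, populating the callee's parameters from the arguments at the callsite; return superedges pop the frame, copying any return value into the caller; SUPEREDGE_INTRAPROCEDURAL_CALL edges are handled via a call summary (update_for_call_summary).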
*/ 6036 switch (edge.m_kind) 6037 { 6038 default: 6039 break; 6040 6041 case SUPEREDGE_CALL: 6042 { 6043 const call_superedge *call_edge = as_a <const call_superedge *> (&edge); 6044 update_for_call_superedge (*call_edge, ctxt); 6045 } 6046 break; 6047 6048 case SUPEREDGE_RETURN: 6049 { 6050 const return_superedge *return_edge 6051 = as_a <const return_superedge *> (&edge); 6052 update_for_return_superedge (*return_edge, ctxt); 6053 } 6054 break; 6055 6056 case SUPEREDGE_INTRAPROCEDURAL_CALL: 6057 { 6058 const callgraph_superedge *cg_sedge 6059 = as_a <const callgraph_superedge *> (&edge); 6060 update_for_call_summary (*cg_sedge, ctxt); 6061 } 6062 break; 6063 } 6064 6065 if (last_stmt == NULL) 6066 return true; 6067 6068 /* Apply any constraints for conditionals/switch statements. */ 6069 6070 if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt)) 6071 { 6072 const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge); 6073 return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt); 6074 } 6075 6076 if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt)) 6077 { 6078 const switch_cfg_superedge *switch_sedge 6079 = as_a <const switch_cfg_superedge *> (&edge); 6080 return apply_constraints_for_gswitch (*switch_sedge, switch_stmt, ctxt); 6081 } 6082 6083 return true; 6084 } 6085 6086 /* Push a new frame_region on to the stack region. 6087 Populate the frame_region with child regions for the function call's 6088 parameters, using values from the arguments at the callsite in the 6089 caller's frame. */ 6090 6091 void 6092 region_model::update_for_call_superedge (const call_superedge &call_edge, 6093 region_model_context *ctxt) 6094 { 6095 /* Build a vec of argument svalue_id, using the current top 6096 frame for resolving tree expressions. */ 6097 const gcall *call_stmt = call_edge.get_call_stmt (); 6098 auto_vec<svalue_id> arg_sids (gimple_call_num_args (call_stmt)); 6099 6100 for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++) 6101 { 6102 tree arg = gimple_call_arg (call_stmt, i); 6103 arg_sids.quick_push (get_rvalue (arg, ctxt)); 6104 } 6105 6106 push_frame (call_edge.get_callee_function (), &arg_sids, ctxt); 6107 } 6108 6109 /* Pop the top-most frame_region from the stack, and copy the return 6110 region's values (if any) into the region for the lvalue of the LHS of 6111 the call (if any). */ 6112 6113 void 6114 region_model::update_for_return_superedge (const return_superedge &return_edge, 6115 region_model_context *ctxt) 6116 { 6117 region_id stack_rid = get_stack_region_id (); 6118 stack_region *stack = get_region <stack_region> (stack_rid); 6119 6120 /* Get the region for the result of the call, within the caller frame. */ 6121 region_id result_dst_rid; 6122 const gcall *call_stmt = return_edge.get_call_stmt (); 6123 tree lhs = gimple_call_lhs (call_stmt); 6124 if (lhs) 6125 { 6126 /* Normally we access the top-level frame, which is: 6127 path_var (expr, stack->get_num_frames () - 1) 6128 whereas here we need the caller frame, hence "- 2" here. */ 6129 gcc_assert (stack->get_num_frames () >= 2); 6130 result_dst_rid = get_lvalue (path_var (lhs, stack->get_num_frames () - 2), 6131 ctxt); 6132 } 6133 6134 purge_stats stats; 6135 stack->pop_frame (this, result_dst_rid, true, &stats, ctxt); 6136 // TODO: do something with the stats? 6137 6138 if (!lhs) 6139 { 6140 /* This could be a leak; try purging again, but this time, 6141 don't special-case the result sids (as was done in pop_frame). 
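Purging with CTXT notifies it about the svalue_ids that are about to be lost, giving it the chance to report e.g. leaked resources.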
*/ 6142 purge_unused_svalues (&stats, ctxt); 6143 } 6144 } 6145 6146 /* Update this region_model with a summary of the effect of calling 6147 and returning from CG_SEDGE. 6148 6149 TODO: Currently this is extremely simplistic: we merely set the 6150 return value to "unknown". A proper implementation would e.g. update 6151 sm-state, and presumably be reworked to support multiple outcomes. */ 6152 6153 void 6154 region_model::update_for_call_summary (const callgraph_superedge &cg_sedge, 6155 region_model_context *ctxt) 6156 { 6157 /* For now, set any return value to "unknown". */ 6158 const gcall *call_stmt = cg_sedge.get_call_stmt (); 6159 tree lhs = gimple_call_lhs (call_stmt); 6160 if (lhs) 6161 set_to_new_unknown_value (get_lvalue (lhs, ctxt), TREE_TYPE (lhs), ctxt); 6162 6163 // TODO: actually implement some kind of summary here 6164 } 6165 6166 /* Given a true or false edge guarded by conditional statement COND_STMT, 6167 determine appropriate constraints for the edge to be taken. 6168 6169 If they are feasible, add the constraints and return true. 6170 6171 Return false if the constraints contradict existing knowledge 6172 (and so the edge should not be taken). */ 6173 6174 bool 6175 region_model::apply_constraints_for_gcond (const cfg_superedge &sedge, 6176 const gcond *cond_stmt, 6177 region_model_context *ctxt) 6178 { 6179 ::edge cfg_edge = sedge.get_cfg_edge (); 6180 gcc_assert (cfg_edge != NULL); 6181 gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)); 6182 6183 enum tree_code op = gimple_cond_code (cond_stmt); 6184 tree lhs = gimple_cond_lhs (cond_stmt); 6185 tree rhs = gimple_cond_rhs (cond_stmt); 6186 if (cfg_edge->flags & EDGE_FALSE_VALUE) 6187 op = invert_tree_comparison (op, false /* honor_nans */); 6188 return add_constraint (lhs, op, rhs, ctxt); 6189 } 6190 6191 /* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints 6192 for the edge to be taken. 6193 6194 If they are feasible, add the constraints and return true. 6195 6196 Return false if the constraints contradict existing knowledge 6197 (and so the edge should not be taken). */ 6198 6199 bool 6200 region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge, 6201 const gswitch *switch_stmt, 6202 region_model_context *ctxt) 6203 { 6204 tree index = gimple_switch_index (switch_stmt); 6205 tree case_label = edge.get_case_label (); 6206 gcc_assert (TREE_CODE (case_label) == CASE_LABEL_EXPR); 6207 tree lower_bound = CASE_LOW (case_label); 6208 tree upper_bound = CASE_HIGH (case_label); 6209 if (lower_bound) 6210 { 6211 if (upper_bound) 6212 { 6213 /* Range. */ 6214 if (!add_constraint (index, GE_EXPR, lower_bound, ctxt)) 6215 return false; 6216 return add_constraint (index, LE_EXPR, upper_bound, ctxt); 6217 } 6218 else 6219 /* Single-value. */ 6220 return add_constraint (index, EQ_EXPR, lower_bound, ctxt); 6221 } 6222 else 6223 { 6224 /* The default case. 6225 Add exclusions based on the other cases. */ 6226 for (unsigned other_idx = 1; 6227 other_idx < gimple_switch_num_labels (switch_stmt); 6228 other_idx++) 6229 { 6230 tree other_label = gimple_switch_label (switch_stmt, 6231 other_idx); 6232 tree other_lower_bound = CASE_LOW (other_label); 6233 tree other_upper_bound = CASE_HIGH (other_label); 6234 gcc_assert (other_lower_bound); 6235 if (other_upper_bound) 6236 { 6237 /* Exclude this range-valued case. 6238 For now, we just exclude the boundary values. 6239 TODO: exclude the values within the region. 
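i.e. we merely add "INDEX != LOW" and "INDEX != HIGH"; values strictly between the two bounds are not yet excluded, so the default edge is under-constrained for such ranges.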
*/ 6240 if (!add_constraint (index, NE_EXPR, other_lower_bound, ctxt)) 6241 return false; 6242 if (!add_constraint (index, NE_EXPR, other_upper_bound, ctxt)) 6243 return false; 6244 } 6245 else 6246 /* Exclude this single-valued case. */ 6247 if (!add_constraint (index, NE_EXPR, other_lower_bound, ctxt)) 6248 return false; 6249 } 6250 return true; 6251 } 6252 } 6253 6254 /* Get the root_region within this model (guaranteed to be non-null). */ 6255 6256 root_region * 6257 region_model::get_root_region () const 6258 { 6259 return get_region<root_region> (m_root_rid); 6260 } 6261 6262 /* Get the region_id of this model's stack region (if any). */ 6263 6264 region_id 6265 region_model::get_stack_region_id () const 6266 { 6267 return get_root_region ()->get_stack_region_id (); 6268 } 6269 6270 /* Create a new frame_region for a call to FUN and push it onto 6271 the stack. 6272 6273 If ARG_SIDS is non-NULL, use it to populate the parameters 6274 in the new frame. 6275 Otherwise, populate them with unknown values. 6276 6277 Return the region_id of the new frame_region. */ 6278 6279 region_id 6280 region_model::push_frame (function *fun, vec<svalue_id> *arg_sids, 6281 region_model_context *ctxt) 6282 { 6283 return get_root_region ()->push_frame (this, fun, arg_sids, ctxt); 6284 } 6285 6286 /* Get the region_id of the top-most frame in this region_model's stack, 6287 if any. */ 6288 6289 region_id 6290 region_model::get_current_frame_id () const 6291 { 6292 return get_root_region ()->get_current_frame_id (*this); 6293 } 6294 6295 /* Get the function of the top-most frame in this region_model's stack. 6296 There must be such a frame. */ 6297 6298 function * 6299 region_model::get_current_function () const 6300 { 6301 region_id frame_id = get_current_frame_id (); 6302 frame_region *frame = get_region<frame_region> (frame_id); 6303 return frame->get_function (); 6304 } 6305 6306 /* Pop the topmost frame_region from this region_model's stack; 6307 see the comment for stack_region::pop_frame. */ 6308 6309 void 6310 region_model::pop_frame (region_id result_dst_rid, 6311 bool purge, purge_stats *out, 6312 region_model_context *ctxt) 6313 { 6314 get_root_region ()->pop_frame (this, result_dst_rid, purge, out, ctxt); 6315 } 6316 6317 /* Get the number of frames in this region_model's stack. */ 6318 6319 int 6320 region_model::get_stack_depth () const 6321 { 6322 stack_region *stack = get_root_region ()->get_stack_region (this); 6323 if (stack) 6324 return stack->get_num_frames (); 6325 else 6326 return 0; 6327 } 6328 6329 /* Get the function * at DEPTH within the call stack. */ 6330 6331 function * 6332 region_model::get_function_at_depth (unsigned depth) const 6333 { 6334 stack_region *stack = get_root_region ()->get_stack_region (this); 6335 gcc_assert (stack); 6336 region_id frame_rid = stack->get_frame_rid (depth); 6337 frame_region *frame = get_region <frame_region> (frame_rid); 6338 return frame->get_function (); 6339 } 6340 6341 /* Get the region_id of this model's globals region (if any). */ 6342 6343 region_id 6344 region_model::get_globals_region_id () const 6345 { 6346 return get_root_region ()->get_globals_region_id (); 6347 } 6348 6349 /* Add SVAL to this model, taking ownership, and returning its new 6350 svalue_id. */ 6351 6352 svalue_id 6353 region_model::add_svalue (svalue *sval) 6354 { 6355 gcc_assert (sval); 6356 m_svalues.safe_push (sval); 6357 return svalue_id::from_int (m_svalues.length () - 1); 6358 } 6359 6360 /* Change the meaning of SID to be NEW_SVAL 6361 (e.g. 
when dereferencing an unknown pointer, the pointer 6362 becomes a pointer to a symbolic region, so that all users 6363 of the former unknown pointer are now effectively pointing 6364 at the same region). */ 6365 6366 void 6367 region_model::replace_svalue (svalue_id sid, svalue *new_sval) 6368 { 6369 gcc_assert (!sid.null_p ()); 6370 int idx = sid.as_int (); 6371 6372 gcc_assert (m_svalues[idx]); 6373 gcc_assert (m_svalues[idx]->get_type () == new_sval->get_type ()); 6374 delete m_svalues[idx]; 6375 6376 m_svalues[idx] = new_sval; 6377 } 6378 6379 /* Add region R to this model, taking ownership, and returning its new 6380 region_id. */ 6381 6382 region_id 6383 region_model::add_region (region *r) 6384 { 6385 gcc_assert (r); 6386 m_regions.safe_push (r); 6387 return region_id::from_int (m_regions.length () - 1); 6388 } 6389 6390 /* Return the svalue with id SVAL_ID, or NULL for a null id. */ 6391 6392 svalue * 6393 region_model::get_svalue (svalue_id sval_id) const 6394 { 6395 if (sval_id.null_p ()) 6396 return NULL; 6397 return m_svalues[sval_id.as_int ()]; 6398 } 6399 6400 /* Return the region with id RID, or NULL for a null id. */ 6401 6402 region * 6403 region_model::get_region (region_id rid) const 6404 { 6405 if (rid.null_p ()) 6406 return NULL; 6407 return m_regions[rid.as_int ()]; 6408 } 6409 6410 /* Make a region of an appropriate subclass for TYPE, 6411 with parent PARENT_RID, or return NULL for types we don't yet know 6412 how to handle. */ 6413 6414 static region * 6415 make_region_for_type (region_id parent_rid, tree type) 6416 { 6417 gcc_assert (TYPE_P (type)); 6418 6419 if (INTEGRAL_TYPE_P (type) 6420 || SCALAR_FLOAT_TYPE_P (type) 6421 || POINTER_TYPE_P (type) 6422 || TREE_CODE (type) == COMPLEX_TYPE 6423 || TREE_CODE (type) == VECTOR_TYPE) 6424 return new primitive_region (parent_rid, type); 6425 6426 if (TREE_CODE (type) == RECORD_TYPE) 6427 return new struct_region (parent_rid, type); 6428 6429 if (TREE_CODE (type) == ARRAY_TYPE) 6430 return new array_region (parent_rid, type); 6431 6432 if (TREE_CODE (type) == UNION_TYPE) 6433 return new union_region (parent_rid, type); 6434 6435 if (FUNC_OR_METHOD_TYPE_P (type)) 6436 return new function_region (parent_rid, type); 6437 6438 /* If we have a void *, make a new symbolic region. */ 6439 if (VOID_TYPE_P (type)) 6440 return new symbolic_region (parent_rid, type, false); 6441 6442 return NULL; 6443 } 6444 6445 /* Add a region with type TYPE and parent PARENT_RID. */ 6446 6447 region_id 6448 region_model::add_region_for_type (region_id parent_rid, tree type, 6449 region_model_context *ctxt) 6450 { 6451 if (type) 6452 { 6453 gcc_assert (TYPE_P (type)); 6454 6455 if (region *new_region = make_region_for_type (parent_rid, type)) 6456 return add_region (new_region); 6457 } 6458 6459 /* If we can't handle TYPE, return a placeholder region, and stop 6460 exploring this path. */ 6461 return make_region_for_unexpected_tree_code (ctxt, type, 6462 dump_location_t ()); 6463 } 6464 6465 /* Helper class for region_model::purge_unused_svalues. */ 6466 6467 class restrict_to_used_svalues : public purge_criteria 6468 { 6469 public: 6470 restrict_to_used_svalues (const auto_sbitmap &used) : m_used (used) {} 6471 6472 bool should_purge_p (svalue_id sid) const FINAL OVERRIDE 6473 { 6474 gcc_assert (!sid.null_p ()); 6475 return !bitmap_bit_p (m_used, sid.as_int ()); 6476 } 6477 6478 private: 6479 const auto_sbitmap &m_used; 6480 }; 6481 6482 /* Remove unused svalues from this model, accumulating stats into STATS. 6483 Unused svalues are deleted. Doing so could reorder the svalues, and 6484 thus change the meaning of svalue_ids. 6485 6486 If CTXT is non-NULL, then it is notified about svalue_id remappings, 6487 and about svalue_ids that are about to be deleted. This allows e.g. 6488 for warning about resource leaks, for the case where the svalue 6489 represents a resource handle in the user code (e.g. a FILE * or a malloc 6490 buffer). 6491 6492 Amongst other things, removing unused svalues is important for ensuring 6493 that the analysis of loops terminates. Otherwise, we could generate a 6494 succession of models with unreferenced "unknown" values, where the 6495 number of redundant unknown values could grow without bounds, and each 6496 such model would be treated as distinct. 6497 6498 If KNOWN_USED_SIDS is non-NULL, treat *KNOWN_USED_SIDS as used (this is for 6499 handling values being returned from functions as their frame is popped, 6500 since otherwise we'd have to simultaneously determine both the rvalue 6501 of the return expr in the callee frame and the lvalue for the gcall's 6502 assignment in the caller frame, and it seems cleaner to express all 6503 lvalue and rvalue lookups implicitly relative to a "current" frame). 6504 The svalue_ids in *KNOWN_USED_SIDS are not remapped, and hence this 6505 call invalidates *KNOWN_USED_SIDS. */ 6506 6507 void 6508 region_model::purge_unused_svalues (purge_stats *stats, 6509 region_model_context *ctxt, 6510 svalue_id_set *known_used_sids) 6511 { 6512 // TODO: might want to avoid a vfunc call just to do logging here: 6513 logger *logger = ctxt ? ctxt->get_logger () : NULL; 6514 6515 LOG_SCOPE (logger); 6516 6517 auto_sbitmap used (m_svalues.length ()); 6518 bitmap_clear (used); 6519 6520 if (known_used_sids) 6521 { 6522 /* We can't use an sbitmap for known_used_sids as the number of 6523 svalues could have grown since it was created. */ 6524 for (unsigned i = 0; i < get_num_svalues (); i++) 6525 if (known_used_sids->svalue_p (svalue_id::from_int (i))) 6526 bitmap_set_bit (used, i); 6527 } 6528 6529 /* Walk the regions, marking sids that are used. */ 6530 unsigned i; 6531 region *r; 6532 FOR_EACH_VEC_ELT (m_regions, i, r) 6533 { 6534 svalue_id sid = r->get_value_direct (); 6535 if (!sid.null_p ()) 6536 bitmap_set_bit (used, sid.as_int ()); 6537 } 6538 6539 /* Now purge any constraints involving svalues we don't care about. */ 6540 restrict_to_used_svalues criterion (used); 6541 m_constraints->purge (criterion, stats); 6542 6543 /* Mark any sids that are in constraints that survived. */ 6544 { 6545 equiv_class *ec; 6546 FOR_EACH_VEC_ELT (m_constraints->m_equiv_classes, i, ec) 6547 { 6548 int j; 6549 svalue_id *sid; 6550 FOR_EACH_VEC_ELT (ec->m_vars, j, sid) 6551 { 6552 gcc_assert (!sid->null_p ()); 6553 bitmap_set_bit (used, sid->as_int ()); 6554 } 6555 } 6556 } 6557 6558 /* Build a mapping from old-sid to new-sid so that we can preserve 6559 order of the used IDs and move all redundant ones to the end. 6560 Iterate through svalue IDs, adding used ones to the front of 6561 the new list, and unused ones to the back.
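For example, with four svalues of which only sv1 and sv3 are used, the map would be sv1 -> sv0, sv3 -> sv1, sv0 -> sv3, sv2 -> sv2, leaving the used values as sv0 and sv1 and allowing the unused ones to be popped from the end of the vector.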
*/ 6562 svalue_id_map map (m_svalues.length ()); 6563 int next_used_new_sid = 0; 6564 int after_next_unused_new_sid = m_svalues.length (); 6565 for (unsigned i = 0; i < m_svalues.length (); i++) 6566 { 6567 svalue_id src (svalue_id::from_int (i)); 6568 if (bitmap_bit_p (used, i)) 6569 { 6570 if (logger) 6571 logger->log ("sv%i is used", i); 6572 map.put (src, svalue_id::from_int (next_used_new_sid++)); 6573 } 6574 else 6575 { 6576 if (logger) 6577 logger->log ("sv%i is unused", i); 6578 map.put (src, svalue_id::from_int (--after_next_unused_new_sid)); 6579 } 6580 } 6581 /* The two insertion points should have met. */ 6582 gcc_assert (next_used_new_sid == after_next_unused_new_sid); 6583 6584 /* Now walk the regions and the constraints, remapping sids, 6585 so that all the redundant svalues are at the end. */ 6586 remap_svalue_ids (map); 6587 6588 if (logger) 6589 { 6590 logger->start_log_line (); 6591 logger->log_partial ("map: "); 6592 map.dump_to_pp (logger->get_printer ()); 6593 logger->end_log_line (); 6594 } 6595 6596 /* Notify any client about the remapping and pending deletion. 6597 Potentially this could trigger leak warnings. */ 6598 if (ctxt) 6599 { 6600 ctxt->remap_svalue_ids (map); 6601 int num_client_items_purged 6602 = ctxt->on_svalue_purge (svalue_id::from_int (next_used_new_sid), map); 6603 if (stats) 6604 stats->m_num_client_items += num_client_items_purged; 6605 } 6606 6607 /* Drop the redundant svalues from the end of the vector. */ 6608 while ((signed)m_svalues.length () > next_used_new_sid) 6609 { 6610 if (logger) 6611 { 6612 svalue_id victim = svalue_id::from_int (m_svalues.length () - 1); 6613 logger->log ("deleting sv%i (was sv%i)", 6614 victim.as_int (), 6615 map.get_src_for_dst (victim).as_int ()); 6616 } 6617 delete m_svalues.pop (); 6618 if (stats) 6619 stats->m_num_svalues++; 6620 } 6621 6622 validate (); 6623 } 6624 6625 /* Renumber the svalues within this model according to MAP. */ 6626 6627 void 6628 region_model::remap_svalue_ids (const svalue_id_map &map) 6629 { 6630 /* Update IDs within regions. */ 6631 unsigned i; 6632 region *r; 6633 FOR_EACH_VEC_ELT (m_regions, i, r) 6634 r->remap_svalue_ids (map); 6635 6636 /* Update IDs within ECs within constraints. */ 6637 m_constraints->remap_svalue_ids (map); 6638 6639 /* Build a reordered svalues vector. */ 6640 auto_vec<svalue *> new_svalues (m_svalues.length ()); 6641 for (unsigned i = 0; i < m_svalues.length (); i++) 6642 { 6643 svalue_id dst (svalue_id::from_int (i)); 6644 svalue_id src = map.get_src_for_dst (dst); 6645 new_svalues.quick_push (get_svalue (src)); 6646 } 6647 6648 /* Copy over the reordered vec to m_svalues. */ 6649 m_svalues.truncate (0); 6650 gcc_assert (m_svalues.space (new_svalues.length ())); 6651 svalue *sval; 6652 FOR_EACH_VEC_ELT (new_svalues, i, sval) 6653 m_svalues.quick_push (sval); 6654 } 6655 6656 /* Renumber the regions within this model according to MAP. */ 6657 6658 void 6659 region_model::remap_region_ids (const region_id_map &map) 6660 { 6661 /* Update IDs within regions. */ 6662 unsigned i; 6663 region *r; 6664 FOR_EACH_VEC_ELT (m_regions, i, r) 6665 r->remap_region_ids (map); 6666 6667 /* Update IDs within svalues. */ 6668 svalue *sval; 6669 FOR_EACH_VEC_ELT (m_svalues, i, sval) 6670 sval->remap_region_ids (map); 6671 6672 /* Build a reordered regions vector. 
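mirroring what remap_svalue_ids does for svalues: for each new id DST, push the region that MAP maps to DST.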
*/ 6673 auto_vec<region *> new_regions (m_regions.length ()); 6674 for (unsigned i = 0; i < m_regions.length (); i++) 6675 { 6676 region_id dst (region_id::from_int (i)); 6677 region_id src = map.get_src_for_dst (dst); 6678 new_regions.quick_push (get_region (src)); 6679 } 6680 6681 /* Copy over the reordered vec to m_regions. */ 6682 m_regions.truncate (0); 6683 gcc_assert (m_regions.space (new_regions.length ())); 6684 FOR_EACH_VEC_ELT (new_regions, i, r) 6685 m_regions.quick_push (r); 6686 } 6687 6688 /* Delete all regions within SET_TO_PURGE, remapping region IDs for 6689 other regions. It's required that there are no uses of the 6690 regions within the set (or the region IDs will become invalid). 6691 6692 Accumulate stats to STATS. */ 6693 6694 void 6695 region_model::purge_regions (const region_id_set &set_to_purge, 6696 purge_stats *stats, 6697 logger *) 6698 { 6699 /* Build a mapping from old-rid to new-rid so that we can preserve 6700 order of the used IDs and move all redundant ones to the end. 6701 Iterate though region IDs, adding used ones to the front of 6702 the new list, and unused ones to the back. */ 6703 region_id_map map (m_regions.length ()); 6704 int next_used_new_rid = 0; 6705 int after_next_unused_new_rid = m_regions.length (); 6706 for (unsigned i = 0; i < m_regions.length (); i++) 6707 { 6708 region_id src (region_id::from_int (i)); 6709 if (set_to_purge.region_p (src)) 6710 map.put (src, region_id::from_int (--after_next_unused_new_rid)); 6711 else 6712 map.put (src, region_id::from_int (next_used_new_rid++)); 6713 } 6714 /* The two insertion points should have met. */ 6715 gcc_assert (next_used_new_rid == after_next_unused_new_rid); 6716 6717 /* Now walk the regions and svalues, remapping rids, 6718 so that all the redundant regions are at the end. */ 6719 remap_region_ids (map); 6720 6721 /* Drop the redundant regions from the end of the vector. */ 6722 while ((signed)m_regions.length () > next_used_new_rid) 6723 { 6724 delete m_regions.pop (); 6725 if (stats) 6726 stats->m_num_regions++; 6727 } 6728 } 6729 6730 /* Populate *OUT with RID and all of its descendents. 6731 If EXCLUDE_RID is non-null, then don't add it or its descendents. */ 6732 6733 void 6734 region_model::get_descendents (region_id rid, region_id_set *out, 6735 region_id exclude_rid) const 6736 { 6737 out->add_region (rid); 6738 6739 bool changed = true; 6740 while (changed) 6741 { 6742 changed = false; 6743 unsigned i; 6744 region *r; 6745 FOR_EACH_VEC_ELT (m_regions, i, r) 6746 { 6747 region_id iter_rid = region_id::from_int (i); 6748 if (iter_rid == exclude_rid) 6749 continue; 6750 if (!out->region_p (iter_rid)) 6751 { 6752 region_id parent_rid = r->get_parent (); 6753 if (!parent_rid.null_p ()) 6754 if (out->region_p (parent_rid)) 6755 { 6756 out->add_region (iter_rid); 6757 changed = true; 6758 } 6759 } 6760 } 6761 } 6762 } 6763 6764 /* Delete RID and all descendent regions. 6765 Find any pointers to such regions; convert them to 6766 poisoned values of kind PKIND. 6767 Accumulate stats on purged entities into STATS. */ 6768 6769 void 6770 region_model::delete_region_and_descendents (region_id rid, 6771 enum poison_kind pkind, 6772 purge_stats *stats, 6773 logger *logger) 6774 { 6775 /* Find all child and descendent regions. */ 6776 region_id_set descendents (this); 6777 get_descendents (rid, &descendents, region_id::null ()); 6778 6779 /* Find any pointers to such regions; convert to poisoned. 
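values of kind PKIND, so that any later use of those pointers can be diagnosed; the regions themselves are then deleted via purge_regions below.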
*/ 6780 poison_any_pointers_to_bad_regions (descendents, pkind); 6781 6782 /* Delete all such regions. */ 6783 purge_regions (descendents, stats, logger); 6784 } 6785 6786 /* Find any pointers to regions within BAD_REGIONS; convert them to 6787 poisoned values of kind PKIND. */ 6788 6789 void 6790 region_model::poison_any_pointers_to_bad_regions (const region_id_set & 6791 bad_regions, 6792 enum poison_kind pkind) 6793 { 6794 int i; 6795 svalue *sval; 6796 FOR_EACH_VEC_ELT (m_svalues, i, sval) 6797 if (region_svalue *ptr_sval = sval->dyn_cast_region_svalue ()) 6798 { 6799 region_id ptr_dst = ptr_sval->get_pointee (); 6800 if (!ptr_dst.null_p ()) 6801 if (bad_regions.region_p (ptr_dst)) 6802 replace_svalue 6803 (svalue_id::from_int (i), 6804 new poisoned_svalue (pkind, sval->get_type ())); 6805 } 6806 } 6807 6808 /* Attempt to merge THIS with OTHER_MODEL, writing the result 6809 to OUT_MODEL, and populating SID_MAPPING. */ 6810 6811 bool 6812 region_model::can_merge_with_p (const region_model &other_model, 6813 region_model *out_model, 6814 svalue_id_merger_mapping *sid_mapping) const 6815 { 6816 gcc_assert (m_root_rid == other_model.m_root_rid); 6817 gcc_assert (m_root_rid.as_int () == 0); 6818 gcc_assert (sid_mapping); 6819 gcc_assert (out_model); 6820 6821 model_merger merger (this, &other_model, out_model, sid_mapping); 6822 6823 if (!root_region::can_merge_p (get_root_region (), 6824 other_model.get_root_region (), 6825 out_model->get_root_region (), 6826 &merger)) 6827 return false; 6828 6829 /* Merge constraints. */ 6830 constraint_manager::merge (*m_constraints, 6831 *other_model.m_constraints, 6832 out_model->m_constraints, 6833 merger); 6834 6835 out_model->validate (); 6836 6837 /* The merged model should be simpler (or as simple) as the inputs. */ 6838 #if 0 6839 gcc_assert (out_model->m_svalues.length () <= m_svalues.length ()); 6840 gcc_assert (out_model->m_svalues.length () 6841 <= other_model.m_svalues.length ()); 6842 #endif 6843 gcc_assert (out_model->m_regions.length () <= m_regions.length ()); 6844 gcc_assert (out_model->m_regions.length () 6845 <= other_model.m_regions.length ()); 6846 // TODO: same, for constraints 6847 6848 return true; 6849 } 6850 6851 /* As above, but supply a placeholder svalue_id_merger_mapping 6852 instance to be used and receive output. For use in selftests. */ 6853 6854 bool 6855 region_model::can_merge_with_p (const region_model &other_model, 6856 region_model *out_model) const 6857 { 6858 svalue_id_merger_mapping sid_mapping (*this, other_model); 6859 return can_merge_with_p (other_model, out_model, &sid_mapping); 6860 } 6861 6862 /* For debugging purposes: look for a region within this region_model 6863 for a decl named NAME (or an SSA_NAME for such a decl), 6864 returning its value, or svalue_id::null if none are found. */ 6865 6866 svalue_id 6867 region_model::get_value_by_name (const char *name) const 6868 { 6869 gcc_assert (name); 6870 tree identifier = get_identifier (name); 6871 return get_root_region ()->get_value_by_name (identifier, *this); 6872 } 6873 6874 /* Generate or reuse an svalue_id within this model for an index 6875 into an array of type PTR_TYPE, based on OFFSET_SID. 
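If OFFSET_SID is a compile-time constant and the pointee type has a known constant size, return a constant svalue holding OFFSET / sizeof (element), treating void * as having element size 1; otherwise return a new unknown svalue.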
*/ 6876 6877 svalue_id 6878 region_model::convert_byte_offset_to_array_index (tree ptr_type, 6879 svalue_id offset_sid) 6880 { 6881 gcc_assert (POINTER_TYPE_P (ptr_type)); 6882 6883 if (tree offset_cst = maybe_get_constant (offset_sid)) 6884 { 6885 tree elem_type = TREE_TYPE (ptr_type); 6886 6887 /* Arithmetic on void-pointers is a GNU C extension, treating the size 6888 of a void as 1. 6889 https://gcc.gnu.org/onlinedocs/gcc/Pointer-Arith.html */ 6890 if (TREE_CODE (elem_type) == VOID_TYPE) 6891 return offset_sid; 6892 6893 /* First, use int_size_in_bytes, to reject the case where we have an 6894 incomplete type, or a non-constant value. */ 6895 HOST_WIDE_INT hwi_byte_size = int_size_in_bytes (elem_type); 6896 if (hwi_byte_size > 0) 6897 { 6898 /* Now call size_in_bytes to get the answer in tree form. */ 6899 tree byte_size = size_in_bytes (elem_type); 6900 gcc_assert (byte_size); 6901 /* Try to get a constant by dividing, ensuring that we're in a 6902 signed representation first. */ 6903 tree index 6904 = fold_binary (TRUNC_DIV_EXPR, ssizetype, 6905 fold_convert (ssizetype, offset_cst), 6906 fold_convert (ssizetype, byte_size)); 6907 if (index && TREE_CODE (index) == INTEGER_CST) 6908 return get_or_create_constant_svalue (index); 6909 } 6910 } 6911 6912 /* Otherwise, we don't know the array index; generate a new unknown value. 6913 TODO: do we need to capture the relationship between two unknown 6914 values (the offset and the index)? */ 6915 return add_svalue (new unknown_svalue (integer_type_node)); 6916 } 6917 6918 /* Get a region of type TYPE for PTR_SID[OFFSET_SID/sizeof (*PTR_SID)]. 6919 6920 If OFFSET_SID is known to be zero, then dereference PTR_SID. 6921 Otherwise, impose a view of "typeof(*PTR_SID)[]" on *PTR_SID, 6922 and then get a view of type TYPE on the relevant array element. */ 6923 6924 region_id 6925 region_model::get_or_create_mem_ref (tree type, 6926 svalue_id ptr_sid, 6927 svalue_id offset_sid, 6928 region_model_context *ctxt) 6929 { 6930 svalue *ptr_sval = get_svalue (ptr_sid); 6931 tree ptr_type = ptr_sval->get_type (); 6932 gcc_assert (ptr_type); 6933 6934 region_id raw_rid = deref_rvalue (ptr_sid, ctxt); 6935 6936 svalue *offset_sval = get_svalue (offset_sid); 6937 tree offset_type = offset_sval->get_type (); 6938 gcc_assert (offset_type); 6939 6940 if (constant_svalue *cst_sval = offset_sval->dyn_cast_constant_svalue ()) 6941 { 6942 if (zerop (cst_sval->get_constant ())) 6943 { 6944 /* Handle the zero offset case. */ 6945 return get_or_create_view (raw_rid, type, ctxt); 6946 } 6947 6948 /* If we're already within an array of the correct type, 6949 then we want to reuse that array, rather than starting 6950 a new view. 6951 If so, figure out our raw_rid's offset from its parent, 6952 if we can, and use that to offset OFFSET_SID, and create 6953 the element within the parent region. */ 6954 region *raw_reg = get_region (raw_rid); 6955 region_id parent_rid = raw_reg->get_parent (); 6956 tree parent_type = get_region (parent_rid)->get_type (); 6957 if (parent_type 6958 && TREE_CODE (parent_type) == ARRAY_TYPE) 6959 { 6960 // TODO: check we have the correct parent type 6961 array_region *parent_array = get_region <array_region> (parent_rid); 6962 array_region::key_t key_for_raw_rid; 6963 if (parent_array->get_key_for_child_region (raw_rid, 6964 &key_for_raw_rid)) 6965 { 6966 /* Convert from offset to index. 
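and add it to RAW_RID's own index within the parent array, so that e.g. if the pointer already refers to element [2] and the offset corresponds to one element, we use element [3] of the existing array rather than imposing a fresh array view.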
*/ 6967 svalue_id index_sid 6968 = convert_byte_offset_to_array_index (ptr_type, offset_sid); 6969 if (tree index_cst 6970 = get_svalue (index_sid)->maybe_get_constant ()) 6971 { 6972 array_region::key_t index_offset 6973 = array_region::key_from_constant (index_cst); 6974 array_region::key_t index_rel_to_parent 6975 = key_for_raw_rid + index_offset; 6976 tree index_rel_to_parent_cst 6977 = wide_int_to_tree (integer_type_node, 6978 index_rel_to_parent); 6979 svalue_id index_sid 6980 = get_or_create_constant_svalue (index_rel_to_parent_cst); 6981 6982 /* Carry on, using the parent region and adjusted index. */ 6983 region_id element_rid 6984 = parent_array->get_element (this, raw_rid, index_sid, 6985 ctxt); 6986 return get_or_create_view (element_rid, type, ctxt); 6987 } 6988 } 6989 } 6990 } 6991 6992 tree array_type = build_array_type (TREE_TYPE (ptr_type), 6993 integer_type_node); 6994 region_id array_view_rid = get_or_create_view (raw_rid, array_type, ctxt); 6995 array_region *array_reg = get_region <array_region> (array_view_rid); 6996 6997 svalue_id index_sid 6998 = convert_byte_offset_to_array_index (ptr_type, offset_sid); 6999 7000 region_id element_rid 7001 = array_reg->get_element (this, array_view_rid, index_sid, ctxt); 7002 7003 return get_or_create_view (element_rid, type, ctxt); 7004 } 7005 7006 /* Get a region of type TYPE for PTR_SID + OFFSET_SID. 7007 7008 If OFFSET_SID is known to be zero, then dereference PTR_SID. 7009 Otherwise, impose a view of "typeof(*PTR_SID)[]" on *PTR_SID, 7010 and then get a view of type TYPE on the relevant array element. */ 7011 7012 region_id 7013 region_model::get_or_create_pointer_plus_expr (tree type, 7014 svalue_id ptr_sid, 7015 svalue_id offset_in_bytes_sid, 7016 region_model_context *ctxt) 7017 { 7018 return get_or_create_mem_ref (type, 7019 ptr_sid, 7020 offset_in_bytes_sid, 7021 ctxt); 7022 } 7023 7024 /* Get or create a view of type TYPE of the region with id RAW_RID. 7025 Return the id of the view (or RAW_RID if it is of the same type). */ 7026 7027 region_id 7028 region_model::get_or_create_view (region_id raw_rid, tree type, 7029 region_model_context *ctxt) 7030 { 7031 region *raw_region = get_region (raw_rid); 7032 7033 gcc_assert (TYPE_P (type)); 7034 if (type != raw_region->get_type ()) 7035 { 7036 /* If the region already has a view of the requested type, 7037 reuse it. */ 7038 region_id existing_view_rid = raw_region->get_view (type, this); 7039 if (!existing_view_rid.null_p ()) 7040 return existing_view_rid; 7041 7042 /* Otherwise, make one (adding it to the region_model and 7043 to the viewed region). */ 7044 region_id view_rid = add_region_for_type (raw_rid, type, ctxt); 7045 raw_region->add_view (view_rid, this); 7046 // TODO: something to signify that this is a "view" 7047 return view_rid; 7048 } 7049 7050 return raw_rid; 7051 } 7052 7053 /* Attempt to get the fndecl used at CALL, if known, or NULL_TREE 7054 otherwise.
*/ 7055 7056 tree 7057 region_model::get_fndecl_for_call (const gcall *call, 7058 region_model_context *ctxt) 7059 { 7060 tree fn_ptr = gimple_call_fn (call); 7061 if (fn_ptr == NULL_TREE) 7062 return NULL_TREE; 7063 svalue_id fn_ptr_sid = get_rvalue (fn_ptr, ctxt); 7064 svalue *fn_ptr_sval = get_svalue (fn_ptr_sid); 7065 if (region_svalue *fn_ptr_ptr = fn_ptr_sval->dyn_cast_region_svalue ()) 7066 { 7067 region_id fn_rid = fn_ptr_ptr->get_pointee (); 7068 code_region *code = get_root_region ()->get_code_region (this); 7069 if (code) 7070 { 7071 tree fn_decl = code->get_tree_for_child_region (fn_rid); 7072 if (!fn_decl) 7073 return NULL_TREE; 7074 cgraph_node *node = cgraph_node::get (fn_decl); 7075 if (!node) 7076 return NULL_TREE; 7077 const cgraph_node *ultimate_node = node->ultimate_alias_target (); 7078 if (ultimate_node) 7079 return ultimate_node->decl; 7080 } 7081 } 7082 7083 return NULL_TREE; 7084 } 7085 7086 /* struct model_merger. */ 7087 7088 /* Dump a multiline representation of this merger to PP. */ 7089 7090 void 7091 model_merger::dump_to_pp (pretty_printer *pp) const 7092 { 7093 pp_string (pp, "model A:"); 7094 pp_newline (pp); 7095 m_model_a->dump_to_pp (pp, false); 7096 pp_newline (pp); 7097 7098 pp_string (pp, "model B:"); 7099 pp_newline (pp); 7100 m_model_b->dump_to_pp (pp, false); 7101 pp_newline (pp); 7102 7103 pp_string (pp, "merged model:"); 7104 pp_newline (pp); 7105 m_merged_model->dump_to_pp (pp, false); 7106 pp_newline (pp); 7107 7108 pp_string (pp, "region map: model A to merged model:"); 7109 pp_newline (pp); 7110 m_map_regions_from_a_to_m.dump_to_pp (pp); 7111 pp_newline (pp); 7112 7113 pp_string (pp, "region map: model B to merged model:"); 7114 pp_newline (pp); 7115 m_map_regions_from_b_to_m.dump_to_pp (pp); 7116 pp_newline (pp); 7117 7118 m_sid_mapping->dump_to_pp (pp); 7119 } 7120 7121 /* Dump a multiline representation of this merger to FILE. */ 7122 7123 void 7124 model_merger::dump (FILE *fp) const 7125 { 7126 pretty_printer pp; 7127 pp_format_decoder (&pp) = default_tree_printer; 7128 pp_show_color (&pp) = pp_show_color (global_dc->printer); 7129 pp.buffer->stream = fp; 7130 dump_to_pp (&pp); 7131 pp_flush (&pp); 7132 } 7133 7134 /* Dump a multiline representation of this merger to stderr. */ 7135 7136 DEBUG_FUNCTION void 7137 model_merger::dump () const 7138 { 7139 dump (stderr); 7140 } 7141 7142 /* Attempt to merge the svalues of SID_A and SID_B (from their 7143 respective models), writing the id of the resulting svalue 7144 into *MERGED_SID. 7145 Return true if the merger is possible, false otherwise. */ 7146 7147 bool 7148 model_merger::can_merge_values_p (svalue_id sid_a, 7149 svalue_id sid_b, 7150 svalue_id *merged_sid) 7151 { 7152 gcc_assert (merged_sid); 7153 svalue *sval_a = m_model_a->get_svalue (sid_a); 7154 svalue *sval_b = m_model_b->get_svalue (sid_b); 7155 7156 /* If both are NULL, then the "values" are trivially mergeable. */ 7157 if (!sval_a && !sval_b) 7158 return true; 7159 7160 /* If one is NULL and the other non-NULL, then the "values" 7161 are not mergeable. */ 7162 if (!(sval_a && sval_b)) 7163 return false; 7164 7165 /* Have they both already been mapped to the same new svalue_id? 7166 If so, use it. 
*/ 7167 svalue_id sid_a_in_m 7168 = m_sid_mapping->m_map_from_a_to_m.get_dst_for_src (sid_a); 7169 svalue_id sid_b_in_m 7170 = m_sid_mapping->m_map_from_b_to_m.get_dst_for_src (sid_b); 7171 if (!sid_a_in_m.null_p () 7172 && !sid_b_in_m.null_p () 7173 && sid_a_in_m == sid_b_in_m) 7174 { 7175 *merged_sid = sid_a_in_m; 7176 return true; 7177 } 7178 7179 tree type = sval_a->get_type (); 7180 if (type == NULL_TREE) 7181 type = sval_b->get_type (); 7182 7183 /* If the values have different kinds, or are both unknown, 7184 then merge as "unknown". */ 7185 if (sval_a->get_kind () != sval_b->get_kind () 7186 || sval_a->get_kind () == SK_UNKNOWN) 7187 { 7188 svalue *merged_sval = new unknown_svalue (type); 7189 *merged_sid = m_merged_model->add_svalue (merged_sval); 7190 record_svalues (sid_a, sid_b, *merged_sid); 7191 return true; 7192 } 7193 7194 gcc_assert (sval_a->get_kind () == sval_b->get_kind ()); 7195 7196 switch (sval_a->get_kind ()) 7197 { 7198 default: 7199 case SK_UNKNOWN: /* SK_UNKNOWN handled above. */ 7200 gcc_unreachable (); 7201 7202 case SK_REGION: 7203 { 7204 /* If we have two region pointers, then we can merge (possibly to 7205 "unknown"). */ 7206 const region_svalue ®ion_sval_a = *as_a <region_svalue *> (sval_a); 7207 const region_svalue ®ion_sval_b = *as_a <region_svalue *> (sval_b); 7208 region_svalue::merge_values (region_sval_a, region_sval_b, 7209 merged_sid, type, 7210 this); 7211 record_svalues (sid_a, sid_b, *merged_sid); 7212 return true; 7213 } 7214 break; 7215 case SK_CONSTANT: 7216 { 7217 /* If we have two constants, then we can merge. */ 7218 const constant_svalue &cst_sval_a = *as_a <constant_svalue *> (sval_a); 7219 const constant_svalue &cst_sval_b = *as_a <constant_svalue *> (sval_b); 7220 constant_svalue::merge_values (cst_sval_a, cst_sval_b, 7221 merged_sid, this); 7222 record_svalues (sid_a, sid_b, *merged_sid); 7223 return true; 7224 } 7225 break; 7226 7227 case SK_POISONED: 7228 case SK_SETJMP: 7229 return false; 7230 } 7231 } 7232 7233 /* Record that A_RID in model A and B_RID in model B 7234 correspond to MERGED_RID in the merged model, so 7235 that pointers can be accurately merged. */ 7236 7237 void 7238 model_merger::record_regions (region_id a_rid, 7239 region_id b_rid, 7240 region_id merged_rid) 7241 { 7242 m_map_regions_from_a_to_m.put (a_rid, merged_rid); 7243 m_map_regions_from_b_to_m.put (b_rid, merged_rid); 7244 } 7245 7246 /* Record that A_SID in model A and B_SID in model B 7247 correspond to MERGED_SID in the merged model. */ 7248 7249 void 7250 model_merger::record_svalues (svalue_id a_sid, 7251 svalue_id b_sid, 7252 svalue_id merged_sid) 7253 { 7254 gcc_assert (m_sid_mapping); 7255 m_sid_mapping->m_map_from_a_to_m.put (a_sid, merged_sid); 7256 m_sid_mapping->m_map_from_b_to_m.put (b_sid, merged_sid); 7257 } 7258 7259 /* struct svalue_id_merger_mapping. */ 7260 7261 /* svalue_id_merger_mapping's ctor. */ 7262 7263 svalue_id_merger_mapping::svalue_id_merger_mapping (const region_model &a, 7264 const region_model &b) 7265 : m_map_from_a_to_m (a.get_num_svalues ()), 7266 m_map_from_b_to_m (b.get_num_svalues ()) 7267 { 7268 } 7269 7270 /* Dump a multiline representation of this to PP. 
*/ 7271 7272 void 7273 svalue_id_merger_mapping::dump_to_pp (pretty_printer *pp) const 7274 { 7275 pp_string (pp, "svalue_id map: model A to merged model:"); 7276 pp_newline (pp); 7277 m_map_from_a_to_m.dump_to_pp (pp); 7278 pp_newline (pp); 7279 7280 pp_string (pp, "svalue_id map: model B to merged model:"); 7281 pp_newline (pp); 7282 m_map_from_b_to_m.dump_to_pp (pp); 7283 pp_newline (pp); 7284 } 7285 7286 /* Dump a multiline representation of this to FILE. */ 7287 7288 void 7289 svalue_id_merger_mapping::dump (FILE *fp) const 7290 { 7291 pretty_printer pp; 7292 pp_format_decoder (&pp) = default_tree_printer; 7293 pp_show_color (&pp) = pp_show_color (global_dc->printer); 7294 pp.buffer->stream = fp; 7295 dump_to_pp (&pp); 7296 pp_flush (&pp); 7297 } 7298 7299 /* Dump a multiline representation of this to stderr. */ 7300 7301 DEBUG_FUNCTION void 7302 svalue_id_merger_mapping::dump () const 7303 { 7304 dump (stderr); 7305 } 7306 7307 /* struct canonicalization. */ 7308 7309 /* canonicalization's ctor. */ 7310 7311 canonicalization::canonicalization (const region_model &model) 7312 : m_model (model), 7313 m_rid_map (model.get_num_regions ()), 7314 m_sid_map (model.get_num_svalues ()), 7315 m_next_rid_int (0), 7316 m_next_sid_int (0) 7317 { 7318 } 7319 7320 /* If we've not seen RID yet, assign it a canonicalized region_id, 7321 and walk the region's svalue and then the region. */ 7322 7323 void 7324 canonicalization::walk_rid (region_id rid) 7325 { 7326 /* Stop if we've already seen RID. */ 7327 if (!m_rid_map.get_dst_for_src (rid).null_p ()) 7328 return; 7329 7330 region *region = m_model.get_region (rid); 7331 if (region) 7332 { 7333 m_rid_map.put (rid, region_id::from_int (m_next_rid_int++)); 7334 walk_sid (region->get_value_direct ()); 7335 region->walk_for_canonicalization (this); 7336 } 7337 } 7338 7339 /* If we've not seen SID yet, assign it a canonicalized svalue_id, 7340 and walk the svalue (and potentially regions e.g. for ptr values). */ 7341 7342 void 7343 canonicalization::walk_sid (svalue_id sid) 7344 { 7345 /* Stop if we've already seen SID. */ 7346 if (!m_sid_map.get_dst_for_src (sid).null_p ()) 7347 return; 7348 7349 svalue *sval = m_model.get_svalue (sid); 7350 if (sval) 7351 { 7352 m_sid_map.put (sid, svalue_id::from_int (m_next_sid_int++)); 7353 /* Potentially walk regions e.g. for ptrs. */ 7354 sval->walk_for_canonicalization (this); 7355 } 7356 } 7357 7358 /* Dump a multiline representation of this to PP. */ 7359 7360 void 7361 canonicalization::dump_to_pp (pretty_printer *pp) const 7362 { 7363 pp_string (pp, "region_id map:"); 7364 pp_newline (pp); 7365 m_rid_map.dump_to_pp (pp); 7366 pp_newline (pp); 7367 7368 pp_string (pp, "svalue_id map:"); 7369 pp_newline (pp); 7370 m_sid_map.dump_to_pp (pp); 7371 pp_newline (pp); 7372 } 7373 7374 /* Dump a multiline representation of this to FILE. */ 7375 7376 void 7377 canonicalization::dump (FILE *fp) const 7378 { 7379 pretty_printer pp; 7380 pp_format_decoder (&pp) = default_tree_printer; 7381 pp_show_color (&pp) = pp_show_color (global_dc->printer); 7382 pp.buffer->stream = fp; 7383 dump_to_pp (&pp); 7384 pp_flush (&pp); 7385 } 7386 7387 /* Dump a multiline representation of this to stderr. */ 7388 7389 DEBUG_FUNCTION void 7390 canonicalization::dump () const 7391 { 7392 dump (stderr); 7393 } 7394 7395 } // namespace ana 7396 7397 /* Update HSTATE with a hash of SID. 
*/ 7398 7399 void 7400 inchash::add (svalue_id sid, inchash::hash &hstate) 7401 { 7402 hstate.add_int (sid.as_int ()); 7403 } 7404 7405 /* Update HSTATE with a hash of RID. */ 7406 7407 void 7408 inchash::add (region_id rid, inchash::hash &hstate) 7409 { 7410 hstate.add_int (rid.as_int ()); 7411 } 7412 7413 /* Dump RMODEL fully to stderr (i.e. without summarization). */ 7414 7415 DEBUG_FUNCTION void 7416 debug (const region_model &rmodel) 7417 { 7418 rmodel.dump (false); 7419 } 7420 7421 namespace ana { 7422 7423 #if CHECKING_P 7424 7425 namespace selftest { 7426 7427 /* Build a constant tree of the given type from STR. */ 7428 7429 static tree 7430 build_real_cst_from_string (tree type, const char *str) 7431 { 7432 REAL_VALUE_TYPE real; 7433 real_from_string (&real, str); 7434 return build_real (type, real); 7435 } 7436 7437 /* Append various "interesting" constants to OUT (e.g. NaN). */ 7438 7439 static void 7440 append_interesting_constants (auto_vec<tree> *out) 7441 { 7442 out->safe_push (build_int_cst (integer_type_node, 0)); 7443 out->safe_push (build_int_cst (integer_type_node, 42)); 7444 out->safe_push (build_int_cst (unsigned_type_node, 0)); 7445 out->safe_push (build_int_cst (unsigned_type_node, 42)); 7446 out->safe_push (build_real_cst_from_string (float_type_node, "QNaN")); 7447 out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN")); 7448 out->safe_push (build_real_cst_from_string (float_type_node, "SNaN")); 7449 out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN")); 7450 out->safe_push (build_real_cst_from_string (float_type_node, "0.0")); 7451 out->safe_push (build_real_cst_from_string (float_type_node, "-0.0")); 7452 out->safe_push (build_real_cst_from_string (float_type_node, "Inf")); 7453 out->safe_push (build_real_cst_from_string (float_type_node, "-Inf")); 7454 } 7455 7456 /* Verify that tree_cmp is a well-behaved comparator for qsort, even 7457 if the underlying constants aren't comparable. */ 7458 7459 static void 7460 test_tree_cmp_on_constants () 7461 { 7462 auto_vec<tree> csts; 7463 append_interesting_constants (&csts); 7464 7465 /* Try sorting every triple. */ 7466 const unsigned num = csts.length (); 7467 for (unsigned i = 0; i < num; i++) 7468 for (unsigned j = 0; j < num; j++) 7469 for (unsigned k = 0; k < num; k++) 7470 { 7471 auto_vec<tree> v (3); 7472 v.quick_push (csts[i]); 7473 v.quick_push (csts[j]); 7474 v.quick_push (csts[k]); 7475 v.qsort (tree_cmp); 7476 } 7477 } 7478 7479 /* Implementation detail of the ASSERT_CONDITION_* macros. */ 7480 7481 void 7482 assert_condition (const location &loc, 7483 region_model &model, 7484 tree lhs, tree_code op, tree rhs, 7485 tristate expected) 7486 { 7487 tristate actual = model.eval_condition (lhs, op, rhs, NULL); 7488 ASSERT_EQ_AT (loc, actual, expected); 7489 } 7490 7491 /* Implementation detail of ASSERT_DUMP_TREE_EQ. */ 7492 7493 static void 7494 assert_dump_tree_eq (const location &loc, tree t, const char *expected) 7495 { 7496 auto_fix_quotes sentinel; 7497 pretty_printer pp; 7498 pp_format_decoder (&pp) = default_tree_printer; 7499 dump_tree (&pp, t); 7500 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected); 7501 } 7502 7503 /* Assert that dump_tree (T) is EXPECTED. */ 7504 7505 #define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \ 7506 SELFTEST_BEGIN_STMT \ 7507 assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \ 7508 SELFTEST_END_STMT 7509 7510 /* Implementation detail of ASSERT_DUMP_EQ. 
*/ 7511 7512 static void 7513 assert_dump_eq (const location &loc, 7514 const region_model &model, 7515 bool summarize, 7516 const char *expected) 7517 { 7518 auto_fix_quotes sentinel; 7519 pretty_printer pp; 7520 pp_format_decoder (&pp) = default_tree_printer; 7521 model.dump_to_pp (&pp, summarize); 7522 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected); 7523 } 7524 7525 /* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED. */ 7526 7527 #define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \ 7528 SELFTEST_BEGIN_STMT \ 7529 assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \ 7530 SELFTEST_END_STMT 7531 7532 /* Smoketest for region_model::dump_to_pp. */ 7533 7534 static void 7535 test_dump () 7536 { 7537 region_model model; 7538 model.get_root_region ()->ensure_stack_region (&model); 7539 model.get_root_region ()->ensure_globals_region (&model); 7540 model.get_root_region ()->ensure_heap_region (&model); 7541 7542 ASSERT_DUMP_EQ (model, false, 7543 "r0: {kind: `root', parent: null, sval: null}\n" 7544 "|-stack: r1: {kind: `stack', parent: r0, sval: null}\n" 7545 "|-globals: r2: {kind: `globals', parent: r0, sval: null, map: {}}\n" 7546 "`-heap: r3: {kind: `heap', parent: r0, sval: null}\n" 7547 "svalues:\n" 7548 "constraint manager:\n" 7549 " equiv classes:\n" 7550 " constraints:\n"); 7551 ASSERT_DUMP_EQ (model, true, ""); 7552 } 7553 7554 /* Helper function for selftests. Create a struct or union type named NAME, 7555 with the fields given by the FIELD_DECLS in FIELDS. 7556 If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise 7557 create a UNION_TYPE. */ 7558 7559 static tree 7560 make_test_compound_type (const char *name, bool is_struct, 7561 const auto_vec<tree> *fields) 7562 { 7563 tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE); 7564 TYPE_NAME (t) = get_identifier (name); 7565 TYPE_SIZE (t) = 0; 7566 7567 tree fieldlist = NULL; 7568 int i; 7569 tree field; 7570 FOR_EACH_VEC_ELT (*fields, i, field) 7571 { 7572 gcc_assert (TREE_CODE (field) == FIELD_DECL); 7573 DECL_CONTEXT (field) = t; 7574 fieldlist = chainon (field, fieldlist); 7575 } 7576 fieldlist = nreverse (fieldlist); 7577 TYPE_FIELDS (t) = fieldlist; 7578 7579 layout_type (t); 7580 return t; 7581 } 7582 7583 /* Selftest fixture for creating the type "struct coord {int x; int y; };". */ 7584 7585 struct coord_test 7586 { 7587 coord_test () 7588 { 7589 auto_vec<tree> fields; 7590 m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, 7591 get_identifier ("x"), integer_type_node); 7592 fields.safe_push (m_x_field); 7593 m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, 7594 get_identifier ("y"), integer_type_node); 7595 fields.safe_push (m_y_field); 7596 m_coord_type = make_test_compound_type ("coord", true, &fields); 7597 } 7598 7599 tree m_x_field; 7600 tree m_y_field; 7601 tree m_coord_type; 7602 }; 7603 7604 /* Verify that dumps can show struct fields. */ 7605 7606 static void 7607 test_dump_2 () 7608 { 7609 coord_test ct; 7610 7611 tree c = build_global_decl ("c", ct.m_coord_type); 7612 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field), 7613 c, ct.m_x_field, NULL_TREE); 7614 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field), 7615 c, ct.m_y_field, NULL_TREE); 7616 7617 tree int_17 = build_int_cst (integer_type_node, 17); 7618 tree int_m3 = build_int_cst (integer_type_node, -3); 7619 7620 region_model model; 7621 model.set_value (c_x, int_17, NULL); 7622 model.set_value (c_y, int_m3, NULL); 7623 7624 /* Simplified dump. 
*/ 7625 ASSERT_DUMP_EQ (model, true, "c.x: 17, c.y: -3"); 7626 7627 /* Full dump. */ 7628 ASSERT_DUMP_EQ 7629 (model, false, 7630 "r0: {kind: `root', parent: null, sval: null}\n" 7631 "`-globals: r1: {kind: `globals', parent: r0, sval: null, map: {`c': r2}}\n" 7632 " `-`c': r2: {kind: `struct', parent: r1, sval: null, type: `struct coord', map: {`x': r3, `y': r4}}\n" 7633 " |: type: `struct coord'\n" 7634 " |-`x': r3: {kind: `primitive', parent: r2, sval: sv0, type: `int'}\n" 7635 " | |: sval: sv0: {type: `int', `17'}\n" 7636 " | |: type: `int'\n" 7637 " `-`y': r4: {kind: `primitive', parent: r2, sval: sv1, type: `int'}\n" 7638 " |: sval: sv1: {type: `int', `-3'}\n" 7639 " |: type: `int'\n" 7640 "svalues:\n" 7641 " sv0: {type: `int', `17'}\n" 7642 " sv1: {type: `int', `-3'}\n" 7643 "constraint manager:\n" 7644 " equiv classes:\n" 7645 " constraints:\n"); 7646 } 7647 7648 /* Verify that dumps can show array elements. */ 7649 7650 static void 7651 test_dump_3 () 7652 { 7653 tree tlen = size_int (10); 7654 tree arr_type = build_array_type (char_type_node, build_index_type (tlen)); 7655 7656 tree a = build_global_decl ("a", arr_type); 7657 7658 region_model model; 7659 tree int_0 = build_int_cst (integer_type_node, 0); 7660 tree a_0 = build4 (ARRAY_REF, char_type_node, 7661 a, int_0, NULL_TREE, NULL_TREE); 7662 tree char_A = build_int_cst (char_type_node, 'A'); 7663 model.set_value (a_0, char_A, NULL); 7664 7665 /* Simplified dump. */ 7666 ASSERT_DUMP_EQ (model, true, "a[0]: 65"); 7667 7668 /* Full dump. */ 7669 ASSERT_DUMP_EQ 7670 (model, false, 7671 "r0: {kind: `root', parent: null, sval: null}\n" 7672 "`-globals: r1: {kind: `globals', parent: r0, sval: null, map: {`a': r2}}\n" 7673 " `-`a': r2: {kind: `array', parent: r1, sval: null, type: `char[11]', array: {[0]: r3}}\n" 7674 " |: type: `char[11]'\n" 7675 " `-[0]: r3: {kind: `primitive', parent: r2, sval: sv1, type: `char'}\n" 7676 " |: sval: sv1: {type: `char', `65'}\n" 7677 " |: type: `char'\n" 7678 "svalues:\n" 7679 " sv0: {type: `int', `0'}\n" 7680 " sv1: {type: `char', `65'}\n" 7681 "constraint manager:\n" 7682 " equiv classes:\n" 7683 " constraints:\n"); 7684 } 7685 7686 /* Verify that region_model::get_representative_tree works as expected. */ 7687 7688 static void 7689 test_get_representative_tree () 7690 { 7691 /* STRING_CST. */ 7692 { 7693 tree string_cst = build_string (4, "foo"); 7694 region_model m; 7695 svalue_id str_sid = m.get_rvalue (string_cst, NULL); 7696 tree rep = m.get_representative_tree (str_sid); 7697 ASSERT_EQ (rep, string_cst); 7698 } 7699 7700 /* String literal. */ 7701 { 7702 tree string_cst_ptr = build_string_literal (4, "foo"); 7703 region_model m; 7704 svalue_id str_sid = m.get_rvalue (string_cst_ptr, NULL); 7705 tree rep = m.get_representative_tree (str_sid); 7706 ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]"); 7707 } 7708 } 7709 7710 /* Verify that calling region_model::get_rvalue repeatedly on the same 7711 tree constant retrieves the same svalue_id. 
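and that distinct constants yield distinct svalue_ids.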
*/ 7712 7713 static void 7714 test_unique_constants () 7715 { 7716 tree int_0 = build_int_cst (integer_type_node, 0); 7717 tree int_42 = build_int_cst (integer_type_node, 42); 7718 7719 test_region_model_context ctxt; 7720 region_model model; 7721 ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt)); 7722 ASSERT_EQ (model.get_rvalue (int_42, &ctxt), 7723 model.get_rvalue (int_42, &ctxt)); 7724 ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt)); 7725 ASSERT_EQ (ctxt.get_num_diagnostics (), 0); 7726 } 7727 7728 /* Check that operator== and hashing works as expected for the 7729 various svalue subclasses. */ 7730 7731 static void 7732 test_svalue_equality () 7733 { 7734 tree int_42 = build_int_cst (integer_type_node, 42); 7735 tree int_0 = build_int_cst (integer_type_node, 0); 7736 7737 /* Create pairs instances of the various subclasses of svalue, 7738 testing for hash and equality between (this, this) and 7739 (this, other of same subclass). */ 7740 svalue *ptr_to_r0 7741 = new region_svalue (ptr_type_node, region_id::from_int (0)); 7742 svalue *ptr_to_r1 7743 = new region_svalue (ptr_type_node, region_id::from_int (1)); 7744 7745 ASSERT_EQ (ptr_to_r0->hash (), ptr_to_r0->hash ()); 7746 ASSERT_EQ (*ptr_to_r0, *ptr_to_r0); 7747 7748 ASSERT_NE (ptr_to_r0->hash (), ptr_to_r1->hash ()); 7749 ASSERT_NE (*ptr_to_r0, *ptr_to_r1); 7750 7751 svalue *cst_int_42 = new constant_svalue (int_42); 7752 svalue *cst_int_0 = new constant_svalue (int_0); 7753 7754 ASSERT_EQ (cst_int_42->hash (), cst_int_42->hash ()); 7755 ASSERT_EQ (*cst_int_42, *cst_int_42); 7756 7757 ASSERT_NE (cst_int_42->hash (), cst_int_0->hash ()); 7758 ASSERT_NE (*cst_int_42, *cst_int_0); 7759 7760 svalue *unknown_0 = new unknown_svalue (ptr_type_node); 7761 svalue *unknown_1 = new unknown_svalue (ptr_type_node); 7762 ASSERT_EQ (unknown_0->hash (), unknown_0->hash ()); 7763 ASSERT_EQ (*unknown_0, *unknown_0); 7764 ASSERT_EQ (*unknown_1, *unknown_1); 7765 7766 /* Comparisons between different kinds of svalue. */ 7767 ASSERT_NE (*ptr_to_r0, *cst_int_42); 7768 ASSERT_NE (*ptr_to_r0, *unknown_0); 7769 ASSERT_NE (*cst_int_42, *ptr_to_r0); 7770 ASSERT_NE (*cst_int_42, *unknown_0); 7771 ASSERT_NE (*unknown_0, *ptr_to_r0); 7772 ASSERT_NE (*unknown_0, *cst_int_42); 7773 7774 delete ptr_to_r0; 7775 delete ptr_to_r1; 7776 delete cst_int_42; 7777 delete cst_int_0; 7778 delete unknown_0; 7779 delete unknown_1; 7780 } 7781 7782 /* Check that operator== and hashing works as expected for the 7783 various region subclasses. */ 7784 7785 static void 7786 test_region_equality () 7787 { 7788 region *r0 7789 = new primitive_region (region_id::from_int (3), integer_type_node); 7790 region *r1 7791 = new primitive_region (region_id::from_int (4), integer_type_node); 7792 7793 ASSERT_EQ (*r0, *r0); 7794 ASSERT_EQ (r0->hash (), r0->hash ()); 7795 ASSERT_NE (*r0, *r1); 7796 ASSERT_NE (r0->hash (), r1->hash ()); 7797 7798 delete r0; 7799 delete r1; 7800 7801 // TODO: test coverage for the map within a map_region 7802 } 7803 7804 /* A subclass of purge_criteria for selftests: purge all svalue_id instances. */ 7805 7806 class purge_all_svalue_ids : public purge_criteria 7807 { 7808 public: 7809 bool should_purge_p (svalue_id) const FINAL OVERRIDE 7810 { 7811 return true; 7812 } 7813 }; 7814 7815 /* A subclass of purge_criteria: purge a specific svalue_id. 
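specified either directly, or by looking up the rvalue of an expression within a given region_model.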
*/ 7816 7817 class purge_one_svalue_id : public purge_criteria 7818 { 7819 public: 7820 purge_one_svalue_id (svalue_id victim) : m_victim (victim) {} 7821 7822 purge_one_svalue_id (region_model model, tree expr) 7823 : m_victim (model.get_rvalue (expr, NULL)) {} 7824 7825 bool should_purge_p (svalue_id sid) const FINAL OVERRIDE 7826 { 7827 return sid == m_victim; 7828 } 7829 7830 private: 7831 svalue_id m_victim; 7832 }; 7833 7834 /* Check that constraint_manager::purge works for individual svalue_ids. */ 7835 7836 static void 7837 test_purging_by_criteria () 7838 { 7839 tree int_42 = build_int_cst (integer_type_node, 42); 7840 tree int_0 = build_int_cst (integer_type_node, 0); 7841 7842 tree x = build_global_decl ("x", integer_type_node); 7843 tree y = build_global_decl ("y", integer_type_node); 7844 7845 { 7846 region_model model0; 7847 region_model model1; 7848 7849 ADD_SAT_CONSTRAINT (model1, x, EQ_EXPR, y); 7850 ASSERT_NE (model0, model1); 7851 7852 purge_stats stats_for_px; 7853 purge_one_svalue_id px (model1, x); 7854 model1.get_constraints ()->purge (px, &stats_for_px); 7855 ASSERT_EQ (stats_for_px.m_num_equiv_classes, 0); 7856 7857 purge_stats stats_for_py; 7858 purge_one_svalue_id py (model1.get_rvalue (y, NULL)); 7859 model1.get_constraints ()->purge (py, &stats_for_py); 7860 ASSERT_EQ (stats_for_py.m_num_equiv_classes, 1); 7861 7862 ASSERT_EQ (*model0.get_constraints (), *model1.get_constraints ()); 7863 } 7864 7865 { 7866 region_model model0; 7867 region_model model1; 7868 7869 ADD_SAT_CONSTRAINT (model1, x, EQ_EXPR, int_42); 7870 ASSERT_NE (model0, model1); 7871 ASSERT_CONDITION_TRUE (model1, x, EQ_EXPR, int_42); 7872 7873 purge_stats stats; 7874 model1.get_constraints ()->purge (purge_one_svalue_id (model1, x), &stats); 7875 7876 ASSERT_CONDITION_UNKNOWN (model1, x, EQ_EXPR, int_42); 7877 } 7878 7879 { 7880 region_model model0; 7881 region_model model1; 7882 7883 ADD_SAT_CONSTRAINT (model1, x, GE_EXPR, int_0); 7884 ADD_SAT_CONSTRAINT (model1, x, LE_EXPR, int_42); 7885 ASSERT_NE (model0, model1); 7886 7887 ASSERT_CONDITION_TRUE (model1, x, GE_EXPR, int_0); 7888 ASSERT_CONDITION_TRUE (model1, x, LE_EXPR, int_42); 7889 7890 purge_stats stats; 7891 model1.get_constraints ()->purge (purge_one_svalue_id (model1, x), &stats); 7892 7893 ASSERT_CONDITION_UNKNOWN (model1, x, GE_EXPR, int_0); 7894 ASSERT_CONDITION_UNKNOWN (model1, x, LE_EXPR, int_42); 7895 } 7896 7897 { 7898 region_model model0; 7899 region_model model1; 7900 7901 ADD_SAT_CONSTRAINT (model1, x, NE_EXPR, int_42); 7902 ADD_SAT_CONSTRAINT (model1, y, NE_EXPR, int_0); 7903 ASSERT_NE (model0, model1); 7904 ASSERT_CONDITION_TRUE (model1, x, NE_EXPR, int_42); 7905 ASSERT_CONDITION_TRUE (model1, y, NE_EXPR, int_0); 7906 7907 purge_stats stats; 7908 model1.get_constraints ()->purge (purge_one_svalue_id (model1, x), &stats); 7909 ASSERT_NE (model0, model1); 7910 7911 ASSERT_CONDITION_UNKNOWN (model1, x, NE_EXPR, int_42); 7912 ASSERT_CONDITION_TRUE (model1, y, NE_EXPR, int_0); 7913 } 7914 7915 { 7916 region_model model0; 7917 region_model model1; 7918 7919 ADD_SAT_CONSTRAINT (model1, x, NE_EXPR, int_42); 7920 ADD_SAT_CONSTRAINT (model1, y, NE_EXPR, int_0); 7921 ASSERT_NE (model0, model1); 7922 ASSERT_CONDITION_TRUE (model1, x, NE_EXPR, int_42); 7923 ASSERT_CONDITION_TRUE (model1, y, NE_EXPR, int_0); 7924 7925 purge_stats stats; 7926 model1.get_constraints ()->purge (purge_all_svalue_ids (), &stats); 7927 ASSERT_CONDITION_UNKNOWN (model1, x, NE_EXPR, int_42); 7928 ASSERT_CONDITION_UNKNOWN (model1, y, NE_EXPR, int_0); 7929 } 7930 
7931 } 7932 7933 /* Test that region_model::purge_unused_svalues works as expected. */ 7934 7935 static void 7936 test_purge_unused_svalues () 7937 { 7938 tree int_42 = build_int_cst (integer_type_node, 42); 7939 tree int_0 = build_int_cst (integer_type_node, 0); 7940 tree x = build_global_decl ("x", integer_type_node); 7941 tree y = build_global_decl ("y", integer_type_node); 7942 7943 test_region_model_context ctxt; 7944 region_model model; 7945 model.set_to_new_unknown_value (model.get_lvalue (x, &ctxt), TREE_TYPE (x), 7946 &ctxt); 7947 model.set_to_new_unknown_value (model.get_lvalue (x, &ctxt), TREE_TYPE (x), 7948 &ctxt); 7949 model.set_to_new_unknown_value (model.get_lvalue (x, &ctxt), TREE_TYPE (x), 7950 &ctxt); 7951 model.add_constraint (x, NE_EXPR, int_42, &ctxt); 7952 7953 model.set_value (model.get_lvalue (x, &ctxt), 7954 model.get_rvalue (int_42, &ctxt), 7955 &ctxt); 7956 model.add_constraint (y, GT_EXPR, int_0, &ctxt); 7957 7958 /* The redundant unknown values should have been purged. */ 7959 purge_stats purged; 7960 model.purge_unused_svalues (&purged, NULL); 7961 ASSERT_EQ (purged.m_num_svalues, 3); 7962 7963 /* and the redundant constraint on an old, unknown value for x should 7964 have been purged. */ 7965 ASSERT_EQ (purged.m_num_equiv_classes, 1); 7966 ASSERT_EQ (purged.m_num_constraints, 1); 7967 ASSERT_EQ (model.get_constraints ()->m_constraints.length (), 2); 7968 7969 /* ...but we should still have x == 42. */ 7970 ASSERT_EQ (model.eval_condition (x, EQ_EXPR, int_42, &ctxt), 7971 tristate::TS_TRUE); 7972 7973 /* ...and we should still have the constraint on y. */ 7974 ASSERT_EQ (model.eval_condition (y, GT_EXPR, int_0, &ctxt), 7975 tristate::TS_TRUE); 7976 7977 ASSERT_EQ (ctxt.get_num_diagnostics (), 0); 7978 } 7979 7980 /* Verify that simple assignments work as expected. */ 7981 7982 static void 7983 test_assignment () 7984 { 7985 tree int_0 = build_int_cst (integer_type_node, 0); 7986 tree x = build_global_decl ("x", integer_type_node); 7987 tree y = build_global_decl ("y", integer_type_node); 7988 7989 /* "x == 0", then use of y, then "y = 0;". */ 7990 region_model model; 7991 ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0); 7992 ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0); 7993 model.set_value (model.get_lvalue (y, NULL), 7994 model.get_rvalue (int_0, NULL), 7995 NULL); 7996 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0); 7997 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x); 7998 7999 ASSERT_DUMP_EQ (model, true, "y: 0, {x}: unknown, x == y"); 8000 } 8001 8002 /* Verify that compound assignments work as expected. */ 8003 8004 static void 8005 test_compound_assignment () 8006 { 8007 coord_test ct; 8008 8009 tree c = build_global_decl ("c", ct.m_coord_type); 8010 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field), 8011 c, ct.m_x_field, NULL_TREE); 8012 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field), 8013 c, ct.m_y_field, NULL_TREE); 8014 tree d = build_global_decl ("d", ct.m_coord_type); 8015 tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field), 8016 d, ct.m_x_field, NULL_TREE); 8017 tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field), 8018 d, ct.m_y_field, NULL_TREE); 8019 8020 tree int_17 = build_int_cst (integer_type_node, 17); 8021 tree int_m3 = build_int_cst (integer_type_node, -3); 8022 8023 region_model model; 8024 model.set_value (c_x, int_17, NULL); 8025 model.set_value (c_y, int_m3, NULL); 8026 8027 ASSERT_DUMP_EQ (model, true, "c.x: 17, c.y: -3"); 8028 8029 /* Copy c to d. 
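   This models (approximately) the effect of a whole-struct assignment:
     struct coord c, d;
     d = c;
   copying the value of each field from one region to the other.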
*/ 8030 model.copy_region (model.get_lvalue (d, NULL), model.get_lvalue (c, NULL), 8031 NULL); 8032 /* Check that the fields have the same svalues. */ 8033 ASSERT_EQ (model.get_rvalue (c_x, NULL), model.get_rvalue (d_x, NULL)); 8034 ASSERT_EQ (model.get_rvalue (c_y, NULL), model.get_rvalue (d_y, NULL)); 8035 } 8036 8037 /* Verify the details of pushing and popping stack frames. */ 8038 8039 static void 8040 test_stack_frames () 8041 { 8042 tree int_42 = build_int_cst (integer_type_node, 42); 8043 tree int_10 = build_int_cst (integer_type_node, 10); 8044 tree int_5 = build_int_cst (integer_type_node, 5); 8045 tree int_0 = build_int_cst (integer_type_node, 0); 8046 8047 auto_vec <tree> param_types; 8048 tree parent_fndecl = make_fndecl (integer_type_node, 8049 "parent_fn", 8050 param_types); 8051 allocate_struct_function (parent_fndecl, true); 8052 8053 tree child_fndecl = make_fndecl (integer_type_node, 8054 "child_fn", 8055 param_types); 8056 allocate_struct_function (child_fndecl, true); 8057 8058 /* "a" and "b" in the parent frame. */ 8059 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL, 8060 get_identifier ("a"), 8061 integer_type_node); 8062 tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL, 8063 get_identifier ("b"), 8064 integer_type_node); 8065 /* "x" and "y" in a child frame. */ 8066 tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL, 8067 get_identifier ("x"), 8068 integer_type_node); 8069 tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL, 8070 get_identifier ("y"), 8071 integer_type_node); 8072 8073 /* "p" global. */ 8074 tree p = build_global_decl ("p", ptr_type_node); 8075 8076 /* "q" global. */ 8077 tree q = build_global_decl ("q", ptr_type_node); 8078 8079 test_region_model_context ctxt; 8080 region_model model; 8081 8082 /* Push stack frame for "parent_fn". */ 8083 region_id parent_frame_rid 8084 = model.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl), NULL, &ctxt); 8085 ASSERT_EQ (model.get_current_frame_id (), parent_frame_rid); 8086 region_id a_in_parent_rid = model.get_lvalue (a, &ctxt); 8087 model.set_value (a_in_parent_rid, model.get_rvalue (int_42, &ctxt), &ctxt); 8088 model.set_to_new_unknown_value (model.get_lvalue (b, &ctxt), 8089 integer_type_node, &ctxt); 8090 model.add_constraint (b, LT_EXPR, int_10, &ctxt); 8091 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt), 8092 tristate (tristate::TS_TRUE)); 8093 8094 /* Push stack frame for "child_fn". */ 8095 region_id child_frame_rid 8096 = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt); 8097 ASSERT_EQ (model.get_current_frame_id (), child_frame_rid); 8098 region_id x_in_child_rid = model.get_lvalue (x, &ctxt); 8099 model.set_value (x_in_child_rid, model.get_rvalue (int_0, &ctxt), &ctxt); 8100 model.set_to_new_unknown_value (model.get_lvalue (y, &ctxt), 8101 integer_type_node, &ctxt); 8102 model.add_constraint (y, NE_EXPR, int_5, &ctxt); 8103 ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt), 8104 tristate (tristate::TS_TRUE)); 8105 8106 /* Point a global pointer at a local in the child frame: p = &x. */ 8107 region_id p_in_globals_rid = model.get_lvalue (p, &ctxt); 8108 model.set_value (p_in_globals_rid, 8109 model.get_or_create_ptr_svalue (ptr_type_node, 8110 x_in_child_rid), 8111 &ctxt); 8112 8113 /* Point another global pointer at p: q = &p. */ 8114 region_id q_in_globals_rid = model.get_lvalue (q, &ctxt); 8115 model.set_value (q_in_globals_rid, 8116 model.get_or_create_ptr_svalue (ptr_type_node, 8117 p_in_globals_rid), 8118 &ctxt); 8119 8120 /* Test get_descendents. 
*/ 8121 region_id_set descendents (&model); 8122 model.get_descendents (child_frame_rid, &descendents, region_id::null ()); 8123 ASSERT_TRUE (descendents.region_p (child_frame_rid)); 8124 ASSERT_TRUE (descendents.region_p (x_in_child_rid)); 8125 ASSERT_FALSE (descendents.region_p (a_in_parent_rid)); 8126 ASSERT_EQ (descendents.num_regions (), 3); 8127 #if 0 8128 auto_vec<region_id> test_vec; 8129 for (region_id_set::iterator_t iter = descendents.begin (); 8130 iter != descendents.end (); 8131 ++iter) 8132 test_vec.safe_push (*iter); 8133 gcc_unreachable (); // TODO 8134 //ASSERT_EQ (); 8135 #endif 8136 8137 ASSERT_DUMP_EQ (model, true, 8138 "a: 42, x: 0, p: &x, q: &p, {b, y}: unknown, b < 10, y != 5"); 8139 8140 /* Pop the "child_fn" frame from the stack. */ 8141 purge_stats purged; 8142 model.pop_frame (region_id::null (), true, &purged, &ctxt); 8143 8144 /* We should have purged the unknown values for x and y. */ 8145 ASSERT_EQ (purged.m_num_svalues, 2); 8146 8147 /* We should have purged the frame region and the regions for x and y. */ 8148 ASSERT_EQ (purged.m_num_regions, 3); 8149 8150 /* We should have purged the constraint on y. */ 8151 ASSERT_EQ (purged.m_num_equiv_classes, 1); 8152 ASSERT_EQ (purged.m_num_constraints, 1); 8153 8154 /* Verify that p (which was pointing at the local "x" in the popped 8155 frame) has been poisoned. */ 8156 svalue *new_p_sval = model.get_svalue (model.get_rvalue (p, &ctxt)); 8157 ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED); 8158 ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (), 8159 POISON_KIND_POPPED_STACK); 8160 8161 /* Verify that q still points to p, in spite of the region 8162 renumbering. */ 8163 svalue *new_q_sval = model.get_svalue (model.get_rvalue (q, &ctxt)); 8164 ASSERT_EQ (new_q_sval->get_kind (), SK_REGION); 8165 ASSERT_EQ (new_q_sval->dyn_cast_region_svalue ()->get_pointee (), 8166 model.get_lvalue (p, &ctxt)); 8167 8168 /* Verify that top of stack has been updated. */ 8169 ASSERT_EQ (model.get_current_frame_id (), parent_frame_rid); 8170 8171 /* Verify locals in parent frame. */ 8172 /* Verify "a" still has its value. */ 8173 svalue *new_a_sval = model.get_svalue (model.get_rvalue (a, &ctxt)); 8174 ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT); 8175 ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (), 8176 int_42); 8177 /* Verify "b" still has its constraint. */ 8178 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt), 8179 tristate (tristate::TS_TRUE)); 8180 } 8181 8182 /* Verify that get_representative_path_var works as expected, that 8183 we can map from region ids to parms and back within a recursive call 8184 stack. */ 8185 8186 static void 8187 test_get_representative_path_var () 8188 { 8189 auto_vec <tree> param_types; 8190 tree fndecl = make_fndecl (integer_type_node, 8191 "factorial", 8192 param_types); 8193 allocate_struct_function (fndecl, true); 8194 8195 /* Parm "n". 
*/ 8196 tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL, 8197 get_identifier ("n"), 8198 integer_type_node); 8199 8200 region_model model; 8201 8202 /* Push 5 stack frames for "factorial", each with a param */ 8203 auto_vec<region_id> parm_rids; 8204 auto_vec<svalue_id> parm_sids; 8205 for (int depth = 0; depth < 5; depth++) 8206 { 8207 region_id frame_rid 8208 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl), NULL, NULL); 8209 region_id rid_n = model.get_lvalue (path_var (n, depth), NULL); 8210 parm_rids.safe_push (rid_n); 8211 8212 ASSERT_EQ (model.get_region (rid_n)->get_parent (), frame_rid); 8213 8214 svalue_id sid_n 8215 = model.set_to_new_unknown_value (rid_n, integer_type_node, NULL); 8216 parm_sids.safe_push (sid_n); 8217 } 8218 8219 /* Verify that we can recognize that the regions are the parms, 8220 at every depth. */ 8221 for (int depth = 0; depth < 5; depth++) 8222 { 8223 ASSERT_EQ (model.get_representative_path_var (parm_rids[depth]), 8224 path_var (n, depth)); 8225 /* ...and that we can lookup lvalues for locals for all frames, 8226 not just the top. */ 8227 ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL), 8228 parm_rids[depth]); 8229 /* ...and that we can locate the svalues. */ 8230 auto_vec<path_var> pvs; 8231 model.get_path_vars_for_svalue (parm_sids[depth], &pvs); 8232 ASSERT_EQ (pvs.length (), 1); 8233 ASSERT_EQ (pvs[0], path_var (n, depth)); 8234 } 8235 } 8236 8237 /* Verify that the core regions within a region_model are in a consistent 8238 order after canonicalization. */ 8239 8240 static void 8241 test_canonicalization_1 () 8242 { 8243 region_model model0; 8244 model0.get_root_region ()->ensure_stack_region (&model0); 8245 model0.get_root_region ()->ensure_globals_region (&model0); 8246 8247 region_model model1; 8248 model1.get_root_region ()->ensure_globals_region (&model1); 8249 model1.get_root_region ()->ensure_stack_region (&model1); 8250 8251 model0.canonicalize (NULL); 8252 model1.canonicalize (NULL); 8253 ASSERT_EQ (model0, model1); 8254 } 8255 8256 /* Verify that region models for 8257 x = 42; y = 113; 8258 and 8259 y = 113; x = 42; 8260 are equal after canonicalization. */ 8261 8262 static void 8263 test_canonicalization_2 () 8264 { 8265 tree int_42 = build_int_cst (integer_type_node, 42); 8266 tree int_113 = build_int_cst (integer_type_node, 113); 8267 tree x = build_global_decl ("x", integer_type_node); 8268 tree y = build_global_decl ("y", integer_type_node); 8269 8270 region_model model0; 8271 model0.set_value (model0.get_lvalue (x, NULL), 8272 model0.get_rvalue (int_42, NULL), 8273 NULL); 8274 model0.set_value (model0.get_lvalue (y, NULL), 8275 model0.get_rvalue (int_113, NULL), 8276 NULL); 8277 8278 region_model model1; 8279 model1.set_value (model1.get_lvalue (y, NULL), 8280 model1.get_rvalue (int_113, NULL), 8281 NULL); 8282 model1.set_value (model1.get_lvalue (x, NULL), 8283 model1.get_rvalue (int_42, NULL), 8284 NULL); 8285 8286 model0.canonicalize (NULL); 8287 model1.canonicalize (NULL); 8288 ASSERT_EQ (model0, model1); 8289 } 8290 8291 /* Verify that constraints for 8292 x > 3 && y > 42 8293 and 8294 y > 42 && x > 3 8295 are equal after canonicalization. 
*/ 8296 8297 static void 8298 test_canonicalization_3 () 8299 { 8300 tree int_3 = build_int_cst (integer_type_node, 3); 8301 tree int_42 = build_int_cst (integer_type_node, 42); 8302 tree x = build_global_decl ("x", integer_type_node); 8303 tree y = build_global_decl ("y", integer_type_node); 8304 8305 region_model model0; 8306 model0.add_constraint (x, GT_EXPR, int_3, NULL); 8307 model0.add_constraint (y, GT_EXPR, int_42, NULL); 8308 8309 region_model model1; 8310 model1.add_constraint (y, GT_EXPR, int_42, NULL); 8311 model1.add_constraint (x, GT_EXPR, int_3, NULL); 8312 8313 model0.canonicalize (NULL); 8314 model1.canonicalize (NULL); 8315 ASSERT_EQ (model0, model1); 8316 } 8317 8318 /* Verify that we can canonicalize a model containing NaN and other real 8319 constants. */ 8320 8321 static void 8322 test_canonicalization_4 () 8323 { 8324 auto_vec<tree> csts; 8325 append_interesting_constants (&csts); 8326 8327 region_model model; 8328 8329 unsigned i; 8330 tree cst; 8331 FOR_EACH_VEC_ELT (csts, i, cst) 8332 model.get_rvalue (cst, NULL); 8333 8334 model.canonicalize (NULL); 8335 } 8336 8337 /* Assert that if we have two region_model instances 8338 with values VAL_A and VAL_B for EXPR that they are 8339 mergable. Write the merged model to *OUT_MERGED_MODEL, 8340 and the merged svalue ptr to *OUT_MERGED_SVALUE. 8341 If VAL_A or VAL_B are NULL_TREE, don't populate EXPR 8342 for that region_model. */ 8343 8344 static void 8345 assert_region_models_merge (tree expr, tree val_a, tree val_b, 8346 region_model *out_merged_model, 8347 svalue **out_merged_svalue) 8348 { 8349 test_region_model_context ctxt; 8350 region_model model0; 8351 region_model model1; 8352 if (val_a) 8353 model0.set_value (model0.get_lvalue (expr, &ctxt), 8354 model0.get_rvalue (val_a, &ctxt), 8355 &ctxt); 8356 if (val_b) 8357 model1.set_value (model1.get_lvalue (expr, &ctxt), 8358 model1.get_rvalue (val_b, &ctxt), 8359 &ctxt); 8360 8361 /* They should be mergeable. */ 8362 ASSERT_TRUE (model0.can_merge_with_p (model1, out_merged_model)); 8363 8364 svalue_id merged_svalue_sid = out_merged_model->get_rvalue (expr, &ctxt); 8365 *out_merged_svalue = out_merged_model->get_svalue (merged_svalue_sid); 8366 } 8367 8368 /* Verify that we can merge region_model instances. */ 8369 8370 static void 8371 test_state_merging () 8372 { 8373 tree int_42 = build_int_cst (integer_type_node, 42); 8374 tree int_113 = build_int_cst (integer_type_node, 113); 8375 tree x = build_global_decl ("x", integer_type_node); 8376 tree y = build_global_decl ("y", integer_type_node); 8377 tree z = build_global_decl ("z", integer_type_node); 8378 tree p = build_global_decl ("p", ptr_type_node); 8379 8380 tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y); 8381 tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z); 8382 8383 auto_vec <tree> param_types; 8384 tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types); 8385 allocate_struct_function (test_fndecl, true); 8386 8387 /* Param "a". */ 8388 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL, 8389 get_identifier ("a"), 8390 integer_type_node); 8391 tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a); 8392 8393 /* Param "q", a pointer. */ 8394 tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL, 8395 get_identifier ("q"), 8396 ptr_type_node); 8397 8398 { 8399 region_model model0; 8400 region_model model1; 8401 region_model merged; 8402 /* Verify empty models can be merged. 
*/ 8403 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged)); 8404 ASSERT_EQ (model0, merged); 8405 } 8406 8407 /* Verify that we can merge two contradictory constraints on the 8408 value for a global. */ 8409 /* TODO: verify that the merged model doesn't have a value for 8410 the global */ 8411 { 8412 region_model model0; 8413 region_model model1; 8414 region_model merged; 8415 test_region_model_context ctxt; 8416 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt); 8417 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt); 8418 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged)); 8419 ASSERT_NE (model0, merged); 8420 ASSERT_NE (model1, merged); 8421 } 8422 8423 /* Verify handling of a PARM_DECL. */ 8424 { 8425 test_region_model_context ctxt; 8426 region_model model0; 8427 region_model model1; 8428 ASSERT_EQ (model0.get_stack_depth (), 0); 8429 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt); 8430 ASSERT_EQ (model0.get_stack_depth (), 1); 8431 ASSERT_EQ (model0.get_function_at_depth (0), 8432 DECL_STRUCT_FUNCTION (test_fndecl)); 8433 model1.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt); 8434 8435 svalue_id sid_a 8436 = model0.set_to_new_unknown_value (model0.get_lvalue (a, &ctxt), 8437 integer_type_node, &ctxt); 8438 model1.set_to_new_unknown_value (model1.get_lvalue (a, &ctxt), 8439 integer_type_node, &ctxt); 8440 ASSERT_EQ (model0, model1); 8441 8442 /* Check that get_value_by_name works for locals. */ 8443 ASSERT_EQ (model0.get_value_by_name ("a"), sid_a); 8444 8445 /* They should be mergeable, and the result should be the same. */ 8446 region_model merged; 8447 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged)); 8448 ASSERT_EQ (model0, merged); 8449 /* In particular, there should be an unknown value for "a". */ 8450 svalue *merged_a_sval = merged.get_svalue (merged.get_rvalue (a, &ctxt)); 8451 ASSERT_EQ (merged_a_sval->get_kind (), SK_UNKNOWN); 8452 } 8453 8454 /* Verify handling of a global. */ 8455 { 8456 test_region_model_context ctxt; 8457 region_model model0; 8458 region_model model1; 8459 svalue_id sid_x 8460 = model0.set_to_new_unknown_value (model0.get_lvalue (x, &ctxt), 8461 integer_type_node, &ctxt); 8462 model1.set_to_new_unknown_value (model1.get_lvalue (x, &ctxt), 8463 integer_type_node, &ctxt); 8464 ASSERT_EQ (model0, model1); 8465 8466 /* Check that get_value_by_name works for globals. */ 8467 ASSERT_EQ (model0.get_value_by_name ("x"), sid_x); 8468 8469 /* They should be mergeable, and the result should be the same. */ 8470 region_model merged; 8471 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged)); 8472 ASSERT_EQ (model0, merged); 8473 /* In particular, there should be an unknown value for "x". */ 8474 svalue *merged_x_sval = merged.get_svalue (merged.get_rvalue (x, &ctxt)); 8475 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN); 8476 } 8477 8478 /* Use global-handling to verify various combinations of values. */ 8479 8480 /* Two equal constant values. */ 8481 { 8482 region_model merged; 8483 svalue *merged_x_sval; 8484 assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval); 8485 8486 /* In particular, there should be a constant value for "x". */ 8487 ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT); 8488 ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (), 8489 int_42); 8490 } 8491 8492 /* Two non-equal constant values. 
*/ 8493 { 8494 region_model merged; 8495 svalue *merged_x_sval; 8496 assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval); 8497 8498 /* In particular, there should be an unknown value for "x". */ 8499 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN); 8500 } 8501 8502 /* Uninit and constant. */ 8503 { 8504 region_model merged; 8505 svalue *merged_x_sval; 8506 assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval); 8507 8508 /* In particular, there should be an unknown value for "x". */ 8509 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN); 8510 } 8511 8512 /* Constant and uninit. */ 8513 { 8514 region_model merged; 8515 svalue *merged_x_sval; 8516 assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval); 8517 8518 /* In particular, there should be an unknown value for "x". */ 8519 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN); 8520 } 8521 8522 /* Unknown and constant. */ 8523 // TODO 8524 8525 /* Pointers: NULL and NULL. */ 8526 // TODO 8527 8528 /* Pointers: NULL and non-NULL. */ 8529 // TODO 8530 8531 /* Pointers: non-NULL and non-NULL: ptr to a local. */ 8532 { 8533 region_model model0; 8534 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL); 8535 model0.set_to_new_unknown_value (model0.get_lvalue (a, NULL), 8536 integer_type_node, NULL); 8537 model0.set_value (model0.get_lvalue (p, NULL), 8538 model0.get_rvalue (addr_of_a, NULL), NULL); 8539 8540 region_model model1 (model0); 8541 ASSERT_EQ (model0, model1); 8542 8543 /* They should be mergeable, and the result should be the same. */ 8544 region_model merged; 8545 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged)); 8546 ASSERT_EQ (model0, merged); 8547 } 8548 8549 /* Pointers: non-NULL and non-NULL: ptr to a global. */ 8550 { 8551 region_model merged; 8552 /* p == &y in both input models. */ 8553 svalue *merged_p_sval; 8554 assert_region_models_merge (p, addr_of_y, addr_of_y, &merged, 8555 &merged_p_sval); 8556 8557 /* We should get p == &y in the merged model. */ 8558 ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION); 8559 region_svalue *merged_p_ptr = merged_p_sval->dyn_cast_region_svalue (); 8560 region_id merged_p_star_rid = merged_p_ptr->get_pointee (); 8561 ASSERT_EQ (merged_p_star_rid, merged.get_lvalue (y, NULL)); 8562 } 8563 8564 /* Pointers: non-NULL ptrs to different globals: should be unknown. */ 8565 { 8566 region_model merged; 8567 /* x == &y vs x == &z in the input models. */ 8568 svalue *merged_x_sval; 8569 assert_region_models_merge (x, addr_of_y, addr_of_z, &merged, 8570 &merged_x_sval); 8571 8572 /* We should get x == unknown in the merged model. */ 8573 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN); 8574 } 8575 8576 /* Pointers: non-NULL and non-NULL: ptr to a heap region. */ 8577 { 8578 test_region_model_context ctxt; 8579 region_model model0; 8580 region_id new_rid = model0.add_new_malloc_region (); 8581 svalue_id ptr_sid 8582 = model0.get_or_create_ptr_svalue (ptr_type_node, new_rid); 8583 model0.set_value (model0.get_lvalue (p, &ctxt), 8584 ptr_sid, &ctxt); 8585 model0.canonicalize (&ctxt); 8586 8587 region_model model1 (model0); 8588 8589 ASSERT_EQ (model0, model1); 8590 8591 region_model merged; 8592 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged)); 8593 8594 merged.canonicalize (&ctxt); 8595 8596 /* The merged model ought to be identical (after canonicalization, 8597 at least). 
*/ 8598 ASSERT_EQ (model0, merged); 8599 } 8600 8601 /* Two regions sharing the same unknown svalue should continue sharing 8602 an unknown svalue after self-merger. */ 8603 { 8604 test_region_model_context ctxt; 8605 region_model model0; 8606 svalue_id sid 8607 = model0.set_to_new_unknown_value (model0.get_lvalue (x, &ctxt), 8608 integer_type_node, &ctxt); 8609 model0.set_value (model0.get_lvalue (y, &ctxt), sid, &ctxt); 8610 region_model model1 (model0); 8611 8612 /* They should be mergeable, and the result should be the same. */ 8613 region_model merged; 8614 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged)); 8615 ASSERT_EQ (model0, merged); 8616 8617 /* In particular, we should have x == y. */ 8618 ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt), 8619 tristate (tristate::TS_TRUE)); 8620 } 8621 8622 #if 0 8623 { 8624 region_model model0; 8625 region_model model1; 8626 test_region_model_context ctxt; 8627 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt); 8628 model1.add_constraint (x, NE_EXPR, int_42, &ctxt); 8629 ASSERT_TRUE (model0.can_merge_with_p (model1)); 8630 } 8631 8632 { 8633 region_model model0; 8634 region_model model1; 8635 test_region_model_context ctxt; 8636 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt); 8637 model1.add_constraint (x, NE_EXPR, int_42, &ctxt); 8638 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt); 8639 ASSERT_TRUE (model0.can_merge_with_p (model1)); 8640 } 8641 #endif 8642 8643 // TODO: what can't we merge? need at least one such test 8644 8645 /* TODO: various things 8646 - heap regions 8647 - value merging: 8648 - every combination, but in particular 8649 - pairs of regions 8650 */ 8651 8652 /* Views. */ 8653 { 8654 test_region_model_context ctxt; 8655 region_model model0; 8656 8657 region_id x_rid = model0.get_lvalue (x, &ctxt); 8658 region_id x_as_ptr = model0.get_or_create_view (x_rid, ptr_type_node, 8659 &ctxt); 8660 model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt); 8661 8662 region_model model1 (model0); 8663 ASSERT_EQ (model1, model0); 8664 8665 /* They should be mergeable, and the result should be the same. */ 8666 region_model merged; 8667 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged)); 8668 } 8669 8670 /* Verify that we can merge a model in which a local in an older stack 8671 frame points to a local in a more recent stack frame. */ 8672 { 8673 region_model model0; 8674 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL); 8675 region_id q_in_first_frame = model0.get_lvalue (q, NULL); 8676 8677 /* Push a second frame. */ 8678 region_id rid_2nd_frame 8679 = model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL); 8680 8681 /* Have a pointer in the older frame point to a local in the 8682 more recent frame. */ 8683 svalue_id sid_ptr = model0.get_rvalue (addr_of_a, NULL); 8684 model0.set_value (q_in_first_frame, sid_ptr, NULL); 8685 8686 /* Verify that it's pointing at the newer frame. */ 8687 region_id rid_pointee 8688 = model0.get_svalue (sid_ptr)->dyn_cast_region_svalue ()->get_pointee (); 8689 ASSERT_EQ (model0.get_region (rid_pointee)->get_parent (), rid_2nd_frame); 8690 8691 model0.canonicalize (NULL); 8692 8693 region_model model1 (model0); 8694 ASSERT_EQ (model0, model1); 8695 8696 /* They should be mergeable, and the result should be the same 8697 (after canonicalization, at least). 
*/ 8698 region_model merged; 8699 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged)); 8700 merged.canonicalize (NULL); 8701 ASSERT_EQ (model0, merged); 8702 } 8703 8704 /* Verify that we can merge a model in which a local points to a global. */ 8705 { 8706 region_model model0; 8707 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL); 8708 model0.set_value (model0.get_lvalue (q, NULL), 8709 model0.get_rvalue (addr_of_y, NULL), NULL); 8710 8711 model0.canonicalize (NULL); 8712 8713 region_model model1 (model0); 8714 ASSERT_EQ (model0, model1); 8715 8716 /* They should be mergeable, and the result should be the same 8717 (after canonicalization, at least). */ 8718 region_model merged; 8719 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged)); 8720 merged.canonicalize (NULL); 8721 ASSERT_EQ (model0, merged); 8722 } 8723 } 8724 8725 /* Verify that constraints are correctly merged when merging region_model 8726 instances. */ 8727 8728 static void 8729 test_constraint_merging () 8730 { 8731 tree int_0 = build_int_cst (integer_type_node, 0); 8732 tree int_5 = build_int_cst (integer_type_node, 5); 8733 tree x = build_global_decl ("x", integer_type_node); 8734 tree y = build_global_decl ("y", integer_type_node); 8735 tree z = build_global_decl ("z", integer_type_node); 8736 tree n = build_global_decl ("n", integer_type_node); 8737 8738 test_region_model_context ctxt; 8739 8740 /* model0: 0 <= (x == y) < n. */ 8741 region_model model0; 8742 model0.set_to_new_unknown_value (model0.get_lvalue (x, &ctxt), 8743 integer_type_node, &ctxt); 8744 model0.add_constraint (x, EQ_EXPR, y, &ctxt); 8745 model0.add_constraint (x, GE_EXPR, int_0, NULL); 8746 model0.add_constraint (x, LT_EXPR, n, NULL); 8747 8748 /* model1: z != 5 && (0 <= x < n). */ 8749 region_model model1; 8750 model1.set_to_new_unknown_value (model1.get_lvalue (x, &ctxt), 8751 integer_type_node, &ctxt); 8752 model1.add_constraint (z, NE_EXPR, int_5, NULL); 8753 model1.add_constraint (x, GE_EXPR, int_0, NULL); 8754 model1.add_constraint (x, LT_EXPR, n, NULL); 8755 8756 /* They should be mergeable; the merged constraints should 8757 be: (0 <= x < n). */ 8758 region_model merged; 8759 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged)); 8760 8761 ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt), 8762 tristate (tristate::TS_TRUE)); 8763 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt), 8764 tristate (tristate::TS_TRUE)); 8765 8766 ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt), 8767 tristate (tristate::TS_UNKNOWN)); 8768 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt), 8769 tristate (tristate::TS_UNKNOWN)); 8770 } 8771 8772 /* Verify that if we mark a pointer to a malloc-ed region as non-NULL, 8773 all cast pointers to that region are also known to be non-NULL. */ 8774 8775 static void 8776 test_malloc_constraints () 8777 { 8778 region_model model; 8779 tree p = build_global_decl ("p", ptr_type_node); 8780 tree char_star = build_pointer_type (char_type_node); 8781 tree q = build_global_decl ("q", char_star); 8782 tree null_ptr = build_int_cst (ptr_type_node, 0); 8783 8784 region_id rid = model.add_new_malloc_region (); 8785 svalue_id sid = model.get_or_create_ptr_svalue (ptr_type_node, rid); 8786 model.set_value (model.get_lvalue (p, NULL), sid, NULL); 8787 model.set_value (q, p, NULL); 8788 8789 /* We should have a symbolic_region with m_possibly_null: true. 
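   i.e. the pointee of the malloc-ed pointer is modeled symbolically and
   is treated as possibly NULL until a constraint to the contrary (such
   as a guard like "if (p)" in user code, or the explicit p != NULL
   below) is added.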
*/ 8790 region *pointee = model.get_region (rid); 8791 symbolic_region *sym_reg = pointee->dyn_cast_symbolic_region (); 8792 ASSERT_NE (sym_reg, NULL); 8793 ASSERT_TRUE (sym_reg->m_possibly_null); 8794 8795 ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr); 8796 ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr); 8797 ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr); 8798 ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr); 8799 8800 model.add_constraint (p, NE_EXPR, null_ptr, NULL); 8801 8802 /* Adding the constraint should have cleared m_possibly_null. */ 8803 ASSERT_FALSE (sym_reg->m_possibly_null); 8804 8805 ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr); 8806 ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr); 8807 ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr); 8808 ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr); 8809 } 8810 8811 /* Run all of the selftests within this file. */ 8812 8813 void 8814 analyzer_region_model_cc_tests () 8815 { 8816 test_tree_cmp_on_constants (); 8817 test_dump (); 8818 test_dump_2 (); 8819 test_dump_3 (); 8820 test_get_representative_tree (); 8821 test_unique_constants (); 8822 test_svalue_equality (); 8823 test_region_equality (); 8824 test_purging_by_criteria (); 8825 test_purge_unused_svalues (); 8826 test_assignment (); 8827 test_compound_assignment (); 8828 test_stack_frames (); 8829 test_get_representative_path_var (); 8830 test_canonicalization_1 (); 8831 test_canonicalization_2 (); 8832 test_canonicalization_3 (); 8833 test_canonicalization_4 (); 8834 test_state_merging (); 8835 test_constraint_merging (); 8836 test_malloc_constraints (); 8837 } 8838 8839 } // namespace selftest 8840 8841 #endif /* CHECKING_P */ 8842 8843 } // namespace ana 8844 8845 #endif /* #if ENABLE_ANALYZER */ 8846