/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004-2019 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "tree-object-size.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "stringpool.h"
#include "attribs.h"

struct object_size_info
{
  int object_size_type;
  unsigned char pass;
  bool changed;
  bitmap visited, reexamine;
  unsigned int *depths;
  unsigned int *stack, *tos;
};

static const unsigned HOST_WIDE_INT unknown[4] = {
  HOST_WIDE_INT_M1U,
  HOST_WIDE_INT_M1U,
  0,
  0
};

static tree compute_object_offset (const_tree, const_tree);
static bool addr_object_size (struct object_size_info *,
                              const_tree, int, unsigned HOST_WIDE_INT *);
static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
static tree pass_through_call (const gcall *);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
                                unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
                                       unsigned int);

/* object_sizes[0] is upper bound for number of bytes till the end of
   the object.
   object_sizes[1] is upper bound for number of bytes till the end of
   the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for number of bytes till the end of
   the object and object_sizes[3] lower bound for subobject.  */
static vec<unsigned HOST_WIDE_INT> object_sizes[4];

/* Bitmaps recording which object sizes have been computed already.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be addition.  */
static unsigned HOST_WIDE_INT offset_limit;


/* Initialize OFFSET_LIMIT variable.  */
static void
init_offset_limit (void)
{
  if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
    offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
  else
    offset_limit = -1;
  offset_limit /= 2;
}
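
/* Illustration for compute_object_offset below (hypothetical types and
   layout, not part of the original sources): given

     struct S { int i; char buf[8]; } s;

   the reference s.buf[3] relative to VAR == s resolves to the constant
   offset 4 + 3 == 7: the COMPONENT_REF contributes DECL_FIELD_OFFSET
   plus DECL_FIELD_BIT_OFFSET / BITS_PER_UNIT for BUF, and the ARRAY_REF
   contributes index times element size.  This assumes a 4-byte int and
   no unusual padding.  */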

/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
                        size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
                                  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      tree low_bound, unit_size;
      low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
      unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
      if (! integer_zerop (low_bound))
        t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
        {
          code = MINUS_EXPR;
          t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
        }
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, unit_size, t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
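
/* Illustration for addr_object_size below (hypothetical, not from the
   original sources): given

     struct A { char buf1[9]; int x; char buf2[4]; } a;
     char *p = &a.buf1[1];

   the result, per OBJECT_SIZE_TYPE, is the number of bytes from P to
   the end of the whole object A (types 0 and 2) or to the end of the
   innermost addressed subobject BUF1 (types 1 and 3):
   __builtin_object_size (p, 0) == sizeof (a) - 1, while
   __builtin_object_size (p, 1) == sizeof (a.buf1) - 1 == 8.  */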

/* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   Store the result in *PSIZE and return true if the size could be
   determined; otherwise set *PSIZE to unknown[object_size_type] and
   return false.  */

static bool
addr_object_size (struct object_size_info *osi, const_tree ptr,
                  int object_size_type, unsigned HOST_WIDE_INT *psize)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown[object_size_type];

  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0
          || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
        {
          compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
                                       object_size_type & ~1, &sz);
        }
      else
        {
          tree var = TREE_OPERAND (pt_var, 0);
          if (osi->pass == 0)
            collect_object_sizes_for (osi, var);
          if (bitmap_bit_p (computed[object_size_type],
                            SSA_NAME_VERSION (var)))
            sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
          else
            sz = unknown[object_size_type];
        }
      if (sz != unknown[object_size_type])
        {
          offset_int mem_offset;
          if (mem_ref_offset (pt_var).is_constant (&mem_offset))
            {
              offset_int dsz = wi::sub (sz, mem_offset);
              if (wi::neg_p (dsz))
                sz = 0;
              else if (wi::fits_uhwi_p (dsz))
                sz = dsz.to_uhwi ();
              else
                sz = unknown[object_size_type];
            }
          else
            sz = unknown[object_size_type];
        }

      if (sz != unknown[object_size_type] && sz < offset_limit)
        pt_var_size = size_int (sz);
    }
  else if (pt_var
           && DECL_P (pt_var)
           && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
           && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
    pt_var_size = DECL_SIZE_UNIT (pt_var);
  else if (pt_var
           && TREE_CODE (pt_var) == STRING_CST
           && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
           && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
           && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
              < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return false;

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
        {
          var = TREE_OPERAND (ptr, 0);

          while (var != pt_var
                 && TREE_CODE (var) != BIT_FIELD_REF
                 && TREE_CODE (var) != COMPONENT_REF
                 && TREE_CODE (var) != ARRAY_REF
                 && TREE_CODE (var) != ARRAY_RANGE_REF
                 && TREE_CODE (var) != REALPART_EXPR
                 && TREE_CODE (var) != IMAGPART_EXPR)
            var = TREE_OPERAND (var, 0);
          if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
            var = TREE_OPERAND (var, 0);
          if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
              || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
              || (pt_var_size
                  && tree_int_cst_lt (pt_var_size,
                                      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
            var = pt_var;
          else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
            {
              tree v = var;
              /* For &X->fld, compute object size only if fld isn't the last
                 field, as struct { int i; char c[1]; } is often used instead
                 of flexible array member.  */
              while (v && v != pt_var)
                switch (TREE_CODE (v))
                  {
                  case ARRAY_REF:
                    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
                        && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
                      {
                        tree domain
                          = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
                        if (domain
                            && TYPE_MAX_VALUE (domain)
                            && TREE_CODE (TYPE_MAX_VALUE (domain))
                               == INTEGER_CST
                            && tree_int_cst_lt (TREE_OPERAND (v, 1),
                                                TYPE_MAX_VALUE (domain)))
                          {
                            v = NULL_TREE;
                            break;
                          }
                      }
                    v = TREE_OPERAND (v, 0);
                    break;
                  case REALPART_EXPR:
                  case IMAGPART_EXPR:
                    v = NULL_TREE;
                    break;
                  case COMPONENT_REF:
                    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
                      {
                        v = NULL_TREE;
                        break;
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (TREE_CODE (v) == COMPONENT_REF
                        && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                           == RECORD_TYPE)
                      {
                        tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
                        for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
                          if (TREE_CODE (fld_chain) == FIELD_DECL)
                            break;

                        if (fld_chain)
                          {
                            v = NULL_TREE;
                            break;
                          }
                        v = TREE_OPERAND (v, 0);
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (v != pt_var)
                      v = NULL_TREE;
                    else
                      v = pt_var;
                    break;
                  default:
                    v = pt_var;
                    break;
                  }
              if (v == pt_var)
                var = pt_var;
            }
        }
      else
        var = pt_var;

      if (var != pt_var)
        var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
        return false;
      else
        var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
        {
          if (TREE_CODE (bytes) == INTEGER_CST
              && tree_int_cst_lt (var_size, bytes))
            bytes = size_zero_node;
          else
            bytes = size_binop (MINUS_EXPR, var_size, bytes);
        }
      if (var != pt_var
          && pt_var_size
          && TREE_CODE (pt_var) == MEM_REF
          && bytes != error_mark_node)
        {
          tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
          if (bytes2 != error_mark_node)
            {
              if (TREE_CODE (bytes2) == INTEGER_CST
                  && tree_int_cst_lt (pt_var_size, bytes2))
                bytes2 = size_zero_node;
              else
                bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
              bytes = size_binop (MIN_EXPR, bytes, bytes2);
            }
        }
    }
  else if (!pt_var_size)
    return false;
  else
    bytes = pt_var_size;

  if (tree_fits_uhwi_p (bytes))
    {
      *psize = tree_to_uhwi (bytes);
      return true;
    }

  return false;
}
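
/* Illustration of the trailing-array heuristic above (hypothetical
   types, not from the original sources): for

     struct B { int i; char c[1]; } *b;

   a pointer &b->c is treated as if C were a flexible array member, so
   the subobject size is not capped at 1 byte; whereas for

     struct C { char c[1]; int i; } *cp;

   C is not the last field, so &cp->c is limited to sizeof (cp->c).  */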

/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles calls to functions declared with attribute alloc_size.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const gcall *call, int object_size_type)
{
  gcc_assert (is_gimple_call (call));

  tree calltype;
  if (tree callfn = gimple_call_fndecl (call))
    calltype = TREE_TYPE (callfn);
  else
    calltype = gimple_call_fntype (call);

  if (!calltype)
    return unknown[object_size_type];

  /* Set to positions of alloc_size arguments.  */
  int arg1 = -1, arg2 = -1;
  tree alloc_size = lookup_attribute ("alloc_size",
                                      TYPE_ATTRIBUTES (calltype));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p)) - 1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p))) - 1;
    }

  if (arg1 < 0 || arg1 >= (int) gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
          && (arg2 >= (int) gimple_call_num_args (call)
              || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  tree bytes = NULL_TREE;
  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
                        fold_convert (sizetype, gimple_call_arg (call, arg1)),
                        fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}
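
/* Illustration (hypothetical declarations, not from the original
   sources): with

     void *my_malloc (size_t) __attribute__ ((alloc_size (1)));
     void *my_calloc (size_t, size_t) __attribute__ ((alloc_size (1, 2)));

   alloc_object_size above returns 32 for my_malloc (32) and 40 for
   my_calloc (10, 4), provided the relevant arguments are
   INTEGER_CSTs.  */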

/* If object size is propagated from one of function's arguments directly
   to its return value, return that argument for GIMPLE_CALL statement CALL.
   Otherwise return NULL.  */

static tree
pass_through_call (const gcall *call)
{
  unsigned rf = gimple_call_return_flags (call);
  if (rf & ERF_RETURNS_ARG)
    {
      unsigned argnum = rf & ERF_RETURN_ARG_MASK;
      if (argnum < gimple_call_num_args (call))
        return gimple_call_arg (call, argnum);
    }

  /* __builtin_assume_aligned is intentionally not marked RET1.  */
  if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED))
    return gimple_call_arg (call, 0);

  return NULL_TREE;
}
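
/* Illustration (hypothetical): memcpy returns its first argument, so
   for

     char buf[16];
     char *p = memcpy (buf, src, n);

   pass_through_call above lets P inherit the object size of BUF, and
   likewise p2 = __builtin_assume_aligned (p, 8) inherits the object
   size of P.  */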

/* Compute __builtin_object_size value for PTR and set *PSIZE to
   the resulting value.  OBJECT_SIZE_TYPE is the second argument
   to __builtin_object_size.  Return true on success and false
   when the object size could not be determined.  */

bool
compute_builtin_object_size (tree ptr, int object_size_type,
                             unsigned HOST_WIDE_INT *psize)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown[object_size_type];

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type, psize);

  if (TREE_CODE (ptr) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr)))
    return false;

  if (computed[object_size_type] == NULL)
    {
      if (optimize || object_size_type & 1)
        return false;

      /* When not optimizing, rather than failing, make a small effort
         to determine the object size without the full benefit of
         the (costly) computation below.  */
      gimple *def = SSA_NAME_DEF_STMT (ptr);
      if (gimple_code (def) == GIMPLE_ASSIGN)
        {
          tree_code code = gimple_assign_rhs_code (def);
          if (code == POINTER_PLUS_EXPR)
            {
              tree offset = gimple_assign_rhs2 (def);
              ptr = gimple_assign_rhs1 (def);

              if (tree_fits_shwi_p (offset)
                  && compute_builtin_object_size (ptr, object_size_type,
                                                  psize))
                {
                  /* Return zero when the offset is out of bounds.  */
                  unsigned HOST_WIDE_INT off = tree_to_shwi (offset);
                  *psize = off < *psize ? *psize - off : 0;
                  return true;
                }
            }
        }
      return false;
    }

  if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
    {
      struct object_size_info osi;
      bitmap_iterator bi;
      unsigned int i;

      if (num_ssa_names > object_sizes[object_size_type].length ())
        object_sizes[object_size_type].safe_grow (num_ssa_names);
      if (dump_file)
        {
          fprintf (dump_file, "Computing %s %sobject size for ",
                   (object_size_type & 2) ? "minimum" : "maximum",
                   (object_size_type & 1) ? "sub" : "");
          print_generic_expr (dump_file, ptr, dump_flags);
          fprintf (dump_file, ":\n");
        }

      osi.visited = BITMAP_ALLOC (NULL);
      osi.reexamine = BITMAP_ALLOC (NULL);
      osi.object_size_type = object_size_type;
      osi.depths = NULL;
      osi.stack = NULL;
      osi.tos = NULL;

      /* First pass: walk UD chains, compute object sizes that
         can be computed.  osi.reexamine bitmap at the end will
         contain what variables were found in dependency cycles
         and therefore need to be reexamined.  */
      osi.pass = 0;
      osi.changed = false;
      collect_object_sizes_for (&osi, ptr);

      /* Second pass: keep recomputing object sizes of variables
         that need reexamination, until no object sizes are
         increased or all object sizes are computed.  */
      if (! bitmap_empty_p (osi.reexamine))
        {
          bitmap reexamine = BITMAP_ALLOC (NULL);

          /* If looking for minimum instead of maximum object size,
             detect cases where a pointer is increased in a loop.
             Although even without this detection pass 2 would eventually
             terminate, it could take a long time.  If a pointer is
             increasing this way, we need to assume 0 object size.
             E.g. p = &buf[0]; while (cond) p = p + 4;  */
          if (object_size_type & 2)
            {
              osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
              osi.stack = XNEWVEC (unsigned int, num_ssa_names);
              osi.tos = osi.stack;
              osi.pass = 1;
              /* collect_object_sizes_for is changing
                 osi.reexamine bitmap, so iterate over a copy.  */
              bitmap_copy (reexamine, osi.reexamine);
              EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                if (bitmap_bit_p (osi.reexamine, i))
                  check_for_plus_in_loops (&osi, ssa_name (i));

              free (osi.depths);
              osi.depths = NULL;
              free (osi.stack);
              osi.stack = NULL;
              osi.tos = NULL;
            }

          do
            {
              osi.pass = 2;
              osi.changed = false;
              /* collect_object_sizes_for is changing
                 osi.reexamine bitmap, so iterate over a copy.  */
              bitmap_copy (reexamine, osi.reexamine);
              EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                if (bitmap_bit_p (osi.reexamine, i))
                  {
                    collect_object_sizes_for (&osi, ssa_name (i));
                    if (dump_file && (dump_flags & TDF_DETAILS))
                      {
                        fprintf (dump_file, "Reexamining ");
                        print_generic_expr (dump_file, ssa_name (i),
                                            dump_flags);
                        fprintf (dump_file, "\n");
                      }
                  }
            }
          while (osi.changed);

          BITMAP_FREE (reexamine);
        }
      EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
        bitmap_set_bit (computed[object_size_type], i);

      /* Debugging dumps.  */
      if (dump_file)
        {
          EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
            if (object_sizes[object_size_type][i]
                != unknown[object_size_type])
              {
                print_generic_expr (dump_file, ssa_name (i),
                                    dump_flags);
                fprintf (dump_file,
                         ": %s %sobject size "
                         HOST_WIDE_INT_PRINT_UNSIGNED "\n",
                         (object_size_type & 2) ? "minimum" : "maximum",
                         (object_size_type & 1) ? "sub" : "",
                         object_sizes[object_size_type][i]);
              }
        }

      BITMAP_FREE (osi.reexamine);
      BITMAP_FREE (osi.visited);
    }

  *psize = object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
  return *psize != unknown[object_size_type];
}
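
/* Illustration of the two-pass scheme above (hypothetical GIMPLE, not
   from the original sources): for a pointer advanced in a loop, e.g.

     p_1 = PHI <&buf[0], p_2>;
     p_2 = p_1 + 4;

   the first pass leaves p_1 and p_2 in osi.reexamine because they form
   a dependency cycle; for minimum sizes (types 2 and 3)
   check_for_plus_in_loops then forces their sizes to 0, and for
   maximum sizes the second pass re-merges until no size increases.  */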

/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
              || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    addr_object_size (osi, value, object_size_type, &bytes);
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to the result of a call.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (is_gimple_call (call));

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = alloc_object_size (call, object_size_type);

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}
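
/* Note on the update idiom above (explanatory, not from the original
   sources): for maximum sizes (types 0 and 1) the entries start at 0
   and are only ever raised, while for minimum sizes (types 2 and 3)
   they start at HOST_WIDE_INT_M1U (== unknown) and are only ever
   lowered, so merging several reaching definitions naturally computes
   a maximum or minimum respectively.  */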

/* Compute object_sizes for PTR, defined to an unknown value.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
                    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
                 ? HOST_WIDE_INT_0U : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}
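
/* Worked example for merge_object_sizes above (hypothetical): with
   char buf[16] and q_1 = &buf[0] + 10, the object size of q_1 is 6;
   merging ORIG == q_1 at OFFSET == 4 into DEST yields 6 - 4 == 2
   remaining bytes.  An OFFSET larger than the remaining bytes clamps
   to 0, and an OFFSET >= offset_limit makes DEST unknown.  */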

/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
          || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! tree_fits_uhwi_p (op1))
        bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
        return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
      else
        {
          unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);

          /* op0 will be ADDR_EXPR here.  */
          addr_object_size (osi, op0, object_size_type, &bytes);
          if (bytes == unknown[object_size_type])
            ;
          else if (off > offset_limit)
            bytes = unknown[object_size_type];
          else if (off > bytes)
            bytes = 0;
          else
            bytes -= off;
        }
    }
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}


/* Compute object_sizes for VAR, defined at STMT, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  then_ = gimple_assign_rhs2 (stmt);
  else_ = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_, 0);
  else
    expr_object_size (osi, var, then_);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return reexamine;

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_, 0);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}
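
/* Illustration for cond_expr_object_size above (hypothetical): for

     char a[8], b[4];
     p_1 = flag_2 ? &a[0] : &b[0];

   both arms are merged into p_1, so the maximum object size of p_1 is
   8 and the minimum is 4.  */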

/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For allocation GIMPLE_CALL like malloc or calloc object size is the size
   of the allocation.
   For POINTER_PLUS_EXPR where second operand is a constant integer,
   object size is object size of the first operand minus the constant.
   If the constant is bigger than the number of remaining bytes until the
   end of the object, object size is 0, but if it is instead a pointer
   subtraction, object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space, and constants less than half of the address space are considered
   addition, while bigger constants subtraction.
   For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
   object size is object size of that argument.
   Otherwise, object size is the maximum of object sizes of variables
   that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple *stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
        {
          object_sizes[object_size_type][varno]
            = (object_size_type & 2) ? -1 : 0;
        }
      else
        {
          /* Found a dependency loop.  Mark the variable for later
             re-examination.  */
          bitmap_set_bit (osi->reexamine, varno);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Found a dependency loop at ");
              print_generic_expr (dump_file, var, dump_flags);
              fprintf (dump_file, "\n");
            }
          return;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree rhs = gimple_assign_rhs1 (stmt);
        if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
            || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
                && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
          reexamine = plus_stmt_object_size (osi, var, stmt);
        else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
          reexamine = cond_expr_object_size (osi, var, stmt);
        else if (gimple_assign_single_p (stmt)
                 || gimple_assign_unary_nop_p (stmt))
          {
            if (TREE_CODE (rhs) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (rhs)))
              reexamine = merge_object_sizes (osi, var, rhs, 0);
            else
              expr_object_size (osi, var, rhs);
          }
        else
          unknown_object_size (osi, var);
        break;
      }

    case GIMPLE_CALL:
      {
        gcall *call_stmt = as_a <gcall *> (stmt);
        tree arg = pass_through_call (call_stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (arg)))
              reexamine = merge_object_sizes (osi, var, arg, 0);
            else
              expr_object_size (osi, var, arg);
          }
        else
          call_object_size (osi, var, call_stmt);
        break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
          && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
        expr_object_size (osi, var, SSA_NAME_VAR (var));
      else
        /* Uninitialized SSA names point nowhere.  */
        object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (object_sizes[object_size_type][varno]
                == unknown[object_size_type])
              break;

            if (TREE_CODE (rhs) == SSA_NAME)
              reexamine |= merge_object_sizes (osi, var, rhs, 0);
            else if (osi->pass == 0)
              expr_object_size (osi, var, rhs);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Need to reexamine ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, "\n");
        }
    }
}

/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
                           unsigned int depth)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
        {
          unsigned int *sp;

          /* Found a loop involving pointer addition.  */
          for (sp = osi->tos; sp > osi->stack; )
            {
              --sp;
              bitmap_clear_bit (osi->reexamine, *sp);
              bitmap_set_bit (computed[osi->object_size_type], *sp);
              object_sizes[osi->object_size_type][*sp] = 0;
              if (*sp == varno)
                break;
            }
        }
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
        if ((gimple_assign_single_p (stmt)
             || gimple_assign_unary_nop_p (stmt))
            && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
          {
            tree rhs = gimple_assign_rhs1 (stmt);

            check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
          {
            tree basevar = gimple_assign_rhs1 (stmt);
            tree cst = gimple_assign_rhs2 (stmt);

            gcc_assert (TREE_CODE (cst) == INTEGER_CST);

            check_for_plus_in_loops_1 (osi, basevar,
                                       depth + !integer_zerop (cst));
          }
        else
          gcc_unreachable ();
        break;
      }

    case GIMPLE_CALL:
      {
        gcall *call_stmt = as_a <gcall *> (stmt);
        tree arg = pass_through_call (call_stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, arg, depth);
            else
              gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (TREE_CODE (rhs) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  osi->depths[varno] = 0;
  osi->tos--;
}


/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      if (integer_zerop (cst))
        return;

      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}

/* Initialize data structures for the object size computation.  */

void
init_object_sizes (void)
{
  int object_size_type;

  if (computed[0])
    return;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type].safe_grow (num_ssa_names);
      computed[object_size_type] = BITMAP_ALLOC (NULL);
    }

  init_offset_limit ();
}


/* Destroy data structures after the object size computation.  */

void
fini_object_sizes (void)
{
  int object_size_type;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type].release ();
      BITMAP_FREE (computed[object_size_type]);
    }
}
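
/* Illustration of the insert_min_max_p transformation performed by the
   pass below (hypothetical GIMPLE, not from the original sources): an
   early objsz instance rewrites

     sz_1 = __builtin_object_size (p_2, 1);

   into

     tem_3 = __builtin_object_size (p_2, 1);
     sz_1 = MIN_EXPR <tem_3, cst>;

   where CST is the constant size computed at this point, so that later
   folding of the remaining call can still improve the result; for
   minimum sizes (type 3) MAX_EXPR is used instead.  */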

/* Simple pass to optimize all __builtin_object_size () builtins.  */

namespace {

const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_object_sizes : public gimple_opt_pass
{
public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt), insert_min_max_p (false)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
  void set_pass_param (unsigned int n, bool param)
    {
      gcc_assert (n == 0);
      insert_min_max_p = param;
    }
  virtual unsigned int execute (function *);

private:
  /* Determines whether the pass instance creates MIN/MAX_EXPRs.  */
  bool insert_min_max_p;
}; // class pass_object_sizes

/* Dummy valueize function.  */

static tree
do_valueize (tree t)
{
  return t;
}

unsigned int
pass_object_sizes::execute (function *fun)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree result;
          gimple *call = gsi_stmt (i);
          if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
            continue;

          init_object_sizes ();

          /* If insert_min_max_p, only attempt to fold
             __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
             and rather than folding the builtin to the constant if any,
             create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
             call result and the computed constant.  */
          if (insert_min_max_p)
            {
              tree ost = gimple_call_arg (call, 1);
              if (tree_fits_uhwi_p (ost))
                {
                  unsigned HOST_WIDE_INT object_size_type
                    = tree_to_uhwi (ost);
                  tree ptr = gimple_call_arg (call, 0);
                  tree lhs = gimple_call_lhs (call);
                  if ((object_size_type == 1 || object_size_type == 3)
                      && (TREE_CODE (ptr) == ADDR_EXPR
                          || TREE_CODE (ptr) == SSA_NAME)
                      && lhs)
                    {
                      tree type = TREE_TYPE (lhs);
                      unsigned HOST_WIDE_INT bytes;
                      if (compute_builtin_object_size (ptr, object_size_type,
                                                       &bytes)
                          && wi::fits_to_tree_p (bytes, type))
                        {
                          tree tem = make_ssa_name (type);
                          gimple_call_set_lhs (call, tem);
                          enum tree_code code
                            = object_size_type == 1 ? MIN_EXPR : MAX_EXPR;
                          tree cst = build_int_cstu (type, bytes);
                          gimple *g
                            = gimple_build_assign (lhs, code, tem, cst);
                          gsi_insert_after (&i, g, GSI_NEW_STMT);
                          update_stmt (call);
                        }
                    }
                }
              continue;
            }

          tree lhs = gimple_call_lhs (call);
          if (!lhs)
            continue;

          result = gimple_fold_stmt_to_constant (call, do_valueize);
          if (!result)
            {
              tree ost = gimple_call_arg (call, 1);

              if (tree_fits_uhwi_p (ost))
                {
                  unsigned HOST_WIDE_INT object_size_type
                    = tree_to_uhwi (ost);

                  if (object_size_type < 2)
                    result = fold_convert (size_type_node,
                                           integer_minus_one_node);
                  else if (object_size_type < 4)
                    result = build_zero_cst (size_type_node);
                }

              if (!result)
                continue;
            }

          gcc_assert (TREE_CODE (result) == INTEGER_CST);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, call, 0, dump_flags);
              fprintf (dump_file, " to ");
              print_generic_expr (dump_file, result);
              fprintf (dump_file, "\n");
            }

          /* Propagate into all uses and fold those stmts.  */
          replace_uses_by (lhs, result);
        }
    }

  fini_object_sizes ();
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}