/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004-2018 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "tree-object-size.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "stringpool.h"
#include "attribs.h"

struct object_size_info
{
  int object_size_type;
  unsigned char pass;
  bool changed;
  bitmap visited, reexamine;
  unsigned int *depths;
  unsigned int *stack, *tos;
};

static const unsigned HOST_WIDE_INT unknown[4] = {
  HOST_WIDE_INT_M1U,
  HOST_WIDE_INT_M1U,
  0,
  0
};

static tree compute_object_offset (const_tree, const_tree);
static bool addr_object_size (struct object_size_info *,
			      const_tree, int, unsigned HOST_WIDE_INT *);
static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
static tree pass_through_call (const gcall *);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
				unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
				       unsigned int);

/* object_sizes[0] is the upper bound for the number of bytes till the end
   of the object.
   object_sizes[1] is the upper bound for the number of bytes till the end
   of the subobject (innermost array or field with address taken).
   object_sizes[2] is the lower bound for the number of bytes till the end
   of the object and object_sizes[3] is the lower bound for the subobject.  */
static vec<unsigned HOST_WIDE_INT> object_sizes[4];

/* Bitmaps recording which object sizes have already been computed.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be an addition.  */
static unsigned HOST_WIDE_INT offset_limit;


/* Initialize OFFSET_LIMIT variable.  */
static void
init_offset_limit (void)
{
  if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
    offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
  else
    offset_limit = -1;
  offset_limit /= 2;
}

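/* Illustration of the four object_size_type variants described above
   (expository example, not part of the implementation):

     struct A { char buf[10]; char tail[5]; } a;
     char *p = &a.buf[4];

     __builtin_object_size (p, 0) == 11   maximum to end of object a
     __builtin_object_size (p, 1) == 6    maximum to end of subobject a.buf
     __builtin_object_size (p, 2) == 11   minimum to end of object a
     __builtin_object_size (p, 3) == 6    minimum to end of subobject a.buf

   Here everything is statically known, so the maximum and minimum
   bounds coincide.  */
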
/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
			size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
				  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      tree low_bound, unit_size;
      low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
      unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
      if (! integer_zerop (low_bound))
	t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
	{
	  code = MINUS_EXPR;
	  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
	}
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, unit_size, t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}

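/* For instance (expository example): with

     struct S { int i; int arr[4]; } s;

   compute_object_offset on the tree for s.arr[2] relative to s yields
   4 + 2 * 4 == 12 on a target with 4-byte int: the COMPONENT_REF case
   contributes the field offset of arr and the ARRAY_REF case adds
   index * element size.  */
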
/* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   On success store the size in *PSIZE and return true; if the size
   cannot be determined, store unknown[object_size_type] in *PSIZE
   and return false.  */

static bool
addr_object_size (struct object_size_info *osi, const_tree ptr,
		  int object_size_type, unsigned HOST_WIDE_INT *psize)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown[object_size_type];

  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0
	  || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
	{
	  compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
				       object_size_type & ~1, &sz);
	}
      else
	{
	  tree var = TREE_OPERAND (pt_var, 0);
	  if (osi->pass == 0)
	    collect_object_sizes_for (osi, var);
	  if (bitmap_bit_p (computed[object_size_type],
			    SSA_NAME_VERSION (var)))
	    sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
	  else
	    sz = unknown[object_size_type];
	}
      if (sz != unknown[object_size_type])
	{
	  offset_int mem_offset;
	  if (mem_ref_offset (pt_var).is_constant (&mem_offset))
	    {
	      offset_int dsz = wi::sub (sz, mem_offset);
	      if (wi::neg_p (dsz))
		sz = 0;
	      else if (wi::fits_uhwi_p (dsz))
		sz = dsz.to_uhwi ();
	      else
		sz = unknown[object_size_type];
	    }
	  else
	    sz = unknown[object_size_type];
	}

      if (sz != unknown[object_size_type] && sz < offset_limit)
	pt_var_size = size_int (sz);
    }
  else if (pt_var
	   && DECL_P (pt_var)
	   && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
	   && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
    pt_var_size = DECL_SIZE_UNIT (pt_var);
  else if (pt_var
	   && TREE_CODE (pt_var) == STRING_CST
	   && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
	   && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
	   && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
	      < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return false;

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
	{
	  var = TREE_OPERAND (ptr, 0);

	  while (var != pt_var
		 && TREE_CODE (var) != BIT_FIELD_REF
		 && TREE_CODE (var) != COMPONENT_REF
		 && TREE_CODE (var) != ARRAY_REF
		 && TREE_CODE (var) != ARRAY_RANGE_REF
		 && TREE_CODE (var) != REALPART_EXPR
		 && TREE_CODE (var) != IMAGPART_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
	    var = TREE_OPERAND (var, 0);
	  if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
	      || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
	      || (pt_var_size
		  && tree_int_cst_lt (pt_var_size,
				      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
	    var = pt_var;
	  else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
	    {
	      tree v = var;
	      /* For &X->fld, compute object size only if fld isn't the last
		 field, as struct { int i; char c[1]; } is often used instead
		 of flexible array member.  */
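	      /* Illustrative case (expository, hypothetical types):
		   struct T { int i; char c[1]; };
		   struct T *t = malloc (sizeof (struct T) + 31);
		 Here c is the last field and the object is commonly
		 over-allocated as a substitute for a flexible array
		 member, so for &t->c[0] the loop below walks up to
		 pt_var and the whole-object size is used instead of
		 sizeof (t->c) == 1.  */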
	      while (v && v != pt_var)
		switch (TREE_CODE (v))
		  {
		  case ARRAY_REF:
		    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
			&& TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
		      {
			tree domain
			  = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
			if (domain
			    && TYPE_MAX_VALUE (domain)
			    && TREE_CODE (TYPE_MAX_VALUE (domain))
			       == INTEGER_CST
			    && tree_int_cst_lt (TREE_OPERAND (v, 1),
						TYPE_MAX_VALUE (domain)))
			  {
			    v = NULL_TREE;
			    break;
			  }
		      }
		    v = TREE_OPERAND (v, 0);
		    break;
		  case REALPART_EXPR:
		  case IMAGPART_EXPR:
		    v = NULL_TREE;
		    break;
		  case COMPONENT_REF:
		    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
		      {
			v = NULL_TREE;
			break;
		      }
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (TREE_CODE (v) == COMPONENT_REF
			&& TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			   == RECORD_TYPE)
		      {
			tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
			for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
			  if (TREE_CODE (fld_chain) == FIELD_DECL)
			    break;

			if (fld_chain)
			  {
			    v = NULL_TREE;
			    break;
			  }
			v = TREE_OPERAND (v, 0);
		      }
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (v != pt_var)
		      v = NULL_TREE;
		    else
		      v = pt_var;
		    break;
		  default:
		    v = pt_var;
		    break;
		  }
	      if (v == pt_var)
		var = pt_var;
	    }
	}
      else
	var = pt_var;

      if (var != pt_var)
	var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
	return false;
      else
	var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
	{
	  if (TREE_CODE (bytes) == INTEGER_CST
	      && tree_int_cst_lt (var_size, bytes))
	    bytes = size_zero_node;
	  else
	    bytes = size_binop (MINUS_EXPR, var_size, bytes);
	}
      if (var != pt_var
	  && pt_var_size
	  && TREE_CODE (pt_var) == MEM_REF
	  && bytes != error_mark_node)
	{
	  tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
	  if (bytes2 != error_mark_node)
	    {
	      if (TREE_CODE (bytes2) == INTEGER_CST
		  && tree_int_cst_lt (pt_var_size, bytes2))
		bytes2 = size_zero_node;
	      else
		bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
	      bytes = size_binop (MIN_EXPR, bytes, bytes2);
	    }
	}
    }
  else if (!pt_var_size)
    return false;
  else
    bytes = pt_var_size;

  if (tree_fits_uhwi_p (bytes))
    {
      *psize = tree_to_uhwi (bytes);
      return true;
    }

  return false;
}

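/* Example of the attribute consumed below (for illustration only;
   mymalloc and mycalloc are hypothetical):

     void *mymalloc (size_t sz) __attribute__ ((alloc_size (1)));
     void *mycalloc (size_t n, size_t sz)
       __attribute__ ((alloc_size (1, 2)));

   For p = mymalloc (32), __builtin_object_size (p, 0) == 32; for
   q = mycalloc (4, 16) the size is 4 * 16 == 64, provided the
   arguments are INTEGER_CSTs.  */
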
/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
   argument from __builtin_object_size.  If unknown, return
   unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const gcall *call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  alloc_size = lookup_attribute ("alloc_size",
				 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p)) - 1;
      if (TREE_CHAIN (p))
	arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p))) - 1;
    }

  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
	arg2 = 1;
	/* fall through */
      case BUILT_IN_MALLOC:
      CASE_BUILT_IN_ALLOCA:
	arg1 = 0;
      default:
	break;
      }

  if (arg1 < 0 || arg1 >= (int) gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
	  && (arg2 >= (int) gimple_call_num_args (call)
	      || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
			fold_convert (sizetype, gimple_call_arg (call, arg1)),
			fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}


/* If the object size is propagated from one of the function's arguments
   directly to its return value, return that argument for the GIMPLE_CALL
   statement CALL.  Otherwise return NULL_TREE.  */

static tree
pass_through_call (const gcall *call)
{
  unsigned rf = gimple_call_return_flags (call);
  if (rf & ERF_RETURNS_ARG)
    {
      unsigned argnum = rf & ERF_RETURN_ARG_MASK;
      if (argnum < gimple_call_num_args (call))
	return gimple_call_arg (call, argnum);
    }

  /* __builtin_assume_aligned is intentionally not marked RET1.  */
  if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED))
    return gimple_call_arg (call, 0);

  return NULL_TREE;
}


/* Compute __builtin_object_size value for PTR and set *PSIZE to
   the resulting value.  OBJECT_SIZE_TYPE is the second argument
   to __builtin_object_size.  Return true on success and false
   when the object size could not be determined.  */

bool
compute_builtin_object_size (tree ptr, int object_size_type,
			     unsigned HOST_WIDE_INT *psize)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown[object_size_type];

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type, psize);

  if (TREE_CODE (ptr) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr)))
    return false;

  if (computed[object_size_type] == NULL)
    {
      if (optimize || object_size_type & 1)
	return false;

      /* When not optimizing, rather than failing, make a small effort
	 to determine the object size without the full benefit of
	 the (costly) computation below.  */
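      /* E.g. (illustration): for p_2 = &buf + 16 where buf is
	 64 bytes, the code below recurses on the POINTER_PLUS
	 operand &buf, which is handled via addr_object_size, and
	 the result is 64 - 16 == 48.  */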
      gimple *def = SSA_NAME_DEF_STMT (ptr);
      if (gimple_code (def) == GIMPLE_ASSIGN)
	{
	  tree_code code = gimple_assign_rhs_code (def);
	  if (code == POINTER_PLUS_EXPR)
	    {
	      tree offset = gimple_assign_rhs2 (def);
	      ptr = gimple_assign_rhs1 (def);

	      if (tree_fits_shwi_p (offset)
		  && compute_builtin_object_size (ptr, object_size_type,
						  psize))
		{
		  /* Return zero when the offset is out of bounds.  */
		  unsigned HOST_WIDE_INT off = tree_to_shwi (offset);
		  *psize = off < *psize ? *psize - off : 0;
		  return true;
		}
	    }
	}
      return false;
    }

  if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
    {
      struct object_size_info osi;
      bitmap_iterator bi;
      unsigned int i;

      if (num_ssa_names > object_sizes[object_size_type].length ())
	object_sizes[object_size_type].safe_grow (num_ssa_names);
      if (dump_file)
	{
	  fprintf (dump_file, "Computing %s %sobject size for ",
		   (object_size_type & 2) ? "minimum" : "maximum",
		   (object_size_type & 1) ? "sub" : "");
	  print_generic_expr (dump_file, ptr, dump_flags);
	  fprintf (dump_file, ":\n");
	}

      osi.visited = BITMAP_ALLOC (NULL);
      osi.reexamine = BITMAP_ALLOC (NULL);
      osi.object_size_type = object_size_type;
      osi.depths = NULL;
      osi.stack = NULL;
      osi.tos = NULL;

      /* First pass: walk UD chains, compute object sizes that
	 can be computed.  At the end the osi.reexamine bitmap
	 records which variables were found in dependency cycles
	 and therefore need to be reexamined.  */
      osi.pass = 0;
      osi.changed = false;
      collect_object_sizes_for (&osi, ptr);

      /* Second pass: keep recomputing object sizes of variables
	 that need reexamination, until no object sizes are
	 increased or all object sizes are computed.  */
      if (! bitmap_empty_p (osi.reexamine))
	{
	  bitmap reexamine = BITMAP_ALLOC (NULL);

	  /* If looking for minimum instead of maximum object size,
	     detect cases where a pointer is increased in a loop.
	     Although even without this detection pass 2 would eventually
	     terminate, it could take a long time.  If a pointer is
	     increasing this way, we need to assume 0 object size.
	     E.g. p = &buf[0]; while (cond) p = p + 4;  */
	  if (object_size_type & 2)
	    {
	      osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
	      osi.stack = XNEWVEC (unsigned int, num_ssa_names);
	      osi.tos = osi.stack;
	      osi.pass = 1;
	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		  check_for_plus_in_loops (&osi, ssa_name (i));

	      free (osi.depths);
	      osi.depths = NULL;
	      free (osi.stack);
	      osi.stack = NULL;
	      osi.tos = NULL;
	    }

	  do
	    {
	      osi.pass = 2;
	      osi.changed = false;
	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		  {
		    collect_object_sizes_for (&osi, ssa_name (i));
		    if (dump_file && (dump_flags & TDF_DETAILS))
		      {
			fprintf (dump_file, "Reexamining ");
			print_generic_expr (dump_file, ssa_name (i),
					    dump_flags);
			fprintf (dump_file, "\n");
		      }
		  }
	    }
	  while (osi.changed);

	  BITMAP_FREE (reexamine);
	}
      EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
	bitmap_set_bit (computed[object_size_type], i);

      /* Debugging dumps.  */
      if (dump_file)
	{
	  EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
	    if (object_sizes[object_size_type][i]
		!= unknown[object_size_type])
	      {
		print_generic_expr (dump_file, ssa_name (i),
				    dump_flags);
		fprintf (dump_file,
			 ": %s %sobject size "
			 HOST_WIDE_INT_PRINT_UNSIGNED "\n",
			 (object_size_type & 2) ? "minimum" : "maximum",
			 (object_size_type & 1) ? "sub" : "",
			 object_sizes[object_size_type][i]);
	      }
	}

      BITMAP_FREE (osi.reexamine);
      BITMAP_FREE (osi.visited);
    }

  *psize = object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
  return *psize != unknown[object_size_type];
}

/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
	      != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
	      || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    addr_object_size (osi, value, object_size_type, &bytes);
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to the result of a call.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (is_gimple_call (call));

  gcc_assert (object_sizes[object_size_type][varno]
	      != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = alloc_object_size (call, object_size_type);

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
}

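/* E.g. (illustration): for

     p = cond ? malloc (10) : malloc (20);

   the maximum object size (type 0) of p is 20 while the minimum
   (type 2) is 10; the comparisons above keep the larger candidate
   for types 0/1 and the smaller one for types 2/3.  */
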
/* Compute object_sizes for PTR, defined to an unknown value.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
	      != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
}


/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
		    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
		 ? HOST_WIDE_INT_0U : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
	{
	  object_sizes[object_size_type][varno] = orig_bytes;
	  osi->changed = true;
	}
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
	{
	  object_sizes[object_size_type][varno] = orig_bytes;
	  osi->changed = true;
	}
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}


/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
	  || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! tree_fits_uhwi_p (op1))
	bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
	return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
      else
	{
	  unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);

	  /* op0 is an ADDR_EXPR here.  */
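	  /* E.g. (illustration): for q = &buf + 4 with buf 16 bytes,
	     addr_object_size below yields 16 and the adjustment
	     produces 16 - 4 == 12.  */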
	  addr_object_size (osi, op0, object_size_type, &bytes);
	  if (bytes == unknown[object_size_type])
	    ;
	  else if (off > offset_limit)
	    bytes = unknown[object_size_type];
	  else if (off > bytes)
	    bytes = 0;
	  else
	    bytes -= off;
	}
    }
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}


/* Compute object_sizes for VAR, defined at STMT, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  then_ = gimple_assign_rhs2 (stmt);
  else_ = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_, 0);
  else
    expr_object_size (osi, var, then_);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return reexamine;

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_, 0);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}

/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For an allocation GIMPLE_CALL like malloc or calloc the object size is
   the size of the allocation.
   For a POINTER_PLUS_EXPR whose second operand is a constant integer,
   the object size is the object size of the first operand minus the
   constant.  If the constant is bigger than the number of remaining bytes
   until the end of the object, the object size is 0, but if it is instead
   a pointer subtraction, the object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space; constants less than half of the address space are treated as
   addition, bigger constants as subtraction.
   For a memcpy-like GIMPLE_CALL that always returns one of its arguments,
   the object size is the object size of that argument.
   Otherwise, the object size is the maximum of the object sizes of the
   variables that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple *stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
	{
	  object_sizes[object_size_type][varno]
	    = (object_size_type & 2) ? -1 : 0;
	}
      else
	{
	  /* Found a dependency loop.  Mark the variable for later
	     re-examination.  */
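	  /* E.g. (illustration; SSA numbering schematic) the cycle
	       p_1 = PHI <&buf[0], p_2>
	       p_2 = p_1 + 4
	     revisits p_1 while its size is still being computed,
	     so p_1 is queued here for pass 2.  */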
	  bitmap_set_bit (osi->reexamine, varno);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Found a dependency loop at ");
	      print_generic_expr (dump_file, var, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	tree rhs = gimple_assign_rhs1 (stmt);
	if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	    || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
	  reexamine = plus_stmt_object_size (osi, var, stmt);
	else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	  reexamine = cond_expr_object_size (osi, var, stmt);
	else if (gimple_assign_single_p (stmt)
		 || gimple_assign_unary_nop_p (stmt))
	  {
	    if (TREE_CODE (rhs) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (rhs)))
	      reexamine = merge_object_sizes (osi, var, rhs, 0);
	    else
	      expr_object_size (osi, var, rhs);
	  }
	else
	  unknown_object_size (osi, var);
	break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (arg)))
	      reexamine = merge_object_sizes (osi, var, arg, 0);
	    else
	      expr_object_size (osi, var, arg);
	  }
	else
	  call_object_size (osi, var, call_stmt);
	break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
	  && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
	expr_object_size (osi, var, SSA_NAME_VAR (var));
      else
	/* Uninitialized SSA names point nowhere.  */
	object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_PHI:
      {
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (object_sizes[object_size_type][varno]
		== unknown[object_size_type])
	      break;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      reexamine |= merge_object_sizes (osi, var, rhs, 0);
	    else if (osi->pass == 0)
	      expr_object_size (osi, var, rhs);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Need to reexamine ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, "\n");
	}
    }
}

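/* Expository note (schematic GIMPLE, not part of the pass): the loop

     p = &buf[0]; while (cond) p = p + 4;

   appears in GIMPLE roughly as

     p_1 = PHI <&buf[0](entry), p_2(latch)>
     p_2 = p_1 + 4

   The helpers below walk such cycles and, when the increment is
   nonzero, force the minimum object size of every SSA name in the
   cycle to 0.  */
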
/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
			   unsigned int depth)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
	{
	  unsigned int *sp;

	  /* Found a loop involving pointer addition.  */
	  for (sp = osi->tos; sp > osi->stack; )
	    {
	      --sp;
	      bitmap_clear_bit (osi->reexamine, *sp);
	      bitmap_set_bit (computed[osi->object_size_type], *sp);
	      object_sizes[osi->object_size_type][*sp] = 0;
	      if (*sp == varno)
		break;
	    }
	}
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
	if ((gimple_assign_single_p (stmt)
	     || gimple_assign_unary_nop_p (stmt))
	    && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
	  {
	    tree rhs = gimple_assign_rhs1 (stmt);

	    check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	  {
	    tree basevar = gimple_assign_rhs1 (stmt);
	    tree cst = gimple_assign_rhs2 (stmt);

	    gcc_assert (TREE_CODE (cst) == INTEGER_CST);

	    check_for_plus_in_loops_1 (osi, basevar,
				       depth + !integer_zerop (cst));
	  }
	else
	  gcc_unreachable ();
	break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, arg, depth);
	    else
	      gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_PHI:
      {
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  osi->depths[varno] = 0;
  osi->tos--;
}


/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      if (integer_zerop (cst))
	return;

      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}

/* Initialize data structures for the object size computation.  */

void
init_object_sizes (void)
{
  int object_size_type;

  if (computed[0])
    return;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type].safe_grow (num_ssa_names);
      computed[object_size_type] = BITMAP_ALLOC (NULL);
    }

  init_offset_limit ();
}


/* Destroy data structures after the object size computation.  */

void
fini_object_sizes (void)
{
  int object_size_type;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type].release ();
      BITMAP_FREE (computed[object_size_type]);
    }
}


/* Simple pass to optimize all __builtin_object_size () builtins.  */

namespace {

const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_object_sizes : public gimple_opt_pass
{
public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt), insert_min_max_p (false)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
  void set_pass_param (unsigned int n, bool param)
    {
      gcc_assert (n == 0);
      insert_min_max_p = param;
    }
  virtual unsigned int execute (function *);

 private:
  /* Determines whether the pass instance creates MIN/MAX_EXPRs.  */
  bool insert_min_max_p;
}; // class pass_object_sizes

/* Dummy valueize function.  */

static tree
do_valueize (tree t)
{
  return t;
}

unsigned int
pass_object_sizes::execute (function *fun)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree result;
	  gimple *call = gsi_stmt (i);
	  if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
	    continue;

	  init_object_sizes ();

	  /* If insert_min_max_p, only attempt to fold
	     __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
	     and rather than folding the builtin to the constant if any,
	     create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
	     call result and the computed constant.  */
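	  /* E.g. (illustration; SSA numbering schematic): if
	     __builtin_object_size (x, 1) computes to 64, the call is
	     rewritten as
	       tem_3 = __builtin_object_size (x, 1);
	       lhs_4 = MIN_EXPR <tem_3, 64>;
	     so later passes can still improve tem_3.  */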
	  if (insert_min_max_p)
	    {
	      tree ost = gimple_call_arg (call, 1);
	      if (tree_fits_uhwi_p (ost))
		{
		  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
		  tree ptr = gimple_call_arg (call, 0);
		  tree lhs = gimple_call_lhs (call);
		  if ((object_size_type == 1 || object_size_type == 3)
		      && (TREE_CODE (ptr) == ADDR_EXPR
			  || TREE_CODE (ptr) == SSA_NAME)
		      && lhs)
		    {
		      tree type = TREE_TYPE (lhs);
		      unsigned HOST_WIDE_INT bytes;
		      if (compute_builtin_object_size (ptr, object_size_type,
						       &bytes)
			  && wi::fits_to_tree_p (bytes, type))
			{
			  tree tem = make_ssa_name (type);
			  gimple_call_set_lhs (call, tem);
			  enum tree_code code
			    = object_size_type == 1 ? MIN_EXPR : MAX_EXPR;
			  tree cst = build_int_cstu (type, bytes);
			  gimple *g
			    = gimple_build_assign (lhs, code, tem, cst);
			  gsi_insert_after (&i, g, GSI_NEW_STMT);
			  update_stmt (call);
			}
		    }
		}
	      continue;
	    }

	  tree lhs = gimple_call_lhs (call);
	  if (!lhs)
	    continue;

	  result = gimple_fold_stmt_to_constant (call, do_valueize);
	  if (!result)
	    {
	      tree ost = gimple_call_arg (call, 1);

	      if (tree_fits_uhwi_p (ost))
		{
		  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);

		  if (object_size_type < 2)
		    result = fold_convert (size_type_node,
					   integer_minus_one_node);
		  else if (object_size_type < 4)
		    result = build_zero_cst (size_type_node);
		}

	      if (!result)
		continue;
	    }

	  gcc_assert (TREE_CODE (result) == INTEGER_CST);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, call, 0, dump_flags);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, result);
	      fprintf (dump_file, "\n");
	    }

	  /* Propagate into all uses and fold those stmts.  */
	  replace_uses_by (lhs, result);
	}
    }

  fini_object_sizes ();
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}