/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004-2017 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "tree-object-size.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"

struct object_size_info
{
  int object_size_type;
  unsigned char pass;
  bool changed;
  bitmap visited, reexamine;
  unsigned int *depths;
  unsigned int *stack, *tos;
};

static const unsigned HOST_WIDE_INT unknown[4] = {
  HOST_WIDE_INT_M1U,
  HOST_WIDE_INT_M1U,
  0,
  0
};

static tree compute_object_offset (const_tree, const_tree);
static bool addr_object_size (struct object_size_info *,
			      const_tree, int, unsigned HOST_WIDE_INT *);
static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
static tree pass_through_call (const gcall *);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
				unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
				       unsigned int);

/* object_sizes[0] is the upper bound for the number of bytes until the
   end of the object.
   object_sizes[1] is the upper bound for the number of bytes until the
   end of the subobject (innermost array or field with address taken).
   object_sizes[2] is the lower bound for the number of bytes until the
   end of the object and object_sizes[3] the lower bound for the
   subobject.  */
static vec<unsigned HOST_WIDE_INT> object_sizes[4];

/* Bitmaps recording which object sizes have already been computed.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be addition.  */
static unsigned HOST_WIDE_INT offset_limit;
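/* Worked example of the four modes (illustrative only, not part of the
   original sources; assumes a 4-byte int and no trailing padding):

     struct S { char buf[8]; int i; } s;
     char *q = &s.buf[2];

   __builtin_object_size (q, 0) and (q, 2) are both 10, the bytes
   remaining in the whole object s, while (q, 1) and (q, 3) are both 6,
   the bytes remaining in the innermost subobject s.buf.  The maximum
   and minimum agree here because everything is statically known.  */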
/* Initialize OFFSET_LIMIT variable.  */
static void
init_offset_limit (void)
{
  if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
    offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
  else
    offset_limit = -1;
  offset_limit /= 2;
}


/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
			size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
				  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      tree low_bound, unit_size;
      low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
      unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
      if (! integer_zerop (low_bound))
	t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
	{
	  code = MINUS_EXPR;
	  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
	}
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, unit_size, t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
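/* For instance (illustrative only, continuing the struct S example
   above): for EXPR == s.buf[2] and VAR == s, the recursion handles the
   COMPONENT_REF (field offset 0 for buf) and then the ARRAY_REF (index
   2 times unit size 1), yielding a sizetype constant of 2.  */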
/* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If the size can be determined, store it in *PSIZE and return true;
   otherwise set *PSIZE to unknown[object_size_type] and return false.  */

static bool
addr_object_size (struct object_size_info *osi, const_tree ptr,
		  int object_size_type, unsigned HOST_WIDE_INT *psize)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown[object_size_type];

  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0
	  || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
	{
	  compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
				       object_size_type & ~1, &sz);
	}
      else
	{
	  tree var = TREE_OPERAND (pt_var, 0);
	  if (osi->pass == 0)
	    collect_object_sizes_for (osi, var);
	  if (bitmap_bit_p (computed[object_size_type],
			    SSA_NAME_VERSION (var)))
	    sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
	  else
	    sz = unknown[object_size_type];
	}
      if (sz != unknown[object_size_type])
	{
	  offset_int dsz = wi::sub (sz, mem_ref_offset (pt_var));
	  if (wi::neg_p (dsz))
	    sz = 0;
	  else if (wi::fits_uhwi_p (dsz))
	    sz = dsz.to_uhwi ();
	  else
	    sz = unknown[object_size_type];
	}

      if (sz != unknown[object_size_type] && sz < offset_limit)
	pt_var_size = size_int (sz);
    }
  else if (pt_var
	   && DECL_P (pt_var)
	   && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
	   && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
    pt_var_size = DECL_SIZE_UNIT (pt_var);
  else if (pt_var
	   && TREE_CODE (pt_var) == STRING_CST
	   && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
	   && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
	   && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
	      < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return false;

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
	{
	  var = TREE_OPERAND (ptr, 0);

	  while (var != pt_var
		 && TREE_CODE (var) != BIT_FIELD_REF
		 && TREE_CODE (var) != COMPONENT_REF
		 && TREE_CODE (var) != ARRAY_REF
		 && TREE_CODE (var) != ARRAY_RANGE_REF
		 && TREE_CODE (var) != REALPART_EXPR
		 && TREE_CODE (var) != IMAGPART_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
	    var = TREE_OPERAND (var, 0);
	  if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
	      || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
	      || (pt_var_size
		  && tree_int_cst_lt (pt_var_size,
				      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
	    var = pt_var;
	  else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
	    {
	      tree v = var;
	      /* For &X->fld, compute object size only if fld isn't the last
		 field, as struct { int i; char c[1]; } is often used instead
		 of flexible array member.  */
	      while (v && v != pt_var)
		switch (TREE_CODE (v))
		  {
		  case ARRAY_REF:
		    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
			&& TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
		      {
			tree domain
			  = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
			if (domain
			    && TYPE_MAX_VALUE (domain)
			    && TREE_CODE (TYPE_MAX_VALUE (domain))
			       == INTEGER_CST
			    && tree_int_cst_lt (TREE_OPERAND (v, 1),
						TYPE_MAX_VALUE (domain)))
			  {
			    v = NULL_TREE;
			    break;
			  }
		      }
		    v = TREE_OPERAND (v, 0);
		    break;
		  case REALPART_EXPR:
		  case IMAGPART_EXPR:
		    v = NULL_TREE;
		    break;
		  case COMPONENT_REF:
		    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
		      {
			v = NULL_TREE;
			break;
		      }
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (TREE_CODE (v) == COMPONENT_REF
			&& TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			   == RECORD_TYPE)
		      {
			tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
			for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
			  if (TREE_CODE (fld_chain) == FIELD_DECL)
			    break;

			if (fld_chain)
			  {
			    v = NULL_TREE;
			    break;
			  }
			v = TREE_OPERAND (v, 0);
		      }
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (v != pt_var)
		      v = NULL_TREE;
		    else
		      v = pt_var;
		    break;
		  default:
		    v = pt_var;
		    break;
		  }
	      if (v == pt_var)
		var = pt_var;
	    }
	}
      else
	var = pt_var;

      if (var != pt_var)
	var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
	return false;
      else
	var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
	{
	  if (TREE_CODE (bytes) == INTEGER_CST
	      && tree_int_cst_lt (var_size, bytes))
	    bytes = size_zero_node;
	  else
	    bytes = size_binop (MINUS_EXPR, var_size, bytes);
	}
      if (var != pt_var
	  && pt_var_size
	  && TREE_CODE (pt_var) == MEM_REF
	  && bytes != error_mark_node)
	{
	  tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
	  if (bytes2 != error_mark_node)
	    {
	      if (TREE_CODE (bytes2) == INTEGER_CST
		  && tree_int_cst_lt (pt_var_size, bytes2))
		bytes2 = size_zero_node;
	      else
		bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
	      bytes = size_binop (MIN_EXPR, bytes, bytes2);
	    }
	}
    }
  else if (!pt_var_size)
    return false;
  else
    bytes = pt_var_size;

  if (tree_fits_uhwi_p (bytes))
    {
      *psize = tree_to_uhwi (bytes);
      return true;
    }

  return false;
}
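/* Illustrative note (not part of the original sources): the last-field
   check above means that for

     struct T { int len; char data[1]; } *t;

   a subobject query on &t->data pointing into heap memory is not
   clamped to 1 byte, since struct { int i; char c[1]; } is a common
   pre-C99 idiom for a flexible array member; the size falls back to
   whatever is known about the whole allocation instead.  */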
/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
   argument from __builtin_object_size.  If unknown, return
   unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const gcall *call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  alloc_size = lookup_attribute ("alloc_size",
				 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p)) - 1;
      if (TREE_CHAIN (p))
	arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p))) - 1;
    }

  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
	arg2 = 1;
	/* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
	arg1 = 0;
      default:
	break;
      }

  if (arg1 < 0 || arg1 >= (int) gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
	  && (arg2 >= (int) gimple_call_num_args (call)
	      || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
			fold_convert (sizetype, gimple_call_arg (call, arg1)),
			fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}
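/* Example (illustrative only; my_alloc is hypothetical): given

     void *my_alloc (size_t n, size_t sz)
       __attribute__ ((alloc_size (1, 2)));

   a call my_alloc (10, 4) yields arg1 == 0, arg2 == 1 and an object
   size of 40.  Likewise calloc (8, 4) yields 32 and malloc (32)
   yields 32 via the built-in cases handled above.  */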
/* If object size is propagated from one of function's arguments directly
   to its return value, return that argument for GIMPLE_CALL statement CALL.
   Otherwise return NULL.  */

static tree
pass_through_call (const gcall *call)
{
  tree callee = gimple_call_fndecl (call);

  if (callee
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMSET:
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
      case BUILT_IN_ASSUME_ALIGNED:
	if (gimple_call_num_args (call) >= 1)
	  return gimple_call_arg (call, 0);
	break;
      default:
	break;
      }

  return NULL_TREE;
}
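/* E.g. (illustrative) for p = memcpy (dest, src, n) the function
   returns DEST, so p inherits dest's object size, since memcpy always
   returns its first argument.  */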
/* Compute __builtin_object_size value for PTR and set *PSIZE to
   the resulting value.  OBJECT_SIZE_TYPE is the second argument
   to __builtin_object_size.  Return true on success and false
   when the object size could not be determined.  */

bool
compute_builtin_object_size (tree ptr, int object_size_type,
			     unsigned HOST_WIDE_INT *psize)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown[object_size_type];

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type, psize);

  if (TREE_CODE (ptr) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr)))
    return false;

  if (computed[object_size_type] == NULL)
    {
      if (optimize || object_size_type & 1)
	return false;

      /* When not optimizing, rather than failing, make a small effort
	 to determine the object size without the full benefit of
	 the (costly) computation below.  */
      gimple *def = SSA_NAME_DEF_STMT (ptr);
      if (gimple_code (def) == GIMPLE_ASSIGN)
	{
	  tree_code code = gimple_assign_rhs_code (def);
	  if (code == POINTER_PLUS_EXPR)
	    {
	      tree offset = gimple_assign_rhs2 (def);
	      ptr = gimple_assign_rhs1 (def);

	      if (tree_fits_shwi_p (offset)
		  && compute_builtin_object_size (ptr, object_size_type,
						  psize))
		{
		  /* Return zero when the offset is out of bounds.  */
		  unsigned HOST_WIDE_INT off = tree_to_shwi (offset);
		  *psize = off < *psize ? *psize - off : 0;
		  return true;
		}
	    }
	}
      return false;
    }

  if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
    {
      struct object_size_info osi;
      bitmap_iterator bi;
      unsigned int i;

      if (num_ssa_names > object_sizes[object_size_type].length ())
	object_sizes[object_size_type].safe_grow (num_ssa_names);
      if (dump_file)
	{
	  fprintf (dump_file, "Computing %s %sobject size for ",
		   (object_size_type & 2) ? "minimum" : "maximum",
		   (object_size_type & 1) ? "sub" : "");
	  print_generic_expr (dump_file, ptr, dump_flags);
	  fprintf (dump_file, ":\n");
	}

      osi.visited = BITMAP_ALLOC (NULL);
      osi.reexamine = BITMAP_ALLOC (NULL);
      osi.object_size_type = object_size_type;
      osi.depths = NULL;
      osi.stack = NULL;
      osi.tos = NULL;

      /* First pass: walk UD chains, compute object sizes that
	 can be computed.  osi.reexamine bitmap at the end will
	 contain what variables were found in dependency cycles
	 and therefore need to be reexamined.  */
      osi.pass = 0;
      osi.changed = false;
      collect_object_sizes_for (&osi, ptr);

      /* Second pass: keep recomputing object sizes of variables
	 that need reexamination, until no object sizes are
	 increased or all object sizes are computed.  */
      if (! bitmap_empty_p (osi.reexamine))
	{
	  bitmap reexamine = BITMAP_ALLOC (NULL);

	  /* If looking for minimum instead of maximum object size,
	     detect cases where a pointer is increased in a loop.
	     Although even without this detection pass 2 would eventually
	     terminate, it could take a long time.  If a pointer is
	     increasing this way, we need to assume 0 object size.
	     E.g. p = &buf[0]; while (cond) p = p + 4;  */
	  if (object_size_type & 2)
	    {
	      osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
	      osi.stack = XNEWVEC (unsigned int, num_ssa_names);
	      osi.tos = osi.stack;
	      osi.pass = 1;
	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		  check_for_plus_in_loops (&osi, ssa_name (i));

	      free (osi.depths);
	      osi.depths = NULL;
	      free (osi.stack);
	      osi.stack = NULL;
	      osi.tos = NULL;
	    }

	  do
	    {
	      osi.pass = 2;
	      osi.changed = false;
	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		  {
		    collect_object_sizes_for (&osi, ssa_name (i));
		    if (dump_file && (dump_flags & TDF_DETAILS))
		      {
			fprintf (dump_file, "Reexamining ");
			print_generic_expr (dump_file, ssa_name (i),
					    dump_flags);
			fprintf (dump_file, "\n");
		      }
		  }
	    }
	  while (osi.changed);

	  BITMAP_FREE (reexamine);
	}
      EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
	bitmap_set_bit (computed[object_size_type], i);

      /* Debugging dumps.  */
      if (dump_file)
	{
	  EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
	    if (object_sizes[object_size_type][i]
		!= unknown[object_size_type])
	      {
		print_generic_expr (dump_file, ssa_name (i),
				    dump_flags);
		fprintf (dump_file,
			 ": %s %sobject size "
			 HOST_WIDE_INT_PRINT_UNSIGNED "\n",
			 (object_size_type & 2) ? "minimum" : "maximum",
			 (object_size_type & 1) ? "sub" : "",
			 object_sizes[object_size_type][i]);
	      }
	}

      BITMAP_FREE (osi.reexamine);
      BITMAP_FREE (osi.visited);
    }

  *psize = object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
  return *psize != unknown[object_size_type];
}
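/* Usage sketch (illustrative only):

     unsigned HOST_WIDE_INT size;
     if (compute_builtin_object_size (ptr, 0, &size))
       use size as a safe upper bound on the bytes available at ptr;

   on failure *PSIZE is left as unknown[0], i.e. all-ones, and the
   caller should treat the size as unknown.  */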
/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
	      != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
	      || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    addr_object_size (osi, value, object_size_type, &bytes);
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to the result of a call.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (is_gimple_call (call));

  gcc_assert (object_sizes[object_size_type][varno]
	      != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = alloc_object_size (call, object_size_type);

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
}
/* Compute object_sizes for PTR, defined to an unknown value.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
	      != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
}


/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
		    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
		 ? HOST_WIDE_INT_0U : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
	{
	  object_sizes[object_size_type][varno] = orig_bytes;
	  osi->changed = true;
	}
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
	{
	  object_sizes[object_size_type][varno] = orig_bytes;
	  osi->changed = true;
	}
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}
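/* For instance (illustrative only): merging q_2 = p_1 + 4 where p_1
   has maximum object size 10 gives q_2 at most 6; the true return when
   p_1 is still in osi->reexamine tells the caller that q_2 must be
   revisited in pass 2.  */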
/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
	  || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! tree_fits_uhwi_p (op1))
	bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
	return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
      else
	{
	  unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);

	  /* op0 will be ADDR_EXPR here.  */
	  addr_object_size (osi, op0, object_size_type, &bytes);
	  if (bytes == unknown[object_size_type])
	    ;
	  else if (off > offset_limit)
	    bytes = unknown[object_size_type];
	  else if (off > bytes)
	    bytes = 0;
	  else
	    bytes -= off;
	}
    }
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}


/* Compute object_sizes for VAR, defined at STMT, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  then_ = gimple_assign_rhs2 (stmt);
  else_ = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_, 0);
  else
    expr_object_size (osi, var, then_);

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_, 0);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}
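/* E.g. (illustrative) for p = cond ? &a[2] : &b[4] with char a[10] and
   char b[16], the two branches contribute 8 and 12 remaining bytes, so
   the maximum object size of p is 12 and the minimum is 8.  */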
/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For allocation GIMPLE_CALL like malloc or calloc object size is the size
   of the allocation.
   For POINTER_PLUS_EXPR where second operand is a constant integer,
   object size is object size of the first operand minus the constant.
   If the constant is bigger than the number of remaining bytes until the
   end of the object, object size is 0, but if it is instead a pointer
   subtraction, object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space; constants less than half of the address space are considered
   addition, while bigger constants are considered subtraction.
   For a memcpy like GIMPLE_CALL that always returns one of its arguments,
   the object size is the object size of that argument.
   Otherwise, object size is the maximum of the object sizes of the
   variables that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple *stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
	{
	  object_sizes[object_size_type][varno]
	    = (object_size_type & 2) ? -1 : 0;
	}
      else
	{
	  /* Found a dependency loop.  Mark the variable for later
	     re-examination.  */
	  bitmap_set_bit (osi->reexamine, varno);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Found a dependency loop at ");
	      print_generic_expr (dump_file, var, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	tree rhs = gimple_assign_rhs1 (stmt);
	if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	    || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
	  reexamine = plus_stmt_object_size (osi, var, stmt);
	else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	  reexamine = cond_expr_object_size (osi, var, stmt);
	else if (gimple_assign_single_p (stmt)
		 || gimple_assign_unary_nop_p (stmt))
	  {
	    if (TREE_CODE (rhs) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (rhs)))
	      reexamine = merge_object_sizes (osi, var, rhs, 0);
	    else
	      expr_object_size (osi, var, rhs);
	  }
	else
	  unknown_object_size (osi, var);
	break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (arg)))
	      reexamine = merge_object_sizes (osi, var, arg, 0);
	    else
	      expr_object_size (osi, var, arg);
	  }
	else
	  call_object_size (osi, var, call_stmt);
	break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
	  && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
	expr_object_size (osi, var, SSA_NAME_VAR (var));
      else
	/* Uninitialized SSA names point nowhere.  */
	object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_PHI:
      {
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (object_sizes[object_size_type][varno]
		== unknown[object_size_type])
	      break;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      reexamine |= merge_object_sizes (osi, var, rhs, 0);
	    else if (osi->pass == 0)
	      expr_object_size (osi, var, rhs);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Need to reexamine ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, "\n");
	}
    }
}
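/* Illustrative walk-through (not part of the original sources): for

     p_1 = PHI <&buf[0](2), p_2(3)>
     p_2 = p_1 + 4;

   pass 0 visits p_1, recurses through the PHI into p_2, whose
   POINTER_PLUS_EXPR merges p_1 again; since p_1 is already visited,
   both names end up in osi->reexamine, to be iterated in pass 2 (or
   zeroed by check_for_plus_in_loops when computing the minimum object
   size).  */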
/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
			   unsigned int depth)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
	{
	  unsigned int *sp;

	  /* Found a loop involving pointer addition.  */
	  for (sp = osi->tos; sp > osi->stack; )
	    {
	      --sp;
	      bitmap_clear_bit (osi->reexamine, *sp);
	      bitmap_set_bit (computed[osi->object_size_type], *sp);
	      object_sizes[osi->object_size_type][*sp] = 0;
	      if (*sp == varno)
		break;
	    }
	}
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
	if ((gimple_assign_single_p (stmt)
	     || gimple_assign_unary_nop_p (stmt))
	    && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
	  {
	    tree rhs = gimple_assign_rhs1 (stmt);

	    check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	  {
	    tree basevar = gimple_assign_rhs1 (stmt);
	    tree cst = gimple_assign_rhs2 (stmt);

	    gcc_assert (TREE_CODE (cst) == INTEGER_CST);

	    check_for_plus_in_loops_1 (osi, basevar,
				       depth + !integer_zerop (cst));
	  }
	else
	  gcc_unreachable ();
	break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, arg, depth);
	    else
	      gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_PHI:
      {
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  osi->depths[varno] = 0;
  osi->tos--;
}
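/* E.g. (illustrative) for the loop

     p = &buf[0];
     while (cond)
       p = p + 4;

   the PHI for p depends on p + 4; the recursion above reaches the same
   SSA name again at a greater depth and forces the minimum object size
   of the whole cycle to 0.  */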
/* Check if a pointer whose object size we are computing is being
   increased within a loop.  If yes, assume all the SSA variables
   participating in that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      if (integer_zerop (cst))
	return;

      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}


/* Initialize data structures for the object size computation.  */

void
init_object_sizes (void)
{
  int object_size_type;

  if (computed[0])
    return;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type].safe_grow (num_ssa_names);
      computed[object_size_type] = BITMAP_ALLOC (NULL);
    }

  init_offset_limit ();
}


/* Destroy data structures after the object size computation.  */

void
fini_object_sizes (void)
{
  int object_size_type;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type].release ();
      BITMAP_FREE (computed[object_size_type]);
    }
}


/* Simple pass to optimize all __builtin_object_size () builtins.  */

namespace {

const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_object_sizes : public gimple_opt_pass
{
public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt), insert_min_max_p (false)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
  void set_pass_param (unsigned int n, bool param)
    {
      gcc_assert (n == 0);
      insert_min_max_p = param;
    }
  virtual unsigned int execute (function *);

private:
  /* Determines whether the pass instance creates MIN/MAX_EXPRs.  */
  bool insert_min_max_p;
}; // class pass_object_sizes
/* Dummy valueize function.  */

static tree
do_valueize (tree t)
{
  return t;
}

unsigned int
pass_object_sizes::execute (function *fun)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree result;
	  gimple *call = gsi_stmt (i);
	  if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
	    continue;

	  init_object_sizes ();

	  /* If insert_min_max_p, only attempt to fold
	     __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
	     and rather than folding the builtin to the constant if any,
	     create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
	     call result and the computed constant.  */
	  if (insert_min_max_p)
	    {
	      tree ost = gimple_call_arg (call, 1);
	      if (tree_fits_uhwi_p (ost))
		{
		  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
		  tree ptr = gimple_call_arg (call, 0);
		  tree lhs = gimple_call_lhs (call);
		  if ((object_size_type == 1 || object_size_type == 3)
		      && (TREE_CODE (ptr) == ADDR_EXPR
			  || TREE_CODE (ptr) == SSA_NAME)
		      && lhs)
		    {
		      tree type = TREE_TYPE (lhs);
		      unsigned HOST_WIDE_INT bytes;
		      if (compute_builtin_object_size (ptr, object_size_type,
						       &bytes)
			  && wi::fits_to_tree_p (bytes, type))
			{
			  tree tem = make_ssa_name (type);
			  gimple_call_set_lhs (call, tem);
			  enum tree_code code
			    = object_size_type == 1 ? MIN_EXPR : MAX_EXPR;
			  tree cst = build_int_cstu (type, bytes);
			  gimple *g
			    = gimple_build_assign (lhs, code, tem, cst);
			  gsi_insert_after (&i, g, GSI_NEW_STMT);
			  update_stmt (call);
			}
		    }
		}
	      continue;
	    }

	  tree lhs = gimple_call_lhs (call);
	  if (!lhs)
	    continue;

	  result = gimple_fold_stmt_to_constant (call, do_valueize);
	  if (!result)
	    {
	      tree ost = gimple_call_arg (call, 1);

	      if (tree_fits_uhwi_p (ost))
		{
		  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);

		  if (object_size_type < 2)
		    result = fold_convert (size_type_node,
					   integer_minus_one_node);
		  else if (object_size_type < 4)
		    result = build_zero_cst (size_type_node);
		}

	      if (!result)
		continue;
	    }

	  gcc_assert (TREE_CODE (result) == INTEGER_CST);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, call, 0, dump_flags);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, result, 0);
	      fprintf (dump_file, "\n");
	    }

	  /* Propagate into all uses and fold those stmts.  */
	  replace_uses_by (lhs, result);
	}
    }

  fini_object_sizes ();
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}