/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004-2016 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "tree-object-size.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"

struct object_size_info
{
  int object_size_type;
  bitmap visited, reexamine;
  int pass;
  bool changed;
  unsigned int *depths;
  unsigned int *stack, *tos;
};

static const unsigned HOST_WIDE_INT unknown[4] = {
  HOST_WIDE_INT_M1U,
  HOST_WIDE_INT_M1U,
  0,
  0
};

static tree compute_object_offset (const_tree, const_tree);
static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
						const_tree, int);
static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
static tree pass_through_call (const gcall *);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
				unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
				       unsigned int);

/* object_sizes[0] is the upper bound for the number of bytes till the end
   of the object.
   object_sizes[1] is the upper bound for the number of bytes till the end
   of the subobject (innermost array or field with address taken).
   object_sizes[2] is the lower bound for the number of bytes till the end
   of the object and object_sizes[3] the lower bound for the subobject.  */
static vec<unsigned HOST_WIDE_INT> object_sizes[4];

/* Bitmaps recording which object sizes have been computed already.  */
static bitmap computed[4];

/* Maximum value of an offset we still consider to be an addition.  */
static unsigned HOST_WIDE_INT offset_limit;


/* Initialize OFFSET_LIMIT variable.  */
static void
init_offset_limit (void)
{
  if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
    offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
  else
    offset_limit = -1;
  offset_limit /= 2;
}
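
/* A worked illustration of the four object_size_type variants above
   (an informal example, not used by the code):

     struct A { char buf[6]; char post[4]; } a;   // sizeof (a) == 10
     char *p = &a.buf[2];

   __builtin_object_size (p, 0) == 8   maximum bytes to end of object
   __builtin_object_size (p, 1) == 4   maximum bytes to end of subobject
   __builtin_object_size (p, 2) == 8   minimum bytes to end of object
   __builtin_object_size (p, 3) == 4   minimum bytes to end of subobject

   Maximum and minimum start to differ once the pointer may refer to one
   of several objects, e.g. after a PHI node merging two pointers.  */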


/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
			size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
				  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
	{
	  code = MINUS_EXPR;
	  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
	}
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
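
/* For instance (informal, mirroring the recursion above): with

     struct S { int i; int arr[8]; } s;

   computing the offset of the tree for &s.arr[3] within s walks the
   ARRAY_REF and COMPONENT_REF cases and folds
   offsetof (struct S, arr) + 3 * sizeof (int), i.e. size_int (16) on a
   target with 4-byte int.  A negative constant array index flips CODE
   to MINUS_EXPR so the subtraction stays exact in sizetype.  */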


/* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info *osi, const_tree ptr,
		  int object_size_type)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0
	  || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
	{
	  sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
					    object_size_type & ~1);
	}
      else
	{
	  tree var = TREE_OPERAND (pt_var, 0);
	  if (osi->pass == 0)
	    collect_object_sizes_for (osi, var);
	  if (bitmap_bit_p (computed[object_size_type],
			    SSA_NAME_VERSION (var)))
	    sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
	  else
	    sz = unknown[object_size_type];
	}
      if (sz != unknown[object_size_type])
	{
	  offset_int dsz = wi::sub (sz, mem_ref_offset (pt_var));
	  if (wi::neg_p (dsz))
	    sz = 0;
	  else if (wi::fits_uhwi_p (dsz))
	    sz = dsz.to_uhwi ();
	  else
	    sz = unknown[object_size_type];
	}

      if (sz != unknown[object_size_type] && sz < offset_limit)
	pt_var_size = size_int (sz);
    }
  else if (pt_var
	   && DECL_P (pt_var)
	   && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
	   && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
    pt_var_size = DECL_SIZE_UNIT (pt_var);
  else if (pt_var
	   && TREE_CODE (pt_var) == STRING_CST
	   && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
	   && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
	   && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
	      < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return unknown[object_size_type];

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
	{
	  var = TREE_OPERAND (ptr, 0);

	  while (var != pt_var
		 && TREE_CODE (var) != BIT_FIELD_REF
		 && TREE_CODE (var) != COMPONENT_REF
		 && TREE_CODE (var) != ARRAY_REF
		 && TREE_CODE (var) != ARRAY_RANGE_REF
		 && TREE_CODE (var) != REALPART_EXPR
		 && TREE_CODE (var) != IMAGPART_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
	    var = TREE_OPERAND (var, 0);
	  if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
	      || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
	      || (pt_var_size
		  && tree_int_cst_lt (pt_var_size,
				      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
	    var = pt_var;
	  else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
	    {
	      tree v = var;
	      /* For &X->fld, compute object size only if fld isn't the last
		 field, as struct { int i; char c[1]; } is often used instead
		 of a flexible array member.  */
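	      /* Informal illustration of that heuristic: for

		   struct T { int i; char c[1]; } *p;

		 the loop below refuses to clamp
		 __builtin_object_size (&p->c[0], 1) to sizeof (p->c),
		 because c is the trailing field and may really extend
		 past the declared one element.  An interior array such
		 as buf in struct { char buf[4]; int j; } is still
		 clamped to its own size.  */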
	      while (v && v != pt_var)
		switch (TREE_CODE (v))
		  {
		  case ARRAY_REF:
		    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
			&& TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
		      {
			tree domain
			  = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
			if (domain
			    && TYPE_MAX_VALUE (domain)
			    && TREE_CODE (TYPE_MAX_VALUE (domain))
			       == INTEGER_CST
			    && tree_int_cst_lt (TREE_OPERAND (v, 1),
						TYPE_MAX_VALUE (domain)))
			  {
			    v = NULL_TREE;
			    break;
			  }
		      }
		    v = TREE_OPERAND (v, 0);
		    break;
		  case REALPART_EXPR:
		  case IMAGPART_EXPR:
		    v = NULL_TREE;
		    break;
		  case COMPONENT_REF:
		    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
		      {
			v = NULL_TREE;
			break;
		      }
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (TREE_CODE (v) == COMPONENT_REF
			&& TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			   == RECORD_TYPE)
		      {
			tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
			for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
			  if (TREE_CODE (fld_chain) == FIELD_DECL)
			    break;

			if (fld_chain)
			  {
			    v = NULL_TREE;
			    break;
			  }
			v = TREE_OPERAND (v, 0);
		      }
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (v != pt_var)
		      v = NULL_TREE;
		    else
		      v = pt_var;
		    break;
		  default:
		    v = pt_var;
		    break;
		  }
	      if (v == pt_var)
		var = pt_var;
	    }
	}
      else
	var = pt_var;

      if (var != pt_var)
	var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
	return unknown[object_size_type];
      else
	var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
	{
	  if (TREE_CODE (bytes) == INTEGER_CST
	      && tree_int_cst_lt (var_size, bytes))
	    bytes = size_zero_node;
	  else
	    bytes = size_binop (MINUS_EXPR, var_size, bytes);
	}
      if (var != pt_var
	  && pt_var_size
	  && TREE_CODE (pt_var) == MEM_REF
	  && bytes != error_mark_node)
	{
	  tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
	  if (bytes2 != error_mark_node)
	    {
	      if (TREE_CODE (bytes2) == INTEGER_CST
		  && tree_int_cst_lt (pt_var_size, bytes2))
		bytes2 = size_zero_node;
	      else
		bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
	      bytes = size_binop (MIN_EXPR, bytes, bytes2);
	    }
	}
    }
  else if (!pt_var_size)
    return unknown[object_size_type];
  else
    bytes = pt_var_size;

  if (tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}
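
/* Concrete behavior of addr_object_size (informal example): for

     static char buf[10];

   addr_object_size (NULL, <ADDR_EXPR for &buf[4]>, 0) finds pt_var ==
   buf with DECL_SIZE_UNIT 10, subtracts the offset computed by
   compute_object_offset and returns 6.  An offset past the end, such
   as &buf[12], yields 0 rather than wrapping around.  */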


/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
   argument from __builtin_object_size.  If unknown, return
   unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const gcall *call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  alloc_size = lookup_attribute ("alloc_size",
				 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p)) - 1;
      if (TREE_CHAIN (p))
	arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p))) - 1;
    }

  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
	arg2 = 1;
	/* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
	arg1 = 0;
      default:
	break;
      }

  if (arg1 < 0 || arg1 >= (int) gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
	  && (arg2 >= (int) gimple_call_num_args (call)
	      || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
			fold_convert (sizetype, gimple_call_arg (call, arg1)),
			fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}


/* If object size is propagated from one of the function's arguments
   directly to its return value, return that argument for GIMPLE_CALL
   statement CALL.  Otherwise return NULL.  */

static tree
pass_through_call (const gcall *call)
{
  tree callee = gimple_call_fndecl (call);

  if (callee
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMSET:
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
      case BUILT_IN_ASSUME_ALIGNED:
	if (gimple_call_num_args (call) >= 1)
	  return gimple_call_arg (call, 0);
	break;
      default:
	break;
      }

  return NULL_TREE;
}
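
/* Sketch of what alloc_object_size recognizes (informal; my_alloc is a
   made-up function, not something this file defines):

     void *my_alloc (size_t n, size_t sz)
       __attribute__ ((alloc_size (1, 2)));

     void *p = my_alloc (10, 4);
     void *q = __builtin_malloc (32);

   Both arguments of my_alloc are INTEGER_CSTs here, so p gets object
   size 10 * 4 == 40; q gets 32 via the BUILT_IN_MALLOC case.  A
   non-constant argument makes the size unknown[object_size_type].  */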


/* Compute __builtin_object_size value for PTR.  OBJECT_SIZE_TYPE is the
   second argument from __builtin_object_size.  */

unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type);

  if (TREE_CODE (ptr) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (ptr))
      && computed[object_size_type] != NULL)
    {
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
	{
	  struct object_size_info osi;
	  bitmap_iterator bi;
	  unsigned int i;

	  if (num_ssa_names > object_sizes[object_size_type].length ())
	    object_sizes[object_size_type].safe_grow (num_ssa_names);
	  if (dump_file)
	    {
	      fprintf (dump_file, "Computing %s %sobject size for ",
		       (object_size_type & 2) ? "minimum" : "maximum",
		       (object_size_type & 1) ? "sub" : "");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, ":\n");
	    }

	  osi.visited = BITMAP_ALLOC (NULL);
	  osi.reexamine = BITMAP_ALLOC (NULL);
	  osi.object_size_type = object_size_type;
	  osi.depths = NULL;
	  osi.stack = NULL;
	  osi.tos = NULL;

	  /* First pass: walk UD chains, compute object sizes that
	     can be computed.  At the end, the osi.reexamine bitmap
	     records which variables were found in dependency cycles
	     and therefore need to be reexamined.  */
	  osi.pass = 0;
	  osi.changed = false;
	  collect_object_sizes_for (&osi, ptr);

	  /* Second pass: keep recomputing object sizes of variables
	     that need reexamination, until no object sizes are
	     increased or all object sizes are computed.  */
	  if (! bitmap_empty_p (osi.reexamine))
	    {
	      bitmap reexamine = BITMAP_ALLOC (NULL);

	      /* If looking for minimum instead of maximum object size,
		 detect cases where a pointer is increased in a loop.
		 Although even without this detection pass 2 would eventually
		 terminate, it could take a long time.  If a pointer is
		 increasing this way, we need to assume 0 object size.
		 E.g. p = &buf[0]; while (cond) p = p + 4;  */
	      if (object_size_type & 2)
		{
		  osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
		  osi.stack = XNEWVEC (unsigned int, num_ssa_names);
		  osi.tos = osi.stack;
		  osi.pass = 1;
		  /* collect_object_sizes_for is changing
		     osi.reexamine bitmap, so iterate over a copy.  */
		  bitmap_copy (reexamine, osi.reexamine);
		  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		    if (bitmap_bit_p (osi.reexamine, i))
		      check_for_plus_in_loops (&osi, ssa_name (i));

		  free (osi.depths);
		  osi.depths = NULL;
		  free (osi.stack);
		  osi.stack = NULL;
		  osi.tos = NULL;
		}

	      do
		{
		  osi.pass = 2;
		  osi.changed = false;
		  /* collect_object_sizes_for is changing
		     osi.reexamine bitmap, so iterate over a copy.  */
		  bitmap_copy (reexamine, osi.reexamine);
		  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		    if (bitmap_bit_p (osi.reexamine, i))
		      {
			collect_object_sizes_for (&osi, ssa_name (i));
			if (dump_file && (dump_flags & TDF_DETAILS))
			  {
			    fprintf (dump_file, "Reexamining ");
			    print_generic_expr (dump_file, ssa_name (i),
						dump_flags);
			    fprintf (dump_file, "\n");
			  }
		      }
		}
	      while (osi.changed);

	      BITMAP_FREE (reexamine);
	    }
	  EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
	    bitmap_set_bit (computed[object_size_type], i);

	  /* Debugging dumps.  */
	  if (dump_file)
	    {
	      EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
		if (object_sizes[object_size_type][i]
		    != unknown[object_size_type])
		  {
		    print_generic_expr (dump_file, ssa_name (i),
					dump_flags);
		    fprintf (dump_file,
			     ": %s %sobject size "
			     HOST_WIDE_INT_PRINT_UNSIGNED "\n",
			     (object_size_type & 2) ? "minimum" : "maximum",
			     (object_size_type & 1) ? "sub" : "",
			     object_sizes[object_size_type][i]);
		  }
	    }

	  BITMAP_FREE (osi.reexamine);
	  BITMAP_FREE (osi.visited);
	}

      return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
    }

  return unknown[object_size_type];
}

/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
	      != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
	      || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    bytes = addr_object_size (osi, value, object_size_type);
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to the result of a call.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (is_gimple_call (call));

  gcc_assert (object_sizes[object_size_type][varno]
	      != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = alloc_object_size (call, object_size_type);

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to an unknown value.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
	      != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
}
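
/* Informal example of the merge below: if ORIG is q_2 with a recorded
   maximum object size of 10 and DEST is p_3 defined as

     p_3 = q_2 + 4;

   then merging with OFFSET == 4 yields 10 - 4 == 6, and p_3's recorded
   size grows to 6 if it was smaller (or shrinks toward 6 in the
   minimum modes).  */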

/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
		    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
		 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
	{
	  object_sizes[object_size_type][varno] = orig_bytes;
	  osi->changed = true;
	}
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
	{
	  object_sizes[object_size_type][varno] = orig_bytes;
	  osi->changed = true;
	}
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}


/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
	  || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! tree_fits_uhwi_p (op1))
	bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
	return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
      else
	{
	  unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);

	  /* op0 will be ADDR_EXPR here.  */
	  bytes = addr_object_size (osi, op0, object_size_type);
	  if (bytes == unknown[object_size_type])
	    ;
	  else if (off > offset_limit)
	    bytes = unknown[object_size_type];
	  else if (off > bytes)
	    bytes = 0;
	  else
	    bytes -= off;
	}
    }
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}
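
/* Informal example for the COND_EXPR case below: given

     char a[8], b[3];
     p_2 = c_1 ? &a[0] : &b[0];

   the maximum object size of p_2 merges to MAX (8, 3) == 8 and the
   minimum to MIN (8, 3) == 3, each arm contributing through either
   merge_object_sizes or expr_object_size.  */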

/* Compute object_sizes for VAR, defined at STMT, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  then_ = gimple_assign_rhs2 (stmt);
  else_ = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_, 0);
  else
    expr_object_size (osi, var, then_);

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_, 0);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}

/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For an allocation GIMPLE_CALL like malloc or calloc the object size is
   the size of the allocation.
   For a POINTER_PLUS_EXPR where the second operand is a constant integer,
   the object size is the object size of the first operand minus the
   constant.  If the constant is bigger than the number of remaining bytes
   until the end of the object, the object size is 0, but if it is instead
   a pointer subtraction, the object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space, and constants less than half of the address space are considered
   addition, while bigger constants are considered subtraction.
   For a memcpy-like GIMPLE_CALL that always returns one of its arguments,
   the object size is the object size of that argument.
   Otherwise, the object size is the maximum of the object sizes of the
   variables that VAR might be set to.  */
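
/* Numeric illustration of the half-address-space convention (informal;
   assumes a 32-bit sizetype, so offset_limit is 0x7fffffff):

     p_2 = p_1 + 4;           4 < offset_limit: treated as an addition.
     q_2 = q_1 + 0xfffffffc;  the unsigned encoding of -4 exceeds
			      offset_limit: treated as a subtraction,
			      so q_2's object size becomes unknown.

   Objects whose size is at least half the address space are rejected
   up front, so this classification cannot misfire.  */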

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple *stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
	{
	  object_sizes[object_size_type][varno]
	    = (object_size_type & 2) ? -1 : 0;
	}
      else
	{
	  /* Found a dependency loop.  Mark the variable for later
	     re-examination.  */
	  bitmap_set_bit (osi->reexamine, varno);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Found a dependency loop at ");
	      print_generic_expr (dump_file, var, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	tree rhs = gimple_assign_rhs1 (stmt);
	if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	    || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
	  reexamine = plus_stmt_object_size (osi, var, stmt);
	else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	  reexamine = cond_expr_object_size (osi, var, stmt);
	else if (gimple_assign_single_p (stmt)
		 || gimple_assign_unary_nop_p (stmt))
	  {
	    if (TREE_CODE (rhs) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (rhs)))
	      reexamine = merge_object_sizes (osi, var, rhs, 0);
	    else
	      expr_object_size (osi, var, rhs);
	  }
	else
	  unknown_object_size (osi, var);
	break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (arg)))
	      reexamine = merge_object_sizes (osi, var, arg, 0);
	    else
	      expr_object_size (osi, var, arg);
	  }
	else
	  call_object_size (osi, var, call_stmt);
	break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
	  && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
	expr_object_size (osi, var, SSA_NAME_VAR (var));
      else
	/* Uninitialized SSA names point nowhere.  */
	object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_PHI:
      {
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (object_sizes[object_size_type][varno]
		== unknown[object_size_type])
	      break;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      reexamine |= merge_object_sizes (osi, var, rhs, 0);
	    else if (osi->pass == 0)
	      expr_object_size (osi, var, rhs);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Need to reexamine ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, "\n");
	}
    }
}
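
/* Informal sketch of the pattern the helpers below look for, in GIMPLE
   (block labels are made up for the example):

     # p_1 = PHI <&buf[0](entry), p_2(latch)>
     p_2 = p_1 + 4;

   p_1 and p_2 form a cycle in which the pointer only grows, so for the
   minimum object size both are forced to 0 immediately instead of
   iterating pass 2 until the sizes bottom out.  */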

/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
			   unsigned int depth)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
	{
	  unsigned int *sp;

	  /* Found a loop involving pointer addition.  */
	  for (sp = osi->tos; sp > osi->stack; )
	    {
	      --sp;
	      bitmap_clear_bit (osi->reexamine, *sp);
	      bitmap_set_bit (computed[osi->object_size_type], *sp);
	      object_sizes[osi->object_size_type][*sp] = 0;
	      if (*sp == varno)
		break;
	    }
	}
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
	if ((gimple_assign_single_p (stmt)
	     || gimple_assign_unary_nop_p (stmt))
	    && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
	  {
	    tree rhs = gimple_assign_rhs1 (stmt);

	    check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	  {
	    tree basevar = gimple_assign_rhs1 (stmt);
	    tree cst = gimple_assign_rhs2 (stmt);

	    gcc_assert (TREE_CODE (cst) == INTEGER_CST);

	    check_for_plus_in_loops_1 (osi, basevar,
				       depth + !integer_zerop (cst));
	  }
	else
	  gcc_unreachable ();
	break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, arg, depth);
	    else
	      gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_PHI:
      {
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  osi->depths[varno] = 0;
  osi->tos--;
}


/* Check whether a pointer whose object size we are computing is being
   increased within a loop.  If so, assume all the SSA variables
   participating in that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      if (integer_zerop (cst))
	return;

      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}


/* Initialize data structures for the object size computation.  */

void
init_object_sizes (void)
{
  int object_size_type;

  if (computed[0])
    return;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type].safe_grow (num_ssa_names);
      computed[object_size_type] = BITMAP_ALLOC (NULL);
    }

  init_offset_limit ();
}


/* Destroy data structures after the object size computation.  */

static void
fini_object_sizes (void)
{
  int object_size_type;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type].release ();
      BITMAP_FREE (computed[object_size_type]);
    }
}


/* Simple pass to optimize all __builtin_object_size () builtins.  */

namespace {

const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_object_sizes : public gimple_opt_pass
{
public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt), insert_min_max_p (false)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
  void set_pass_param (unsigned int n, bool param)
    {
      gcc_assert (n == 0);
      insert_min_max_p = param;
    }
  virtual unsigned int execute (function *);

 private:
  /* Determines whether the pass instance creates MIN/MAX_EXPRs.  */
  bool insert_min_max_p;
}; // class pass_object_sizes

/* Dummy valueize function.  */

static tree
do_valueize (tree t)
{
  return t;
}

unsigned int
pass_object_sizes::execute (function *fun)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree result;
	  gimple *call = gsi_stmt (i);
	  if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
	    continue;

	  init_object_sizes ();

	  /* If insert_min_max_p, only attempt to fold
	     __builtin_object_size (x, 1) and __builtin_object_size (x, 3);
	     rather than folding the builtin to a constant, create a
	     MIN_EXPR or MAX_EXPR of the __builtin_object_size call result
	     and the computed constant.  */
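	  /* Informal before/after of that transformation (SSA names and
	     the constant 12 are made up for the example):

	       sz_3 = __builtin_object_size (p_1, 1);

	     becomes

	       tem_7 = __builtin_object_size (p_1, 1);
	       sz_3 = MIN_EXPR <tem_7, 12>;

	     so later folding can still improve on 12, but never go
	     past it.  */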
	  if (insert_min_max_p)
	    {
	      tree ost = gimple_call_arg (call, 1);
	      if (tree_fits_uhwi_p (ost))
		{
		  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
		  tree ptr = gimple_call_arg (call, 0);
		  tree lhs = gimple_call_lhs (call);
		  if ((object_size_type == 1 || object_size_type == 3)
		      && (TREE_CODE (ptr) == ADDR_EXPR
			  || TREE_CODE (ptr) == SSA_NAME)
		      && lhs)
		    {
		      tree type = TREE_TYPE (lhs);
		      unsigned HOST_WIDE_INT bytes
			= compute_builtin_object_size (ptr, object_size_type);
		      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type == 1
							     ? -1 : 0)
			  && wi::fits_to_tree_p (bytes, type))
			{
			  tree tem = make_ssa_name (type);
			  gimple_call_set_lhs (call, tem);
			  enum tree_code code
			    = object_size_type == 1 ? MIN_EXPR : MAX_EXPR;
			  tree cst = build_int_cstu (type, bytes);
			  gimple *g
			    = gimple_build_assign (lhs, code, tem, cst);
			  gsi_insert_after (&i, g, GSI_NEW_STMT);
			  update_stmt (call);
			}
		    }
		}
	      continue;
	    }

	  tree lhs = gimple_call_lhs (call);
	  if (!lhs)
	    continue;

	  result = gimple_fold_stmt_to_constant (call, do_valueize);
	  if (!result)
	    {
	      tree ost = gimple_call_arg (call, 1);

	      if (tree_fits_uhwi_p (ost))
		{
		  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);

		  if (object_size_type < 2)
		    result = fold_convert (size_type_node,
					   integer_minus_one_node);
		  else if (object_size_type < 4)
		    result = build_zero_cst (size_type_node);
		}

	      if (!result)
		continue;
	    }

	  gcc_assert (TREE_CODE (result) == INTEGER_CST);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, call, 0, dump_flags);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, result, 0);
	      fprintf (dump_file, "\n");
	    }

	  /* Propagate into all uses and fold those stmts.  */
	  replace_uses_by (lhs, result);
	}
    }

  fini_object_sizes ();
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}