/* Pass computing data for optimizing stdarg functions.
   Copyright (C) 2004-2016 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "langhooks.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "gimplify.h"
#include "tree-into-ssa.h"
#include "tree-cfg.h"
#include "tree-stdarg.h"
#include "tree-chkp.h"

/* A simple pass that attempts to optimize stdarg functions on architectures
   that need to save register arguments to stack on entry to stdarg functions.
   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used and the va_list variables don't
   escape the function, it is only necessary to save registers that will be
   used in va_arg macros.  E.g. if va_arg is only used with integral types
   in the function, floating point registers don't need to be saved, etc.  */
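/* For example, given a function like

     int
     sum (int count, ...)
     {
       va_list ap;
       int i, total = 0;

       va_start (ap, count);
       for (i = 0; i < count; i++)
         total += va_arg (ap, int);
       va_end (ap);
       return total;
     }

   every va_arg uses an integral type, so on a target that passes
   arguments in both general purpose and floating point registers
   (such as x86_64) the prologue only needs to fill in the general
   purpose part of the register save area.  */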

/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  vec<edge> stack = vNULL;
  edge e;
  edge_iterator ei;
  sbitmap visited;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (visited);
  ret = true;

  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    stack.safe_push (e);

  while (! stack.is_empty ())
    {
      basic_block src;

      e = stack.pop ();
      src = e->src;

      if (e->flags & EDGE_COMPLEX)
        {
          ret = false;
          break;
        }

      if (src == va_start_bb)
        continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
        {
          ret = false;
          break;
        }

      gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));

      if (! bitmap_bit_p (visited, src->index))
        {
          bitmap_set_bit (visited, src->index);
          FOR_EACH_EDGE (e, ei, src->preds)
            stack.safe_push (e);
        }
    }

  stack.release ();
  sbitmap_free (visited);
  return ret;
}


/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is a GPR counter.  */
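/* As an illustration: once IFN_VA_ARG has been expanded (see
   expand_ifn_va_arg_1 below), each va_arg of an integral type on
   x86_64 updates the gp_offset field of the va_list by 8, with a
   GIMPLE sequence along the lines of

     gp_offset.0_1 = ap[0].gp_offset;
     _2 = gp_offset.0_1 + 8;
     ap[0].gp_offset = _2;

   va_list_counter_bump starts from RHS and walks the def chain
   backwards, summing the constant increments, until it reaches a read
   of COUNTER itself.  */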
static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
                      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple *stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
        si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        {
          if (counter_val >= max_size)
            {
              ret = max_size;
              break;
            }

          ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
          break;
        }

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
        return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
           || gimple_assign_cast_p (stmt))
          && TREE_CODE (rhs1) == SSA_NAME)
        {
          lhs = rhs1;
          continue;
        }

      if ((rhs_code == POINTER_PLUS_EXPR
           || rhs_code == PLUS_EXPR)
          && TREE_CODE (rhs1) == SSA_NAME
          && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
        {
          ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
          lhs = rhs1;
          continue;
        }

      if (rhs_code == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
          && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
        {
          ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
          lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
          continue;
        }

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
        return HOST_WIDE_INT_M1U;

      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
        return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
        {
          if (get_base_address (counter) != get_base_address (rhs)
              || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
              || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
            return HOST_WIDE_INT_M1U;
        }
      else if (counter != rhs)
        return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        break;

      if (val >= max_size)
        si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
        si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
           || gimple_assign_cast_p (stmt))
          && TREE_CODE (rhs1) == SSA_NAME)
        {
          lhs = rhs1;
          continue;
        }

      if ((rhs_code == POINTER_PLUS_EXPR
           || rhs_code == PLUS_EXPR)
          && TREE_CODE (rhs1) == SSA_NAME
          && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
        {
          val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
          lhs = rhs1;
          continue;
        }

      if (rhs_code == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
          && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
        {
          val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
          lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
          continue;
        }

      lhs = NULL;
    }

  return ret;
}


/* Called by walk_tree to look for references to va_list variables.  */

static tree
find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                        void *data)
{
  bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
  tree var = *tp;

  if (TREE_CODE (var) == SSA_NAME)
    {
      if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
        return var;
    }
  else if (TREE_CODE (var) == VAR_DECL)
    {
      if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
        return var;
    }

  return NULL_TREE;
}


/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter,
   if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
   statement.  GPR_P is true if AP is a GPR counter, false if it is
   an FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
                    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
        {
          cfun->va_list_gpr_size += increment;
          return;
        }

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
        {
          cfun->va_list_fpr_size += increment;
          return;
        }
    }

  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
        cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}


/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
   If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
   is false, AP has been seen in VAR = AP assignment.
   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
   va_arg operation that doesn't cause the va_list variable to escape
   current function.  */
static bool
va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
                           bool write_p)
{
  tree base;

  if (TREE_CODE (ap) != COMPONENT_REF
      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
    return false;

  if (TREE_CODE (var) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
    return false;

  base = get_base_address (ap);
  if (TREE_CODE (base) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
    return false;

  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
    va_list_counter_op (si, ap, var, true, write_p);
  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
    va_list_counter_op (si, ap, var, false, write_p);

  return true;
}


/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}


/* Check for:
     tem1 = AP;
     TEM2 = tem1 + CST;
     AP = TEM2;
   sequence and update cfun->va_list_gpr_size.  Return true if found.  */

static bool
va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
{
  unsigned HOST_WIDE_INT increment;

  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem2) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
    return false;

  if (si->compute_sizes <= 0)
    return false;

  increment = va_list_counter_bump (si, ap, tem2, true);
  if (increment + 1 <= 1)
    return false;

  if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
    cfun->va_list_gpr_size += increment;
  else
    cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;

  return true;
}
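
/* If a va_list value reaches anything other than these recognized
   va_arg patterns, it is considered escaped.  For example:

     void bar (va_list);

     void
     foo (int x, ...)
     {
       va_list ap;

       va_start (ap, x);
       bar (ap);
       va_end (ap);
     }

   Here AP is passed to another function, so va_start must save all
   argument registers.  The helpers below track which SSA temporaries
   hold va_list values and whether any of them escape.  */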

/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing value of some va_list variable plus optionally some constant,
   either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
   depending whether LHS is a function local temporary.  */

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
        return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
           && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
           && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
        return;
    }
  else
    return;

  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == HOST_WIDE_INT_M1U)
    {
      si->va_list_escapes = true;
      return;
    }

  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}


/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
           gsi_next (&i))
        {
          tree lhs;
          use_operand_p uop;
          ssa_op_iter soi;
          gphi *phi = i.phi ();

          lhs = PHI_RESULT (phi);
          if (virtual_operand_p (lhs)
              || bitmap_bit_p (si->va_list_escape_vars,
                               SSA_NAME_VERSION (lhs)))
            continue;

          FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
            {
              tree rhs = USE_FROM_PTR (uop);
              if (TREE_CODE (rhs) == SSA_NAME
                  && bitmap_bit_p (si->va_list_escape_vars,
                                   SSA_NAME_VERSION (rhs)))
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fputs ("va_list escapes in ", dump_file);
                      print_gimple_stmt (dump_file, phi, 0, dump_flags);
                      fputc ('\n', dump_file);
                    }
                  return true;
                }
            }
        }

      for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
           gsi_next (&i))
        {
          gimple *stmt = gsi_stmt (i);
          tree use;
          ssa_op_iter iter;

          if (is_gimple_debug (stmt))
            continue;
          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
            {
              if (! bitmap_bit_p (si->va_list_escape_vars,
                                  SSA_NAME_VERSION (use)))
                continue;

              if (is_gimple_assign (stmt))
                {
                  tree rhs = gimple_assign_rhs1 (stmt);
                  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

                  /* x = *ap_temp;  */
                  if (rhs_code == MEM_REF
                      && TREE_OPERAND (rhs, 0) == use
                      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
                      && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
                      && si->offsets[SSA_NAME_VERSION (use)] != -1)
                    {
                      unsigned HOST_WIDE_INT gpr_size;
                      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

                      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
                                 + tree_to_shwi (TREE_OPERAND (rhs, 1))
                                 + tree_to_uhwi (access_size);
                      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
                        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
                      else if (gpr_size > cfun->va_list_gpr_size)
                        cfun->va_list_gpr_size = gpr_size;
                      continue;
                    }

                  /* va_arg sequences may contain
                       other_ap_temp = ap_temp;
                       other_ap_temp = ap_temp + constant;
                       other_ap_temp = (some_type *) ap_temp;
                       ap = ap_temp;
                     statements.  */
                  if (rhs == use
                      && ((rhs_code == POINTER_PLUS_EXPR
                           && (TREE_CODE (gimple_assign_rhs2 (stmt))
                               == INTEGER_CST))
                          || gimple_assign_cast_p (stmt)
                          || (get_gimple_rhs_class (rhs_code)
                              == GIMPLE_SINGLE_RHS)))
                    {
                      tree lhs = gimple_assign_lhs (stmt);

                      if (TREE_CODE (lhs) == SSA_NAME
                          && bitmap_bit_p (si->va_list_escape_vars,
                                           SSA_NAME_VERSION (lhs)))
                        continue;

                      if (TREE_CODE (lhs) == VAR_DECL
                          && bitmap_bit_p (si->va_list_vars,
                                           DECL_UID (lhs) + num_ssa_names))
                        continue;
                    }
                  else if (rhs_code == ADDR_EXPR
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
                           && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
                    {
                      tree lhs = gimple_assign_lhs (stmt);

                      if (bitmap_bit_p (si->va_list_escape_vars,
                                        SSA_NAME_VERSION (lhs)))
                        continue;
                    }
                }

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                  fputc ('\n', dump_file);
                }
              return true;
            }
        }
    }

  return false;
}

/* Optimize FUN->va_list_gpr_size and FUN->va_list_fpr_size.  */
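/* When dumping is enabled (e.g. with -fdump-tree-stdarg), the result is
   reported with a per-function summary line such as:

     foo: va_list escapes 0, needs to save 16 GPR units and 0 FPR units.

   as emitted at the end of this function.  */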
static void
optimize_va_list_gpr_fpr_size (function *fun)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  struct walk_stmt_info wi;
  const char *funcname = NULL;
  tree cfun_va_list;

  fun->va_list_gpr_size = 0;
  fun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  cfun_va_list = targetm.fn_abi_va_list (fun->decl);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);
  gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple *stmt = gsi_stmt (i);
          tree callee, ap;

          if (!is_gimple_call (stmt))
            continue;

          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            continue;

          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_VA_START:
              break;
              /* If old style builtins are used, don't optimize anything.  */
            case BUILT_IN_SAVEREGS:
            case BUILT_IN_NEXT_ARG:
              va_list_escapes = true;
              continue;
            default:
              continue;
            }

          si.va_start_count++;
          ap = gimple_call_arg (stmt, 0);

          if (TREE_CODE (ap) != ADDR_EXPR)
            {
              va_list_escapes = true;
              break;
            }
          ap = TREE_OPERAND (ap, 0);
          if (TREE_CODE (ap) == ARRAY_REF)
            {
              if (! integer_zerop (TREE_OPERAND (ap, 1)))
                {
                  va_list_escapes = true;
                  break;
                }
              ap = TREE_OPERAND (ap, 0);
            }
          if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
              != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (fun->decl))
              || TREE_CODE (ap) != VAR_DECL)
            {
              va_list_escapes = true;
              break;
            }

          if (is_global_var (ap))
            {
              va_list_escapes = true;
              break;
            }

          bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);

          /* VA_START_BB and VA_START_AP will be only used if there is just
             one va_start in the function.  */
          si.va_start_bb = bb;
          si.va_start_ap = ap;
        }

      if (va_list_escapes)
        break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter
     fields are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  calculate_dominance_info (CDI_DOMINATORS);
  memset (&wi, 0, sizeof (wi));
  wi.info = si.va_list_vars;

  FOR_EACH_BB_FN (bb, fun)
    {
      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
         them as assignments for the purpose of escape analysis.  This is
         not needed for non-simple va_list because virtual phis don't perform
         any real data movement.  Also, check PHI nodes for taking address of
         the va_list vars.  */
      tree lhs, rhs;
      use_operand_p uop;
      ssa_op_iter soi;

      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
           gsi_next (&i))
        {
          gphi *phi = i.phi ();
          lhs = PHI_RESULT (phi);

          if (virtual_operand_p (lhs))
            continue;

          if (va_list_simple_ptr)
            {
              FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
                {
                  rhs = USE_FROM_PTR (uop);
                  if (va_list_ptr_read (&si, rhs, lhs))
                    continue;
                  else if (va_list_ptr_write (&si, lhs, rhs))
                    continue;
                  else
                    check_va_list_escapes (&si, lhs, rhs);

                  if (si.va_list_escapes)
                    {
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        {
                          fputs ("va_list escapes in ", dump_file);
                          print_gimple_stmt (dump_file, phi, 0, dump_flags);
                          fputc ('\n', dump_file);
                        }
                      va_list_escapes = true;
                    }
                }
            }

          for (unsigned j = 0; !va_list_escapes
               && j < gimple_phi_num_args (phi); ++j)
            if ((!va_list_simple_ptr
                 || TREE_CODE (gimple_phi_arg_def (phi, j)) != SSA_NAME)
                && walk_tree (gimple_phi_arg_def_ptr (phi, j),
                              find_va_list_reference, &wi, NULL))
              {
                if (dump_file && (dump_flags & TDF_DETAILS))
                  {
                    fputs ("va_list escapes in ", dump_file);
                    print_gimple_stmt (dump_file, phi, 0, dump_flags);
                    fputc ('\n', dump_file);
                  }
                va_list_escapes = true;
              }
        }

      for (gimple_stmt_iterator i = gsi_start_bb (bb);
           !gsi_end_p (i) && !va_list_escapes;
           gsi_next (&i))
        {
          gimple *stmt = gsi_stmt (i);

          /* Don't look at __builtin_va_{start,end}, they are ok.  */
          if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);

              if (callee
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
                continue;
            }

          if (is_gimple_assign (stmt))
            {
              lhs = gimple_assign_lhs (stmt);
              rhs = gimple_assign_rhs1 (stmt);

              if (va_list_simple_ptr)
                {
                  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                      == GIMPLE_SINGLE_RHS)
                    {
                      /* Check for ap ={v} {}.  */
                      if (TREE_CLOBBER_P (rhs))
                        continue;

                      /* Check for tem = ap.  */
                      else if (va_list_ptr_read (&si, rhs, lhs))
                        continue;

                      /* Check for the last insn in:
                           tem1 = ap;
                           tem2 = tem1 + CST;
                           ap = tem2;
                         sequence.  */
                      else if (va_list_ptr_write (&si, lhs, rhs))
                        continue;
                    }

                  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
                      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
                      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                          == GIMPLE_SINGLE_RHS))
                    check_va_list_escapes (&si, lhs, rhs);
                }
              else
                {
                  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                      == GIMPLE_SINGLE_RHS)
                    {
                      /* Check for ap ={v} {}.  */
                      if (TREE_CLOBBER_P (rhs))
                        continue;

                      /* Check for ap[0].field = temp.  */
                      else if (va_list_counter_struct_op (&si, lhs, rhs, true))
                        continue;

                      /* Check for temp = ap[0].field.  */
                      else if (va_list_counter_struct_op (&si, rhs, lhs,
                                                          false))
                        continue;
                    }

                  /* Do any architecture specific checking.  */
                  if (targetm.stdarg_optimize_hook
                      && targetm.stdarg_optimize_hook (&si, stmt))
                    continue;
                }
            }
          else if (is_gimple_debug (stmt))
            continue;

          /* All other uses of va_list are either va_copy (that is not handled
             in this optimization), taking address of va_list variable or
             passing va_list to other functions (in that case va_list might
             escape the function and therefore va_start needs to set it up
             fully), or some unexpected use of va_list.  None of these should
             happen in a gimplified VA_ARG_EXPR.  */
          if (si.va_list_escapes
              || walk_gimple_op (stmt, find_va_list_reference, &wi))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                  fputc ('\n', dump_file);
                }
              va_list_escapes = true;
            }
        }

      if (va_list_escapes)
        break;
    }

  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  if (va_list_escapes)
    {
      fun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
               funcname, (int) va_list_escapes);
      if (fun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", fun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (fun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", fun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
}

/* Return true if STMT is IFN_VA_ARG.  */

static bool
gimple_call_ifn_va_arg_p (gimple *stmt)
{
  return (is_gimple_call (stmt)
          && gimple_call_internal_p (stmt)
          && gimple_call_internal_fn (stmt) == IFN_VA_ARG);
}

/* Expand IFN_VA_ARGs in FUN.  */
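/* A gimplified x = va_arg (ap, T) appears in the IL as an internal
   function call roughly of the form

     x = VA_ARG (&ap, 0B, 0B);

   where the first argument is the address of the va_list (usually added
   by build_va_arg), the pointee type of the second argument carries T,
   and for variable-sized T a fourth argument transports the size from
   the original WITH_SIZE_EXPR.  expand_ifn_va_arg_1 replaces each such
   call with the sequence produced by targetm.gimplify_va_arg_expr.  */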
static void
expand_ifn_va_arg_1 (function *fun)
{
  bool modified = false;
  basic_block bb;
  gimple_stmt_iterator i;
  location_t saved_location;

  FOR_EACH_BB_FN (bb, fun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
        gimple *stmt = gsi_stmt (i);
        tree ap, aptype, expr, lhs, type;
        gimple_seq pre = NULL, post = NULL;

        if (!gimple_call_ifn_va_arg_p (stmt))
          continue;

        modified = true;

        type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 1)));
        ap = gimple_call_arg (stmt, 0);
        aptype = TREE_TYPE (gimple_call_arg (stmt, 2));
        gcc_assert (POINTER_TYPE_P (aptype));

        /* Balance out the &ap, usually added by build_va_arg.  */
        ap = build2 (MEM_REF, TREE_TYPE (aptype), ap,
                     build_int_cst (aptype, 0));

        push_gimplify_context (false);
        saved_location = input_location;
        input_location = gimple_location (stmt);

        /* Make it easier for the backends by protecting the valist argument
           from multiple evaluations.  */
        gimplify_expr (&ap, &pre, &post, is_gimple_min_lval, fb_lvalue);

        expr = targetm.gimplify_va_arg_expr (ap, type, &pre, &post);

        lhs = gimple_call_lhs (stmt);
        if (lhs != NULL_TREE)
          {
            unsigned int nargs = gimple_call_num_args (stmt);
            gcc_assert (useless_type_conversion_p (TREE_TYPE (lhs), type));

            /* We replace the call with a new expr.  This may require
               a corresponding bndret call fixup.  */
            if (chkp_function_instrumented_p (fun->decl))
              chkp_fixup_inlined_call (lhs, expr);

            if (nargs == 4)
              {
                /* We've transported the size from the WITH_SIZE_EXPR here as
                   the last argument of the internal fn call.  Now reinstate
                   it.  */
                tree size = gimple_call_arg (stmt, nargs - 1);
                expr = build2 (WITH_SIZE_EXPR, TREE_TYPE (expr), expr, size);
              }

            /* We use gimplify_assign here, rather than gimple_build_assign,
               because gimplify_assign knows how to deal with variable-sized
               types.  */
            gimplify_assign (lhs, expr, &pre);
          }
        else
          gimplify_expr (&expr, &pre, &post, is_gimple_lvalue, fb_lvalue);

        input_location = saved_location;
        pop_gimplify_context (NULL);

        gimple_seq_add_seq (&pre, post);
        update_modified_stmts (pre);

        /* Add the sequence after IFN_VA_ARG.  This splits the bb right
           after IFN_VA_ARG, and adds the sequence in one or more new bbs
           in between.  */
        gimple_find_sub_bbs (pre, &i);

        /* Remove the IFN_VA_ARG gimple_call.  It's the last stmt in the
           bb.  */
        unlink_stmt_vdef (stmt);
        release_ssa_name_fn (fun, gimple_vdef (stmt));
        gsi_remove (&i, true);
        gcc_assert (gsi_end_p (i));

        /* We're walking here into the bbs which contain the expansion of
           IFN_VA_ARG, and will not contain another IFN_VA_ARG that needs
           expanding.  We could try to skip walking these bbs, perhaps by
           walking backwards over gimples and bbs.  */
        break;
      }

  if (!modified)
    return;

  free_dominance_info (CDI_DOMINATORS);
  update_ssa (TODO_update_ssa);
}

/* Expand IFN_VA_ARGs in FUN, if necessary.  */
static void
expand_ifn_va_arg (function *fun)
{
  if ((fun->curr_properties & PROP_gimple_lva) == 0)
    expand_ifn_va_arg_1 (fun);

  if (flag_checking)
    {
      basic_block bb;
      gimple_stmt_iterator i;
      FOR_EACH_BB_FN (bb, fun)
        for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
          gcc_assert (!gimple_call_ifn_va_arg_p (gsi_stmt (i)));
    }
}

namespace {

const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* Always run this pass, in order to expand va_arg internal_fns.  We
         also need to do that if fun->stdarg == 0, because a va_arg may also
         occur in a function without varargs, e.g. when passing a va_list to
         another function.  */
      return true;
    }

  virtual unsigned int execute (function *);

}; // class pass_stdarg

unsigned int
pass_stdarg::execute (function *fun)
{
  /* TODO: Postpone expand_ifn_va_arg till after
     optimize_va_list_gpr_fpr_size.  */
  expand_ifn_va_arg (fun);

  if (flag_stdarg_opt
      /* This optimization is only for stdarg functions.  */
      && fun->stdarg != 0)
    optimize_va_list_gpr_fpr_size (fun);

  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_stdarg (gcc::context *ctxt)
{
  return new pass_stdarg (ctxt);
}

namespace {

const pass_data pass_data_lower_vaarg =
{
  GIMPLE_PASS, /* type */
  "lower_vaarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_vaarg : public gimple_opt_pass
{
public:
  pass_lower_vaarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_vaarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return (cfun->curr_properties & PROP_gimple_lva) == 0;
    }

  virtual unsigned int execute (function *);

}; // class pass_lower_vaarg

unsigned int
pass_lower_vaarg::execute (function *fun)
{
  expand_ifn_va_arg (fun);
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_lower_vaarg (gcc::context *ctxt)
{
  return new pass_lower_vaarg (ctxt);
}