/* Pass computing data for optimizing stdarg functions.
   Copyright (C) 2004-2013 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "function.h"
#include "langhooks.h"
#include "gimple-pretty-print.h"
#include "target.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-stdarg.h"

/* A simple pass that attempts to optimize stdarg functions on architectures
   that need to save register arguments to stack on entry to stdarg functions.
   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used and the va_list variables don't
   escape the function, it is only necessary to save registers that will
   be used in va_arg macros.  E.g. if va_arg is only used with integral types
   in the function, floating point registers don't need to be saved, etc.  */


/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  vec<edge> stack = vNULL;
  edge e;
  edge_iterator ei;
  sbitmap visited;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  visited = sbitmap_alloc (last_basic_block);
  bitmap_clear (visited);
  ret = true;

  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    stack.safe_push (e);

  while (! stack.is_empty ())
    {
      basic_block src;

      e = stack.pop ();
      src = e->src;

      if (e->flags & EDGE_COMPLEX)
        {
          ret = false;
          break;
        }

      if (src == va_start_bb)
        continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
        {
          ret = false;
          break;
        }

      gcc_assert (src != ENTRY_BLOCK_PTR);

      if (! bitmap_bit_p (visited, src->index))
        {
          bitmap_set_bit (visited, src->index);
          FOR_EACH_EDGE (e, ei, src->preds)
            stack.safe_push (e);
        }
    }

  stack.release ();
  sbitmap_free (visited);
  return ret;
}
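
/* As an illustrative sketch (the block numbers are arbitrary): in a CFG

     bb2 (va_start)  ->  bb3  ->  bb4 (va_arg)
                          ^            |
                          +------------+

   bb2 dominates bb4, but the backward walk from bb4's predecessors
   re-reaches bb4 through the loop back edge, so reachable_at_most_once
   conservatively returns false: the va_arg may execute more often than
   the va_start.  */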

/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return (unsigned HOST_WIDE_INT) -1.
   GPR_P is true if this is a GPR counter.  */

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
                      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
        si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        {
          if (counter_val >= max_size)
            {
              ret = max_size;
              break;
            }

          ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
          break;
        }

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
        return (unsigned HOST_WIDE_INT) -1;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
           || gimple_assign_cast_p (stmt))
          && TREE_CODE (rhs1) == SSA_NAME)
        {
          lhs = rhs1;
          continue;
        }

      if ((rhs_code == POINTER_PLUS_EXPR
           || rhs_code == PLUS_EXPR)
          && TREE_CODE (rhs1) == SSA_NAME
          && host_integerp (gimple_assign_rhs2 (stmt), 1))
        {
          ret += tree_low_cst (gimple_assign_rhs2 (stmt), 1);
          lhs = rhs1;
          continue;
        }

      if (rhs_code == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
          && host_integerp (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1))
        {
          ret += tree_low_cst (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1);
          lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
          continue;
        }

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
        return (unsigned HOST_WIDE_INT) -1;

      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
        return (unsigned HOST_WIDE_INT) -1;

      if (TREE_CODE (counter) == COMPONENT_REF)
        {
          if (get_base_address (counter) != get_base_address (rhs)
              || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
              || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
            return (unsigned HOST_WIDE_INT) -1;
        }
      else if (counter != rhs)
        return (unsigned HOST_WIDE_INT) -1;

      lhs = NULL;
    }

  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        break;

      if (val >= max_size)
        si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
        si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
           || gimple_assign_cast_p (stmt))
          && TREE_CODE (rhs1) == SSA_NAME)
        {
          lhs = rhs1;
          continue;
        }

      if ((rhs_code == POINTER_PLUS_EXPR
           || rhs_code == PLUS_EXPR)
          && TREE_CODE (rhs1) == SSA_NAME
          && host_integerp (gimple_assign_rhs2 (stmt), 1))
        {
          val -= tree_low_cst (gimple_assign_rhs2 (stmt), 1);
          lhs = rhs1;
          continue;
        }

      if (rhs_code == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
          && host_integerp (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1))
        {
          val -= tree_low_cst (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1);
          lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
          continue;
        }

      lhs = NULL;
    }

  return ret;
}
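
/* E.g. on an x86_64-like target, a va_arg (ap, int) expansion might
   contain the GIMPLE sequence

     gp_offset.1 = ap.gp_offset;
     gp_offset.2 = gp_offset.1 + 8;
     ap.gp_offset = gp_offset.2;

   and va_list_counter_bump called on the counter ap.gp_offset and the
   stored value gp_offset.2 walks the SSA definition chain of
   gp_offset.2 back to the read of the counter and returns 8.  */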

/* Called by walk_tree to look for references to va_list variables.  */

static tree
find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                        void *data)
{
  bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
  tree var = *tp;

  if (TREE_CODE (var) == SSA_NAME)
    {
      if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
        return var;
    }
  else if (TREE_CODE (var) == VAR_DECL)
    {
      if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
        return var;
    }

  return NULL_TREE;
}


/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter,
   if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
   statement.  GPR_P is true if AP is a GPR counter, false if it is
   a FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
                    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
        {
          cfun->va_list_gpr_size += increment;
          return;
        }

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
        {
          cfun->va_list_fpr_size += increment;
          return;
        }
    }

  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
        cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}
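
/* E.g. a GPR counter write whose bump is known to be 8 and whose block
   runs at most once per va_start grows cfun->va_list_gpr_size by 8;
   the same write inside a loop body (compute_sizes == 0) conservatively
   forces the size to VA_LIST_MAX_GPR_SIZE.  */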

/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
   If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
   is false, AP has been seen in VAR = AP assignment.
   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
   va_arg operation that doesn't cause the va_list variable to escape
   the current function.  */

static bool
va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
                           bool write_p)
{
  tree base;

  if (TREE_CODE (ap) != COMPONENT_REF
      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
    return false;

  if (TREE_CODE (var) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
    return false;

  base = get_base_address (ap);
  if (TREE_CODE (base) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
    return false;

  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
    va_list_counter_op (si, ap, var, true, write_p);
  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
    va_list_counter_op (si, ap, var, false, write_p);

  return true;
}


/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  if (va_list_counter_bump (si, ap, tem, true) == (unsigned HOST_WIDE_INT) -1)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}


/* Check for:
     tem1 = AP;
     TEM2 = tem1 + CST;
     AP = TEM2;
   sequence and update cfun->va_list_gpr_size.  Return true if found.  */

static bool
va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
{
  unsigned HOST_WIDE_INT increment;

  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem2) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
    return false;

  if (si->compute_sizes <= 0)
    return false;

  increment = va_list_counter_bump (si, ap, tem2, true);
  if (increment + 1 <= 1)
    return false;

  if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
    cfun->va_list_gpr_size += increment;
  else
    cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;

  return true;
}

/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing the value of some va_list variable plus optionally some
   constant, either set si->va_list_escapes or add LHS to
   si->va_list_escape_vars, depending on whether LHS is a function local
   temporary.  */

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
        return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
           && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
           && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
        return;
    }
  else
    return;

  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
          && reachable_at_most_once (si->bb, si->va_start_bb))
        si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "bb%d will %sbe executed at most once for each va_start "
                 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
                 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == (unsigned HOST_WIDE_INT) -1)
    {
      si->va_list_escapes = true;
      return;
    }

  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}
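
/* E.g. if ap_tmp is already in si->va_list_escape_vars, then

     ap_tmp2 = ap_tmp + 16;

   just adds ap_tmp2 to the set as well, whereas storing the value into
   memory, as in

     some_global = ap_tmp;

   sets si->va_list_escapes, because the va_list value may then be used
   outside of the current function.  */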

/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree lhs;
          use_operand_p uop;
          ssa_op_iter soi;
          gimple phi = gsi_stmt (i);

          lhs = PHI_RESULT (phi);
          if (virtual_operand_p (lhs)
              || bitmap_bit_p (si->va_list_escape_vars,
                               SSA_NAME_VERSION (lhs)))
            continue;

          FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
            {
              tree rhs = USE_FROM_PTR (uop);
              if (TREE_CODE (rhs) == SSA_NAME
                  && bitmap_bit_p (si->va_list_escape_vars,
                                   SSA_NAME_VERSION (rhs)))
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fputs ("va_list escapes in ", dump_file);
                      print_gimple_stmt (dump_file, phi, 0, dump_flags);
                      fputc ('\n', dump_file);
                    }
                  return true;
                }
            }
        }

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          tree use;
          ssa_op_iter iter;

          if (is_gimple_debug (stmt))
            continue;

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
            {
              if (! bitmap_bit_p (si->va_list_escape_vars,
                                  SSA_NAME_VERSION (use)))
                continue;

              if (is_gimple_assign (stmt))
                {
                  tree rhs = gimple_assign_rhs1 (stmt);
                  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

                  /* x = *ap_temp;  */
                  if (rhs_code == MEM_REF
                      && TREE_OPERAND (rhs, 0) == use
                      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
                      && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (rhs)), 1)
                      && si->offsets[SSA_NAME_VERSION (use)] != -1)
                    {
                      unsigned HOST_WIDE_INT gpr_size;
                      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

                      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
                                 + tree_low_cst (TREE_OPERAND (rhs, 1), 0)
                                 + tree_low_cst (access_size, 1);
                      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
                        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
                      else if (gpr_size > cfun->va_list_gpr_size)
                        cfun->va_list_gpr_size = gpr_size;
                      continue;
                    }

                  /* va_arg sequences may contain
                       other_ap_temp = ap_temp;
                       other_ap_temp = ap_temp + constant;
                       other_ap_temp = (some_type *) ap_temp;
                       ap = ap_temp;
                     statements.  */
                  if (rhs == use
                      && ((rhs_code == POINTER_PLUS_EXPR
                           && (TREE_CODE (gimple_assign_rhs2 (stmt))
                               == INTEGER_CST))
                          || gimple_assign_cast_p (stmt)
                          || (get_gimple_rhs_class (rhs_code)
                              == GIMPLE_SINGLE_RHS)))
                    {
                      tree lhs = gimple_assign_lhs (stmt);

                      if (TREE_CODE (lhs) == SSA_NAME
                          && bitmap_bit_p (si->va_list_escape_vars,
                                           SSA_NAME_VERSION (lhs)))
                        continue;

                      if (TREE_CODE (lhs) == VAR_DECL
                          && bitmap_bit_p (si->va_list_vars,
                                           DECL_UID (lhs) + num_ssa_names))
                        continue;
                    }
                  else if (rhs_code == ADDR_EXPR
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
                           && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
                    {
                      tree lhs = gimple_assign_lhs (stmt);

                      if (bitmap_bit_p (si->va_list_escape_vars,
                                        SSA_NAME_VERSION (lhs)))
                        continue;
                    }
                }

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                  fputc ('\n', dump_file);
                }
              return true;
            }
        }
    }

  return false;
}


/* Return true if this optimization pass should be done.
   It only makes sense for stdarg functions.  */

static bool
gate_optimize_stdarg (void)
{
  /* This optimization is only for stdarg functions.  */
  return cfun->stdarg != 0;
}
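
/* E.g. on an x86_64-like target, for

     int
     f (int n, ...)
     {
       va_list ap;
       int r;

       va_start (ap, n);
       r = va_arg (ap, int);
       va_end (ap);
       return r;
     }

   the pass can determine that only the first 8 bytes of the GPR save
   area are ever read and that the FPR counter is never bumped, so the
   va_start expansion only needs to spill a single general purpose
   register.  */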

/* Entry point to the stdarg optimization pass.  */

static unsigned int
execute_optimize_stdarg (void)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  struct walk_stmt_info wi;
  const char *funcname = NULL;
  tree cfun_va_list;

  cfun->va_list_gpr_size = 0;
  cfun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  cfun_va_list = targetm.fn_abi_va_list (cfun->decl);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);
  gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          tree callee, ap;

          if (!is_gimple_call (stmt))
            continue;

          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            continue;

          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_VA_START:
              break;
              /* If old style builtins are used, don't optimize anything.  */
            case BUILT_IN_SAVEREGS:
            case BUILT_IN_NEXT_ARG:
              va_list_escapes = true;
              continue;
            default:
              continue;
            }

          si.va_start_count++;
          ap = gimple_call_arg (stmt, 0);

          if (TREE_CODE (ap) != ADDR_EXPR)
            {
              va_list_escapes = true;
              break;
            }
          ap = TREE_OPERAND (ap, 0);
          if (TREE_CODE (ap) == ARRAY_REF)
            {
              if (! integer_zerop (TREE_OPERAND (ap, 1)))
                {
                  va_list_escapes = true;
                  break;
                }
              ap = TREE_OPERAND (ap, 0);
            }
          if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
              != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (cfun->decl))
              || TREE_CODE (ap) != VAR_DECL)
            {
              va_list_escapes = true;
              break;
            }

          if (is_global_var (ap))
            {
              va_list_escapes = true;
              break;
            }

          bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);

          /* VA_START_BB and VA_START_AP will only be used if there is just
             one va_start in the function.  */
          si.va_start_bb = bb;
          si.va_start_ap = ap;
        }

      if (va_list_escapes)
        break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter
     fields are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  calculate_dominance_info (CDI_DOMINATORS);
  memset (&wi, 0, sizeof (wi));
  wi.info = si.va_list_vars;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
         them as assignments for the purpose of escape analysis.  This is
         not needed for non-simple va_list because virtual phis don't perform
         any real data movement.  Also, check PHI nodes for taking address of
         the va_list vars.  */
      tree lhs, rhs;
      use_operand_p uop;
      ssa_op_iter soi;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple phi = gsi_stmt (i);
          lhs = PHI_RESULT (phi);

          if (virtual_operand_p (lhs))
            continue;

          if (va_list_simple_ptr)
            {
              FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
                {
                  rhs = USE_FROM_PTR (uop);
                  if (va_list_ptr_read (&si, rhs, lhs))
                    continue;
                  else if (va_list_ptr_write (&si, lhs, rhs))
                    continue;
                  else
                    check_va_list_escapes (&si, lhs, rhs);

                  if (si.va_list_escapes)
                    {
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        {
                          fputs ("va_list escapes in ", dump_file);
                          print_gimple_stmt (dump_file, phi, 0, dump_flags);
                          fputc ('\n', dump_file);
                        }
                      va_list_escapes = true;
                    }
                }
            }

          for (unsigned j = 0; !va_list_escapes
               && j < gimple_phi_num_args (phi); ++j)
            if ((!va_list_simple_ptr
                 || TREE_CODE (gimple_phi_arg_def (phi, j)) != SSA_NAME)
                && walk_tree (gimple_phi_arg_def_ptr (phi, j),
                              find_va_list_reference, &wi, NULL))
              {
                if (dump_file && (dump_flags & TDF_DETAILS))
                  {
                    fputs ("va_list escapes in ", dump_file);
                    print_gimple_stmt (dump_file, phi, 0, dump_flags);
                    fputc ('\n', dump_file);
                  }
                va_list_escapes = true;
              }
        }

      for (i = gsi_start_bb (bb);
           !gsi_end_p (i) && !va_list_escapes;
           gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);

          /* Don't look at __builtin_va_{start,end}, they are ok.  */
          if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);

              if (callee
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
                continue;
            }

          if (is_gimple_assign (stmt))
            {
              lhs = gimple_assign_lhs (stmt);
              rhs = gimple_assign_rhs1 (stmt);

              if (va_list_simple_ptr)
                {
                  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                      == GIMPLE_SINGLE_RHS)
                    {
                      /* Check for ap ={v} {}.  */
                      if (TREE_CLOBBER_P (rhs))
                        continue;

                      /* Check for tem = ap.  */
                      else if (va_list_ptr_read (&si, rhs, lhs))
                        continue;

                      /* Check for the last insn in:
                           tem1 = ap;
                           tem2 = tem1 + CST;
                           ap = tem2;
                         sequence.  */
                      else if (va_list_ptr_write (&si, lhs, rhs))
                        continue;
                    }

                  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
                      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
                      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                          == GIMPLE_SINGLE_RHS))
                    check_va_list_escapes (&si, lhs, rhs);
                }
              else
                {
                  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                      == GIMPLE_SINGLE_RHS)
                    {
                      /* Check for ap ={v} {}.  */
                      if (TREE_CLOBBER_P (rhs))
                        continue;

                      /* Check for ap[0].field = temp.  */
                      else if (va_list_counter_struct_op (&si, lhs, rhs, true))
                        continue;

                      /* Check for temp = ap[0].field.  */
                      else if (va_list_counter_struct_op (&si, rhs, lhs,
                                                          false))
                        continue;
                    }

                  /* Do any architecture specific checking.  */
                  if (targetm.stdarg_optimize_hook
                      && targetm.stdarg_optimize_hook (&si, stmt))
                    continue;
                }
            }
          else if (is_gimple_debug (stmt))
            continue;

          /* All other uses of va_list are either va_copy (that is not handled
             in this optimization), taking address of va_list variable or
             passing va_list to other functions (in that case va_list might
             escape the function and therefore va_start needs to set it up
             fully), or some unexpected use of va_list.  None of these should
             happen in a gimplified VA_ARG_EXPR.  */
          if (si.va_list_escapes
              || walk_gimple_op (stmt, find_va_list_reference, &wi))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
                  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                  fputc ('\n', dump_file);
                }
              va_list_escapes = true;
            }
        }

      if (va_list_escapes)
        break;
    }

  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  if (va_list_escapes)
    {
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
               funcname, (int) va_list_escapes);
      if (cfun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (cfun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
        fputs ("all", dump_file);
      else
        fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
  return 0;
}


struct gimple_opt_pass pass_stdarg =
{
 {
  GIMPLE_PASS,
  "stdarg",                             /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  gate_optimize_stdarg,                 /* gate */
  execute_optimize_stdarg,              /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};