/* Lower complex number operations to scalar operations.
   Copyright (C) 2004-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"


/* For each complex ssa name, a lattice value.  We're interested in finding
   out whether a complex number is degenerate in some way, having only real
   or only complex parts.

   The values are chosen so that bitwise-OR is the lattice meet operation:
   UNINITIALIZED is the bottom element and VARYING (== ONLY_REAL|ONLY_IMAG)
   is the top.  */

enum
{
  UNINITIALIZED = 0,
  ONLY_REAL = 1,
  ONLY_IMAG = 2,
  VARYING = 3
};

/* The type complex_lattice_t holds combinations of the above
   constants.  */
typedef int complex_lattice_t;

/* Pack a pair of lattice values into a single integer so that the
   expanders below can switch on both operand lattices at once.  Each
   lattice value fits in 2 bits.  */
#define PAIR(a, b)  ((a) << 2 | (b))


/* Lattice value for each complex SSA_NAME, indexed by SSA_NAME_VERSION.  */
static vec<complex_lattice_t> complex_lattice_values;

/* For each complex variable, a pair of variables for the components exists in
   the hashtable.  Keyed by DECL_UID * 2 + imag_p (see get_component_var).  */
static htab_t complex_variable_components;

/* For each complex SSA_NAME, a pair of ssa names for the components.
   Indexed by SSA_NAME_VERSION * 2 + imag_p.  */
static vec<tree> complex_ssa_name_components;

/* Lookup UID in the complex_variable_components hashtable and return the
   associated tree.
   Returns NULL if no component variable has been registered
   for UID yet.  */

static tree
cvc_lookup (unsigned int uid)
{
  struct int_tree_map *h, in;
  in.uid = uid;
  /* The UID itself serves as the hash value.  */
  h = (struct int_tree_map *) htab_find_with_hash (complex_variable_components, &in, uid);
  return h ? h->to : NULL;
}

/* Insert the pair UID, TO into the complex_variable_components hashtable.
   The table takes ownership of the freshly allocated map entry.  */

static void
cvc_insert (unsigned int uid, tree to)
{
  struct int_tree_map *h;
  void **loc;

  h = XNEW (struct int_tree_map);
  h->uid = uid;
  h->to = to;
  loc = htab_find_slot_with_hash (complex_variable_components, h,
				  uid, INSERT);
  *(struct int_tree_map **) loc = h;
}

/* Return true if T is not a zero constant.  In the case of real values,
   we're only interested in +0.0.  Non-constant trees conservatively
   count as nonzero.  */

static int
some_nonzerop (tree t)
{
  int zerop = false;

  /* Operations with real or imaginary part of a complex number zero
     cannot be treated the same as operations with a real or imaginary
     operand if we care about the signs of zeros in the result.  */
  if (TREE_CODE (t) == REAL_CST && !flag_signed_zeros)
    zerop = REAL_VALUES_IDENTICAL (TREE_REAL_CST (t), dconst0);
  else if (TREE_CODE (t) == FIXED_CST)
    zerop = fixed_zerop (t);
  else if (TREE_CODE (t) == INTEGER_CST)
    zerop = integer_zerop (t);

  return !zerop;
}


/* Compute a lattice value from the components of a complex type REAL
   and IMAG.  */

static complex_lattice_t
find_lattice_value_parts (tree real, tree imag)
{
  int r, i;
  complex_lattice_t ret;

  r = some_nonzerop (real);
  i = some_nonzerop (imag);
  ret = r * ONLY_REAL + i * ONLY_IMAG;

  /* ??? On occasion we could do better than mapping 0+0i to real, but we
     certainly don't want to leave it UNINITIALIZED, which eventually gets
     mapped to VARYING.  */
  if (ret == UNINITIALIZED)
    ret = ONLY_REAL;

  return ret;
}


/* Compute a lattice value from gimple_val T.
   Only SSA names and complex constants are expected here; anything
   else is a bug in the caller.  */

static complex_lattice_t
find_lattice_value (tree t)
{
  tree real, imag;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      return complex_lattice_values[SSA_NAME_VERSION (t)];

    case COMPLEX_CST:
      real = TREE_REALPART (t);
      imag = TREE_IMAGPART (t);
      break;

    default:
      gcc_unreachable ();
    }

  return find_lattice_value_parts (real, imag);
}

/* Determine if LHS is something for which we're interested in seeing
   simulation results.  */

static bool
is_complex_reg (tree lhs)
{
  return TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE && is_gimple_reg (lhs);
}

/* Mark the incoming parameters to the function as VARYING.  We have no
   knowledge of the values flowing in from callers.  */

static void
init_parameter_lattice_values (void)
{
  tree parm, ssa_name;

  for (parm = DECL_ARGUMENTS (cfun->decl); parm ; parm = DECL_CHAIN (parm))
    if (is_complex_reg (parm)
	&& (ssa_name = ssa_default_def (cfun, parm)) != NULL_TREE)
      complex_lattice_values[SSA_NAME_VERSION (ssa_name)] = VARYING;
}

/* Initialize simulation state for each statement.  Return false if we
   found no statements we want to simulate, and thus there's nothing
   for the entire pass to do.  */

static bool
init_dont_simulate_again (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple phi;
  bool saw_a_complex_op = false;

  FOR_EACH_BB (bb)
    {
      /* PHIs are simulated only when their result is a complex register.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  phi = gsi_stmt (gsi);
	  prop_set_simulate_again (phi,
				   is_complex_reg (gimple_phi_result (phi)));
	}

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt;
	  tree op0, op1;
	  bool sim_again_p;

	  stmt = gsi_stmt (gsi);
	  op0 = op1 = NULL_TREE;

	  /* Most control-altering statements must be initially
	     simulated, else we won't cover the entire cfg.  */
	  sim_again_p = stmt_ends_bb_p (stmt);

	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_CALL:
	      if (gimple_call_lhs (stmt))
		sim_again_p = is_complex_reg (gimple_call_lhs (stmt));
	      break;

	    case GIMPLE_ASSIGN:
	      sim_again_p = is_complex_reg (gimple_assign_lhs (stmt));
	      /* For REALPART/IMAGPART look through to the complex operand
		 underneath so the complex-type check below applies to it.  */
	      if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
		  || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR)
		op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
	      else
		op0 = gimple_assign_rhs1 (stmt);
	      if (gimple_num_ops (stmt) > 2)
		op1 = gimple_assign_rhs2 (stmt);
	      break;

	    case GIMPLE_COND:
	      op0 = gimple_cond_lhs (stmt);
	      op1 = gimple_cond_rhs (stmt);
	      break;

	    default:
	      break;
	    }

	  /* Note whether this statement performs an operation on complex
	     values; if none is ever seen the pass has nothing to do.  */
	  if (op0 || op1)
	    switch (gimple_expr_code (stmt))
	      {
	      case EQ_EXPR:
	      case NE_EXPR:
	      case PLUS_EXPR:
	      case MINUS_EXPR:
	      case MULT_EXPR:
	      case TRUNC_DIV_EXPR:
	      case CEIL_DIV_EXPR:
	      case FLOOR_DIV_EXPR:
	      case ROUND_DIV_EXPR:
	      case RDIV_EXPR:
		if (TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE
		    || TREE_CODE (TREE_TYPE (op1)) == COMPLEX_TYPE)
		  saw_a_complex_op = true;
		break;

	      case NEGATE_EXPR:
	      case CONJ_EXPR:
		if (TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
		  saw_a_complex_op = true;
		break;

	      case REALPART_EXPR:
	      case IMAGPART_EXPR:
		/* The total store transformation performed during
		   gimplification creates such uninitialized loads
		   and we need to lower the statement to be able
		   to fix things up.  */
		if (TREE_CODE (op0) == SSA_NAME
		    && ssa_undefined_value_p (op0))
		  saw_a_complex_op = true;
		break;

	      default:
		break;
	      }

	  prop_set_simulate_again (stmt, sim_again_p);
	}
    }

  return saw_a_complex_op;
}


/* Evaluate statement STMT against the complex lattice defined above.
   Callback for the SSA propagation engine; stores the SSA name being
   defined in *RESULT_P and returns whether the lattice changed.  */

static enum ssa_prop_result
complex_visit_stmt (gimple stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
		    tree *result_p)
{
  complex_lattice_t new_l, old_l, op1_l, op2_l;
  unsigned int ver;
  tree lhs;

  lhs = gimple_get_lhs (stmt);
  /* Skip anything but GIMPLE_ASSIGN and GIMPLE_CALL with a lhs.  */
  if (!lhs)
    return SSA_PROP_VARYING;

  /* These conditions should be satisfied due to the initial filter
     set up in init_dont_simulate_again.  */
  gcc_assert (TREE_CODE (lhs) == SSA_NAME);
  gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE);

  *result_p = lhs;
  ver = SSA_NAME_VERSION (lhs);
  old_l = complex_lattice_values[ver];

  switch (gimple_expr_code (stmt))
    {
    case SSA_NAME:
    case COMPLEX_CST:
      /* A plain copy takes the lattice value of its source.  */
      new_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      break;

    case COMPLEX_EXPR:
      new_l = find_lattice_value_parts (gimple_assign_rhs1 (stmt),
					gimple_assign_rhs2 (stmt));
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
      op1_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      op2_l = find_lattice_value (gimple_assign_rhs2 (stmt));

      /* We've set up the lattice values such that IOR neatly
	 models addition.  */
      new_l = op1_l | op2_l;
      break;

    case MULT_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      op1_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      op2_l = find_lattice_value (gimple_assign_rhs2 (stmt));

      /* Obviously, if either varies, so does the result.  */
      if (op1_l == VARYING || op2_l == VARYING)
	new_l = VARYING;
      /* Don't prematurely promote variables if we've not yet seen
	 their inputs.  */
      else if (op1_l == UNINITIALIZED)
	new_l = op2_l;
      else if (op2_l == UNINITIALIZED)
	new_l = op1_l;
      else
	{
	  /* At this point both numbers have only one component.  If the
	     numbers are of opposite kind, the result is imaginary,
	     otherwise the result is real.  The add/subtract translates
	     the real/imag from/to 0/1; the ^ performs the comparison.  */
	  new_l = ((op1_l - ONLY_REAL) ^ (op2_l - ONLY_REAL)) + ONLY_REAL;

	  /* Don't allow the lattice value to flip-flop indefinitely.  */
	  new_l |= old_l;
	}
      break;

    case NEGATE_EXPR:
    case CONJ_EXPR:
      /* Negation and conjugation preserve which component is nonzero.  */
      new_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      break;

    default:
      new_l = VARYING;
      break;
    }

  /* If nothing changed this round, let the propagator know.  */
  if (new_l == old_l)
    return SSA_PROP_NOT_INTERESTING;

  complex_lattice_values[ver] = new_l;
  return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING;
}

/* Evaluate a PHI node against the complex lattice defined above.  */

static enum ssa_prop_result
complex_visit_phi (gimple phi)
{
  complex_lattice_t new_l, old_l;
  unsigned int ver;
  tree lhs;
  int i;

  lhs = gimple_phi_result (phi);

  /* This condition should be satisfied due to the initial filter
     set up in init_dont_simulate_again.  */
  gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE);

  /* We've set up the lattice values such that IOR neatly models PHI meet.  */
  new_l = UNINITIALIZED;
  for (i = gimple_phi_num_args (phi) - 1; i >= 0; --i)
    new_l |= find_lattice_value (gimple_phi_arg_def (phi, i));

  ver = SSA_NAME_VERSION (lhs);
  old_l = complex_lattice_values[ver];

  if (new_l == old_l)
    return SSA_PROP_NOT_INTERESTING;

  complex_lattice_values[ver] = new_l;
  return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING;
}

/* Create one backing variable for a complex component of ORIG.
   TYPE is the scalar component type, PREFIX names the temporary, SUFFIX
   is appended to ORIG's name for debug purposes, and CODE is either
   REALPART_EXPR or IMAGPART_EXPR for the debug expression.  */

static tree
create_one_component_var (tree type, tree orig, const char *prefix,
			  const char *suffix, enum tree_code code)
{
  tree r = create_tmp_var (type, prefix);

  DECL_SOURCE_LOCATION (r) = DECL_SOURCE_LOCATION (orig);
  DECL_ARTIFICIAL (r) = 1;

  if (DECL_NAME (orig) && !DECL_IGNORED_P (orig))
    {
      const char *name = IDENTIFIER_POINTER (DECL_NAME (orig));

      DECL_NAME (r) = get_identifier (ACONCAT ((name, suffix, NULL)));

      /* Point the debugger at the corresponding part of ORIG.  */
      SET_DECL_DEBUG_EXPR (r, build1 (code, type, orig));
      DECL_DEBUG_EXPR_IS_FROM (r) = 1;
      DECL_IGNORED_P (r) = 0;
      TREE_NO_WARNING (r) = TREE_NO_WARNING (orig);
    }
  else
    {
      DECL_IGNORED_P (r) = 1;
      TREE_NO_WARNING (r) = 1;
    }

  return r;
}

/* Retrieve a value for a complex component of VAR.  Creates and caches
   the component variable on first use.  */

static tree
get_component_var (tree var, bool imag_p)
{
  size_t decl_index = DECL_UID (var) * 2 + imag_p;
  tree ret = cvc_lookup (decl_index);

  if (ret == NULL)
    {
      ret = create_one_component_var (TREE_TYPE (TREE_TYPE (var)), var,
				      imag_p ? "CI" : "CR",
				      imag_p ? "$imag" : "$real",
				      imag_p ? IMAGPART_EXPR : REALPART_EXPR);
      cvc_insert (decl_index, ret);
    }

  return ret;
}

/* Retrieve a value for a complex component of SSA_NAME.  */

static tree
get_component_ssa_name (tree ssa_name, bool imag_p)
{
  complex_lattice_t lattice = find_lattice_value (ssa_name);
  size_t ssa_name_index;
  tree ret;

  /* If the lattice says the requested component is known to be zero,
     return a zero constant of the scalar type instead of a name.  */
  if (lattice == (imag_p ? ONLY_REAL : ONLY_IMAG))
    {
      tree inner_type = TREE_TYPE (TREE_TYPE (ssa_name));
      if (SCALAR_FLOAT_TYPE_P (inner_type))
	return build_real (inner_type, dconst0);
      else
	return build_int_cst (inner_type, 0);
    }

  ssa_name_index = SSA_NAME_VERSION (ssa_name) * 2 + imag_p;
  ret = complex_ssa_name_components[ssa_name_index];
  if (ret == NULL)
    {
      if (SSA_NAME_VAR (ssa_name))
	ret = get_component_var (SSA_NAME_VAR (ssa_name), imag_p);
      else
	/* Anonymous SSA names get an anonymous name of the scalar type.  */
	ret = TREE_TYPE (TREE_TYPE (ssa_name));
      ret = make_ssa_name (ret, NULL);

      /* Copy some properties from the original.  In particular, whether it
	 is used in an abnormal phi, and whether it's uninitialized.  */
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ret)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name);
      if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
	  && TREE_CODE (SSA_NAME_VAR (ssa_name)) == VAR_DECL)
	{
	  SSA_NAME_DEF_STMT (ret) = SSA_NAME_DEF_STMT (ssa_name);
	  set_ssa_default_def (cfun, SSA_NAME_VAR (ret), ret);
	}

      complex_ssa_name_components[ssa_name_index] = ret;
    }

  return ret;
}

/* Set a value for a complex component of SSA_NAME, return a
   gimple_seq of stuff that needs doing.  Returns NULL when no code
   needs to be emitted.  */

static gimple_seq
set_component_ssa_name (tree ssa_name, bool imag_p, tree value)
{
  complex_lattice_t lattice = find_lattice_value (ssa_name);
  size_t ssa_name_index;
  tree comp;
  gimple last;
  gimple_seq list;

  /* We know the value must be zero, else there's a bug in our lattice
     analysis.  But the value may well be a variable known to contain
     zero.  We should be safe ignoring it.  */
  if (lattice == (imag_p ? ONLY_REAL : ONLY_IMAG))
    return NULL;

  /* If we've already assigned an SSA_NAME to this component, then this
     means that our walk of the basic blocks found a use before the set.
     This is fine.  Now we should create an initialization for the value
     we created earlier.  */
  ssa_name_index = SSA_NAME_VERSION (ssa_name) * 2 + imag_p;
  comp = complex_ssa_name_components[ssa_name_index];
  if (comp)
    /* Fall through to the assignment code below.  */
    ;

  /* If we've nothing assigned, and the value we're given is already stable,
     then install that as the value for this SSA_NAME.  This preemptively
     copy-propagates the value, which avoids unnecessary memory allocation.  */
  else if (is_gimple_min_invariant (value)
	   && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      complex_ssa_name_components[ssa_name_index] = value;
      return NULL;
    }
  else if (TREE_CODE (value) == SSA_NAME
	   && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      /* Replace an anonymous base value with the variable from cvc_lookup.
	 This should result in better debug info.  */
      if (SSA_NAME_VAR (ssa_name)
	  && (!SSA_NAME_VAR (value) || DECL_IGNORED_P (SSA_NAME_VAR (value)))
	  && !DECL_IGNORED_P (SSA_NAME_VAR (ssa_name)))
	{
	  comp = get_component_var (SSA_NAME_VAR (ssa_name), imag_p);
	  replace_ssa_name_symbol (value, comp);
	}

      complex_ssa_name_components[ssa_name_index] = value;
      return NULL;
    }

  /* Finally, we need to stabilize the result by installing the value into
     a new ssa name.  */
  else
    comp = get_component_ssa_name (ssa_name, imag_p);

  /* Do all the work to assign VALUE to COMP.  */
  list = NULL;
  value = force_gimple_operand (value, &list, false, NULL);
  last = gimple_build_assign (comp, value);
  gimple_seq_add_stmt (&list, last);
  gcc_assert (SSA_NAME_DEF_STMT (comp) == last);

  return list;
}

/* Extract the real or imaginary part of a complex variable or constant.
   Make sure that it's a proper gimple_val and gimplify it if not.
   Emit any new code before gsi.  */

static tree
extract_component (gimple_stmt_iterator *gsi, tree t, bool imagpart_p,
		   bool gimple_p)
{
  switch (TREE_CODE (t))
    {
    case COMPLEX_CST:
      return imagpart_p ? TREE_IMAGPART (t) : TREE_REALPART (t);

    case COMPLEX_EXPR:
      gcc_unreachable ();

    case VAR_DECL:
    case RESULT_DECL:
    case PARM_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case VIEW_CONVERT_EXPR:
    case MEM_REF:
      {
	tree inner_type = TREE_TYPE (TREE_TYPE (t));

	t = build1 ((imagpart_p ? IMAGPART_EXPR : REALPART_EXPR),
		    inner_type, unshare_expr (t));

	if (gimple_p)
	  t = force_gimple_operand_gsi (gsi, t, true, NULL, true,
					GSI_SAME_STMT);

	return t;
      }

    case SSA_NAME:
      return get_component_ssa_name (t, imagpart_p);

    default:
      gcc_unreachable ();
    }
}

/* Update the complex components of the ssa name on the lhs of STMT,
   inserting any required initialization code after GSI.  */

static void
update_complex_components (gimple_stmt_iterator *gsi, gimple stmt, tree r,
			   tree i)
{
  tree lhs;
  gimple_seq list;

  lhs = gimple_get_lhs (stmt);

  list = set_component_ssa_name (lhs, false, r);
  if (list)
    gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);

  list = set_component_ssa_name (lhs, true, i);
  if (list)
    gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);
}

/* As above, but queue the initialization code on edge E instead of at
   a statement iterator.  */

static void
update_complex_components_on_edge (edge e, tree lhs, tree r, tree i)
{
  gimple_seq list;

  list = set_component_ssa_name (lhs, false, r);
  if (list)
    gsi_insert_seq_on_edge (e, list);

  list = set_component_ssa_name (lhs, true, i);
  if (list)
    gsi_insert_seq_on_edge (e, list);
}


/* Update an assignment to a complex variable in place.
   The statement at GSI is rewritten to a COMPLEX_EXPR built from the
   scalar components R and I.  */

static void
update_complex_assignment (gimple_stmt_iterator *gsi, tree r, tree i)
{
  gimple stmt;

  gimple_assign_set_rhs_with_ops (gsi, COMPLEX_EXPR, r, i);
  stmt = gsi_stmt (*gsi);
  update_stmt (stmt);
  /* The rewritten statement can no longer throw; prune dead EH edges.  */
  if (maybe_clean_eh_stmt (stmt))
    gimple_purge_dead_eh_edges (gimple_bb (stmt));

  if (gimple_in_ssa_p (cfun))
    update_complex_components (gsi, gsi_stmt (*gsi), r, i);
}


/* Generate code at the entry point of the function to initialize the
   component variables for a complex parameter.  */

static void
update_parameter_components (void)
{
  edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR);
  tree parm;

  for (parm = DECL_ARGUMENTS (cfun->decl); parm ; parm = DECL_CHAIN (parm))
    {
      tree type = TREE_TYPE (parm);
      tree ssa_name, r, i;

      if (TREE_CODE (type) != COMPLEX_TYPE || !is_gimple_reg (parm))
	continue;

      type = TREE_TYPE (type);
      ssa_name = ssa_default_def (cfun, parm);
      if (!ssa_name)
	continue;

      r = build1 (REALPART_EXPR, type, ssa_name);
      i = build1 (IMAGPART_EXPR, type, ssa_name);
      update_complex_components_on_edge (entry_edge, ssa_name, r, i);
    }
}

/* Generate code to set the component variables of a complex variable
   to match the PHI statements in block BB.  A scalar PHI is created
   for each component whose value is an SSA name.  */

static void
update_phi_components (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);

      if (is_complex_reg (gimple_phi_result (phi)))
	{
	  tree lr, li;
	  gimple pr = NULL, pi = NULL;
	  unsigned int i, n;

	  /* A non-SSA_NAME result means the component is a known
	     constant (see get_component_ssa_name); no PHI is needed.  */
	  lr = get_component_ssa_name (gimple_phi_result (phi), false);
	  if (TREE_CODE (lr) == SSA_NAME)
	    pr = create_phi_node (lr, bb);

	  li = get_component_ssa_name (gimple_phi_result (phi), true);
	  if (TREE_CODE (li) == SSA_NAME)
	    pi = create_phi_node (li, bb);

	  for (i = 0, n = gimple_phi_num_args (phi); i < n; ++i)
	    {
	      tree comp, arg = gimple_phi_arg_def (phi, i);
	      if (pr)
		{
		  comp = extract_component (NULL, arg, false, false);
		  SET_PHI_ARG_DEF (pr, i, comp);
		}
	      if (pi)
		{
		  comp = extract_component (NULL, arg, true, false);
		  SET_PHI_ARG_DEF (pi, i, comp);
		}
	    }
	}
    }
}

/* Expand a complex move to scalars.  */

static void
expand_complex_move (gimple_stmt_iterator *gsi, tree type)
{
  tree inner_type = TREE_TYPE (type);
  tree r, i, lhs, rhs;
  gimple stmt = gsi_stmt (*gsi);

  if (is_gimple_assign (stmt))
    {
      lhs = gimple_assign_lhs (stmt);
      if (gimple_num_ops (stmt) == 2)
	rhs = gimple_assign_rhs1 (stmt);
      else
	rhs = NULL_TREE;
    }
  else if (is_gimple_call (stmt))
    {
      lhs = gimple_call_lhs (stmt);
      rhs = NULL_TREE;
    }
  else
    gcc_unreachable ();

  if (TREE_CODE (lhs) == SSA_NAME)
    {
      if (is_ctrl_altering_stmt (stmt))
	{
	  edge e;

	  /* The value is not assigned on the exception edges, so we need not
	     concern ourselves there.  We do need to update on the fallthru
	     edge.  Find it.  */
	  e = find_fallthru_edge (gsi_bb (*gsi)->succs);
	  if (!e)
	    gcc_unreachable ();

	  r = build1 (REALPART_EXPR, inner_type, lhs);
	  i = build1 (IMAGPART_EXPR, inner_type, lhs);
	  update_complex_components_on_edge (e, lhs, r, i);
	}
      else if (is_gimple_call (stmt)
	       || gimple_has_side_effects (stmt)
	       || gimple_assign_rhs_code (stmt) == PAREN_EXPR)
	{
	  /* Leave the statement intact; just record the components.  */
	  r = build1 (REALPART_EXPR, inner_type, lhs);
	  i = build1 (IMAGPART_EXPR, inner_type, lhs);
	  update_complex_components (gsi, stmt, r, i);
	}
      else
	{
	  if (gimple_assign_rhs_code (stmt) != COMPLEX_EXPR)
	    {
	      r = extract_component (gsi, rhs, 0, true);
	      i = extract_component (gsi, rhs, 1, true);
	    }
	  else
	    {
	      r = gimple_assign_rhs1 (stmt);
	      i = gimple_assign_rhs2 (stmt);
	    }
	  update_complex_assignment (gsi, r, i);
	}
    }
  else if (rhs && TREE_CODE (rhs) == SSA_NAME && !TREE_SIDE_EFFECTS (lhs))
    {
      /* Store of a complex SSA name into memory: split into two
	 component stores.  */
      tree x;
      gimple t;

      r = extract_component (gsi, rhs, 0, false);
      i = extract_component (gsi, rhs, 1, false);

      x = build1 (REALPART_EXPR, inner_type, unshare_expr (lhs));
      t = gimple_build_assign (x, r);
      gsi_insert_before (gsi, t, GSI_SAME_STMT);

      if (stmt == gsi_stmt (*gsi))
	{
	  x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs));
	  gimple_assign_set_lhs (stmt, x);
	  gimple_assign_set_rhs1 (stmt, i);
	}
      else
	{
	  x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs));
	  t = gimple_build_assign (x, i);
	  gsi_insert_before (gsi, t, GSI_SAME_STMT);

	  stmt = gsi_stmt (*gsi);
	  gcc_assert (gimple_code (stmt) == GIMPLE_RETURN);
	  gimple_return_set_retval (stmt, lhs);
	}

      update_stmt (stmt);
    }
}

/* Expand complex addition to scalars:
	a + b = (ar + br) + i(ai + bi)
	a - b = (ar - br) + i(ai - bi)

   AL and BL are the lattice values of the operands; degenerate operands
   allow one or both component operations to be elided.  */

static void
expand_complex_addition (gimple_stmt_iterator *gsi, tree inner_type,
			 tree ar, tree ai, tree br, tree bi,
			 enum tree_code code,
			 complex_lattice_t al, complex_lattice_t bl)
{
  tree rr, ri;

  switch (PAIR (al, bl))
    {
    case PAIR (ONLY_REAL, ONLY_REAL):
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = ai;
      break;

    case PAIR (ONLY_REAL, ONLY_IMAG):
      rr = ar;
      if (code == MINUS_EXPR)
	ri = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ai, bi);
      else
	ri = bi;
      break;

    case PAIR (ONLY_IMAG, ONLY_REAL):
      if (code == MINUS_EXPR)
	rr = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ar, br);
      else
	rr = br;
      ri = ai;
      break;

    case PAIR (ONLY_IMAG, ONLY_IMAG):
      rr = ar;
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
      break;

    case PAIR (VARYING, ONLY_REAL):
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = ai;
      break;

    case PAIR (VARYING, ONLY_IMAG):
      rr = ar;
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
      break;

    case PAIR (ONLY_REAL, VARYING):
      if (code == MINUS_EXPR)
	goto general;
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = bi;
      break;

    case PAIR (ONLY_IMAG, VARYING):
      if (code == MINUS_EXPR)
	goto general;
      rr = br;
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
      break;

    case PAIR (VARYING, VARYING):
    general:
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
      break;

    default:
      gcc_unreachable ();
    }

  update_complex_assignment (gsi, rr, ri);
}

/* Expand a complex multiplication or division to a libcall to the c99
   compliant routines.
   CODE is MULT_EXPR or RDIV_EXPR; the statement at GSI is replaced by
   the call.  */

static void
expand_complex_libcall (gimple_stmt_iterator *gsi, tree ar, tree ai,
			tree br, tree bi, enum tree_code code)
{
  enum machine_mode mode;
  enum built_in_function bcode;
  tree fn, type, lhs;
  gimple old_stmt, stmt;

  old_stmt = gsi_stmt (*gsi);
  lhs = gimple_assign_lhs (old_stmt);
  type = TREE_TYPE (lhs);

  mode = TYPE_MODE (type);
  gcc_assert (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT);

  /* Select the builtin by offsetting from the first complex-float mode.  */
  if (code == MULT_EXPR)
    bcode = ((enum built_in_function)
	     (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
  else if (code == RDIV_EXPR)
    bcode = ((enum built_in_function)
	     (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
  else
    gcc_unreachable ();
  fn = builtin_decl_explicit (bcode);

  stmt = gimple_build_call (fn, 4, ar, ai, br, bi);
  gimple_call_set_lhs (stmt, lhs);
  update_stmt (stmt);
  gsi_replace (gsi, stmt, false);

  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
    gimple_purge_dead_eh_edges (gsi_bb (*gsi));

  if (gimple_in_ssa_p (cfun))
    {
      type = TREE_TYPE (type);
      update_complex_components (gsi, stmt,
				 build1 (REALPART_EXPR, type, lhs),
				 build1 (IMAGPART_EXPR, type, lhs));
      SSA_NAME_DEF_STMT (lhs) = stmt;
    }
}

/* Expand complex multiplication to scalars:
	a * b = (ar*br - ai*bi) + i(ar*bi + br*ai)

   AL and BL are the operand lattice values; degenerate operands
   reduce the number of scalar multiplies needed.  */

static void
expand_complex_multiplication (gimple_stmt_iterator *gsi, tree inner_type,
			       tree ar, tree ai, tree br, tree bi,
			       complex_lattice_t al, complex_lattice_t bl)
{
  tree rr, ri;

  /* Multiplication is commutative: canonicalize so AL >= BL, halving
     the number of PAIR cases to handle below.  */
  if (al < bl)
    {
      complex_lattice_t tl;
      rr = ar, ar = br, br = rr;
      ri = ai, ai = bi, bi = ri;
      tl = al, al = bl, bl = tl;
    }

  switch (PAIR (al, bl))
    {
    case PAIR (ONLY_REAL, ONLY_REAL):
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
      ri = ai;
      break;

    case PAIR (ONLY_IMAG, ONLY_REAL):
      rr = ar;
      /* Multiplying by i exactly just swaps the components.  */
      if (TREE_CODE (ai) == REAL_CST
	  && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst1))
	ri = br;
      else
	ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
      break;

    case PAIR (ONLY_IMAG, ONLY_IMAG):
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
      rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, rr);
      ri = ar;
      break;

    case PAIR (VARYING, ONLY_REAL):
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
      ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
      break;

    case PAIR (VARYING, ONLY_IMAG):
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
      rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, rr);
      ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);
      break;

    case PAIR (VARYING, VARYING):
      /* -fcx-limited-range off (flag_complex_method == 2): use the
	 C99-conformant libcall for float types.  */
      if (flag_complex_method == 2 && SCALAR_FLOAT_TYPE_P (inner_type))
	{
	  expand_complex_libcall (gsi, ar, ai, br, bi, MULT_EXPR);
	  return;
	}
      else
	{
	  tree t1, t2, t3, t4;

	  t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
	  t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
	  t3 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);

	  /* Avoid expanding redundant multiplication for the common
	     case of squaring a complex number.  */
	  if (ar == br && ai == bi)
	    t4 = t3;
	  else
	    t4 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);

	  rr = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, t2);
	  ri = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t3, t4);
	}
      break;

    default:
      gcc_unreachable ();
    }

  update_complex_assignment (gsi, rr, ri);
}

/* Keep this algorithm in sync with fold-const.c:const_binop().

   Expand complex division to scalars, straightforward algorithm.
	a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
	t = br*br + bi*bi
*/

static void
expand_complex_div_straight (gimple_stmt_iterator *gsi, tree inner_type,
			     tree ar, tree ai, tree br, tree bi,
			     enum tree_code code)
{
  tree rr, ri, div, t1, t2, t3;

  /* div = br*br + bi*bi  */
  t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, br, br);
  t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, bi, bi);
  div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, t2);

  /* rr = (ar*br + ai*bi) / div  */
  t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
  t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
  t3 = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, t2);
  rr = gimplify_build2 (gsi, code, inner_type, t3, div);

  /* ri = (ai*br - ar*bi) / div  */
  t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
  t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);
  t3 = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, t2);
  ri = gimplify_build2 (gsi, code, inner_type, t3, div);

  update_complex_assignment (gsi, rr, ri);
}

/* Keep this algorithm in sync with fold-const.c:const_binop().

   Expand complex division to scalars, modified algorithm to minimize
   overflow with wide input ranges.  */

static void
expand_complex_div_wide (gimple_stmt_iterator *gsi, tree inner_type,
			 tree ar, tree ai, tree br, tree bi,
			 enum tree_code code)
{
  tree rr, ri, ratio, div, t1, t2, tr, ti, compare;
  basic_block bb_cond, bb_true, bb_false, bb_join;
  gimple stmt;

  /* Examine |br| < |bi|, and branch.
*/ 1100 t1 = gimplify_build1 (gsi, ABS_EXPR, inner_type, br); 1101 t2 = gimplify_build1 (gsi, ABS_EXPR, inner_type, bi); 1102 compare = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), 1103 LT_EXPR, boolean_type_node, t1, t2); 1104 STRIP_NOPS (compare); 1105 1106 bb_cond = bb_true = bb_false = bb_join = NULL; 1107 rr = ri = tr = ti = NULL; 1108 if (TREE_CODE (compare) != INTEGER_CST) 1109 { 1110 edge e; 1111 gimple stmt; 1112 tree cond, tmp; 1113 1114 tmp = create_tmp_var (boolean_type_node, NULL); 1115 stmt = gimple_build_assign (tmp, compare); 1116 if (gimple_in_ssa_p (cfun)) 1117 { 1118 tmp = make_ssa_name (tmp, stmt); 1119 gimple_assign_set_lhs (stmt, tmp); 1120 } 1121 1122 gsi_insert_before (gsi, stmt, GSI_SAME_STMT); 1123 1124 cond = fold_build2_loc (gimple_location (stmt), 1125 EQ_EXPR, boolean_type_node, tmp, boolean_true_node); 1126 stmt = gimple_build_cond_from_tree (cond, NULL_TREE, NULL_TREE); 1127 gsi_insert_before (gsi, stmt, GSI_SAME_STMT); 1128 1129 /* Split the original block, and create the TRUE and FALSE blocks. */ 1130 e = split_block (gsi_bb (*gsi), stmt); 1131 bb_cond = e->src; 1132 bb_join = e->dest; 1133 bb_true = create_empty_bb (bb_cond); 1134 bb_false = create_empty_bb (bb_true); 1135 1136 /* Wire the blocks together. */ 1137 e->flags = EDGE_TRUE_VALUE; 1138 redirect_edge_succ (e, bb_true); 1139 make_edge (bb_cond, bb_false, EDGE_FALSE_VALUE); 1140 make_edge (bb_true, bb_join, EDGE_FALLTHRU); 1141 make_edge (bb_false, bb_join, EDGE_FALLTHRU); 1142 1143 /* Update dominance info. Note that bb_join's data was 1144 updated by split_block. 
*/ 1145 if (dom_info_available_p (CDI_DOMINATORS)) 1146 { 1147 set_immediate_dominator (CDI_DOMINATORS, bb_true, bb_cond); 1148 set_immediate_dominator (CDI_DOMINATORS, bb_false, bb_cond); 1149 } 1150 1151 rr = create_tmp_reg (inner_type, NULL); 1152 ri = create_tmp_reg (inner_type, NULL); 1153 } 1154 1155 /* In the TRUE branch, we compute 1156 ratio = br/bi; 1157 div = (br * ratio) + bi; 1158 tr = (ar * ratio) + ai; 1159 ti = (ai * ratio) - ar; 1160 tr = tr / div; 1161 ti = ti / div; */ 1162 if (bb_true || integer_nonzerop (compare)) 1163 { 1164 if (bb_true) 1165 { 1166 *gsi = gsi_last_bb (bb_true); 1167 gsi_insert_after (gsi, gimple_build_nop (), GSI_NEW_STMT); 1168 } 1169 1170 ratio = gimplify_build2 (gsi, code, inner_type, br, bi); 1171 1172 t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, br, ratio); 1173 div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, bi); 1174 1175 t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, ratio); 1176 tr = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, ai); 1177 1178 t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, ratio); 1179 ti = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, ar); 1180 1181 tr = gimplify_build2 (gsi, code, inner_type, tr, div); 1182 ti = gimplify_build2 (gsi, code, inner_type, ti, div); 1183 1184 if (bb_true) 1185 { 1186 stmt = gimple_build_assign (rr, tr); 1187 gsi_insert_before (gsi, stmt, GSI_SAME_STMT); 1188 stmt = gimple_build_assign (ri, ti); 1189 gsi_insert_before (gsi, stmt, GSI_SAME_STMT); 1190 gsi_remove (gsi, true); 1191 } 1192 } 1193 1194 /* In the FALSE branch, we compute 1195 ratio = d/c; 1196 divisor = (d * ratio) + c; 1197 tr = (b * ratio) + a; 1198 ti = b - (a * ratio); 1199 tr = tr / div; 1200 ti = ti / div; */ 1201 if (bb_false || integer_zerop (compare)) 1202 { 1203 if (bb_false) 1204 { 1205 *gsi = gsi_last_bb (bb_false); 1206 gsi_insert_after (gsi, gimple_build_nop (), GSI_NEW_STMT); 1207 } 1208 1209 ratio = gimplify_build2 (gsi, code, inner_type, bi, br); 1210 1211 t1 = 
gimplify_build2 (gsi, MULT_EXPR, inner_type, bi, ratio); 1212 div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, br); 1213 1214 t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, ratio); 1215 tr = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, ar); 1216 1217 t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, ratio); 1218 ti = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ai, t1); 1219 1220 tr = gimplify_build2 (gsi, code, inner_type, tr, div); 1221 ti = gimplify_build2 (gsi, code, inner_type, ti, div); 1222 1223 if (bb_false) 1224 { 1225 stmt = gimple_build_assign (rr, tr); 1226 gsi_insert_before (gsi, stmt, GSI_SAME_STMT); 1227 stmt = gimple_build_assign (ri, ti); 1228 gsi_insert_before (gsi, stmt, GSI_SAME_STMT); 1229 gsi_remove (gsi, true); 1230 } 1231 } 1232 1233 if (bb_join) 1234 *gsi = gsi_start_bb (bb_join); 1235 else 1236 rr = tr, ri = ti; 1237 1238 update_complex_assignment (gsi, rr, ri); 1239 } 1240 1241 /* Expand complex division to scalars. */ 1242 1243 static void 1244 expand_complex_division (gimple_stmt_iterator *gsi, tree inner_type, 1245 tree ar, tree ai, tree br, tree bi, 1246 enum tree_code code, 1247 complex_lattice_t al, complex_lattice_t bl) 1248 { 1249 tree rr, ri; 1250 1251 switch (PAIR (al, bl)) 1252 { 1253 case PAIR (ONLY_REAL, ONLY_REAL): 1254 rr = gimplify_build2 (gsi, code, inner_type, ar, br); 1255 ri = ai; 1256 break; 1257 1258 case PAIR (ONLY_REAL, ONLY_IMAG): 1259 rr = ai; 1260 ri = gimplify_build2 (gsi, code, inner_type, ar, bi); 1261 ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ri); 1262 break; 1263 1264 case PAIR (ONLY_IMAG, ONLY_REAL): 1265 rr = ar; 1266 ri = gimplify_build2 (gsi, code, inner_type, ai, br); 1267 break; 1268 1269 case PAIR (ONLY_IMAG, ONLY_IMAG): 1270 rr = gimplify_build2 (gsi, code, inner_type, ai, bi); 1271 ri = ar; 1272 break; 1273 1274 case PAIR (VARYING, ONLY_REAL): 1275 rr = gimplify_build2 (gsi, code, inner_type, ar, br); 1276 ri = gimplify_build2 (gsi, code, inner_type, ai, br); 1277 
break; 1278 1279 case PAIR (VARYING, ONLY_IMAG): 1280 rr = gimplify_build2 (gsi, code, inner_type, ai, bi); 1281 ri = gimplify_build2 (gsi, code, inner_type, ar, bi); 1282 ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ri); 1283 1284 case PAIR (ONLY_REAL, VARYING): 1285 case PAIR (ONLY_IMAG, VARYING): 1286 case PAIR (VARYING, VARYING): 1287 switch (flag_complex_method) 1288 { 1289 case 0: 1290 /* straightforward implementation of complex divide acceptable. */ 1291 expand_complex_div_straight (gsi, inner_type, ar, ai, br, bi, code); 1292 break; 1293 1294 case 2: 1295 if (SCALAR_FLOAT_TYPE_P (inner_type)) 1296 { 1297 expand_complex_libcall (gsi, ar, ai, br, bi, code); 1298 break; 1299 } 1300 /* FALLTHRU */ 1301 1302 case 1: 1303 /* wide ranges of inputs must work for complex divide. */ 1304 expand_complex_div_wide (gsi, inner_type, ar, ai, br, bi, code); 1305 break; 1306 1307 default: 1308 gcc_unreachable (); 1309 } 1310 return; 1311 1312 default: 1313 gcc_unreachable (); 1314 } 1315 1316 update_complex_assignment (gsi, rr, ri); 1317 } 1318 1319 /* Expand complex negation to scalars: 1320 -a = (-ar) + i(-ai) 1321 */ 1322 1323 static void 1324 expand_complex_negation (gimple_stmt_iterator *gsi, tree inner_type, 1325 tree ar, tree ai) 1326 { 1327 tree rr, ri; 1328 1329 rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ar); 1330 ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ai); 1331 1332 update_complex_assignment (gsi, rr, ri); 1333 } 1334 1335 /* Expand complex conjugate to scalars: 1336 ~a = (ar) + i(-ai) 1337 */ 1338 1339 static void 1340 expand_complex_conjugate (gimple_stmt_iterator *gsi, tree inner_type, 1341 tree ar, tree ai) 1342 { 1343 tree ri; 1344 1345 ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ai); 1346 1347 update_complex_assignment (gsi, ar, ri); 1348 } 1349 1350 /* Expand complex comparison (EQ or NE only). 
*/

static void
expand_complex_comparison (gimple_stmt_iterator *gsi, tree ar, tree ai,
			   tree br, tree bi, enum tree_code code)
{
  tree cr, ci, cc, type;
  gimple stmt;

  /* Compare the components pairwise; a == b iff both components compare
     equal, a != b iff either component differs.  */
  cr = gimplify_build2 (gsi, code, boolean_type_node, ar, br);
  ci = gimplify_build2 (gsi, code, boolean_type_node, ai, bi);
  cc = gimplify_build2 (gsi,
			(code == EQ_EXPR ? TRUTH_AND_EXPR : TRUTH_OR_EXPR),
			boolean_type_node, cr, ci);

  stmt = gsi_stmt (*gsi);

  /* Rewrite the consumer of the comparison in place.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_RETURN:
      type = TREE_TYPE (gimple_return_retval (stmt));
      gimple_return_set_retval (stmt, fold_convert (type, cc));
      break;

    case GIMPLE_ASSIGN:
      type = TREE_TYPE (gimple_assign_lhs (stmt));
      gimple_assign_set_rhs_from_tree (gsi, fold_convert (type, cc));
      /* The setter may have replaced the statement; refetch it.  */
      stmt = gsi_stmt (*gsi);
      break;

    case GIMPLE_COND:
      gimple_cond_set_code (stmt, EQ_EXPR);
      gimple_cond_set_lhs (stmt, cc);
      gimple_cond_set_rhs (stmt, boolean_true_node);
      break;

    default:
      gcc_unreachable ();
    }

  update_stmt (stmt);
}

/* Expand inline asm that sets some complex SSA_NAMEs.  */

static void
expand_complex_asm (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  unsigned int i;

  for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
    {
      tree link = gimple_asm_output_op (stmt, i);
      tree op = TREE_VALUE (link);
      if (TREE_CODE (op) == SSA_NAME
	  && TREE_CODE (TREE_TYPE (op)) == COMPLEX_TYPE)
	{
	  tree type = TREE_TYPE (op);
	  tree inner_type = TREE_TYPE (type);
	  tree r = build1 (REALPART_EXPR, inner_type, op);
	  /* NOTE: this tree shadows the loop counter I; the counter is
	     only touched in the loop header, so this is harmless.  */
	  tree i = build1 (IMAGPART_EXPR, inner_type, op);
	  gimple_seq list = set_component_ssa_name (op, false, r);

	  if (list)
	    gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);

	  list = set_component_ssa_name (op, true, i);
	  if (list)
	    gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);
	}
    }
}

/* Process one statement.  If we identify a complex operation, expand it.  */

static void
expand_complex_operations_1 (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree type, inner_type, lhs;
  tree ac, ar, ai, bc, br, bi;
  complex_lattice_t al, bl;
  enum tree_code code;

  /* Asm statements need their complex outputs decomposed separately.  */
  if (gimple_code (stmt) == GIMPLE_ASM)
    {
      expand_complex_asm (gsi);
      return;
    }

  lhs = gimple_get_lhs (stmt);
  if (!lhs && gimple_code (stmt) != GIMPLE_COND)
    return;

  type = TREE_TYPE (gimple_op (stmt, 0));
  code = gimple_expr_code (stmt);

  /* Initial filter for operations we handle.
*/
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case RDIV_EXPR:
    case NEGATE_EXPR:
    case CONJ_EXPR:
      if (TREE_CODE (type) != COMPLEX_TYPE)
	return;
      inner_type = TREE_TYPE (type);
      break;

    case EQ_EXPR:
    case NE_EXPR:
      /* Note, both GIMPLE_ASSIGN and GIMPLE_COND may have an EQ_EXPR
	 subcode, so we need to access the operands using gimple_op.  */
      inner_type = TREE_TYPE (gimple_op (stmt, 1));
      if (TREE_CODE (inner_type) != COMPLEX_TYPE)
	return;
      break;

    default:
      {
	tree rhs;

	/* GIMPLE_COND may also fallthru here, but we do not need to
	   do anything with it.  */
	if (gimple_code (stmt) == GIMPLE_COND)
	  return;

	/* Plain moves of complex values, and REALPART/IMAGPART reads,
	   are still rewritten in terms of the component SSA names.  */
	if (TREE_CODE (type) == COMPLEX_TYPE)
	  expand_complex_move (gsi, type);
	else if (is_gimple_assign (stmt)
		 && (gimple_assign_rhs_code (stmt) == REALPART_EXPR
		     || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR)
		 && TREE_CODE (lhs) == SSA_NAME)
	  {
	    rhs = gimple_assign_rhs1 (stmt);
	    rhs = extract_component (gsi, TREE_OPERAND (rhs, 0),
				     gimple_assign_rhs_code (stmt)
				       == IMAGPART_EXPR,
				     false);
	    gimple_assign_set_rhs_from_tree (gsi, rhs);
	    stmt = gsi_stmt (*gsi);
	    update_stmt (stmt);
	  }
      }
      return;
    }

  /* Extract the components of the two complex values.  Make sure and
     handle the common case of the same value used twice specially.  */
  if (is_gimple_assign (stmt))
    {
      ac = gimple_assign_rhs1 (stmt);
      bc = (gimple_num_ops (stmt) > 2) ? gimple_assign_rhs2 (stmt) : NULL;
    }
  /* GIMPLE_CALL can not get here.  */
  else
    {
      ac = gimple_cond_lhs (stmt);
      bc = gimple_cond_rhs (stmt);
    }

  ar = extract_component (gsi, ac, false, true);
  ai = extract_component (gsi, ac, true, true);

  if (ac == bc)
    br = ar, bi = ai;
  else if (bc)
    {
      br = extract_component (gsi, bc, 0, true);
      bi = extract_component (gsi, bc, 1, true);
    }
  else
    br = bi = NULL_TREE;

  /* Fetch the lattice values; outside SSA form everything is VARYING.  */
  if (gimple_in_ssa_p (cfun))
    {
      al = find_lattice_value (ac);
      if (al == UNINITIALIZED)
	al = VARYING;

      if (TREE_CODE_CLASS (code) == tcc_unary)
	bl = UNINITIALIZED;
      else if (ac == bc)
	bl = al;
      else
	{
	  bl = find_lattice_value (bc);
	  if (bl == UNINITIALIZED)
	    bl = VARYING;
	}
    }
  else
    al = bl = VARYING;

  /* Dispatch to the per-operation expander.  */
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      expand_complex_addition (gsi, inner_type, ar, ai, br, bi, code, al, bl);
      break;

    case MULT_EXPR:
      expand_complex_multiplication (gsi, inner_type, ar, ai, br, bi, al, bl);
      break;

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case RDIV_EXPR:
      expand_complex_division (gsi, inner_type, ar, ai, br, bi, code, al, bl);
      break;

    case NEGATE_EXPR:
      expand_complex_negation (gsi, inner_type, ar, ai);
      break;

    case CONJ_EXPR:
      expand_complex_conjugate (gsi, inner_type, ar, ai);
      break;

    case EQ_EXPR:
    case NE_EXPR:
      expand_complex_comparison (gsi, ar, ai, br, bi, code);
      break;

    default:
      gcc_unreachable ();
    }
}


/* Entry point for complex operation lowering during optimization.
*/

static unsigned int
tree_lower_complex (void)
{
  int old_last_basic_block;
  gimple_stmt_iterator gsi;
  basic_block bb;

  /* Skip the whole pass when no statement needs lowering.  */
  if (!init_dont_simulate_again ())
    return 0;

  complex_lattice_values.create (num_ssa_names);
  complex_lattice_values.safe_grow_cleared (num_ssa_names);

  init_parameter_lattice_values ();
  ssa_propagate (complex_visit_stmt, complex_visit_phi);

  complex_variable_components = htab_create (10, int_tree_map_hash,
					     int_tree_map_eq, free);

  complex_ssa_name_components.create (2 * num_ssa_names);
  complex_ssa_name_components.safe_grow_cleared (2 * num_ssa_names);

  update_parameter_components ();

  /* ??? Ideally we'd traverse the blocks in breadth-first order.  */
  old_last_basic_block = last_basic_block;
  FOR_EACH_BB (bb)
    {
      /* Ignore blocks created by the expansion itself.  */
      if (bb->index >= old_last_basic_block)
	continue;

      update_phi_components (bb);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	expand_complex_operations_1 (&gsi);
    }

  gsi_commit_edge_inserts ();

  htab_delete (complex_variable_components);
  complex_ssa_name_components.release ();
  complex_lattice_values.release ();
  return 0;
}

struct gimple_opt_pass pass_lower_complex =
{
 {
  GIMPLE_PASS,
  "cplxlower",				/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  0,					/* gate */
  tree_lower_complex,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_ssa,				/* properties_required */
  PROP_gimple_lcx,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_ggc_collect
    | TODO_update_ssa
    | TODO_verify_stmts			/* todo_flags_finish */
 }
};


/* Gate for the -O0 variant of the pass: run only if lowering has not
   already been performed by the optimizing variant.  */

static bool
gate_no_optimization (void)
{
  /* With errors, normal optimization passes are not run.  If we don't
     lower complex operations at all, rtl expansion will abort.  */
  return !(cfun->curr_properties & PROP_gimple_lcx);
}

struct gimple_opt_pass pass_lower_complex_O0 =
{
 {
  GIMPLE_PASS,
  "cplxlower0",				/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  gate_no_optimization,			/* gate */
  tree_lower_complex,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg,				/* properties_required */
  PROP_gimple_lcx,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_ggc_collect
    | TODO_update_ssa
    | TODO_verify_stmts			/* todo_flags_finish */
 }
};