/* Lower complex number operations to scalar operations.
   Copyright (C) 2004-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "tree-hasher.h"
#include "cfgloop.h"
#include "cfganal.h"


/* For each complex ssa name, a lattice value.  We're interested in finding
   out whether a complex number is degenerate in some way, having only real
   or only complex parts.  */

enum
{
  UNINITIALIZED = 0,	/* Nothing known yet; bottom of the lattice.  */
  ONLY_REAL = 1,	/* Imaginary part known to be zero.  */
  ONLY_IMAG = 2,	/* Real part known to be zero.  */
  VARYING = 3		/* Both parts may be nonzero; top of the lattice.
			   Note VARYING == ONLY_REAL | ONLY_IMAG, so IOR
			   is the lattice meet operation.  */
};

/* The type complex_lattice_t holds combinations of the above
   constants.  */
typedef int complex_lattice_t;

/* Pack two 2-bit lattice values into one integer, suitable for use as
   a switch discriminator when dispatching on an operand-kind pair.  */
#define PAIR(a, b)  ((a) << 2 | (b))

/* Driver for the SSA propagation engine; the two visit hooks below
   evaluate statements/PHIs against the lattice.  */
class complex_propagate : public ssa_propagation_engine
{
  enum ssa_prop_result visit_stmt (gimple *, edge *, tree *) FINAL OVERRIDE;
  enum ssa_prop_result visit_phi (gphi *) FINAL OVERRIDE;
};

/* Indexed by SSA_NAME_VERSION: the lattice value computed for each
   complex SSA name.  */
static vec<complex_lattice_t> complex_lattice_values;

/* For each complex variable, a pair of variables for the components exists in
   the hashtable.  */
static int_tree_htab_type *complex_variable_components;

/* For each complex SSA_NAME, a pair of ssa names for the components.  */
static vec<tree> complex_ssa_name_components;

/* Vector of PHI triplets (original complex PHI and corresponding real and
   imag PHIs if real and/or imag PHIs contain temporarily
   non-SSA_NAME/non-invariant args that need to be replaced by SSA_NAMEs.  */
static vec<gphi *> phis_to_revisit;

/* BBs that need EH cleanup.  */
static bitmap need_eh_cleanup;

/* Lookup UID in the complex_variable_components hashtable and return the
   associated tree.  */
static tree
cvc_lookup (unsigned int uid)
{
  struct int_tree_map in;
  in.uid = uid;
  return complex_variable_components->find_with_hash (in, uid).to;
}

/* Insert the pair UID, TO into the complex_variable_components hashtable.  */

static void
cvc_insert (unsigned int uid, tree to)
{
  int_tree_map h;
  int_tree_map *loc;

  h.uid = uid;
  loc = complex_variable_components->find_slot_with_hash (h, uid, INSERT);
  loc->uid = uid;
  loc->to = to;
}

/* Return true if T is not a zero constant.  In the case of real values,
   we're only interested in +0.0.  Non-constant trees conservatively
   count as nonzero.  */

static int
some_nonzerop (tree t)
{
  int zerop = false;

  /* Operations with real or imaginary part of a complex number zero
     cannot be treated the same as operations with a real or imaginary
     operand if we care about the signs of zeros in the result.  */
  if (TREE_CODE (t) == REAL_CST && !flag_signed_zeros)
    zerop = real_identical (&TREE_REAL_CST (t), &dconst0);
  else if (TREE_CODE (t) == FIXED_CST)
    zerop = fixed_zerop (t);
  else if (TREE_CODE (t) == INTEGER_CST)
    zerop = integer_zerop (t);

  return !zerop;
}


/* Compute a lattice value from the components of a complex type REAL
   and IMAG.  */

static complex_lattice_t
find_lattice_value_parts (tree real, tree imag)
{
  int r, i;
  complex_lattice_t ret;

  r = some_nonzerop (real);
  i = some_nonzerop (imag);
  /* r and i are 0/1, so this yields UNINITIALIZED, ONLY_REAL,
     ONLY_IMAG or VARYING directly.  */
  ret = r * ONLY_REAL + i * ONLY_IMAG;

  /* ??? On occasion we could do better than mapping 0+0i to real, but we
     certainly don't want to leave it UNINITIALIZED, which eventually gets
     mapped to VARYING.  */
  if (ret == UNINITIALIZED)
    ret = ONLY_REAL;

  return ret;
}


/* Compute a lattice value from gimple_val T.  */

static complex_lattice_t
find_lattice_value (tree t)
{
  tree real, imag;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      return complex_lattice_values[SSA_NAME_VERSION (t)];

    case COMPLEX_CST:
      real = TREE_REALPART (t);
      imag = TREE_IMAGPART (t);
      break;

    default:
      gcc_unreachable ();
    }

  return find_lattice_value_parts (real, imag);
}

/* Determine if LHS is something for which we're interested in seeing
   simulation results.  */

static bool
is_complex_reg (tree lhs)
{
  return TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE && is_gimple_reg (lhs);
}

/* Mark the incoming parameters to the function as VARYING.
*/

static void
init_parameter_lattice_values (void)
{
  tree parm, ssa_name;

  /* Parameters arrive with unknown contents, so their default-def SSA
     names start at VARYING rather than UNINITIALIZED.  */
  for (parm = DECL_ARGUMENTS (cfun->decl); parm ; parm = DECL_CHAIN (parm))
    if (is_complex_reg (parm)
	&& (ssa_name = ssa_default_def (cfun, parm)) != NULL_TREE)
      complex_lattice_values[SSA_NAME_VERSION (ssa_name)] = VARYING;
}

/* Initialize simulation state for each statement.  Return false if we
   found no statements we want to simulate, and thus there's nothing
   for the entire pass to do.  */

static bool
init_dont_simulate_again (void)
{
  basic_block bb;
  bool saw_a_complex_op = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* PHIs are simulated iff they produce a complex register value.  */
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  prop_set_simulate_again (phi,
				   is_complex_reg (gimple_phi_result (phi)));
	}

      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gimple *stmt;
	  tree op0, op1;
	  bool sim_again_p;

	  stmt = gsi_stmt (gsi);
	  op0 = op1 = NULL_TREE;

	  /* Most control-altering statements must be initially
	     simulated, else we won't cover the entire cfg.  */
	  sim_again_p = stmt_ends_bb_p (stmt);

	  /* Collect the operands whose types decide whether this is a
	     complex operation worth lowering.  */
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_CALL:
	      if (gimple_call_lhs (stmt))
		sim_again_p = is_complex_reg (gimple_call_lhs (stmt));
	      break;

	    case GIMPLE_ASSIGN:
	      sim_again_p = is_complex_reg (gimple_assign_lhs (stmt));
	      if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
		  || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR)
		/* For part extractions, look at the complex operand
		   underneath, not the scalar result.  */
		op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
	      else
		op0 = gimple_assign_rhs1 (stmt);
	      if (gimple_num_ops (stmt) > 2)
		op1 = gimple_assign_rhs2 (stmt);
	      break;

	    case GIMPLE_COND:
	      op0 = gimple_cond_lhs (stmt);
	      op1 = gimple_cond_rhs (stmt);
	      break;

	    default:
	      break;
	    }

	  if (op0 || op1)
	    switch (gimple_expr_code (stmt))
	      {
	      case EQ_EXPR:
	      case NE_EXPR:
	      case PLUS_EXPR:
	      case MINUS_EXPR:
	      case MULT_EXPR:
	      case TRUNC_DIV_EXPR:
	      case CEIL_DIV_EXPR:
	      case FLOOR_DIV_EXPR:
	      case ROUND_DIV_EXPR:
	      case RDIV_EXPR:
		if (TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE
		    || TREE_CODE (TREE_TYPE (op1)) == COMPLEX_TYPE)
		  saw_a_complex_op = true;
		break;

	      case NEGATE_EXPR:
	      case CONJ_EXPR:
		if (TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
		  saw_a_complex_op = true;
		break;

	      case REALPART_EXPR:
	      case IMAGPART_EXPR:
		/* The total store transformation performed during
		   gimplification creates such uninitialized loads
		   and we need to lower the statement to be able
		   to fix things up.  */
		if (TREE_CODE (op0) == SSA_NAME
		    && ssa_undefined_value_p (op0))
		  saw_a_complex_op = true;
		break;

	      default:
		break;
	      }

	  prop_set_simulate_again (stmt, sim_again_p);
	}
    }

  return saw_a_complex_op;
}


/* Evaluate statement STMT against the complex lattice defined above.
*/

enum ssa_prop_result
complex_propagate::visit_stmt (gimple *stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
			       tree *result_p)
{
  complex_lattice_t new_l, old_l, op1_l, op2_l;
  unsigned int ver;
  tree lhs;

  lhs = gimple_get_lhs (stmt);
  /* Skip anything but GIMPLE_ASSIGN and GIMPLE_CALL with a lhs.  */
  if (!lhs || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    return SSA_PROP_VARYING;

  /* These conditions should be satisfied due to the initial filter
     set up in init_dont_simulate_again.  */
  gcc_assert (TREE_CODE (lhs) == SSA_NAME);
  gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE);

  *result_p = lhs;
  ver = SSA_NAME_VERSION (lhs);
  old_l = complex_lattice_values[ver];

  switch (gimple_expr_code (stmt))
    {
    case SSA_NAME:
    case COMPLEX_CST:
      /* Copies take the lattice value of the source.  */
      new_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      break;

    case COMPLEX_EXPR:
      new_l = find_lattice_value_parts (gimple_assign_rhs1 (stmt),
					gimple_assign_rhs2 (stmt));
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
      op1_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      op2_l = find_lattice_value (gimple_assign_rhs2 (stmt));

      /* We've set up the lattice values such that IOR neatly
	 models addition.  */
      new_l = op1_l | op2_l;
      break;

    case MULT_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      op1_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      op2_l = find_lattice_value (gimple_assign_rhs2 (stmt));

      /* Obviously, if either varies, so does the result.  */
      if (op1_l == VARYING || op2_l == VARYING)
	new_l = VARYING;
      /* Don't prematurely promote variables if we've not yet seen
	 their inputs.  */
      else if (op1_l == UNINITIALIZED)
	new_l = op2_l;
      else if (op2_l == UNINITIALIZED)
	new_l = op1_l;
      else
	{
	  /* At this point both numbers have only one component.  If the
	     numbers are of opposite kind, the result is imaginary,
	     otherwise the result is real.  The add/subtract translates
	     the real/imag from/to 0/1; the ^ performs the comparison.  */
	  new_l = ((op1_l - ONLY_REAL) ^ (op2_l - ONLY_REAL)) + ONLY_REAL;

	  /* Don't allow the lattice value to flip-flop indefinitely.  */
	  new_l |= old_l;
	}
      break;

    case NEGATE_EXPR:
    case CONJ_EXPR:
      /* Negation and conjugation preserve which parts are zero.  */
      new_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      break;

    default:
      new_l = VARYING;
      break;
    }

  /* If nothing changed this round, let the propagator know.  */
  if (new_l == old_l)
    return SSA_PROP_NOT_INTERESTING;

  complex_lattice_values[ver] = new_l;
  return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING;
}

/* Evaluate a PHI node against the complex lattice defined above.  */

enum ssa_prop_result
complex_propagate::visit_phi (gphi *phi)
{
  complex_lattice_t new_l, old_l;
  unsigned int ver;
  tree lhs;
  int i;

  lhs = gimple_phi_result (phi);

  /* This condition should be satisfied due to the initial filter
     set up in init_dont_simulate_again.  */
  gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE);

  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    return SSA_PROP_VARYING;

  /* We've set up the lattice values such that IOR neatly models PHI meet.  */
  new_l = UNINITIALIZED;
  for (i = gimple_phi_num_args (phi) - 1; i >= 0; --i)
    new_l |= find_lattice_value (gimple_phi_arg_def (phi, i));

  ver = SSA_NAME_VERSION (lhs);
  old_l = complex_lattice_values[ver];

  if (new_l == old_l)
    return SSA_PROP_NOT_INTERESTING;

  complex_lattice_values[ver] = new_l;
  return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING;
}

/* Create one backing variable for a complex component of ORIG.
*/

static tree
create_one_component_var (tree type, tree orig, const char *prefix,
			  const char *suffix, enum tree_code code)
{
  tree r = create_tmp_var (type, prefix);

  DECL_SOURCE_LOCATION (r) = DECL_SOURCE_LOCATION (orig);
  DECL_ARTIFICIAL (r) = 1;

  if (DECL_NAME (orig) && !DECL_IGNORED_P (orig))
    {
      /* Name the component after the original (e.g. "x$real") and give
	 it a debug expr so debuggers can display the original.  */
      const char *name = IDENTIFIER_POINTER (DECL_NAME (orig));
      name = ACONCAT ((name, suffix, NULL));
      DECL_NAME (r) = get_identifier (name);

      SET_DECL_DEBUG_EXPR (r, build1 (code, type, orig));
      DECL_HAS_DEBUG_EXPR_P (r) = 1;
      DECL_IGNORED_P (r) = 0;
      TREE_NO_WARNING (r) = TREE_NO_WARNING (orig);
    }
  else
    {
      DECL_IGNORED_P (r) = 1;
      TREE_NO_WARNING (r) = 1;
    }

  return r;
}

/* Retrieve a value for a complex component of VAR, creating the backing
   variable lazily on first use.  */

static tree
get_component_var (tree var, bool imag_p)
{
  /* Each decl UID maps to two slots: 2*uid for the real part,
     2*uid+1 for the imaginary part.  */
  size_t decl_index = DECL_UID (var) * 2 + imag_p;
  tree ret = cvc_lookup (decl_index);

  if (ret == NULL)
    {
      ret = create_one_component_var (TREE_TYPE (TREE_TYPE (var)), var,
				      imag_p ? "CI" : "CR",
				      imag_p ? "$imag" : "$real",
				      imag_p ? IMAGPART_EXPR : REALPART_EXPR);
      cvc_insert (decl_index, ret);
    }

  return ret;
}

/* Retrieve a value for a complex component of SSA_NAME.  */

static tree
get_component_ssa_name (tree ssa_name, bool imag_p)
{
  complex_lattice_t lattice = find_lattice_value (ssa_name);
  size_t ssa_name_index;
  tree ret;

  /* If the lattice says the requested component is the one known to be
     zero, just return a zero constant of the scalar type.  */
  if (lattice == (imag_p ? ONLY_REAL : ONLY_IMAG))
    {
      tree inner_type = TREE_TYPE (TREE_TYPE (ssa_name));
      if (SCALAR_FLOAT_TYPE_P (inner_type))
	return build_real (inner_type, dconst0);
      else
	return build_int_cst (inner_type, 0);
    }

  /* As with decls: 2*version for real, 2*version+1 for imag.  */
  ssa_name_index = SSA_NAME_VERSION (ssa_name) * 2 + imag_p;
  ret = complex_ssa_name_components[ssa_name_index];
  if (ret == NULL)
    {
      if (SSA_NAME_VAR (ssa_name))
	ret = get_component_var (SSA_NAME_VAR (ssa_name), imag_p);
      else
	ret = TREE_TYPE (TREE_TYPE (ssa_name));
      ret = make_ssa_name (ret);

      /* Copy some properties from the original.  In particular, whether it
	 is used in an abnormal phi, and whether it's uninitialized.  */
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ret)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name);
      if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
	  && TREE_CODE (SSA_NAME_VAR (ssa_name)) == VAR_DECL)
	{
	  SSA_NAME_DEF_STMT (ret) = SSA_NAME_DEF_STMT (ssa_name);
	  set_ssa_default_def (cfun, SSA_NAME_VAR (ret), ret);
	}

      complex_ssa_name_components[ssa_name_index] = ret;
    }

  return ret;
}

/* Set a value for a complex component of SSA_NAME, return a
   gimple_seq of stuff that needs doing.  */

static gimple_seq
set_component_ssa_name (tree ssa_name, bool imag_p, tree value)
{
  complex_lattice_t lattice = find_lattice_value (ssa_name);
  size_t ssa_name_index;
  tree comp;
  gimple *last;
  gimple_seq list;

  /* We know the value must be zero, else there's a bug in our lattice
     analysis.  But the value may well be a variable known to contain
     zero.  We should be safe ignoring it.  */
  if (lattice == (imag_p ? ONLY_REAL : ONLY_IMAG))
    return NULL;

  /* If we've already assigned an SSA_NAME to this component, then this
     means that our walk of the basic blocks found a use before the set.
     This is fine.  Now we should create an initialization for the value
     we created earlier.  */
  ssa_name_index = SSA_NAME_VERSION (ssa_name) * 2 + imag_p;
  comp = complex_ssa_name_components[ssa_name_index];
  if (comp)
    ;

  /* If we've nothing assigned, and the value we're given is already stable,
     then install that as the value for this SSA_NAME.  This preemptively
     copy-propagates the value, which avoids unnecessary memory allocation.  */
  else if (is_gimple_min_invariant (value)
	   && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      complex_ssa_name_components[ssa_name_index] = value;
      return NULL;
    }
  else if (TREE_CODE (value) == SSA_NAME
	   && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      /* Replace an anonymous base value with the variable from cvc_lookup.
	 This should result in better debug info.  */
      if (!SSA_NAME_IS_DEFAULT_DEF (value)
	  && SSA_NAME_VAR (ssa_name)
	  && (!SSA_NAME_VAR (value) || DECL_IGNORED_P (SSA_NAME_VAR (value)))
	  && !DECL_IGNORED_P (SSA_NAME_VAR (ssa_name)))
	{
	  comp = get_component_var (SSA_NAME_VAR (ssa_name), imag_p);
	  replace_ssa_name_symbol (value, comp);
	}

      complex_ssa_name_components[ssa_name_index] = value;
      return NULL;
    }

  /* Finally, we need to stabilize the result by installing the value into
     a new ssa name.  */
  else
    comp = get_component_ssa_name (ssa_name, imag_p);

  /* Do all the work to assign VALUE to COMP.  */
  list = NULL;
  value = force_gimple_operand (value, &list, false, NULL);
  last = gimple_build_assign (comp, value);
  gimple_seq_add_stmt (&list, last);
  gcc_assert (SSA_NAME_DEF_STMT (comp) == last);

  return list;
}

/* Extract the real or imaginary part of a complex variable or constant.
   Make sure that it's a proper gimple_val and gimplify it if not.
   Emit any new code before gsi.
*/

static tree
extract_component (gimple_stmt_iterator *gsi, tree t, bool imagpart_p,
		   bool gimple_p, bool phiarg_p = false)
{
  switch (TREE_CODE (t))
    {
    case COMPLEX_CST:
      /* Constants: just pull out the stored component.  */
      return imagpart_p ? TREE_IMAGPART (t) : TREE_REALPART (t);

    case COMPLEX_EXPR:
      gcc_unreachable ();

    case BIT_FIELD_REF:
      {
	/* Narrow the bit-field reference to the requested half: same
	   base, scalar type, half the size, offset bumped for imag.  */
	tree inner_type = TREE_TYPE (TREE_TYPE (t));
	t = unshare_expr (t);
	TREE_TYPE (t) = inner_type;
	TREE_OPERAND (t, 1) = TYPE_SIZE (inner_type);
	if (imagpart_p)
	  TREE_OPERAND (t, 2) = size_binop (PLUS_EXPR, TREE_OPERAND (t, 2),
					    TYPE_SIZE (inner_type));
	if (gimple_p)
	  t = force_gimple_operand_gsi (gsi, t, true, NULL, true,
					GSI_SAME_STMT);
	return t;
      }

    case VAR_DECL:
    case RESULT_DECL:
    case PARM_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case VIEW_CONVERT_EXPR:
    case MEM_REF:
      {
	/* Memory references: wrap in REALPART_EXPR/IMAGPART_EXPR.  */
	tree inner_type = TREE_TYPE (TREE_TYPE (t));

	t = build1 ((imagpart_p ? IMAGPART_EXPR : REALPART_EXPR),
		    inner_type, unshare_expr (t));

	if (gimple_p)
	  t = force_gimple_operand_gsi (gsi, t, true, NULL, true,
					GSI_SAME_STMT);

	return t;
      }

    case SSA_NAME:
      t = get_component_ssa_name (t, imagpart_p);
      /* A component without a definition yet is only acceptable when
	 collecting PHI arguments; see update_phi_components.  */
      if (TREE_CODE (t) == SSA_NAME && SSA_NAME_DEF_STMT (t) == NULL)
	gcc_assert (phiarg_p);
      return t;

    default:
      gcc_unreachable ();
    }
}

/* Update the complex components of the ssa name on the lhs of STMT.  */

static void
update_complex_components (gimple_stmt_iterator *gsi, gimple *stmt, tree r,
			   tree i)
{
  tree lhs;
  gimple_seq list;

  lhs = gimple_get_lhs (stmt);

  list = set_component_ssa_name (lhs, false, r);
  if (list)
    gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);

  list = set_component_ssa_name (lhs, true, i);
  if (list)
    gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);
}

/* As above, but insert any needed initializations on edge E rather
   than after a statement.  */

static void
update_complex_components_on_edge (edge e, tree lhs, tree r, tree i)
{
  gimple_seq list;

  list = set_component_ssa_name (lhs, false, r);
  if (list)
    gsi_insert_seq_on_edge (e, list);

  list = set_component_ssa_name (lhs, true, i);
  if (list)
    gsi_insert_seq_on_edge (e, list);
}


/* Update an assignment to a complex variable in place.  */

static void
update_complex_assignment (gimple_stmt_iterator *gsi, tree r, tree i)
{
  gimple *old_stmt = gsi_stmt (*gsi);
  /* Rewrite the rhs as COMPLEX_EXPR <r, i>; the lhs is unchanged.  */
  gimple_assign_set_rhs_with_ops (gsi, COMPLEX_EXPR, r, i);
  gimple *stmt = gsi_stmt (*gsi);
  update_stmt (stmt);
  /* The lowered form may no longer throw; record the block for later
     EH cleanup if so.  */
  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
    bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);

  update_complex_components (gsi, gsi_stmt (*gsi), r, i);
}


/* Generate code at the entry point of the function to initialize the
   component variables for a complex parameter.
*/

static void
update_parameter_components (void)
{
  edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  tree parm;

  for (parm = DECL_ARGUMENTS (cfun->decl); parm ; parm = DECL_CHAIN (parm))
    {
      tree type = TREE_TYPE (parm);
      tree ssa_name, r, i;

      if (TREE_CODE (type) != COMPLEX_TYPE || !is_gimple_reg (parm))
	continue;

      type = TREE_TYPE (type);
      ssa_name = ssa_default_def (cfun, parm);
      if (!ssa_name)
	continue;

      r = build1 (REALPART_EXPR, type, ssa_name);
      i = build1 (IMAGPART_EXPR, type, ssa_name);
      update_complex_components_on_edge (entry_edge, ssa_name, r, i);
    }
}

/* Generate code to set the component variables of a complex variable
   to match the PHI statements in block BB.  */

static void
update_phi_components (basic_block bb)
{
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();

      if (is_complex_reg (gimple_phi_result (phi)))
	{
	  /* p[0] is the real-part PHI, p[1] the imag-part PHI; either may
	     stay NULL when the lattice proved that component zero.  */
	  gphi *p[2] = { NULL, NULL };
	  unsigned int i, j, n;
	  bool revisit_phi = false;

	  for (j = 0; j < 2; j++)
	    {
	      tree l = get_component_ssa_name (gimple_phi_result (phi), j > 0);
	      if (TREE_CODE (l) == SSA_NAME)
		p[j] = create_phi_node (l, bb);
	    }

	  for (i = 0, n = gimple_phi_num_args (phi); i < n; ++i)
	    {
	      tree comp, arg = gimple_phi_arg_def (phi, i);
	      for (j = 0; j < 2; j++)
		if (p[j])
		  {
		    comp = extract_component (NULL, arg, j > 0, false, true);
		    if (TREE_CODE (comp) == SSA_NAME
			&& SSA_NAME_DEF_STMT (comp) == NULL)
		      {
			/* For the benefit of any gimple simplification during
			   this pass that might walk SSA_NAME def stmts,
			   don't add SSA_NAMEs without definitions into the
			   PHI arguments, but put a decl in there instead
			   temporarily, and revisit this PHI later on.  */
			if (SSA_NAME_VAR (comp))
			  comp = SSA_NAME_VAR (comp);
			else
			  comp = create_tmp_reg (TREE_TYPE (comp),
						 get_name (comp));
			revisit_phi = true;
		      }
		    SET_PHI_ARG_DEF (p[j], i, comp);
		  }
	    }

	  if (revisit_phi)
	    {
	      /* Push the triplet (original, real, imag) for later fixup.  */
	      phis_to_revisit.safe_push (phi);
	      phis_to_revisit.safe_push (p[0]);
	      phis_to_revisit.safe_push (p[1]);
	    }
	}
    }
}

/* Expand a complex move to scalars.  */

static void
expand_complex_move (gimple_stmt_iterator *gsi, tree type)
{
  tree inner_type = TREE_TYPE (type);
  tree r, i, lhs, rhs;
  gimple *stmt = gsi_stmt (*gsi);

  if (is_gimple_assign (stmt))
    {
      lhs = gimple_assign_lhs (stmt);
      if (gimple_num_ops (stmt) == 2)
	rhs = gimple_assign_rhs1 (stmt);
      else
	rhs = NULL_TREE;
    }
  else if (is_gimple_call (stmt))
    {
      lhs = gimple_call_lhs (stmt);
      rhs = NULL_TREE;
    }
  else
    gcc_unreachable ();

  if (TREE_CODE (lhs) == SSA_NAME)
    {
      if (is_ctrl_altering_stmt (stmt))
	{
	  edge e;

	  /* The value is not assigned on the exception edges, so we need not
	     concern ourselves there.  We do need to update on the fallthru
	     edge.  Find it.  */
	  e = find_fallthru_edge (gsi_bb (*gsi)->succs);
	  if (!e)
	    gcc_unreachable ();

	  r = build1 (REALPART_EXPR, inner_type, lhs);
	  i = build1 (IMAGPART_EXPR, inner_type, lhs);
	  update_complex_components_on_edge (e, lhs, r, i);
	}
      else if (is_gimple_call (stmt)
	       || gimple_has_side_effects (stmt)
	       || gimple_assign_rhs_code (stmt) == PAREN_EXPR)
	{
	  /* Keep the statement; just record its components.  */
	  r = build1 (REALPART_EXPR, inner_type, lhs);
	  i = build1 (IMAGPART_EXPR, inner_type, lhs);
	  update_complex_components (gsi, stmt, r, i);
	}
      else
	{
	  if (gimple_assign_rhs_code (stmt) != COMPLEX_EXPR)
	    {
	      r = extract_component (gsi, rhs, 0, true);
	      i = extract_component (gsi, rhs, 1, true);
	    }
	  else
	    {
	      r = gimple_assign_rhs1 (stmt);
	      i = gimple_assign_rhs2 (stmt);
	    }
	  update_complex_assignment (gsi, r, i);
	}
    }
  else if (rhs && TREE_CODE (rhs) == SSA_NAME && !TREE_SIDE_EFFECTS (lhs))
    {
      /* Store of a complex register into memory: emit two scalar
	 component stores.  */
      tree x;
      gimple *t;
      location_t loc;

      loc = gimple_location (stmt);
      r = extract_component (gsi, rhs, 0, false);
      i = extract_component (gsi, rhs, 1, false);

      x = build1 (REALPART_EXPR, inner_type, unshare_expr (lhs));
      t = gimple_build_assign (x, r);
      gimple_set_location (t, loc);
      gsi_insert_before (gsi, t, GSI_SAME_STMT);

      if (stmt == gsi_stmt (*gsi))
	{
	  /* Reuse the original statement for the imaginary-part store.  */
	  x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs));
	  gimple_assign_set_lhs (stmt, x);
	  gimple_assign_set_rhs1 (stmt, i);
	}
      else
	{
	  x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs));
	  t = gimple_build_assign (x, i);
	  gimple_set_location (t, loc);
	  gsi_insert_before (gsi, t, GSI_SAME_STMT);

	  stmt = gsi_stmt (*gsi);
	  gcc_assert (gimple_code (stmt) == GIMPLE_RETURN);
	  gimple_return_set_retval (as_a <greturn *> (stmt), lhs);
	}

      update_stmt (stmt);
    }
}

/* Expand complex addition to scalars:
	a + b = (ar + br) + i(ai + bi)
	a - b = (ar - br) + i(ai +
bi)
*/

static void
expand_complex_addition (gimple_stmt_iterator *gsi, tree inner_type,
			 tree ar, tree ai, tree br, tree bi,
			 enum tree_code code,
			 complex_lattice_t al, complex_lattice_t bl)
{
  tree rr, ri;

  /* Dispatch on the pair of operand lattice values so that the known-zero
     component of either operand costs no arithmetic.  */
  switch (PAIR (al, bl))
    {
    case PAIR (ONLY_REAL, ONLY_REAL):
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = ai;
      break;

    case PAIR (ONLY_REAL, ONLY_IMAG):
      rr = ar;
      if (code == MINUS_EXPR)
	ri = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ai, bi);
      else
	ri = bi;
      break;

    case PAIR (ONLY_IMAG, ONLY_REAL):
      if (code == MINUS_EXPR)
	rr = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ar, br);
      else
	rr = br;
      ri = ai;
      break;

    case PAIR (ONLY_IMAG, ONLY_IMAG):
      rr = ar;
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
      break;

    case PAIR (VARYING, ONLY_REAL):
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = ai;
      break;

    case PAIR (VARYING, ONLY_IMAG):
      rr = ar;
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
      break;

    case PAIR (ONLY_REAL, VARYING):
      if (code == MINUS_EXPR)
	goto general;
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = bi;
      break;

    case PAIR (ONLY_IMAG, VARYING):
      if (code == MINUS_EXPR)
	goto general;
      rr = br;
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
      break;

    case PAIR (VARYING, VARYING):
    general:
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
      break;

    default:
      gcc_unreachable ();
    }

  update_complex_assignment (gsi, rr, ri);
}

/* Expand a complex multiplication or division to a libcall to the c99
   compliant routines.  TYPE is the complex type of the operation.
   If INPLACE_P replace the statement at GSI with
   the libcall and return NULL_TREE.  Else insert the call, assign its
   result to an output variable and return that variable.  If INPLACE_P
   is true then the statement being replaced should be an assignment
   statement.  */

static tree
expand_complex_libcall (gimple_stmt_iterator *gsi, tree type, tree ar, tree ai,
			tree br, tree bi, enum tree_code code, bool inplace_p)
{
  machine_mode mode;
  enum built_in_function bcode;
  tree fn, lhs;
  gcall *stmt;

  mode = TYPE_MODE (type);
  gcc_assert (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT);

  /* Select the __mul*c3/__div*c3 builtin matching the machine mode.  */
  if (code == MULT_EXPR)
    bcode = ((enum built_in_function)
	     (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
  else if (code == RDIV_EXPR)
    bcode = ((enum built_in_function)
	     (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
  else
    gcc_unreachable ();
  fn = builtin_decl_explicit (bcode);
  stmt = gimple_build_call (fn, 4, ar, ai, br, bi);

  if (inplace_p)
    {
      gimple *old_stmt = gsi_stmt (*gsi);
      gimple_call_set_nothrow (stmt, !stmt_could_throw_p (cfun, old_stmt));
      lhs = gimple_assign_lhs (old_stmt);
      gimple_call_set_lhs (stmt, lhs);
      gsi_replace (gsi, stmt, true);

      type = TREE_TYPE (type);
      if (stmt_can_throw_internal (cfun, stmt))
	{
	  /* The call may throw internally; record the components on the
	     non-EH successor, splitting the edge to get a place for the
	     component assignments.  */
	  edge_iterator ei;
	  edge e;
	  FOR_EACH_EDGE (e, ei, gimple_bb (stmt)->succs)
	    if (!(e->flags & EDGE_EH))
	      break;
	  basic_block bb = split_edge (e);
	  gimple_stmt_iterator gsi2 = gsi_start_bb (bb);
	  update_complex_components (&gsi2, stmt,
				     build1 (REALPART_EXPR, type, lhs),
				     build1 (IMAGPART_EXPR, type, lhs));
	  return NULL_TREE;
	}
      else
	update_complex_components (gsi, stmt,
				   build1 (REALPART_EXPR, type, lhs),
				   build1 (IMAGPART_EXPR, type, lhs));
      SSA_NAME_DEF_STMT (lhs) = stmt;
      return NULL_TREE;
    }

  gimple_call_set_nothrow (stmt, true);
  lhs = make_ssa_name (type);
  gimple_call_set_lhs (stmt, lhs);
  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

  return lhs;
}

/* Perform a complex multiplication on two complex constants A, B represented
   by AR, AI, BR, BI of type TYPE.
   The operation we want is: a * b = (ar*br - ai*bi) + i(ar*bi + br*ai).
   Insert the GIMPLE statements into GSI.  Store the real and imaginary
   components of the result into RR and RI.  */

static void
expand_complex_multiplication_components (gimple_stmt_iterator *gsi,
					  tree type, tree ar, tree ai,
					  tree br, tree bi,
					  tree *rr, tree *ri)
{
  tree t1, t2, t3, t4;

  t1 = gimplify_build2 (gsi, MULT_EXPR, type, ar, br);
  t2 = gimplify_build2 (gsi, MULT_EXPR, type, ai, bi);
  t3 = gimplify_build2 (gsi, MULT_EXPR, type, ar, bi);

  /* Avoid expanding redundant multiplication for the common
     case of squaring a complex number.  */
  if (ar == br && ai == bi)
    t4 = t3;
  else
    t4 = gimplify_build2 (gsi, MULT_EXPR, type, ai, br);

  *rr = gimplify_build2 (gsi, MINUS_EXPR, type, t1, t2);
  *ri = gimplify_build2 (gsi, PLUS_EXPR, type, t3, t4);
}

/* Expand complex multiplication to scalars:
	a * b = (ar*br - ai*bi) + i(ar*bi + br*ai)
*/

static void
expand_complex_multiplication (gimple_stmt_iterator *gsi, tree type,
			       tree ar, tree ai, tree br, tree bi,
			       complex_lattice_t al, complex_lattice_t bl)
{
  tree rr, ri;
  tree inner_type = TREE_TYPE (type);

  /* Canonicalize so AL >= BL; multiplication is commutative, and this
     halves the number of PAIR cases below.  */
  if (al < bl)
    {
      complex_lattice_t tl;
      rr = ar, ar = br, br = rr;
      ri = ai, ai = bi, bi = ri;
      tl = al, al = bl, bl = tl;
    }

  switch (PAIR (al, bl))
    {
    case PAIR (ONLY_REAL, ONLY_REAL):
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
      ri = ai;
      break;

    case PAIR (ONLY_IMAG, ONLY_REAL):
      rr = ar;
      /* Multiplying by i*1.0 just swaps components; skip the multiply.  */
      if (TREE_CODE (ai) == REAL_CST
	  && real_identical (&TREE_REAL_CST (ai), &dconst1))
	ri = br;
      else
	ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
      break;

    case PAIR (ONLY_IMAG, ONLY_IMAG):
      /* (i*ai)*(i*bi) = -(ai*bi): purely real, negated.  */
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
      rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, rr);
      ri = ar;
      break;

    case PAIR (VARYING, ONLY_REAL):
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
      ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
      break;

    case PAIR (VARYING, ONLY_IMAG):
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
      rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, rr);
      ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);
      break;

    case PAIR (VARYING, VARYING):
      if (flag_complex_method == 2 && SCALAR_FLOAT_TYPE_P (inner_type))
	{
	  /* If optimizing for size or not at all just do a libcall.
	     Same if there are exception-handling edges or signaling NaNs.  */
	  if (optimize == 0 || optimize_bb_for_size_p (gsi_bb (*gsi))
	      || stmt_can_throw_internal (cfun, gsi_stmt (*gsi))
	      || flag_signaling_nans)
	    {
	      expand_complex_libcall (gsi, type, ar, ai, br, bi,
				      MULT_EXPR, true);
	      return;
	    }

	  if (!HONOR_NANS (inner_type))
	    {
	      /* If we are not worrying about NaNs expand to
		 (ar*br - ai*bi) + i(ar*bi + br*ai) directly.  */
	      expand_complex_multiplication_components (gsi, inner_type,
							ar, ai, br, bi,
							&rr, &ri);
	      break;
	    }

	  /* Else, expand x = a * b into
	     x = (ar*br - ai*bi) + i(ar*bi + br*ai);
	     if (isunordered (__real__ x, __imag__ x))
	       x = __muldc3 (a, b);  */

	  tree tmpr, tmpi;
	  expand_complex_multiplication_components (gsi, inner_type, ar, ai,
						    br, bi, &tmpr, &tmpi);

	  gimple *check
	    = gimple_build_cond (UNORDERED_EXPR, tmpr, tmpi,
				 NULL_TREE, NULL_TREE);

	  basic_block orig_bb = gsi_bb (*gsi);
	  /* We want to keep track of the original complex multiplication
	     statement as we're going to modify it later in
	     update_complex_assignment.  Make sure that insert_cond_bb leaves
	     that statement in the join block.  */
	  gsi_prev (gsi);
	  basic_block cond_bb
	    = insert_cond_bb (gsi_bb (*gsi), gsi_stmt (*gsi), check,
			      profile_probability::very_unlikely ());

	  gimple_stmt_iterator cond_bb_gsi = gsi_last_bb (cond_bb);
	  gsi_insert_after (&cond_bb_gsi, gimple_build_nop (), GSI_NEW_STMT);

	  tree libcall_res
	    = expand_complex_libcall (&cond_bb_gsi, type, ar, ai, br,
				      bi, MULT_EXPR, false);
	  tree cond_real = gimplify_build1 (&cond_bb_gsi, REALPART_EXPR,
					    inner_type, libcall_res);
	  tree cond_imag = gimplify_build1 (&cond_bb_gsi, IMAGPART_EXPR,
					    inner_type, libcall_res);

	  basic_block join_bb = single_succ_edge (cond_bb)->dest;
	  *gsi = gsi_start_nondebug_after_labels_bb (join_bb);

	  /* We have a conditional block with some assignments in cond_bb.
	     Wire up the PHIs to wrap up.  */
	  rr = make_ssa_name (inner_type);
	  ri = make_ssa_name (inner_type);
	  edge cond_to_join = single_succ_edge (cond_bb);
	  edge orig_to_join = find_edge (orig_bb, join_bb);

	  gphi *real_phi = create_phi_node (rr, gsi_bb (*gsi));
	  add_phi_arg (real_phi, cond_real, cond_to_join, UNKNOWN_LOCATION);
	  add_phi_arg (real_phi, tmpr, orig_to_join, UNKNOWN_LOCATION);

	  gphi *imag_phi = create_phi_node (ri, gsi_bb (*gsi));
	  add_phi_arg (imag_phi, cond_imag, cond_to_join, UNKNOWN_LOCATION);
	  add_phi_arg (imag_phi, tmpi, orig_to_join, UNKNOWN_LOCATION);
	}
      else
	/* If we are not worrying about NaNs expand to
	   (ar*br - ai*bi) + i(ar*bi + br*ai) directly.  */
	expand_complex_multiplication_components (gsi, inner_type, ar, ai,
						  br, bi, &rr, &ri);
      break;

    default:
      gcc_unreachable ();
    }

  update_complex_assignment (gsi, rr, ri);
}

/* Keep this algorithm in sync with fold-const.c:const_binop().

   Expand complex division to scalars, straightforward algorithm.
	a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
	    t = br*br + bi*bi
*/

static void
expand_complex_div_straight (gimple_stmt_iterator *gsi, tree inner_type,
			     tree ar, tree ai, tree br, tree bi,
			     enum tree_code code)
{
  tree rr, ri, div, t1, t2, t3;

  /* t = br*br + bi*bi.  */
  t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, br, br);
  t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, bi, bi);
  div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, t2);

  /* rr = (ar*br + ai*bi) / t.  CODE is the scalar division code
     (e.g. RDIV_EXPR or TRUNC_DIV_EXPR) taken from the original stmt.  */
  t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
  t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
  t3 = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, t2);
  rr = gimplify_build2 (gsi, code, inner_type, t3, div);

  /* ri = (ai*br - ar*bi) / t.  */
  t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
  t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);
  t3 = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, t2);
  ri = gimplify_build2 (gsi, code, inner_type, t3, div);

  update_complex_assignment (gsi, rr, ri);
}

/* Keep this algorithm in sync with fold-const.c:const_binop().

   Expand complex division to scalars, modified algorithm to minimize
   overflow with wide input ranges.  */

static void
expand_complex_div_wide (gimple_stmt_iterator *gsi, tree inner_type,
			 tree ar, tree ai, tree br, tree bi,
			 enum tree_code code)
{
  tree rr, ri, ratio, div, t1, t2, tr, ti, compare;
  basic_block bb_cond, bb_true, bb_false, bb_join;
  gimple *stmt;

  /* Examine |br| < |bi|, and branch.  */
  t1 = gimplify_build1 (gsi, ABS_EXPR, inner_type, br);
  t2 = gimplify_build1 (gsi, ABS_EXPR, inner_type, bi);
  compare = fold_build2_loc (gimple_location (gsi_stmt (*gsi)),
			     LT_EXPR, boolean_type_node, t1, t2);
  STRIP_NOPS (compare);

  bb_cond = bb_true = bb_false = bb_join = NULL;
  rr = ri = tr = ti = NULL;
  /* If the comparison didn't fold to a constant, emit a runtime branch;
     otherwise only one of the two arms below is generated.  */
  if (TREE_CODE (compare) != INTEGER_CST)
    {
      edge e;
      gimple *stmt;	/* NOTE: intentionally shadows the outer STMT.  */
      tree cond, tmp;

      tmp = make_ssa_name (boolean_type_node);
      stmt = gimple_build_assign (tmp, compare);
      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      cond = fold_build2_loc (gimple_location (stmt),
			      EQ_EXPR, boolean_type_node, tmp, boolean_true_node);
      stmt = gimple_build_cond_from_tree (cond, NULL_TREE, NULL_TREE);
      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      /* Split the original block, and create the TRUE and FALSE blocks.  */
      e = split_block (gsi_bb (*gsi), stmt);
      bb_cond = e->src;
      bb_join = e->dest;
      bb_true = create_empty_bb (bb_cond);
      bb_false = create_empty_bb (bb_true);
      bb_true->count = bb_false->count
	= bb_cond->count.apply_probability (profile_probability::even ());

      /* Wire the blocks together.  */
      e->flags = EDGE_TRUE_VALUE;
      /* TODO: With value profile we could add a histogram to determine real
	 branch outcome.  */
      e->probability = profile_probability::even ();
      redirect_edge_succ (e, bb_true);
      edge e2 = make_edge (bb_cond, bb_false, EDGE_FALSE_VALUE);
      e2->probability = profile_probability::even ();
      make_single_succ_edge (bb_true, bb_join, EDGE_FALLTHRU);
      make_single_succ_edge (bb_false, bb_join, EDGE_FALLTHRU);
      add_bb_to_loop (bb_true, bb_cond->loop_father);
      add_bb_to_loop (bb_false, bb_cond->loop_father);

      /* Update dominance info.  Note that bb_join's data was
	 updated by split_block.  */
      if (dom_info_available_p (CDI_DOMINATORS))
	{
	  set_immediate_dominator (CDI_DOMINATORS, bb_true, bb_cond);
	  set_immediate_dominator (CDI_DOMINATORS, bb_false, bb_cond);
	}

      rr = create_tmp_reg (inner_type);
      ri = create_tmp_reg (inner_type);
    }

  /* In the TRUE branch, we compute
      ratio = br/bi;
      div = (br * ratio) + bi;
      tr = (ar * ratio) + ai;
      ti = (ai * ratio) - ar;
      tr = tr / div;
      ti = ti / div;  */
  if (bb_true || integer_nonzerop (compare))
    {
      if (bb_true)
	{
	  *gsi = gsi_last_bb (bb_true);
	  gsi_insert_after (gsi, gimple_build_nop (), GSI_NEW_STMT);
	}

      ratio = gimplify_build2 (gsi, code, inner_type, br, bi);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, br, ratio);
      div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, bi);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, ratio);
      tr = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, ai);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, ratio);
      ti = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, ar);

      tr = gimplify_build2 (gsi, code, inner_type, tr, div);
      ti = gimplify_build2 (gsi, code, inner_type, ti, div);

      if (bb_true)
	{
	  /* Copy the results into the shared temporaries and drop the
	     nop placeholder inserted above.  */
	  stmt = gimple_build_assign (rr, tr);
	  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	  stmt = gimple_build_assign (ri, ti);
	  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	  gsi_remove (gsi, true);
	}
    }

  /* In the FALSE branch, we compute
      ratio = d/c;
      divisor = (d * ratio) + c;
      tr = (b * ratio) + a;
      ti = b - (a * ratio);
      tr = tr / div;
      ti = ti / div;  */
  if (bb_false || integer_zerop (compare))
    {
      if (bb_false)
	{
	  *gsi = gsi_last_bb (bb_false);
	  gsi_insert_after (gsi, gimple_build_nop (), GSI_NEW_STMT);
	}

      ratio = gimplify_build2 (gsi, code, inner_type, bi, br);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, bi, ratio);
      div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, br);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, ratio);
      tr = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, ar);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, ratio);
      ti = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ai, t1);

      tr = gimplify_build2 (gsi, code, inner_type, tr, div);
      ti = gimplify_build2 (gsi, code, inner_type, ti, div);

      if (bb_false)
	{
	  stmt = gimple_build_assign (rr, tr);
	  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	  stmt = gimple_build_assign (ri, ti);
	  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	  gsi_remove (gsi, true);
	}
    }

  if (bb_join)
    *gsi = gsi_start_bb (bb_join);
  else
    /* The comparison folded, so only one arm ran; its results are final.  */
    rr = tr, ri = ti;

  update_complex_assignment (gsi, rr, ri);
}

/* Expand complex division to scalars.  AL and BL are the lattice values
   of the operands; degenerate pairs avoid the general algorithms.  */

static void
expand_complex_division (gimple_stmt_iterator *gsi, tree type,
			 tree ar, tree ai, tree br, tree bi,
			 enum tree_code code,
			 complex_lattice_t al, complex_lattice_t bl)
{
  tree rr, ri;

  tree inner_type = TREE_TYPE (type);
  switch (PAIR (al, bl))
    {
    case PAIR (ONLY_REAL, ONLY_REAL):
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = ai;
      break;

    case PAIR (ONLY_REAL, ONLY_IMAG):
      rr = ai;
      ri = gimplify_build2 (gsi, code, inner_type, ar, bi);
      ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ri);
      break;

    case PAIR (ONLY_IMAG, ONLY_REAL):
      rr = ar;
      ri = gimplify_build2 (gsi, code, inner_type, ai, br);
      break;

    case PAIR (ONLY_IMAG, ONLY_IMAG):
      rr = gimplify_build2 (gsi, code, inner_type, ai, bi);
      ri = ar;
      break;

    case PAIR (VARYING, ONLY_REAL):
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = gimplify_build2 (gsi, code, inner_type, ai, br);
      break;

    case PAIR (VARYING, ONLY_IMAG):
      rr = gimplify_build2 (gsi, code, inner_type, ai, bi);
      ri = gimplify_build2 (gsi, code, inner_type, ar, bi);
      ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ri);
      break;

    case PAIR (ONLY_REAL, VARYING):
    case PAIR (ONLY_IMAG, VARYING):
    case PAIR (VARYING, VARYING):
      /* General case: dispatch on -fcx-* method selection.  */
      switch (flag_complex_method)
	{
	case 0:
	  /* straightforward implementation of complex divide acceptable.  */
	  expand_complex_div_straight (gsi, inner_type, ar, ai, br, bi, code);
	  break;

	case 2:
	  if (SCALAR_FLOAT_TYPE_P (inner_type))
	    {
	      expand_complex_libcall (gsi, type, ar, ai, br, bi, code, true);
	      break;
	    }
	  /* FALLTHRU */

	case 1:
	  /* wide ranges of inputs must work for complex divide.  */
	  expand_complex_div_wide (gsi, inner_type, ar, ai, br, bi, code);
	  break;

	default:
	  gcc_unreachable ();
	}
      return;

    default:
      gcc_unreachable ();
    }

  update_complex_assignment (gsi, rr, ri);
}

/* Expand complex negation to scalars:
	-a = (-ar) + i(-ai)
*/

static void
expand_complex_negation (gimple_stmt_iterator *gsi, tree inner_type,
			 tree ar, tree ai)
{
  tree rr, ri;

  rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ar);
  ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ai);

  update_complex_assignment (gsi, rr, ri);
}

/* Expand complex conjugate to scalars:
	~a = (ar) + i(-ai)
*/

static void
expand_complex_conjugate (gimple_stmt_iterator *gsi, tree inner_type,
			  tree ar, tree ai)
{
  tree ri;

  ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ai);

  update_complex_assignment (gsi, ar, ri);
}

/* Expand complex comparison (EQ or NE only).
*/

static void
expand_complex_comparison (gimple_stmt_iterator *gsi, tree ar, tree ai,
			   tree br, tree bi, enum tree_code code)
{
  tree cr, ci, cc, type;
  gimple *stmt;

  /* a OP b decomposes to (ar OP br) combined with (ai OP bi):
     AND for equality, OR for inequality.  */
  cr = gimplify_build2 (gsi, code, boolean_type_node, ar, br);
  ci = gimplify_build2 (gsi, code, boolean_type_node, ai, bi);
  cc = gimplify_build2 (gsi,
			(code == EQ_EXPR ? TRUTH_AND_EXPR : TRUTH_OR_EXPR),
			boolean_type_node, cr, ci);

  stmt = gsi_stmt (*gsi);

  /* Splice the combined boolean back into whatever consumed the
     original complex comparison.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_RETURN:
      {
	greturn *return_stmt = as_a <greturn *> (stmt);
	type = TREE_TYPE (gimple_return_retval (return_stmt));
	gimple_return_set_retval (return_stmt, fold_convert (type, cc));
      }
      break;

    case GIMPLE_ASSIGN:
      type = TREE_TYPE (gimple_assign_lhs (stmt));
      gimple_assign_set_rhs_from_tree (gsi, fold_convert (type, cc));
      stmt = gsi_stmt (*gsi);
      break;

    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	gimple_cond_set_code (cond_stmt, EQ_EXPR);
	gimple_cond_set_lhs (cond_stmt, cc);
	gimple_cond_set_rhs (cond_stmt, boolean_true_node);
      }
      break;

    default:
      gcc_unreachable ();
    }

  update_stmt (stmt);
  /* The scalarized comparison can no longer throw; record the block for
     EH edge cleanup if the old stmt had an EH region.  */
  if (maybe_clean_eh_stmt (stmt))
    bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
}

/* Expand inline asm that sets some complex SSA_NAMEs.
*/

static void
expand_complex_asm (gimple_stmt_iterator *gsi)
{
  gasm *stmt = as_a <gasm *> (gsi_stmt (*gsi));
  unsigned int i;

  /* For each complex output of the asm, record component SSA names so
     later uses of the real/imag parts can be rewritten.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
    {
      tree link = gimple_asm_output_op (stmt, i);
      tree op = TREE_VALUE (link);
      if (TREE_CODE (op) == SSA_NAME
	  && TREE_CODE (TREE_TYPE (op)) == COMPLEX_TYPE)
	{
	  tree type = TREE_TYPE (op);
	  tree inner_type = TREE_TYPE (type);
	  tree r = build1 (REALPART_EXPR, inner_type, op);
	  /* NOTE: this I shadows the loop counter; it is only live inside
	     this block, so the loop increment still sees the counter.  */
	  tree i = build1 (IMAGPART_EXPR, inner_type, op);
	  gimple_seq list = set_component_ssa_name (op, false, r);

	  if (list)
	    gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);

	  list = set_component_ssa_name (op, true, i);
	  if (list)
	    gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);
	}
    }
}

/* Process one statement.  If we identify a complex operation, expand it.  */

static void
expand_complex_operations_1 (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree type, inner_type, lhs;
  tree ac, ar, ai, bc, br, bi;
  complex_lattice_t al, bl;
  enum tree_code code;

  if (gimple_code (stmt) == GIMPLE_ASM)
    {
      expand_complex_asm (gsi);
      return;
    }

  lhs = gimple_get_lhs (stmt);
  if (!lhs && gimple_code (stmt) != GIMPLE_COND)
    return;

  type = TREE_TYPE (gimple_op (stmt, 0));
  code = gimple_expr_code (stmt);

  /* Initial filter for operations we handle.  */
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case RDIV_EXPR:
    case NEGATE_EXPR:
    case CONJ_EXPR:
      if (TREE_CODE (type) != COMPLEX_TYPE)
	return;
      inner_type = TREE_TYPE (type);
      break;

    case EQ_EXPR:
    case NE_EXPR:
      /* Note, both GIMPLE_ASSIGN and GIMPLE_COND may have an EQ_EXPR
	 subcode, so we need to access the operands using gimple_op.  */
      inner_type = TREE_TYPE (gimple_op (stmt, 1));
      if (TREE_CODE (inner_type) != COMPLEX_TYPE)
	return;
      break;

    default:
      {
	tree rhs;

	/* GIMPLE_COND may also fallthru here, but we do not need to
	   do anything with it.  */
	if (gimple_code (stmt) == GIMPLE_COND)
	  return;

	if (TREE_CODE (type) == COMPLEX_TYPE)
	  expand_complex_move (gsi, type);
	else if (is_gimple_assign (stmt)
		 && (gimple_assign_rhs_code (stmt) == REALPART_EXPR
		     || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR)
		 && TREE_CODE (lhs) == SSA_NAME)
	  {
	    /* Rewrite a scalar read of one component into a direct use of
	       the tracked component SSA name.  */
	    rhs = gimple_assign_rhs1 (stmt);
	    rhs = extract_component (gsi, TREE_OPERAND (rhs, 0),
				     gimple_assign_rhs_code (stmt)
				       == IMAGPART_EXPR,
				     false);
	    gimple_assign_set_rhs_from_tree (gsi, rhs);
	    stmt = gsi_stmt (*gsi);
	    update_stmt (stmt);
	  }
      }
      return;
    }

  /* Extract the components of the two complex values.  Make sure and
     handle the common case of the same value used twice specially.  */
  if (is_gimple_assign (stmt))
    {
      ac = gimple_assign_rhs1 (stmt);
      bc = (gimple_num_ops (stmt) > 2) ? gimple_assign_rhs2 (stmt) : NULL;
    }
  /* GIMPLE_CALL cannot get here.  */
  else
    {
      ac = gimple_cond_lhs (stmt);
      bc = gimple_cond_rhs (stmt);
    }

  ar = extract_component (gsi, ac, false, true);
  ai = extract_component (gsi, ac, true, true);

  if (ac == bc)
    br = ar, bi = ai;
  else if (bc)
    {
      br = extract_component (gsi, bc, 0, true);
      bi = extract_component (gsi, bc, 1, true);
    }
  else
    br = bi = NULL_TREE;

  /* Treat an UNINITIALIZED lattice value as VARYING (conservative).  */
  al = find_lattice_value (ac);
  if (al == UNINITIALIZED)
    al = VARYING;

  if (TREE_CODE_CLASS (code) == tcc_unary)
    bl = UNINITIALIZED;
  else if (ac == bc)
    bl = al;
  else
    {
      bl = find_lattice_value (bc);
      if (bl == UNINITIALIZED)
	bl = VARYING;
    }

  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      expand_complex_addition (gsi, inner_type, ar, ai, br, bi, code, al, bl);
      break;

    case MULT_EXPR:
      expand_complex_multiplication (gsi, type, ar, ai, br, bi, al, bl);
      break;

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case RDIV_EXPR:
      expand_complex_division (gsi, type, ar, ai, br, bi, code, al, bl);
      break;

    case NEGATE_EXPR:
      expand_complex_negation (gsi, inner_type, ar, ai);
      break;

    case CONJ_EXPR:
      expand_complex_conjugate (gsi, inner_type, ar, ai);
      break;

    case EQ_EXPR:
    case NE_EXPR:
      expand_complex_comparison (gsi, ar, ai, br, bi, code);
      break;

    default:
      gcc_unreachable ();
    }
}


/* Entry point for complex operation lowering during optimization.
*/

static unsigned int
tree_lower_complex (void)
{
  gimple_stmt_iterator gsi;
  basic_block bb;
  int n_bbs, i;
  int *rpo;

  if (!init_dont_simulate_again ())
    return 0;

  complex_lattice_values.create (num_ssa_names);
  complex_lattice_values.safe_grow_cleared (num_ssa_names);

  /* Run the lattice propagation to discover degenerate (only-real /
     only-imag) complex values before expanding anything.  */
  init_parameter_lattice_values ();
  class complex_propagate complex_propagate;
  complex_propagate.ssa_propagate ();

  need_eh_cleanup = BITMAP_ALLOC (NULL);

  complex_variable_components = new int_tree_htab_type (10);

  complex_ssa_name_components.create (2 * num_ssa_names);
  complex_ssa_name_components.safe_grow_cleared (2 * num_ssa_names);

  update_parameter_components ();

  /* Walk blocks in reverse post-order so defs are seen before uses.  */
  rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
  for (i = 0; i < n_bbs; i++)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
      if (!bb)
	continue;
      update_phi_components (bb);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	expand_complex_operations_1 (&gsi);
    }

  free (rpo);

  /* phis_to_revisit holds triplets (complex PHI, real PHI, imag PHI);
     patch up component PHI args that could not be resolved during the
     main walk.  */
  if (!phis_to_revisit.is_empty ())
    {
      unsigned int n = phis_to_revisit.length ();
      for (unsigned int j = 0; j < n; j += 3)
	for (unsigned int k = 0; k < 2; k++)
	  if (gphi *phi = phis_to_revisit[j + k + 1])
	    {
	      unsigned int m = gimple_phi_num_args (phi);
	      for (unsigned int l = 0; l < m; ++l)
		{
		  tree op = gimple_phi_arg_def (phi, l);
		  if (TREE_CODE (op) == SSA_NAME
		      || is_gimple_min_invariant (op))
		    continue;
		  tree arg = gimple_phi_arg_def (phis_to_revisit[j], l);
		  op = extract_component (NULL, arg, k > 0, false, false);
		  SET_PHI_ARG_DEF (phi, l, op);
		}
	    }
      phis_to_revisit.release ();
    }

  gsi_commit_edge_inserts ();

  unsigned todo
    = gimple_purge_all_dead_eh_edges (need_eh_cleanup) ? TODO_cleanup_cfg : 0;
  BITMAP_FREE (need_eh_cleanup);

  delete complex_variable_components;
  complex_variable_components = NULL;
  complex_ssa_name_components.release ();
  complex_lattice_values.release ();
  return todo;
}

namespace {

const pass_data pass_data_lower_complex =
{
  GIMPLE_PASS, /* type */
  "cplxlower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa, /* properties_required */
  PROP_gimple_lcx, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_lower_complex : public gimple_opt_pass
{
public:
  pass_lower_complex (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_complex, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_lower_complex (m_ctxt); }
  virtual unsigned int execute (function *) { return tree_lower_complex (); }

}; // class pass_lower_complex

} // anon namespace

gimple_opt_pass *
make_pass_lower_complex (gcc::context *ctxt)
{
  return new pass_lower_complex (ctxt);
}


namespace {

const pass_data pass_data_lower_complex_O0 =
{
  GIMPLE_PASS, /* type */
  "cplxlower0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_gimple_lcx, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_lower_complex_O0 : public gimple_opt_pass
{
public:
  pass_lower_complex_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_complex_O0, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *fun)
  {
    /* With errors, normal optimization passes are not run.  If we don't
       lower complex operations at all, rtl expansion will abort.  */
    return !(fun->curr_properties & PROP_gimple_lcx);
  }

  virtual unsigned int execute (function *) { return tree_lower_complex (); }

}; // class pass_lower_complex_O0

} // anon namespace

gimple_opt_pass *
make_pass_lower_complex_O0 (gcc::context *ctxt)
{
  return new pass_lower_complex_O0 (ctxt);
}