xref: /openbsd-src/gnu/gcc/gcc/tree-complex.c (revision 404b540a9034ac75a6199ad1a32d1bbc7a0d4210)
1 /* Lower complex number operations to scalar operations.
2    Copyright (C) 2004, 2005 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 2, or (at your option) any
9 later version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING.  If not, write to the Free
18 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
19 02110-1301, USA.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "real.h"
28 #include "flags.h"
29 #include "tree-flow.h"
30 #include "tree-gimple.h"
31 #include "tree-iterator.h"
32 #include "tree-pass.h"
33 #include "tree-ssa-propagate.h"
34 #include "diagnostic.h"
35 
36 
37 /* For each complex ssa name, a lattice value.  We're interested in finding
38    out whether a complex number is degenerate in some way, having only real
39    or only complex parts.  */
40 
41 typedef enum
42 {
43   UNINITIALIZED = 0,
44   ONLY_REAL = 1,
45   ONLY_IMAG = 2,
46   VARYING = 3
47 } complex_lattice_t;
48 
49 #define PAIR(a, b)  ((a) << 2 | (b))
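/* Illustration: with this encoding, ONLY_REAL | ONLY_IMAG == VARYING and
   UNINITIALIZED | x == x, which is why bitwise IOR models both addition
   and the PHI meet below.  PAIR packs two lattice values into a single
   switchable constant, e.g. PAIR (ONLY_REAL, ONLY_IMAG) == (1 << 2 | 2)
   == 6.  */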
50 
51 DEF_VEC_I(complex_lattice_t);
52 DEF_VEC_ALLOC_I(complex_lattice_t, heap);
53 
54 static VEC(complex_lattice_t, heap) *complex_lattice_values;
55 
56 /* For each complex variable, a pair of variables for the components exists in
57    the hashtable.  */
58 static htab_t complex_variable_components;
59 
60 /* For each complex SSA_NAME, a pair of ssa names for the components.  */
61 static VEC(tree, heap) *complex_ssa_name_components;
62 
63 /* Lookup UID in the complex_variable_components hashtable and return the
64    associated tree.  */
65 static tree
66 cvc_lookup (unsigned int uid)
67 {
68   struct int_tree_map *h, in;
69   in.uid = uid;
70   h = htab_find_with_hash (complex_variable_components, &in, uid);
71   return h ? h->to : NULL;
72 }
73 
74 /* Insert the pair UID, TO into the complex_variable_components hashtable.  */
75 
76 static void
77 cvc_insert (unsigned int uid, tree to)
78 {
79   struct int_tree_map *h;
80   void **loc;
81 
82   h = XNEW (struct int_tree_map);
83   h->uid = uid;
84   h->to = to;
85   loc = htab_find_slot_with_hash (complex_variable_components, h,
86 				  uid, INSERT);
87   *(struct int_tree_map **) loc = h;
88 }
89 
90 /* Return true if T is not a zero constant.  In the case of real values,
91    we're only interested in +0.0.  */
92 
93 static int
94 some_nonzerop (tree t)
95 {
96   int zerop = false;
97 
98   if (TREE_CODE (t) == REAL_CST)
99     zerop = REAL_VALUES_IDENTICAL (TREE_REAL_CST (t), dconst0);
100   else if (TREE_CODE (t) == INTEGER_CST)
101     zerop = integer_zerop (t);
102 
103   return !zerop;
104 }
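/* Note that only +0.0 is treated as zero above; a literal -0.0 is reported
   as nonzero, presumably so that the sign of zero is not silently dropped
   by the lattice (e.g. -0.0 + 0.0 evaluates to +0.0, while -0.0 itself
   does not).  */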
105 
106 /* Compute a lattice value from T.  It may be a gimple_val, or, as a
107    special exception, a COMPLEX_EXPR.  */
108 
109 static complex_lattice_t
110 find_lattice_value (tree t)
111 {
112   tree real, imag;
113   int r, i;
114   complex_lattice_t ret;
115 
116   switch (TREE_CODE (t))
117     {
118     case SSA_NAME:
119       return VEC_index (complex_lattice_t, complex_lattice_values,
120 			SSA_NAME_VERSION (t));
121 
122     case COMPLEX_CST:
123       real = TREE_REALPART (t);
124       imag = TREE_IMAGPART (t);
125       break;
126 
127     case COMPLEX_EXPR:
128       real = TREE_OPERAND (t, 0);
129       imag = TREE_OPERAND (t, 1);
130       break;
131 
132     default:
133       gcc_unreachable ();
134     }
135 
136   r = some_nonzerop (real);
137   i = some_nonzerop (imag);
138   ret = r*ONLY_REAL + i*ONLY_IMAG;
139 
140   /* ??? On occasion we could do better than mapping 0+0i to real, but we
141      certainly don't want to leave it UNINITIALIZED, which eventually gets
142      mapped to VARYING.  */
143   if (ret == UNINITIALIZED)
144     ret = ONLY_REAL;
145 
146   return ret;
147 }
148 
149 /* Determine if LHS is something for which we're interested in seeing
150    simulation results.  */
151 
152 static bool
153 is_complex_reg (tree lhs)
154 {
155   return TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE && is_gimple_reg (lhs);
156 }
157 
158 /* Mark the incoming parameters to the function as VARYING.  */
159 
160 static void
161 init_parameter_lattice_values (void)
162 {
163   tree parm;
164 
165   for (parm = DECL_ARGUMENTS (cfun->decl); parm ; parm = TREE_CHAIN (parm))
166     if (is_complex_reg (parm) && var_ann (parm) != NULL)
167       {
168 	tree ssa_name = default_def (parm);
169 	VEC_replace (complex_lattice_t, complex_lattice_values,
170 		     SSA_NAME_VERSION (ssa_name), VARYING);
171       }
172 }
173 
174 /* Initialize DONT_SIMULATE_AGAIN for each stmt and phi.  Return false if
175    we found no statements we want to simulate, and thus there's nothing for
176    the entire pass to do.  */
177 
178 static bool
179 init_dont_simulate_again (void)
180 {
181   basic_block bb;
182   block_stmt_iterator bsi;
183   tree phi;
184   bool saw_a_complex_op = false;
185 
186   FOR_EACH_BB (bb)
187     {
188       for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
189 	DONT_SIMULATE_AGAIN (phi) = !is_complex_reg (PHI_RESULT (phi));
190 
191       for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
192 	{
193 	  tree orig_stmt, stmt, rhs = NULL;
194 	  bool dsa;
195 
196 	  orig_stmt = stmt = bsi_stmt (bsi);
197 
198 	  /* Most control-altering statements must be initially
199 	     simulated, else we won't cover the entire cfg.  */
200 	  dsa = !stmt_ends_bb_p (stmt);
201 
202 	  switch (TREE_CODE (stmt))
203 	    {
204 	    case RETURN_EXPR:
205 	      /* We don't care what the lattice value of <retval> is,
206 		 since it's never used as an input to another computation.  */
207 	      dsa = true;
208 	      stmt = TREE_OPERAND (stmt, 0);
209 	      if (!stmt || TREE_CODE (stmt) != MODIFY_EXPR)
210 		break;
211 	      /* FALLTHRU */
212 
213 	    case MODIFY_EXPR:
214 	      dsa = !is_complex_reg (TREE_OPERAND (stmt, 0));
215 	      rhs = TREE_OPERAND (stmt, 1);
216 	      break;
217 
218 	    case COND_EXPR:
219 	      rhs = TREE_OPERAND (stmt, 0);
220 	      break;
221 
222 	    default:
223 	      break;
224 	    }
225 
226 	  if (rhs)
227 	    switch (TREE_CODE (rhs))
228 	      {
229 	      case EQ_EXPR:
230 	      case NE_EXPR:
231 		rhs = TREE_OPERAND (rhs, 0);
232 		/* FALLTHRU */
233 
234 	      case PLUS_EXPR:
235 	      case MINUS_EXPR:
236 	      case MULT_EXPR:
237 	      case TRUNC_DIV_EXPR:
238 	      case CEIL_DIV_EXPR:
239 	      case FLOOR_DIV_EXPR:
240 	      case ROUND_DIV_EXPR:
241 	      case RDIV_EXPR:
242 	      case NEGATE_EXPR:
243 	      case CONJ_EXPR:
244 		if (TREE_CODE (TREE_TYPE (rhs)) == COMPLEX_TYPE)
245 		  saw_a_complex_op = true;
246 		break;
247 
248 	      default:
249 		break;
250 	      }
251 
252 	  DONT_SIMULATE_AGAIN (orig_stmt) = dsa;
253 	}
254     }
255 
256   return saw_a_complex_op;
257 }
258 
259 
260 /* Evaluate statement STMT against the complex lattice defined above.  */
261 
262 static enum ssa_prop_result
263 complex_visit_stmt (tree stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
264 		    tree *result_p)
265 {
266   complex_lattice_t new_l, old_l, op1_l, op2_l;
267   unsigned int ver;
268   tree lhs, rhs;
269 
270   if (TREE_CODE (stmt) != MODIFY_EXPR)
271     return SSA_PROP_VARYING;
272 
273   lhs = TREE_OPERAND (stmt, 0);
274   rhs = TREE_OPERAND (stmt, 1);
275 
276   /* These conditions should be satisfied due to the initial filter
277      set up in init_dont_simulate_again.  */
278   gcc_assert (TREE_CODE (lhs) == SSA_NAME);
279   gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE);
280 
281   *result_p = lhs;
282   ver = SSA_NAME_VERSION (lhs);
283   old_l = VEC_index (complex_lattice_t, complex_lattice_values, ver);
284 
285   switch (TREE_CODE (rhs))
286     {
287     case SSA_NAME:
288     case COMPLEX_EXPR:
289     case COMPLEX_CST:
290       new_l = find_lattice_value (rhs);
291       break;
292 
293     case PLUS_EXPR:
294     case MINUS_EXPR:
295       op1_l = find_lattice_value (TREE_OPERAND (rhs, 0));
296       op2_l = find_lattice_value (TREE_OPERAND (rhs, 1));
297 
298       /* We've set up the lattice values such that IOR neatly
299 	 models addition.  */
300       new_l = op1_l | op2_l;
301       break;
302 
303     case MULT_EXPR:
304     case RDIV_EXPR:
305     case TRUNC_DIV_EXPR:
306     case CEIL_DIV_EXPR:
307     case FLOOR_DIV_EXPR:
308     case ROUND_DIV_EXPR:
309       op1_l = find_lattice_value (TREE_OPERAND (rhs, 0));
310       op2_l = find_lattice_value (TREE_OPERAND (rhs, 1));
311 
312       /* Obviously, if either varies, so does the result.  */
313       if (op1_l == VARYING || op2_l == VARYING)
314 	new_l = VARYING;
315       /* Don't prematurely promote variables if we've not yet seen
316 	 their inputs.  */
317       else if (op1_l == UNINITIALIZED)
318 	new_l = op2_l;
319       else if (op2_l == UNINITIALIZED)
320 	new_l = op1_l;
321       else
322 	{
323 	  /* At this point both numbers have only one component. If the
324 	     numbers are of opposite kind, the result is imaginary,
325 	     otherwise the result is real. The add/subtract translates
326 	     the real/imag from/to 0/1; the ^ performs the comparison.  */
327 	  new_l = ((op1_l - ONLY_REAL) ^ (op2_l - ONLY_REAL)) + ONLY_REAL;
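	  /* Worked example: ONLY_REAL * ONLY_IMAG yields ((1-1) ^ (2-1)) + 1
	     == ONLY_IMAG, while ONLY_IMAG * ONLY_IMAG yields ((2-1) ^ (2-1))
	     + 1 == ONLY_REAL, matching real*imag = imag and i*i = -1.  */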
328 
329 	  /* Don't allow the lattice value to flip-flop indefinitely.  */
330 	  new_l |= old_l;
331 	}
332       break;
333 
334     case NEGATE_EXPR:
335     case CONJ_EXPR:
336       new_l = find_lattice_value (TREE_OPERAND (rhs, 0));
337       break;
338 
339     default:
340       new_l = VARYING;
341       break;
342     }
343 
344   /* If nothing changed this round, let the propagator know.  */
345   if (new_l == old_l)
346     return SSA_PROP_NOT_INTERESTING;
347 
348   VEC_replace (complex_lattice_t, complex_lattice_values, ver, new_l);
349   return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING;
350 }
351 
352 /* Evaluate a PHI node against the complex lattice defined above.  */
353 
354 static enum ssa_prop_result
355 complex_visit_phi (tree phi)
356 {
357   complex_lattice_t new_l, old_l;
358   unsigned int ver;
359   tree lhs;
360   int i;
361 
362   lhs = PHI_RESULT (phi);
363 
364   /* This condition should be satisfied due to the initial filter
365      set up in init_dont_simulate_again.  */
366   gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE);
367 
368   /* We've set up the lattice values such that IOR neatly models PHI meet.  */
369   new_l = UNINITIALIZED;
370   for (i = PHI_NUM_ARGS (phi) - 1; i >= 0; --i)
371     new_l |= find_lattice_value (PHI_ARG_DEF (phi, i));
372 
373   ver = SSA_NAME_VERSION (lhs);
374   old_l = VEC_index (complex_lattice_t, complex_lattice_values, ver);
375 
376   if (new_l == old_l)
377     return SSA_PROP_NOT_INTERESTING;
378 
379   VEC_replace (complex_lattice_t, complex_lattice_values, ver, new_l);
380   return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING;
381 }
382 
383 /* Create one backing variable for a complex component of ORIG.  */
384 
385 static tree
386 create_one_component_var (tree type, tree orig, const char *prefix,
387 			  const char *suffix, enum tree_code code)
388 {
389   tree r = create_tmp_var (type, prefix);
390   add_referenced_var (r);
391 
392   DECL_SOURCE_LOCATION (r) = DECL_SOURCE_LOCATION (orig);
393   DECL_ARTIFICIAL (r) = 1;
394 
395   if (DECL_NAME (orig) && !DECL_IGNORED_P (orig))
396     {
397       const char *name = IDENTIFIER_POINTER (DECL_NAME (orig));
398       tree inner_type;
399 
400       DECL_NAME (r) = get_identifier (ACONCAT ((name, suffix, NULL)));
401 
402       inner_type = TREE_TYPE (TREE_TYPE (orig));
403       SET_DECL_DEBUG_EXPR (r, build1 (code, type, orig));
404       DECL_DEBUG_EXPR_IS_FROM (r) = 1;
405       DECL_IGNORED_P (r) = 0;
406       TREE_NO_WARNING (r) = TREE_NO_WARNING (orig);
407     }
408   else
409     {
410       DECL_IGNORED_P (r) = 1;
411       TREE_NO_WARNING (r) = 1;
412     }
413 
414   return r;
415 }
416 
417 /* Retrieve a value for a complex component of VAR.  */
418 
419 static tree
420 get_component_var (tree var, bool imag_p)
421 {
422   size_t decl_index = DECL_UID (var) * 2 + imag_p;
423   tree ret = cvc_lookup (decl_index);
424 
425   if (ret == NULL)
426     {
427       ret = create_one_component_var (TREE_TYPE (TREE_TYPE (var)), var,
428 				      imag_p ? "CI" : "CR",
429 				      imag_p ? "$imag" : "$real",
430 				      imag_p ? IMAGPART_EXPR : REALPART_EXPR);
431       cvc_insert (decl_index, ret);
432     }
433 
434   return ret;
435 }
436 
437 /* Retrieve a value for a complex component of SSA_NAME.  */
438 
439 static tree
440 get_component_ssa_name (tree ssa_name, bool imag_p)
441 {
442   complex_lattice_t lattice = find_lattice_value (ssa_name);
443   size_t ssa_name_index;
444   tree ret;
445 
446   if (lattice == (imag_p ? ONLY_REAL : ONLY_IMAG))
447     {
448       tree inner_type = TREE_TYPE (TREE_TYPE (ssa_name));
449       if (SCALAR_FLOAT_TYPE_P (inner_type))
450 	return build_real (inner_type, dconst0);
451       else
452 	return build_int_cst (inner_type, 0);
453     }
454 
455   ssa_name_index = SSA_NAME_VERSION (ssa_name) * 2 + imag_p;
456   ret = VEC_index (tree, complex_ssa_name_components, ssa_name_index);
457   if (ret == NULL)
458     {
459       ret = get_component_var (SSA_NAME_VAR (ssa_name), imag_p);
460       ret = make_ssa_name (ret, NULL);
461 
462       /* Copy some properties from the original.  In particular, whether it
463 	 is used in an abnormal phi, and whether it's uninitialized.  */
464       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ret)
465 	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name);
466       if (TREE_CODE (SSA_NAME_VAR (ssa_name)) == VAR_DECL
467 	  && IS_EMPTY_STMT (SSA_NAME_DEF_STMT (ssa_name)))
468 	{
469 	  SSA_NAME_DEF_STMT (ret) = SSA_NAME_DEF_STMT (ssa_name);
470 	  set_default_def (SSA_NAME_VAR (ret), ret);
471 	}
472 
473       VEC_replace (tree, complex_ssa_name_components, ssa_name_index, ret);
474     }
475 
476   return ret;
477 }
478 
479 /* Set a value for a complex component of SSA_NAME, and return a
480    STATEMENT_LIST of any initialization code that needs to be inserted.  */
481 
482 static tree
483 set_component_ssa_name (tree ssa_name, bool imag_p, tree value)
484 {
485   complex_lattice_t lattice = find_lattice_value (ssa_name);
486   size_t ssa_name_index;
487   tree comp, list, last;
488 
489   /* We know the value must be zero, else there's a bug in our lattice
490      analysis.  But the value may well be a variable known to contain
491      zero.  We should be safe ignoring it.  */
492   if (lattice == (imag_p ? ONLY_REAL : ONLY_IMAG))
493     return NULL;
494 
495   /* If we've already assigned an SSA_NAME to this component, then this
496      means that our walk of the basic blocks found a use before the set.
497      This is fine.  Now we should create an initialization for the value
498      we created earlier.  */
499   ssa_name_index = SSA_NAME_VERSION (ssa_name) * 2 + imag_p;
500   comp = VEC_index (tree, complex_ssa_name_components, ssa_name_index);
501   if (comp)
502     ;
503 
504   /* If we've nothing assigned, and the value we're given is already stable,
505      then install that as the value for this SSA_NAME.  This preemptively
506      copy-propagates the value, which avoids unnecessary memory allocation.  */
507   else if (is_gimple_min_invariant (value))
508     {
509       VEC_replace (tree, complex_ssa_name_components, ssa_name_index, value);
510       return NULL;
511     }
512   else if (TREE_CODE (value) == SSA_NAME
513 	   && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
514     {
515       /* Replace an anonymous base value with the variable from cvc_lookup.
516 	 This should result in better debug info.  */
517       if (DECL_IGNORED_P (SSA_NAME_VAR (value))
518 	  && !DECL_IGNORED_P (SSA_NAME_VAR (ssa_name)))
519 	{
520 	  comp = get_component_var (SSA_NAME_VAR (ssa_name), imag_p);
521 	  replace_ssa_name_symbol (value, comp);
522 	}
523 
524       VEC_replace (tree, complex_ssa_name_components, ssa_name_index, value);
525       return NULL;
526     }
527 
528   /* Finally, we need to stabilize the result by installing the value into
529      a new ssa name.  */
530   else
531     comp = get_component_ssa_name (ssa_name, imag_p);
532 
533   /* Do all the work to assign VALUE to COMP.  */
534   value = force_gimple_operand (value, &list, false, NULL);
535   last = build2 (MODIFY_EXPR, TREE_TYPE (comp), comp, value);
536   append_to_statement_list (last, &list);
537 
538   gcc_assert (SSA_NAME_DEF_STMT (comp) == NULL);
539   SSA_NAME_DEF_STMT (comp) = last;
540 
541   return list;
542 }
543 
544 /* Extract the real or imaginary part of a complex variable or constant.
545    Make sure that it's a proper gimple_val and gimplify it if not.
546    Emit any new code before BSI.  */
547 
548 static tree
549 extract_component (block_stmt_iterator *bsi, tree t, bool imagpart_p,
550 		   bool gimple_p)
551 {
552   switch (TREE_CODE (t))
553     {
554     case COMPLEX_CST:
555       return imagpart_p ? TREE_IMAGPART (t) : TREE_REALPART (t);
556 
557     case COMPLEX_EXPR:
558       return TREE_OPERAND (t, imagpart_p);
559 
560     case VAR_DECL:
561     case RESULT_DECL:
562     case PARM_DECL:
563     case INDIRECT_REF:
564     case COMPONENT_REF:
565     case ARRAY_REF:
566       {
567 	tree inner_type = TREE_TYPE (TREE_TYPE (t));
568 
569 	t = build1 ((imagpart_p ? IMAGPART_EXPR : REALPART_EXPR),
570 		    inner_type, unshare_expr (t));
571 
572 	if (gimple_p)
573 	  t = gimplify_val (bsi, inner_type, t);
574 
575 	return t;
576       }
577 
578     case SSA_NAME:
579       return get_component_ssa_name (t, imagpart_p);
580 
581     default:
582       gcc_unreachable ();
583     }
584 }
585 
586 /* Update the complex components of the ssa name on the lhs of STMT.  */
587 
588 static void
589 update_complex_components (block_stmt_iterator *bsi, tree stmt, tree r, tree i)
590 {
591   tree lhs = TREE_OPERAND (stmt, 0);
592   tree list;
593 
594   list = set_component_ssa_name (lhs, false, r);
595   if (list)
596     bsi_insert_after (bsi, list, BSI_CONTINUE_LINKING);
597 
598   list = set_component_ssa_name (lhs, true, i);
599   if (list)
600     bsi_insert_after (bsi, list, BSI_CONTINUE_LINKING);
601 }
602 
603 static void
604 update_complex_components_on_edge (edge e, tree lhs, tree r, tree i)
605 {
606   tree list;
607 
608   list = set_component_ssa_name (lhs, false, r);
609   if (list)
610     bsi_insert_on_edge (e, list);
611 
612   list = set_component_ssa_name (lhs, true, i);
613   if (list)
614     bsi_insert_on_edge (e, list);
615 }
616 
617 /* Update an assignment to a complex variable in place.  */
618 
619 static void
620 update_complex_assignment (block_stmt_iterator *bsi, tree r, tree i)
621 {
622   tree stmt, mod;
623   tree type;
624 
625   mod = stmt = bsi_stmt (*bsi);
626   if (TREE_CODE (stmt) == RETURN_EXPR)
627     mod = TREE_OPERAND (mod, 0);
628   else if (in_ssa_p)
629     update_complex_components (bsi, stmt, r, i);
630 
631   type = TREE_TYPE (TREE_OPERAND (mod, 1));
632   TREE_OPERAND (mod, 1) = build2 (COMPLEX_EXPR, type, r, i);
633   update_stmt (stmt);
634 }
635 
636 /* Generate code at the entry point of the function to initialize the
637    component variables for a complex parameter.  */
638 
639 static void
640 update_parameter_components (void)
641 {
642   edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR);
643   tree parm;
644 
645   for (parm = DECL_ARGUMENTS (cfun->decl); parm ; parm = TREE_CHAIN (parm))
646     {
647       tree type = TREE_TYPE (parm);
648       tree ssa_name, r, i;
649 
650       if (TREE_CODE (type) != COMPLEX_TYPE || !is_gimple_reg (parm))
651 	continue;
652 
653       type = TREE_TYPE (type);
654       ssa_name = default_def (parm);
655       if (!ssa_name)
656 	continue;
657 
658       r = build1 (REALPART_EXPR, type, ssa_name);
659       i = build1 (IMAGPART_EXPR, type, ssa_name);
660       update_complex_components_on_edge (entry_edge, ssa_name, r, i);
661     }
662 }
663 
664 /* Generate code to set the component variables of a complex variable
665    to match the PHI statements in block BB.  */
666 
667 static void
668 update_phi_components (basic_block bb)
669 {
670   tree phi;
671 
672   for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
673     if (is_complex_reg (PHI_RESULT (phi)))
674       {
675 	tree lr, li, pr = NULL, pi = NULL;
676 	unsigned int i, n;
677 
678 	lr = get_component_ssa_name (PHI_RESULT (phi), false);
679 	if (TREE_CODE (lr) == SSA_NAME)
680 	  {
681 	    pr = create_phi_node (lr, bb);
682 	    SSA_NAME_DEF_STMT (lr) = pr;
683 	  }
684 
685 	li = get_component_ssa_name (PHI_RESULT (phi), true);
686 	if (TREE_CODE (li) == SSA_NAME)
687 	  {
688 	    pi = create_phi_node (li, bb);
689 	    SSA_NAME_DEF_STMT (li) = pi;
690 	  }
691 
692 	for (i = 0, n = PHI_NUM_ARGS (phi); i < n; ++i)
693 	  {
694 	    tree comp, arg = PHI_ARG_DEF (phi, i);
695 	    if (pr)
696 	      {
697 		comp = extract_component (NULL, arg, false, false);
698 		SET_PHI_ARG_DEF (pr, i, comp);
699 	      }
700 	    if (pi)
701 	      {
702 		comp = extract_component (NULL, arg, true, false);
703 		SET_PHI_ARG_DEF (pi, i, comp);
704 	      }
705 	  }
706       }
707 }
708 
709 /* Mark each virtual op in STMT for ssa update.  */
710 
711 static void
712 update_all_vops (tree stmt)
713 {
714   ssa_op_iter iter;
715   tree sym;
716 
717   FOR_EACH_SSA_TREE_OPERAND (sym, stmt, iter, SSA_OP_ALL_VIRTUALS)
718     {
719       if (TREE_CODE (sym) == SSA_NAME)
720 	sym = SSA_NAME_VAR (sym);
721       mark_sym_for_renaming (sym);
722     }
723 }
724 
725 /* Expand a complex move to scalars.  */
726 
727 static void
728 expand_complex_move (block_stmt_iterator *bsi, tree stmt, tree type,
729 		     tree lhs, tree rhs)
730 {
731   tree inner_type = TREE_TYPE (type);
732   tree r, i;
733 
734   if (TREE_CODE (lhs) == SSA_NAME)
735     {
736       if (is_ctrl_altering_stmt (bsi_stmt (*bsi)))
737 	{
738 	  edge_iterator ei;
739 	  edge e;
740 
741 	  /* The value is not assigned on the exception edges, so we need not
742 	     concern ourselves there.  We do need to update on the fallthru
743 	     edge.  Find it.  */
744 	  FOR_EACH_EDGE (e, ei, bsi->bb->succs)
745 	    if (e->flags & EDGE_FALLTHRU)
746 	      goto found_fallthru;
747 	  gcc_unreachable ();
748 	found_fallthru:
749 
750 	  r = build1 (REALPART_EXPR, inner_type, lhs);
751 	  i = build1 (IMAGPART_EXPR, inner_type, lhs);
752 	  update_complex_components_on_edge (e, lhs, r, i);
753 	}
754       else if (TREE_CODE (rhs) == CALL_EXPR || TREE_SIDE_EFFECTS (rhs))
755 	{
756 	  r = build1 (REALPART_EXPR, inner_type, lhs);
757 	  i = build1 (IMAGPART_EXPR, inner_type, lhs);
758 	  update_complex_components (bsi, stmt, r, i);
759 	}
760       else
761 	{
762 	  update_all_vops (bsi_stmt (*bsi));
763 	  r = extract_component (bsi, rhs, 0, true);
764 	  i = extract_component (bsi, rhs, 1, true);
765 	  update_complex_assignment (bsi, r, i);
766 	}
767     }
768   else if (TREE_CODE (rhs) == SSA_NAME && !TREE_SIDE_EFFECTS (lhs))
769     {
770       tree x;
771 
772       r = extract_component (bsi, rhs, 0, false);
773       i = extract_component (bsi, rhs, 1, false);
774 
775       x = build1 (REALPART_EXPR, inner_type, unshare_expr (lhs));
776       x = build2 (MODIFY_EXPR, inner_type, x, r);
777       bsi_insert_before (bsi, x, BSI_SAME_STMT);
778 
779       if (stmt == bsi_stmt (*bsi))
780 	{
781 	  x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs));
782 	  TREE_OPERAND (stmt, 0) = x;
783 	  TREE_OPERAND (stmt, 1) = i;
784 	  TREE_TYPE (stmt) = inner_type;
785 	}
786       else
787 	{
788 	  x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs));
789 	  x = build2 (MODIFY_EXPR, inner_type, x, i);
790 	  bsi_insert_before (bsi, x, BSI_SAME_STMT);
791 
792 	  stmt = bsi_stmt (*bsi);
793 	  gcc_assert (TREE_CODE (stmt) == RETURN_EXPR);
794 	  TREE_OPERAND (stmt, 0) = lhs;
795 	}
796 
797       update_all_vops (stmt);
798       update_stmt (stmt);
799     }
800 }
801 
802 /* Expand complex addition to scalars:
803 	a + b = (ar + br) + i(ai + bi)
804 	a - b = (ar - br) + i(ai - bi)
805 */
806 
807 static void
808 expand_complex_addition (block_stmt_iterator *bsi, tree inner_type,
809 			 tree ar, tree ai, tree br, tree bi,
810 			 enum tree_code code,
811 			 complex_lattice_t al, complex_lattice_t bl)
812 {
813   tree rr, ri;
814 
815   switch (PAIR (al, bl))
816     {
817     case PAIR (ONLY_REAL, ONLY_REAL):
818       rr = gimplify_build2 (bsi, code, inner_type, ar, br);
819       ri = ai;
820       break;
821 
822     case PAIR (ONLY_REAL, ONLY_IMAG):
823       rr = ar;
824       if (code == MINUS_EXPR)
825 	ri = gimplify_build2 (bsi, MINUS_EXPR, inner_type, ai, bi);
826       else
827 	ri = bi;
828       break;
829 
830     case PAIR (ONLY_IMAG, ONLY_REAL):
831       if (code == MINUS_EXPR)
832 	rr = gimplify_build2 (bsi, MINUS_EXPR, inner_type, ar, br);
833       else
834 	rr = br;
835       ri = ai;
836       break;
837 
838     case PAIR (ONLY_IMAG, ONLY_IMAG):
839       rr = ar;
840       ri = gimplify_build2 (bsi, code, inner_type, ai, bi);
841       break;
842 
843     case PAIR (VARYING, ONLY_REAL):
844       rr = gimplify_build2 (bsi, code, inner_type, ar, br);
845       ri = ai;
846       break;
847 
848     case PAIR (VARYING, ONLY_IMAG):
849       rr = ar;
850       ri = gimplify_build2 (bsi, code, inner_type, ai, bi);
851       break;
852 
853     case PAIR (ONLY_REAL, VARYING):
854       if (code == MINUS_EXPR)
855 	goto general;
856       rr = gimplify_build2 (bsi, code, inner_type, ar, br);
857       ri = bi;
858       break;
859 
860     case PAIR (ONLY_IMAG, VARYING):
861       if (code == MINUS_EXPR)
862 	goto general;
863       rr = br;
864       ri = gimplify_build2 (bsi, code, inner_type, ai, bi);
865       break;
866 
867     case PAIR (VARYING, VARYING):
868     general:
869       rr = gimplify_build2 (bsi, code, inner_type, ar, br);
870       ri = gimplify_build2 (bsi, code, inner_type, ai, bi);
871       break;
872 
873     default:
874       gcc_unreachable ();
875     }
876 
877   update_complex_assignment (bsi, rr, ri);
878 }
879 
880 /* Expand a complex multiplication or division to a libcall to the
881    C99-compliant routines.  */
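/* In practice these builtins resolve to the libgcc helper routines
   (__mulsc3, __muldc3, __divsc3, __divdc3, and friends for the other
   floating-point modes), which implement the C99 Annex G recovery rules
   for infinities and NaNs.  */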
882 
883 static void
884 expand_complex_libcall (block_stmt_iterator *bsi, tree ar, tree ai,
885 			tree br, tree bi, enum tree_code code)
886 {
887   enum machine_mode mode;
888   enum built_in_function bcode;
889   tree args, fn, stmt, type;
890 
891   args = tree_cons (NULL, bi, NULL);
892   args = tree_cons (NULL, br, args);
893   args = tree_cons (NULL, ai, args);
894   args = tree_cons (NULL, ar, args);
895 
896   stmt = bsi_stmt (*bsi);
897   type = TREE_TYPE (TREE_OPERAND (stmt, 1));
898 
899   mode = TYPE_MODE (type);
900   gcc_assert (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT);
901   if (code == MULT_EXPR)
902     bcode = BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT;
903   else if (code == RDIV_EXPR)
904     bcode = BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT;
905   else
906     gcc_unreachable ();
907   fn = built_in_decls[bcode];
908 
909   TREE_OPERAND (stmt, 1)
910     = build3 (CALL_EXPR, type, build_fold_addr_expr (fn), args, NULL);
911   update_stmt (stmt);
912 
913   if (in_ssa_p)
914     {
915       tree lhs = TREE_OPERAND (stmt, 0);
916       type = TREE_TYPE (type);
917       update_complex_components (bsi, stmt,
918 				 build1 (REALPART_EXPR, type, lhs),
919 				 build1 (IMAGPART_EXPR, type, lhs));
920     }
921 }
922 
923 /* Expand complex multiplication to scalars:
924 	a * b = (ar*br - ai*bi) + i(ar*bi + br*ai)
925 */
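/* Worked example: (1 + 2i) * (3 + 4i) = (1*3 - 2*4) + i(1*4 + 3*2)
   = -5 + 10i.  */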
926 
927 static void
928 expand_complex_multiplication (block_stmt_iterator *bsi, tree inner_type,
929 			       tree ar, tree ai, tree br, tree bi,
930 			       complex_lattice_t al, complex_lattice_t bl)
931 {
932   tree rr, ri;
933 
934   if (al < bl)
935     {
936       complex_lattice_t tl;
937       rr = ar, ar = br, br = rr;
938       ri = ai, ai = bi, bi = ri;
939       tl = al, al = bl, bl = tl;
940     }
941 
942   switch (PAIR (al, bl))
943     {
944     case PAIR (ONLY_REAL, ONLY_REAL):
945       rr = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, br);
946       ri = ai;
947       break;
948 
949     case PAIR (ONLY_IMAG, ONLY_REAL):
950       rr = ar;
951       if (TREE_CODE (ai) == REAL_CST
952 	  && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst1))
953 	ri = br;
954       else
955 	ri = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, br);
956       break;
957 
958     case PAIR (ONLY_IMAG, ONLY_IMAG):
959       rr = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, bi);
960       rr = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, rr);
961       ri = ar;
962       break;
963 
964     case PAIR (VARYING, ONLY_REAL):
965       rr = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, br);
966       ri = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, br);
967       break;
968 
969     case PAIR (VARYING, ONLY_IMAG):
970       rr = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, bi);
971       rr = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, rr);
972       ri = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, bi);
973       break;
974 
975     case PAIR (VARYING, VARYING):
976       if (flag_complex_method == 2 && SCALAR_FLOAT_TYPE_P (inner_type))
977 	{
978 	  expand_complex_libcall (bsi, ar, ai, br, bi, MULT_EXPR);
979 	  return;
980 	}
981       else
982 	{
983 	  tree t1, t2, t3, t4;
984 
985 	  t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, br);
986 	  t2 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, bi);
987 	  t3 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, bi);
988 
989 	  /* Avoid expanding redundant multiplication for the common
990 	     case of squaring a complex number.  */
991 	  if (ar == br && ai == bi)
992 	    t4 = t3;
993 	  else
994 	    t4 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, br);
995 
996 	  rr = gimplify_build2 (bsi, MINUS_EXPR, inner_type, t1, t2);
997 	  ri = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t3, t4);
998 	}
999       break;
1000 
1001     default:
1002       gcc_unreachable ();
1003     }
1004 
1005   update_complex_assignment (bsi, rr, ri);
1006 }
1007 
1008 /* Expand complex division to scalars, straightforward algorithm.
1009 	a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1010 	    t = br*br + bi*bi
1011 */
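/* Worked example: for (1 + 2i) / (3 + 4i), t = 3*3 + 4*4 = 25, giving
   (1*3 + 2*4)/25 + i(2*3 - 1*4)/25 = 11/25 + (2/25)i.  */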
1012 
1013 static void
1014 expand_complex_div_straight (block_stmt_iterator *bsi, tree inner_type,
1015 			     tree ar, tree ai, tree br, tree bi,
1016 			     enum tree_code code)
1017 {
1018   tree rr, ri, div, t1, t2, t3;
1019 
1020   t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, br, br);
1021   t2 = gimplify_build2 (bsi, MULT_EXPR, inner_type, bi, bi);
1022   div = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t1, t2);
1023 
1024   t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, br);
1025   t2 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, bi);
1026   t3 = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t1, t2);
1027   rr = gimplify_build2 (bsi, code, inner_type, t3, div);
1028 
1029   t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, br);
1030   t2 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, bi);
1031   t3 = gimplify_build2 (bsi, MINUS_EXPR, inner_type, t1, t2);
1032   ri = gimplify_build2 (bsi, code, inner_type, t3, div);
1033 
1034   update_complex_assignment (bsi, rr, ri);
1035 }
1036 
1037 /* Expand complex division to scalars, modified algorithm to minimize
1038    overflow with wide input ranges.  */
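/* This is essentially Smith's (1962) algorithm: rather than forming
   br*br + bi*bi directly, scale by the larger-magnitude component of the
   divisor so the intermediate products stay near unity and are less
   likely to overflow or underflow.  */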
1039 
1040 static void
1041 expand_complex_div_wide (block_stmt_iterator *bsi, tree inner_type,
1042 			 tree ar, tree ai, tree br, tree bi,
1043 			 enum tree_code code)
1044 {
1045   tree rr, ri, ratio, div, t1, t2, tr, ti, cond;
1046   basic_block bb_cond, bb_true, bb_false, bb_join;
1047 
1048   /* Examine |br| < |bi|, and branch.  */
1049   t1 = gimplify_build1 (bsi, ABS_EXPR, inner_type, br);
1050   t2 = gimplify_build1 (bsi, ABS_EXPR, inner_type, bi);
1051   cond = fold_build2 (LT_EXPR, boolean_type_node, t1, t2);
1052   STRIP_NOPS (cond);
1053 
1054   bb_cond = bb_true = bb_false = bb_join = NULL;
1055   rr = ri = tr = ti = NULL;
1056   if (!TREE_CONSTANT (cond))
1057     {
1058       edge e;
1059 
1060       cond = build3 (COND_EXPR, void_type_node, cond, NULL_TREE, NULL_TREE);
1061       bsi_insert_before (bsi, cond, BSI_SAME_STMT);
1062 
1063       /* Split the original block, and create the TRUE and FALSE blocks.  */
1064       e = split_block (bsi->bb, cond);
1065       bb_cond = e->src;
1066       bb_join = e->dest;
1067       bb_true = create_empty_bb (bb_cond);
1068       bb_false = create_empty_bb (bb_true);
1069 
1070       t1 = build1 (GOTO_EXPR, void_type_node, tree_block_label (bb_true));
1071       t2 = build1 (GOTO_EXPR, void_type_node, tree_block_label (bb_false));
1072       COND_EXPR_THEN (cond) = t1;
1073       COND_EXPR_ELSE (cond) = t2;
1074 
1075       /* Wire the blocks together.  */
1076       e->flags = EDGE_TRUE_VALUE;
1077       redirect_edge_succ (e, bb_true);
1078       make_edge (bb_cond, bb_false, EDGE_FALSE_VALUE);
1079       make_edge (bb_true, bb_join, EDGE_FALLTHRU);
1080       make_edge (bb_false, bb_join, EDGE_FALLTHRU);
1081 
1082       /* Update dominance info.  Note that bb_join's data was
1083          updated by split_block.  */
1084       if (dom_info_available_p (CDI_DOMINATORS))
1085         {
1086           set_immediate_dominator (CDI_DOMINATORS, bb_true, bb_cond);
1087           set_immediate_dominator (CDI_DOMINATORS, bb_false, bb_cond);
1088         }
1089 
1090       rr = make_rename_temp (inner_type, NULL);
1091       ri = make_rename_temp (inner_type, NULL);
1092     }
1093 
1094   /* In the TRUE branch, we compute
1095       ratio = br/bi;
1096       div = (br * ratio) + bi;
1097       tr = (ar * ratio) + ai;
1098       ti = (ai * ratio) - ar;
1099       tr = tr / div;
1100       ti = ti / div;  */
1101   if (bb_true || integer_nonzerop (cond))
1102     {
1103       if (bb_true)
1104 	{
1105 	  *bsi = bsi_last (bb_true);
1106 	  bsi_insert_after (bsi, build_empty_stmt (), BSI_NEW_STMT);
1107 	}
1108 
1109       ratio = gimplify_build2 (bsi, code, inner_type, br, bi);
1110 
1111       t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, br, ratio);
1112       div = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t1, bi);
1113 
1114       t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, ratio);
1115       tr = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t1, ai);
1116 
1117       t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, ratio);
1118       ti = gimplify_build2 (bsi, MINUS_EXPR, inner_type, t1, ar);
1119 
1120       tr = gimplify_build2 (bsi, code, inner_type, tr, div);
1121       ti = gimplify_build2 (bsi, code, inner_type, ti, div);
1122 
1123      if (bb_true)
1124        {
1125 	 t1 = build2 (MODIFY_EXPR, inner_type, rr, tr);
1126 	 bsi_insert_before (bsi, t1, BSI_SAME_STMT);
1127 	 t1 = build2 (MODIFY_EXPR, inner_type, ri, ti);
1128 	 bsi_insert_before (bsi, t1, BSI_SAME_STMT);
1129 	 bsi_remove (bsi, true);
1130        }
1131     }
1132 
1133   /* In the FALSE branch, we compute
1134       ratio = bi/br;
1135       div = (bi * ratio) + br;
1136       tr = (ai * ratio) + ar;
1137       ti = ai - (ar * ratio);
1138       tr = tr / div;
1139       ti = ti / div;  */
1140   if (bb_false || integer_zerop (cond))
1141     {
1142       if (bb_false)
1143 	{
1144 	  *bsi = bsi_last (bb_false);
1145 	  bsi_insert_after (bsi, build_empty_stmt (), BSI_NEW_STMT);
1146 	}
1147 
1148       ratio = gimplify_build2 (bsi, code, inner_type, bi, br);
1149 
1150       t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, bi, ratio);
1151       div = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t1, br);
1152 
1153       t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, ratio);
1154       tr = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t1, ar);
1155 
1156       t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, ratio);
1157       ti = gimplify_build2 (bsi, MINUS_EXPR, inner_type, ai, t1);
1158 
1159       tr = gimplify_build2 (bsi, code, inner_type, tr, div);
1160       ti = gimplify_build2 (bsi, code, inner_type, ti, div);
1161 
1162      if (bb_false)
1163        {
1164 	 t1 = build2 (MODIFY_EXPR, inner_type, rr, tr);
1165 	 bsi_insert_before (bsi, t1, BSI_SAME_STMT);
1166 	 t1 = build2 (MODIFY_EXPR, inner_type, ri, ti);
1167 	 bsi_insert_before (bsi, t1, BSI_SAME_STMT);
1168 	 bsi_remove (bsi, true);
1169        }
1170     }
1171 
1172   if (bb_join)
1173     *bsi = bsi_start (bb_join);
1174   else
1175     rr = tr, ri = ti;
1176 
1177   update_complex_assignment (bsi, rr, ri);
1178 }
1179 
1180 /* Expand complex division to scalars.  */
1181 
1182 static void
1183 expand_complex_division (block_stmt_iterator *bsi, tree inner_type,
1184 			 tree ar, tree ai, tree br, tree bi,
1185 			 enum tree_code code,
1186 			 complex_lattice_t al, complex_lattice_t bl)
1187 {
1188   tree rr, ri;
1189 
1190   switch (PAIR (al, bl))
1191     {
1192     case PAIR (ONLY_REAL, ONLY_REAL):
1193       rr = gimplify_build2 (bsi, code, inner_type, ar, br);
1194       ri = ai;
1195       break;
1196 
1197     case PAIR (ONLY_REAL, ONLY_IMAG):
1198       rr = ai;
1199       ri = gimplify_build2 (bsi, code, inner_type, ar, bi);
1200       ri = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, ri);
1201       break;
1202 
1203     case PAIR (ONLY_IMAG, ONLY_REAL):
1204       rr = ar;
1205       ri = gimplify_build2 (bsi, code, inner_type, ai, br);
1206       break;
1207 
1208     case PAIR (ONLY_IMAG, ONLY_IMAG):
1209       rr = gimplify_build2 (bsi, code, inner_type, ai, bi);
1210       ri = ar;
1211       break;
1212 
1213     case PAIR (VARYING, ONLY_REAL):
1214       rr = gimplify_build2 (bsi, code, inner_type, ar, br);
1215       ri = gimplify_build2 (bsi, code, inner_type, ai, br);
1216       break;
1217 
1218     case PAIR (VARYING, ONLY_IMAG):
1219       rr = gimplify_build2 (bsi, code, inner_type, ai, bi);
1220       ri = gimplify_build2 (bsi, code, inner_type, ar, bi);
1221       ri = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, ri);
      break;
1222 
1223     case PAIR (ONLY_REAL, VARYING):
1224     case PAIR (ONLY_IMAG, VARYING):
1225     case PAIR (VARYING, VARYING):
1226       switch (flag_complex_method)
1227 	{
1228 	case 0:
1229 	  /* A straightforward implementation of complex division is acceptable.  */
1230 	  expand_complex_div_straight (bsi, inner_type, ar, ai, br, bi, code);
1231 	  break;
1232 
1233 	case 2:
1234 	  if (SCALAR_FLOAT_TYPE_P (inner_type))
1235 	    {
1236 	      expand_complex_libcall (bsi, ar, ai, br, bi, code);
1237 	      break;
1238 	    }
1239 	  /* FALLTHRU */
1240 
1241 	case 1:
1242 	  /* Wide ranges of inputs must work for complex division.  */
1243 	  expand_complex_div_wide (bsi, inner_type, ar, ai, br, bi, code);
1244 	  break;
1245 
1246 	default:
1247 	  gcc_unreachable ();
1248 	}
1249       return;
1250 
1251     default:
1252       gcc_unreachable ();
1253     }
1254 
1255   update_complex_assignment (bsi, rr, ri);
1256 }
1257 
1258 /* Expand complex negation to scalars:
1259 	-a = (-ar) + i(-ai)
1260 */
1261 
1262 static void
1263 expand_complex_negation (block_stmt_iterator *bsi, tree inner_type,
1264 			 tree ar, tree ai)
1265 {
1266   tree rr, ri;
1267 
1268   rr = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, ar);
1269   ri = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, ai);
1270 
1271   update_complex_assignment (bsi, rr, ri);
1272 }
1273 
1274 /* Expand complex conjugate to scalars:
1275 	~a = (ar) + i(-ai)
1276 */
1277 
1278 static void
1279 expand_complex_conjugate (block_stmt_iterator *bsi, tree inner_type,
1280 			  tree ar, tree ai)
1281 {
1282   tree ri;
1283 
1284   ri = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, ai);
1285 
1286   update_complex_assignment (bsi, ar, ri);
1287 }
1288 
1289 /* Expand complex comparison (EQ or NE only).  */
1290 
1291 static void
1292 expand_complex_comparison (block_stmt_iterator *bsi, tree ar, tree ai,
1293 			   tree br, tree bi, enum tree_code code)
1294 {
1295   tree cr, ci, cc, stmt, expr, type;
1296 
1297   cr = gimplify_build2 (bsi, code, boolean_type_node, ar, br);
1298   ci = gimplify_build2 (bsi, code, boolean_type_node, ai, bi);
1299   cc = gimplify_build2 (bsi,
1300 			(code == EQ_EXPR ? TRUTH_AND_EXPR : TRUTH_OR_EXPR),
1301 			boolean_type_node, cr, ci);
1302 
1303   stmt = expr = bsi_stmt (*bsi);
1304 
1305   switch (TREE_CODE (stmt))
1306     {
1307     case RETURN_EXPR:
1308       expr = TREE_OPERAND (stmt, 0);
1309       /* FALLTHRU */
1310     case MODIFY_EXPR:
1311       type = TREE_TYPE (TREE_OPERAND (expr, 1));
1312       TREE_OPERAND (expr, 1) = fold_convert (type, cc);
1313       break;
1314     case COND_EXPR:
1315       TREE_OPERAND (stmt, 0) = cc;
1316       break;
1317     default:
1318       gcc_unreachable ();
1319     }
1320 
1321   update_stmt (stmt);
1322 }
1323 
1324 /* Process one statement.  If we identify a complex operation, expand it.  */
1325 
1326 static void
1327 expand_complex_operations_1 (block_stmt_iterator *bsi)
1328 {
1329   tree stmt = bsi_stmt (*bsi);
1330   tree rhs, type, inner_type;
1331   tree ac, ar, ai, bc, br, bi;
1332   complex_lattice_t al, bl;
1333   enum tree_code code;
1334 
1335   switch (TREE_CODE (stmt))
1336     {
1337     case RETURN_EXPR:
1338       stmt = TREE_OPERAND (stmt, 0);
1339       if (!stmt)
1340 	return;
1341       if (TREE_CODE (stmt) != MODIFY_EXPR)
1342 	return;
1343       /* FALLTHRU */
1344 
1345     case MODIFY_EXPR:
1346       rhs = TREE_OPERAND (stmt, 1);
1347       break;
1348 
1349     case COND_EXPR:
1350       rhs = TREE_OPERAND (stmt, 0);
1351       break;
1352 
1353     default:
1354       return;
1355     }
1356 
1357   type = TREE_TYPE (rhs);
1358   code = TREE_CODE (rhs);
1359 
1360   /* Initial filter for operations we handle.  */
1361   switch (code)
1362     {
1363     case PLUS_EXPR:
1364     case MINUS_EXPR:
1365     case MULT_EXPR:
1366     case TRUNC_DIV_EXPR:
1367     case CEIL_DIV_EXPR:
1368     case FLOOR_DIV_EXPR:
1369     case ROUND_DIV_EXPR:
1370     case RDIV_EXPR:
1371     case NEGATE_EXPR:
1372     case CONJ_EXPR:
1373       if (TREE_CODE (type) != COMPLEX_TYPE)
1374 	return;
1375       inner_type = TREE_TYPE (type);
1376       break;
1377 
1378     case EQ_EXPR:
1379     case NE_EXPR:
1380       inner_type = TREE_TYPE (TREE_OPERAND (rhs, 1));
1381       if (TREE_CODE (inner_type) != COMPLEX_TYPE)
1382 	return;
1383       break;
1384 
1385     default:
1386       {
1387 	tree lhs = TREE_OPERAND (stmt, 0);
1388 	tree rhs = TREE_OPERAND (stmt, 1);
1389 
1390 	if (TREE_CODE (type) == COMPLEX_TYPE)
1391 	  expand_complex_move (bsi, stmt, type, lhs, rhs);
1392 	else if ((TREE_CODE (rhs) == REALPART_EXPR
1393 		  || TREE_CODE (rhs) == IMAGPART_EXPR)
1394 		 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
1395 	  {
1396 	    TREE_OPERAND (stmt, 1)
1397 	      = extract_component (bsi, TREE_OPERAND (rhs, 0),
1398 				   TREE_CODE (rhs) == IMAGPART_EXPR, false);
1399 	    update_stmt (stmt);
1400 	  }
1401       }
1402       return;
1403     }
1404 
1405   /* Extract the components of the two complex values.  Make sure to
1406      handle the common case of the same value being used for both operands.  */
1407   ac = TREE_OPERAND (rhs, 0);
1408   ar = extract_component (bsi, ac, 0, true);
1409   ai = extract_component (bsi, ac, 1, true);
1410 
1411   if (TREE_CODE_CLASS (code) == tcc_unary)
1412     bc = br = bi = NULL;
1413   else
1414     {
1415       bc = TREE_OPERAND (rhs, 1);
1416       if (ac == bc)
1417 	br = ar, bi = ai;
1418       else
1419 	{
1420 	  br = extract_component (bsi, bc, 0, true);
1421 	  bi = extract_component (bsi, bc, 1, true);
1422 	}
1423     }
1424 
1425   if (in_ssa_p)
1426     {
1427       al = find_lattice_value (ac);
1428       if (al == UNINITIALIZED)
1429 	al = VARYING;
1430 
1431       if (TREE_CODE_CLASS (code) == tcc_unary)
1432 	bl = UNINITIALIZED;
1433       else if (ac == bc)
1434 	bl = al;
1435       else
1436 	{
1437 	  bl = find_lattice_value (bc);
1438 	  if (bl == UNINITIALIZED)
1439 	    bl = VARYING;
1440 	}
1441     }
1442   else
1443     al = bl = VARYING;
1444 
1445   switch (code)
1446     {
1447     case PLUS_EXPR:
1448     case MINUS_EXPR:
1449       expand_complex_addition (bsi, inner_type, ar, ai, br, bi, code, al, bl);
1450       break;
1451 
1452     case MULT_EXPR:
1453       expand_complex_multiplication (bsi, inner_type, ar, ai, br, bi, al, bl);
1454       break;
1455 
1456     case TRUNC_DIV_EXPR:
1457     case CEIL_DIV_EXPR:
1458     case FLOOR_DIV_EXPR:
1459     case ROUND_DIV_EXPR:
1460     case RDIV_EXPR:
1461       expand_complex_division (bsi, inner_type, ar, ai, br, bi, code, al, bl);
1462       break;
1463 
1464     case NEGATE_EXPR:
1465       expand_complex_negation (bsi, inner_type, ar, ai);
1466       break;
1467 
1468     case CONJ_EXPR:
1469       expand_complex_conjugate (bsi, inner_type, ar, ai);
1470       break;
1471 
1472     case EQ_EXPR:
1473     case NE_EXPR:
1474       expand_complex_comparison (bsi, ar, ai, br, bi, code);
1475       break;
1476 
1477     default:
1478       gcc_unreachable ();
1479     }
1480 }
1481 
1482 
1483 /* Entry point for complex operation lowering during optimization.  */
1484 
1485 static unsigned int
1486 tree_lower_complex (void)
1487 {
1488   int old_last_basic_block;
1489   block_stmt_iterator bsi;
1490   basic_block bb;
1491 
1492   if (!init_dont_simulate_again ())
1493     return 0;
1494 
1495   complex_lattice_values = VEC_alloc (complex_lattice_t, heap, num_ssa_names);
1496   VEC_safe_grow (complex_lattice_t, heap,
1497 		 complex_lattice_values, num_ssa_names);
1498   memset (VEC_address (complex_lattice_t, complex_lattice_values), 0,
1499 	  num_ssa_names * sizeof(complex_lattice_t));
1500 
1501   init_parameter_lattice_values ();
1502   ssa_propagate (complex_visit_stmt, complex_visit_phi);
1503 
1504   complex_variable_components = htab_create (10,  int_tree_map_hash,
1505 					     int_tree_map_eq, free);
1506 
1507   complex_ssa_name_components = VEC_alloc (tree, heap, 2*num_ssa_names);
1508   VEC_safe_grow (tree, heap, complex_ssa_name_components, 2*num_ssa_names);
1509   memset (VEC_address (tree, complex_ssa_name_components), 0,
1510 	  2 * num_ssa_names * sizeof(tree));
1511 
1512   update_parameter_components ();
1513 
1514   /* ??? Ideally we'd traverse the blocks in breadth-first order.  */
1515   old_last_basic_block = last_basic_block;
1516   FOR_EACH_BB (bb)
1517     {
1518       if (bb->index >= old_last_basic_block)
1519 	continue;
1520       update_phi_components (bb);
1521       for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1522 	expand_complex_operations_1 (&bsi);
1523     }
1524 
1525   bsi_commit_edge_inserts ();
1526 
1527   htab_delete (complex_variable_components);
1528   VEC_free (tree, heap, complex_ssa_name_components);
1529   VEC_free (complex_lattice_t, heap, complex_lattice_values);
1530   return 0;
1531 }
1532 
1533 struct tree_opt_pass pass_lower_complex =
1534 {
1535   "cplxlower",				/* name */
1536   0,					/* gate */
1537   tree_lower_complex,			/* execute */
1538   NULL,					/* sub */
1539   NULL,					/* next */
1540   0,					/* static_pass_number */
1541   0,					/* tv_id */
1542   PROP_ssa,				/* properties_required */
1543   0,					/* properties_provided */
1544   PROP_smt_usage,                       /* properties_destroyed */
1545   0,					/* todo_flags_start */
1546   TODO_dump_func | TODO_ggc_collect
1547   | TODO_update_smt_usage
1548   | TODO_update_ssa
1549   | TODO_verify_stmts,		        /* todo_flags_finish */
1550   0					/* letter */
1551 };
1552 
1553 
1554 /* Entry point for complex operation lowering without optimization.  */
1555 
1556 static unsigned int
1557 tree_lower_complex_O0 (void)
1558 {
1559   int old_last_basic_block = last_basic_block;
1560   block_stmt_iterator bsi;
1561   basic_block bb;
1562 
1563   FOR_EACH_BB (bb)
1564     {
1565       if (bb->index >= old_last_basic_block)
1566 	continue;
1567       for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1568 	expand_complex_operations_1 (&bsi);
1569     }
1570   return 0;
1571 }
1572 
1573 static bool
1574 gate_no_optimization (void)
1575 {
1576   /* With errors, normal optimization passes are not run.  If we don't
1577      lower complex operations at all, rtl expansion will abort.  */
1578   return optimize == 0 || sorrycount || errorcount;
1579 }
1580 
1581 struct tree_opt_pass pass_lower_complex_O0 =
1582 {
1583   "cplxlower0",				/* name */
1584   gate_no_optimization,			/* gate */
1585   tree_lower_complex_O0,		/* execute */
1586   NULL,					/* sub */
1587   NULL,					/* next */
1588   0,					/* static_pass_number */
1589   0,					/* tv_id */
1590   PROP_cfg,				/* properties_required */
1591   0,					/* properties_provided */
1592   0,					/* properties_destroyed */
1593   0,					/* todo_flags_start */
1594   TODO_dump_func | TODO_ggc_collect
1595     | TODO_verify_stmts,		/* todo_flags_finish */
1596   0					/* letter */
1597 };
1598