xref: /netbsd-src/external/gpl3/gcc/dist/gcc/gimple-match-head.cc (revision 0a3071956a3a9fdebdbf7f338cf2d439b45fc728)
1 /* Preamble and helpers for the autogenerated gimple-match.cc file.
2    Copyright (C) 2014-2022 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "ssa.h"
29 #include "cgraph.h"
30 #include "vec-perm-indices.h"
31 #include "fold-const.h"
32 #include "fold-const-call.h"
33 #include "stor-layout.h"
34 #include "gimple-fold.h"
35 #include "calls.h"
36 #include "tree-dfa.h"
37 #include "builtins.h"
38 #include "gimple-match.h"
39 #include "tree-pass.h"
40 #include "internal-fn.h"
41 #include "case-cfn-macros.h"
42 #include "gimplify.h"
43 #include "optabs-tree.h"
44 #include "tree-eh.h"
45 #include "dbgcnt.h"
46 #include "tm.h"
47 #include "gimple-range.h"
48 #include "attribs.h"
49 #include "asan.h"
50 
51 /* Forward declarations of the private auto-generated matchers.
52    They expect valueized operands in canonical order and do not
53    perform simplification of all-constant operands.  */
54 static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
55 			     code_helper, tree, tree);
56 static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
57 			     code_helper, tree, tree, tree);
58 static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
59 			     code_helper, tree, tree, tree, tree);
60 static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
61 			     code_helper, tree, tree, tree, tree, tree);
62 static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
63 			     code_helper, tree, tree, tree, tree, tree, tree);
64 static bool gimple_resimplify1 (gimple_seq *, gimple_match_op *,
65 				tree (*)(tree));
66 static bool gimple_resimplify2 (gimple_seq *, gimple_match_op *,
67 				tree (*)(tree));
68 static bool gimple_resimplify3 (gimple_seq *, gimple_match_op *,
69 				tree (*)(tree));
70 static bool gimple_resimplify4 (gimple_seq *, gimple_match_op *,
71 				tree (*)(tree));
72 static bool gimple_resimplify5 (gimple_seq *, gimple_match_op *,
73 				tree (*)(tree));
74 
/* Out-of-class definition of the static member constant so that it has
   a unique address when odr-used (required under pre-C++17 rules).  */
const unsigned int gimple_match_op::MAX_NUM_OPS;
76 
77 /* Return whether T is a constant that we'll dispatch to fold to
78    evaluate fully constant expressions.  */
79 
80 static inline bool
constant_for_folding(tree t)81 constant_for_folding (tree t)
82 {
83   return (CONSTANT_CLASS_P (t)
84 	  /* The following is only interesting to string builtins.  */
85 	  || (TREE_CODE (t) == ADDR_EXPR
86 	      && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST));
87 }
88 
89 /* Try to convert conditional operation ORIG_OP into an IFN_COND_*
90    operation.  Return true on success, storing the new operation in NEW_OP.  */
91 
static bool
convert_conditional_op (gimple_match_op *orig_op,
			gimple_match_op *new_op)
{
  /* Map the unconditional operation (tree code or internal function)
     to its IFN_COND_* counterpart, if one exists.  */
  internal_fn ifn;
  if (orig_op->code.is_tree_code ())
    ifn = get_conditional_internal_fn ((tree_code) orig_op->code);
  else
    {
      auto cfn = combined_fn (orig_op->code);
      if (!internal_fn_p (cfn))
	return false;
      ifn = get_conditional_internal_fn (as_internal_fn (cfn));
    }
  if (ifn == IFN_LAST)
    return false;
  /* Build IFN (COND, OP0, ..., OPn-1, ELSE): the condition goes in the
     first slot and the "else" value in the last.  */
  unsigned int num_ops = orig_op->num_ops;
  new_op->set_op (as_combined_fn (ifn), orig_op->type, num_ops + 2);
  new_op->ops[0] = orig_op->cond.cond;
  for (unsigned int i = 0; i < num_ops; ++i)
    new_op->ops[i + 1] = orig_op->ops[i];
  tree else_value = orig_op->cond.else_value;
  /* If no "else" value was recorded, ask the target for its preferred
     one.  */
  if (!else_value)
    else_value = targetm.preferred_else_value (ifn, orig_op->type,
					       num_ops, orig_op->ops);
  new_op->ops[num_ops + 1] = else_value;
  return true;
}
120 
121 /* RES_OP is the result of a simplification.  If it is conditional,
122    try to replace it with the equivalent UNCOND form, such as an
123    IFN_COND_* call or a VEC_COND_EXPR.  Also try to resimplify the
124    result of the replacement if appropriate, adding any new statements to
125    SEQ and using VALUEIZE as the valueization function.  Return true if
126    this resimplification occurred and resulted in at least one change.  */
127 
static bool
maybe_resimplify_conditional_op (gimple_seq *seq, gimple_match_op *res_op,
				 tree (*valueize) (tree))
{
  /* Nothing to do for unconditional operations.  */
  if (!res_op->cond.cond)
    return false;

  if (!res_op->cond.else_value
      && res_op->code.is_tree_code ())
    {
      /* The "else" value doesn't matter.  If the "then" value is a
	 gimple value, just use it unconditionally.  This isn't a
	 simplification in itself, since there was no operation to
	 build in the first place.  */
      if (gimple_simplified_result_is_gimple_val (res_op))
	{
	  res_op->cond.cond = NULL_TREE;
	  return false;
	}

      /* Likewise if the operation would not trap.  */
      bool honor_trapv = (INTEGRAL_TYPE_P (res_op->type)
			  && TYPE_OVERFLOW_TRAPS (res_op->type));
      tree_code op_code = (tree_code) res_op->code;
      bool op_could_trap;

      /* COND_EXPR will trap if, and only if, the condition
	 traps and hence we have to check this.  For all other operations, we
	 don't need to consider the operands.  */
      if (op_code == COND_EXPR)
	op_could_trap = generic_expr_could_trap_p (res_op->ops[0]);
      else
	op_could_trap = operation_could_trap_p ((tree_code) res_op->code,
						FLOAT_TYPE_P (res_op->type),
						honor_trapv,
						res_op->op_or_null (1));

      /* A non-trapping operation can also be used unconditionally.  */
      if (!op_could_trap)
	{
	  res_op->cond.cond = NULL_TREE;
	  return false;
	}
    }

  /* If the "then" value is a gimple value and the "else" value matters,
     create a VEC_COND_EXPR between them, then see if it can be further
     simplified.  */
  gimple_match_op new_op;
  if (res_op->cond.else_value
      && VECTOR_TYPE_P (res_op->type)
      && gimple_simplified_result_is_gimple_val (res_op))
    {
      new_op.set_op (VEC_COND_EXPR, res_op->type,
		     res_op->cond.cond, res_op->ops[0],
		     res_op->cond.else_value);
      *res_op = new_op;
      return gimple_resimplify3 (seq, res_op, valueize);
    }

  /* Otherwise try rewriting the operation as an IFN_COND_* call.
     Again, this isn't a simplification in itself, since it's what
     RES_OP already described.  */
  if (convert_conditional_op (res_op, &new_op))
    *res_op = new_op;

  return false;
}
195 
196 /* Helper that matches and simplifies the toplevel result from
197    a gimple_simplify run (where we don't want to build
198    a stmt in case it's used in in-place folding).  Replaces
199    RES_OP with a simplified and/or canonicalized result and
200    returns whether any change was made.  */
201 
static bool
gimple_resimplify1 (gimple_seq *seq, gimple_match_op *res_op,
		    tree (*valueize)(tree))
{
  /* First try fully constant folding of the single operand.  */
  if (constant_for_folding (res_op->ops[0]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
	{
	  auto code = tree_code (res_op->code);
	  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	      && TREE_CODE_LENGTH (code) == 1)
	    tem = const_unop (code, res_op->type, res_op->ops[0]);
	}
      else
	tem = fold_const_call (combined_fn (res_op->code), res_op->type,
			       res_op->ops[0]);
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  /* Strip overflow flags from the folded constant.  */
	  if (TREE_OVERFLOW_P (tem))
	    tem = drop_tree_overflow (tem);
	  res_op->set_value (tem);
	  maybe_resimplify_conditional_op (seq, res_op, valueize);
	  return true;
	}
    }

  /* Limit recursion, there are cases like PR80887 and others, for
     example when value-numbering presents us with unfolded expressions
     that we are really not prepared to handle without eventual
     oscillation like ((_50 + 0) + 8) where _50 gets mapped to _50
     itself as available expression.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
	fprintf (dump_file, "Aborting expression simplification due to "
		 "deep recursion\n");
      return false;
    }

  /* Retry the generated matchers on a copy so RES_OP is untouched on
     failure.  */
  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
		       res_op->code, res_op->type, res_op->ops[0]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return false;
}
260 
261 /* Helper that matches and simplifies the toplevel result from
262    a gimple_simplify run (where we don't want to build
263    a stmt in case it's used in in-place folding).  Replaces
264    RES_OP with a simplified and/or canonicalized result and
265    returns whether any change was made.  */
266 
static bool
gimple_resimplify2 (gimple_seq *seq, gimple_match_op *res_op,
		    tree (*valueize)(tree))
{
  /* First try fully constant folding of both operands.  */
  if (constant_for_folding (res_op->ops[0])
      && constant_for_folding (res_op->ops[1]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
	{
	  auto code = tree_code (res_op->code);
	  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	      && TREE_CODE_LENGTH (code) == 2)
	    tem = const_binop (code, res_op->type,
			       res_op->ops[0], res_op->ops[1]);
	}
      else
	tem = fold_const_call (combined_fn (res_op->code), res_op->type,
			       res_op->ops[0], res_op->ops[1]);
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  /* Strip overflow flags from the folded constant.  */
	  if (TREE_OVERFLOW_P (tem))
	    tem = drop_tree_overflow (tem);
	  res_op->set_value (tem);
	  maybe_resimplify_conditional_op (seq, res_op, valueize);
	  return true;
	}
    }

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  bool is_comparison
    = (res_op->code.is_tree_code ()
       && TREE_CODE_CLASS (tree_code (res_op->code)) == tcc_comparison);
  if ((is_comparison || commutative_binary_op_p (res_op->code, res_op->type))
      && tree_swap_operands_p (res_op->ops[0], res_op->ops[1]))
    {
      std::swap (res_op->ops[0], res_op->ops[1]);
      /* Swapping comparison operands requires swapping the comparison
	 code as well.  */
      if (is_comparison)
	res_op->code = swap_tree_comparison (tree_code (res_op->code));
      canonicalized = true;
    }

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
	fprintf (dump_file, "Aborting expression simplification due to "
		 "deep recursion\n");
      return false;
    }

  /* Retry the generated matchers on a copy so RES_OP is untouched on
     failure.  */
  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
		       res_op->code, res_op->type,
		       res_op->ops[0], res_op->ops[1]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return canonicalized;
}
338 
339 /* Helper that matches and simplifies the toplevel result from
340    a gimple_simplify run (where we don't want to build
341    a stmt in case it's used in in-place folding).  Replaces
342    RES_OP with a simplified and/or canonicalized result and
343    returns whether any change was made.  */
344 
static bool
gimple_resimplify3 (gimple_seq *seq, gimple_match_op *res_op,
		    tree (*valueize)(tree))
{
  /* First try fully constant folding of all three operands.  */
  if (constant_for_folding (res_op->ops[0])
      && constant_for_folding (res_op->ops[1])
      && constant_for_folding (res_op->ops[2]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
	{
	  auto code = tree_code (res_op->code);
	  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	      && TREE_CODE_LENGTH (code) == 3)
	    tem = fold_ternary/*_to_constant*/ (code, res_op->type,
						res_op->ops[0], res_op->ops[1],
						res_op->ops[2]);
	}
      else
	tem = fold_const_call (combined_fn (res_op->code), res_op->type,
			       res_op->ops[0], res_op->ops[1], res_op->ops[2]);
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  /* Strip overflow flags from the folded constant.  */
	  if (TREE_OVERFLOW_P (tem))
	    tem = drop_tree_overflow (tem);
	  res_op->set_value (tem);
	  maybe_resimplify_conditional_op (seq, res_op, valueize);
	  return true;
	}
    }

  /* Canonicalize operand order: swap the first commutative operand
     pair, if any.  */
  bool canonicalized = false;
  int argno = first_commutative_argument (res_op->code, res_op->type);
  if (argno >= 0
      && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1]))
    {
      std::swap (res_op->ops[argno], res_op->ops[argno + 1]);
      canonicalized = true;
    }

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
	fprintf (dump_file, "Aborting expression simplification due to "
		 "deep recursion\n");
      return false;
    }

  /* Retry the generated matchers on a copy so RES_OP is untouched on
     failure.  */
  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
		       res_op->code, res_op->type,
		       res_op->ops[0], res_op->ops[1], res_op->ops[2]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return canonicalized;
}
414 
415 /* Helper that matches and simplifies the toplevel result from
416    a gimple_simplify run (where we don't want to build
417    a stmt in case it's used in in-place folding).  Replaces
418    RES_OP with a simplified and/or canonicalized result and
419    returns whether any change was made.  */
420 
static bool
gimple_resimplify4 (gimple_seq *seq, gimple_match_op *res_op,
		    tree (*valueize)(tree))
{
  /* No constant folding is defined for four-operand functions.  */

  /* Canonicalize operand order: swap the first commutative operand
     pair, if any.  */
  bool canonicalized = false;
  int argno = first_commutative_argument (res_op->code, res_op->type);
  if (argno >= 0
      && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1]))
    {
      std::swap (res_op->ops[argno], res_op->ops[argno + 1]);
      canonicalized = true;
    }

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
	fprintf (dump_file, "Aborting expression simplification due to "
		 "deep recursion\n");
      return false;
    }

  /* Retry the generated matchers on a copy so RES_OP is untouched on
     failure.  */
  ++depth;
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
		       res_op->code, res_op->type,
		       res_op->ops[0], res_op->ops[1], res_op->ops[2],
		       res_op->ops[3]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return canonicalized;
}
465 
466 /* Helper that matches and simplifies the toplevel result from
467    a gimple_simplify run (where we don't want to build
468    a stmt in case it's used in in-place folding).  Replaces
469    RES_OP with a simplified and/or canonicalized result and
470    returns whether any change was made.  */
471 
472 static bool
gimple_resimplify5(gimple_seq * seq,gimple_match_op * res_op,tree (* valueize)(tree))473 gimple_resimplify5 (gimple_seq *seq, gimple_match_op *res_op,
474 		    tree (*valueize)(tree))
475 {
476   /* No constant folding is defined for five-operand functions.  */
477 
478   /* Canonicalize operand order.  */
479   bool canonicalized = false;
480   int argno = first_commutative_argument (res_op->code, res_op->type);
481   if (argno >= 0
482       && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1]))
483     {
484       std::swap (res_op->ops[argno], res_op->ops[argno + 1]);
485       canonicalized = true;
486     }
487 
488   gimple_match_op res_op2 (*res_op);
489   if (gimple_simplify (&res_op2, seq, valueize,
490 		       res_op->code, res_op->type,
491 		       res_op->ops[0], res_op->ops[1], res_op->ops[2],
492 		       res_op->ops[3], res_op->ops[4]))
493     {
494       *res_op = res_op2;
495       return true;
496     }
497 
498   if (maybe_resimplify_conditional_op (seq, res_op, valueize))
499     return true;
500 
501   return canonicalized;
502 }
503 
504 /* Match and simplify the toplevel valueized operation THIS.
505    Replaces THIS with a simplified and/or canonicalized result and
506    returns whether any change was made.  */
507 
508 bool
resimplify(gimple_seq * seq,tree (* valueize)(tree))509 gimple_match_op::resimplify (gimple_seq *seq, tree (*valueize)(tree))
510 {
511   switch (num_ops)
512     {
513     case 1:
514       return gimple_resimplify1 (seq, this, valueize);
515     case 2:
516       return gimple_resimplify2 (seq, this, valueize);
517     case 3:
518       return gimple_resimplify3 (seq, this, valueize);
519     case 4:
520       return gimple_resimplify4 (seq, this, valueize);
521     case 5:
522       return gimple_resimplify5 (seq, this, valueize);
523     default:
524       gcc_unreachable ();
525     }
526 }
527 
528 /* If in GIMPLE the operation described by RES_OP should be single-rhs,
529    build a GENERIC tree for that expression and update RES_OP accordingly.  */
530 
531 void
maybe_build_generic_op(gimple_match_op * res_op)532 maybe_build_generic_op (gimple_match_op *res_op)
533 {
534   tree_code code = (tree_code) res_op->code;
535   tree val;
536   switch (code)
537     {
538     case REALPART_EXPR:
539     case IMAGPART_EXPR:
540     case VIEW_CONVERT_EXPR:
541       val = build1 (code, res_op->type, res_op->ops[0]);
542       res_op->set_value (val);
543       break;
544     case BIT_FIELD_REF:
545       val = build3 (code, res_op->type, res_op->ops[0], res_op->ops[1],
546 		    res_op->ops[2]);
547       REF_REVERSE_STORAGE_ORDER (val) = res_op->reverse;
548       res_op->set_value (val);
549       break;
550     default:;
551     }
552 }
553 
/* Optional hook consulted by maybe_push_res_to_seq for tree-code results;
   if it returns non-NULL that value is used instead of materializing a
   new statement.  */
tree (*mprts_hook) (gimple_match_op *);
555 
556 /* Try to build RES_OP, which is known to be a call to FN.  Return null
557    if the target doesn't support the function.  */
558 
static gcall *
build_call_internal (internal_fn fn, gimple_match_op *res_op)
{
  /* For directly-mapped internal functions make sure the target
     supports the operation on these types before building the call.  */
  if (direct_internal_fn_p (fn))
    {
      tree_pair types = direct_internal_fn_types (fn, res_op->type,
						  res_op->ops);
      if (!direct_internal_fn_supported_p (fn, types, OPTIMIZE_FOR_BOTH))
	return NULL;
    }
  /* op_or_null presumably yields NULL for argument slots beyond
     num_ops — TODO confirm against gimple_match_op.  */
  return gimple_build_call_internal (fn, res_op->num_ops,
				     res_op->op_or_null (0),
				     res_op->op_or_null (1),
				     res_op->op_or_null (2),
				     res_op->op_or_null (3),
				     res_op->op_or_null (4));
}
576 
577 /* Push the exploded expression described by RES_OP as a statement to
578    SEQ if necessary and return a gimple value denoting the value of the
579    expression.  If RES is not NULL then the result will be always RES
580    and even gimple values are pushed to SEQ.  */
581 
582 tree
maybe_push_res_to_seq(gimple_match_op * res_op,gimple_seq * seq,tree res)583 maybe_push_res_to_seq (gimple_match_op *res_op, gimple_seq *seq, tree res)
584 {
585   tree *ops = res_op->ops;
586   unsigned num_ops = res_op->num_ops;
587 
588   /* The caller should have converted conditional operations into an UNCOND
589      form and resimplified as appropriate.  The conditional form only
590      survives this far if that conversion failed.  */
591   if (res_op->cond.cond)
592     return NULL_TREE;
593 
594   if (res_op->code.is_tree_code ())
595     {
596       if (!res
597 	  && gimple_simplified_result_is_gimple_val (res_op))
598 	return ops[0];
599       if (mprts_hook)
600 	{
601 	  tree tem = mprts_hook (res_op);
602 	  if (tem)
603 	    return tem;
604 	}
605     }
606 
607   if (!seq)
608     return NULL_TREE;
609 
610   /* Play safe and do not allow abnormals to be mentioned in
611      newly created statements.  */
612   for (unsigned int i = 0; i < num_ops; ++i)
613     if (TREE_CODE (ops[i]) == SSA_NAME
614 	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i]))
615       return NULL_TREE;
616 
617   if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
618     for (unsigned int i = 0; i < 2; ++i)
619       if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
620 	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i)))
621 	return NULL_TREE;
622 
623   if (res_op->code.is_tree_code ())
624     {
625       auto code = tree_code (res_op->code);
626       if (!res)
627 	{
628 	  if (gimple_in_ssa_p (cfun))
629 	    res = make_ssa_name (res_op->type);
630 	  else
631 	    res = create_tmp_reg (res_op->type);
632 	}
633       maybe_build_generic_op (res_op);
634       gimple *new_stmt = gimple_build_assign (res, code,
635 					      res_op->op_or_null (0),
636 					      res_op->op_or_null (1),
637 					      res_op->op_or_null (2));
638       gimple_seq_add_stmt_without_update (seq, new_stmt);
639       return res;
640     }
641   else
642     {
643       gcc_assert (num_ops != 0);
644       auto fn = combined_fn (res_op->code);
645       gcall *new_stmt = NULL;
646       if (internal_fn_p (fn))
647 	{
648 	  /* Generate the given function if we can.  */
649 	  internal_fn ifn = as_internal_fn (fn);
650 	  new_stmt = build_call_internal (ifn, res_op);
651 	  if (!new_stmt)
652 	    return NULL_TREE;
653 	}
654       else
655 	{
656 	  /* Find the function we want to call.  */
657 	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
658 	  if (!decl)
659 	    return NULL;
660 
661 	  /* We can't and should not emit calls to non-const functions.  */
662 	  if (!(flags_from_decl_or_type (decl) & ECF_CONST))
663 	    return NULL;
664 
665 	  new_stmt = gimple_build_call (decl, num_ops,
666 					res_op->op_or_null (0),
667 					res_op->op_or_null (1),
668 					res_op->op_or_null (2),
669 					res_op->op_or_null (3),
670 					res_op->op_or_null (4));
671 	}
672       if (!res)
673 	{
674 	  if (gimple_in_ssa_p (cfun))
675 	    res = make_ssa_name (res_op->type);
676 	  else
677 	    res = create_tmp_reg (res_op->type);
678 	}
679       gimple_call_set_lhs (new_stmt, res);
680       gimple_seq_add_stmt_without_update (seq, new_stmt);
681       return res;
682     }
683 }
684 
685 
686 /* Public API overloads follow for operation being tree_code or
687    built_in_function and for one to three operands or arguments.
688    They return NULL_TREE if nothing could be simplified or
689    the resulting simplified value with parts pushed to SEQ.
690    If SEQ is NULL then if the simplification needs to create
691    new stmts it will fail.  If VALUEIZE is non-NULL then all
692    SSA names will be valueized using that hook prior to
693    applying simplifications.  */
694 
695 /* Unary ops.  */
696 
697 tree
gimple_simplify(enum tree_code code,tree type,tree op0,gimple_seq * seq,tree (* valueize)(tree))698 gimple_simplify (enum tree_code code, tree type,
699 		 tree op0,
700 		 gimple_seq *seq, tree (*valueize)(tree))
701 {
702   if (constant_for_folding (op0))
703     {
704       tree res = const_unop (code, type, op0);
705       if (res != NULL_TREE
706 	  && CONSTANT_CLASS_P (res))
707 	return res;
708     }
709 
710   gimple_match_op res_op;
711   if (!gimple_simplify (&res_op, seq, valueize, code, type, op0))
712     return NULL_TREE;
713   return maybe_push_res_to_seq (&res_op, seq);
714 }
715 
716 /* Binary ops.  */
717 
tree
gimple_simplify (enum tree_code code, tree type,
		 tree op0, tree op1,
		 gimple_seq *seq, tree (*valueize)(tree))
{
  /* First try fully constant folding of both operands.  */
  if (constant_for_folding (op0) && constant_for_folding (op1))
    {
      tree res = const_binop (code, type, op0, op1);
      if (res != NULL_TREE
	  && CONSTANT_CLASS_P (res))
	return res;
    }

  /* Canonicalize operand order both for matching and fallback stmt
     generation.  */
  if ((commutative_tree_code (code)
       || TREE_CODE_CLASS (code) == tcc_comparison)
      && tree_swap_operands_p (op0, op1))
    {
      std::swap (op0, op1);
      /* Swapping comparison operands requires swapping the comparison
	 code as well.  */
      if (TREE_CODE_CLASS (code) == tcc_comparison)
	code = swap_tree_comparison (code);
    }

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}
747 
748 /* Ternary ops.  */
749 
tree
gimple_simplify (enum tree_code code, tree type,
		 tree op0, tree op1, tree op2,
		 gimple_seq *seq, tree (*valueize)(tree))
{
  /* First try fully constant folding of all three operands.  */
  if (constant_for_folding (op0) && constant_for_folding (op1)
      && constant_for_folding (op2))
    {
      tree res = fold_ternary/*_to_constant */ (code, type, op0, op1, op2);
      if (res != NULL_TREE
	  && CONSTANT_CLASS_P (res))
	return res;
    }

  /* Canonicalize operand order both for matching and fallback stmt
     generation.  */
  if (commutative_ternary_tree_code (code)
      && tree_swap_operands_p (op0, op1))
    std::swap (op0, op1);

  gimple_match_op res_op;
  if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1, op2))
    return NULL_TREE;
  return maybe_push_res_to_seq (&res_op, seq);
}
775 
776 /* Builtin or internal function with one argument.  */
777 
778 tree
gimple_simplify(combined_fn fn,tree type,tree arg0,gimple_seq * seq,tree (* valueize)(tree))779 gimple_simplify (combined_fn fn, tree type,
780 		 tree arg0,
781 		 gimple_seq *seq, tree (*valueize)(tree))
782 {
783   if (constant_for_folding (arg0))
784     {
785       tree res = fold_const_call (fn, type, arg0);
786       if (res && CONSTANT_CLASS_P (res))
787 	return res;
788     }
789 
790   gimple_match_op res_op;
791   if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0))
792     return NULL_TREE;
793   return maybe_push_res_to_seq (&res_op, seq);
794 }
795 
796 /* Builtin or internal function with two arguments.  */
797 
798 tree
gimple_simplify(combined_fn fn,tree type,tree arg0,tree arg1,gimple_seq * seq,tree (* valueize)(tree))799 gimple_simplify (combined_fn fn, tree type,
800 		 tree arg0, tree arg1,
801 		 gimple_seq *seq, tree (*valueize)(tree))
802 {
803   if (constant_for_folding (arg0)
804       && constant_for_folding (arg1))
805     {
806       tree res = fold_const_call (fn, type, arg0, arg1);
807       if (res && CONSTANT_CLASS_P (res))
808 	return res;
809     }
810 
811   gimple_match_op res_op;
812   if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1))
813     return NULL_TREE;
814   return maybe_push_res_to_seq (&res_op, seq);
815 }
816 
817 /* Builtin or internal function with three arguments.  */
818 
819 tree
gimple_simplify(combined_fn fn,tree type,tree arg0,tree arg1,tree arg2,gimple_seq * seq,tree (* valueize)(tree))820 gimple_simplify (combined_fn fn, tree type,
821 		 tree arg0, tree arg1, tree arg2,
822 		 gimple_seq *seq, tree (*valueize)(tree))
823 {
824   if (constant_for_folding (arg0)
825       && constant_for_folding (arg1)
826       && constant_for_folding (arg2))
827     {
828       tree res = fold_const_call (fn, type, arg0, arg1, arg2);
829       if (res && CONSTANT_CLASS_P (res))
830 	return res;
831     }
832 
833   gimple_match_op res_op;
834   if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1, arg2))
835     return NULL_TREE;
836   return maybe_push_res_to_seq (&res_op, seq);
837 }
838 
839 /* Helper for gimple_simplify valueizing OP using VALUEIZE and setting
840    VALUEIZED to true if valueization changed OP.  */
841 
842 static inline tree
do_valueize(tree op,tree (* valueize)(tree),bool & valueized)843 do_valueize (tree op, tree (*valueize)(tree), bool &valueized)
844 {
845   if (valueize && TREE_CODE (op) == SSA_NAME)
846     {
847       tree tem = valueize (op);
848       if (tem && tem != op)
849 	{
850 	  op = tem;
851 	  valueized = true;
852 	}
853     }
854   return op;
855 }
856 
857 /* If RES_OP is a call to a conditional internal function, try simplifying
858    the associated unconditional operation and using the result to build
859    a new conditional operation.  For example, if RES_OP is:
860 
861      IFN_COND_ADD (COND, A, B, ELSE)
862 
863    try simplifying (plus A B) and using the result to build a replacement
864    for the whole IFN_COND_ADD.
865 
866    Return true if this approach led to a simplification, otherwise leave
867    RES_OP unchanged (and so suitable for other simplifications).  When
868    returning true, add any new statements to SEQ and use VALUEIZE as the
869    valueization function.
870 
871    RES_OP is known to be a call to IFN.  */
872 
static bool
try_conditional_simplification (internal_fn ifn, gimple_match_op *res_op,
				gimple_seq *seq, tree (*valueize) (tree))
{
  /* Determine the unconditional operation: either a tree code or the
     unconditional internal function.  */
  code_helper op;
  tree_code code = conditional_internal_fn_code (ifn);
  if (code != ERROR_MARK)
    op = code;
  else
    {
      ifn = get_unconditional_internal_fn (ifn);
      if (ifn == IFN_LAST)
	return false;
      op = as_combined_fn (ifn);
    }

  /* ops[0] is the condition and ops[num_ops - 1] the "else" value;
     the operands in between belong to the unconditional operation.  */
  unsigned int num_ops = res_op->num_ops;
  gimple_match_op cond_op (gimple_match_cond (res_op->ops[0],
					      res_op->ops[num_ops - 1]),
			   op, res_op->type, num_ops - 2);

  memcpy (cond_op.ops, res_op->ops + 1, (num_ops - 1) * sizeof *cond_op.ops);
  /* Resimplify the unconditional operation with the matching helper.  */
  switch (num_ops - 2)
    {
    case 1:
      if (!gimple_resimplify1 (seq, &cond_op, valueize))
	return false;
      break;
    case 2:
      if (!gimple_resimplify2 (seq, &cond_op, valueize))
	return false;
      break;
    case 3:
      if (!gimple_resimplify3 (seq, &cond_op, valueize))
	return false;
      break;
    default:
      gcc_unreachable ();
    }
  /* Commit the simplified operation and rebuild the conditional form.  */
  *res_op = cond_op;
  maybe_resimplify_conditional_op (seq, res_op, valueize);
  return true;
}
916 
917 /* Common subroutine of gimple_extract_op and gimple_simplify.  Try to
918    describe STMT in RES_OP, returning true on success.  Before recording
919    an operand, call:
920 
921    - VALUEIZE_CONDITION for a COND_EXPR condition
922    - VALUEIZE_OP for every other top-level operand
923 
924    Both routines take a tree argument and returns a tree.  */
925 
926 template<typename ValueizeOp, typename ValueizeCondition>
927 inline bool
gimple_extract(gimple * stmt,gimple_match_op * res_op,ValueizeOp valueize_op,ValueizeCondition valueize_condition)928 gimple_extract (gimple *stmt, gimple_match_op *res_op,
929 		ValueizeOp valueize_op,
930 		ValueizeCondition valueize_condition)
931 {
932   switch (gimple_code (stmt))
933     {
934     case GIMPLE_ASSIGN:
935       {
936 	enum tree_code code = gimple_assign_rhs_code (stmt);
937 	tree type = TREE_TYPE (gimple_assign_lhs (stmt));
938 	switch (gimple_assign_rhs_class (stmt))
939 	  {
940 	  case GIMPLE_SINGLE_RHS:
941 	    if (code == REALPART_EXPR
942 		|| code == IMAGPART_EXPR
943 		|| code == VIEW_CONVERT_EXPR)
944 	      {
945 		tree op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
946 		res_op->set_op (code, type, valueize_op (op0));
947 		return true;
948 	      }
949 	    else if (code == BIT_FIELD_REF)
950 	      {
951 		tree rhs1 = gimple_assign_rhs1 (stmt);
952 		tree op0 = valueize_op (TREE_OPERAND (rhs1, 0));
953 		res_op->set_op (code, type, op0,
954 				TREE_OPERAND (rhs1, 1),
955 				TREE_OPERAND (rhs1, 2),
956 				REF_REVERSE_STORAGE_ORDER (rhs1));
957 		return true;
958 	      }
959 	    else if (code == SSA_NAME)
960 	      {
961 		tree op0 = gimple_assign_rhs1 (stmt);
962 		res_op->set_op (TREE_CODE (op0), type, valueize_op (op0));
963 		return true;
964 	      }
965 	    break;
966 	  case GIMPLE_UNARY_RHS:
967 	    {
968 	      tree rhs1 = gimple_assign_rhs1 (stmt);
969 	      res_op->set_op (code, type, valueize_op (rhs1));
970 	      return true;
971 	    }
972 	  case GIMPLE_BINARY_RHS:
973 	    {
974 	      tree rhs1 = valueize_op (gimple_assign_rhs1 (stmt));
975 	      tree rhs2 = valueize_op (gimple_assign_rhs2 (stmt));
976 	      res_op->set_op (code, type, rhs1, rhs2);
977 	      return true;
978 	    }
979 	  case GIMPLE_TERNARY_RHS:
980 	    {
981 	      tree rhs1 = gimple_assign_rhs1 (stmt);
982 	      if (code == COND_EXPR && COMPARISON_CLASS_P (rhs1))
983 		rhs1 = valueize_condition (rhs1);
984 	      else
985 		rhs1 = valueize_op (rhs1);
986 	      tree rhs2 = valueize_op (gimple_assign_rhs2 (stmt));
987 	      tree rhs3 = valueize_op (gimple_assign_rhs3 (stmt));
988 	      res_op->set_op (code, type, rhs1, rhs2, rhs3);
989 	      return true;
990 	    }
991 	  default:
992 	    gcc_unreachable ();
993 	  }
994 	break;
995       }
996 
997     case GIMPLE_CALL:
998       /* ???  This way we can't simplify calls with side-effects.  */
999       if (gimple_call_lhs (stmt) != NULL_TREE
1000 	  && gimple_call_num_args (stmt) >= 1
1001 	  && gimple_call_num_args (stmt) <= 5)
1002 	{
1003 	  combined_fn cfn;
1004 	  if (gimple_call_internal_p (stmt))
1005 	    cfn = as_combined_fn (gimple_call_internal_fn (stmt));
1006 	  else
1007 	    {
1008 	      tree fn = gimple_call_fn (stmt);
1009 	      if (!fn)
1010 		return false;
1011 
1012 	      fn = valueize_op (fn);
1013 	      if (TREE_CODE (fn) != ADDR_EXPR
1014 		  || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
1015 		return false;
1016 
1017 	      tree decl = TREE_OPERAND (fn, 0);
1018 	      if (DECL_BUILT_IN_CLASS (decl) != BUILT_IN_NORMAL
1019 		  || !gimple_builtin_call_types_compatible_p (stmt, decl))
1020 		return false;
1021 
1022 	      cfn = as_combined_fn (DECL_FUNCTION_CODE (decl));
1023 	    }
1024 
1025 	  unsigned int num_args = gimple_call_num_args (stmt);
1026 	  res_op->set_op (cfn, TREE_TYPE (gimple_call_lhs (stmt)), num_args);
1027 	  for (unsigned i = 0; i < num_args; ++i)
1028 	    res_op->ops[i] = valueize_op (gimple_call_arg (stmt, i));
1029 	  return true;
1030 	}
1031       break;
1032 
1033     case GIMPLE_COND:
1034       {
1035 	tree lhs = valueize_op (gimple_cond_lhs (stmt));
1036 	tree rhs = valueize_op (gimple_cond_rhs (stmt));
1037 	res_op->set_op (gimple_cond_code (stmt), boolean_type_node, lhs, rhs);
1038 	return true;
1039       }
1040 
1041     default:
1042       break;
1043     }
1044 
1045   return false;
1046 }
1047 
1048 /* Try to describe STMT in RES_OP, returning true on success.
1049    For GIMPLE_CONDs, describe the condition that is being tested.
1050    For GIMPLE_ASSIGNs, describe the rhs of the assignment.
1051    For GIMPLE_CALLs, describe the call.  */
1052 
1053 bool
gimple_extract_op(gimple * stmt,gimple_match_op * res_op)1054 gimple_extract_op (gimple *stmt, gimple_match_op *res_op)
1055 {
1056   auto nop = [](tree op) { return op; };
1057   return gimple_extract (stmt, res_op, nop, nop);
1058 }
1059 
1060 /* The main STMT based simplification entry.  It is used by the fold_stmt
1061    and the fold_stmt_to_constant APIs.  */
1062 
1063 bool
gimple_simplify(gimple * stmt,gimple_match_op * res_op,gimple_seq * seq,tree (* valueize)(tree),tree (* top_valueize)(tree))1064 gimple_simplify (gimple *stmt, gimple_match_op *res_op, gimple_seq *seq,
1065 		 tree (*valueize)(tree), tree (*top_valueize)(tree))
1066 {
  /* Set whenever TOP_VALUEIZE changed an operand; doubles as the
     fallback return value when no simplification applies.  */
1067   bool valueized = false;
  /* Valueize a plain operand via TOP_VALUEIZE, recording any change.  */
1068   auto valueize_op = [&](tree op)
1069     {
1070       return do_valueize (op, top_valueize, valueized);
1071     };
  /* Valueize an embedded COND_EXPR condition OP: valueize both comparison
     operands, try to resimplify the comparison, and if it simplified (or
     an operand changed) to a comparison or to an SSA name/constant,
     return that result; otherwise fall back to plain valueization.  */
1072   auto valueize_condition = [&](tree op) -> tree
1073     {
1074       bool cond_valueized = false;
1075       tree lhs = do_valueize (TREE_OPERAND (op, 0), top_valueize,
1076 			      cond_valueized);
1077       tree rhs = do_valueize (TREE_OPERAND (op, 1), top_valueize,
1078 			      cond_valueized);
1079       gimple_match_op res_op2 (res_op->cond, TREE_CODE (op),
1080 			       TREE_TYPE (op), lhs, rhs);
1081       if ((gimple_resimplify2 (seq, &res_op2, valueize)
1082 	   || cond_valueized)
1083 	  && res_op2.code.is_tree_code ())
1084 	{
1085 	  auto code = tree_code (res_op2.code);
1086 	  if (TREE_CODE_CLASS (code) == tcc_comparison)
1087 	    {
1088 	      valueized = true;
	      /* Rebuild the (possibly simplified) comparison as a tree.  */
1089 	      return build2 (code, TREE_TYPE (op),
1090 			     res_op2.ops[0], res_op2.ops[1]);
1091 	    }
1092 	  else if (code == SSA_NAME
1093 		   || code == INTEGER_CST
1094 		   || code == VECTOR_CST)
1095 	    {
1096 	      valueized = true;
1097 	      return res_op2.ops[0];
1098 	    }
1099 	}
1100       return valueize_op (op);
1101     };
1102 
  /* Describe STMT in RES_OP; fail if its form cannot be captured.  */
1103   if (!gimple_extract (stmt, res_op, valueize_op, valueize_condition))
1104     return false;
1105 
  /* Calls to conditional internal functions get a dedicated attempt via
     their unconditional counterpart.  */
1106   if (res_op->code.is_internal_fn ())
1107     {
1108       internal_fn ifn = internal_fn (res_op->code);
1109       if (try_conditional_simplification (ifn, res_op, seq, valueize))
1110 	return true;
1111     }
1112 
  /* NOTE(review): reverse-storage-order results are skipped here —
     presumably the generated matchers do not handle them; confirm.  */
1113   if (!res_op->reverse
1114       && res_op->num_ops
1115       && res_op->resimplify (seq, valueize))
1116     return true;
1117 
1118   return valueized;
1119 }
1120 
1121 /* Helper for the autogenerated code, valueize OP.  */
1122 
1123 inline tree
do_valueize(tree (* valueize)(tree),tree op)1124 do_valueize (tree (*valueize)(tree), tree op)
1125 {
1126   if (valueize && TREE_CODE (op) == SSA_NAME)
1127     {
1128       tree tem = valueize (op);
1129       if (tem)
1130 	return tem;
1131     }
1132   return op;
1133 }
1134 
1135 /* Helper for the autogenerated code, get at the definition of NAME when
1136    VALUEIZE allows that.  */
1137 
1138 inline gimple *
get_def(tree (* valueize)(tree),tree name)1139 get_def (tree (*valueize)(tree), tree name)
1140 {
1141   if (valueize && ! valueize (name))
1142     return NULL;
1143   return SSA_NAME_DEF_STMT (name);
1144 }
1145 
1146 /* Routine to determine if the types T1 and T2 are effectively
1147    the same for GIMPLE.  If T1 or T2 is not a type, the test
1148    applies to their TREE_TYPE.  */
1149 
1150 static inline bool
types_match(tree t1,tree t2)1151 types_match (tree t1, tree t2)
1152 {
1153   if (!TYPE_P (t1))
1154     t1 = TREE_TYPE (t1);
1155   if (!TYPE_P (t2))
1156     t2 = TREE_TYPE (t2);
1157 
1158   return types_compatible_p (t1, t2);
1159 }
1160 
1161 /* Return if T has a single use.  For GIMPLE, we also allow any
1162    non-SSA_NAME (ie constants) and zero uses to cope with uses
1163    that aren't linked up yet.  */
1164 
1165 static bool
1166 single_use (const_tree) ATTRIBUTE_PURE;
1167 
1168 static bool
single_use(const_tree t)1169 single_use (const_tree t)
1170 {
1171   if (TREE_CODE (t) != SSA_NAME)
1172     return true;
1173 
1174   /* Inline return has_zero_uses (t) || has_single_use (t);  */
1175   const ssa_use_operand_t *const head = &(SSA_NAME_IMM_USE_NODE (t));
1176   const ssa_use_operand_t *ptr;
1177   bool single = false;
1178 
1179   for (ptr = head->next; ptr != head; ptr = ptr->next)
1180     if (USE_STMT(ptr) && !is_gimple_debug (USE_STMT (ptr)))
1181       {
1182         if (single)
1183           return false;
1184 	single = true;
1185       }
1186   return true;
1187 }
1188 
1189 /* Return true if math operations should be canonicalized,
1190    e.g. sqrt(sqrt(x)) -> pow(x, 0.25).  */
1191 
1192 static inline bool
canonicalize_math_p()1193 canonicalize_math_p ()
1194 {
1195   return !cfun || (cfun->curr_properties & PROP_gimple_opt_math) == 0;
1196 }
1197 
1198 /* Return true if math operations that are beneficial only after
1199    vectorization should be canonicalized.  */
1200 
1201 static inline bool
canonicalize_math_after_vectorization_p()1202 canonicalize_math_after_vectorization_p ()
1203 {
1204   return !cfun || (cfun->curr_properties & PROP_gimple_lvec) != 0;
1205 }
1206 
1207 /* Return true if we can still perform transformations that may introduce
1208    vector operations that are not supported by the target. Vector lowering
1209    normally handles those, but after that pass, it becomes unsafe.  */
1210 
1211 static inline bool
optimize_vectors_before_lowering_p()1212 optimize_vectors_before_lowering_p ()
1213 {
1214   return !cfun || (cfun->curr_properties & PROP_gimple_lvec) == 0;
1215 }
1216 
1217 /* Return true if pow(cst, x) should be optimized into exp(log(cst) * x).
1218    As a workaround for SPEC CPU2017 628.pop2_s, don't do it if arg0
1219    is an exact integer, arg1 = phi_res +/- cst1 and phi_res = PHI <cst2, ...>
1220    where cst2 +/- cst1 is an exact integer, because then pow (arg0, arg1)
1221    will likely be exact, while exp (log (arg0) * arg1) might be not.
1222    Also don't do it if arg1 is phi_res above and cst2 is an exact integer.  */
1223 
1224 static bool
optimize_pow_to_exp(tree arg0,tree arg1)1225 optimize_pow_to_exp (tree arg0, tree arg1)
1226 {
1227   gcc_assert (TREE_CODE (arg0) == REAL_CST);
  /* A base that is not an exact integer never triggers the workaround.  */
1228   if (!real_isinteger (TREE_REAL_CST_PTR (arg0), TYPE_MODE (TREE_TYPE (arg0))))
1229     return true;
1230 
1231   if (TREE_CODE (arg1) != SSA_NAME)
1232     return true;
1233 
  /* Recognize arg1 = PHI <...> directly, or arg1 = phi_res +/- cst1.  */
1234   gimple *def = SSA_NAME_DEF_STMT (arg1);
1235   gphi *phi = dyn_cast <gphi *> (def);
1236   tree cst1 = NULL_TREE;
1237   enum tree_code code = ERROR_MARK;
1238   if (!phi)
1239     {
1240       if (!is_gimple_assign (def))
1241 	return true;
1242       code = gimple_assign_rhs_code (def);
1243       switch (code)
1244 	{
1245 	case PLUS_EXPR:
1246 	case MINUS_EXPR:
1247 	  break;
1248 	default:
1249 	  return true;
1250 	}
1251       if (TREE_CODE (gimple_assign_rhs1 (def)) != SSA_NAME
1252 	  || TREE_CODE (gimple_assign_rhs2 (def)) != REAL_CST)
1253 	return true;
1254 
1255       cst1 = gimple_assign_rhs2 (def);
1256 
1257       phi = dyn_cast <gphi *> (SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def)));
1258       if (!phi)
1259 	return true;
1260     }
1261 
  /* Find the unique constant PHI argument CST2, if any.  Two different
     constants mean the workaround does not apply, so optimizing is OK.  */
1262   tree cst2 = NULL_TREE;
1263   int n = gimple_phi_num_args (phi);
1264   for (int i = 0; i < n; i++)
1265     {
1266       tree arg = PHI_ARG_DEF (phi, i);
1267       if (TREE_CODE (arg) != REAL_CST)
1268 	continue;
1269       else if (cst2 == NULL_TREE)
1270 	cst2 = arg;
1271       else if (!operand_equal_p (cst2, arg, 0))
1272 	return true;
1273     }
1274 
  /* Fold cst2 +/- cst1 when both were found; if the combined constant is
     an exact integer, pow is likely exact, so refuse the transform.  */
1275   if (cst1 && cst2)
1276     cst2 = const_binop (code, TREE_TYPE (cst2), cst2, cst1);
1277   if (cst2
1278       && TREE_CODE (cst2) == REAL_CST
1279       && real_isinteger (TREE_REAL_CST_PTR (cst2),
1280 			 TYPE_MODE (TREE_TYPE (cst2))))
1281     return false;
1282   return true;
1283 }
1284 
1285 /* Return true if a division INNER_DIV / DIVISOR where INNER_DIV
1286    is another division can be optimized.  Don't optimize if INNER_DIV
1287    is used in a TRUNC_MOD_EXPR with DIVISOR as second operand.  */
1288 
1289 static bool
optimize_successive_divisions_p(tree divisor,tree inner_div)1290 optimize_successive_divisions_p (tree divisor, tree inner_div)
1291 {
1292   if (!gimple_in_ssa_p (cfun))
1293     return false;
1294 
1295   imm_use_iterator imm_iter;
1296   use_operand_p use_p;
1297   FOR_EACH_IMM_USE_FAST (use_p, imm_iter, inner_div)
1298     {
1299       gimple *use_stmt = USE_STMT (use_p);
1300       if (!is_gimple_assign (use_stmt)
1301 	  || gimple_assign_rhs_code (use_stmt) != TRUNC_MOD_EXPR
1302 	  || !operand_equal_p (gimple_assign_rhs2 (use_stmt), divisor, 0))
1303 	continue;
1304       return false;
1305     }
1306   return true;
1307 }
1308 
1309 /* Return a canonical form for CODE when operating on TYPE.  The idea
1310    is to remove redundant ways of representing the same operation so
1311    that code_helpers can be hashed and compared for equality.
1312 
1313    The only current canonicalization is to replace built-in functions
1314    with internal functions, in cases where internal-fn.def defines
1315    such an internal function.
1316 
1317    Note that the new code_helper cannot necessarily be used in place of
1318    the original code_helper.  For example, the new code_helper might be
1319    an internal function that the target does not support.  */
1320 
1321 code_helper
canonicalize_code(code_helper code,tree type)1322 canonicalize_code (code_helper code, tree type)
1323 {
1324   if (code.is_fn_code ())
1325     return associated_internal_fn (combined_fn (code), type);
1326   return code;
1327 }
1328 
1329 /* Return true if CODE is a binary operation and if CODE is commutative when
1330    operating on type TYPE.  */
1331 
1332 bool
commutative_binary_op_p(code_helper code,tree type)1333 commutative_binary_op_p (code_helper code, tree type)
1334 {
1335   if (code.is_tree_code ())
1336     return commutative_tree_code (tree_code (code));
1337   auto cfn = combined_fn (code);
1338   return commutative_binary_fn_p (associated_internal_fn (cfn, type));
1339 }
1340 
1341 /* Return true if CODE represents a ternary operation and if the first two
1342    operands are commutative when CODE is operating on TYPE.  */
1343 
1344 bool
commutative_ternary_op_p(code_helper code,tree type)1345 commutative_ternary_op_p (code_helper code, tree type)
1346 {
1347   if (code.is_tree_code ())
1348     return commutative_ternary_tree_code (tree_code (code));
1349   auto cfn = combined_fn (code);
1350   return commutative_ternary_fn_p (associated_internal_fn (cfn, type));
1351 }
1352 
1353 /* If CODE is commutative in two consecutive operands, return the
1354    index of the first, otherwise return -1.  */
1355 
1356 int
first_commutative_argument(code_helper code,tree type)1357 first_commutative_argument (code_helper code, tree type)
1358 {
1359   if (code.is_tree_code ())
1360     {
1361       auto tcode = tree_code (code);
1362       if (commutative_tree_code (tcode)
1363 	  || commutative_ternary_tree_code (tcode))
1364 	return 0;
1365       return -1;
1366     }
1367   auto cfn = combined_fn (code);
1368   return first_commutative_argument (associated_internal_fn (cfn, type));
1369 }
1370 
1371 /* Return true if CODE is a binary operation that is associative when
1372    operating on type TYPE.  */
1373 
1374 bool
associative_binary_op_p(code_helper code,tree type)1375 associative_binary_op_p (code_helper code, tree type)
1376 {
1377   if (code.is_tree_code ())
1378     return associative_tree_code (tree_code (code));
1379   auto cfn = combined_fn (code);
1380   return associative_binary_fn_p (associated_internal_fn (cfn, type));
1381 }
1382 
1383 /* Return true if the target directly supports operation CODE on type TYPE.
1384    QUERY_TYPE acts as for optab_for_tree_code.  */
1385 
1386 bool
directly_supported_p(code_helper code,tree type,optab_subtype query_type)1387 directly_supported_p (code_helper code, tree type, optab_subtype query_type)
1388 {
1389   if (code.is_tree_code ())
1390     {
1391       direct_optab optab = optab_for_tree_code (tree_code (code), type,
1392 						query_type);
1393       return (optab != unknown_optab
1394 	      && optab_handler (optab, TYPE_MODE (type)) != CODE_FOR_nothing);
1395     }
1396   gcc_assert (query_type == optab_default
1397 	      || (query_type == optab_vector && VECTOR_TYPE_P (type))
1398 	      || (query_type == optab_scalar && !VECTOR_TYPE_P (type)));
1399   internal_fn ifn = associated_internal_fn (combined_fn (code), type);
1400   return (direct_internal_fn_p (ifn)
1401 	  && direct_internal_fn_supported_p (ifn, type, OPTIMIZE_FOR_SPEED));
1402 }
1403 
1404 /* A wrapper around the internal-fn.cc versions of get_conditional_internal_fn
1405    for a code_helper CODE operating on type TYPE.  */
1406 
1407 internal_fn
get_conditional_internal_fn(code_helper code,tree type)1408 get_conditional_internal_fn (code_helper code, tree type)
1409 {
1410   if (code.is_tree_code ())
1411     return get_conditional_internal_fn (tree_code (code));
1412   auto cfn = combined_fn (code);
1413   return get_conditional_internal_fn (associated_internal_fn (cfn, type));
1414 }
1415