xref: /netbsd-src/external/gpl3/gcc/dist/gcc/gimple-match-head.cc (revision 2dd295436a0082eb4f8d294f4aa73c223413d0f2)
1 /* Preamble and helpers for the autogenerated gimple-match.cc file.
2    Copyright (C) 2014-2022 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "ssa.h"
29 #include "cgraph.h"
30 #include "vec-perm-indices.h"
31 #include "fold-const.h"
32 #include "fold-const-call.h"
33 #include "stor-layout.h"
34 #include "gimple-fold.h"
35 #include "calls.h"
36 #include "tree-dfa.h"
37 #include "builtins.h"
38 #include "gimple-match.h"
39 #include "tree-pass.h"
40 #include "internal-fn.h"
41 #include "case-cfn-macros.h"
42 #include "gimplify.h"
43 #include "optabs-tree.h"
44 #include "tree-eh.h"
45 #include "dbgcnt.h"
46 #include "tm.h"
47 #include "gimple-range.h"
48 
49 /* Forward declarations of the private auto-generated matchers.
50    They expect valueized operands in canonical order and do not
51    perform simplification of all-constant operands.  */
52 static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
53 			     code_helper, tree, tree);
54 static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
55 			     code_helper, tree, tree, tree);
56 static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
57 			     code_helper, tree, tree, tree, tree);
58 static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
59 			     code_helper, tree, tree, tree, tree, tree);
60 static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
61 			     code_helper, tree, tree, tree, tree, tree, tree);
62 static bool gimple_resimplify1 (gimple_seq *, gimple_match_op *,
63 				tree (*)(tree));
64 static bool gimple_resimplify2 (gimple_seq *, gimple_match_op *,
65 				tree (*)(tree));
66 static bool gimple_resimplify3 (gimple_seq *, gimple_match_op *,
67 				tree (*)(tree));
68 static bool gimple_resimplify4 (gimple_seq *, gimple_match_op *,
69 				tree (*)(tree));
70 static bool gimple_resimplify5 (gimple_seq *, gimple_match_op *,
71 				tree (*)(tree));
72 
73 const unsigned int gimple_match_op::MAX_NUM_OPS;
74 
75 /* Return whether T is a constant that we'll dispatch to fold to
76    evaluate fully constant expressions.  */
77 
78 static inline bool
79 constant_for_folding (tree t)
80 {
81   return (CONSTANT_CLASS_P (t)
82 	  /* The following is only interesting to string builtins.  */
83 	  || (TREE_CODE (t) == ADDR_EXPR
84 	      && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST));
85 }
86 
87 /* Try to convert conditional operation ORIG_OP into an IFN_COND_*
88    operation.  Return true on success, storing the new operation in NEW_OP.  */
89 
90 static bool
91 convert_conditional_op (gimple_match_op *orig_op,
92 			gimple_match_op *new_op)
93 {
94   internal_fn ifn;
95   if (orig_op->code.is_tree_code ())
96     ifn = get_conditional_internal_fn ((tree_code) orig_op->code);
97   else
98     {
99       auto cfn = combined_fn (orig_op->code);
100       if (!internal_fn_p (cfn))
101 	return false;
102       ifn = get_conditional_internal_fn (as_internal_fn (cfn));
103     }
104   if (ifn == IFN_LAST)
105     return false;
106   unsigned int num_ops = orig_op->num_ops;
107   new_op->set_op (as_combined_fn (ifn), orig_op->type, num_ops + 2);
108   new_op->ops[0] = orig_op->cond.cond;
109   for (unsigned int i = 0; i < num_ops; ++i)
110     new_op->ops[i + 1] = orig_op->ops[i];
111   tree else_value = orig_op->cond.else_value;
112   if (!else_value)
113     else_value = targetm.preferred_else_value (ifn, orig_op->type,
114 					       num_ops, orig_op->ops);
115   new_op->ops[num_ops + 1] = else_value;
116   return true;
117 }
118 
/* RES_OP is the result of a simplification.  If it is conditional,
   try to replace it with the equivalent UNCOND form, such as an
   IFN_COND_* call or a VEC_COND_EXPR.  Also try to resimplify the
   result of the replacement if appropriate, adding any new statements to
   SEQ and using VALUEIZE as the valueization function.  Return true if
   this resimplification occurred and resulted in at least one change.  */

static bool
maybe_resimplify_conditional_op (gimple_seq *seq, gimple_match_op *res_op,
				 tree (*valueize) (tree))
{
  /* Nothing to do for an unconditional operation.  */
  if (!res_op->cond.cond)
    return false;

  if (!res_op->cond.else_value
      && res_op->code.is_tree_code ())
    {
      /* The "else" value doesn't matter.  If the "then" value is a
	 gimple value, just use it unconditionally.  This isn't a
	 simplification in itself, since there was no operation to
	 build in the first place.  */
      if (gimple_simplified_result_is_gimple_val (res_op))
	{
	  res_op->cond.cond = NULL_TREE;
	  return false;
	}

      /* Likewise if the operation would not trap.  */
      bool honor_trapv = (INTEGRAL_TYPE_P (res_op->type)
			  && TYPE_OVERFLOW_TRAPS (res_op->type));
      tree_code op_code = (tree_code) res_op->code;
      bool op_could_trap;

      /* COND_EXPR will trap if, and only if, the condition
	 traps and hence we have to check this.  For all other operations, we
	 don't need to consider the operands.  */
      if (op_code == COND_EXPR)
	op_could_trap = generic_expr_could_trap_p (res_op->ops[0]);
      else
	op_could_trap = operation_could_trap_p ((tree_code) res_op->code,
						FLOAT_TYPE_P (res_op->type),
						honor_trapv,
						res_op->op_or_null (1));

      if (!op_could_trap)
	{
	  /* Safe to execute unconditionally, so drop the condition.  */
	  res_op->cond.cond = NULL_TREE;
	  return false;
	}
    }

  /* If the "then" value is a gimple value and the "else" value matters,
     create a VEC_COND_EXPR between them, then see if it can be further
     simplified.  */
  gimple_match_op new_op;
  if (res_op->cond.else_value
      && VECTOR_TYPE_P (res_op->type)
      && gimple_simplified_result_is_gimple_val (res_op))
    {
      new_op.set_op (VEC_COND_EXPR, res_op->type,
		     res_op->cond.cond, res_op->ops[0],
		     res_op->cond.else_value);
      *res_op = new_op;
      return gimple_resimplify3 (seq, res_op, valueize);
    }

  /* Otherwise try rewriting the operation as an IFN_COND_* call.
     Again, this isn't a simplification in itself, since it's what
     RES_OP already described.  */
  if (convert_conditional_op (res_op, &new_op))
    *res_op = new_op;

  return false;
}
193 
/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify1 (gimple_seq *seq, gimple_match_op *res_op,
		    tree (*valueize)(tree))
{
  /* First try fully constant folding of the single operand.  */
  if (constant_for_folding (res_op->ops[0]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
	{
	  auto code = tree_code (res_op->code);
	  /* Only genuine unary expression codes are handled here.  */
	  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	      && TREE_CODE_LENGTH (code) == 1)
	    tem = const_unop (code, res_op->type, res_op->ops[0]);
	}
      else
	/* Otherwise it is a built-in or internal function call.  */
	tem = fold_const_call (combined_fn (res_op->code), res_op->type,
			       res_op->ops[0]);
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  /* Strip any overflow flag from the folded constant.  */
	  if (TREE_OVERFLOW_P (tem))
	    tem = drop_tree_overflow (tem);
	  res_op->set_value (tem);
	  /* The value may still be wrapped in a conditional operation.  */
	  maybe_resimplify_conditional_op (seq, res_op, valueize);
	  return true;
	}
    }

  /* Limit recursion, there are cases like PR80887 and others, for
     example when value-numbering presents us with unfolded expressions
     that we are really not prepared to handle without eventual
     oscillation like ((_50 + 0) + 8) where _50 gets mapped to _50
     itself as available expression.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
	fprintf (dump_file, "Aborting expression simplification due to "
		 "deep recursion\n");
      return false;
    }

  ++depth;
  /* Work on a copy so RES_OP is untouched when matching fails.  */
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
		       res_op->code, res_op->type, res_op->ops[0]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  /* Lastly try converting a conditional op to its UNCOND form.  */
  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return false;
}
258 
/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify2 (gimple_seq *seq, gimple_match_op *res_op,
		    tree (*valueize)(tree))
{
  /* First try fully constant folding when both operands are constants.  */
  if (constant_for_folding (res_op->ops[0])
      && constant_for_folding (res_op->ops[1]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
	{
	  auto code = tree_code (res_op->code);
	  /* Only genuine binary expression codes are handled here.  */
	  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	      && TREE_CODE_LENGTH (code) == 2)
	    tem = const_binop (code, res_op->type,
			       res_op->ops[0], res_op->ops[1]);
	}
      else
	/* Otherwise it is a built-in or internal function call.  */
	tem = fold_const_call (combined_fn (res_op->code), res_op->type,
			       res_op->ops[0], res_op->ops[1]);
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  /* Strip any overflow flag from the folded constant.  */
	  if (TREE_OVERFLOW_P (tem))
	    tem = drop_tree_overflow (tem);
	  res_op->set_value (tem);
	  /* The value may still be wrapped in a conditional operation.  */
	  maybe_resimplify_conditional_op (seq, res_op, valueize);
	  return true;
	}
    }

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  bool is_comparison
    = (res_op->code.is_tree_code ()
       && TREE_CODE_CLASS (tree_code (res_op->code)) == tcc_comparison);
  if ((is_comparison || commutative_binary_op_p (res_op->code, res_op->type))
      && tree_swap_operands_p (res_op->ops[0], res_op->ops[1]))
    {
      std::swap (res_op->ops[0], res_op->ops[1]);
      /* Swapping comparison operands requires mirroring the code.  */
      if (is_comparison)
	res_op->code = swap_tree_comparison (tree_code (res_op->code));
      canonicalized = true;
    }

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
	fprintf (dump_file, "Aborting expression simplification due to "
		 "deep recursion\n");
      return false;
    }

  ++depth;
  /* Work on a copy so RES_OP is untouched when matching fails.  */
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
		       res_op->code, res_op->type,
		       res_op->ops[0], res_op->ops[1]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  /* Lastly try converting a conditional op to its UNCOND form.  */
  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return canonicalized;
}
336 
/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify3 (gimple_seq *seq, gimple_match_op *res_op,
		    tree (*valueize)(tree))
{
  /* First try fully constant folding when all operands are constants.  */
  if (constant_for_folding (res_op->ops[0])
      && constant_for_folding (res_op->ops[1])
      && constant_for_folding (res_op->ops[2]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
	{
	  auto code = tree_code (res_op->code);
	  /* Only genuine ternary expression codes are handled here.  */
	  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	      && TREE_CODE_LENGTH (code) == 3)
	    tem = fold_ternary/*_to_constant*/ (code, res_op->type,
						res_op->ops[0], res_op->ops[1],
						res_op->ops[2]);
	}
      else
	/* Otherwise it is a built-in or internal function call.  */
	tem = fold_const_call (combined_fn (res_op->code), res_op->type,
			       res_op->ops[0], res_op->ops[1], res_op->ops[2]);
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  /* Strip any overflow flag from the folded constant.  */
	  if (TREE_OVERFLOW_P (tem))
	    tem = drop_tree_overflow (tem);
	  res_op->set_value (tem);
	  /* The value may still be wrapped in a conditional operation.  */
	  maybe_resimplify_conditional_op (seq, res_op, valueize);
	  return true;
	}
    }

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  /* ARGNO is the index of the first of a commutative operand pair,
     or negative if none exists.  */
  int argno = first_commutative_argument (res_op->code, res_op->type);
  if (argno >= 0
      && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1]))
    {
      std::swap (res_op->ops[argno], res_op->ops[argno + 1]);
      canonicalized = true;
    }

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
	fprintf (dump_file, "Aborting expression simplification due to "
		 "deep recursion\n");
      return false;
    }

  ++depth;
  /* Work on a copy so RES_OP is untouched when matching fails.  */
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
		       res_op->code, res_op->type,
		       res_op->ops[0], res_op->ops[1], res_op->ops[2]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  /* Lastly try converting a conditional op to its UNCOND form.  */
  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return canonicalized;
}
412 
/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

static bool
gimple_resimplify4 (gimple_seq *seq, gimple_match_op *res_op,
		    tree (*valueize)(tree))
{
  /* No constant folding is defined for four-operand functions.  */

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  /* ARGNO is the index of the first of a commutative operand pair,
     or negative if none exists.  */
  int argno = first_commutative_argument (res_op->code, res_op->type);
  if (argno >= 0
      && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1]))
    {
      std::swap (res_op->ops[argno], res_op->ops[argno + 1]);
      canonicalized = true;
    }

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
	fprintf (dump_file, "Aborting expression simplification due to "
		 "deep recursion\n");
      return false;
    }

  ++depth;
  /* Work on a copy so RES_OP is untouched when matching fails.  */
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
		       res_op->code, res_op->type,
		       res_op->ops[0], res_op->ops[1], res_op->ops[2],
		       res_op->ops[3]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  /* Lastly try converting a conditional op to its UNCOND form.  */
  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return canonicalized;
}
463 
464 /* Helper that matches and simplifies the toplevel result from
465    a gimple_simplify run (where we don't want to build
466    a stmt in case it's used in in-place folding).  Replaces
467    RES_OP with a simplified and/or canonicalized result and
468    returns whether any change was made.  */
469 
470 static bool
471 gimple_resimplify5 (gimple_seq *seq, gimple_match_op *res_op,
472 		    tree (*valueize)(tree))
473 {
474   /* No constant folding is defined for five-operand functions.  */
475 
476   /* Canonicalize operand order.  */
477   bool canonicalized = false;
478   int argno = first_commutative_argument (res_op->code, res_op->type);
479   if (argno >= 0
480       && tree_swap_operands_p (res_op->ops[argno], res_op->ops[argno + 1]))
481     {
482       std::swap (res_op->ops[argno], res_op->ops[argno + 1]);
483       canonicalized = true;
484     }
485 
486   gimple_match_op res_op2 (*res_op);
487   if (gimple_simplify (&res_op2, seq, valueize,
488 		       res_op->code, res_op->type,
489 		       res_op->ops[0], res_op->ops[1], res_op->ops[2],
490 		       res_op->ops[3], res_op->ops[4]))
491     {
492       *res_op = res_op2;
493       return true;
494     }
495 
496   if (maybe_resimplify_conditional_op (seq, res_op, valueize))
497     return true;
498 
499   return canonicalized;
500 }
501 
502 /* Match and simplify the toplevel valueized operation THIS.
503    Replaces THIS with a simplified and/or canonicalized result and
504    returns whether any change was made.  */
505 
506 bool
507 gimple_match_op::resimplify (gimple_seq *seq, tree (*valueize)(tree))
508 {
509   switch (num_ops)
510     {
511     case 1:
512       return gimple_resimplify1 (seq, this, valueize);
513     case 2:
514       return gimple_resimplify2 (seq, this, valueize);
515     case 3:
516       return gimple_resimplify3 (seq, this, valueize);
517     case 4:
518       return gimple_resimplify4 (seq, this, valueize);
519     case 5:
520       return gimple_resimplify5 (seq, this, valueize);
521     default:
522       gcc_unreachable ();
523     }
524 }
525 
526 /* If in GIMPLE the operation described by RES_OP should be single-rhs,
527    build a GENERIC tree for that expression and update RES_OP accordingly.  */
528 
529 void
530 maybe_build_generic_op (gimple_match_op *res_op)
531 {
532   tree_code code = (tree_code) res_op->code;
533   tree val;
534   switch (code)
535     {
536     case REALPART_EXPR:
537     case IMAGPART_EXPR:
538     case VIEW_CONVERT_EXPR:
539       val = build1 (code, res_op->type, res_op->ops[0]);
540       res_op->set_value (val);
541       break;
542     case BIT_FIELD_REF:
543       val = build3 (code, res_op->type, res_op->ops[0], res_op->ops[1],
544 		    res_op->ops[2]);
545       REF_REVERSE_STORAGE_ORDER (val) = res_op->reverse;
546       res_op->set_value (val);
547       break;
548     default:;
549     }
550 }
551 
552 tree (*mprts_hook) (gimple_match_op *);
553 
554 /* Try to build RES_OP, which is known to be a call to FN.  Return null
555    if the target doesn't support the function.  */
556 
557 static gcall *
558 build_call_internal (internal_fn fn, gimple_match_op *res_op)
559 {
560   if (direct_internal_fn_p (fn))
561     {
562       tree_pair types = direct_internal_fn_types (fn, res_op->type,
563 						  res_op->ops);
564       if (!direct_internal_fn_supported_p (fn, types, OPTIMIZE_FOR_BOTH))
565 	return NULL;
566     }
567   return gimple_build_call_internal (fn, res_op->num_ops,
568 				     res_op->op_or_null (0),
569 				     res_op->op_or_null (1),
570 				     res_op->op_or_null (2),
571 				     res_op->op_or_null (3),
572 				     res_op->op_or_null (4));
573 }
574 
575 /* Push the exploded expression described by RES_OP as a statement to
576    SEQ if necessary and return a gimple value denoting the value of the
577    expression.  If RES is not NULL then the result will be always RES
578    and even gimple values are pushed to SEQ.  */
579 
580 tree
581 maybe_push_res_to_seq (gimple_match_op *res_op, gimple_seq *seq, tree res)
582 {
583   tree *ops = res_op->ops;
584   unsigned num_ops = res_op->num_ops;
585 
586   /* The caller should have converted conditional operations into an UNCOND
587      form and resimplified as appropriate.  The conditional form only
588      survives this far if that conversion failed.  */
589   if (res_op->cond.cond)
590     return NULL_TREE;
591 
592   if (res_op->code.is_tree_code ())
593     {
594       if (!res
595 	  && gimple_simplified_result_is_gimple_val (res_op))
596 	return ops[0];
597       if (mprts_hook)
598 	{
599 	  tree tem = mprts_hook (res_op);
600 	  if (tem)
601 	    return tem;
602 	}
603     }
604 
605   if (!seq)
606     return NULL_TREE;
607 
608   /* Play safe and do not allow abnormals to be mentioned in
609      newly created statements.  */
610   for (unsigned int i = 0; i < num_ops; ++i)
611     if (TREE_CODE (ops[i]) == SSA_NAME
612 	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i]))
613       return NULL_TREE;
614 
615   if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
616     for (unsigned int i = 0; i < 2; ++i)
617       if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
618 	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i)))
619 	return NULL_TREE;
620 
621   if (res_op->code.is_tree_code ())
622     {
623       auto code = tree_code (res_op->code);
624       if (!res)
625 	{
626 	  if (gimple_in_ssa_p (cfun))
627 	    res = make_ssa_name (res_op->type);
628 	  else
629 	    res = create_tmp_reg (res_op->type);
630 	}
631       maybe_build_generic_op (res_op);
632       gimple *new_stmt = gimple_build_assign (res, code,
633 					      res_op->op_or_null (0),
634 					      res_op->op_or_null (1),
635 					      res_op->op_or_null (2));
636       gimple_seq_add_stmt_without_update (seq, new_stmt);
637       return res;
638     }
639   else
640     {
641       gcc_assert (num_ops != 0);
642       auto fn = combined_fn (res_op->code);
643       gcall *new_stmt = NULL;
644       if (internal_fn_p (fn))
645 	{
646 	  /* Generate the given function if we can.  */
647 	  internal_fn ifn = as_internal_fn (fn);
648 	  new_stmt = build_call_internal (ifn, res_op);
649 	  if (!new_stmt)
650 	    return NULL_TREE;
651 	}
652       else
653 	{
654 	  /* Find the function we want to call.  */
655 	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
656 	  if (!decl)
657 	    return NULL;
658 
659 	  /* We can't and should not emit calls to non-const functions.  */
660 	  if (!(flags_from_decl_or_type (decl) & ECF_CONST))
661 	    return NULL;
662 
663 	  new_stmt = gimple_build_call (decl, num_ops,
664 					res_op->op_or_null (0),
665 					res_op->op_or_null (1),
666 					res_op->op_or_null (2),
667 					res_op->op_or_null (3),
668 					res_op->op_or_null (4));
669 	}
670       if (!res)
671 	{
672 	  if (gimple_in_ssa_p (cfun))
673 	    res = make_ssa_name (res_op->type);
674 	  else
675 	    res = create_tmp_reg (res_op->type);
676 	}
677       gimple_call_set_lhs (new_stmt, res);
678       gimple_seq_add_stmt_without_update (seq, new_stmt);
679       return res;
680     }
681 }
682 
683 
684 /* Public API overloads follow for operation being tree_code or
685    built_in_function and for one to three operands or arguments.
686    They return NULL_TREE if nothing could be simplified or
687    the resulting simplified value with parts pushed to SEQ.
688    If SEQ is NULL then if the simplification needs to create
689    new stmts it will fail.  If VALUEIZE is non-NULL then all
690    SSA names will be valueized using that hook prior to
691    applying simplifications.  */
692 
693 /* Unary ops.  */
694 
695 tree
696 gimple_simplify (enum tree_code code, tree type,
697 		 tree op0,
698 		 gimple_seq *seq, tree (*valueize)(tree))
699 {
700   if (constant_for_folding (op0))
701     {
702       tree res = const_unop (code, type, op0);
703       if (res != NULL_TREE
704 	  && CONSTANT_CLASS_P (res))
705 	return res;
706     }
707 
708   gimple_match_op res_op;
709   if (!gimple_simplify (&res_op, seq, valueize, code, type, op0))
710     return NULL_TREE;
711   return maybe_push_res_to_seq (&res_op, seq);
712 }
713 
714 /* Binary ops.  */
715 
716 tree
717 gimple_simplify (enum tree_code code, tree type,
718 		 tree op0, tree op1,
719 		 gimple_seq *seq, tree (*valueize)(tree))
720 {
721   if (constant_for_folding (op0) && constant_for_folding (op1))
722     {
723       tree res = const_binop (code, type, op0, op1);
724       if (res != NULL_TREE
725 	  && CONSTANT_CLASS_P (res))
726 	return res;
727     }
728 
729   /* Canonicalize operand order both for matching and fallback stmt
730      generation.  */
731   if ((commutative_tree_code (code)
732        || TREE_CODE_CLASS (code) == tcc_comparison)
733       && tree_swap_operands_p (op0, op1))
734     {
735       std::swap (op0, op1);
736       if (TREE_CODE_CLASS (code) == tcc_comparison)
737 	code = swap_tree_comparison (code);
738     }
739 
740   gimple_match_op res_op;
741   if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1))
742     return NULL_TREE;
743   return maybe_push_res_to_seq (&res_op, seq);
744 }
745 
746 /* Ternary ops.  */
747 
748 tree
749 gimple_simplify (enum tree_code code, tree type,
750 		 tree op0, tree op1, tree op2,
751 		 gimple_seq *seq, tree (*valueize)(tree))
752 {
753   if (constant_for_folding (op0) && constant_for_folding (op1)
754       && constant_for_folding (op2))
755     {
756       tree res = fold_ternary/*_to_constant */ (code, type, op0, op1, op2);
757       if (res != NULL_TREE
758 	  && CONSTANT_CLASS_P (res))
759 	return res;
760     }
761 
762   /* Canonicalize operand order both for matching and fallback stmt
763      generation.  */
764   if (commutative_ternary_tree_code (code)
765       && tree_swap_operands_p (op0, op1))
766     std::swap (op0, op1);
767 
768   gimple_match_op res_op;
769   if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1, op2))
770     return NULL_TREE;
771   return maybe_push_res_to_seq (&res_op, seq);
772 }
773 
774 /* Builtin or internal function with one argument.  */
775 
776 tree
777 gimple_simplify (combined_fn fn, tree type,
778 		 tree arg0,
779 		 gimple_seq *seq, tree (*valueize)(tree))
780 {
781   if (constant_for_folding (arg0))
782     {
783       tree res = fold_const_call (fn, type, arg0);
784       if (res && CONSTANT_CLASS_P (res))
785 	return res;
786     }
787 
788   gimple_match_op res_op;
789   if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0))
790     return NULL_TREE;
791   return maybe_push_res_to_seq (&res_op, seq);
792 }
793 
794 /* Builtin or internal function with two arguments.  */
795 
796 tree
797 gimple_simplify (combined_fn fn, tree type,
798 		 tree arg0, tree arg1,
799 		 gimple_seq *seq, tree (*valueize)(tree))
800 {
801   if (constant_for_folding (arg0)
802       && constant_for_folding (arg1))
803     {
804       tree res = fold_const_call (fn, type, arg0, arg1);
805       if (res && CONSTANT_CLASS_P (res))
806 	return res;
807     }
808 
809   gimple_match_op res_op;
810   if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1))
811     return NULL_TREE;
812   return maybe_push_res_to_seq (&res_op, seq);
813 }
814 
815 /* Builtin or internal function with three arguments.  */
816 
817 tree
818 gimple_simplify (combined_fn fn, tree type,
819 		 tree arg0, tree arg1, tree arg2,
820 		 gimple_seq *seq, tree (*valueize)(tree))
821 {
822   if (constant_for_folding (arg0)
823       && constant_for_folding (arg1)
824       && constant_for_folding (arg2))
825     {
826       tree res = fold_const_call (fn, type, arg0, arg1, arg2);
827       if (res && CONSTANT_CLASS_P (res))
828 	return res;
829     }
830 
831   gimple_match_op res_op;
832   if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1, arg2))
833     return NULL_TREE;
834   return maybe_push_res_to_seq (&res_op, seq);
835 }
836 
837 /* Helper for gimple_simplify valueizing OP using VALUEIZE and setting
838    VALUEIZED to true if valueization changed OP.  */
839 
840 static inline tree
841 do_valueize (tree op, tree (*valueize)(tree), bool &valueized)
842 {
843   if (valueize && TREE_CODE (op) == SSA_NAME)
844     {
845       tree tem = valueize (op);
846       if (tem && tem != op)
847 	{
848 	  op = tem;
849 	  valueized = true;
850 	}
851     }
852   return op;
853 }
854 
/* If RES_OP is a call to a conditional internal function, try simplifying
   the associated unconditional operation and using the result to build
   a new conditional operation.  For example, if RES_OP is:

     IFN_COND_ADD (COND, A, B, ELSE)

   try simplifying (plus A B) and using the result to build a replacement
   for the whole IFN_COND_ADD.

   Return true if this approach led to a simplification, otherwise leave
   RES_OP unchanged (and so suitable for other simplifications).  When
   returning true, add any new statements to SEQ and use VALUEIZE as the
   valueization function.

   RES_OP is known to be a call to IFN.  */

static bool
try_conditional_simplification (internal_fn ifn, gimple_match_op *res_op,
				gimple_seq *seq, tree (*valueize) (tree))
{
  /* OP becomes the unconditional form of IFN: either a tree code or
     an unconditional internal function.  */
  code_helper op;
  tree_code code = conditional_internal_fn_code (ifn);
  if (code != ERROR_MARK)
    op = code;
  else
    {
      ifn = get_unconditional_internal_fn (ifn);
      if (ifn == IFN_LAST)
	return false;
      op = as_combined_fn (ifn);
    }

  /* Describe the operation with the condition (operand 0) and the
     "else" value (last operand) folded into the match condition and
     the remaining operands forming the unconditional operation.  */
  unsigned int num_ops = res_op->num_ops;
  gimple_match_op cond_op (gimple_match_cond (res_op->ops[0],
					      res_op->ops[num_ops - 1]),
			   op, res_op->type, num_ops - 2);

  /* Copy the operands following the condition into place.  */
  memcpy (cond_op.ops, res_op->ops + 1, (num_ops - 1) * sizeof *cond_op.ops);
  switch (num_ops - 2)
    {
    case 1:
      if (!gimple_resimplify1 (seq, &cond_op, valueize))
	return false;
      break;
    case 2:
      if (!gimple_resimplify2 (seq, &cond_op, valueize))
	return false;
      break;
    case 3:
      if (!gimple_resimplify3 (seq, &cond_op, valueize))
	return false;
      break;
    default:
      gcc_unreachable ();
    }
  /* Rebuild the conditional form from the simplified result.  */
  *res_op = cond_op;
  maybe_resimplify_conditional_op (seq, res_op, valueize);
  return true;
}
914 
/* Common subroutine of gimple_extract_op and gimple_simplify.  Try to
   describe STMT in RES_OP, returning true on success.  Before recording
   an operand, call:

   - VALUEIZE_CONDITION for a COND_EXPR condition
   - VALUEIZE_OP for every other top-level operand

   Both callbacks take a tree argument and return a tree.  Only
   GIMPLE_ASSIGN, GIMPLE_CALL and GIMPLE_COND statements are handled;
   everything else makes this function return false.  */

template<typename ValueizeOp, typename ValueizeCondition>
inline bool
gimple_extract (gimple *stmt, gimple_match_op *res_op,
		ValueizeOp valueize_op,
		ValueizeCondition valueize_condition)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree type = TREE_TYPE (gimple_assign_lhs (stmt));
	switch (gimple_assign_rhs_class (stmt))
	  {
	  case GIMPLE_SINGLE_RHS:
	    /* Only a few single-rhs forms are decomposable; other forms
	       (memory loads, constants, ...) fall through to `break'
	       below and are rejected.  */
	    if (code == REALPART_EXPR
		|| code == IMAGPART_EXPR
		|| code == VIEW_CONVERT_EXPR)
	      {
		/* Unary reference trees: record CODE with the valueized
		   operand of the wrapping reference.  */
		tree op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
		res_op->set_op (code, type, valueize_op (op0));
		return true;
	      }
	    else if (code == BIT_FIELD_REF)
	      {
		/* Operands 1 and 2 carry the bit size and position and
		   are recorded as-is; only operand 0 is valueized.  */
		tree rhs1 = gimple_assign_rhs1 (stmt);
		tree op0 = valueize_op (TREE_OPERAND (rhs1, 0));
		res_op->set_op (code, type, op0,
				TREE_OPERAND (rhs1, 1),
				TREE_OPERAND (rhs1, 2),
				REF_REVERSE_STORAGE_ORDER (rhs1));
		return true;
	      }
	    else if (code == SSA_NAME)
	      {
		/* A plain SSA name copy.  */
		tree op0 = gimple_assign_rhs1 (stmt);
		res_op->set_op (TREE_CODE (op0), type, valueize_op (op0));
		return true;
	      }
	    break;
	  case GIMPLE_UNARY_RHS:
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      res_op->set_op (code, type, valueize_op (rhs1));
	      return true;
	    }
	  case GIMPLE_BINARY_RHS:
	    {
	      tree rhs1 = valueize_op (gimple_assign_rhs1 (stmt));
	      tree rhs2 = valueize_op (gimple_assign_rhs2 (stmt));
	      res_op->set_op (code, type, rhs1, rhs2);
	      return true;
	    }
	  case GIMPLE_TERNARY_RHS:
	    {
	      /* An embedded comparison in a COND_EXPR condition goes
		 through VALUEIZE_CONDITION rather than VALUEIZE_OP.  */
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      if (code == COND_EXPR && COMPARISON_CLASS_P (rhs1))
		rhs1 = valueize_condition (rhs1);
	      else
		rhs1 = valueize_op (rhs1);
	      tree rhs2 = valueize_op (gimple_assign_rhs2 (stmt));
	      tree rhs3 = valueize_op (gimple_assign_rhs3 (stmt));
	      res_op->set_op (code, type, rhs1, rhs2, rhs3);
	      return true;
	    }
	  default:
	    gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_CALL:
      /* ???  This way we can't simplify calls with side-effects.  */
      if (gimple_call_lhs (stmt) != NULL_TREE
	  && gimple_call_num_args (stmt) >= 1
	  && gimple_call_num_args (stmt) <= 5)
	{
	  combined_fn cfn;
	  if (gimple_call_internal_p (stmt))
	    cfn = as_combined_fn (gimple_call_internal_fn (stmt));
	  else
	    {
	      /* For ordinary calls, require a direct call to a normal
		 built-in whose arguments match the declared prototype.  */
	      tree fn = gimple_call_fn (stmt);
	      if (!fn)
		return false;

	      fn = valueize_op (fn);
	      if (TREE_CODE (fn) != ADDR_EXPR
		  || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
		return false;

	      tree decl = TREE_OPERAND (fn, 0);
	      if (DECL_BUILT_IN_CLASS (decl) != BUILT_IN_NORMAL
		  || !gimple_builtin_call_types_compatible_p (stmt, decl))
		return false;

	      cfn = as_combined_fn (DECL_FUNCTION_CODE (decl));
	    }

	  unsigned int num_args = gimple_call_num_args (stmt);
	  res_op->set_op (cfn, TREE_TYPE (gimple_call_lhs (stmt)), num_args);
	  for (unsigned i = 0; i < num_args; ++i)
	    res_op->ops[i] = valueize_op (gimple_call_arg (stmt, i));
	  return true;
	}
      break;

    case GIMPLE_COND:
      {
	/* Describe the comparison the GIMPLE_COND performs.  */
	tree lhs = valueize_op (gimple_cond_lhs (stmt));
	tree rhs = valueize_op (gimple_cond_rhs (stmt));
	res_op->set_op (gimple_cond_code (stmt), boolean_type_node, lhs, rhs);
	return true;
      }

    default:
      break;
    }

  return false;
}
1045 
1046 /* Try to describe STMT in RES_OP, returning true on success.
1047    For GIMPLE_CONDs, describe the condition that is being tested.
1048    For GIMPLE_ASSIGNs, describe the rhs of the assignment.
1049    For GIMPLE_CALLs, describe the call.  */
1050 
1051 bool
1052 gimple_extract_op (gimple *stmt, gimple_match_op *res_op)
1053 {
1054   auto nop = [](tree op) { return op; };
1055   return gimple_extract (stmt, res_op, nop, nop);
1056 }
1057 
/* The main STMT based simplification entry.  It is used by the fold_stmt
   and the fold_stmt_to_constant APIs.

   On success, describe the simplified form in RES_OP and return true,
   appending any statements needed to build intermediate results to SEQ.
   TOP_VALUEIZE is applied to STMT's own operands before matching, while
   VALUEIZE is used when resimplifying intermediate results.  */

bool
gimple_simplify (gimple *stmt, gimple_match_op *res_op, gimple_seq *seq,
		 tree (*valueize)(tree), tree (*top_valueize)(tree))
{
  /* Set whenever valueization changed any operand; in that case we
     report success even if no further simplification applied.  */
  bool valueized = false;
  auto valueize_op = [&](tree op)
    {
      return do_valueize (op, top_valueize, valueized);
    };
  /* Valueize and try to simplify an embedded COND_EXPR condition.
     If it simplifies to a comparison, rebuild it as a tree; if it
     simplifies to a leaf (SSA name or constant), return that
     directly.  Otherwise fall back to plain valueization of OP.  */
  auto valueize_condition = [&](tree op) -> tree
    {
      bool cond_valueized = false;
      tree lhs = do_valueize (TREE_OPERAND (op, 0), top_valueize,
			      cond_valueized);
      tree rhs = do_valueize (TREE_OPERAND (op, 1), top_valueize,
			      cond_valueized);
      gimple_match_op res_op2 (res_op->cond, TREE_CODE (op),
			       TREE_TYPE (op), lhs, rhs);
      if ((gimple_resimplify2 (seq, &res_op2, valueize)
	   || cond_valueized)
	  && res_op2.code.is_tree_code ())
	{
	  auto code = tree_code (res_op2.code);
	  if (TREE_CODE_CLASS (code) == tcc_comparison)
	    {
	      valueized = true;
	      return build2 (code, TREE_TYPE (op),
			     res_op2.ops[0], res_op2.ops[1]);
	    }
	  else if (code == SSA_NAME
		   || code == INTEGER_CST
		   || code == VECTOR_CST)
	    {
	      valueized = true;
	      return res_op2.ops[0];
	    }
	}
      return valueize_op (op);
    };

  if (!gimple_extract (stmt, res_op, valueize_op, valueize_condition))
    return false;

  /* For internal-function calls, first try rewriting a conditional
     operation in terms of its unconditional equivalent.  */
  if (res_op->code.is_internal_fn ())
    {
      internal_fn ifn = internal_fn (res_op->code);
      if (try_conditional_simplification (ifn, res_op, seq, valueize))
	return true;
    }

  /* Skip resimplification for reverse-storage-order accesses and for
     operations without operands.  */
  if (!res_op->reverse
      && res_op->num_ops
      && res_op->resimplify (seq, valueize))
    return true;

  return valueized;
}
1118 
1119 /* Helper for the autogenerated code, valueize OP.  */
1120 
1121 inline tree
1122 do_valueize (tree (*valueize)(tree), tree op)
1123 {
1124   if (valueize && TREE_CODE (op) == SSA_NAME)
1125     {
1126       tree tem = valueize (op);
1127       if (tem)
1128 	return tem;
1129     }
1130   return op;
1131 }
1132 
1133 /* Helper for the autogenerated code, get at the definition of NAME when
1134    VALUEIZE allows that.  */
1135 
1136 inline gimple *
1137 get_def (tree (*valueize)(tree), tree name)
1138 {
1139   if (valueize && ! valueize (name))
1140     return NULL;
1141   return SSA_NAME_DEF_STMT (name);
1142 }
1143 
1144 /* Routine to determine if the types T1 and T2 are effectively
1145    the same for GIMPLE.  If T1 or T2 is not a type, the test
1146    applies to their TREE_TYPE.  */
1147 
1148 static inline bool
1149 types_match (tree t1, tree t2)
1150 {
1151   if (!TYPE_P (t1))
1152     t1 = TREE_TYPE (t1);
1153   if (!TYPE_P (t2))
1154     t2 = TREE_TYPE (t2);
1155 
1156   return types_compatible_p (t1, t2);
1157 }
1158 
1159 /* Return if T has a single use.  For GIMPLE, we also allow any
1160    non-SSA_NAME (ie constants) and zero uses to cope with uses
1161    that aren't linked up yet.  */
1162 
1163 static bool
1164 single_use (const_tree) ATTRIBUTE_PURE;
1165 
1166 static bool
1167 single_use (const_tree t)
1168 {
1169   if (TREE_CODE (t) != SSA_NAME)
1170     return true;
1171 
1172   /* Inline return has_zero_uses (t) || has_single_use (t);  */
1173   const ssa_use_operand_t *const head = &(SSA_NAME_IMM_USE_NODE (t));
1174   const ssa_use_operand_t *ptr;
1175   bool single = false;
1176 
1177   for (ptr = head->next; ptr != head; ptr = ptr->next)
1178     if (USE_STMT(ptr) && !is_gimple_debug (USE_STMT (ptr)))
1179       {
1180         if (single)
1181           return false;
1182 	single = true;
1183       }
1184   return true;
1185 }
1186 
1187 /* Return true if math operations should be canonicalized,
1188    e.g. sqrt(sqrt(x)) -> pow(x, 0.25).  */
1189 
1190 static inline bool
1191 canonicalize_math_p ()
1192 {
1193   return !cfun || (cfun->curr_properties & PROP_gimple_opt_math) == 0;
1194 }
1195 
1196 /* Return true if math operations that are beneficial only after
1197    vectorization should be canonicalized.  */
1198 
1199 static inline bool
1200 canonicalize_math_after_vectorization_p ()
1201 {
1202   return !cfun || (cfun->curr_properties & PROP_gimple_lvec) != 0;
1203 }
1204 
1205 /* Return true if we can still perform transformations that may introduce
1206    vector operations that are not supported by the target. Vector lowering
1207    normally handles those, but after that pass, it becomes unsafe.  */
1208 
1209 static inline bool
1210 optimize_vectors_before_lowering_p ()
1211 {
1212   return !cfun || (cfun->curr_properties & PROP_gimple_lvec) == 0;
1213 }
1214 
/* Return true if pow(cst, x) should be optimized into exp(log(cst) * x).
   As a workaround for SPEC CPU2017 628.pop2_s, don't do it if arg0
   is an exact integer, arg1 = phi_res +/- cst1 and phi_res = PHI <cst2, ...>
   where cst2 +/- cst1 is an exact integer, because then pow (arg0, arg1)
   will likely be exact, while exp (log (arg0) * arg1) might be not.
   Also don't do it if arg1 is phi_res above and cst2 is an exact integer.  */

static bool
optimize_pow_to_exp (tree arg0, tree arg1)
{
  gcc_assert (TREE_CODE (arg0) == REAL_CST);
  /* A non-integer base cannot hit the exactness problem described
     above, so the transformation is fine.  */
  if (!real_isinteger (TREE_REAL_CST_PTR (arg0), TYPE_MODE (TREE_TYPE (arg0))))
    return true;

  if (TREE_CODE (arg1) != SSA_NAME)
    return true;

  gimple *def = SSA_NAME_DEF_STMT (arg1);
  gphi *phi = dyn_cast <gphi *> (def);
  tree cst1 = NULL_TREE;
  enum tree_code code = ERROR_MARK;
  if (!phi)
    {
      /* arg1 is not itself a PHI result; look for the
	 arg1 = phi_res +/- cst1 shape instead.  */
      if (!is_gimple_assign (def))
	return true;
      code = gimple_assign_rhs_code (def);
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  break;
	default:
	  return true;
	}
      if (TREE_CODE (gimple_assign_rhs1 (def)) != SSA_NAME
	  || TREE_CODE (gimple_assign_rhs2 (def)) != REAL_CST)
	return true;

      cst1 = gimple_assign_rhs2 (def);

      phi = dyn_cast <gphi *> (SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def)));
      if (!phi)
	return true;
    }

  /* Pick out the unique REAL_CST among the PHI arguments, if any;
     two distinct constants mean no single cst2 applies.  */
  tree cst2 = NULL_TREE;
  int n = gimple_phi_num_args (phi);
  for (int i = 0; i < n; i++)
    {
      tree arg = PHI_ARG_DEF (phi, i);
      if (TREE_CODE (arg) != REAL_CST)
	continue;
      else if (cst2 == NULL_TREE)
	cst2 = arg;
      else if (!operand_equal_p (cst2, arg, 0))
	return true;
    }

  /* Fold in the +/- cst1 adjustment when arg1 was phi_res +/- cst1.  */
  if (cst1 && cst2)
    cst2 = const_binop (code, TREE_TYPE (cst2), cst2, cst1);
  /* Refuse the transformation when the resulting constant is an exact
     integer: pow is then likely exact while exp (log ...) may not be.  */
  if (cst2
      && TREE_CODE (cst2) == REAL_CST
      && real_isinteger (TREE_REAL_CST_PTR (cst2),
			 TYPE_MODE (TREE_TYPE (cst2))))
    return false;
  return true;
}
1282 
1283 /* Return true if a division INNER_DIV / DIVISOR where INNER_DIV
1284    is another division can be optimized.  Don't optimize if INNER_DIV
1285    is used in a TRUNC_MOD_EXPR with DIVISOR as second operand.  */
1286 
1287 static bool
1288 optimize_successive_divisions_p (tree divisor, tree inner_div)
1289 {
1290   if (!gimple_in_ssa_p (cfun))
1291     return false;
1292 
1293   imm_use_iterator imm_iter;
1294   use_operand_p use_p;
1295   FOR_EACH_IMM_USE_FAST (use_p, imm_iter, inner_div)
1296     {
1297       gimple *use_stmt = USE_STMT (use_p);
1298       if (!is_gimple_assign (use_stmt)
1299 	  || gimple_assign_rhs_code (use_stmt) != TRUNC_MOD_EXPR
1300 	  || !operand_equal_p (gimple_assign_rhs2 (use_stmt), divisor, 0))
1301 	continue;
1302       return false;
1303     }
1304   return true;
1305 }
1306 
1307 /* Return a canonical form for CODE when operating on TYPE.  The idea
1308    is to remove redundant ways of representing the same operation so
1309    that code_helpers can be hashed and compared for equality.
1310 
1311    The only current canonicalization is to replace built-in functions
1312    with internal functions, in cases where internal-fn.def defines
1313    such an internal function.
1314 
1315    Note that the new code_helper cannot necessarily be used in place of
1316    the original code_helper.  For example, the new code_helper might be
1317    an internal function that the target does not support.  */
1318 
1319 code_helper
1320 canonicalize_code (code_helper code, tree type)
1321 {
1322   if (code.is_fn_code ())
1323     return associated_internal_fn (combined_fn (code), type);
1324   return code;
1325 }
1326 
1327 /* Return true if CODE is a binary operation and if CODE is commutative when
1328    operating on type TYPE.  */
1329 
1330 bool
1331 commutative_binary_op_p (code_helper code, tree type)
1332 {
1333   if (code.is_tree_code ())
1334     return commutative_tree_code (tree_code (code));
1335   auto cfn = combined_fn (code);
1336   return commutative_binary_fn_p (associated_internal_fn (cfn, type));
1337 }
1338 
1339 /* Return true if CODE represents a ternary operation and if the first two
1340    operands are commutative when CODE is operating on TYPE.  */
1341 
1342 bool
1343 commutative_ternary_op_p (code_helper code, tree type)
1344 {
1345   if (code.is_tree_code ())
1346     return commutative_ternary_tree_code (tree_code (code));
1347   auto cfn = combined_fn (code);
1348   return commutative_ternary_fn_p (associated_internal_fn (cfn, type));
1349 }
1350 
1351 /* If CODE is commutative in two consecutive operands, return the
1352    index of the first, otherwise return -1.  */
1353 
1354 int
1355 first_commutative_argument (code_helper code, tree type)
1356 {
1357   if (code.is_tree_code ())
1358     {
1359       auto tcode = tree_code (code);
1360       if (commutative_tree_code (tcode)
1361 	  || commutative_ternary_tree_code (tcode))
1362 	return 0;
1363       return -1;
1364     }
1365   auto cfn = combined_fn (code);
1366   return first_commutative_argument (associated_internal_fn (cfn, type));
1367 }
1368 
1369 /* Return true if CODE is a binary operation that is associative when
1370    operating on type TYPE.  */
1371 
1372 bool
1373 associative_binary_op_p (code_helper code, tree type)
1374 {
1375   if (code.is_tree_code ())
1376     return associative_tree_code (tree_code (code));
1377   auto cfn = combined_fn (code);
1378   return associative_binary_fn_p (associated_internal_fn (cfn, type));
1379 }
1380 
1381 /* Return true if the target directly supports operation CODE on type TYPE.
1382    QUERY_TYPE acts as for optab_for_tree_code.  */
1383 
1384 bool
1385 directly_supported_p (code_helper code, tree type, optab_subtype query_type)
1386 {
1387   if (code.is_tree_code ())
1388     {
1389       direct_optab optab = optab_for_tree_code (tree_code (code), type,
1390 						query_type);
1391       return (optab != unknown_optab
1392 	      && optab_handler (optab, TYPE_MODE (type)) != CODE_FOR_nothing);
1393     }
1394   gcc_assert (query_type == optab_default
1395 	      || (query_type == optab_vector && VECTOR_TYPE_P (type))
1396 	      || (query_type == optab_scalar && !VECTOR_TYPE_P (type)));
1397   internal_fn ifn = associated_internal_fn (combined_fn (code), type);
1398   return (direct_internal_fn_p (ifn)
1399 	  && direct_internal_fn_supported_p (ifn, type, OPTIMIZE_FOR_SPEED));
1400 }
1401 
1402 /* A wrapper around the internal-fn.cc versions of get_conditional_internal_fn
1403    for a code_helper CODE operating on type TYPE.  */
1404 
1405 internal_fn
1406 get_conditional_internal_fn (code_helper code, tree type)
1407 {
1408   if (code.is_tree_code ())
1409     return get_conditional_internal_fn (tree_code (code));
1410   auto cfn = combined_fn (code);
1411   return get_conditional_internal_fn (associated_internal_fn (cfn, type));
1412 }
1413