xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/gimple.c (revision 82d56013d7b633d116a93943de88e08335357a7c)
1 /* Gimple IR support functions.
2 
3    Copyright (C) 2007-2019 Free Software Foundation, Inc.
4    Contributed by Aldy Hernandez <aldyh@redhat.com>
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12 
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3.  If not see
20 <http://www.gnu.org/licenses/>.  */
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "ssa.h"
29 #include "cgraph.h"
30 #include "diagnostic.h"
31 #include "alias.h"
32 #include "fold-const.h"
33 #include "calls.h"
34 #include "stor-layout.h"
35 #include "internal-fn.h"
36 #include "tree-eh.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
39 #include "gimplify.h"
40 #include "target.h"
41 #include "builtins.h"
42 #include "selftest.h"
43 #include "gimple-pretty-print.h"
44 #include "stringpool.h"
45 #include "attribs.h"
46 #include "asan.h"
47 #include "langhooks.h"
48 
49 
/* All the tuples have their operand vector (if present) at the very bottom
   of the structure.  Therefore, the offset required to find the
   operands vector is the size of the structure minus the size of the 1
   element tree array at the end (see gimple_ops).  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
	(HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
/* Byte offset of the operand vector inside each tuple structure, indexed
   by gimple_statement_structure_enum; 0 for tuples with no operands.  */
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT
60 
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof (struct STRUCT),
/* Size in bytes of each tuple structure, indexed by
   gimple_statement_structure_enum (see gimple_size).  */
static const size_t gsstruct_code_size[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT
66 
#define DEFGSCODE(SYM, NAME, GSSCODE)	NAME,
/* Human-readable name of each GIMPLE code, indexed by gimple_code.  */
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE
72 
#define DEFGSCODE(SYM, NAME, GSSCODE)	GSSCODE,
/* Map from gimple_code to the tuple structure that implements it,
   indexed by gimple_code (see gss_for_code).  */
EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
#include "gimple.def"
};
#undef DEFGSCODE
78 
/* Gimple stats.  */

/* Number of tuples and total bytes allocated per allocation kind;
   updated by gimple_alloc when GATHER_STATISTICS is enabled.  */
uint64_t gimple_alloc_counts[(int) gimple_alloc_kind_all];
uint64_t gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "everything else"
};
91 
/* Static gimple tuple members.  Out-of-class definitions for the
   static constant code tags declared in the tuple classes.  */
const enum gimple_code gassign::code_;
const enum gimple_code gcall::code_;
const enum gimple_code gcond::code_;
97 
98 /* Gimple tuple constructors.
99    Note: Any constructor taking a ``gimple_seq'' as a parameter, can
100    be passed a NULL to start with an empty sequence.  */
101 
/* Set the code for statement G to CODE.  Used by gimple_alloc when
   initializing a freshly allocated tuple.  */

static inline void
gimple_set_code (gimple *g, enum gimple_code code)
{
  g->code = code;
}
109 
/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  This is the base structure size only; gimple_alloc adds
   room for any extra operands.  */

static inline size_t
gimple_size (enum gimple_code code)
{
  return gsstruct_code_size[gss_for_code (code)];
}
118 
119 /* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
120    operands.  */
121 
122 gimple *
123 gimple_alloc (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
124 {
125   size_t size;
126   gimple *stmt;
127 
128   size = gimple_size (code);
129   if (num_ops > 0)
130     size += sizeof (tree) * (num_ops - 1);
131 
132   if (GATHER_STATISTICS)
133     {
134       enum gimple_alloc_kind kind = gimple_alloc_kind (code);
135       gimple_alloc_counts[(int) kind]++;
136       gimple_alloc_sizes[(int) kind] += size;
137     }
138 
139   stmt = ggc_alloc_cleared_gimple_statement_stat (size PASS_MEM_STAT);
140   gimple_set_code (stmt, code);
141   gimple_set_num_ops (stmt, num_ops);
142 
143   /* Do not call gimple_set_modified here as it has other side
144      effects and this tuple is still not completely built.  */
145   stmt->modified = 1;
146   gimple_init_singleton (stmt);
147 
148   return stmt;
149 }
150 
/* Set SUBCODE to be the code of the expression computed by statement G.
   The subcode is stored in a 16-bit field of the tuple.  */

static inline void
gimple_set_subcode (gimple *g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->subcode = subcode;
}
161 
162 
163 
/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the subcode
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

/* Worker for the gimple_build_with_ops macro, which supplies the
   memory-statistics information for the caller's location.  */

static gimple *
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
		            unsigned num_ops MEM_STAT_DECL)
{
  gimple *s = gimple_alloc (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}
180 
181 
182 /* Build a GIMPLE_RETURN statement returning RETVAL.  */
183 
184 greturn *
185 gimple_build_return (tree retval)
186 {
187   greturn *s
188     = as_a <greturn *> (gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK,
189 					       2));
190   if (retval)
191     gimple_return_set_retval (s, retval);
192   return s;
193 }
194 
195 /* Reset alias information on call S.  */
196 
197 void
198 gimple_call_reset_alias_info (gcall *s)
199 {
200   if (gimple_call_flags (s) & ECF_CONST)
201     memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
202   else
203     pt_solution_reset (gimple_call_use_set (s));
204   if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
205     memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
206   else
207     pt_solution_reset (gimple_call_clobber_set (s));
208 }
209 
210 /* Helper for gimple_build_call, gimple_build_call_valist,
211    gimple_build_call_vec and gimple_build_call_from_tree.  Build the basic
212    components of a GIMPLE_CALL statement to function FN with NARGS
213    arguments.  */
214 
215 static inline gcall *
216 gimple_build_call_1 (tree fn, unsigned nargs)
217 {
218   gcall *s
219     = as_a <gcall *> (gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK,
220 					     nargs + 3));
221   if (TREE_CODE (fn) == FUNCTION_DECL)
222     fn = build_fold_addr_expr (fn);
223   gimple_set_op (s, 1, fn);
224   gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
225   gimple_call_reset_alias_info (s);
226   return s;
227 }
228 
229 
230 /* Build a GIMPLE_CALL statement to function FN with the arguments
231    specified in vector ARGS.  */
232 
233 gcall *
234 gimple_build_call_vec (tree fn, vec<tree> args)
235 {
236   unsigned i;
237   unsigned nargs = args.length ();
238   gcall *call = gimple_build_call_1 (fn, nargs);
239 
240   for (i = 0; i < nargs; i++)
241     gimple_call_set_arg (call, i, args[i]);
242 
243   return call;
244 }
245 
246 
247 /* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
248    arguments.  The ... are the arguments.  */
249 
250 gcall *
251 gimple_build_call (tree fn, unsigned nargs, ...)
252 {
253   va_list ap;
254   gcall *call;
255   unsigned i;
256 
257   gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
258 
259   call = gimple_build_call_1 (fn, nargs);
260 
261   va_start (ap, nargs);
262   for (i = 0; i < nargs; i++)
263     gimple_call_set_arg (call, i, va_arg (ap, tree));
264   va_end (ap);
265 
266   return call;
267 }
268 
269 
270 /* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
271    arguments.  AP contains the arguments.  */
272 
273 gcall *
274 gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
275 {
276   gcall *call;
277   unsigned i;
278 
279   gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
280 
281   call = gimple_build_call_1 (fn, nargs);
282 
283   for (i = 0; i < nargs; i++)
284     gimple_call_set_arg (call, i, va_arg (ap, tree));
285 
286   return call;
287 }
288 
289 
290 /* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
291    Build the basic components of a GIMPLE_CALL statement to internal
292    function FN with NARGS arguments.  */
293 
294 static inline gcall *
295 gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
296 {
297   gcall *s
298     = as_a <gcall *> (gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK,
299 					     nargs + 3));
300   s->subcode |= GF_CALL_INTERNAL;
301   gimple_call_set_internal_fn (s, fn);
302   gimple_call_reset_alias_info (s);
303   return s;
304 }
305 
306 
307 /* Build a GIMPLE_CALL statement to internal function FN.  NARGS is
308    the number of arguments.  The ... are the arguments.  */
309 
310 gcall *
311 gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
312 {
313   va_list ap;
314   gcall *call;
315   unsigned i;
316 
317   call = gimple_build_call_internal_1 (fn, nargs);
318   va_start (ap, nargs);
319   for (i = 0; i < nargs; i++)
320     gimple_call_set_arg (call, i, va_arg (ap, tree));
321   va_end (ap);
322 
323   return call;
324 }
325 
326 
327 /* Build a GIMPLE_CALL statement to internal function FN with the arguments
328    specified in vector ARGS.  */
329 
330 gcall *
331 gimple_build_call_internal_vec (enum internal_fn fn, vec<tree> args)
332 {
333   unsigned i, nargs;
334   gcall *call;
335 
336   nargs = args.length ();
337   call = gimple_build_call_internal_1 (fn, nargs);
338   for (i = 0; i < nargs; i++)
339     gimple_call_set_arg (call, i, args[i]);
340 
341   return call;
342 }
343 
344 
/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  FNPTRTYPE, if non-NULL, is the pointer-to-function type
   to record as the call's fntype.  */

gcall *
gimple_build_call_from_tree (tree t, tree fnptrtype)
{
  unsigned i, nargs;
  gcall *call;

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);

  tree fndecl = NULL_TREE;
  /* A NULL CALL_EXPR_FN denotes a call to an internal function.  */
  if (CALL_EXPR_FN (t) == NULL_TREE)
    call = gimple_build_call_internal_1 (CALL_EXPR_IFN (t), nargs);
  else
    {
      fndecl = get_callee_fndecl (t);
      call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);
    }

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));
  gimple_set_location (call, EXPR_LOCATION (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_must_tail (call, CALL_EXPR_MUST_TAIL_CALL (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  /* NOTE(review): only one of alloca_for_var / from_thunk is set below,
     never both — presumably they occupy shared state; confirm against
     the flag definitions in gimple.h before reordering.  */
  if (fndecl
      && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
      && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
    gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
  else
    gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_call_set_nothrow (call, TREE_NOTHROW (t));
  gimple_call_set_by_descriptor (call, CALL_EXPR_BY_DESCRIPTOR (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));

  if (fnptrtype)
    {
      gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));

      /* Check if it's an indirect CALL and the type has the
	 nocf_check attribute.  In that case propagate the information
	 to the gimple CALL insn.  */
      if (!fndecl)
	{
	  gcc_assert (POINTER_TYPE_P (fnptrtype));
	  tree fntype = TREE_TYPE (fnptrtype);

	  if (lookup_attribute ("nocf_check", TYPE_ATTRIBUTES (fntype)))
	    gimple_call_set_nocf_check (call, TRUE);
	}
    }

  return call;
}
409 
410 
411 /* Build a GIMPLE_ASSIGN statement.
412 
413    LHS of the assignment.
414    RHS of the assignment which can be unary or binary.  */
415 
416 gassign *
417 gimple_build_assign (tree lhs, tree rhs MEM_STAT_DECL)
418 {
419   enum tree_code subcode;
420   tree op1, op2, op3;
421 
422   extract_ops_from_tree (rhs, &subcode, &op1, &op2, &op3);
423   return gimple_build_assign (lhs, subcode, op1, op2, op3 PASS_MEM_STAT);
424 }
425 
426 
427 /* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
428    OP1, OP2 and OP3.  */
429 
430 static inline gassign *
431 gimple_build_assign_1 (tree lhs, enum tree_code subcode, tree op1,
432 		       tree op2, tree op3 MEM_STAT_DECL)
433 {
434   unsigned num_ops;
435   gassign *p;
436 
437   /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
438      code).  */
439   num_ops = get_gimple_rhs_num_ops (subcode) + 1;
440 
441   p = as_a <gassign *> (
442         gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
443 				    PASS_MEM_STAT));
444   gimple_assign_set_lhs (p, lhs);
445   gimple_assign_set_rhs1 (p, op1);
446   if (op2)
447     {
448       gcc_assert (num_ops > 2);
449       gimple_assign_set_rhs2 (p, op2);
450     }
451 
452   if (op3)
453     {
454       gcc_assert (num_ops > 3);
455       gimple_assign_set_rhs3 (p, op3);
456     }
457 
458   return p;
459 }
460 
/* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
   OP1, OP2 and OP3.  Thin public wrapper around gimple_build_assign_1.  */

gassign *
gimple_build_assign (tree lhs, enum tree_code subcode, tree op1,
		     tree op2, tree op3 MEM_STAT_DECL)
{
  return gimple_build_assign_1 (lhs, subcode, op1, op2, op3 PASS_MEM_STAT);
}
470 
/* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
   OP1 and OP2.  Binary-RHS convenience overload.  */

gassign *
gimple_build_assign (tree lhs, enum tree_code subcode, tree op1,
		     tree op2 MEM_STAT_DECL)
{
  return gimple_build_assign_1 (lhs, subcode, op1, op2, NULL_TREE
				PASS_MEM_STAT);
}
481 
/* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operand OP1.
   Unary-RHS convenience overload.  */

gassign *
gimple_build_assign (tree lhs, enum tree_code subcode, tree op1 MEM_STAT_DECL)
{
  return gimple_build_assign_1 (lhs, subcode, op1, NULL_TREE, NULL_TREE
				PASS_MEM_STAT);
}
490 
491 
492 /* Build a GIMPLE_COND statement.
493 
494    PRED is the condition used to compare LHS and the RHS.
495    T_LABEL is the label to jump to if the condition is true.
496    F_LABEL is the label to jump to otherwise.  */
497 
498 gcond *
499 gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
500 		   tree t_label, tree f_label)
501 {
502   gcond *p;
503 
504   gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
505   p = as_a <gcond *> (gimple_build_with_ops (GIMPLE_COND, pred_code, 4));
506   gimple_cond_set_lhs (p, lhs);
507   gimple_cond_set_rhs (p, rhs);
508   gimple_cond_set_true_label (p, t_label);
509   gimple_cond_set_false_label (p, f_label);
510   return p;
511 }
512 
513 /* Build a GIMPLE_COND statement from the conditional expression tree
514    COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */
515 
516 gcond *
517 gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
518 {
519   enum tree_code code;
520   tree lhs, rhs;
521 
522   gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
523   return gimple_build_cond (code, lhs, rhs, t_label, f_label);
524 }
525 
526 /* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
527    boolean expression tree COND.  */
528 
529 void
530 gimple_cond_set_condition_from_tree (gcond *stmt, tree cond)
531 {
532   enum tree_code code;
533   tree lhs, rhs;
534 
535   gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
536   gimple_cond_set_condition (stmt, code, lhs, rhs);
537 }
538 
539 /* Build a GIMPLE_LABEL statement for LABEL.  */
540 
541 glabel *
542 gimple_build_label (tree label)
543 {
544   glabel *p
545     = as_a <glabel *> (gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1));
546   gimple_label_set_label (p, label);
547   return p;
548 }
549 
550 /* Build a GIMPLE_GOTO statement to label DEST.  */
551 
552 ggoto *
553 gimple_build_goto (tree dest)
554 {
555   ggoto *p
556     = as_a <ggoto *> (gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1));
557   gimple_goto_set_dest (p, dest);
558   return p;
559 }
560 
561 
/* Build a GIMPLE_NOP statement (a placeholder with no operands).  */

gimple *
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}
569 
570 
571 /* Build a GIMPLE_BIND statement.
572    VARS are the variables in BODY.
573    BLOCK is the containing block.  */
574 
575 gbind *
576 gimple_build_bind (tree vars, gimple_seq body, tree block)
577 {
578   gbind *p = as_a <gbind *> (gimple_alloc (GIMPLE_BIND, 0));
579   gimple_bind_set_vars (p, vars);
580   if (body)
581     gimple_bind_set_body (p, body);
582   if (block)
583     gimple_bind_set_block (p, block);
584   return p;
585 }
586 
587 /* Helper function to set the simple fields of a asm stmt.
588 
589    STRING is a pointer to a string that is the asm blocks assembly code.
590    NINPUT is the number of register inputs.
591    NOUTPUT is the number of register outputs.
592    NCLOBBERS is the number of clobbered registers.
593    */
594 
595 static inline gasm *
596 gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
597                     unsigned nclobbers, unsigned nlabels)
598 {
599   gasm *p;
600   int size = strlen (string);
601 
602   /* ASMs with labels cannot have outputs.  This should have been
603      enforced by the front end.  */
604   gcc_assert (nlabels == 0 || noutputs == 0);
605 
606   p = as_a <gasm *> (
607         gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
608 			       ninputs + noutputs + nclobbers + nlabels));
609 
610   p->ni = ninputs;
611   p->no = noutputs;
612   p->nc = nclobbers;
613   p->nl = nlabels;
614   p->string = ggc_alloc_string (string, size);
615 
616   if (GATHER_STATISTICS)
617     gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
618 
619   return p;
620 }
621 
622 /* Build a GIMPLE_ASM statement.
623 
624    STRING is the assembly code.
625    NINPUT is the number of register inputs.
626    NOUTPUT is the number of register outputs.
627    NCLOBBERS is the number of clobbered registers.
628    INPUTS is a vector of the input register parameters.
629    OUTPUTS is a vector of the output register parameters.
630    CLOBBERS is a vector of the clobbered register parameters.
631    LABELS is a vector of destination labels.  */
632 
633 gasm *
634 gimple_build_asm_vec (const char *string, vec<tree, va_gc> *inputs,
635                       vec<tree, va_gc> *outputs, vec<tree, va_gc> *clobbers,
636 		      vec<tree, va_gc> *labels)
637 {
638   gasm *p;
639   unsigned i;
640 
641   p = gimple_build_asm_1 (string,
642                           vec_safe_length (inputs),
643                           vec_safe_length (outputs),
644                           vec_safe_length (clobbers),
645 			  vec_safe_length (labels));
646 
647   for (i = 0; i < vec_safe_length (inputs); i++)
648     gimple_asm_set_input_op (p, i, (*inputs)[i]);
649 
650   for (i = 0; i < vec_safe_length (outputs); i++)
651     gimple_asm_set_output_op (p, i, (*outputs)[i]);
652 
653   for (i = 0; i < vec_safe_length (clobbers); i++)
654     gimple_asm_set_clobber_op (p, i, (*clobbers)[i]);
655 
656   for (i = 0; i < vec_safe_length (labels); i++)
657     gimple_asm_set_label_op (p, i, (*labels)[i]);
658 
659   return p;
660 }
661 
662 /* Build a GIMPLE_CATCH statement.
663 
664   TYPES are the catch types.
665   HANDLER is the exception handler.  */
666 
667 gcatch *
668 gimple_build_catch (tree types, gimple_seq handler)
669 {
670   gcatch *p = as_a <gcatch *> (gimple_alloc (GIMPLE_CATCH, 0));
671   gimple_catch_set_types (p, types);
672   if (handler)
673     gimple_catch_set_handler (p, handler);
674 
675   return p;
676 }
677 
678 /* Build a GIMPLE_EH_FILTER statement.
679 
680    TYPES are the filter's types.
681    FAILURE is the filter's failure action.  */
682 
683 geh_filter *
684 gimple_build_eh_filter (tree types, gimple_seq failure)
685 {
686   geh_filter *p = as_a <geh_filter *> (gimple_alloc (GIMPLE_EH_FILTER, 0));
687   gimple_eh_filter_set_types (p, types);
688   if (failure)
689     gimple_eh_filter_set_failure (p, failure);
690 
691   return p;
692 }
693 
694 /* Build a GIMPLE_EH_MUST_NOT_THROW statement.  */
695 
696 geh_mnt *
697 gimple_build_eh_must_not_throw (tree decl)
698 {
699   geh_mnt *p = as_a <geh_mnt *> (gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0));
700 
701   gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
702   gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
703   gimple_eh_must_not_throw_set_fndecl (p, decl);
704 
705   return p;
706 }
707 
708 /* Build a GIMPLE_EH_ELSE statement.  */
709 
710 geh_else *
711 gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
712 {
713   geh_else *p = as_a <geh_else *> (gimple_alloc (GIMPLE_EH_ELSE, 0));
714   gimple_eh_else_set_n_body (p, n_body);
715   gimple_eh_else_set_e_body (p, e_body);
716   return p;
717 }
718 
719 /* Build a GIMPLE_TRY statement.
720 
721    EVAL is the expression to evaluate.
722    CLEANUP is the cleanup expression.
723    KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
724    whether this is a try/catch or a try/finally respectively.  */
725 
726 gtry *
727 gimple_build_try (gimple_seq eval, gimple_seq cleanup,
728     		  enum gimple_try_flags kind)
729 {
730   gtry *p;
731 
732   gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
733   p = as_a <gtry *> (gimple_alloc (GIMPLE_TRY, 0));
734   gimple_set_subcode (p, kind);
735   if (eval)
736     gimple_try_set_eval (p, eval);
737   if (cleanup)
738     gimple_try_set_cleanup (p, cleanup);
739 
740   return p;
741 }
742 
743 /* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.
744 
745    CLEANUP is the cleanup expression.  */
746 
747 gimple *
748 gimple_build_wce (gimple_seq cleanup)
749 {
750   gimple *p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
751   if (cleanup)
752     gimple_wce_set_cleanup (p, cleanup);
753 
754   return p;
755 }
756 
757 
758 /* Build a GIMPLE_RESX statement.  */
759 
760 gresx *
761 gimple_build_resx (int region)
762 {
763   gresx *p
764     = as_a <gresx *> (gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0));
765   p->region = region;
766   return p;
767 }
768 
769 
770 /* The helper for constructing a gimple switch statement.
771    INDEX is the switch's index.
772    NLABELS is the number of labels in the switch excluding the default.
773    DEFAULT_LABEL is the default label for the switch statement.  */
774 
775 gswitch *
776 gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
777 {
778   /* nlabels + 1 default label + 1 index.  */
779   gcc_checking_assert (default_label);
780   gswitch *p = as_a <gswitch *> (gimple_build_with_ops (GIMPLE_SWITCH,
781 							ERROR_MARK,
782 							1 + 1 + nlabels));
783   gimple_switch_set_index (p, index);
784   gimple_switch_set_default_label (p, default_label);
785   return p;
786 }
787 
788 /* Build a GIMPLE_SWITCH statement.
789 
790    INDEX is the switch's index.
791    DEFAULT_LABEL is the default label
792    ARGS is a vector of labels excluding the default.  */
793 
794 gswitch *
795 gimple_build_switch (tree index, tree default_label, vec<tree> args)
796 {
797   unsigned i, nlabels = args.length ();
798 
799   gswitch *p = gimple_build_switch_nlabels (nlabels, index, default_label);
800 
801   /* Copy the labels from the vector to the switch statement.  */
802   for (i = 0; i < nlabels; i++)
803     gimple_switch_set_label (p, i + 1, args[i]);
804 
805   return p;
806 }
807 
808 /* Build a GIMPLE_EH_DISPATCH statement.  */
809 
810 geh_dispatch *
811 gimple_build_eh_dispatch (int region)
812 {
813   geh_dispatch *p
814     = as_a <geh_dispatch *> (
815 	gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0));
816   p->region = region;
817   return p;
818 }
819 
820 /* Build a new GIMPLE_DEBUG_BIND statement.
821 
822    VAR is bound to VALUE; block and location are taken from STMT.  */
823 
824 gdebug *
825 gimple_build_debug_bind (tree var, tree value, gimple *stmt MEM_STAT_DECL)
826 {
827   gdebug *p
828     = as_a <gdebug *> (gimple_build_with_ops_stat (GIMPLE_DEBUG,
829 						   (unsigned)GIMPLE_DEBUG_BIND, 2
830 						   PASS_MEM_STAT));
831   gimple_debug_bind_set_var (p, var);
832   gimple_debug_bind_set_value (p, value);
833   if (stmt)
834     gimple_set_location (p, gimple_location (stmt));
835 
836   return p;
837 }
838 
839 
840 /* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.
841 
842    VAR is bound to VALUE; block and location are taken from STMT.  */
843 
844 gdebug *
845 gimple_build_debug_source_bind (tree var, tree value,
846 				     gimple *stmt MEM_STAT_DECL)
847 {
848   gdebug *p
849     = as_a <gdebug *> (
850         gimple_build_with_ops_stat (GIMPLE_DEBUG,
851 				    (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
852 				    PASS_MEM_STAT));
853 
854   gimple_debug_source_bind_set_var (p, var);
855   gimple_debug_source_bind_set_value (p, value);
856   if (stmt)
857     gimple_set_location (p, gimple_location (stmt));
858 
859   return p;
860 }
861 
862 
863 /* Build a new GIMPLE_DEBUG_BEGIN_STMT statement in BLOCK at
864    LOCATION.  */
865 
866 gdebug *
867 gimple_build_debug_begin_stmt (tree block, location_t location
868 				    MEM_STAT_DECL)
869 {
870   gdebug *p
871     = as_a <gdebug *> (
872         gimple_build_with_ops_stat (GIMPLE_DEBUG,
873 				    (unsigned)GIMPLE_DEBUG_BEGIN_STMT, 0
874 				    PASS_MEM_STAT));
875 
876   gimple_set_location (p, location);
877   gimple_set_block (p, block);
878   cfun->debug_marker_count++;
879 
880   return p;
881 }
882 
883 
884 /* Build a new GIMPLE_DEBUG_INLINE_ENTRY statement in BLOCK at
885    LOCATION.  The BLOCK links to the inlined function.  */
886 
887 gdebug *
888 gimple_build_debug_inline_entry (tree block, location_t location
889 				      MEM_STAT_DECL)
890 {
891   gdebug *p
892     = as_a <gdebug *> (
893         gimple_build_with_ops_stat (GIMPLE_DEBUG,
894 				    (unsigned)GIMPLE_DEBUG_INLINE_ENTRY, 0
895 				    PASS_MEM_STAT));
896 
897   gimple_set_location (p, location);
898   gimple_set_block (p, block);
899   cfun->debug_marker_count++;
900 
901   return p;
902 }
903 
904 
905 /* Build a GIMPLE_OMP_CRITICAL statement.
906 
907    BODY is the sequence of statements for which only one thread can execute.
908    NAME is optional identifier for this critical block.
909    CLAUSES are clauses for this critical block.  */
910 
911 gomp_critical *
912 gimple_build_omp_critical (gimple_seq body, tree name, tree clauses)
913 {
914   gomp_critical *p
915     = as_a <gomp_critical *> (gimple_alloc (GIMPLE_OMP_CRITICAL, 0));
916   gimple_omp_critical_set_name (p, name);
917   gimple_omp_critical_set_clauses (p, clauses);
918   if (body)
919     gimple_omp_set_body (p, body);
920 
921   return p;
922 }
923 
924 /* Build a GIMPLE_OMP_FOR statement.
925 
926    BODY is sequence of statements inside the for loop.
927    KIND is the `for' variant.
928    CLAUSES are any of the construct's clauses.
929    COLLAPSE is the collapse count.
930    PRE_BODY is the sequence of statements that are loop invariant.  */
931 
932 gomp_for *
933 gimple_build_omp_for (gimple_seq body, int kind, tree clauses, size_t collapse,
934 		      gimple_seq pre_body)
935 {
936   gomp_for *p = as_a <gomp_for *> (gimple_alloc (GIMPLE_OMP_FOR, 0));
937   if (body)
938     gimple_omp_set_body (p, body);
939   gimple_omp_for_set_clauses (p, clauses);
940   gimple_omp_for_set_kind (p, kind);
941   p->collapse = collapse;
942   p->iter =  ggc_cleared_vec_alloc<gimple_omp_for_iter> (collapse);
943 
944   if (pre_body)
945     gimple_omp_for_set_pre_body (p, pre_body);
946 
947   return p;
948 }
949 
950 
951 /* Build a GIMPLE_OMP_PARALLEL statement.
952 
953    BODY is sequence of statements which are executed in parallel.
954    CLAUSES are the OMP parallel construct's clauses.
955    CHILD_FN is the function created for the parallel threads to execute.
956    DATA_ARG are the shared data argument(s).  */
957 
958 gomp_parallel *
959 gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
960 			   tree data_arg)
961 {
962   gomp_parallel *p
963     = as_a <gomp_parallel *> (gimple_alloc (GIMPLE_OMP_PARALLEL, 0));
964   if (body)
965     gimple_omp_set_body (p, body);
966   gimple_omp_parallel_set_clauses (p, clauses);
967   gimple_omp_parallel_set_child_fn (p, child_fn);
968   gimple_omp_parallel_set_data_arg (p, data_arg);
969 
970   return p;
971 }
972 
973 
974 /* Build a GIMPLE_OMP_TASK statement.
975 
976    BODY is sequence of statements which are executed by the explicit task.
977    CLAUSES are the OMP task construct's clauses.
978    CHILD_FN is the function created for the parallel threads to execute.
979    DATA_ARG are the shared data argument(s).
980    COPY_FN is the optional function for firstprivate initialization.
981    ARG_SIZE and ARG_ALIGN are size and alignment of the data block.  */
982 
983 gomp_task *
984 gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
985 		       tree data_arg, tree copy_fn, tree arg_size,
986 		       tree arg_align)
987 {
988   gomp_task *p = as_a <gomp_task *> (gimple_alloc (GIMPLE_OMP_TASK, 0));
989   if (body)
990     gimple_omp_set_body (p, body);
991   gimple_omp_task_set_clauses (p, clauses);
992   gimple_omp_task_set_child_fn (p, child_fn);
993   gimple_omp_task_set_data_arg (p, data_arg);
994   gimple_omp_task_set_copy_fn (p, copy_fn);
995   gimple_omp_task_set_arg_size (p, arg_size);
996   gimple_omp_task_set_arg_align (p, arg_align);
997 
998   return p;
999 }
1000 
1001 
1002 /* Build a GIMPLE_OMP_SECTION statement for a sections statement.
1003 
1004    BODY is the sequence of statements in the section.  */
1005 
1006 gimple *
1007 gimple_build_omp_section (gimple_seq body)
1008 {
1009   gimple *p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
1010   if (body)
1011     gimple_omp_set_body (p, body);
1012 
1013   return p;
1014 }
1015 
1016 
1017 /* Build a GIMPLE_OMP_MASTER statement.
1018 
1019    BODY is the sequence of statements to be executed by just the master.  */
1020 
1021 gimple *
1022 gimple_build_omp_master (gimple_seq body)
1023 {
1024   gimple *p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
1025   if (body)
1026     gimple_omp_set_body (p, body);
1027 
1028   return p;
1029 }
1030 
1031 /* Build a GIMPLE_OMP_GRID_BODY statement.
1032 
1033    BODY is the sequence of statements to be executed by the kernel.  */
1034 
1035 gimple *
1036 gimple_build_omp_grid_body (gimple_seq body)
1037 {
1038   gimple *p = gimple_alloc (GIMPLE_OMP_GRID_BODY, 0);
1039   if (body)
1040     gimple_omp_set_body (p, body);
1041 
1042   return p;
1043 }
1044 
1045 /* Build a GIMPLE_OMP_TASKGROUP statement.
1046 
1047    BODY is the sequence of statements to be executed by the taskgroup
1048    construct.
1049    CLAUSES are any of the construct's clauses.  */
1050 
1051 gimple *
1052 gimple_build_omp_taskgroup (gimple_seq body, tree clauses)
1053 {
1054   gimple *p = gimple_alloc (GIMPLE_OMP_TASKGROUP, 0);
1055   gimple_omp_taskgroup_set_clauses (p, clauses);
1056   if (body)
1057     gimple_omp_set_body (p, body);
1058 
1059   return p;
1060 }
1061 
1062 
1063 /* Build a GIMPLE_OMP_CONTINUE statement.
1064 
1065    CONTROL_DEF is the definition of the control variable.
1066    CONTROL_USE is the use of the control variable.  */
1067 
1068 gomp_continue *
1069 gimple_build_omp_continue (tree control_def, tree control_use)
1070 {
1071   gomp_continue *p
1072     = as_a <gomp_continue *> (gimple_alloc (GIMPLE_OMP_CONTINUE, 0));
1073   gimple_omp_continue_set_control_def (p, control_def);
1074   gimple_omp_continue_set_control_use (p, control_use);
1075   return p;
1076 }
1077 
1078 /* Build a GIMPLE_OMP_ORDERED statement.
1079 
1080    BODY is the sequence of statements inside a loop that will executed in
1081    sequence.
1082    CLAUSES are clauses for this statement.  */
1083 
1084 gomp_ordered *
1085 gimple_build_omp_ordered (gimple_seq body, tree clauses)
1086 {
1087   gomp_ordered *p
1088     = as_a <gomp_ordered *> (gimple_alloc (GIMPLE_OMP_ORDERED, 0));
1089   gimple_omp_ordered_set_clauses (p, clauses);
1090   if (body)
1091     gimple_omp_set_body (p, body);
1092 
1093   return p;
1094 }
1095 
1096 
1097 /* Build a GIMPLE_OMP_RETURN statement.
1098    WAIT_P is true if this is a non-waiting return.  */
1099 
1100 gimple *
1101 gimple_build_omp_return (bool wait_p)
1102 {
1103   gimple *p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
1104   if (wait_p)
1105     gimple_omp_return_set_nowait (p);
1106 
1107   return p;
1108 }
1109 
1110 
1111 /* Build a GIMPLE_OMP_SECTIONS statement.
1112 
1113    BODY is a sequence of section statements.
1114    CLAUSES are any of the OMP sections contsruct's clauses: private,
1115    firstprivate, lastprivate, reduction, and nowait.  */
1116 
1117 gomp_sections *
1118 gimple_build_omp_sections (gimple_seq body, tree clauses)
1119 {
1120   gomp_sections *p
1121     = as_a <gomp_sections *> (gimple_alloc (GIMPLE_OMP_SECTIONS, 0));
1122   if (body)
1123     gimple_omp_set_body (p, body);
1124   gimple_omp_sections_set_clauses (p, clauses);
1125 
1126   return p;
1127 }
1128 
1129 
1130 /* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */
1131 
1132 gimple *
1133 gimple_build_omp_sections_switch (void)
1134 {
1135   return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
1136 }
1137 
1138 
1139 /* Build a GIMPLE_OMP_SINGLE statement.
1140 
1141    BODY is the sequence of statements that will be executed once.
1142    CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
1143    copyprivate, nowait.  */
1144 
1145 gomp_single *
1146 gimple_build_omp_single (gimple_seq body, tree clauses)
1147 {
1148   gomp_single *p
1149     = as_a <gomp_single *> (gimple_alloc (GIMPLE_OMP_SINGLE, 0));
1150   if (body)
1151     gimple_omp_set_body (p, body);
1152   gimple_omp_single_set_clauses (p, clauses);
1153 
1154   return p;
1155 }
1156 
1157 
1158 /* Build a GIMPLE_OMP_TARGET statement.
1159 
1160    BODY is the sequence of statements that will be executed.
1161    KIND is the kind of the region.
1162    CLAUSES are any of the construct's clauses.  */
1163 
1164 gomp_target *
1165 gimple_build_omp_target (gimple_seq body, int kind, tree clauses)
1166 {
1167   gomp_target *p
1168     = as_a <gomp_target *> (gimple_alloc (GIMPLE_OMP_TARGET, 0));
1169   if (body)
1170     gimple_omp_set_body (p, body);
1171   gimple_omp_target_set_clauses (p, clauses);
1172   gimple_omp_target_set_kind (p, kind);
1173 
1174   return p;
1175 }
1176 
1177 
1178 /* Build a GIMPLE_OMP_TEAMS statement.
1179 
1180    BODY is the sequence of statements that will be executed.
1181    CLAUSES are any of the OMP teams construct's clauses.  */
1182 
1183 gomp_teams *
1184 gimple_build_omp_teams (gimple_seq body, tree clauses)
1185 {
1186   gomp_teams *p = as_a <gomp_teams *> (gimple_alloc (GIMPLE_OMP_TEAMS, 0));
1187   if (body)
1188     gimple_omp_set_body (p, body);
1189   gimple_omp_teams_set_clauses (p, clauses);
1190 
1191   return p;
1192 }
1193 
1194 
1195 /* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */
1196 
1197 gomp_atomic_load *
1198 gimple_build_omp_atomic_load (tree lhs, tree rhs, enum omp_memory_order mo)
1199 {
1200   gomp_atomic_load *p
1201     = as_a <gomp_atomic_load *> (gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0));
1202   gimple_omp_atomic_load_set_lhs (p, lhs);
1203   gimple_omp_atomic_load_set_rhs (p, rhs);
1204   gimple_omp_atomic_set_memory_order (p, mo);
1205   return p;
1206 }
1207 
1208 /* Build a GIMPLE_OMP_ATOMIC_STORE statement.
1209 
1210    VAL is the value we are storing.  */
1211 
1212 gomp_atomic_store *
1213 gimple_build_omp_atomic_store (tree val, enum omp_memory_order mo)
1214 {
1215   gomp_atomic_store *p
1216     = as_a <gomp_atomic_store *> (gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0));
1217   gimple_omp_atomic_store_set_val (p, val);
1218   gimple_omp_atomic_set_memory_order (p, mo);
1219   return p;
1220 }
1221 
1222 /* Build a GIMPLE_TRANSACTION statement.  */
1223 
1224 gtransaction *
1225 gimple_build_transaction (gimple_seq body)
1226 {
1227   gtransaction *p
1228     = as_a <gtransaction *> (gimple_alloc (GIMPLE_TRANSACTION, 0));
1229   gimple_transaction_set_body (p, body);
1230   gimple_transaction_set_label_norm (p, 0);
1231   gimple_transaction_set_label_uninst (p, 0);
1232   gimple_transaction_set_label_over (p, 0);
1233   return p;
1234 }
1235 
1236 #if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  Called by the
   GIMPLE_CHECK machinery when a tuple does not have the expected
   code/subcode; FILE, LINE and FUNCTION identify the failing check
   site.  Does not return.  */

void
gimple_check_failed (const gimple *gs, const char *file, int line,
		     const char *function, enum gimple_code code,
		     enum tree_code subcode)
{
  /* Print both the expected code (SUBCODE) pair and the pair actually
     present on GS; an empty string stands in when GS carries no
     subcode.  */
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
      		  gimple_code_name[code],
		  get_tree_code_name (subcode),
		  gimple_code_name[gimple_code (gs)],
		  gs->subcode > 0
		    ? get_tree_code_name ((enum tree_code) gs->subcode)
		    : "",
		  function, trim_filename (file), line);
}
1253 #endif /* ENABLE_GIMPLE_CHECKING */
1254 
1255 
1256 /* Link gimple statement GS to the end of the sequence *SEQ_P.  If
1257    *SEQ_P is NULL, a new sequence is allocated.  */
1258 
1259 void
1260 gimple_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
1261 {
1262   gimple_stmt_iterator si;
1263   if (gs == NULL)
1264     return;
1265 
1266   si = gsi_last (*seq_p);
1267   gsi_insert_after (&si, gs, GSI_NEW_STMT);
1268 }
1269 
1270 /* Link gimple statement GS to the end of the sequence *SEQ_P.  If
1271    *SEQ_P is NULL, a new sequence is allocated.  This function is
1272    similar to gimple_seq_add_stmt, but does not scan the operands.
1273    During gimplification, we need to manipulate statement sequences
1274    before the def/use vectors have been constructed.  */
1275 
1276 void
1277 gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple *gs)
1278 {
1279   gimple_stmt_iterator si;
1280 
1281   if (gs == NULL)
1282     return;
1283 
1284   si = gsi_last (*seq_p);
1285   gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
1286 }
1287 
1288 /* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
1289    NULL, a new sequence is allocated.  */
1290 
1291 void
1292 gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
1293 {
1294   gimple_stmt_iterator si;
1295   if (src == NULL)
1296     return;
1297 
1298   si = gsi_last (*dst_p);
1299   gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
1300 }
1301 
1302 /* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
1303    NULL, a new sequence is allocated.  This function is
1304    similar to gimple_seq_add_seq, but does not scan the operands.  */
1305 
1306 void
1307 gimple_seq_add_seq_without_update (gimple_seq *dst_p, gimple_seq src)
1308 {
1309   gimple_stmt_iterator si;
1310   if (src == NULL)
1311     return;
1312 
1313   si = gsi_last (*dst_p);
1314   gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
1315 }
1316 
1317 /* Determine whether to assign a location to the statement GS.  */
1318 
1319 static bool
1320 should_carry_location_p (gimple *gs)
1321 {
1322   /* Don't emit a line note for a label.  We particularly don't want to
1323      emit one for the break label, since it doesn't actually correspond
1324      to the beginning of the loop/switch.  */
1325   if (gimple_code (gs) == GIMPLE_LABEL)
1326     return false;
1327 
1328   return true;
1329 }
1330 
1331 /* Set the location for gimple statement GS to LOCATION.  */
1332 
1333 static void
1334 annotate_one_with_location (gimple *gs, location_t location)
1335 {
1336   if (!gimple_has_location (gs)
1337       && !gimple_do_not_emit_location_p (gs)
1338       && should_carry_location_p (gs))
1339     gimple_set_location (gs, location);
1340 }
1341 
1342 /* Set LOCATION for all the statements after iterator GSI in sequence
1343    SEQ.  If GSI is pointing to the end of the sequence, start with the
1344    first statement in SEQ.  */
1345 
1346 void
1347 annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
1348 				  location_t location)
1349 {
1350   if (gsi_end_p (gsi))
1351     gsi = gsi_start (seq);
1352   else
1353     gsi_next (&gsi);
1354 
1355   for (; !gsi_end_p (gsi); gsi_next (&gsi))
1356     annotate_one_with_location (gsi_stmt (gsi), location);
1357 }
1358 
1359 /* Set the location for all the statements in a sequence STMT_P to LOCATION.  */
1360 
1361 void
1362 annotate_all_with_location (gimple_seq stmt_p, location_t location)
1363 {
1364   gimple_stmt_iterator i;
1365 
1366   if (gimple_seq_empty_p (stmt_p))
1367     return;
1368 
1369   for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
1370     {
1371       gimple *gs = gsi_stmt (i);
1372       annotate_one_with_location (gs, location);
1373     }
1374 }
1375 
1376 /* Helper function of empty_body_p.  Return true if STMT is an empty
1377    statement.  */
1378 
1379 static bool
1380 empty_stmt_p (gimple *stmt)
1381 {
1382   if (gimple_code (stmt) == GIMPLE_NOP)
1383     return true;
1384   if (gbind *bind_stmt = dyn_cast <gbind *> (stmt))
1385     return empty_body_p (gimple_bind_body (bind_stmt));
1386   return false;
1387 }
1388 
1389 
1390 /* Return true if BODY contains nothing but empty statements.  */
1391 
1392 bool
1393 empty_body_p (gimple_seq body)
1394 {
1395   gimple_stmt_iterator i;
1396 
1397   if (gimple_seq_empty_p (body))
1398     return true;
1399   for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
1400     if (!empty_stmt_p (gsi_stmt (i))
1401 	&& !is_gimple_debug (gsi_stmt (i)))
1402       return false;
1403 
1404   return true;
1405 }
1406 
1407 
1408 /* Perform a deep copy of sequence SRC and return the result.  */
1409 
1410 gimple_seq
1411 gimple_seq_copy (gimple_seq src)
1412 {
1413   gimple_stmt_iterator gsi;
1414   gimple_seq new_seq = NULL;
1415   gimple *stmt;
1416 
1417   for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
1418     {
1419       stmt = gimple_copy (gsi_stmt (gsi));
1420       gimple_seq_add_stmt (&new_seq, stmt);
1421     }
1422 
1423   return new_seq;
1424 }
1425 
1426 
1427 
/* Return true if calls C1 and C2 are known to go to the same function.  */

bool
gimple_call_same_target_p (const gimple *c1, const gimple *c2)
{
  if (gimple_call_internal_p (c1))
    /* Internal calls match when they invoke the same internal function;
       for internal functions whose every call is unique, only the very
       same statement matches itself.  */
    return (gimple_call_internal_p (c2)
	    && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2)
	    && (!gimple_call_internal_unique_p (as_a <const gcall *> (c1))
		|| c1 == c2));
  else
    /* Otherwise compare the callee expressions directly, falling back
       to comparing FUNCTION_DECLs when both calls have a known decl.  */
    return (gimple_call_fn (c1) == gimple_call_fn (c2)
	    || (gimple_call_fndecl (c1)
		&& gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
}
1443 
/* Detect ECF_* flags from a GIMPLE_CALL.  This is just like
   call_expr_flags, but for gimple tuples.  */

int
gimple_call_flags (const gimple *stmt)
{
  int flags = 0;

  if (gimple_call_internal_p (stmt))
    /* Internal functions carry their flags in a static table.  */
    flags = internal_fn_flags (gimple_call_internal_fn (stmt));
  else
    {
      /* Combine the flags of the callee's decl (when known) with the
	 flags derived from the call's function type.  */
      tree decl = gimple_call_fndecl (stmt);
      if (decl)
	flags = flags_from_decl_or_type (decl);
      flags |= flags_from_decl_or_type (gimple_call_fntype (stmt));
    }

  /* Merge in flags recorded on the call statement itself.  */
  if (stmt->subcode & GF_CALL_NOTHROW)
    flags |= ECF_NOTHROW;

  if (stmt->subcode & GF_CALL_BY_DESCRIPTOR)
    flags |= ECF_BY_DESCRIPTOR;

  return flags;
}
1470 
/* Return the "fn spec" string for call STMT, or NULL_TREE when the
   called function's type carries no such attribute.  */

static const_tree
gimple_call_fnspec (const gcall *stmt)
{
  tree type, attr;

  /* Internal functions have their fnspec recorded in a static table.  */
  if (gimple_call_internal_p (stmt))
    return internal_fn_fnspec (gimple_call_internal_fn (stmt));

  type = gimple_call_fntype (stmt);
  if (!type)
    return NULL_TREE;

  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return NULL_TREE;

  /* The attribute's argument list holds the spec string as its sole
     element.  */
  return TREE_VALUE (TREE_VALUE (attr));
}
1491 
/* Detect EAF_* flags for argument number ARG on call STMT from the
   callee's "fn spec" string.  Returns 0 when nothing is known.  */

int
gimple_call_arg_flags (const gcall *stmt, unsigned arg)
{
  const_tree attr = gimple_call_fnspec (stmt);

  /* Character 0 of the fnspec string describes the return value, so
     argument ARG is described by character 1 + ARG.  No fnspec, or a
     spec too short to cover this argument, means nothing is known.  */
  if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
    return 0;

  switch (TREE_STRING_POINTER (attr)[1 + arg])
    {
    case 'x':
    case 'X':
      /* The argument is not used at all.  */
      return EAF_UNUSED;

    case 'R':
      /* Read-only, accessed directly, and does not escape.  */
      return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'r':
      /* Not clobbered and does not escape.  */
      return EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'W':
      /* May be written through, accessed directly, does not escape.  */
      return EAF_DIRECT | EAF_NOESCAPE;

    case 'w':
      /* Does not escape.  */
      return EAF_NOESCAPE;

    case '.':
    default:
      /* '.' or anything unrecognized: no information.  */
      return 0;
    }
}
1525 
/* Detect ERF_* return flags for the call STMT.  Returns 0 when
   nothing is known about the call's return value.  */

int
gimple_call_return_flags (const gcall *stmt)
{
  const_tree attr;

  /* Malloc-like functions return memory that aliases nothing else.  */
  if (gimple_call_flags (stmt) & ECF_MALLOC)
    return ERF_NOALIAS;

  attr = gimple_call_fnspec (stmt);
  if (!attr || TREE_STRING_LENGTH (attr) < 1)
    return 0;

  /* Character 0 of the fnspec string describes the return value.  */
  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      /* The call returns its Nth argument; encode the zero-based
	 argument index in the low bits of the flags.  */
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}
1556 
1557 
/* Return true if call STMT is known to return a non-zero result.  */

bool
gimple_call_nonnull_result_p (gcall *call)
{
  tree fndecl = gimple_call_fndecl (call);
  if (!fndecl)
    return false;
  /* A throwing operator new never returns NULL, unless null pointer
     checks are disabled or -fcheck-new is in effect.  */
  if (flag_delete_null_pointer_checks && !flag_check_new
      && DECL_IS_OPERATOR_NEW (fndecl)
      && !TREE_NOTHROW (fndecl))
    return true;

  /* References are always non-NULL.  */
  /* NOTE(review): TREE_TYPE (fndecl) is the callee's FUNCTION_TYPE, so
     this REFERENCE_TYPE test looks like it can never match; possibly
     the return type (one more TREE_TYPE) was intended — confirm
     against upstream before relying on this branch.  */
  if (flag_delete_null_pointer_checks
      && TREE_CODE (TREE_TYPE (fndecl)) == REFERENCE_TYPE)
    return true;

  /* Honor an explicit returns_nonnull attribute on the call's
     function type.  */
  if (flag_delete_null_pointer_checks
      && lookup_attribute ("returns_nonnull",
			   TYPE_ATTRIBUTES (gimple_call_fntype (call))))
    return true;
  /* Finally, alloca-like calls also yield non-NULL results.  */
  return gimple_alloca_call_p (call);
}
1582 
1583 
/* If CALL returns a non-null result in an argument, return that arg.
   Returns NULL_TREE when no such argument can be determined.  */

tree
gimple_call_nonnull_arg (gcall *call)
{
  tree fndecl = gimple_call_fndecl (call);
  if (!fndecl)
    return NULL_TREE;

  /* See whether the fnspec says the call returns one of its
     arguments.  */
  unsigned rf = gimple_call_return_flags (call);
  if (rf & ERF_RETURNS_ARG)
    {
      unsigned argnum = rf & ERF_RETURN_ARG_MASK;
      if (argnum < gimple_call_num_args (call))
	{
	  tree arg = gimple_call_arg (call, argnum);
	  /* Only SSA names or variables whose non-nullness follows
	     from a nonnull attribute on the call qualify.  */
	  if (SSA_VAR_P (arg)
	      && infer_nonnull_range_by_attribute (call, arg))
	    return arg;
	}
    }
  return NULL_TREE;
}
1607 
1608 
1609 /* Return true if GS is a copy assignment.  */
1610 
1611 bool
1612 gimple_assign_copy_p (gimple *gs)
1613 {
1614   return (gimple_assign_single_p (gs)
1615 	  && is_gimple_val (gimple_op (gs, 1)));
1616 }
1617 
1618 
1619 /* Return true if GS is a SSA_NAME copy assignment.  */
1620 
1621 bool
1622 gimple_assign_ssa_name_copy_p (gimple *gs)
1623 {
1624   return (gimple_assign_single_p (gs)
1625 	  && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
1626 	  && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
1627 }
1628 
1629 
1630 /* Return true if GS is an assignment with a unary RHS, but the
1631    operator has no effect on the assigned value.  The logic is adapted
1632    from STRIP_NOPS.  This predicate is intended to be used in tuplifying
1633    instances in which STRIP_NOPS was previously applied to the RHS of
1634    an assignment.
1635 
1636    NOTE: In the use cases that led to the creation of this function
1637    and of gimple_assign_single_p, it is typical to test for either
1638    condition and to proceed in the same manner.  In each case, the
1639    assigned value is represented by the single RHS operand of the
1640    assignment.  I suspect there may be cases where gimple_assign_copy_p,
1641    gimple_assign_single_p, or equivalent logic is used where a similar
1642    treatment of unary NOPs is appropriate.  */
1643 
1644 bool
1645 gimple_assign_unary_nop_p (gimple *gs)
1646 {
1647   return (is_gimple_assign (gs)
1648           && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
1649               || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
1650           && gimple_assign_rhs1 (gs) != error_mark_node
1651           && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
1652               == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
1653 }
1654 
/* Set BB to be the basic block holding statement STMT.  For labels,
   also keep the CFG's label-to-block map up to date.  */

void
gimple_set_bb (gimple *stmt, basic_block bb)
{
  stmt->bb = bb;

  if (gimple_code (stmt) != GIMPLE_LABEL)
    return;

  /* If the statement is a label, add the label to block-to-labels map
     so that we can speed up edge creation for GIMPLE_GOTOs.  */
  if (cfun->cfg)
    {
      tree t;
      int uid;

      t = gimple_label_label (as_a <glabel *> (stmt));
      uid = LABEL_DECL_UID (t);
      if (uid == -1)
	{
	  /* First time this label enters the map: assign a fresh UID
	     and grow the map if it is too small to hold it.  */
	  unsigned old_len =
	    vec_safe_length (label_to_block_map_for_fn (cfun));
	  LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
	  if (old_len <= (unsigned) uid)
	    {
	      /* Grow by half again as much to amortize reallocations.  */
	      unsigned new_len = 3 * uid / 2 + 1;

	      vec_safe_grow_cleared (label_to_block_map_for_fn (cfun),
				     new_len);
	    }
	}

      (*label_to_block_map_for_fn (cfun))[uid] = bb;
    }
}
1691 
1692 
1693 /* Modify the RHS of the assignment pointed-to by GSI using the
1694    operands in the expression tree EXPR.
1695 
1696    NOTE: The statement pointed-to by GSI may be reallocated if it
1697    did not have enough operand slots.
1698 
1699    This function is useful to convert an existing tree expression into
1700    the flat representation used for the RHS of a GIMPLE assignment.
1701    It will reallocate memory as needed to expand or shrink the number
1702    of operand slots needed to represent EXPR.
1703 
1704    NOTE: If you find yourself building a tree and then calling this
1705    function, you are most certainly doing it the slow way.  It is much
1706    better to build a new assignment or to use the function
1707    gimple_assign_set_rhs_with_ops, which does not require an
1708    expression tree to be built.  */
1709 
1710 void
1711 gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
1712 {
1713   enum tree_code subcode;
1714   tree op1, op2, op3;
1715 
1716   extract_ops_from_tree (expr, &subcode, &op1, &op2, &op3);
1717   gimple_assign_set_rhs_with_ops (gsi, subcode, op1, op2, op3);
1718 }
1719 
1720 
1721 /* Set the RHS of assignment statement pointed-to by GSI to CODE with
1722    operands OP1, OP2 and OP3.
1723 
1724    NOTE: The statement pointed-to by GSI may be reallocated if it
1725    did not have enough operand slots.  */
1726 
1727 void
1728 gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *gsi, enum tree_code code,
1729 				tree op1, tree op2, tree op3)
1730 {
1731   unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
1732   gimple *stmt = gsi_stmt (*gsi);
1733   gimple *old_stmt = stmt;
1734 
1735   /* If the new CODE needs more operands, allocate a new statement.  */
1736   if (gimple_num_ops (stmt) < new_rhs_ops + 1)
1737     {
1738       tree lhs = gimple_assign_lhs (old_stmt);
1739       stmt = gimple_alloc (gimple_code (old_stmt), new_rhs_ops + 1);
1740       memcpy (stmt, old_stmt, gimple_size (gimple_code (old_stmt)));
1741       gimple_init_singleton (stmt);
1742 
1743       /* The LHS needs to be reset as this also changes the SSA name
1744 	 on the LHS.  */
1745       gimple_assign_set_lhs (stmt, lhs);
1746     }
1747 
1748   gimple_set_num_ops (stmt, new_rhs_ops + 1);
1749   gimple_set_subcode (stmt, code);
1750   gimple_assign_set_rhs1 (stmt, op1);
1751   if (new_rhs_ops > 1)
1752     gimple_assign_set_rhs2 (stmt, op2);
1753   if (new_rhs_ops > 2)
1754     gimple_assign_set_rhs3 (stmt, op3);
1755   if (stmt != old_stmt)
1756     gsi_replace (gsi, stmt, false);
1757 }
1758 
1759 
1760 /* Return the LHS of a statement that performs an assignment,
1761    either a GIMPLE_ASSIGN or a GIMPLE_CALL.  Returns NULL_TREE
1762    for a call to a function that returns no value, or for a
1763    statement other than an assignment or a call.  */
1764 
1765 tree
1766 gimple_get_lhs (const gimple *stmt)
1767 {
1768   enum gimple_code code = gimple_code (stmt);
1769 
1770   if (code == GIMPLE_ASSIGN)
1771     return gimple_assign_lhs (stmt);
1772   else if (code == GIMPLE_CALL)
1773     return gimple_call_lhs (stmt);
1774   else
1775     return NULL_TREE;
1776 }
1777 
1778 
1779 /* Set the LHS of a statement that performs an assignment,
1780    either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
1781 
1782 void
1783 gimple_set_lhs (gimple *stmt, tree lhs)
1784 {
1785   enum gimple_code code = gimple_code (stmt);
1786 
1787   if (code == GIMPLE_ASSIGN)
1788     gimple_assign_set_lhs (stmt, lhs);
1789   else if (code == GIMPLE_CALL)
1790     gimple_call_set_lhs (stmt, lhs);
1791   else
1792     gcc_unreachable ();
1793 }
1794 
1795 
/* Return a deep copy of statement STMT.  All the operands from STMT
   are reallocated and copied using unshare_expr.  The DEF, USE, VDEF
   and VUSE operand arrays are set to empty in the new copy.  The new
   copy isn't part of any sequence.  */

gimple *
gimple_copy (gimple *stmt)
{
  enum gimple_code code = gimple_code (stmt);
  unsigned num_ops = gimple_num_ops (stmt);
  gimple *copy = gimple_alloc (code, num_ops);
  unsigned i;

  /* Shallow copy all the fields from STMT.  */
  memcpy (copy, stmt, gimple_size (code));
  gimple_init_singleton (copy);

  /* If STMT has sub-statements, deep-copy them as well.  */
  if (gimple_has_substatements (stmt))
    {
      gimple_seq new_seq;
      tree t;

      /* Each case deep-copies the sub-sequences and unshares the
	 trees specific to that statement kind; OMP statements with a
	 body share the copy_omp_body tail.  */
      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  {
	    gbind *bind_stmt = as_a <gbind *> (stmt);
	    gbind *bind_copy = as_a <gbind *> (copy);
	    new_seq = gimple_seq_copy (gimple_bind_body (bind_stmt));
	    gimple_bind_set_body (bind_copy, new_seq);
	    gimple_bind_set_vars (bind_copy,
				  unshare_expr (gimple_bind_vars (bind_stmt)));
	    gimple_bind_set_block (bind_copy, gimple_bind_block (bind_stmt));
	  }
	  break;

	case GIMPLE_CATCH:
	  {
	    gcatch *catch_stmt = as_a <gcatch *> (stmt);
	    gcatch *catch_copy = as_a <gcatch *> (copy);
	    new_seq = gimple_seq_copy (gimple_catch_handler (catch_stmt));
	    gimple_catch_set_handler (catch_copy, new_seq);
	    t = unshare_expr (gimple_catch_types (catch_stmt));
	    gimple_catch_set_types (catch_copy, t);
	  }
	  break;

	case GIMPLE_EH_FILTER:
	  {
	    geh_filter *eh_filter_stmt = as_a <geh_filter *> (stmt);
	    geh_filter *eh_filter_copy = as_a <geh_filter *> (copy);
	    new_seq
	      = gimple_seq_copy (gimple_eh_filter_failure (eh_filter_stmt));
	    gimple_eh_filter_set_failure (eh_filter_copy, new_seq);
	    t = unshare_expr (gimple_eh_filter_types (eh_filter_stmt));
	    gimple_eh_filter_set_types (eh_filter_copy, t);
	  }
	  break;

	case GIMPLE_EH_ELSE:
	  {
	    geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	    geh_else *eh_else_copy = as_a <geh_else *> (copy);
	    new_seq = gimple_seq_copy (gimple_eh_else_n_body (eh_else_stmt));
	    gimple_eh_else_set_n_body (eh_else_copy, new_seq);
	    new_seq = gimple_seq_copy (gimple_eh_else_e_body (eh_else_stmt));
	    gimple_eh_else_set_e_body (eh_else_copy, new_seq);
	  }
	  break;

	case GIMPLE_TRY:
	  {
	    gtry *try_stmt = as_a <gtry *> (stmt);
	    gtry *try_copy = as_a <gtry *> (copy);
	    new_seq = gimple_seq_copy (gimple_try_eval (try_stmt));
	    gimple_try_set_eval (try_copy, new_seq);
	    new_seq = gimple_seq_copy (gimple_try_cleanup (try_stmt));
	    gimple_try_set_cleanup (try_copy, new_seq);
	  }
	  break;

	case GIMPLE_OMP_FOR:
	  new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
	  gimple_omp_for_set_pre_body (copy, new_seq);
	  t = unshare_expr (gimple_omp_for_clauses (stmt));
	  gimple_omp_for_set_clauses (copy, t);
	  /* Allocate a fresh per-dimension iteration array for the copy
	     before filling it in below.  */
	  {
	    gomp_for *omp_for_copy = as_a <gomp_for *> (copy);
	    omp_for_copy->iter = ggc_vec_alloc<gimple_omp_for_iter>
	      ( gimple_omp_for_collapse (stmt));
          }
	  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	    {
	      gimple_omp_for_set_cond (copy, i,
				       gimple_omp_for_cond (stmt, i));
	      gimple_omp_for_set_index (copy, i,
					gimple_omp_for_index (stmt, i));
	      t = unshare_expr (gimple_omp_for_initial (stmt, i));
	      gimple_omp_for_set_initial (copy, i, t);
	      t = unshare_expr (gimple_omp_for_final (stmt, i));
	      gimple_omp_for_set_final (copy, i, t);
	      t = unshare_expr (gimple_omp_for_incr (stmt, i));
	      gimple_omp_for_set_incr (copy, i, t);
	    }
	  goto copy_omp_body;

	case GIMPLE_OMP_PARALLEL:
	  {
	    gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
	    gomp_parallel *omp_par_copy = as_a <gomp_parallel *> (copy);
	    t = unshare_expr (gimple_omp_parallel_clauses (omp_par_stmt));
	    gimple_omp_parallel_set_clauses (omp_par_copy, t);
	    t = unshare_expr (gimple_omp_parallel_child_fn (omp_par_stmt));
	    gimple_omp_parallel_set_child_fn (omp_par_copy, t);
	    t = unshare_expr (gimple_omp_parallel_data_arg (omp_par_stmt));
	    gimple_omp_parallel_set_data_arg (omp_par_copy, t);
	  }
	  goto copy_omp_body;

	case GIMPLE_OMP_TASK:
	  t = unshare_expr (gimple_omp_task_clauses (stmt));
	  gimple_omp_task_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_task_child_fn (stmt));
	  gimple_omp_task_set_child_fn (copy, t);
	  t = unshare_expr (gimple_omp_task_data_arg (stmt));
	  gimple_omp_task_set_data_arg (copy, t);
	  t = unshare_expr (gimple_omp_task_copy_fn (stmt));
	  gimple_omp_task_set_copy_fn (copy, t);
	  t = unshare_expr (gimple_omp_task_arg_size (stmt));
	  gimple_omp_task_set_arg_size (copy, t);
	  t = unshare_expr (gimple_omp_task_arg_align (stmt));
	  gimple_omp_task_set_arg_align (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_CRITICAL:
	  t = unshare_expr (gimple_omp_critical_name
				(as_a <gomp_critical *> (stmt)));
	  gimple_omp_critical_set_name (as_a <gomp_critical *> (copy), t);
	  t = unshare_expr (gimple_omp_critical_clauses
				(as_a <gomp_critical *> (stmt)));
	  gimple_omp_critical_set_clauses (as_a <gomp_critical *> (copy), t);
	  goto copy_omp_body;

	case GIMPLE_OMP_ORDERED:
	  t = unshare_expr (gimple_omp_ordered_clauses
				(as_a <gomp_ordered *> (stmt)));
	  gimple_omp_ordered_set_clauses (as_a <gomp_ordered *> (copy), t);
	  goto copy_omp_body;

	case GIMPLE_OMP_TASKGROUP:
	  t = unshare_expr (gimple_omp_taskgroup_clauses (stmt));
	  gimple_omp_taskgroup_set_clauses (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_SECTIONS:
	  t = unshare_expr (gimple_omp_sections_clauses (stmt));
	  gimple_omp_sections_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_sections_control (stmt));
	  gimple_omp_sections_set_control (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_SINGLE:
	  {
	    gomp_single *omp_single_copy = as_a <gomp_single *> (copy);
	    t = unshare_expr (gimple_omp_single_clauses (stmt));
	    gimple_omp_single_set_clauses (omp_single_copy, t);
	  }
	  goto copy_omp_body;

	case GIMPLE_OMP_TARGET:
	  {
	    gomp_target *omp_target_stmt = as_a <gomp_target *> (stmt);
	    gomp_target *omp_target_copy = as_a <gomp_target *> (copy);
	    t = unshare_expr (gimple_omp_target_clauses (omp_target_stmt));
	    gimple_omp_target_set_clauses (omp_target_copy, t);
	    t = unshare_expr (gimple_omp_target_data_arg (omp_target_stmt));
	    gimple_omp_target_set_data_arg (omp_target_copy, t);
	  }
	  goto copy_omp_body;

	case GIMPLE_OMP_TEAMS:
	  {
	    gomp_teams *omp_teams_copy = as_a <gomp_teams *> (copy);
	    t = unshare_expr (gimple_omp_teams_clauses (stmt));
	    gimple_omp_teams_set_clauses (omp_teams_copy, t);
	  }
	  /* FALLTHRU  */

	case GIMPLE_OMP_SECTION:
	case GIMPLE_OMP_MASTER:
	case GIMPLE_OMP_GRID_BODY:
	copy_omp_body:
	  new_seq = gimple_seq_copy (gimple_omp_body (stmt));
	  gimple_omp_set_body (copy, new_seq);
	  break;

	case GIMPLE_TRANSACTION:
	  new_seq = gimple_seq_copy (gimple_transaction_body (
				       as_a <gtransaction *> (stmt)));
	  gimple_transaction_set_body (as_a <gtransaction *> (copy),
				       new_seq);
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
	  gimple_wce_set_cleanup (copy, new_seq);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Make copy of operands.  */
  for (i = 0; i < num_ops; i++)
    gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));

  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vdef (copy, gimple_vdef (stmt));
      gimple_set_vuse (copy, gimple_vuse (stmt));
    }

  /* Clear out SSA operand vectors on COPY.  */
  if (gimple_has_ops (stmt))
    {
      gimple_set_use_ops (copy, NULL);

      /* SSA operands need to be updated.  */
      gimple_set_modified (copy, true);
    }

  /* Debug markers are counted per function; account for the new one.  */
  if (gimple_debug_nonbind_marker_p (stmt))
    cfun->debug_marker_count++;

  return copy;
}
2034 
2035 
/* Return true if statement S has side-effects.  We consider a
   statement to have side effects if:

   - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
   - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS.  */

bool
gimple_has_side_effects (const gimple *s)
{
  /* Debug statements never have side effects.  */
  if (is_gimple_debug (s))
    return false;

  /* We don't have to scan the arguments to check for
     volatile arguments, though, at present, we still
     do a scan to check for TREE_SIDE_EFFECTS.  */
  if (gimple_has_volatile_ops (s))
    return true;

  /* A volatile asm is a side effect by definition.  */
  if (gimple_code (s) == GIMPLE_ASM
      && gimple_asm_volatile_p (as_a <const gasm *> (s)))
    return true;

  if (is_gimple_call (s))
    {
      int flags = gimple_call_flags (s);

      /* An infinite loop is considered a side effect.  */
      if (!(flags & (ECF_CONST | ECF_PURE))
	  || (flags & ECF_LOOPING_CONST_OR_PURE))
	return true;

      return false;
    }

  return false;
}
2072 
/* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
   Return true if S can trap.  When INCLUDE_MEM is true, check whether
   the memory operations could trap.  When INCLUDE_STORES is true and
   S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked.  */

bool
gimple_could_trap_p_1 (gimple *s, bool include_mem, bool include_stores)
{
  tree t, div = NULL_TREE;
  enum tree_code op;

  if (include_mem)
    {
      /* On an assignment, operand 0 is the LHS (the store); start at
	 operand 1 unless stores are to be considered too.  */
      unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;

      for (i = start; i < gimple_num_ops (s); i++)
	if (tree_could_trap_p (gimple_op (s, i)))
	  return true;
    }

  switch (gimple_code (s))
    {
    case GIMPLE_ASM:
      /* Volatile asms are assumed to be able to trap.  */
      return gimple_asm_volatile_p (as_a <gasm *> (s));

    case GIMPLE_CALL:
      t = gimple_call_fndecl (s);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
	return true;
      return false;

    case GIMPLE_ASSIGN:
      t = gimple_expr_type (s);
      op = gimple_assign_rhs_code (s);
      /* Pass the second operand of binary operations so division by a
	 nonzero constant can be proven trap-free.  */
      if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
	div = gimple_assign_rhs2 (s);
      return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
				      (INTEGRAL_TYPE_P (t)
				       && TYPE_OVERFLOW_TRAPS (t)),
				      div));

    case GIMPLE_COND:
      t = TREE_TYPE (gimple_cond_lhs (s));
      return operation_could_trap_p (gimple_cond_code (s),
				     FLOAT_TYPE_P (t), false, NULL_TREE);

    default:
      break;
    }

  return false;
}
2126 
2127 /* Return true if statement S can trap.  */
2128 
2129 bool
2130 gimple_could_trap_p (gimple *s)
2131 {
2132   return gimple_could_trap_p_1 (s, true, true);
2133 }
2134 
2135 /* Return true if RHS of a GIMPLE_ASSIGN S can trap.  */
2136 
2137 bool
2138 gimple_assign_rhs_could_trap_p (gimple *s)
2139 {
2140   gcc_assert (is_gimple_assign (s));
2141   return gimple_could_trap_p_1 (s, true, false);
2142 }
2143 
2144 
/* Print debugging information for gimple stmts generated.  */

void
dump_gimple_statistics (void)
{
  int i;
  uint64_t total_tuples = 0, total_bytes = 0;

  /* Allocation counters are only maintained when the compiler was
     configured with --enable-gather-detailed-mem-stats.  */
  if (! GATHER_STATISTICS)
    {
      fprintf (stderr, "No GIMPLE statistics\n");
      return;
    }

  fprintf (stderr, "\nGIMPLE statements\n");
  fprintf (stderr, "Kind                   Stmts      Bytes\n");
  fprintf (stderr, "---------------------------------------\n");
  /* One row per allocation kind; SIZE_AMOUNT expands to a scaled
     value plus a unit-suffix character for each %c.  */
  for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
    {
      fprintf (stderr, "%-20s %7" PRIu64 "%c %10" PRIu64 "%c\n",
	       gimple_alloc_kind_names[i],
	       SIZE_AMOUNT (gimple_alloc_counts[i]),
	       SIZE_AMOUNT (gimple_alloc_sizes[i]));
      total_tuples += gimple_alloc_counts[i];
      total_bytes += gimple_alloc_sizes[i];
    }
  fprintf (stderr, "---------------------------------------\n");
  fprintf (stderr, "%-20s %7" PRIu64 "%c %10" PRIu64 "%c\n", "Total",
	   SIZE_AMOUNT (total_tuples), SIZE_AMOUNT (total_bytes));
  fprintf (stderr, "---------------------------------------\n");
}
2176 
2177 
2178 /* Return the number of operands needed on the RHS of a GIMPLE
2179    assignment for an expression with tree code CODE.  */
2180 
2181 unsigned
2182 get_gimple_rhs_num_ops (enum tree_code code)
2183 {
2184   enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2185 
2186   if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2187     return 1;
2188   else if (rhs_class == GIMPLE_BINARY_RHS)
2189     return 2;
2190   else if (rhs_class == GIMPLE_TERNARY_RHS)
2191     return 3;
2192   else
2193     gcc_unreachable ();
2194 }
2195 
/* Per-tree-code table mapping each code to the gimple_rhs_class it has
   when used on the RHS of a GIMPLE assignment: unary/binary operator
   classes map to the matching RHS class, leaf-like codes (constants,
   declarations, references and a few special expressions) map to
   GIMPLE_SINGLE_RHS, a fixed list of three-operand expressions maps to
   GIMPLE_TERNARY_RHS, and everything else is GIMPLE_INVALID_RHS.  The
   table body is generated by expanding DEFTREECODE over all-tree.def.  */
#define DEFTREECODE(SYM, STRING, TYPE, NARGS)   			    \
  (unsigned char)							    \
  ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS				    \
   : ((TYPE) == tcc_binary						    \
      || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS   		    \
   : ((TYPE) == tcc_constant						    \
      || (TYPE) == tcc_declaration					    \
      || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS			    \
   : ((SYM) == TRUTH_AND_EXPR						    \
      || (SYM) == TRUTH_OR_EXPR						    \
      || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS			    \
   : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS				    \
   : ((SYM) == COND_EXPR						    \
      || (SYM) == WIDEN_MULT_PLUS_EXPR					    \
      || (SYM) == WIDEN_MULT_MINUS_EXPR					    \
      || (SYM) == DOT_PROD_EXPR						    \
      || (SYM) == SAD_EXPR						    \
      || (SYM) == REALIGN_LOAD_EXPR					    \
      || (SYM) == VEC_COND_EXPR						    \
      || (SYM) == VEC_PERM_EXPR                                             \
      || (SYM) == BIT_INSERT_EXPR) ? GIMPLE_TERNARY_RHS			    \
   : ((SYM) == CONSTRUCTOR						    \
      || (SYM) == OBJ_TYPE_REF						    \
      || (SYM) == ASSERT_EXPR						    \
      || (SYM) == ADDR_EXPR						    \
      || (SYM) == WITH_SIZE_EXPR					    \
      || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS				    \
   : GIMPLE_INVALID_RHS),
#define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,

const unsigned char gimple_rhs_class_table[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
2232 
/* Canonicalize a tree T for use in a COND_EXPR as conditional.  Returns
   a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
   we failed to create one.  */

tree
canonicalize_cond_expr_cond (tree t)
{
  /* Strip conversions around boolean operations.  */
  if (CONVERT_EXPR_P (t)
      && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
          || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
	     == BOOLEAN_TYPE))
    t = TREE_OPERAND (t, 0);

  /* For !x use x == 0.  */
  if (TREE_CODE (t) == TRUTH_NOT_EXPR)
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (EQ_EXPR, TREE_TYPE (t),
		  top0, build_int_cst (TREE_TYPE (top0), 0));
    }
  /* For cmp ? 1 : 0 use cmp.  */
  else if (TREE_CODE (t) == COND_EXPR
	   && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
	   && integer_onep (TREE_OPERAND (t, 1))
	   && integer_zerop (TREE_OPERAND (t, 2)))
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (TREE_CODE (top0), TREE_TYPE (t),
		  TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
    }
  /* For x ^ y use x != y.  */
  else if (TREE_CODE (t) == BIT_XOR_EXPR)
    t = build2 (NE_EXPR, TREE_TYPE (t),
		TREE_OPERAND (t, 0), TREE_OPERAND (t, 1));

  /* Only hand back the result if it is actually usable as a COND_EXPR
     condition; otherwise report failure to the caller.  */
  if (is_gimple_condexpr (t))
    return t;

  return NULL_TREE;
}
2274 
/* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
   the positions marked by the set ARGS_TO_SKIP.  */

gcall *
gimple_call_copy_skip_args (gcall *stmt, bitmap args_to_skip)
{
  int i;
  int nargs = gimple_call_num_args (stmt);
  auto_vec<tree> vargs (nargs);
  gcall *new_stmt;

  /* Collect the arguments that survive the filtering.  */
  for (i = 0; i < nargs; i++)
    if (!bitmap_bit_p (args_to_skip, i))
      vargs.quick_push (gimple_call_arg (stmt, i));

  /* Internal calls carry no function decl; rebuild them from the
     internal function code instead.  */
  if (gimple_call_internal_p (stmt))
    new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
					       vargs);
  else
    new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);

  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  /* Carry the virtual operands over from the original statement.  */
  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
  gimple_set_vdef (new_stmt, gimple_vdef (stmt));

  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));

  /* The copy's SSA operands need to be recomputed.  */
  gimple_set_modified (new_stmt, true);

  return new_stmt;
}
2311 
2312 
2313 
/* Return true if the field decls F1 and F2 are at the same offset.

   This is intended to be used on GIMPLE types only.  */

bool
gimple_compare_field_offset (tree f1, tree f2)
{
  if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
    {
      tree offset1 = DECL_FIELD_OFFSET (f1);
      tree offset2 = DECL_FIELD_OFFSET (f2);
      return ((offset1 == offset2
	       /* Once gimplification is done, self-referential offsets are
		  instantiated as operand #2 of the COMPONENT_REF built for
		  each access and reset.  Therefore, they are not relevant
		  anymore and fields are interchangeable provided that they
		  represent the same access.  */
	       || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
		   && TREE_CODE (offset2) == PLACEHOLDER_EXPR
		   && (DECL_SIZE (f1) == DECL_SIZE (f2)
		       || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
			   && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
		       || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
		   && DECL_ALIGN (f1) == DECL_ALIGN (f2))
	       || operand_equal_p (offset1, offset2, 0))
	      && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
				     DECL_FIELD_BIT_OFFSET (f2)));
    }

  /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
     should be, so handle differing ones specially by decomposing
     the offset into a byte and bit offset manually.  */
  if (tree_fits_shwi_p (DECL_FIELD_OFFSET (f1))
      && tree_fits_shwi_p (DECL_FIELD_OFFSET (f2)))
    {
      unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
      unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
      bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
      byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
		      + bit_offset1 / BITS_PER_UNIT);
      bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
      byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
		      + bit_offset2 / BITS_PER_UNIT);
      if (byte_offset1 != byte_offset2)
	return false;
      /* Byte offsets agree; the fields coincide iff the residual
	 sub-byte bit offsets agree as well.  */
      return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
    }

  return false;
}
2364 
2365 
/* Return a type the same as TYPE except unsigned or
   signed according to UNSIGNEDP.  */

static tree
gimple_signed_or_unsigned_type (bool unsignedp, tree type)
{
  tree type1;
  int i;

  /* First try to match TYPE's main variant against the standard C
     integer type nodes and return the signed/unsigned counterpart.  */
  type1 = TYPE_MAIN_VARIANT (type);
  if (type1 == signed_char_type_node
      || type1 == char_type_node
      || type1 == unsigned_char_type_node)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (type1 == integer_type_node || type1 == unsigned_type_node)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (type1 == long_long_integer_type_node
      || type1 == long_long_unsigned_type_node)
    return unsignedp
           ? long_long_unsigned_type_node
	   : long_long_integer_type_node;

  /* Target-specific __intN types (e.g. __int128).  */
  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    if (int_n_enabled_p[i]
	&& (type1 == int_n_trees[i].unsigned_type
	    || type1 == int_n_trees[i].signed_type))
	return unsignedp
	  ? int_n_trees[i].unsigned_type
	  : int_n_trees[i].signed_type;

  /* Mode-named integer types.  */
#if HOST_BITS_PER_WIDE_INT >= 64
  if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

  /* Helper macros covering the fixed-point type families (short/long/
     long long and mode-named variants, saturating and not).  Each
     expands to identity tests like the pointer comparisons above.  */
#define GIMPLE_FIXED_TYPES(NAME)	    \
  if (type1 == short_ ## NAME ## _type_node \
      || type1 == unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_short_ ## NAME ## _type_node \
		     : short_ ## NAME ## _type_node; \
  if (type1 == NAME ## _type_node \
      || type1 == unsigned_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_ ## NAME ## _type_node \
		     : NAME ## _type_node; \
  if (type1 == long_ ## NAME ## _type_node \
      || type1 == unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_ ## NAME ## _type_node \
		     : long_ ## NAME ## _type_node; \
  if (type1 == long_long_ ## NAME ## _type_node \
      || type1 == unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
		     : long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES(NAME) \
  if (type1 == NAME ## _type_node \
      || type1 == u ## NAME ## _type_node) \
    return unsignedp ? u ## NAME ## _type_node \
		     : NAME ## _type_node;

#define GIMPLE_FIXED_TYPES_SAT(NAME) \
  if (type1 == sat_ ## short_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
		     : sat_ ## short_ ## NAME ## _type_node; \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
		     : sat_ ## long_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
		     : sat_ ## long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES_SAT(NAME)	\
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## u ## NAME ## _type_node) \
    return unsignedp ? sat_ ## u ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node;

  GIMPLE_FIXED_TYPES (fract);
  GIMPLE_FIXED_TYPES_SAT (fract);
  GIMPLE_FIXED_TYPES (accum);
  GIMPLE_FIXED_TYPES_SAT (accum);

  GIMPLE_FIXED_MODE_TYPES (qq);
  GIMPLE_FIXED_MODE_TYPES (hq);
  GIMPLE_FIXED_MODE_TYPES (sq);
  GIMPLE_FIXED_MODE_TYPES (dq);
  GIMPLE_FIXED_MODE_TYPES (tq);
  GIMPLE_FIXED_MODE_TYPES_SAT (qq);
  GIMPLE_FIXED_MODE_TYPES_SAT (hq);
  GIMPLE_FIXED_MODE_TYPES_SAT (sq);
  GIMPLE_FIXED_MODE_TYPES_SAT (dq);
  GIMPLE_FIXED_MODE_TYPES_SAT (tq);
  GIMPLE_FIXED_MODE_TYPES (ha);
  GIMPLE_FIXED_MODE_TYPES (sa);
  GIMPLE_FIXED_MODE_TYPES (da);
  GIMPLE_FIXED_MODE_TYPES (ta);
  GIMPLE_FIXED_MODE_TYPES_SAT (ha);
  GIMPLE_FIXED_MODE_TYPES_SAT (sa);
  GIMPLE_FIXED_MODE_TYPES_SAT (da);
  GIMPLE_FIXED_MODE_TYPES_SAT (ta);

  /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
     the precision; they have precision set to match their range, but
     may use a wider mode to match an ABI.  If we change modes, we may
     wind up with bad conversions.  For INTEGER_TYPEs in C, must check
     the precision as well, so as to yield correct results for
     bit-field types.  C++ does not have these separate bit-field
     types, and producing a signed or unsigned variant of an
     ENUMERAL_TYPE may cause other problems as well.  */
  if (!INTEGRAL_TYPE_P (type)
      || TYPE_UNSIGNED (type) == unsignedp)
    return type;

  /* Second pass: match by mode and precision rather than identity,
     per the comment above.  */
#define TYPE_OK(node)							    \
  (TYPE_MODE (type) == TYPE_MODE (node)					    \
   && TYPE_PRECISION (type) == TYPE_PRECISION (node))
  if (TYPE_OK (signed_char_type_node))
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (TYPE_OK (integer_type_node))
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (TYPE_OK (short_integer_type_node))
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (TYPE_OK (long_integer_type_node))
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (TYPE_OK (long_long_integer_type_node))
    return (unsignedp
	    ? long_long_unsigned_type_node
	    : long_long_integer_type_node);

  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    if (int_n_enabled_p[i]
	&& TYPE_MODE (type) == int_n_data[i].m
	&& TYPE_PRECISION (type) == int_n_data[i].bitsize)
	return unsignedp
	  ? int_n_trees[i].unsigned_type
	  : int_n_trees[i].signed_type;

#if HOST_BITS_PER_WIDE_INT >= 64
  if (TYPE_OK (intTI_type_node))
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (TYPE_OK (intDI_type_node))
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (TYPE_OK (intSI_type_node))
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (TYPE_OK (intHI_type_node))
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (TYPE_OK (intQI_type_node))
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

#undef GIMPLE_FIXED_TYPES
#undef GIMPLE_FIXED_MODE_TYPES
#undef GIMPLE_FIXED_TYPES_SAT
#undef GIMPLE_FIXED_MODE_TYPES_SAT
#undef TYPE_OK

  /* No standard node matched; build a fresh integer type with the
     requested signedness and the same precision.  */
  return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
}
2542 
2543 
2544 /* Return an unsigned type the same as TYPE in other respects.  */
2545 
2546 tree
2547 gimple_unsigned_type (tree type)
2548 {
2549   return gimple_signed_or_unsigned_type (true, type);
2550 }
2551 
2552 
2553 /* Return a signed type the same as TYPE in other respects.  */
2554 
2555 tree
2556 gimple_signed_type (tree type)
2557 {
2558   return gimple_signed_or_unsigned_type (false, type);
2559 }
2560 
2561 
/* Return the typed-based alias set for T, which may be an expression
   or a type.  Return -1 if we don't do anything special.  */

alias_set_type
gimple_get_alias_set (tree t)
{
  /* That's all the expressions we handle specially.  */
  if (!TYPE_P (t))
    return -1;

  /* For convenience, follow the C standard when dealing with
     character types.  Any object may be accessed via an lvalue that
     has character type.  Alias set 0 aliases everything.  */
  if (t == char_type_node
      || t == signed_char_type_node
      || t == unsigned_char_type_node)
    return 0;

  /* Allow aliasing between signed and unsigned variants of the same
     type.  We treat the signed variant as canonical.  */
  if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
    {
      tree t1 = gimple_signed_type (t);

      /* t1 == t can happen for boolean nodes which are always unsigned.  */
      if (t1 != t)
	return get_alias_set (t1);
    }

  /* Allow aliasing between enumeral types and the underlying
     integer type.  This is required for C since those are
     compatible types.  */
  else if (TREE_CODE (t) == ENUMERAL_TYPE)
    {
      /* Pick the signed integer type of the same size; the signedness
	 short-cut above makes signed the canonical variant anyway.  */
      tree t1 = lang_hooks.types.type_for_size (tree_to_uhwi (TYPE_SIZE (t)),
						false /* short-cut above */);
      return get_alias_set (t1);
    }

  return -1;
}
2603 
2604 
2605 /* Helper for gimple_ior_addresses_taken_1.  */
2606 
2607 static bool
2608 gimple_ior_addresses_taken_1 (gimple *, tree addr, tree, void *data)
2609 {
2610   bitmap addresses_taken = (bitmap)data;
2611   addr = get_base_address (addr);
2612   if (addr
2613       && DECL_P (addr))
2614     {
2615       bitmap_set_bit (addresses_taken, DECL_UID (addr));
2616       return true;
2617     }
2618   return false;
2619 }
2620 
2621 /* Set the bit for the uid of all decls that have their address taken
2622    in STMT in the ADDRESSES_TAKEN bitmap.  Returns true if there
2623    were any in this stmt.  */
2624 
2625 bool
2626 gimple_ior_addresses_taken (bitmap addresses_taken, gimple *stmt)
2627 {
2628   return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
2629 					gimple_ior_addresses_taken_1);
2630 }
2631 
2632 
/* Return true when STMTs arguments and return value match those of FNDECL,
   a decl of a builtin function.  */

bool
gimple_builtin_call_types_compatible_p (const gimple *stmt, tree fndecl)
{
  gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN);

  /* The LHS, if any, must be convertible from the declared return type.  */
  tree ret = gimple_call_lhs (stmt);
  if (ret
      && !useless_type_conversion_p (TREE_TYPE (ret),
				     TREE_TYPE (TREE_TYPE (fndecl))))
    return false;

  tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  unsigned nargs = gimple_call_num_args (stmt);
  for (unsigned i = 0; i < nargs; ++i)
    {
      /* Variadic args follow.  */
      if (!targs)
	return true;
      tree arg = gimple_call_arg (stmt, i);
      tree type = TREE_VALUE (targs);
      if (!useless_type_conversion_p (type, TREE_TYPE (arg))
	  /* char/short integral arguments are promoted to int
	     by several frontends if targetm.calls.promote_prototypes
	     is true.  Allow such promotion too.  */
	  && !(INTEGRAL_TYPE_P (type)
	       && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
	       && targetm.calls.promote_prototypes (TREE_TYPE (fndecl))
	       && useless_type_conversion_p (integer_type_node,
					     TREE_TYPE (arg))))
	return false;
      targs = TREE_CHAIN (targs);
    }
  /* Too few actual arguments is only OK for a (...) prototype tail;
     a remaining non-void parameter means a mismatch.  */
  if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
    return false;
  return true;
}
2672 
2673 /* Return true when STMT is builtins call.  */
2674 
2675 bool
2676 gimple_call_builtin_p (const gimple *stmt)
2677 {
2678   tree fndecl;
2679   if (is_gimple_call (stmt)
2680       && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2681       && DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN)
2682     return gimple_builtin_call_types_compatible_p (stmt, fndecl);
2683   return false;
2684 }
2685 
2686 /* Return true when STMT is builtins call to CLASS.  */
2687 
2688 bool
2689 gimple_call_builtin_p (const gimple *stmt, enum built_in_class klass)
2690 {
2691   tree fndecl;
2692   if (is_gimple_call (stmt)
2693       && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2694       && DECL_BUILT_IN_CLASS (fndecl) == klass)
2695     return gimple_builtin_call_types_compatible_p (stmt, fndecl);
2696   return false;
2697 }
2698 
2699 /* Return true when STMT is builtins call to CODE of CLASS.  */
2700 
2701 bool
2702 gimple_call_builtin_p (const gimple *stmt, enum built_in_function code)
2703 {
2704   tree fndecl;
2705   if (is_gimple_call (stmt)
2706       && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2707       && fndecl_built_in_p (fndecl, code))
2708     return gimple_builtin_call_types_compatible_p (stmt, fndecl);
2709   return false;
2710 }
2711 
2712 /* If CALL is a call to a combined_fn (i.e. an internal function or
2713    a normal built-in function), return its code, otherwise return
2714    CFN_LAST.  */
2715 
2716 combined_fn
2717 gimple_call_combined_fn (const gimple *stmt)
2718 {
2719   if (const gcall *call = dyn_cast <const gcall *> (stmt))
2720     {
2721       if (gimple_call_internal_p (call))
2722 	return as_combined_fn (gimple_call_internal_fn (call));
2723 
2724       tree fndecl = gimple_call_fndecl (stmt);
2725       if (fndecl
2726 	  && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
2727 	  && gimple_builtin_call_types_compatible_p (stmt, fndecl))
2728 	return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
2729     }
2730   return CFN_LAST;
2731 }
2732 
2733 /* Return true if STMT clobbers memory.  STMT is required to be a
2734    GIMPLE_ASM.  */
2735 
2736 bool
2737 gimple_asm_clobbers_memory_p (const gasm *stmt)
2738 {
2739   unsigned i;
2740 
2741   for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
2742     {
2743       tree op = gimple_asm_clobber_op (stmt, i);
2744       if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
2745 	return true;
2746     }
2747 
2748   /* Non-empty basic ASM implicitly clobbers memory.  */
2749   if (gimple_asm_input_p (stmt) && strlen (gimple_asm_string (stmt)) != 0)
2750     return true;
2751 
2752   return false;
2753 }
2754 
2755 /* Dump bitmap SET (assumed to contain VAR_DECLs) to FILE.  */
2756 
2757 void
2758 dump_decl_set (FILE *file, bitmap set)
2759 {
2760   if (set)
2761     {
2762       bitmap_iterator bi;
2763       unsigned i;
2764 
2765       fprintf (file, "{ ");
2766 
2767       EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
2768 	{
2769 	  fprintf (file, "D.%u", i);
2770 	  fprintf (file, " ");
2771 	}
2772 
2773       fprintf (file, "}");
2774     }
2775   else
2776     fprintf (file, "NIL");
2777 }
2778 
/* Return true when CALL is a call stmt that definitely doesn't
   free any memory or makes it unavailable otherwise.  */
bool
nonfreeing_call_p (gimple *call)
{
  /* Leaf normal built-ins cannot call back into user code, so they
     cannot free except through the explicit exceptions below.  */
  if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
      && gimple_call_flags (call) & ECF_LEAF)
    switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
      {
	/* Just in case these become ECF_LEAF in the future.  */
	case BUILT_IN_FREE:
	case BUILT_IN_TM_FREE:
	case BUILT_IN_REALLOC:
	case BUILT_IN_STACK_RESTORE:
	  return false;
	default:
	  return true;
      }
  else if (gimple_call_internal_p (call))
    switch (gimple_call_internal_fn (call))
      {
      case IFN_ABNORMAL_DISPATCHER:
        return true;
      case IFN_ASAN_MARK:
	/* Only the unpoisoning direction leaves memory available.  */
	return tree_to_uhwi (gimple_call_arg (call, 0)) == ASAN_MARK_UNPOISON;
      default:
	if (gimple_call_flags (call) & ECF_LEAF)
	  return true;
	return false;
      }

  /* For ordinary calls, consult the callgraph: walk to the ultimate
     callee and trust its nonfreeing_fn flag only when the body cannot
     be replaced at link/run time.  */
  tree fndecl = gimple_call_fndecl (call);
  if (!fndecl)
    return false;
  struct cgraph_node *n = cgraph_node::get (fndecl);
  if (!n)
    return false;
  enum availability availability;
  n = n->function_symbol (&availability);
  if (!n || availability <= AVAIL_INTERPOSABLE)
    return false;
  return n->nonfreeing_fn;
}
2822 
2823 /* Return true when CALL is a call stmt that definitely need not
2824    be considered to be a memory barrier.  */
2825 bool
2826 nonbarrier_call_p (gimple *call)
2827 {
2828   if (gimple_call_flags (call) & (ECF_PURE | ECF_CONST))
2829     return true;
2830   /* Should extend this to have a nonbarrier_fn flag, just as above in
2831      the nonfreeing case.  */
2832   return false;
2833 }
2834 
2835 /* Callback for walk_stmt_load_store_ops.
2836 
2837    Return TRUE if OP will dereference the tree stored in DATA, FALSE
2838    otherwise.
2839 
2840    This routine only makes a superficial check for a dereference.  Thus
2841    it must only be used if it is safe to return a false negative.  */
2842 static bool
2843 check_loadstore (gimple *, tree op, tree, void *data)
2844 {
2845   if (TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
2846     {
2847       /* Some address spaces may legitimately dereference zero.  */
2848       addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (op));
2849       if (targetm.addr_space.zero_address_valid (as))
2850 	return false;
2851 
2852       return operand_equal_p (TREE_OPERAND (op, 0), (tree)data, 0);
2853     }
2854   return false;
2855 }
2856 
2857 
2858 /* Return true if OP can be inferred to be non-NULL after STMT executes,
2859    either by using a pointer dereference or attributes.  */
2860 bool
2861 infer_nonnull_range (gimple *stmt, tree op)
2862 {
2863   return infer_nonnull_range_by_dereference (stmt, op)
2864     || infer_nonnull_range_by_attribute (stmt, op);
2865 }
2866 
2867 /* Return true if OP can be inferred to be non-NULL after STMT
2868    executes by using a pointer dereference.  */
2869 bool
2870 infer_nonnull_range_by_dereference (gimple *stmt, tree op)
2871 {
2872   /* We can only assume that a pointer dereference will yield
2873      non-NULL if -fdelete-null-pointer-checks is enabled.  */
2874   if (!flag_delete_null_pointer_checks
2875       || !POINTER_TYPE_P (TREE_TYPE (op))
2876       || gimple_code (stmt) == GIMPLE_ASM)
2877     return false;
2878 
2879   if (walk_stmt_load_store_ops (stmt, (void *)op,
2880 				check_loadstore, check_loadstore))
2881     return true;
2882 
2883   return false;
2884 }
2885 
/* Return true if OP can be inferred to be a non-NULL after STMT
   executes by using attributes.  */
bool
infer_nonnull_range_by_attribute (gimple *stmt, tree op)
{
  /* We can only assume that a pointer dereference will yield
     non-NULL if -fdelete-null-pointer-checks is enabled.  */
  if (!flag_delete_null_pointer_checks
      || !POINTER_TYPE_P (TREE_TYPE (op))
      || gimple_code (stmt) == GIMPLE_ASM)
    return false;

  if (is_gimple_call (stmt) && !gimple_call_internal_p (stmt))
    {
      tree fntype = gimple_call_fntype (stmt);
      tree attrs = TYPE_ATTRIBUTES (fntype);
      /* Note ATTRS is advanced inside the body so each lookup resumes
	 after the previously found "nonnull" attribute.  */
      for (; attrs; attrs = TREE_CHAIN (attrs))
	{
	  attrs = lookup_attribute ("nonnull", attrs);

	  /* If "nonnull" wasn't specified, we know nothing about
	     the argument.  */
	  if (attrs == NULL_TREE)
	    return false;

	  /* If "nonnull" applies to all the arguments, then ARG
	     is non-null if it's in the argument list.  */
	  if (TREE_VALUE (attrs) == NULL_TREE)
	    {
	      for (unsigned int i = 0; i < gimple_call_num_args (stmt); i++)
		{
		  if (POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (stmt, i)))
		      && operand_equal_p (op, gimple_call_arg (stmt, i), 0))
		    return true;
		}
	      return false;
	    }

	  /* Now see if op appears in the nonnull list.  */
	  for (tree t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
	    {
	      /* Attribute argument indices are 1-based.  */
	      unsigned int idx = TREE_INT_CST_LOW (TREE_VALUE (t)) - 1;
	      if (idx < gimple_call_num_args (stmt))
		{
		  tree arg = gimple_call_arg (stmt, idx);
		  if (operand_equal_p (op, arg, 0))
		    return true;
		}
	    }
	}
    }

  /* If this function is marked as returning non-null, then we can
     infer OP is non-null if it is used in the return statement.  */
  if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
    if (gimple_return_retval (return_stmt)
	&& operand_equal_p (gimple_return_retval (return_stmt), op, 0)
	&& lookup_attribute ("returns_nonnull",
			     TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
      return true;

  return false;
}
2949 
2950 /* Compare two case labels.  Because the front end should already have
2951    made sure that case ranges do not overlap, it is enough to only compare
2952    the CASE_LOW values of each case label.  */
2953 
2954 static int
2955 compare_case_labels (const void *p1, const void *p2)
2956 {
2957   const_tree const case1 = *(const_tree const*)p1;
2958   const_tree const case2 = *(const_tree const*)p2;
2959 
2960   /* The 'default' case label always goes first.  */
2961   if (!CASE_LOW (case1))
2962     return -1;
2963   else if (!CASE_LOW (case2))
2964     return 1;
2965   else
2966     return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
2967 }
2968 
/* Sort the case labels in LABEL_VEC in place in ascending order.
   compare_case_labels orders by CASE_LOW and places the default
   label (the one with no CASE_LOW) first.  */

void
sort_case_labels (vec<tree> label_vec)
{
  label_vec.qsort (compare_case_labels);
}
2976 
/* Prepare a vector of case labels to be used in a GIMPLE_SWITCH statement.

   LABELS is a vector that contains all case labels to look at.

   INDEX_TYPE is the type of the switch index expression.  Case labels
   in LABELS are discarded if their values are not in the value range
   covered by INDEX_TYPE.  The remaining case label values are folded
   to INDEX_TYPE.

   If a default case exists in LABELS, it is removed from LABELS and
   returned in DEFAULT_CASEP.  If no default case exists, but the
   case labels already cover the whole range of INDEX_TYPE, a default
   case is returned pointing to one of the existing case labels.
   Otherwise DEFAULT_CASEP is set to NULL_TREE.

   DEFAULT_CASEP may be NULL, in which case the above comment doesn't
   apply and no action is taken regardless of whether a default case is
   found or not.  */

void
preprocess_case_label_vec_for_gimple (vec<tree> labels,
				      tree index_type,
				      tree *default_casep)
{
  tree min_value, max_value;
  tree default_case = NULL_TREE;
  size_t i, len;

  i = 0;
  min_value = TYPE_MIN_VALUE (index_type);
  max_value = TYPE_MAX_VALUE (index_type);
  /* First pass: canonicalize each label in place, dropping the ones
     that are unreachable for INDEX_TYPE.  I is only advanced when the
     current element is kept, because ordered_remove shifts the rest
     of the vector down.  */
  while (i < labels.length ())
    {
      tree elt = labels[i];
      tree low = CASE_LOW (elt);
      tree high = CASE_HIGH (elt);
      bool remove_element = FALSE;

      if (low)
	{
	  gcc_checking_assert (TREE_CODE (low) == INTEGER_CST);
	  gcc_checking_assert (!high || TREE_CODE (high) == INTEGER_CST);

	  /* This is a non-default case label, i.e. it has a value.

	     See if the case label is reachable within the range of
	     the index type.  Remove out-of-range case values.  Turn
	     case ranges into a canonical form (high > low strictly)
	     and convert the case label values to the index type.

	     NB: The type of gimple_switch_index() may be the promoted
	     type, but the case labels retain the original type.  */

	  if (high)
	    {
	      /* This is a case range.  Discard empty ranges.
		 If the bounds or the range are equal, turn this
		 into a simple (one-value) case.  */
	      int cmp = tree_int_cst_compare (high, low);
	      if (cmp < 0)
		remove_element = TRUE;
	      else if (cmp == 0)
		high = NULL_TREE;
	    }

	  if (! high)
	    {
	      /* If the simple case value is unreachable, ignore it.  */
	      if ((TREE_CODE (min_value) == INTEGER_CST
		   && tree_int_cst_compare (low, min_value) < 0)
		  || (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (low, max_value) > 0))
		remove_element = TRUE;
	      else
		low = fold_convert (index_type, low);
	    }
	  else
	    {
	      /* If the entire case range is unreachable, ignore it.  */
	      if ((TREE_CODE (min_value) == INTEGER_CST
		   && tree_int_cst_compare (high, min_value) < 0)
		  || (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (low, max_value) > 0))
		remove_element = TRUE;
	      else
		{
		  /* If the lower bound is less than the index type's
		     minimum value, truncate the range bounds.  */
		  if (TREE_CODE (min_value) == INTEGER_CST
		      && tree_int_cst_compare (low, min_value) < 0)
		    low = min_value;
		  low = fold_convert (index_type, low);

		  /* If the upper bound is greater than the index type's
		     maximum value, truncate the range bounds.  */
		  if (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (high, max_value) > 0)
		    high = max_value;
		  high = fold_convert (index_type, high);

		  /* We may have folded a case range to a one-value case.  */
		  if (tree_int_cst_equal (low, high))
		    high = NULL_TREE;
		}
	    }

	  CASE_LOW (elt) = low;
	  CASE_HIGH (elt) = high;
	}
      else
	{
	  gcc_assert (!default_case);
	  default_case = elt;
	  /* The default case must be passed separately to the
	     gimple_build_switch routine.  But if DEFAULT_CASEP
	     is NULL, we do not remove the default case (it would
	     be completely lost).  */
	  if (default_casep)
	    remove_element = TRUE;
	}

      if (remove_element)
	labels.ordered_remove (i);
      else
	i++;
    }
  /* Number of labels retained after filtering.  */
  len = i;

  if (!labels.is_empty ())
    sort_case_labels (labels);

  if (default_casep && !default_case)
    {
      /* If the switch has no default label, add one, so that we jump
	 around the switch body.  If the labels already cover the whole
	 range of the switch index_type, add the default label pointing
	 to one of the existing labels.  */
      if (len
	  && TYPE_MIN_VALUE (index_type)
	  && TYPE_MAX_VALUE (index_type)
	  && tree_int_cst_equal (CASE_LOW (labels[0]),
				 TYPE_MIN_VALUE (index_type)))
	{
	  tree low, high = CASE_HIGH (labels[len - 1]);
	  if (!high)
	    high = CASE_LOW (labels[len - 1]);
	  if (tree_int_cst_equal (high, TYPE_MAX_VALUE (index_type)))
	    {
	      /* Check that the sorted labels are contiguous (no gap
		 between one label's upper bound and the next label's
		 lower bound); while doing so, remember the label with
		 the widest range as the candidate default.  */
	      tree widest_label = labels[0];
	      for (i = 1; i < len; i++)
		{
		  high = CASE_LOW (labels[i]);
		  low = CASE_HIGH (labels[i - 1]);
		  if (!low)
		    low = CASE_LOW (labels[i - 1]);

		  if (CASE_HIGH (labels[i]) != NULL_TREE
		      && (CASE_HIGH (widest_label) == NULL_TREE
			  || (wi::gtu_p
			      (wi::to_wide (CASE_HIGH (labels[i]))
			       - wi::to_wide (CASE_LOW (labels[i])),
			       wi::to_wide (CASE_HIGH (widest_label))
			       - wi::to_wide (CASE_LOW (widest_label))))))
		    widest_label = labels[i];

		  /* A gap between adjacent labels means the range is
		     not fully covered; give up.  */
		  if (wi::to_wide (low) + 1 != wi::to_wide (high))
		    break;
		}
	      if (i == len)
		{
		  /* Designate the label with the widest range to be the
		     default label.  */
		  tree label = CASE_LABEL (widest_label);
		  default_case = build_case_label (NULL_TREE, NULL_TREE,
						   label);
		}
	    }
	}
    }

  if (default_casep)
    *default_casep = default_case;
}
3160 
3161 /* Set the location of all statements in SEQ to LOC.  */
3162 
3163 void
3164 gimple_seq_set_location (gimple_seq seq, location_t loc)
3165 {
3166   for (gimple_stmt_iterator i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
3167     gimple_set_location (gsi_stmt (i), loc);
3168 }
3169 
3170 /* Release SSA_NAMEs in SEQ as well as the GIMPLE statements.  */
3171 
3172 void
3173 gimple_seq_discard (gimple_seq seq)
3174 {
3175   gimple_stmt_iterator gsi;
3176 
3177   for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
3178     {
3179       gimple *stmt = gsi_stmt (gsi);
3180       gsi_remove (&gsi, true);
3181       release_defs (stmt);
3182       ggc_free (stmt);
3183     }
3184 }
3185 
3186 /* See if STMT now calls function that takes no parameters and if so, drop
3187    call arguments.  This is used when devirtualization machinery redirects
3188    to __builtin_unreachable or __cxa_pure_virtual.  */
3189 
3190 void
3191 maybe_remove_unused_call_args (struct function *fn, gimple *stmt)
3192 {
3193   tree decl = gimple_call_fndecl (stmt);
3194   if (TYPE_ARG_TYPES (TREE_TYPE (decl))
3195       && TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl))) == void_type_node
3196       && gimple_call_num_args (stmt))
3197     {
3198       gimple_set_num_ops (stmt, 3);
3199       update_stmt_fn (fn, stmt);
3200     }
3201 }
3202 
3203 /* Return false if STMT will likely expand to real function call.  */
3204 
3205 bool
3206 gimple_inexpensive_call_p (gcall *stmt)
3207 {
3208   if (gimple_call_internal_p (stmt))
3209     return true;
3210   tree decl = gimple_call_fndecl (stmt);
3211   if (decl && is_inexpensive_builtin (decl))
3212     return true;
3213   return false;
3214 }
3215 
3216 #if CHECKING_P
3217 
3218 namespace selftest {
3219 
3220 /* Selftests for core gimple structures.  */
3221 
/* Verify that STMT is pretty-printed as EXPECTED.
   Helper function for selftests.  */

static void
verify_gimple_pp (const char *expected, gimple *stmt)
{
  pretty_printer pp;
  /* Print STMT at indentation 0 with no dump flags, then compare the
     buffered text against EXPECTED.  */
  pp_gimple_stmt_1 (&pp, stmt, 0 /* spc */, TDF_NONE /* flags */);
  ASSERT_STREQ (expected, pp_formatted_text (&pp));
}
3232 
3233 /* Build a GIMPLE_ASSIGN equivalent to
3234      tmp = 5;
3235    and verify various properties of it.  */
3236 
3237 static void
3238 test_assign_single ()
3239 {
3240   tree type = integer_type_node;
3241   tree lhs = build_decl (UNKNOWN_LOCATION, VAR_DECL,
3242 			 get_identifier ("tmp"),
3243 			 type);
3244   tree rhs = build_int_cst (type, 5);
3245   gassign *stmt = gimple_build_assign (lhs, rhs);
3246   verify_gimple_pp ("tmp = 5;", stmt);
3247 
3248   ASSERT_TRUE (is_gimple_assign (stmt));
3249   ASSERT_EQ (lhs, gimple_assign_lhs (stmt));
3250   ASSERT_EQ (lhs, gimple_get_lhs (stmt));
3251   ASSERT_EQ (rhs, gimple_assign_rhs1 (stmt));
3252   ASSERT_EQ (NULL, gimple_assign_rhs2 (stmt));
3253   ASSERT_EQ (NULL, gimple_assign_rhs3 (stmt));
3254   ASSERT_TRUE (gimple_assign_single_p (stmt));
3255   ASSERT_EQ (INTEGER_CST, gimple_assign_rhs_code (stmt));
3256 }
3257 
3258 /* Build a GIMPLE_ASSIGN equivalent to
3259      tmp = a * b;
3260    and verify various properties of it.  */
3261 
3262 static void
3263 test_assign_binop ()
3264 {
3265   tree type = integer_type_node;
3266   tree lhs = build_decl (UNKNOWN_LOCATION, VAR_DECL,
3267 			 get_identifier ("tmp"),
3268 			 type);
3269   tree a = build_decl (UNKNOWN_LOCATION, VAR_DECL,
3270 		       get_identifier ("a"),
3271 		       type);
3272   tree b = build_decl (UNKNOWN_LOCATION, VAR_DECL,
3273 		       get_identifier ("b"),
3274 		       type);
3275   gassign *stmt = gimple_build_assign (lhs, MULT_EXPR, a, b);
3276   verify_gimple_pp ("tmp = a * b;", stmt);
3277 
3278   ASSERT_TRUE (is_gimple_assign (stmt));
3279   ASSERT_EQ (lhs, gimple_assign_lhs (stmt));
3280   ASSERT_EQ (lhs, gimple_get_lhs (stmt));
3281   ASSERT_EQ (a, gimple_assign_rhs1 (stmt));
3282   ASSERT_EQ (b, gimple_assign_rhs2 (stmt));
3283   ASSERT_EQ (NULL, gimple_assign_rhs3 (stmt));
3284   ASSERT_FALSE (gimple_assign_single_p (stmt));
3285   ASSERT_EQ (MULT_EXPR, gimple_assign_rhs_code (stmt));
3286 }
3287 
3288 /* Build a GIMPLE_NOP and verify various properties of it.  */
3289 
3290 static void
3291 test_nop_stmt ()
3292 {
3293   gimple *stmt = gimple_build_nop ();
3294   verify_gimple_pp ("GIMPLE_NOP", stmt);
3295   ASSERT_EQ (GIMPLE_NOP, gimple_code (stmt));
3296   ASSERT_EQ (NULL, gimple_get_lhs (stmt));
3297   ASSERT_FALSE (gimple_assign_single_p (stmt));
3298 }
3299 
3300 /* Build a GIMPLE_RETURN equivalent to
3301      return 7;
3302    and verify various properties of it.  */
3303 
3304 static void
3305 test_return_stmt ()
3306 {
3307   tree type = integer_type_node;
3308   tree val = build_int_cst (type, 7);
3309   greturn *stmt = gimple_build_return (val);
3310   verify_gimple_pp ("return 7;", stmt);
3311 
3312   ASSERT_EQ (GIMPLE_RETURN, gimple_code (stmt));
3313   ASSERT_EQ (NULL, gimple_get_lhs (stmt));
3314   ASSERT_EQ (val, gimple_return_retval (stmt));
3315   ASSERT_FALSE (gimple_assign_single_p (stmt));
3316 }
3317 
3318 /* Build a GIMPLE_RETURN equivalent to
3319      return;
3320    and verify various properties of it.  */
3321 
3322 static void
3323 test_return_without_value ()
3324 {
3325   greturn *stmt = gimple_build_return (NULL);
3326   verify_gimple_pp ("return;", stmt);
3327 
3328   ASSERT_EQ (GIMPLE_RETURN, gimple_code (stmt));
3329   ASSERT_EQ (NULL, gimple_get_lhs (stmt));
3330   ASSERT_EQ (NULL, gimple_return_retval (stmt));
3331   ASSERT_FALSE (gimple_assign_single_p (stmt));
3332 }
3333 
/* Run all of the selftests within this file.  */

void
gimple_c_tests ()
{
  /* Each helper builds one kind of gimple statement and checks its
     accessors and pretty-printed form.  */
  test_assign_single ();
  test_assign_binop ();
  test_nop_stmt ();
  test_return_stmt ();
  test_return_without_value ();
}
3345 
3346 } // namespace selftest
3347 
3348 
3349 #endif /* CHECKING_P */
3350