xref: /netbsd-src/external/gpl3/gcc/dist/gcc/gimple.cc (revision b1e838363e3c6fc78a55519254d99869742dd33c)
1 /* Gimple IR support functions.
2 
3    Copyright (C) 2007-2022 Free Software Foundation, Inc.
4    Contributed by Aldy Hernandez <aldyh@redhat.com>
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12 
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3.  If not see
20 <http://www.gnu.org/licenses/>.  */
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "ssa.h"
29 #include "cgraph.h"
30 #include "diagnostic.h"
31 #include "alias.h"
32 #include "fold-const.h"
33 #include "calls.h"
34 #include "stor-layout.h"
35 #include "internal-fn.h"
36 #include "tree-eh.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
39 #include "gimplify.h"
40 #include "target.h"
41 #include "builtins.h"
42 #include "selftest.h"
43 #include "gimple-pretty-print.h"
44 #include "stringpool.h"
45 #include "attribs.h"
46 #include "asan.h"
47 #include "langhooks.h"
48 #include "attr-fnspec.h"
49 #include "ipa-modref-tree.h"
50 #include "ipa-modref.h"
51 #include "dbgcnt.h"
52 
53 /* All the tuples have their operand vector (if present) at the very bottom
54    of the structure.  Therefore, the offset required to find the
55    operands vector the size of the structure minus the size of the 1
56    element tree array at the end (see gimple_ops).  */
57 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
58 	(HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
59 EXPORTED_CONST size_t gimple_ops_offset_[] = {
60 #include "gsstruct.def"
61 };
62 #undef DEFGSSTRUCT
63 
64 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof (struct STRUCT),
65 static const size_t gsstruct_code_size[] = {
66 #include "gsstruct.def"
67 };
68 #undef DEFGSSTRUCT
69 
70 #define DEFGSCODE(SYM, NAME, GSSCODE)	NAME,
71 const char *const gimple_code_name[] = {
72 #include "gimple.def"
73 };
74 #undef DEFGSCODE
75 
76 #define DEFGSCODE(SYM, NAME, GSSCODE)	GSSCODE,
77 EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
78 #include "gimple.def"
79 };
80 #undef DEFGSCODE
81 
82 /* Gimple stats.  */
83 
84 uint64_t gimple_alloc_counts[(int) gimple_alloc_kind_all];
85 uint64_t gimple_alloc_sizes[(int) gimple_alloc_kind_all];
86 
87 /* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
88 static const char * const gimple_alloc_kind_names[] = {
89     "assignments",
90     "phi nodes",
91     "conditionals",
92     "everything else"
93 };
94 
95 /* Static gimple tuple members.  */
96 const enum gimple_code gassign::code_;
97 const enum gimple_code gcall::code_;
98 const enum gimple_code gcond::code_;
99 
100 
101 /* Gimple tuple constructors.
102    Note: Any constructor taking a ``gimple_seq'' as a parameter, can
103    be passed a NULL to start with an empty sequence.  */
104 
105 /* Set the code for statement G to CODE.  */
106 
107 static inline void
gimple_set_code(gimple * g,enum gimple_code code)108 gimple_set_code (gimple *g, enum gimple_code code)
109 {
110   g->code = code;
111 }
112 
113 /* Return the number of bytes needed to hold a GIMPLE statement with
114    code CODE.  */
115 
116 size_t
gimple_size(enum gimple_code code,unsigned num_ops)117 gimple_size (enum gimple_code code, unsigned num_ops)
118 {
119   size_t size = gsstruct_code_size[gss_for_code (code)];
120   if (num_ops > 0)
121     size += (sizeof (tree) * (num_ops - 1));
122   return size;
123 }
124 
125 /* Initialize GIMPLE statement G with CODE and NUM_OPS.  */
126 
127 void
gimple_init(gimple * g,enum gimple_code code,unsigned num_ops)128 gimple_init (gimple *g, enum gimple_code code, unsigned num_ops)
129 {
130   gimple_set_code (g, code);
131   gimple_set_num_ops (g, num_ops);
132 
133   /* Do not call gimple_set_modified here as it has other side
134      effects and this tuple is still not completely built.  */
135   g->modified = 1;
136   gimple_init_singleton (g);
137 }
138 
139 /* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
140    operands.  */
141 
142 gimple *
gimple_alloc(enum gimple_code code,unsigned num_ops MEM_STAT_DECL)143 gimple_alloc (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
144 {
145   size_t size;
146   gimple *stmt;
147 
148   size = gimple_size (code, num_ops);
149   if (GATHER_STATISTICS)
150     {
151       enum gimple_alloc_kind kind = gimple_alloc_kind (code);
152       gimple_alloc_counts[(int) kind]++;
153       gimple_alloc_sizes[(int) kind] += size;
154     }
155 
156   stmt = ggc_alloc_cleared_gimple_statement_stat (size PASS_MEM_STAT);
157   gimple_init (stmt, code, num_ops);
158   return stmt;
159 }
160 
161 /* Set SUBCODE to be the code of the expression computed by statement G.  */
162 
163 static inline void
gimple_set_subcode(gimple * g,unsigned subcode)164 gimple_set_subcode (gimple *g, unsigned subcode)
165 {
166   /* We only have 16 bits for the RHS code.  Assert that we are not
167      overflowing it.  */
168   gcc_assert (subcode < (1 << 16));
169   g->subcode = subcode;
170 }
171 
172 
173 
174 /* Build a tuple with operands.  CODE is the statement to build (which
175    must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the subcode
176    for the new tuple.  NUM_OPS is the number of operands to allocate.  */
177 
178 #define gimple_build_with_ops(c, s, n) \
179   gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)
180 
181 static gimple *
gimple_build_with_ops_stat(enum gimple_code code,unsigned subcode,unsigned num_ops MEM_STAT_DECL)182 gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
183 		            unsigned num_ops MEM_STAT_DECL)
184 {
185   gimple *s = gimple_alloc (code, num_ops PASS_MEM_STAT);
186   gimple_set_subcode (s, subcode);
187 
188   return s;
189 }
190 
191 
192 /* Build a GIMPLE_RETURN statement returning RETVAL.  */
193 
194 greturn *
gimple_build_return(tree retval)195 gimple_build_return (tree retval)
196 {
197   greturn *s
198     = as_a <greturn *> (gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK,
199 					       2));
200   if (retval)
201     gimple_return_set_retval (s, retval);
202   return s;
203 }
204 
205 /* Reset alias information on call S.  */
206 
207 void
gimple_call_reset_alias_info(gcall * s)208 gimple_call_reset_alias_info (gcall *s)
209 {
210   if (gimple_call_flags (s) & ECF_CONST)
211     memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
212   else
213     pt_solution_reset (gimple_call_use_set (s));
214   if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
215     memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
216   else
217     pt_solution_reset (gimple_call_clobber_set (s));
218 }
219 
220 /* Helper for gimple_build_call, gimple_build_call_valist,
221    gimple_build_call_vec and gimple_build_call_from_tree.  Build the basic
222    components of a GIMPLE_CALL statement to function FN with NARGS
223    arguments.  */
224 
225 static inline gcall *
gimple_build_call_1(tree fn,unsigned nargs)226 gimple_build_call_1 (tree fn, unsigned nargs)
227 {
228   gcall *s
229     = as_a <gcall *> (gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK,
230 					     nargs + 3));
231   if (TREE_CODE (fn) == FUNCTION_DECL)
232     fn = build_fold_addr_expr (fn);
233   gimple_set_op (s, 1, fn);
234   gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
235   gimple_call_reset_alias_info (s);
236   return s;
237 }
238 
239 
240 /* Build a GIMPLE_CALL statement to function FN with the arguments
241    specified in vector ARGS.  */
242 
243 gcall *
gimple_build_call_vec(tree fn,const vec<tree> & args)244 gimple_build_call_vec (tree fn, const vec<tree> &args)
245 {
246   unsigned i;
247   unsigned nargs = args.length ();
248   gcall *call = gimple_build_call_1 (fn, nargs);
249 
250   for (i = 0; i < nargs; i++)
251     gimple_call_set_arg (call, i, args[i]);
252 
253   return call;
254 }
255 
256 
257 /* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
258    arguments.  The ... are the arguments.  */
259 
260 gcall *
gimple_build_call(tree fn,unsigned nargs,...)261 gimple_build_call (tree fn, unsigned nargs, ...)
262 {
263   va_list ap;
264   gcall *call;
265   unsigned i;
266 
267   gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
268 
269   call = gimple_build_call_1 (fn, nargs);
270 
271   va_start (ap, nargs);
272   for (i = 0; i < nargs; i++)
273     gimple_call_set_arg (call, i, va_arg (ap, tree));
274   va_end (ap);
275 
276   return call;
277 }
278 
279 
280 /* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
281    arguments.  AP contains the arguments.  */
282 
283 gcall *
gimple_build_call_valist(tree fn,unsigned nargs,va_list ap)284 gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
285 {
286   gcall *call;
287   unsigned i;
288 
289   gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
290 
291   call = gimple_build_call_1 (fn, nargs);
292 
293   for (i = 0; i < nargs; i++)
294     gimple_call_set_arg (call, i, va_arg (ap, tree));
295 
296   return call;
297 }
298 
299 
300 /* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
301    Build the basic components of a GIMPLE_CALL statement to internal
302    function FN with NARGS arguments.  */
303 
304 static inline gcall *
gimple_build_call_internal_1(enum internal_fn fn,unsigned nargs)305 gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
306 {
307   gcall *s
308     = as_a <gcall *> (gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK,
309 					     nargs + 3));
310   s->subcode |= GF_CALL_INTERNAL;
311   gimple_call_set_internal_fn (s, fn);
312   gimple_call_reset_alias_info (s);
313   return s;
314 }
315 
316 
317 /* Build a GIMPLE_CALL statement to internal function FN.  NARGS is
318    the number of arguments.  The ... are the arguments.  */
319 
320 gcall *
gimple_build_call_internal(enum internal_fn fn,unsigned nargs,...)321 gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
322 {
323   va_list ap;
324   gcall *call;
325   unsigned i;
326 
327   call = gimple_build_call_internal_1 (fn, nargs);
328   va_start (ap, nargs);
329   for (i = 0; i < nargs; i++)
330     gimple_call_set_arg (call, i, va_arg (ap, tree));
331   va_end (ap);
332 
333   return call;
334 }
335 
336 
337 /* Build a GIMPLE_CALL statement to internal function FN with the arguments
338    specified in vector ARGS.  */
339 
340 gcall *
gimple_build_call_internal_vec(enum internal_fn fn,const vec<tree> & args)341 gimple_build_call_internal_vec (enum internal_fn fn, const vec<tree> &args)
342 {
343   unsigned i, nargs;
344   gcall *call;
345 
346   nargs = args.length ();
347   call = gimple_build_call_internal_1 (fn, nargs);
348   for (i = 0; i < nargs; i++)
349     gimple_call_set_arg (call, i, args[i]);
350 
351   return call;
352 }
353 
354 
355 /* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
356    assumed to be in GIMPLE form already.  Minimal checking is done of
357    this fact.  */
358 
359 gcall *
gimple_build_call_from_tree(tree t,tree fnptrtype)360 gimple_build_call_from_tree (tree t, tree fnptrtype)
361 {
362   unsigned i, nargs;
363   gcall *call;
364 
365   gcc_assert (TREE_CODE (t) == CALL_EXPR);
366 
367   nargs = call_expr_nargs (t);
368 
369   tree fndecl = NULL_TREE;
370   if (CALL_EXPR_FN (t) == NULL_TREE)
371     call = gimple_build_call_internal_1 (CALL_EXPR_IFN (t), nargs);
372   else
373     {
374       fndecl = get_callee_fndecl (t);
375       call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);
376     }
377 
378   for (i = 0; i < nargs; i++)
379     gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));
380 
381   gimple_set_block (call, TREE_BLOCK (t));
382   gimple_set_location (call, EXPR_LOCATION (t));
383 
384   /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
385   gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
386   gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
387   gimple_call_set_must_tail (call, CALL_EXPR_MUST_TAIL_CALL (t));
388   gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
389   if (fndecl
390       && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
391       && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
392     gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
393   else if (fndecl
394 	   && (DECL_IS_OPERATOR_NEW_P (fndecl)
395 	       || DECL_IS_OPERATOR_DELETE_P (fndecl)))
396     gimple_call_set_from_new_or_delete (call, CALL_FROM_NEW_OR_DELETE_P (t));
397   else
398     gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
399   gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
400   gimple_call_set_nothrow (call, TREE_NOTHROW (t));
401   gimple_call_set_by_descriptor (call, CALL_EXPR_BY_DESCRIPTOR (t));
402   copy_warning (call, t);
403 
404   if (fnptrtype)
405     {
406       gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
407 
408       /* Check if it's an indirect CALL and the type has the
409  	 nocf_check attribute. In that case propagate the information
410 	 to the gimple CALL insn.  */
411       if (!fndecl)
412 	{
413 	  gcc_assert (POINTER_TYPE_P (fnptrtype));
414 	  tree fntype = TREE_TYPE (fnptrtype);
415 
416 	  if (lookup_attribute ("nocf_check", TYPE_ATTRIBUTES (fntype)))
417 	    gimple_call_set_nocf_check (call, TRUE);
418 	}
419     }
420 
421   return call;
422 }
423 
424 
425 /* Build a GIMPLE_ASSIGN statement.
426 
427    LHS of the assignment.
428    RHS of the assignment which can be unary or binary.  */
429 
430 gassign *
gimple_build_assign(tree lhs,tree rhs MEM_STAT_DECL)431 gimple_build_assign (tree lhs, tree rhs MEM_STAT_DECL)
432 {
433   enum tree_code subcode;
434   tree op1, op2, op3;
435 
436   extract_ops_from_tree (rhs, &subcode, &op1, &op2, &op3);
437   return gimple_build_assign (lhs, subcode, op1, op2, op3 PASS_MEM_STAT);
438 }
439 
440 
441 /* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
442    OP1, OP2 and OP3.  */
443 
444 static inline gassign *
gimple_build_assign_1(tree lhs,enum tree_code subcode,tree op1,tree op2,tree op3 MEM_STAT_DECL)445 gimple_build_assign_1 (tree lhs, enum tree_code subcode, tree op1,
446 		       tree op2, tree op3 MEM_STAT_DECL)
447 {
448   unsigned num_ops;
449   gassign *p;
450 
451   /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
452      code).  */
453   num_ops = get_gimple_rhs_num_ops (subcode) + 1;
454 
455   p = as_a <gassign *> (
456         gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
457 				    PASS_MEM_STAT));
458   gimple_assign_set_lhs (p, lhs);
459   gimple_assign_set_rhs1 (p, op1);
460   if (op2)
461     {
462       gcc_assert (num_ops > 2);
463       gimple_assign_set_rhs2 (p, op2);
464     }
465 
466   if (op3)
467     {
468       gcc_assert (num_ops > 3);
469       gimple_assign_set_rhs3 (p, op3);
470     }
471 
472   return p;
473 }
474 
475 /* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
476    OP1, OP2 and OP3.  */
477 
478 gassign *
gimple_build_assign(tree lhs,enum tree_code subcode,tree op1,tree op2,tree op3 MEM_STAT_DECL)479 gimple_build_assign (tree lhs, enum tree_code subcode, tree op1,
480 		     tree op2, tree op3 MEM_STAT_DECL)
481 {
482   return gimple_build_assign_1 (lhs, subcode, op1, op2, op3 PASS_MEM_STAT);
483 }
484 
485 /* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
486    OP1 and OP2.  */
487 
488 gassign *
gimple_build_assign(tree lhs,enum tree_code subcode,tree op1,tree op2 MEM_STAT_DECL)489 gimple_build_assign (tree lhs, enum tree_code subcode, tree op1,
490 		     tree op2 MEM_STAT_DECL)
491 {
492   return gimple_build_assign_1 (lhs, subcode, op1, op2, NULL_TREE
493 				PASS_MEM_STAT);
494 }
495 
496 /* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operand OP1.  */
497 
498 gassign *
gimple_build_assign(tree lhs,enum tree_code subcode,tree op1 MEM_STAT_DECL)499 gimple_build_assign (tree lhs, enum tree_code subcode, tree op1 MEM_STAT_DECL)
500 {
501   return gimple_build_assign_1 (lhs, subcode, op1, NULL_TREE, NULL_TREE
502 				PASS_MEM_STAT);
503 }
504 
505 
506 /* Build a GIMPLE_COND statement.
507 
508    PRED is the condition used to compare LHS and the RHS.
509    T_LABEL is the label to jump to if the condition is true.
510    F_LABEL is the label to jump to otherwise.  */
511 
512 gcond *
gimple_build_cond(enum tree_code pred_code,tree lhs,tree rhs,tree t_label,tree f_label)513 gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
514 		   tree t_label, tree f_label)
515 {
516   gcond *p;
517 
518   gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
519   p = as_a <gcond *> (gimple_build_with_ops (GIMPLE_COND, pred_code, 4));
520   gimple_cond_set_lhs (p, lhs);
521   gimple_cond_set_rhs (p, rhs);
522   gimple_cond_set_true_label (p, t_label);
523   gimple_cond_set_false_label (p, f_label);
524   return p;
525 }
526 
527 /* Build a GIMPLE_COND statement from the conditional expression tree
528    COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */
529 
530 gcond *
gimple_build_cond_from_tree(tree cond,tree t_label,tree f_label)531 gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
532 {
533   enum tree_code code;
534   tree lhs, rhs;
535 
536   gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
537   return gimple_build_cond (code, lhs, rhs, t_label, f_label);
538 }
539 
540 /* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
541    boolean expression tree COND.  */
542 
543 void
gimple_cond_set_condition_from_tree(gcond * stmt,tree cond)544 gimple_cond_set_condition_from_tree (gcond *stmt, tree cond)
545 {
546   enum tree_code code;
547   tree lhs, rhs;
548 
549   gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
550   gimple_cond_set_condition (stmt, code, lhs, rhs);
551 }
552 
553 /* Build a GIMPLE_LABEL statement for LABEL.  */
554 
555 glabel *
gimple_build_label(tree label)556 gimple_build_label (tree label)
557 {
558   glabel *p
559     = as_a <glabel *> (gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1));
560   gimple_label_set_label (p, label);
561   return p;
562 }
563 
564 /* Build a GIMPLE_GOTO statement to label DEST.  */
565 
566 ggoto *
gimple_build_goto(tree dest)567 gimple_build_goto (tree dest)
568 {
569   ggoto *p
570     = as_a <ggoto *> (gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1));
571   gimple_goto_set_dest (p, dest);
572   return p;
573 }
574 
575 
576 /* Build a GIMPLE_NOP statement.  */
577 
578 gimple *
gimple_build_nop(void)579 gimple_build_nop (void)
580 {
581   return gimple_alloc (GIMPLE_NOP, 0);
582 }
583 
584 
585 /* Build a GIMPLE_BIND statement.
586    VARS are the variables in BODY.
587    BLOCK is the containing block.  */
588 
589 gbind *
gimple_build_bind(tree vars,gimple_seq body,tree block)590 gimple_build_bind (tree vars, gimple_seq body, tree block)
591 {
592   gbind *p = as_a <gbind *> (gimple_alloc (GIMPLE_BIND, 0));
593   gimple_bind_set_vars (p, vars);
594   if (body)
595     gimple_bind_set_body (p, body);
596   if (block)
597     gimple_bind_set_block (p, block);
598   return p;
599 }
600 
601 /* Helper function to set the simple fields of a asm stmt.
602 
603    STRING is a pointer to a string that is the asm blocks assembly code.
604    NINPUT is the number of register inputs.
605    NOUTPUT is the number of register outputs.
606    NCLOBBERS is the number of clobbered registers.
607    */
608 
609 static inline gasm *
gimple_build_asm_1(const char * string,unsigned ninputs,unsigned noutputs,unsigned nclobbers,unsigned nlabels)610 gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
611                     unsigned nclobbers, unsigned nlabels)
612 {
613   gasm *p;
614   int size = strlen (string);
615 
616   p = as_a <gasm *> (
617         gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
618 			       ninputs + noutputs + nclobbers + nlabels));
619 
620   p->ni = ninputs;
621   p->no = noutputs;
622   p->nc = nclobbers;
623   p->nl = nlabels;
624   p->string = ggc_alloc_string (string, size);
625 
626   if (GATHER_STATISTICS)
627     gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
628 
629   return p;
630 }
631 
632 /* Build a GIMPLE_ASM statement.
633 
634    STRING is the assembly code.
635    NINPUT is the number of register inputs.
636    NOUTPUT is the number of register outputs.
637    NCLOBBERS is the number of clobbered registers.
638    INPUTS is a vector of the input register parameters.
639    OUTPUTS is a vector of the output register parameters.
640    CLOBBERS is a vector of the clobbered register parameters.
641    LABELS is a vector of destination labels.  */
642 
643 gasm *
gimple_build_asm_vec(const char * string,vec<tree,va_gc> * inputs,vec<tree,va_gc> * outputs,vec<tree,va_gc> * clobbers,vec<tree,va_gc> * labels)644 gimple_build_asm_vec (const char *string, vec<tree, va_gc> *inputs,
645                       vec<tree, va_gc> *outputs, vec<tree, va_gc> *clobbers,
646 		      vec<tree, va_gc> *labels)
647 {
648   gasm *p;
649   unsigned i;
650 
651   p = gimple_build_asm_1 (string,
652                           vec_safe_length (inputs),
653                           vec_safe_length (outputs),
654                           vec_safe_length (clobbers),
655 			  vec_safe_length (labels));
656 
657   for (i = 0; i < vec_safe_length (inputs); i++)
658     gimple_asm_set_input_op (p, i, (*inputs)[i]);
659 
660   for (i = 0; i < vec_safe_length (outputs); i++)
661     gimple_asm_set_output_op (p, i, (*outputs)[i]);
662 
663   for (i = 0; i < vec_safe_length (clobbers); i++)
664     gimple_asm_set_clobber_op (p, i, (*clobbers)[i]);
665 
666   for (i = 0; i < vec_safe_length (labels); i++)
667     gimple_asm_set_label_op (p, i, (*labels)[i]);
668 
669   return p;
670 }
671 
672 /* Build a GIMPLE_CATCH statement.
673 
674   TYPES are the catch types.
675   HANDLER is the exception handler.  */
676 
677 gcatch *
gimple_build_catch(tree types,gimple_seq handler)678 gimple_build_catch (tree types, gimple_seq handler)
679 {
680   gcatch *p = as_a <gcatch *> (gimple_alloc (GIMPLE_CATCH, 0));
681   gimple_catch_set_types (p, types);
682   if (handler)
683     gimple_catch_set_handler (p, handler);
684 
685   return p;
686 }
687 
688 /* Build a GIMPLE_EH_FILTER statement.
689 
690    TYPES are the filter's types.
691    FAILURE is the filter's failure action.  */
692 
693 geh_filter *
gimple_build_eh_filter(tree types,gimple_seq failure)694 gimple_build_eh_filter (tree types, gimple_seq failure)
695 {
696   geh_filter *p = as_a <geh_filter *> (gimple_alloc (GIMPLE_EH_FILTER, 0));
697   gimple_eh_filter_set_types (p, types);
698   if (failure)
699     gimple_eh_filter_set_failure (p, failure);
700 
701   return p;
702 }
703 
704 /* Build a GIMPLE_EH_MUST_NOT_THROW statement.  */
705 
706 geh_mnt *
gimple_build_eh_must_not_throw(tree decl)707 gimple_build_eh_must_not_throw (tree decl)
708 {
709   geh_mnt *p = as_a <geh_mnt *> (gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0));
710 
711   gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
712   gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
713   gimple_eh_must_not_throw_set_fndecl (p, decl);
714 
715   return p;
716 }
717 
718 /* Build a GIMPLE_EH_ELSE statement.  */
719 
720 geh_else *
gimple_build_eh_else(gimple_seq n_body,gimple_seq e_body)721 gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
722 {
723   geh_else *p = as_a <geh_else *> (gimple_alloc (GIMPLE_EH_ELSE, 0));
724   gimple_eh_else_set_n_body (p, n_body);
725   gimple_eh_else_set_e_body (p, e_body);
726   return p;
727 }
728 
729 /* Build a GIMPLE_TRY statement.
730 
731    EVAL is the expression to evaluate.
732    CLEANUP is the cleanup expression.
733    KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
734    whether this is a try/catch or a try/finally respectively.  */
735 
736 gtry *
gimple_build_try(gimple_seq eval,gimple_seq cleanup,enum gimple_try_flags kind)737 gimple_build_try (gimple_seq eval, gimple_seq cleanup,
738     		  enum gimple_try_flags kind)
739 {
740   gtry *p;
741 
742   gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
743   p = as_a <gtry *> (gimple_alloc (GIMPLE_TRY, 0));
744   gimple_set_subcode (p, kind);
745   if (eval)
746     gimple_try_set_eval (p, eval);
747   if (cleanup)
748     gimple_try_set_cleanup (p, cleanup);
749 
750   return p;
751 }
752 
753 /* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.
754 
755    CLEANUP is the cleanup expression.  */
756 
757 gimple *
gimple_build_wce(gimple_seq cleanup)758 gimple_build_wce (gimple_seq cleanup)
759 {
760   gimple *p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
761   if (cleanup)
762     gimple_wce_set_cleanup (p, cleanup);
763 
764   return p;
765 }
766 
767 
768 /* Build a GIMPLE_RESX statement.  */
769 
770 gresx *
gimple_build_resx(int region)771 gimple_build_resx (int region)
772 {
773   gresx *p
774     = as_a <gresx *> (gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0));
775   p->region = region;
776   return p;
777 }
778 
779 
780 /* The helper for constructing a gimple switch statement.
781    INDEX is the switch's index.
782    NLABELS is the number of labels in the switch excluding the default.
783    DEFAULT_LABEL is the default label for the switch statement.  */
784 
785 gswitch *
gimple_build_switch_nlabels(unsigned nlabels,tree index,tree default_label)786 gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
787 {
788   /* nlabels + 1 default label + 1 index.  */
789   gcc_checking_assert (default_label);
790   gswitch *p = as_a <gswitch *> (gimple_build_with_ops (GIMPLE_SWITCH,
791 							ERROR_MARK,
792 							1 + 1 + nlabels));
793   gimple_switch_set_index (p, index);
794   gimple_switch_set_default_label (p, default_label);
795   return p;
796 }
797 
798 /* Build a GIMPLE_SWITCH statement.
799 
800    INDEX is the switch's index.
801    DEFAULT_LABEL is the default label
802    ARGS is a vector of labels excluding the default.  */
803 
804 gswitch *
gimple_build_switch(tree index,tree default_label,const vec<tree> & args)805 gimple_build_switch (tree index, tree default_label, const vec<tree> &args)
806 {
807   unsigned i, nlabels = args.length ();
808 
809   gswitch *p = gimple_build_switch_nlabels (nlabels, index, default_label);
810 
811   /* Copy the labels from the vector to the switch statement.  */
812   for (i = 0; i < nlabels; i++)
813     gimple_switch_set_label (p, i + 1, args[i]);
814 
815   return p;
816 }
817 
818 /* Build a GIMPLE_EH_DISPATCH statement.  */
819 
820 geh_dispatch *
gimple_build_eh_dispatch(int region)821 gimple_build_eh_dispatch (int region)
822 {
823   geh_dispatch *p
824     = as_a <geh_dispatch *> (
825 	gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0));
826   p->region = region;
827   return p;
828 }
829 
830 /* Build a new GIMPLE_DEBUG_BIND statement.
831 
832    VAR is bound to VALUE; block and location are taken from STMT.  */
833 
834 gdebug *
gimple_build_debug_bind(tree var,tree value,gimple * stmt MEM_STAT_DECL)835 gimple_build_debug_bind (tree var, tree value, gimple *stmt MEM_STAT_DECL)
836 {
837   gdebug *p
838     = as_a <gdebug *> (gimple_build_with_ops_stat (GIMPLE_DEBUG,
839 						   (unsigned)GIMPLE_DEBUG_BIND, 2
840 						   PASS_MEM_STAT));
841   gimple_debug_bind_set_var (p, var);
842   gimple_debug_bind_set_value (p, value);
843   if (stmt)
844     gimple_set_location (p, gimple_location (stmt));
845 
846   return p;
847 }
848 
849 
850 /* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.
851 
852    VAR is bound to VALUE; block and location are taken from STMT.  */
853 
854 gdebug *
gimple_build_debug_source_bind(tree var,tree value,gimple * stmt MEM_STAT_DECL)855 gimple_build_debug_source_bind (tree var, tree value,
856 				     gimple *stmt MEM_STAT_DECL)
857 {
858   gdebug *p
859     = as_a <gdebug *> (
860         gimple_build_with_ops_stat (GIMPLE_DEBUG,
861 				    (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
862 				    PASS_MEM_STAT));
863 
864   gimple_debug_source_bind_set_var (p, var);
865   gimple_debug_source_bind_set_value (p, value);
866   if (stmt)
867     gimple_set_location (p, gimple_location (stmt));
868 
869   return p;
870 }
871 
872 
873 /* Build a new GIMPLE_DEBUG_BEGIN_STMT statement in BLOCK at
874    LOCATION.  */
875 
876 gdebug *
gimple_build_debug_begin_stmt(tree block,location_t location MEM_STAT_DECL)877 gimple_build_debug_begin_stmt (tree block, location_t location
878 				    MEM_STAT_DECL)
879 {
880   gdebug *p
881     = as_a <gdebug *> (
882         gimple_build_with_ops_stat (GIMPLE_DEBUG,
883 				    (unsigned)GIMPLE_DEBUG_BEGIN_STMT, 0
884 				    PASS_MEM_STAT));
885 
886   gimple_set_location (p, location);
887   gimple_set_block (p, block);
888   cfun->debug_marker_count++;
889 
890   return p;
891 }
892 
893 
894 /* Build a new GIMPLE_DEBUG_INLINE_ENTRY statement in BLOCK at
895    LOCATION.  The BLOCK links to the inlined function.  */
896 
897 gdebug *
gimple_build_debug_inline_entry(tree block,location_t location MEM_STAT_DECL)898 gimple_build_debug_inline_entry (tree block, location_t location
899 				      MEM_STAT_DECL)
900 {
901   gdebug *p
902     = as_a <gdebug *> (
903         gimple_build_with_ops_stat (GIMPLE_DEBUG,
904 				    (unsigned)GIMPLE_DEBUG_INLINE_ENTRY, 0
905 				    PASS_MEM_STAT));
906 
907   gimple_set_location (p, location);
908   gimple_set_block (p, block);
909   cfun->debug_marker_count++;
910 
911   return p;
912 }
913 
914 
915 /* Build a GIMPLE_OMP_CRITICAL statement.
916 
917    BODY is the sequence of statements for which only one thread can execute.
918    NAME is optional identifier for this critical block.
919    CLAUSES are clauses for this critical block.  */
920 
921 gomp_critical *
gimple_build_omp_critical(gimple_seq body,tree name,tree clauses)922 gimple_build_omp_critical (gimple_seq body, tree name, tree clauses)
923 {
924   gomp_critical *p
925     = as_a <gomp_critical *> (gimple_alloc (GIMPLE_OMP_CRITICAL, 0));
926   gimple_omp_critical_set_name (p, name);
927   gimple_omp_critical_set_clauses (p, clauses);
928   if (body)
929     gimple_omp_set_body (p, body);
930 
931   return p;
932 }
933 
934 /* Build a GIMPLE_OMP_FOR statement.
935 
936    BODY is sequence of statements inside the for loop.
937    KIND is the `for' variant.
938    CLAUSES are any of the construct's clauses.
939    COLLAPSE is the collapse count.
940    PRE_BODY is the sequence of statements that are loop invariant.  */
941 
942 gomp_for *
gimple_build_omp_for(gimple_seq body,int kind,tree clauses,size_t collapse,gimple_seq pre_body)943 gimple_build_omp_for (gimple_seq body, int kind, tree clauses, size_t collapse,
944 		      gimple_seq pre_body)
945 {
946   gomp_for *p = as_a <gomp_for *> (gimple_alloc (GIMPLE_OMP_FOR, 0));
947   if (body)
948     gimple_omp_set_body (p, body);
949   gimple_omp_for_set_clauses (p, clauses);
950   gimple_omp_for_set_kind (p, kind);
951   p->collapse = collapse;
952   p->iter =  ggc_cleared_vec_alloc<gimple_omp_for_iter> (collapse);
953 
954   if (pre_body)
955     gimple_omp_for_set_pre_body (p, pre_body);
956 
957   return p;
958 }
959 
960 
961 /* Build a GIMPLE_OMP_PARALLEL statement.
962 
963    BODY is sequence of statements which are executed in parallel.
964    CLAUSES are the OMP parallel construct's clauses.
965    CHILD_FN is the function created for the parallel threads to execute.
966    DATA_ARG are the shared data argument(s).  */
967 
968 gomp_parallel *
gimple_build_omp_parallel(gimple_seq body,tree clauses,tree child_fn,tree data_arg)969 gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
970 			   tree data_arg)
971 {
972   gomp_parallel *p
973     = as_a <gomp_parallel *> (gimple_alloc (GIMPLE_OMP_PARALLEL, 0));
974   if (body)
975     gimple_omp_set_body (p, body);
976   gimple_omp_parallel_set_clauses (p, clauses);
977   gimple_omp_parallel_set_child_fn (p, child_fn);
978   gimple_omp_parallel_set_data_arg (p, data_arg);
979 
980   return p;
981 }
982 
983 
984 /* Build a GIMPLE_OMP_TASK statement.
985 
986    BODY is sequence of statements which are executed by the explicit task.
987    CLAUSES are the OMP task construct's clauses.
988    CHILD_FN is the function created for the parallel threads to execute.
989    DATA_ARG are the shared data argument(s).
990    COPY_FN is the optional function for firstprivate initialization.
991    ARG_SIZE and ARG_ALIGN are size and alignment of the data block.  */
992 
993 gomp_task *
gimple_build_omp_task(gimple_seq body,tree clauses,tree child_fn,tree data_arg,tree copy_fn,tree arg_size,tree arg_align)994 gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
995 		       tree data_arg, tree copy_fn, tree arg_size,
996 		       tree arg_align)
997 {
998   gomp_task *p = as_a <gomp_task *> (gimple_alloc (GIMPLE_OMP_TASK, 0));
999   if (body)
1000     gimple_omp_set_body (p, body);
1001   gimple_omp_task_set_clauses (p, clauses);
1002   gimple_omp_task_set_child_fn (p, child_fn);
1003   gimple_omp_task_set_data_arg (p, data_arg);
1004   gimple_omp_task_set_copy_fn (p, copy_fn);
1005   gimple_omp_task_set_arg_size (p, arg_size);
1006   gimple_omp_task_set_arg_align (p, arg_align);
1007 
1008   return p;
1009 }
1010 
1011 
1012 /* Build a GIMPLE_OMP_SECTION statement for a sections statement.
1013 
1014    BODY is the sequence of statements in the section.  */
1015 
1016 gimple *
gimple_build_omp_section(gimple_seq body)1017 gimple_build_omp_section (gimple_seq body)
1018 {
1019   gimple *p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
1020   if (body)
1021     gimple_omp_set_body (p, body);
1022 
1023   return p;
1024 }
1025 
1026 
1027 /* Build a GIMPLE_OMP_MASTER statement.
1028 
1029    BODY is the sequence of statements to be executed by just the master.  */
1030 
1031 gimple *
gimple_build_omp_master(gimple_seq body)1032 gimple_build_omp_master (gimple_seq body)
1033 {
1034   gimple *p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
1035   if (body)
1036     gimple_omp_set_body (p, body);
1037 
1038   return p;
1039 }
1040 
1041 /* Build a GIMPLE_OMP_MASKED statement.
1042 
1043    BODY is the sequence of statements to be executed by the selected thread(s).  */
1044 
1045 gimple *
gimple_build_omp_masked(gimple_seq body,tree clauses)1046 gimple_build_omp_masked (gimple_seq body, tree clauses)
1047 {
1048   gimple *p = gimple_alloc (GIMPLE_OMP_MASKED, 0);
1049   gimple_omp_masked_set_clauses (p, clauses);
1050   if (body)
1051     gimple_omp_set_body (p, body);
1052 
1053   return p;
1054 }
1055 
1056 /* Build a GIMPLE_OMP_TASKGROUP statement.
1057 
1058    BODY is the sequence of statements to be executed by the taskgroup
1059    construct.
1060    CLAUSES are any of the construct's clauses.  */
1061 
1062 gimple *
gimple_build_omp_taskgroup(gimple_seq body,tree clauses)1063 gimple_build_omp_taskgroup (gimple_seq body, tree clauses)
1064 {
1065   gimple *p = gimple_alloc (GIMPLE_OMP_TASKGROUP, 0);
1066   gimple_omp_taskgroup_set_clauses (p, clauses);
1067   if (body)
1068     gimple_omp_set_body (p, body);
1069 
1070   return p;
1071 }
1072 
1073 
1074 /* Build a GIMPLE_OMP_CONTINUE statement.
1075 
1076    CONTROL_DEF is the definition of the control variable.
1077    CONTROL_USE is the use of the control variable.  */
1078 
1079 gomp_continue *
gimple_build_omp_continue(tree control_def,tree control_use)1080 gimple_build_omp_continue (tree control_def, tree control_use)
1081 {
1082   gomp_continue *p
1083     = as_a <gomp_continue *> (gimple_alloc (GIMPLE_OMP_CONTINUE, 0));
1084   gimple_omp_continue_set_control_def (p, control_def);
1085   gimple_omp_continue_set_control_use (p, control_use);
1086   return p;
1087 }
1088 
1089 /* Build a GIMPLE_OMP_ORDERED statement.
1090 
1091    BODY is the sequence of statements inside a loop that will executed in
1092    sequence.
1093    CLAUSES are clauses for this statement.  */
1094 
1095 gomp_ordered *
gimple_build_omp_ordered(gimple_seq body,tree clauses)1096 gimple_build_omp_ordered (gimple_seq body, tree clauses)
1097 {
1098   gomp_ordered *p
1099     = as_a <gomp_ordered *> (gimple_alloc (GIMPLE_OMP_ORDERED, 0));
1100   gimple_omp_ordered_set_clauses (p, clauses);
1101   if (body)
1102     gimple_omp_set_body (p, body);
1103 
1104   return p;
1105 }
1106 
1107 
1108 /* Build a GIMPLE_OMP_RETURN statement.
1109    WAIT_P is true if this is a non-waiting return.  */
1110 
1111 gimple *
gimple_build_omp_return(bool wait_p)1112 gimple_build_omp_return (bool wait_p)
1113 {
1114   gimple *p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
1115   if (wait_p)
1116     gimple_omp_return_set_nowait (p);
1117 
1118   return p;
1119 }
1120 
1121 
1122 /* Build a GIMPLE_OMP_SCAN statement.
1123 
1124    BODY is the sequence of statements to be executed by the scan
1125    construct.
1126    CLAUSES are any of the construct's clauses.  */
1127 
1128 gomp_scan *
gimple_build_omp_scan(gimple_seq body,tree clauses)1129 gimple_build_omp_scan (gimple_seq body, tree clauses)
1130 {
1131   gomp_scan *p
1132     = as_a <gomp_scan *> (gimple_alloc (GIMPLE_OMP_SCAN, 0));
1133   gimple_omp_scan_set_clauses (p, clauses);
1134   if (body)
1135     gimple_omp_set_body (p, body);
1136 
1137   return p;
1138 }
1139 
1140 
1141 /* Build a GIMPLE_OMP_SECTIONS statement.
1142 
1143    BODY is a sequence of section statements.
1144    CLAUSES are any of the OMP sections contsruct's clauses: private,
1145    firstprivate, lastprivate, reduction, and nowait.  */
1146 
1147 gomp_sections *
gimple_build_omp_sections(gimple_seq body,tree clauses)1148 gimple_build_omp_sections (gimple_seq body, tree clauses)
1149 {
1150   gomp_sections *p
1151     = as_a <gomp_sections *> (gimple_alloc (GIMPLE_OMP_SECTIONS, 0));
1152   if (body)
1153     gimple_omp_set_body (p, body);
1154   gimple_omp_sections_set_clauses (p, clauses);
1155 
1156   return p;
1157 }
1158 
1159 
1160 /* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */
1161 
1162 gimple *
gimple_build_omp_sections_switch(void)1163 gimple_build_omp_sections_switch (void)
1164 {
1165   return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
1166 }
1167 
1168 
1169 /* Build a GIMPLE_OMP_SINGLE statement.
1170 
1171    BODY is the sequence of statements that will be executed once.
1172    CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
1173    copyprivate, nowait.  */
1174 
1175 gomp_single *
gimple_build_omp_single(gimple_seq body,tree clauses)1176 gimple_build_omp_single (gimple_seq body, tree clauses)
1177 {
1178   gomp_single *p
1179     = as_a <gomp_single *> (gimple_alloc (GIMPLE_OMP_SINGLE, 0));
1180   if (body)
1181     gimple_omp_set_body (p, body);
1182   gimple_omp_single_set_clauses (p, clauses);
1183 
1184   return p;
1185 }
1186 
1187 
1188 /* Build a GIMPLE_OMP_SCOPE statement.
1189 
1190    BODY is the sequence of statements that will be executed once.
1191    CLAUSES are any of the OMP scope construct's clauses: private, reduction,
1192    nowait.  */
1193 
1194 gimple *
gimple_build_omp_scope(gimple_seq body,tree clauses)1195 gimple_build_omp_scope (gimple_seq body, tree clauses)
1196 {
1197   gimple *p = gimple_alloc (GIMPLE_OMP_SCOPE, 0);
1198   gimple_omp_scope_set_clauses (p, clauses);
1199   if (body)
1200     gimple_omp_set_body (p, body);
1201 
1202   return p;
1203 }
1204 
1205 
1206 /* Build a GIMPLE_OMP_TARGET statement.
1207 
1208    BODY is the sequence of statements that will be executed.
1209    KIND is the kind of the region.
1210    CLAUSES are any of the construct's clauses.  */
1211 
1212 gomp_target *
gimple_build_omp_target(gimple_seq body,int kind,tree clauses)1213 gimple_build_omp_target (gimple_seq body, int kind, tree clauses)
1214 {
1215   gomp_target *p
1216     = as_a <gomp_target *> (gimple_alloc (GIMPLE_OMP_TARGET, 0));
1217   if (body)
1218     gimple_omp_set_body (p, body);
1219   gimple_omp_target_set_clauses (p, clauses);
1220   gimple_omp_target_set_kind (p, kind);
1221 
1222   return p;
1223 }
1224 
1225 
1226 /* Build a GIMPLE_OMP_TEAMS statement.
1227 
1228    BODY is the sequence of statements that will be executed.
1229    CLAUSES are any of the OMP teams construct's clauses.  */
1230 
1231 gomp_teams *
gimple_build_omp_teams(gimple_seq body,tree clauses)1232 gimple_build_omp_teams (gimple_seq body, tree clauses)
1233 {
1234   gomp_teams *p = as_a <gomp_teams *> (gimple_alloc (GIMPLE_OMP_TEAMS, 0));
1235   if (body)
1236     gimple_omp_set_body (p, body);
1237   gimple_omp_teams_set_clauses (p, clauses);
1238 
1239   return p;
1240 }
1241 
1242 
1243 /* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */
1244 
1245 gomp_atomic_load *
gimple_build_omp_atomic_load(tree lhs,tree rhs,enum omp_memory_order mo)1246 gimple_build_omp_atomic_load (tree lhs, tree rhs, enum omp_memory_order mo)
1247 {
1248   gomp_atomic_load *p
1249     = as_a <gomp_atomic_load *> (gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0));
1250   gimple_omp_atomic_load_set_lhs (p, lhs);
1251   gimple_omp_atomic_load_set_rhs (p, rhs);
1252   gimple_omp_atomic_set_memory_order (p, mo);
1253   return p;
1254 }
1255 
1256 /* Build a GIMPLE_OMP_ATOMIC_STORE statement.
1257 
1258    VAL is the value we are storing.  */
1259 
1260 gomp_atomic_store *
gimple_build_omp_atomic_store(tree val,enum omp_memory_order mo)1261 gimple_build_omp_atomic_store (tree val, enum omp_memory_order mo)
1262 {
1263   gomp_atomic_store *p
1264     = as_a <gomp_atomic_store *> (gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0));
1265   gimple_omp_atomic_store_set_val (p, val);
1266   gimple_omp_atomic_set_memory_order (p, mo);
1267   return p;
1268 }
1269 
1270 /* Build a GIMPLE_TRANSACTION statement.  */
1271 
1272 gtransaction *
gimple_build_transaction(gimple_seq body)1273 gimple_build_transaction (gimple_seq body)
1274 {
1275   gtransaction *p
1276     = as_a <gtransaction *> (gimple_alloc (GIMPLE_TRANSACTION, 0));
1277   gimple_transaction_set_body (p, body);
1278   gimple_transaction_set_label_norm (p, 0);
1279   gimple_transaction_set_label_uninst (p, 0);
1280   gimple_transaction_set_label_over (p, 0);
1281   return p;
1282 }
1283 
1284 #if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.

   GS is the statement that failed the check; FILE, LINE and FUNCTION
   identify the call site of the failed GIMPLE_CHECK; CODE and SUBCODE
   are the gimple code and tree subcode that were expected.  Never
   returns.  */

void
gimple_check_failed (const gimple *gs, const char *file, int line,
		     const char *function, enum gimple_code code,
		     enum tree_code subcode)
{
  /* A zero subcode has no tree-code name, so print an empty string
     for it instead.  */
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
      		  gimple_code_name[code],
		  get_tree_code_name (subcode),
		  gimple_code_name[gimple_code (gs)],
		  gs->subcode > 0
		    ? get_tree_code_name ((enum tree_code) gs->subcode)
		    : "",
		  function, trim_filename (file), line);
}
1301 #endif /* ENABLE_GIMPLE_CHECKING */
1302 
1303 
1304 /* Link gimple statement GS to the end of the sequence *SEQ_P.  If
1305    *SEQ_P is NULL, a new sequence is allocated.  */
1306 
1307 void
gimple_seq_add_stmt(gimple_seq * seq_p,gimple * gs)1308 gimple_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
1309 {
1310   gimple_stmt_iterator si;
1311   if (gs == NULL)
1312     return;
1313 
1314   si = gsi_last (*seq_p);
1315   gsi_insert_after (&si, gs, GSI_NEW_STMT);
1316 }
1317 
1318 /* Link gimple statement GS to the end of the sequence *SEQ_P.  If
1319    *SEQ_P is NULL, a new sequence is allocated.  This function is
1320    similar to gimple_seq_add_stmt, but does not scan the operands.
1321    During gimplification, we need to manipulate statement sequences
1322    before the def/use vectors have been constructed.  */
1323 
1324 void
gimple_seq_add_stmt_without_update(gimple_seq * seq_p,gimple * gs)1325 gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple *gs)
1326 {
1327   gimple_stmt_iterator si;
1328 
1329   if (gs == NULL)
1330     return;
1331 
1332   si = gsi_last (*seq_p);
1333   gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
1334 }
1335 
1336 /* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
1337    NULL, a new sequence is allocated.  */
1338 
1339 void
gimple_seq_add_seq(gimple_seq * dst_p,gimple_seq src)1340 gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
1341 {
1342   gimple_stmt_iterator si;
1343   if (src == NULL)
1344     return;
1345 
1346   si = gsi_last (*dst_p);
1347   gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
1348 }
1349 
1350 /* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
1351    NULL, a new sequence is allocated.  This function is
1352    similar to gimple_seq_add_seq, but does not scan the operands.  */
1353 
1354 void
gimple_seq_add_seq_without_update(gimple_seq * dst_p,gimple_seq src)1355 gimple_seq_add_seq_without_update (gimple_seq *dst_p, gimple_seq src)
1356 {
1357   gimple_stmt_iterator si;
1358   if (src == NULL)
1359     return;
1360 
1361   si = gsi_last (*dst_p);
1362   gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
1363 }
1364 
1365 /* Determine whether to assign a location to the statement GS.  */
1366 
1367 static bool
should_carry_location_p(gimple * gs)1368 should_carry_location_p (gimple *gs)
1369 {
1370   /* Don't emit a line note for a label.  We particularly don't want to
1371      emit one for the break label, since it doesn't actually correspond
1372      to the beginning of the loop/switch.  */
1373   if (gimple_code (gs) == GIMPLE_LABEL)
1374     return false;
1375 
1376   return true;
1377 }
1378 
1379 /* Set the location for gimple statement GS to LOCATION.  */
1380 
1381 static void
annotate_one_with_location(gimple * gs,location_t location)1382 annotate_one_with_location (gimple *gs, location_t location)
1383 {
1384   if (!gimple_has_location (gs)
1385       && !gimple_do_not_emit_location_p (gs)
1386       && should_carry_location_p (gs))
1387     gimple_set_location (gs, location);
1388 }
1389 
1390 /* Set LOCATION for all the statements after iterator GSI in sequence
1391    SEQ.  If GSI is pointing to the end of the sequence, start with the
1392    first statement in SEQ.  */
1393 
1394 void
annotate_all_with_location_after(gimple_seq seq,gimple_stmt_iterator gsi,location_t location)1395 annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
1396 				  location_t location)
1397 {
1398   if (gsi_end_p (gsi))
1399     gsi = gsi_start (seq);
1400   else
1401     gsi_next (&gsi);
1402 
1403   for (; !gsi_end_p (gsi); gsi_next (&gsi))
1404     annotate_one_with_location (gsi_stmt (gsi), location);
1405 }
1406 
1407 /* Set the location for all the statements in a sequence STMT_P to LOCATION.  */
1408 
1409 void
annotate_all_with_location(gimple_seq stmt_p,location_t location)1410 annotate_all_with_location (gimple_seq stmt_p, location_t location)
1411 {
1412   gimple_stmt_iterator i;
1413 
1414   if (gimple_seq_empty_p (stmt_p))
1415     return;
1416 
1417   for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
1418     {
1419       gimple *gs = gsi_stmt (i);
1420       annotate_one_with_location (gs, location);
1421     }
1422 }
1423 
1424 /* Helper function of empty_body_p.  Return true if STMT is an empty
1425    statement.  */
1426 
1427 static bool
empty_stmt_p(gimple * stmt)1428 empty_stmt_p (gimple *stmt)
1429 {
1430   if (gimple_code (stmt) == GIMPLE_NOP)
1431     return true;
1432   if (gbind *bind_stmt = dyn_cast <gbind *> (stmt))
1433     return empty_body_p (gimple_bind_body (bind_stmt));
1434   return false;
1435 }
1436 
1437 
1438 /* Return true if BODY contains nothing but empty statements.  */
1439 
1440 bool
empty_body_p(gimple_seq body)1441 empty_body_p (gimple_seq body)
1442 {
1443   gimple_stmt_iterator i;
1444 
1445   if (gimple_seq_empty_p (body))
1446     return true;
1447   for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
1448     if (!empty_stmt_p (gsi_stmt (i))
1449 	&& !is_gimple_debug (gsi_stmt (i)))
1450       return false;
1451 
1452   return true;
1453 }
1454 
1455 
1456 /* Perform a deep copy of sequence SRC and return the result.  */
1457 
1458 gimple_seq
gimple_seq_copy(gimple_seq src)1459 gimple_seq_copy (gimple_seq src)
1460 {
1461   gimple_stmt_iterator gsi;
1462   gimple_seq new_seq = NULL;
1463   gimple *stmt;
1464 
1465   for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
1466     {
1467       stmt = gimple_copy (gsi_stmt (gsi));
1468       gimple_seq_add_stmt (&new_seq, stmt);
1469     }
1470 
1471   return new_seq;
1472 }
1473 
1474 
1475 
/* Return true if calls C1 and C2 are known to go to the same function.  */

bool
gimple_call_same_target_p (const gimple *c1, const gimple *c2)
{
  if (gimple_call_internal_p (c1))
    /* Internal calls match when both invoke the same internal function;
       a "unique" internal call additionally matches only itself.  */
    return (gimple_call_internal_p (c2)
	    && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2)
	    && (!gimple_call_internal_unique_p (as_a <const gcall *> (c1))
		|| c1 == c2));
  else
    /* Otherwise compare callee expressions; a matching fndecl also
       suffices when the call expressions themselves differ.  */
    return (gimple_call_fn (c1) == gimple_call_fn (c2)
	    || (gimple_call_fndecl (c1)
		&& gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
}
1491 
/* Detect flags (ECF_*) from a GIMPLE_CALL.  This is just like
   call_expr_flags, but for gimple tuples.  */

int
gimple_call_flags (const gimple *stmt)
{
  int flags = 0;

  if (gimple_call_internal_p (stmt))
    /* Internal functions carry their flags in a table.  */
    flags = internal_fn_flags (gimple_call_internal_fn (stmt));
  else
    {
      /* Combine flags from the callee declaration (if known) with
	 flags from the call's function type.  */
      tree decl = gimple_call_fndecl (stmt);
      if (decl)
	flags = flags_from_decl_or_type (decl);
      flags |= flags_from_decl_or_type (gimple_call_fntype (stmt));
    }

  /* Merge in flags recorded on the call statement itself.  */
  if (stmt->subcode & GF_CALL_NOTHROW)
    flags |= ECF_NOTHROW;

  if (stmt->subcode & GF_CALL_BY_DESCRIPTOR)
    flags |= ECF_BY_DESCRIPTOR;

  return flags;
}
1518 
/* Return the "fn spec" string for call STMT, or the empty string when
   nothing is known.  */

attr_fnspec
gimple_call_fnspec (const gcall *stmt)
{
  tree type, attr;

  /* Internal functions have their fnspec recorded in a table.  */
  if (gimple_call_internal_p (stmt))
    {
      const_tree spec = internal_fn_fnspec (gimple_call_internal_fn (stmt));
      if (spec)
	return spec;
      else
	return "";
    }

  /* Prefer an explicit "fn spec" attribute on the call's function
     type.  */
  type = gimple_call_fntype (stmt);
  if (type)
    {
      attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
      if (attr)
	return TREE_VALUE (TREE_VALUE (attr));
    }
  /* Normal builtins have fnspecs known to the compiler.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    return builtin_fnspec (gimple_call_fndecl (stmt));
  tree fndecl = gimple_call_fndecl (stmt);
  /* If the call is to a replaceable operator delete and results
     from a delete expression as opposed to a direct call to
     such operator, then we can treat it as free.  */
  if (fndecl
      && DECL_IS_OPERATOR_DELETE_P (fndecl)
      && DECL_IS_REPLACEABLE_OPERATOR (fndecl)
      && gimple_call_from_new_or_delete (stmt))
    return ". o ";
  /* Similarly operator new can be treated as malloc.  */
  if (fndecl
      && DECL_IS_REPLACEABLE_OPERATOR_NEW_P (fndecl)
      && gimple_call_from_new_or_delete (stmt))
    return "m ";
  return "";
}
1560 
/* Detects argument flags (EAF_*) for argument number ARG on call STMT.  */

int
gimple_call_arg_flags (const gcall *stmt, unsigned arg)
{
  /* Start with whatever the fnspec string says about this argument.  */
  attr_fnspec fnspec = gimple_call_fnspec (stmt);
  int flags = 0;

  if (fnspec.known_p ())
    flags = fnspec.arg_eaf_flags (arg);
  /* A known callee may additionally have an IPA modref summary with
     per-argument flags.  */
  tree callee = gimple_call_fndecl (stmt);
  if (callee)
    {
      cgraph_node *node = cgraph_node::get (callee);
      modref_summary *summary = node ? get_modref_function_summary (node)
				: NULL;

      if (summary && summary->arg_flags.length () > arg)
	{
	  int modref_flags = summary->arg_flags[arg];

	  /* We have possibly optimized out load.  Be conservative here.  */
	  if (!node->binds_to_current_def_p ())
	    modref_flags = interposable_eaf_flags (modref_flags, flags);
	  /* The debug counter allows bisecting problems caused by
	     modref-derived flags.  */
	  if (dbg_cnt (ipa_mod_ref_pta))
	    flags |= modref_flags;
	}
    }
  return flags;
}
1591 
/* Detects argument flags (EAF_*) for the return slot on call STMT.  */

int
gimple_call_retslot_flags (const gcall *stmt)
{
  /* The return slot always has at least the implicit flags.  */
  int flags = implicit_retslot_eaf_flags;

  /* A known callee may refine them via its IPA modref summary.  */
  tree callee = gimple_call_fndecl (stmt);
  if (callee)
    {
      cgraph_node *node = cgraph_node::get (callee);
      modref_summary *summary = node ? get_modref_function_summary (node)
				: NULL;

      if (summary)
	{
	  int modref_flags = summary->retslot_flags;

	  /* We have possibly optimized out load.  Be conservative here.  */
	  if (!node->binds_to_current_def_p ())
	    modref_flags = interposable_eaf_flags (modref_flags, flags);
	  /* The debug counter allows bisecting problems caused by
	     modref-derived flags.  */
	  if (dbg_cnt (ipa_mod_ref_pta))
	    flags |= modref_flags;
	}
    }
  return flags;
}
1619 
/* Detects argument flags (EAF_*) for the static chain on call STMT.  */

int
gimple_call_static_chain_flags (const gcall *stmt)
{
  int flags = 0;

  tree callee = gimple_call_fndecl (stmt);
  if (callee)
    {
      cgraph_node *node = cgraph_node::get (callee);
      modref_summary *summary = node ? get_modref_function_summary (node)
				: NULL;

      /* Nested functions should always bind to current def since
	 there is no public ABI for them.  */
      /* NOTE(review): NODE is dereferenced here unconditionally, while
	 the line above guards against it being NULL — presumably a
	 cgraph node always exists for a call with a static chain;
	 confirm against callers.  */
      gcc_checking_assert (node->binds_to_current_def_p ());
      if (summary)
	{
	  int modref_flags = summary->static_chain_flags;

	  /* No interposition is possible (see assert above), so the
	     summary flags can be used as-is, gated only by the debug
	     counter.  */
	  if (dbg_cnt (ipa_mod_ref_pta))
	    flags |= modref_flags;
	}
    }
  return flags;
}
1647 
/* Detects return flags (ERF_*) for the call STMT.  */

int
gimple_call_return_flags (const gcall *stmt)
{
  /* Malloc-like functions return memory that aliases nothing.  */
  if (gimple_call_flags (stmt) & ECF_MALLOC)
    return ERF_NOALIAS;

  attr_fnspec fnspec = gimple_call_fnspec (stmt);

  /* A fnspec may say the call returns one of its arguments; encode the
     argument number in the low bits of the result.  */
  unsigned int arg_no;
  if (fnspec.returns_arg (&arg_no))
    return ERF_RETURNS_ARG | arg_no;

  if (fnspec.returns_noalias_p ())
    return ERF_NOALIAS;
  return 0;
}
1666 
1667 
/* Return true if call STMT is known to return a non-zero result.  */

bool
gimple_call_nonnull_result_p (gcall *call)
{
  tree fndecl = gimple_call_fndecl (call);
  if (!fndecl)
    return false;
  /* A throwing operator new reports failure by throwing, so its result
     is non-NULL — unless -fcheck-new or
     -fno-delete-null-pointer-checks says otherwise.  */
  if (flag_delete_null_pointer_checks && !flag_check_new
      && DECL_IS_OPERATOR_NEW_P (fndecl)
      && !TREE_NOTHROW (fndecl))
    return true;

  /* References are always non-NULL.  */
  if (flag_delete_null_pointer_checks
      && TREE_CODE (TREE_TYPE (fndecl)) == REFERENCE_TYPE)
    return true;

  /* Honor an explicit returns_nonnull attribute on the call's type.  */
  if (flag_delete_null_pointer_checks
      && lookup_attribute ("returns_nonnull",
			   TYPE_ATTRIBUTES (gimple_call_fntype (call))))
    return true;
  return gimple_alloca_call_p (call);
}
1692 
1693 
/* If CALL returns a non-null result in an argument, return that arg,
   otherwise NULL_TREE.  */

tree
gimple_call_nonnull_arg (gcall *call)
{
  tree fndecl = gimple_call_fndecl (call);
  if (!fndecl)
    return NULL_TREE;

  /* The returned-argument number is encoded in the low bits of the
     return flags.  */
  unsigned rf = gimple_call_return_flags (call);
  if (rf & ERF_RETURNS_ARG)
    {
      unsigned argnum = rf & ERF_RETURN_ARG_MASK;
      if (argnum < gimple_call_num_args (call))
	{
	  tree arg = gimple_call_arg (call, argnum);
	  /* Only SSA variables with a nonnull guarantee from attributes
	     qualify.  */
	  if (SSA_VAR_P (arg)
	      && infer_nonnull_range_by_attribute (call, arg))
	    return arg;
	}
    }
  return NULL_TREE;
}
1717 
1718 
1719 /* Return true if GS is a copy assignment.  */
1720 
1721 bool
gimple_assign_copy_p(gimple * gs)1722 gimple_assign_copy_p (gimple *gs)
1723 {
1724   return (gimple_assign_single_p (gs)
1725 	  && is_gimple_val (gimple_op (gs, 1)));
1726 }
1727 
1728 
1729 /* Return true if GS is a SSA_NAME copy assignment.  */
1730 
1731 bool
gimple_assign_ssa_name_copy_p(gimple * gs)1732 gimple_assign_ssa_name_copy_p (gimple *gs)
1733 {
1734   return (gimple_assign_single_p (gs)
1735 	  && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
1736 	  && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
1737 }
1738 
1739 
1740 /* Return true if GS is an assignment with a unary RHS, but the
1741    operator has no effect on the assigned value.  The logic is adapted
1742    from STRIP_NOPS.  This predicate is intended to be used in tuplifying
1743    instances in which STRIP_NOPS was previously applied to the RHS of
1744    an assignment.
1745 
1746    NOTE: In the use cases that led to the creation of this function
1747    and of gimple_assign_single_p, it is typical to test for either
1748    condition and to proceed in the same manner.  In each case, the
1749    assigned value is represented by the single RHS operand of the
1750    assignment.  I suspect there may be cases where gimple_assign_copy_p,
1751    gimple_assign_single_p, or equivalent logic is used where a similar
1752    treatment of unary NOPs is appropriate.  */
1753 
1754 bool
gimple_assign_unary_nop_p(gimple * gs)1755 gimple_assign_unary_nop_p (gimple *gs)
1756 {
1757   return (is_gimple_assign (gs)
1758           && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
1759               || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
1760           && gimple_assign_rhs1 (gs) != error_mark_node
1761           && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
1762               == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
1763 }
1764 
/* Set BB to be the basic block holding statement STMT.  */

void
gimple_set_bb (gimple *stmt, basic_block bb)
{
  stmt->bb = bb;

  if (gimple_code (stmt) != GIMPLE_LABEL)
    return;

  /* If the statement is a label, add the label to block-to-labels map
     so that we can speed up edge creation for GIMPLE_GOTOs.  */
  if (cfun->cfg)
    {
      tree t;
      int uid;

      t = gimple_label_label (as_a <glabel *> (stmt));
      uid = LABEL_DECL_UID (t);
      if (uid == -1)
	{
	  /* First time this label is mapped: assign it a fresh UID and
	     grow the map if the UID falls past its current end.  */
	  unsigned old_len =
	    vec_safe_length (label_to_block_map_for_fn (cfun));
	  LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
	  if (old_len <= (unsigned) uid)
	    vec_safe_grow_cleared (label_to_block_map_for_fn (cfun), uid + 1);
	}

      (*label_to_block_map_for_fn (cfun))[uid] = bb;
    }
}
1796 
1797 
1798 /* Modify the RHS of the assignment pointed-to by GSI using the
1799    operands in the expression tree EXPR.
1800 
1801    NOTE: The statement pointed-to by GSI may be reallocated if it
1802    did not have enough operand slots.
1803 
1804    This function is useful to convert an existing tree expression into
1805    the flat representation used for the RHS of a GIMPLE assignment.
1806    It will reallocate memory as needed to expand or shrink the number
1807    of operand slots needed to represent EXPR.
1808 
1809    NOTE: If you find yourself building a tree and then calling this
1810    function, you are most certainly doing it the slow way.  It is much
1811    better to build a new assignment or to use the function
1812    gimple_assign_set_rhs_with_ops, which does not require an
1813    expression tree to be built.  */
1814 
1815 void
gimple_assign_set_rhs_from_tree(gimple_stmt_iterator * gsi,tree expr)1816 gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
1817 {
1818   enum tree_code subcode;
1819   tree op1, op2, op3;
1820 
1821   extract_ops_from_tree (expr, &subcode, &op1, &op2, &op3);
1822   gimple_assign_set_rhs_with_ops (gsi, subcode, op1, op2, op3);
1823 }
1824 
1825 
/* Set the RHS of assignment statement pointed-to by GSI to CODE with
   operands OP1, OP2 and OP3.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.  */

void
gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *gsi, enum tree_code code,
				tree op1, tree op2, tree op3)
{
  unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
  gimple *stmt = gsi_stmt (*gsi);
  gimple *old_stmt = stmt;

  /* If the new CODE needs more operands, allocate a new statement.  */
  if (gimple_num_ops (stmt) < new_rhs_ops + 1)
    {
      tree lhs = gimple_assign_lhs (old_stmt);
      /* Allocate a larger tuple and copy the old statement's fields
	 into it wholesale.  */
      stmt = gimple_alloc (gimple_code (old_stmt), new_rhs_ops + 1);
      memcpy (stmt, old_stmt, gimple_size (gimple_code (old_stmt)));
      gimple_init_singleton (stmt);

      /* The LHS needs to be reset as this also changes the SSA name
	 on the LHS.  */
      gimple_assign_set_lhs (stmt, lhs);
    }

  /* Install the new subcode and operands; unused operand slots are
     left untouched.  */
  gimple_set_num_ops (stmt, new_rhs_ops + 1);
  gimple_set_subcode (stmt, code);
  gimple_assign_set_rhs1 (stmt, op1);
  if (new_rhs_ops > 1)
    gimple_assign_set_rhs2 (stmt, op2);
  if (new_rhs_ops > 2)
    gimple_assign_set_rhs3 (stmt, op3);
  /* If a new tuple was allocated, splice it in place of the old one.  */
  if (stmt != old_stmt)
    gsi_replace (gsi, stmt, false);
}
1863 
1864 
1865 /* Return the LHS of a statement that performs an assignment,
1866    either a GIMPLE_ASSIGN or a GIMPLE_CALL.  Returns NULL_TREE
1867    for a call to a function that returns no value, or for a
1868    statement other than an assignment or a call.  */
1869 
1870 tree
gimple_get_lhs(const gimple * stmt)1871 gimple_get_lhs (const gimple *stmt)
1872 {
1873   enum gimple_code code = gimple_code (stmt);
1874 
1875   if (code == GIMPLE_ASSIGN)
1876     return gimple_assign_lhs (stmt);
1877   else if (code == GIMPLE_CALL)
1878     return gimple_call_lhs (stmt);
1879   else if (code == GIMPLE_PHI)
1880     return gimple_phi_result (stmt);
1881   else
1882     return NULL_TREE;
1883 }
1884 
1885 
1886 /* Set the LHS of a statement that performs an assignment,
1887    either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
1888 
1889 void
gimple_set_lhs(gimple * stmt,tree lhs)1890 gimple_set_lhs (gimple *stmt, tree lhs)
1891 {
1892   enum gimple_code code = gimple_code (stmt);
1893 
1894   if (code == GIMPLE_ASSIGN)
1895     gimple_assign_set_lhs (stmt, lhs);
1896   else if (code == GIMPLE_CALL)
1897     gimple_call_set_lhs (stmt, lhs);
1898   else
1899     gcc_unreachable ();
1900 }
1901 
1902 
1903 /* Return a deep copy of statement STMT.  All the operands from STMT
1904    are reallocated and copied using unshare_expr.  The DEF, USE, VDEF
1905    and VUSE operand arrays are set to empty in the new copy.  The new
1906    copy isn't part of any sequence.  */
1907 
gimple *
gimple_copy (gimple *stmt)
{
  enum gimple_code code = gimple_code (stmt);
  unsigned num_ops = gimple_num_ops (stmt);
  gimple *copy = gimple_alloc (code, num_ops);
  unsigned i;

  /* Shallow copy all the fields from STMT.  */
  memcpy (copy, stmt, gimple_size (code));
  /* The copy is a fresh singleton; it must not share STMT's sequence
     links.  */
  gimple_init_singleton (copy);

  /* If STMT has sub-statements, deep-copy them as well.  The shallow
     memcpy above left COPY sharing STMT's sub-sequences and operand
     trees, so each case below replaces them with deep copies.  */
  if (gimple_has_substatements (stmt))
    {
      gimple_seq new_seq;
      tree t;

      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  {
	    gbind *bind_stmt = as_a <gbind *> (stmt);
	    gbind *bind_copy = as_a <gbind *> (copy);
	    new_seq = gimple_seq_copy (gimple_bind_body (bind_stmt));
	    gimple_bind_set_body (bind_copy, new_seq);
	    gimple_bind_set_vars (bind_copy,
				  unshare_expr (gimple_bind_vars (bind_stmt)));
	    gimple_bind_set_block (bind_copy, gimple_bind_block (bind_stmt));
	  }
	  break;

	case GIMPLE_CATCH:
	  {
	    gcatch *catch_stmt = as_a <gcatch *> (stmt);
	    gcatch *catch_copy = as_a <gcatch *> (copy);
	    new_seq = gimple_seq_copy (gimple_catch_handler (catch_stmt));
	    gimple_catch_set_handler (catch_copy, new_seq);
	    t = unshare_expr (gimple_catch_types (catch_stmt));
	    gimple_catch_set_types (catch_copy, t);
	  }
	  break;

	case GIMPLE_EH_FILTER:
	  {
	    geh_filter *eh_filter_stmt = as_a <geh_filter *> (stmt);
	    geh_filter *eh_filter_copy = as_a <geh_filter *> (copy);
	    new_seq
	      = gimple_seq_copy (gimple_eh_filter_failure (eh_filter_stmt));
	    gimple_eh_filter_set_failure (eh_filter_copy, new_seq);
	    t = unshare_expr (gimple_eh_filter_types (eh_filter_stmt));
	    gimple_eh_filter_set_types (eh_filter_copy, t);
	  }
	  break;

	case GIMPLE_EH_ELSE:
	  {
	    geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	    geh_else *eh_else_copy = as_a <geh_else *> (copy);
	    new_seq = gimple_seq_copy (gimple_eh_else_n_body (eh_else_stmt));
	    gimple_eh_else_set_n_body (eh_else_copy, new_seq);
	    new_seq = gimple_seq_copy (gimple_eh_else_e_body (eh_else_stmt));
	    gimple_eh_else_set_e_body (eh_else_copy, new_seq);
	  }
	  break;

	case GIMPLE_TRY:
	  {
	    gtry *try_stmt = as_a <gtry *> (stmt);
	    gtry *try_copy = as_a <gtry *> (copy);
	    new_seq = gimple_seq_copy (gimple_try_eval (try_stmt));
	    gimple_try_set_eval (try_copy, new_seq);
	    new_seq = gimple_seq_copy (gimple_try_cleanup (try_stmt));
	    gimple_try_set_cleanup (try_copy, new_seq);
	  }
	  break;

	case GIMPLE_OMP_FOR:
	  new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
	  gimple_omp_for_set_pre_body (copy, new_seq);
	  t = unshare_expr (gimple_omp_for_clauses (stmt));
	  gimple_omp_for_set_clauses (copy, t);
	  {
	    /* Give COPY its own iteration array; the shallow memcpy left
	       it pointing at STMT's.  */
	    gomp_for *omp_for_copy = as_a <gomp_for *> (copy);
	    omp_for_copy->iter = ggc_vec_alloc<gimple_omp_for_iter>
	      ( gimple_omp_for_collapse (stmt));
          }
	  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	    {
	      gimple_omp_for_set_cond (copy, i,
				       gimple_omp_for_cond (stmt, i));
	      gimple_omp_for_set_index (copy, i,
					gimple_omp_for_index (stmt, i));
	      t = unshare_expr (gimple_omp_for_initial (stmt, i));
	      gimple_omp_for_set_initial (copy, i, t);
	      t = unshare_expr (gimple_omp_for_final (stmt, i));
	      gimple_omp_for_set_final (copy, i, t);
	      t = unshare_expr (gimple_omp_for_incr (stmt, i));
	      gimple_omp_for_set_incr (copy, i, t);
	    }
	  goto copy_omp_body;

	case GIMPLE_OMP_PARALLEL:
	  {
	    gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
	    gomp_parallel *omp_par_copy = as_a <gomp_parallel *> (copy);
	    t = unshare_expr (gimple_omp_parallel_clauses (omp_par_stmt));
	    gimple_omp_parallel_set_clauses (omp_par_copy, t);
	    t = unshare_expr (gimple_omp_parallel_child_fn (omp_par_stmt));
	    gimple_omp_parallel_set_child_fn (omp_par_copy, t);
	    t = unshare_expr (gimple_omp_parallel_data_arg (omp_par_stmt));
	    gimple_omp_parallel_set_data_arg (omp_par_copy, t);
	  }
	  goto copy_omp_body;

	case GIMPLE_OMP_TASK:
	  t = unshare_expr (gimple_omp_task_clauses (stmt));
	  gimple_omp_task_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_task_child_fn (stmt));
	  gimple_omp_task_set_child_fn (copy, t);
	  t = unshare_expr (gimple_omp_task_data_arg (stmt));
	  gimple_omp_task_set_data_arg (copy, t);
	  t = unshare_expr (gimple_omp_task_copy_fn (stmt));
	  gimple_omp_task_set_copy_fn (copy, t);
	  t = unshare_expr (gimple_omp_task_arg_size (stmt));
	  gimple_omp_task_set_arg_size (copy, t);
	  t = unshare_expr (gimple_omp_task_arg_align (stmt));
	  gimple_omp_task_set_arg_align (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_CRITICAL:
	  t = unshare_expr (gimple_omp_critical_name
				(as_a <gomp_critical *> (stmt)));
	  gimple_omp_critical_set_name (as_a <gomp_critical *> (copy), t);
	  t = unshare_expr (gimple_omp_critical_clauses
				(as_a <gomp_critical *> (stmt)));
	  gimple_omp_critical_set_clauses (as_a <gomp_critical *> (copy), t);
	  goto copy_omp_body;

	case GIMPLE_OMP_ORDERED:
	  t = unshare_expr (gimple_omp_ordered_clauses
				(as_a <gomp_ordered *> (stmt)));
	  gimple_omp_ordered_set_clauses (as_a <gomp_ordered *> (copy), t);
	  goto copy_omp_body;

	case GIMPLE_OMP_SCAN:
	  t = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt));
	  t = unshare_expr (t);
	  gimple_omp_scan_set_clauses (as_a <gomp_scan *> (copy), t);
	  goto copy_omp_body;

	case GIMPLE_OMP_TASKGROUP:
	  t = unshare_expr (gimple_omp_taskgroup_clauses (stmt));
	  gimple_omp_taskgroup_set_clauses (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_SECTIONS:
	  t = unshare_expr (gimple_omp_sections_clauses (stmt));
	  gimple_omp_sections_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_sections_control (stmt));
	  gimple_omp_sections_set_control (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_SINGLE:
	  {
	    gomp_single *omp_single_copy = as_a <gomp_single *> (copy);
	    t = unshare_expr (gimple_omp_single_clauses (stmt));
	    gimple_omp_single_set_clauses (omp_single_copy, t);
	  }
	  goto copy_omp_body;

	case GIMPLE_OMP_SCOPE:
	  t = unshare_expr (gimple_omp_scope_clauses (stmt));
	  gimple_omp_scope_set_clauses (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_TARGET:
	  {
	    gomp_target *omp_target_stmt = as_a <gomp_target *> (stmt);
	    gomp_target *omp_target_copy = as_a <gomp_target *> (copy);
	    t = unshare_expr (gimple_omp_target_clauses (omp_target_stmt));
	    gimple_omp_target_set_clauses (omp_target_copy, t);
	    t = unshare_expr (gimple_omp_target_data_arg (omp_target_stmt));
	    gimple_omp_target_set_data_arg (omp_target_copy, t);
	  }
	  goto copy_omp_body;

	case GIMPLE_OMP_TEAMS:
	  {
	    gomp_teams *omp_teams_copy = as_a <gomp_teams *> (copy);
	    t = unshare_expr (gimple_omp_teams_clauses (stmt));
	    gimple_omp_teams_set_clauses (omp_teams_copy, t);
	  }
	  /* FALLTHRU  */

	case GIMPLE_OMP_SECTION:
	case GIMPLE_OMP_MASTER:
	/* Shared tail for all the OMP cases above: deep-copy the body
	   sequence.  */
	copy_omp_body:
	  new_seq = gimple_seq_copy (gimple_omp_body (stmt));
	  gimple_omp_set_body (copy, new_seq);
	  break;

	case GIMPLE_OMP_MASKED:
	  t = unshare_expr (gimple_omp_masked_clauses (stmt));
	  gimple_omp_masked_set_clauses (copy, t);
	  goto copy_omp_body;

	case GIMPLE_TRANSACTION:
	  new_seq = gimple_seq_copy (gimple_transaction_body (
				       as_a <gtransaction *> (stmt)));
	  gimple_transaction_set_body (as_a <gtransaction *> (copy),
				       new_seq);
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
	  gimple_wce_set_cleanup (copy, new_seq);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Make copy of operands.  */
  for (i = 0; i < num_ops; i++)
    gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));

  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vdef (copy, gimple_vdef (stmt));
      gimple_set_vuse (copy, gimple_vuse (stmt));
    }

  /* Clear out SSA operand vectors on COPY.  */
  if (gimple_has_ops (stmt))
    {
      gimple_set_use_ops (copy, NULL);

      /* SSA operands need to be updated.  */
      gimple_set_modified (copy, true);
    }

  /* Keep the per-function count of debug markers in sync with the new
     copy.  */
  if (gimple_debug_nonbind_marker_p (stmt))
    cfun->debug_marker_count++;

  return copy;
}
2156 
2157 /* Move OLD_STMT's vuse and vdef operands to NEW_STMT, on the assumption
2158    that OLD_STMT is about to be removed.  */
2159 
2160 void
gimple_move_vops(gimple * new_stmt,gimple * old_stmt)2161 gimple_move_vops (gimple *new_stmt, gimple *old_stmt)
2162 {
2163   tree vdef = gimple_vdef (old_stmt);
2164   gimple_set_vuse (new_stmt, gimple_vuse (old_stmt));
2165   gimple_set_vdef (new_stmt, vdef);
2166   if (vdef && TREE_CODE (vdef) == SSA_NAME)
2167     SSA_NAME_DEF_STMT (vdef) = new_stmt;
2168 }
2169 
2170 /* Return true if statement S has side-effects.  We consider a
2171    statement to have side effects if:
2172 
2173    - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
2174    - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS.  */
2175 
2176 bool
gimple_has_side_effects(const gimple * s)2177 gimple_has_side_effects (const gimple *s)
2178 {
2179   if (is_gimple_debug (s))
2180     return false;
2181 
2182   /* We don't have to scan the arguments to check for
2183      volatile arguments, though, at present, we still
2184      do a scan to check for TREE_SIDE_EFFECTS.  */
2185   if (gimple_has_volatile_ops (s))
2186     return true;
2187 
2188   if (gimple_code (s) == GIMPLE_ASM
2189       && gimple_asm_volatile_p (as_a <const gasm *> (s)))
2190     return true;
2191 
2192   if (is_gimple_call (s))
2193     {
2194       int flags = gimple_call_flags (s);
2195 
2196       /* An infinite loop is considered a side effect.  */
2197       if (!(flags & (ECF_CONST | ECF_PURE))
2198 	  || (flags & ECF_LOOPING_CONST_OR_PURE))
2199 	return true;
2200 
2201       return false;
2202     }
2203 
2204   return false;
2205 }
2206 
2207 /* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
2208    Return true if S can trap.  When INCLUDE_MEM is true, check whether
2209    the memory operations could trap.  When INCLUDE_STORES is true and
2210    S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked.  */
2211 
bool
gimple_could_trap_p_1 (const gimple *s, bool include_mem, bool include_stores)
{
  tree t, div = NULL_TREE;
  enum tree_code op;

  if (include_mem)
    {
      /* For assignments, operand 0 is the LHS (the store); skip it
	 unless INCLUDE_STORES was requested.  */
      unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;

      for (i = start; i < gimple_num_ops (s); i++)
	if (tree_could_trap_p (gimple_op (s, i)))
	  return true;
    }

  switch (gimple_code (s))
    {
    case GIMPLE_ASM:
      return gimple_asm_volatile_p (as_a <const gasm *> (s));

    case GIMPLE_CALL:
      /* Internal functions are not subject to user-visible traps.  */
      if (gimple_call_internal_p (s))
	return false;
      t = gimple_call_fndecl (s);
      /* Assume that indirect and calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
	return true;
      return false;

    case GIMPLE_ASSIGN:
      op = gimple_assign_rhs_code (s);

      /* For COND_EXPR only the condition may trap.  */
      if (op == COND_EXPR)
	return tree_could_trap_p (gimple_assign_rhs1 (s));

      /* For comparisons we need to check rhs operand types instead of lhs type
         (which is BOOLEAN_TYPE).  */
      if (TREE_CODE_CLASS (op) == tcc_comparison)
	t = TREE_TYPE (gimple_assign_rhs1 (s));
      else
	t = TREE_TYPE (gimple_assign_lhs (s));

      /* Pass the divisor so operation_could_trap_p can prove a nonzero
	 constant divisor cannot trap.  */
      if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
	div = gimple_assign_rhs2 (s);

      return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
				      (INTEGRAL_TYPE_P (t)
				       && TYPE_OVERFLOW_TRAPS (t)),
				      div));

    case GIMPLE_COND:
      t = TREE_TYPE (gimple_cond_lhs (s));
      return operation_could_trap_p (gimple_cond_code (s),
				     FLOAT_TYPE_P (t), false, NULL_TREE);

    default:
      break;
    }

  return false;
}
2274 
2275 /* Return true if statement S can trap.  */
2276 
2277 bool
gimple_could_trap_p(const gimple * s)2278 gimple_could_trap_p (const gimple *s)
2279 {
2280   return gimple_could_trap_p_1 (s, true, true);
2281 }
2282 
2283 /* Return true if RHS of a GIMPLE_ASSIGN S can trap.  */
2284 
2285 bool
gimple_assign_rhs_could_trap_p(gimple * s)2286 gimple_assign_rhs_could_trap_p (gimple *s)
2287 {
2288   gcc_assert (is_gimple_assign (s));
2289   return gimple_could_trap_p_1 (s, true, false);
2290 }
2291 
2292 
2293 /* Print debugging information for gimple stmts generated.  */
2294 
2295 void
dump_gimple_statistics(void)2296 dump_gimple_statistics (void)
2297 {
2298   int i;
2299   uint64_t total_tuples = 0, total_bytes = 0;
2300 
2301   if (! GATHER_STATISTICS)
2302     {
2303       fprintf (stderr, "No GIMPLE statistics\n");
2304       return;
2305     }
2306 
2307   fprintf (stderr, "\nGIMPLE statements\n");
2308   fprintf (stderr, "Kind                   Stmts      Bytes\n");
2309   fprintf (stderr, "---------------------------------------\n");
2310   for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2311     {
2312       fprintf (stderr, "%-20s %7" PRIu64 "%c %10" PRIu64 "%c\n",
2313 	       gimple_alloc_kind_names[i],
2314 	       SIZE_AMOUNT (gimple_alloc_counts[i]),
2315 	       SIZE_AMOUNT (gimple_alloc_sizes[i]));
2316       total_tuples += gimple_alloc_counts[i];
2317       total_bytes += gimple_alloc_sizes[i];
2318     }
2319   fprintf (stderr, "---------------------------------------\n");
2320   fprintf (stderr, "%-20s %7" PRIu64 "%c %10" PRIu64 "%c\n", "Total",
2321 	   SIZE_AMOUNT (total_tuples), SIZE_AMOUNT (total_bytes));
2322   fprintf (stderr, "---------------------------------------\n");
2323 }
2324 
2325 
2326 /* Return the number of operands needed on the RHS of a GIMPLE
2327    assignment for an expression with tree code CODE.  */
2328 
2329 unsigned
get_gimple_rhs_num_ops(enum tree_code code)2330 get_gimple_rhs_num_ops (enum tree_code code)
2331 {
2332   switch (get_gimple_rhs_class (code))
2333     {
2334     case GIMPLE_UNARY_RHS:
2335     case GIMPLE_SINGLE_RHS:
2336       return 1;
2337     case GIMPLE_BINARY_RHS:
2338       return 2;
2339     case GIMPLE_TERNARY_RHS:
2340       return 3;
2341     default:
2342       gcc_unreachable ();
2343     }
2344 }
2345 
/* Build the per-tree-code table mapping each tree code from all-tree.def
   to its GIMPLE RHS classification (GIMPLE_UNARY_RHS, GIMPLE_BINARY_RHS,
   GIMPLE_TERNARY_RHS or GIMPLE_SINGLE_RHS); codes with no valid GIMPLE
   counterpart map to GIMPLE_INVALID_RHS.  */
#define DEFTREECODE(SYM, STRING, TYPE, NARGS)   			    \
  (unsigned char)							    \
  ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS				    \
   : ((TYPE) == tcc_binary						    \
      || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS   		    \
   : ((TYPE) == tcc_constant						    \
      || (TYPE) == tcc_declaration					    \
      || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS			    \
   : ((SYM) == TRUTH_AND_EXPR						    \
      || (SYM) == TRUTH_OR_EXPR						    \
      || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS			    \
   : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS				    \
   : ((SYM) == COND_EXPR						    \
      || (SYM) == WIDEN_MULT_PLUS_EXPR					    \
      || (SYM) == WIDEN_MULT_MINUS_EXPR					    \
      || (SYM) == DOT_PROD_EXPR						    \
      || (SYM) == SAD_EXPR						    \
      || (SYM) == REALIGN_LOAD_EXPR					    \
      || (SYM) == VEC_COND_EXPR						    \
      || (SYM) == VEC_PERM_EXPR                                             \
      || (SYM) == BIT_INSERT_EXPR) ? GIMPLE_TERNARY_RHS			    \
   : ((SYM) == CONSTRUCTOR						    \
      || (SYM) == OBJ_TYPE_REF						    \
      || (SYM) == ASSERT_EXPR						    \
      || (SYM) == ADDR_EXPR						    \
      || (SYM) == WITH_SIZE_EXPR					    \
      || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS				    \
   : GIMPLE_INVALID_RHS),
#define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,

/* Indexed by enum tree_code; consulted by get_gimple_rhs_class.  */
const unsigned char gimple_rhs_class_table[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
2382 
2383 /* Canonicalize a tree T for use in a COND_EXPR as conditional.  Returns
2384    a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
2385    we failed to create one.  */
2386 
tree
canonicalize_cond_expr_cond (tree t)
{
  /* Strip conversions around boolean operations.  */
  if (CONVERT_EXPR_P (t)
      && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
          || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
	     == BOOLEAN_TYPE))
    t = TREE_OPERAND (t, 0);

  /* For !x use x == 0.  */
  if (TREE_CODE (t) == TRUTH_NOT_EXPR)
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (EQ_EXPR, TREE_TYPE (t),
		  top0, build_int_cst (TREE_TYPE (top0), 0));
    }
  /* For cmp ? 1 : 0 use cmp.  */
  else if (TREE_CODE (t) == COND_EXPR
	   && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
	   && integer_onep (TREE_OPERAND (t, 1))
	   && integer_zerop (TREE_OPERAND (t, 2)))
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (TREE_CODE (top0), TREE_TYPE (t),
		  TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
    }
  /* For x ^ y use x != y.  */
  else if (TREE_CODE (t) == BIT_XOR_EXPR)
    t = build2 (NE_EXPR, TREE_TYPE (t),
		TREE_OPERAND (t, 0), TREE_OPERAND (t, 1));

  /* Only return the canonicalized tree if it is actually a valid
     COND_EXPR condition; otherwise signal failure with NULL_TREE.  */
  if (is_gimple_condexpr (t))
    return t;

  return NULL_TREE;
}
2424 
2425 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
2426    the positions marked by the set ARGS_TO_SKIP.  */
2427 
2428 gcall *
gimple_call_copy_skip_args(gcall * stmt,bitmap args_to_skip)2429 gimple_call_copy_skip_args (gcall *stmt, bitmap args_to_skip)
2430 {
2431   int i;
2432   int nargs = gimple_call_num_args (stmt);
2433   auto_vec<tree> vargs (nargs);
2434   gcall *new_stmt;
2435 
2436   for (i = 0; i < nargs; i++)
2437     if (!bitmap_bit_p (args_to_skip, i))
2438       vargs.quick_push (gimple_call_arg (stmt, i));
2439 
2440   if (gimple_call_internal_p (stmt))
2441     new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
2442 					       vargs);
2443   else
2444     new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
2445 
2446   if (gimple_call_lhs (stmt))
2447     gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2448 
2449   gimple_set_vuse (new_stmt, gimple_vuse (stmt));
2450   gimple_set_vdef (new_stmt, gimple_vdef (stmt));
2451 
2452   if (gimple_has_location (stmt))
2453     gimple_set_location (new_stmt, gimple_location (stmt));
2454   gimple_call_copy_flags (new_stmt, stmt);
2455   gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2456 
2457   gimple_set_modified (new_stmt, true);
2458 
2459   return new_stmt;
2460 }
2461 
2462 
2463 
2464 /* Return true if the field decls F1 and F2 are at the same offset.
2465 
2466    This is intended to be used on GIMPLE types only.  */
2467 
bool
gimple_compare_field_offset (tree f1, tree f2)
{
  /* With equal DECL_OFFSET_ALIGN the byte/bit offset split is directly
     comparable.  */
  if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
    {
      tree offset1 = DECL_FIELD_OFFSET (f1);
      tree offset2 = DECL_FIELD_OFFSET (f2);
      return ((offset1 == offset2
	       /* Once gimplification is done, self-referential offsets are
		  instantiated as operand #2 of the COMPONENT_REF built for
		  each access and reset.  Therefore, they are not relevant
		  anymore and fields are interchangeable provided that they
		  represent the same access.  */
	       || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
		   && TREE_CODE (offset2) == PLACEHOLDER_EXPR
		   && (DECL_SIZE (f1) == DECL_SIZE (f2)
		       || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
			   && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
		       || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
		   && DECL_ALIGN (f1) == DECL_ALIGN (f2))
	       || operand_equal_p (offset1, offset2, 0))
	      && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
				     DECL_FIELD_BIT_OFFSET (f2)));
    }

  /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
     should be, so handle differing ones specially by decomposing
     the offset into a byte and bit offset manually.  */
  if (tree_fits_shwi_p (DECL_FIELD_OFFSET (f1))
      && tree_fits_shwi_p (DECL_FIELD_OFFSET (f2)))
    {
      unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
      unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
      bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
      byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
		      + bit_offset1 / BITS_PER_UNIT);
      bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
      byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
		      + bit_offset2 / BITS_PER_UNIT);
      if (byte_offset1 != byte_offset2)
	return false;
      /* Same byte offset; the fields match iff the residual bit offsets
	 agree too.  */
      return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
    }

  /* Offsets not representable as shwi — conservatively unequal.  */
  return false;
}
2514 
2515 
2516 /* Return a type the same as TYPE except unsigned or
2517    signed according to UNSIGNEDP.  */
2518 
static tree
gimple_signed_or_unsigned_type (bool unsignedp, tree type)
{
  tree type1;
  int i;

  /* First try exact matches against the standard C integer type nodes,
     keyed on the main variant of TYPE.  */
  type1 = TYPE_MAIN_VARIANT (type);
  if (type1 == signed_char_type_node
      || type1 == char_type_node
      || type1 == unsigned_char_type_node)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (type1 == integer_type_node || type1 == unsigned_type_node)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (type1 == long_long_integer_type_node
      || type1 == long_long_unsigned_type_node)
    return unsignedp
           ? long_long_unsigned_type_node
	   : long_long_integer_type_node;

  /* Extended __intN types (e.g. __int128) when the target enables them.  */
  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    if (int_n_enabled_p[i]
	&& (type1 == int_n_trees[i].unsigned_type
	    || type1 == int_n_trees[i].signed_type))
	return unsignedp
	  ? int_n_trees[i].unsigned_type
	  : int_n_trees[i].signed_type;

#if HOST_BITS_PER_WIDE_INT >= 64
  if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

/* Helper macros covering the fixed-point type families (fract/accum,
   plain, saturating, and per-mode variants).  Each expands to a chain
   of exact-match tests like the integer tests above.  */
#define GIMPLE_FIXED_TYPES(NAME)	    \
  if (type1 == short_ ## NAME ## _type_node \
      || type1 == unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_short_ ## NAME ## _type_node \
		     : short_ ## NAME ## _type_node; \
  if (type1 == NAME ## _type_node \
      || type1 == unsigned_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_ ## NAME ## _type_node \
		     : NAME ## _type_node; \
  if (type1 == long_ ## NAME ## _type_node \
      || type1 == unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_ ## NAME ## _type_node \
		     : long_ ## NAME ## _type_node; \
  if (type1 == long_long_ ## NAME ## _type_node \
      || type1 == unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
		     : long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES(NAME) \
  if (type1 == NAME ## _type_node \
      || type1 == u ## NAME ## _type_node) \
    return unsignedp ? u ## NAME ## _type_node \
		     : NAME ## _type_node;

#define GIMPLE_FIXED_TYPES_SAT(NAME) \
  if (type1 == sat_ ## short_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
		     : sat_ ## short_ ## NAME ## _type_node; \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
		     : sat_ ## long_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
		     : sat_ ## long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES_SAT(NAME)	\
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## u ## NAME ## _type_node) \
    return unsignedp ? sat_ ## u ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node;

  GIMPLE_FIXED_TYPES (fract);
  GIMPLE_FIXED_TYPES_SAT (fract);
  GIMPLE_FIXED_TYPES (accum);
  GIMPLE_FIXED_TYPES_SAT (accum);

  GIMPLE_FIXED_MODE_TYPES (qq);
  GIMPLE_FIXED_MODE_TYPES (hq);
  GIMPLE_FIXED_MODE_TYPES (sq);
  GIMPLE_FIXED_MODE_TYPES (dq);
  GIMPLE_FIXED_MODE_TYPES (tq);
  GIMPLE_FIXED_MODE_TYPES_SAT (qq);
  GIMPLE_FIXED_MODE_TYPES_SAT (hq);
  GIMPLE_FIXED_MODE_TYPES_SAT (sq);
  GIMPLE_FIXED_MODE_TYPES_SAT (dq);
  GIMPLE_FIXED_MODE_TYPES_SAT (tq);
  GIMPLE_FIXED_MODE_TYPES (ha);
  GIMPLE_FIXED_MODE_TYPES (sa);
  GIMPLE_FIXED_MODE_TYPES (da);
  GIMPLE_FIXED_MODE_TYPES (ta);
  GIMPLE_FIXED_MODE_TYPES_SAT (ha);
  GIMPLE_FIXED_MODE_TYPES_SAT (sa);
  GIMPLE_FIXED_MODE_TYPES_SAT (da);
  GIMPLE_FIXED_MODE_TYPES_SAT (ta);

  /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
     the precision; they have precision set to match their range, but
     may use a wider mode to match an ABI.  If we change modes, we may
     wind up with bad conversions.  For INTEGER_TYPEs in C, must check
     the precision as well, so as to yield correct results for
     bit-field types.  C++ does not have these separate bit-field
     types, and producing a signed or unsigned variant of an
     ENUMERAL_TYPE may cause other problems as well.  */
  if (!INTEGRAL_TYPE_P (type)
      || TYPE_UNSIGNED (type) == unsignedp)
    return type;

/* No exact node match; fall back to matching by mode and precision.  */
#define TYPE_OK(node)							    \
  (TYPE_MODE (type) == TYPE_MODE (node)					    \
   && TYPE_PRECISION (type) == TYPE_PRECISION (node))
  if (TYPE_OK (signed_char_type_node))
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (TYPE_OK (integer_type_node))
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (TYPE_OK (short_integer_type_node))
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (TYPE_OK (long_integer_type_node))
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (TYPE_OK (long_long_integer_type_node))
    return (unsignedp
	    ? long_long_unsigned_type_node
	    : long_long_integer_type_node);

  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    if (int_n_enabled_p[i]
	&& TYPE_MODE (type) == int_n_data[i].m
	&& TYPE_PRECISION (type) == int_n_data[i].bitsize)
	return unsignedp
	  ? int_n_trees[i].unsigned_type
	  : int_n_trees[i].signed_type;

#if HOST_BITS_PER_WIDE_INT >= 64
  if (TYPE_OK (intTI_type_node))
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (TYPE_OK (intDI_type_node))
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (TYPE_OK (intSI_type_node))
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (TYPE_OK (intHI_type_node))
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (TYPE_OK (intQI_type_node))
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

#undef GIMPLE_FIXED_TYPES
#undef GIMPLE_FIXED_MODE_TYPES
#undef GIMPLE_FIXED_TYPES_SAT
#undef GIMPLE_FIXED_MODE_TYPES_SAT
#undef TYPE_OK

  /* Last resort: build a brand-new integer type of the right precision
     and signedness.  */
  return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
}
2692 
2693 
2694 /* Return an unsigned type the same as TYPE in other respects.  */
2695 
2696 tree
gimple_unsigned_type(tree type)2697 gimple_unsigned_type (tree type)
2698 {
2699   return gimple_signed_or_unsigned_type (true, type);
2700 }
2701 
2702 
2703 /* Return a signed type the same as TYPE in other respects.  */
2704 
2705 tree
gimple_signed_type(tree type)2706 gimple_signed_type (tree type)
2707 {
2708   return gimple_signed_or_unsigned_type (false, type);
2709 }
2710 
2711 
2712 /* Return the typed-based alias set for T, which may be an expression
2713    or a type.  Return -1 if we don't do anything special.  */
2714 
alias_set_type
gimple_get_alias_set (tree t)
{
  /* That's all the expressions we handle specially.  Non-type trees
     get no special treatment here.  */
  if (!TYPE_P (t))
    return -1;

  /* For convenience, follow the C standard when dealing with
     character types.  Any object may be accessed via an lvalue that
     has character type.  Alias set 0 means "conflicts with everything".  */
  if (t == char_type_node
      || t == signed_char_type_node
      || t == unsigned_char_type_node)
    return 0;

  /* Allow aliasing between signed and unsigned variants of the same
     type.  We treat the signed variant as canonical.  */
  if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
    {
      tree t1 = gimple_signed_type (t);

      /* t1 == t can happen for boolean nodes which are always unsigned.  */
      if (t1 != t)
	return get_alias_set (t1);
    }

  /* Allow aliasing between enumeral types and the underlying
     integer type.  This is required for C since those are
     compatible types.  Passing false requests the signed variant;
     the unsigned case was already canonicalized above.  */
  else if (TREE_CODE (t) == ENUMERAL_TYPE)
    {
      tree t1 = lang_hooks.types.type_for_size (tree_to_uhwi (TYPE_SIZE (t)),
						false /* short-cut above */);
      return get_alias_set (t1);
    }

  /* No special handling: let the generic machinery decide.  */
  return -1;
}
2753 
2754 
2755 /* Helper for gimple_ior_addresses_taken_1.  */
2756 
2757 static bool
gimple_ior_addresses_taken_1(gimple *,tree addr,tree,void * data)2758 gimple_ior_addresses_taken_1 (gimple *, tree addr, tree, void *data)
2759 {
2760   bitmap addresses_taken = (bitmap)data;
2761   addr = get_base_address (addr);
2762   if (addr
2763       && DECL_P (addr))
2764     {
2765       bitmap_set_bit (addresses_taken, DECL_UID (addr));
2766       return true;
2767     }
2768   return false;
2769 }
2770 
2771 /* Set the bit for the uid of all decls that have their address taken
2772    in STMT in the ADDRESSES_TAKEN bitmap.  Returns true if there
2773    were any in this stmt.  */
2774 
2775 bool
gimple_ior_addresses_taken(bitmap addresses_taken,gimple * stmt)2776 gimple_ior_addresses_taken (bitmap addresses_taken, gimple *stmt)
2777 {
2778   return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
2779 					gimple_ior_addresses_taken_1);
2780 }
2781 
2782 
2783 /* Return true when STMTs arguments and return value match those of FNDECL,
2784    a decl of a builtin function.  */
2785 
bool
gimple_builtin_call_types_compatible_p (const gimple *stmt, tree fndecl)
{
  gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN);

  /* For normal builtins, compare against the explicit builtin decl
     when one exists, since its prototype is the canonical one.  */
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    if (tree decl = builtin_decl_explicit (DECL_FUNCTION_CODE (fndecl)))
      fndecl = decl;

  /* If the call has an LHS, its type must be convertible from the
     declared return type without any change in semantics.  */
  tree ret = gimple_call_lhs (stmt);
  if (ret
      && !useless_type_conversion_p (TREE_TYPE (ret),
				     TREE_TYPE (TREE_TYPE (fndecl))))
    return false;

  /* Walk the actual arguments in parallel with the prototype's
     argument-type list.  */
  tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  unsigned nargs = gimple_call_num_args (stmt);
  for (unsigned i = 0; i < nargs; ++i)
    {
      /* Variadic args follow.  */
      if (!targs)
	return true;
      tree arg = gimple_call_arg (stmt, i);
      tree type = TREE_VALUE (targs);
      if (!useless_type_conversion_p (type, TREE_TYPE (arg))
	  /* char/short integral arguments are promoted to int
	     by several frontends if targetm.calls.promote_prototypes
	     is true.  Allow such promotion too.  */
	  && !(INTEGRAL_TYPE_P (type)
	       && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
	       && targetm.calls.promote_prototypes (TREE_TYPE (fndecl))
	       && useless_type_conversion_p (integer_type_node,
					     TREE_TYPE (arg))))
	return false;
      targs = TREE_CHAIN (targs);
    }
  /* Any leftover prototype entry must be the trailing void marker,
     otherwise the call passed fewer arguments than declared.  */
  if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
    return false;
  return true;
}
2826 
2827 /* Return true when STMT is operator a replaceable delete call.  */
2828 
2829 bool
gimple_call_operator_delete_p(const gcall * stmt)2830 gimple_call_operator_delete_p (const gcall *stmt)
2831 {
2832   tree fndecl;
2833 
2834   if ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE)
2835     return DECL_IS_OPERATOR_DELETE_P (fndecl);
2836   return false;
2837 }
2838 
2839 /* Return true when STMT is builtins call.  */
2840 
2841 bool
gimple_call_builtin_p(const gimple * stmt)2842 gimple_call_builtin_p (const gimple *stmt)
2843 {
2844   tree fndecl;
2845   if (is_gimple_call (stmt)
2846       && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2847       && DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN)
2848     return gimple_builtin_call_types_compatible_p (stmt, fndecl);
2849   return false;
2850 }
2851 
2852 /* Return true when STMT is builtins call to CLASS.  */
2853 
2854 bool
gimple_call_builtin_p(const gimple * stmt,enum built_in_class klass)2855 gimple_call_builtin_p (const gimple *stmt, enum built_in_class klass)
2856 {
2857   tree fndecl;
2858   if (is_gimple_call (stmt)
2859       && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2860       && DECL_BUILT_IN_CLASS (fndecl) == klass)
2861     return gimple_builtin_call_types_compatible_p (stmt, fndecl);
2862   return false;
2863 }
2864 
2865 /* Return true when STMT is builtins call to CODE of CLASS.  */
2866 
2867 bool
gimple_call_builtin_p(const gimple * stmt,enum built_in_function code)2868 gimple_call_builtin_p (const gimple *stmt, enum built_in_function code)
2869 {
2870   tree fndecl;
2871   if (is_gimple_call (stmt)
2872       && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2873       && fndecl_built_in_p (fndecl, code))
2874     return gimple_builtin_call_types_compatible_p (stmt, fndecl);
2875   return false;
2876 }
2877 
2878 /* If CALL is a call to a combined_fn (i.e. an internal function or
2879    a normal built-in function), return its code, otherwise return
2880    CFN_LAST.  */
2881 
2882 combined_fn
gimple_call_combined_fn(const gimple * stmt)2883 gimple_call_combined_fn (const gimple *stmt)
2884 {
2885   if (const gcall *call = dyn_cast <const gcall *> (stmt))
2886     {
2887       if (gimple_call_internal_p (call))
2888 	return as_combined_fn (gimple_call_internal_fn (call));
2889 
2890       tree fndecl = gimple_call_fndecl (stmt);
2891       if (fndecl
2892 	  && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
2893 	  && gimple_builtin_call_types_compatible_p (stmt, fndecl))
2894 	return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
2895     }
2896   return CFN_LAST;
2897 }
2898 
2899 /* Return true if STMT clobbers memory.  STMT is required to be a
2900    GIMPLE_ASM.  */
2901 
2902 bool
gimple_asm_clobbers_memory_p(const gasm * stmt)2903 gimple_asm_clobbers_memory_p (const gasm *stmt)
2904 {
2905   unsigned i;
2906 
2907   for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
2908     {
2909       tree op = gimple_asm_clobber_op (stmt, i);
2910       if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
2911 	return true;
2912     }
2913 
2914   /* Non-empty basic ASM implicitly clobbers memory.  */
2915   if (gimple_asm_input_p (stmt) && strlen (gimple_asm_string (stmt)) != 0)
2916     return true;
2917 
2918   return false;
2919 }
2920 
2921 /* Dump bitmap SET (assumed to contain VAR_DECLs) to FILE.  */
2922 
2923 void
dump_decl_set(FILE * file,bitmap set)2924 dump_decl_set (FILE *file, bitmap set)
2925 {
2926   if (set)
2927     {
2928       bitmap_iterator bi;
2929       unsigned i;
2930 
2931       fprintf (file, "{ ");
2932 
2933       EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
2934 	{
2935 	  fprintf (file, "D.%u", i);
2936 	  fprintf (file, " ");
2937 	}
2938 
2939       fprintf (file, "}");
2940     }
2941   else
2942     fprintf (file, "NIL");
2943 }
2944 
2945 /* Return true when CALL is a call stmt that definitely doesn't
2946    free any memory or makes it unavailable otherwise.  */
bool
nonfreeing_call_p (gimple *call)
{
  /* Leaf normal builtins cannot call back into the unit, so apart
     from the explicitly freeing ones below they cannot free memory.  */
  if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
      && gimple_call_flags (call) & ECF_LEAF)
    switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
      {
	/* Just in case these become ECF_LEAF in the future.  */
	case BUILT_IN_FREE:
	case BUILT_IN_TM_FREE:
	case BUILT_IN_REALLOC:
	case BUILT_IN_STACK_RESTORE:
	  return false;
	default:
	  return true;
      }
  else if (gimple_call_internal_p (call))
    switch (gimple_call_internal_fn (call))
      {
      case IFN_ABNORMAL_DISPATCHER:
        return true;
      case IFN_ASAN_MARK:
	/* ASAN_MARK only "frees" (poisons) when its first argument is
	   not ASAN_MARK_UNPOISON.  */
	return tree_to_uhwi (gimple_call_arg (call, 0)) == ASAN_MARK_UNPOISON;
      default:
	if (gimple_call_flags (call) & ECF_LEAF)
	  return true;
	return false;
      }

  /* For other direct calls consult the callgraph: the nonfreeing_fn
     flag is only meaningful when the final callee cannot be replaced
     by a different (possibly freeing) implementation at link time.  */
  tree fndecl = gimple_call_fndecl (call);
  if (!fndecl)
    return false;
  struct cgraph_node *n = cgraph_node::get (fndecl);
  if (!n)
    return false;
  enum availability availability;
  n = n->function_symbol (&availability);
  if (!n || availability <= AVAIL_INTERPOSABLE)
    return false;
  return n->nonfreeing_fn;
}
2988 
2989 /* Return true when CALL is a call stmt that definitely need not
2990    be considered to be a memory barrier.  */
2991 bool
nonbarrier_call_p(gimple * call)2992 nonbarrier_call_p (gimple *call)
2993 {
2994   if (gimple_call_flags (call) & (ECF_PURE | ECF_CONST))
2995     return true;
2996   /* Should extend this to have a nonbarrier_fn flag, just as above in
2997      the nonfreeing case.  */
2998   return false;
2999 }
3000 
3001 /* Callback for walk_stmt_load_store_ops.
3002 
3003    Return TRUE if OP will dereference the tree stored in DATA, FALSE
3004    otherwise.
3005 
3006    This routine only makes a superficial check for a dereference.  Thus
3007    it must only be used if it is safe to return a false negative.  */
3008 static bool
check_loadstore(gimple *,tree op,tree,void * data)3009 check_loadstore (gimple *, tree op, tree, void *data)
3010 {
3011   if (TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
3012     {
3013       /* Some address spaces may legitimately dereference zero.  */
3014       addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (op));
3015       if (targetm.addr_space.zero_address_valid (as))
3016 	return false;
3017 
3018       return operand_equal_p (TREE_OPERAND (op, 0), (tree)data, 0);
3019     }
3020   return false;
3021 }
3022 
3023 
3024 /* Return true if OP can be inferred to be non-NULL after STMT executes,
3025    either by using a pointer dereference or attributes.  */
3026 bool
infer_nonnull_range(gimple * stmt,tree op)3027 infer_nonnull_range (gimple *stmt, tree op)
3028 {
3029   return (infer_nonnull_range_by_dereference (stmt, op)
3030 	  || infer_nonnull_range_by_attribute (stmt, op));
3031 }
3032 
3033 /* Return true if OP can be inferred to be non-NULL after STMT
3034    executes by using a pointer dereference.  */
3035 bool
infer_nonnull_range_by_dereference(gimple * stmt,tree op)3036 infer_nonnull_range_by_dereference (gimple *stmt, tree op)
3037 {
3038   /* We can only assume that a pointer dereference will yield
3039      non-NULL if -fdelete-null-pointer-checks is enabled.  */
3040   if (!flag_delete_null_pointer_checks
3041       || !POINTER_TYPE_P (TREE_TYPE (op))
3042       || gimple_code (stmt) == GIMPLE_ASM
3043       || gimple_clobber_p (stmt))
3044     return false;
3045 
3046   if (walk_stmt_load_store_ops (stmt, (void *)op,
3047 				check_loadstore, check_loadstore))
3048     return true;
3049 
3050   return false;
3051 }
3052 
3053 /* Return true if OP can be inferred to be a non-NULL after STMT
3054    executes by using attributes.  */
bool
infer_nonnull_range_by_attribute (gimple *stmt, tree op)
{
  /* We can only assume that a pointer dereference will yield
     non-NULL if -fdelete-null-pointer-checks is enabled.  */
  if (!flag_delete_null_pointer_checks
      || !POINTER_TYPE_P (TREE_TYPE (op))
      || gimple_code (stmt) == GIMPLE_ASM)
    return false;

  if (is_gimple_call (stmt) && !gimple_call_internal_p (stmt))
    {
      tree fntype = gimple_call_fntype (stmt);
      tree attrs = TYPE_ATTRIBUTES (fntype);
      /* NOTE: lookup_attribute advances ATTRS to the next "nonnull"
	 occurrence, so this loop visits each "nonnull" attribute in
	 turn (there may be several, each listing argument indices).  */
      for (; attrs; attrs = TREE_CHAIN (attrs))
	{
	  attrs = lookup_attribute ("nonnull", attrs);

	  /* If "nonnull" wasn't specified, we know nothing about
	     the argument.  */
	  if (attrs == NULL_TREE)
	    return false;

	  /* If "nonnull" applies to all the arguments, then ARG
	     is non-null if it's in the argument list.  */
	  if (TREE_VALUE (attrs) == NULL_TREE)
	    {
	      for (unsigned int i = 0; i < gimple_call_num_args (stmt); i++)
		{
		  if (POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (stmt, i)))
		      && operand_equal_p (op, gimple_call_arg (stmt, i), 0))
		    return true;
		}
	      return false;
	    }

	  /* Now see if op appears in the nonnull list.  The attribute
	     arguments are 1-based, hence the -1 below.  */
	  for (tree t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
	    {
	      unsigned int idx = TREE_INT_CST_LOW (TREE_VALUE (t)) - 1;
	      if (idx < gimple_call_num_args (stmt))
		{
		  tree arg = gimple_call_arg (stmt, idx);
		  if (operand_equal_p (op, arg, 0))
		    return true;
		}
	    }
	}
    }

  /* If this function is marked as returning non-null, then we can
     infer OP is non-null if it is used in the return statement.  */
  if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
    if (gimple_return_retval (return_stmt)
	&& operand_equal_p (gimple_return_retval (return_stmt), op, 0)
	&& lookup_attribute ("returns_nonnull",
			     TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
      return true;

  return false;
}
3116 
3117 /* Compare two case labels.  Because the front end should already have
3118    made sure that case ranges do not overlap, it is enough to only compare
3119    the CASE_LOW values of each case label.  */
3120 
3121 static int
compare_case_labels(const void * p1,const void * p2)3122 compare_case_labels (const void *p1, const void *p2)
3123 {
3124   const_tree const case1 = *(const_tree const*)p1;
3125   const_tree const case2 = *(const_tree const*)p2;
3126 
3127   /* The 'default' case label always goes first.  */
3128   if (!CASE_LOW (case1))
3129     return -1;
3130   else if (!CASE_LOW (case2))
3131     return 1;
3132   else
3133     return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
3134 }
3135 
3136 /* Sort the case labels in LABEL_VEC in place in ascending order.  */
3137 
3138 void
sort_case_labels(vec<tree> & label_vec)3139 sort_case_labels (vec<tree> &label_vec)
3140 {
3141   label_vec.qsort (compare_case_labels);
3142 }
3143 
3144 /* Prepare a vector of case labels to be used in a GIMPLE_SWITCH statement.
3145 
3146    LABELS is a vector that contains all case labels to look at.
3147 
3148    INDEX_TYPE is the type of the switch index expression.  Case labels
3149    in LABELS are discarded if their values are not in the value range
3150    covered by INDEX_TYPE.  The remaining case label values are folded
3151    to INDEX_TYPE.
3152 
3153    If a default case exists in LABELS, it is removed from LABELS and
3154    returned in DEFAULT_CASEP.  If no default case exists, but the
3155    case labels already cover the whole range of INDEX_TYPE, a default
3156    case is returned pointing to one of the existing case labels.
3157    Otherwise DEFAULT_CASEP is set to NULL_TREE.
3158 
3159    DEFAULT_CASEP may be NULL, in which case the above comment doesn't
3160    apply and no action is taken regardless of whether a default case is
3161    found or not.  */
3162 
void
preprocess_case_label_vec_for_gimple (vec<tree> &labels,
				      tree index_type,
				      tree *default_casep)
{
  tree min_value, max_value;
  tree default_case = NULL_TREE;
  size_t i, len;

  /* First pass: drop unreachable labels, canonicalize ranges, and
     pull out the default label (when DEFAULT_CASEP is non-NULL).  */
  i = 0;
  min_value = TYPE_MIN_VALUE (index_type);
  max_value = TYPE_MAX_VALUE (index_type);
  while (i < labels.length ())
    {
      tree elt = labels[i];
      tree low = CASE_LOW (elt);
      tree high = CASE_HIGH (elt);
      bool remove_element = FALSE;

      if (low)
	{
	  gcc_checking_assert (TREE_CODE (low) == INTEGER_CST);
	  gcc_checking_assert (!high || TREE_CODE (high) == INTEGER_CST);

	  /* This is a non-default case label, i.e. it has a value.

	     See if the case label is reachable within the range of
	     the index type.  Remove out-of-range case values.  Turn
	     case ranges into a canonical form (high > low strictly)
	     and convert the case label values to the index type.

	     NB: The type of gimple_switch_index() may be the promoted
	     type, but the case labels retain the original type.  */

	  if (high)
	    {
	      /* This is a case range.  Discard empty ranges.
		 If the bounds or the range are equal, turn this
		 into a simple (one-value) case.  */
	      int cmp = tree_int_cst_compare (high, low);
	      if (cmp < 0)
		remove_element = TRUE;
	      else if (cmp == 0)
		high = NULL_TREE;
	    }

	  if (! high)
	    {
	      /* If the simple case value is unreachable, ignore it.  */
	      if ((TREE_CODE (min_value) == INTEGER_CST
		   && tree_int_cst_compare (low, min_value) < 0)
		  || (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (low, max_value) > 0))
		remove_element = TRUE;
	      else
		low = fold_convert (index_type, low);
	    }
	  else
	    {
	      /* If the entire case range is unreachable, ignore it.  */
	      if ((TREE_CODE (min_value) == INTEGER_CST
		   && tree_int_cst_compare (high, min_value) < 0)
		  || (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (low, max_value) > 0))
		remove_element = TRUE;
	      else
		{
		  /* If the lower bound is less than the index type's
		     minimum value, truncate the range bounds.  */
		  if (TREE_CODE (min_value) == INTEGER_CST
		      && tree_int_cst_compare (low, min_value) < 0)
		    low = min_value;
		  low = fold_convert (index_type, low);

		  /* If the upper bound is greater than the index type's
		     maximum value, truncate the range bounds.  */
		  if (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (high, max_value) > 0)
		    high = max_value;
		  high = fold_convert (index_type, high);

		  /* We may have folded a case range to a one-value case.  */
		  if (tree_int_cst_equal (low, high))
		    high = NULL_TREE;
		}
	    }

	  CASE_LOW (elt) = low;
	  CASE_HIGH (elt) = high;
	}
      else
	{
	  /* No CASE_LOW means this is the default label; there can
	     only be one.  */
	  gcc_assert (!default_case);
	  default_case = elt;
	  /* The default case must be passed separately to the
	     gimple_build_switch routine.  But if DEFAULT_CASEP
	     is NULL, we do not remove the default case (it would
	     be completely lost).  */
	  if (default_casep)
	    remove_element = TRUE;
	}

      if (remove_element)
	labels.ordered_remove (i);
      else
	i++;
    }
  /* LEN is the number of labels kept after filtering.  */
  len = i;

  if (!labels.is_empty ())
    sort_case_labels (labels);

  if (default_casep && !default_case)
    {
      /* If the switch has no default label, add one, so that we jump
	 around the switch body.  If the labels already cover the whole
	 range of the switch index_type, add the default label pointing
	 to one of the existing labels.  */
      if (len
	  && TYPE_MIN_VALUE (index_type)
	  && TYPE_MAX_VALUE (index_type)
	  && tree_int_cst_equal (CASE_LOW (labels[0]),
				 TYPE_MIN_VALUE (index_type)))
	{
	  tree low, high = CASE_HIGH (labels[len - 1]);
	  if (!high)
	    high = CASE_LOW (labels[len - 1]);
	  if (tree_int_cst_equal (high, TYPE_MAX_VALUE (index_type)))
	    {
	      /* Scan adjacent labels for gaps; bail out of the loop at
		 the first gap.  Track the label covering the widest
		 range as we go.  */
	      tree widest_label = labels[0];
	      for (i = 1; i < len; i++)
		{
		  high = CASE_LOW (labels[i]);
		  low = CASE_HIGH (labels[i - 1]);
		  if (!low)
		    low = CASE_LOW (labels[i - 1]);

		  if (CASE_HIGH (labels[i]) != NULL_TREE
		      && (CASE_HIGH (widest_label) == NULL_TREE
			  || (wi::gtu_p
			      (wi::to_wide (CASE_HIGH (labels[i]))
			       - wi::to_wide (CASE_LOW (labels[i])),
			       wi::to_wide (CASE_HIGH (widest_label))
			       - wi::to_wide (CASE_LOW (widest_label))))))
		    widest_label = labels[i];

		  /* A gap between consecutive labels: coverage is not
		     complete, so no default can be synthesized.  */
		  if (wi::to_wide (low) + 1 != wi::to_wide (high))
		    break;
		}
	      if (i == len)
		{
		  /* Designate the label with the widest range to be the
		     default label.  */
		  tree label = CASE_LABEL (widest_label);
		  default_case = build_case_label (NULL_TREE, NULL_TREE,
						   label);
		}
	    }
	}
    }

  if (default_casep)
    *default_casep = default_case;
}
3327 
3328 /* Set the location of all statements in SEQ to LOC.  */
3329 
3330 void
gimple_seq_set_location(gimple_seq seq,location_t loc)3331 gimple_seq_set_location (gimple_seq seq, location_t loc)
3332 {
3333   for (gimple_stmt_iterator i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
3334     gimple_set_location (gsi_stmt (i), loc);
3335 }
3336 
3337 /* Release SSA_NAMEs in SEQ as well as the GIMPLE statements.  */
3338 
3339 void
gimple_seq_discard(gimple_seq seq)3340 gimple_seq_discard (gimple_seq seq)
3341 {
3342   gimple_stmt_iterator gsi;
3343 
3344   for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
3345     {
3346       gimple *stmt = gsi_stmt (gsi);
3347       gsi_remove (&gsi, true);
3348       release_defs (stmt);
3349       ggc_free (stmt);
3350     }
3351 }
3352 
3353 /* See if STMT now calls function that takes no parameters and if so, drop
3354    call arguments.  This is used when devirtualization machinery redirects
3355    to __builtin_unreachable or __cxa_pure_virtual.  */
3356 
void
maybe_remove_unused_call_args (struct function *fn, gimple *stmt)
{
  tree decl = gimple_call_fndecl (stmt);
  /* A prototype whose first TYPE_ARG_TYPES entry is void_type_node
     declares a function taking no parameters; if the call still
     carries arguments, drop them.  */
  if (TYPE_ARG_TYPES (TREE_TYPE (decl))
      && TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl))) == void_type_node
      && gimple_call_num_args (stmt))
    {
      /* Truncating the operand vector to 3 keeps only the non-argument
	 call operands, removing every argument.  */
      gimple_set_num_ops (stmt, 3);
      update_stmt_fn (fn, stmt);
    }
}
3369 
3370 /* Return false if STMT will likely expand to real function call.  */
3371 
3372 bool
gimple_inexpensive_call_p(gcall * stmt)3373 gimple_inexpensive_call_p (gcall *stmt)
3374 {
3375   if (gimple_call_internal_p (stmt))
3376     return true;
3377   tree decl = gimple_call_fndecl (stmt);
3378   if (decl && is_inexpensive_builtin (decl))
3379     return true;
3380   return false;
3381 }
3382 
3383 /* Return a non-artificial location for STMT.  If STMT does not have
3384    location information, get the location from EXPR.  */
3385 
3386 location_t
gimple_or_expr_nonartificial_location(gimple * stmt,tree expr)3387 gimple_or_expr_nonartificial_location (gimple *stmt, tree expr)
3388 {
3389   location_t loc = gimple_nonartificial_location (stmt);
3390   if (loc == UNKNOWN_LOCATION && EXPR_HAS_LOCATION (expr))
3391     loc = tree_nonartificial_location (expr);
3392   return expansion_point_location_if_in_system_header (loc);
3393 }
3394 
3395 
3396 #if CHECKING_P
3397 
3398 namespace selftest {
3399 
3400 /* Selftests for core gimple structures.  */
3401 
3402 /* Verify that STMT is pretty-printed as EXPECTED.
3403    Helper function for selftests.  */
3404 
3405 static void
verify_gimple_pp(const char * expected,gimple * stmt)3406 verify_gimple_pp (const char *expected, gimple *stmt)
3407 {
3408   pretty_printer pp;
3409   pp_gimple_stmt_1 (&pp, stmt, 0 /* spc */, TDF_NONE /* flags */);
3410   ASSERT_STREQ (expected, pp_formatted_text (&pp));
3411 }
3412 
3413 /* Build a GIMPLE_ASSIGN equivalent to
3414      tmp = 5;
3415    and verify various properties of it.  */
3416 
3417 static void
test_assign_single()3418 test_assign_single ()
3419 {
3420   tree type = integer_type_node;
3421   tree lhs = build_decl (UNKNOWN_LOCATION, VAR_DECL,
3422 			 get_identifier ("tmp"),
3423 			 type);
3424   tree rhs = build_int_cst (type, 5);
3425   gassign *stmt = gimple_build_assign (lhs, rhs);
3426   verify_gimple_pp ("tmp = 5;", stmt);
3427 
3428   ASSERT_TRUE (is_gimple_assign (stmt));
3429   ASSERT_EQ (lhs, gimple_assign_lhs (stmt));
3430   ASSERT_EQ (lhs, gimple_get_lhs (stmt));
3431   ASSERT_EQ (rhs, gimple_assign_rhs1 (stmt));
3432   ASSERT_EQ (NULL, gimple_assign_rhs2 (stmt));
3433   ASSERT_EQ (NULL, gimple_assign_rhs3 (stmt));
3434   ASSERT_TRUE (gimple_assign_single_p (stmt));
3435   ASSERT_EQ (INTEGER_CST, gimple_assign_rhs_code (stmt));
3436 }
3437 
3438 /* Build a GIMPLE_ASSIGN equivalent to
3439      tmp = a * b;
3440    and verify various properties of it.  */
3441 
3442 static void
test_assign_binop()3443 test_assign_binop ()
3444 {
3445   tree type = integer_type_node;
3446   tree lhs = build_decl (UNKNOWN_LOCATION, VAR_DECL,
3447 			 get_identifier ("tmp"),
3448 			 type);
3449   tree a = build_decl (UNKNOWN_LOCATION, VAR_DECL,
3450 		       get_identifier ("a"),
3451 		       type);
3452   tree b = build_decl (UNKNOWN_LOCATION, VAR_DECL,
3453 		       get_identifier ("b"),
3454 		       type);
3455   gassign *stmt = gimple_build_assign (lhs, MULT_EXPR, a, b);
3456   verify_gimple_pp ("tmp = a * b;", stmt);
3457 
3458   ASSERT_TRUE (is_gimple_assign (stmt));
3459   ASSERT_EQ (lhs, gimple_assign_lhs (stmt));
3460   ASSERT_EQ (lhs, gimple_get_lhs (stmt));
3461   ASSERT_EQ (a, gimple_assign_rhs1 (stmt));
3462   ASSERT_EQ (b, gimple_assign_rhs2 (stmt));
3463   ASSERT_EQ (NULL, gimple_assign_rhs3 (stmt));
3464   ASSERT_FALSE (gimple_assign_single_p (stmt));
3465   ASSERT_EQ (MULT_EXPR, gimple_assign_rhs_code (stmt));
3466 }
3467 
3468 /* Build a GIMPLE_NOP and verify various properties of it.  */
3469 
3470 static void
test_nop_stmt()3471 test_nop_stmt ()
3472 {
3473   gimple *stmt = gimple_build_nop ();
3474   verify_gimple_pp ("GIMPLE_NOP", stmt);
3475   ASSERT_EQ (GIMPLE_NOP, gimple_code (stmt));
3476   ASSERT_EQ (NULL, gimple_get_lhs (stmt));
3477   ASSERT_FALSE (gimple_assign_single_p (stmt));
3478 }
3479 
3480 /* Build a GIMPLE_RETURN equivalent to
3481      return 7;
3482    and verify various properties of it.  */
3483 
3484 static void
test_return_stmt()3485 test_return_stmt ()
3486 {
3487   tree type = integer_type_node;
3488   tree val = build_int_cst (type, 7);
3489   greturn *stmt = gimple_build_return (val);
3490   verify_gimple_pp ("return 7;", stmt);
3491 
3492   ASSERT_EQ (GIMPLE_RETURN, gimple_code (stmt));
3493   ASSERT_EQ (NULL, gimple_get_lhs (stmt));
3494   ASSERT_EQ (val, gimple_return_retval (stmt));
3495   ASSERT_FALSE (gimple_assign_single_p (stmt));
3496 }
3497 
3498 /* Build a GIMPLE_RETURN equivalent to
3499      return;
3500    and verify various properties of it.  */
3501 
3502 static void
test_return_without_value()3503 test_return_without_value ()
3504 {
3505   greturn *stmt = gimple_build_return (NULL);
3506   verify_gimple_pp ("return;", stmt);
3507 
3508   ASSERT_EQ (GIMPLE_RETURN, gimple_code (stmt));
3509   ASSERT_EQ (NULL, gimple_get_lhs (stmt));
3510   ASSERT_EQ (NULL, gimple_return_retval (stmt));
3511   ASSERT_FALSE (gimple_assign_single_p (stmt));
3512 }
3513 
3514 /* Run all of the selftests within this file.  */
3515 
void
gimple_cc_tests ()
{
  /* Exercise construction and accessors of assignments, nops and
     return statements in turn.  */
  test_assign_single ();
  test_assign_binop ();
  test_nop_stmt ();
  test_return_stmt ();
  test_return_without_value ();
}
3525 
3526 } // namespace selftest
3527 
3528 
3529 #endif /* CHECKING_P */
3530