/* Gimple IR support functions.

   Copyright 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "target.h"
#include "tree.h"
#include "ggc.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "gimple.h"
#include "toplev.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "value-prof.h"
#include "flags.h"
#include "alias.h"
#include "demangle.h"

/* Global type table.  FIXME lto, it should be possible to re-use some
   of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
   etc), but those assume that types were built with the various
   build_*_type routines which is not the case with the streamer.  */
static htab_t gimple_types;
static struct pointer_map_t *type_hash_cache;

/* Global type comparison cache.  */
static htab_t gtc_visited;
static struct obstack gtc_ob;

/* All the tuples have their operand vector (if present) at the very bottom
   of the structure.  Therefore, the offset required to find the
   operands vector is the size of the structure minus the size of the
   one-element tree array at the end (see gimple_ops).  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
	(HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT
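
/* Illustrative sketch (not part of the original source): for a tuple of
   structure GSS_WITH_OPS, gimple_ops effectively locates the operand
   vector as

     tree *ops = (tree *) ((char *) stmt + gimple_ops_offset_[GSS_WITH_OPS]);

   i.e. the vector starts where the trailing one-element operand array of
   the structure begins.  */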

#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof(struct STRUCT),
static const size_t gsstruct_code_size[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSCODE(SYM, NAME, GSSCODE)	NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#define DEFGSCODE(SYM, NAME, GSSCODE)	GSSCODE,
EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#ifdef GATHER_STATISTICS
/* Gimple stats.  */

int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "sequences",
    "everything else"
};

#endif /* GATHER_STATISTICS */

/* A cache of gimple_seq objects.  Sequences are created and destroyed
   fairly often during gimplification.  */
static GTY ((deletable)) struct gimple_seq_d *gimple_seq_cache;

/* Private API manipulation functions shared only with some
   other files.  */
extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);

/* Gimple tuple constructors.
   Note: Any constructor taking a ``gimple_seq'' as a parameter can
   be passed NULL to start with an empty sequence.  */

/* Set the code for statement G to CODE.  */

static inline void
gimple_set_code (gimple g, enum gimple_code code)
{
  g->gsbase.code = code;
}

/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  */

static inline size_t
gimple_size (enum gimple_code code)
{
  return gsstruct_code_size[gss_for_code (code)];
}

/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
   operands.  */

gimple
gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
  size_t size;
  gimple stmt;

  size = gimple_size (code);
  if (num_ops > 0)
    size += sizeof (tree) * (num_ops - 1);

#ifdef GATHER_STATISTICS
  {
    enum gimple_alloc_kind kind = gimple_alloc_kind (code);
    gimple_alloc_counts[(int) kind]++;
    gimple_alloc_sizes[(int) kind] += size;
  }
#endif

  stmt = (gimple) ggc_alloc_cleared_stat (size PASS_MEM_STAT);
  gimple_set_code (stmt, code);
  gimple_set_num_ops (stmt, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  stmt->gsbase.modified = 1;

  return stmt;
}
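
/* A sketch of the size computation above (illustrative, and assuming
   the structure for the code ends in a one-element operand array):
   a tuple with NUM_OPS == 3 occupies

     gimple_size (code) + (3 - 1) * sizeof (tree)

   bytes, the trailing operands being allocated in-line after the fixed
   part of the tuple.  */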

/* Set SUBCODE to be the code of the expression computed by statement G.  */

static inline void
gimple_set_subcode (gimple g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->gsbase.subcode = subcode;
}



/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the sub-code
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

static gimple
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
		            unsigned num_ops MEM_STAT_DECL)
{
  gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}


/* Build a GIMPLE_RETURN statement returning RETVAL.  */

gimple
gimple_build_return (tree retval)
{
  gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
  if (retval)
    gimple_return_set_retval (s, retval);
  return s;
}

/* Helper for gimple_build_call, gimple_build_call_vec and
   gimple_build_call_from_tree.  Build the basic components of a
   GIMPLE_CALL statement to function FN with NARGS arguments.  */

static inline gimple
gimple_build_call_1 (tree fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  if (TREE_CODE (fn) == FUNCTION_DECL)
    fn = build_fold_addr_expr (fn);
  gimple_set_op (s, 1, fn);
  return s;
}


/* Build a GIMPLE_CALL statement to function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
{
  unsigned i;
  unsigned nargs = VEC_length (tree, args);
  gimple call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}


/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  The ... are the arguments.  */

gimple
gimple_build_call (tree fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}
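
/* Example sketch (not part of the original file; FNDECL, A, B and RES
   are hypothetical trees valid as GIMPLE operands):

     gimple call = gimple_build_call (fndecl, 2, a, b);
     gimple_call_set_lhs (call, res);

   builds the GIMPLE equivalent of "res = fndecl (a, b)".  */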


/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  */

gimple
gimple_build_call_from_tree (tree t)
{
  unsigned i, nargs;
  gimple call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_cannot_inline (call, CALL_CANNOT_INLINE_P (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_call_set_nothrow (call, TREE_NOTHROW (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));

  return call;
}


/* Extract the operands and code for expression EXPR into *SUBCODE_P,
   *OP1_P and *OP2_P respectively.  */

void
extract_ops_from_tree (tree expr, enum tree_code *subcode_p, tree *op1_p,
		       tree *op2_p)
{
  enum gimple_rhs_class grhs_class;

  *subcode_p = TREE_CODE (expr);
  grhs_class = get_gimple_rhs_class (*subcode_p);

  if (grhs_class == GIMPLE_BINARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
    }
  else if (grhs_class == GIMPLE_UNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      *op1_p = expr;
      *op2_p = NULL_TREE;
    }
  else
    gcc_unreachable ();
}
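
/* For instance (an illustrative note, not from the original source):
   for EXPR = "a + b", TREE_CODE is PLUS_EXPR, a GIMPLE_BINARY_RHS, so
   *OP1_P and *OP2_P receive the two operands; for a bare SSA name or
   constant (GIMPLE_SINGLE_RHS), *OP1_P receives the whole expression
   and *OP2_P is NULL_TREE.  */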


/* Build a GIMPLE_ASSIGN statement.

   LHS is the left-hand side of the assignment.
   RHS is the right-hand side; it may be a unary, binary or single
   operand expression.  */

gimple
gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
{
  enum tree_code subcode;
  tree op1, op2;

  extract_ops_from_tree (rhs, &subcode, &op1, &op2);
  return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2
  					    PASS_MEM_STAT);
}


/* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
   OP1 and OP2.  If OP2 is NULL then SUBCODE must be of class
   GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS.  */

gimple
gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
                                   tree op2 MEM_STAT_DECL)
{
  unsigned num_ops;
  gimple p;

  /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
     code).  */
  num_ops = get_gimple_rhs_num_ops (subcode) + 1;

  p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
  			          PASS_MEM_STAT);
  gimple_assign_set_lhs (p, lhs);
  gimple_assign_set_rhs1 (p, op1);
  if (op2)
    {
      gcc_assert (num_ops > 2);
      gimple_assign_set_rhs2 (p, op2);
    }

  return p;
}


/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}
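
/* Example sketch (illustrative; DST and SRC are hypothetical trees):

     gimple_seq seq = NULL;
     gimple stmt = gimplify_assign (dst, src, &seq);

   leaves the gimplified assignment(s) in SEQ, STMT being the final
   statement of the sequence.  */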


/* Build a GIMPLE_COND statement.

   PRED is the condition used to compare LHS and the RHS.
   T_LABEL is the label to jump to if the condition is true.
   F_LABEL is the label to jump to otherwise.  */

gimple
gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
		   tree t_label, tree f_label)
{
  gimple p;

  gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
  p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
  gimple_cond_set_lhs (p, lhs);
  gimple_cond_set_rhs (p, rhs);
  gimple_cond_set_true_label (p, t_label);
  gimple_cond_set_false_label (p, f_label);
  return p;
}
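
/* Example sketch (A, B, T_LAB and F_LAB are hypothetical trees, the
   labels being LABEL_DECLs):

     gimple cond = gimple_build_cond (LT_EXPR, a, b, t_lab, f_lab);

   builds the GIMPLE equivalent of "if (a < b) goto t_lab; else goto
   f_lab;".  */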


/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
                               tree *lhs_p, tree *rhs_p)
{
  location_t loc = EXPR_LOCATION (cond);
  gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
	      || TREE_CODE (cond) == TRUTH_NOT_EXPR
	      || is_gimple_min_invariant (cond)
	      || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = fold_convert_loc (loc, TREE_TYPE (*lhs_p), integer_zero_node);
    }
  /* Canonicalize conditionals of the form 'if (VAL)'.  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = fold_convert_loc (loc, TREE_TYPE (*lhs_p), integer_zero_node);
    }
}


/* Build a GIMPLE_COND statement from the conditional expression tree
   COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */

gimple
gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  return gimple_build_cond (code, lhs, rhs, t_label, f_label);
}

/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
   boolean expression tree COND.  */

void
gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  gimple_cond_set_condition (stmt, code, lhs, rhs);
}

/* Build a GIMPLE_LABEL statement for LABEL.  */

gimple
gimple_build_label (tree label)
{
  gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
  gimple_label_set_label (p, label);
  return p;
}

/* Build a GIMPLE_GOTO statement to label DEST.  */

gimple
gimple_build_goto (tree dest)
{
  gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
  gimple_goto_set_dest (p, dest);
  return p;
}


/* Build a GIMPLE_NOP statement.  */

gimple
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}


/* Build a GIMPLE_BIND statement.
   VARS are the variables in BODY.
   BLOCK is the containing block.  */

gimple
gimple_build_bind (tree vars, gimple_seq body, tree block)
{
  gimple p = gimple_alloc (GIMPLE_BIND, 0);
  gimple_bind_set_vars (p, vars);
  if (body)
    gimple_bind_set_body (p, body);
  if (block)
    gimple_bind_set_block (p, block);
  return p;
}

/* Helper function to set the simple fields of an asm stmt.

   STRING is a pointer to a string that is the asm block's assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   NLABELS is the number of destination labels.  */

static inline gimple
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
                    unsigned nclobbers, unsigned nlabels)
{
  gimple p;
  int size = strlen (string);

  /* ASMs with labels cannot have outputs.  This should have been
     enforced by the front end.  */
  gcc_assert (nlabels == 0 || noutputs == 0);

  p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
			     ninputs + noutputs + nclobbers + nlabels);

  p->gimple_asm.ni = ninputs;
  p->gimple_asm.no = noutputs;
  p->gimple_asm.nc = nclobbers;
  p->gimple_asm.nl = nlabels;
  p->gimple_asm.string = ggc_alloc_string (string, size);

#ifdef GATHER_STATISTICS
  gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
#endif

  return p;
}

/* Build a GIMPLE_ASM statement.

   STRING is the assembly code.
   INPUTS is a vector of the input register parameters.
   OUTPUTS is a vector of the output register parameters.
   CLOBBERS is a vector of the clobbered register parameters.
   LABELS is a vector of destination labels.  */

gimple
gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
                      VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers,
		      VEC(tree,gc)* labels)
{
  gimple p;
  unsigned i;

  p = gimple_build_asm_1 (string,
                          VEC_length (tree, inputs),
                          VEC_length (tree, outputs),
                          VEC_length (tree, clobbers),
			  VEC_length (tree, labels));

  for (i = 0; i < VEC_length (tree, inputs); i++)
    gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));

  for (i = 0; i < VEC_length (tree, outputs); i++)
    gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));

  for (i = 0; i < VEC_length (tree, clobbers); i++)
    gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));

  for (i = 0; i < VEC_length (tree, labels); i++)
    gimple_asm_set_label_op (p, i, VEC_index (tree, labels, i));

  return p;
}

/* Build a GIMPLE_CATCH statement.

  TYPES are the catch types.
  HANDLER is the exception handler.  */

gimple
gimple_build_catch (tree types, gimple_seq handler)
{
  gimple p = gimple_alloc (GIMPLE_CATCH, 0);
  gimple_catch_set_types (p, types);
  if (handler)
    gimple_catch_set_handler (p, handler);

  return p;
}

/* Build a GIMPLE_EH_FILTER statement.

   TYPES are the filter's types.
   FAILURE is the filter's failure action.  */

gimple
gimple_build_eh_filter (tree types, gimple_seq failure)
{
  gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
  gimple_eh_filter_set_types (p, types);
  if (failure)
    gimple_eh_filter_set_failure (p, failure);

  return p;
}

/* Build a GIMPLE_EH_MUST_NOT_THROW statement.  */

gimple
gimple_build_eh_must_not_throw (tree decl)
{
  gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
  gimple_eh_must_not_throw_set_fndecl (p, decl);

  return p;
}

/* Build a GIMPLE_TRY statement.

   EVAL is the expression to evaluate.
   CLEANUP is the cleanup expression.
   KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
   whether this is a try/catch or a try/finally respectively.  */

gimple
gimple_build_try (gimple_seq eval, gimple_seq cleanup,
    		  enum gimple_try_flags kind)
{
  gimple p;

  gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
  p = gimple_alloc (GIMPLE_TRY, 0);
  gimple_set_subcode (p, kind);
  if (eval)
    gimple_try_set_eval (p, eval);
  if (cleanup)
    gimple_try_set_cleanup (p, cleanup);

  return p;
}

/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.

   CLEANUP is the cleanup expression.  */

gimple
gimple_build_wce (gimple_seq cleanup)
{
  gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
  if (cleanup)
    gimple_wce_set_cleanup (p, cleanup);

  return p;
}


/* Build a GIMPLE_RESX statement.  */

gimple
gimple_build_resx (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}


/* The helper for constructing a gimple switch statement.
   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the default.
   DEFAULT_LABEL is the default label for the switch statement.  */

gimple
gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
{
  /* nlabels + 1 default label + 1 index.  */
  gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
				    1 + (default_label != NULL) + nlabels);
  gimple_switch_set_index (p, index);
  if (default_label)
    gimple_switch_set_default_label (p, default_label);
  return p;
}


/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
   ... are the labels excluding the default.  */

gimple
gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
{
  va_list al;
  unsigned i, offset;
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Store the rest of the labels.  */
  va_start (al, default_label);
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, va_arg (al, tree));
  va_end (al);

  return p;
}
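
/* Example sketch (illustrative; INDEX is a hypothetical index tree,
   C1 and C2 are CASE_LABEL_EXPRs and DEF is the default
   CASE_LABEL_EXPR):

     gimple s = gimple_build_switch (2, index, def, c1, c2);

   builds a switch with two non-default cases.  */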


/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   DEFAULT_LABEL is the default label.
   ARGS is a vector of labels excluding the default.  */

gimple
gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
{
  unsigned i, offset, nlabels = VEC_length (tree, args);
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Copy the labels from the vector to the switch statement.  */
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, VEC_index (tree, args, i));

  return p;
}

/* Build a GIMPLE_EH_DISPATCH statement.  */

gimple
gimple_build_eh_dispatch (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}

/* Build a new GIMPLE_DEBUG_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
					 (unsigned)GIMPLE_DEBUG_BIND, 2
					 PASS_MEM_STAT);

  gimple_debug_bind_set_var (p, var);
  gimple_debug_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}


/* Build a GIMPLE_OMP_CRITICAL statement.

   BODY is the sequence of statements for which only one thread can execute.
   NAME is an optional identifier for this critical block.  */

gimple
gimple_build_omp_critical (gimple_seq body, tree name)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
  gimple_omp_critical_set_name (p, name);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_FOR statement.

   BODY is the sequence of statements inside the for loop.
   CLAUSES are any of the OMP loop construct's clauses: private, firstprivate,
   lastprivate, reduction, ordered, schedule, and nowait.
   COLLAPSE is the collapse count.
   PRE_BODY is the sequence of statements that are loop invariant.  */

gimple
gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
		      gimple_seq pre_body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  p->gimple_omp_for.collapse = collapse;
  p->gimple_omp_for.iter = GGC_CNEWVEC (struct gimple_omp_for_iter, collapse);
  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}


/* Build a GIMPLE_OMP_PARALLEL statement.

   BODY is the sequence of statements which are executed in parallel.
   CLAUSES are the OMP parallel construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG are the shared data argument(s).  */

gimple
gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
			   tree data_arg)
{
  gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_parallel_set_clauses (p, clauses);
  gimple_omp_parallel_set_child_fn (p, child_fn);
  gimple_omp_parallel_set_data_arg (p, data_arg);

  return p;
}


/* Build a GIMPLE_OMP_TASK statement.

   BODY is the sequence of statements which are executed by the explicit task.
   CLAUSES are the OMP task construct's clauses.
   CHILD_FN is the function created for the task to execute.
   DATA_ARG are the shared data argument(s).
   COPY_FN is the optional function for firstprivate initialization.
   ARG_SIZE and ARG_ALIGN are the size and alignment of the data block.  */

gimple
gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
		       tree data_arg, tree copy_fn, tree arg_size,
		       tree arg_align)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_task_set_clauses (p, clauses);
  gimple_omp_task_set_child_fn (p, child_fn);
  gimple_omp_task_set_data_arg (p, data_arg);
  gimple_omp_task_set_copy_fn (p, copy_fn);
  gimple_omp_task_set_arg_size (p, arg_size);
  gimple_omp_task_set_arg_align (p, arg_align);

  return p;
}


/* Build a GIMPLE_OMP_SECTION statement for a sections statement.

   BODY is the sequence of statements in the section.  */

gimple
gimple_build_omp_section (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_MASTER statement.

   BODY is the sequence of statements to be executed by just the master.  */

gimple
gimple_build_omp_master (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_CONTINUE statement.

   CONTROL_DEF is the definition of the control variable.
   CONTROL_USE is the use of the control variable.  */

gimple
gimple_build_omp_continue (tree control_def, tree control_use)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
  gimple_omp_continue_set_control_def (p, control_def);
  gimple_omp_continue_set_control_use (p, control_use);
  return p;
}

/* Build a GIMPLE_OMP_ORDERED statement.

   BODY is the sequence of statements inside a loop that will be executed
   in sequence.  */

gimple
gimple_build_omp_ordered (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_RETURN statement.
   WAIT_P is true if this is a non-waiting return.  */

gimple
gimple_build_omp_return (bool wait_p)
{
  gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
  if (wait_p)
    gimple_omp_return_set_nowait (p);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS statement.

   BODY is a sequence of section statements.
   CLAUSES are any of the OMP sections construct's clauses: private,
   firstprivate, lastprivate, reduction, and nowait.  */

gimple
gimple_build_omp_sections (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_sections_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */

gimple
gimple_build_omp_sections_switch (void)
{
  return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
}


/* Build a GIMPLE_OMP_SINGLE statement.

   BODY is the sequence of statements that will be executed once.
   CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
   copyprivate, nowait.  */

gimple
gimple_build_omp_single (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_single_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */

gimple
gimple_build_omp_atomic_load (tree lhs, tree rhs)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
  gimple_omp_atomic_load_set_lhs (p, lhs);
  gimple_omp_atomic_load_set_rhs (p, rhs);
  return p;
}

/* Build a GIMPLE_OMP_ATOMIC_STORE statement.

   VAL is the value we are storing.  */

gimple
gimple_build_omp_atomic_store (tree val)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
  gimple_omp_atomic_store_set_val (p, val);
  return p;
}

/* Build a GIMPLE_PREDICT statement.  PREDICTOR is one of the predictors from
   predict.def, OUTCOME is NOT_TAKEN or TAKEN.  */

gimple
gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
{
  gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
  /* Ensure all the predictors fit into the lower bits of the subcode.  */
  gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
  gimple_predict_set_predictor (p, predictor);
  gimple_predict_set_outcome (p, outcome);
  return p;
}

#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
		     const char *function, enum gimple_code code,
		     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
      		  gimple_code_name[code],
		  tree_code_name[subcode],
		  gimple_code_name[gimple_code (gs)],
		  gs->gsbase.subcode > 0
		    ? tree_code_name[gs->gsbase.subcode]
		    : "",
		  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */


/* Allocate a new GIMPLE sequence in GC memory and return it.  If
   there are free sequences in GIMPLE_SEQ_CACHE return one of those
   instead.  */

gimple_seq
gimple_seq_alloc (void)
{
  gimple_seq seq = gimple_seq_cache;
  if (seq)
    {
      gimple_seq_cache = gimple_seq_cache->next_free;
      gcc_assert (gimple_seq_cache != seq);
      memset (seq, 0, sizeof (*seq));
    }
  else
    {
      seq = (gimple_seq) ggc_alloc_cleared (sizeof (*seq));
#ifdef GATHER_STATISTICS
      gimple_alloc_counts[(int) gimple_alloc_kind_seq]++;
      gimple_alloc_sizes[(int) gimple_alloc_kind_seq] += sizeof (*seq);
#endif
    }

  return seq;
}

/* Return SEQ to the free pool of GIMPLE sequences.  */

void
gimple_seq_free (gimple_seq seq)
{
  if (seq == NULL)
    return;

  gcc_assert (gimple_seq_first (seq) == NULL);
  gcc_assert (gimple_seq_last (seq) == NULL);

  /* If this triggers, it's a sign that the same list is being freed
     twice.  */
  gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL);

  /* Add SEQ to the pool of free sequences.  */
  seq->next_free = gimple_seq_cache;
  gimple_seq_cache = seq;
}


/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
   *SEQ_P is NULL, a new sequence is allocated.  */

void
gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_stmt_iterator si;

  if (gs == NULL)
    return;

  if (*seq_p == NULL)
    *seq_p = gimple_seq_alloc ();

  si = gsi_last (*seq_p);
  gsi_insert_after (&si, gs, GSI_NEW_STMT);
}
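
/* Example sketch (illustrative): append a no-op to a possibly empty
   sequence, letting the helper allocate the sequence on first use:

     gimple_seq seq = NULL;
     gimple_seq_add_stmt (&seq, gimple_build_nop ());

   SEQ is allocated from gimple_seq_alloc the first time around.  */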


/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  */

void
gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  if (*dst_p == NULL)
    *dst_p = gimple_seq_alloc ();

  si = gsi_last (*dst_p);
  gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
}


/* Helper function of empty_body_p.  Return true if STMT is an empty
   statement.  */

static bool
empty_stmt_p (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_NOP)
    return true;
  if (gimple_code (stmt) == GIMPLE_BIND)
    return empty_body_p (gimple_bind_body (stmt));
  return false;
}


/* Return true if BODY contains nothing but empty statements.  */

bool
empty_body_p (gimple_seq body)
{
  gimple_stmt_iterator i;

  if (gimple_seq_empty_p (body))
    return true;
  for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
    if (!empty_stmt_p (gsi_stmt (i))
	&& !is_gimple_debug (gsi_stmt (i)))
      return false;

  return true;
}


/* Perform a deep copy of sequence SRC and return the result.  */

gimple_seq
gimple_seq_copy (gimple_seq src)
{
  gimple_stmt_iterator gsi;
  gimple_seq new_seq = gimple_seq_alloc ();
  gimple stmt;

  for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gimple_copy (gsi_stmt (gsi));
      gimple_seq_add_stmt (&new_seq, stmt);
    }

  return new_seq;
}


/* Walk all the statements in the sequence SEQ calling walk_gimple_stmt
   on each one.  WI is as in walk_gimple_stmt.

   If walk_gimple_stmt returns non-NULL, the walk is stopped, the
   value is stored in WI->CALLBACK_RESULT and the statement that
   produced the value is returned.

   Otherwise, all the statements are walked and NULL returned.  */

gimple
walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
		 walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
      if (ret)
	{
	  /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
	     to hold it.  */
	  gcc_assert (wi);
	  wi->callback_result = ret;
	  return gsi_stmt (gsi);
	}
    }

  if (wi)
    wi->callback_result = NULL_TREE;

  return NULL;
}
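
/* Illustrative callback sketch (not part of the original file): stop
   the walk at the first GIMPLE_CALL in a sequence.  Returning any
   non-NULL tree from the statement callback terminates the walk:

     static tree
     find_first_call (gimple_stmt_iterator *gsi, bool *handled_ops,
		      struct walk_stmt_info *wi ATTRIBUTE_UNUSED)
     {
       if (is_gimple_call (gsi_stmt (*gsi)))
	 {
	   *handled_ops = true;
	   return error_mark_node;
	 }
       return NULL_TREE;
     }

   With this, "walk_gimple_seq (seq, find_first_call, NULL, &wi)"
   returns the first call statement and leaves the returned tree in
   WI.CALLBACK_RESULT.  */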


/* Helper function for walk_gimple_stmt.  Walk operands of a GIMPLE_ASM.  */

static tree
walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
		 struct walk_stmt_info *wi)
{
  tree ret, op;
  unsigned noutputs;
  const char **oconstraints;
  unsigned i, n;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  if (wi)
    wi->is_lhs = true;

  for (i = 0; i < noutputs; i++)
    {
      op = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
	                       &is_inout);
      if (wi)
	wi->val_only = (allows_reg || !allows_mem);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  n = gimple_asm_ninputs (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);
      if (wi)
	{
	  wi->val_only = (allows_reg || !allows_mem);
          /* Although input "m" is not really an LHS, we need an lvalue.  */
	  wi->is_lhs = !wi->val_only;
	}
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  if (wi)
    {
      wi->is_lhs = false;
      wi->val_only = true;
    }

  n = gimple_asm_nlabels (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_label_op (stmt, i);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  return NULL_TREE;
}


/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

	walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */

tree
walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
		struct walk_stmt_info *wi)
{
  struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  A formal temporary LHS may use a
	 COMPONENT_REF RHS.  */
      if (wi)
	wi->val_only = !is_gimple_reg (gimple_assign_lhs (stmt))
                       || !gimple_assign_single_p (stmt);

      for (i = 1; i < gimple_num_ops (stmt); i++)
	{
	  ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      /* Walk the LHS.  If the RHS is appropriate for a memory, we
	 may use a COMPONENT_REF on the LHS.  */
      if (wi)
	{
          /* If the RHS has more than 1 operand, it is not appropriate
             for the memory.  */
	  wi->val_only = !is_gimple_mem_rhs (gimple_assign_rhs1 (stmt))
                         || !gimple_assign_single_p (stmt);
	  wi->is_lhs = true;
	}

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
	return ret;

      if (wi)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;

    case GIMPLE_CALL:
      if (wi)
	wi->is_lhs = false;

      ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
	{
	  ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      if (wi)
	wi->is_lhs = true;

      ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
      if (ret)
	return ret;

      if (wi)
	wi->is_lhs = false;
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_ASM:
      ret = walk_gimple_asm (stmt, callback_op, wi);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
	  	       callback_op, wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
	  	       callback_op, wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_CRITICAL:
      ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	{
	  ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
			   wi, pset);
	}
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_PARALLEL:
      ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;

      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

      /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_PREDICT:
      break;

    default:
      {
	enum gimple_statement_structure_enum gss;
	gss = gimple_statement_structure (stmt);
	if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    {
	      ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
	      if (ret)
		return ret;
	    }
      }
      break;
    }

  return NULL_TREE;
}


/* Walk the current statement in GSI (optionally using traversal state
   stored in WI).  If WI is NULL, no state is kept during traversal.
   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
   that it has handled all the operands of the statement, its return
   value is returned.  Otherwise, the return value from CALLBACK_STMT
   is discarded and its operands are scanned.

   If CALLBACK_STMT is NULL or it didn't handle the operands,
   CALLBACK_OP is called on each operand of the statement via
   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
   operand, the remaining operands are not scanned.  In this case, the
   return value from CALLBACK_OP is returned.

   In any other case, NULL_TREE is returned.  */

tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
		  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple ret;
  tree tree_ret;
  gimple stmt = gsi_stmt (*gsi);

  if (wi)
    wi->gsi = *gsi;

  if (wi && wi->want_locations && gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
	return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
	 a value to return.  */
      gcc_assert (tree_ret == NULL);

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
	return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq (gimple_bind_body (stmt), callback_stmt,
	                     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq (gimple_catch_handler (stmt), callback_stmt,
	                     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq (gimple_eh_filter_failure (stmt), callback_stmt,
		             callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq (gimple_try_eval (stmt), callback_stmt, callback_op,
	                     wi);
      if (ret)
	return wi->callback_result;

      ret = walk_gimple_seq (gimple_try_cleanup (stmt), callback_stmt,
	                     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq (gimple_omp_for_pre_body (stmt), callback_stmt,
		             callback_op, wi);
      if (ret)
	return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      ret = walk_gimple_seq (gimple_omp_body (stmt), callback_stmt, callback_op,
	                     wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq (gimple_wce_cleanup (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    default:
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}


/* Set sequence SEQ to be the GIMPLE body for function FNDECL.  */

void
gimple_set_body (tree fndecl, gimple_seq seq)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  if (fn == NULL)
    {
      /* If FNDECL still does not have a function structure associated
	 with it, then it does not make sense for it to receive a
	 GIMPLE body.  */
      gcc_assert (seq == NULL);
    }
  else
    fn->gimple_body = seq;
}


/* Return the body of GIMPLE statements for function FNDECL.  */

gimple_seq
gimple_body (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return fn ? fn->gimple_body : NULL;
}

/* Return true when FNDECL has a GIMPLE body either in unlowered
   or CFG form.  */
bool
gimple_has_body_p (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return (gimple_body (fndecl) || (fn && fn->cfg));
}

/* Detect flags from a GIMPLE_CALL.  This is just like
   call_expr_flags, but for gimple tuples.  */

int
gimple_call_flags (const_gimple stmt)
{
  int flags;
  tree decl = gimple_call_fndecl (stmt);
  tree t;

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else
    {
      t = TREE_TYPE (gimple_call_fn (stmt));
      if (t && TREE_CODE (t) == POINTER_TYPE)
	flags = flags_from_decl_or_type (TREE_TYPE (t));
      else
	flags = 0;
    }

  if (stmt->gsbase.subcode & GF_CALL_NOTHROW)
    flags |= ECF_NOTHROW;

  return flags;
}


/* Return true if GS is a copy assignment.  */

bool
gimple_assign_copy_p (gimple gs)
{
  return gimple_code (gs) == GIMPLE_ASSIGN
         && get_gimple_rhs_class (gimple_assign_rhs_code (gs))
	    == GIMPLE_SINGLE_RHS
	 && is_gimple_val (gimple_op (gs, 1));
}


/* Return true if GS is an SSA_NAME copy assignment.  */

bool
gimple_assign_ssa_name_copy_p (gimple gs)
{
  return (gimple_code (gs) == GIMPLE_ASSIGN
	  && (get_gimple_rhs_class (gimple_assign_rhs_code (gs))
	      == GIMPLE_SINGLE_RHS)
	  && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
	  && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
}


/* Return true if GS is an assignment with a singleton RHS, i.e.,
   there is no operator associated with the assignment itself.
   Unlike gimple_assign_copy_p, this predicate returns true for
   any RHS operand, including those that perform an operation
   and do not have the semantics of a copy, such as COND_EXPR.  */

bool
gimple_assign_single_p (gimple gs)
{
  return (gimple_code (gs) == GIMPLE_ASSIGN
          && get_gimple_rhs_class (gimple_assign_rhs_code (gs))
	     == GIMPLE_SINGLE_RHS);
}

/* Return true if GS is an assignment with a unary RHS, but the
   operator has no effect on the assigned value.  The logic is adapted
   from STRIP_NOPS.  This predicate is intended to be used in tuplifying
   instances in which STRIP_NOPS was previously applied to the RHS of
   an assignment.

   NOTE: In the use cases that led to the creation of this function
   and of gimple_assign_single_p, it is typical to test for either
   condition and to proceed in the same manner.  In each case, the
   assigned value is represented by the single RHS operand of the
   assignment.  I suspect there may be cases where gimple_assign_copy_p,
   gimple_assign_single_p, or equivalent logic is used where a similar
   treatment of unary NOPs is appropriate.  */

bool
gimple_assign_unary_nop_p (gimple gs)
{
  return (gimple_code (gs) == GIMPLE_ASSIGN
          && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
              || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
          && gimple_assign_rhs1 (gs) != error_mark_node
          && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
              == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
}

/* Set BB to be the basic block holding G.  */

void
gimple_set_bb (gimple stmt, basic_block bb)
{
  stmt->gsbase.bb = bb;

  /* If the statement is a label, add the label to block-to-labels map
     so that we can speed up edge creation for GIMPLE_GOTOs.  */
  if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
    {
      tree t;
      int uid;

      t = gimple_label_label (stmt);
      uid = LABEL_DECL_UID (t);
      if (uid == -1)
	{
	  unsigned old_len = VEC_length (basic_block, label_to_block_map);
	  LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
	  if (old_len <= (unsigned) uid)
	    {
	      unsigned new_len = 3 * uid / 2 + 1;

	      VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
				     new_len);
	    }
	}

      VEC_replace (basic_block, label_to_block_map, uid, bb);
    }
}


/* Modify the RHS of the assignment pointed-to by GSI using the
   operands in the expression tree EXPR.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.

   This function is useful to convert an existing tree expression into
   the flat representation used for the RHS of a GIMPLE assignment.
   It will reallocate memory as needed to expand or shrink the number
   of operand slots needed to represent EXPR.

   NOTE: If you find yourself building a tree and then calling this
   function, you are most certainly doing it the slow way.  It is much
   better to build a new assignment or to use the function
   gimple_assign_set_rhs_with_ops, which does not require an
   expression tree to be built.  */

void
gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
{
  enum tree_code subcode;
  tree op1, op2;

  extract_ops_from_tree (expr, &subcode, &op1, &op2);
  gimple_assign_set_rhs_with_ops (gsi, subcode, op1, op2);
}


/* Set the RHS of assignment statement pointed-to by GSI to CODE with
   operands OP1 and OP2.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.  */

void
gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *gsi, enum tree_code code,
				tree op1, tree op2)
{
  unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
  gimple stmt = gsi_stmt (*gsi);

  /* If the new CODE needs more operands, allocate a new statement.  */
  if (gimple_num_ops (stmt) < new_rhs_ops + 1)
    {
      tree lhs = gimple_assign_lhs (stmt);
      gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
      memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
      gsi_replace (gsi, new_stmt, true);
      stmt = new_stmt;

      /* The LHS needs to be reset as this also changes the SSA name
	 on the LHS.  */
      gimple_assign_set_lhs (stmt, lhs);
    }

  gimple_set_num_ops (stmt, new_rhs_ops + 1);
  gimple_set_subcode (stmt, code);
  gimple_assign_set_rhs1 (stmt, op1);
  if (new_rhs_ops > 1)
    gimple_assign_set_rhs2 (stmt, op2);
}
1903 
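/* Editorial sketch, not part of the original file: rewriting an
   assignment's RHS into a two-operand form.  PLUS_EXPR is only an
   illustrative choice; the iterator is required because, as noted
   above, the statement may be reallocated.  */

static void
example_rewrite_rhs_to_plus (gimple_stmt_iterator *gsi, tree op1, tree op2)
{
  gimple_assign_set_rhs_with_ops (gsi, PLUS_EXPR, op1, op2);
  /* Re-fetch the (possibly reallocated) statement and rescan its
     operands.  */
  update_stmt (gsi_stmt (*gsi));
}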
1904 
1905 /* Return the LHS of a statement that performs an assignment,
1906    either a GIMPLE_ASSIGN or a GIMPLE_CALL.  Returns NULL_TREE
1907    for a call to a function that returns no value, or for a
1908    statement other than an assignment or a call.  */
1909 
1910 tree
1911 gimple_get_lhs (const_gimple stmt)
1912 {
1913   enum gimple_code code = gimple_code (stmt);
1914 
1915   if (code == GIMPLE_ASSIGN)
1916     return gimple_assign_lhs (stmt);
1917   else if (code == GIMPLE_CALL)
1918     return gimple_call_lhs (stmt);
1919   else
1920     return NULL_TREE;
1921 }
1922 
1923 
1924 /* Set the LHS of a statement that performs an assignment,
1925    either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
1926 
1927 void
1928 gimple_set_lhs (gimple stmt, tree lhs)
1929 {
1930   enum gimple_code code = gimple_code (stmt);
1931 
1932   if (code == GIMPLE_ASSIGN)
1933     gimple_assign_set_lhs (stmt, lhs);
1934   else if (code == GIMPLE_CALL)
1935     gimple_call_set_lhs (stmt, lhs);
1936   else
1937     gcc_unreachable();
1938 }
1939 
1940 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
1941    GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
1942    expression with a different value.
1943 
1944    This will update any annotations (say debug bind stmts) referring
1945    to the original LHS, so that they use the RHS instead.  This is
1946    done even if NLHS and LHS are the same, for it is understood that
1947    the RHS will be modified afterwards, and NLHS will not be assigned
1948    an equivalent value.
1949 
1950    Adjusting any non-annotation uses of the LHS, if needed, is a
1951    responsibility of the caller.
1952 
1953    The effect of this call should be pretty much the same as that of
1954    inserting a copy of STMT before STMT, and then removing the
1955    original stmt, at which time gsi_remove() would have updated
1956    annotations, but using this function saves all the inserting,
1957    copying and removing.  */
1958 
1959 void
1960 gimple_replace_lhs (gimple stmt, tree nlhs)
1961 {
1962   if (MAY_HAVE_DEBUG_STMTS)
1963     {
1964       tree lhs = gimple_get_lhs (stmt);
1965 
1966       gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
1967 
1968       insert_debug_temp_for_var_def (NULL, lhs);
1969     }
1970 
1971   gimple_set_lhs (stmt, nlhs);
1972 }
1973 
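/* Editorial sketch, not part of the original file: the calling
   pattern described above -- detach debug uses from the old LHS (an
   SSA name) first, then install the new LHS and the new RHS value.
   The helper name and parameters are hypothetical.  */

static void
example_retarget_assignment (gimple_stmt_iterator *gsi, tree new_lhs,
			     tree new_rhs)
{
  gimple stmt = gsi_stmt (*gsi);

  /* Debug annotations of the old LHS are rewritten in terms of the
     current RHS before the LHS is swapped.  */
  gimple_replace_lhs (stmt, new_lhs);
  /* This may reallocate the statement, hence the iterator.  */
  gimple_assign_set_rhs_from_tree (gsi, new_rhs);
}
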
1974 /* Return a deep copy of statement STMT.  All the operands from STMT
1975    are reallocated and copied using unshare_expr.  The DEF, USE, VDEF
1976    and VUSE operand arrays are set to empty in the new copy.  */
1977 
1978 gimple
1979 gimple_copy (gimple stmt)
1980 {
1981   enum gimple_code code = gimple_code (stmt);
1982   unsigned num_ops = gimple_num_ops (stmt);
1983   gimple copy = gimple_alloc (code, num_ops);
1984   unsigned i;
1985 
1986   /* Shallow copy all the fields from STMT.  */
1987   memcpy (copy, stmt, gimple_size (code));
1988 
1989   /* If STMT has sub-statements, deep-copy them as well.  */
1990   if (gimple_has_substatements (stmt))
1991     {
1992       gimple_seq new_seq;
1993       tree t;
1994 
1995       switch (gimple_code (stmt))
1996 	{
1997 	case GIMPLE_BIND:
1998 	  new_seq = gimple_seq_copy (gimple_bind_body (stmt));
1999 	  gimple_bind_set_body (copy, new_seq);
2000 	  gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
2001 	  gimple_bind_set_block (copy, gimple_bind_block (stmt));
2002 	  break;
2003 
2004 	case GIMPLE_CATCH:
2005 	  new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
2006 	  gimple_catch_set_handler (copy, new_seq);
2007 	  t = unshare_expr (gimple_catch_types (stmt));
2008 	  gimple_catch_set_types (copy, t);
2009 	  break;
2010 
2011 	case GIMPLE_EH_FILTER:
2012 	  new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
2013 	  gimple_eh_filter_set_failure (copy, new_seq);
2014 	  t = unshare_expr (gimple_eh_filter_types (stmt));
2015 	  gimple_eh_filter_set_types (copy, t);
2016 	  break;
2017 
2018 	case GIMPLE_TRY:
2019 	  new_seq = gimple_seq_copy (gimple_try_eval (stmt));
2020 	  gimple_try_set_eval (copy, new_seq);
2021 	  new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
2022 	  gimple_try_set_cleanup (copy, new_seq);
2023 	  break;
2024 
2025 	case GIMPLE_OMP_FOR:
2026 	  new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
2027 	  gimple_omp_for_set_pre_body (copy, new_seq);
2028 	  t = unshare_expr (gimple_omp_for_clauses (stmt));
2029 	  gimple_omp_for_set_clauses (copy, t);
2030 	  copy->gimple_omp_for.iter
2031 	    = GGC_NEWVEC (struct gimple_omp_for_iter,
2032 			  gimple_omp_for_collapse (stmt));
2033 	  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2034 	    {
2035 	      gimple_omp_for_set_cond (copy, i,
2036 				       gimple_omp_for_cond (stmt, i));
2037 	      gimple_omp_for_set_index (copy, i,
2038 					gimple_omp_for_index (stmt, i));
2039 	      t = unshare_expr (gimple_omp_for_initial (stmt, i));
2040 	      gimple_omp_for_set_initial (copy, i, t);
2041 	      t = unshare_expr (gimple_omp_for_final (stmt, i));
2042 	      gimple_omp_for_set_final (copy, i, t);
2043 	      t = unshare_expr (gimple_omp_for_incr (stmt, i));
2044 	      gimple_omp_for_set_incr (copy, i, t);
2045 	    }
2046 	  goto copy_omp_body;
2047 
2048 	case GIMPLE_OMP_PARALLEL:
2049 	  t = unshare_expr (gimple_omp_parallel_clauses (stmt));
2050 	  gimple_omp_parallel_set_clauses (copy, t);
2051 	  t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
2052 	  gimple_omp_parallel_set_child_fn (copy, t);
2053 	  t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
2054 	  gimple_omp_parallel_set_data_arg (copy, t);
2055 	  goto copy_omp_body;
2056 
2057 	case GIMPLE_OMP_TASK:
2058 	  t = unshare_expr (gimple_omp_task_clauses (stmt));
2059 	  gimple_omp_task_set_clauses (copy, t);
2060 	  t = unshare_expr (gimple_omp_task_child_fn (stmt));
2061 	  gimple_omp_task_set_child_fn (copy, t);
2062 	  t = unshare_expr (gimple_omp_task_data_arg (stmt));
2063 	  gimple_omp_task_set_data_arg (copy, t);
2064 	  t = unshare_expr (gimple_omp_task_copy_fn (stmt));
2065 	  gimple_omp_task_set_copy_fn (copy, t);
2066 	  t = unshare_expr (gimple_omp_task_arg_size (stmt));
2067 	  gimple_omp_task_set_arg_size (copy, t);
2068 	  t = unshare_expr (gimple_omp_task_arg_align (stmt));
2069 	  gimple_omp_task_set_arg_align (copy, t);
2070 	  goto copy_omp_body;
2071 
2072 	case GIMPLE_OMP_CRITICAL:
2073 	  t = unshare_expr (gimple_omp_critical_name (stmt));
2074 	  gimple_omp_critical_set_name (copy, t);
2075 	  goto copy_omp_body;
2076 
2077 	case GIMPLE_OMP_SECTIONS:
2078 	  t = unshare_expr (gimple_omp_sections_clauses (stmt));
2079 	  gimple_omp_sections_set_clauses (copy, t);
2080 	  t = unshare_expr (gimple_omp_sections_control (stmt));
2081 	  gimple_omp_sections_set_control (copy, t);
2082 	  /* FALLTHRU  */
2083 
2084 	case GIMPLE_OMP_SINGLE:
2085 	case GIMPLE_OMP_SECTION:
2086 	case GIMPLE_OMP_MASTER:
2087 	case GIMPLE_OMP_ORDERED:
2088 	copy_omp_body:
2089 	  new_seq = gimple_seq_copy (gimple_omp_body (stmt));
2090 	  gimple_omp_set_body (copy, new_seq);
2091 	  break;
2092 
2093 	case GIMPLE_WITH_CLEANUP_EXPR:
2094 	  new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
2095 	  gimple_wce_set_cleanup (copy, new_seq);
2096 	  break;
2097 
2098 	default:
2099 	  gcc_unreachable ();
2100 	}
2101     }
2102 
2103   /* Make a copy of the operands.  */
2104   if (num_ops > 0)
2105     {
2106       for (i = 0; i < num_ops; i++)
2107 	gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
2108 
2109       /* Clear out SSA operand vectors on COPY.  */
2110       if (gimple_has_ops (stmt))
2111 	{
2112 	  gimple_set_def_ops (copy, NULL);
2113 	  gimple_set_use_ops (copy, NULL);
2114 	}
2115 
2116       if (gimple_has_mem_ops (stmt))
2117 	{
2118 	  gimple_set_vdef (copy, gimple_vdef (stmt));
2119 	  gimple_set_vuse (copy, gimple_vuse (stmt));
2120 	}
2121 
2122       /* SSA operands need to be updated.  */
2123       gimple_set_modified (copy, true);
2124     }
2125 
2126   return copy;
2127 }
2128 
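/* Editorial sketch, not part of the original file: duplicating a
   statement in place.  Since gimple_copy clears the DEF/USE vectors
   and marks the copy modified, update_stmt must rescan the copy; if
   the statement defines an SSA name, the caller must also give the
   copy a fresh LHS to avoid a duplicate definition.  */

static void
example_duplicate_before (gimple_stmt_iterator *gsi)
{
  gimple copy = gimple_copy (gsi_stmt (*gsi));
  gsi_insert_before (gsi, copy, GSI_SAME_STMT);
  update_stmt (copy);
}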
2129 
2130 /* Set the MODIFIED flag to MODIFIEDP, iff the gimple statement S has
2131    a MODIFIED field.  */
2132 
2133 void
2134 gimple_set_modified (gimple s, bool modifiedp)
2135 {
2136   if (gimple_has_ops (s))
2137     {
2138       s->gsbase.modified = (unsigned) modifiedp;
2139 
2140       if (modifiedp
2141 	  && cfun->gimple_df
2142 	  && is_gimple_call (s)
2143 	  && gimple_call_noreturn_p (s))
2144 	VEC_safe_push (gimple, gc, MODIFIED_NORETURN_CALLS (cfun), s);
2145     }
2146 }
2147 
2148 
2149 /* Return true if statement S has side-effects.  We consider a
2150    statement to have side effects if:
2151 
2152    - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
2153    - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS.  */
2154 
2155 bool
2156 gimple_has_side_effects (const_gimple s)
2157 {
2158   unsigned i;
2159 
2160   if (is_gimple_debug (s))
2161     return false;
2162 
2163   /* We don't have to scan the arguments to check for
2164      volatile arguments; at present, though, we still
2165      scan them to check for TREE_SIDE_EFFECTS.  */
2166   if (gimple_has_volatile_ops (s))
2167     return true;
2168 
2169   if (is_gimple_call (s))
2170     {
2171       unsigned nargs = gimple_call_num_args (s);
2172 
2173       if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
2174         return true;
2175       else if (gimple_call_flags (s) & ECF_LOOPING_CONST_OR_PURE)
2176 	/* An infinite loop is considered a side effect.  */
2177 	return true;
2178 
2179       if (gimple_call_lhs (s)
2180           && TREE_SIDE_EFFECTS (gimple_call_lhs (s)))
2181 	{
2182 	  gcc_assert (gimple_has_volatile_ops (s));
2183 	  return true;
2184 	}
2185 
2186       if (TREE_SIDE_EFFECTS (gimple_call_fn (s)))
2187         return true;
2188 
2189       for (i = 0; i < nargs; i++)
2190         if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i)))
2191 	  {
2192 	    gcc_assert (gimple_has_volatile_ops (s));
2193 	    return true;
2194 	  }
2195 
2196       return false;
2197     }
2198   else
2199     {
2200       for (i = 0; i < gimple_num_ops (s); i++)
2201 	if (TREE_SIDE_EFFECTS (gimple_op (s, i)))
2202 	  {
2203 	    gcc_assert (gimple_has_volatile_ops (s));
2204 	    return true;
2205 	  }
2206     }
2207 
2208   return false;
2209 }
2210 
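/* Editorial sketch, not part of the original file: a typical
   DCE-style client of the predicate above -- a statement that has no
   side effects, defines nothing, and cannot trap is a candidate for
   removal.  gimple_could_trap_p is defined later in this file.  */

static bool
example_is_removable (gimple s)
{
  return (!gimple_has_side_effects (s)
	  && gimple_get_lhs (s) == NULL_TREE
	  && !gimple_could_trap_p (s));
}
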
2211 /* Return true if the RHS of statement S has side effects.
2212    We may use it to determine if it is admissible to replace
2213    an assignment or call with a copy of a previously-computed
2214    value.  In such cases, side-effects due to the LHS are
2215    preserved.  */
2216 
2217 bool
2218 gimple_rhs_has_side_effects (const_gimple s)
2219 {
2220   unsigned i;
2221 
2222   if (is_gimple_call (s))
2223     {
2224       unsigned nargs = gimple_call_num_args (s);
2225 
2226       if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
2227         return true;
2228 
2229       /* We cannot use gimple_has_volatile_ops here,
2230          because we must ignore a volatile LHS.  */
2231       if (TREE_SIDE_EFFECTS (gimple_call_fn (s))
2232           || TREE_THIS_VOLATILE (gimple_call_fn (s)))
2233 	{
2234 	  gcc_assert (gimple_has_volatile_ops (s));
2235 	  return true;
2236 	}
2237 
2238       for (i = 0; i < nargs; i++)
2239         if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i))
2240             || TREE_THIS_VOLATILE (gimple_call_arg (s, i)))
2241           return true;
2242 
2243       return false;
2244     }
2245   else if (is_gimple_assign (s))
2246     {
2247       /* Skip the first operand, the LHS. */
2248       for (i = 1; i < gimple_num_ops (s); i++)
2249 	if (TREE_SIDE_EFFECTS (gimple_op (s, i))
2250             || TREE_THIS_VOLATILE (gimple_op (s, i)))
2251 	  {
2252 	    gcc_assert (gimple_has_volatile_ops (s));
2253 	    return true;
2254 	  }
2255     }
2256   else if (is_gimple_debug (s))
2257     return false;
2258   else
2259     {
2260       /* For statements without an LHS, examine all arguments.  */
2261       for (i = 0; i < gimple_num_ops (s); i++)
2262 	if (TREE_SIDE_EFFECTS (gimple_op (s, i))
2263             || TREE_THIS_VOLATILE (gimple_op (s, i)))
2264 	  {
2265 	    gcc_assert (gimple_has_volatile_ops (s));
2266 	    return true;
2267 	  }
2268     }
2269 
2270   return false;
2271 }
2272 
2273 
2274 /* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
2275    Return true if S can trap.  If INCLUDE_LHS is true and S is a
2276    GIMPLE_ASSIGN, the LHS of the assignment is also checked.
2277    Otherwise, only the RHS of the assignment is checked.  */
2278 
2279 static bool
2280 gimple_could_trap_p_1 (gimple s, bool include_lhs)
2281 {
2282   unsigned i, start;
2283   tree t, div = NULL_TREE;
2284   enum tree_code op;
2285 
2286   start = (is_gimple_assign (s) && !include_lhs) ? 1 : 0;
2287 
2288   for (i = start; i < gimple_num_ops (s); i++)
2289     if (tree_could_trap_p (gimple_op (s, i)))
2290       return true;
2291 
2292   switch (gimple_code (s))
2293     {
2294     case GIMPLE_ASM:
2295       return gimple_asm_volatile_p (s);
2296 
2297     case GIMPLE_CALL:
2298       t = gimple_call_fndecl (s);
2299       /* Assume that calls to weak functions may trap.  */
2300       if (!t || !DECL_P (t) || DECL_WEAK (t))
2301 	return true;
2302       return false;
2303 
2304     case GIMPLE_ASSIGN:
2305       t = gimple_expr_type (s);
2306       op = gimple_assign_rhs_code (s);
2307       if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
2308 	div = gimple_assign_rhs2 (s);
2309       return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
2310 				      (INTEGRAL_TYPE_P (t)
2311 				       && TYPE_OVERFLOW_TRAPS (t)),
2312 				      div));
2313 
2314     default:
2315       break;
2316     }
2317 
2318   return false;
2319 
2320 }
2321 
2322 
2323 /* Return true if statement S can trap.  */
2324 
2325 bool
2326 gimple_could_trap_p (gimple s)
2327 {
2328   return gimple_could_trap_p_1 (s, true);
2329 }
2330 
2331 
2332 /* Return true if RHS of a GIMPLE_ASSIGN S can trap.  */
2333 
2334 bool
2335 gimple_assign_rhs_could_trap_p (gimple s)
2336 {
2337   gcc_assert (is_gimple_assign (s));
2338   return gimple_could_trap_p_1 (s, false);
2339 }
2340 
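/* Editorial sketch, not part of the original file: the usual client
   of the predicate above is code motion -- an assignment whose RHS
   cannot trap (e.g. no division by a possibly-zero value) may be
   evaluated speculatively.  */

static bool
example_rhs_is_speculatable (gimple assign)
{
  return (is_gimple_assign (assign)
	  && !gimple_assign_rhs_could_trap_p (assign));
}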
2341 
2342 /* Print allocation statistics for the GIMPLE statements generated.  */
2343 
2344 void
2345 dump_gimple_statistics (void)
2346 {
2347 #ifdef GATHER_STATISTICS
2348   int i, total_tuples = 0, total_bytes = 0;
2349 
2350   fprintf (stderr, "\nGIMPLE statements\n");
2351   fprintf (stderr, "Kind                   Stmts      Bytes\n");
2352   fprintf (stderr, "---------------------------------------\n");
2353   for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2354     {
2355       fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
2356 	  gimple_alloc_counts[i], gimple_alloc_sizes[i]);
2357       total_tuples += gimple_alloc_counts[i];
2358       total_bytes += gimple_alloc_sizes[i];
2359     }
2360   fprintf (stderr, "---------------------------------------\n");
2361   fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
2362   fprintf (stderr, "---------------------------------------\n");
2363 #else
2364   fprintf (stderr, "No gimple statistics\n");
2365 #endif
2366 }
2367 
2368 
2369 /* Return the number of operands needed on the RHS of a GIMPLE
2370    assignment for an expression with tree code CODE.  */
2371 
2372 unsigned
2373 get_gimple_rhs_num_ops (enum tree_code code)
2374 {
2375   enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2376 
2377   if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2378     return 1;
2379   else if (rhs_class == GIMPLE_BINARY_RHS)
2380     return 2;
2381   else
2382     gcc_unreachable ();
2383 }
2384 
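/* Editorial examples, not part of the original file: concrete
   instances of the class-to-arity mapping defined by the table
   below.  */

static void
example_rhs_arity (void)
{
  gcc_assert (get_gimple_rhs_num_ops (PLUS_EXPR) == 2);   /* tcc_binary */
  gcc_assert (get_gimple_rhs_num_ops (NEGATE_EXPR) == 1); /* tcc_unary */
  gcc_assert (get_gimple_rhs_num_ops (SSA_NAME) == 1);    /* single RHS */
}
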
2385 #define DEFTREECODE(SYM, STRING, TYPE, NARGS)   			    \
2386   (unsigned char)							    \
2387   ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS				    \
2388    : ((TYPE) == tcc_binary						    \
2389       || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS   		    \
2390    : ((TYPE) == tcc_constant						    \
2391       || (TYPE) == tcc_declaration					    \
2392       || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS			    \
2393    : ((SYM) == TRUTH_AND_EXPR						    \
2394       || (SYM) == TRUTH_OR_EXPR						    \
2395       || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS			    \
2396    : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS				    \
2397    : ((SYM) == COND_EXPR						    \
2398       || (SYM) == CONSTRUCTOR						    \
2399       || (SYM) == OBJ_TYPE_REF						    \
2400       || (SYM) == ASSERT_EXPR						    \
2401       || (SYM) == ADDR_EXPR						    \
2402       || (SYM) == WITH_SIZE_EXPR					    \
2403       || (SYM) == SSA_NAME						    \
2404       || (SYM) == POLYNOMIAL_CHREC					    \
2405       || (SYM) == DOT_PROD_EXPR						    \
2406       || (SYM) == VEC_COND_EXPR						    \
2407       || (SYM) == REALIGN_LOAD_EXPR) ? GIMPLE_SINGLE_RHS		    \
2408    : GIMPLE_INVALID_RHS),
2409 #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
2410 
2411 const unsigned char gimple_rhs_class_table[] = {
2412 #include "all-tree.def"
2413 };
2414 
2415 #undef DEFTREECODE
2416 #undef END_OF_BASE_TREE_CODES
2417 
2418 /* For the definitive definition of GIMPLE, see doc/tree-ssa.texi.  */
2419 
2420 /* Validation of GIMPLE expressions.  */
2421 
2422 /* Return true if OP is an acceptable tree node to be used as a GIMPLE
2423    operand.  */
2424 
2425 bool
2426 is_gimple_operand (const_tree op)
2427 {
2428   return op && get_gimple_rhs_class (TREE_CODE (op)) == GIMPLE_SINGLE_RHS;
2429 }
2430 
2431 /* Returns true iff T is a valid RHS for an assignment to a renamed
2432    user -- or front-end generated artificial -- variable.  */
2433 
2434 bool
2435 is_gimple_reg_rhs (tree t)
2436 {
2437   return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
2438 }
2439 
2440 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
2441    LHS, or for a call argument.  */
2442 
2443 bool
2444 is_gimple_mem_rhs (tree t)
2445 {
2446   /* If we're dealing with a renamable type, either source or dest must be
2447      a renamed variable.  */
2448   if (is_gimple_reg_type (TREE_TYPE (t)))
2449     return is_gimple_val (t);
2450   else
2451     return is_gimple_val (t) || is_gimple_lvalue (t);
2452 }
2453 
2454 /*  Return true if T is a valid LHS for a GIMPLE assignment expression.  */
2455 
2456 bool
2457 is_gimple_lvalue (tree t)
2458 {
2459   return (is_gimple_addressable (t)
2460 	  || TREE_CODE (t) == WITH_SIZE_EXPR
2461 	  /* These are complex lvalues, but don't have addresses, so they
2462 	     go here.  */
2463 	  || TREE_CODE (t) == BIT_FIELD_REF);
2464 }
2465 
2466 /*  Return true if T is a GIMPLE condition.  */
2467 
2468 bool
2469 is_gimple_condexpr (tree t)
2470 {
2471   return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
2472 				&& !tree_could_trap_p (t)
2473 				&& is_gimple_val (TREE_OPERAND (t, 0))
2474 				&& is_gimple_val (TREE_OPERAND (t, 1))));
2475 }
2476 
2477 /*  Return true if T is something whose address can be taken.  */
2478 
2479 bool
2480 is_gimple_addressable (tree t)
2481 {
2482   return (is_gimple_id (t) || handled_component_p (t) || INDIRECT_REF_P (t));
2483 }
2484 
2485 /* Return true if T is a valid gimple constant.  */
2486 
2487 bool
2488 is_gimple_constant (const_tree t)
2489 {
2490   switch (TREE_CODE (t))
2491     {
2492     case INTEGER_CST:
2493     case REAL_CST:
2494     case FIXED_CST:
2495     case STRING_CST:
2496     case COMPLEX_CST:
2497     case VECTOR_CST:
2498       return true;
2499 
2500     /* Vector constant constructors are gimple invariant.  */
2501     case CONSTRUCTOR:
2502       if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2503 	return TREE_CONSTANT (t);
2504       else
2505 	return false;
2506 
2507     default:
2508       return false;
2509     }
2510 }
2511 
2512 /* Return true if T is a gimple address.  */
2513 
2514 bool
2515 is_gimple_address (const_tree t)
2516 {
2517   tree op;
2518 
2519   if (TREE_CODE (t) != ADDR_EXPR)
2520     return false;
2521 
2522   op = TREE_OPERAND (t, 0);
2523   while (handled_component_p (op))
2524     {
2525       if ((TREE_CODE (op) == ARRAY_REF
2526 	   || TREE_CODE (op) == ARRAY_RANGE_REF)
2527 	  && !is_gimple_val (TREE_OPERAND (op, 1)))
2528 	    return false;
2529 
2530       op = TREE_OPERAND (op, 0);
2531     }
2532 
2533   if (CONSTANT_CLASS_P (op) || INDIRECT_REF_P (op))
2534     return true;
2535 
2536   switch (TREE_CODE (op))
2537     {
2538     case PARM_DECL:
2539     case RESULT_DECL:
2540     case LABEL_DECL:
2541     case FUNCTION_DECL:
2542     case VAR_DECL:
2543     case CONST_DECL:
2544       return true;
2545 
2546     default:
2547       return false;
2548     }
2549 }
2550 
2551 /* Strip out all handled components that produce invariant
2552    offsets.  */
2553 
2554 static const_tree
2555 strip_invariant_refs (const_tree op)
2556 {
2557   while (handled_component_p (op))
2558     {
2559       switch (TREE_CODE (op))
2560 	{
2561 	case ARRAY_REF:
2562 	case ARRAY_RANGE_REF:
2563 	  if (!is_gimple_constant (TREE_OPERAND (op, 1))
2564 	      || TREE_OPERAND (op, 2) != NULL_TREE
2565 	      || TREE_OPERAND (op, 3) != NULL_TREE)
2566 	    return NULL;
2567 	  break;
2568 
2569 	case COMPONENT_REF:
2570 	  if (TREE_OPERAND (op, 2) != NULL_TREE)
2571 	    return NULL;
2572 	  break;
2573 
2574 	default:;
2575 	}
2576       op = TREE_OPERAND (op, 0);
2577     }
2578 
2579   return op;
2580 }
2581 
2582 /* Return true if T is a gimple invariant address.  */
2583 
2584 bool
2585 is_gimple_invariant_address (const_tree t)
2586 {
2587   const_tree op;
2588 
2589   if (TREE_CODE (t) != ADDR_EXPR)
2590     return false;
2591 
2592   op = strip_invariant_refs (TREE_OPERAND (t, 0));
2593 
2594   return op && (CONSTANT_CLASS_P (op) || decl_address_invariant_p (op));
2595 }
2596 
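/* Editorial examples, not part of the original file: for a variable
   `v', `&v' and `&v.f[2]' are invariant addresses (every component
   in the path has a constant offset), whereas `&v.f[i]' with a
   variable index is not.  At the IPA level, below, the base must
   additionally not live on the stack.  */
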
2597 /* Return true if T is a gimple invariant address at IPA level
2598    (so addresses of variables on stack are not allowed).  */
2599 
2600 bool
2601 is_gimple_ip_invariant_address (const_tree t)
2602 {
2603   const_tree op;
2604 
2605   if (TREE_CODE (t) != ADDR_EXPR)
2606     return false;
2607 
2608   op = strip_invariant_refs (TREE_OPERAND (t, 0));
2609 
2610   return op && (CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op));
2611 }
2612 
2613 /* Return true if T is a GIMPLE minimal invariant.  It's a restricted
2614    form of function invariant.  */
2615 
2616 bool
2617 is_gimple_min_invariant (const_tree t)
2618 {
2619   if (TREE_CODE (t) == ADDR_EXPR)
2620     return is_gimple_invariant_address (t);
2621 
2622   return is_gimple_constant (t);
2623 }
2624 
2625 /* Return true if T is a GIMPLE interprocedural invariant.  It's a restricted
2626    form of gimple minimal invariant.  */
2627 
2628 bool
2629 is_gimple_ip_invariant (const_tree t)
2630 {
2631   if (TREE_CODE (t) == ADDR_EXPR)
2632     return is_gimple_ip_invariant_address (t);
2633 
2634   return is_gimple_constant (t);
2635 }
2636 
2637 /* Return true if T looks like a valid GIMPLE statement.  */
2638 
2639 bool
2640 is_gimple_stmt (tree t)
2641 {
2642   const enum tree_code code = TREE_CODE (t);
2643 
2644   switch (code)
2645     {
2646     case NOP_EXPR:
2647       /* The only valid NOP_EXPR is the empty statement.  */
2648       return IS_EMPTY_STMT (t);
2649 
2650     case BIND_EXPR:
2651     case COND_EXPR:
2652       /* These are only valid if they're void.  */
2653       return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
2654 
2655     case SWITCH_EXPR:
2656     case GOTO_EXPR:
2657     case RETURN_EXPR:
2658     case LABEL_EXPR:
2659     case CASE_LABEL_EXPR:
2660     case TRY_CATCH_EXPR:
2661     case TRY_FINALLY_EXPR:
2662     case EH_FILTER_EXPR:
2663     case CATCH_EXPR:
2664     case ASM_EXPR:
2665     case STATEMENT_LIST:
2666     case OMP_PARALLEL:
2667     case OMP_FOR:
2668     case OMP_SECTIONS:
2669     case OMP_SECTION:
2670     case OMP_SINGLE:
2671     case OMP_MASTER:
2672     case OMP_ORDERED:
2673     case OMP_CRITICAL:
2674     case OMP_TASK:
2675       /* These are always void.  */
2676       return true;
2677 
2678     case CALL_EXPR:
2679     case MODIFY_EXPR:
2680     case PREDICT_EXPR:
2681       /* These are valid regardless of their type.  */
2682       return true;
2683 
2684     default:
2685       return false;
2686     }
2687 }
2688 
2689 /* Return true if T is a variable.  */
2690 
2691 bool
2692 is_gimple_variable (tree t)
2693 {
2694   return (TREE_CODE (t) == VAR_DECL
2695 	  || TREE_CODE (t) == PARM_DECL
2696 	  || TREE_CODE (t) == RESULT_DECL
2697 	  || TREE_CODE (t) == SSA_NAME);
2698 }
2699 
2700 /*  Return true if T is a GIMPLE identifier (something with an address).  */
2701 
2702 bool
2703 is_gimple_id (tree t)
2704 {
2705   return (is_gimple_variable (t)
2706 	  || TREE_CODE (t) == FUNCTION_DECL
2707 	  || TREE_CODE (t) == LABEL_DECL
2708 	  || TREE_CODE (t) == CONST_DECL
2709 	  /* Allow string constants, since they are addressable.  */
2710 	  || TREE_CODE (t) == STRING_CST);
2711 }
2712 
2713 /* Return true if TYPE is a suitable type for a scalar register variable.  */
2714 
2715 bool
2716 is_gimple_reg_type (tree type)
2717 {
2718   return !AGGREGATE_TYPE_P (type);
2719 }
2720 
2721 /* Return true if T is a non-aggregate register variable.  */
2722 
2723 bool
2724 is_gimple_reg (tree t)
2725 {
2726   if (TREE_CODE (t) == SSA_NAME)
2727     t = SSA_NAME_VAR (t);
2728 
2729   if (!is_gimple_variable (t))
2730     return false;
2731 
2732   if (!is_gimple_reg_type (TREE_TYPE (t)))
2733     return false;
2734 
2735   /* A volatile decl is not acceptable because we can't reuse it as
2736      needed.  We need to copy it into a temp first.  */
2737   if (TREE_THIS_VOLATILE (t))
2738     return false;
2739 
2740   /* We define "registers" as things that can be renamed as needed,
2741      which with our infrastructure does not apply to memory.  */
2742   if (needs_to_live_in_memory (t))
2743     return false;
2744 
2745   /* Hard register variables are an interesting case.  For those that
2746      are call-clobbered, we don't know where all the calls are, since
2747      we don't (want to) take into account which operations will turn
2748      into libcalls at the rtl level.  For those that are call-saved,
2749      we don't currently model the fact that calls may in fact change
2750      global hard registers, nor do we examine ASM_CLOBBERS at the tree
2751      level, and so miss variable changes they might imply.  All around,
2752      it seems safest to not do too much optimization with these at the
2753      tree level at all.  We'll have to rely on the rtl optimizers to
2754      clean this up, as there we've got all the appropriate bits exposed.  */
2755   if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2756     return false;
2757 
2758   /* Complex and vector values must have been put into SSA-like form.
2759      That is, no assignments to the individual components.  */
2760   if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
2761       || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2762     return DECL_GIMPLE_REG_P (t);
2763 
2764   return true;
2765 }
2766 
2767 
2768 /* Return true if T is a GIMPLE variable whose address is not needed.  */
2769 
2770 bool
2771 is_gimple_non_addressable (tree t)
2772 {
2773   if (TREE_CODE (t) == SSA_NAME)
2774     t = SSA_NAME_VAR (t);
2775 
2776   return (is_gimple_variable (t) && ! needs_to_live_in_memory (t));
2777 }
2778 
2779 /* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant.  */
2780 
2781 bool
2782 is_gimple_val (tree t)
2783 {
2784   /* Make loads from volatiles and memory vars explicit.  */
2785   if (is_gimple_variable (t)
2786       && is_gimple_reg_type (TREE_TYPE (t))
2787       && !is_gimple_reg (t))
2788     return false;
2789 
2790   return (is_gimple_variable (t) || is_gimple_min_invariant (t));
2791 }
2792 
2793 /* Similarly, but accept hard registers as inputs to asm statements.  */
2794 
2795 bool
2796 is_gimple_asm_val (tree t)
2797 {
2798   if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2799     return true;
2800 
2801   return is_gimple_val (t);
2802 }
2803 
2804 /* Return true if T is a GIMPLE minimal lvalue.  */
2805 
2806 bool
2807 is_gimple_min_lval (tree t)
2808 {
2809   if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
2810     return false;
2811   return (is_gimple_id (t) || TREE_CODE (t) == INDIRECT_REF);
2812 }
2813 
2814 /* Return true if T is a typecast operation.  */
2815 
2816 bool
2817 is_gimple_cast (tree t)
2818 {
2819   return (CONVERT_EXPR_P (t)
2820           || TREE_CODE (t) == FIX_TRUNC_EXPR);
2821 }
2822 
2823 /* Return true if T is a valid function operand of a CALL_EXPR.  */
2824 
2825 bool
2826 is_gimple_call_addr (tree t)
2827 {
2828   return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
2829 }
2830 
2831 /* If T makes a function call, return the corresponding CALL_EXPR operand.
2832    Otherwise, return NULL_TREE.  */
2833 
2834 tree
2835 get_call_expr_in (tree t)
2836 {
2837   if (TREE_CODE (t) == MODIFY_EXPR)
2838     t = TREE_OPERAND (t, 1);
2839   if (TREE_CODE (t) == WITH_SIZE_EXPR)
2840     t = TREE_OPERAND (t, 0);
2841   if (TREE_CODE (t) == CALL_EXPR)
2842     return t;
2843   return NULL_TREE;
2844 }
2845 
2846 
2847 /* Given a memory reference expression T, return its base address.
2848    The base address of a memory reference expression is the main
2849    object being referenced.  For instance, the base address for
2850    'array[i].fld[j]' is 'array'.  You can think of this as stripping
2851    away the offset part from a memory address.
2852 
2853    This function calls handled_component_p to strip away all the inner
2854    parts of the memory reference until it reaches the base object.  */
2855 
2856 tree
2857 get_base_address (tree t)
2858 {
2859   while (handled_component_p (t))
2860     t = TREE_OPERAND (t, 0);
2861 
2862   if (SSA_VAR_P (t)
2863       || TREE_CODE (t) == STRING_CST
2864       || TREE_CODE (t) == CONSTRUCTOR
2865       || INDIRECT_REF_P (t))
2866     return t;
2867   else
2868     return NULL_TREE;
2869 }
2870 
2871 void
2872 recalculate_side_effects (tree t)
2873 {
2874   enum tree_code code = TREE_CODE (t);
2875   int len = TREE_OPERAND_LENGTH (t);
2876   int i;
2877 
2878   switch (TREE_CODE_CLASS (code))
2879     {
2880     case tcc_expression:
2881       switch (code)
2882 	{
2883 	case INIT_EXPR:
2884 	case MODIFY_EXPR:
2885 	case VA_ARG_EXPR:
2886 	case PREDECREMENT_EXPR:
2887 	case PREINCREMENT_EXPR:
2888 	case POSTDECREMENT_EXPR:
2889 	case POSTINCREMENT_EXPR:
2890 	  /* All of these have side-effects, no matter what their
2891 	     operands are.  */
2892 	  return;
2893 
2894 	default:
2895 	  break;
2896 	}
2897       /* Fall through.  */
2898 
2899     case tcc_comparison:  /* a comparison expression */
2900     case tcc_unary:       /* a unary arithmetic expression */
2901     case tcc_binary:      /* a binary arithmetic expression */
2902     case tcc_reference:   /* a reference */
2903     case tcc_vl_exp:        /* a function call */
2904       TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2905       for (i = 0; i < len; ++i)
2906 	{
2907 	  tree op = TREE_OPERAND (t, i);
2908 	  if (op && TREE_SIDE_EFFECTS (op))
2909 	    TREE_SIDE_EFFECTS (t) = 1;
2910 	}
2911       break;
2912 
2913     case tcc_constant:
2914       /* No side-effects.  */
2915       return;
2916 
2917     default:
2918       gcc_unreachable ();
2919    }
2920 }
2921 
2922 /* Canonicalize a tree T for use in a COND_EXPR as conditional.  Returns
2923    a canonicalized tree that is valid for a COND_EXPR, or NULL_TREE
2924    if we failed to create one.  */
2925 
2926 tree
2927 canonicalize_cond_expr_cond (tree t)
2928 {
2929   /* Strip conversions around boolean operations.  */
2930   if (CONVERT_EXPR_P (t)
2931       && truth_value_p (TREE_CODE (TREE_OPERAND (t, 0))))
2932     t = TREE_OPERAND (t, 0);
2933 
2934   /* For (bool)x use x != 0.  */
2935   if (CONVERT_EXPR_P (t)
2936       && TREE_CODE (TREE_TYPE (t)) == BOOLEAN_TYPE)
2937     {
2938       tree top0 = TREE_OPERAND (t, 0);
2939       t = build2 (NE_EXPR, TREE_TYPE (t),
2940 		  top0, build_int_cst (TREE_TYPE (top0), 0));
2941     }
2942   /* For !x use x == 0.  */
2943   else if (TREE_CODE (t) == TRUTH_NOT_EXPR)
2944     {
2945       tree top0 = TREE_OPERAND (t, 0);
2946       t = build2 (EQ_EXPR, TREE_TYPE (t),
2947 		  top0, build_int_cst (TREE_TYPE (top0), 0));
2948     }
2949   /* For cmp ? 1 : 0 use cmp.  */
2950   else if (TREE_CODE (t) == COND_EXPR
2951 	   && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
2952 	   && integer_onep (TREE_OPERAND (t, 1))
2953 	   && integer_zerop (TREE_OPERAND (t, 2)))
2954     {
2955       tree top0 = TREE_OPERAND (t, 0);
2956       t = build2 (TREE_CODE (top0), TREE_TYPE (t),
2957 		  TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
2958     }
2959 
2960   if (is_gimple_condexpr (t))
2961     return t;
2962 
2963   return NULL_TREE;
2964 }
2965 
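/* Editorial examples, not part of the original file, of the rewrites
   performed above (X an integer, CMP a comparison):
     (bool) X    -> X != 0
     !X          -> X == 0
     CMP ? 1 : 0 -> CMP  */
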
2966 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
2967    the positions marked by the set ARGS_TO_SKIP.  */
2968 
2969 gimple
2970 gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
2971 {
2972   int i;
2973   tree fn = gimple_call_fn (stmt);
2974   int nargs = gimple_call_num_args (stmt);
2975   VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
2976   gimple new_stmt;
2977 
2978   for (i = 0; i < nargs; i++)
2979     if (!bitmap_bit_p (args_to_skip, i))
2980       VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));
2981 
2982   new_stmt = gimple_build_call_vec (fn, vargs);
2983   VEC_free (tree, heap, vargs);
2984   if (gimple_call_lhs (stmt))
2985     gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2986 
2987   gimple_set_vuse (new_stmt, gimple_vuse (stmt));
2988   gimple_set_vdef (new_stmt, gimple_vdef (stmt));
2989 
2990   gimple_set_block (new_stmt, gimple_block (stmt));
2991   if (gimple_has_location (stmt))
2992     gimple_set_location (new_stmt, gimple_location (stmt));
2993   gimple_call_copy_flags (new_stmt, stmt);
2994   gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2995 
2996   gimple_set_modified (new_stmt, true);
2997 
2998   return new_stmt;
2999 }
3000 
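/* Editorial sketch, not part of the original file: dropping the
   first argument of a call using the function above, with the usual
   BITMAP_ALLOC/BITMAP_FREE idiom.  */

static gimple
example_drop_first_arg (gimple call)
{
  bitmap skip = BITMAP_ALLOC (NULL);
  gimple new_call;

  bitmap_set_bit (skip, 0);	/* Skip argument 0.  */
  new_call = gimple_call_copy_skip_args (call, skip);
  BITMAP_FREE (skip);
  return new_call;
}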
3001 
3002 static hashval_t gimple_type_hash (const void *);
3003 
3004 /* Structure used to maintain a cache of some type pairs compared by
3005    gimple_types_compatible_p when comparing aggregate types.  There are
3006    four possible values for SAME_P:
3007 
3008    	-2: The pair (T1, T2) has just been inserted in the table.
3009 	-1: The pair (T1, T2) is currently being compared.
3010 	 0: T1 and T2 are different types.
3011 	 1: T1 and T2 are the same type.
3012 
3013    This table is only used when comparing aggregate types to avoid
3014    infinite recursion due to self-referential types.  */
3015 struct type_pair_d
3016 {
3017   unsigned int uid1;
3018   unsigned int uid2;
3019   int same_p;
3020 };
3021 typedef struct type_pair_d *type_pair_t;
3022 
3023 /* Return a hash value for the type pair pointed-to by P.  */
3024 
3025 static hashval_t
3026 type_pair_hash (const void *p)
3027 {
3028   const struct type_pair_d *pair = (const struct type_pair_d *) p;
3029   hashval_t val1 = pair->uid1;
3030   hashval_t val2 = pair->uid2;
3031   return (iterative_hash_hashval_t (val2, val1)
3032 	  ^ iterative_hash_hashval_t (val1, val2));
3033 }
3034 
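/* Editorial note, not part of the original file: XORing the two
   order-swapped combinations makes the hash symmetric, i.e.
   type_pair_hash gives the same value for (T1, T2) and (T2, T1),
   matching type_pair_eq below, which treats the pair as unordered.  */
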
3035 /* Compare two type pairs pointed-to by P1 and P2.  */
3036 
3037 static int
3038 type_pair_eq (const void *p1, const void *p2)
3039 {
3040   const struct type_pair_d *pair1 = (const struct type_pair_d *) p1;
3041   const struct type_pair_d *pair2 = (const struct type_pair_d *) p2;
3042   return ((pair1->uid1 == pair2->uid1 && pair1->uid2 == pair2->uid2)
3043 	  || (pair1->uid1 == pair2->uid2 && pair1->uid2 == pair2->uid1));
3044 }
3045 
3046 /* Look up the pair of types T1 and T2 in *VISITED_P.  Insert a new
3047    entry if none existed.  */
3048 
3049 static type_pair_t
3050 lookup_type_pair (tree t1, tree t2, htab_t *visited_p, struct obstack *ob_p)
3051 {
3052   struct type_pair_d pair;
3053   type_pair_t p;
3054   void **slot;
3055 
3056   if (*visited_p == NULL)
3057     {
3058       *visited_p = htab_create (251, type_pair_hash, type_pair_eq, NULL);
3059       gcc_obstack_init (ob_p);
3060     }
3061 
3062   pair.uid1 = TYPE_UID (t1);
3063   pair.uid2 = TYPE_UID (t2);
3064   slot = htab_find_slot (*visited_p, &pair, INSERT);
3065 
3066   if (*slot)
3067     p = *((type_pair_t *) slot);
3068   else
3069     {
3070       p = XOBNEW (ob_p, struct type_pair_d);
3071       p->uid1 = TYPE_UID (t1);
3072       p->uid2 = TYPE_UID (t2);
3073       p->same_p = -2;
3074       *slot = (void *) p;
3075     }
3076 
3077   return p;
3078 }
3079 
3080 
3081 /* Return true if T1 and T2 have the same name.  If FOR_COMPLETION_P
3082    is true, treat anonymous types as unique: return false if either
3083    type lacks a name.  Otherwise two anonymous types compare equal.  */
3084 
3085 static bool
3086 compare_type_names_p (tree t1, tree t2, bool for_completion_p)
3087 {
3088   tree name1 = TYPE_NAME (t1);
3089   tree name2 = TYPE_NAME (t2);
3090 
3091   /* Consider anonymous types all unique for completion.  */
3092   if (for_completion_p
3093       && (!name1 || !name2))
3094     return false;
3095 
3096   if (name1 && TREE_CODE (name1) == TYPE_DECL)
3097     {
3098       name1 = DECL_NAME (name1);
3099       if (for_completion_p
3100 	  && !name1)
3101 	return false;
3102     }
3103   gcc_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);
3104 
3105   if (name2 && TREE_CODE (name2) == TYPE_DECL)
3106     {
3107       name2 = DECL_NAME (name2);
3108       if (for_completion_p
3109 	  && !name2)
3110 	return false;
3111     }
3112   gcc_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);
3113 
3114   /* Identifiers can be compared with pointer equality rather
3115      than a string comparison.  */
3116   if (name1 == name2)
3117     return true;
3118 
3119   return false;
3120 }
3121 
3122 /* Return true if the field decls F1 and F2 are at the same offset.  */
3123 
3124 bool
3125 compare_field_offset (tree f1, tree f2)
3126 {
3127   if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
3128     return (operand_equal_p (DECL_FIELD_OFFSET (f1),
3129 			     DECL_FIELD_OFFSET (f2), 0)
3130 	    && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
3131 				   DECL_FIELD_BIT_OFFSET (f2)));
3132 
3133   /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
3134      should be, so handle differing ones specially by decomposing
3135      the offset into a byte and bit offset manually.  */
3136   if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
3137       && host_integerp (DECL_FIELD_OFFSET (f2), 0))
3138     {
3139       unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
3140       unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
3141       bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
3142       byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
3143 		      + bit_offset1 / BITS_PER_UNIT);
3144       bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
3145       byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
3146 		      + bit_offset2 / BITS_PER_UNIT);
3147       if (byte_offset1 != byte_offset2)
3148 	return false;
3149       return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
3150     }
3151 
3152   return false;
3153 }
3154 
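/* Editorial worked example, not part of the original file: with
   BITS_PER_UNIT == 8, a field with DECL_FIELD_OFFSET == 4 and
   DECL_FIELD_BIT_OFFSET == 12 decomposes to byte offset
   4 + 12 / 8 == 5 and residual bit offset 12 % 8 == 4; two fields
   with differing DECL_OFFSET_ALIGN compare equal iff both of these
   quantities match.  */
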
3155 /* Return 1 iff T1 and T2 are structurally identical.
3156    Otherwise, return 0.  */
3157 
3158 static int
3159 gimple_types_compatible_p (tree t1, tree t2)
3160 {
3161   type_pair_t p = NULL;
3162 
3163   /* Check first for the obvious case of pointer identity.  */
3164   if (t1 == t2)
3165     return 1;
3166 
3167   /* Check that we have two types to compare.  */
3168   if (t1 == NULL_TREE || t2 == NULL_TREE)
3169     return 0;
3170 
3171   /* Can't be the same type if the types don't have the same code.  */
3172   if (TREE_CODE (t1) != TREE_CODE (t2))
3173     return 0;
3174 
3175   /* Can't be the same type if they have different CV qualifiers.  */
3176   if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
3177     return 0;
3178 
3179   /* Void types are always the same.  */
3180   if (TREE_CODE (t1) == VOID_TYPE)
3181     return 1;
3182 
3183   /* For numerical types do some simple checks before doing three
3184      hashtable queries.  */
3185   if (INTEGRAL_TYPE_P (t1)
3186       || SCALAR_FLOAT_TYPE_P (t1)
3187       || FIXED_POINT_TYPE_P (t1)
3188       || TREE_CODE (t1) == VECTOR_TYPE
3189       || TREE_CODE (t1) == COMPLEX_TYPE
3190       || TREE_CODE (t1) == OFFSET_TYPE)
3191     {
3192       /* Can't be the same type if they have different alignment,
3193 	 sign, precision or mode.  */
3194       if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3195 	  || TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3196 	  || TYPE_MODE (t1) != TYPE_MODE (t2)
3197 	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3198 	return 0;
3199 
3200       if (TREE_CODE (t1) == INTEGER_TYPE
3201 	  && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
3202 	      || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
3203 	return 0;
3204 
3205       /* That's all we need to check for float and fixed-point types.  */
3206       if (SCALAR_FLOAT_TYPE_P (t1)
3207 	  || FIXED_POINT_TYPE_P (t1))
3208 	return 1;
3209 
3210       /* Perform cheap tail-recursion for vector and complex types.  */
3211       if (TREE_CODE (t1) == VECTOR_TYPE
3212 	  || TREE_CODE (t1) == COMPLEX_TYPE)
3213 	return gimple_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2));
3214 
3215       /* For integral types fall thru to more complex checks.  */
3216     }
3217 
3218   /* If the hash values of t1 and t2 are different the types can't
3219      possibly be the same.  This helps keep the type-pair hashtable
3220      small, tracking comparisons only for hash collisions.  */
3221   if (gimple_type_hash (t1) != gimple_type_hash (t2))
3222     return 0;
3223 
3224   /* If we've visited this type pair before (in the case of aggregates
3225      with self-referential types), and we made a decision, return it.  */
3226   p = lookup_type_pair (t1, t2, &gtc_visited, &gtc_ob);
3227   if (p->same_p == 0 || p->same_p == 1)
3228     {
3229       /* We have already decided whether T1 and T2 are the
3230 	 same, return the cached result.  */
3231       return p->same_p == 1;
3232     }
3233   else if (p->same_p == -1)
3234     {
3235       /* We are currently comparing this pair of types, assume
3236 	 that they are the same and let the caller decide.  */
3237       return 1;
3238     }
3239 
3240   gcc_assert (p->same_p == -2);
3241 
3242   /* Mark the (T1, T2) comparison in progress.  */
3243   p->same_p = -1;
3244 
3245   /* If their attributes are not the same they can't be the same type.  */
3246   if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
3247     goto different_types;
3248 
3249   /* Do type-specific comparisons.  */
3250   switch (TREE_CODE (t1))
3251     {
3252     case ARRAY_TYPE:
3253       /* Array types are the same if the element types are the same and
3254 	 the number of elements is the same.  */
3255       if (!gimple_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
3256 	  || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
3257 	  || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
3258 	goto different_types;
3259       else
3260 	{
3261 	  tree i1 = TYPE_DOMAIN (t1);
3262 	  tree i2 = TYPE_DOMAIN (t2);
3263 
3264 	  /* For an incomplete external array, the type domain can be
3265  	     NULL_TREE.  Check this condition also.  */
3266 	  if (i1 == NULL_TREE && i2 == NULL_TREE)
3267 	    goto same_types;
3268 	  else if (i1 == NULL_TREE || i2 == NULL_TREE)
3269 	    goto different_types;
3270 	  /* If, for a complete array type, the possibly gimplified sizes
3271 	     differ, the types are different.  */
3272 	  else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
3273 		   || (TYPE_SIZE (i1)
3274 		       && TYPE_SIZE (i2)
3275 		       && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
3276 	    goto different_types;
3277 	  else
3278 	    {
3279 	      tree min1 = TYPE_MIN_VALUE (i1);
3280 	      tree min2 = TYPE_MIN_VALUE (i2);
3281 	      tree max1 = TYPE_MAX_VALUE (i1);
3282 	      tree max2 = TYPE_MAX_VALUE (i2);
3283 
3284 	      /* The minimum/maximum values have to be the same.  */
3285 	      if ((min1 == min2
3286 		   || (min1 && min2 && operand_equal_p (min1, min2, 0)))
3287 		  && (max1 == max2
3288 		      || (max1 && max2 && operand_equal_p (max1, max2, 0))))
3289 		goto same_types;
3290 	      else
3291 		goto different_types;
3292 	    }
3293 	}
3294 
3295     case METHOD_TYPE:
3296       /* Method types should belong to the same class.  */
3297       if (!gimple_types_compatible_p (TYPE_METHOD_BASETYPE (t1),
3298 				 TYPE_METHOD_BASETYPE (t2)))
3299 	goto different_types;
3300 
3301       /* Fallthru  */
3302 
3303     case FUNCTION_TYPE:
3304       /* Function types are the same if the return type and argument types
3305 	 are the same.  */
3306       if (!gimple_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
3307 	goto different_types;
3308       else
3309 	{
3310 	  if (!targetm.comp_type_attributes (t1, t2))
3311 	    goto different_types;
3312 
3313 	  if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
3314 	    goto same_types;
3315 	  else
3316 	    {
3317 	      tree parms1, parms2;
3318 
3319 	      for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
3320 		   parms1 && parms2;
3321 		   parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
3322 		{
3323 		  if (!gimple_types_compatible_p (TREE_VALUE (parms1),
3324 					     TREE_VALUE (parms2)))
3325 		    goto different_types;
3326 		}
3327 
3328 	      if (parms1 || parms2)
3329 		goto different_types;
3330 
3331 	      goto same_types;
3332 	    }
3333 	}
3334 
3335     case OFFSET_TYPE:
3336       {
3337 	if (!gimple_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
3338 	    || !gimple_types_compatible_p (TYPE_OFFSET_BASETYPE (t1),
3339 					   TYPE_OFFSET_BASETYPE (t2)))
3340 	  goto different_types;
3341 
3342 	goto same_types;
3343       }
3344 
3345     case POINTER_TYPE:
3346     case REFERENCE_TYPE:
3347       {
3348 	/* If the two pointers have different ref-all attributes,
3349 	   they can't be the same type.  */
3350 	if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
3351 	  goto different_types;
3352 
3353 	/* If one pointer points to an incomplete type variant of
3354 	   the other pointed-to type they are the same.  */
3355 	if (TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
3356 	    && RECORD_OR_UNION_TYPE_P (TREE_TYPE (t1))
3357 	    && (!COMPLETE_TYPE_P (TREE_TYPE (t1))
3358 		|| !COMPLETE_TYPE_P (TREE_TYPE (t2)))
3359 	    && TYPE_QUALS (TREE_TYPE (t1)) == TYPE_QUALS (TREE_TYPE (t2))
3360 	    && compare_type_names_p (TYPE_MAIN_VARIANT (TREE_TYPE (t1)),
3361 				     TYPE_MAIN_VARIANT (TREE_TYPE (t2)), true))
3362 	  {
3363 	    /* Replace the pointed-to incomplete type with the
3364 	       complete one.
3365 	       ???  This simple name-based merging causes at least some
3366 	       of the ICEs in canonicalizing FIELD_DECLs during stmt
3367 	       read.  For example in GCC we have two different struct deps
3368 	       and we mismatch the use in struct cpp_reader in sched-int.h
3369 	       vs. mkdeps.c.  Of course the whole exercise is for TBAA
3370 	       with structs which contain pointers to incomplete types
3371 	       in one unit and to complete ones in another.  So we
3372 	       probably should merge these types only with more context.  */
3373 	    if (COMPLETE_TYPE_P (TREE_TYPE (t2)))
3374 	      TREE_TYPE (t1) = TREE_TYPE (t2);
3375 	    else
3376 	      TREE_TYPE (t2) = TREE_TYPE (t1);
3377 	    goto same_types;
3378 	  }
3379 
3380 	/* Otherwise, pointer and reference types are the same if the
3381 	   pointed-to types are the same.  */
3382 	if (gimple_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
3383 	  goto same_types;
3384 
3385 	goto different_types;
3386       }
3387 
3388     case INTEGER_TYPE:
3389     case BOOLEAN_TYPE:
3390       {
3391 	tree min1 = TYPE_MIN_VALUE (t1);
3392 	tree max1 = TYPE_MAX_VALUE (t1);
3393 	tree min2 = TYPE_MIN_VALUE (t2);
3394 	tree max2 = TYPE_MAX_VALUE (t2);
3395 	bool min_equal_p = false;
3396 	bool max_equal_p = false;
3397 
3398 	/* If either type has a minimum value, the other type must
3399 	   have the same.  */
3400 	if (min1 == NULL_TREE && min2 == NULL_TREE)
3401 	  min_equal_p = true;
3402 	else if (min1 && min2 && operand_equal_p (min1, min2, 0))
3403 	  min_equal_p = true;
3404 
3405 	/* Likewise, if either type has a maximum value, the other
3406 	   type must have the same.  */
3407 	if (max1 == NULL_TREE && max2 == NULL_TREE)
3408 	  max_equal_p = true;
3409 	else if (max1 && max2 && operand_equal_p (max1, max2, 0))
3410 	  max_equal_p = true;
3411 
3412 	if (!min_equal_p || !max_equal_p)
3413 	  goto different_types;
3414 
3415 	goto same_types;
3416       }
3417 
3418     case ENUMERAL_TYPE:
3419       {
3420 	/* FIXME lto, we cannot check bounds on enumeral types because
3421 	   different front ends will produce different values.
3422 	   In C, enumeral types are integers, while in C++ each element
3423 	   will have its own symbolic value.  We should decide how enums
3424 	   are to be represented in GIMPLE and have each front end lower
3425 	   to that.  */
3426 	tree v1, v2;
3427 
3428 	/* For enumeral types, all the values must be the same.  */
3429 	if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
3430 	  goto same_types;
3431 
3432 	for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
3433 	     v1 && v2;
3434 	     v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
3435 	  {
3436 	    tree c1 = TREE_VALUE (v1);
3437 	    tree c2 = TREE_VALUE (v2);
3438 
3439 	    if (TREE_CODE (c1) == CONST_DECL)
3440 	      c1 = DECL_INITIAL (c1);
3441 
3442 	    if (TREE_CODE (c2) == CONST_DECL)
3443 	      c2 = DECL_INITIAL (c2);
3444 
3445 	    if (tree_int_cst_equal (c1, c2) != 1)
3446 	      goto different_types;
3447 	  }
3448 
3449 	/* If one enumeration has more values than the other, they
3450 	   are not the same.  */
3451 	if (v1 || v2)
3452 	  goto different_types;
3453 
3454 	goto same_types;
3455       }
3456 
3457     case RECORD_TYPE:
3458     case UNION_TYPE:
3459     case QUAL_UNION_TYPE:
3460       {
3461 	tree f1, f2;
3462 
3463 	/* If one type requires structural equality checks and the
3464 	   other doesn't, do not merge the types.  */
3465 	if (TYPE_STRUCTURAL_EQUALITY_P (t1)
3466 	    != TYPE_STRUCTURAL_EQUALITY_P (t2))
3467 	  goto different_types;
3468 
3469 	/* The struct tags shall compare equal.  */
3470 	if (!compare_type_names_p (TYPE_MAIN_VARIANT (t1),
3471 				   TYPE_MAIN_VARIANT (t2), false))
3472 	  goto different_types;
3473 
3474 	/* For aggregate types, all the fields must be the same.  */
3475 	for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
3476 	     f1 && f2;
3477 	     f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
3478 	  {
3479 	    /* The fields must have the same name, offset and type.  */
3480 	    if (DECL_NAME (f1) != DECL_NAME (f2)
3481 		|| DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
3482 		|| !compare_field_offset (f1, f2)
3483 		|| !gimple_types_compatible_p (TREE_TYPE (f1),
3484 					       TREE_TYPE (f2)))
3485 	      goto different_types;
3486 	  }
3487 
3488 	/* If one aggregate has more fields than the other, they
3489 	   are not the same.  */
3490 	if (f1 || f2)
3491 	  goto different_types;
3492 
3493 	goto same_types;
3494       }
3495 
3496     default:
3497       gcc_unreachable ();
3498     }
3499 
3500   /* Common exit path for types that are not compatible.  */
3501 different_types:
3502   p->same_p = 0;
3503   return 0;
3504 
3505   /* Common exit path for types that are compatible.  */
3506 same_types:
3507   p->same_p = 1;
3508   return 1;
3509 }
3510 
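/* Editorial worked example, not part of the original file: comparing
   two structurally identical definitions of
     struct list { struct list *next; };
   inserts the pair into the cache with SAME_P == -2, marks it in
   progress (-1), recurses through the `next' pointer back to the
   same pair, optimistically returns 1 for the inner query, and
   finally caches the outer verdict (1 here).  */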
3511 
3512 
3513 
3514 /* Per-pointer state for the SCC finding.  The on_sccstack flag
3515    is not strictly required; it is true when there is no hash value
3516    recorded for the type and false otherwise.  But querying that
3517    is slower.  */
3518 
3519 struct sccs
3520 {
3521   unsigned int dfsnum;
3522   unsigned int low;
3523   bool on_sccstack;
3524   hashval_t hash;
3525 };
3526 
3527 static unsigned int next_dfs_num;
3528 
3529 static hashval_t
3530 iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
3531 			    struct pointer_map_t *, struct obstack *);
3532 
3533 /* DFS visit the edge from the caller's type, with state *STATE, to T.
3534    Update the caller's type hash V with the hash for T if it is not part
3535    of the SCC containing the caller's type, and return it.
3536    SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */
3537 
3538 static hashval_t
3539 visit (tree t, struct sccs *state, hashval_t v,
3540        VEC (tree, heap) **sccstack,
3541        struct pointer_map_t *sccstate,
3542        struct obstack *sccstate_obstack)
3543 {
3544   struct sccs *cstate = NULL;
3545   void **slot;
3546 
3547   /* If there is a hash value recorded for this type then it can't
3548      possibly be part of our parent SCC.  Simply mix in its hash.  */
3549   if ((slot = pointer_map_contains (type_hash_cache, t)))
3550     return iterative_hash_hashval_t ((hashval_t) (size_t) *slot, v);
3551 
3552   if ((slot = pointer_map_contains (sccstate, t)) != NULL)
3553     cstate = (struct sccs *)*slot;
3554   if (!cstate)
3555     {
3556       hashval_t tem;
3557       /* Not yet visited.  DFS recurse.  */
3558       tem = iterative_hash_gimple_type (t, v,
3559 					sccstack, sccstate, sccstate_obstack);
3560       if (!cstate)
3561 	cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
3562       state->low = MIN (state->low, cstate->low);
3563       /* If the type is no longer on the SCC stack and thus is not part
3564          of the parent's SCC, mix in its hash value.  Otherwise we will
3565 	 ignore the type for hashing purposes and return the unaltered
3566 	 hash value.  */
3567       if (!cstate->on_sccstack)
3568 	return tem;
3569     }
3570   if (cstate->dfsnum < state->dfsnum
3571       && cstate->on_sccstack)
3572     state->low = MIN (cstate->dfsnum, state->low);
3573 
3574   /* We are part of our parent's SCC; skip this type during hashing
3575      and return the unaltered hash value.  */
3576   return v;
3577 }
3578 
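/* Editorial note, not part of the original file: the DFSNUM/LOW
   bookkeeping above is Tarjan's SCC algorithm -- a back edge to a
   type still on the stack lowers STATE->low, and hash mixing is
   deferred for members of the same SCC so that each member receives
   an order-independent hash, as explained before
   iterative_hash_gimple_type below.  */
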
3579 /* Hash NAME with the previous hash value V and return it.  */
3580 
3581 static hashval_t
3582 iterative_hash_name (tree name, hashval_t v)
3583 {
3584   if (!name)
3585     return v;
3586   if (TREE_CODE (name) == TYPE_DECL)
3587     name = DECL_NAME (name);
3588   if (!name)
3589     return v;
3590   gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
3591   return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
3592 }
3593 
3594 /* Return a hash value for gimple type TYPE combined with VAL.
3595    SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK hold the state of the DFS walk.
3596 
3597    To hash a type we end up hashing in the types that are reachable
3598    from it.  Through pointers we can end up with cycles, which mess
3599    up the required property that we compute the same hash value
3600    for structurally equivalent types.  To avoid this we have to
3601    hash all types in a cycle (the SCC) in a commutative way.  The
3602    easiest way is to not mix in the hashes of the SCC members at
3603    all.  To make this work we have to delay setting the hash
3604    values of the SCC until it is complete.  */
3605 
3606 static hashval_t
3607 iterative_hash_gimple_type (tree type, hashval_t val,
3608 			    VEC(tree, heap) **sccstack,
3609 			    struct pointer_map_t *sccstate,
3610 			    struct obstack *sccstate_obstack)
3611 {
3612   hashval_t v;
3613   void **slot;
3614   struct sccs *state;
3615 
3616 #ifdef ENABLE_CHECKING
3617   /* Not visited during this DFS walk nor during previous walks.  */
3618   gcc_assert (!pointer_map_contains (type_hash_cache, type)
3619 	      && !pointer_map_contains (sccstate, type));
3620 #endif
3621   state = XOBNEW (sccstate_obstack, struct sccs);
3622   *pointer_map_insert (sccstate, type) = state;
3623 
3624   VEC_safe_push (tree, heap, *sccstack, type);
3625   state->dfsnum = next_dfs_num++;
3626   state->low = state->dfsnum;
3627   state->on_sccstack = true;
3628 
3629   /* Combine a few common features of types so that types are grouped into
3630      smaller sets; when searching for existing matching types to merge,
3631      only existing types having the same features as the new type will be
3632      checked.  */
3633   v = iterative_hash_hashval_t (TREE_CODE (type), 0);
3634   v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
3635   v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
3636 
3637   /* Do not hash the type's size as this will cause differences in
3638      hash values for the complete vs. the incomplete type variant.  */
3639 
3640   /* Incorporate common features of numerical types.  */
3641   if (INTEGRAL_TYPE_P (type)
3642       || SCALAR_FLOAT_TYPE_P (type)
3643       || FIXED_POINT_TYPE_P (type))
3644     {
3645       v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
3646       v = iterative_hash_hashval_t (TYPE_MODE (type), v);
3647       v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
3648     }
3649 
3650   /* For pointer and reference types, fold in information about the type
3651      pointed to but do not recurse into possibly incomplete types to
3652      avoid hash differences for complete vs. incomplete types.  */
3653   if (POINTER_TYPE_P (type))
3654     {
3655       if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (type)))
3656 	{
3657 	  v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
3658 	  v = iterative_hash_name
3659 	      (TYPE_NAME (TYPE_MAIN_VARIANT (TREE_TYPE (type))), v);
3660 	}
3661       else
3662 	v = visit (TREE_TYPE (type), state, v,
3663 		   sccstack, sccstate, sccstate_obstack);
3664     }
3665 
3666   /* For integer types hash the type's min/max values and the string flag.  */
3667   if (TREE_CODE (type) == INTEGER_TYPE)
3668     {
3669       /* OMP lowering can introduce error_mark_node in place of
3670 	 random local decls in types.  */
3671       if (TYPE_MIN_VALUE (type) != error_mark_node)
3672 	v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
3673       if (TYPE_MAX_VALUE (type) != error_mark_node)
3674 	v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
3675       v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
3676     }
3677 
3678   /* For array types hash their domain and the string flag.  */
3679   if (TREE_CODE (type) == ARRAY_TYPE
3680       && TYPE_DOMAIN (type))
3681     {
3682       v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
3683       v = visit (TYPE_DOMAIN (type), state, v,
3684 		 sccstack, sccstate, sccstate_obstack);
3685     }
3686 
3687   /* Recurse for aggregates with a single element type.  */
3688   if (TREE_CODE (type) == ARRAY_TYPE
3689       || TREE_CODE (type) == COMPLEX_TYPE
3690       || TREE_CODE (type) == VECTOR_TYPE)
3691     v = visit (TREE_TYPE (type), state, v,
3692 	       sccstack, sccstate, sccstate_obstack);
3693 
3694   /* Incorporate function return and argument types.  */
3695   if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
3696     {
3697       unsigned na;
3698       tree p;
3699 
3700       /* For method types also incorporate their parent class.  */
3701       if (TREE_CODE (type) == METHOD_TYPE)
3702 	v = visit (TYPE_METHOD_BASETYPE (type), state, v,
3703 		   sccstack, sccstate, sccstate_obstack);
3704 
3705       v = visit (TREE_TYPE (type), state, v,
3706 		 sccstack, sccstate, sccstate_obstack);
3707 
3708       for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
3709 	{
3710 	  v = visit (TREE_VALUE (p), state, v,
3711 		     sccstack, sccstate, sccstate_obstack);
3712 	  na++;
3713 	}
3714 
3715       v = iterative_hash_hashval_t (na, v);
3716     }
3717 
3718   if (TREE_CODE (type) == RECORD_TYPE
3719       || TREE_CODE (type) == UNION_TYPE
3720       || TREE_CODE (type) == QUAL_UNION_TYPE)
3721     {
3722       unsigned nf;
3723       tree f;
3724 
3725       v = iterative_hash_name (TYPE_NAME (TYPE_MAIN_VARIANT (type)), v);
3726 
3727       for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
3728 	{
3729 	  v = iterative_hash_name (DECL_NAME (f), v);
3730 	  v = visit (TREE_TYPE (f), state, v,
3731 		     sccstack, sccstate, sccstate_obstack);
3732 	  nf++;
3733 	}
3734 
3735       v = iterative_hash_hashval_t (nf, v);
3736     }
3737 
3738   /* Record hash for us.  */
3739   state->hash = v;
3740 
3741   /* See if we found an SCC.  */
3742   if (state->low == state->dfsnum)
3743     {
3744       tree x;
3745 
3746       /* Pop off the SCC and set its hash values.  */
3747       do
3748 	{
3749 	  struct sccs *cstate;
3750 	  x = VEC_pop (tree, *sccstack);
3751 	  gcc_assert (!pointer_map_contains (type_hash_cache, x));
3752 	  cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
3753 	  cstate->on_sccstack = false;
3754 	  slot = pointer_map_insert (type_hash_cache, x);
3755 	  *slot = (void *) (size_t) cstate->hash;
3756 	}
3757       while (x != type);
3758     }
3759 
3760   return iterative_hash_hashval_t (v, val);
3761 }
3762 
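/* A concrete (editorial) example of the above: for

     struct S { int i; float f; };

   the code mixes, in order, TREE_CODE (RECORD_TYPE), the qualifiers
   and the TREE_ADDRESSABLE flag, the tag name "S", then for each
   field its name ("i", "f") and the hash of its type via VISIT, and
   finally the field count 2.  */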
3763 
3764 /* Returns a hash value for P (assumed to be a type).  The hash value
3765    is computed using some distinguishing features of the type.  Note
3766    that we cannot use pointer hashing here as we may be dealing with
3767    two distinct instances of the same type.
3768 
3769    This function should produce the same hash value for two compatible
3770    types according to gimple_types_compatible_p.  */
3771 
3772 static hashval_t
3773 gimple_type_hash (const void *p)
3774 {
3775   const_tree t = (const_tree) p;
3776   VEC(tree, heap) *sccstack = NULL;
3777   struct pointer_map_t *sccstate;
3778   struct obstack sccstate_obstack;
3779   hashval_t val;
3780   void **slot;
3781 
3782   if (type_hash_cache == NULL)
3783     type_hash_cache = pointer_map_create ();
3784 
3785   if ((slot = pointer_map_contains (type_hash_cache, p)) != NULL)
3786     return iterative_hash_hashval_t ((hashval_t) (size_t) *slot, 0);
3787 
3788   /* Perform a DFS walk and pre-hash all reachable types.  */
3789   next_dfs_num = 1;
3790   sccstate = pointer_map_create ();
3791   gcc_obstack_init (&sccstate_obstack);
3792   val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
3793 				    &sccstack, sccstate, &sccstate_obstack);
3794   VEC_free (tree, heap, sccstack);
3795   pointer_map_destroy (sccstate);
3796   obstack_free (&sccstate_obstack, NULL);
3797 
3798   return val;
3799 }
3800 
3801 
3802 /* Returns nonzero if P1 and P2 are equal.  */
3803 
3804 static int
3805 gimple_type_eq (const void *p1, const void *p2)
3806 {
3807   const_tree t1 = (const_tree) p1;
3808   const_tree t2 = (const_tree) p2;
3809   return gimple_types_compatible_p (CONST_CAST_TREE (t1), CONST_CAST_TREE (t2));
3810 }
3811 
3812 
3813 /* Register type T in the global type table gimple_types.
3814    If another type T', compatible with T, already existed in
3815    gimple_types then return T'; otherwise return T.  This is used by
3816    LTO to merge identical types read from different TUs.  */
3817 
3818 tree
3819 gimple_register_type (tree t)
3820 {
3821   void **slot;
3822 
3823   gcc_assert (TYPE_P (t));
3824 
3825   /* Always register the main variant first.  This is important so we
3826      pick up the non-typedef variants as canonical; otherwise we'll end
3827      up taking typedef ids for structure tags during comparison.  */
3828   if (TYPE_MAIN_VARIANT (t) != t)
3829     gimple_register_type (TYPE_MAIN_VARIANT (t));
3830 
3831   if (gimple_types == NULL)
3832     gimple_types = htab_create (16381, gimple_type_hash, gimple_type_eq, 0);
3833 
3834   slot = htab_find_slot (gimple_types, t, INSERT);
3835   if (*slot
3836       && *(tree *)slot != t)
3837     {
3838       tree new_type = (tree) *((tree *) slot);
3839 
3840       /* Do not merge types with different addressability.  */
3841       gcc_assert (TREE_ADDRESSABLE (t) == TREE_ADDRESSABLE (new_type));
3842 
3843       /* If t is not its main variant then make t unreachable from its
3844 	 main variant list.  Otherwise we'd queue up a lot of duplicates
3845 	 there.  */
3846       if (t != TYPE_MAIN_VARIANT (t))
3847 	{
3848 	  tree tem = TYPE_MAIN_VARIANT (t);
3849 	  while (tem && TYPE_NEXT_VARIANT (tem) != t)
3850 	    tem = TYPE_NEXT_VARIANT (tem);
3851 	  if (tem)
3852 	    TYPE_NEXT_VARIANT (tem) = TYPE_NEXT_VARIANT (t);
3853 	  TYPE_NEXT_VARIANT (t) = NULL_TREE;
3854 	}
3855 
3856       /* If we are a pointer then remove us from the pointer-to or
3857 	 reference-to chain.  Otherwise we'd queue up a lot of duplicates
3858 	 there.  */
3859       if (TREE_CODE (t) == POINTER_TYPE)
3860 	{
3861 	  if (TYPE_POINTER_TO (TREE_TYPE (t)) == t)
3862 	    TYPE_POINTER_TO (TREE_TYPE (t)) = TYPE_NEXT_PTR_TO (t);
3863 	  else
3864 	    {
3865 	      tree tem = TYPE_POINTER_TO (TREE_TYPE (t));
3866 	      while (tem && TYPE_NEXT_PTR_TO (tem) != t)
3867 		tem = TYPE_NEXT_PTR_TO (tem);
3868 	      if (tem)
3869 		TYPE_NEXT_PTR_TO (tem) = TYPE_NEXT_PTR_TO (t);
3870 	    }
3871 	  TYPE_NEXT_PTR_TO (t) = NULL_TREE;
3872 	}
3873       else if (TREE_CODE (t) == REFERENCE_TYPE)
3874 	{
3875 	  if (TYPE_REFERENCE_TO (TREE_TYPE (t)) == t)
3876 	    TYPE_REFERENCE_TO (TREE_TYPE (t)) = TYPE_NEXT_REF_TO (t);
3877 	  else
3878 	    {
3879 	      tree tem = TYPE_REFERENCE_TO (TREE_TYPE (t));
3880 	      while (tem && TYPE_NEXT_REF_TO (tem) != t)
3881 		tem = TYPE_NEXT_REF_TO (tem);
3882 	      if (tem)
3883 		TYPE_NEXT_REF_TO (tem) = TYPE_NEXT_REF_TO (t);
3884 	    }
3885 	  TYPE_NEXT_REF_TO (t) = NULL_TREE;
3886 	}
3887 
3888       t = new_type;
3889     }
3890   else
3891     *slot = (void *) t;
3892 
3893   return t;
3894 }
3895 
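/* Usage sketch (editorial; the surrounding code is hypothetical, but
   this is the pattern the LTO reader follows for streamed-in types):

     tree t = ...;  (a type just read in from some TU)
     tree canon = gimple_register_type (t);
     if (canon != t)
       ...  (a compatible type from an earlier TU already exists;
	     use CANON and discard T)

   When T loses, it has also been unlinked from its main variant's
   list and from the pointer-to/reference-to chains, so duplicates do
   not accumulate there.  */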
3896 
3897 /* Show statistics on references to the global type table gimple_types.  */
3898 
3899 void
3900 print_gimple_types_stats (void)
3901 {
3902   if (gimple_types)
3903     fprintf (stderr, "GIMPLE type table: size %ld, %ld elements, "
3904 	     "%ld searches, %ld collisions (ratio: %f)\n",
3905 	     (long) htab_size (gimple_types),
3906 	     (long) htab_elements (gimple_types),
3907 	     (long) gimple_types->searches,
3908 	     (long) gimple_types->collisions,
3909 	     htab_collisions (gimple_types));
3910   else
3911     fprintf (stderr, "GIMPLE type table is empty\n");
3912   if (gtc_visited)
3913     fprintf (stderr, "GIMPLE type comparison table: size %ld, %ld "
3914 	     "elements, %ld searches, %ld collisions (ratio: %f)\n",
3915 	     (long) htab_size (gtc_visited),
3916 	     (long) htab_elements (gtc_visited),
3917 	     (long) gtc_visited->searches,
3918 	     (long) gtc_visited->collisions,
3919 	     htab_collisions (gtc_visited));
3920   else
3921     fprintf (stderr, "GIMPLE type comparison table is empty\n");
3922 }
3923 
3924 /* Free the gimple type hashtables used for LTO type merging.  */
3925 
3926 void
3927 free_gimple_type_tables (void)
3928 {
3929   /* Last chance to print stats for the tables.  */
3930   if (flag_lto_report)
3931     print_gimple_types_stats ();
3932 
3933   if (gimple_types)
3934     {
3935       htab_delete (gimple_types);
3936       gimple_types = NULL;
3937     }
3938   if (type_hash_cache)
3939     {
3940       pointer_map_destroy (type_hash_cache);
3941       type_hash_cache = NULL;
3942     }
3943   if (gtc_visited)
3944     {
3945       htab_delete (gtc_visited);
3946       obstack_free (&gtc_ob, NULL);
3947       gtc_visited = NULL;
3948     }
3949 }
3950 
3951 
3952 /* Return a type the same as TYPE except unsigned or
3953    signed according to UNSIGNEDP.  */
3954 
3955 static tree
3956 gimple_signed_or_unsigned_type (bool unsignedp, tree type)
3957 {
3958   tree type1;
3959 
3960   type1 = TYPE_MAIN_VARIANT (type);
3961   if (type1 == signed_char_type_node
3962       || type1 == char_type_node
3963       || type1 == unsigned_char_type_node)
3964     return unsignedp ? unsigned_char_type_node : signed_char_type_node;
3965   if (type1 == integer_type_node || type1 == unsigned_type_node)
3966     return unsignedp ? unsigned_type_node : integer_type_node;
3967   if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
3968     return unsignedp ? short_unsigned_type_node : short_integer_type_node;
3969   if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
3970     return unsignedp ? long_unsigned_type_node : long_integer_type_node;
3971   if (type1 == long_long_integer_type_node
3972       || type1 == long_long_unsigned_type_node)
3973     return unsignedp
3974            ? long_long_unsigned_type_node
3975 	   : long_long_integer_type_node;
3976 #if HOST_BITS_PER_WIDE_INT >= 64
3977   if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
3978     return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
3979 #endif
3980   if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
3981     return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
3982   if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
3983     return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
3984   if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
3985     return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
3986   if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
3987     return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
3988 
3989 #define GIMPLE_FIXED_TYPES(NAME)	    \
3990   if (type1 == short_ ## NAME ## _type_node \
3991       || type1 == unsigned_short_ ## NAME ## _type_node) \
3992     return unsignedp ? unsigned_short_ ## NAME ## _type_node \
3993 		     : short_ ## NAME ## _type_node; \
3994   if (type1 == NAME ## _type_node \
3995       || type1 == unsigned_ ## NAME ## _type_node) \
3996     return unsignedp ? unsigned_ ## NAME ## _type_node \
3997 		     : NAME ## _type_node; \
3998   if (type1 == long_ ## NAME ## _type_node \
3999       || type1 == unsigned_long_ ## NAME ## _type_node) \
4000     return unsignedp ? unsigned_long_ ## NAME ## _type_node \
4001 		     : long_ ## NAME ## _type_node; \
4002   if (type1 == long_long_ ## NAME ## _type_node \
4003       || type1 == unsigned_long_long_ ## NAME ## _type_node) \
4004     return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
4005 		     : long_long_ ## NAME ## _type_node;
4006 
4007 #define GIMPLE_FIXED_MODE_TYPES(NAME) \
4008   if (type1 == NAME ## _type_node \
4009       || type1 == u ## NAME ## _type_node) \
4010     return unsignedp ? u ## NAME ## _type_node \
4011 		     : NAME ## _type_node;
4012 
4013 #define GIMPLE_FIXED_TYPES_SAT(NAME) \
4014   if (type1 == sat_ ## short_ ## NAME ## _type_node \
4015       || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
4016     return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
4017 		     : sat_ ## short_ ## NAME ## _type_node; \
4018   if (type1 == sat_ ## NAME ## _type_node \
4019       || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
4020     return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
4021 		     : sat_ ## NAME ## _type_node; \
4022   if (type1 == sat_ ## long_ ## NAME ## _type_node \
4023       || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
4024     return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
4025 		     : sat_ ## long_ ## NAME ## _type_node; \
4026   if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
4027       || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
4028     return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
4029 		     : sat_ ## long_long_ ## NAME ## _type_node;
4030 
4031 #define GIMPLE_FIXED_MODE_TYPES_SAT(NAME)	\
4032   if (type1 == sat_ ## NAME ## _type_node \
4033       || type1 == sat_ ## u ## NAME ## _type_node) \
4034     return unsignedp ? sat_ ## u ## NAME ## _type_node \
4035 		     : sat_ ## NAME ## _type_node;
4036 
4037   GIMPLE_FIXED_TYPES (fract);
4038   GIMPLE_FIXED_TYPES_SAT (fract);
4039   GIMPLE_FIXED_TYPES (accum);
4040   GIMPLE_FIXED_TYPES_SAT (accum);
4041 
4042   GIMPLE_FIXED_MODE_TYPES (qq);
4043   GIMPLE_FIXED_MODE_TYPES (hq);
4044   GIMPLE_FIXED_MODE_TYPES (sq);
4045   GIMPLE_FIXED_MODE_TYPES (dq);
4046   GIMPLE_FIXED_MODE_TYPES (tq);
4047   GIMPLE_FIXED_MODE_TYPES_SAT (qq);
4048   GIMPLE_FIXED_MODE_TYPES_SAT (hq);
4049   GIMPLE_FIXED_MODE_TYPES_SAT (sq);
4050   GIMPLE_FIXED_MODE_TYPES_SAT (dq);
4051   GIMPLE_FIXED_MODE_TYPES_SAT (tq);
4052   GIMPLE_FIXED_MODE_TYPES (ha);
4053   GIMPLE_FIXED_MODE_TYPES (sa);
4054   GIMPLE_FIXED_MODE_TYPES (da);
4055   GIMPLE_FIXED_MODE_TYPES (ta);
4056   GIMPLE_FIXED_MODE_TYPES_SAT (ha);
4057   GIMPLE_FIXED_MODE_TYPES_SAT (sa);
4058   GIMPLE_FIXED_MODE_TYPES_SAT (da);
4059   GIMPLE_FIXED_MODE_TYPES_SAT (ta);
4060 
4061   /* For ENUMERAL_TYPEs in C++, we must check the mode of the types, not
4062      the precision; they have precision set to match their range, but
4063      may use a wider mode to match an ABI.  If we change modes, we may
4064      wind up with bad conversions.  For INTEGER_TYPEs in C, we must check
4065      the precision as well, so as to yield correct results for
4066      bit-field types.  C++ does not have these separate bit-field
4067      types, and producing a signed or unsigned variant of an
4068      ENUMERAL_TYPE may cause other problems as well.  */
4069   if (!INTEGRAL_TYPE_P (type)
4070       || TYPE_UNSIGNED (type) == unsignedp)
4071     return type;
4072 
4073 #define TYPE_OK(node)							    \
4074   (TYPE_MODE (type) == TYPE_MODE (node)					    \
4075    && TYPE_PRECISION (type) == TYPE_PRECISION (node))
4076   if (TYPE_OK (signed_char_type_node))
4077     return unsignedp ? unsigned_char_type_node : signed_char_type_node;
4078   if (TYPE_OK (integer_type_node))
4079     return unsignedp ? unsigned_type_node : integer_type_node;
4080   if (TYPE_OK (short_integer_type_node))
4081     return unsignedp ? short_unsigned_type_node : short_integer_type_node;
4082   if (TYPE_OK (long_integer_type_node))
4083     return unsignedp ? long_unsigned_type_node : long_integer_type_node;
4084   if (TYPE_OK (long_long_integer_type_node))
4085     return (unsignedp
4086 	    ? long_long_unsigned_type_node
4087 	    : long_long_integer_type_node);
4088 
4089 #if HOST_BITS_PER_WIDE_INT >= 64
4090   if (TYPE_OK (intTI_type_node))
4091     return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
4092 #endif
4093   if (TYPE_OK (intDI_type_node))
4094     return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
4095   if (TYPE_OK (intSI_type_node))
4096     return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
4097   if (TYPE_OK (intHI_type_node))
4098     return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
4099   if (TYPE_OK (intQI_type_node))
4100     return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
4101 
4102 #undef GIMPLE_FIXED_TYPES
4103 #undef GIMPLE_FIXED_MODE_TYPES
4104 #undef GIMPLE_FIXED_TYPES_SAT
4105 #undef GIMPLE_FIXED_MODE_TYPES_SAT
4106 #undef TYPE_OK
4107 
4108   return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
4109 }
4110 
4111 
4112 /* Return an unsigned type the same as TYPE in other respects.  */
4113 
4114 tree
4115 gimple_unsigned_type (tree type)
4116 {
4117   return gimple_signed_or_unsigned_type (true, type);
4118 }
4119 
4120 
4121 /* Return a signed type the same as TYPE in other respects.  */
4122 
4123 tree
4124 gimple_signed_type (tree type)
4125 {
4126   return gimple_signed_or_unsigned_type (false, type);
4127 }
4128 
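/* A few illustrative (editorial) examples of the mapping:

     gimple_unsigned_type (integer_type_node)      => unsigned_type_node
     gimple_signed_type (unsigned_char_type_node)  => signed_char_type_node
     gimple_signed_type (char_type_node)           => signed_char_type_node

   For an INTEGER_TYPE with a non-standard precision, e.g. a C
   bit-field type of 24 bits, none of the explicit cases match and the
   fallback builds build_nonstandard_integer_type (24, unsignedp).  */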
4129 
4130 /* Return the type-based alias set for T, which may be an expression
4131    or a type.  Return -1 if we don't do anything special.  */
4132 
4133 alias_set_type
4134 gimple_get_alias_set (tree t)
4135 {
4136   tree u;
4137 
4138   /* Permit type-punning when accessing a union, provided the access
4139      is directly through the union.  For example, this code does not
4140      permit taking the address of a union member and then storing
4141      through it.  Even the type-punning allowed here is a GCC
4142      extension, albeit a common and useful one; the C standard says
4143      that such accesses have implementation-defined behavior.  */
4144   for (u = t;
4145        TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
4146        u = TREE_OPERAND (u, 0))
4147     if (TREE_CODE (u) == COMPONENT_REF
4148 	&& TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
4149       return 0;
4150 
4151   /* That's all the expressions we handle specially.  */
4152   if (!TYPE_P (t))
4153     return -1;
4154 
4155   /* For convenience, follow the C standard when dealing with
4156      character types.  Any object may be accessed via an lvalue that
4157      has character type.  */
4158   if (t == char_type_node
4159       || t == signed_char_type_node
4160       || t == unsigned_char_type_node)
4161     return 0;
4162 
4163   /* Allow aliasing between signed and unsigned variants of the same
4164      type.  We treat the signed variant as canonical.  */
4165   if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
4166     {
4167       tree t1 = gimple_signed_type (t);
4168 
4169       /* t1 == t can happen for boolean nodes which are always unsigned.  */
4170       if (t1 != t)
4171 	return get_alias_set (t1);
4172     }
4173   else if (POINTER_TYPE_P (t))
4174     {
4175       /* From the common C and C++ langhook implementation:
4176 
4177 	 Unfortunately, there is no canonical form of a pointer type.
4178 	 In particular, if we have `typedef int I', then `int *', and
4179 	 `I *' are different types.  So, we have to pick a canonical
4180 	 representative.  We do this below.
4181 
4182 	 Technically, this approach is actually more conservative than
4183 	 it needs to be.  In particular, `const int *' and `int *'
4184 	 should be in different alias sets, according to the C and C++
4185 	 standard, since their types are not the same, and so,
4186 	 technically, an `int **' and `const int **' cannot point at
4187 	 the same thing.
4188 
4189 	 But, the standard is wrong.  In particular, this code is
4190 	 legal C++:
4191 
4192 	 int *ip;
4193 	 int **ipp = &ip;
4194 	 const int* const* cipp = ipp;
4195 
4196 	 And, it doesn't make sense for that to be legal unless you can
4197 	 dereference IPP and CIPP.  So, we ignore cv-qualifiers on the
4198 	 pointed-to types.  This issue has been reported to the C++ committee.  */
4199 
4200       /* In addition to the above canonicalization issue with LTO
4201          we should also canonicalize `T (*)[]' to `T *' avoiding
4202 	 alias issues with pointer-to element types and pointer-to
4203 	 array types.
4204 
4205 	 Likewise we need to deal with the situation of incomplete
4206 	 pointed-to types and make `*(struct X **)&a' and
4207 	 `*(struct X {} **)&a' alias.  Otherwise we will have to
4208 	 guarantee that all pointer-to incomplete type variants
4209 	 will be replaced by pointer-to complete type variants if
4210 	 they are available.
4211 
4212 	 With LTO the convenient situation of using `void *' to
4213 	 access and store any pointer type will also become
4214 	 more apparent (and `void *' is just another pointer-to
4215 	 incomplete type).  Assigning alias-set zero to `void *'
4216 	 and all pointer-to incomplete types is not an appealing
4217 	 solution.  Assigning an effective alias-set zero that only
4218 	 affects pointers might be appealing, though - by recording
4219 	 proper subset relationships of all pointer alias-sets.
4220 
4221 	 Pointer-to function types are another grey area which
4222 	 needs caution.  Globbing them all into one alias-set
4223 	 or the above effective zero set would work.  */
4224 
4225       /* For now just assign the same alias-set to all pointers.
4226          That's simple and avoids all the above problems.  */
4227       if (t != ptr_type_node)
4228 	return get_alias_set (ptr_type_node);
4229     }
4230 
4231   return -1;
4232 }
4233 
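/* For example (editorial), given

     union u { int i; float f; } x;

   an access 'x.f' is a COMPONENT_REF whose base has UNION_TYPE, so
   gimple_get_alias_set returns alias set 0 and the access may
   conflict with stores done through 'x.i'.  By contrast,
   '*(float *) &x.i' goes through a pointer rather than directly
   through the union and gets no such exemption.  */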
4234 
4235 /* Data structure used to count the number of dereferences to PTR
4236    inside an expression.  */
4237 struct count_ptr_d
4238 {
4239   tree ptr;
4240   unsigned num_stores;
4241   unsigned num_loads;
4242 };
4243 
4244 /* Helper for count_uses_and_derefs.  Called by walk_tree to look for
4245    (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA.  */
4246 
4247 static tree
4248 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
4249 {
4250   struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
4251   struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;
4252 
4253   /* Do not walk inside ADDR_EXPR nodes.  In the expression &ptr->fld,
4254      pointer 'ptr' is *not* dereferenced; it is simply used to compute
4255      the address of 'fld' as 'ptr + offsetof(fld)'.  */
4256   if (TREE_CODE (*tp) == ADDR_EXPR)
4257     {
4258       *walk_subtrees = 0;
4259       return NULL_TREE;
4260     }
4261 
4262   if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
4263     {
4264       if (wi_p->is_lhs)
4265 	count_p->num_stores++;
4266       else
4267 	count_p->num_loads++;
4268     }
4269 
4270   return NULL_TREE;
4271 }
4272 
4273 /* Count the number of direct and indirect uses for pointer PTR in
4274    statement STMT.  The total number of uses of PTR is stored in
4275    *NUM_USES_P.  Indirect references are counted separately depending
4276    on whether they are store or load operations.  The counts are
4277    stored in *NUM_STORES_P and *NUM_LOADS_P.  */
4278 
4279 void
4280 count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
4281 		       unsigned *num_loads_p, unsigned *num_stores_p)
4282 {
4283   ssa_op_iter i;
4284   tree use;
4285 
4286   *num_uses_p = 0;
4287   *num_loads_p = 0;
4288   *num_stores_p = 0;
4289 
4290   /* Find out the total number of uses of PTR in STMT.  */
4291   FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
4292     if (use == ptr)
4293       (*num_uses_p)++;
4294 
4295   /* Now count the number of indirect references to PTR.  This is
4296      truly awful, but we don't have much choice.  There are no parent
4297      pointers inside INDIRECT_REFs, so an expression like
4298      '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
4299      find all the indirect and direct uses of x_1 inside.  The only
4300      shortcut we can take is the fact that GIMPLE only allows
4301      INDIRECT_REFs inside the expressions below.  */
4302   if (is_gimple_assign (stmt)
4303       || gimple_code (stmt) == GIMPLE_RETURN
4304       || gimple_code (stmt) == GIMPLE_ASM
4305       || is_gimple_call (stmt))
4306     {
4307       struct walk_stmt_info wi;
4308       struct count_ptr_d count;
4309 
4310       count.ptr = ptr;
4311       count.num_stores = 0;
4312       count.num_loads = 0;
4313 
4314       memset (&wi, 0, sizeof (wi));
4315       wi.info = &count;
4316       walk_gimple_op (stmt, count_ptr_derefs, &wi);
4317 
4318       *num_stores_p = count.num_stores;
4319       *num_loads_p = count.num_loads;
4320     }
4321 
4322   gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
4323 }
4324 
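/* Worked (editorial) example for the statement from the comment
   above, '*x_1 = foo (x_1, *x_1)': x_1 appears as three SSA uses, the
   LHS INDIRECT_REF is a store and the argument INDIRECT_REF is a
   load, so the expected result is *NUM_USES_P == 3, *NUM_STORES_P == 1
   and *NUM_LOADS_P == 1, which satisfies the assertion above.  */
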
4325 /* From a tree operand OP return the base of a load or store operation
4326    or NULL_TREE if OP is not a load or a store.  */
4327 
4328 static tree
4329 get_base_loadstore (tree op)
4330 {
4331   while (handled_component_p (op))
4332     op = TREE_OPERAND (op, 0);
4333   if (DECL_P (op)
4334       || INDIRECT_REF_P (op)
4335       || TREE_CODE (op) == TARGET_MEM_REF)
4336     return op;
4337   return NULL_TREE;
4338 }
4339 
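/* For instance (editorial), for the operand 'a.b[i].c' the handled
   components are stripped and the decl 'a' is returned; for '*p' the
   INDIRECT_REF itself is the base; for a bare SSA name or a constant
   NULL_TREE is returned, since no memory is touched.  */
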
4340 /* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
4341    VISIT_ADDR, if non-NULL, on load, store and address-taken operands,
4342    passing STMT, the base of the operand and DATA to each.  The base
4343    will be either a decl, an indirect reference (including TARGET_MEM_REF)
4344    or the argument of an address expression.
4345    Returns the results of these callbacks or'ed together.  */
4346 
4347 bool
4348 walk_stmt_load_store_addr_ops (gimple stmt, void *data,
4349 			       bool (*visit_load)(gimple, tree, void *),
4350 			       bool (*visit_store)(gimple, tree, void *),
4351 			       bool (*visit_addr)(gimple, tree, void *))
4352 {
4353   bool ret = false;
4354   unsigned i;
4355   if (gimple_assign_single_p (stmt))
4356     {
4357       tree lhs, rhs;
4358       if (visit_store)
4359 	{
4360 	  lhs = get_base_loadstore (gimple_assign_lhs (stmt));
4361 	  if (lhs)
4362 	    ret |= visit_store (stmt, lhs, data);
4363 	}
4364       rhs = gimple_assign_rhs1 (stmt);
4365       while (handled_component_p (rhs))
4366 	rhs = TREE_OPERAND (rhs, 0);
4367       if (visit_addr)
4368 	{
4369 	  if (TREE_CODE (rhs) == ADDR_EXPR)
4370 	    ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
4371 	  else if (TREE_CODE (rhs) == TARGET_MEM_REF
4372                    && TMR_BASE (rhs) != NULL_TREE
4373 		   && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
4374 	    ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
4375 	  else if (TREE_CODE (rhs) == OBJ_TYPE_REF
4376 		   && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
4377 	    ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
4378 						   0), data);
4379           lhs = gimple_assign_lhs (stmt);
4380 	  if (TREE_CODE (lhs) == TARGET_MEM_REF
4381               && TMR_BASE (lhs) != NULL_TREE
4382               && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
4383             ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
4384 	}
4385       if (visit_load)
4386 	{
4387 	  rhs = get_base_loadstore (rhs);
4388 	  if (rhs)
4389 	    ret |= visit_load (stmt, rhs, data);
4390 	}
4391     }
4392   else if (visit_addr
4393 	   && (is_gimple_assign (stmt)
4394 	       || gimple_code (stmt) == GIMPLE_COND))
4395     {
4396       for (i = 0; i < gimple_num_ops (stmt); ++i)
4397 	if (gimple_op (stmt, i)
4398 	    && TREE_CODE (gimple_op (stmt, i)) == ADDR_EXPR)
4399 	  ret |= visit_addr (stmt, TREE_OPERAND (gimple_op (stmt, i), 0), data);
4400     }
4401   else if (is_gimple_call (stmt))
4402     {
4403       if (visit_store)
4404 	{
4405 	  tree lhs = gimple_call_lhs (stmt);
4406 	  if (lhs)
4407 	    {
4408 	      lhs = get_base_loadstore (lhs);
4409 	      if (lhs)
4410 		ret |= visit_store (stmt, lhs, data);
4411 	    }
4412 	}
4413       if (visit_load || visit_addr)
4414 	for (i = 0; i < gimple_call_num_args (stmt); ++i)
4415 	  {
4416 	    tree rhs = gimple_call_arg (stmt, i);
4417 	    if (visit_addr
4418 		&& TREE_CODE (rhs) == ADDR_EXPR)
4419 	      ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
4420 	    else if (visit_load)
4421 	      {
4422 		rhs = get_base_loadstore (rhs);
4423 		if (rhs)
4424 		  ret |= visit_load (stmt, rhs, data);
4425 	      }
4426 	  }
4427       if (visit_addr
4428 	  && gimple_call_chain (stmt)
4429 	  && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
4430 	ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
4431 			   data);
4432       if (visit_addr
4433 	  && gimple_call_return_slot_opt_p (stmt)
4434 	  && gimple_call_lhs (stmt) != NULL_TREE
4435 	  && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
4436 	ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
4437     }
4438   else if (gimple_code (stmt) == GIMPLE_ASM)
4439     {
4440       unsigned noutputs;
4441       const char *constraint;
4442       const char **oconstraints;
4443       bool allows_mem, allows_reg, is_inout;
4444       noutputs = gimple_asm_noutputs (stmt);
4445       oconstraints = XALLOCAVEC (const char *, noutputs);
4446       if (visit_store || visit_addr)
4447 	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
4448 	  {
4449 	    tree link = gimple_asm_output_op (stmt, i);
4450 	    tree op = get_base_loadstore (TREE_VALUE (link));
4451 	    if (op && visit_store)
4452 	      ret |= visit_store (stmt, op, data);
4453 	    if (visit_addr)
4454 	      {
4455 		constraint = TREE_STRING_POINTER
4456 		    (TREE_VALUE (TREE_PURPOSE (link)));
4457 		oconstraints[i] = constraint;
4458 		parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
4459 					 &allows_reg, &is_inout);
4460 		if (op && !allows_reg && allows_mem)
4461 		  ret |= visit_addr (stmt, op, data);
4462 	      }
4463 	  }
4464       if (visit_load || visit_addr)
4465 	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
4466 	  {
4467 	    tree link = gimple_asm_input_op (stmt, i);
4468 	    tree op = TREE_VALUE (link);
4469 	    if (visit_addr
4470 		&& TREE_CODE (op) == ADDR_EXPR)
4471 	      ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
4472 	    else if (visit_load || visit_addr)
4473 	      {
4474 		op = get_base_loadstore (op);
4475 		if (op)
4476 		  {
4477 		    if (visit_load)
4478 		      ret |= visit_load (stmt, op, data);
4479 		    if (visit_addr)
4480 		      {
4481 			constraint = TREE_STRING_POINTER
4482 			    (TREE_VALUE (TREE_PURPOSE (link)));
4483 			parse_input_constraint (&constraint, 0, 0, noutputs,
4484 						0, oconstraints,
4485 						&allows_mem, &allows_reg);
4486 			if (!allows_reg && allows_mem)
4487 			  ret |= visit_addr (stmt, op, data);
4488 		      }
4489 		  }
4490 	      }
4491 	  }
4492     }
4493   else if (gimple_code (stmt) == GIMPLE_RETURN)
4494     {
4495       tree op = gimple_return_retval (stmt);
4496       if (op)
4497 	{
4498 	  if (visit_addr
4499 	      && TREE_CODE (op) == ADDR_EXPR)
4500 	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
4501 	  else if (visit_load)
4502 	    {
4503 	      op = get_base_loadstore (op);
4504 	      if (op)
4505 		ret |= visit_load (stmt, op, data);
4506 	    }
4507 	}
4508     }
4509   else if (visit_addr
4510 	   && gimple_code (stmt) == GIMPLE_PHI)
4511     {
4512       for (i = 0; i < gimple_phi_num_args (stmt); ++i)
4513 	{
4514 	  tree op = PHI_ARG_DEF (stmt, i);
4515 	  if (TREE_CODE (op) == ADDR_EXPR)
4516 	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
4517 	}
4518     }
4519 
4520   return ret;
4521 }
4522 
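/* Usage sketch (editorial; NOTE_LOAD is a made-up callback that
   counts the load bases visited in STMT):

     static bool
     note_load (gimple stmt ATTRIBUTE_UNUSED, tree base ATTRIBUTE_UNUSED,
		void *data)
     {
       unsigned *n = (unsigned *) data;
       (*n)++;
       return false;
     }

     unsigned nloads = 0;
     walk_stmt_load_store_addr_ops (stmt, &nloads, note_load, NULL, NULL);

   Returning false keeps the or'ed result false; see
   gimple_ior_addresses_taken below for an in-tree user that passes
   only a VISIT_ADDR callback.  */
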
4523 /* Like walk_stmt_load_store_addr_ops but with NULL visit_addr.  IPA-CP
4524    should make a faster clone for this case.  */
4525 
4526 bool
4527 walk_stmt_load_store_ops (gimple stmt, void *data,
4528 			  bool (*visit_load)(gimple, tree, void *),
4529 			  bool (*visit_store)(gimple, tree, void *))
4530 {
4531   return walk_stmt_load_store_addr_ops (stmt, data,
4532 					visit_load, visit_store, NULL);
4533 }
4534 
4535 /* Helper for gimple_ior_addresses_taken.  */
4536 
4537 static bool
4538 gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
4539 			      tree addr, void *data)
4540 {
4541   bitmap addresses_taken = (bitmap)data;
4542   while (handled_component_p (addr))
4543     addr = TREE_OPERAND (addr, 0);
4544   if (DECL_P (addr))
4545     {
4546       bitmap_set_bit (addresses_taken, DECL_UID (addr));
4547       return true;
4548     }
4549   return false;
4550 }
4551 
4552 /* Set the bit for the uid of all decls that have their address taken
4553    in STMT in the ADDRESSES_TAKEN bitmap.  Returns true if there
4554    were any in this stmt.  */
4555 
4556 bool
4557 gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
4558 {
4559   return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
4560 					gimple_ior_addresses_taken_1);
4561 }
4562 
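/* For example (editorial), for the statement 'p_1 = &x' the walker
   reports '&x' as an address-taken operand, any handled components
   (none here) are stripped, and the bit DECL_UID (x) is set in
   ADDRESSES_TAKEN, making the call return true.  */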
4563 
4564 /* Return a printable name for symbol DECL.  */
4565 
4566 const char *
4567 gimple_decl_printable_name (tree decl, int verbosity)
4568 {
4569   if (!DECL_NAME (decl))
4570     return NULL;
4571 
4572   if (DECL_ASSEMBLER_NAME_SET_P (decl))
4573     {
4574       const char *str, *mangled_str;
4575       int dmgl_opts = DMGL_NO_OPTS;
4576 
4577       if (verbosity >= 2)
4578 	{
4579 	  dmgl_opts = DMGL_VERBOSE
4580 		      | DMGL_ANSI
4581 		      | DMGL_GNU_V3
4582 		      | DMGL_RET_POSTFIX;
4583 	  if (TREE_CODE (decl) == FUNCTION_DECL)
4584 	    dmgl_opts |= DMGL_PARAMS;
4585 	}
4586 
4587       mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
4588       str = cplus_demangle_v3 (mangled_str, dmgl_opts);
4589       return (str) ? str : mangled_str;
4590     }
4591 
4592   return IDENTIFIER_POINTER (DECL_NAME (decl));
4593 }
4594 
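/* For instance (editorial; the exact demangler output is only
   indicative), for a C++ member function with assembler name
   '_ZN1N1fEi':

     verbosity < 2:   demangled without parameters, roughly "N::f"
     verbosity >= 2:  demangled with DMGL_PARAMS and friends, roughly
		      "N::f(int)" plus return-type information from
		      DMGL_RET_POSTFIX

   If demangling fails, the raw mangled string is returned; decls
   without an assembler name just yield their DECL_NAME.  */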
4595 
4596 /* Fold an OBJ_TYPE_REF expression to the address of a function.
4597    KNOWN_TYPE carries the true type of OBJ_TYPE_REF_OBJECT(REF).  Adapted
4598    from cp_fold_obj_type_ref, but it tolerates types with no binfo
4599    data.  */
4600 
4601 tree
4602 gimple_fold_obj_type_ref (tree ref, tree known_type)
4603 {
4604   HOST_WIDE_INT index;
4605   HOST_WIDE_INT i;
4606   tree v;
4607   tree fndecl;
4608 
4609   if (TYPE_BINFO (known_type) == NULL_TREE)
4610     return NULL_TREE;
4611 
4612   v = BINFO_VIRTUALS (TYPE_BINFO (known_type));
4613   index = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
4614   i = 0;
4615   while (i != index)
4616     {
4617       i += (TARGET_VTABLE_USES_DESCRIPTORS
4618 	    ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
4619       v = TREE_CHAIN (v);
4620     }
4621 
4622   fndecl = TREE_VALUE (v);
4623 
4624 #ifdef ENABLE_CHECKING
4625   gcc_assert (tree_int_cst_equal (OBJ_TYPE_REF_TOKEN (ref),
4626 				  DECL_VINDEX (fndecl)));
4627 #endif
4628 
4629   cgraph_node (fndecl)->local.vtable_method = true;
4630 
4631   return build_fold_addr_expr (fndecl);
4632 }
4633 
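/* For example (editorial), for a virtual call through an OBJ_TYPE_REF
   whose token is 2, with the object statically known to be of type D,
   the loop above steps two vtable slots (or two groups of
   TARGET_VTABLE_USES_DESCRIPTORS entries) into
   BINFO_VIRTUALS (TYPE_BINFO (D)) and returns the address of the
   FUNCTION_DECL found there; if D has no binfo data, NULL_TREE is
   returned and the call is left alone.  */
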
4634 #include "gt-gimple.h"
4635