1 /* Language-independent node constructors for parse phase of GNU compiler.
2    Copyright (C) 1987-2016 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /* This file contains the low level primitives for operating on tree nodes,
21    including allocation, list operations, interning of identifiers,
22    construction of data type nodes and statement nodes,
23    and construction of type conversion nodes.  It also contains
24    tables indexed by tree code that describe how to take apart
25    nodes of that code.
26 
27    It is intended to be language-independent but can occasionally
28    call language-dependent routines.  */
29 
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "params.h"
58 #include "langhooks-def.h"
59 #include "tree-diagnostic.h"
60 #include "except.h"
61 #include "builtins.h"
62 #include "print-tree.h"
63 #include "ipa-utils.h"
64 
65 /* Tree code classes.  */
66 
67 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
68 #define END_OF_BASE_TREE_CODES tcc_exceptional,
69 
70 const enum tree_code_class tree_code_type[] = {
71 #include "all-tree.def"
72 };
73 
74 #undef DEFTREECODE
75 #undef END_OF_BASE_TREE_CODES
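
/* Illustrative sketch (comment only, not part of the build): with the
   macros above, a tree.def entry such as

     DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2)

   contributes the single initializer "tcc_binary," here, so that
   tree_code_type[PLUS_EXPR] == tcc_binary.  The length and name tables
   below are generated from the same all-tree.def entries in the same way.  */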
76 
77 /* Table indexed by tree code giving number of expression
78    operands beyond the fixed part of the node structure.
79    Not used for types or decls.  */
80 
81 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
82 #define END_OF_BASE_TREE_CODES 0,
83 
84 const unsigned char tree_code_length[] = {
85 #include "all-tree.def"
86 };
87 
88 #undef DEFTREECODE
89 #undef END_OF_BASE_TREE_CODES
90 
91 /* Names of tree components.
92    Used for printing out the tree and error messages.  */
93 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
94 #define END_OF_BASE_TREE_CODES "@dummy",
95 
96 static const char *const tree_code_name[] = {
97 #include "all-tree.def"
98 };
99 
100 #undef DEFTREECODE
101 #undef END_OF_BASE_TREE_CODES
102 
103 /* Each tree code class has an associated string representation.
104    These must correspond to the tree_code_class entries.  */
105 
106 const char *const tree_code_class_strings[] =
107 {
108   "exceptional",
109   "constant",
110   "type",
111   "declaration",
112   "reference",
113   "comparison",
114   "unary",
115   "binary",
116   "statement",
117   "vl_exp",
118   "expression"
119 };
120 
121 /* obstack.[ch] explicitly declined to prototype this.  */
122 extern int _obstack_allocated_p (struct obstack *h, void *obj);
123 
124 /* Statistics-gathering stuff.  */
125 
126 static int tree_code_counts[MAX_TREE_CODES];
127 int tree_node_counts[(int) all_kinds];
128 int tree_node_sizes[(int) all_kinds];
129 
130 /* Keep in sync with tree.h:enum tree_node_kind.  */
131 static const char * const tree_node_kind_names[] = {
132   "decls",
133   "types",
134   "blocks",
135   "stmts",
136   "refs",
137   "exprs",
138   "constants",
139   "identifiers",
140   "vecs",
141   "binfos",
142   "ssa names",
143   "constructors",
144   "random kinds",
145   "lang_decl kinds",
146   "lang_type kinds",
147   "omp clauses",
148 };
149 
150 /* Unique id for next decl created.  */
151 static GTY(()) int next_decl_uid;
152 /* Unique id for next type created.  */
153 static GTY(()) int next_type_uid = 1;
154 /* Unique id for next debug decl created.  Use negative numbers,
155    to catch erroneous uses.  */
156 static GTY(()) int next_debug_decl_uid;
157 
158 /* Since we cannot rehash a type after it is in the table, we have to
159    keep the hash code.  */
160 
161 struct GTY((for_user)) type_hash {
162   unsigned long hash;
163   tree type;
164 };
165 
166 /* Initial size of the hash table (rounded to next prime).  */
167 #define TYPE_HASH_INITIAL_SIZE 1000
168 
169 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
170 {
171   static hashval_t hash (type_hash *t) { return t->hash; }
172   static bool equal (type_hash *a, type_hash *b);
173 
174   static int
175   keep_cache_entry (type_hash *&t)
176   {
177     return ggc_marked_p (t->type);
178   }
179 };
180 
181 /* Now here is the hash table.  When recording a type, it is added to
182    the slot whose index is the hash code.  Note that the hash table is
183    used for several kinds of types (function types, array types and
184    array index range types, for now).  While all these live in the
185    same table, they are completely independent, and the hash code is
186    computed differently for each of these.  */
187 
188 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
189 
190 /* Hash table and temporary node for larger integer const values.  */
191 static GTY (()) tree int_cst_node;
192 
193 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
194 {
195   static hashval_t hash (tree t);
196   static bool equal (tree x, tree y);
197 };
198 
199 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
200 
201 /* Hash table for optimization flags and target option flags.  Use the same
202    hash table for both sets of options.  Nodes for building the current
203    optimization and target option nodes.  The assumption is most of the time
204    the options created will already be in the hash table, so we avoid
205    allocating and freeing up a node repeatedly.  */
206 static GTY (()) tree cl_optimization_node;
207 static GTY (()) tree cl_target_option_node;
208 
209 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
210 {
211   static hashval_t hash (tree t);
212   static bool equal (tree x, tree y);
213 };
214 
215 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
216 
217 /* General tree->tree mapping  structure for use in hash tables.  */
218 
219 
220 static GTY ((cache))
221      hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
222 
223 static GTY ((cache))
224      hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
225 
226 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
227 {
228   static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
229 
230   static bool
231   equal (tree_vec_map *a, tree_vec_map *b)
232   {
233     return a->base.from == b->base.from;
234   }
235 
236   static int
237   keep_cache_entry (tree_vec_map *&m)
238   {
239     return ggc_marked_p (m->base.from);
240   }
241 };
242 
243 static GTY ((cache))
244      hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
245 
246 static void set_type_quals (tree, int);
247 static void print_type_hash_statistics (void);
248 static void print_debug_expr_statistics (void);
249 static void print_value_expr_statistics (void);
250 static void type_hash_list (const_tree, inchash::hash &);
251 static void attribute_hash_list (const_tree, inchash::hash &);
252 
253 tree global_trees[TI_MAX];
254 tree integer_types[itk_none];
255 
256 bool int_n_enabled_p[NUM_INT_N_ENTS];
257 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
258 
259 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
260 
261 /* Number of operands for each OpenMP clause.  */
262 unsigned const char omp_clause_num_ops[] =
263 {
264   0, /* OMP_CLAUSE_ERROR  */
265   1, /* OMP_CLAUSE_PRIVATE  */
266   1, /* OMP_CLAUSE_SHARED  */
267   1, /* OMP_CLAUSE_FIRSTPRIVATE  */
268   2, /* OMP_CLAUSE_LASTPRIVATE  */
269   5, /* OMP_CLAUSE_REDUCTION  */
270   1, /* OMP_CLAUSE_COPYIN  */
271   1, /* OMP_CLAUSE_COPYPRIVATE  */
272   3, /* OMP_CLAUSE_LINEAR  */
273   2, /* OMP_CLAUSE_ALIGNED  */
274   1, /* OMP_CLAUSE_DEPEND  */
275   1, /* OMP_CLAUSE_UNIFORM  */
276   1, /* OMP_CLAUSE_TO_DECLARE  */
277   1, /* OMP_CLAUSE_LINK  */
278   2, /* OMP_CLAUSE_FROM  */
279   2, /* OMP_CLAUSE_TO  */
280   2, /* OMP_CLAUSE_MAP  */
281   1, /* OMP_CLAUSE_USE_DEVICE_PTR  */
282   1, /* OMP_CLAUSE_IS_DEVICE_PTR  */
283   2, /* OMP_CLAUSE__CACHE_  */
284   1, /* OMP_CLAUSE_DEVICE_RESIDENT  */
285   2, /* OMP_CLAUSE_GANG  */
286   1, /* OMP_CLAUSE_ASYNC  */
287   1, /* OMP_CLAUSE_WAIT  */
288   0, /* OMP_CLAUSE_AUTO  */
289   0, /* OMP_CLAUSE_SEQ  */
290   1, /* OMP_CLAUSE__LOOPTEMP_  */
291   1, /* OMP_CLAUSE_IF  */
292   1, /* OMP_CLAUSE_NUM_THREADS  */
293   1, /* OMP_CLAUSE_SCHEDULE  */
294   0, /* OMP_CLAUSE_NOWAIT  */
295   1, /* OMP_CLAUSE_ORDERED  */
296   0, /* OMP_CLAUSE_DEFAULT  */
297   3, /* OMP_CLAUSE_COLLAPSE  */
298   0, /* OMP_CLAUSE_UNTIED   */
299   1, /* OMP_CLAUSE_FINAL  */
300   0, /* OMP_CLAUSE_MERGEABLE  */
301   1, /* OMP_CLAUSE_DEVICE  */
302   1, /* OMP_CLAUSE_DIST_SCHEDULE  */
303   0, /* OMP_CLAUSE_INBRANCH  */
304   0, /* OMP_CLAUSE_NOTINBRANCH  */
305   1, /* OMP_CLAUSE_NUM_TEAMS  */
306   1, /* OMP_CLAUSE_THREAD_LIMIT  */
307   0, /* OMP_CLAUSE_PROC_BIND  */
308   1, /* OMP_CLAUSE_SAFELEN  */
309   1, /* OMP_CLAUSE_SIMDLEN  */
310   0, /* OMP_CLAUSE_FOR  */
311   0, /* OMP_CLAUSE_PARALLEL  */
312   0, /* OMP_CLAUSE_SECTIONS  */
313   0, /* OMP_CLAUSE_TASKGROUP  */
314   1, /* OMP_CLAUSE_PRIORITY  */
315   1, /* OMP_CLAUSE_GRAINSIZE  */
316   1, /* OMP_CLAUSE_NUM_TASKS  */
317   0, /* OMP_CLAUSE_NOGROUP  */
318   0, /* OMP_CLAUSE_THREADS  */
319   0, /* OMP_CLAUSE_SIMD  */
320   1, /* OMP_CLAUSE_HINT  */
321   0, /* OMP_CLAUSE_DEFAULTMAP  */
322   1, /* OMP_CLAUSE__SIMDUID_  */
323   1, /* OMP_CLAUSE__CILK_FOR_COUNT_  */
324   0, /* OMP_CLAUSE_INDEPENDENT  */
325   1, /* OMP_CLAUSE_WORKER  */
326   1, /* OMP_CLAUSE_VECTOR  */
327   1, /* OMP_CLAUSE_NUM_GANGS  */
328   1, /* OMP_CLAUSE_NUM_WORKERS  */
329   1, /* OMP_CLAUSE_VECTOR_LENGTH  */
330   1, /* OMP_CLAUSE_TILE  */
331   2, /* OMP_CLAUSE__GRIDDIM_  */
332 };
333 
334 const char * const omp_clause_code_name[] =
335 {
336   "error_clause",
337   "private",
338   "shared",
339   "firstprivate",
340   "lastprivate",
341   "reduction",
342   "copyin",
343   "copyprivate",
344   "linear",
345   "aligned",
346   "depend",
347   "uniform",
348   "to",
349   "link",
350   "from",
351   "to",
352   "map",
353   "use_device_ptr",
354   "is_device_ptr",
355   "_cache_",
356   "device_resident",
357   "gang",
358   "async",
359   "wait",
360   "auto",
361   "seq",
362   "_looptemp_",
363   "if",
364   "num_threads",
365   "schedule",
366   "nowait",
367   "ordered",
368   "default",
369   "collapse",
370   "untied",
371   "final",
372   "mergeable",
373   "device",
374   "dist_schedule",
375   "inbranch",
376   "notinbranch",
377   "num_teams",
378   "thread_limit",
379   "proc_bind",
380   "safelen",
381   "simdlen",
382   "for",
383   "parallel",
384   "sections",
385   "taskgroup",
386   "priority",
387   "grainsize",
388   "num_tasks",
389   "nogroup",
390   "threads",
391   "simd",
392   "hint",
393   "defaultmap",
394   "_simduid_",
395   "_Cilk_for_count_",
396   "independent",
397   "worker",
398   "vector",
399   "num_gangs",
400   "num_workers",
401   "vector_length",
402   "tile",
403   "_griddim_"
404 };
405 
406 
407 /* Return the tree node structure used by tree code CODE.  */
408 
409 static inline enum tree_node_structure_enum
410 tree_node_structure_for_code (enum tree_code code)
411 {
412   switch (TREE_CODE_CLASS (code))
413     {
414     case tcc_declaration:
415       {
416 	switch (code)
417 	  {
418 	  case FIELD_DECL:
419 	    return TS_FIELD_DECL;
420 	  case PARM_DECL:
421 	    return TS_PARM_DECL;
422 	  case VAR_DECL:
423 	    return TS_VAR_DECL;
424 	  case LABEL_DECL:
425 	    return TS_LABEL_DECL;
426 	  case RESULT_DECL:
427 	    return TS_RESULT_DECL;
428 	  case DEBUG_EXPR_DECL:
429 	    return TS_DECL_WRTL;
430 	  case CONST_DECL:
431 	    return TS_CONST_DECL;
432 	  case TYPE_DECL:
433 	    return TS_TYPE_DECL;
434 	  case FUNCTION_DECL:
435 	    return TS_FUNCTION_DECL;
436 	  case TRANSLATION_UNIT_DECL:
437 	    return TS_TRANSLATION_UNIT_DECL;
438 	  default:
439 	    return TS_DECL_NON_COMMON;
440 	  }
441       }
442     case tcc_type:
443       return TS_TYPE_NON_COMMON;
444     case tcc_reference:
445     case tcc_comparison:
446     case tcc_unary:
447     case tcc_binary:
448     case tcc_expression:
449     case tcc_statement:
450     case tcc_vl_exp:
451       return TS_EXP;
452     default:  /* tcc_constant and tcc_exceptional */
453       break;
454     }
455   switch (code)
456     {
457       /* tcc_constant cases.  */
458     case VOID_CST:		return TS_TYPED;
459     case INTEGER_CST:		return TS_INT_CST;
460     case REAL_CST:		return TS_REAL_CST;
461     case FIXED_CST:		return TS_FIXED_CST;
462     case COMPLEX_CST:		return TS_COMPLEX;
463     case VECTOR_CST:		return TS_VECTOR;
464     case STRING_CST:		return TS_STRING;
465       /* tcc_exceptional cases.  */
466     case ERROR_MARK:		return TS_COMMON;
467     case IDENTIFIER_NODE:	return TS_IDENTIFIER;
468     case TREE_LIST:		return TS_LIST;
469     case TREE_VEC:		return TS_VEC;
470     case SSA_NAME:		return TS_SSA_NAME;
471     case PLACEHOLDER_EXPR:	return TS_COMMON;
472     case STATEMENT_LIST:	return TS_STATEMENT_LIST;
473     case BLOCK:			return TS_BLOCK;
474     case CONSTRUCTOR:		return TS_CONSTRUCTOR;
475     case TREE_BINFO:		return TS_BINFO;
476     case OMP_CLAUSE:		return TS_OMP_CLAUSE;
477     case OPTIMIZATION_NODE:	return TS_OPTIMIZATION;
478     case TARGET_OPTION_NODE:	return TS_TARGET_OPTION;
479 
480     default:
481       gcc_unreachable ();
482     }
483 }
484 
485 
486 /* Initialize tree_contains_struct to describe the hierarchy of tree
487    nodes.  */
488 
489 static void
490 initialize_tree_contains_struct (void)
491 {
492   unsigned i;
493 
494   for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
495     {
496       enum tree_code code;
497       enum tree_node_structure_enum ts_code;
498 
499       code = (enum tree_code) i;
500       ts_code = tree_node_structure_for_code (code);
501 
502       /* Mark the TS structure itself.  */
503       tree_contains_struct[code][ts_code] = 1;
504 
505       /* Mark all the structures that TS is derived from.  */
506       switch (ts_code)
507 	{
508 	case TS_TYPED:
509 	case TS_BLOCK:
510 	  MARK_TS_BASE (code);
511 	  break;
512 
513 	case TS_COMMON:
514 	case TS_INT_CST:
515 	case TS_REAL_CST:
516 	case TS_FIXED_CST:
517 	case TS_VECTOR:
518 	case TS_STRING:
519 	case TS_COMPLEX:
520 	case TS_SSA_NAME:
521 	case TS_CONSTRUCTOR:
522 	case TS_EXP:
523 	case TS_STATEMENT_LIST:
524 	  MARK_TS_TYPED (code);
525 	  break;
526 
527 	case TS_IDENTIFIER:
528 	case TS_DECL_MINIMAL:
529 	case TS_TYPE_COMMON:
530 	case TS_LIST:
531 	case TS_VEC:
532 	case TS_BINFO:
533 	case TS_OMP_CLAUSE:
534 	case TS_OPTIMIZATION:
535 	case TS_TARGET_OPTION:
536 	  MARK_TS_COMMON (code);
537 	  break;
538 
539 	case TS_TYPE_WITH_LANG_SPECIFIC:
540 	  MARK_TS_TYPE_COMMON (code);
541 	  break;
542 
543 	case TS_TYPE_NON_COMMON:
544 	  MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
545 	  break;
546 
547 	case TS_DECL_COMMON:
548 	  MARK_TS_DECL_MINIMAL (code);
549 	  break;
550 
551 	case TS_DECL_WRTL:
552 	case TS_CONST_DECL:
553 	  MARK_TS_DECL_COMMON (code);
554 	  break;
555 
556 	case TS_DECL_NON_COMMON:
557 	  MARK_TS_DECL_WITH_VIS (code);
558 	  break;
559 
560 	case TS_DECL_WITH_VIS:
561 	case TS_PARM_DECL:
562 	case TS_LABEL_DECL:
563 	case TS_RESULT_DECL:
564 	  MARK_TS_DECL_WRTL (code);
565 	  break;
566 
567 	case TS_FIELD_DECL:
568 	  MARK_TS_DECL_COMMON (code);
569 	  break;
570 
571 	case TS_VAR_DECL:
572 	  MARK_TS_DECL_WITH_VIS (code);
573 	  break;
574 
575 	case TS_TYPE_DECL:
576 	case TS_FUNCTION_DECL:
577 	  MARK_TS_DECL_NON_COMMON (code);
578 	  break;
579 
580 	case TS_TRANSLATION_UNIT_DECL:
581 	  MARK_TS_DECL_COMMON (code);
582 	  break;
583 
584 	default:
585 	  gcc_unreachable ();
586 	}
587     }
588 
589   /* Basic consistency checks for attributes used in fold.  */
590   gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
591   gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
592   gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
593   gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
594   gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
595   gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
596   gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
597   gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
598   gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
599   gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
600   gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
601   gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
602   gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
603   gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
604   gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
605   gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
606   gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
607   gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
608   gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
609   gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
610   gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
611   gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
612   gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
613   gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
614   gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
615   gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
616   gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
617   gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
618   gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
619   gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
620   gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
621   gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
622   gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
623   gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
624   gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
625   gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
626   gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
627   gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
628   gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
629   gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
630 }
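
/* Illustrative consequence of the hierarchy recorded above (comment
   only, not compiled):

     CODE_CONTAINS_STRUCT (VAR_DECL, TS_DECL_WITH_VIS)    -> nonzero
     CODE_CONTAINS_STRUCT (FIELD_DECL, TS_DECL_WITH_VIS)  -> zero

   so accessors guarded by TS_DECL_WITH_VIS are valid on a VAR_DECL but
   not on a FIELD_DECL, matching the assertions just above.  */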
631 
632 
633 /* Init tree.c.  */
634 
635 void
636 init_ttree (void)
637 {
638   /* Initialize the hash table of types.  */
639   type_hash_table
640     = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
641 
642   debug_expr_for_decl
643     = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
644 
645   value_expr_for_decl
646     = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
647 
648   int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
649 
650   int_cst_node = make_int_cst (1, 1);
651 
652   cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
653 
654   cl_optimization_node = make_node (OPTIMIZATION_NODE);
655   cl_target_option_node = make_node (TARGET_OPTION_NODE);
656 
657   /* Initialize the tree_contains_struct array.  */
658   initialize_tree_contains_struct ();
659   lang_hooks.init_ts ();
660 }
661 
662 
663 /* The name of the object as the assembler will see it (but before any
664    translations made by ASM_OUTPUT_LABELREF).  Often this is the same
665    as DECL_NAME.  It is an IDENTIFIER_NODE.  */
666 tree
667 decl_assembler_name (tree decl)
668 {
669   if (!DECL_ASSEMBLER_NAME_SET_P (decl))
670     lang_hooks.set_decl_assembler_name (decl);
671   return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
672 }
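
/* Usage sketch (comment only): a front end that needs the mangled name
   of some FUNCTION_DECL "fndecl" (a hypothetical variable) could write

     tree asmname = decl_assembler_name (fndecl);
     const char *str = IDENTIFIER_POINTER (asmname);

   The first call invokes lang_hooks.set_decl_assembler_name to compute
   the name; later calls simply return the cached IDENTIFIER_NODE.  */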
673 
674 /* When the target supports COMDAT groups, this indicates which group the
675    DECL is associated with.  This can be either an IDENTIFIER_NODE or a
676    decl, in which case its DECL_ASSEMBLER_NAME identifies the group.  */
677 tree
678 decl_comdat_group (const_tree node)
679 {
680   struct symtab_node *snode = symtab_node::get (node);
681   if (!snode)
682     return NULL;
683   return snode->get_comdat_group ();
684 }
685 
686 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE.  */
687 tree
688 decl_comdat_group_id (const_tree node)
689 {
690   struct symtab_node *snode = symtab_node::get (node);
691   if (!snode)
692     return NULL;
693   return snode->get_comdat_group_id ();
694 }
695 
696 /* When the target supports named sections, return the section name of
697    NODE as a string, or NULL if it is in no section.  */
698 const char *
699 decl_section_name (const_tree node)
700 {
701   struct symtab_node *snode = symtab_node::get (node);
702   if (!snode)
703     return NULL;
704   return snode->get_section ();
705 }
706 
707 /* Set the section name of NODE to the string VALUE, or clear the
708    section association when VALUE is NULL.  */
709 void
710 set_decl_section_name (tree node, const char *value)
711 {
712   struct symtab_node *snode;
713 
714   if (value == NULL)
715     {
716       snode = symtab_node::get (node);
717       if (!snode)
718 	return;
719     }
720   else if (TREE_CODE (node) == VAR_DECL)
721     snode = varpool_node::get_create (node);
722   else
723     snode = cgraph_node::get_create (node);
724   snode->set_section (value);
725 }
726 
727 /* Return TLS model of a variable NODE.  */
728 enum tls_model
729 decl_tls_model (const_tree node)
730 {
731   struct varpool_node *snode = varpool_node::get (node);
732   if (!snode)
733     return TLS_MODEL_NONE;
734   return snode->tls_model;
735 }
736 
737 /* Set TLS model of variable NODE to MODEL.  */
738 void
739 set_decl_tls_model (tree node, enum tls_model model)
740 {
741   struct varpool_node *vnode;
742 
743   if (model == TLS_MODEL_NONE)
744     {
745       vnode = varpool_node::get (node);
746       if (!vnode)
747 	return;
748     }
749   else
750     vnode = varpool_node::get_create (node);
751   vnode->tls_model = model;
752 }
753 
754 /* Compute the number of bytes occupied by a tree with code CODE.
755    This function cannot be used for nodes that have variable sizes,
756    including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR.  */
757 size_t
758 tree_code_size (enum tree_code code)
759 {
760   switch (TREE_CODE_CLASS (code))
761     {
762     case tcc_declaration:  /* A decl node */
763       {
764 	switch (code)
765 	  {
766 	  case FIELD_DECL:
767 	    return sizeof (struct tree_field_decl);
768 	  case PARM_DECL:
769 	    return sizeof (struct tree_parm_decl);
770 	  case VAR_DECL:
771 	    return sizeof (struct tree_var_decl);
772 	  case LABEL_DECL:
773 	    return sizeof (struct tree_label_decl);
774 	  case RESULT_DECL:
775 	    return sizeof (struct tree_result_decl);
776 	  case CONST_DECL:
777 	    return sizeof (struct tree_const_decl);
778 	  case TYPE_DECL:
779 	    return sizeof (struct tree_type_decl);
780 	  case FUNCTION_DECL:
781 	    return sizeof (struct tree_function_decl);
782 	  case DEBUG_EXPR_DECL:
783 	    return sizeof (struct tree_decl_with_rtl);
784 	  case TRANSLATION_UNIT_DECL:
785 	    return sizeof (struct tree_translation_unit_decl);
786 	  case NAMESPACE_DECL:
787 	  case IMPORTED_DECL:
788 	  case NAMELIST_DECL:
789 	    return sizeof (struct tree_decl_non_common);
790 	  default:
791 	    return lang_hooks.tree_size (code);
792 	  }
793       }
794 
795     case tcc_type:  /* a type node */
796       return sizeof (struct tree_type_non_common);
797 
798     case tcc_reference:   /* a reference */
799     case tcc_expression:  /* an expression */
800     case tcc_statement:   /* an expression with side effects */
801     case tcc_comparison:  /* a comparison expression */
802     case tcc_unary:       /* a unary arithmetic expression */
803     case tcc_binary:      /* a binary arithmetic expression */
804       return (sizeof (struct tree_exp)
805 	      + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
806 
807     case tcc_constant:  /* a constant */
808       switch (code)
809 	{
810 	case VOID_CST:		return sizeof (struct tree_typed);
811 	case INTEGER_CST:	gcc_unreachable ();
812 	case REAL_CST:		return sizeof (struct tree_real_cst);
813 	case FIXED_CST:		return sizeof (struct tree_fixed_cst);
814 	case COMPLEX_CST:	return sizeof (struct tree_complex);
815 	case VECTOR_CST:	return sizeof (struct tree_vector);
816 	case STRING_CST:	gcc_unreachable ();
817 	default:
818 	  return lang_hooks.tree_size (code);
819 	}
820 
821     case tcc_exceptional:  /* something random, like an identifier.  */
822       switch (code)
823 	{
824 	case IDENTIFIER_NODE:	return lang_hooks.identifier_size;
825 	case TREE_LIST:		return sizeof (struct tree_list);
826 
827 	case ERROR_MARK:
828 	case PLACEHOLDER_EXPR:	return sizeof (struct tree_common);
829 
830 	case TREE_VEC:
831 	case OMP_CLAUSE:	gcc_unreachable ();
832 
833 	case SSA_NAME:		return sizeof (struct tree_ssa_name);
834 
835 	case STATEMENT_LIST:	return sizeof (struct tree_statement_list);
836 	case BLOCK:		return sizeof (struct tree_block);
837 	case CONSTRUCTOR:	return sizeof (struct tree_constructor);
838 	case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
839 	case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
840 
841 	default:
842 	  return lang_hooks.tree_size (code);
843 	}
844 
845     default:
846       gcc_unreachable ();
847     }
848 }
849 
850 /* Compute the number of bytes occupied by NODE.  This routine only
851    looks at TREE_CODE, except for those nodes that have variable sizes.  */
852 size_t
853 tree_size (const_tree node)
854 {
855   const enum tree_code code = TREE_CODE (node);
856   switch (code)
857     {
858     case INTEGER_CST:
859       return (sizeof (struct tree_int_cst)
860 	      + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
861 
862     case TREE_BINFO:
863       return (offsetof (struct tree_binfo, base_binfos)
864 	      + vec<tree, va_gc>
865 		  ::embedded_size (BINFO_N_BASE_BINFOS (node)));
866 
867     case TREE_VEC:
868       return (sizeof (struct tree_vec)
869 	      + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
870 
871     case VECTOR_CST:
872       return (sizeof (struct tree_vector)
873 	      + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
874 
875     case STRING_CST:
876       return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
877 
878     case OMP_CLAUSE:
879       return (sizeof (struct tree_omp_clause)
880 	      + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
881 	        * sizeof (tree));
882 
883     default:
884       if (TREE_CODE_CLASS (code) == tcc_vl_exp)
885 	return (sizeof (struct tree_exp)
886 		+ (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
887       else
888 	return tree_code_size (code);
889     }
890 }
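
/* Illustrative sketch (comment only): for a fixed-size node such as a
   binary expression the two size functions agree,

     tree_code_size (PLUS_EXPR)
       == sizeof (struct tree_exp) + (2 - 1) * sizeof (tree)

   since TREE_CODE_LENGTH (PLUS_EXPR) is 2, whereas for variable-sized
   nodes (TREE_VEC, INTEGER_CST, STRING_CST, CALL_EXPR, OMP_CLAUSE) only
   tree_size on a concrete node is meaningful.  */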
891 
892 /* Record interesting allocation statistics for a tree node with CODE
893    and LENGTH.  */
894 
895 static void
896 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
897 				   size_t length ATTRIBUTE_UNUSED)
898 {
899   enum tree_code_class type = TREE_CODE_CLASS (code);
900   tree_node_kind kind;
901 
902   if (!GATHER_STATISTICS)
903     return;
904 
905   switch (type)
906     {
907     case tcc_declaration:  /* A decl node */
908       kind = d_kind;
909       break;
910 
911     case tcc_type:  /* a type node */
912       kind = t_kind;
913       break;
914 
915     case tcc_statement:  /* an expression with side effects */
916       kind = s_kind;
917       break;
918 
919     case tcc_reference:  /* a reference */
920       kind = r_kind;
921       break;
922 
923     case tcc_expression:  /* an expression */
924     case tcc_comparison:  /* a comparison expression */
925     case tcc_unary:  /* a unary arithmetic expression */
926     case tcc_binary:  /* a binary arithmetic expression */
927       kind = e_kind;
928       break;
929 
930     case tcc_constant:  /* a constant */
931       kind = c_kind;
932       break;
933 
934     case tcc_exceptional:  /* something random, like an identifier.  */
935       switch (code)
936 	{
937 	case IDENTIFIER_NODE:
938 	  kind = id_kind;
939 	  break;
940 
941 	case TREE_VEC:
942 	  kind = vec_kind;
943 	  break;
944 
945 	case TREE_BINFO:
946 	  kind = binfo_kind;
947 	  break;
948 
949 	case SSA_NAME:
950 	  kind = ssa_name_kind;
951 	  break;
952 
953 	case BLOCK:
954 	  kind = b_kind;
955 	  break;
956 
957 	case CONSTRUCTOR:
958 	  kind = constr_kind;
959 	  break;
960 
961 	case OMP_CLAUSE:
962 	  kind = omp_clause_kind;
963 	  break;
964 
965 	default:
966 	  kind = x_kind;
967 	  break;
968 	}
969       break;
970 
971     case tcc_vl_exp:
972       kind = e_kind;
973       break;
974 
975     default:
976       gcc_unreachable ();
977     }
978 
979   tree_code_counts[(int) code]++;
980   tree_node_counts[(int) kind]++;
981   tree_node_sizes[(int) kind] += length;
982 }
983 
984 /* Allocate and return a new UID from the DECL_UID namespace.  */
985 
986 int
987 allocate_decl_uid (void)
988 {
989   return next_decl_uid++;
990 }
991 
992 /* Return a newly allocated node of code CODE.  For decl and type
993    nodes, some other fields are initialized.  The rest of the node is
994    initialized to zero.  This function cannot be used for TREE_VEC,
995    INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
996    tree_code_size.
997 
998    Achoo!  I got a code in the node.  */
999 
1000 tree
1001 make_node_stat (enum tree_code code MEM_STAT_DECL)
1002 {
1003   tree t;
1004   enum tree_code_class type = TREE_CODE_CLASS (code);
1005   size_t length = tree_code_size (code);
1006 
1007   record_node_allocation_statistics (code, length);
1008 
1009   t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1010   TREE_SET_CODE (t, code);
1011 
1012   switch (type)
1013     {
1014     case tcc_statement:
1015       TREE_SIDE_EFFECTS (t) = 1;
1016       break;
1017 
1018     case tcc_declaration:
1019       if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1020 	{
1021 	  if (code == FUNCTION_DECL)
1022 	    {
1023 	      DECL_ALIGN (t) = FUNCTION_BOUNDARY;
1024 	      DECL_MODE (t) = FUNCTION_MODE;
1025 	    }
1026 	  else
1027 	    DECL_ALIGN (t) = 1;
1028 	}
1029       DECL_SOURCE_LOCATION (t) = input_location;
1030       if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1031 	DECL_UID (t) = --next_debug_decl_uid;
1032       else
1033 	{
1034 	  DECL_UID (t) = allocate_decl_uid ();
1035 	  SET_DECL_PT_UID (t, -1);
1036 	}
1037       if (TREE_CODE (t) == LABEL_DECL)
1038 	LABEL_DECL_UID (t) = -1;
1039 
1040       break;
1041 
1042     case tcc_type:
1043       TYPE_UID (t) = next_type_uid++;
1044       TYPE_ALIGN (t) = BITS_PER_UNIT;
1045       TYPE_USER_ALIGN (t) = 0;
1046       TYPE_MAIN_VARIANT (t) = t;
1047       TYPE_CANONICAL (t) = t;
1048 
1049       /* Default to no attributes for type, but let target change that.  */
1050       TYPE_ATTRIBUTES (t) = NULL_TREE;
1051       targetm.set_default_type_attributes (t);
1052 
1053       /* We have not yet computed the alias set for this type.  */
1054       TYPE_ALIAS_SET (t) = -1;
1055       break;
1056 
1057     case tcc_constant:
1058       TREE_CONSTANT (t) = 1;
1059       break;
1060 
1061     case tcc_expression:
1062       switch (code)
1063 	{
1064 	case INIT_EXPR:
1065 	case MODIFY_EXPR:
1066 	case VA_ARG_EXPR:
1067 	case PREDECREMENT_EXPR:
1068 	case PREINCREMENT_EXPR:
1069 	case POSTDECREMENT_EXPR:
1070 	case POSTINCREMENT_EXPR:
1071 	  /* All of these have side-effects, no matter what their
1072 	     operands are.  */
1073 	  TREE_SIDE_EFFECTS (t) = 1;
1074 	  break;
1075 
1076 	default:
1077 	  break;
1078 	}
1079       break;
1080 
1081     case tcc_exceptional:
1082       switch (code)
1083         {
1084 	case TARGET_OPTION_NODE:
1085 	  TREE_TARGET_OPTION(t)
1086 			    = ggc_cleared_alloc<struct cl_target_option> ();
1087 	  break;
1088 
1089 	case OPTIMIZATION_NODE:
1090 	  TREE_OPTIMIZATION (t)
1091 			    = ggc_cleared_alloc<struct cl_optimization> ();
1092 	  break;
1093 
1094 	default:
1095 	  break;
1096 	}
1097       break;
1098 
1099     default:
1100       /* Other classes need no special treatment.  */
1101       break;
1102     }
1103 
1104   return t;
1105 }
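
/* Usage sketch (comment only, using the make_node wrapper from tree.h):

     tree type = make_node (INTEGER_TYPE);   // zeroed; TYPE_UID assigned,
                                             // TYPE_MAIN_VARIANT == type
     tree decl = make_node (TYPE_DECL);      // DECL_UID and source
                                             // location filled in here

   The class-specific initialization in the switch above is all the
   caller gets; the remaining fields must be filled in explicitly.  */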
1106 
1107 /* Free tree node.  */
1108 
1109 void
1110 free_node (tree node)
1111 {
1112   enum tree_code code = TREE_CODE (node);
1113   if (GATHER_STATISTICS)
1114     {
1115       tree_code_counts[(int) TREE_CODE (node)]--;
1116       tree_node_counts[(int) t_kind]--;
1117       tree_node_sizes[(int) t_kind] -= tree_size (node);
1118     }
1119   if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1120     vec_free (CONSTRUCTOR_ELTS (node));
1121   else if (code == BLOCK)
1122     vec_free (BLOCK_NONLOCALIZED_VARS (node));
1123   else if (code == TREE_BINFO)
1124     vec_free (BINFO_BASE_ACCESSES (node));
1125   ggc_free (node);
1126 }
1127 
1128 /* Return a new node with the same contents as NODE except that its
1129    TREE_CHAIN, if it has one, is zero and it has a fresh uid.  */
1130 
1131 tree
1132 copy_node_stat (tree node MEM_STAT_DECL)
1133 {
1134   tree t;
1135   enum tree_code code = TREE_CODE (node);
1136   size_t length;
1137 
1138   gcc_assert (code != STATEMENT_LIST);
1139 
1140   length = tree_size (node);
1141   record_node_allocation_statistics (code, length);
1142   t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1143   memcpy (t, node, length);
1144 
1145   if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1146     TREE_CHAIN (t) = 0;
1147   TREE_ASM_WRITTEN (t) = 0;
1148   TREE_VISITED (t) = 0;
1149 
1150   if (TREE_CODE_CLASS (code) == tcc_declaration)
1151     {
1152       if (code == DEBUG_EXPR_DECL)
1153 	DECL_UID (t) = --next_debug_decl_uid;
1154       else
1155 	{
1156 	  DECL_UID (t) = allocate_decl_uid ();
1157 	  if (DECL_PT_UID_SET_P (node))
1158 	    SET_DECL_PT_UID (t, DECL_PT_UID (node));
1159 	}
1160       if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1161 	  && DECL_HAS_VALUE_EXPR_P (node))
1162 	{
1163 	  SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1164 	  DECL_HAS_VALUE_EXPR_P (t) = 1;
1165 	}
1166       /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
1167       if (TREE_CODE (node) == VAR_DECL)
1168 	{
1169 	  DECL_HAS_DEBUG_EXPR_P (t) = 0;
1170 	  t->decl_with_vis.symtab_node = NULL;
1171 	}
1172       if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1173 	{
1174 	  SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1175 	  DECL_HAS_INIT_PRIORITY_P (t) = 1;
1176 	}
1177       if (TREE_CODE (node) == FUNCTION_DECL)
1178 	{
1179 	  DECL_STRUCT_FUNCTION (t) = NULL;
1180 	  t->decl_with_vis.symtab_node = NULL;
1181 	}
1182     }
1183   else if (TREE_CODE_CLASS (code) == tcc_type)
1184     {
1185       TYPE_UID (t) = next_type_uid++;
1186       /* The following is so that the debug code for
1187 	 the copy is different from the original type.
1188 	 The two statements usually duplicate each other
1189 	 (because they clear fields of the same union),
1190 	 but the optimizer should catch that.  */
1191       TYPE_SYMTAB_POINTER (t) = 0;
1192       TYPE_SYMTAB_ADDRESS (t) = 0;
1193 
1194       /* Do not copy the values cache.  */
1195       if (TYPE_CACHED_VALUES_P (t))
1196 	{
1197 	  TYPE_CACHED_VALUES_P (t) = 0;
1198 	  TYPE_CACHED_VALUES (t) = NULL_TREE;
1199 	}
1200     }
1201     else if (code == TARGET_OPTION_NODE)
1202       {
1203 	TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1204 	memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1205 		sizeof (struct cl_target_option));
1206       }
1207     else if (code == OPTIMIZATION_NODE)
1208       {
1209 	TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1210 	memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1211 		sizeof (struct cl_optimization));
1212       }
1213 
1214   return t;
1215 }
1216 
1217 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1218    For example, this can copy a list made of TREE_LIST nodes.  */
1219 
1220 tree
1221 copy_list (tree list)
1222 {
1223   tree head;
1224   tree prev, next;
1225 
1226   if (list == 0)
1227     return 0;
1228 
1229   head = prev = copy_node (list);
1230   next = TREE_CHAIN (list);
1231   while (next)
1232     {
1233       TREE_CHAIN (prev) = copy_node (next);
1234       prev = TREE_CHAIN (prev);
1235       next = TREE_CHAIN (next);
1236     }
1237   return head;
1238 }
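
/* For example (comment only):

     tree attrs = tree_cons (get_identifier ("packed"), NULL_TREE, NULL_TREE);
     tree copy = copy_list (attrs);

   copies every TREE_LIST cell of the chain but shares the TREE_PURPOSE
   and TREE_VALUE payloads, since copy_node does a shallow copy.  */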
1239 
1240 
1241 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1242    INTEGER_CST with value CST and type TYPE.   */
1243 
1244 static unsigned int
1245 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1246 {
1247   gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1248   /* We need extra HWIs if CST is an unsigned integer with its
1249      upper bit set.  */
1250   if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1251     return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1252   return cst.get_len ();
1253 }
1254 
1255 /* Return a new INTEGER_CST with value CST and type TYPE.  */
1256 
1257 static tree
1258 build_new_int_cst (tree type, const wide_int &cst)
1259 {
1260   unsigned int len = cst.get_len ();
1261   unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1262   tree nt = make_int_cst (len, ext_len);
1263 
1264   if (len < ext_len)
1265     {
1266       --ext_len;
1267       TREE_INT_CST_ELT (nt, ext_len)
1268 	= zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1269       for (unsigned int i = len; i < ext_len; ++i)
1270 	TREE_INT_CST_ELT (nt, i) = -1;
1271     }
1272   else if (TYPE_UNSIGNED (type)
1273 	   && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1274     {
1275       len--;
1276       TREE_INT_CST_ELT (nt, len)
1277 	= zext_hwi (cst.elt (len),
1278 		    cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1279     }
1280 
1281   for (unsigned int i = 0; i < len; i++)
1282     TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1283   TREE_TYPE (nt) = type;
1284   return nt;
1285 }
1286 
1287 /* Create an INT_CST node with value LOW sign-extended to TYPE.  */
1288 
1289 tree
1290 build_int_cst (tree type, HOST_WIDE_INT low)
1291 {
1292   /* Support legacy code.  */
1293   if (!type)
1294     type = integer_type_node;
1295 
1296   return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1297 }
1298 
1299 tree
1300 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1301 {
1302   return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1303 }
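
/* Usage sketch (comment only):

     tree zero = build_int_cst (integer_type_node, 0);
     tree big = build_int_cstu (size_type_node, ~(unsigned HOST_WIDE_INT) 0);

   Small values such as "zero" come back from the per-type cache in
   wide_int_to_tree, so repeated requests yield the same shared node.  */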
1304 
1305 /* Create an INT_CST node with value LOW sign-extended to TYPE.  */
1306 
1307 tree
1308 build_int_cst_type (tree type, HOST_WIDE_INT low)
1309 {
1310   gcc_assert (type);
1311   return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1312 }
1313 
1314 /* Construct a tree of type TYPE with the value given by CST.  The
1315    signedness of CST is assumed to be the same as the signedness of TYPE.  */
1316 
1317 tree
1318 double_int_to_tree (tree type, double_int cst)
1319 {
1320   return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1321 }
1322 
1323 /* We force the wide_int CST to the range of the type TYPE by sign or
1324    zero extending it.  OVERFLOWABLE indicates if we are interested in
1325    overflow of the value; when it is > 0 we are only interested in
1326    signed overflow, and when it is < 0 we are interested in any
1327    overflow.  OVERFLOWED indicates whether overflow has already
1328    occurred.  We force the value to lie within the range of TYPE (by
1329    setting to 0 or 1 all the bits outside the type's range).  We set
1330    TREE_OVERFLOW on the result if
1331         OVERFLOWED is nonzero,
1332         or OVERFLOWABLE is > 0 and signed overflow occurs,
1333         or OVERFLOWABLE is < 0 and any overflow occurs.
1334    We return a new tree node for the extended wide_int.  The node
1335    is shared if no overflow flags are set.  */
1336 
1337 
1338 tree
1339 force_fit_type (tree type, const wide_int_ref &cst,
1340 		int overflowable, bool overflowed)
1341 {
1342   signop sign = TYPE_SIGN (type);
1343 
1344   /* If we need to set overflow flags, return a new unshared node.  */
1345   if (overflowed || !wi::fits_to_tree_p (cst, type))
1346     {
1347       if (overflowed
1348 	  || overflowable < 0
1349 	  || (overflowable > 0 && sign == SIGNED))
1350 	{
1351 	  wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1352 	  tree t = build_new_int_cst (type, tmp);
1353 	  TREE_OVERFLOW (t) = 1;
1354 	  return t;
1355 	}
1356     }
1357 
1358   /* Else build a shared node.  */
1359   return wide_int_to_tree (type, cst);
1360 }
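
/* Illustrative sketch (comment only): the value 300 does not fit in an
   8-bit unsigned type, so whether TREE_OVERFLOW is set depends on the
   flags passed in:

     wide_int w = wi::shwi (300, 16);
     tree a = force_fit_type (unsigned_char_type_node, w, -1, false);
     // OVERFLOWABLE < 0: "a" is a fresh unshared INTEGER_CST of value 44
     // (300 mod 256) with TREE_OVERFLOW (a) set.
     tree b = force_fit_type (unsigned_char_type_node, w, 1, false);
     // unsigned type and OVERFLOWABLE > 0: no flag is set, so "b" is the
     // shared node returned by wide_int_to_tree.  */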
1361 
1362 /* These are the hash table functions for the hash table of INTEGER_CST
1363    nodes of a sizetype.  */
1364 
1365 /* Return the hash code for X, an INTEGER_CST.  */
1366 
1367 hashval_t
1368 int_cst_hasher::hash (tree x)
1369 {
1370   const_tree const t = x;
1371   hashval_t code = TYPE_UID (TREE_TYPE (t));
1372   int i;
1373 
1374   for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1375     code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1376 
1377   return code;
1378 }
1379 
1380 /* Return nonzero if the value represented by X (an INTEGER_CST tree node)
1381    is the same as that represented by Y.  */
1382 
1383 bool
1384 int_cst_hasher::equal (tree x, tree y)
1385 {
1386   const_tree const xt = x;
1387   const_tree const yt = y;
1388 
1389   if (TREE_TYPE (xt) != TREE_TYPE (yt)
1390       || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1391       || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1392     return false;
1393 
1394   for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1395     if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1396       return false;
1397 
1398   return true;
1399 }
1400 
1401 /* Create an INT_CST node of TYPE and value CST.
1402    The returned node is always shared.  For small integers we use a
1403    per-type vector cache, for larger ones we use a single hash table.
1404    The value is extended from its precision according to the sign of
1405    the type to be a multiple of HOST_BITS_PER_WIDE_INT.  This defines
1406    the upper bits and ensures that hashing and value equality based
1407    upon the underlying HOST_WIDE_INTs works without masking.  */
1408 
1409 tree
1410 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1411 {
1412   tree t;
1413   int ix = -1;
1414   int limit = 0;
1415 
1416   gcc_assert (type);
1417   unsigned int prec = TYPE_PRECISION (type);
1418   signop sgn = TYPE_SIGN (type);
1419 
1420   /* Verify that everything is canonical.  */
1421   int l = pcst.get_len ();
1422   if (l > 1)
1423     {
1424       if (pcst.elt (l - 1) == 0)
1425 	gcc_checking_assert (pcst.elt (l - 2) < 0);
1426       if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1427 	gcc_checking_assert (pcst.elt (l - 2) >= 0);
1428     }
1429 
1430   wide_int cst = wide_int::from (pcst, prec, sgn);
1431   unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1432 
1433   if (ext_len == 1)
1434     {
1435       /* We just need to store a single HOST_WIDE_INT.  */
1436       HOST_WIDE_INT hwi;
1437       if (TYPE_UNSIGNED (type))
1438 	hwi = cst.to_uhwi ();
1439       else
1440 	hwi = cst.to_shwi ();
1441 
1442       switch (TREE_CODE (type))
1443 	{
1444 	case NULLPTR_TYPE:
1445 	  gcc_assert (hwi == 0);
1446 	  /* Fallthru.  */
1447 
1448 	case POINTER_TYPE:
1449 	case REFERENCE_TYPE:
1450 	case POINTER_BOUNDS_TYPE:
1451 	  /* Cache NULL pointer and zero bounds.  */
1452 	  if (hwi == 0)
1453 	    {
1454 	      limit = 1;
1455 	      ix = 0;
1456 	    }
1457 	  break;
1458 
1459 	case BOOLEAN_TYPE:
1460 	  /* Cache false or true.  */
1461 	  limit = 2;
1462 	  if (IN_RANGE (hwi, 0, 1))
1463 	    ix = hwi;
1464 	  break;
1465 
1466 	case INTEGER_TYPE:
1467 	case OFFSET_TYPE:
1468 	  if (TYPE_SIGN (type) == UNSIGNED)
1469 	    {
1470 	      /* Cache [0, N).  */
1471 	      limit = INTEGER_SHARE_LIMIT;
1472 	      if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1473 		ix = hwi;
1474 	    }
1475 	  else
1476 	    {
1477 	      /* Cache [-1, N).  */
1478 	      limit = INTEGER_SHARE_LIMIT + 1;
1479 	      if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1480 		ix = hwi + 1;
1481 	    }
1482 	  break;
1483 
1484 	case ENUMERAL_TYPE:
1485 	  break;
1486 
1487 	default:
1488 	  gcc_unreachable ();
1489 	}
1490 
1491       if (ix >= 0)
1492 	{
1493 	  /* Look for it in the type's vector of small shared ints.  */
1494 	  if (!TYPE_CACHED_VALUES_P (type))
1495 	    {
1496 	      TYPE_CACHED_VALUES_P (type) = 1;
1497 	      TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1498 	    }
1499 
1500 	  t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1501 	  if (t)
1502 	    /* Make sure no one is clobbering the shared constant.  */
1503 	    gcc_checking_assert (TREE_TYPE (t) == type
1504 				 && TREE_INT_CST_NUNITS (t) == 1
1505 				 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1506 				 && TREE_INT_CST_EXT_NUNITS (t) == 1
1507 				 && TREE_INT_CST_ELT (t, 0) == hwi);
1508 	  else
1509 	    {
1510 	      /* Create a new shared int.  */
1511 	      t = build_new_int_cst (type, cst);
1512 	      TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1513 	    }
1514 	}
1515       else
1516 	{
1517 	  /* Use the cache of larger shared ints, using int_cst_node as
1518 	     a temporary.  */
1519 
1520 	  TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1521 	  TREE_TYPE (int_cst_node) = type;
1522 
1523 	  tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1524 	  t = *slot;
1525 	  if (!t)
1526 	    {
1527 	      /* Insert this one into the hash table.  */
1528 	      t = int_cst_node;
1529 	      *slot = t;
1530 	      /* Make a new node for next time round.  */
1531 	      int_cst_node = make_int_cst (1, 1);
1532 	    }
1533 	}
1534     }
1535   else
1536     {
1537       /* The value either hashes properly or we drop it on the floor
1538 	 for the gc to take care of.  There will not be enough of them
1539 	 to worry about.  */
1540 
1541       tree nt = build_new_int_cst (type, cst);
1542       tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1543       t = *slot;
1544       if (!t)
1545 	{
1546 	  /* Insert this one into the hash table.  */
1547 	  t = nt;
1548 	  *slot = t;
1549 	}
1550     }
1551 
1552   return t;
1553 }
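
/* For example (comment only), two separate requests for the same small
   constant share one node:

     wide_int one = wi::shwi (1, TYPE_PRECISION (integer_type_node));
     tree a = wide_int_to_tree (integer_type_node, one);
     tree b = build_int_cst (integer_type_node, 1);
     // a == b: both come from TYPE_CACHED_VALUES (integer_type_node).

   Values outside the small-integer ranges above go through
   int_cst_hash_table instead, and are still shared per type and value.  */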
1554 
1555 void
1556 cache_integer_cst (tree t)
1557 {
1558   tree type = TREE_TYPE (t);
1559   int ix = -1;
1560   int limit = 0;
1561   int prec = TYPE_PRECISION (type);
1562 
1563   gcc_assert (!TREE_OVERFLOW (t));
1564 
1565   switch (TREE_CODE (type))
1566     {
1567     case NULLPTR_TYPE:
1568       gcc_assert (integer_zerop (t));
1569       /* Fallthru.  */
1570 
1571     case POINTER_TYPE:
1572     case REFERENCE_TYPE:
1573       /* Cache NULL pointer.  */
1574       if (integer_zerop (t))
1575 	{
1576 	  limit = 1;
1577 	  ix = 0;
1578 	}
1579       break;
1580 
1581     case BOOLEAN_TYPE:
1582       /* Cache false or true.  */
1583       limit = 2;
1584       if (wi::ltu_p (t, 2))
1585 	ix = TREE_INT_CST_ELT (t, 0);
1586       break;
1587 
1588     case INTEGER_TYPE:
1589     case OFFSET_TYPE:
1590       if (TYPE_UNSIGNED (type))
1591 	{
1592 	  /* Cache 0..N */
1593 	  limit = INTEGER_SHARE_LIMIT;
1594 
1595 	  /* This is a little hokey, but if the prec is smaller than
1596 	     what is necessary to hold INTEGER_SHARE_LIMIT, then the
1597 	     obvious test will not get the correct answer.  */
1598 	  if (prec < HOST_BITS_PER_WIDE_INT)
1599 	    {
1600 	      if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1601 		ix = tree_to_uhwi (t);
1602 	    }
1603 	  else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1604 	    ix = tree_to_uhwi (t);
1605 	}
1606       else
1607 	{
1608 	  /* Cache -1..N */
1609 	  limit = INTEGER_SHARE_LIMIT + 1;
1610 
1611 	  if (integer_minus_onep (t))
1612 	    ix = 0;
1613 	  else if (!wi::neg_p (t))
1614 	    {
1615 	      if (prec < HOST_BITS_PER_WIDE_INT)
1616 		{
1617 		  if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1618 		    ix = tree_to_shwi (t) + 1;
1619 		}
1620 	      else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1621 		ix = tree_to_shwi (t) + 1;
1622 	    }
1623 	}
1624       break;
1625 
1626     case ENUMERAL_TYPE:
1627       break;
1628 
1629     default:
1630       gcc_unreachable ();
1631     }
1632 
1633   if (ix >= 0)
1634     {
1635       /* Look for it in the type's vector of small shared ints.  */
1636       if (!TYPE_CACHED_VALUES_P (type))
1637 	{
1638 	  TYPE_CACHED_VALUES_P (type) = 1;
1639 	  TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1640 	}
1641 
1642       gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1643       TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1644     }
1645   else
1646     {
1647       /* Use the cache of larger shared ints.  */
1648       tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1649       /* If there is already an entry for the number verify it's the
1650          same.  */
1651       if (*slot)
1652 	gcc_assert (wi::eq_p (tree (*slot), t));
1653       else
1654 	/* Otherwise insert this one into the hash table.  */
1655 	*slot = t;
1656     }
1657 }
1658 
1659 
1660 /* Build an integer constant in TYPE such that the lowest BITS bits are
1661    ones and the rest are zeros.  */
1662 
1663 tree
1664 build_low_bits_mask (tree type, unsigned bits)
1665 {
1666   gcc_assert (bits <= TYPE_PRECISION (type));
1667 
1668   return wide_int_to_tree (type, wi::mask (bits, false,
1669 					   TYPE_PRECISION (type)));
1670 }
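
/* For instance, build_low_bits_mask (unsigned_type_node, 3) returns the
   shared INTEGER_CST 7, i.e. a constant with only the low three bits set.  */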
1671 
1672 /* Check that X is an integer constant that can be expressed in a signed
1673    or unsigned HOST_WIDE_INT without loss of precision.  */
1674 
1675 bool
1676 cst_and_fits_in_hwi (const_tree x)
1677 {
1678   return (TREE_CODE (x) == INTEGER_CST
1679 	  && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
1680 }
1681 
1682 /* Return a newly constructed VECTOR_CST node of length LEN.  */
1683 
1684 tree
1685 make_vector_stat (unsigned len MEM_STAT_DECL)
1686 {
1687   tree t;
1688   unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1689 
1690   record_node_allocation_statistics (VECTOR_CST, length);
1691 
1692   t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1693 
1694   TREE_SET_CODE (t, VECTOR_CST);
1695   TREE_CONSTANT (t) = 1;
1696 
1697   return t;
1698 }
1699 
1700 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1701    are in the array pointed to by VALS.  */
1702 
1703 tree
1704 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1705 {
1706   int over = 0;
1707   unsigned cnt = 0;
1708   tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1709   TREE_TYPE (v) = type;
1710 
1711   /* Iterate through elements and check for overflow.  */
1712   for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1713     {
1714       tree value = vals[cnt];
1715 
1716       VECTOR_CST_ELT (v, cnt) = value;
1717 
1718       /* Don't crash if we get an address constant.  */
1719       if (!CONSTANT_CLASS_P (value))
1720 	continue;
1721 
1722       over |= TREE_OVERFLOW (value);
1723     }
1724 
1725   TREE_OVERFLOW (v) = over;
1726   return v;
1727 }
1728 
1729 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1730    are extracted from V, a vector of CONSTRUCTOR_ELT.  */
1731 
1732 tree
1733 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1734 {
1735   tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1736   unsigned HOST_WIDE_INT idx, pos = 0;
1737   tree value;
1738 
1739   FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1740     {
1741       if (TREE_CODE (value) == VECTOR_CST)
1742 	for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
1743 	  vec[pos++] = VECTOR_CST_ELT (value, i);
1744       else
1745 	vec[pos++] = value;
1746     }
1747   while (pos < TYPE_VECTOR_SUBPARTS (type))
1748     vec[pos++] = build_zero_cst (TREE_TYPE (type));
1749 
1750   return build_vector (type, vec);
1751 }
1752 
1753 /* Build a vector of type VECTYPE where all the elements are SCs.  */
1754 tree
1755 build_vector_from_val (tree vectype, tree sc)
1756 {
1757   int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1758 
1759   if (sc == error_mark_node)
1760     return sc;
1761 
1762   /* Verify that the vector type is suitable for SC.  Note that there
1763      is some inconsistency in the type-system with respect to restrict
1764      qualifications of pointers.  Vector types always have a main-variant
1765      element type and the qualification is applied to the vector-type.
1766      So TREE_TYPE (vector-type) does not return a properly qualified
1767      vector element-type.  */
1768   gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1769 					   TREE_TYPE (vectype)));
1770 
1771   if (CONSTANT_CLASS_P (sc))
1772     {
1773       tree *v = XALLOCAVEC (tree, nunits);
1774       for (i = 0; i < nunits; ++i)
1775 	v[i] = sc;
1776       return build_vector (vectype, v);
1777     }
1778   else
1779     {
1780       vec<constructor_elt, va_gc> *v;
1781       vec_alloc (v, nunits);
1782       for (i = 0; i < nunits; ++i)
1783 	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1784       return build_constructor (vectype, v);
1785     }
1786 }
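
/* Usage sketch (comment only): splatting a constant scalar across a
   vector type "v4si" (a hypothetical 4-element integer vector type):

     tree elt = build_one_cst (TREE_TYPE (v4si));
     tree vec = build_vector_from_val (v4si, elt);

   Because "elt" is CONSTANT_CLASS_P, the result is a VECTOR_CST; a
   non-constant scalar would instead yield a CONSTRUCTOR.  */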
1787 
1788 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
1789    calculate TREE_CONSTANT and TREE_SIDE_EFFECTS.  */
1790 
1791 void
1792 recompute_constructor_flags (tree c)
1793 {
1794   unsigned int i;
1795   tree val;
1796   bool constant_p = true;
1797   bool side_effects_p = false;
1798   vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
1799 
1800   FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
1801     {
1802       /* Mostly ctors will have elts that don't have side-effects, so
1803 	 the usual case is to scan all the elements.  Hence a single
1804 	 loop for both const and side effects, rather than one loop
1805 	 each (with early outs).  */
1806       if (!TREE_CONSTANT (val))
1807 	constant_p = false;
1808       if (TREE_SIDE_EFFECTS (val))
1809 	side_effects_p = true;
1810     }
1811 
1812   TREE_SIDE_EFFECTS (c) = side_effects_p;
1813   TREE_CONSTANT (c) = constant_p;
1814 }
1815 
1816 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
1817    CONSTRUCTOR C.  */
1818 
1819 void
1820 verify_constructor_flags (tree c)
1821 {
1822   unsigned int i;
1823   tree val;
1824   bool constant_p = TREE_CONSTANT (c);
1825   bool side_effects_p = TREE_SIDE_EFFECTS (c);
1826   vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
1827 
1828   FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
1829     {
1830       if (constant_p && !TREE_CONSTANT (val))
1831 	internal_error ("non-constant element in constant CONSTRUCTOR");
1832       if (!side_effects_p && TREE_SIDE_EFFECTS (val))
1833 	internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
1834     }
1835 }
1836 
1837 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1838    are in the vec pointed to by VALS.  */
1839 tree
1840 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1841 {
1842   tree c = make_node (CONSTRUCTOR);
1843 
1844   TREE_TYPE (c) = type;
1845   CONSTRUCTOR_ELTS (c) = vals;
1846 
1847   recompute_constructor_flags (c);
1848 
1849   return c;
1850 }
1851 
1852 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1853    INDEX and VALUE.  */
1854 tree
1855 build_constructor_single (tree type, tree index, tree value)
1856 {
1857   vec<constructor_elt, va_gc> *v;
1858   constructor_elt elt = {index, value};
1859 
1860   vec_alloc (v, 1);
1861   v->quick_push (elt);
1862 
1863   return build_constructor (type, v);
1864 }
1865 
1866 
1867 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1868    are in a list pointed to by VALS.  */
1869 tree
1870 build_constructor_from_list (tree type, tree vals)
1871 {
1872   tree t;
1873   vec<constructor_elt, va_gc> *v = NULL;
1874 
1875   if (vals)
1876     {
1877       vec_alloc (v, list_length (vals));
1878       for (t = vals; t; t = TREE_CHAIN (t))
1879 	CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1880     }
1881 
1882   return build_constructor (type, v);
1883 }
1884 
1885 /* Return a new CONSTRUCTOR node whose type is TYPE.  NELTS is the number
1886    of elements, provided as index/value pairs.  */
1887 
1888 tree
1889 build_constructor_va (tree type, int nelts, ...)
1890 {
1891   vec<constructor_elt, va_gc> *v = NULL;
1892   va_list p;
1893 
1894   va_start (p, nelts);
1895   vec_alloc (v, nelts);
1896   while (nelts--)
1897     {
1898       tree index = va_arg (p, tree);
1899       tree value = va_arg (p, tree);
1900       CONSTRUCTOR_APPEND_ELT (v, index, value);
1901     }
1902   va_end (p);
1903   return build_constructor (type, v);
1904 }
1905 
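/* Editorial usage sketch (not part of the original source): the variadic
   arguments are consumed as NELTS index/value pairs, so a two-element
   initializer could be built as

     tree ctor = build_constructor_va (atype, 2,
				       size_int (0), elt0,
				       size_int (1), elt1);

   where ATYPE, ELT0 and ELT1 are hypothetical.  A NULL_TREE index is also
   accepted and leaves the element position implicit.  */
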
1906 /* Return a new FIXED_CST node whose type is TYPE and value is F.  */
1907 
1908 tree
1909 build_fixed (tree type, FIXED_VALUE_TYPE f)
1910 {
1911   tree v;
1912   FIXED_VALUE_TYPE *fp;
1913 
1914   v = make_node (FIXED_CST);
1915   fp = ggc_alloc<fixed_value> ();
1916   memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1917 
1918   TREE_TYPE (v) = type;
1919   TREE_FIXED_CST_PTR (v) = fp;
1920   return v;
1921 }
1922 
1923 /* Return a new REAL_CST node whose type is TYPE and value is D.  */
1924 
1925 tree
1926 build_real (tree type, REAL_VALUE_TYPE d)
1927 {
1928   tree v;
1929   REAL_VALUE_TYPE *dp;
1930   int overflow = 0;
1931 
1932   /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1933      Consider doing it via real_convert now.  */
1934 
1935   v = make_node (REAL_CST);
1936   dp = ggc_alloc<real_value> ();
1937   memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1938 
1939   TREE_TYPE (v) = type;
1940   TREE_REAL_CST_PTR (v) = dp;
1941   TREE_OVERFLOW (v) = overflow;
1942   return v;
1943 }
1944 
1945 /* Like build_real, but first truncate D to the type.  */
1946 
1947 tree
1948 build_real_truncate (tree type, REAL_VALUE_TYPE d)
1949 {
1950   return build_real (type, real_value_truncate (TYPE_MODE (type), d));
1951 }
1952 
1953 /* Return a new REAL_CST node whose type is TYPE
1954    and whose value is the integer value of the INTEGER_CST node I.  */
1955 
1956 REAL_VALUE_TYPE
1957 real_value_from_int_cst (const_tree type, const_tree i)
1958 {
1959   REAL_VALUE_TYPE d;
1960 
1961   /* Clear all bits of the real value type so that we can later do
1962      bitwise comparisons to see if two values are the same.  */
1963   memset (&d, 0, sizeof d);
1964 
1965   real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1966 		     TYPE_SIGN (TREE_TYPE (i)));
1967   return d;
1968 }
1969 
1970 /* Given a tree representing an integer constant I, return a tree
1971    representing the same value as a floating-point constant of type TYPE.  */
1972 
1973 tree
1974 build_real_from_int_cst (tree type, const_tree i)
1975 {
1976   tree v;
1977   int overflow = TREE_OVERFLOW (i);
1978 
1979   v = build_real (type, real_value_from_int_cst (type, i));
1980 
1981   TREE_OVERFLOW (v) |= overflow;
1982   return v;
1983 }
1984 
1985 /* Return a newly constructed STRING_CST node whose value is
1986    the LEN characters at STR.
1987    Note that for a C string literal, LEN should include the trailing NUL.
1988    The TREE_TYPE is not initialized.  */
1989 
1990 tree
1991 build_string (int len, const char *str)
1992 {
1993   tree s;
1994   size_t length;
1995 
1996   /* Do not waste bytes provided by padding of struct tree_string.  */
1997   length = len + offsetof (struct tree_string, str) + 1;
1998 
1999   record_node_allocation_statistics (STRING_CST, length);
2000 
2001   s = (tree) ggc_internal_alloc (length);
2002 
2003   memset (s, 0, sizeof (struct tree_typed));
2004   TREE_SET_CODE (s, STRING_CST);
2005   TREE_CONSTANT (s) = 1;
2006   TREE_STRING_LENGTH (s) = len;
2007   memcpy (s->string.str, str, len);
2008   s->string.str[len] = '\0';
2009 
2010   return s;
2011 }
2012 
2013 /* Return a newly constructed COMPLEX_CST node whose value is
2014    specified by the real and imaginary parts REAL and IMAG.
2015    Both REAL and IMAG should be constant nodes.  TYPE, if specified,
2016    will be the type of the COMPLEX_CST; otherwise a new type will be made.  */
2017 
2018 tree
2019 build_complex (tree type, tree real, tree imag)
2020 {
2021   tree t = make_node (COMPLEX_CST);
2022 
2023   TREE_REALPART (t) = real;
2024   TREE_IMAGPART (t) = imag;
2025   TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2026   TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2027   return t;
2028 }
2029 
2030 /* Build a complex (inf +- 0i), such as for the result of cproj.
2031    TYPE is the complex tree type of the result.  If NEG is true, the
2032    imaginary zero is negative.  */
2033 
2034 tree
2035 build_complex_inf (tree type, bool neg)
2036 {
2037   REAL_VALUE_TYPE rinf, rzero = dconst0;
2038 
2039   real_inf (&rinf);
2040   rzero.sign = neg;
2041   return build_complex (type, build_real (TREE_TYPE (type), rinf),
2042 			build_real (TREE_TYPE (type), rzero));
2043 }
2044 
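/* Editorial usage sketch (not part of the original source): this is the
   constant that folding cproj produces once an operand is known to contain
   an infinity, e.g.

     tree t = build_complex_inf (complex_double_type_node, false);

   yields +Inf + 0.0i; passing true for NEG flips only the sign of the
   imaginary zero.  */
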
2045 /* Return the constant 1 in type TYPE.  If TYPE has several elements, each
2046    element is set to 1.  In particular, this is 1 + i for complex types.  */
2047 
2048 tree
2049 build_each_one_cst (tree type)
2050 {
2051   if (TREE_CODE (type) == COMPLEX_TYPE)
2052     {
2053       tree scalar = build_one_cst (TREE_TYPE (type));
2054       return build_complex (type, scalar, scalar);
2055     }
2056   else
2057     return build_one_cst (type);
2058 }
2059 
2060 /* Return a constant of arithmetic type TYPE which is the
2061    multiplicative identity of the set TYPE.  */
2062 
2063 tree
2064 build_one_cst (tree type)
2065 {
2066   switch (TREE_CODE (type))
2067     {
2068     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2069     case POINTER_TYPE: case REFERENCE_TYPE:
2070     case OFFSET_TYPE:
2071       return build_int_cst (type, 1);
2072 
2073     case REAL_TYPE:
2074       return build_real (type, dconst1);
2075 
2076     case FIXED_POINT_TYPE:
2077       /* We can only generate 1 for accum types.  */
2078       gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2079       return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2080 
2081     case VECTOR_TYPE:
2082       {
2083 	tree scalar = build_one_cst (TREE_TYPE (type));
2084 
2085 	return build_vector_from_val (type, scalar);
2086       }
2087 
2088     case COMPLEX_TYPE:
2089       return build_complex (type,
2090 			    build_one_cst (TREE_TYPE (type)),
2091 			    build_zero_cst (TREE_TYPE (type)));
2092 
2093     default:
2094       gcc_unreachable ();
2095     }
2096 }
2097 
2098 /* Return an integer of type TYPE containing all 1's in as much precision as
2099    it contains, or a complex or vector whose subparts are such integers.  */
2100 
2101 tree
2102 build_all_ones_cst (tree type)
2103 {
2104   if (TREE_CODE (type) == COMPLEX_TYPE)
2105     {
2106       tree scalar = build_all_ones_cst (TREE_TYPE (type));
2107       return build_complex (type, scalar, scalar);
2108     }
2109   else
2110     return build_minus_one_cst (type);
2111 }
2112 
2113 /* Return a constant of arithmetic type TYPE which is the
2114    opposite of the multiplicative identity of the set TYPE.  */
2115 
2116 tree
2117 build_minus_one_cst (tree type)
2118 {
2119   switch (TREE_CODE (type))
2120     {
2121     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2122     case POINTER_TYPE: case REFERENCE_TYPE:
2123     case OFFSET_TYPE:
2124       return build_int_cst (type, -1);
2125 
2126     case REAL_TYPE:
2127       return build_real (type, dconstm1);
2128 
2129     case FIXED_POINT_TYPE:
2130       /* We can only generate -1 for accum types.  */
2131       gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2132       return build_fixed (type, fixed_from_double_int (double_int_minus_one,
2133 						       TYPE_MODE (type)));
2134 
2135     case VECTOR_TYPE:
2136       {
2137 	tree scalar = build_minus_one_cst (TREE_TYPE (type));
2138 
2139 	return build_vector_from_val (type, scalar);
2140       }
2141 
2142     case COMPLEX_TYPE:
2143       return build_complex (type,
2144 			    build_minus_one_cst (TREE_TYPE (type)),
2145 			    build_zero_cst (TREE_TYPE (type)));
2146 
2147     default:
2148       gcc_unreachable ();
2149     }
2150 }
2151 
2152 /* Build 0 constant of type TYPE.  This is used by constructor folding
2153    and thus the constant should be represented in memory by
2154    zero(es).  */
2155 
2156 tree
2157 build_zero_cst (tree type)
2158 {
2159   switch (TREE_CODE (type))
2160     {
2161     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2162     case POINTER_TYPE: case REFERENCE_TYPE:
2163     case OFFSET_TYPE: case NULLPTR_TYPE:
2164       return build_int_cst (type, 0);
2165 
2166     case REAL_TYPE:
2167       return build_real (type, dconst0);
2168 
2169     case FIXED_POINT_TYPE:
2170       return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2171 
2172     case VECTOR_TYPE:
2173       {
2174 	tree scalar = build_zero_cst (TREE_TYPE (type));
2175 
2176 	return build_vector_from_val (type, scalar);
2177       }
2178 
2179     case COMPLEX_TYPE:
2180       {
2181 	tree zero = build_zero_cst (TREE_TYPE (type));
2182 
2183 	return build_complex (type, zero, zero);
2184       }
2185 
2186     default:
2187       if (!AGGREGATE_TYPE_P (type))
2188 	return fold_convert (type, integer_zero_node);
2189       return build_constructor (type, NULL);
2190     }
2191 }
2192 
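/* Editorial usage sketch (not part of the original source): scalars get the
   matching zero constant while aggregates get an empty CONSTRUCTOR, the
   canonical all-zero-bits initializer:

     tree z1 = build_zero_cst (float_type_node);
     tree z2 = build_zero_cst (rec_type);

   Z1 is REAL_CST 0.0 and Z2 is the empty CONSTRUCTOR, where REC_TYPE stands
   for any hypothetical RECORD_TYPE.  */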
2193 
2194 /* Build a BINFO with room for BASE_BINFOS base binfos.  */
2195 
2196 tree
2197 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2198 {
2199   tree t;
2200   size_t length = (offsetof (struct tree_binfo, base_binfos)
2201 		   + vec<tree, va_gc>::embedded_size (base_binfos));
2202 
2203   record_node_allocation_statistics (TREE_BINFO, length);
2204 
2205   t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2206 
2207   memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2208 
2209   TREE_SET_CODE (t, TREE_BINFO);
2210 
2211   BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2212 
2213   return t;
2214 }
2215 
2216 /* Create a CASE_LABEL_EXPR tree node and return it.  */
2217 
2218 tree
2219 build_case_label (tree low_value, tree high_value, tree label_decl)
2220 {
2221   tree t = make_node (CASE_LABEL_EXPR);
2222 
2223   TREE_TYPE (t) = void_type_node;
2224   SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2225 
2226   CASE_LOW (t) = low_value;
2227   CASE_HIGH (t) = high_value;
2228   CASE_LABEL (t) = label_decl;
2229   CASE_CHAIN (t) = NULL_TREE;
2230 
2231   return t;
2232 }
2233 
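/* Editorial usage sketch (not part of the original source): a single-value
   case leaves HIGH_VALUE null and a default label leaves both bounds null,
   e.g.

     tree c3   = build_case_label (build_int_cst (integer_type_node, 3),
				   NULL_TREE, label_decl);
     tree dflt = build_case_label (NULL_TREE, NULL_TREE, default_decl);

   LABEL_DECL and DEFAULT_DECL are assumed to be LABEL_DECLs with a source
   location, since the node's location is taken from the decl.  */
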
2234 /* Build a newly constructed INTEGER_CST node.  LEN and EXT_LEN are the
2235    values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2236    The latter determines the length of the HOST_WIDE_INT vector.  */
2237 
2238 tree
2239 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2240 {
2241   tree t;
2242   int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2243 		+ sizeof (struct tree_int_cst));
2244 
2245   gcc_assert (len);
2246   record_node_allocation_statistics (INTEGER_CST, length);
2247 
2248   t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2249 
2250   TREE_SET_CODE (t, INTEGER_CST);
2251   TREE_INT_CST_NUNITS (t) = len;
2252   TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2253   /* to_offset can only be applied to trees that are offset_int-sized
2254      or smaller.  EXT_LEN is correct if it fits, otherwise the constant
2255      must be exactly the precision of offset_int and so LEN is correct.  */
2256   if (ext_len <= OFFSET_INT_ELTS)
2257     TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2258   else
2259     TREE_INT_CST_OFFSET_NUNITS (t) = len;
2260 
2261   TREE_CONSTANT (t) = 1;
2262 
2263   return t;
2264 }
2265 
2266 /* Build a newly constructed TREE_VEC node of length LEN.  */
2267 
2268 tree
2269 make_tree_vec_stat (int len MEM_STAT_DECL)
2270 {
2271   tree t;
2272   int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2273 
2274   record_node_allocation_statistics (TREE_VEC, length);
2275 
2276   t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2277 
2278   TREE_SET_CODE (t, TREE_VEC);
2279   TREE_VEC_LENGTH (t) = len;
2280 
2281   return t;
2282 }
2283 
2284 /* Grow a TREE_VEC node to new length LEN.  */
2285 
2286 tree
2287 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2288 {
2289   gcc_assert (TREE_CODE (v) == TREE_VEC);
2290 
2291   int oldlen = TREE_VEC_LENGTH (v);
2292   gcc_assert (len > oldlen);
2293 
2294   int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2295   int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2296 
2297   record_node_allocation_statistics (TREE_VEC, length - oldlength);
2298 
2299   v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2300 
2301   TREE_VEC_LENGTH (v) = len;
2302 
2303   return v;
2304 }
2305 
2306 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2307    fixed, and scalar, complex or vector.  */
2308 
2309 int
2310 zerop (const_tree expr)
2311 {
2312   return (integer_zerop (expr)
2313 	  || real_zerop (expr)
2314 	  || fixed_zerop (expr));
2315 }
2316 
2317 /* Return 1 if EXPR is the integer constant zero or a complex constant
2318    of zero.  */
2319 
2320 int
2321 integer_zerop (const_tree expr)
2322 {
2323   switch (TREE_CODE (expr))
2324     {
2325     case INTEGER_CST:
2326       return wi::eq_p (expr, 0);
2327     case COMPLEX_CST:
2328       return (integer_zerop (TREE_REALPART (expr))
2329 	      && integer_zerop (TREE_IMAGPART (expr)));
2330     case VECTOR_CST:
2331       {
2332 	unsigned i;
2333 	for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2334 	  if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2335 	    return false;
2336 	return true;
2337       }
2338     default:
2339       return false;
2340     }
2341 }
2342 
2343 /* Return 1 if EXPR is the integer constant one or the corresponding
2344    complex constant.  */
2345 
2346 int
2347 integer_onep (const_tree expr)
2348 {
2349   switch (TREE_CODE (expr))
2350     {
2351     case INTEGER_CST:
2352       return wi::eq_p (wi::to_widest (expr), 1);
2353     case COMPLEX_CST:
2354       return (integer_onep (TREE_REALPART (expr))
2355 	      && integer_zerop (TREE_IMAGPART (expr)));
2356     case VECTOR_CST:
2357       {
2358 	unsigned i;
2359 	for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2360 	  if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2361 	    return false;
2362 	return true;
2363       }
2364     default:
2365       return false;
2366     }
2367 }
2368 
2369 /* Return 1 if EXPR is the integer constant one.  For complex and vector,
2370    return 1 if every piece is the integer constant one.  */
2371 
2372 int
2373 integer_each_onep (const_tree expr)
2374 {
2375   if (TREE_CODE (expr) == COMPLEX_CST)
2376     return (integer_onep (TREE_REALPART (expr))
2377 	    && integer_onep (TREE_IMAGPART (expr)));
2378   else
2379     return integer_onep (expr);
2380 }
2381 
2382 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2383    it contains, or a complex or vector whose subparts are such integers.  */
2384 
2385 int
2386 integer_all_onesp (const_tree expr)
2387 {
2388   if (TREE_CODE (expr) == COMPLEX_CST
2389       && integer_all_onesp (TREE_REALPART (expr))
2390       && integer_all_onesp (TREE_IMAGPART (expr)))
2391     return 1;
2392 
2393   else if (TREE_CODE (expr) == VECTOR_CST)
2394     {
2395       unsigned i;
2396       for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2397 	if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2398 	  return 0;
2399       return 1;
2400     }
2401 
2402   else if (TREE_CODE (expr) != INTEGER_CST)
2403     return 0;
2404 
2405   return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2406 }
2407 
2408 /* Return 1 if EXPR is the integer constant minus one.  */
2409 
2410 int
2411 integer_minus_onep (const_tree expr)
2412 {
2413   if (TREE_CODE (expr) == COMPLEX_CST)
2414     return (integer_all_onesp (TREE_REALPART (expr))
2415 	    && integer_zerop (TREE_IMAGPART (expr)));
2416   else
2417     return integer_all_onesp (expr);
2418 }
2419 
2420 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2421    one bit on).  */
2422 
2423 int
2424 integer_pow2p (const_tree expr)
2425 {
2426   if (TREE_CODE (expr) == COMPLEX_CST
2427       && integer_pow2p (TREE_REALPART (expr))
2428       && integer_zerop (TREE_IMAGPART (expr)))
2429     return 1;
2430 
2431   if (TREE_CODE (expr) != INTEGER_CST)
2432     return 0;
2433 
2434   return wi::popcount (expr) == 1;
2435 }
2436 
2437 /* Return 1 if EXPR is an integer constant other than zero or a
2438    complex constant other than zero.  */
2439 
2440 int
2441 integer_nonzerop (const_tree expr)
2442 {
2443   return ((TREE_CODE (expr) == INTEGER_CST
2444 	   && !wi::eq_p (expr, 0))
2445 	  || (TREE_CODE (expr) == COMPLEX_CST
2446 	      && (integer_nonzerop (TREE_REALPART (expr))
2447 		  || integer_nonzerop (TREE_IMAGPART (expr)))));
2448 }
2449 
2450 /* Return 1 if EXPR is the integer constant one.  For vector,
2451    return 1 if every piece is the integer constant minus one
2452    (representing the value TRUE).  */
2453 
2454 int
2455 integer_truep (const_tree expr)
2456 {
2457   if (TREE_CODE (expr) == VECTOR_CST)
2458     return integer_all_onesp (expr);
2459   return integer_onep (expr);
2460 }
2461 
2462 /* Return 1 if EXPR is the fixed-point constant zero.  */
2463 
2464 int
2465 fixed_zerop (const_tree expr)
2466 {
2467   return (TREE_CODE (expr) == FIXED_CST
2468 	  && TREE_FIXED_CST (expr).data.is_zero ());
2469 }
2470 
2471 /* Return the power of two represented by a tree node known to be a
2472    power of two.  */
2473 
2474 int
2475 tree_log2 (const_tree expr)
2476 {
2477   if (TREE_CODE (expr) == COMPLEX_CST)
2478     return tree_log2 (TREE_REALPART (expr));
2479 
2480   return wi::exact_log2 (expr);
2481 }
2482 
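/* Editorial usage sketch (not part of the original source): callers are
   expected to establish the power-of-two property first, e.g.

     tree c = build_int_cst (integer_type_node, 8);
     int shift = integer_pow2p (c) ? tree_log2 (c) : -1;

   which leaves SHIFT equal to 3 here; for a value that is not a power of
   two, wi::exact_log2 reports -1.  */
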
2483 /* Similar, but return the largest integer Y such that 2 ** Y is less
2484    than or equal to EXPR.  */
2485 
2486 int
2487 tree_floor_log2 (const_tree expr)
2488 {
2489   if (TREE_CODE (expr) == COMPLEX_CST)
2490     return tree_log2 (TREE_REALPART (expr));
2491 
2492   return wi::floor_log2 (expr);
2493 }
2494 
2495 /* Return number of known trailing zero bits in EXPR, or, if the value of
2496    EXPR is known to be zero, the precision of its type.  */
2497 
2498 unsigned int
2499 tree_ctz (const_tree expr)
2500 {
2501   if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2502       && !POINTER_TYPE_P (TREE_TYPE (expr)))
2503     return 0;
2504 
2505   unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2506   switch (TREE_CODE (expr))
2507     {
2508     case INTEGER_CST:
2509       ret1 = wi::ctz (expr);
2510       return MIN (ret1, prec);
2511     case SSA_NAME:
2512       ret1 = wi::ctz (get_nonzero_bits (expr));
2513       return MIN (ret1, prec);
2514     case PLUS_EXPR:
2515     case MINUS_EXPR:
2516     case BIT_IOR_EXPR:
2517     case BIT_XOR_EXPR:
2518     case MIN_EXPR:
2519     case MAX_EXPR:
2520       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2521       if (ret1 == 0)
2522 	return ret1;
2523       ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2524       return MIN (ret1, ret2);
2525     case POINTER_PLUS_EXPR:
2526       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2527       ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2528       /* Second operand is sizetype, which could be in theory
2529 	 wider than pointer's precision.  Make sure we never
2530 	 return more than prec.  */
2531       ret2 = MIN (ret2, prec);
2532       return MIN (ret1, ret2);
2533     case BIT_AND_EXPR:
2534       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2535       ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2536       return MAX (ret1, ret2);
2537     case MULT_EXPR:
2538       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2539       ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2540       return MIN (ret1 + ret2, prec);
2541     case LSHIFT_EXPR:
2542       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2543       if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2544 	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2545 	{
2546 	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2547 	  return MIN (ret1 + ret2, prec);
2548 	}
2549       return ret1;
2550     case RSHIFT_EXPR:
2551       if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2552 	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2553 	{
2554 	  ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2555 	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2556 	  if (ret1 > ret2)
2557 	    return ret1 - ret2;
2558 	}
2559       return 0;
2560     case TRUNC_DIV_EXPR:
2561     case CEIL_DIV_EXPR:
2562     case FLOOR_DIV_EXPR:
2563     case ROUND_DIV_EXPR:
2564     case EXACT_DIV_EXPR:
2565       if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2566 	  && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2567 	{
2568 	  int l = tree_log2 (TREE_OPERAND (expr, 1));
2569 	  if (l >= 0)
2570 	    {
2571 	      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2572 	      ret2 = l;
2573 	      if (ret1 > ret2)
2574 		return ret1 - ret2;
2575 	    }
2576 	}
2577       return 0;
2578     CASE_CONVERT:
2579       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2580       if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2581 	ret1 = prec;
2582       return MIN (ret1, prec);
2583     case SAVE_EXPR:
2584       return tree_ctz (TREE_OPERAND (expr, 0));
2585     case COND_EXPR:
2586       ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2587       if (ret1 == 0)
2588 	return 0;
2589       ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2590       return MIN (ret1, ret2);
2591     case COMPOUND_EXPR:
2592       return tree_ctz (TREE_OPERAND (expr, 1));
2593     case ADDR_EXPR:
2594       ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2595       if (ret1 > BITS_PER_UNIT)
2596 	{
2597 	  ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2598 	  return MIN (ret1, prec);
2599 	}
2600       return 0;
2601     default:
2602       return 0;
2603     }
2604 }
2605 
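/* Editorial worked example (not part of the original source): for an
   expression like (x << 4) where X is an SSA_NAME whose nonzero-bits info
   already proves two trailing zeros, the LSHIFT_EXPR case above returns
   MIN (2 + 4, prec), i.e. 6; a mask such as (x & 0xf0) goes through
   BIT_AND_EXPR instead and returns at least 4, the count contributed by
   the constant operand.  */
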
2606 /* Return 1 if EXPR is the real constant zero.  Trailing zeroes matter for
2607    decimal float constants, so don't return 1 for them.  */
2608 
2609 int
2610 real_zerop (const_tree expr)
2611 {
2612   switch (TREE_CODE (expr))
2613     {
2614     case REAL_CST:
2615       return real_equal (&TREE_REAL_CST (expr), &dconst0)
2616 	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2617     case COMPLEX_CST:
2618       return real_zerop (TREE_REALPART (expr))
2619 	     && real_zerop (TREE_IMAGPART (expr));
2620     case VECTOR_CST:
2621       {
2622 	unsigned i;
2623 	for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2624 	  if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2625 	    return false;
2626 	return true;
2627       }
2628     default:
2629       return false;
2630     }
2631 }
2632 
2633 /* Return 1 if EXPR is the real constant one in real or complex form.
2634    Trailing zeroes matter for decimal float constants, so don't return
2635    1 for them.  */
2636 
2637 int
2638 real_onep (const_tree expr)
2639 {
2640   switch (TREE_CODE (expr))
2641     {
2642     case REAL_CST:
2643       return real_equal (&TREE_REAL_CST (expr), &dconst1)
2644 	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2645     case COMPLEX_CST:
2646       return real_onep (TREE_REALPART (expr))
2647 	     && real_zerop (TREE_IMAGPART (expr));
2648     case VECTOR_CST:
2649       {
2650 	unsigned i;
2651 	for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2652 	  if (!real_onep (VECTOR_CST_ELT (expr, i)))
2653 	    return false;
2654 	return true;
2655       }
2656     default:
2657       return false;
2658     }
2659 }
2660 
2661 /* Return 1 if EXPR is the real constant minus one.  Trailing zeroes
2662    matter for decimal float constants, so don't return 1 for them.  */
2663 
2664 int
2665 real_minus_onep (const_tree expr)
2666 {
2667   switch (TREE_CODE (expr))
2668     {
2669     case REAL_CST:
2670       return real_equal (&TREE_REAL_CST (expr), &dconstm1)
2671 	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2672     case COMPLEX_CST:
2673       return real_minus_onep (TREE_REALPART (expr))
2674 	     && real_zerop (TREE_IMAGPART (expr));
2675     case VECTOR_CST:
2676       {
2677 	unsigned i;
2678 	for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2679 	  if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2680 	    return false;
2681 	return true;
2682       }
2683     default:
2684       return false;
2685     }
2686 }
2687 
2688 /* Nonzero if EXP is a constant or a cast of a constant.  */
2689 
2690 int
2691 really_constant_p (const_tree exp)
2692 {
2693   /* This is not quite the same as STRIP_NOPS.  It does more.  */
2694   while (CONVERT_EXPR_P (exp)
2695 	 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2696     exp = TREE_OPERAND (exp, 0);
2697   return TREE_CONSTANT (exp);
2698 }
2699 
2700 /* Return first list element whose TREE_VALUE is ELEM.
2701    Return 0 if ELEM is not in LIST.  */
2702 
2703 tree
2704 value_member (tree elem, tree list)
2705 {
2706   while (list)
2707     {
2708       if (elem == TREE_VALUE (list))
2709 	return list;
2710       list = TREE_CHAIN (list);
2711     }
2712   return NULL_TREE;
2713 }
2714 
2715 /* Return first list element whose TREE_PURPOSE is ELEM.
2716    Return 0 if ELEM is not in LIST.  */
2717 
2718 tree
2719 purpose_member (const_tree elem, tree list)
2720 {
2721   while (list)
2722     {
2723       if (elem == TREE_PURPOSE (list))
2724 	return list;
2725       list = TREE_CHAIN (list);
2726     }
2727   return NULL_TREE;
2728 }
2729 
2730 /* Return true if ELEM is in V.  */
2731 
2732 bool
2733 vec_member (const_tree elem, vec<tree, va_gc> *v)
2734 {
2735   unsigned ix;
2736   tree t;
2737   FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2738     if (elem == t)
2739       return true;
2740   return false;
2741 }
2742 
2743 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2744    NULL_TREE.  */
2745 
2746 tree
2747 chain_index (int idx, tree chain)
2748 {
2749   for (; chain && idx > 0; --idx)
2750     chain = TREE_CHAIN (chain);
2751   return chain;
2752 }
2753 
2754 /* Return nonzero if ELEM is part of the chain CHAIN.  */
2755 
2756 int
2757 chain_member (const_tree elem, const_tree chain)
2758 {
2759   while (chain)
2760     {
2761       if (elem == chain)
2762 	return 1;
2763       chain = DECL_CHAIN (chain);
2764     }
2765 
2766   return 0;
2767 }
2768 
2769 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2770    We expect a null pointer to mark the end of the chain.
2771    This is the Lisp primitive `length'.  */
2772 
2773 int
2774 list_length (const_tree t)
2775 {
2776   const_tree p = t;
2777 #ifdef ENABLE_TREE_CHECKING
2778   const_tree q = t;
2779 #endif
2780   int len = 0;
2781 
2782   while (p)
2783     {
2784       p = TREE_CHAIN (p);
2785 #ifdef ENABLE_TREE_CHECKING
2786       if (len % 2)
2787 	q = TREE_CHAIN (q);
2788       gcc_assert (p != q);
2789 #endif
2790       len++;
2791     }
2792 
2793   return len;
2794 }
2795 
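/* Editorial note (not part of the original source): under
   ENABLE_TREE_CHECKING the Q pointer above advances only every other
   iteration, half the speed of P, so a corrupted circular chain makes P
   catch up to Q and trips the assertion instead of hanging (the classic
   tortoise-and-hare cycle check).  */
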
2796 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2797    UNION_TYPE TYPE, or NULL_TREE if none.  */
2798 
2799 tree
2800 first_field (const_tree type)
2801 {
2802   tree t = TYPE_FIELDS (type);
2803   while (t && TREE_CODE (t) != FIELD_DECL)
2804     t = TREE_CHAIN (t);
2805   return t;
2806 }
2807 
2808 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2809    by modifying the last node in chain 1 to point to chain 2.
2810    This is the Lisp primitive `nconc'.  */
2811 
2812 tree
2813 chainon (tree op1, tree op2)
2814 {
2815   tree t1;
2816 
2817   if (!op1)
2818     return op2;
2819   if (!op2)
2820     return op1;
2821 
2822   for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2823     continue;
2824   TREE_CHAIN (t1) = op2;
2825 
2826 #ifdef ENABLE_TREE_CHECKING
2827   {
2828     tree t2;
2829     for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2830       gcc_assert (t2 != t1);
2831   }
2832 #endif
2833 
2834   return op1;
2835 }
2836 
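/* Editorial usage sketch (not part of the original source): chainon walks
   to the end of OP1 before splicing, so appending one node at a time, as in

     fields = chainon (fields, new_field);

   inside a loop is quadratic in the chain length; building the additions
   into their own chain and calling chainon once keeps it linear.  FIELDS
   and NEW_FIELD are hypothetical FIELD_DECL chains.  */
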
2837 /* Return the last node in a chain of nodes (chained through TREE_CHAIN).  */
2838 
2839 tree
2840 tree_last (tree chain)
2841 {
2842   tree next;
2843   if (chain)
2844     while ((next = TREE_CHAIN (chain)))
2845       chain = next;
2846   return chain;
2847 }
2848 
2849 /* Reverse the order of elements in the chain T,
2850    and return the new head of the chain (old last element).  */
2851 
2852 tree
2853 nreverse (tree t)
2854 {
2855   tree prev = 0, decl, next;
2856   for (decl = t; decl; decl = next)
2857     {
2858       /* We shouldn't be using this function to reverse BLOCK chains; we
2859 	 have blocks_nreverse for that.  */
2860       gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2861       next = TREE_CHAIN (decl);
2862       TREE_CHAIN (decl) = prev;
2863       prev = decl;
2864     }
2865   return prev;
2866 }
2867 
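/* Editorial usage sketch (not part of the original source): a common
   pattern is to cons onto the head of a TREE_LIST while scanning and fix
   the order once at the end, e.g.

     tree list = NULL_TREE;
     for (i = 0; i < n; i++)
       list = tree_cons (NULL_TREE, elts[i], list);
     list = nreverse (list);

   ELTS and N are hypothetical; each tree_cons prepends in constant time
   and the single nreverse restores source order in one pass.  */
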
2868 /* Return a newly created TREE_LIST node whose
2869    purpose and value fields are PARM and VALUE.  */
2870 
2871 tree
2872 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2873 {
2874   tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2875   TREE_PURPOSE (t) = parm;
2876   TREE_VALUE (t) = value;
2877   return t;
2878 }
2879 
2880 /* Build a chain of TREE_LIST nodes from a vector.  */
2881 
2882 tree
2883 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2884 {
2885   tree ret = NULL_TREE;
2886   tree *pp = &ret;
2887   unsigned int i;
2888   tree t;
2889   FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2890     {
2891       *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2892       pp = &TREE_CHAIN (*pp);
2893     }
2894   return ret;
2895 }
2896 
2897 /* Return a newly created TREE_LIST node whose
2898    purpose and value fields are PURPOSE and VALUE
2899    and whose TREE_CHAIN is CHAIN.  */
2900 
2901 tree
2902 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2903 {
2904   tree node;
2905 
2906   node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2907   memset (node, 0, sizeof (struct tree_common));
2908 
2909   record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2910 
2911   TREE_SET_CODE (node, TREE_LIST);
2912   TREE_CHAIN (node) = chain;
2913   TREE_PURPOSE (node) = purpose;
2914   TREE_VALUE (node) = value;
2915   return node;
2916 }
2917 
2918 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2919    trees.  */
2920 
2921 vec<tree, va_gc> *
2922 ctor_to_vec (tree ctor)
2923 {
2924   vec<tree, va_gc> *vec;
2925   vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2926   unsigned int ix;
2927   tree val;
2928 
2929   FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2930     vec->quick_push (val);
2931 
2932   return vec;
2933 }
2934 
2935 /* Return the size nominally occupied by an object of type TYPE
2936    when it resides in memory.  The value is measured in units of bytes,
2937    and its data type is that normally used for type sizes
2938    (which is the first type created by make_signed_type or
2939    make_unsigned_type).  */
2940 
2941 tree
2942 size_in_bytes (const_tree type)
2943 {
2944   tree t;
2945 
2946   if (type == error_mark_node)
2947     return integer_zero_node;
2948 
2949   type = TYPE_MAIN_VARIANT (type);
2950   t = TYPE_SIZE_UNIT (type);
2951 
2952   if (t == 0)
2953     {
2954       lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2955       return size_zero_node;
2956     }
2957 
2958   return t;
2959 }
2960 
2961 /* Return the size of TYPE (in bytes) as a wide integer
2962    or return -1 if the size can vary or is larger than an integer.  */
2963 
2964 HOST_WIDE_INT
2965 int_size_in_bytes (const_tree type)
2966 {
2967   tree t;
2968 
2969   if (type == error_mark_node)
2970     return 0;
2971 
2972   type = TYPE_MAIN_VARIANT (type);
2973   t = TYPE_SIZE_UNIT (type);
2974 
2975   if (t && tree_fits_uhwi_p (t))
2976     return TREE_INT_CST_LOW (t);
2977   else
2978     return -1;
2979 }
2980 
2981 /* Return the maximum size of TYPE (in bytes) as a wide integer
2982    or return -1 if the size can vary or is larger than an integer.  */
2983 
2984 HOST_WIDE_INT
2985 max_int_size_in_bytes (const_tree type)
2986 {
2987   HOST_WIDE_INT size = -1;
2988   tree size_tree;
2989 
2990   /* If this is an array type, check for a possible MAX_SIZE attached.  */
2991 
2992   if (TREE_CODE (type) == ARRAY_TYPE)
2993     {
2994       size_tree = TYPE_ARRAY_MAX_SIZE (type);
2995 
2996       if (size_tree && tree_fits_uhwi_p (size_tree))
2997 	size = tree_to_uhwi (size_tree);
2998     }
2999 
3000   /* If we still haven't been able to get a size, see if the language
3001      can compute a maximum size.  */
3002 
3003   if (size == -1)
3004     {
3005       size_tree = lang_hooks.types.max_size (type);
3006 
3007       if (size_tree && tree_fits_uhwi_p (size_tree))
3008 	size = tree_to_uhwi (size_tree);
3009     }
3010 
3011   return size;
3012 }
3013 
3014 /* Return the bit position of FIELD, in bits from the start of the record.
3015    This is a tree of type bitsizetype.  */
3016 
3017 tree
3018 bit_position (const_tree field)
3019 {
3020   return bit_from_pos (DECL_FIELD_OFFSET (field),
3021 		       DECL_FIELD_BIT_OFFSET (field));
3022 }
3023 
3024 /* Return the byte position of FIELD, in bytes from the start of the record.
3025    This is a tree of type sizetype.  */
3026 
3027 tree
3028 byte_position (const_tree field)
3029 {
3030   return byte_from_pos (DECL_FIELD_OFFSET (field),
3031 			DECL_FIELD_BIT_OFFSET (field));
3032 }
3033 
3034 /* Likewise, but return as an integer.  It must be representable in
3035    that way (since it could be a signed value, we don't have the
3036    option of returning -1 like int_size_in_bytes can).  */
3037 
3038 HOST_WIDE_INT
3039 int_byte_position (const_tree field)
3040 {
3041   return tree_to_shwi (byte_position (field));
3042 }
3043 
3044 /* Return the strictest alignment, in bits, that T is known to have.  */
3045 
3046 unsigned int
3047 expr_align (const_tree t)
3048 {
3049   unsigned int align0, align1;
3050 
3051   switch (TREE_CODE (t))
3052     {
3053     CASE_CONVERT:  case NON_LVALUE_EXPR:
3054       /* If we have conversions, we know that the alignment of the
3055 	 object must meet each of the alignments of the types.  */
3056       align0 = expr_align (TREE_OPERAND (t, 0));
3057       align1 = TYPE_ALIGN (TREE_TYPE (t));
3058       return MAX (align0, align1);
3059 
3060     case SAVE_EXPR:         case COMPOUND_EXPR:       case MODIFY_EXPR:
3061     case INIT_EXPR:         case TARGET_EXPR:         case WITH_CLEANUP_EXPR:
3062     case CLEANUP_POINT_EXPR:
3063       /* These don't change the alignment of an object.  */
3064       return expr_align (TREE_OPERAND (t, 0));
3065 
3066     case COND_EXPR:
3067       /* The best we can do is say that the alignment is the least aligned
3068 	 of the two arms.  */
3069       align0 = expr_align (TREE_OPERAND (t, 1));
3070       align1 = expr_align (TREE_OPERAND (t, 2));
3071       return MIN (align0, align1);
3072 
3073       /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
3074 	 meaningfully, it's always 1.  */
3075     case LABEL_DECL:     case CONST_DECL:
3076     case VAR_DECL:       case PARM_DECL:   case RESULT_DECL:
3077     case FUNCTION_DECL:
3078       gcc_assert (DECL_ALIGN (t) != 0);
3079       return DECL_ALIGN (t);
3080 
3081     default:
3082       break;
3083     }
3084 
3085   /* Otherwise take the alignment from that of the type.  */
3086   return TYPE_ALIGN (TREE_TYPE (t));
3087 }
3088 
3089 /* Return, as a tree node, the number of elements for TYPE (which is an
3090    ARRAY_TYPE) minus one. This counts only elements of the top array.  */
3091 
3092 tree
3093 array_type_nelts (const_tree type)
3094 {
3095   tree index_type, min, max;
3096 
3097   /* If they did it with unspecified bounds, then we should have already
3098      given an error about it before we got here.  */
3099   if (! TYPE_DOMAIN (type))
3100     return error_mark_node;
3101 
3102   index_type = TYPE_DOMAIN (type);
3103   min = TYPE_MIN_VALUE (index_type);
3104   max = TYPE_MAX_VALUE (index_type);
3105 
3106   /* TYPE_MAX_VALUE may not be set if the array has unknown length.  */
3107   if (!max)
3108     return error_mark_node;
3109 
3110   return (integer_zerop (min)
3111 	  ? max
3112 	  : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3113 }
3114 
3115 /* If arg is static -- a reference to an object in static storage -- then
3116    return the object.  This is not the same as the C meaning of `static'.
3117    If arg isn't static, return NULL.  */
3118 
3119 tree
3120 staticp (tree arg)
3121 {
3122   switch (TREE_CODE (arg))
3123     {
3124     case FUNCTION_DECL:
3125       /* Nested functions are static, even though taking their address will
3126 	 involve a trampoline as we unnest the nested function and create
3127 	 the trampoline on the tree level.  */
3128       return arg;
3129 
3130     case VAR_DECL:
3131       return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3132 	      && ! DECL_THREAD_LOCAL_P (arg)
3133 	      && ! DECL_DLLIMPORT_P (arg)
3134 	      ? arg : NULL);
3135 
3136     case CONST_DECL:
3137       return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3138 	      ? arg : NULL);
3139 
3140     case CONSTRUCTOR:
3141       return TREE_STATIC (arg) ? arg : NULL;
3142 
3143     case LABEL_DECL:
3144     case STRING_CST:
3145       return arg;
3146 
3147     case COMPONENT_REF:
3148       /* If the thing being referenced is not a field, then it is
3149 	 something language specific.  */
3150       gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3151 
3152       /* If we are referencing a bitfield, we can't evaluate an
3153 	 ADDR_EXPR at compile time and so it isn't a constant.  */
3154       if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3155 	return NULL;
3156 
3157       return staticp (TREE_OPERAND (arg, 0));
3158 
3159     case BIT_FIELD_REF:
3160       return NULL;
3161 
3162     case INDIRECT_REF:
3163       return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3164 
3165     case ARRAY_REF:
3166     case ARRAY_RANGE_REF:
3167       if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3168 	  && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3169 	return staticp (TREE_OPERAND (arg, 0));
3170       else
3171 	return NULL;
3172 
3173     case COMPOUND_LITERAL_EXPR:
3174       return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3175 
3176     default:
3177       return NULL;
3178     }
3179 }
3180 
3181 
3182 
3183 
3184 /* Return whether OP is a DECL whose address is function-invariant.  */
3185 
3186 bool
3187 decl_address_invariant_p (const_tree op)
3188 {
3189   /* The conditions below are slightly less strict than the one in
3190      staticp.  */
3191 
3192   switch (TREE_CODE (op))
3193     {
3194     case PARM_DECL:
3195     case RESULT_DECL:
3196     case LABEL_DECL:
3197     case FUNCTION_DECL:
3198       return true;
3199 
3200     case VAR_DECL:
3201       if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3202           || DECL_THREAD_LOCAL_P (op)
3203           || DECL_CONTEXT (op) == current_function_decl
3204           || decl_function_context (op) == current_function_decl)
3205         return true;
3206       break;
3207 
3208     case CONST_DECL:
3209       if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3210           || decl_function_context (op) == current_function_decl)
3211         return true;
3212       break;
3213 
3214     default:
3215       break;
3216     }
3217 
3218   return false;
3219 }
3220 
3221 /* Return whether OP is a DECL whose address is interprocedural-invariant.  */
3222 
3223 bool
3224 decl_address_ip_invariant_p (const_tree op)
3225 {
3226   /* The conditions below are slightly less strict than the one in
3227      staticp.  */
3228 
3229   switch (TREE_CODE (op))
3230     {
3231     case LABEL_DECL:
3232     case FUNCTION_DECL:
3233     case STRING_CST:
3234       return true;
3235 
3236     case VAR_DECL:
3237       if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3238            && !DECL_DLLIMPORT_P (op))
3239           || DECL_THREAD_LOCAL_P (op))
3240         return true;
3241       break;
3242 
3243     case CONST_DECL:
3244       if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3245         return true;
3246       break;
3247 
3248     default:
3249       break;
3250     }
3251 
3252   return false;
3253 }
3254 
3255 
3256 /* Return true if T is function-invariant (internal function, does
3257    not handle arithmetic; that's handled in skip_simple_arithmetic and
3258    tree_invariant_p).  */
3259 
3260 static bool
3261 tree_invariant_p_1 (tree t)
3262 {
3263   tree op;
3264 
3265   if (TREE_CONSTANT (t)
3266       || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3267     return true;
3268 
3269   switch (TREE_CODE (t))
3270     {
3271     case SAVE_EXPR:
3272       return true;
3273 
3274     case ADDR_EXPR:
3275       op = TREE_OPERAND (t, 0);
3276       while (handled_component_p (op))
3277 	{
3278 	  switch (TREE_CODE (op))
3279 	    {
3280 	    case ARRAY_REF:
3281 	    case ARRAY_RANGE_REF:
3282 	      if (!tree_invariant_p (TREE_OPERAND (op, 1))
3283 		  || TREE_OPERAND (op, 2) != NULL_TREE
3284 		  || TREE_OPERAND (op, 3) != NULL_TREE)
3285 		return false;
3286 	      break;
3287 
3288 	    case COMPONENT_REF:
3289 	      if (TREE_OPERAND (op, 2) != NULL_TREE)
3290 		return false;
3291 	      break;
3292 
3293 	    default:;
3294 	    }
3295 	  op = TREE_OPERAND (op, 0);
3296 	}
3297 
3298       return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3299 
3300     default:
3301       break;
3302     }
3303 
3304   return false;
3305 }
3306 
3307 /* Return true if T is function-invariant.  */
3308 
3309 bool
3310 tree_invariant_p (tree t)
3311 {
3312   tree inner = skip_simple_arithmetic (t);
3313   return tree_invariant_p_1 (inner);
3314 }
3315 
3316 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3317    Do this to any expression which may be used in more than one place,
3318    but must be evaluated only once.
3319 
3320    Normally, expand_expr would reevaluate the expression each time.
3321    Calling save_expr produces something that is evaluated and recorded
3322    the first time expand_expr is called on it.  Subsequent calls to
3323    expand_expr just reuse the recorded value.
3324 
3325    The call to expand_expr that generates code that actually computes
3326    the value is the first call *at compile time*.  Subsequent calls
3327    *at compile time* generate code to use the saved value.
3328    This produces correct result provided that *at run time* control
3329    always flows through the insns made by the first expand_expr
3330    before reaching the other places where the save_expr was evaluated.
3331    You, the caller of save_expr, must make sure this is so.
3332 
3333    Constants, and certain read-only nodes, are returned with no
3334    SAVE_EXPR because that is safe.  Expressions containing placeholders
3335    are not touched; see tree.def for an explanation of what these
3336    are used for.  */
3337 
3338 tree
3339 save_expr (tree expr)
3340 {
3341   tree t = fold (expr);
3342   tree inner;
3343 
3344   /* If the tree evaluates to a constant, then we don't want to hide that
3345      fact (i.e. this allows further folding, and direct checks for constants).
3346      However, a read-only object that has side effects cannot be bypassed.
3347      Since it is no problem to reevaluate literals, we just return the
3348      literal node.  */
3349   inner = skip_simple_arithmetic (t);
3350   if (TREE_CODE (inner) == ERROR_MARK)
3351     return inner;
3352 
3353   if (tree_invariant_p_1 (inner))
3354     return t;
3355 
3356   /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3357      it means that the size or offset of some field of an object depends on
3358      the value within another field.
3359 
3360      Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3361      and some variable since it would then need to be both evaluated once and
3362      evaluated more than once.  Front-ends must ensure this case cannot
3363      happen by surrounding any such subexpressions in their own SAVE_EXPR
3364      and forcing evaluation at the proper time.  */
3365   if (contains_placeholder_p (inner))
3366     return t;
3367 
3368   t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3369   SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3370 
3371   /* This expression might be placed ahead of a jump to ensure that the
3372      value was computed on both sides of the jump.  So make sure it isn't
3373      eliminated as dead.  */
3374   TREE_SIDE_EFFECTS (t) = 1;
3375   return t;
3376 }
3377 
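/* Editorial usage sketch (not part of the original source): when an operand
   must appear twice but be evaluated once, wrap it and reuse the wrapper:

     tree arg = save_expr (call);
     tree sq  = fold_build2 (MULT_EXPR, TREE_TYPE (arg), arg, arg);

   CALL stands for some hypothetical expression with side effects; if the
   operand is already invariant (a constant, a SAVE_EXPR, ...) save_expr
   simply returns it unwrapped, as described above.  */
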
3378 /* Look inside EXPR into any simple arithmetic operations.  Return the
3379    outermost non-arithmetic or non-invariant node.  */
3380 
3381 tree
3382 skip_simple_arithmetic (tree expr)
3383 {
3384   /* We don't care about whether this can be used as an lvalue in this
3385      context.  */
3386   while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3387     expr = TREE_OPERAND (expr, 0);
3388 
3389   /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3390      a constant, it will be more efficient to not make another SAVE_EXPR since
3391      it will allow better simplification and GCSE will be able to merge the
3392      computations if they actually occur.  */
3393   while (true)
3394     {
3395       if (UNARY_CLASS_P (expr))
3396 	expr = TREE_OPERAND (expr, 0);
3397       else if (BINARY_CLASS_P (expr))
3398 	{
3399 	  if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3400 	    expr = TREE_OPERAND (expr, 0);
3401 	  else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3402 	    expr = TREE_OPERAND (expr, 1);
3403 	  else
3404 	    break;
3405 	}
3406       else
3407 	break;
3408     }
3409 
3410   return expr;
3411 }
3412 
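/* Editorial worked example (not part of the original source): given
   PLUS_EXPR <SAVE_EXPR <a>, 4>, the loop above strips the addition because
   its other operand is invariant and returns the SAVE_EXPR itself;
   tree_invariant_p_1 then accepts that node, which is why save_expr does
   not wrap such an expression a second time.  */
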
3413 /* Look inside EXPR into simple arithmetic operations involving constants.
3414    Return the outermost non-arithmetic or non-constant node.  */
3415 
3416 tree
3417 skip_simple_constant_arithmetic (tree expr)
3418 {
3419   while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3420     expr = TREE_OPERAND (expr, 0);
3421 
3422   while (true)
3423     {
3424       if (UNARY_CLASS_P (expr))
3425 	expr = TREE_OPERAND (expr, 0);
3426       else if (BINARY_CLASS_P (expr))
3427 	{
3428 	  if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3429 	    expr = TREE_OPERAND (expr, 0);
3430 	  else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3431 	    expr = TREE_OPERAND (expr, 1);
3432 	  else
3433 	    break;
3434 	}
3435       else
3436 	break;
3437     }
3438 
3439   return expr;
3440 }
3441 
3442 /* Return which tree structure is used by T.  */
3443 
3444 enum tree_node_structure_enum
3445 tree_node_structure (const_tree t)
3446 {
3447   const enum tree_code code = TREE_CODE (t);
3448   return tree_node_structure_for_code (code);
3449 }
3450 
3451 /* Set various status flags when building a CALL_EXPR object T.  */
3452 
3453 static void
3454 process_call_operands (tree t)
3455 {
3456   bool side_effects = TREE_SIDE_EFFECTS (t);
3457   bool read_only = false;
3458   int i = call_expr_flags (t);
3459 
3460   /* Calls have side-effects, except those to const or pure functions.  */
3461   if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3462     side_effects = true;
3463   /* Propagate TREE_READONLY of arguments for const functions.  */
3464   if (i & ECF_CONST)
3465     read_only = true;
3466 
3467   if (!side_effects || read_only)
3468     for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3469       {
3470 	tree op = TREE_OPERAND (t, i);
3471 	if (op && TREE_SIDE_EFFECTS (op))
3472 	  side_effects = true;
3473 	if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3474 	  read_only = false;
3475       }
3476 
3477   TREE_SIDE_EFFECTS (t) = side_effects;
3478   TREE_READONLY (t) = read_only;
3479 }
3480 
3481 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3482    size or offset that depends on a field within a record.  */
3483 
3484 bool
3485 contains_placeholder_p (const_tree exp)
3486 {
3487   enum tree_code code;
3488 
3489   if (!exp)
3490     return 0;
3491 
3492   code = TREE_CODE (exp);
3493   if (code == PLACEHOLDER_EXPR)
3494     return 1;
3495 
3496   switch (TREE_CODE_CLASS (code))
3497     {
3498     case tcc_reference:
3499       /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3500 	 position computations since they will be converted into a
3501 	 WITH_RECORD_EXPR involving the reference, which we assume
3502 	 here will be valid.  */
3503       return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3504 
3505     case tcc_exceptional:
3506       if (code == TREE_LIST)
3507 	return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3508 		|| CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3509       break;
3510 
3511     case tcc_unary:
3512     case tcc_binary:
3513     case tcc_comparison:
3514     case tcc_expression:
3515       switch (code)
3516 	{
3517 	case COMPOUND_EXPR:
3518 	  /* Ignoring the first operand isn't quite right, but works best.  */
3519 	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3520 
3521 	case COND_EXPR:
3522 	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3523 		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3524 		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3525 
3526 	case SAVE_EXPR:
3527 	  /* The save_expr function never wraps anything containing
3528 	     a PLACEHOLDER_EXPR. */
3529 	  return 0;
3530 
3531 	default:
3532 	  break;
3533 	}
3534 
3535       switch (TREE_CODE_LENGTH (code))
3536 	{
3537 	case 1:
3538 	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3539 	case 2:
3540 	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3541 		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3542 	default:
3543 	  return 0;
3544 	}
3545 
3546     case tcc_vl_exp:
3547       switch (code)
3548 	{
3549 	case CALL_EXPR:
3550 	  {
3551 	    const_tree arg;
3552 	    const_call_expr_arg_iterator iter;
3553 	    FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3554 	      if (CONTAINS_PLACEHOLDER_P (arg))
3555 		return 1;
3556 	    return 0;
3557 	  }
3558 	default:
3559 	  return 0;
3560 	}
3561 
3562     default:
3563       return 0;
3564     }
3565   return 0;
3566 }
3567 
3568 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3569    directly.  This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3570    field positions.  */
3571 
3572 static bool
3573 type_contains_placeholder_1 (const_tree type)
3574 {
3575   /* If the size contains a placeholder or the parent type (component type in
3576      the case of arrays) involves a placeholder, this type does.  */
3577   if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3578       || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3579       || (!POINTER_TYPE_P (type)
3580 	  && TREE_TYPE (type)
3581 	  && type_contains_placeholder_p (TREE_TYPE (type))))
3582     return true;
3583 
3584   /* Now do type-specific checks.  Note that the last part of the check above
3585      greatly limits what we have to do below.  */
3586   switch (TREE_CODE (type))
3587     {
3588     case VOID_TYPE:
3589     case POINTER_BOUNDS_TYPE:
3590     case COMPLEX_TYPE:
3591     case ENUMERAL_TYPE:
3592     case BOOLEAN_TYPE:
3593     case POINTER_TYPE:
3594     case OFFSET_TYPE:
3595     case REFERENCE_TYPE:
3596     case METHOD_TYPE:
3597     case FUNCTION_TYPE:
3598     case VECTOR_TYPE:
3599     case NULLPTR_TYPE:
3600       return false;
3601 
3602     case INTEGER_TYPE:
3603     case REAL_TYPE:
3604     case FIXED_POINT_TYPE:
3605       /* Here we just check the bounds.  */
3606       return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3607 	      || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3608 
3609     case ARRAY_TYPE:
3610       /* We have already checked the component type above, so just check
3611 	 the domain type.  Flexible array members have a null domain.  */
3612       return TYPE_DOMAIN (type) ?
3613 	type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
3614 
3615     case RECORD_TYPE:
3616     case UNION_TYPE:
3617     case QUAL_UNION_TYPE:
3618       {
3619 	tree field;
3620 
3621 	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3622 	  if (TREE_CODE (field) == FIELD_DECL
3623 	      && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3624 		  || (TREE_CODE (type) == QUAL_UNION_TYPE
3625 		      && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3626 		  || type_contains_placeholder_p (TREE_TYPE (field))))
3627 	    return true;
3628 
3629 	return false;
3630       }
3631 
3632     default:
3633       gcc_unreachable ();
3634     }
3635 }
3636 
3637 /* Wrapper around above function used to cache its result.  */
3638 
3639 bool
3640 type_contains_placeholder_p (tree type)
3641 {
3642   bool result;
3643 
3644   /* If the contains_placeholder_bits field has been initialized,
3645      then we know the answer.  */
3646   if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3647     return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3648 
3649   /* Indicate that we've seen this type node, and the answer is false.
3650      This is what we want to return if we run into recursion via fields.  */
3651   TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3652 
3653   /* Compute the real value.  */
3654   result = type_contains_placeholder_1 (type);
3655 
3656   /* Store the real value.  */
3657   TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3658 
3659   return result;
3660 }
3661 
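/* Editorial note (not part of the original source): the two-bit
   TYPE_CONTAINS_PLACEHOLDER_INTERNAL field acts as a memoization cache,
   with 0 meaning "not computed yet", 1 meaning "no placeholder" and 2
   meaning "contains a placeholder".  Seeding it with 1 before the real
   computation lets self-referential field types terminate with a
   provisional "false" instead of recursing forever.  */
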
3662 /* Push tree EXP onto vector QUEUE if it is not already present.  */
3663 
3664 static void
3665 push_without_duplicates (tree exp, vec<tree> *queue)
3666 {
3667   unsigned int i;
3668   tree iter;
3669 
3670   FOR_EACH_VEC_ELT (*queue, i, iter)
3671     if (simple_cst_equal (iter, exp) == 1)
3672       break;
3673 
3674   if (!iter)
3675     queue->safe_push (exp);
3676 }
3677 
3678 /* Given a tree EXP, find all occurrences of references to fields
3679    in a PLACEHOLDER_EXPR and place them in vector REFS without
3680    duplicates.  Also record VAR_DECLs and CONST_DECLs.  Note that
3681    we assume here that EXP contains only arithmetic expressions
3682    or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3683    argument list.  */
3684 
3685 void
3686 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3687 {
3688   enum tree_code code = TREE_CODE (exp);
3689   tree inner;
3690   int i;
3691 
3692   /* We handle TREE_LIST and COMPONENT_REF separately.  */
3693   if (code == TREE_LIST)
3694     {
3695       FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3696       FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3697     }
3698   else if (code == COMPONENT_REF)
3699     {
3700       for (inner = TREE_OPERAND (exp, 0);
3701 	   REFERENCE_CLASS_P (inner);
3702 	   inner = TREE_OPERAND (inner, 0))
3703 	;
3704 
3705       if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3706 	push_without_duplicates (exp, refs);
3707       else
3708 	FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3709    }
3710   else
3711     switch (TREE_CODE_CLASS (code))
3712       {
3713       case tcc_constant:
3714 	break;
3715 
3716       case tcc_declaration:
3717 	/* Variables allocated to static storage can stay.  */
3718         if (!TREE_STATIC (exp))
3719 	  push_without_duplicates (exp, refs);
3720 	break;
3721 
3722       case tcc_expression:
3723 	/* This is the pattern built in ada/make_aligning_type.  */
3724 	if (code == ADDR_EXPR
3725 	    && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3726 	  {
3727 	    push_without_duplicates (exp, refs);
3728 	    break;
3729 	  }
3730 
3731         /* Fall through...  */
3732 
3733       case tcc_exceptional:
3734       case tcc_unary:
3735       case tcc_binary:
3736       case tcc_comparison:
3737       case tcc_reference:
3738 	for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3739 	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3740 	break;
3741 
3742       case tcc_vl_exp:
3743 	for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3744 	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3745 	break;
3746 
3747       default:
3748 	gcc_unreachable ();
3749       }
3750 }
3751 
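/* A minimal usage sketch (the variable names here are made up for
   illustration):

     vec<tree> refs = vNULL;
     find_placeholder_in_expr (TYPE_SIZE (type), &refs);
     ...
     refs.release ();

   after which REFS holds, without duplicates, every reference based on
   a PLACEHOLDER_EXPR found in the size expression, plus any non-static
   VAR_DECLs and CONST_DECLs.  */
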
3752 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3753    return a tree with all occurrences of references to F in a
3754    PLACEHOLDER_EXPR replaced by R.  Also handle VAR_DECLs and
3755    CONST_DECLs.  Note that we assume here that EXP contains only
3756    arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3757    occurring only in their argument list.  */
3758 
3759 tree
3760 substitute_in_expr (tree exp, tree f, tree r)
3761 {
3762   enum tree_code code = TREE_CODE (exp);
3763   tree op0, op1, op2, op3;
3764   tree new_tree;
3765 
3766   /* We handle TREE_LIST and COMPONENT_REF separately.  */
3767   if (code == TREE_LIST)
3768     {
3769       op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3770       op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3771       if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3772 	return exp;
3773 
3774       return tree_cons (TREE_PURPOSE (exp), op1, op0);
3775     }
3776   else if (code == COMPONENT_REF)
3777     {
3778       tree inner;
3779 
3780       /* If this expression is getting a value from a PLACEHOLDER_EXPR
3781 	 and it is the right field, replace it with R.  */
3782       for (inner = TREE_OPERAND (exp, 0);
3783 	   REFERENCE_CLASS_P (inner);
3784 	   inner = TREE_OPERAND (inner, 0))
3785 	;
3786 
3787       /* The field.  */
3788       op1 = TREE_OPERAND (exp, 1);
3789 
3790       if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3791 	return r;
3792 
3793 	      /* If this expression hasn't been completed yet, leave it alone.  */
3794       if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3795 	return exp;
3796 
3797       op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3798       if (op0 == TREE_OPERAND (exp, 0))
3799 	return exp;
3800 
3801       new_tree
3802 	= fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3803     }
3804   else
3805     switch (TREE_CODE_CLASS (code))
3806       {
3807       case tcc_constant:
3808 	return exp;
3809 
3810       case tcc_declaration:
3811 	if (exp == f)
3812 	  return r;
3813 	else
3814 	  return exp;
3815 
3816       case tcc_expression:
3817 	if (exp == f)
3818 	  return r;
3819 
3820         /* Fall through...  */
3821 
3822       case tcc_exceptional:
3823       case tcc_unary:
3824       case tcc_binary:
3825       case tcc_comparison:
3826       case tcc_reference:
3827 	switch (TREE_CODE_LENGTH (code))
3828 	  {
3829 	  case 0:
3830 	    return exp;
3831 
3832 	  case 1:
3833 	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3834 	    if (op0 == TREE_OPERAND (exp, 0))
3835 	      return exp;
3836 
3837 	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3838 	    break;
3839 
3840 	  case 2:
3841 	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3842 	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3843 
3844 	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3845 	      return exp;
3846 
3847 	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3848 	    break;
3849 
3850 	  case 3:
3851 	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3852 	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3853 	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3854 
3855 	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3856 		&& op2 == TREE_OPERAND (exp, 2))
3857 	      return exp;
3858 
3859 	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3860 	    break;
3861 
3862 	  case 4:
3863 	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3864 	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3865 	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3866 	    op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3867 
3868 	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3869 		&& op2 == TREE_OPERAND (exp, 2)
3870 		&& op3 == TREE_OPERAND (exp, 3))
3871 	      return exp;
3872 
3873 	    new_tree
3874 	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3875 	    break;
3876 
3877 	  default:
3878 	    gcc_unreachable ();
3879 	  }
3880 	break;
3881 
3882       case tcc_vl_exp:
3883 	{
3884 	  int i;
3885 
3886 	  new_tree = NULL_TREE;
3887 
3888 	  /* If we are trying to replace F with a constant, inline back
3889 	     functions that do nothing other than compute a value from
3890 	     the arguments they are passed.  This makes it possible to
3891 	     partially or entirely fold the replacement expression.  */
3892 	  if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3893 	    {
3894 	      tree t = maybe_inline_call_in_expr (exp);
3895 	      if (t)
3896 		return SUBSTITUTE_IN_EXPR (t, f, r);
3897 	    }
3898 
3899 	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3900 	    {
3901 	      tree op = TREE_OPERAND (exp, i);
3902 	      tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3903 	      if (new_op != op)
3904 		{
3905 		  if (!new_tree)
3906 		    new_tree = copy_node (exp);
3907 		  TREE_OPERAND (new_tree, i) = new_op;
3908 		}
3909 	    }
3910 
3911 	  if (new_tree)
3912 	    {
3913 	      new_tree = fold (new_tree);
3914 	      if (TREE_CODE (new_tree) == CALL_EXPR)
3915 		process_call_operands (new_tree);
3916 	    }
3917 	  else
3918 	    return exp;
3919 	}
3920 	break;
3921 
3922       default:
3923 	gcc_unreachable ();
3924       }
3925 
3926   TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3927 
3928   if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3929     TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3930 
3931   return new_tree;
3932 }
3933 
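/* For example, a hypothetical caller holding a FIELD_DECL F whose
   DECL_SIZE refers to a discriminant field through a PLACEHOLDER_EXPR
   could rewrite that size in terms of a replacement value REPL with:

     tree size = SUBSTITUTE_IN_EXPR (DECL_SIZE (field), f, repl);

   Subtrees that do not mention F are returned unchanged and therefore
   shared with the original expression, as the early "return exp" paths
   above show.  */
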
3934 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3935    for it within OBJ, a tree that is an object or a chain of references.  */
3936 
3937 tree
3938 substitute_placeholder_in_expr (tree exp, tree obj)
3939 {
3940   enum tree_code code = TREE_CODE (exp);
3941   tree op0, op1, op2, op3;
3942   tree new_tree;
3943 
3944   /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3945      in the chain of OBJ.  */
3946   if (code == PLACEHOLDER_EXPR)
3947     {
3948       tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3949       tree elt;
3950 
3951       for (elt = obj; elt != 0;
3952 	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3953 		   || TREE_CODE (elt) == COND_EXPR)
3954 		  ? TREE_OPERAND (elt, 1)
3955 		  : (REFERENCE_CLASS_P (elt)
3956 		     || UNARY_CLASS_P (elt)
3957 		     || BINARY_CLASS_P (elt)
3958 		     || VL_EXP_CLASS_P (elt)
3959 		     || EXPRESSION_CLASS_P (elt))
3960 		  ? TREE_OPERAND (elt, 0) : 0))
3961 	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3962 	  return elt;
3963 
3964       for (elt = obj; elt != 0;
3965 	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3966 		   || TREE_CODE (elt) == COND_EXPR)
3967 		  ? TREE_OPERAND (elt, 1)
3968 		  : (REFERENCE_CLASS_P (elt)
3969 		     || UNARY_CLASS_P (elt)
3970 		     || BINARY_CLASS_P (elt)
3971 		     || VL_EXP_CLASS_P (elt)
3972 		     || EXPRESSION_CLASS_P (elt))
3973 		  ? TREE_OPERAND (elt, 0) : 0))
3974 	if (POINTER_TYPE_P (TREE_TYPE (elt))
3975 	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3976 		== need_type))
3977 	  return fold_build1 (INDIRECT_REF, need_type, elt);
3978 
3979       /* If we didn't find it, return the original PLACEHOLDER_EXPR.  If it
3980 	 survives until RTL generation, there will be an error.  */
3981       return exp;
3982     }
3983 
3984   /* TREE_LIST is special because we need to look at TREE_VALUE
3985      and TREE_CHAIN, not TREE_OPERANDS.  */
3986   else if (code == TREE_LIST)
3987     {
3988       op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3989       op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3990       if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3991 	return exp;
3992 
3993       return tree_cons (TREE_PURPOSE (exp), op1, op0);
3994     }
3995   else
3996     switch (TREE_CODE_CLASS (code))
3997       {
3998       case tcc_constant:
3999       case tcc_declaration:
4000 	return exp;
4001 
4002       case tcc_exceptional:
4003       case tcc_unary:
4004       case tcc_binary:
4005       case tcc_comparison:
4006       case tcc_expression:
4007       case tcc_reference:
4008       case tcc_statement:
4009 	switch (TREE_CODE_LENGTH (code))
4010 	  {
4011 	  case 0:
4012 	    return exp;
4013 
4014 	  case 1:
4015 	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4016 	    if (op0 == TREE_OPERAND (exp, 0))
4017 	      return exp;
4018 
4019 	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4020 	    break;
4021 
4022 	  case 2:
4023 	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4024 	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4025 
4026 	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4027 	      return exp;
4028 
4029 	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4030 	    break;
4031 
4032 	  case 3:
4033 	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4034 	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4035 	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4036 
4037 	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4038 		&& op2 == TREE_OPERAND (exp, 2))
4039 	      return exp;
4040 
4041 	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4042 	    break;
4043 
4044 	  case 4:
4045 	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4046 	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4047 	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4048 	    op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4049 
4050 	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4051 		&& op2 == TREE_OPERAND (exp, 2)
4052 		&& op3 == TREE_OPERAND (exp, 3))
4053 	      return exp;
4054 
4055 	    new_tree
4056 	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4057 	    break;
4058 
4059 	  default:
4060 	    gcc_unreachable ();
4061 	  }
4062 	break;
4063 
4064       case tcc_vl_exp:
4065 	{
4066 	  int i;
4067 
4068 	  new_tree = NULL_TREE;
4069 
4070 	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4071 	    {
4072 	      tree op = TREE_OPERAND (exp, i);
4073 	      tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4074 	      if (new_op != op)
4075 		{
4076 		  if (!new_tree)
4077 		    new_tree = copy_node (exp);
4078 		  TREE_OPERAND (new_tree, i) = new_op;
4079 		}
4080 	    }
4081 
4082 	  if (new_tree)
4083 	    {
4084 	      new_tree = fold (new_tree);
4085 	      if (TREE_CODE (new_tree) == CALL_EXPR)
4086 		process_call_operands (new_tree);
4087 	    }
4088 	  else
4089 	    return exp;
4090 	}
4091 	break;
4092 
4093       default:
4094 	gcc_unreachable ();
4095       }
4096 
4097   TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4098 
4099   if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4100     TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4101 
4102   return new_tree;
4103 }
4104 
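/* A minimal sketch of the intended use (OBJ and TYPE are illustrative
   names): when the self-referential size of TYPE must be evaluated for
   a concrete object OBJ, the placeholder is resolved against it:

     tree size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE (type), obj);

   Each PLACEHOLDER_EXPR whose type matches something in the reference
   chain of OBJ is replaced by that reference, or by an INDIRECT_REF of
   a matching pointer, per the two loops above.  */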
4105 
4106 /* Subroutine of stabilize_reference; this is called for subtrees of
4107    references.  Any expression with side-effects must be put in a SAVE_EXPR
4108    to ensure that it is only evaluated once.
4109 
4110    We don't put SAVE_EXPR nodes around everything, because assigning very
4111    simple expressions to temporaries causes us to miss good opportunities
4112    for optimizations.  Among other things, the opportunity to fold in the
4113    addition of a constant into an addressing mode often gets lost, e.g.
4114    "y[i+1] += x;".  In general, we take the approach that we should not make
4115    an assignment unless we are forced into it - i.e., that any non-side effect
4116    an assignment unless we are forced into it, i.e. any side-effect-free
4117    operator should be allowed, and CSE should take care of coalescing
4118 
4119 static tree
4120 stabilize_reference_1 (tree e)
4121 {
4122   tree result;
4123   enum tree_code code = TREE_CODE (e);
4124 
4125   /* We cannot ignore const expressions because it might be a reference
4126      to a const array but whose index contains side-effects.  But we can
4127      ignore things that are actual constant or that already have been
4128      handled by this function.  */
4129 
4130   if (tree_invariant_p (e))
4131     return e;
4132 
4133   switch (TREE_CODE_CLASS (code))
4134     {
4135     case tcc_exceptional:
4136     case tcc_type:
4137     case tcc_declaration:
4138     case tcc_comparison:
4139     case tcc_statement:
4140     case tcc_expression:
4141     case tcc_reference:
4142     case tcc_vl_exp:
4143       /* If the expression has side-effects, then encase it in a SAVE_EXPR
4144 	 so that it will only be evaluated once.  */
4145       /* The reference (r) and comparison (<) classes could be handled as
4146 	 below, but it is generally faster to only evaluate them once.  */
4147       if (TREE_SIDE_EFFECTS (e))
4148 	return save_expr (e);
4149       return e;
4150 
4151     case tcc_constant:
4152       /* Constants need no processing.  In fact, we should never reach
4153 	 here.  */
4154       return e;
4155 
4156     case tcc_binary:
4157       /* Division is slow and tends to be compiled with jumps,
4158 	 especially the division by powers of 2 that is often
4159 	 found inside of an array reference.  So do it just once.  */
4160       if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4161 	  || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4162 	  || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4163 	  || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4164 	return save_expr (e);
4165       /* Recursively stabilize each operand.  */
4166       result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4167 			 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4168       break;
4169 
4170     case tcc_unary:
4171       /* Recursively stabilize each operand.  */
4172       result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4173       break;
4174 
4175     default:
4176       gcc_unreachable ();
4177     }
4178 
4179   TREE_TYPE (result) = TREE_TYPE (e);
4180   TREE_READONLY (result) = TREE_READONLY (e);
4181   TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4182   TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4183 
4184   return result;
4185 }
4186 
4187 /* Stabilize a reference so that we can use it any number of times
4188    without causing its operands to be evaluated more than once.
4189    Returns the stabilized reference.  This works by means of save_expr,
4190    so see the caveats in the comments about save_expr.
4191 
4192    Also allows conversion expressions whose operands are references.
4193    Any other kind of expression is returned unchanged.  */
4194 
4195 tree
4196 stabilize_reference (tree ref)
4197 {
4198   tree result;
4199   enum tree_code code = TREE_CODE (ref);
4200 
4201   switch (code)
4202     {
4203     case VAR_DECL:
4204     case PARM_DECL:
4205     case RESULT_DECL:
4206       /* No action is needed in this case.  */
4207       return ref;
4208 
4209     CASE_CONVERT:
4210     case FLOAT_EXPR:
4211     case FIX_TRUNC_EXPR:
4212       result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4213       break;
4214 
4215     case INDIRECT_REF:
4216       result = build_nt (INDIRECT_REF,
4217 			 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4218       break;
4219 
4220     case COMPONENT_REF:
4221       result = build_nt (COMPONENT_REF,
4222 			 stabilize_reference (TREE_OPERAND (ref, 0)),
4223 			 TREE_OPERAND (ref, 1), NULL_TREE);
4224       break;
4225 
4226     case BIT_FIELD_REF:
4227       result = build_nt (BIT_FIELD_REF,
4228 			 stabilize_reference (TREE_OPERAND (ref, 0)),
4229 			 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4230       REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4231       break;
4232 
4233     case ARRAY_REF:
4234       result = build_nt (ARRAY_REF,
4235 			 stabilize_reference (TREE_OPERAND (ref, 0)),
4236 			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4237 			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4238       break;
4239 
4240     case ARRAY_RANGE_REF:
4241       result = build_nt (ARRAY_RANGE_REF,
4242 			 stabilize_reference (TREE_OPERAND (ref, 0)),
4243 			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4244 			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4245       break;
4246 
4247     case COMPOUND_EXPR:
4248       /* We cannot wrap the first expression in a SAVE_EXPR, as then
4249 	 it wouldn't be ignored.  This matters when dealing with
4250 	 volatiles.  */
4251       return stabilize_reference_1 (ref);
4252 
4253       /* If arg isn't a kind of lvalue we recognize, make no change.
4254 	 Caller should recognize the error for an invalid lvalue.  */
4255     default:
4256       return ref;
4257 
4258     case ERROR_MARK:
4259       return error_mark_node;
4260     }
4261 
4262   TREE_TYPE (result) = TREE_TYPE (ref);
4263   TREE_READONLY (result) = TREE_READONLY (ref);
4264   TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4265   TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4266 
4267   return result;
4268 }
4269 
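/* As an illustrative example, for a reference such as "a.b[i+1]" a
   hypothetical caller preparing a compound assignment could do

     tree lhs = stabilize_reference (ref);

   The base "a.b" is rebuilt by recursing through stabilize_reference,
   while the index "i+1" goes through stabilize_reference_1, which wraps
   any subexpression that has side effects in a SAVE_EXPR, so LHS can
   safely be evaluated more than once.  */
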
4270 /* Low-level constructors for expressions.  */
4271 
4272 /* A helper function for build1 and constant folders.  Set TREE_CONSTANT,
4273    and TREE_SIDE_EFFECTS for an ADDR_EXPR.  */
4274 
4275 void
4276 recompute_tree_invariant_for_addr_expr (tree t)
4277 {
4278   tree node;
4279   bool tc = true, se = false;
4280 
4281   gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4282 
4283   /* We start out assuming this address is both invariant and constant and
4284      that it has no side effects.  Now go down any handled components and see if
4285      any of them involve offsets that are either non-constant or non-invariant.
4286      Also check for side-effects.
4287 
4288      ??? Note that this code makes no attempt to deal with the case where
4289      taking the address of something causes a copy due to misalignment.  */
4290 
4291 #define UPDATE_FLAGS(NODE)  \
4292 do { tree _node = (NODE); \
4293      if (_node && !TREE_CONSTANT (_node)) tc = false; \
4294      if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4295 
4296   for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4297        node = TREE_OPERAND (node, 0))
4298     {
4299       /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4300 	 array reference (probably made temporarily by the G++ front end),
4301 	 so ignore all the operands.  */
4302       if ((TREE_CODE (node) == ARRAY_REF
4303 	   || TREE_CODE (node) == ARRAY_RANGE_REF)
4304 	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4305 	{
4306 	  UPDATE_FLAGS (TREE_OPERAND (node, 1));
4307 	  if (TREE_OPERAND (node, 2))
4308 	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
4309 	  if (TREE_OPERAND (node, 3))
4310 	    UPDATE_FLAGS (TREE_OPERAND (node, 3));
4311 	}
4312       /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4313 	 FIELD_DECL, apparently.  The G++ front end can put something else
4314 	 there, at least temporarily.  */
4315       else if (TREE_CODE (node) == COMPONENT_REF
4316 	       && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4317 	{
4318 	  if (TREE_OPERAND (node, 2))
4319 	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
4320 	}
4321     }
4322 
4323   node = lang_hooks.expr_to_decl (node, &tc, &se);
4324 
4325   /* Now see what's inside.  If it's an INDIRECT_REF, copy our properties from
4326      the address, since &(*a)->b is a form of addition.  If it's a constant, the
4327      address is constant too.  If it's a decl, its address is constant if the
4328      decl is static.  Everything else is not constant and, furthermore,
4329      taking the address of a volatile variable is not volatile.  */
4330   if (TREE_CODE (node) == INDIRECT_REF
4331       || TREE_CODE (node) == MEM_REF)
4332     UPDATE_FLAGS (TREE_OPERAND (node, 0));
4333   else if (CONSTANT_CLASS_P (node))
4334     ;
4335   else if (DECL_P (node))
4336     tc &= (staticp (node) != NULL_TREE);
4337   else
4338     {
4339       tc = false;
4340       se |= TREE_SIDE_EFFECTS (node);
4341     }
4342 
4343 
4344   TREE_CONSTANT (t) = tc;
4345   TREE_SIDE_EFFECTS (t) = se;
4346 #undef UPDATE_FLAGS
4347 }
4348 
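/* Informally: for an ADDR_EXPR such as "&static_var.field" the walk
   above sees only constant offsets and a static decl, so TREE_CONSTANT
   stays set; for "&local_array[i]" both the automatic decl and the
   non-constant index clear it, and side effects anywhere in the handled
   components set TREE_SIDE_EFFECTS.  */
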
4349 /* Build an expression of code CODE, data type TYPE, and operands as
4350    specified.  Expressions and reference nodes can be created this way.
4351    Constants, decls, types and misc nodes cannot be.
4352 
4353    We define 6 non-variadic functions, from 0 to 5 arguments.  This is
4354    enough for all extant tree codes.  */
4355 
4356 tree
4357 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4358 {
4359   tree t;
4360 
4361   gcc_assert (TREE_CODE_LENGTH (code) == 0);
4362 
4363   t = make_node_stat (code PASS_MEM_STAT);
4364   TREE_TYPE (t) = tt;
4365 
4366   return t;
4367 }
4368 
4369 tree
4370 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4371 {
4372   int length = sizeof (struct tree_exp);
4373   tree t;
4374 
4375   record_node_allocation_statistics (code, length);
4376 
4377   gcc_assert (TREE_CODE_LENGTH (code) == 1);
4378 
4379   t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4380 
4381   memset (t, 0, sizeof (struct tree_common));
4382 
4383   TREE_SET_CODE (t, code);
4384 
4385   TREE_TYPE (t) = type;
4386   SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4387   TREE_OPERAND (t, 0) = node;
4388   if (node && !TYPE_P (node))
4389     {
4390       TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4391       TREE_READONLY (t) = TREE_READONLY (node);
4392     }
4393 
4394   if (TREE_CODE_CLASS (code) == tcc_statement)
4395     TREE_SIDE_EFFECTS (t) = 1;
4396   else switch (code)
4397     {
4398     case VA_ARG_EXPR:
4399       /* All of these have side-effects, no matter what their
4400 	 operands are.  */
4401       TREE_SIDE_EFFECTS (t) = 1;
4402       TREE_READONLY (t) = 0;
4403       break;
4404 
4405     case INDIRECT_REF:
4406       /* Whether a dereference is readonly has nothing to do with whether
4407 	 its operand is readonly.  */
4408       TREE_READONLY (t) = 0;
4409       break;
4410 
4411     case ADDR_EXPR:
4412       if (node)
4413 	recompute_tree_invariant_for_addr_expr (t);
4414       break;
4415 
4416     default:
4417       if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4418 	  && node && !TYPE_P (node)
4419 	  && TREE_CONSTANT (node))
4420 	TREE_CONSTANT (t) = 1;
4421       if (TREE_CODE_CLASS (code) == tcc_reference
4422 	  && node && TREE_THIS_VOLATILE (node))
4423 	TREE_THIS_VOLATILE (t) = 1;
4424       break;
4425     }
4426 
4427   return t;
4428 }
4429 
4430 #define PROCESS_ARG(N)				\
4431   do {						\
4432     TREE_OPERAND (t, N) = arg##N;		\
4433     if (arg##N &&!TYPE_P (arg##N))		\
4434       {						\
4435         if (TREE_SIDE_EFFECTS (arg##N))		\
4436 	  side_effects = 1;			\
4437         if (!TREE_READONLY (arg##N)		\
4438 	    && !CONSTANT_CLASS_P (arg##N))	\
4439 	  (void) (read_only = 0);		\
4440         if (!TREE_CONSTANT (arg##N))		\
4441 	  (void) (constant = 0);		\
4442       }						\
4443   } while (0)
4444 
4445 tree
4446 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4447 {
4448   bool constant, read_only, side_effects;
4449   tree t;
4450 
4451   gcc_assert (TREE_CODE_LENGTH (code) == 2);
4452 
4453   if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4454       && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4455       /* When sizetype precision doesn't match that of pointers
4456          we need to be able to build explicit extensions or truncations
4457 	 of the offset argument.  */
4458       && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4459     gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4460 		&& TREE_CODE (arg1) == INTEGER_CST);
4461 
4462   if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4463     gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4464 		&& ptrofftype_p (TREE_TYPE (arg1)));
4465 
4466   t = make_node_stat (code PASS_MEM_STAT);
4467   TREE_TYPE (t) = tt;
4468 
4469   /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4470      result based on those same flags for the arguments.  But if the
4471      arguments aren't really even `tree' expressions, we shouldn't be trying
4472      to do this.  */
4473 
4474   /* Expressions without side effects may be constant if their
4475      arguments are as well.  */
4476   constant = (TREE_CODE_CLASS (code) == tcc_comparison
4477 	      || TREE_CODE_CLASS (code) == tcc_binary);
4478   read_only = 1;
4479   side_effects = TREE_SIDE_EFFECTS (t);
4480 
4481   PROCESS_ARG (0);
4482   PROCESS_ARG (1);
4483 
4484   TREE_SIDE_EFFECTS (t) = side_effects;
4485   if (code == MEM_REF)
4486     {
4487       if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4488 	{
4489 	  tree o = TREE_OPERAND (arg0, 0);
4490 	  TREE_READONLY (t) = TREE_READONLY (o);
4491 	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4492 	}
4493     }
4494   else
4495     {
4496       TREE_READONLY (t) = read_only;
4497       TREE_CONSTANT (t) = constant;
4498       TREE_THIS_VOLATILE (t)
4499 	= (TREE_CODE_CLASS (code) == tcc_reference
4500 	   && arg0 && TREE_THIS_VOLATILE (arg0));
4501     }
4502 
4503   return t;
4504 }
4505 
4506 
4507 tree
4508 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4509 	     tree arg2 MEM_STAT_DECL)
4510 {
4511   bool constant, read_only, side_effects;
4512   tree t;
4513 
4514   gcc_assert (TREE_CODE_LENGTH (code) == 3);
4515   gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4516 
4517   t = make_node_stat (code PASS_MEM_STAT);
4518   TREE_TYPE (t) = tt;
4519 
4520   read_only = 1;
4521 
4522   /* As a special exception, if COND_EXPR has NULL branches, we
4523      assume that it is a gimple statement and always consider
4524      it to have side effects.  */
4525   if (code == COND_EXPR
4526       && tt == void_type_node
4527       && arg1 == NULL_TREE
4528       && arg2 == NULL_TREE)
4529     side_effects = true;
4530   else
4531     side_effects = TREE_SIDE_EFFECTS (t);
4532 
4533   PROCESS_ARG (0);
4534   PROCESS_ARG (1);
4535   PROCESS_ARG (2);
4536 
4537   if (code == COND_EXPR)
4538     TREE_READONLY (t) = read_only;
4539 
4540   TREE_SIDE_EFFECTS (t) = side_effects;
4541   TREE_THIS_VOLATILE (t)
4542     = (TREE_CODE_CLASS (code) == tcc_reference
4543        && arg0 && TREE_THIS_VOLATILE (arg0));
4544 
4545   return t;
4546 }
4547 
4548 tree
4549 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4550 	     tree arg2, tree arg3 MEM_STAT_DECL)
4551 {
4552   bool constant, read_only, side_effects;
4553   tree t;
4554 
4555   gcc_assert (TREE_CODE_LENGTH (code) == 4);
4556 
4557   t = make_node_stat (code PASS_MEM_STAT);
4558   TREE_TYPE (t) = tt;
4559 
4560   side_effects = TREE_SIDE_EFFECTS (t);
4561 
4562   PROCESS_ARG (0);
4563   PROCESS_ARG (1);
4564   PROCESS_ARG (2);
4565   PROCESS_ARG (3);
4566 
4567   TREE_SIDE_EFFECTS (t) = side_effects;
4568   TREE_THIS_VOLATILE (t)
4569     = (TREE_CODE_CLASS (code) == tcc_reference
4570        && arg0 && TREE_THIS_VOLATILE (arg0));
4571 
4572   return t;
4573 }
4574 
4575 tree
4576 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4577 	     tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4578 {
4579   bool constant, read_only, side_effects;
4580   tree t;
4581 
4582   gcc_assert (TREE_CODE_LENGTH (code) == 5);
4583 
4584   t = make_node_stat (code PASS_MEM_STAT);
4585   TREE_TYPE (t) = tt;
4586 
4587   side_effects = TREE_SIDE_EFFECTS (t);
4588 
4589   PROCESS_ARG (0);
4590   PROCESS_ARG (1);
4591   PROCESS_ARG (2);
4592   PROCESS_ARG (3);
4593   PROCESS_ARG (4);
4594 
4595   TREE_SIDE_EFFECTS (t) = side_effects;
4596   if (code == TARGET_MEM_REF)
4597     {
4598       if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4599 	{
4600 	  tree o = TREE_OPERAND (arg0, 0);
4601 	  TREE_READONLY (t) = TREE_READONLY (o);
4602 	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4603 	}
4604     }
4605   else
4606     TREE_THIS_VOLATILE (t)
4607       = (TREE_CODE_CLASS (code) == tcc_reference
4608 	 && arg0 && TREE_THIS_VOLATILE (arg0));
4609 
4610   return t;
4611 }
4612 
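/* A small usage sketch (PTR and OFF are hypothetical operands): the
   POINTER_PLUS_EXPR assertion in build2_stat above is what callers must
   satisfy, e.g.

     tree off = build_int_cst (sizetype, 4);
     tree sum = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr, off);

   i.e. the result and the first operand have pointer type while the
   offset has sizetype (a ptrofftype); PROCESS_ARG then derives
   TREE_SIDE_EFFECTS, TREE_READONLY and TREE_CONSTANT for SUM from its
   operands.  */
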
4613 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4614    on the pointer PTR.  */
4615 
4616 tree
4617 build_simple_mem_ref_loc (location_t loc, tree ptr)
4618 {
4619   HOST_WIDE_INT offset = 0;
4620   tree ptype = TREE_TYPE (ptr);
4621   tree tem;
4622   /* For convenience allow addresses that collapse to a simple base
4623      and offset.  */
4624   if (TREE_CODE (ptr) == ADDR_EXPR
4625       && (handled_component_p (TREE_OPERAND (ptr, 0))
4626 	  || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4627     {
4628       ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4629       gcc_assert (ptr);
4630       ptr = build_fold_addr_expr (ptr);
4631       gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4632     }
4633   tem = build2 (MEM_REF, TREE_TYPE (ptype),
4634 		ptr, build_int_cst (ptype, offset));
4635   SET_EXPR_LOCATION (tem, loc);
4636   return tem;
4637 }
4638 
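/* Illustrative use, assuming PTR is a pointer-typed tree and LOC a
   source location:

     tree deref = build_simple_mem_ref_loc (loc, ptr);

   yields a MEM_REF of PTR with a constant offset (zero unless the
   address collapsed to a base plus offset), typed as
   TREE_TYPE (TREE_TYPE (ptr)), i.e. the moral equivalent of "*ptr".  */
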
4639 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T.  */
4640 
4641 offset_int
4642 mem_ref_offset (const_tree t)
4643 {
4644   return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4645 }
4646 
4647 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4648    offsetted by OFFSET units.  */
4649 
4650 tree
4651 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4652 {
4653   tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4654 			  build_fold_addr_expr (base),
4655 			  build_int_cst (ptr_type_node, offset));
4656   tree addr = build1 (ADDR_EXPR, type, ref);
4657   recompute_tree_invariant_for_addr_expr (addr);
4658   return addr;
4659 }
4660 
4661 /* Similar except don't specify the TREE_TYPE
4662    and leave the TREE_SIDE_EFFECTS as 0.
4663    It is permissible for arguments to be null,
4664    or even garbage if their values do not matter.  */
4665 
4666 tree
4667 build_nt (enum tree_code code, ...)
4668 {
4669   tree t;
4670   int length;
4671   int i;
4672   va_list p;
4673 
4674   gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4675 
4676   va_start (p, code);
4677 
4678   t = make_node (code);
4679   length = TREE_CODE_LENGTH (code);
4680 
4681   for (i = 0; i < length; i++)
4682     TREE_OPERAND (t, i) = va_arg (p, tree);
4683 
4684   va_end (p);
4685   return t;
4686 }
4687 
4688 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4689    tree vec.  */
4690 
4691 tree
4692 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4693 {
4694   tree ret, t;
4695   unsigned int ix;
4696 
4697   ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4698   CALL_EXPR_FN (ret) = fn;
4699   CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4700   FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4701     CALL_EXPR_ARG (ret, ix) = t;
4702   return ret;
4703 }
4704 
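/* The "+ 3" above accounts for the fixed CALL_EXPR slots that precede
   the arguments: operand 0 holds the operand count of the
   variable-length node, operand 1 the called function and operand 2 the
   static chain.  This is also why the tcc_vl_exp loops elsewhere in
   this file start at operand index 1.  */
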
4705 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4706    We do NOT enter this node in any sort of symbol table.
4707 
4708    LOC is the location of the decl.
4709 
4710    layout_decl is used to set up the decl's storage layout.
4711    Other slots are initialized to 0 or null pointers.  */
4712 
4713 tree
4714 build_decl_stat (location_t loc, enum tree_code code, tree name,
4715     		 tree type MEM_STAT_DECL)
4716 {
4717   tree t;
4718 
4719   t = make_node_stat (code PASS_MEM_STAT);
4720   DECL_SOURCE_LOCATION (t) = loc;
4721 
4722 /*  if (type == error_mark_node)
4723     type = integer_type_node; */
4724 /* That is not done, deliberately, so that having error_mark_node
4725    as the type can suppress useless errors in the use of this variable.  */
4726 
4727   DECL_NAME (t) = name;
4728   TREE_TYPE (t) = type;
4729 
4730   if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4731     layout_decl (t, 0);
4732 
4733   return t;
4734 }
4735 
4736 /* Builds and returns function declaration with NAME and TYPE.  */
4737 
4738 tree
4739 build_fn_decl (const char *name, tree type)
4740 {
4741   tree id = get_identifier (name);
4742   tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4743 
4744   DECL_EXTERNAL (decl) = 1;
4745   TREE_PUBLIC (decl) = 1;
4746   DECL_ARTIFICIAL (decl) = 1;
4747   TREE_NOTHROW (decl) = 1;
4748 
4749   return decl;
4750 }
4751 
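/* For example, a front end could declare an external helper like so
   (the name and signature are made up for illustration):

     tree fntype = build_function_type_list (void_type_node,
					     integer_type_node, NULL_TREE);
     tree fndecl = build_fn_decl ("__example_helper", fntype);

   giving an external, public, artificial, nothrow FUNCTION_DECL that is
   not entered in any symbol table.  */
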
4752 vec<tree, va_gc> *all_translation_units;
4753 
4754 /* Builds a new translation-unit decl with name NAME, queues it in the
4755    global list of translation-unit decls and returns it.   */
4756 
4757 tree
4758 build_translation_unit_decl (tree name)
4759 {
4760   tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4761 			name, NULL_TREE);
4762   TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4763   vec_safe_push (all_translation_units, tu);
4764   return tu;
4765 }
4766 
4767 
4768 /* BLOCK nodes are used to represent the structure of binding contours
4769    and declarations, once those contours have been exited and their contents
4770    compiled.  This information is used for outputting debugging info.  */
4771 
4772 tree
4773 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4774 {
4775   tree block = make_node (BLOCK);
4776 
4777   BLOCK_VARS (block) = vars;
4778   BLOCK_SUBBLOCKS (block) = subblocks;
4779   BLOCK_SUPERCONTEXT (block) = supercontext;
4780   BLOCK_CHAIN (block) = chain;
4781   return block;
4782 }
4783 
4784 
4785 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4786 
4787    LOC is the location to use in tree T.  */
4788 
4789 void
4790 protected_set_expr_location (tree t, location_t loc)
4791 {
4792   if (CAN_HAVE_LOCATION_P (t))
4793     SET_EXPR_LOCATION (t, loc);
4794 }
4795 
4796 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4797    is ATTRIBUTE.  */
4798 
4799 tree
4800 build_decl_attribute_variant (tree ddecl, tree attribute)
4801 {
4802   DECL_ATTRIBUTES (ddecl) = attribute;
4803   return ddecl;
4804 }
4805 
4806 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4807    is ATTRIBUTE and its qualifiers are QUALS.
4808 
4809    Record such modified types already made so we don't make duplicates.  */
4810 
4811 tree
4812 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4813 {
4814   if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4815     {
4816       inchash::hash hstate;
4817       tree ntype;
4818       int i;
4819       tree t;
4820       enum tree_code code = TREE_CODE (ttype);
4821 
4822       /* Building a distinct copy of a tagged type is inappropriate; it
4823 	 causes breakage in code that expects there to be a one-to-one
4824 	 relationship between a struct and its fields.
4825 	 build_duplicate_type is another solution (as used in
4826 	 handle_transparent_union_attribute), but that doesn't play well
4827 	 with the stronger C++ type identity model.  */
4828       if (TREE_CODE (ttype) == RECORD_TYPE
4829 	  || TREE_CODE (ttype) == UNION_TYPE
4830 	  || TREE_CODE (ttype) == QUAL_UNION_TYPE
4831 	  || TREE_CODE (ttype) == ENUMERAL_TYPE)
4832 	{
4833 	  warning (OPT_Wattributes,
4834 		   "ignoring attributes applied to %qT after definition",
4835 		   TYPE_MAIN_VARIANT (ttype));
4836 	  return build_qualified_type (ttype, quals);
4837 	}
4838 
4839       ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4840       ntype = build_distinct_type_copy (ttype);
4841 
4842       TYPE_ATTRIBUTES (ntype) = attribute;
4843 
4844       hstate.add_int (code);
4845       if (TREE_TYPE (ntype))
4846 	hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4847       attribute_hash_list (attribute, hstate);
4848 
4849       switch (TREE_CODE (ntype))
4850 	{
4851 	case FUNCTION_TYPE:
4852 	  type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4853 	  break;
4854 	case ARRAY_TYPE:
4855 	  if (TYPE_DOMAIN (ntype))
4856 	    hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4857 	  break;
4858 	case INTEGER_TYPE:
4859 	  t = TYPE_MAX_VALUE (ntype);
4860 	  for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4861 	    hstate.add_object (TREE_INT_CST_ELT (t, i));
4862 	  break;
4863 	case REAL_TYPE:
4864 	case FIXED_POINT_TYPE:
4865 	  {
4866 	    unsigned int precision = TYPE_PRECISION (ntype);
4867 	    hstate.add_object (precision);
4868 	  }
4869 	  break;
4870 	default:
4871 	  break;
4872 	}
4873 
4874       ntype = type_hash_canon (hstate.end(), ntype);
4875 
4876       /* If the target-dependent attributes make NTYPE different from
4877 	 its canonical type, we will need to use structural equality
4878 	 checks for this type. */
4879       if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4880           || !comp_type_attributes (ntype, ttype))
4881 	SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4882       else if (TYPE_CANONICAL (ntype) == ntype)
4883 	TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4884 
4885       ttype = build_qualified_type (ntype, quals);
4886     }
4887   else if (TYPE_QUALS (ttype) != quals)
4888     ttype = build_qualified_type (ttype, quals);
4889 
4890   return ttype;
4891 }
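/* A typical (hypothetical) use is attaching an attribute to an existing
   type while keeping its qualifiers ("my_attr" and TYPE are made-up
   names):

     tree attrs = tree_cons (get_identifier ("my_attr"), NULL_TREE,
			     TYPE_ATTRIBUTES (type));
     type = build_type_attribute_qual_variant (type, attrs,
						TYPE_QUALS (type));

   which either reuses an existing variant found via the type hash or
   builds a distinct copy as above; tagged types instead get a warning
   and only their qualifiers applied.  */
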
4892 
4893 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4894    the same.  */
4895 
4896 static bool
4897 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4898 {
4899   tree cl1, cl2;
4900   for (cl1 = clauses1, cl2 = clauses2;
4901        cl1 && cl2;
4902        cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4903     {
4904       if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4905 	return false;
4906       if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4907 	{
4908 	  if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4909 				OMP_CLAUSE_DECL (cl2)) != 1)
4910 	    return false;
4911 	}
4912       switch (OMP_CLAUSE_CODE (cl1))
4913 	{
4914 	case OMP_CLAUSE_ALIGNED:
4915 	  if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4916 				OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4917 	    return false;
4918 	  break;
4919 	case OMP_CLAUSE_LINEAR:
4920 	  if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4921 				OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4922 	    return false;
4923 	  break;
4924 	case OMP_CLAUSE_SIMDLEN:
4925 	  if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4926 				OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4927 	    return false;
4928 	default:
4929 	  break;
4930 	}
4931     }
4932   return true;
4933 }
4934 
4935 /* Compare two constructor-element-type constants.  Return true if the lists
4936    are known to be equal; otherwise return false.  */
4937 
4938 static bool
4939 simple_cst_list_equal (const_tree l1, const_tree l2)
4940 {
4941   while (l1 != NULL_TREE && l2 != NULL_TREE)
4942     {
4943       if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4944 	return false;
4945 
4946       l1 = TREE_CHAIN (l1);
4947       l2 = TREE_CHAIN (l2);
4948     }
4949 
4950   return l1 == l2;
4951 }
4952 
4953 /* Compare two identifier nodes representing attributes.  Either one may
4954    be in wrapped __ATTR__ form.  Return true if they are the same, false
4955    otherwise.  */
4956 
4957 static bool
4958 cmp_attrib_identifiers (const_tree attr1, const_tree attr2)
4959 {
4960   /* Make sure we're dealing with IDENTIFIER_NODEs.  */
4961   gcc_checking_assert (TREE_CODE (attr1) == IDENTIFIER_NODE
4962 		       && TREE_CODE (attr2) == IDENTIFIER_NODE);
4963 
4964   /* Identifiers can be compared directly for equality.  */
4965   if (attr1 == attr2)
4966     return true;
4967 
4968   /* If they are not equal, one of them may still be in the form
4969      'text' while the other is in the form '__text__'.  TODO:
4970      If we were storing attributes in normalized 'text' form, then
4971      this could all go away and we could take full advantage of
4972      the fact that we're comparing identifiers. :-)  */
4973   const size_t attr1_len = IDENTIFIER_LENGTH (attr1);
4974   const size_t attr2_len = IDENTIFIER_LENGTH (attr2);
4975 
4976   if (attr2_len == attr1_len + 4)
4977     {
4978       const char *p = IDENTIFIER_POINTER (attr2);
4979       const char *q = IDENTIFIER_POINTER (attr1);
4980       if (p[0] == '_' && p[1] == '_'
4981 	  && p[attr2_len - 2] == '_' && p[attr2_len - 1] == '_'
4982 	  && strncmp (q, p + 2, attr1_len) == 0)
4983 	return true;
4984     }
4985   else if (attr2_len + 4 == attr1_len)
4986     {
4987       const char *p = IDENTIFIER_POINTER (attr2);
4988       const char *q = IDENTIFIER_POINTER (attr1);
4989       if (q[0] == '_' && q[1] == '_'
4990 	  && q[attr1_len - 2] == '_' && q[attr1_len - 1] == '_'
4991 	  && strncmp (q + 2, p, attr2_len) == 0)
4992 	return true;
4993     }
4994 
4995   return false;
4996 }
4997 
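/* Consequently the identifiers "format" and "__format__" compare equal
   here, while e.g. "format" and "__format" (or "format__") do not,
   since only the fully wrapped "__text__" form is recognized.  */
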
4998 /* Compare two attributes for their value identity.  Return true if the
4999    attribute values are known to be equal; otherwise return false.  */
5000 
5001 bool
5002 attribute_value_equal (const_tree attr1, const_tree attr2)
5003 {
5004   if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
5005     return true;
5006 
5007   if (TREE_VALUE (attr1) != NULL_TREE
5008       && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
5009       && TREE_VALUE (attr2) != NULL_TREE
5010       && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
5011     {
5012       /* Handle attribute format.  */
5013       if (is_attribute_p ("format", get_attribute_name (attr1)))
5014 	{
5015 	  attr1 = TREE_VALUE (attr1);
5016 	  attr2 = TREE_VALUE (attr2);
5017 	  /* Compare the archetypes (printf/scanf/strftime/...).  */
5018 	  if (!cmp_attrib_identifiers (TREE_VALUE (attr1),
5019 				       TREE_VALUE (attr2)))
5020 	    return false;
5021 	  /* Archetypes are the same.  Compare the rest.  */
5022 	  return (simple_cst_list_equal (TREE_CHAIN (attr1),
5023 					 TREE_CHAIN (attr2)) == 1);
5024 	}
5025       return (simple_cst_list_equal (TREE_VALUE (attr1),
5026 				     TREE_VALUE (attr2)) == 1);
5027     }
5028 
5029   if ((flag_openmp || flag_openmp_simd)
5030       && TREE_VALUE (attr1) && TREE_VALUE (attr2)
5031       && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
5032       && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
5033     return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
5034 					   TREE_VALUE (attr2));
5035 
5036   return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
5037 }
5038 
5039 /* Return 0 if the attributes for two types are incompatible, 1 if they
5040    are compatible, and 2 if they are nearly compatible (which causes a
5041    warning to be generated).  */
5042 int
5043 comp_type_attributes (const_tree type1, const_tree type2)
5044 {
5045   const_tree a1 = TYPE_ATTRIBUTES (type1);
5046   const_tree a2 = TYPE_ATTRIBUTES (type2);
5047   const_tree a;
5048 
5049   if (a1 == a2)
5050     return 1;
5051   for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
5052     {
5053       const struct attribute_spec *as;
5054       const_tree attr;
5055 
5056       as = lookup_attribute_spec (get_attribute_name (a));
5057       if (!as || as->affects_type_identity == false)
5058         continue;
5059 
5060       attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
5061       if (!attr || !attribute_value_equal (a, attr))
5062         break;
5063     }
5064   if (!a)
5065     {
5066       for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
5067 	{
5068 	  const struct attribute_spec *as;
5069 
5070 	  as = lookup_attribute_spec (get_attribute_name (a));
5071 	  if (!as || as->affects_type_identity == false)
5072 	    continue;
5073 
5074 	  if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
5075 	    break;
5076 	  /* We don't need to compare trees again, as we did this
5077 	     already in first loop.  */
5078 	}
5079       /* All attributes that affect type identity are equal, so
5080          there is no need to call the target hook for comparison.  */
5081       if (!a)
5082         return 1;
5083     }
5084   if (lookup_attribute ("transaction_safe", CONST_CAST_TREE (a)))
5085     return 0;
5086   /* As some type combinations - like default calling-convention - might
5087      be compatible, we have to call the target hook to get the final result.  */
5088   return targetm.comp_type_attributes (type1, type2);
5089 }
5090 
5091 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
5092    is ATTRIBUTE.
5093 
5094    Record such modified types already made so we don't make duplicates.  */
5095 
5096 tree
5097 build_type_attribute_variant (tree ttype, tree attribute)
5098 {
5099   return build_type_attribute_qual_variant (ttype, attribute,
5100 					    TYPE_QUALS (ttype));
5101 }
5102 
5103 
5104 /* Reset the expression *EXPR_P, a size or position.
5105 
5106    ??? We could reset all non-constant sizes or positions.  But it's cheap
5107    enough to not do so and refrain from adding workarounds to dwarf2out.c.
5108 
5109    We need to reset self-referential sizes or positions because they cannot
5110    be gimplified and thus can contain a CALL_EXPR after the gimplification
5111    is finished, which will run afoul of LTO streaming.  And they need to be
5112    reset to something essentially dummy but not constant, so as to preserve
5113    the properties of the object they are attached to.  */
5114 
5115 static inline void
5116 free_lang_data_in_one_sizepos (tree *expr_p)
5117 {
5118   tree expr = *expr_p;
5119   if (CONTAINS_PLACEHOLDER_P (expr))
5120     *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5121 }
5122 
5123 
5124 /* Reset all the fields in a binfo node BINFO.  We only keep
5125    BINFO_VTABLE, which is used by gimple_fold_obj_type_ref.  */
5126 
5127 static void
5128 free_lang_data_in_binfo (tree binfo)
5129 {
5130   unsigned i;
5131   tree t;
5132 
5133   gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5134 
5135   BINFO_VIRTUALS (binfo) = NULL_TREE;
5136   BINFO_BASE_ACCESSES (binfo) = NULL;
5137   BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5138   BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5139 
5140   FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5141     free_lang_data_in_binfo (t);
5142 }
5143 
5144 
5145 /* Reset all language specific information still present in TYPE.  */
5146 
5147 static void
5148 free_lang_data_in_type (tree type)
5149 {
5150   gcc_assert (TYPE_P (type));
5151 
5152   /* Give the FE a chance to remove its own data first.  */
5153   lang_hooks.free_lang_data (type);
5154 
5155   TREE_LANG_FLAG_0 (type) = 0;
5156   TREE_LANG_FLAG_1 (type) = 0;
5157   TREE_LANG_FLAG_2 (type) = 0;
5158   TREE_LANG_FLAG_3 (type) = 0;
5159   TREE_LANG_FLAG_4 (type) = 0;
5160   TREE_LANG_FLAG_5 (type) = 0;
5161   TREE_LANG_FLAG_6 (type) = 0;
5162 
5163   if (TREE_CODE (type) == FUNCTION_TYPE)
5164     {
5165       /* Remove the const and volatile qualifiers from arguments.  The
5166 	 C++ front end removes them, but the C front end does not,
5167 	 leading to false ODR violation errors when merging two
5168 	 instances of the same function signature compiled by
5169 	 different front ends.  */
5170       tree p;
5171 
5172       for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5173 	{
5174 	  tree arg_type = TREE_VALUE (p);
5175 
5176 	  if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5177 	    {
5178 	      int quals = TYPE_QUALS (arg_type)
5179 			  & ~TYPE_QUAL_CONST
5180 			  & ~TYPE_QUAL_VOLATILE;
5181 	      TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5182 	      free_lang_data_in_type (TREE_VALUE (p));
5183 	    }
5184 	  /* C++ FE uses TREE_PURPOSE to store initial values.  */
5185 	  TREE_PURPOSE (p) = NULL;
5186 	}
5187       /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE.  */
5188       TYPE_MINVAL (type) = NULL;
5189     }
5190   if (TREE_CODE (type) == METHOD_TYPE)
5191     {
5192       tree p;
5193 
5194       for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5195 	{
5196 	  /* C++ FE uses TREE_PURPOSE to store initial values.  */
5197 	  TREE_PURPOSE (p) = NULL;
5198 	}
5199       /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE.  */
5200       TYPE_MINVAL (type) = NULL;
5201     }
5202 
5203   /* Remove members that are not actually FIELD_DECLs from the field
5204      list of an aggregate.  These occur in C++.  */
5205   if (RECORD_OR_UNION_TYPE_P (type))
5206     {
5207       tree prev, member;
5208 
5209       /* Note that TYPE_FIELDS can be shared across distinct
5210 	 TREE_TYPEs.  Therefore, if the first field of TYPE_FIELDS is
5211 	 to be removed, we cannot set its TREE_CHAIN to NULL.
5212 	 Otherwise, we would not be able to find all the other fields
5213 	 in the other instances of this TREE_TYPE.
5214 
5215 	 This was causing an ICE in testsuite/g++.dg/lto/20080915.C.  */
5216       prev = NULL_TREE;
5217       member = TYPE_FIELDS (type);
5218       while (member)
5219 	{
5220 	  if (TREE_CODE (member) == FIELD_DECL
5221 	      || (TREE_CODE (member) == TYPE_DECL
5222 		  && !DECL_IGNORED_P (member)
5223 		  && debug_info_level > DINFO_LEVEL_TERSE
5224 		  && !is_redundant_typedef (member)))
5225 	    {
5226 	      if (prev)
5227 		TREE_CHAIN (prev) = member;
5228 	      else
5229 		TYPE_FIELDS (type) = member;
5230 	      prev = member;
5231 	    }
5232 
5233 	  member = TREE_CHAIN (member);
5234 	}
5235 
5236       if (prev)
5237 	TREE_CHAIN (prev) = NULL_TREE;
5238       else
5239 	TYPE_FIELDS (type) = NULL_TREE;
5240 
5241       /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
5242  	 and dangle the pointer from time to time.  */
5243       if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL)
5244         TYPE_VFIELD (type) = NULL_TREE;
5245 
5246       /* Remove TYPE_METHODS list.  While it would be nice to keep it
5247  	 to enable ODR warnings about different method lists, doing so
5248 	 seems to increase the size of the streamed LTO data impractically.
5249 	 Keep a record of whether TYPE_METHODS was non-NULL; this is used
5250 	 by function.c and the pretty printers.  */
5251       if (TYPE_METHODS (type))
5252         TYPE_METHODS (type) = error_mark_node;
5253       if (TYPE_BINFO (type))
5254 	{
5255 	  free_lang_data_in_binfo (TYPE_BINFO (type));
5256 	  /* We need to preserve the link to the bases and the virtual table for all
5257 	     polymorphic types to keep the devirtualization machinery working.
5258 	     Debug output cares only about the bases, but we also output the
5259 	     virtual table pointers so that merging -fdevirtualize and
5260 	     -fno-devirtualize units is easier.  */
5261 	  if ((!BINFO_VTABLE (TYPE_BINFO (type))
5262 	       || !flag_devirtualize)
5263 	      && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
5264 		   && !BINFO_VTABLE (TYPE_BINFO (type)))
5265 		  || debug_info_level != DINFO_LEVEL_NONE))
5266 	    TYPE_BINFO (type) = NULL;
5267 	}
5268     }
5269   else
5270     {
5271       /* For non-aggregate types, clear out the language slot (which
5272 	 overloads TYPE_BINFO).  */
5273       TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5274 
5275       if (INTEGRAL_TYPE_P (type)
5276 	  || SCALAR_FLOAT_TYPE_P (type)
5277 	  || FIXED_POINT_TYPE_P (type))
5278 	{
5279 	  free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5280 	  free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5281 	}
5282     }
5283 
5284   free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5285   free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5286 
5287   if (TYPE_CONTEXT (type)
5288       && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5289     {
5290       tree ctx = TYPE_CONTEXT (type);
5291       do
5292 	{
5293 	  ctx = BLOCK_SUPERCONTEXT (ctx);
5294 	}
5295       while (ctx && TREE_CODE (ctx) == BLOCK);
5296       TYPE_CONTEXT (type) = ctx;
5297     }
5298 }
5299 
5300 
5301 /* Return true if DECL may need an assembler name to be set.  */
5302 
5303 static inline bool
5304 need_assembler_name_p (tree decl)
5305 {
5306   /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5307      Rule merging.  This makes type_odr_p return true on those types during
5308      LTO and, by comparing the mangled names, we can tell which types are intended
5309      to be equivalent across compilation units.
5310 
5311      We do not store names of type_in_anonymous_namespace_p.
5312 
5313      Record, union and enumeration types have linkage that allows us
5314      to check type_in_anonymous_namespace_p.  We do not mangle compound types
5315      that can always be compared structurally.
5316 
5317      Similarly for builtin types, we compare properties of their main variant.
5318      A special case is integer types, where mangling does make a difference
5319      between char/signed char/unsigned char etc.  Storing names for these allows
5320      e.g. -fno-signed-char/-fsigned-char mismatches to be handled well.
5321      See cp/mangle.c:write_builtin_type for details.  */
5322 
5323   if (flag_lto_odr_type_mering
5324       && TREE_CODE (decl) == TYPE_DECL
5325       && DECL_NAME (decl)
5326       && decl == TYPE_NAME (TREE_TYPE (decl))
5327       && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
5328       && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5329       && (type_with_linkage_p (TREE_TYPE (decl))
5330 	  || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5331       && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5332     return !DECL_ASSEMBLER_NAME_SET_P (decl);
5333   /* Only FUNCTION_DECLs and VAR_DECLs are considered.  */
5334   if (TREE_CODE (decl) != FUNCTION_DECL
5335       && TREE_CODE (decl) != VAR_DECL)
5336     return false;
5337 
5338   /* If DECL already has its assembler name set, it does not need a
5339      new one.  */
5340   if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5341       || DECL_ASSEMBLER_NAME_SET_P (decl))
5342     return false;
5343 
5344   /* Abstract decls do not need an assembler name.  */
5345   if (DECL_ABSTRACT_P (decl))
5346     return false;
5347 
5348   /* For VAR_DECLs, only static, public and external symbols need an
5349      assembler name.  */
5350   if (TREE_CODE (decl) == VAR_DECL
5351       && !TREE_STATIC (decl)
5352       && !TREE_PUBLIC (decl)
5353       && !DECL_EXTERNAL (decl))
5354     return false;
5355 
5356   if (TREE_CODE (decl) == FUNCTION_DECL)
5357     {
5358       /* Do not set assembler name on builtins.  Allow RTL expansion to
5359 	 decide whether to expand inline or via a regular call.  */
5360       if (DECL_BUILT_IN (decl)
5361 	  && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5362 	return false;
5363 
5364       /* Functions represented in the callgraph need an assembler name.  */
5365       if (cgraph_node::get (decl) != NULL)
5366 	return true;
5367 
5368       /* Unused and not public functions don't need an assembler name.  */
5369       if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5370 	return false;
5371     }
5372 
5373   return true;
5374 }
5375 
5376 
5377 /* Reset all language specific information still present in symbol
5378    DECL.  */
5379 
5380 static void
5381 free_lang_data_in_decl (tree decl)
5382 {
5383   gcc_assert (DECL_P (decl));
5384 
5385   /* Give the FE a chance to remove its own data first.  */
5386   lang_hooks.free_lang_data (decl);
5387 
5388   TREE_LANG_FLAG_0 (decl) = 0;
5389   TREE_LANG_FLAG_1 (decl) = 0;
5390   TREE_LANG_FLAG_2 (decl) = 0;
5391   TREE_LANG_FLAG_3 (decl) = 0;
5392   TREE_LANG_FLAG_4 (decl) = 0;
5393   TREE_LANG_FLAG_5 (decl) = 0;
5394   TREE_LANG_FLAG_6 (decl) = 0;
5395 
5396   free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5397   free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5398   if (TREE_CODE (decl) == FIELD_DECL)
5399     {
5400       free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5401       if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5402 	DECL_QUALIFIER (decl) = NULL_TREE;
5403     }
5404 
5405  if (TREE_CODE (decl) == FUNCTION_DECL)
5406     {
5407       struct cgraph_node *node;
5408       if (!(node = cgraph_node::get (decl))
5409 	  || (!node->definition && !node->clones))
5410 	{
5411 	  if (node)
5412 	    node->release_body ();
5413 	  else
5414 	    {
5415 	      release_function_body (decl);
5416 	      DECL_ARGUMENTS (decl) = NULL;
5417 	      DECL_RESULT (decl) = NULL;
5418 	      DECL_INITIAL (decl) = error_mark_node;
5419 	    }
5420 	}
5421       if (gimple_has_body_p (decl))
5422 	{
5423 	  tree t;
5424 
5425 	  /* If DECL has a gimple body, then the context for its
5426 	     arguments must be DECL.  Otherwise, it doesn't really
5427 	     matter, as we will not be emitting any code for DECL.  In
5428 	     general, there may be other instances of DECL created by
5429 	     the front end and since PARM_DECLs are generally shared,
5430 	     their DECL_CONTEXT changes as the replicas of DECL are
5431 	     created.  The only time where DECL_CONTEXT is important
5432 	     is for the FUNCTION_DECLs that have a gimple body (since
5433 	     the PARM_DECL will be used in the function's body).  */
5434 	  for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5435 	    DECL_CONTEXT (t) = decl;
5436 	  if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5437 	    DECL_FUNCTION_SPECIFIC_TARGET (decl)
5438 	      = target_option_default_node;
5439 	  if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5440 	    DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5441 	      = optimization_default_node;
5442 	}
5443 
5444       /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5445 	 At this point, it is not needed anymore.  */
5446       DECL_SAVED_TREE (decl) = NULL_TREE;
5447 
5448       /* Clear the abstract origin if it refers to a method.
5449          Otherwise dwarf2out.c will ICE as we splice functions out of
5450          TYPE_FIELDS and thus the origin will not be output
5451          correctly.  */
5452       if (DECL_ABSTRACT_ORIGIN (decl)
5453 	  && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5454 	  && RECORD_OR_UNION_TYPE_P
5455 	       (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5456 	DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5457 
5458       /* Sometimes the C++ frontend doesn't manage to transform a temporary
5459          DECL_VINDEX referring to itself into a vtable slot number as it
5460 	 should.  This happens with functions that are copied and then
5461 	 forgotten about.  Just clear it; it won't matter anymore.  */
5462       if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5463 	DECL_VINDEX (decl) = NULL_TREE;
5464     }
5465   else if (TREE_CODE (decl) == VAR_DECL)
5466     {
5467       if ((DECL_EXTERNAL (decl)
5468 	   && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5469 	  || (decl_function_context (decl) && !TREE_STATIC (decl)))
5470 	DECL_INITIAL (decl) = NULL_TREE;
5471     }
5472   else if (TREE_CODE (decl) == TYPE_DECL)
5473     {
5474       DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5475       DECL_VISIBILITY_SPECIFIED (decl) = 0;
5476       DECL_INITIAL (decl) = NULL_TREE;
5477     }
5478   else if (TREE_CODE (decl) == FIELD_DECL)
5479     DECL_INITIAL (decl) = NULL_TREE;
5480   else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5481            && DECL_INITIAL (decl)
5482            && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5483     {
5484       /* Strip builtins from the translation-unit BLOCK.  We still have targets
5485 	 without builtin_decl_explicit support, and builtins are shared
5486 	 nodes, so we can't use TREE_CHAIN in multiple lists.  */
5487       tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5488       while (*nextp)
5489         {
5490           tree var = *nextp;
5491           if (TREE_CODE (var) == FUNCTION_DECL
5492               && DECL_BUILT_IN (var))
5493 	    *nextp = TREE_CHAIN (var);
5494 	  else
5495 	    nextp = &TREE_CHAIN (var);
5496         }
5497     }
5498 }
5499 
5500 
5501 /* Data used when collecting DECLs and TYPEs for language data removal.  */
5502 
5503 struct free_lang_data_d
5504 {
5505   /* Worklist to avoid excessive recursion.  */
5506   vec<tree> worklist;
5507 
5508   /* Set of traversed objects.  Used to avoid duplicate visits.  */
5509   hash_set<tree> *pset;
5510 
5511   /* Array of symbols to process with free_lang_data_in_decl.  */
5512   vec<tree> decls;
5513 
5514   /* Array of types to process with free_lang_data_in_type.  */
5515   vec<tree> types;
5516 };
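/* A minimal setup/teardown sketch for this structure, mirroring what
   free_lang_data_in_cgraph does below (illustrative only):

     struct free_lang_data_d fld;
     fld.pset = new hash_set<tree>;
     fld.worklist.create (0);
     fld.decls.create (100);
     fld.types.create (100);
     ... collect nodes and process fld.decls / fld.types ...
     delete fld.pset;
     fld.worklist.release ();
     fld.decls.release ();
     fld.types.release ();  */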
5517 
5518 
5519 /* Save all language fields needed to generate proper debug information
5520    for DECL.  This saves most fields cleared out by free_lang_data_in_decl.  */
5521 
5522 static void
5523 save_debug_info_for_decl (tree t)
5524 {
5525   /*struct saved_debug_info_d *sdi;*/
5526 
5527   gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5528 
5529   /* FIXME.  Partial implementation for saving debug info removed.  */
5530 }
5531 
5532 
5533 /* Save all language fields needed to generate proper debug information
5534    for TYPE.  This saves most fields cleared out by free_lang_data_in_type.  */
5535 
5536 static void
5537 save_debug_info_for_type (tree t)
5538 {
5539   /*struct saved_debug_info_d *sdi;*/
5540 
5541   gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5542 
5543   /* FIXME.  Partial implementation for saving debug info removed.  */
5544 }
5545 
5546 
5547 /* Add type or decl T to one of the lists of tree nodes that need their
5548    language data removed.  The lists are held inside FLD.  */
5549 
5550 static void
5551 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5552 {
5553   if (DECL_P (t))
5554     {
5555       fld->decls.safe_push (t);
5556       if (debug_info_level > DINFO_LEVEL_TERSE)
5557 	save_debug_info_for_decl (t);
5558     }
5559   else if (TYPE_P (t))
5560     {
5561       fld->types.safe_push (t);
5562       if (debug_info_level > DINFO_LEVEL_TERSE)
5563 	save_debug_info_for_type (t);
5564     }
5565   else
5566     gcc_unreachable ();
5567 }
5568 
5569 /* Push tree node T into FLD->WORKLIST.  */
5570 
5571 static inline void
5572 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5573 {
5574   if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5575     fld->worklist.safe_push ((t));
5576 }
5577 
5578 
5579 /* Operand callback helper for free_lang_data_in_node.  *TP is the
5580    subtree operand being considered.  */
5581 
5582 static tree
5583 find_decls_types_r (tree *tp, int *ws, void *data)
5584 {
5585   tree t = *tp;
5586   struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5587 
5588   if (TREE_CODE (t) == TREE_LIST)
5589     return NULL_TREE;
5590 
5591   /* Language specific nodes will be removed, so there is no need
5592      to gather anything under them.  */
5593   if (is_lang_specific (t))
5594     {
5595       *ws = 0;
5596       return NULL_TREE;
5597     }
5598 
5599   if (DECL_P (t))
5600     {
5601       /* Note that walk_tree does not traverse every possible field in
5602 	 decls, so we have to do our own traversals here.  */
5603       add_tree_to_fld_list (t, fld);
5604 
5605       fld_worklist_push (DECL_NAME (t), fld);
5606       fld_worklist_push (DECL_CONTEXT (t), fld);
5607       fld_worklist_push (DECL_SIZE (t), fld);
5608       fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5609 
5610       /* We are going to remove everything under DECL_INITIAL for
5611 	 TYPE_DECLs.  No point walking them.  */
5612       if (TREE_CODE (t) != TYPE_DECL)
5613 	fld_worklist_push (DECL_INITIAL (t), fld);
5614 
5615       fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5616       fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5617 
5618       if (TREE_CODE (t) == FUNCTION_DECL)
5619 	{
5620 	  fld_worklist_push (DECL_ARGUMENTS (t), fld);
5621 	  fld_worklist_push (DECL_RESULT (t), fld);
5622 	}
5623       else if (TREE_CODE (t) == TYPE_DECL)
5624 	{
5625 	  fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5626 	}
5627       else if (TREE_CODE (t) == FIELD_DECL)
5628 	{
5629 	  fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5630 	  fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5631 	  fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5632 	  fld_worklist_push (DECL_FCONTEXT (t), fld);
5633 	}
5634 
5635       if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5636 	  && DECL_HAS_VALUE_EXPR_P (t))
5637 	fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5638 
5639       if (TREE_CODE (t) != FIELD_DECL
5640 	  && TREE_CODE (t) != TYPE_DECL)
5641 	fld_worklist_push (TREE_CHAIN (t), fld);
5642       *ws = 0;
5643     }
5644   else if (TYPE_P (t))
5645     {
5646       /* Note that walk_tree does not traverse every possible field in
5647 	 types, so we have to do our own traversals here.  */
5648       add_tree_to_fld_list (t, fld);
5649 
5650       if (!RECORD_OR_UNION_TYPE_P (t))
5651 	fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5652       fld_worklist_push (TYPE_SIZE (t), fld);
5653       fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5654       fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5655       fld_worklist_push (TYPE_POINTER_TO (t), fld);
5656       fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5657       fld_worklist_push (TYPE_NAME (t), fld);
5658       /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO.  We do not stream
5659 	 them and thus do not want to reach unused pointer types
5660 	 this way.  */
5661       if (!POINTER_TYPE_P (t))
5662 	fld_worklist_push (TYPE_MINVAL (t), fld);
5663       if (!RECORD_OR_UNION_TYPE_P (t))
5664 	fld_worklist_push (TYPE_MAXVAL (t), fld);
5665       fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5666       /* Do not walk TYPE_NEXT_VARIANT.  We do not stream it and thus
5667          do not want to reach unused variants this way.  */
5668       if (TYPE_CONTEXT (t))
5669 	{
5670 	  tree ctx = TYPE_CONTEXT (t);
5671 	  /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5672 	     So push that instead.  */
5673 	  while (ctx && TREE_CODE (ctx) == BLOCK)
5674 	    ctx = BLOCK_SUPERCONTEXT (ctx);
5675 	  fld_worklist_push (ctx, fld);
5676 	}
5677       /* Do not walk TYPE_CANONICAL.  We do not stream it and thus do not
5678 	 want to reach unused types this way.  */
5679 
5680       if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5681 	{
5682 	  unsigned i;
5683 	  tree tem;
5684 	  FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5685 	    fld_worklist_push (TREE_TYPE (tem), fld);
5686 	  tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5687 	  if (tem
5688 	      /* The Java FE overloads BINFO_VIRTUALS for its own purpose.  */
5689 	      && TREE_CODE (tem) == TREE_LIST)
5690 	    do
5691 	      {
5692 		fld_worklist_push (TREE_VALUE (tem), fld);
5693 		tem = TREE_CHAIN (tem);
5694 	      }
5695 	    while (tem);
5696 	}
5697       if (RECORD_OR_UNION_TYPE_P (t))
5698 	{
5699 	  tree tem;
5700 	  /* Push all TYPE_FIELDS - interesting and non-interesting
5701 	     entries can be interleaved.  */
5702 	  tem = TYPE_FIELDS (t);
5703 	  while (tem)
5704 	    {
5705 	      if (TREE_CODE (tem) == FIELD_DECL
5706 		  || (TREE_CODE (tem) == TYPE_DECL
5707 		      && !DECL_IGNORED_P (tem)
5708 		      && debug_info_level > DINFO_LEVEL_TERSE
5709 		      && !is_redundant_typedef (tem)))
5710 		fld_worklist_push (tem, fld);
5711 	      tem = TREE_CHAIN (tem);
5712 	    }
5713 	}
5714 
5715       fld_worklist_push (TYPE_STUB_DECL (t), fld);
5716       *ws = 0;
5717     }
5718   else if (TREE_CODE (t) == BLOCK)
5719     {
5720       tree tem;
5721       for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5722 	fld_worklist_push (tem, fld);
5723       for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5724 	fld_worklist_push (tem, fld);
5725       fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5726     }
5727 
5728   if (TREE_CODE (t) != IDENTIFIER_NODE
5729       && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5730     fld_worklist_push (TREE_TYPE (t), fld);
5731 
5732   return NULL_TREE;
5733 }
5734 
5735 
5736 /* Find decls and types in T.  */
5737 
5738 static void
5739 find_decls_types (tree t, struct free_lang_data_d *fld)
5740 {
5741   while (1)
5742     {
5743       if (!fld->pset->contains (t))
5744 	walk_tree (&t, find_decls_types_r, fld, fld->pset);
5745       if (fld->worklist.is_empty ())
5746 	break;
5747       t = fld->worklist.pop ();
5748     }
5749 }
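/* Illustrative use (hypothetical FNDECL): seeding the traversal with a
   FUNCTION_DECL collects every decl and type reachable from it into
   FLD->DECLS and FLD->TYPES, draining FLD->WORKLIST as it goes:

     find_decls_types (fndecl, &fld);

   find_decls_types_in_node below does exactly this for callgraph
   nodes.  */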
5750 
5751 /* Translate all the types in LIST into the corresponding runtime
5752    types.  */
5753 
5754 static tree
5755 get_eh_types_for_runtime (tree list)
5756 {
5757   tree head, prev;
5758 
5759   if (list == NULL_TREE)
5760     return NULL_TREE;
5761 
5762   head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5763   prev = head;
5764   list = TREE_CHAIN (list);
5765   while (list)
5766     {
5767       tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5768       TREE_CHAIN (prev) = n;
5769       prev = TREE_CHAIN (prev);
5770       list = TREE_CHAIN (list);
5771     }
5772 
5773   return head;
5774 }
5775 
5776 
5777 /* Find decls and types referenced in EH region R and store them in
5778    FLD->DECLS and FLD->TYPES.  */
5779 
5780 static void
5781 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5782 {
5783   switch (r->type)
5784     {
5785     case ERT_CLEANUP:
5786       break;
5787 
5788     case ERT_TRY:
5789       {
5790 	eh_catch c;
5791 
5792 	/* The types referenced in each catch must first be changed to the
5793 	   EH types used at runtime.  This removes references to FE types
5794 	   in the region.  */
5795 	for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5796 	  {
5797 	    c->type_list = get_eh_types_for_runtime (c->type_list);
5798 	    walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5799 	  }
5800       }
5801       break;
5802 
5803     case ERT_ALLOWED_EXCEPTIONS:
5804       r->u.allowed.type_list
5805 	= get_eh_types_for_runtime (r->u.allowed.type_list);
5806       walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5807       break;
5808 
5809     case ERT_MUST_NOT_THROW:
5810       walk_tree (&r->u.must_not_throw.failure_decl,
5811 		 find_decls_types_r, fld, fld->pset);
5812       break;
5813     }
5814 }
5815 
5816 
5817 /* Find decls and types referenced in cgraph node N and store them in
5818    FLD->DECLS and FLD->TYPES.  Unlike pass_referenced_vars, this will
5819    look for *every* kind of DECL and TYPE node reachable from N,
5820    including those embedded inside types and decls (i.e., TYPE_DECLs,
5821    NAMESPACE_DECLs, etc.).  */
5822 
5823 static void
5824 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5825 {
5826   basic_block bb;
5827   struct function *fn;
5828   unsigned ix;
5829   tree t;
5830 
5831   find_decls_types (n->decl, fld);
5832 
5833   if (!gimple_has_body_p (n->decl))
5834     return;
5835 
5836   gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5837 
5838   fn = DECL_STRUCT_FUNCTION (n->decl);
5839 
5840   /* Traverse locals. */
5841   FOR_EACH_LOCAL_DECL (fn, ix, t)
5842     find_decls_types (t, fld);
5843 
5844   /* Traverse EH regions in FN.  */
5845   {
5846     eh_region r;
5847     FOR_ALL_EH_REGION_FN (r, fn)
5848       find_decls_types_in_eh_region (r, fld);
5849   }
5850 
5851   /* Traverse every statement in FN.  */
5852   FOR_EACH_BB_FN (bb, fn)
5853     {
5854       gphi_iterator psi;
5855       gimple_stmt_iterator si;
5856       unsigned i;
5857 
5858       for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5859 	{
5860 	  gphi *phi = psi.phi ();
5861 
5862 	  for (i = 0; i < gimple_phi_num_args (phi); i++)
5863 	    {
5864 	      tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5865 	      find_decls_types (*arg_p, fld);
5866 	    }
5867 	}
5868 
5869       for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5870 	{
5871 	  gimple *stmt = gsi_stmt (si);
5872 
5873 	  if (is_gimple_call (stmt))
5874 	    find_decls_types (gimple_call_fntype (stmt), fld);
5875 
5876 	  for (i = 0; i < gimple_num_ops (stmt); i++)
5877 	    {
5878 	      tree arg = gimple_op (stmt, i);
5879 	      find_decls_types (arg, fld);
5880 	    }
5881 	}
5882     }
5883 }
5884 
5885 
5886 /* Find decls and types referenced in varpool node N and store them in
5887    FLD->DECLS and FLD->TYPES.  Unlike pass_referenced_vars, this will
5888    look for *every* kind of DECL and TYPE node reachable from N,
5889    including those embedded inside types and decls (i.e., TYPE_DECLs,
5890    NAMESPACE_DECLs, etc.).  */
5891 
5892 static void
5893 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5894 {
5895   find_decls_types (v->decl, fld);
5896 }
5897 
5898 /* If T needs an assembler name, have one created for it.  */
5899 
5900 void
5901 assign_assembler_name_if_neeeded (tree t)
5902 {
5903   if (need_assembler_name_p (t))
5904     {
5905       /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5906 	 diagnostics that use input_location to show locus
5907 	 information.  The problem here is that, at this point,
5908 	 input_location is generally anchored to the end of the file
5909 	 (since the parser is long gone), so we don't have a good
5910 	 position to pin it to.
5911 
5912 	 To alleviate this problem, this uses the location of T's
5913 	 declaration.  Examples of this are
5914 	 testsuite/g++.dg/template/cond2.C and
5915 	 testsuite/g++.dg/template/pr35240.C.  */
5916       location_t saved_location = input_location;
5917       input_location = DECL_SOURCE_LOCATION (t);
5918 
5919       decl_assembler_name (t);
5920 
5921       input_location = saved_location;
5922     }
5923 }
5924 
5925 
5926 /* Free language specific information for every operand and expression
5927    in every node of the call graph.  This process operates in three stages:
5928 
5929    1- Every callgraph node and varpool node is traversed looking for
5930       decls and types embedded in them.  This is a more exhaustive
5931       search than that done by find_referenced_vars, because it will
5932       also collect individual fields, decls embedded in types, etc.
5933 
5934    2- All the decls found are sent to free_lang_data_in_decl.
5935 
5936    3- All the types found are sent to free_lang_data_in_type.
5937 
5938    The ordering between decls and types is important because
5939    free_lang_data_in_decl sets assembler names, which includes
5940    mangling.  So types cannot be freed up until assembler names have
5941    been set up.  */
5942 
5943 static void
5944 free_lang_data_in_cgraph (void)
5945 {
5946   struct cgraph_node *n;
5947   varpool_node *v;
5948   struct free_lang_data_d fld;
5949   tree t;
5950   unsigned i;
5951   alias_pair *p;
5952 
5953   /* Initialize sets and arrays to store referenced decls and types.  */
5954   fld.pset = new hash_set<tree>;
5955   fld.worklist.create (0);
5956   fld.decls.create (100);
5957   fld.types.create (100);
5958 
5959   /* Find decls and types in the body of every function in the callgraph.  */
5960   FOR_EACH_FUNCTION (n)
5961     find_decls_types_in_node (n, &fld);
5962 
5963   FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5964     find_decls_types (p->decl, &fld);
5965 
5966   /* Find decls and types in every varpool symbol.  */
5967   FOR_EACH_VARIABLE (v)
5968     find_decls_types_in_var (v, &fld);
5969 
5970   /* Set the assembler name on every decl found.  We need to do this
5971      now because free_lang_data_in_decl will invalidate data needed
5972      for mangling.  This breaks mangling on interdependent decls.  */
5973   FOR_EACH_VEC_ELT (fld.decls, i, t)
5974     assign_assembler_name_if_neeeded (t);
5975 
5976   /* Traverse every decl found freeing its language data.  */
5977   FOR_EACH_VEC_ELT (fld.decls, i, t)
5978     free_lang_data_in_decl (t);
5979 
5980   /* Traverse every type found freeing its language data.  */
5981   FOR_EACH_VEC_ELT (fld.types, i, t)
5982     free_lang_data_in_type (t);
5983   if (flag_checking)
5984     {
5985       FOR_EACH_VEC_ELT (fld.types, i, t)
5986 	verify_type (t);
5987     }
5988 
5989   delete fld.pset;
5990   fld.worklist.release ();
5991   fld.decls.release ();
5992   fld.types.release ();
5993 }
5994 
5995 
5996 /* Free resources that are used by the FE but are not needed once it is done.  */
5997 
5998 static unsigned
5999 free_lang_data (void)
6000 {
6001   unsigned i;
6002 
6003   /* If we are the LTO frontend we have freed lang-specific data already.  */
6004   if (in_lto_p
6005       || (!flag_generate_lto && !flag_generate_offload))
6006     return 0;
6007 
6008   /* Allocate and assign alias sets to the standard integer types
6009      while the slots still hold the nodes the frontends generated.  */
6010   for (i = 0; i < itk_none; ++i)
6011     if (integer_types[i])
6012       TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
6013 
6014   /* Traverse the IL resetting language specific information for
6015      operands, expressions, etc.  */
6016   free_lang_data_in_cgraph ();
6017 
6018   /* Create gimple variants for common types.  */
6019   ptrdiff_type_node = integer_type_node;
6020   fileptr_type_node = ptr_type_node;
6021 
6022   /* Reset some langhooks.  Do not reset types_compatible_p, it may
6023      still be used indirectly via the get_alias_set langhook.  */
6024   lang_hooks.dwarf_name = lhd_dwarf_name;
6025   lang_hooks.decl_printable_name = gimple_decl_printable_name;
6026   lang_hooks.gimplify_expr = lhd_gimplify_expr;
6027 
6028   /* We do not want the default decl_assembler_name implementation;
6029      rather, once everything is fixed, we want a wrapper around it that
6030      asserts all non-local symbols already got their assembler name and
6031      produces assembler names only for local symbols.  Or rather, make
6032      sure we never call decl_assembler_name on local symbols and devise
6033      a separate, middle-end private scheme for it.  */
6034 
6035   /* Reset diagnostic machinery.  */
6036   tree_diagnostics_defaults (global_dc);
6037 
6038   return 0;
6039 }
6040 
6041 
6042 namespace {
6043 
6044 const pass_data pass_data_ipa_free_lang_data =
6045 {
6046   SIMPLE_IPA_PASS, /* type */
6047   "*free_lang_data", /* name */
6048   OPTGROUP_NONE, /* optinfo_flags */
6049   TV_IPA_FREE_LANG_DATA, /* tv_id */
6050   0, /* properties_required */
6051   0, /* properties_provided */
6052   0, /* properties_destroyed */
6053   0, /* todo_flags_start */
6054   0, /* todo_flags_finish */
6055 };
6056 
6057 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
6058 {
6059 public:
6060   pass_ipa_free_lang_data (gcc::context *ctxt)
6061     : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
6062   {}
6063 
6064   /* opt_pass methods: */
6065   virtual unsigned int execute (function *) { return free_lang_data (); }
6066 
6067 }; // class pass_ipa_free_lang_data
6068 
6069 } // anon namespace
6070 
6071 simple_ipa_opt_pass *
6072 make_pass_ipa_free_lang_data (gcc::context *ctxt)
6073 {
6074   return new pass_ipa_free_lang_data (ctxt);
6075 }
6076 
6077 /* The backbone of is_attribute_p().  ATTR_LEN is the string length of
6078    ATTR_NAME.  Also used internally by remove_attribute().  */
6079 bool
6080 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
6081 {
6082   size_t ident_len = IDENTIFIER_LENGTH (ident);
6083 
6084   if (ident_len == attr_len)
6085     {
6086       if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
6087 	return true;
6088     }
6089   else if (ident_len == attr_len + 4)
6090     {
6091       /* There is the possibility that ATTR_NAME is 'text' and IDENT is
6092 	 '__text__'.  */
6093       const char *p = IDENTIFIER_POINTER (ident);
6094       if (p[0] == '_' && p[1] == '_'
6095 	  && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6096 	  && strncmp (attr_name, p + 2, attr_len) == 0)
6097 	return true;
6098     }
6099 
6100   return false;
6101 }
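/* For example (illustrative calls only), both the plain and the
   underscored spelling of an attribute name match:

     private_is_attribute_p ("packed", 6, get_identifier ("packed"))	   -> true
     private_is_attribute_p ("packed", 6, get_identifier ("__packed__"))  -> true
     private_is_attribute_p ("packed", 6, get_identifier ("aligned"))	   -> false
*/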
6102 
6103 /* The backbone of lookup_attribute().  ATTR_LEN is the string length
6104    of ATTR_NAME, and LIST is not NULL_TREE.  */
6105 tree
6106 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
6107 {
6108   while (list)
6109     {
6110       size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6111 
6112       if (ident_len == attr_len)
6113 	{
6114 	  if (!strcmp (attr_name,
6115 		       IDENTIFIER_POINTER (get_attribute_name (list))))
6116 	    break;
6117 	}
6118       /* TODO: If we made sure that attributes were stored in the
6119 	 canonical form without '__...__' (i.e., as in 'text' as opposed
6120 	 to '__text__') then we could avoid the following case.  */
6121       else if (ident_len == attr_len + 4)
6122 	{
6123 	  const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6124 	  if (p[0] == '_' && p[1] == '_'
6125 	      && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6126 	      && strncmp (attr_name, p + 2, attr_len) == 0)
6127 	    break;
6128 	}
6129       list = TREE_CHAIN (list);
6130     }
6131 
6132   return list;
6133 }
6134 
6135 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
6136    return the first element of LIST whose attribute name starts with
6137    ATTR_NAME.  ATTR_NAME must be in the form 'text' (not
6138    '__text__').  */
6139 
6140 tree
6141 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
6142 				    tree list)
6143 {
6144   while (list)
6145     {
6146       size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6147 
6148       if (attr_len > ident_len)
6149 	{
6150 	  list = TREE_CHAIN (list);
6151 	  continue;
6152 	}
6153 
6154       const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6155 
6156       if (strncmp (attr_name, p, attr_len) == 0)
6157 	break;
6158 
6159       /* TODO: If we made sure that attributes were stored in the
6160 	 canonical form without '__...__' (i.e., as in 'text' as opposed
6161 	 to '__text__') then we could avoid the following case.  */
6162       if (p[0] == '_' && p[1] == '_'
6163 	  && strncmp (attr_name, p + 2, attr_len) == 0)
6164 	break;
6165 
6166       list = TREE_CHAIN (list);
6167     }
6168 
6169   return list;
6170 }
6171 
6172 
6173 /* A variant of lookup_attribute() that can be used with an identifier
6174    as the first argument, and where the identifier can be either
6175    'text' or '__text__'.
6176 
6177    Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
6178    return a pointer to the attribute's list element if the attribute
6179    is part of the list, or NULL_TREE if not found.  If the attribute
6180    appears more than once, this only returns the first occurrence; the
6181    TREE_CHAIN of the return value should be passed back in if further
6182    occurrences are wanted.  ATTR_IDENTIFIER must be an identifier but
6183    can be in the form 'text' or '__text__'.  */
6184 static tree
6185 lookup_ident_attribute (tree attr_identifier, tree list)
6186 {
6187   gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
6188 
6189   while (list)
6190     {
6191       gcc_checking_assert (TREE_CODE (get_attribute_name (list))
6192 			   == IDENTIFIER_NODE);
6193 
6194       if (cmp_attrib_identifiers (attr_identifier,
6195 				  get_attribute_name (list)))
6196 	/* Found it.  */
6197 	break;
6198       list = TREE_CHAIN (list);
6199     }
6200 
6201   return list;
6202 }
6203 
6204 /* Remove any instances of attribute ATTR_NAME in LIST and return the
6205    modified list.  */
6206 
6207 tree
6208 remove_attribute (const char *attr_name, tree list)
6209 {
6210   tree *p;
6211   size_t attr_len = strlen (attr_name);
6212 
6213   gcc_checking_assert (attr_name[0] != '_');
6214 
6215   for (p = &list; *p; )
6216     {
6217       tree l = *p;
6218       /* TODO: If we were storing attributes in normalized form, here
6219 	 we could use a simple strcmp().  */
6220       if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
6221 	*p = TREE_CHAIN (l);
6222       else
6223 	p = &TREE_CHAIN (l);
6224     }
6225 
6226   return list;
6227 }
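/* Illustrative use (hypothetical DECL): merge_dllimport_decl_attributes
   below strips "dllimport" from a merged attribute list this way:

     tree attrs = DECL_ATTRIBUTES (decl);
     DECL_ATTRIBUTES (decl) = remove_attribute ("dllimport", attrs);

   Note that ATTR_NAME must be given as "text", never "__text__".  */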
6228 
6229 /* Return an attribute list that is the union of A1 and A2.  */
6230 
6231 tree
6232 merge_attributes (tree a1, tree a2)
6233 {
6234   tree attributes;
6235 
6236   /* Either one unset?  Take the set one.  */
6237 
6238   if ((attributes = a1) == 0)
6239     attributes = a2;
6240 
6241   /* One that completely contains the other?  Take it.  */
6242 
6243   else if (a2 != 0 && ! attribute_list_contained (a1, a2))
6244     {
6245       if (attribute_list_contained (a2, a1))
6246 	attributes = a2;
6247       else
6248 	{
6249 	  /* Pick the longest list, and hang on the other list.  */
6250 
6251 	  if (list_length (a1) < list_length (a2))
6252 	    attributes = a2, a2 = a1;
6253 
6254 	  for (; a2 != 0; a2 = TREE_CHAIN (a2))
6255 	    {
6256 	      tree a;
6257 	      for (a = lookup_ident_attribute (get_attribute_name (a2),
6258 					       attributes);
6259 		   a != NULL_TREE && !attribute_value_equal (a, a2);
6260 		   a = lookup_ident_attribute (get_attribute_name (a2),
6261 					       TREE_CHAIN (a)))
6262 		;
6263 	      if (a == NULL_TREE)
6264 		{
6265 		  a1 = copy_node (a2);
6266 		  TREE_CHAIN (a1) = attributes;
6267 		  attributes = a1;
6268 		}
6269 	    }
6270 	}
6271     }
6272   return attributes;
6273 }
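/* An illustrative merge (hypothetical attribute lists):

     a1 = { packed }
     a2 = { packed, aligned(4) }
     merge_attributes (a1, a2)  ==>  { packed, aligned(4) }

   An attribute from the shorter list is only copied in when no
   attribute with the same name and an equal value is already
   present.  */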
6274 
6275 /* Given types T1 and T2, merge their attributes and return
6276    the result.  */
6277 
6278 tree
6279 merge_type_attributes (tree t1, tree t2)
6280 {
6281   return merge_attributes (TYPE_ATTRIBUTES (t1),
6282 			   TYPE_ATTRIBUTES (t2));
6283 }
6284 
6285 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
6286    the result.  */
6287 
6288 tree
6289 merge_decl_attributes (tree olddecl, tree newdecl)
6290 {
6291   return merge_attributes (DECL_ATTRIBUTES (olddecl),
6292 			   DECL_ATTRIBUTES (newdecl));
6293 }
6294 
6295 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6296 
6297 /* Specialization of merge_decl_attributes for various Windows targets.
6298 
6299    This handles the following situation:
6300 
6301      __declspec (dllimport) int foo;
6302      int foo;
6303 
6304    The second instance of `foo' nullifies the dllimport.  */
6305 
6306 tree
6307 merge_dllimport_decl_attributes (tree old, tree new_tree)
6308 {
6309   tree a;
6310   int delete_dllimport_p = 1;
6311 
6312   /* What we need to do here is remove from `old' dllimport if it doesn't
6313      appear in `new'.  dllimport behaves like extern: if a declaration is
6314      marked dllimport and a definition appears later, then the object
6315      is not dllimport'd.  We also remove a `new' dllimport if the old list
6316      contains dllexport:  dllexport always overrides dllimport, regardless
6317      of the order of declaration.  */
6318   if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6319     delete_dllimport_p = 0;
6320   else if (DECL_DLLIMPORT_P (new_tree)
6321      	   && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6322     {
6323       DECL_DLLIMPORT_P (new_tree) = 0;
6324       warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6325 	      "dllimport ignored", new_tree);
6326     }
6327   else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6328     {
6329       /* Warn about overriding a symbol that has already been used, e.g.:
6330            extern int __attribute__ ((dllimport)) foo;
6331 	   int* bar () {return &foo;}
6332 	   int foo;
6333       */
6334       if (TREE_USED (old))
6335 	{
6336 	  warning (0, "%q+D redeclared without dllimport attribute "
6337 		   "after being referenced with dll linkage", new_tree);
6338 	  /* If we have used a variable's address with dllimport linkage,
6339 	     keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6340 	     decl may already have had TREE_CONSTANT computed.
6341 	     We still remove the attribute so that assembler code refers
6342 	     to '&foo' rather than '_imp__foo'.  */
6343 	  if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6344 	    DECL_DLLIMPORT_P (new_tree) = 1;
6345 	}
6346 
6347       /* Let an inline definition silently override the external reference,
6348 	 but otherwise warn about attribute inconsistency.  */
6349       else if (TREE_CODE (new_tree) == VAR_DECL
6350 	       || !DECL_DECLARED_INLINE_P (new_tree))
6351 	warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6352 		  "previous dllimport ignored", new_tree);
6353     }
6354   else
6355     delete_dllimport_p = 0;
6356 
6357   a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6358 
6359   if (delete_dllimport_p)
6360     a = remove_attribute ("dllimport", a);
6361 
6362   return a;
6363 }
6364 
6365 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6366    struct attribute_spec.handler.  */
6367 
6368 tree
6369 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6370 		      bool *no_add_attrs)
6371 {
6372   tree node = *pnode;
6373   bool is_dllimport;
6374 
6375   /* These attributes may apply to structure and union types being created,
6376      but otherwise should pass to the declaration involved.  */
6377   if (!DECL_P (node))
6378     {
6379       if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6380 		   | (int) ATTR_FLAG_ARRAY_NEXT))
6381 	{
6382 	  *no_add_attrs = true;
6383 	  return tree_cons (name, args, NULL_TREE);
6384 	}
6385       if (TREE_CODE (node) == RECORD_TYPE
6386 	  || TREE_CODE (node) == UNION_TYPE)
6387 	{
6388 	  node = TYPE_NAME (node);
6389 	  if (!node)
6390 	    return NULL_TREE;
6391 	}
6392       else
6393 	{
6394 	  warning (OPT_Wattributes, "%qE attribute ignored",
6395 		   name);
6396 	  *no_add_attrs = true;
6397 	  return NULL_TREE;
6398 	}
6399     }
6400 
6401   if (TREE_CODE (node) != FUNCTION_DECL
6402       && TREE_CODE (node) != VAR_DECL
6403       && TREE_CODE (node) != TYPE_DECL)
6404     {
6405       *no_add_attrs = true;
6406       warning (OPT_Wattributes, "%qE attribute ignored",
6407 	       name);
6408       return NULL_TREE;
6409     }
6410 
6411   if (TREE_CODE (node) == TYPE_DECL
6412       && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6413       && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6414     {
6415       *no_add_attrs = true;
6416       warning (OPT_Wattributes, "%qE attribute ignored",
6417 	       name);
6418       return NULL_TREE;
6419     }
6420 
6421   is_dllimport = is_attribute_p ("dllimport", name);
6422 
6423   /* Report error on dllimport ambiguities seen now before they cause
6424      any damage.  */
6425   if (is_dllimport)
6426     {
6427       /* Honor any target-specific overrides. */
6428       if (!targetm.valid_dllimport_attribute_p (node))
6429 	*no_add_attrs = true;
6430 
6431       else if (TREE_CODE (node) == FUNCTION_DECL
6432 	       && DECL_DECLARED_INLINE_P (node))
6433 	{
6434 	  warning (OPT_Wattributes, "inline function %q+D declared as "
6435 		   "dllimport: attribute ignored", node);
6436 	  *no_add_attrs = true;
6437 	}
6438       /* Like MS, treat definition of dllimported variables and
6439 	 non-inlined functions on declaration as syntax errors. */
6440       else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6441 	{
6442 	  error ("function %q+D definition is marked dllimport", node);
6443 	  *no_add_attrs = true;
6444 	}
6445 
6446       else if (TREE_CODE (node) == VAR_DECL
6447 	{
6448 	  if (DECL_INITIAL (node))
6449 	    {
6450 	      error ("variable %q+D definition is marked dllimport",
6451 		     node);
6452 	      *no_add_attrs = true;
6453 	    }
6454 
6455 	  /* `extern' needn't be specified with dllimport.
6456 	     Specify `extern' now and hope for the best.  Sigh.  */
6457 	  DECL_EXTERNAL (node) = 1;
6458 	  /* Also, implicitly give global scope to dllimport'd variables
6459 	     declared within a function, unless declared static.  */
6460 	  if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6461 	    TREE_PUBLIC (node) = 1;
6462 	}
6463 
6464       if (*no_add_attrs == false)
6465         DECL_DLLIMPORT_P (node) = 1;
6466     }
6467   else if (TREE_CODE (node) == FUNCTION_DECL
6468 	   && DECL_DECLARED_INLINE_P (node)
6469 	   && flag_keep_inline_dllexport)
6470     /* An exported function, even if inline, must be emitted.  */
6471     DECL_EXTERNAL (node) = 0;
6472 
6473   /* Report error if symbol is not accessible at global scope.  */
6474   if (!TREE_PUBLIC (node)
6475       && (TREE_CODE (node) == VAR_DECL
6476 	  || TREE_CODE (node) == FUNCTION_DECL))
6477     {
6478       error ("external linkage required for symbol %q+D because of "
6479 	     "%qE attribute", node, name);
6480       *no_add_attrs = true;
6481     }
6482 
6483   /* A dllexport'd entity must have default visibility so that other
6484      program units (shared libraries or the main executable) can see
6485      it.  A dllimport'd entity must have default visibility so that
6486      the linker knows that undefined references within this program
6487      unit can be resolved by the dynamic linker.  */
6488   if (!*no_add_attrs)
6489     {
6490       if (DECL_VISIBILITY_SPECIFIED (node)
6491 	  && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6492 	error ("%qE implies default visibility, but %qD has already "
6493 	       "been declared with a different visibility",
6494 	       name, node);
6495       DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6496       DECL_VISIBILITY_SPECIFIED (node) = 1;
6497     }
6498 
6499   return NULL_TREE;
6500 }
6501 
6502 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES  */
6503 
6504 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6505    of the various TYPE_QUAL values.  */
6506 
6507 static void
6508 set_type_quals (tree type, int type_quals)
6509 {
6510   TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6511   TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6512   TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6513   TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6514   TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6515 }
6516 
6517 /* Returns true iff unqualified CAND and BASE are equivalent.  */
6518 
6519 bool
6520 check_base_type (const_tree cand, const_tree base)
6521 {
6522   return (TYPE_NAME (cand) == TYPE_NAME (base)
6523 	  /* Apparently this is needed for Objective-C.  */
6524 	  && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6525 	  /* Check alignment.  */
6526 	  && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6527 	  && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6528 				   TYPE_ATTRIBUTES (base)));
6529 }
6530 
6531 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS.  */
6532 
6533 bool
6534 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6535 {
6536   return (TYPE_QUALS (cand) == type_quals
6537 	  && check_base_type (cand, base));
6538 }
6539 
6540 /* Returns true iff CAND is equivalent to BASE with ALIGN.  */
6541 
6542 static bool
6543 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6544 {
6545   return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6546 	  && TYPE_NAME (cand) == TYPE_NAME (base)
6547 	  /* Apparently this is needed for Objective-C.  */
6548 	  && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6549 	  /* Check alignment.  */
6550 	  && TYPE_ALIGN (cand) == align
6551 	  && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6552 				   TYPE_ATTRIBUTES (base)));
6553 }
6554 
6555 /* This function checks to see if TYPE matches the size of one of the
6556    built-in atomic types, and returns that core atomic type.  */
6557 
6558 static tree
6559 find_atomic_core_type (tree type)
6560 {
6561   tree base_atomic_type;
6562 
6563   /* Only handle complete types.  */
6564   if (TYPE_SIZE (type) == NULL_TREE)
6565     return NULL_TREE;
6566 
6567   HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6568   switch (type_size)
6569     {
6570     case 8:
6571       base_atomic_type = atomicQI_type_node;
6572       break;
6573 
6574     case 16:
6575       base_atomic_type = atomicHI_type_node;
6576       break;
6577 
6578     case 32:
6579       base_atomic_type = atomicSI_type_node;
6580       break;
6581 
6582     case 64:
6583       base_atomic_type = atomicDI_type_node;
6584       break;
6585 
6586     case 128:
6587       base_atomic_type = atomicTI_type_node;
6588       break;
6589 
6590     default:
6591       base_atomic_type = NULL_TREE;
6592     }
6593 
6594   return base_atomic_type;
6595 }
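/* For example (illustrative only, assuming the usual 32-bit SImode):

     find_atomic_core_type (unsigned_intSI_type_node) == atomicSI_type_node
     find_atomic_core_type (some incomplete type)     == NULL_TREE
*/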
6596 
6597 /* Return a version of TYPE, qualified as indicated by TYPE_QUALS,
6598    if one exists.  If no qualified version exists yet,
6599    return NULL_TREE.  */
6600 
6601 tree
6602 get_qualified_type (tree type, int type_quals)
6603 {
6604   tree t;
6605 
6606   if (TYPE_QUALS (type) == type_quals)
6607     return type;
6608 
6609   /* Search the chain of variants to see if there is already one there just
6610      like the one we need to have.  If so, use that existing one.  We must
6611      preserve the TYPE_NAME, since there is code that depends on this.  */
6612   for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6613     if (check_qualified_type (t, type, type_quals))
6614       return t;
6615 
6616   return NULL_TREE;
6617 }
6618 
6619 /* Like get_qualified_type, but creates the type if it does not
6620    exist.  This function never returns NULL_TREE.  */
6621 
6622 tree
6623 build_qualified_type (tree type, int type_quals)
6624 {
6625   tree t;
6626 
6627   /* See if we already have the appropriate qualified variant.  */
6628   t = get_qualified_type (type, type_quals);
6629 
6630   /* If not, build it.  */
6631   if (!t)
6632     {
6633       t = build_variant_type_copy (type);
6634       set_type_quals (t, type_quals);
6635 
6636       if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
6637 	{
6638 	  /* See if this object can map to a basic atomic type.  */
6639 	  tree atomic_type = find_atomic_core_type (type);
6640 	  if (atomic_type)
6641 	    {
6642 	      /* Ensure the alignment of this type is compatible with
6643 		 the required alignment of the atomic type.  */
6644 	      if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6645 		TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6646 	    }
6647 	}
6648 
6649       if (TYPE_STRUCTURAL_EQUALITY_P (type))
6650 	/* Propagate structural equality. */
6651 	SET_TYPE_STRUCTURAL_EQUALITY (t);
6652       else if (TYPE_CANONICAL (type) != type)
6653 	/* Build the underlying canonical type, since it is different
6654 	   from TYPE. */
6655 	{
6656 	  tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6657 	  TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6658 	}
6659       else
6660 	/* T is its own canonical type. */
6661 	TYPE_CANONICAL (t) = t;
6662 
6663     }
6664 
6665   return t;
6666 }
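/* Illustrative use (hypothetical caller): the variant "const volatile
   int" can be obtained with

     tree cv_int
       = build_qualified_type (integer_type_node,
			       TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);

   A second identical call returns the same node, found by
   get_qualified_type on the main variant chain.  */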
6667 
6668 /* Create a variant of TYPE with alignment ALIGN.  */
6669 
6670 tree
6671 build_aligned_type (tree type, unsigned int align)
6672 {
6673   tree t;
6674 
6675   if (TYPE_PACKED (type)
6676       || TYPE_ALIGN (type) == align)
6677     return type;
6678 
6679   for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6680     if (check_aligned_type (t, type, align))
6681       return t;
6682 
6683   t = build_variant_type_copy (type);
6684   TYPE_ALIGN (t) = align;
6685   TYPE_USER_ALIGN (t) = 1;
6686 
6687   return t;
6688 }
6689 
6690 /* Create a new distinct copy of TYPE.  The new type is made its own
6691    MAIN_VARIANT. If TYPE requires structural equality checks, the
6692    resulting type requires structural equality checks; otherwise, its
6693    TYPE_CANONICAL points to itself. */
6694 
6695 tree
6696 build_distinct_type_copy (tree type)
6697 {
6698   tree t = copy_node (type);
6699 
6700   TYPE_POINTER_TO (t) = 0;
6701   TYPE_REFERENCE_TO (t) = 0;
6702 
6703   /* Set the canonical type either to a new equivalence class, or
6704      propagate the need for structural equality checks. */
6705   if (TYPE_STRUCTURAL_EQUALITY_P (type))
6706     SET_TYPE_STRUCTURAL_EQUALITY (t);
6707   else
6708     TYPE_CANONICAL (t) = t;
6709 
6710   /* Make it its own variant.  */
6711   TYPE_MAIN_VARIANT (t) = t;
6712   TYPE_NEXT_VARIANT (t) = 0;
6713 
6714   /* We do not record methods in type copies nor variants,
6715      so we do not need to keep them up to date when a new method
6716      is inserted.  */
6717   if (RECORD_OR_UNION_TYPE_P (t))
6718     TYPE_METHODS (t) = NULL_TREE;
6719 
6720   /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6721      whose TREE_TYPE is not t.  This can also happen in the Ada
6722      frontend when using subtypes.  */
6723 
6724   return t;
6725 }
6726 
6727 /* Create a new variant of TYPE, equivalent but distinct.  This is so
6728    the caller can modify it. TYPE_CANONICAL for the return type will
6729    be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6730    are considered equal by the language itself (or that both types
6731    require structural equality checks). */
6732 
6733 tree
6734 build_variant_type_copy (tree type)
6735 {
6736   tree t, m = TYPE_MAIN_VARIANT (type);
6737 
6738   t = build_distinct_type_copy (type);
6739 
6740   /* Since we're building a variant, assume that it is a non-semantic
6741      variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6742   TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6743   /* Type variants have no alias set defined.  */
6744   TYPE_ALIAS_SET (t) = -1;
6745 
6746   /* Add the new type to the chain of variants of TYPE.  */
6747   TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6748   TYPE_NEXT_VARIANT (m) = t;
6749   TYPE_MAIN_VARIANT (t) = m;
6750 
6751   return t;
6752 }
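/* Illustrative contrast (hypothetical type T): build_variant_type_copy (T)
   yields a copy that stays on T's variant chain and shares its
   TYPE_CANONICAL (this is what build_qualified_type relies on above),
   whereas build_distinct_type_copy (T) starts a new, canonically
   distinct main variant.  */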
6753 
6754 /* Return true if the from trees in both tree maps are equal.  */
6755 
6756 int
6757 tree_map_base_eq (const void *va, const void *vb)
6758 {
6759   const struct tree_map_base  *const a = (const struct tree_map_base *) va,
6760     *const b = (const struct tree_map_base *) vb;
6761   return (a->from == b->from);
6762 }
6763 
6764 /* Hash a from tree in a tree_map_base.  */
6765 
6766 unsigned int
6767 tree_map_base_hash (const void *item)
6768 {
6769   return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6770 }
6771 
6772 /* Return true if this tree map structure is marked for garbage collection
6773    purposes.  We simply return true if the from tree is marked, so that this
6774    structure goes away when the from tree goes away.  */
6775 
6776 int
6777 tree_map_base_marked_p (const void *p)
6778 {
6779   return ggc_marked_p (((const struct tree_map_base *) p)->from);
6780 }
6781 
6782 /* Hash a from tree in a tree_map.  */
6783 
6784 unsigned int
6785 tree_map_hash (const void *item)
6786 {
6787   return (((const struct tree_map *) item)->hash);
6788 }
6789 
6790 /* Hash a from tree in a tree_decl_map.  */
6791 
6792 unsigned int
6793 tree_decl_map_hash (const void *item)
6794 {
6795   return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6796 }
6797 
6798 /* Return the initialization priority for DECL.  */
6799 
6800 priority_type
6801 decl_init_priority_lookup (tree decl)
6802 {
6803   symtab_node *snode = symtab_node::get (decl);
6804 
6805   if (!snode)
6806     return DEFAULT_INIT_PRIORITY;
6807   return
6808     snode->get_init_priority ();
6809 }
6810 
6811 /* Return the finalization priority for DECL.  */
6812 
6813 priority_type
6814 decl_fini_priority_lookup (tree decl)
6815 {
6816   cgraph_node *node = cgraph_node::get (decl);
6817 
6818   if (!node)
6819     return DEFAULT_INIT_PRIORITY;
6820   return
6821     node->get_fini_priority ();
6822 }
6823 
6824 /* Set the initialization priority for DECL to PRIORITY.  */
6825 
6826 void
6827 decl_init_priority_insert (tree decl, priority_type priority)
6828 {
6829   struct symtab_node *snode;
6830 
6831   if (priority == DEFAULT_INIT_PRIORITY)
6832     {
6833       snode = symtab_node::get (decl);
6834       if (!snode)
6835 	return;
6836     }
6837   else if (TREE_CODE (decl) == VAR_DECL)
6838     snode = varpool_node::get_create (decl);
6839   else
6840     snode = cgraph_node::get_create (decl);
6841   snode->set_init_priority (priority);
6842 }
6843 
6844 /* Set the finalization priority for DECL to PRIORITY.  */
6845 
6846 void
6847 decl_fini_priority_insert (tree decl, priority_type priority)
6848 {
6849   struct cgraph_node *node;
6850 
6851   if (priority == DEFAULT_INIT_PRIORITY)
6852     {
6853       node = cgraph_node::get (decl);
6854       if (!node)
6855 	return;
6856     }
6857   else
6858     node = cgraph_node::get_create (decl);
6859   node->set_fini_priority (priority);
6860 }
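/* Illustrative use (hypothetical FNDECL): for a function declared as

     void setup (void) __attribute__ ((constructor (200)));

   a front end may record the priority with
   decl_init_priority_insert (fndecl, 200).  Passing
   DEFAULT_INIT_PRIORITY only resets an already existing symtab node
   and never creates one.  */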
6861 
6862 /* Print out the statistics for the DECL_DEBUG_EXPR hash table.  */
6863 
6864 static void
6865 print_debug_expr_statistics (void)
6866 {
6867   fprintf (stderr, "DECL_DEBUG_EXPR  hash: size %ld, %ld elements, %f collisions\n",
6868 	   (long) debug_expr_for_decl->size (),
6869 	   (long) debug_expr_for_decl->elements (),
6870 	   debug_expr_for_decl->collisions ());
6871 }
6872 
6873 /* Print out the statistics for the DECL_VALUE_EXPR hash table.  */
6874 
6875 static void
6876 print_value_expr_statistics (void)
6877 {
6878   fprintf (stderr, "DECL_VALUE_EXPR  hash: size %ld, %ld elements, %f collisions\n",
6879 	   (long) value_expr_for_decl->size (),
6880 	   (long) value_expr_for_decl->elements (),
6881 	   value_expr_for_decl->collisions ());
6882 }
6883 
6884 /* Lookup a debug expression for FROM, and return it if we find one.  */
6885 
6886 tree
6887 decl_debug_expr_lookup (tree from)
6888 {
6889   struct tree_decl_map *h, in;
6890   in.base.from = from;
6891 
6892   h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6893   if (h)
6894     return h->to;
6895   return NULL_TREE;
6896 }
6897 
6898 /* Insert a mapping FROM->TO in the debug expression hashtable.  */
6899 
6900 void
6901 decl_debug_expr_insert (tree from, tree to)
6902 {
6903   struct tree_decl_map *h;
6904 
6905   h = ggc_alloc<tree_decl_map> ();
6906   h->base.from = from;
6907   h->to = to;
6908   *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6909 }
6910 
6911 /* Lookup a value expression for FROM, and return it if we find one.  */
6912 
6913 tree
6914 decl_value_expr_lookup (tree from)
6915 {
6916   struct tree_decl_map *h, in;
6917   in.base.from = from;
6918 
6919   h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6920   if (h)
6921     return h->to;
6922   return NULL_TREE;
6923 }
6924 
6925 /* Insert a mapping FROM->TO in the value expression hashtable.  */
6926 
6927 void
6928 decl_value_expr_insert (tree from, tree to)
6929 {
6930   struct tree_decl_map *h;
6931 
6932   h = ggc_alloc<tree_decl_map> ();
6933   h->base.from = from;
6934   h->to = to;
6935   *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6936 }
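/* Illustrative pairing (hypothetical VAR_DECL): the hash table above is
   the storage behind the DECL_VALUE_EXPR macros, so after

     SET_DECL_VALUE_EXPR (var, expr);
     DECL_HAS_VALUE_EXPR_P (var) = 1;

   decl_value_expr_lookup (var) returns that expression again.  */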
6937 
6938 /* Lookup a vector of debug arguments for FROM, and return it if we
6939    find one.  */
6940 
6941 vec<tree, va_gc> **
6942 decl_debug_args_lookup (tree from)
6943 {
6944   struct tree_vec_map *h, in;
6945 
6946   if (!DECL_HAS_DEBUG_ARGS_P (from))
6947     return NULL;
6948   gcc_checking_assert (debug_args_for_decl != NULL);
6949   in.base.from = from;
6950   h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6951   if (h)
6952     return &h->to;
6953   return NULL;
6954 }
6955 
6956 /* Insert a mapping FROM->empty vector of debug arguments in the value
6957    expression hashtable.  */
6958 
6959 vec<tree, va_gc> **
6960 decl_debug_args_insert (tree from)
6961 {
6962   struct tree_vec_map *h;
6963   tree_vec_map **loc;
6964 
6965   if (DECL_HAS_DEBUG_ARGS_P (from))
6966     return decl_debug_args_lookup (from);
6967   if (debug_args_for_decl == NULL)
6968     debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6969   h = ggc_alloc<tree_vec_map> ();
6970   h->base.from = from;
6971   h->to = NULL;
6972   loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6973   *loc = h;
6974   DECL_HAS_DEBUG_ARGS_P (from) = 1;
6975   return &h->to;
6976 }
6977 
6978 /* Hashing of types so that we don't make duplicates.
6979    The entry point is `type_hash_canon'.  */
6980 
6981 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6982    with types in the TREE_VALUE slots), by adding the hash codes
6983    of the individual types.  */
6984 
6985 static void
6986 type_hash_list (const_tree list, inchash::hash &hstate)
6987 {
6988   const_tree tail;
6989 
6990   for (tail = list; tail; tail = TREE_CHAIN (tail))
6991     if (TREE_VALUE (tail) != error_mark_node)
6992       hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6993 }
6994 
6995 /* These are the Hashtable callback functions.  */
6996 
6997 /* Returns true iff the types are equivalent.  */
6998 
6999 bool
7000 type_cache_hasher::equal (type_hash *a, type_hash *b)
7001 {
7002   /* First test the things that are the same for all types.  */
7003   if (a->hash != b->hash
7004       || TREE_CODE (a->type) != TREE_CODE (b->type)
7005       || TREE_TYPE (a->type) != TREE_TYPE (b->type)
7006       || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
7007 				 TYPE_ATTRIBUTES (b->type))
7008       || (TREE_CODE (a->type) != COMPLEX_TYPE
7009           && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
7010     return 0;
7011 
7012   /* Be careful about comparing arrays before and after the element type
7013      has been completed; don't compare TYPE_ALIGN unless both types are
7014      complete.  */
7015   if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
7016       && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
7017 	  || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
7018     return 0;
7019 
7020   switch (TREE_CODE (a->type))
7021     {
7022     case VOID_TYPE:
7023     case COMPLEX_TYPE:
7024     case POINTER_TYPE:
7025     case REFERENCE_TYPE:
7026     case NULLPTR_TYPE:
7027       return 1;
7028 
7029     case VECTOR_TYPE:
7030       return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
7031 
7032     case ENUMERAL_TYPE:
7033       if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
7034 	  && !(TYPE_VALUES (a->type)
7035 	       && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
7036 	       && TYPE_VALUES (b->type)
7037 	       && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
7038 	       && type_list_equal (TYPE_VALUES (a->type),
7039 				   TYPE_VALUES (b->type))))
7040 	return 0;
7041 
7042       /* ... fall through ... */
7043 
7044     case INTEGER_TYPE:
7045     case REAL_TYPE:
7046     case BOOLEAN_TYPE:
7047       if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
7048 	return false;
7049       return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
7050 	       || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
7051 				      TYPE_MAX_VALUE (b->type)))
7052 	      && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
7053 		  || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
7054 					 TYPE_MIN_VALUE (b->type))));
7055 
7056     case FIXED_POINT_TYPE:
7057       return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
7058 
7059     case OFFSET_TYPE:
7060       return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
7061 
7062     case METHOD_TYPE:
7063       if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
7064 	  && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7065 	      || (TYPE_ARG_TYPES (a->type)
7066 		  && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7067 		  && TYPE_ARG_TYPES (b->type)
7068 		  && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7069 		  && type_list_equal (TYPE_ARG_TYPES (a->type),
7070 				      TYPE_ARG_TYPES (b->type)))))
7071         break;
7072       return 0;
7073     case ARRAY_TYPE:
7074       return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
7075 
7076     case RECORD_TYPE:
7077     case UNION_TYPE:
7078     case QUAL_UNION_TYPE:
7079       return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
7080 	      || (TYPE_FIELDS (a->type)
7081 		  && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
7082 		  && TYPE_FIELDS (b->type)
7083 		  && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
7084 		  && type_list_equal (TYPE_FIELDS (a->type),
7085 				      TYPE_FIELDS (b->type))));
7086 
7087     case FUNCTION_TYPE:
7088       if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7089 	  || (TYPE_ARG_TYPES (a->type)
7090 	      && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7091 	      && TYPE_ARG_TYPES (b->type)
7092 	      && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7093 	      && type_list_equal (TYPE_ARG_TYPES (a->type),
7094 				  TYPE_ARG_TYPES (b->type))))
7095 	break;
7096       return 0;
7097 
7098     default:
7099       return 0;
7100     }
7101 
7102   if (lang_hooks.types.type_hash_eq != NULL)
7103     return lang_hooks.types.type_hash_eq (a->type, b->type);
7104 
7105   return 1;
7106 }
7107 
7108 /* Given TYPE, and HASHCODE its hash code, return the canonical
7109    object for an identical type if one already exists.
7110    Otherwise, return TYPE, and record it as the canonical object.
7111 
7112    To use this function, first create a type of the sort you want.
7113    Then compute its hash code from the fields of the type that
7114    make it different from other similar types.
7115    Then call this function and use the value.  */
7116 
7117 tree
7118 type_hash_canon (unsigned int hashcode, tree type)
7119 {
7120   type_hash in;
7121   type_hash **loc;
7122 
7123   /* The hash table only contains main variants, so ensure that's what we're
7124      being passed.  */
7125   gcc_assert (TYPE_MAIN_VARIANT (type) == type);
7126 
7127   /* The TYPE_ALIGN field of a type is set by layout_type(), so we
7128      must call that routine before comparing TYPE_ALIGNs.  */
7129   layout_type (type);
7130 
7131   in.hash = hashcode;
7132   in.type = type;
7133 
7134   loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
7135   if (*loc)
7136     {
7137       tree t1 = ((type_hash *) *loc)->type;
7138       gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
7139       free_node (type);
7140       return t1;
7141     }
7142   else
7143     {
7144       struct type_hash *h;
7145 
7146       h = ggc_alloc<type_hash> ();
7147       h->hash = hashcode;
7148       h->type = type;
7149       *loc = h;
7150 
7151       return type;
7152     }
7153 }
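
/* As a concrete sketch of that recipe, build_offset_type later in this
   file creates the node, hashes the two fields that distinguish it, and
   then canonicalizes it, roughly:

     t = make_node (OFFSET_TYPE);
     TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
     TREE_TYPE (t) = type;

     hstate.add_object (TYPE_HASH (basetype));
     hstate.add_object (TYPE_HASH (type));
     t = type_hash_canon (hstate.end (), t);

   If an equivalent type was already registered, the freshly built node is
   freed by free_node and the existing one is returned instead.  */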
7154 
7155 static void
7156 print_type_hash_statistics (void)
7157 {
7158   fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
7159 	   (long) type_hash_table->size (),
7160 	   (long) type_hash_table->elements (),
7161 	   type_hash_table->collisions ());
7162 }
7163 
7164 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
7165    with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
7166    by adding the hash codes of the individual attributes.  */
7167 
7168 static void
7169 attribute_hash_list (const_tree list, inchash::hash &hstate)
7170 {
7171   const_tree tail;
7172 
7173   for (tail = list; tail; tail = TREE_CHAIN (tail))
7174     /* ??? Do we want to add in TREE_VALUE too? */
7175     hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
7176 }
7177 
7178 /* Given two lists of attributes, return true if list L2 is
7179    equivalent to L1.  */
7180 
7181 int
7182 attribute_list_equal (const_tree l1, const_tree l2)
7183 {
7184   if (l1 == l2)
7185     return 1;
7186 
7187   return attribute_list_contained (l1, l2)
7188 	 && attribute_list_contained (l2, l1);
7189 }
7190 
7191 /* Given two lists of attributes, return true if list L2 is
7192    completely contained within L1.  */
7193 /* ??? This would be faster if attribute names were stored in a canonicalized
7194    form.  Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
7195    must be used to show these elements are equivalent (which they are).  */
7196 /* ??? It's not clear that attributes with arguments will always be handled
7197    correctly.  */
7198 
7199 int
7200 attribute_list_contained (const_tree l1, const_tree l2)
7201 {
7202   const_tree t1, t2;
7203 
7204   /* First check the obvious, maybe the lists are identical.  */
7205   if (l1 == l2)
7206     return 1;
7207 
7208   /* Maybe the lists are similar.  */
7209   for (t1 = l1, t2 = l2;
7210        t1 != 0 && t2 != 0
7211         && get_attribute_name (t1) == get_attribute_name (t2)
7212         && TREE_VALUE (t1) == TREE_VALUE (t2);
7213        t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7214     ;
7215 
7216   /* Maybe the lists are equal.  */
7217   if (t1 == 0 && t2 == 0)
7218     return 1;
7219 
7220   for (; t2 != 0; t2 = TREE_CHAIN (t2))
7221     {
7222       const_tree attr;
7223       /* This CONST_CAST is okay because lookup_attribute does not
7224 	 modify its argument and the return value is assigned to a
7225 	 const_tree.  */
7226       for (attr = lookup_ident_attribute (get_attribute_name (t2),
7227 					  CONST_CAST_TREE (l1));
7228 	   attr != NULL_TREE && !attribute_value_equal (t2, attr);
7229 	   attr = lookup_ident_attribute (get_attribute_name (t2),
7230 					  TREE_CHAIN (attr)))
7231 	;
7232 
7233       if (attr == NULL_TREE)
7234 	return 0;
7235     }
7236 
7237   return 1;
7238 }
7239 
7240 /* Given two lists of types
7241    (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7242    return 1 if the lists contain the same types in the same order.
7243    Also, the TREE_PURPOSEs must match.  */
7244 
7245 int
7246 type_list_equal (const_tree l1, const_tree l2)
7247 {
7248   const_tree t1, t2;
7249 
7250   for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7251     if (TREE_VALUE (t1) != TREE_VALUE (t2)
7252 	|| (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7253 	    && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7254 		  && (TREE_TYPE (TREE_PURPOSE (t1))
7255 		      == TREE_TYPE (TREE_PURPOSE (t2))))))
7256       return 0;
7257 
7258   return t1 == t2;
7259 }
7260 
7261 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7262    given by TYPE.  If the argument list accepts variable arguments,
7263    then this function counts only the ordinary arguments.  */
7264 
7265 int
7266 type_num_arguments (const_tree type)
7267 {
7268   int i = 0;
7269   tree t;
7270 
7271   for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7272     /* If the function does not take a variable number of arguments,
7273        the last element in the list will have type `void'.  */
7274     if (VOID_TYPE_P (TREE_VALUE (t)))
7275       break;
7276     else
7277       ++i;
7278 
7279   return i;
7280 }
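
/* For example, for the variadic type built by

     build_varargs_function_type_list (integer_type_node,
				       integer_type_node, ptr_type_node,
				       NULL_TREE)

   type_num_arguments returns 2: only the two named arguments are counted,
   and since a variadic TYPE_ARG_TYPES chain carries no trailing void
   entry, the loop simply runs off the end of the list.  */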
7281 
7282 /* Nonzero if integer constants T1 and T2
7283    represent the same constant value.  */
7284 
7285 int
7286 tree_int_cst_equal (const_tree t1, const_tree t2)
7287 {
7288   if (t1 == t2)
7289     return 1;
7290 
7291   if (t1 == 0 || t2 == 0)
7292     return 0;
7293 
7294   if (TREE_CODE (t1) == INTEGER_CST
7295       && TREE_CODE (t2) == INTEGER_CST
7296       && wi::to_widest (t1) == wi::to_widest (t2))
7297     return 1;
7298 
7299   return 0;
7300 }
7301 
7302 /* Return true if T is an INTEGER_CST whose numerical value (extended
7303    according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT.  */
7304 
7305 bool
7306 tree_fits_shwi_p (const_tree t)
7307 {
7308   return (t != NULL_TREE
7309 	  && TREE_CODE (t) == INTEGER_CST
7310 	  && wi::fits_shwi_p (wi::to_widest (t)));
7311 }
7312 
7313 /* Return true if T is an INTEGER_CST whose numerical value (extended
7314    according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT.  */
7315 
7316 bool
7317 tree_fits_uhwi_p (const_tree t)
7318 {
7319   return (t != NULL_TREE
7320 	  && TREE_CODE (t) == INTEGER_CST
7321 	  && wi::fits_uhwi_p (wi::to_widest (t)));
7322 }
7323 
7324 /* T is an INTEGER_CST whose numerical value (extended according to
7325    TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT.  Return that
7326    HOST_WIDE_INT.  */
7327 
7328 HOST_WIDE_INT
7329 tree_to_shwi (const_tree t)
7330 {
7331   gcc_assert (tree_fits_shwi_p (t));
7332   return TREE_INT_CST_LOW (t);
7333 }
7334 
7335 /* T is an INTEGER_CST whose numerical value (extended according to
7336    TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT.  Return that
7337    HOST_WIDE_INT.  */
7338 
7339 unsigned HOST_WIDE_INT
7340 tree_to_uhwi (const_tree t)
7341 {
7342   gcc_assert (tree_fits_uhwi_p (t));
7343   return TREE_INT_CST_LOW (t);
7344 }
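
/* The intended idiom is to test with the predicate before extracting.
   A sketch, where TYPE stands for some complete type node:

     tree size = TYPE_SIZE_UNIT (type);
     unsigned HOST_WIDE_INT bytes = 0;
     if (size && tree_fits_uhwi_p (size))
       bytes = tree_to_uhwi (size);

   Extracting from a node that fails the predicate trips the asserts
   above.  */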
7345 
7346 /* Return the most significant (sign) bit of T.  */
7347 
7348 int
7349 tree_int_cst_sign_bit (const_tree t)
7350 {
7351   unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7352 
7353   return wi::extract_uhwi (t, bitno, 1);
7354 }
7355 
7356 /* Return an indication of the sign of the integer constant T.
7357    The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7358    Note that -1 will never be returned if T's type is unsigned.  */
7359 
7360 int
7361 tree_int_cst_sgn (const_tree t)
7362 {
7363   if (wi::eq_p (t, 0))
7364     return 0;
7365   else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7366     return 1;
7367   else if (wi::neg_p (t))
7368     return -1;
7369   else
7370     return 1;
7371 }
7372 
7373 /* Return the minimum number of bits needed to represent VALUE in a
7374    signed or unsigned type; SGN says which.  */
7375 
7376 unsigned int
7377 tree_int_cst_min_precision (tree value, signop sgn)
7378 {
7379   /* If the value is negative, compute its negative minus 1.  The latter
7380      adjustment is because the absolute value of the largest negative value
7381      is one larger than the largest positive value.  This is equivalent to
7382      a bit-wise negation, so use that operation instead.  */
7383 
7384   if (tree_int_cst_sgn (value) < 0)
7385     value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7386 
7387   /* Return the number of bits needed, taking into account the fact
7388      that we need one more bit for a signed than unsigned type.
7389      If value is 0 or -1, the minimum precision is 1 no matter
7390      whether SGN is SIGNED or UNSIGNED.  */
7391 
7392   if (integer_zerop (value))
7393     return 1;
7394   else
7395     return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7396 }
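
/* Two worked cases: for VALUE == 5, tree_floor_log2 (5) + 1 == 3, so the
   minimum precision is 3 bits unsigned and 4 bits signed; for VALUE == -3,
   the value is first rewritten to ~(-3) == 2, giving
   tree_floor_log2 (2) + 1 + 1 == 3, which matches the 3-bit
   two's-complement range [-4, 3].  */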
7397 
7398 /* Return truthvalue of whether T1 is the same tree structure as T2.
7399    Return 1 if they are the same.
7400    Return 0 if they are understandably different.
7401    Return -1 if either contains tree structure not understood by
7402    this function.  */
7403 
7404 int
7405 simple_cst_equal (const_tree t1, const_tree t2)
7406 {
7407   enum tree_code code1, code2;
7408   int cmp;
7409   int i;
7410 
7411   if (t1 == t2)
7412     return 1;
7413   if (t1 == 0 || t2 == 0)
7414     return 0;
7415 
7416   code1 = TREE_CODE (t1);
7417   code2 = TREE_CODE (t2);
7418 
7419   if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7420     {
7421       if (CONVERT_EXPR_CODE_P (code2)
7422 	  || code2 == NON_LVALUE_EXPR)
7423 	return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7424       else
7425 	return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7426     }
7427 
7428   else if (CONVERT_EXPR_CODE_P (code2)
7429 	   || code2 == NON_LVALUE_EXPR)
7430     return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7431 
7432   if (code1 != code2)
7433     return 0;
7434 
7435   switch (code1)
7436     {
7437     case INTEGER_CST:
7438       return wi::to_widest (t1) == wi::to_widest (t2);
7439 
7440     case REAL_CST:
7441       return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
7442 
7443     case FIXED_CST:
7444       return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7445 
7446     case STRING_CST:
7447       return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7448 	      && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7449 			 TREE_STRING_LENGTH (t1)));
7450 
7451     case CONSTRUCTOR:
7452       {
7453 	unsigned HOST_WIDE_INT idx;
7454 	vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7455 	vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7456 
7457 	if (vec_safe_length (v1) != vec_safe_length (v2))
7458 	  return false;
7459 
7460         for (idx = 0; idx < vec_safe_length (v1); ++idx)
7461 	  /* ??? Should we handle also fields here? */
7462 	  if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7463 	    return false;
7464 	return true;
7465       }
7466 
7467     case SAVE_EXPR:
7468       return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7469 
7470     case CALL_EXPR:
7471       cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7472       if (cmp <= 0)
7473 	return cmp;
7474       if (call_expr_nargs (t1) != call_expr_nargs (t2))
7475 	return 0;
7476       {
7477 	const_tree arg1, arg2;
7478 	const_call_expr_arg_iterator iter1, iter2;
7479 	for (arg1 = first_const_call_expr_arg (t1, &iter1),
7480 	       arg2 = first_const_call_expr_arg (t2, &iter2);
7481 	     arg1 && arg2;
7482 	     arg1 = next_const_call_expr_arg (&iter1),
7483 	       arg2 = next_const_call_expr_arg (&iter2))
7484 	  {
7485 	    cmp = simple_cst_equal (arg1, arg2);
7486 	    if (cmp <= 0)
7487 	      return cmp;
7488 	  }
7489 	return arg1 == arg2;
7490       }
7491 
7492     case TARGET_EXPR:
7493       /* Special case: if either target is an unallocated VAR_DECL,
7494 	 it means that it's going to be unified with whatever the
7495 	 TARGET_EXPR is really supposed to initialize, so treat it
7496 	 as being equivalent to anything.  */
7497       if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7498 	   && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7499 	   && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7500 	  || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7501 	      && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7502 	      && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7503 	cmp = 1;
7504       else
7505 	cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7506 
7507       if (cmp <= 0)
7508 	return cmp;
7509 
7510       return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7511 
7512     case WITH_CLEANUP_EXPR:
7513       cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7514       if (cmp <= 0)
7515 	return cmp;
7516 
7517       return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7518 
7519     case COMPONENT_REF:
7520       if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7521 	return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7522 
7523       return 0;
7524 
7525     case VAR_DECL:
7526     case PARM_DECL:
7527     case CONST_DECL:
7528     case FUNCTION_DECL:
7529       return 0;
7530 
7531     default:
7532       break;
7533     }
7534 
7535   /* This general rule works for most tree codes.  All exceptions should be
7536      handled above.  If this is a language-specific tree code, we can't
7537      trust what might be in the operand, so say we don't know
7538      the situation.  */
7539   if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7540     return -1;
7541 
7542   switch (TREE_CODE_CLASS (code1))
7543     {
7544     case tcc_unary:
7545     case tcc_binary:
7546     case tcc_comparison:
7547     case tcc_expression:
7548     case tcc_reference:
7549     case tcc_statement:
7550       cmp = 1;
7551       for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7552 	{
7553 	  cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7554 	  if (cmp <= 0)
7555 	    return cmp;
7556 	}
7557 
7558       return cmp;
7559 
7560     default:
7561       return -1;
7562     }
7563 }
7564 
7565 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7566    Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7567    than U, respectively.  */
7568 
7569 int
7570 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7571 {
7572   if (tree_int_cst_sgn (t) < 0)
7573     return -1;
7574   else if (!tree_fits_uhwi_p (t))
7575     return 1;
7576   else if (TREE_INT_CST_LOW (t) == u)
7577     return 0;
7578   else if (TREE_INT_CST_LOW (t) < u)
7579     return -1;
7580   else
7581     return 1;
7582 }
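
/* This is convenient when the constant might not fit in a HOST_WIDE_INT
   at all.  For instance, assuming TYPE_SIZE_UNIT (type) is an INTEGER_CST,
   a caller can test whether a type is wider than one byte with

     if (compare_tree_int (TYPE_SIZE_UNIT (type), 1) > 0)

   without checking tree_fits_uhwi_p first, since oversized nonnegative
   constants simply compare as greater.  */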
7583 
7584 /* Return true if SIZE represents a constant size that is in bounds of
7585    what the middle-end and the backend accepts (covering not more than
7586    half of the address-space).  */
7587 
7588 bool
7589 valid_constant_size_p (const_tree size)
7590 {
7591   if (! tree_fits_uhwi_p (size)
7592       || TREE_OVERFLOW (size)
7593       || tree_int_cst_sign_bit (size) != 0)
7594     return false;
7595   return true;
7596 }
7597 
7598 /* Return the precision of the type, or for a complex or vector type the
7599    precision of the type of its elements.  */
7600 
7601 unsigned int
7602 element_precision (const_tree type)
7603 {
7604   if (!TYPE_P (type))
7605     type = TREE_TYPE (type);
7606   enum tree_code code = TREE_CODE (type);
7607   if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7608     type = TREE_TYPE (type);
7609 
7610   return TYPE_PRECISION (type);
7611 }
7612 
7613 /* Return true if CODE represents an associative tree code.  Otherwise
7614    return false.  */
7615 bool
7616 associative_tree_code (enum tree_code code)
7617 {
7618   switch (code)
7619     {
7620     case BIT_IOR_EXPR:
7621     case BIT_AND_EXPR:
7622     case BIT_XOR_EXPR:
7623     case PLUS_EXPR:
7624     case MULT_EXPR:
7625     case MIN_EXPR:
7626     case MAX_EXPR:
7627       return true;
7628 
7629     default:
7630       break;
7631     }
7632   return false;
7633 }
7634 
7635 /* Return true if CODE represents a commutative tree code.  Otherwise
7636    return false.  */
7637 bool
7638 commutative_tree_code (enum tree_code code)
7639 {
7640   switch (code)
7641     {
7642     case PLUS_EXPR:
7643     case MULT_EXPR:
7644     case MULT_HIGHPART_EXPR:
7645     case MIN_EXPR:
7646     case MAX_EXPR:
7647     case BIT_IOR_EXPR:
7648     case BIT_XOR_EXPR:
7649     case BIT_AND_EXPR:
7650     case NE_EXPR:
7651     case EQ_EXPR:
7652     case UNORDERED_EXPR:
7653     case ORDERED_EXPR:
7654     case UNEQ_EXPR:
7655     case LTGT_EXPR:
7656     case TRUTH_AND_EXPR:
7657     case TRUTH_XOR_EXPR:
7658     case TRUTH_OR_EXPR:
7659     case WIDEN_MULT_EXPR:
7660     case VEC_WIDEN_MULT_HI_EXPR:
7661     case VEC_WIDEN_MULT_LO_EXPR:
7662     case VEC_WIDEN_MULT_EVEN_EXPR:
7663     case VEC_WIDEN_MULT_ODD_EXPR:
7664       return true;
7665 
7666     default:
7667       break;
7668     }
7669   return false;
7670 }
7671 
7672 /* Return true if CODE represents a ternary tree code for which the
7673    first two operands are commutative.  Otherwise return false.  */
7674 bool
7675 commutative_ternary_tree_code (enum tree_code code)
7676 {
7677   switch (code)
7678     {
7679     case WIDEN_MULT_PLUS_EXPR:
7680     case WIDEN_MULT_MINUS_EXPR:
7681     case DOT_PROD_EXPR:
7682     case FMA_EXPR:
7683       return true;
7684 
7685     default:
7686       break;
7687     }
7688   return false;
7689 }
7690 
7691 /* Returns true if CODE can overflow.  */
7692 
7693 bool
7694 operation_can_overflow (enum tree_code code)
7695 {
7696   switch (code)
7697     {
7698     case PLUS_EXPR:
7699     case MINUS_EXPR:
7700     case MULT_EXPR:
7701     case LSHIFT_EXPR:
7702       /* Can overflow in various ways.  */
7703       return true;
7704     case TRUNC_DIV_EXPR:
7705     case EXACT_DIV_EXPR:
7706     case FLOOR_DIV_EXPR:
7707     case CEIL_DIV_EXPR:
7708       /* For INT_MIN / -1.  */
7709       return true;
7710     case NEGATE_EXPR:
7711     case ABS_EXPR:
7712       /* For -INT_MIN.  */
7713       return true;
7714     default:
7715       /* These operators cannot overflow.  */
7716       return false;
7717     }
7718 }
7719 
7720 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
7721    -ftrapv doesn't generate trapping insns for CODE.  */
7722 
7723 bool
7724 operation_no_trapping_overflow (tree type, enum tree_code code)
7725 {
7726   gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7727 
7728   /* We don't generate instructions that trap on overflow for complex or vector
7729      types.  */
7730   if (!INTEGRAL_TYPE_P (type))
7731     return true;
7732 
7733   if (!TYPE_OVERFLOW_TRAPS (type))
7734     return true;
7735 
7736   switch (code)
7737     {
7738     case PLUS_EXPR:
7739     case MINUS_EXPR:
7740     case MULT_EXPR:
7741     case NEGATE_EXPR:
7742     case ABS_EXPR:
7743       /* These operators can overflow, and -ftrapv generates trapping code for
7744 	 these.  */
7745       return false;
7746     case TRUNC_DIV_EXPR:
7747     case EXACT_DIV_EXPR:
7748     case FLOOR_DIV_EXPR:
7749     case CEIL_DIV_EXPR:
7750     case LSHIFT_EXPR:
7751       /* These operators can overflow, but -ftrapv does not generate trapping
7752 	 code for these.  */
7753       return true;
7754     default:
7755       /* These operators cannot overflow.  */
7756       return true;
7757     }
7758 }
7759 
7760 namespace inchash
7761 {
7762 
7763 /* Generate a hash value for an expression.  This can be used iteratively
7764    by passing a previous result as the HSTATE argument.
7765 
7766    This function is intended to produce the same hash for expressions which
7767    would compare equal using operand_equal_p.  */
7768 void
7769 add_expr (const_tree t, inchash::hash &hstate)
7770 {
7771   int i;
7772   enum tree_code code;
7773   enum tree_code_class tclass;
7774 
7775   if (t == NULL_TREE)
7776     {
7777       hstate.merge_hash (0);
7778       return;
7779     }
7780 
7781   code = TREE_CODE (t);
7782 
7783   switch (code)
7784     {
7785     /* Alas, constants aren't shared, so we can't rely on pointer
7786        identity.  */
7787     case VOID_CST:
7788       hstate.merge_hash (0);
7789       return;
7790     case INTEGER_CST:
7791       for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7792 	hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7793       return;
7794     case REAL_CST:
7795       {
7796 	unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7797 	hstate.merge_hash (val2);
7798 	return;
7799       }
7800     case FIXED_CST:
7801       {
7802 	unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7803 	hstate.merge_hash (val2);
7804 	return;
7805       }
7806     case STRING_CST:
7807       hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7808       return;
7809     case COMPLEX_CST:
7810       inchash::add_expr (TREE_REALPART (t), hstate);
7811       inchash::add_expr (TREE_IMAGPART (t), hstate);
7812       return;
7813     case VECTOR_CST:
7814       {
7815 	unsigned i;
7816 	for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7817 	  inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7818 	return;
7819       }
7820     case SSA_NAME:
7821       /* We can just compare by pointer.  */
7822       hstate.add_wide_int (SSA_NAME_VERSION (t));
7823       return;
7824     case PLACEHOLDER_EXPR:
7825       /* The node itself doesn't matter.  */
7826       return;
7827     case TREE_LIST:
7828       /* A list of expressions, for a CALL_EXPR or as the elements of a
7829 	 VECTOR_CST.  */
7830       for (; t; t = TREE_CHAIN (t))
7831 	inchash::add_expr (TREE_VALUE (t), hstate);
7832       return;
7833     case CONSTRUCTOR:
7834       {
7835 	unsigned HOST_WIDE_INT idx;
7836 	tree field, value;
7837 	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7838 	  {
7839 	    inchash::add_expr (field, hstate);
7840 	    inchash::add_expr (value, hstate);
7841 	  }
7842 	return;
7843       }
7844     case FUNCTION_DECL:
7845       /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7846 	 Otherwise nodes that compare equal according to operand_equal_p might
7847 	 get different hash codes.  However, don't do this for machine specific
7848 	 or front end builtins, since the function code is overloaded in those
7849 	 cases.  */
7850       if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7851 	  && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7852 	{
7853 	  t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7854 	  code = TREE_CODE (t);
7855 	}
7856       /* FALL THROUGH */
7857     default:
7858       tclass = TREE_CODE_CLASS (code);
7859 
7860       if (tclass == tcc_declaration)
7861 	{
7862 	  /* DECLs have a unique ID.  */
7863 	  hstate.add_wide_int (DECL_UID (t));
7864 	}
7865       else
7866 	{
7867 	  gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7868 
7869 	  hstate.add_object (code);
7870 
7871 	  /* Don't hash the type, that can lead to having nodes which
7872 	     compare equal according to operand_equal_p, but which
7873 	     have different hash codes.  */
7874 	  if (CONVERT_EXPR_CODE_P (code)
7875 	      || code == NON_LVALUE_EXPR)
7876 	    {
7877 	      /* Make sure to include signedness in the hash computation.  */
7878 	      hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7879 	      inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7880 	    }
7881 
7882 	  else if (commutative_tree_code (code))
7883 	    {
7884 	      /* It's a commutative expression.  We want to hash it the same
7885 		 however it appears.  We do this by first hashing both operands
7886 		 and then rehashing based on the order of their independent
7887 		 hashes.  */
7888 	      inchash::hash one, two;
7889 	      inchash::add_expr (TREE_OPERAND (t, 0), one);
7890 	      inchash::add_expr (TREE_OPERAND (t, 1), two);
7891 	      hstate.add_commutative (one, two);
7892 	    }
7893 	  else
7894 	    for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7895 	      inchash::add_expr (TREE_OPERAND (t, i), hstate);
7896 	}
7897       return;
7898     }
7899 }
7900 
7901 }
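
/* For instance, because PLUS_EXPR is commutative, the two orderings of the
   same addition are intended to hash identically.  A sketch, where X and Y
   stand for arbitrary operands of type integer_type_node:

     inchash::hash h1, h2;
     inchash::add_expr (build2 (PLUS_EXPR, integer_type_node, x, y), h1);
     inchash::add_expr (build2 (PLUS_EXPR, integer_type_node, y, x), h2);
     gcc_checking_assert (h1.end () == h2.end ());

   This mirrors the add_commutative handling in inchash::add_expr above.  */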
7902 
7903 /* Constructors for pointer, array and function types.
7904    (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7905    constructed by language-dependent code, not here.)  */
7906 
7907 /* Construct, lay out and return the type of pointers to TO_TYPE with
7908    mode MODE.  If CAN_ALIAS_ALL is TRUE, indicate this type can
7909    reference all of memory. If such a type has already been
7910    constructed, reuse it.  */
7911 
7912 tree
7913 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7914 			     bool can_alias_all)
7915 {
7916   tree t;
7917   bool could_alias = can_alias_all;
7918 
7919   if (to_type == error_mark_node)
7920     return error_mark_node;
7921 
7922   /* If the pointed-to type has the may_alias attribute set, force
7923      a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
7924   if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7925     can_alias_all = true;
7926 
7927   /* In some cases, languages will have things that aren't a POINTER_TYPE
7928      (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7929      In that case, return that type without regard to the rest of our
7930      operands.
7931 
7932      ??? This is a kludge, but consistent with the way this function has
7933      always operated and there doesn't seem to be a good way to avoid this
7934      at the moment.  */
7935   if (TYPE_POINTER_TO (to_type) != 0
7936       && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7937     return TYPE_POINTER_TO (to_type);
7938 
7939   /* First, if we already have a type for pointers to TO_TYPE and it's
7940      the proper mode, use it.  */
7941   for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7942     if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7943       return t;
7944 
7945   t = make_node (POINTER_TYPE);
7946 
7947   TREE_TYPE (t) = to_type;
7948   SET_TYPE_MODE (t, mode);
7949   TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7950   TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7951   TYPE_POINTER_TO (to_type) = t;
7952 
7953   /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
7954   if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7955     SET_TYPE_STRUCTURAL_EQUALITY (t);
7956   else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7957     TYPE_CANONICAL (t)
7958       = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7959 				     mode, false);
7960 
7961   /* Lay out the type.  This function has many callers that are concerned
7962      with expression-construction, and this simplifies them all.  */
7963   layout_type (t);
7964 
7965   return t;
7966 }
7967 
7968 /* By default build pointers in ptr_mode.  */
7969 
7970 tree
7971 build_pointer_type (tree to_type)
7972 {
7973   addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7974 					      : TYPE_ADDR_SPACE (to_type);
7975   machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7976   return build_pointer_type_for_mode (to_type, pointer_mode, false);
7977 }
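
/* So a front end asking for the type "char *" can simply write

     tree char_ptr = build_pointer_type (char_type_node);

   and repeated calls return the same node, since the TYPE_POINTER_TO list
   of char_type_node is searched before a new POINTER_TYPE is made.  */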
7978 
7979 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE.  */
7980 
7981 tree
7982 build_reference_type_for_mode (tree to_type, machine_mode mode,
7983 			       bool can_alias_all)
7984 {
7985   tree t;
7986   bool could_alias = can_alias_all;
7987 
7988   if (to_type == error_mark_node)
7989     return error_mark_node;
7990 
7991   /* If the pointed-to type has the may_alias attribute set, force
7992      a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
7993   if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7994     can_alias_all = true;
7995 
7996   /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7997      (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7998      In that case, return that type without regard to the rest of our
7999      operands.
8000 
8001      ??? This is a kludge, but consistent with the way this function has
8002      always operated and there doesn't seem to be a good way to avoid this
8003      at the moment.  */
8004   if (TYPE_REFERENCE_TO (to_type) != 0
8005       && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
8006     return TYPE_REFERENCE_TO (to_type);
8007 
8008   /* First, if we already have a type for pointers to TO_TYPE and it's
8009      the proper mode, use it.  */
8010   for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
8011     if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
8012       return t;
8013 
8014   t = make_node (REFERENCE_TYPE);
8015 
8016   TREE_TYPE (t) = to_type;
8017   SET_TYPE_MODE (t, mode);
8018   TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
8019   TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
8020   TYPE_REFERENCE_TO (to_type) = t;
8021 
8022   /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
8023   if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
8024     SET_TYPE_STRUCTURAL_EQUALITY (t);
8025   else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
8026     TYPE_CANONICAL (t)
8027       = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
8028 				       mode, false);
8029 
8030   layout_type (t);
8031 
8032   return t;
8033 }
8034 
8035 
8036 /* Build the node for the type of references-to-TO_TYPE by default
8037    in ptr_mode.  */
8038 
8039 tree
8040 build_reference_type (tree to_type)
8041 {
8042   addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
8043 					      : TYPE_ADDR_SPACE (to_type);
8044   machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
8045   return build_reference_type_for_mode (to_type, pointer_mode, false);
8046 }
8047 
8048 #define MAX_INT_CACHED_PREC \
8049   (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8050 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
8051 
8052 /* Builds a signed or unsigned integer type of precision PRECISION.
8053    Used for C bitfields whose precision does not match that of
8054    built-in target types.  */
8055 tree
8056 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
8057 				int unsignedp)
8058 {
8059   tree itype, ret;
8060 
8061   if (unsignedp)
8062     unsignedp = MAX_INT_CACHED_PREC + 1;
8063 
8064   if (precision <= MAX_INT_CACHED_PREC)
8065     {
8066       itype = nonstandard_integer_type_cache[precision + unsignedp];
8067       if (itype)
8068 	return itype;
8069     }
8070 
8071   itype = make_node (INTEGER_TYPE);
8072   TYPE_PRECISION (itype) = precision;
8073 
8074   if (unsignedp)
8075     fixup_unsigned_type (itype);
8076   else
8077     fixup_signed_type (itype);
8078 
8079   ret = itype;
8080   if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
8081     ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
8082   if (precision <= MAX_INT_CACHED_PREC)
8083     nonstandard_integer_type_cache[precision + unsignedp] = ret;
8084 
8085   return ret;
8086 }
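
/* For example, a 24-bit unsigned bit-field type can be obtained with

     tree uint24_type = build_nonstandard_integer_type (24, 1);

   where uint24_type is just an illustrative name.  Precision 24 is below
   MAX_INT_CACHED_PREC, so the result is cached and later requests for the
   same precision and signedness return the same node.  */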
8087 
8088 #define MAX_BOOL_CACHED_PREC \
8089   (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8090 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
8091 
8092 /* Builds a boolean type of precision PRECISION.
8093    Used for boolean vectors to choose proper vector element size.  */
8094 tree
8095 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
8096 {
8097   tree type;
8098 
8099   if (precision <= MAX_BOOL_CACHED_PREC)
8100     {
8101       type = nonstandard_boolean_type_cache[precision];
8102       if (type)
8103 	return type;
8104     }
8105 
8106   type = make_node (BOOLEAN_TYPE);
8107   TYPE_PRECISION (type) = precision;
8108   fixup_signed_type (type);
8109 
8110   if (precision <= MAX_BOOL_CACHED_PREC)
8111     nonstandard_boolean_type_cache[precision] = type;
8112 
8113   return type;
8114 }
8115 
8116 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
8117    or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL.  If SHARED
8118    is true, reuse such a type that has already been constructed.  */
8119 
8120 static tree
8121 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
8122 {
8123   tree itype = make_node (INTEGER_TYPE);
8124   inchash::hash hstate;
8125 
8126   TREE_TYPE (itype) = type;
8127 
8128   TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
8129   TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
8130 
8131   TYPE_PRECISION (itype) = TYPE_PRECISION (type);
8132   SET_TYPE_MODE (itype, TYPE_MODE (type));
8133   TYPE_SIZE (itype) = TYPE_SIZE (type);
8134   TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
8135   TYPE_ALIGN (itype) = TYPE_ALIGN (type);
8136   TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
8137 
8138   if (!shared)
8139     return itype;
8140 
8141   if ((TYPE_MIN_VALUE (itype)
8142        && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
8143       || (TYPE_MAX_VALUE (itype)
8144 	  && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
8145     {
8146       /* Since we cannot reliably merge this type, we need to compare it using
8147 	 structural equality checks.  */
8148       SET_TYPE_STRUCTURAL_EQUALITY (itype);
8149       return itype;
8150     }
8151 
8152   inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
8153   inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
8154   hstate.merge_hash (TYPE_HASH (type));
8155   itype = type_hash_canon (hstate.end (), itype);
8156 
8157   return itype;
8158 }
8159 
8160 /* Wrapper around build_range_type_1 with SHARED set to true.  */
8161 
8162 tree
8163 build_range_type (tree type, tree lowval, tree highval)
8164 {
8165   return build_range_type_1 (type, lowval, highval, true);
8166 }
8167 
8168 /* Wrapper around build_range_type_1 with SHARED set to false.  */
8169 
8170 tree
8171 build_nonshared_range_type (tree type, tree lowval, tree highval)
8172 {
8173   return build_range_type_1 (type, lowval, highval, false);
8174 }
8175 
8176 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
8177    MAXVAL should be the maximum value in the domain
8178    (one less than the length of the array).
8179 
8180    The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
8181    We don't enforce this limit; that is up to the caller (e.g. the language front end).
8182    The limit exists because the result is a signed type and we don't handle
8183    sizes that use more than one HOST_WIDE_INT.  */
8184 
8185 tree
8186 build_index_type (tree maxval)
8187 {
8188   return build_range_type (sizetype, size_zero_node, maxval);
8189 }
8190 
8191 /* Return true if the debug information for TYPE, a subtype, should be emitted
8192    as a subrange type.  If so, set LOWVAL to the low bound and HIGHVAL to the
8193    high bound, respectively.  Sometimes doing so unnecessarily obfuscates the
8194    debug info and doesn't reflect the source code.  */
8195 
8196 bool
8197 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
8198 {
8199   tree base_type = TREE_TYPE (type), low, high;
8200 
8201   /* Subrange types have a base type which is an integral type.  */
8202   if (!INTEGRAL_TYPE_P (base_type))
8203     return false;
8204 
8205   /* Get the real bounds of the subtype.  */
8206   if (lang_hooks.types.get_subrange_bounds)
8207     lang_hooks.types.get_subrange_bounds (type, &low, &high);
8208   else
8209     {
8210       low = TYPE_MIN_VALUE (type);
8211       high = TYPE_MAX_VALUE (type);
8212     }
8213 
8214   /* If the type and its base type have the same representation and the same
8215      name, then the type is not a subrange but a copy of the base type.  */
8216   if ((TREE_CODE (base_type) == INTEGER_TYPE
8217        || TREE_CODE (base_type) == BOOLEAN_TYPE)
8218       && int_size_in_bytes (type) == int_size_in_bytes (base_type)
8219       && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
8220       && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
8221       && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
8222     return false;
8223 
8224   if (lowval)
8225     *lowval = low;
8226   if (highval)
8227     *highval = high;
8228   return true;
8229 }
8230 
8231 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8232    and number of elements specified by the range of values of INDEX_TYPE.
8233    If SHARED is true, reuse such a type that has already been constructed.  */
8234 
8235 static tree
8236 build_array_type_1 (tree elt_type, tree index_type, bool shared)
8237 {
8238   tree t;
8239 
8240   if (TREE_CODE (elt_type) == FUNCTION_TYPE)
8241     {
8242       error ("arrays of functions are not meaningful");
8243       elt_type = integer_type_node;
8244     }
8245 
8246   t = make_node (ARRAY_TYPE);
8247   TREE_TYPE (t) = elt_type;
8248   TYPE_DOMAIN (t) = index_type;
8249   TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
8250   layout_type (t);
8251 
8252   /* If the element type is incomplete at this point we get marked for
8253      structural equality.  Do not record these types in the canonical
8254      type hashtable.  */
8255   if (TYPE_STRUCTURAL_EQUALITY_P (t))
8256     return t;
8257 
8258   if (shared)
8259     {
8260       inchash::hash hstate;
8261       hstate.add_object (TYPE_HASH (elt_type));
8262       if (index_type)
8263 	hstate.add_object (TYPE_HASH (index_type));
8264       t = type_hash_canon (hstate.end (), t);
8265     }
8266 
8267   if (TYPE_CANONICAL (t) == t)
8268     {
8269       if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
8270 	  || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
8271 	  || in_lto_p)
8272 	SET_TYPE_STRUCTURAL_EQUALITY (t);
8273       else if (TYPE_CANONICAL (elt_type) != elt_type
8274 	       || (index_type && TYPE_CANONICAL (index_type) != index_type))
8275 	TYPE_CANONICAL (t)
8276 	  = build_array_type_1 (TYPE_CANONICAL (elt_type),
8277 				index_type
8278 				? TYPE_CANONICAL (index_type) : NULL_TREE,
8279 				shared);
8280     }
8281 
8282   return t;
8283 }
8284 
8285 /* Wrapper around build_array_type_1 with SHARED set to true.  */
8286 
8287 tree
8288 build_array_type (tree elt_type, tree index_type)
8289 {
8290   return build_array_type_1 (elt_type, index_type, true);
8291 }
8292 
8293 /* Wrapper around build_array_type_1 with SHARED set to false.  */
8294 
8295 tree
8296 build_nonshared_array_type (tree elt_type, tree index_type)
8297 {
8298   return build_array_type_1 (elt_type, index_type, false);
8299 }
8300 
8301 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8302    sizetype.  */
8303 
8304 tree
8305 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
8306 {
8307   return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8308 }
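
/* E.g. the type "unsigned char[16]" can be requested either way:

     tree a = build_array_type_nelts (unsigned_char_type_node, 16);
     tree b = build_array_type (unsigned_char_type_node,
				build_index_type (size_int (15)));

   Both calls should yield the same node, since each goes through
   type_hash_canon with an identical element and domain type.  */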
8309 
8310 /* Recursively examines the array elements of TYPE, until a non-array
8311    element type is found.  */
8312 
8313 tree
8314 strip_array_types (tree type)
8315 {
8316   while (TREE_CODE (type) == ARRAY_TYPE)
8317     type = TREE_TYPE (type);
8318 
8319   return type;
8320 }
8321 
8322 /* Computes the canonical argument types from the argument type list
8323    ARGTYPES.
8324 
8325    Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8326    on entry to this function, or if any of the ARGTYPES are
8327    structural.
8328 
8329    Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8330    true on entry to this function, or if any of the ARGTYPES are
8331    non-canonical.
8332 
8333    Returns a canonical argument list, which may be ARGTYPES when the
8334    canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8335    true) or would not differ from ARGTYPES.  */
8336 
8337 static tree
8338 maybe_canonicalize_argtypes (tree argtypes,
8339 			     bool *any_structural_p,
8340 			     bool *any_noncanonical_p)
8341 {
8342   tree arg;
8343   bool any_noncanonical_argtypes_p = false;
8344 
8345   for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8346     {
8347       if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8348 	/* Fail gracefully by stating that the type is structural.  */
8349 	*any_structural_p = true;
8350       else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8351 	*any_structural_p = true;
8352       else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8353 	       || TREE_PURPOSE (arg))
8354 	/* If the argument has a default argument, we consider it
8355 	   non-canonical even though the type itself is canonical.
8356 	   That way, different variants of function and method types
8357 	   with default arguments will all point to the variant with
8358 	   no defaults as their canonical type.  */
8359         any_noncanonical_argtypes_p = true;
8360     }
8361 
8362   if (*any_structural_p)
8363     return argtypes;
8364 
8365   if (any_noncanonical_argtypes_p)
8366     {
8367       /* Build the canonical list of argument types.  */
8368       tree canon_argtypes = NULL_TREE;
8369       bool is_void = false;
8370 
8371       for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8372         {
8373           if (arg == void_list_node)
8374             is_void = true;
8375           else
8376             canon_argtypes = tree_cons (NULL_TREE,
8377                                         TYPE_CANONICAL (TREE_VALUE (arg)),
8378                                         canon_argtypes);
8379         }
8380 
8381       canon_argtypes = nreverse (canon_argtypes);
8382       if (is_void)
8383         canon_argtypes = chainon (canon_argtypes, void_list_node);
8384 
8385       /* There is a non-canonical type.  */
8386       *any_noncanonical_p = true;
8387       return canon_argtypes;
8388     }
8389 
8390   /* The canonical argument types are the same as ARGTYPES.  */
8391   return argtypes;
8392 }
8393 
8394 /* Construct, lay out and return
8395    the type of functions returning type VALUE_TYPE
8396    given arguments of types ARG_TYPES.
8397    ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8398    are data type nodes for the arguments of the function.
8399    If such a type has already been constructed, reuse it.  */
8400 
8401 tree
8402 build_function_type (tree value_type, tree arg_types)
8403 {
8404   tree t;
8405   inchash::hash hstate;
8406   bool any_structural_p, any_noncanonical_p;
8407   tree canon_argtypes;
8408 
8409   if (TREE_CODE (value_type) == FUNCTION_TYPE)
8410     {
8411       error ("function return type cannot be function");
8412       value_type = integer_type_node;
8413     }
8414 
8415   /* Make a node of the sort we want.  */
8416   t = make_node (FUNCTION_TYPE);
8417   TREE_TYPE (t) = value_type;
8418   TYPE_ARG_TYPES (t) = arg_types;
8419 
8420   /* If we already have such a type, use the old one.  */
8421   hstate.add_object (TYPE_HASH (value_type));
8422   type_hash_list (arg_types, hstate);
8423   t = type_hash_canon (hstate.end (), t);
8424 
8425   /* Set up the canonical type. */
8426   any_structural_p   = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8427   any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8428   canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8429 						&any_structural_p,
8430 						&any_noncanonical_p);
8431   if (any_structural_p)
8432     SET_TYPE_STRUCTURAL_EQUALITY (t);
8433   else if (any_noncanonical_p)
8434     TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8435 					      canon_argtypes);
8436 
8437   if (!COMPLETE_TYPE_P (t))
8438     layout_type (t);
8439   return t;
8440 }
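
/* The simplest client is a prototype with no arguments: the type of
   "int f (void)" is

     tree fntype = build_function_type (integer_type_node, void_list_node);

   where the void_list_node terminator distinguishes it from the
   unprototyped "int f ()", whose TYPE_ARG_TYPES is NULL_TREE.  */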
8441 
8442 /* Build a function type.  The RETURN_TYPE is the type returned by the
8443    function.  If VAARGS is set, no void_type_node is appended to the
8444    list.  ARGP must always be terminated by a NULL_TREE.  */
8445 
8446 static tree
8447 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8448 {
8449   tree t, args, last;
8450 
8451   t = va_arg (argp, tree);
8452   for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8453     args = tree_cons (NULL_TREE, t, args);
8454 
8455   if (vaargs)
8456     {
8457       last = args;
8458       if (args != NULL_TREE)
8459 	args = nreverse (args);
8460       gcc_assert (last != void_list_node);
8461     }
8462   else if (args == NULL_TREE)
8463     args = void_list_node;
8464   else
8465     {
8466       last = args;
8467       args = nreverse (args);
8468       TREE_CHAIN (last) = void_list_node;
8469     }
8470   args = build_function_type (return_type, args);
8471 
8472   return args;
8473 }
8474 
8475 /* Build a function type.  The RETURN_TYPE is the type returned by the
8476    function.  If additional arguments are provided, they are
8477    additional argument types.  The list of argument types must always
8478    be terminated by NULL_TREE.  */
8479 
8480 tree
8481 build_function_type_list (tree return_type, ...)
8482 {
8483   tree args;
8484   va_list p;
8485 
8486   va_start (p, return_type);
8487   args = build_function_type_list_1 (false, return_type, p);
8488   va_end (p);
8489   return args;
8490 }
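
/* As a sketch, the type of "double hypot (double, double)" would be built
   with

     tree fntype = build_function_type_list (double_type_node,
					     double_type_node,
					     double_type_node, NULL_TREE);

   The NULL_TREE sentinel ends the argument list, and because this is the
   non-varargs entry point a trailing void_list_node is appended
   automatically by build_function_type_list_1.  */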
8491 
8492 /* Build a variable argument function type.  The RETURN_TYPE is the
8493    type returned by the function.  If additional arguments are provided,
8494    they are additional argument types.  The list of argument types must
8495    always be terminated by NULL_TREE.  */
8496 
8497 tree
8498 build_varargs_function_type_list (tree return_type, ...)
8499 {
8500   tree args;
8501   va_list p;
8502 
8503   va_start (p, return_type);
8504   args = build_function_type_list_1 (true, return_type, p);
8505   va_end (p);
8506 
8507   return args;
8508 }
8509 
8510 /* Build a function type.  RETURN_TYPE is the type returned by the
8511    function; VAARGS indicates whether the function takes varargs.  The
8512    function takes N named arguments, the types of which are provided in
8513    ARG_TYPES.  */
8514 
8515 static tree
8516 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8517 			     tree *arg_types)
8518 {
8519   int i;
8520   tree t = vaargs ? NULL_TREE : void_list_node;
8521 
8522   for (i = n - 1; i >= 0; i--)
8523     t = tree_cons (NULL_TREE, arg_types[i], t);
8524 
8525   return build_function_type (return_type, t);
8526 }
8527 
8528 /* Build a function type.  RETURN_TYPE is the type returned by the
8529    function.  The function takes N named arguments, the types of which
8530    are provided in ARG_TYPES.  */
8531 
8532 tree
8533 build_function_type_array (tree return_type, int n, tree *arg_types)
8534 {
8535   return build_function_type_array_1 (false, return_type, n, arg_types);
8536 }
8537 
8538 /* Build a variable argument function type.  RETURN_TYPE is the type
8539    returned by the function.  The function takes N named arguments, the
8540    types of which are provided in ARG_TYPES.  */
8541 
8542 tree
8543 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8544 {
8545   return build_function_type_array_1 (true, return_type, n, arg_types);
8546 }
8547 
8548 /* Build a METHOD_TYPE for a member of BASETYPE.  The RETTYPE (a TYPE)
8549    and ARGTYPES (a TREE_LIST) are the return type and arguments types
8550    and ARGTYPES (a TREE_LIST) are the return type and argument types
8551    pointer-to-BASETYPE) is added to the ARGTYPES.  */
8552 
8553 tree
8554 build_method_type_directly (tree basetype,
8555 			    tree rettype,
8556 			    tree argtypes)
8557 {
8558   tree t;
8559   tree ptype;
8560   inchash::hash hstate;
8561   bool any_structural_p, any_noncanonical_p;
8562   tree canon_argtypes;
8563 
8564   /* Make a node of the sort we want.  */
8565   t = make_node (METHOD_TYPE);
8566 
8567   TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8568   TREE_TYPE (t) = rettype;
8569   ptype = build_pointer_type (basetype);
8570 
8571   /* The actual arglist for this function includes a "hidden" argument
8572      which is "this".  Put it into the list of argument types.  */
8573   argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8574   TYPE_ARG_TYPES (t) = argtypes;
8575 
8576   /* If we already have such a type, use the old one.  */
8577   hstate.add_object (TYPE_HASH (basetype));
8578   hstate.add_object (TYPE_HASH (rettype));
8579   type_hash_list (argtypes, hstate);
8580   t = type_hash_canon (hstate.end (), t);
8581 
8582   /* Set up the canonical type. */
8583   any_structural_p
8584     = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8585        || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8586   any_noncanonical_p
8587     = (TYPE_CANONICAL (basetype) != basetype
8588        || TYPE_CANONICAL (rettype) != rettype);
8589   canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8590 						&any_structural_p,
8591 						&any_noncanonical_p);
8592   if (any_structural_p)
8593     SET_TYPE_STRUCTURAL_EQUALITY (t);
8594   else if (any_noncanonical_p)
8595     TYPE_CANONICAL (t)
8596       = build_method_type_directly (TYPE_CANONICAL (basetype),
8597 				    TYPE_CANONICAL (rettype),
8598 				    canon_argtypes);
8599   if (!COMPLETE_TYPE_P (t))
8600     layout_type (t);
8601 
8602   return t;
8603 }
8604 
8605 /* Construct, lay out and return the type of methods belonging to class
8606    BASETYPE and whose arguments and values are described by TYPE.
8607    If that type exists already, reuse it.
8608    TYPE must be a FUNCTION_TYPE node.  */
8609 
8610 tree
8611 build_method_type (tree basetype, tree type)
8612 {
8613   gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8614 
8615   return build_method_type_directly (basetype,
8616 				     TREE_TYPE (type),
8617 				     TYPE_ARG_TYPES (type));
8618 }
8619 
8620 /* Construct, lay out and return the type of offsets to a value
8621    of type TYPE, within an object of type BASETYPE.
8622    If a suitable offset type exists already, reuse it.  */
8623 
8624 tree
8625 build_offset_type (tree basetype, tree type)
8626 {
8627   tree t;
8628   inchash::hash hstate;
8629 
8630   /* Make a node of the sort we want.  */
8631   t = make_node (OFFSET_TYPE);
8632 
8633   TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8634   TREE_TYPE (t) = type;
8635 
8636   /* If we already have such a type, use the old one.  */
8637   hstate.add_object (TYPE_HASH (basetype));
8638   hstate.add_object (TYPE_HASH (type));
8639   t = type_hash_canon (hstate.end (), t);
8640 
8641   if (!COMPLETE_TYPE_P (t))
8642     layout_type (t);
8643 
8644   if (TYPE_CANONICAL (t) == t)
8645     {
8646       if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8647 	  || TYPE_STRUCTURAL_EQUALITY_P (type))
8648 	SET_TYPE_STRUCTURAL_EQUALITY (t);
8649       else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8650 	       || TYPE_CANONICAL (type) != type)
8651 	TYPE_CANONICAL (t)
8652 	  = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8653 			       TYPE_CANONICAL (type));
8654     }
8655 
8656   return t;
8657 }
8658 
8659 /* Create a complex type whose components are COMPONENT_TYPE.
8660 
8661    If NAMED is true, the type is given a TYPE_NAME.  We do not always
8662    do so because this creates a DECL node and thus makes the DECL_UIDs
8663    dependent on the type canonicalization hashtable, which is GC-ed,
8664    so the DECL_UIDs would not be stable wrt garbage collection.  */
8665 
8666 tree
8667 build_complex_type (tree component_type, bool named)
8668 {
8669   tree t;
8670   inchash::hash hstate;
8671 
8672   gcc_assert (INTEGRAL_TYPE_P (component_type)
8673 	      || SCALAR_FLOAT_TYPE_P (component_type)
8674 	      || FIXED_POINT_TYPE_P (component_type));
8675 
8676   /* Make a node of the sort we want.  */
8677   t = make_node (COMPLEX_TYPE);
8678 
8679   TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8680 
8681   /* If we already have such a type, use the old one.  */
8682   hstate.add_object (TYPE_HASH (component_type));
8683   t = type_hash_canon (hstate.end (), t);
8684 
8685   if (!COMPLETE_TYPE_P (t))
8686     layout_type (t);
8687 
8688   if (TYPE_CANONICAL (t) == t)
8689     {
8690       if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8691 	SET_TYPE_STRUCTURAL_EQUALITY (t);
8692       else if (TYPE_CANONICAL (component_type) != component_type)
8693 	TYPE_CANONICAL (t)
8694 	  = build_complex_type (TYPE_CANONICAL (component_type), named);
8695     }
8696 
8697   /* We need to create a name, since complex is a fundamental type.  */
8698   if (!TYPE_NAME (t) && named)
8699     {
8700       const char *name;
8701       if (component_type == char_type_node)
8702 	name = "complex char";
8703       else if (component_type == signed_char_type_node)
8704 	name = "complex signed char";
8705       else if (component_type == unsigned_char_type_node)
8706 	name = "complex unsigned char";
8707       else if (component_type == short_integer_type_node)
8708 	name = "complex short int";
8709       else if (component_type == short_unsigned_type_node)
8710 	name = "complex short unsigned int";
8711       else if (component_type == integer_type_node)
8712 	name = "complex int";
8713       else if (component_type == unsigned_type_node)
8714 	name = "complex unsigned int";
8715       else if (component_type == long_integer_type_node)
8716 	name = "complex long int";
8717       else if (component_type == long_unsigned_type_node)
8718 	name = "complex long unsigned int";
8719       else if (component_type == long_long_integer_type_node)
8720 	name = "complex long long int";
8721       else if (component_type == long_long_unsigned_type_node)
8722 	name = "complex long long unsigned int";
8723       else
8724 	name = 0;
8725 
8726       if (name != 0)
8727 	TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8728 	    			    get_identifier (name), t);
8729     }
8730 
8731   return build_qualified_type (t, TYPE_QUALS (component_type));
8732 }
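
/* E.g. the canonical node for "complex int" can be requested with

     tree cint = build_complex_type (integer_type_node, true);

   The component type is hashed, the new COMPLEX_TYPE goes through
   type_hash_canon, and because NAMED is true it also receives the
   "complex int" TYPE_DECL from the table above.  */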
8733 
8734 /* If TYPE is a real or complex floating-point type and the target
8735    does not directly support arithmetic on TYPE then return the wider
8736    type to be used for arithmetic on TYPE.  Otherwise, return
8737    NULL_TREE.  */
8738 
8739 tree
8740 excess_precision_type (tree type)
8741 {
8742   if (flag_excess_precision != EXCESS_PRECISION_FAST)
8743     {
8744       int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8745       switch (TREE_CODE (type))
8746 	{
8747 	case REAL_TYPE:
8748 	  switch (flt_eval_method)
8749 	    {
8750 	    case 1:
8751 	      if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8752 		return double_type_node;
8753 	      break;
8754 	    case 2:
8755 	      if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8756 		  || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8757 		return long_double_type_node;
8758 	      break;
8759 	    default:
8760 	      gcc_unreachable ();
8761 	    }
8762 	  break;
8763 	case COMPLEX_TYPE:
8764 	  if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8765 	    return NULL_TREE;
8766 	  switch (flt_eval_method)
8767 	    {
8768 	    case 1:
8769 	      if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8770 		return complex_double_type_node;
8771 	      break;
8772 	    case 2:
8773 	      if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8774 		  || (TYPE_MODE (TREE_TYPE (type))
8775 		      == TYPE_MODE (double_type_node)))
8776 		return complex_long_double_type_node;
8777 	      break;
8778 	    default:
8779 	      gcc_unreachable ();
8780 	    }
8781 	  break;
8782 	default:
8783 	  break;
8784 	}
8785     }
8786   return NULL_TREE;
8787 }
8788 
8789 /* Return OP, stripped of any conversions to wider types as much as is safe.
8790    Converting the value back to OP's type makes a value equivalent to OP.
8791 
8792    If FOR_TYPE is nonzero, we return a value which, if converted to
8793    type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8794 
8795    OP must have integer, real or enumeral type.  Pointers are not allowed!
8796 
8797    There are some cases where the obvious value we could return
8798    would regenerate to OP if converted to OP's type,
8799    but would not extend like OP to wider types.
8800    If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8801    For example, if OP is (unsigned short)(signed char)-1,
8802    we avoid returning (signed char)-1 if FOR_TYPE is int,
8803    even though extending that to an unsigned short would regenerate OP,
8804    since the result of extending (signed char)-1 to (int)
8805    is different from (int) OP.  */
8806 
8807 tree
8808 get_unwidened (tree op, tree for_type)
8809 {
8810   /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension.  */
8811   tree type = TREE_TYPE (op);
8812   unsigned final_prec
8813     = TYPE_PRECISION (for_type != 0 ? for_type : type);
8814   int uns
8815     = (for_type != 0 && for_type != type
8816        && final_prec > TYPE_PRECISION (type)
8817        && TYPE_UNSIGNED (type));
8818   tree win = op;
8819 
8820   while (CONVERT_EXPR_P (op))
8821     {
8822       int bitschange;
8823 
8824       /* TYPE_PRECISION on vector types has a different meaning
8825 	 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8826 	 so avoid them here.  */
8827       if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8828 	break;
8829 
8830       bitschange = TYPE_PRECISION (TREE_TYPE (op))
8831 		   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8832 
8833       /* Truncations are many-one so cannot be removed,
8834 	 unless we are later going to truncate down even further.  */
8835       if (bitschange < 0
8836 	  && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8837 	break;
8838 
8839       /* See what's inside this conversion.  If we decide to strip it,
8840 	 we will set WIN.  */
8841       op = TREE_OPERAND (op, 0);
8842 
8843       /* If we have not stripped any zero-extensions (uns is 0),
8844 	 we can strip any kind of extension.
8845 	 If we have previously stripped a zero-extension,
8846 	 only zero-extensions can safely be stripped.
8847 	 Any extension can be stripped if the bits it would produce
8848 	 are all going to be discarded later by truncating to FOR_TYPE.  */
8849 
8850       if (bitschange > 0)
8851 	{
8852 	  if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8853 	    win = op;
8854 	  /* TYPE_UNSIGNED says whether this is a zero-extension.
8855 	     Let's avoid computing it if it does not affect WIN
8856 	     and if UNS will not be needed again.  */
8857 	  if ((uns
8858 	       || CONVERT_EXPR_P (op))
8859 	      && TYPE_UNSIGNED (TREE_TYPE (op)))
8860 	    {
8861 	      uns = 1;
8862 	      win = op;
8863 	    }
8864 	}
8865     }
8866 
8867   /* If we finally reach a constant, see if it fits in something smaller
8868      and in that case convert it.  */
8869   if (TREE_CODE (win) == INTEGER_CST)
8870     {
8871       tree wtype = TREE_TYPE (win);
8872       unsigned prec = wi::min_precision (win, TYPE_SIGN (wtype));
8873       if (for_type)
8874 	prec = MAX (prec, final_prec);
8875       if (prec < TYPE_PRECISION (wtype))
8876 	{
8877 	  tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
8878 	  if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
8879 	    win = fold_convert (t, win);
8880 	}
8881     }
8882 
8883   return win;
8884 }
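
/* Usage sketch (OP is hypothetical): if OP is (int) x for an unsigned
   short variable X, then

     tree narrow = get_unwidened (op, NULL_TREE);

   returns X itself, since converting X back to int regenerates OP.  */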
8885 
8886 /* Return OP or a simpler expression for a narrower value
8887    which can be sign-extended or zero-extended to give back OP.
8888    Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8889    or 0 if the value should be sign-extended.  */
8890 
8891 tree
8892 get_narrower (tree op, int *unsignedp_ptr)
8893 {
8894   int uns = 0;
8895   int first = 1;
8896   tree win = op;
8897   bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8898 
8899   while (TREE_CODE (op) == NOP_EXPR)
8900     {
8901       int bitschange
8902 	= (TYPE_PRECISION (TREE_TYPE (op))
8903 	   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8904 
8905       /* Truncations are many-one so cannot be removed.  */
8906       if (bitschange < 0)
8907 	break;
8908 
8909       /* See what's inside this conversion.  If we decide to strip it,
8910 	 we will set WIN.  */
8911 
8912       if (bitschange > 0)
8913 	{
8914 	  op = TREE_OPERAND (op, 0);
8915 	  /* An extension: the outermost one can be stripped,
8916 	     but remember whether it is zero or sign extension.  */
8917 	  if (first)
8918 	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
8919 	  /* Otherwise, if a sign extension has been stripped,
8920 	     only sign extensions can now be stripped;
8921 	     if a zero extension has been stripped, only zero-extensions.  */
8922 	  else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8923 	    break;
8924 	  first = 0;
8925 	}
8926       else /* bitschange == 0 */
8927 	{
8928 	  /* A change in nominal type can always be stripped, but we must
8929 	     preserve the unsignedness.  */
8930 	  if (first)
8931 	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
8932 	  first = 0;
8933 	  op = TREE_OPERAND (op, 0);
8934 	  /* Keep trying to narrow, but don't assign op to win if it
8935 	     would turn an integral type into something else.  */
8936 	  if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8937 	    continue;
8938 	}
8939 
8940       win = op;
8941     }
8942 
8943   if (TREE_CODE (op) == COMPONENT_REF
8944       /* Since type_for_size always gives an integer type.  */
8945       && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8946       && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8947       /* Ensure field is laid out already.  */
8948       && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8949       && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8950     {
8951       unsigned HOST_WIDE_INT innerprec
8952 	= tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8953       int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8954 		       || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8955       tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8956 
8957       /* We can get this structure field in a narrower type that fits it,
8958 	 but the resulting extension to its nominal type (a fullword type)
8959 	 must satisfy the same conditions as for other extensions.
8960 
8961 	 Do this only for fields that are aligned (not bit-fields),
8962 	 because when bit-field insns are used there is no
8963 	 advantage in doing this.  */
8964 
8965       if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8966 	  && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8967 	  && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8968 	  && type != 0)
8969 	{
8970 	  if (first)
8971 	    uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8972 	  win = fold_convert (type, op);
8973 	}
8974     }
8975 
8976   *unsignedp_ptr = uns;
8977   return win;
8978 }
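
/* Usage sketch (OP is hypothetical): if OP is (int) u for an unsigned
   short U, then

     int unsignedp;
     tree narrow = get_narrower (op, &unsignedp);

   yields NARROW == U and UNSIGNEDP == 1, i.e. zero-extending NARROW
   regenerates OP.  */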
8979 
8980 /* Return true if integer constant C has a value that is permissible
8981    for TYPE, an integral type.  */
8982 
8983 bool
8984 int_fits_type_p (const_tree c, const_tree type)
8985 {
8986   tree type_low_bound, type_high_bound;
8987   bool ok_for_low_bound, ok_for_high_bound;
8988   signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8989 
8990   /* Non-standard boolean types can have arbitrary precision but various
8991      transformations assume that they can only take values 0 and +/-1.  */
8992   if (TREE_CODE (type) == BOOLEAN_TYPE)
8993     return wi::fits_to_boolean_p (c, type);
8994 
8995 retry:
8996   type_low_bound = TYPE_MIN_VALUE (type);
8997   type_high_bound = TYPE_MAX_VALUE (type);
8998 
8999   /* If at least one bound of the type is a constant integer, we can check
9000      ourselves and maybe make a decision. If no such decision is possible, but
9001      this type is a subtype, try checking against that.  Otherwise, use
9002      fits_to_tree_p, which checks against the precision.
9003 
9004      Compute the status for each possibly constant bound, and return if we see
9005      one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
9006      for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
9007      for "constant known to fit".  */
9008 
9009   /* Check if c >= type_low_bound.  */
9010   if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
9011     {
9012       if (tree_int_cst_lt (c, type_low_bound))
9013 	return false;
9014       ok_for_low_bound = true;
9015     }
9016   else
9017     ok_for_low_bound = false;
9018 
9019   /* Check if c <= type_high_bound.  */
9020   if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
9021     {
9022       if (tree_int_cst_lt (type_high_bound, c))
9023 	return false;
9024       ok_for_high_bound = true;
9025     }
9026   else
9027     ok_for_high_bound = false;
9028 
9029   /* If the constant fits both bounds, the result is known.  */
9030   if (ok_for_low_bound && ok_for_high_bound)
9031     return true;
9032 
9033   /* Perform some generic filtering which may allow making a decision
9034      even if the bounds are not constant.  First, negative integers
9035      never fit in unsigned types.  */
9036   if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
9037     return false;
9038 
9039   /* Second, narrower types always fit in wider ones.  */
9040   if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
9041     return true;
9042 
9043   /* Third, unsigned integers with top bit set never fit signed types.  */
9044   if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
9045     {
9046       int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
9047       if (prec < TYPE_PRECISION (TREE_TYPE (c)))
9048 	{
9049 	  /* When a tree_cst is converted to a wide-int, the precision
9050 	     is taken from the type.  However, if the precision of the
9051 	     mode underneath the type is smaller than that, it is
9052 	     possible that the value will not fit.  The test below
9053 	     fails if any bit is set between the sign bit of the
9054 	     underlying mode and the top bit of the type.  */
9055 	  if (wi::ne_p (wi::zext (c, prec - 1), c))
9056 	    return false;
9057 	}
9058       else if (wi::neg_p (c))
9059 	return false;
9060     }
9061 
9062   /* If we haven't been able to decide at this point, there is nothing more
9063      we can check ourselves here.  Look at the base type if we have one and it
9064      has the same precision.  */
9065   if (TREE_CODE (type) == INTEGER_TYPE
9066       && TREE_TYPE (type) != 0
9067       && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
9068     {
9069       type = TREE_TYPE (type);
9070       goto retry;
9071     }
9072 
9073   /* Or to fits_to_tree_p, if nothing else.  */
9074   return wi::fits_to_tree_p (c, type);
9075 }
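
/* Usage sketch: assuming the usual 8-bit unsigned char,

     tree c = build_int_cst (integer_type_node, 300);
     bool fits = int_fits_type_p (c, unsigned_char_type_node);

   leaves FITS false, since 300 exceeds the type's maximum of 255.  */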
9076 
9077 /* Stores bounds of an integer TYPE in MIN and MAX.  If TYPE has non-constant
9078    bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
9079    represented (assuming two's-complement arithmetic) within the bit
9080    precision of the type are returned instead.  */
9081 
9082 void
9083 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
9084 {
9085   if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
9086       && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
9087     wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
9088   else
9089     {
9090       if (TYPE_UNSIGNED (type))
9091 	mpz_set_ui (min, 0);
9092       else
9093 	{
9094 	  wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
9095 	  wi::to_mpz (mn, min, SIGNED);
9096 	}
9097     }
9098 
9099   if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
9100       && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
9101     wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
9102   else
9103     {
9104       wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
9105       wi::to_mpz (mn, max, TYPE_SIGN (type));
9106     }
9107 }
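
/* Usage sketch (MIN and MAX must be initialized mpz_t values):

     mpz_t lo, hi;
     mpz_init (lo);
     mpz_init (hi);
     get_type_static_bounds (unsigned_char_type_node, lo, hi);

   after which LO is typically 0 and HI 255; the caller later releases
   both with mpz_clear.  */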
9108 
9109 /* Return true if VAR is an automatic variable defined in function FN.  */
9110 
9111 bool
9112 auto_var_in_fn_p (const_tree var, const_tree fn)
9113 {
9114   return (DECL_P (var) && DECL_CONTEXT (var) == fn
9115 	  && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
9116 		|| TREE_CODE (var) == PARM_DECL)
9117 	       && ! TREE_STATIC (var))
9118 	      || TREE_CODE (var) == LABEL_DECL
9119 	      || TREE_CODE (var) == RESULT_DECL));
9120 }
9121 
9122 /* Subprogram of the following function.  Called by walk_tree.
9123 
9124    Return *TP if it is an automatic variable or parameter of the
9125    function passed in as DATA.  */
9126 
9127 static tree
9128 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
9129 {
9130   tree fn = (tree) data;
9131 
9132   if (TYPE_P (*tp))
9133     *walk_subtrees = 0;
9134 
9135   else if (DECL_P (*tp)
9136 	   && auto_var_in_fn_p (*tp, fn))
9137     return *tp;
9138 
9139   return NULL_TREE;
9140 }
9141 
9142 /* Returns true if T is, contains, or refers to a type with variable
9143    size.  For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
9144    arguments, but not the return type.  If FN is nonzero, only return
9145    true if a modifier of the type or position of FN is a variable or
9146    parameter inside FN.
9147 
9148    This concept is more general than that of C99 'variably modified types':
9149    in C99, a struct type is never variably modified because a VLA may not
9150    appear as a structure member.  However, in GNU C, code like:
9151 
9152      struct S { int i[f()]; };
9153 
9154    is valid, and other languages may define similar constructs.  */
9155 
9156 bool
9157 variably_modified_type_p (tree type, tree fn)
9158 {
9159   tree t;
9160 
9161 /* Test if T is either variable (if FN is zero) or an expression containing
9162    a variable in FN.  If TYPE isn't gimplified, return true also if
9163    gimplify_one_sizepos would gimplify the expression into a local
9164    variable.  */
9165 #define RETURN_TRUE_IF_VAR(T)						\
9166   do { tree _t = (T);							\
9167     if (_t != NULL_TREE							\
9168 	&& _t != error_mark_node					\
9169 	&& TREE_CODE (_t) != INTEGER_CST				\
9170 	&& TREE_CODE (_t) != PLACEHOLDER_EXPR				\
9171 	&& (!fn								\
9172 	    || (!TYPE_SIZES_GIMPLIFIED (type)				\
9173 		&& !is_gimple_sizepos (_t))				\
9174 	    || walk_tree (&_t, find_var_from_fn, fn, NULL)))		\
9175       return true;  } while (0)
9176 
9177   if (type == error_mark_node)
9178     return false;
9179 
9180   /* If TYPE itself has variable size, it is variably modified.  */
9181   RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
9182   RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
9183 
9184   switch (TREE_CODE (type))
9185     {
9186     case POINTER_TYPE:
9187     case REFERENCE_TYPE:
9188     case VECTOR_TYPE:
9189       if (variably_modified_type_p (TREE_TYPE (type), fn))
9190 	return true;
9191       break;
9192 
9193     case FUNCTION_TYPE:
9194     case METHOD_TYPE:
9195       /* If TYPE is a function type, it is variably modified if the
9196 	 return type is variably modified.  */
9197       if (variably_modified_type_p (TREE_TYPE (type), fn))
9198 	return true;
9199       break;
9200 
9201     case INTEGER_TYPE:
9202     case REAL_TYPE:
9203     case FIXED_POINT_TYPE:
9204     case ENUMERAL_TYPE:
9205     case BOOLEAN_TYPE:
9206       /* Scalar types are variably modified if their end points
9207 	 aren't constant.  */
9208       RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
9209       RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
9210       break;
9211 
9212     case RECORD_TYPE:
9213     case UNION_TYPE:
9214     case QUAL_UNION_TYPE:
9215       /* We can't see if any of the fields are variably-modified by the
9216 	 definition we normally use, since that would produce infinite
9217 	 recursion via pointers.  */
9218       /* This is variably modified if some field's type is.  */
9219       for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
9220 	if (TREE_CODE (t) == FIELD_DECL)
9221 	  {
9222 	    RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
9223 	    RETURN_TRUE_IF_VAR (DECL_SIZE (t));
9224 	    RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
9225 
9226 	    if (TREE_CODE (type) == QUAL_UNION_TYPE)
9227 	      RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
9228 	  }
9229       break;
9230 
9231     case ARRAY_TYPE:
9232       /* Do not call ourselves to avoid infinite recursion.  This is
9233 	 variably modified if the element type is.  */
9234       RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
9235       RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
9236       break;
9237 
9238     default:
9239       break;
9240     }
9241 
9242   /* The current language may have other cases to check, but in general,
9243      all other types are not variably modified.  */
9244   return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
9245 
9246 #undef RETURN_TRUE_IF_VAR
9247 }
9248 
9249 /* Given a DECL or TYPE, return the scope in which it was declared, or
9250    NULL_TREE if there is no containing scope.  */
9251 
9252 tree
9253 get_containing_scope (const_tree t)
9254 {
9255   return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9256 }
9257 
9258 /* Return the innermost context enclosing DECL that is
9259    a FUNCTION_DECL, or zero if none.  */
9260 
9261 tree
9262 decl_function_context (const_tree decl)
9263 {
9264   tree context;
9265 
9266   if (TREE_CODE (decl) == ERROR_MARK)
9267     return 0;
9268 
9269   /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9270      where we look up the function at runtime.  Such functions always take
9271      a first argument of type 'pointer to real context'.
9272 
9273      C++ should really be fixed to use DECL_CONTEXT for the real context,
9274      and use something else for the "virtual context".  */
9275   else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
9276     context
9277       = TYPE_MAIN_VARIANT
9278 	(TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9279   else
9280     context = DECL_CONTEXT (decl);
9281 
9282   while (context && TREE_CODE (context) != FUNCTION_DECL)
9283     {
9284       if (TREE_CODE (context) == BLOCK)
9285 	context = BLOCK_SUPERCONTEXT (context);
9286       else
9287 	context = get_containing_scope (context);
9288     }
9289 
9290   return context;
9291 }
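
/* For example (DECL is hypothetical): for a VAR_DECL declared in a
   BLOCK nested inside function foo, decl_function_context (decl)
   walks up through the enclosing BLOCKs and returns foo's
   FUNCTION_DECL.  */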
9292 
9293 /* Return the innermost context enclosing DECL that is
9294    a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9295    TYPE_DECLs and FUNCTION_DECLs are transparent to this function.  */
9296 
9297 tree
9298 decl_type_context (const_tree decl)
9299 {
9300   tree context = DECL_CONTEXT (decl);
9301 
9302   while (context)
9303     switch (TREE_CODE (context))
9304       {
9305       case NAMESPACE_DECL:
9306       case TRANSLATION_UNIT_DECL:
9307 	return NULL_TREE;
9308 
9309       case RECORD_TYPE:
9310       case UNION_TYPE:
9311       case QUAL_UNION_TYPE:
9312 	return context;
9313 
9314       case TYPE_DECL:
9315       case FUNCTION_DECL:
9316 	context = DECL_CONTEXT (context);
9317 	break;
9318 
9319       case BLOCK:
9320 	context = BLOCK_SUPERCONTEXT (context);
9321 	break;
9322 
9323       default:
9324 	gcc_unreachable ();
9325       }
9326 
9327   return NULL_TREE;
9328 }
9329 
9330 /* CALL is a CALL_EXPR.  Return the declaration for the function
9331    called, or NULL_TREE if the called function cannot be
9332    determined.  */
9333 
9334 tree
9335 get_callee_fndecl (const_tree call)
9336 {
9337   tree addr;
9338 
9339   if (call == error_mark_node)
9340     return error_mark_node;
9341 
9342   /* It's invalid to call this function with anything but a
9343      CALL_EXPR.  */
9344   gcc_assert (TREE_CODE (call) == CALL_EXPR);
9345 
9346   /* The first operand to the CALL is the address of the function
9347      called.  */
9348   addr = CALL_EXPR_FN (call);
9349 
9350   /* If there is no function, return early.  */
9351   if (addr == NULL_TREE)
9352     return NULL_TREE;
9353 
9354   STRIP_NOPS (addr);
9355 
9356   /* If this is a readonly function pointer, extract its initial value.  */
9357   if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9358       && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9359       && DECL_INITIAL (addr))
9360     addr = DECL_INITIAL (addr);
9361 
9362   /* If the address is just `&f' for some function `f', then we know
9363      that `f' is being called.  */
9364   if (TREE_CODE (addr) == ADDR_EXPR
9365       && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9366     return TREE_OPERAND (addr, 0);
9367 
9368   /* We couldn't figure out what was being called.  */
9369   return NULL_TREE;
9370 }
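
/* Usage sketch (CALL is a hypothetical CALL_EXPR): callers typically
   act only when the callee is statically known, e.g.

     tree fndecl = get_callee_fndecl (call);
     if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
       ... handle a direct call to a normal builtin ...

   get_call_combined_fn below follows this pattern.  */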
9371 
9372 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
9373    return the associated function code, otherwise return CFN_LAST.  */
9374 
9375 combined_fn
9376 get_call_combined_fn (const_tree call)
9377 {
9378   /* It's invalid to call this function with anything but a CALL_EXPR.  */
9379   gcc_assert (TREE_CODE (call) == CALL_EXPR);
9380 
9381   if (!CALL_EXPR_FN (call))
9382     return as_combined_fn (CALL_EXPR_IFN (call));
9383 
9384   tree fndecl = get_callee_fndecl (call);
9385   if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
9386     return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
9387 
9388   return CFN_LAST;
9389 }
9390 
9391 #define TREE_MEM_USAGE_SPACES 40
9392 
9393 /* Print debugging information about tree nodes generated during the compile,
9394    and any language-specific information.  */
9395 
9396 void
9397 dump_tree_statistics (void)
9398 {
9399   if (GATHER_STATISTICS)
9400     {
9401       int i;
9402       int total_nodes, total_bytes;
9403       fprintf (stderr, "\nKind                   Nodes      Bytes\n");
9404       mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9405       total_nodes = total_bytes = 0;
9406       for (i = 0; i < (int) all_kinds; i++)
9407 	{
9408 	  fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9409 		   tree_node_counts[i], tree_node_sizes[i]);
9410 	  total_nodes += tree_node_counts[i];
9411 	  total_bytes += tree_node_sizes[i];
9412 	}
9413       mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9414       fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9415       mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9416       fprintf (stderr, "Code                   Nodes\n");
9417       mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9418       for (i = 0; i < (int) MAX_TREE_CODES; i++)
9419 	fprintf (stderr, "%-32s %7d\n", get_tree_code_name ((enum tree_code) i),
9420                  tree_code_counts[i]);
9421       mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9422       fprintf (stderr, "\n");
9423       ssanames_print_statistics ();
9424       fprintf (stderr, "\n");
9425       phinodes_print_statistics ();
9426       fprintf (stderr, "\n");
9427     }
9428   else
9429     fprintf (stderr, "(No per-node statistics)\n");
9430 
9431   print_type_hash_statistics ();
9432   print_debug_expr_statistics ();
9433   print_value_expr_statistics ();
9434   lang_hooks.print_statistics ();
9435 }
9436 
9437 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9438 
9439 /* Fold the high-order BITS bits of VALUE into the crc32 checksum CHKSUM.  */
9440 
9441 static unsigned
9442 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9443 {
9444   unsigned ix;
9445 
9446   for (ix = bits; ix--; value <<= 1)
9447     {
9448       unsigned feedback;
9449 
9450       feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9451       chksum <<= 1;
9452       chksum ^= feedback;
9453     }
9454   return chksum;
9455 }
9456 
9457 /* Generate a crc32 of a 32-bit unsigned.  */
9458 
9459 unsigned
9460 crc32_unsigned (unsigned chksum, unsigned value)
9461 {
9462   return crc32_unsigned_bits (chksum, value, 32);
9463 }
9464 
9465 /* Generate a crc32 of a byte.  */
9466 
9467 unsigned
9468 crc32_byte (unsigned chksum, char byte)
9469 {
9470   return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9471 }
9472 
9473 /* Generate a crc32 of a string.  */
9474 
9475 unsigned
9476 crc32_string (unsigned chksum, const char *string)
9477 {
9478   do
9479     {
9480       chksum = crc32_byte (chksum, *string);
9481     }
9482   while (*string++);
9483   return chksum;
9484 }
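
/* Usage sketch: the crc32 helpers can be chained over several inputs,
   e.g.

     unsigned chk = crc32_string (0, "foo");
     chk = crc32_unsigned (chk, 42);

   get_file_function_name below uses crc32_string this way when
   building unique symbol names.  */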
9485 
9486 /* P is a string that will be used in a symbol.  Mask out any characters
9487    that are not valid in that context.  */
9488 
9489 void
9490 clean_symbol_name (char *p)
9491 {
9492   for (; *p; p++)
9493     if (! (ISALNUM (*p)
9494 #ifndef NO_DOLLAR_IN_LABEL	/* this for `$'; unlikely, but... -- kr */
9495 	    || *p == '$'
9496 #endif
9497 #ifndef NO_DOT_IN_LABEL		/* this for `.'; unlikely, but...  */
9498 	    || *p == '.'
9499 #endif
9500 	   ))
9501       *p = '_';
9502 }
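
/* Usage sketch (BUF is a hypothetical writable copy):

     char *buf = ASTRDUP ("foo-bar.c");
     clean_symbol_name (buf);

   after which BUF reads "foo_bar_c" (or "foo_bar.c" on targets that
   allow '.' in labels).  */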
9503 
9504 /* For anonymous aggregate types, we need some sort of name to
9505    hold on to.  In practice, this should not appear, but it should
9506    not be harmful if it does.  */
9507 bool
9508 anon_aggrname_p (const_tree id_node)
9509 {
9510 #ifndef NO_DOT_IN_LABEL
9511  return (IDENTIFIER_POINTER (id_node)[0] == '.'
9512 	 && IDENTIFIER_POINTER (id_node)[1] == '_');
9513 #else /* NO_DOT_IN_LABEL */
9514 #ifndef NO_DOLLAR_IN_LABEL
9515   return (IDENTIFIER_POINTER (id_node)[0] == '$'
9516 	  && IDENTIFIER_POINTER (id_node)[1] == '_');
9517 #else /* NO_DOLLAR_IN_LABEL */
9518 #define ANON_AGGRNAME_PREFIX "__anon_"
9519   return (!strncmp (IDENTIFIER_POINTER (id_node), ANON_AGGRNAME_PREFIX,
9520 		    sizeof (ANON_AGGRNAME_PREFIX) - 1));
9521 #endif	/* NO_DOLLAR_IN_LABEL */
9522 #endif	/* NO_DOT_IN_LABEL */
9523 }
9524 
9525 /* Return a format for an anonymous aggregate name.  */
9526 const char *
9527 anon_aggrname_format()
9528 {
9529 #ifndef NO_DOT_IN_LABEL
9530  return "._%d";
9531 #else /* NO_DOT_IN_LABEL */
9532 #ifndef NO_DOLLAR_IN_LABEL
9533   return "$_%d";
9534 #else /* NO_DOLLAR_IN_LABEL */
9535   return "__anon_%d";
9536 #endif	/* NO_DOLLAR_IN_LABEL */
9537 #endif	/* NO_DOT_IN_LABEL */
9538 }
9539 
9540 /* Generate a name for a special-purpose function.
9541    The generated name may need to be unique across the whole link.
9542    Changes to this function may also require corresponding changes to
9543    xstrdup_mask_random.
9544    TYPE is some string to identify the purpose of this function to the
9545    linker or collect2; it must start with an uppercase letter,
9546    one of:
9547    I - for constructors
9548    D - for destructors
9549    N - for C++ anonymous namespaces
9550    F - for DWARF unwind frame information.  */
9551 
9552 tree
9553 get_file_function_name (const char *type)
9554 {
9555   char *buf;
9556   const char *p;
9557   char *q;
9558 
9559   /* If we already have a name we know to be unique, just use that.  */
9560   if (first_global_object_name)
9561     p = q = ASTRDUP (first_global_object_name);
9562   /* If the target is handling the constructors/destructors, they
9563      will be local to this file and the name is only necessary for
9564      debugging purposes.
9565      We also assign sub_I and sub_D suffixes to constructors called from
9566      the global static constructors.  These are always local.  */
9567   else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9568 	   || (strncmp (type, "sub_", 4) == 0
9569 	       && (type[4] == 'I' || type[4] == 'D')))
9570     {
9571       const char *file = main_input_filename;
9572       if (! file)
9573 	file = LOCATION_FILE (input_location);
9574       /* Just use the file's basename, because the full pathname
9575 	 might be quite long.  */
9576       p = q = ASTRDUP (lbasename (file));
9577     }
9578   else
9579     {
9580       /* Otherwise, the name must be unique across the entire link.
9581 	 We don't have anything that we know to be unique to this translation
9582 	 unit, so use what we do have and throw in some randomness.  */
9583       unsigned len;
9584       const char *name = weak_global_object_name;
9585       const char *file = main_input_filename;
9586 
9587       if (! name)
9588 	name = "";
9589       if (! file)
9590 	file = LOCATION_FILE (input_location);
9591 
9592       len = strlen (file);
9593       q = (char *) alloca (9 + 17 + len + 1);
9594       memcpy (q, file, len + 1);
9595 
9596       snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9597 		crc32_string (0, name), get_random_seed (false));
9598 
9599       p = q;
9600     }
9601 
9602   clean_symbol_name (q);
9603   buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9604 			 + strlen (type));
9605 
9606   /* Set up the name of the file-level functions we may need.
9607      Use a global object (which is already required to be unique over
9608      the program) rather than the file name (which imposes extra
9609      constraints).  */
9610   sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9611 
9612   return get_identifier (buf);
9613 }
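
/* Illustrative result: for static constructors ("I") compiled from
   foo.c on a target with have_ctors_dtors, the identifier returned
   above looks like "_GLOBAL__I_foo_c"; the exact spelling depends on
   the input basename and on which characters clean_symbol_name
   keeps.  */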
9614 
9615 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9616 
9617 /* Complain that the tree code of NODE does not match the expected 0
9618    terminated list of trailing codes. The trailing code list can be
9619    empty, for a more vague error message.  FILE, LINE, and FUNCTION
9620    are of the caller.  */
9621 
9622 void
9623 tree_check_failed (const_tree node, const char *file,
9624 		   int line, const char *function, ...)
9625 {
9626   va_list args;
9627   const char *buffer;
9628   unsigned length = 0;
9629   enum tree_code code;
9630 
9631   va_start (args, function);
9632   while ((code = (enum tree_code) va_arg (args, int)))
9633     length += 4 + strlen (get_tree_code_name (code));
9634   va_end (args);
9635   if (length)
9636     {
9637       char *tmp;
9638       va_start (args, function);
9639       length += strlen ("expected ");
9640       buffer = tmp = (char *) alloca (length);
9641       length = 0;
9642       while ((code = (enum tree_code) va_arg (args, int)))
9643 	{
9644 	  const char *prefix = length ? " or " : "expected ";
9645 
9646 	  strcpy (tmp + length, prefix);
9647 	  length += strlen (prefix);
9648 	  strcpy (tmp + length, get_tree_code_name (code));
9649 	  length += strlen (get_tree_code_name (code));
9650 	}
9651       va_end (args);
9652     }
9653   else
9654     buffer = "unexpected node";
9655 
9656   internal_error ("tree check: %s, have %s in %s, at %s:%d",
9657 		  buffer, get_tree_code_name (TREE_CODE (node)),
9658 		  function, trim_filename (file), line);
9659 }
9660 
9661 /* Complain that the tree code of NODE matches one of the codes in the
9662    0-terminated trailing list, which it should not.  FILE, LINE, and
9663    FUNCTION are of the caller.  */
9664 
9665 void
9666 tree_not_check_failed (const_tree node, const char *file,
9667 		       int line, const char *function, ...)
9668 {
9669   va_list args;
9670   char *buffer;
9671   unsigned length = 0;
9672   enum tree_code code;
9673 
9674   va_start (args, function);
9675   while ((code = (enum tree_code) va_arg (args, int)))
9676     length += 4 + strlen (get_tree_code_name (code));
9677   va_end (args);
9678   va_start (args, function);
9679   buffer = (char *) alloca (length);
9680   length = 0;
9681   while ((code = (enum tree_code) va_arg (args, int)))
9682     {
9683       if (length)
9684 	{
9685 	  strcpy (buffer + length, " or ");
9686 	  length += 4;
9687 	}
9688       strcpy (buffer + length, get_tree_code_name (code));
9689       length += strlen (get_tree_code_name (code));
9690     }
9691   va_end (args);
9692 
9693   internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9694 		  buffer, get_tree_code_name (TREE_CODE (node)),
9695 		  function, trim_filename (file), line);
9696 }
9697 
9698 /* Similar to tree_check_failed, except that we check for a class of tree
9699    code, given in CL.  */
9700 
9701 void
9702 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9703 			 const char *file, int line, const char *function)
9704 {
9705   internal_error
9706     ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9707      TREE_CODE_CLASS_STRING (cl),
9708      TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9709      get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9710 }
9711 
9712 /* Similar to tree_check_failed, except that instead of specifying a
9713    dozen codes, use the knowledge that they're all sequential.  */
9714 
9715 void
9716 tree_range_check_failed (const_tree node, const char *file, int line,
9717 			 const char *function, enum tree_code c1,
9718 			 enum tree_code c2)
9719 {
9720   char *buffer;
9721   unsigned length = 0;
9722   unsigned int c;
9723 
9724   for (c = c1; c <= c2; ++c)
9725     length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9726 
9727   length += strlen ("expected ");
9728   buffer = (char *) alloca (length);
9729   length = 0;
9730 
9731   for (c = c1; c <= c2; ++c)
9732     {
9733       const char *prefix = length ? " or " : "expected ";
9734 
9735       strcpy (buffer + length, prefix);
9736       length += strlen (prefix);
9737       strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9738       length += strlen (get_tree_code_name ((enum tree_code) c));
9739     }
9740 
9741   internal_error ("tree check: %s, have %s in %s, at %s:%d",
9742 		  buffer, get_tree_code_name (TREE_CODE (node)),
9743 		  function, trim_filename (file), line);
9744 }
9745 
9746 
9747 /* Similar to tree_check_failed, except that we check that a tree does
9748    not belong to the specified class of tree code, given in CL.  */
9749 
9750 void
9751 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9752 			     const char *file, int line, const char *function)
9753 {
9754   internal_error
9755     ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9756      TREE_CODE_CLASS_STRING (cl),
9757      TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9758      get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9759 }
9760 
9761 
9762 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes.  */
9763 
9764 void
9765 omp_clause_check_failed (const_tree node, const char *file, int line,
9766                          const char *function, enum omp_clause_code code)
9767 {
9768   internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9769 		  omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9770 		  function, trim_filename (file), line);
9771 }
9772 
9773 
9774 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes.  */
9775 
9776 void
9777 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9778 			       const char *function, enum omp_clause_code c1,
9779 			       enum omp_clause_code c2)
9780 {
9781   char *buffer;
9782   unsigned length = 0;
9783   unsigned int c;
9784 
9785   for (c = c1; c <= c2; ++c)
9786     length += 4 + strlen (omp_clause_code_name[c]);
9787 
9788   length += strlen ("expected ");
9789   buffer = (char *) alloca (length);
9790   length = 0;
9791 
9792   for (c = c1; c <= c2; ++c)
9793     {
9794       const char *prefix = length ? " or " : "expected ";
9795 
9796       strcpy (buffer + length, prefix);
9797       length += strlen (prefix);
9798       strcpy (buffer + length, omp_clause_code_name[c]);
9799       length += strlen (omp_clause_code_name[c]);
9800     }
9801 
9802   internal_error ("tree check: %s, have %s in %s, at %s:%d",
9803 		  buffer, omp_clause_code_name[TREE_CODE (node)],
9804 		  function, trim_filename (file), line);
9805 }
9806 
9807 
9808 #undef DEFTREESTRUCT
9809 #define DEFTREESTRUCT(VAL, NAME) NAME,
9810 
9811 static const char *ts_enum_names[] = {
9812 #include "treestruct.def"
9813 };
9814 #undef DEFTREESTRUCT
9815 
9816 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9817 
9818 /* Similar to tree_class_check_failed, except that we check for
9819    whether CODE contains the tree structure identified by EN.  */
9820 
9821 void
9822 tree_contains_struct_check_failed (const_tree node,
9823 				   const enum tree_node_structure_enum en,
9824 				   const char *file, int line,
9825 				   const char *function)
9826 {
9827   internal_error
9828     ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9829      TS_ENUM_NAME (en),
9830      get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9831 }
9832 
9833 
9834 /* Similar to above, except that the check is for the bounds of a
9835    TREE_INT_CST's (dynamically sized) element vector.  */
9836 
9837 void
9838 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9839 			       const char *function)
9840 {
9841   internal_error
9842     ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9843      idx + 1, len, function, trim_filename (file), line);
9844 }
9845 
9846 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9847    (dynamically sized) vector.  */
9848 
9849 void
9850 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9851 			   const char *function)
9852 {
9853   internal_error
9854     ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9855      idx + 1, len, function, trim_filename (file), line);
9856 }
9857 
9858 /* Similar to above, except that the check is for the bounds of the operand
9859    vector of an expression node EXP.  */
9860 
9861 void
9862 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9863 			   int line, const char *function)
9864 {
9865   enum tree_code code = TREE_CODE (exp);
9866   internal_error
9867     ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9868      idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9869      function, trim_filename (file), line);
9870 }
9871 
9872 /* Similar to above, except that the check is for the number of
9873    operands of an OMP_CLAUSE node.  */
9874 
9875 void
9876 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9877 			         int line, const char *function)
9878 {
9879   internal_error
9880     ("tree check: accessed operand %d of omp_clause %s with %d operands "
9881      "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9882      omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9883      trim_filename (file), line);
9884 }
9885 #endif /* ENABLE_TREE_CHECKING */
9886 
9887 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9888    and mapped to the machine mode MODE.  Initialize its fields and build
9889    the information necessary for debugging output.  */
9890 
9891 static tree
9892 make_vector_type (tree innertype, int nunits, machine_mode mode)
9893 {
9894   tree t;
9895   inchash::hash hstate;
9896   tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
9897 
9898   t = make_node (VECTOR_TYPE);
9899   TREE_TYPE (t) = mv_innertype;
9900   SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9901   SET_TYPE_MODE (t, mode);
9902 
9903   if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
9904     SET_TYPE_STRUCTURAL_EQUALITY (t);
9905   else if ((TYPE_CANONICAL (mv_innertype) != innertype
9906 	    || mode != VOIDmode)
9907 	   && !VECTOR_BOOLEAN_TYPE_P (t))
9908     TYPE_CANONICAL (t)
9909       = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
9910 
9911   layout_type (t);
9912 
9913   hstate.add_wide_int (VECTOR_TYPE);
9914   hstate.add_wide_int (nunits);
9915   hstate.add_wide_int (mode);
9916   hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9917   t = type_hash_canon (hstate.end (), t);
9918 
9919   /* We have built a main variant, based on the main variant of the
9920      inner type. Use it to build the variant we return.  */
9921   if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9922       && TREE_TYPE (t) != innertype)
9923     return build_type_attribute_qual_variant (t,
9924 					      TYPE_ATTRIBUTES (innertype),
9925 					      TYPE_QUALS (innertype));
9926 
9927   return t;
9928 }
9929 
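/* Return an integer type node of SIZE bits, signed or unsigned as
   requested by UNSIGNEDP, reusing one of the standard C integer type
   nodes when one of them has that size.  */
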
9930 static tree
9931 make_or_reuse_type (unsigned size, int unsignedp)
9932 {
9933   int i;
9934 
9935   if (size == INT_TYPE_SIZE)
9936     return unsignedp ? unsigned_type_node : integer_type_node;
9937   if (size == CHAR_TYPE_SIZE)
9938     return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9939   if (size == SHORT_TYPE_SIZE)
9940     return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9941   if (size == LONG_TYPE_SIZE)
9942     return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9943   if (size == LONG_LONG_TYPE_SIZE)
9944     return (unsignedp ? long_long_unsigned_type_node
9945             : long_long_integer_type_node);
9946 
9947   for (i = 0; i < NUM_INT_N_ENTS; i ++)
9948     if (size == int_n_data[i].bitsize
9949 	&& int_n_enabled_p[i])
9950       return (unsignedp ? int_n_trees[i].unsigned_type
9951 	      : int_n_trees[i].signed_type);
9952 
9953   if (unsignedp)
9954     return make_unsigned_type (size);
9955   else
9956     return make_signed_type (size);
9957 }
9958 
9959 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP.  */
9960 
9961 static tree
9962 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9963 {
9964   if (satp)
9965     {
9966       if (size == SHORT_FRACT_TYPE_SIZE)
9967 	return unsignedp ? sat_unsigned_short_fract_type_node
9968 			 : sat_short_fract_type_node;
9969       if (size == FRACT_TYPE_SIZE)
9970 	return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9971       if (size == LONG_FRACT_TYPE_SIZE)
9972 	return unsignedp ? sat_unsigned_long_fract_type_node
9973 			 : sat_long_fract_type_node;
9974       if (size == LONG_LONG_FRACT_TYPE_SIZE)
9975 	return unsignedp ? sat_unsigned_long_long_fract_type_node
9976 			 : sat_long_long_fract_type_node;
9977     }
9978   else
9979     {
9980       if (size == SHORT_FRACT_TYPE_SIZE)
9981 	return unsignedp ? unsigned_short_fract_type_node
9982 			 : short_fract_type_node;
9983       if (size == FRACT_TYPE_SIZE)
9984 	return unsignedp ? unsigned_fract_type_node : fract_type_node;
9985       if (size == LONG_FRACT_TYPE_SIZE)
9986 	return unsignedp ? unsigned_long_fract_type_node
9987 			 : long_fract_type_node;
9988       if (size == LONG_LONG_FRACT_TYPE_SIZE)
9989 	return unsignedp ? unsigned_long_long_fract_type_node
9990 			 : long_long_fract_type_node;
9991     }
9992 
9993   return make_fract_type (size, unsignedp, satp);
9994 }
9995 
9996 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP.  */
9997 
9998 static tree
9999 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
10000 {
10001   if (satp)
10002     {
10003       if (size == SHORT_ACCUM_TYPE_SIZE)
10004 	return unsignedp ? sat_unsigned_short_accum_type_node
10005 			 : sat_short_accum_type_node;
10006       if (size == ACCUM_TYPE_SIZE)
10007 	return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
10008       if (size == LONG_ACCUM_TYPE_SIZE)
10009 	return unsignedp ? sat_unsigned_long_accum_type_node
10010 			 : sat_long_accum_type_node;
10011       if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10012 	return unsignedp ? sat_unsigned_long_long_accum_type_node
10013 			 : sat_long_long_accum_type_node;
10014     }
10015   else
10016     {
10017       if (size == SHORT_ACCUM_TYPE_SIZE)
10018 	return unsignedp ? unsigned_short_accum_type_node
10019 			 : short_accum_type_node;
10020       if (size == ACCUM_TYPE_SIZE)
10021 	return unsignedp ? unsigned_accum_type_node : accum_type_node;
10022       if (size == LONG_ACCUM_TYPE_SIZE)
10023 	return unsignedp ? unsigned_long_accum_type_node
10024 			 : long_accum_type_node;
10025       if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10026 	return unsignedp ? unsigned_long_long_accum_type_node
10027 			 : long_long_accum_type_node;
10028     }
10029 
10030   return make_accum_type (size, unsignedp, satp);
10031 }
10032 
10033 
10034 /* Create an atomic variant node for TYPE.  This routine is called
10035    during initialization of data types to create the 5 basic atomic
10036    types. The generic build_variant_type function requires these to
10037    already be set up in order to function properly, so cannot be
10038    called from there.  If ALIGN is non-zero, then ensure alignment is
10039    overridden to this value.  */
10040 
10041 static tree
10042 build_atomic_base (tree type, unsigned int align)
10043 {
10044   tree t;
10045 
10046   /* Make sure it's not already registered.  */
10047   if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
10048     return t;
10049 
10050   t = build_variant_type_copy (type);
10051   set_type_quals (t, TYPE_QUAL_ATOMIC);
10052 
10053   if (align)
10054     TYPE_ALIGN (t) = align;
10055 
10056   return t;
10057 }
10058 
10059 /* Create nodes for all integer types (and error_mark_node) using the sizes
10060    of C datatypes.  SIGNED_CHAR specifies whether char is signed.  */
10061 
10062 void
10063 build_common_tree_nodes (bool signed_char)
10064 {
10065   int i;
10066 
10067   error_mark_node = make_node (ERROR_MARK);
10068   TREE_TYPE (error_mark_node) = error_mark_node;
10069 
10070   initialize_sizetypes ();
10071 
10072   /* Define both `signed char' and `unsigned char'.  */
10073   signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
10074   TYPE_STRING_FLAG (signed_char_type_node) = 1;
10075   unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
10076   TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
10077 
10078   /* Define `char', which is like either `signed char' or `unsigned char'
10079      but not the same as either.  */
10080   char_type_node
10081     = (signed_char
10082        ? make_signed_type (CHAR_TYPE_SIZE)
10083        : make_unsigned_type (CHAR_TYPE_SIZE));
10084   TYPE_STRING_FLAG (char_type_node) = 1;
10085 
10086   short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
10087   short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
10088   integer_type_node = make_signed_type (INT_TYPE_SIZE);
10089   unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
10090   long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
10091   long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
10092   long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
10093   long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
10094 
10095   for (i = 0; i < NUM_INT_N_ENTS; i ++)
10096     {
10097       int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
10098       int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
10099       TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
10100       TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
10101 
10102       if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
10103 	  && int_n_enabled_p[i])
10104 	{
10105 	  integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
10106 	  integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
10107 	}
10108     }
10109 
10110   /* Define a boolean type.  This type only represents boolean values but
10111      may be larger than char depending on the value of BOOL_TYPE_SIZE.  */
10112   boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
10113   TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
10114   TYPE_PRECISION (boolean_type_node) = 1;
10115   TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
10116 
10117   /* Define what type to use for size_t.  */
10118   if (strcmp (SIZE_TYPE, "unsigned int") == 0)
10119     size_type_node = unsigned_type_node;
10120   else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
10121     size_type_node = long_unsigned_type_node;
10122   else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
10123     size_type_node = long_long_unsigned_type_node;
10124   else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
10125     size_type_node = short_unsigned_type_node;
10126   else
10127     {
10128       int i;
10129 
10130       size_type_node = NULL_TREE;
10131       for (i = 0; i < NUM_INT_N_ENTS; i++)
10132 	if (int_n_enabled_p[i])
10133 	  {
10134 	    char name[50];
10135 	    sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
10136 
10137 	    if (strcmp (name, SIZE_TYPE) == 0)
10138 	      {
10139 		size_type_node = int_n_trees[i].unsigned_type;
10140 	      }
10141 	  }
10142       if (size_type_node == NULL_TREE)
10143 	gcc_unreachable ();
10144     }
10145 
10146   /* Fill in the rest of the sized types.  Reuse existing type nodes
10147      when possible.  */
10148   intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
10149   intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
10150   intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
10151   intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
10152   intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
10153 
10154   unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
10155   unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
10156   unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
10157   unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
10158   unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
10159 
10160   /* Don't call build_qualified_type for atomics.  That routine does
10161      special processing for atomics, and until they are initialized
10162      it's better not to make that call.
10163 
10164      Check to see if there is a target override for atomic types.  */
10165 
10166   atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
10167 					targetm.atomic_align_for_mode (QImode));
10168   atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
10169 					targetm.atomic_align_for_mode (HImode));
10170   atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
10171 					targetm.atomic_align_for_mode (SImode));
10172   atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
10173 					targetm.atomic_align_for_mode (DImode));
10174   atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
10175 					targetm.atomic_align_for_mode (TImode));
10176 
10177   access_public_node = get_identifier ("public");
10178   access_protected_node = get_identifier ("protected");
10179   access_private_node = get_identifier ("private");
10180 
10181   /* Define these next since types below may use them.  */
10182   integer_zero_node = build_int_cst (integer_type_node, 0);
10183   integer_one_node = build_int_cst (integer_type_node, 1);
10184   integer_three_node = build_int_cst (integer_type_node, 3);
10185   integer_minus_one_node = build_int_cst (integer_type_node, -1);
10186 
10187   size_zero_node = size_int (0);
10188   size_one_node = size_int (1);
10189   bitsize_zero_node = bitsize_int (0);
10190   bitsize_one_node = bitsize_int (1);
10191   bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
10192 
10193   boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
10194   boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
10195 
10196   void_type_node = make_node (VOID_TYPE);
10197   layout_type (void_type_node);
10198 
10199   pointer_bounds_type_node = targetm.chkp_bound_type ();
10200 
10201   /* We are not going to have real types in C with less than byte alignment,
10202      so we might as well not have any types that claim to have it.  */
10203   TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
10204   TYPE_USER_ALIGN (void_type_node) = 0;
10205 
10206   void_node = make_node (VOID_CST);
10207   TREE_TYPE (void_node) = void_type_node;
10208 
10209   null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
10210   layout_type (TREE_TYPE (null_pointer_node));
10211 
10212   ptr_type_node = build_pointer_type (void_type_node);
10213   const_ptr_type_node
10214     = build_pointer_type (build_type_variant (void_type_node, 1, 0));
10215   fileptr_type_node = ptr_type_node;
10216 
10217   pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
10218 
10219   float_type_node = make_node (REAL_TYPE);
10220   TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
10221   layout_type (float_type_node);
10222 
10223   double_type_node = make_node (REAL_TYPE);
10224   TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
10225   layout_type (double_type_node);
10226 
10227   long_double_type_node = make_node (REAL_TYPE);
10228   TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
10229   layout_type (long_double_type_node);
10230 
10231   float_ptr_type_node = build_pointer_type (float_type_node);
10232   double_ptr_type_node = build_pointer_type (double_type_node);
10233   long_double_ptr_type_node = build_pointer_type (long_double_type_node);
10234   integer_ptr_type_node = build_pointer_type (integer_type_node);
10235 
10236   /* Fixed size integer types.  */
10237   uint16_type_node = make_or_reuse_type (16, 1);
10238   uint32_type_node = make_or_reuse_type (32, 1);
10239   uint64_type_node = make_or_reuse_type (64, 1);
10240 
10241   /* Decimal float types. */
10242   dfloat32_type_node = make_node (REAL_TYPE);
10243   TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
10244   layout_type (dfloat32_type_node);
10245   SET_TYPE_MODE (dfloat32_type_node, SDmode);
10246   dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
10247 
10248   dfloat64_type_node = make_node (REAL_TYPE);
10249   TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
10250   layout_type (dfloat64_type_node);
10251   SET_TYPE_MODE (dfloat64_type_node, DDmode);
10252   dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
10253 
10254   dfloat128_type_node = make_node (REAL_TYPE);
10255   TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
10256   layout_type (dfloat128_type_node);
10257   SET_TYPE_MODE (dfloat128_type_node, TDmode);
10258   dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
10259 
10260   complex_integer_type_node = build_complex_type (integer_type_node, true);
10261   complex_float_type_node = build_complex_type (float_type_node, true);
10262   complex_double_type_node = build_complex_type (double_type_node, true);
10263   complex_long_double_type_node = build_complex_type (long_double_type_node,
10264 						      true);
10265 
10266 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned.  */
10267 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
10268   sat_ ## KIND ## _type_node = \
10269     make_sat_signed_ ## KIND ## _type (SIZE); \
10270   sat_unsigned_ ## KIND ## _type_node = \
10271     make_sat_unsigned_ ## KIND ## _type (SIZE); \
10272   KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10273   unsigned_ ## KIND ## _type_node = \
10274     make_unsigned_ ## KIND ## _type (SIZE);
10275 
10276 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
10277   sat_ ## WIDTH ## KIND ## _type_node = \
10278     make_sat_signed_ ## KIND ## _type (SIZE); \
10279   sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10280     make_sat_unsigned_ ## KIND ## _type (SIZE); \
10281   WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10282   unsigned_ ## WIDTH ## KIND ## _type_node = \
10283     make_unsigned_ ## KIND ## _type (SIZE);
10284 
10285 /* Make fixed-point type nodes based on four different widths.  */
10286 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10287   MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10288   MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10289   MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10290   MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10291 
10292 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned.  */
10293 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10294   NAME ## _type_node = \
10295     make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10296   u ## NAME ## _type_node = \
10297     make_or_reuse_unsigned_ ## KIND ## _type \
10298       (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10299   sat_ ## NAME ## _type_node = \
10300     make_or_reuse_sat_signed_ ## KIND ## _type \
10301       (GET_MODE_BITSIZE (MODE ## mode)); \
10302   sat_u ## NAME ## _type_node = \
10303     make_or_reuse_sat_unsigned_ ## KIND ## _type \
10304       (GET_MODE_BITSIZE (U ## MODE ## mode));
10305 
10306   /* Fixed-point type and mode nodes.  */
10307   MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10308   MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10309   MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10310   MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10311   MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10312   MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10313   MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10314   MAKE_FIXED_MODE_NODE (accum, ha, HA)
10315   MAKE_FIXED_MODE_NODE (accum, sa, SA)
10316   MAKE_FIXED_MODE_NODE (accum, da, DA)
10317   MAKE_FIXED_MODE_NODE (accum, ta, TA)
10318 
10319   {
10320     tree t = targetm.build_builtin_va_list ();
10321 
10322     /* Many back-ends define record types without setting TYPE_NAME.
10323        If we copied the record type here, we'd keep the original
10324        record type without a name.  This breaks name mangling.  So,
10325        don't copy record types and let c_common_nodes_and_builtins()
10326        declare the type to be __builtin_va_list.  */
10327     if (TREE_CODE (t) != RECORD_TYPE)
10328       t = build_variant_type_copy (t);
10329 
10330     va_list_type_node = t;
10331   }
10332 }
10333 
10334 /* Modify DECL for given flags.
10335    TM_PURE attribute is set only on types, so the function will modify
10336    DECL's type when ECF_TM_PURE is used.  */
10337 
10338 void
10339 set_call_expr_flags (tree decl, int flags)
10340 {
10341   if (flags & ECF_NOTHROW)
10342     TREE_NOTHROW (decl) = 1;
10343   if (flags & ECF_CONST)
10344     TREE_READONLY (decl) = 1;
10345   if (flags & ECF_PURE)
10346     DECL_PURE_P (decl) = 1;
10347   if (flags & ECF_LOOPING_CONST_OR_PURE)
10348     DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10349   if (flags & ECF_NOVOPS)
10350     DECL_IS_NOVOPS (decl) = 1;
10351   if (flags & ECF_NORETURN)
10352     TREE_THIS_VOLATILE (decl) = 1;
10353   if (flags & ECF_MALLOC)
10354     DECL_IS_MALLOC (decl) = 1;
10355   if (flags & ECF_RETURNS_TWICE)
10356     DECL_IS_RETURNS_TWICE (decl) = 1;
10357   if (flags & ECF_LEAF)
10358     DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10359 					NULL, DECL_ATTRIBUTES (decl));
10360   if ((flags & ECF_TM_PURE) && flag_tm)
10361     apply_tm_attr (decl, get_identifier ("transaction_pure"));
10362   /* Looping const or pure is implied by noreturn.
10363      There is currently no way to declare looping const or looping pure alone.  */
10364   gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10365 	      || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
10366 }
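
/* Usage sketch (an illustrative comment, not part of the original code):
   a front end registering its own runtime helper might mark its decl as
   malloc-like, nothrow and leaf with

     set_call_expr_flags (decl, ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);

   which is how local_define_builtin below applies the flags it is given.  */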
10367 
10368 
10369 /* A subroutine of build_common_builtin_nodes.  Define a builtin function.  */
10370 
10371 static void
10372 local_define_builtin (const char *name, tree type, enum built_in_function code,
10373                       const char *library_name, int ecf_flags)
10374 {
10375   tree decl;
10376 
10377   decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10378 			       library_name, NULL_TREE);
10379   set_call_expr_flags (decl, ecf_flags);
10380 
10381   set_builtin_decl (code, decl, true);
10382 }
10383 
10384 /* Call this function after instantiating all builtins that the language
10385    front end cares about.  This will build the rest of the builtins
10386    and internal functions that are relied upon by the tree optimizers and
10387    the middle-end.  */
10388 
10389 void
10390 build_common_builtin_nodes (void)
10391 {
10392   tree tmp, ftype;
10393   int ecf_flags;
10394 
10395   if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10396     {
10397       ftype = build_function_type (void_type_node, void_list_node);
10398       local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
10399 			    "__builtin_unreachable",
10400 			    ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10401 			    | ECF_CONST);
10402     }
10403 
10404   if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10405       || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10406     {
10407       ftype = build_function_type_list (ptr_type_node,
10408 					ptr_type_node, const_ptr_type_node,
10409 					size_type_node, NULL_TREE);
10410 
10411       if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10412 	local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10413 			      "memcpy", ECF_NOTHROW | ECF_LEAF);
10414       if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10415 	local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10416 			      "memmove", ECF_NOTHROW | ECF_LEAF);
10417     }
10418 
10419   if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10420     {
10421       ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10422 					const_ptr_type_node, size_type_node,
10423 					NULL_TREE);
10424       local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10425 			    "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10426     }
10427 
10428   if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10429     {
10430       ftype = build_function_type_list (ptr_type_node,
10431 					ptr_type_node, integer_type_node,
10432 					size_type_node, NULL_TREE);
10433       local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10434 			    "memset", ECF_NOTHROW | ECF_LEAF);
10435     }
10436 
10437   if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10438     {
10439       ftype = build_function_type_list (ptr_type_node,
10440 					size_type_node, NULL_TREE);
10441       local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10442 			    "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10443     }
10444 
10445   ftype = build_function_type_list (ptr_type_node, size_type_node,
10446 				    size_type_node, NULL_TREE);
10447   local_define_builtin ("__builtin_alloca_with_align", ftype,
10448 			BUILT_IN_ALLOCA_WITH_ALIGN,
10449 			"__builtin_alloca_with_align",
10450 			ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10451 
10452   /* If we're checking the stack, `alloca' can throw.  */
10453   if (flag_stack_check)
10454     {
10455       TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
10456       TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
10457     }
10458 
10459   ftype = build_function_type_list (void_type_node,
10460 				    ptr_type_node, ptr_type_node,
10461 				    ptr_type_node, NULL_TREE);
10462   local_define_builtin ("__builtin_init_trampoline", ftype,
10463 			BUILT_IN_INIT_TRAMPOLINE,
10464 			"__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10465   local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10466 			BUILT_IN_INIT_HEAP_TRAMPOLINE,
10467 			"__builtin_init_heap_trampoline",
10468 			ECF_NOTHROW | ECF_LEAF);
10469 
10470   ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10471   local_define_builtin ("__builtin_adjust_trampoline", ftype,
10472 			BUILT_IN_ADJUST_TRAMPOLINE,
10473 			"__builtin_adjust_trampoline",
10474 			ECF_CONST | ECF_NOTHROW);
10475 
10476   ftype = build_function_type_list (void_type_node,
10477 				    ptr_type_node, ptr_type_node, NULL_TREE);
10478   local_define_builtin ("__builtin_nonlocal_goto", ftype,
10479 			BUILT_IN_NONLOCAL_GOTO,
10480 			"__builtin_nonlocal_goto",
10481 			ECF_NORETURN | ECF_NOTHROW);
10482 
10483   ftype = build_function_type_list (void_type_node,
10484 				    ptr_type_node, ptr_type_node, NULL_TREE);
10485   local_define_builtin ("__builtin_setjmp_setup", ftype,
10486 			BUILT_IN_SETJMP_SETUP,
10487 			"__builtin_setjmp_setup", ECF_NOTHROW);
10488 
10489   ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10490   local_define_builtin ("__builtin_setjmp_receiver", ftype,
10491 			BUILT_IN_SETJMP_RECEIVER,
10492 			"__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10493 
10494   ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10495   local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10496 			"__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10497 
10498   ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10499   local_define_builtin ("__builtin_stack_restore", ftype,
10500 			BUILT_IN_STACK_RESTORE,
10501 			"__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10502 
10503   /* If there's a possibility that we might use the ARM EABI, build the
10504     alternate __cxa_end_cleanup node used to resume from C++ and Java.  */
10505   if (targetm.arm_eabi_unwinder)
10506     {
10507       ftype = build_function_type_list (void_type_node, NULL_TREE);
10508       local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10509 			    BUILT_IN_CXA_END_CLEANUP,
10510 			    "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10511     }
10512 
10513   ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10514   local_define_builtin ("__builtin_unwind_resume", ftype,
10515 			BUILT_IN_UNWIND_RESUME,
10516 			((targetm_common.except_unwind_info (&global_options)
10517 			  == UI_SJLJ)
10518 			 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10519 			ECF_NORETURN);
10520 
10521   if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10522     {
10523       ftype = build_function_type_list (ptr_type_node, integer_type_node,
10524 					NULL_TREE);
10525       local_define_builtin ("__builtin_return_address", ftype,
10526 			    BUILT_IN_RETURN_ADDRESS,
10527 			    "__builtin_return_address",
10528 			    ECF_NOTHROW);
10529     }
10530 
10531   if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10532       || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10533     {
10534       ftype = build_function_type_list (void_type_node, ptr_type_node,
10535 					ptr_type_node, NULL_TREE);
10536       if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10537 	local_define_builtin ("__cyg_profile_func_enter", ftype,
10538 			      BUILT_IN_PROFILE_FUNC_ENTER,
10539 			      "__cyg_profile_func_enter", 0);
10540       if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10541 	local_define_builtin ("__cyg_profile_func_exit", ftype,
10542 			      BUILT_IN_PROFILE_FUNC_EXIT,
10543 			      "__cyg_profile_func_exit", 0);
10544     }
10545 
10546   /* The exception object and filter values from the runtime.  The argument
10547      must be zero before exception lowering, i.e. from the front end.  After
10548      exception lowering, it will be the region number for the exception
10549      landing pad.  These functions are PURE instead of CONST to prevent
10550      them from being hoisted past the exception edge that will initialize
10551      its value in the landing pad.  */
10552   ftype = build_function_type_list (ptr_type_node,
10553 				    integer_type_node, NULL_TREE);
10554   ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10555   /* Only use TM_PURE if we have TM language support.  */
10556   if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10557     ecf_flags |= ECF_TM_PURE;
10558   local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10559 			"__builtin_eh_pointer", ecf_flags);
10560 
10561   tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10562   ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10563   local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10564 			"__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10565 
10566   ftype = build_function_type_list (void_type_node,
10567 				    integer_type_node, integer_type_node,
10568 				    NULL_TREE);
10569   local_define_builtin ("__builtin_eh_copy_values", ftype,
10570 			BUILT_IN_EH_COPY_VALUES,
10571 			"__builtin_eh_copy_values", ECF_NOTHROW);
10572 
10573   /* Complex multiplication and division.  These are handled as builtins
10574      rather than optabs because emit_library_call_value doesn't support
10575      complex.  Further, we can do slightly better with folding these
10576      beasties if the real and imaginary parts of the arguments are separate.  */
10577   {
10578     int mode;
10579 
10580     for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10581       {
10582 	char mode_name_buf[4], *q;
10583 	const char *p;
10584 	enum built_in_function mcode, dcode;
10585 	tree type, inner_type;
10586 	const char *prefix = "__";
10587 
10588 	if (targetm.libfunc_gnu_prefix)
10589 	  prefix = "__gnu_";
10590 
10591 	type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10592 	if (type == NULL)
10593 	  continue;
10594 	inner_type = TREE_TYPE (type);
10595 
10596 	ftype = build_function_type_list (type, inner_type, inner_type,
10597 					  inner_type, inner_type, NULL_TREE);
10598 
10599         mcode = ((enum built_in_function)
10600 		 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10601         dcode = ((enum built_in_function)
10602 		 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10603 
10604         for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10605 	  *q = TOLOWER (*p);
10606 	*q = '\0';
10607 
10608 	built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10609 					NULL);
10610         local_define_builtin (built_in_names[mcode], ftype, mcode,
10611 			      built_in_names[mcode],
10612 			      ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10613 
10614 	built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10615 					NULL);
10616         local_define_builtin (built_in_names[dcode], ftype, dcode,
10617 			      built_in_names[dcode],
10618 			      ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10619       }
10620   }
10621 
10622   init_internal_fns ();
10623 }
10624 
10625 /* HACK.  GROSS.  This is absolutely disgusting.  I wish there was a
10626    better way.
10627 
10628    If we requested a pointer to a vector, build up the pointers that
10629    we stripped off while looking for the inner type.  Similarly for
10630    return values from functions.
10631 
10632    The argument TYPE is the top of the chain, and BOTTOM is the
10633    new type which we will point to.  */
10634 
10635 tree
10636 reconstruct_complex_type (tree type, tree bottom)
10637 {
10638   tree inner, outer;
10639 
10640   if (TREE_CODE (type) == POINTER_TYPE)
10641     {
10642       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10643       outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10644 					   TYPE_REF_CAN_ALIAS_ALL (type));
10645     }
10646   else if (TREE_CODE (type) == REFERENCE_TYPE)
10647     {
10648       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10649       outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10650 					     TYPE_REF_CAN_ALIAS_ALL (type));
10651     }
10652   else if (TREE_CODE (type) == ARRAY_TYPE)
10653     {
10654       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10655       outer = build_array_type (inner, TYPE_DOMAIN (type));
10656     }
10657   else if (TREE_CODE (type) == FUNCTION_TYPE)
10658     {
10659       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10660       outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10661     }
10662   else if (TREE_CODE (type) == METHOD_TYPE)
10663     {
10664       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10665 	  /* The build_method_type_directly() routine prepends 'this' to the
10666 	     argument list, so we must compensate by removing it.  */
10667       outer
10668 	= build_method_type_directly
10669 	    (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10670 	     inner,
10671 	     TREE_CHAIN (TYPE_ARG_TYPES (type)));
10672     }
10673   else if (TREE_CODE (type) == OFFSET_TYPE)
10674     {
10675       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10676       outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10677     }
10678   else
10679     return bottom;
10680 
10681   return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10682 					    TYPE_QUALS (type));
10683 }
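
/* Illustrative sketch (not part of the original code): with TYPE being
   "float *" and BOTTOM a V4SF vector type, the recursion strips the
   POINTER_TYPE, reaches the non-wrapper inner type and returns BOTTOM,
   then rebuilds the pointer on the way out, yielding "V4SF *" with the
   original type's qualifiers and attributes reapplied.  */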
10684 
10685 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10686    the inner type.  */
10687 tree
10688 build_vector_type_for_mode (tree innertype, machine_mode mode)
10689 {
10690   int nunits;
10691 
10692   switch (GET_MODE_CLASS (mode))
10693     {
10694     case MODE_VECTOR_INT:
10695     case MODE_VECTOR_FLOAT:
10696     case MODE_VECTOR_FRACT:
10697     case MODE_VECTOR_UFRACT:
10698     case MODE_VECTOR_ACCUM:
10699     case MODE_VECTOR_UACCUM:
10700       nunits = GET_MODE_NUNITS (mode);
10701       break;
10702 
10703     case MODE_INT:
10704       /* Check that there are no leftover bits.  */
10705       gcc_assert (GET_MODE_BITSIZE (mode)
10706 		  % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10707 
10708       nunits = GET_MODE_BITSIZE (mode)
10709 	       / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10710       break;
10711 
10712     default:
10713       gcc_unreachable ();
10714     }
10715 
10716   return make_vector_type (innertype, nunits, mode);
10717 }
10718 
10719 /* Similarly, but takes the inner type and number of units, which must be
10720    a power of two.  */
10721 
10722 tree
10723 build_vector_type (tree innertype, int nunits)
10724 {
10725   return make_vector_type (innertype, nunits, VOIDmode);
10726 }
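
/* Usage sketch (illustrative only): a four-element single-precision
   vector type can be obtained with

     tree v4sf = build_vector_type (float_type_node, 4);

   Passing VOIDmode here means the vector mode is chosen later, when the
   type is laid out.  */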
10727 
10728 /* Build a truth (boolean) vector type with NUNITS units for a vector of VECTOR_SIZE bytes.  */
10729 
10730 tree
10731 build_truth_vector_type (unsigned nunits, unsigned vector_size)
10732 {
10733   machine_mode mask_mode = targetm.vectorize.get_mask_mode (nunits,
10734 							    vector_size);
10735 
10736   gcc_assert (mask_mode != VOIDmode);
10737 
10738   unsigned HOST_WIDE_INT vsize;
10739   if (mask_mode == BLKmode)
10740     vsize = vector_size * BITS_PER_UNIT;
10741   else
10742     vsize = GET_MODE_BITSIZE (mask_mode);
10743 
10744   unsigned HOST_WIDE_INT esize = vsize / nunits;
10745   gcc_assert (esize * nunits == vsize);
10746 
10747   tree bool_type = build_nonstandard_boolean_type (esize);
10748 
10749   return make_vector_type (bool_type, nunits, mask_mode);
10750 }
10751 
10752 /* Returns a vector type corresponding to a comparison of VECTYPE.  */
10753 
10754 tree
10755 build_same_sized_truth_vector_type (tree vectype)
10756 {
10757   if (VECTOR_BOOLEAN_TYPE_P (vectype))
10758     return vectype;
10759 
10760   unsigned HOST_WIDE_INT size = GET_MODE_SIZE (TYPE_MODE (vectype));
10761 
10762   if (!size)
10763     size = tree_to_uhwi (TYPE_SIZE_UNIT (vectype));
10764 
10765   return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype), size);
10766 }
10767 
10768 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set.  */
10769 
10770 tree
10771 build_opaque_vector_type (tree innertype, int nunits)
10772 {
10773   tree t = make_vector_type (innertype, nunits, VOIDmode);
10774   tree cand;
10775   /* We always build the non-opaque variant before the opaque one,
10776      so if it already exists, it is TYPE_NEXT_VARIANT of this one.  */
10777   cand = TYPE_NEXT_VARIANT (t);
10778   if (cand
10779       && TYPE_VECTOR_OPAQUE (cand)
10780       && check_qualified_type (cand, t, TYPE_QUALS (t)))
10781     return cand;
10782   /* Otherwise build a variant type and make sure to queue it after
10783      the non-opaque type.  */
10784   cand = build_distinct_type_copy (t);
10785   TYPE_VECTOR_OPAQUE (cand) = true;
10786   TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10787   TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10788   TYPE_NEXT_VARIANT (t) = cand;
10789   TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10790   return cand;
10791 }
10792 
10793 
10794 /* Given an initializer INIT, return TRUE if INIT is zero or some
10795    aggregate of zeros.  Otherwise return FALSE.  */
10796 bool
10797 initializer_zerop (const_tree init)
10798 {
10799   tree elt;
10800 
10801   STRIP_NOPS (init);
10802 
10803   switch (TREE_CODE (init))
10804     {
10805     case INTEGER_CST:
10806       return integer_zerop (init);
10807 
10808     case REAL_CST:
10809       /* ??? Note that this is not correct for C4X float formats.  There,
10810 	 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10811 	 negative exponent.  */
10812       return real_zerop (init)
10813 	&& ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10814 
10815     case FIXED_CST:
10816       return fixed_zerop (init);
10817 
10818     case COMPLEX_CST:
10819       return integer_zerop (init)
10820 	|| (real_zerop (init)
10821 	    && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10822 	    && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10823 
10824     case VECTOR_CST:
10825       {
10826 	unsigned i;
10827 	for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10828 	  if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10829 	    return false;
10830 	return true;
10831       }
10832 
10833     case CONSTRUCTOR:
10834       {
10835 	unsigned HOST_WIDE_INT idx;
10836 
10837 	if (TREE_CLOBBER_P (init))
10838 	  return false;
10839 	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10840 	  if (!initializer_zerop (elt))
10841 	    return false;
10842 	return true;
10843       }
10844 
10845     case STRING_CST:
10846       {
10847 	int i;
10848 
10849 	/* We need to loop through all elements to handle cases like
10850 	   "\0" and "\0foobar".  */
10851 	for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10852 	  if (TREE_STRING_POINTER (init)[i] != '\0')
10853 	    return false;
10854 
10855 	return true;
10856       }
10857 
10858     default:
10859       return false;
10860     }
10861 }
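
/* Examples (illustrative only): initializer_zerop returns true for the
   INTEGER_CST 0, for the REAL_CST 0.0 (but not for -0.0), for a
   STRING_CST such as "\0\0\0", and for a CONSTRUCTOR all of whose
   elements are themselves zero initializers; it returns false for
   clobbers and for anything containing a nonzero element.  */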
10862 
10863 /* Check whether VEC consists entirely of equal elements and whether
10864    the number of elements corresponds to the type of VEC.
10865    The function returns the first element of the vector
10866    or NULL_TREE if the vector is not uniform.  */
10867 tree
10868 uniform_vector_p (const_tree vec)
10869 {
10870   tree first, t;
10871   unsigned i;
10872 
10873   if (vec == NULL_TREE)
10874     return NULL_TREE;
10875 
10876   gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10877 
10878   if (TREE_CODE (vec) == VECTOR_CST)
10879     {
10880       first = VECTOR_CST_ELT (vec, 0);
10881       for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10882 	if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10883 	  return NULL_TREE;
10884 
10885       return first;
10886     }
10887 
10888   else if (TREE_CODE (vec) == CONSTRUCTOR)
10889     {
10890       first = error_mark_node;
10891 
10892       FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10893         {
10894           if (i == 0)
10895             {
10896               first = t;
10897               continue;
10898             }
10899 	  if (!operand_equal_p (first, t, 0))
10900 	    return NULL_TREE;
10901         }
10902       if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10903 	return NULL_TREE;
10904 
10905       return first;
10906     }
10907 
10908   return NULL_TREE;
10909 }
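
/* Example (illustrative only): for a VECTOR_CST such as { 7, 7, 7, 7 }
   this returns the INTEGER_CST 7, while { 7, 7, 7, 0 } or a CONSTRUCTOR
   that does not cover all TYPE_VECTOR_SUBPARTS elements yields
   NULL_TREE.  */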
10910 
10911 /* Build an empty statement at location LOC.  */
10912 
10913 tree
10914 build_empty_stmt (location_t loc)
10915 {
10916   tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10917   SET_EXPR_LOCATION (t, loc);
10918   return t;
10919 }
10920 
10921 
10922 /* Build an OpenMP clause with code CODE.  LOC is the location of the
10923    clause.  */
10924 
10925 tree
10926 build_omp_clause (location_t loc, enum omp_clause_code code)
10927 {
10928   tree t;
10929   int size, length;
10930 
10931   length = omp_clause_num_ops[code];
10932   size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10933 
10934   record_node_allocation_statistics (OMP_CLAUSE, size);
10935 
10936   t = (tree) ggc_internal_alloc (size);
10937   memset (t, 0, size);
10938   TREE_SET_CODE (t, OMP_CLAUSE);
10939   OMP_CLAUSE_SET_CODE (t, code);
10940   OMP_CLAUSE_LOCATION (t) = loc;
10941 
10942   return t;
10943 }
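
/* Usage sketch (illustrative only; loc, decl and clauses are assumed to
   exist in the caller):

     tree c = build_omp_clause (loc, OMP_CLAUSE_PRIVATE);
     OMP_CLAUSE_DECL (c) = decl;
     OMP_CLAUSE_CHAIN (c) = clauses;

   builds a private clause for DECL and chains it onto an existing clause
   list.  */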
10944 
10945 /* Build a tcc_vl_exp object with code CODE and room for LEN operands.  LEN
10946    includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10947    Except for the CODE and operand count field, other storage for the
10948    object is initialized to zeros.  */
10949 
10950 tree
10951 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10952 {
10953   tree t;
10954   int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10955 
10956   gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10957   gcc_assert (len >= 1);
10958 
10959   record_node_allocation_statistics (code, length);
10960 
10961   t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10962 
10963   TREE_SET_CODE (t, code);
10964 
10965   /* Can't use TREE_OPERAND to store the length because if checking is
10966      enabled, it will try to check the length before we store it.  :-P  */
10967   t->exp.operands[0] = build_int_cst (sizetype, len);
10968 
10969   return t;
10970 }
10971 
10972 /* Helper function for build_call_* functions; build a CALL_EXPR with
10973    indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10974    the argument slots.  */
10975 
10976 static tree
10977 build_call_1 (tree return_type, tree fn, int nargs)
10978 {
10979   tree t;
10980 
10981   t = build_vl_exp (CALL_EXPR, nargs + 3);
10982   TREE_TYPE (t) = return_type;
10983   CALL_EXPR_FN (t) = fn;
10984   CALL_EXPR_STATIC_CHAIN (t) = NULL;
10985 
10986   return t;
10987 }
10988 
10989 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10990    FN and a null static chain slot.  NARGS is the number of call arguments
10991    which are specified as "..." arguments.  */
10992 
10993 tree
10994 build_call_nary (tree return_type, tree fn, int nargs, ...)
10995 {
10996   tree ret;
10997   va_list args;
10998   va_start (args, nargs);
10999   ret = build_call_valist (return_type, fn, nargs, args);
11000   va_end (args);
11001   return ret;
11002 }
11003 
11004 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11005    FN and a null static chain slot.  NARGS is the number of call arguments
11006    which are specified as a va_list ARGS.  */
11007 
11008 tree
11009 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
11010 {
11011   tree t;
11012   int i;
11013 
11014   t = build_call_1 (return_type, fn, nargs);
11015   for (i = 0; i < nargs; i++)
11016     CALL_EXPR_ARG (t, i) = va_arg (args, tree);
11017   process_call_operands (t);
11018   return t;
11019 }
11020 
11021 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11022    FN and a null static chain slot.  NARGS is the number of call arguments
11023    which are specified as a tree array ARGS.  */
11024 
11025 tree
11026 build_call_array_loc (location_t loc, tree return_type, tree fn,
11027 		      int nargs, const tree *args)
11028 {
11029   tree t;
11030   int i;
11031 
11032   t = build_call_1 (return_type, fn, nargs);
11033   for (i = 0; i < nargs; i++)
11034     CALL_EXPR_ARG (t, i) = args[i];
11035   process_call_operands (t);
11036   SET_EXPR_LOCATION (t, loc);
11037   return t;
11038 }
11039 
11040 /* Like build_call_array, but takes a vec.  */
11041 
11042 tree
11043 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
11044 {
11045   tree ret, t;
11046   unsigned int ix;
11047 
11048   ret = build_call_1 (return_type, fn, vec_safe_length (args));
11049   FOR_EACH_VEC_SAFE_ELT (args, ix, t)
11050     CALL_EXPR_ARG (ret, ix) = t;
11051   process_call_operands (ret);
11052   return ret;
11053 }
11054 
11055 /* Conveniently construct a function call expression.  FNDECL names the
11056    function to be called and N arguments are passed in the array
11057    ARGARRAY.  */
11058 
11059 tree
11060 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11061 {
11062   tree fntype = TREE_TYPE (fndecl);
11063   tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11064 
11065   return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
11066 }
11067 
11068 /* Conveniently construct a function call expression.  FNDECL names the
11069    function to be called and the arguments are passed in the vector
11070    VEC.  */
11071 
11072 tree
11073 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11074 {
11075   return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11076 				    vec_safe_address (vec));
11077 }
11078 
11079 
11080 /* Conveniently construct a function call expression.  FNDECL names the
11081    function to be called, N is the number of arguments, and the "..."
11082    parameters are the argument expressions.  */
11083 
11084 tree
11085 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11086 {
11087   va_list ap;
11088   tree *argarray = XALLOCAVEC (tree, n);
11089   int i;
11090 
11091   va_start (ap, n);
11092   for (i = 0; i < n; i++)
11093     argarray[i] = va_arg (ap, tree);
11094   va_end (ap);
11095   return build_call_expr_loc_array (loc, fndecl, n, argarray);
11096 }
11097 
11098 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...).  Duplicated because
11099    varargs macros aren't supported by all bootstrap compilers.  */
11100 
11101 tree
11102 build_call_expr (tree fndecl, int n, ...)
11103 {
11104   va_list ap;
11105   tree *argarray = XALLOCAVEC (tree, n);
11106   int i;
11107 
11108   va_start (ap, n);
11109   for (i = 0; i < n; i++)
11110     argarray[i] = va_arg (ap, tree);
11111   va_end (ap);
11112   return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11113 }
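
/* Usage sketch (illustrative only; dst, src and len are assumed to be
   existing tree operands): a call to memcpy can be built with

     tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
     tree call = build_call_expr (fn, 3, dst, src, len);

   which forwards to build_call_expr_loc_array with UNKNOWN_LOCATION.  */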
11114 
11115 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
11116    type TYPE.  This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
11117    It will get gimplified later into an ordinary internal function.  */
11118 
11119 tree
11120 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
11121 				    tree type, int n, const tree *args)
11122 {
11123   tree t = build_call_1 (type, NULL_TREE, n);
11124   for (int i = 0; i < n; ++i)
11125     CALL_EXPR_ARG (t, i) = args[i];
11126   SET_EXPR_LOCATION (t, loc);
11127   CALL_EXPR_IFN (t) = ifn;
11128   return t;
11129 }
11130 
11131 /* Build internal call expression.  This is just like CALL_EXPR, except
11132    its CALL_EXPR_FN is NULL.  It will get gimplified later into ordinary
11133    internal function.  */
11134 
11135 tree
11136 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
11137 			      tree type, int n, ...)
11138 {
11139   va_list ap;
11140   tree *argarray = XALLOCAVEC (tree, n);
11141   int i;
11142 
11143   va_start (ap, n);
11144   for (i = 0; i < n; i++)
11145     argarray[i] = va_arg (ap, tree);
11146   va_end (ap);
11147   return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11148 }
11149 
11150 /* Return a function call to FN, if the target is guaranteed to support it,
11151    or null otherwise.
11152 
11153    N is the number of arguments, passed in the "...", and TYPE is the
11154    type of the return value.  */
11155 
11156 tree
11157 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
11158 			   int n, ...)
11159 {
11160   va_list ap;
11161   tree *argarray = XALLOCAVEC (tree, n);
11162   int i;
11163 
11164   va_start (ap, n);
11165   for (i = 0; i < n; i++)
11166     argarray[i] = va_arg (ap, tree);
11167   va_end (ap);
11168   if (internal_fn_p (fn))
11169     {
11170       internal_fn ifn = as_internal_fn (fn);
11171       if (direct_internal_fn_p (ifn))
11172 	{
11173 	  tree_pair types = direct_internal_fn_types (ifn, type, argarray);
11174 	  if (!direct_internal_fn_supported_p (ifn, types,
11175 					       OPTIMIZE_FOR_BOTH))
11176 	    return NULL_TREE;
11177 	}
11178       return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11179     }
11180   else
11181     {
11182       tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
11183       if (!fndecl)
11184 	return NULL_TREE;
11185       return build_call_expr_loc_array (loc, fndecl, n, argarray);
11186     }
11187 }
11188 
11189 /* Create a new constant string literal and return a char* pointer to it.
11190    The STRING_CST value is the LEN characters at STR.  */
11191 tree
11192 build_string_literal (int len, const char *str)
11193 {
11194   tree t, elem, index, type;
11195 
11196   t = build_string (len, str);
11197   elem = build_type_variant (char_type_node, 1, 0);
11198   index = build_index_type (size_int (len - 1));
11199   type = build_array_type (elem, index);
11200   TREE_TYPE (t) = type;
11201   TREE_CONSTANT (t) = 1;
11202   TREE_READONLY (t) = 1;
11203   TREE_STATIC (t) = 1;
11204 
11205   type = build_pointer_type (elem);
11206   t = build1 (ADDR_EXPR, type,
11207 	      build4 (ARRAY_REF, elem,
11208 		      t, integer_zero_node, NULL_TREE, NULL_TREE));
11209   return t;
11210 }
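
/* Usage sketch (illustrative only): a pointer to the literal "%d\n",
   suitable as the format argument of a printf-like builtin, can be
   built with

     tree fmt = build_string_literal (strlen ("%d\n") + 1, "%d\n");

   Note that LEN must include the terminating NUL, which is why callers
   typically pass strlen () + 1.  */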
11211 
11212 
11213 
11214 /* Return true if T (assumed to be a DECL) must be assigned a memory
11215    location.  */
11216 
11217 bool
11218 needs_to_live_in_memory (const_tree t)
11219 {
11220   return (TREE_ADDRESSABLE (t)
11221 	  || is_global_var (t)
11222 	  || (TREE_CODE (t) == RESULT_DECL
11223 	      && !DECL_BY_REFERENCE (t)
11224 	      && aggregate_value_p (t, current_function_decl)));
11225 }
11226 
11227 /* Return the value of the constant X, sign-extended.  */
11228 
11229 HOST_WIDE_INT
11230 int_cst_value (const_tree x)
11231 {
11232   unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
11233   unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
11234 
11235   /* Make sure the sign-extended value will fit in a HOST_WIDE_INT.  */
11236   gcc_assert (cst_and_fits_in_hwi (x));
11237 
11238   if (bits < HOST_BITS_PER_WIDE_INT)
11239     {
11240       bool negative = ((val >> (bits - 1)) & 1) != 0;
11241       if (negative)
11242 	val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
11243       else
11244 	val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
11245     }
11246 
11247   return val;
11248 }
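
/* Worked example (illustrative only): for a constant of 8-bit precision
   whose low byte is 0xff, BITS is 8 and bit 7 is set, so the mask
   ~0 << 8 is ORed in and the function returns -1 as a HOST_WIDE_INT.  */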
11249 
11250 /* If TYPE is an integral or pointer type, return an integer type with
11251    the same precision which is unsigned iff UNSIGNEDP is true, or itself
11252    if TYPE is already an integer type of signedness UNSIGNEDP.  */
11253 
11254 tree
11255 signed_or_unsigned_type_for (int unsignedp, tree type)
11256 {
11257   if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
11258     return type;
11259 
11260   if (TREE_CODE (type) == VECTOR_TYPE)
11261     {
11262       tree inner = TREE_TYPE (type);
11263       tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11264       if (!inner2)
11265 	return NULL_TREE;
11266       if (inner == inner2)
11267 	return type;
11268       return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11269     }
11270 
11271   if (!INTEGRAL_TYPE_P (type)
11272       && !POINTER_TYPE_P (type)
11273       && TREE_CODE (type) != OFFSET_TYPE)
11274     return NULL_TREE;
11275 
11276   return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
11277 }
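
/* Examples (illustrative only): signed_or_unsigned_type_for (1,
   integer_type_node) returns an unsigned integer type of the same
   precision, while for a vector type the conversion is applied to the
   element type, keeping the number of subparts; NULL_TREE is returned
   for types that are neither integral, pointer, offset nor vector.  */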
11278 
11279 /* If TYPE is an integral or pointer type, return an integer type with
11280    the same precision which is unsigned, or itself if TYPE is already an
11281    unsigned integer type.  */
11282 
11283 tree
11284 unsigned_type_for (tree type)
11285 {
11286   return signed_or_unsigned_type_for (1, type);
11287 }
11288 
11289 /* If TYPE is an integral or pointer type, return an integer type with
11290    the same precision which is signed, or itself if TYPE is already a
11291    signed integer type.  */
11292 
11293 tree
11294 signed_type_for (tree type)
11295 {
11296   return signed_or_unsigned_type_for (0, type);
11297 }
11298 
11299 /* If TYPE is a vector type, return a boolean vector type with the
11300    same number of subparts and size.  Otherwise return boolean_type_node.  */
11301 
11302 tree
11303 truth_type_for (tree type)
11304 {
11305   if (TREE_CODE (type) == VECTOR_TYPE)
11306     {
11307       if (VECTOR_BOOLEAN_TYPE_P (type))
11308 	return type;
11309       return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type),
11310 				      GET_MODE_SIZE (TYPE_MODE (type)));
11311     }
11312   else
11313     return boolean_type_node;
11314 }
11315 
11316 /* Returns the largest value obtainable by casting something in INNER type to
11317    OUTER type.  */
11318 
11319 tree
11320 upper_bound_in_type (tree outer, tree inner)
11321 {
11322   unsigned int det = 0;
11323   unsigned oprec = TYPE_PRECISION (outer);
11324   unsigned iprec = TYPE_PRECISION (inner);
11325   unsigned prec;
11326 
11327   /* Compute a unique number for every combination.  */
11328   det |= (oprec > iprec) ? 4 : 0;
11329   det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11330   det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11331 
11332   /* Determine the exponent to use.  */
11333   switch (det)
11334     {
11335     case 0:
11336     case 1:
11337       /* oprec <= iprec, outer: signed, inner: don't care.  */
11338       prec = oprec - 1;
11339       break;
11340     case 2:
11341     case 3:
11342       /* oprec <= iprec, outer: unsigned, inner: don't care.  */
11343       prec = oprec;
11344       break;
11345     case 4:
11346       /* oprec > iprec, outer: signed, inner: signed.  */
11347       prec = iprec - 1;
11348       break;
11349     case 5:
11350       /* oprec > iprec, outer: signed, inner: unsigned.  */
11351       prec = iprec;
11352       break;
11353     case 6:
11354       /* oprec > iprec, outer: unsigned, inner: signed.  */
11355       prec = oprec;
11356       break;
11357     case 7:
11358       /* oprec > iprec, outer: unsigned, inner: unsigned.  */
11359       prec = iprec;
11360       break;
11361     default:
11362       gcc_unreachable ();
11363     }
11364 
11365   return wide_int_to_tree (outer,
11366 			   wi::mask (prec, false, TYPE_PRECISION (outer)));
11367 }
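
/* Worked example (illustrative only): casting from "int" (INNER, iprec
   32, signed) to "signed char" (OUTER, oprec 8, signed): oprec <= iprec
   and OUTER is signed, so DET is 0, PREC is oprec - 1 = 7, and the
   returned upper bound is 2^7 - 1 = 127 in the outer type.  */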
11368 
11369 /* Returns the smallest value obtainable by casting something in INNER type to
11370    OUTER type.  */
11371 
11372 tree
11373 lower_bound_in_type (tree outer, tree inner)
11374 {
11375   unsigned oprec = TYPE_PRECISION (outer);
11376   unsigned iprec = TYPE_PRECISION (inner);
11377 
11378   /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11379      and obtain 0.  */
11380   if (TYPE_UNSIGNED (outer)
11381       /* If we are widening something of an unsigned type, OUTER type
11382 	 contains all values of INNER type.  In particular, both INNER
11383 	 and OUTER types have zero in common.  */
11384       || (oprec > iprec && TYPE_UNSIGNED (inner)))
11385     return build_int_cst (outer, 0);
11386   else
11387     {
11388       /* If we are widening a signed type to another signed type, we
11389 	 want to obtain -2^(iprec-1).  If we are keeping the
11390 	 precision or narrowing to a signed type, we want to obtain
11391 	 -2^(oprec-1).  */
11392       unsigned prec = oprec > iprec ? iprec : oprec;
11393       return wide_int_to_tree (outer,
11394 			       wi::mask (prec - 1, true,
11395 					 TYPE_PRECISION (outer)));
11396     }
11397 }
11398 
11399 /* Return nonzero if two operands that are suitable for PHI nodes are
11400    necessarily equal.  Specifically, both ARG0 and ARG1 must be either
11401    SSA_NAME or invariant.  Note that this is strictly an optimization.
11402    That is, callers of this function can directly call operand_equal_p
11403    and get the same result, only slower.  */
11404 
11405 int
11406 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11407 {
11408   if (arg0 == arg1)
11409     return 1;
11410   if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11411     return 0;
11412   return operand_equal_p (arg0, arg1, 0);
11413 }
11414 
11415 /* Returns the number of zeros at the end of the binary representation of X.  */
11416 
11417 tree
11418 num_ending_zeros (const_tree x)
11419 {
11420   return build_int_cst (TREE_TYPE (x), wi::ctz (x));
11421 }
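
/* Example (illustrative only): for an INTEGER_CST with value 40
   (binary 101000), wi::ctz yields 3, so the result is the constant 3
   expressed in the type of X.  */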
11422 
11423 
11424 #define WALK_SUBTREE(NODE)				\
11425   do							\
11426     {							\
11427       result = walk_tree_1 (&(NODE), func, data, pset, lh);	\
11428       if (result)					\
11429 	return result;					\
11430     }							\
11431   while (0)
11432 
11433 /* This is a subroutine of walk_tree that walks the fields of TYPE that
11434    are to be walked whenever a type is seen in the tree.  The rest of the
11435    operands and the return value are as for walk_tree.  */
11436 
11437 static tree
11438 walk_type_fields (tree type, walk_tree_fn func, void *data,
11439 		  hash_set<tree> *pset, walk_tree_lh lh)
11440 {
11441   tree result = NULL_TREE;
11442 
11443   switch (TREE_CODE (type))
11444     {
11445     case POINTER_TYPE:
11446     case REFERENCE_TYPE:
11447     case VECTOR_TYPE:
11448       /* We have to worry about mutually recursive pointers.  These can't
11449 	 be written in C.  They can in Ada.  It's pathological, but
11450 	 there's an ACATS test (c38102a) that checks it.  Deal with this
11451 	 by checking if we're pointing to another pointer, that one
11452 	 points to another pointer, that one does too, and we have no htab.
11453 	 If so, get a hash table.  We check three levels deep to avoid
11454 	 the cost of the hash table if we don't need one.  */
11455       if (POINTER_TYPE_P (TREE_TYPE (type))
11456 	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11457 	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11458 	  && !pset)
11459 	{
11460 	  result = walk_tree_without_duplicates (&TREE_TYPE (type),
11461 						 func, data);
11462 	  if (result)
11463 	    return result;
11464 
11465 	  break;
11466 	}
11467 
11468       /* ... fall through ... */
11469 
11470     case COMPLEX_TYPE:
11471       WALK_SUBTREE (TREE_TYPE (type));
11472       break;
11473 
11474     case METHOD_TYPE:
11475       WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11476 
11477       /* Fall through.  */
11478 
11479     case FUNCTION_TYPE:
11480       WALK_SUBTREE (TREE_TYPE (type));
11481       {
11482 	tree arg;
11483 
11484 	/* We never want to walk into default arguments.  */
11485 	for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11486 	  WALK_SUBTREE (TREE_VALUE (arg));
11487       }
11488       break;
11489 
11490     case ARRAY_TYPE:
11491       /* Don't follow this node's type if it is a pointer, for fear that
11492 	 we'll have infinite recursion.  If we have a PSET, then we
11493 	 need not fear.  */
11494       if (pset
11495 	  || (!POINTER_TYPE_P (TREE_TYPE (type))
11496 	      && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11497 	WALK_SUBTREE (TREE_TYPE (type));
11498       WALK_SUBTREE (TYPE_DOMAIN (type));
11499       break;
11500 
11501     case OFFSET_TYPE:
11502       WALK_SUBTREE (TREE_TYPE (type));
11503       WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11504       break;
11505 
11506     default:
11507       break;
11508     }
11509 
11510   return NULL_TREE;
11511 }
11512 
11513 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal.  FUNC is
11514    called with the DATA and the address of each sub-tree.  If FUNC returns a
11515    non-NULL value, the traversal is stopped, and the value returned by FUNC
11516    is returned.  If PSET is non-NULL it is used to record the nodes visited,
11517    and to avoid visiting a node more than once.  */
11518 
11519 tree
11520 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11521 	     hash_set<tree> *pset, walk_tree_lh lh)
11522 {
11523   enum tree_code code;
11524   int walk_subtrees;
11525   tree result;
11526 
11527 #define WALK_SUBTREE_TAIL(NODE)				\
11528   do							\
11529     {							\
11530        tp = & (NODE);					\
11531        goto tail_recurse;				\
11532     }							\
11533   while (0)
11534 
11535  tail_recurse:
11536   /* Skip empty subtrees.  */
11537   if (!*tp)
11538     return NULL_TREE;
11539 
11540   /* Don't walk the same tree twice, if the user has requested
11541      that we avoid doing so.  */
11542   if (pset && pset->add (*tp))
11543     return NULL_TREE;
11544 
11545   /* Call the function.  */
11546   walk_subtrees = 1;
11547   result = (*func) (tp, &walk_subtrees, data);
11548 
11549   /* If we found something, return it.  */
11550   if (result)
11551     return result;
11552 
11553   code = TREE_CODE (*tp);
11554 
11555   /* Even if we didn't, FUNC may have decided that there was nothing
11556      interesting below this point in the tree.  */
11557   if (!walk_subtrees)
11558     {
11559       /* But we still need to check our siblings.  */
11560       if (code == TREE_LIST)
11561 	WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11562       else if (code == OMP_CLAUSE)
11563 	WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11564       else
11565 	return NULL_TREE;
11566     }
11567 
11568   if (lh)
11569     {
11570       result = (*lh) (tp, &walk_subtrees, func, data, pset);
11571       if (result || !walk_subtrees)
11572         return result;
11573     }
11574 
11575   switch (code)
11576     {
11577     case ERROR_MARK:
11578     case IDENTIFIER_NODE:
11579     case INTEGER_CST:
11580     case REAL_CST:
11581     case FIXED_CST:
11582     case VECTOR_CST:
11583     case STRING_CST:
11584     case BLOCK:
11585     case PLACEHOLDER_EXPR:
11586     case SSA_NAME:
11587     case FIELD_DECL:
11588     case RESULT_DECL:
11589       /* None of these have subtrees other than those already walked
11590 	 above.  */
11591       break;
11592 
11593     case TREE_LIST:
11594       WALK_SUBTREE (TREE_VALUE (*tp));
11595       WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11596       break;
11597 
11598     case TREE_VEC:
11599       {
11600 	int len = TREE_VEC_LENGTH (*tp);
11601 
11602 	if (len == 0)
11603 	  break;
11604 
11605 	/* Walk all elements but the first.  */
11606 	while (--len)
11607 	  WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11608 
11609 	/* Now walk the first one as a tail call.  */
11610 	WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11611       }
11612 
11613     case COMPLEX_CST:
11614       WALK_SUBTREE (TREE_REALPART (*tp));
11615       WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11616 
11617     case CONSTRUCTOR:
11618       {
11619 	unsigned HOST_WIDE_INT idx;
11620 	constructor_elt *ce;
11621 
11622 	for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11623 	     idx++)
11624 	  WALK_SUBTREE (ce->value);
11625       }
11626       break;
11627 
11628     case SAVE_EXPR:
11629       WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11630 
11631     case BIND_EXPR:
11632       {
11633 	tree decl;
11634 	for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11635 	  {
11636 	    /* Walk the DECL_INITIAL and DECL_SIZE.  We don't want to walk
11637 	       into declarations that are just mentioned, rather than
11638 	       declared; they don't really belong to this part of the tree.
11639 	       And, we can see cycles: the initializer for a declaration
11640 	       can refer to the declaration itself.  */
11641 	    WALK_SUBTREE (DECL_INITIAL (decl));
11642 	    WALK_SUBTREE (DECL_SIZE (decl));
11643 	    WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11644 	  }
11645 	WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11646       }
11647 
11648     case STATEMENT_LIST:
11649       {
11650 	tree_stmt_iterator i;
11651 	for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11652 	  WALK_SUBTREE (*tsi_stmt_ptr (i));
11653       }
11654       break;
11655 
11656     case OMP_CLAUSE:
11657       switch (OMP_CLAUSE_CODE (*tp))
11658 	{
11659 	case OMP_CLAUSE_GANG:
11660 	case OMP_CLAUSE__GRIDDIM_:
11661 	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11662 	  /* FALLTHRU */
11663 
11664 	case OMP_CLAUSE_DEVICE_RESIDENT:
11665 	case OMP_CLAUSE_ASYNC:
11666 	case OMP_CLAUSE_WAIT:
11667 	case OMP_CLAUSE_WORKER:
11668 	case OMP_CLAUSE_VECTOR:
11669 	case OMP_CLAUSE_NUM_GANGS:
11670 	case OMP_CLAUSE_NUM_WORKERS:
11671 	case OMP_CLAUSE_VECTOR_LENGTH:
11672 	case OMP_CLAUSE_PRIVATE:
11673 	case OMP_CLAUSE_SHARED:
11674 	case OMP_CLAUSE_FIRSTPRIVATE:
11675 	case OMP_CLAUSE_COPYIN:
11676 	case OMP_CLAUSE_COPYPRIVATE:
11677 	case OMP_CLAUSE_FINAL:
11678 	case OMP_CLAUSE_IF:
11679 	case OMP_CLAUSE_NUM_THREADS:
11680 	case OMP_CLAUSE_SCHEDULE:
11681 	case OMP_CLAUSE_UNIFORM:
11682 	case OMP_CLAUSE_DEPEND:
11683 	case OMP_CLAUSE_NUM_TEAMS:
11684 	case OMP_CLAUSE_THREAD_LIMIT:
11685 	case OMP_CLAUSE_DEVICE:
11686 	case OMP_CLAUSE_DIST_SCHEDULE:
11687 	case OMP_CLAUSE_SAFELEN:
11688 	case OMP_CLAUSE_SIMDLEN:
11689 	case OMP_CLAUSE_ORDERED:
11690 	case OMP_CLAUSE_PRIORITY:
11691 	case OMP_CLAUSE_GRAINSIZE:
11692 	case OMP_CLAUSE_NUM_TASKS:
11693 	case OMP_CLAUSE_HINT:
11694 	case OMP_CLAUSE_TO_DECLARE:
11695 	case OMP_CLAUSE_LINK:
11696 	case OMP_CLAUSE_USE_DEVICE_PTR:
11697 	case OMP_CLAUSE_IS_DEVICE_PTR:
11698 	case OMP_CLAUSE__LOOPTEMP_:
11699 	case OMP_CLAUSE__SIMDUID_:
11700 	case OMP_CLAUSE__CILK_FOR_COUNT_:
11701 	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11702 	  /* FALLTHRU */
11703 
11704 	case OMP_CLAUSE_INDEPENDENT:
11705 	case OMP_CLAUSE_NOWAIT:
11706 	case OMP_CLAUSE_DEFAULT:
11707 	case OMP_CLAUSE_UNTIED:
11708 	case OMP_CLAUSE_MERGEABLE:
11709 	case OMP_CLAUSE_PROC_BIND:
11710 	case OMP_CLAUSE_INBRANCH:
11711 	case OMP_CLAUSE_NOTINBRANCH:
11712 	case OMP_CLAUSE_FOR:
11713 	case OMP_CLAUSE_PARALLEL:
11714 	case OMP_CLAUSE_SECTIONS:
11715 	case OMP_CLAUSE_TASKGROUP:
11716 	case OMP_CLAUSE_NOGROUP:
11717 	case OMP_CLAUSE_THREADS:
11718 	case OMP_CLAUSE_SIMD:
11719 	case OMP_CLAUSE_DEFAULTMAP:
11720 	case OMP_CLAUSE_AUTO:
11721 	case OMP_CLAUSE_SEQ:
11722 	case OMP_CLAUSE_TILE:
11723 	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11724 
11725 	case OMP_CLAUSE_LASTPRIVATE:
11726 	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11727 	  WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11728 	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11729 
11730 	case OMP_CLAUSE_COLLAPSE:
11731 	  {
11732 	    int i;
11733 	    for (i = 0; i < 3; i++)
11734 	      WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11735 	    WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11736 	  }
11737 
11738 	case OMP_CLAUSE_LINEAR:
11739 	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11740 	  WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11741 	  WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11742 	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11743 
11744 	case OMP_CLAUSE_ALIGNED:
11745 	case OMP_CLAUSE_FROM:
11746 	case OMP_CLAUSE_TO:
11747 	case OMP_CLAUSE_MAP:
11748 	case OMP_CLAUSE__CACHE_:
11749 	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11750 	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11751 	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11752 
11753 	case OMP_CLAUSE_REDUCTION:
11754 	  {
11755 	    int i;
11756 	    for (i = 0; i < 5; i++)
11757 	      WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11758 	    WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11759 	  }
11760 
11761 	default:
11762 	  gcc_unreachable ();
11763 	}
11764       break;
11765 
11766     case TARGET_EXPR:
11767       {
11768 	int i, len;
11769 
11770 	/* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11771 	   But we only want to walk them once.  */
11772 	len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11773 	for (i = 0; i < len; ++i)
11774 	  WALK_SUBTREE (TREE_OPERAND (*tp, i));
11775 	WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11776       }
11777 
11778     case DECL_EXPR:
11779       /* If this is a TYPE_DECL, walk into the fields of the type that it's
11780 	 defining.  We only want to walk into these fields of a type in this
11781 	 case and not in the general case of a mere reference to the type.
11782 
11783 	 The criterion is as follows: if the field can be an expression, it
11784 	 must be walked only here.  This should be in keeping with the fields
11785 	 that are directly gimplified in gimplify_type_sizes in order for the
11786 	 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11787 	 variable-sized types.
11788 
11789 	 Note that DECLs get walked as part of processing the BIND_EXPR.  */
11790       if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11791 	{
11792 	  tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11793 	  if (TREE_CODE (*type_p) == ERROR_MARK)
11794 	    return NULL_TREE;
11795 
11796 	  /* Call the function for the type.  See if it returns anything or
11797 	     doesn't want us to continue.  If we are to continue, walk both
11798 	     the normal fields and those for the declaration case.  */
11799 	  result = (*func) (type_p, &walk_subtrees, data);
11800 	  if (result || !walk_subtrees)
11801 	    return result;
11802 
11803 	  /* But do not walk a pointed-to type since it may itself need to
11804 	     be walked in the declaration case if it isn't anonymous.  */
11805 	  if (!POINTER_TYPE_P (*type_p))
11806 	    {
11807 	      result = walk_type_fields (*type_p, func, data, pset, lh);
11808 	      if (result)
11809 		return result;
11810 	    }
11811 
11812 	  /* If this is a record type, also walk the fields.  */
11813 	  if (RECORD_OR_UNION_TYPE_P (*type_p))
11814 	    {
11815 	      tree field;
11816 
11817 	      for (field = TYPE_FIELDS (*type_p); field;
11818 		   field = DECL_CHAIN (field))
11819 		{
11820 		  /* We'd like to look at the type of the field, but we can
11821 		     easily get infinite recursion.  So assume it's pointed
11822 		     to elsewhere in the tree.  Also, ignore things that
11823 		     aren't fields.  */
11824 		  if (TREE_CODE (field) != FIELD_DECL)
11825 		    continue;
11826 
11827 		  WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11828 		  WALK_SUBTREE (DECL_SIZE (field));
11829 		  WALK_SUBTREE (DECL_SIZE_UNIT (field));
11830 		  if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11831 		    WALK_SUBTREE (DECL_QUALIFIER (field));
11832 		}
11833 	    }
11834 
11835 	  /* Same for scalar types.  */
11836 	  else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11837 		   || TREE_CODE (*type_p) == ENUMERAL_TYPE
11838 		   || TREE_CODE (*type_p) == INTEGER_TYPE
11839 		   || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11840 		   || TREE_CODE (*type_p) == REAL_TYPE)
11841 	    {
11842 	      WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11843 	      WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11844 	    }
11845 
11846 	  WALK_SUBTREE (TYPE_SIZE (*type_p));
11847 	  WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11848 	}
11849       /* FALLTHRU */
11850 
11851     default:
11852       if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11853 	{
11854 	  int i, len;
11855 
11856 	  /* Walk over all the sub-trees of this operand.  */
11857 	  len = TREE_OPERAND_LENGTH (*tp);
11858 
11859 	  /* Go through the subtrees.  We need to do this in forward order so
11860 	     that the scope of a FOR_EXPR is handled properly.  */
11861 	  if (len)
11862 	    {
11863 	      for (i = 0; i < len - 1; ++i)
11864 		WALK_SUBTREE (TREE_OPERAND (*tp, i));
11865 	      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11866 	    }
11867 	}
11868       /* If this is a type, walk the needed fields in the type.  */
11869       else if (TYPE_P (*tp))
11870 	return walk_type_fields (*tp, func, data, pset, lh);
11871       break;
11872     }
11873 
11874   /* We didn't find what we were looking for.  */
11875   return NULL_TREE;
11876 
11877 #undef WALK_SUBTREE_TAIL
11878 }
11879 #undef WALK_SUBTREE
11880 
11881 /* Like walk_tree, but does not walk duplicate nodes more than once.  */
11882 
11883 tree
11884 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11885 				walk_tree_lh lh)
11886 {
11887   tree result;
11888 
11889   hash_set<tree> pset;
11890   result = walk_tree_1 (tp, func, data, &pset, lh);
11891   return result;
11892 }
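
/* Usage sketch (an illustrative comment; find_addr_expr_r is a
   hypothetical callback, not part of this file): a walk that stops at
   the first ADDR_EXPR could be written as

     static tree
     find_addr_expr_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
		       void *data ATTRIBUTE_UNUSED)
     {
       if (TREE_CODE (*tp) == ADDR_EXPR)
	 return *tp;
       return NULL_TREE;
     }

     ...
     tree addr = walk_tree (&expr, find_addr_expr_r, NULL, NULL);

   The walk stops as soon as the callback returns a non-NULL value.  */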
11893 
11894 
11895 tree
11896 tree_block (tree t)
11897 {
11898   const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11899 
11900   if (IS_EXPR_CODE_CLASS (c))
11901     return LOCATION_BLOCK (t->exp.locus);
11902   gcc_unreachable ();
11903   return NULL;
11904 }
11905 
11906 void
11907 tree_set_block (tree t, tree b)
11908 {
11909   const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11910 
11911   if (IS_EXPR_CODE_CLASS (c))
11912     {
11913       t->exp.locus = set_block (t->exp.locus, b);
11914     }
11915   else
11916     gcc_unreachable ();
11917 }
11918 
11919 /* Create a nameless artificial label and put it in the current
11920    function context.  The label has a location of LOC.  Returns the
11921    newly created label.  */
11922 
11923 tree
11924 create_artificial_label (location_t loc)
11925 {
11926   tree lab = build_decl (loc,
11927       			 LABEL_DECL, NULL_TREE, void_type_node);
11928 
11929   DECL_ARTIFICIAL (lab) = 1;
11930   DECL_IGNORED_P (lab) = 1;
11931   DECL_CONTEXT (lab) = current_function_decl;
11932   return lab;
11933 }
11934 
11935 /*  Given a tree, try to return a useful variable name that we can use
11936     to prefix a temporary that is being assigned the value of the tree.
11937     I.e. given <temp> = &A, return A.  */
11938 
11939 const char *
11940 get_name (tree t)
11941 {
11942   tree stripped_decl;
11943 
11944   stripped_decl = t;
11945   STRIP_NOPS (stripped_decl);
11946   if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11947     return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11948   else if (TREE_CODE (stripped_decl) == SSA_NAME)
11949     {
11950       tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11951       if (!name)
11952 	return NULL;
11953       return IDENTIFIER_POINTER (name);
11954     }
11955   else
11956     {
11957       switch (TREE_CODE (stripped_decl))
11958 	{
11959 	case ADDR_EXPR:
11960 	  return get_name (TREE_OPERAND (stripped_decl, 0));
11961 	default:
11962 	  return NULL;
11963 	}
11964     }
11965 }
11966 
11967 /* Return true if FNTYPE has a variable argument list.  */
11968 
11969 bool
11970 stdarg_p (const_tree fntype)
11971 {
11972   function_args_iterator args_iter;
11973   tree n = NULL_TREE, t;
11974 
11975   if (!fntype)
11976     return false;
11977 
11978   FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11979     {
11980       n = t;
11981     }
11982 
11983   return n != NULL_TREE && n != void_type_node;
11984 }
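
/* A brief illustration (not in the original source; the declarations are
   hypothetical).  For

       int log_msg (const char *fmt, ...);

   the argument list of the function type ends in a non-void entry, so
   stdarg_p returns true.  For "int f (void)" the list ends in
   void_type_node, and for an unprototyped "int g ()" the list is empty,
   so both of those yield false.  */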
11985 
11986 /* Return true if TYPE has a prototype.  */
11987 
11988 bool
11989 prototype_p (const_tree fntype)
11990 {
11991   tree t;
11992 
11993   gcc_assert (fntype != NULL_TREE);
11994 
11995   t = TYPE_ARG_TYPES (fntype);
11996   return (t != NULL_TREE);
11997 }
11998 
11999 /* If BLOCK is inlined from an __attribute__((__artificial__))
12000    routine, return a pointer to the location from which it has been
12001    called.  */
12002 location_t *
12003 block_nonartificial_location (tree block)
12004 {
12005   location_t *ret = NULL;
12006 
12007   while (block && TREE_CODE (block) == BLOCK
12008 	 && BLOCK_ABSTRACT_ORIGIN (block))
12009     {
12010       tree ao = BLOCK_ABSTRACT_ORIGIN (block);
12011 
12012       while (TREE_CODE (ao) == BLOCK
12013 	     && BLOCK_ABSTRACT_ORIGIN (ao)
12014 	     && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
12015 	ao = BLOCK_ABSTRACT_ORIGIN (ao);
12016 
12017       if (TREE_CODE (ao) == FUNCTION_DECL)
12018 	{
12019 	  /* If AO is an artificial inline, point RET to the
12020 	     call site locus at which it has been inlined and continue
12021 	     the loop, in case AO's caller is also an artificial
12022 	     inline.  */
12023 	  if (DECL_DECLARED_INLINE_P (ao)
12024 	      && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
12025 	    ret = &BLOCK_SOURCE_LOCATION (block);
12026 	  else
12027 	    break;
12028 	}
12029       else if (TREE_CODE (ao) != BLOCK)
12030 	break;
12031 
12032       block = BLOCK_SUPERCONTEXT (block);
12033     }
12034   return ret;
12035 }
12036 
12037 
12038 /* If EXP is inlined from an __attribute__((__artificial__))
12039    function, return the location of the original call expression.  */
12040 
12041 location_t
12042 tree_nonartificial_location (tree exp)
12043 {
12044   location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
12045 
12046   if (loc)
12047     return *loc;
12048   else
12049     return EXPR_LOCATION (exp);
12050 }
12051 
12052 
12053 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
12054    nodes.  */
12055 
12056 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE.  */
12057 
12058 hashval_t
12059 cl_option_hasher::hash (tree x)
12060 {
12061   const_tree const t = x;
12062   const char *p;
12063   size_t i;
12064   size_t len = 0;
12065   hashval_t hash = 0;
12066 
12067   if (TREE_CODE (t) == OPTIMIZATION_NODE)
12068     {
12069       p = (const char *)TREE_OPTIMIZATION (t);
12070       len = sizeof (struct cl_optimization);
12071     }
12072 
12073   else if (TREE_CODE (t) == TARGET_OPTION_NODE)
12074     return cl_target_option_hash (TREE_TARGET_OPTION (t));
12075 
12076   else
12077     gcc_unreachable ();
12078 
12079   /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
12080      something else.  */
12081   for (i = 0; i < len; i++)
12082     if (p[i])
12083       hash = (hash << 4) ^ ((i << 2) | p[i]);
12084 
12085   return hash;
12086 }
12087 
12088 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
12089    TARGET_OPTION tree node) is the same as that given by *Y, a node of
12090    the same kind.  */
12091 
12092 bool
12093 cl_option_hasher::equal (tree x, tree y)
12094 {
12095   const_tree const xt = x;
12096   const_tree const yt = y;
12097   const char *xp;
12098   const char *yp;
12099   size_t len;
12100 
12101   if (TREE_CODE (xt) != TREE_CODE (yt))
12102     return 0;
12103 
12104   if (TREE_CODE (xt) == OPTIMIZATION_NODE)
12105     {
12106       xp = (const char *)TREE_OPTIMIZATION (xt);
12107       yp = (const char *)TREE_OPTIMIZATION (yt);
12108       len = sizeof (struct cl_optimization);
12109     }
12110 
12111   else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
12112     {
12113       return cl_target_option_eq (TREE_TARGET_OPTION (xt),
12114 				  TREE_TARGET_OPTION (yt));
12115     }
12116 
12117   else
12118     gcc_unreachable ();
12119 
12120   return (memcmp (xp, yp, len) == 0);
12121 }
12122 
12123 /* Build an OPTIMIZATION_NODE based on the options in OPTS.  */
12124 
12125 tree
12126 build_optimization_node (struct gcc_options *opts)
12127 {
12128   tree t;
12129 
12130   /* Use the cache of optimization nodes.  */
12131 
12132   cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
12133 			opts);
12134 
12135   tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
12136   t = *slot;
12137   if (!t)
12138     {
12139       /* Insert this one into the hash table.  */
12140       t = cl_optimization_node;
12141       *slot = t;
12142 
12143       /* Make a new node for next time round.  */
12144       cl_optimization_node = make_node (OPTIMIZATION_NODE);
12145     }
12146 
12147   return t;
12148 }
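
/* A usage sketch (hypothetical caller, not part of the original source):
   an attribute handler would typically copy the current options, tweak
   them and intern the result, e.g.

       struct gcc_options opts = global_options;
       opts.x_optimize = 2;
       tree opt_node = build_optimization_node (&opts);

   Equal option sets hash to the same slot, so repeated calls with an
   identical OPTS return the same cached OPTIMIZATION_NODE rather than
   allocating a new one.  */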
12149 
12150 /* Build a TARGET_OPTION_NODE based on the options in OPTS.  */
12151 
12152 tree
12153 build_target_option_node (struct gcc_options *opts)
12154 {
12155   tree t;
12156 
12157   /* Use the cache of target option nodes.  */
12158 
12159   cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
12160 			 opts);
12161 
12162   tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
12163   t = *slot;
12164   if (!t)
12165     {
12166       /* Insert this one into the hash table.  */
12167       t = cl_target_option_node;
12168       *slot = t;
12169 
12170       /* Make a new node for next time round.  */
12171       cl_target_option_node = make_node (TARGET_OPTION_NODE);
12172     }
12173 
12174   return t;
12175 }
12176 
12177 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
12178    so that they aren't saved during PCH writing.  */
12179 
12180 void
12181 prepare_target_option_nodes_for_pch (void)
12182 {
12183   hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
12184   for (; iter != cl_option_hash_table->end (); ++iter)
12185     if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
12186       TREE_TARGET_GLOBALS (*iter) = NULL;
12187 }
12188 
12189 /* Determine the "ultimate origin" of a block.  The block may be an inlined
12190    instance of an inlined instance of a block which is local to an inline
12191    function, so we have to trace all of the way back through the origin chain
12192    to find out what sort of node actually served as the original seed for the
12193    given block.  */
12194 
12195 tree
12196 block_ultimate_origin (const_tree block)
12197 {
12198   tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
12199 
12200   /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
12201      we're trying to output the abstract instance of this function.  */
12202   if (BLOCK_ABSTRACT (block) && immediate_origin == block)
12203     return NULL_TREE;
12204 
12205   if (immediate_origin == NULL_TREE)
12206     return NULL_TREE;
12207   else
12208     {
12209       tree ret_val;
12210       tree lookahead = immediate_origin;
12211 
12212       do
12213 	{
12214 	  ret_val = lookahead;
12215 	  lookahead = (TREE_CODE (ret_val) == BLOCK
12216 		       ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
12217 	}
12218       while (lookahead != NULL && lookahead != ret_val);
12219 
12220       /* The block's abstract origin chain may not be the *ultimate* origin of
12221 	 the block. It could lead to a DECL that has an abstract origin set.
12222 	 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
12223 	 will give us if it has one).  Note that DECL's abstract origins are
12224 	 supposed to be the most distant ancestor (or so decl_ultimate_origin
12225 	 claims), so we don't need to loop following the DECL origins.  */
12226       if (DECL_P (ret_val))
12227 	return DECL_ORIGIN (ret_val);
12228 
12229       return ret_val;
12230     }
12231 }
12232 
12233 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
12234    no instruction.  */
12235 
12236 bool
12237 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
12238 {
12239   /* Do not strip casts into or out of differing address spaces.  */
12240   if (POINTER_TYPE_P (outer_type)
12241       && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
12242     {
12243       if (!POINTER_TYPE_P (inner_type)
12244 	  || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
12245 	      != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
12246 	return false;
12247     }
12248   else if (POINTER_TYPE_P (inner_type)
12249 	   && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
12250     {
12251       /* We already know that outer_type is not a pointer with
12252 	 a non-generic address space.  */
12253       return false;
12254     }
12255 
12256   /* Use precision rather than machine mode when we can, which gives
12257      the correct answer even for submode (bit-field) types.  */
12258   if ((INTEGRAL_TYPE_P (outer_type)
12259        || POINTER_TYPE_P (outer_type)
12260        || TREE_CODE (outer_type) == OFFSET_TYPE)
12261       && (INTEGRAL_TYPE_P (inner_type)
12262 	  || POINTER_TYPE_P (inner_type)
12263 	  || TREE_CODE (inner_type) == OFFSET_TYPE))
12264     return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
12265 
12266   /* Otherwise fall back on comparing machine modes (e.g. for
12267      aggregate types, floats).  */
12268   return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
12269 }
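
/* Some illustrative cases (assuming a typical 32-bit int / 64-bit long
   target): a conversion between "int" and "unsigned int" has equal
   precision and is a nop; "int" to "long" changes the precision and is
   not; two pointer types in the generic address space have equal
   precision, so converting between them is a nop; for aggregates the
   machine-mode comparison decides.  */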
12270 
12271 /* Return true iff conversion in EXP generates no instruction.  Mark
12272    it inline so that we fully inline into the stripping functions even
12273    though we have two uses of this function.  */
12274 
12275 static inline bool
12276 tree_nop_conversion (const_tree exp)
12277 {
12278   tree outer_type, inner_type;
12279 
12280   if (!CONVERT_EXPR_P (exp)
12281       && TREE_CODE (exp) != NON_LVALUE_EXPR)
12282     return false;
12283   if (TREE_OPERAND (exp, 0) == error_mark_node)
12284     return false;
12285 
12286   outer_type = TREE_TYPE (exp);
12287   inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12288 
12289   if (!inner_type)
12290     return false;
12291 
12292   return tree_nop_conversion_p (outer_type, inner_type);
12293 }
12294 
12295 /* Return true iff conversion in EXP generates no instruction.  Don't
12296    consider conversions changing the signedness.  */
12297 
12298 static bool
12299 tree_sign_nop_conversion (const_tree exp)
12300 {
12301   tree outer_type, inner_type;
12302 
12303   if (!tree_nop_conversion (exp))
12304     return false;
12305 
12306   outer_type = TREE_TYPE (exp);
12307   inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12308 
12309   return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12310 	  && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12311 }
12312 
12313 /* Strip conversions from EXP according to tree_nop_conversion and
12314    return the resulting expression.  */
12315 
12316 tree
12317 tree_strip_nop_conversions (tree exp)
12318 {
12319   while (tree_nop_conversion (exp))
12320     exp = TREE_OPERAND (exp, 0);
12321   return exp;
12322 }
12323 
12324 /* Strip conversions from EXP according to tree_sign_nop_conversion
12325    and return the resulting expression.  */
12326 
12327 tree
12328 tree_strip_sign_nop_conversions (tree exp)
12329 {
12330   while (tree_sign_nop_conversion (exp))
12331     exp = TREE_OPERAND (exp, 0);
12332   return exp;
12333 }
12334 
12335 /* Avoid any floating point extensions from EXP.  */
12336 tree
12337 strip_float_extensions (tree exp)
12338 {
12339   tree sub, expt, subt;
12340 
12341   /* For a floating point constant, look up the narrowest type that can hold
12342       it properly and handle it like (type)(narrowest_type)constant.
12343       This way we can optimize for instance a=a*2.0 where "a" is float
12344       but 2.0 is a double constant.  */
12345   if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12346     {
12347       REAL_VALUE_TYPE orig;
12348       tree type = NULL;
12349 
12350       orig = TREE_REAL_CST (exp);
12351       if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12352 	  && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12353 	type = float_type_node;
12354       else if (TYPE_PRECISION (TREE_TYPE (exp))
12355 	       > TYPE_PRECISION (double_type_node)
12356 	       && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12357 	type = double_type_node;
12358       if (type)
12359 	return build_real_truncate (type, orig);
12360     }
12361 
12362   if (!CONVERT_EXPR_P (exp))
12363     return exp;
12364 
12365   sub = TREE_OPERAND (exp, 0);
12366   subt = TREE_TYPE (sub);
12367   expt = TREE_TYPE (exp);
12368 
12369   if (!FLOAT_TYPE_P (subt))
12370     return exp;
12371 
12372   if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12373     return exp;
12374 
12375   if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
12376     return exp;
12377 
12378   return strip_float_extensions (sub);
12379 }
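
/* Illustrative example (hypothetical trees): given "float f", the
   expression "(double) f" strips back to "f", and the REAL_CST 2.0 of
   type double strips to a float constant because 2.0 is exactly
   representable in float.  This is what lets "a = a * 2.0" with float
   "a" be evaluated in single precision.  */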
12380 
12381 /* Strip out all handled components that produce invariant
12382    offsets.  */
12383 
12384 const_tree
12385 strip_invariant_refs (const_tree op)
12386 {
12387   while (handled_component_p (op))
12388     {
12389       switch (TREE_CODE (op))
12390 	{
12391 	case ARRAY_REF:
12392 	case ARRAY_RANGE_REF:
12393 	  if (!is_gimple_constant (TREE_OPERAND (op, 1))
12394 	      || TREE_OPERAND (op, 2) != NULL_TREE
12395 	      || TREE_OPERAND (op, 3) != NULL_TREE)
12396 	    return NULL;
12397 	  break;
12398 
12399 	case COMPONENT_REF:
12400 	  if (TREE_OPERAND (op, 2) != NULL_TREE)
12401 	    return NULL;
12402 	  break;
12403 
12404 	default:;
12405 	}
12406       op = TREE_OPERAND (op, 0);
12407     }
12408 
12409   return op;
12410 }
12411 
12412 static GTY(()) tree gcc_eh_personality_decl;
12413 
12414 /* Return the GCC personality function decl.  */
12415 
12416 tree
12417 lhd_gcc_personality (void)
12418 {
12419   if (!gcc_eh_personality_decl)
12420     gcc_eh_personality_decl = build_personality_function ("gcc");
12421   return gcc_eh_personality_decl;
12422 }
12423 
12424 /* TARGET is a call target of a GIMPLE call statement
12425    (obtained by gimple_call_fn).  Return true if it is an
12426    OBJ_TYPE_REF representing a virtual call to a C++ method,
12427    as opposed to an OBJ_TYPE_REF representing Objective-C calls
12428    through a cast, where the middle-end devirtualization machinery
12429    can't apply.  */
12430 
12431 bool
12432 virtual_method_call_p (const_tree target)
12433 {
12434   if (TREE_CODE (target) != OBJ_TYPE_REF)
12435     return false;
12436   tree t = TREE_TYPE (target);
12437   gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12438   t = TREE_TYPE (t);
12439   if (TREE_CODE (t) == FUNCTION_TYPE)
12440     return false;
12441   gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12442   /* If we do not have a BINFO associated, it means that the type was built
12443      without devirtualization enabled.  Do not consider this a virtual
12444      call.  */
12445   if (!TYPE_BINFO (obj_type_ref_class (target)))
12446     return false;
12447   return true;
12448 }
12449 
12450 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to.  */
12451 
12452 tree
12453 obj_type_ref_class (const_tree ref)
12454 {
12455   gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
12456   ref = TREE_TYPE (ref);
12457   gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12458   ref = TREE_TYPE (ref);
12459   /* We look for the type THIS points to.  ObjC also builds
12460      OBJ_TYPE_REF with non-method calls; their first parameter
12461      ID however also corresponds to the class type.  */
12462   gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
12463 		       || TREE_CODE (ref) == FUNCTION_TYPE);
12464   ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
12465   gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12466   return TREE_TYPE (ref);
12467 }
12468 
12469 /* Lookup sub-BINFO of BINFO of TYPE at offset POS.  */
12470 
12471 static tree
12472 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12473 {
12474   unsigned int i;
12475   tree base_binfo, b;
12476 
12477   for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12478     if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12479 	&& types_same_for_odr (TREE_TYPE (base_binfo), type))
12480       return base_binfo;
12481     else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12482       return b;
12483   return NULL;
12484 }
12485 
12486 /* Try to find a base info of BINFO that would have its field decl at offset
12487    OFFSET within the BINFO type and which is of EXPECTED_TYPE.  If it can be
12488    found, return it; otherwise return NULL_TREE.  */
12489 
12490 tree
12491 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
12492 {
12493   tree type = BINFO_TYPE (binfo);
12494 
12495   while (true)
12496     {
12497       HOST_WIDE_INT pos, size;
12498       tree fld;
12499       int i;
12500 
12501       if (types_same_for_odr (type, expected_type))
12502 	  return binfo;
12503       if (offset < 0)
12504 	return NULL_TREE;
12505 
12506       for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12507 	{
12508 	  if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12509 	    continue;
12510 
12511 	  pos = int_bit_position (fld);
12512 	  size = tree_to_uhwi (DECL_SIZE (fld));
12513 	  if (pos <= offset && (pos + size) > offset)
12514 	    break;
12515 	}
12516       if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12517 	return NULL_TREE;
12518 
12519       /* Offset 0 indicates the primary base, whose vtable contents are
12520 	 represented in the binfo for the derived class.  */
12521       else if (offset != 0)
12522 	{
12523 	  tree found_binfo = NULL, base_binfo;
12524 	  /* Offsets in BINFO are in bytes relative to the whole structure
12525 	     while POS is in bits relative to the containing field.  */
12526 	  int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12527 			     / BITS_PER_UNIT);
12528 
12529 	  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12530 	    if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12531 		&& types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12532 	      {
12533 		found_binfo = base_binfo;
12534 		break;
12535 	      }
12536 	  if (found_binfo)
12537 	    binfo = found_binfo;
12538 	  else
12539 	    binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12540 					    binfo_offset);
12541 	 }
12542 
12543       type = TREE_TYPE (fld);
12544       offset -= pos;
12545     }
12546 }
12547 
12548 /* Returns true if X is a typedef decl.  */
12549 
12550 bool
12551 is_typedef_decl (const_tree x)
12552 {
12553   return (x && TREE_CODE (x) == TYPE_DECL
12554           && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12555 }
12556 
12557 /* Returns true iff TYPE is a type variant created for a typedef. */
12558 
12559 bool
12560 typedef_variant_p (const_tree type)
12561 {
12562   return is_typedef_decl (TYPE_NAME (type));
12563 }
12564 
12565 /* Warn about a use of an identifier which was marked deprecated.  */
12566 void
12567 warn_deprecated_use (tree node, tree attr)
12568 {
12569   const char *msg;
12570 
12571   if (node == 0 || !warn_deprecated_decl)
12572     return;
12573 
12574   if (!attr)
12575     {
12576       if (DECL_P (node))
12577 	attr = DECL_ATTRIBUTES (node);
12578       else if (TYPE_P (node))
12579 	{
12580 	  tree decl = TYPE_STUB_DECL (node);
12581 	  if (decl)
12582 	    attr = lookup_attribute ("deprecated",
12583 				     TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12584 	}
12585     }
12586 
12587   if (attr)
12588     attr = lookup_attribute ("deprecated", attr);
12589 
12590   if (attr)
12591     msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12592   else
12593     msg = NULL;
12594 
12595   bool w;
12596   if (DECL_P (node))
12597     {
12598       if (msg)
12599 	w = warning (OPT_Wdeprecated_declarations,
12600 		     "%qD is deprecated: %s", node, msg);
12601       else
12602 	w = warning (OPT_Wdeprecated_declarations,
12603 		     "%qD is deprecated", node);
12604       if (w)
12605 	inform (DECL_SOURCE_LOCATION (node), "declared here");
12606     }
12607   else if (TYPE_P (node))
12608     {
12609       tree what = NULL_TREE;
12610       tree decl = TYPE_STUB_DECL (node);
12611 
12612       if (TYPE_NAME (node))
12613 	{
12614 	  if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12615 	    what = TYPE_NAME (node);
12616 	  else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12617 		   && DECL_NAME (TYPE_NAME (node)))
12618 	    what = DECL_NAME (TYPE_NAME (node));
12619 	}
12620 
12621       if (decl)
12622 	{
12623 	  if (what)
12624 	    {
12625 	      if (msg)
12626 		w = warning (OPT_Wdeprecated_declarations,
12627 			     "%qE is deprecated: %s", what, msg);
12628 	      else
12629 		w = warning (OPT_Wdeprecated_declarations,
12630 			     "%qE is deprecated", what);
12631 	    }
12632 	  else
12633 	    {
12634 	      if (msg)
12635 		w = warning (OPT_Wdeprecated_declarations,
12636 			     "type is deprecated: %s", msg);
12637 	      else
12638 		w = warning (OPT_Wdeprecated_declarations,
12639 			     "type is deprecated");
12640 	    }
12641 	  if (w)
12642 	    inform (DECL_SOURCE_LOCATION (decl), "declared here");
12643 	}
12644       else
12645 	{
12646 	  if (what)
12647 	    {
12648 	      if (msg)
12649 		warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12650 			 what, msg);
12651 	      else
12652 		warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12653 	    }
12654 	  else
12655 	    {
12656 	      if (msg)
12657 		warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12658 			 msg);
12659 	      else
12660 		warning (OPT_Wdeprecated_declarations, "type is deprecated");
12661 	    }
12662 	}
12663     }
12664 }
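
/* Example of the diagnostics this can produce (hypothetical source):

       int old_fn (void) __attribute__ ((deprecated ("use new_fn")));
       ... old_fn (); ...

   warns "'old_fn' is deprecated: use new_fn" and points at the
   declaration; for a deprecated type with no usable name the fallback
   "type is deprecated" message is used instead.  */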
12665 
12666 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12667    somewhere in it.  */
12668 
12669 bool
12670 contains_bitfld_component_ref_p (const_tree ref)
12671 {
12672   while (handled_component_p (ref))
12673     {
12674       if (TREE_CODE (ref) == COMPONENT_REF
12675           && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12676         return true;
12677       ref = TREE_OPERAND (ref, 0);
12678     }
12679 
12680   return false;
12681 }
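
/* For instance (hypothetical type), given

       struct s { int a : 3; int b; } x;

   the reference "x.a" contains a COMPONENT_REF whose FIELD_DECL is a
   bit-field, so the function returns true, while "x.b" yields false.  */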
12682 
12683 /* Try to determine whether a TRY_CATCH expression can fall through.
12684    This is a subroutine of block_may_fallthru.  */
12685 
12686 static bool
12687 try_catch_may_fallthru (const_tree stmt)
12688 {
12689   tree_stmt_iterator i;
12690 
12691   /* If the TRY block can fall through, the whole TRY_CATCH can
12692      fall through.  */
12693   if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12694     return true;
12695 
12696   i = tsi_start (TREE_OPERAND (stmt, 1));
12697   switch (TREE_CODE (tsi_stmt (i)))
12698     {
12699     case CATCH_EXPR:
12700       /* We expect to see a sequence of CATCH_EXPR trees, each with a
12701 	 catch expression and a body.  The whole TRY_CATCH may fall
12702 	 through iff any of the catch bodies falls through.  */
12703       for (; !tsi_end_p (i); tsi_next (&i))
12704 	{
12705 	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12706 	    return true;
12707 	}
12708       return false;
12709 
12710     case EH_FILTER_EXPR:
12711       /* The exception filter expression only matters if there is an
12712 	 exception.  If the exception does not match EH_FILTER_TYPES,
12713 	 we will execute EH_FILTER_FAILURE, and we will fall through
12714 	 if that falls through.  If the exception does match
12715 	 EH_FILTER_TYPES, the stack unwinder will continue up the
12716 	 stack, so we will not fall through.  We don't know whether we
12717 	 will throw an exception which matches EH_FILTER_TYPES or not,
12718 	 so we just ignore EH_FILTER_TYPES and assume that we might
12719 	 throw an exception which doesn't match.  */
12720       return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12721 
12722     default:
12723       /* This case represents statements to be executed when an
12724 	 exception occurs.  Those statements are implicitly followed
12725 	 by a RESX statement to resume execution after the exception.
12726 	 So in this case the TRY_CATCH never falls through.  */
12727       return false;
12728     }
12729 }
12730 
12731 /* Try to determine if we can fall out of the bottom of BLOCK.  This guess
12732    need not be 100% accurate; simply be conservative and return true if we
12733    don't know.  This is used only to avoid stupidly generating extra code.
12734    If we're wrong, we'll just delete the extra code later.  */
12735 
12736 bool
12737 block_may_fallthru (const_tree block)
12738 {
12739   /* This CONST_CAST is okay because expr_last returns its argument
12740      unmodified and we assign it to a const_tree.  */
12741   const_tree stmt = expr_last (CONST_CAST_TREE (block));
12742 
12743   switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12744     {
12745     case GOTO_EXPR:
12746     case RETURN_EXPR:
12747       /* Easy cases.  If the last statement of the block implies
12748 	 control transfer, then we can't fall through.  */
12749       return false;
12750 
12751     case SWITCH_EXPR:
12752       /* If SWITCH_LABELS is set, this is lowered, and represents a
12753 	 branch to a selected label and hence cannot fall through.
12754 	 Otherwise SWITCH_BODY is set, and the switch can fall
12755 	 through.  */
12756       return SWITCH_LABELS (stmt) == NULL_TREE;
12757 
12758     case COND_EXPR:
12759       if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12760 	return true;
12761       return block_may_fallthru (COND_EXPR_ELSE (stmt));
12762 
12763     case BIND_EXPR:
12764       return block_may_fallthru (BIND_EXPR_BODY (stmt));
12765 
12766     case TRY_CATCH_EXPR:
12767       return try_catch_may_fallthru (stmt);
12768 
12769     case TRY_FINALLY_EXPR:
12770       /* The finally clause is always executed after the try clause,
12771 	 so if it does not fall through, then the try-finally will not
12772 	 fall through.  Otherwise, if the try clause does not fall
12773 	 through, then when the finally clause falls through it will
12774 	 resume execution wherever the try clause was going.  So the
12775 	 whole try-finally will only fall through if both the try
12776 	 clause and the finally clause fall through.  */
12777       return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12778 	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12779 
12780     case MODIFY_EXPR:
12781       if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12782 	stmt = TREE_OPERAND (stmt, 1);
12783       else
12784 	return true;
12785       /* FALLTHRU */
12786 
12787     case CALL_EXPR:
12788       /* Functions that do not return do not fall through.  */
12789       return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12790 
12791     case CLEANUP_POINT_EXPR:
12792       return block_may_fallthru (TREE_OPERAND (stmt, 0));
12793 
12794     case TARGET_EXPR:
12795       return block_may_fallthru (TREE_OPERAND (stmt, 1));
12796 
12797     case ERROR_MARK:
12798       return true;
12799 
12800     default:
12801       return lang_hooks.block_may_fallthru (stmt);
12802     }
12803 }
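
/* A few illustrative cases (hypothetical statement lists): a block whose
   last statement is "return 0;" cannot fall through; one ending in a call
   to a noreturn function such as abort () cannot fall through either;
   a COND_EXPR may fall through if either of its arms may.  */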
12804 
12805 /* True if we are using EH to handle cleanups.  */
12806 static bool using_eh_for_cleanups_flag = false;
12807 
12808 /* This routine is called from front ends to indicate eh should be used for
12809    cleanups.  */
12810 void
12811 using_eh_for_cleanups (void)
12812 {
12813   using_eh_for_cleanups_flag = true;
12814 }
12815 
12816 /* Query whether EH is used for cleanups.  */
12817 bool
12818 using_eh_for_cleanups_p (void)
12819 {
12820   return using_eh_for_cleanups_flag;
12821 }
12822 
12823 /* Wrapper for tree_code_name to ensure that the tree code is valid.  */
12824 const char *
12825 get_tree_code_name (enum tree_code code)
12826 {
12827   const char *invalid = "<invalid tree code>";
12828 
12829   if (code >= MAX_TREE_CODES)
12830     return invalid;
12831 
12832   return tree_code_name[code];
12833 }
12834 
12835 /* Drops the TREE_OVERFLOW flag from T.  */
12836 
12837 tree
12838 drop_tree_overflow (tree t)
12839 {
12840   gcc_checking_assert (TREE_OVERFLOW (t));
12841 
12842   /* For tree codes with a sharing machinery re-build the result.  */
12843   if (TREE_CODE (t) == INTEGER_CST)
12844     return wide_int_to_tree (TREE_TYPE (t), t);
12845 
12846   /* Otherwise, as all tcc_constants are possibly shared, copy the node
12847      and drop the flag.  */
12848   t = copy_node (t);
12849   TREE_OVERFLOW (t) = 0;
12850   return t;
12851 }
12852 
12853 /* Given a memory reference expression T, return its base address.
12854    The base address of a memory reference expression is the main
12855    object being referenced.  For instance, the base address for
12856    'array[i].fld[j]' is 'array'.  You can think of this as stripping
12857    away the offset part from a memory address.
12858 
12859    This function calls handled_component_p to strip away all the inner
12860    parts of the memory reference until it reaches the base object.  */
12861 
12862 tree
12863 get_base_address (tree t)
12864 {
12865   while (handled_component_p (t))
12866     t = TREE_OPERAND (t, 0);
12867 
12868   if ((TREE_CODE (t) == MEM_REF
12869        || TREE_CODE (t) == TARGET_MEM_REF)
12870       && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12871     t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12872 
12873   /* ???  Either the alias oracle or all callers need to properly deal
12874      with WITH_SIZE_EXPRs before we can look through those.  */
12875   if (TREE_CODE (t) == WITH_SIZE_EXPR)
12876     return NULL_TREE;
12877 
12878   return t;
12879 }
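
/* Illustrative examples (hypothetical trees): the base address of
   "a.b[i].c" is "a"; for "MEM_REF[&a, 8]" the ADDR_EXPR is looked
   through and the base is again "a"; for "MEM_REF[p_1, 8]" with an SSA
   pointer p_1 the MEM_REF itself is returned.  */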
12880 
12881 /* Return a tree of sizetype representing the size, in bytes, of the element
12882    of EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
12883 
12884 tree
12885 array_ref_element_size (tree exp)
12886 {
12887   tree aligned_size = TREE_OPERAND (exp, 3);
12888   tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12889   location_t loc = EXPR_LOCATION (exp);
12890 
12891   /* If a size was specified in the ARRAY_REF, it's the size measured
12892      in alignment units of the element type.  So multiply by that value.  */
12893   if (aligned_size)
12894     {
12895       /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12896 	 sizetype from another type of the same width and signedness.  */
12897       if (TREE_TYPE (aligned_size) != sizetype)
12898 	aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12899       return size_binop_loc (loc, MULT_EXPR, aligned_size,
12900 			     size_int (TYPE_ALIGN_UNIT (elmt_type)));
12901     }
12902 
12903   /* Otherwise, take the size from that of the element type.  Substitute
12904      any PLACEHOLDER_EXPR that we have.  */
12905   else
12906     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12907 }
12908 
12909 /* Return a tree representing the lower bound of the array mentioned in
12910    EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
12911 
12912 tree
12913 array_ref_low_bound (tree exp)
12914 {
12915   tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12916 
12917   /* If a lower bound is specified in EXP, use it.  */
12918   if (TREE_OPERAND (exp, 2))
12919     return TREE_OPERAND (exp, 2);
12920 
12921   /* Otherwise, if there is a domain type and it has a lower bound, use it,
12922      substituting for a PLACEHOLDER_EXPR as needed.  */
12923   if (domain_type && TYPE_MIN_VALUE (domain_type))
12924     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12925 
12926   /* Otherwise, return a zero of the appropriate type.  */
12927   return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
12928 }
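
/* For a plain C array the domain starts at 0, so the constant 0 is
   returned; a Fortran array declared as A(1:N) would instead yield its
   recorded lower bound of 1 (illustrative, assuming the front end records
   such a domain).  */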
12929 
12930 /* Return a tree representing the upper bound of the array mentioned in
12931    EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
12932 
12933 tree
12934 array_ref_up_bound (tree exp)
12935 {
12936   tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12937 
12938   /* If there is a domain type and it has an upper bound, use it, substituting
12939      for a PLACEHOLDER_EXPR as needed.  */
12940   if (domain_type && TYPE_MAX_VALUE (domain_type))
12941     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12942 
12943   /* Otherwise fail.  */
12944   return NULL_TREE;
12945 }
12946 
12947 /* Returns true if REF is an array reference or a component reference
12948    to an array at the end of a structure.
12949    If this is the case, the array may be allocated larger
12950    than its upper bound implies.  */
12951 
12952 bool
12953 array_at_struct_end_p (tree ref)
12954 {
12955   tree atype;
12956 
12957   if (TREE_CODE (ref) == ARRAY_REF
12958       || TREE_CODE (ref) == ARRAY_RANGE_REF)
12959     {
12960       atype = TREE_TYPE (TREE_OPERAND (ref, 0));
12961       ref = TREE_OPERAND (ref, 0);
12962     }
12963   else if (TREE_CODE (ref) == COMPONENT_REF
12964 	   && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
12965     atype = TREE_TYPE (TREE_OPERAND (ref, 1));
12966   else
12967     return false;
12968 
12969   while (handled_component_p (ref))
12970     {
12971       /* If the reference chain contains a component reference to a
12972          non-union type and another field follows, the reference
12973	 is not at the end of a structure.  */
12974       if (TREE_CODE (ref) == COMPONENT_REF)
12975 	{
12976 	  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12977 	    {
12978 	      tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12979 	      while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12980 		nextf = DECL_CHAIN (nextf);
12981 	      if (nextf)
12982 		return false;
12983 	    }
12984 	}
12985       /* If we have a multi-dimensional array we do not consider
12986          a non-innermost dimension as flex array if the whole
12987 	 multi-dimensional array is at struct end.
12988 	 Same for an array of aggregates with a trailing array
12989 	 member.  */
12990       else if (TREE_CODE (ref) == ARRAY_REF)
12991 	return false;
12992       else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
12993 	;
12994       /* If we view an underlying object as something else, then what we
12995          have gathered up to now is what we have to rely on.  */
12996       else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
12997 	break;
12998       else
12999 	gcc_unreachable ();
13000 
13001       ref = TREE_OPERAND (ref, 0);
13002     }
13003 
13004   /* The array is now at struct end.  Treat flexible arrays as
13005      always subject to extension, even into just padding constrained by
13006      an underlying decl.  */
13007   if (! TYPE_SIZE (atype))
13008     return true;
13009 
13010   /* If the reference is based on a declared entity, the size of the array
13011      is constrained by its given domain (do not trust commons; see PR 69368).  */
13012   if (DECL_P (ref)
13013       && !(flag_unconstrained_commons
13014 	   && VAR_P (ref) && DECL_COMMON (ref)))
13015     return false;
13016 
13017   return true;
13018 }
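
/* Illustrative cases (hypothetical types): for

       struct s { int n; int a[1]; } *p;

   the reference p->a[i] is an array at struct end, so it may be allocated
   larger than its declared bound and the function returns true; for a
   declared object "struct s v;" the reference v.a[i] is constrained by
   the declaration and the function returns false.  */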
13019 
13020 /* Return a tree representing the offset, in bytes, of the field referenced
13021    by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */
13022 
13023 tree
13024 component_ref_field_offset (tree exp)
13025 {
13026   tree aligned_offset = TREE_OPERAND (exp, 2);
13027   tree field = TREE_OPERAND (exp, 1);
13028   location_t loc = EXPR_LOCATION (exp);
13029 
13030   /* If an offset was specified in the COMPONENT_REF, it's the offset measured
13031      in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
13032      value.  */
13033   if (aligned_offset)
13034     {
13035       /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13036 	 sizetype from another type of the same width and signedness.  */
13037       if (TREE_TYPE (aligned_offset) != sizetype)
13038 	aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
13039       return size_binop_loc (loc, MULT_EXPR, aligned_offset,
13040 			     size_int (DECL_OFFSET_ALIGN (field)
13041 				       / BITS_PER_UNIT));
13042     }
13043 
13044   /* Otherwise, take the offset from that of the field.  Substitute
13045      any PLACEHOLDER_EXPR that we have.  */
13046   else
13047     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
13048 }
13049 
13050 /* Return the machine mode of T.  For vectors, returns the mode of the
13051    inner type.  The main use case is to feed the result to HONOR_NANS,
13052    avoiding the BLKmode that a direct TYPE_MODE (T) might return.  */
13053 
13054 machine_mode
13055 element_mode (const_tree t)
13056 {
13057   if (!TYPE_P (t))
13058     t = TREE_TYPE (t);
13059   if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13060     t = TREE_TYPE (t);
13061   return TYPE_MODE (t);
13062 }
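
/* For example, on a target where "float" has SFmode, element_mode
   returns SFmode for a V4SF vector type, for a complex float type and
   for "float" itself, so HONOR_NANS can be queried uniformly
   (illustrative; the actual modes are target-dependent).  */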
13063 
13064 
13065 /* Verify that basic properties of T match TV and thus T can be a variant of
13066    TV.  TV should be the more specific variant (i.e. the main variant).  */
13067 
13068 static bool
13069 verify_type_variant (const_tree t, tree tv)
13070 {
13071   /* Type variant can differ by:
13072 
13073      - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13074                    ENCODE_QUAL_ADDR_SPACE.
13075      - main variant may be COMPLETE_TYPE_P and variant types !COMPLETE_TYPE_P;
13076        in this case some values may not be set in the variant types
13077        (see COMPLETE_TYPE_P checks).
13078      - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
13079      - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
13080      - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13081      - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13082      - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
13083        this is necessary to make it possible to merge types from different TUs
13084      - arrays, pointers and references may have TREE_TYPE that is a variant
13085        of TREE_TYPE of their main variants.
13086      - aggregates may have new TYPE_FIELDS list that list variants of
13087        the main variant TYPE_FIELDS.
13088      - vector types may differ by TYPE_VECTOR_OPAQUE
13089      - TYPE_METHODS is always NULL for variant types and maintained for
13090        the main variant only.
13091    */
13092 
13093   /* Convenience macro for matching individual fields.  */
13094 #define verify_variant_match(flag)					    \
13095   do {									    \
13096     if (flag (tv) != flag (t))						    \
13097       {									    \
13098 	error ("type variant differs by " #flag ".");			    \
13099 	debug_tree (tv);						    \
13100 	return false;							    \
13101       }									    \
13102   } while (false)
13103 
13104   /* tree_base checks.  */
13105 
13106   verify_variant_match (TREE_CODE);
13107   /* FIXME: Ada builds non-artificial variants of artificial types.  */
13108   if (TYPE_ARTIFICIAL (tv) && 0)
13109     verify_variant_match (TYPE_ARTIFICIAL);
13110   if (POINTER_TYPE_P (tv))
13111     verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
13112   /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds.  */
13113   verify_variant_match (TYPE_UNSIGNED);
13114   verify_variant_match (TYPE_ALIGN_OK);
13115   verify_variant_match (TYPE_PACKED);
13116   if (TREE_CODE (t) == REFERENCE_TYPE)
13117     verify_variant_match (TYPE_REF_IS_RVALUE);
13118   if (AGGREGATE_TYPE_P (t))
13119     verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
13120   else
13121     verify_variant_match (TYPE_SATURATING);
13122   /* FIXME: This check triggers during the libstdc++ build.  */
13123   if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
13124     verify_variant_match (TYPE_FINAL_P);
13125 
13126   /* tree_type_common checks.  */
13127 
13128   if (COMPLETE_TYPE_P (t))
13129     {
13130       verify_variant_match (TYPE_SIZE);
13131       verify_variant_match (TYPE_MODE);
13132       if (TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv)
13133 	  /* FIXME: ideally we should compare pointer equality, but the Java FE
13134 	     produces variants where the size is an INTEGER_CST of a different
13135 	     type (int wrt size_type) during the libjava build.  */
13136 	  && !operand_equal_p (TYPE_SIZE_UNIT (t), TYPE_SIZE_UNIT (tv), 0))
13137 	{
13138 	  error ("type variant has different TYPE_SIZE_UNIT");
13139 	  debug_tree (tv);
13140 	  error ("type variant's TYPE_SIZE_UNIT");
13141 	  debug_tree (TYPE_SIZE_UNIT (tv));
13142 	  error ("type's TYPE_SIZE_UNIT");
13143 	  debug_tree (TYPE_SIZE_UNIT (t));
13144 	  return false;
13145 	}
13146     }
13147   verify_variant_match (TYPE_PRECISION);
13148   verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13149   if (RECORD_OR_UNION_TYPE_P (t))
13150     verify_variant_match (TYPE_TRANSPARENT_AGGR);
13151   else if (TREE_CODE (t) == ARRAY_TYPE)
13152     verify_variant_match (TYPE_NONALIASED_COMPONENT);
13153   /* During LTO we merge variant lists from different translation units
13154      that may differ by TYPE_CONTEXT, which in turn may point
13155      to a TRANSLATION_UNIT_DECL.
13156      Ada also builds variants of types with different TYPE_CONTEXT.  */
13157   if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
13158     verify_variant_match (TYPE_CONTEXT);
13159   verify_variant_match (TYPE_STRING_FLAG);
13160   if (TYPE_ALIAS_SET_KNOWN_P (t))
13161     {
13162       error ("type variant with TYPE_ALIAS_SET_KNOWN_P");
13163       debug_tree (tv);
13164       return false;
13165     }
13166 
13167   /* tree_type_non_common checks.  */
13168 
13169   /* FIXME: the C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13170      and dangles the pointer from time to time.  */
13171   if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13172       && (in_lto_p || !TYPE_VFIELD (tv)
13173 	  || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
13174     {
13175       error ("type variant has different TYPE_VFIELD");
13176       debug_tree (tv);
13177       return false;
13178     }
13179   if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13180        || TREE_CODE (t) == INTEGER_TYPE
13181        || TREE_CODE (t) == BOOLEAN_TYPE
13182        || TREE_CODE (t) == REAL_TYPE
13183        || TREE_CODE (t) == FIXED_POINT_TYPE)
13184     {
13185       verify_variant_match (TYPE_MAX_VALUE);
13186       verify_variant_match (TYPE_MIN_VALUE);
13187     }
13188   if (TREE_CODE (t) == METHOD_TYPE)
13189     verify_variant_match (TYPE_METHOD_BASETYPE);
13190   if (RECORD_OR_UNION_TYPE_P (t) && TYPE_METHODS (t))
13191     {
13192       error ("type variant has TYPE_METHODS");
13193       debug_tree (tv);
13194       return false;
13195     }
13196   if (TREE_CODE (t) == OFFSET_TYPE)
13197     verify_variant_match (TYPE_OFFSET_BASETYPE);
13198   if (TREE_CODE (t) == ARRAY_TYPE)
13199     verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13200   /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13201      or even the type's main variant.  This is needed to make bootstrap pass,
13202      and the bug seems new in GCC 5.
13203      The C++ FE should be updated to make this consistent, and we should check
13204      that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
13205      is a match with the main variant.
13206 
13207      Also disable the check for Java for now because of a parser hack that
13208      builds first a dummy BINFO and then sometimes replaces it with the real
13209      BINFO in some of the copies.  */
13210   if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13211       && TYPE_BINFO (t) != TYPE_BINFO (tv)
13212       /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
13213 	 Since there is no cheap way to tell a C++ type from a Java one w/o LTO,
13214 	 do the checking at LTO time only.  */
13215       && (in_lto_p && odr_type_p (t)))
13216     {
13217       error ("type variant has different TYPE_BINFO");
13218       debug_tree (tv);
13219       error ("type variant's TYPE_BINFO");
13220       debug_tree (TYPE_BINFO (tv));
13221       error ("type's TYPE_BINFO");
13222       debug_tree (TYPE_BINFO (t));
13223       return false;
13224     }
13225 
13226   /* Check various uses of TYPE_VALUES_RAW.  */
13227   if (TREE_CODE (t) == ENUMERAL_TYPE)
13228     verify_variant_match (TYPE_VALUES);
13229   else if (TREE_CODE (t) == ARRAY_TYPE)
13230     verify_variant_match (TYPE_DOMAIN);
13231   /* Permit incomplete variants of a complete type.  While FEs may complete
13232      all variants, this does not happen for C++ templates in all cases.  */
13233   else if (RECORD_OR_UNION_TYPE_P (t)
13234 	   && COMPLETE_TYPE_P (t)
13235 	   && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
13236     {
13237       tree f1, f2;
13238 
13239       /* Fortran builds qualified variants as new records with items of
13240 	 qualified type.  Verify that they look the same.  */
13241       for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
13242 	   f1 && f2;
13243 	   f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13244 	if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
13245 	    || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
13246 		 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
13247 		/* FIXME: gfc_nonrestricted_type builds all types as variants
13248 		   with the exception of pointer types.  It deeply copies the
13249 		   type, which means that we may end up with a variant type
13250 		   referring to a non-variant pointer.  We may change it to
13251 		   produce types as variants, too, like
13252 		   objc_get_protocol_qualified_type does.  */
13253 		&& !POINTER_TYPE_P (TREE_TYPE (f1)))
13254 	    || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
13255 	    || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
13256 	  break;
13257       if (f1 || f2)
13258 	{
13259 	  error ("type variant has different TYPE_FIELDS");
13260 	  debug_tree (tv);
13261 	  error ("first mismatch is field");
13262 	  debug_tree (f1);
13263 	  error ("and field");
13264 	  debug_tree (f2);
13265           return false;
13266 	}
13267     }
13268   else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
13269     verify_variant_match (TYPE_ARG_TYPES);
13270   /* For C++ the qualified variant of an array type is really an array type
13271      of the qualified TREE_TYPE.
13272      ObjC builds variants of pointer types where the pointed-to type is a
13273      variant, too, in objc_get_protocol_qualified_type.  */
13274   if (TREE_TYPE (t) != TREE_TYPE (tv)
13275       && ((TREE_CODE (t) != ARRAY_TYPE
13276 	   && !POINTER_TYPE_P (t))
13277 	  || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13278 	     != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13279     {
13280       error ("type variant has different TREE_TYPE");
13281       debug_tree (tv);
13282       error ("type variant's TREE_TYPE");
13283       debug_tree (TREE_TYPE (tv));
13284       error ("type's TREE_TYPE");
13285       debug_tree (TREE_TYPE (t));
13286       return false;
13287     }
13288   if (type_with_alias_set_p (t)
13289       && !gimple_canonical_types_compatible_p (t, tv, false))
13290     {
13291       error ("type is not compatible with its variant");
13292       debug_tree (tv);
13293       error ("type variant's TREE_TYPE");
13294       debug_tree (TREE_TYPE (tv));
13295       error ("type's TREE_TYPE");
13296       debug_tree (TREE_TYPE (t));
13297       return false;
13298     }
13299   return true;
13300 #undef verify_variant_match
13301 }
13302 
13303 
13304 /* The TYPE_CANONICAL merging machinery.  It should closely resemble
13305    the middle-end types_compatible_p function.  It needs to avoid
13306    claiming types are different for types that should be treated
13307    the same with respect to TBAA.  Canonical types are also used
13308    for IL consistency checks via the useless_type_conversion_p
13309    predicate which does not handle all type kinds itself but falls
13310    back to pointer-comparison of TYPE_CANONICAL for aggregates
13311    for example.  */
13312 
13313 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13314    type calculation because we need to allow inter-operability between signed
13315    and unsigned variants.  */
13316 
13317 bool
13318 type_with_interoperable_signedness (const_tree type)
13319 {
13320   /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with
13321      both signed char and unsigned char.  Similarly the Fortran FE builds
13322      C_SIZE_T as a signed type, while C defines it as unsigned.  */
13323 
13324   return tree_code_for_canonical_type_merging (TREE_CODE (type))
13325 	   == INTEGER_TYPE
13326          && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13327 	     || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13328 }
13329 
13330 /* Return true iff T1 and T2 are structurally identical as far as
13331    TBAA is concerned.
13332    This function is used both by lto.c canonical type merging and by the
13333    verifier.  If TRUST_TYPE_CANONICAL we do not look into the structure of
13334    types that have TYPE_CANONICAL defined and assume them equivalent.  This
13335    is useful only for LTO because only in these cases does TYPE_CANONICAL
13336    equivalence correspond to the one defined by gimple_canonical_types_compatible_p.  */
13337 
13338 bool
13339 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13340 				     bool trust_type_canonical)
13341 {
13342   /* Type variants should be same as the main variant.  When not doing sanity
13343      checking to verify this fact, go to main variants and save some work.  */
13344   if (trust_type_canonical)
13345     {
13346       t1 = TYPE_MAIN_VARIANT (t1);
13347       t2 = TYPE_MAIN_VARIANT (t2);
13348     }
13349 
13350   /* Check first for the obvious case of pointer identity.  */
13351   if (t1 == t2)
13352     return true;
13353 
13354   /* Check that we have two types to compare.  */
13355   if (t1 == NULL_TREE || t2 == NULL_TREE)
13356     return false;
13357 
13358   /* We consider complete types always compatible with incomplete types.
13359      This does not make sense for canonical type calculation and thus we
13360      need to ensure that we are never called in that case.
13361 
13362      FIXME: For more correctness the function probably should have three modes
13363 	1) a mode assuming that types are complete, matching their structure
13364 	2) mode allowing incomplete types but producing equivalence classes
13365 	   and thus ignoring all info from complete types
13366 	3) mode allowing incomplete types to match complete but checking
13367 	   compatibility between complete types.
13368 
13369      1 and 2 can be used for canonical type calculation. 3 is the real
13370      definition of type compatibility that can be used i.e. for warnings during
13371      declaration merging.  */
13372 
13373   gcc_assert (!trust_type_canonical
13374 	      || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13375   /* If the types have been previously registered and found equal
13376      they still are.  */
13377 
13378   if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13379       && trust_type_canonical)
13380     {
13381       /* Do not use TYPE_CANONICAL of pointer types.  For LTO streamed types
13382 	 they are always NULL, but they are set to non-NULL for types
13383 	 constructed by build_pointer_type and variants.  In this case the
13384 	 TYPE_CANONICAL is more fine-grained than the equivalence we test (where
13385 	 all pointers are considered equal).  Be sure not to return false
13386 	 negatives.  */
13387       gcc_checking_assert (canonical_type_used_p (t1)
13388 			   && canonical_type_used_p (t2));
13389       return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13390     }
13391 
13392   /* Can't be the same type if the types don't have the same code.  */
13393   enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13394   if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13395     return false;
13396 
13397   /* Qualifiers do not matter for canonical type comparison purposes.  */
13398 
13399   /* Void types and nullptr types are always the same.  */
13400   if (TREE_CODE (t1) == VOID_TYPE
13401       || TREE_CODE (t1) == NULLPTR_TYPE)
13402     return true;
13403 
13404   /* Can't be the same type if they have different mode.  */
13405   if (TYPE_MODE (t1) != TYPE_MODE (t2))
13406     return false;
13407 
13408   /* Non-aggregate types can be handled cheaply.  */
13409   if (INTEGRAL_TYPE_P (t1)
13410       || SCALAR_FLOAT_TYPE_P (t1)
13411       || FIXED_POINT_TYPE_P (t1)
13412       || TREE_CODE (t1) == VECTOR_TYPE
13413       || TREE_CODE (t1) == COMPLEX_TYPE
13414       || TREE_CODE (t1) == OFFSET_TYPE
13415       || POINTER_TYPE_P (t1))
13416     {
13417       /* Can't be the same type if they have different precision.  */
13418       if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13419 	return false;
13420 
13421       /* In some cases the signed and unsigned types are required to be
13422 	 inter-operable.  */
13423       if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13424 	  && !type_with_interoperable_signedness (t1))
13425 	return false;
13426 
13427       /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13428 	 interoperable with "signed char".  Unless all frontends are revisited
13429 	 to agree on these types, we must ignore the flag completely.  */
13430 
13431       /* The Fortran standard defines the C_PTR type to be compatible with
13432 	 every C pointer.  For this reason we need to glob all pointers into one.
13433 	 Still, pointers in different address spaces are not compatible.  */
13434       if (POINTER_TYPE_P (t1))
13435 	{
13436 	  if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13437 	      != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13438 	    return false;
13439 	}
13440 
13441       /* Tail-recurse to components.  */
13442       if (TREE_CODE (t1) == VECTOR_TYPE
13443 	  || TREE_CODE (t1) == COMPLEX_TYPE)
13444 	return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13445 						    TREE_TYPE (t2),
13446 						    trust_type_canonical);
13447 
13448       return true;
13449     }
13450 
13451   /* Do type-specific comparisons.  */
13452   switch (TREE_CODE (t1))
13453     {
13454     case ARRAY_TYPE:
13455       /* Array types are the same if the element types are the same and
13456 	 the number of elements are the same.  */
13457       if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13458 						trust_type_canonical)
13459 	  || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13460 	  || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13461 	  || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13462 	return false;
13463       else
13464 	{
13465 	  tree i1 = TYPE_DOMAIN (t1);
13466 	  tree i2 = TYPE_DOMAIN (t2);
13467 
13468 	  /* For an incomplete external array, the type domain can be
13469  	     NULL_TREE.  Check this condition also.  */
13470 	  if (i1 == NULL_TREE && i2 == NULL_TREE)
13471 	    return true;
13472 	  else if (i1 == NULL_TREE || i2 == NULL_TREE)
13473 	    return false;
13474 	  else
13475 	    {
13476 	      tree min1 = TYPE_MIN_VALUE (i1);
13477 	      tree min2 = TYPE_MIN_VALUE (i2);
13478 	      tree max1 = TYPE_MAX_VALUE (i1);
13479 	      tree max2 = TYPE_MAX_VALUE (i2);
13480 
13481 	      /* The minimum/maximum values have to be the same.  */
13482 	      if ((min1 == min2
13483 		   || (min1 && min2
13484 		       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13485 			    && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13486 		           || operand_equal_p (min1, min2, 0))))
13487 		  && (max1 == max2
13488 		      || (max1 && max2
13489 			  && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13490 			       && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13491 			      || operand_equal_p (max1, max2, 0)))))
13492 		return true;
13493 	      else
13494 		return false;
13495 	    }
13496 	}
13497 
13498     case METHOD_TYPE:
13499     case FUNCTION_TYPE:
13500       /* Function types are the same if the return type and arguments types
13501 	 are the same.  */
13502       if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13503 						trust_type_canonical))
13504 	return false;
13505 
13506       if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13507 	return true;
13508       else
13509 	{
13510 	  tree parms1, parms2;
13511 
13512 	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13513 	       parms1 && parms2;
13514 	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13515 	    {
13516 	      if (!gimple_canonical_types_compatible_p
13517 		     (TREE_VALUE (parms1), TREE_VALUE (parms2),
13518 		      trust_type_canonical))
13519 		return false;
13520 	    }
13521 
13522 	  if (parms1 || parms2)
13523 	    return false;
13524 
13525 	  return true;
13526 	}
13527 
13528     case RECORD_TYPE:
13529     case UNION_TYPE:
13530     case QUAL_UNION_TYPE:
13531       {
13532 	tree f1, f2;
13533 
13534 	/* Don't try to compare variants of an incomplete type before
13535 	   TYPE_FIELDS has been copied around.  */
13536 	if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13537 	  return true;
13538 
13539 
13540 	if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13541 	  return false;
13542 
13543 	/* For aggregate types, all the fields must be the same.  */
13544 	for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13545 	     f1 || f2;
13546 	     f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13547 	  {
13548 	    /* Skip non-fields and zero-sized fields.  */
13549 	    while (f1 && (TREE_CODE (f1) != FIELD_DECL
13550 			  || (DECL_SIZE (f1)
13551 			      && integer_zerop (DECL_SIZE (f1)))))
13552 	      f1 = TREE_CHAIN (f1);
13553 	    while (f2 && (TREE_CODE (f2) != FIELD_DECL
13554 			  || (DECL_SIZE (f2)
13555 			      && integer_zerop (DECL_SIZE (f2)))))
13556 	      f2 = TREE_CHAIN (f2);
13557 	    if (!f1 || !f2)
13558 	      break;
13559 	    /* The fields must have the same name, offset and type.  */
13560 	    if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13561 		|| !gimple_compare_field_offset (f1, f2)
13562 		|| !gimple_canonical_types_compatible_p
13563 		      (TREE_TYPE (f1), TREE_TYPE (f2),
13564 		       trust_type_canonical))
13565 	      return false;
13566 	  }
13567 
13568 	/* If one aggregate has more fields than the other, they
13569 	   are not the same.  */
13570 	if (f1 || f2)
13571 	  return false;
13572 
13573 	return true;
13574       }
13575 
13576     default:
13577       /* Consider all types with language-specific trees in them mutually
13578 	 compatible.  This is executed only from verify_type, and false
13579 	 positives can be tolerated.  */
13580       gcc_assert (!in_lto_p);
13581       return true;
13582     }
13583 }
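
/* Usage sketch (illustration only, kept out of the build with #if 0): a
   hypothetical helper showing how a caller outside of type verification
   might use the predicate above.  The helper name is an assumption made
   for this example and is not an existing GCC function.  */
#if 0
static bool
example_types_interchangeable_p (tree t1, tree t2)
{
  /* Outside of verify_type, TYPE_CANONICAL can be trusted, so pass true
     for trust_type_canonical; verify_type itself passes false.  */
  return gimple_canonical_types_compatible_p (t1, t2, true);
}
#endif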
13584 
13585 /* Verify type T.  */
13586 
13587 void
13588 verify_type (const_tree t)
13589 {
13590   bool error_found = false;
13591   tree mv = TYPE_MAIN_VARIANT (t);
13592   if (!mv)
13593     {
13594       error ("Main variant is not defined");
13595       error_found = true;
13596     }
13597   else if (mv != TYPE_MAIN_VARIANT (mv))
13598     {
13599       error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
13600       debug_tree (mv);
13601       error_found = true;
13602     }
13603   else if (t != mv && !verify_type_variant (t, mv))
13604     error_found = true;
13605 
13606   tree ct = TYPE_CANONICAL (t);
13607   if (!ct)
13608     ;
13609   else if (TYPE_CANONICAL (t) != ct)
13610     {
13611       error ("TYPE_CANONICAL has different TYPE_CANONICAL");
13612       debug_tree (ct);
13613       error_found = true;
13614     }
13615   /* Method and function types cannot be used to address memory and thus
13616      TYPE_CANONICAL really matters only for determining useless conversions.
13617 
13618      FIXME: The C++ FE produces declarations of builtin functions that are not
13619      compatible with main variants.  */
13620   else if (TREE_CODE (t) == FUNCTION_TYPE)
13621     ;
13622   else if (t != ct
13623 	   /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13624 	      with variably sized arrays because their sizes may be
13625 	      gimplified to different variables.  */
13626 	   && !variably_modified_type_p (ct, NULL)
13627 	   && !gimple_canonical_types_compatible_p (t, ct, false))
13628     {
13629       error ("TYPE_CANONICAL is not compatible");
13630       debug_tree (ct);
13631       error_found = true;
13632     }
13633 
13634   if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
13635       && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
13636     {
13637       error ("TYPE_MODE of TYPE_CANONICAL is not compatible");
13638       debug_tree (ct);
13639       error_found = true;
13640     }
13641   /* FIXME: this is violated by the C++ FE as discussed in PR70029, when
13642      FUNCTION_*_QUALIFIED flags are set.  */
13643   if (0 && TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
13644    {
13645       error ("TYPE_CANONICAL of main variant is not main variant");
13646       debug_tree (ct);
13647       debug_tree (TYPE_MAIN_VARIANT (ct));
13648       error_found = true;
13649    }
13650 
13651 
13652   /* Check various uses of TYPE_MINVAL.  */
13653   if (RECORD_OR_UNION_TYPE_P (t))
13654     {
13655       /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13656 	 and dangles the pointer from time to time.  */
13657       if (TYPE_VFIELD (t)
13658 	  && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13659 	  && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13660 	{
13661 	  error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
13662 	  debug_tree (TYPE_VFIELD (t));
13663 	  error_found = true;
13664 	}
13665     }
13666   else if (TREE_CODE (t) == POINTER_TYPE)
13667     {
13668       if (TYPE_NEXT_PTR_TO (t)
13669 	  && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13670 	{
13671 	  error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
13672 	  debug_tree (TYPE_NEXT_PTR_TO (t));
13673 	  error_found = true;
13674 	}
13675     }
13676   else if (TREE_CODE (t) == REFERENCE_TYPE)
13677     {
13678       if (TYPE_NEXT_REF_TO (t)
13679 	  && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13680 	{
13681 	  error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
13682 	  debug_tree (TYPE_NEXT_REF_TO (t));
13683 	  error_found = true;
13684 	}
13685     }
13686   else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13687 	   || TREE_CODE (t) == FIXED_POINT_TYPE)
13688     {
13689       /* FIXME: The following check should pass:
13690 	  useless_type_conversion_p (const_cast <tree> (t),
13691 				     TREE_TYPE (TYPE_MIN_VALUE (t)))
13692 	 but does not for C sizetypes in LTO.  */
13693     }
13694   /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE.  */
13695   else if (TYPE_MINVAL (t)
13696 	   && ((TREE_CODE (t) != METHOD_TYPE && TREE_CODE (t) != FUNCTION_TYPE)
13697 	       || in_lto_p))
13698     {
13699       error ("TYPE_MINVAL non-NULL");
13700       debug_tree (TYPE_MINVAL (t));
13701       error_found = true;
13702     }
13703 
13704   /* Check various uses of TYPE_MAXVAL.  */
13705   if (RECORD_OR_UNION_TYPE_P (t))
13706     {
13707       if (TYPE_METHODS (t) && TREE_CODE (TYPE_METHODS (t)) != FUNCTION_DECL
13708 	  && TREE_CODE (TYPE_METHODS (t)) != TEMPLATE_DECL
13709 	  && TYPE_METHODS (t) != error_mark_node)
13710 	{
13711 	  error ("TYPE_METHODS is not FUNCTION_DECL, TEMPLATE_DECL nor error_mark_node");
13712 	  debug_tree (TYPE_METHODS (t));
13713 	  error_found = true;
13714 	}
13715     }
13716   else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13717     {
13718       if (TYPE_METHOD_BASETYPE (t)
13719 	  && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13720 	  && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13721 	{
13722 	  error ("TYPE_METHOD_BASETYPE is not record nor union");
13723 	  debug_tree (TYPE_METHOD_BASETYPE (t));
13724 	  error_found = true;
13725 	}
13726     }
13727   else if (TREE_CODE (t) == OFFSET_TYPE)
13728     {
13729       if (TYPE_OFFSET_BASETYPE (t)
13730 	  && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13731 	  && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13732 	{
13733 	  error ("TYPE_OFFSET_BASETYPE is not record nor union");
13734 	  debug_tree (TYPE_OFFSET_BASETYPE (t));
13735 	  error_found = true;
13736 	}
13737     }
13738   else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13739 	   || TREE_CODE (t) == FIXED_POINT_TYPE)
13740     {
13741       /* FIXME: The following check should pass:
13742 	  useless_type_conversion_p (const_cast <tree> (t),
13743 				     TREE_TYPE (TYPE_MAX_VALUE (t)))
13744 	 but does not for C sizetypes in LTO.  */
13745     }
13746   else if (TREE_CODE (t) == ARRAY_TYPE)
13747     {
13748       if (TYPE_ARRAY_MAX_SIZE (t)
13749 	  && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13750         {
13751 	  error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
13752 	  debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13753 	  error_found = true;
13754         }
13755     }
13756   else if (TYPE_MAXVAL (t))
13757     {
13758       error ("TYPE_MAXVAL non-NULL");
13759       debug_tree (TYPE_MAXVAL (t));
13760       error_found = true;
13761     }
13762 
13763   /* Check various uses of TYPE_BINFO.  */
13764   if (RECORD_OR_UNION_TYPE_P (t))
13765     {
13766       if (!TYPE_BINFO (t))
13767 	;
13768       else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13769 	{
13770 	  error ("TYPE_BINFO is not TREE_BINFO");
13771 	  debug_tree (TYPE_BINFO (t));
13772 	  error_found = true;
13773 	}
13774       /* FIXME: Java builds invalid empty binfos that do not have
13775          TREE_TYPE set.  */
13776       else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t) && 0)
13777 	{
13778 	  error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT");
13779 	  debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13780 	  error_found = true;
13781 	}
13782     }
13783   else if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13784     {
13785       error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
13786       debug_tree (TYPE_LANG_SLOT_1 (t));
13787       error_found = true;
13788     }
13789 
13790   /* Check various uses of TYPE_VALUES_RAW.  */
13791   if (TREE_CODE (t) == ENUMERAL_TYPE)
13792     for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13793       {
13794 	tree value = TREE_VALUE (l);
13795 	tree name = TREE_PURPOSE (l);
13796 
13797 	/* The C FE produces an INTEGER_CST of INTEGER_TYPE, while the C++ FE uses
13798 	   a CONST_DECL of ENUMERAL_TYPE.  */
13799 	if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13800 	  {
13801 	    error ("Enum value is not CONST_DECL or INTEGER_CST");
13802 	    debug_tree (value);
13803 	    debug_tree (name);
13804 	    error_found = true;
13805 	  }
13806 	if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13807 	    && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13808 	  {
13809 	    error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
13810 	    debug_tree (value);
13811 	    debug_tree (name);
13812 	    error_found = true;
13813 	  }
13814 	if (TREE_CODE (name) != IDENTIFIER_NODE)
13815 	  {
13816 	    error ("Enum value name is not IDENTIFIER_NODE");
13817 	    debug_tree (value);
13818 	    debug_tree (name);
13819 	    error_found = true;
13820 	  }
13821       }
13822   else if (TREE_CODE (t) == ARRAY_TYPE)
13823     {
13824       if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13825 	{
13826 	  error ("Array TYPE_DOMAIN is not integer type");
13827 	  debug_tree (TYPE_DOMAIN (t));
13828 	  error_found = true;
13829 	}
13830     }
13831   else if (RECORD_OR_UNION_TYPE_P (t))
13832     {
13833       if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
13834 	{
13835 	  error ("TYPE_FIELDS defined in incomplete type");
13836 	  error_found = true;
13837 	}
13838       for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13839 	{
13840 	  /* TODO: verify properties of decls.  */
13841 	  if (TREE_CODE (fld) == FIELD_DECL)
13842 	    ;
13843 	  else if (TREE_CODE (fld) == TYPE_DECL)
13844 	    ;
13845 	  else if (TREE_CODE (fld) == CONST_DECL)
13846 	    ;
13847 	  else if (TREE_CODE (fld) == VAR_DECL)
13848 	    ;
13849 	  else if (TREE_CODE (fld) == TEMPLATE_DECL)
13850 	    ;
13851 	  else if (TREE_CODE (fld) == USING_DECL)
13852 	    ;
13853 	  else
13854 	    {
13855 	      error ("Wrong tree in TYPE_FIELDS list");
13856 	      debug_tree (fld);
13857 	      error_found = true;
13858 	    }
13859 	}
13860     }
13861   else if (TREE_CODE (t) == INTEGER_TYPE
13862 	   || TREE_CODE (t) == BOOLEAN_TYPE
13863 	   || TREE_CODE (t) == OFFSET_TYPE
13864 	   || TREE_CODE (t) == REFERENCE_TYPE
13865 	   || TREE_CODE (t) == NULLPTR_TYPE
13866 	   || TREE_CODE (t) == POINTER_TYPE)
13867     {
13868       if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13869 	{
13870 	  error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
13871 		 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13872 	  error_found = true;
13873 	}
13874       else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13875 	{
13876 	  error ("TYPE_CACHED_VALUES is not TREE_VEC");
13877 	  debug_tree (TYPE_CACHED_VALUES (t));
13878 	  error_found = true;
13879 	}
13880       /* Verify just enough of the cache to ensure that no one copied it to a new
13881 	 type.  All copying should go through copy_node, which should clear it.  */
13882       else if (TYPE_CACHED_VALUES_P (t))
13883 	{
13884 	  int i;
13885 	  for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13886 	    if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13887 		&& TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13888 	      {
13889 		error ("wrong TYPE_CACHED_VALUES entry");
13890 		debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13891 		error_found = true;
13892 		break;
13893 	      }
13894 	}
13895     }
13896   else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13897     for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13898       {
13899 	/* C++ FE uses TREE_PURPOSE to store initial values.  */
13900 	if (TREE_PURPOSE (l) && in_lto_p)
13901 	  {
13902 	    error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
13903 	    debug_tree (l);
13904 	    error_found = true;
13905 	  }
13906 	if (!TYPE_P (TREE_VALUE (l)))
13907 	  {
13908 	    error ("Wrong entry in TYPE_ARG_TYPES list");
13909 	    debug_tree (l);
13910 	    error_found = true;
13911 	  }
13912       }
13913   else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13914     {
13915       error ("TYPE_VALUES_RAW field is non-NULL");
13916       debug_tree (TYPE_VALUES_RAW (t));
13917       error_found = true;
13918     }
13919   if (TREE_CODE (t) != INTEGER_TYPE
13920       && TREE_CODE (t) != BOOLEAN_TYPE
13921       && TREE_CODE (t) != OFFSET_TYPE
13922       && TREE_CODE (t) != REFERENCE_TYPE
13923       && TREE_CODE (t) != NULLPTR_TYPE
13924       && TREE_CODE (t) != POINTER_TYPE
13925       && TYPE_CACHED_VALUES_P (t))
13926     {
13927       error ("TYPE_CACHED_VALUES_P is set while it should not");
13928       error_found = true;
13929     }
13930   if (TYPE_STRING_FLAG (t)
13931       && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE)
13932     {
13933       error ("TYPE_STRING_FLAG is set on wrong type code");
13934       error_found = true;
13935     }
13936   else if (TYPE_STRING_FLAG (t))
13937     {
13938       const_tree b = t;
13939       if (TREE_CODE (b) == ARRAY_TYPE)
13940 	b = TREE_TYPE (t);
13941       /* Java builds arrays with TYPE_STRING_FLAG of promoted_char_type
13942 	 that is 32 bits.  */
13943       if (TREE_CODE (b) != INTEGER_TYPE)
13944 	{
13945 	  error ("TYPE_STRING_FLAG is set on type that does not look like "
13946 		 "char nor array of chars");
13947 	  error_found = true;
13948 	}
13949     }
13950 
13951   /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
13952      TYPE_MAIN_VARIANT and it would be odd to add methods only to variants
13953      of a type.  */
13954   if (TREE_CODE (t) == METHOD_TYPE
13955       && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
13956     {
13957 	error ("TYPE_METHOD_BASETYPE is not main variant");
13958 	error_found = true;
13959     }
13960 
13961   if (error_found)
13962     {
13963       debug_tree (const_cast <tree> (t));
13964       internal_error ("verify_type failed");
13965     }
13966 }
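
/* Usage sketch (illustration only, kept out of the build with #if 0): a front
   end or checker could sanity-check a freshly constructed type node; the
   wrapper name and the NEW_TYPE parameter are assumptions for this example.  */
#if 0
static void
example_check_new_type (tree new_type)
{
  /* verify_type aborts via internal_error when an invariant is violated,
     so only run it under checking.  */
  if (flag_checking)
    verify_type (new_type);
}
#endif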
13967 
13968 
13969 /* Return true if ARG is marked with the nonnull attribute in the
13970    current function signature.  */
13971 
13972 bool
13973 nonnull_arg_p (const_tree arg)
13974 {
13975   tree t, attrs, fntype;
13976   unsigned HOST_WIDE_INT arg_num;
13977 
13978   gcc_assert (TREE_CODE (arg) == PARM_DECL
13979 	      && (POINTER_TYPE_P (TREE_TYPE (arg))
13980 		  || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
13981 
13982   /* The static chain decl is always non-null.  */
13983   if (arg == cfun->static_chain_decl)
13984     return true;
13985 
13986   /* THIS argument of method is always non-NULL.  */
13987   if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
13988       && arg == DECL_ARGUMENTS (cfun->decl)
13989       && flag_delete_null_pointer_checks)
13990     return true;
13991 
13992   /* Values passed by reference are always non-NULL.  */
13993   if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
13994       && flag_delete_null_pointer_checks)
13995     return true;
13996 
13997   fntype = TREE_TYPE (cfun->decl);
13998   for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
13999     {
14000       attrs = lookup_attribute ("nonnull", attrs);
14001 
14002       /* If "nonnull" wasn't specified, we know nothing about the argument.  */
14003       if (attrs == NULL_TREE)
14004 	return false;
14005 
14006       /* If "nonnull" applies to all the arguments, then ARG is non-null.  */
14007       if (TREE_VALUE (attrs) == NULL_TREE)
14008 	return true;
14009 
14010       /* Get the position number for ARG in the function signature.  */
14011       for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
14012 	   t;
14013 	   t = DECL_CHAIN (t), arg_num++)
14014 	{
14015 	  if (t == arg)
14016 	    break;
14017 	}
14018 
14019       gcc_assert (t == arg);
14020 
14021       /* Now see if ARG_NUM is mentioned in the nonnull list.  */
14022       for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
14023 	{
14024 	  if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
14025 	    return true;
14026 	}
14027     }
14028 
14029   return false;
14030 }
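
/* Usage sketch (illustration only, kept out of the build with #if 0): a pass
   operating on the current function could walk its parameters and seed
   non-nullness information like this; the helper name is hypothetical and the
   recording step is left abstract.  */
#if 0
static void
example_record_nonnull_parms (void)
{
  for (tree parm = DECL_ARGUMENTS (cfun->decl); parm; parm = DECL_CHAIN (parm))
    if (POINTER_TYPE_P (TREE_TYPE (parm)) && nonnull_arg_p (parm))
      {
	/* PARM is known to be non-NULL on entry to the function.  */
      }
}
#endif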
14031 
14032 /* Given location LOC, strip away any packed range information
14033    or ad-hoc information.  */
14034 
14035 location_t
14036 get_pure_location (location_t loc)
14037 {
14038   if (IS_ADHOC_LOC (loc))
14039     loc
14040       = line_table->location_adhoc_data_map.data[loc & MAX_SOURCE_LOCATION].locus;
14041 
14042   if (loc >= LINEMAPS_MACRO_LOWEST_LOCATION (line_table))
14043     return loc;
14044 
14045   if (loc < RESERVED_LOCATION_COUNT)
14046     return loc;
14047 
14048   const line_map *map = linemap_lookup (line_table, loc);
14049   const line_map_ordinary *ordmap = linemap_check_ordinary (map);
14050 
14051   return loc & ~((1 << ordmap->m_range_bits) - 1);
14052 }
14053 
14054 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14055    information.  */
14056 
14057 location_t
14058 set_block (location_t loc, tree block)
14059 {
14060   location_t pure_loc = get_pure_location (loc);
14061   source_range src_range = get_range_from_loc (line_table, loc);
14062   return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
14063 }
14064 
14065 location_t
14066 set_source_range (tree expr, location_t start, location_t finish)
14067 {
14068   source_range src_range;
14069   src_range.m_start = start;
14070   src_range.m_finish = finish;
14071   return set_source_range (expr, src_range);
14072 }
14073 
14074 location_t
14075 set_source_range (tree expr, source_range src_range)
14076 {
14077   if (!EXPR_P (expr))
14078     return UNKNOWN_LOCATION;
14079 
14080   location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
14081   location_t adhoc = COMBINE_LOCATION_DATA (line_table,
14082 					    pure_loc,
14083 					    src_range,
14084 					    NULL);
14085   SET_EXPR_LOCATION (expr, adhoc);
14086   return adhoc;
14087 }
14088 
14089 location_t
14090 make_location (location_t caret, location_t start, location_t finish)
14091 {
14092   location_t pure_loc = get_pure_location (caret);
14093   source_range src_range;
14094   src_range.m_start = start;
14095   src_range.m_finish = finish;
14096   location_t combined_loc = COMBINE_LOCATION_DATA (line_table,
14097 						   pure_loc,
14098 						   src_range,
14099 						   NULL);
14100   return combined_loc;
14101 }
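
/* Usage sketch (illustration only, kept out of the build with #if 0): a front
   end holding a caret location and the locations of the two operands of a
   binary expression could build and attach a rich location like this; EXPR is
   assumed to be an expression node.  */
#if 0
static void
example_attach_rich_location (tree expr, location_t caret,
			      location_t lhs_loc, location_t rhs_loc)
{
  /* The caret stays at the operator; the range spans both operands.  */
  location_t loc = make_location (caret, lhs_loc, rhs_loc);
  SET_EXPR_LOCATION (expr, loc);
}
#endif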
14102 
14103 /* Return the name of combined function FN, for debugging purposes.  */
14104 
14105 const char *
14106 combined_fn_name (combined_fn fn)
14107 {
14108   if (builtin_fn_p (fn))
14109     {
14110       tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14111       return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14112     }
14113   else
14114     return internal_fn_name (as_internal_fn (fn));
14115 }
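
/* Usage sketch (illustration only, kept out of the build with #if 0): dumping
   the name of a built-in viewed as a combined function; assumes the explicit
   builtin decl for BUILT_IN_MEMCPY has been created.  */
#if 0
static void
example_dump_combined_fn_name (FILE *file)
{
  fprintf (file, "combined fn: %s\n",
	   combined_fn_name (as_combined_fn (BUILT_IN_MEMCPY)));
}
#endif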
14116 
14117 #include "gt-tree.h"
14118