xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/tree.c (revision afab4e300d3a9fb07dd8c80daf53d0feb3345706)
1 /* Language-independent node constructors for parse phase of GNU compiler.
2    Copyright (C) 1987-2020 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /* This file contains the low level primitives for operating on tree nodes,
21    including allocation, list operations, interning of identifiers,
22    construction of data type nodes and statement nodes,
23    and construction of type conversion nodes.  It also contains
24    tables indexed by tree code that describe how to take apart
25    nodes of that code.
26 
27    It is intended to be language-independent but can occasionally
28    call language-dependent routines.  */
29 
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "langhooks-def.h"
58 #include "tree-diagnostic.h"
59 #include "except.h"
60 #include "builtins.h"
61 #include "print-tree.h"
62 #include "ipa-utils.h"
63 #include "selftest.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "rtl.h"
67 #include "regs.h"
68 #include "tree-vector-builder.h"
69 #include "gimple-fold.h"
70 #include "escaped_string.h"
71 
72 /* Tree code classes.  */
73 
74 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
75 #define END_OF_BASE_TREE_CODES tcc_exceptional,
76 
77 const enum tree_code_class tree_code_type[] = {
78 #include "all-tree.def"
79 };
80 
81 #undef DEFTREECODE
82 #undef END_OF_BASE_TREE_CODES
83 
84 /* Table indexed by tree code giving number of expression
85    operands beyond the fixed part of the node structure.
86    Not used for types or decls.  */
87 
88 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
89 #define END_OF_BASE_TREE_CODES 0,
90 
91 const unsigned char tree_code_length[] = {
92 #include "all-tree.def"
93 };
94 
95 #undef DEFTREECODE
96 #undef END_OF_BASE_TREE_CODES
97 
98 /* Names of tree components.
99    Used for printing out the tree and error messages.  */
100 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
101 #define END_OF_BASE_TREE_CODES "@dummy",
102 
103 static const char *const tree_code_name[] = {
104 #include "all-tree.def"
105 };
106 
107 #undef DEFTREECODE
108 #undef END_OF_BASE_TREE_CODES
109 
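/* Worked example (editorial note): the tree.def entry
   DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2) contributes
   tcc_binary to tree_code_type, 2 to tree_code_length and "plus_expr"
   to tree_code_name, all at the index given by the PLUS_EXPR code.  */
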
110 /* Each tree code class has an associated string representation.
111    These must correspond to the tree_code_class entries.  */
112 
113 const char *const tree_code_class_strings[] =
114 {
115   "exceptional",
116   "constant",
117   "type",
118   "declaration",
119   "reference",
120   "comparison",
121   "unary",
122   "binary",
123   "statement",
124   "vl_exp",
125   "expression"
126 };
127 
128 /* obstack.[ch] explicitly declined to prototype this.  */
129 extern int _obstack_allocated_p (struct obstack *h, void *obj);
130 
131 /* Statistics-gathering stuff.  */
132 
133 static uint64_t tree_code_counts[MAX_TREE_CODES];
134 uint64_t tree_node_counts[(int) all_kinds];
135 uint64_t tree_node_sizes[(int) all_kinds];
136 
137 /* Keep in sync with tree.h:enum tree_node_kind.  */
138 static const char * const tree_node_kind_names[] = {
139   "decls",
140   "types",
141   "blocks",
142   "stmts",
143   "refs",
144   "exprs",
145   "constants",
146   "identifiers",
147   "vecs",
148   "binfos",
149   "ssa names",
150   "constructors",
151   "random kinds",
152   "lang_decl kinds",
153   "lang_type kinds",
154   "omp clauses",
155 };
156 
157 /* Unique id for next decl created.  */
158 static GTY(()) int next_decl_uid;
159 /* Unique id for next type created.  */
160 static GTY(()) unsigned next_type_uid = 1;
161 /* Unique id for next debug decl created.  Use negative numbers,
162    to catch erroneous uses.  */
163 static GTY(()) int next_debug_decl_uid;
164 
165 /* Since we cannot rehash a type after it is in the table, we have to
166    keep the hash code.  */
167 
168 struct GTY((for_user)) type_hash {
169   unsigned long hash;
170   tree type;
171 };
172 
173 /* Initial size of the hash table (rounded to next prime).  */
174 #define TYPE_HASH_INITIAL_SIZE 1000
175 
176 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
177 {
178   static hashval_t hash (type_hash *t) { return t->hash; }
179   static bool equal (type_hash *a, type_hash *b);
180 
181   static int
182   keep_cache_entry (type_hash *&t)
183   {
184     return ggc_marked_p (t->type);
185   }
186 };
187 
188 /* Now here is the hash table.  When recording a type, it is added to
189    the slot whose index is the hash code.  Note that the hash table is
190    used for several kinds of types (function types, array types and
191    array index range types, for now).  While all these live in the
192    same table, they are completely independent, and the hash code is
193    computed differently for each of these.  */
194 
195 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
196 
197 /* Hash table and temporary node for larger integer const values.  */
198 static GTY (()) tree int_cst_node;
199 
200 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
201 {
202   static hashval_t hash (tree t);
203   static bool equal (tree x, tree y);
204 };
205 
206 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
207 
208 /* Class and variable for making sure that there is a single POLY_INT_CST
209    for a given value.  */
210 struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
211 {
212   typedef std::pair<tree, const poly_wide_int *> compare_type;
213   static hashval_t hash (tree t);
214   static bool equal (tree x, const compare_type &y);
215 };
216 
217 static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
218 
219 /* Hash table for optimization flags and target option flags.  Use the same
220    hash table for both sets of options.  Nodes for building the current
221    optimization and target option nodes.  The assumption is most of the time
222    the options created will already be in the hash table, so we avoid
223    allocating and freeing up a node repeatedly.  */
224 static GTY (()) tree cl_optimization_node;
225 static GTY (()) tree cl_target_option_node;
226 
227 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
228 {
229   static hashval_t hash (tree t);
230   static bool equal (tree x, tree y);
231 };
232 
233 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
234 
235 /* General tree->tree mapping structure for use in hash tables.  */
236 
237 
238 static GTY ((cache))
239      hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
240 
241 static GTY ((cache))
242      hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
243 
244 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
245 {
246   static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
247 
248   static bool
249   equal (tree_vec_map *a, tree_vec_map *b)
250   {
251     return a->base.from == b->base.from;
252   }
253 
254   static int
255   keep_cache_entry (tree_vec_map *&m)
256   {
257     return ggc_marked_p (m->base.from);
258   }
259 };
260 
261 static GTY ((cache))
262      hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
263 
264 static void set_type_quals (tree, int);
265 static void print_type_hash_statistics (void);
266 static void print_debug_expr_statistics (void);
267 static void print_value_expr_statistics (void);
268 
269 static tree build_array_type_1 (tree, tree, bool, bool, bool);
270 
271 tree global_trees[TI_MAX];
272 tree integer_types[itk_none];
273 
274 bool int_n_enabled_p[NUM_INT_N_ENTS];
275 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
276 
277 bool tree_contains_struct[MAX_TREE_CODES][64];
278 
279 /* Number of operands for each OpenMP clause.  */
280 unsigned const char omp_clause_num_ops[] =
281 {
282   0, /* OMP_CLAUSE_ERROR  */
283   1, /* OMP_CLAUSE_PRIVATE  */
284   1, /* OMP_CLAUSE_SHARED  */
285   1, /* OMP_CLAUSE_FIRSTPRIVATE  */
286   2, /* OMP_CLAUSE_LASTPRIVATE  */
287   5, /* OMP_CLAUSE_REDUCTION  */
288   5, /* OMP_CLAUSE_TASK_REDUCTION  */
289   5, /* OMP_CLAUSE_IN_REDUCTION  */
290   1, /* OMP_CLAUSE_COPYIN  */
291   1, /* OMP_CLAUSE_COPYPRIVATE  */
292   3, /* OMP_CLAUSE_LINEAR  */
293   2, /* OMP_CLAUSE_ALIGNED  */
294   1, /* OMP_CLAUSE_DEPEND  */
295   1, /* OMP_CLAUSE_NONTEMPORAL  */
296   1, /* OMP_CLAUSE_UNIFORM  */
297   1, /* OMP_CLAUSE_TO_DECLARE  */
298   1, /* OMP_CLAUSE_LINK  */
299   2, /* OMP_CLAUSE_FROM  */
300   2, /* OMP_CLAUSE_TO  */
301   2, /* OMP_CLAUSE_MAP  */
302   1, /* OMP_CLAUSE_USE_DEVICE_PTR  */
303   1, /* OMP_CLAUSE_USE_DEVICE_ADDR  */
304   1, /* OMP_CLAUSE_IS_DEVICE_PTR  */
305   1, /* OMP_CLAUSE_INCLUSIVE  */
306   1, /* OMP_CLAUSE_EXCLUSIVE  */
307   2, /* OMP_CLAUSE__CACHE_  */
308   2, /* OMP_CLAUSE_GANG  */
309   1, /* OMP_CLAUSE_ASYNC  */
310   1, /* OMP_CLAUSE_WAIT  */
311   0, /* OMP_CLAUSE_AUTO  */
312   0, /* OMP_CLAUSE_SEQ  */
313   1, /* OMP_CLAUSE__LOOPTEMP_  */
314   1, /* OMP_CLAUSE__REDUCTEMP_  */
315   1, /* OMP_CLAUSE__CONDTEMP_  */
316   1, /* OMP_CLAUSE__SCANTEMP_  */
317   1, /* OMP_CLAUSE_IF  */
318   1, /* OMP_CLAUSE_NUM_THREADS  */
319   1, /* OMP_CLAUSE_SCHEDULE  */
320   0, /* OMP_CLAUSE_NOWAIT  */
321   1, /* OMP_CLAUSE_ORDERED  */
322   0, /* OMP_CLAUSE_DEFAULT  */
323   3, /* OMP_CLAUSE_COLLAPSE  */
324   0, /* OMP_CLAUSE_UNTIED   */
325   1, /* OMP_CLAUSE_FINAL  */
326   0, /* OMP_CLAUSE_MERGEABLE  */
327   1, /* OMP_CLAUSE_DEVICE  */
328   1, /* OMP_CLAUSE_DIST_SCHEDULE  */
329   0, /* OMP_CLAUSE_INBRANCH  */
330   0, /* OMP_CLAUSE_NOTINBRANCH  */
331   1, /* OMP_CLAUSE_NUM_TEAMS  */
332   1, /* OMP_CLAUSE_THREAD_LIMIT  */
333   0, /* OMP_CLAUSE_PROC_BIND  */
334   1, /* OMP_CLAUSE_SAFELEN  */
335   1, /* OMP_CLAUSE_SIMDLEN  */
336   0, /* OMP_CLAUSE_DEVICE_TYPE  */
337   0, /* OMP_CLAUSE_FOR  */
338   0, /* OMP_CLAUSE_PARALLEL  */
339   0, /* OMP_CLAUSE_SECTIONS  */
340   0, /* OMP_CLAUSE_TASKGROUP  */
341   1, /* OMP_CLAUSE_PRIORITY  */
342   1, /* OMP_CLAUSE_GRAINSIZE  */
343   1, /* OMP_CLAUSE_NUM_TASKS  */
344   0, /* OMP_CLAUSE_NOGROUP  */
345   0, /* OMP_CLAUSE_THREADS  */
346   0, /* OMP_CLAUSE_SIMD  */
347   1, /* OMP_CLAUSE_HINT  */
348   0, /* OMP_CLAUSE_DEFAULTMAP  */
349   0, /* OMP_CLAUSE_ORDER  */
350   0, /* OMP_CLAUSE_BIND  */
351   1, /* OMP_CLAUSE__SIMDUID_  */
352   0, /* OMP_CLAUSE__SIMT_  */
353   0, /* OMP_CLAUSE_INDEPENDENT  */
354   1, /* OMP_CLAUSE_WORKER  */
355   1, /* OMP_CLAUSE_VECTOR  */
356   1, /* OMP_CLAUSE_NUM_GANGS  */
357   1, /* OMP_CLAUSE_NUM_WORKERS  */
358   1, /* OMP_CLAUSE_VECTOR_LENGTH  */
359   3, /* OMP_CLAUSE_TILE  */
360   2, /* OMP_CLAUSE__GRIDDIM_  */
361   0, /* OMP_CLAUSE_IF_PRESENT */
362   0, /* OMP_CLAUSE_FINALIZE */
363 };
364 
365 const char * const omp_clause_code_name[] =
366 {
367   "error_clause",
368   "private",
369   "shared",
370   "firstprivate",
371   "lastprivate",
372   "reduction",
373   "task_reduction",
374   "in_reduction",
375   "copyin",
376   "copyprivate",
377   "linear",
378   "aligned",
379   "depend",
380   "nontemporal",
381   "uniform",
382   "to",
383   "link",
384   "from",
385   "to",
386   "map",
387   "use_device_ptr",
388   "use_device_addr",
389   "is_device_ptr",
390   "inclusive",
391   "exclusive",
392   "_cache_",
393   "gang",
394   "async",
395   "wait",
396   "auto",
397   "seq",
398   "_looptemp_",
399   "_reductemp_",
400   "_condtemp_",
401   "_scantemp_",
402   "if",
403   "num_threads",
404   "schedule",
405   "nowait",
406   "ordered",
407   "default",
408   "collapse",
409   "untied",
410   "final",
411   "mergeable",
412   "device",
413   "dist_schedule",
414   "inbranch",
415   "notinbranch",
416   "num_teams",
417   "thread_limit",
418   "proc_bind",
419   "safelen",
420   "simdlen",
421   "device_type",
422   "for",
423   "parallel",
424   "sections",
425   "taskgroup",
426   "priority",
427   "grainsize",
428   "num_tasks",
429   "nogroup",
430   "threads",
431   "simd",
432   "hint",
433   "defaultmap",
434   "order",
435   "bind",
436   "_simduid_",
437   "_simt_",
438   "independent",
439   "worker",
440   "vector",
441   "num_gangs",
442   "num_workers",
443   "vector_length",
444   "tile",
445   "_griddim_",
446   "if_present",
447   "finalize",
448 };
449 
450 
451 /* Return the tree node structure used by tree code CODE.  */
452 
453 static inline enum tree_node_structure_enum
454 tree_node_structure_for_code (enum tree_code code)
455 {
456   switch (TREE_CODE_CLASS (code))
457     {
458     case tcc_declaration:
459       switch (code)
460 	{
461 	case CONST_DECL:	return TS_CONST_DECL;
462 	case DEBUG_EXPR_DECL:	return TS_DECL_WRTL;
463 	case FIELD_DECL:	return TS_FIELD_DECL;
464 	case FUNCTION_DECL:	return TS_FUNCTION_DECL;
465 	case LABEL_DECL:	return TS_LABEL_DECL;
466 	case PARM_DECL:		return TS_PARM_DECL;
467 	case RESULT_DECL:	return TS_RESULT_DECL;
468 	case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
469 	case TYPE_DECL:		return TS_TYPE_DECL;
470 	case VAR_DECL:		return TS_VAR_DECL;
471 	default: 		return TS_DECL_NON_COMMON;
472 	}
473 
474     case tcc_type:		return TS_TYPE_NON_COMMON;
475 
476     case tcc_binary:
477     case tcc_comparison:
478     case tcc_expression:
479     case tcc_reference:
480     case tcc_statement:
481     case tcc_unary:
482     case tcc_vl_exp:		return TS_EXP;
483 
484     default:  /* tcc_constant and tcc_exceptional */
485       break;
486     }
487 
488   switch (code)
489     {
490       /* tcc_constant cases.  */
491     case COMPLEX_CST:		return TS_COMPLEX;
492     case FIXED_CST:		return TS_FIXED_CST;
493     case INTEGER_CST:		return TS_INT_CST;
494     case POLY_INT_CST:		return TS_POLY_INT_CST;
495     case REAL_CST:		return TS_REAL_CST;
496     case STRING_CST:		return TS_STRING;
497     case VECTOR_CST:		return TS_VECTOR;
498     case VOID_CST:		return TS_TYPED;
499 
500       /* tcc_exceptional cases.  */
501     case BLOCK:			return TS_BLOCK;
502     case CONSTRUCTOR:		return TS_CONSTRUCTOR;
503     case ERROR_MARK:		return TS_COMMON;
504     case IDENTIFIER_NODE:	return TS_IDENTIFIER;
505     case OMP_CLAUSE:		return TS_OMP_CLAUSE;
506     case OPTIMIZATION_NODE:	return TS_OPTIMIZATION;
507     case PLACEHOLDER_EXPR:	return TS_COMMON;
508     case SSA_NAME:		return TS_SSA_NAME;
509     case STATEMENT_LIST:	return TS_STATEMENT_LIST;
510     case TARGET_OPTION_NODE:	return TS_TARGET_OPTION;
511     case TREE_BINFO:		return TS_BINFO;
512     case TREE_LIST:		return TS_LIST;
513     case TREE_VEC:		return TS_VEC;
514 
515     default:
516       gcc_unreachable ();
517     }
518 }
519 
520 
521 /* Initialize tree_contains_struct to describe the hierarchy of tree
522    nodes.  */
523 
524 static void
525 initialize_tree_contains_struct (void)
526 {
527   unsigned i;
528 
529   for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
530     {
531       enum tree_code code;
532       enum tree_node_structure_enum ts_code;
533 
534       code = (enum tree_code) i;
535       ts_code = tree_node_structure_for_code (code);
536 
537       /* Mark the TS structure itself.  */
538       tree_contains_struct[code][ts_code] = 1;
539 
540       /* Mark all the structures that TS is derived from.  */
541       switch (ts_code)
542 	{
543 	case TS_TYPED:
544 	case TS_BLOCK:
545 	case TS_OPTIMIZATION:
546 	case TS_TARGET_OPTION:
547 	  MARK_TS_BASE (code);
548 	  break;
549 
550 	case TS_COMMON:
551 	case TS_INT_CST:
552 	case TS_POLY_INT_CST:
553 	case TS_REAL_CST:
554 	case TS_FIXED_CST:
555 	case TS_VECTOR:
556 	case TS_STRING:
557 	case TS_COMPLEX:
558 	case TS_SSA_NAME:
559 	case TS_CONSTRUCTOR:
560 	case TS_EXP:
561 	case TS_STATEMENT_LIST:
562 	  MARK_TS_TYPED (code);
563 	  break;
564 
565 	case TS_IDENTIFIER:
566 	case TS_DECL_MINIMAL:
567 	case TS_TYPE_COMMON:
568 	case TS_LIST:
569 	case TS_VEC:
570 	case TS_BINFO:
571 	case TS_OMP_CLAUSE:
572 	  MARK_TS_COMMON (code);
573 	  break;
574 
575 	case TS_TYPE_WITH_LANG_SPECIFIC:
576 	  MARK_TS_TYPE_COMMON (code);
577 	  break;
578 
579 	case TS_TYPE_NON_COMMON:
580 	  MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
581 	  break;
582 
583 	case TS_DECL_COMMON:
584 	  MARK_TS_DECL_MINIMAL (code);
585 	  break;
586 
587 	case TS_DECL_WRTL:
588 	case TS_CONST_DECL:
589 	  MARK_TS_DECL_COMMON (code);
590 	  break;
591 
592 	case TS_DECL_NON_COMMON:
593 	  MARK_TS_DECL_WITH_VIS (code);
594 	  break;
595 
596 	case TS_DECL_WITH_VIS:
597 	case TS_PARM_DECL:
598 	case TS_LABEL_DECL:
599 	case TS_RESULT_DECL:
600 	  MARK_TS_DECL_WRTL (code);
601 	  break;
602 
603 	case TS_FIELD_DECL:
604 	  MARK_TS_DECL_COMMON (code);
605 	  break;
606 
607 	case TS_VAR_DECL:
608 	  MARK_TS_DECL_WITH_VIS (code);
609 	  break;
610 
611 	case TS_TYPE_DECL:
612 	case TS_FUNCTION_DECL:
613 	  MARK_TS_DECL_NON_COMMON (code);
614 	  break;
615 
616 	case TS_TRANSLATION_UNIT_DECL:
617 	  MARK_TS_DECL_COMMON (code);
618 	  break;
619 
620 	default:
621 	  gcc_unreachable ();
622 	}
623     }
624 
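  /* Editorial note: the MARK_TS_* macros cascade, so one call covers
     every base structure.  For VAR_DECL, for instance, the TS_VAR_DECL
     case calls MARK_TS_DECL_WITH_VIS, which also marks TS_DECL_WRTL,
     TS_DECL_COMMON, TS_DECL_MINIMAL, TS_COMMON, TS_TYPED and TS_BASE,
     which is what the checks below rely on.  */
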
625   /* Basic consistency checks for attributes used in fold.  */
626   gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
627   gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
628   gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
629   gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
630   gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
631   gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
632   gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
633   gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
634   gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
635   gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
636   gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
637   gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
638   gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
639   gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
640   gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
641   gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
642   gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
643   gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
644   gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
645   gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
646   gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
647   gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
648   gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
649   gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
650   gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
651   gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
652   gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
653   gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
654   gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
655   gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
656   gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
657   gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
658   gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
659   gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
660   gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
661   gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
662   gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
663   gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
664   gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
665   gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
666 }
667 
668 
669 /* Init tree.c.  */
670 
671 void
672 init_ttree (void)
673 {
674   /* Initialize the hash table of types.  */
675   type_hash_table
676     = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
677 
678   debug_expr_for_decl
679     = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
680 
681   value_expr_for_decl
682     = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
683 
684   int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
685 
686   poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);
687 
688   int_cst_node = make_int_cst (1, 1);
689 
690   cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
691 
692   cl_optimization_node = make_node (OPTIMIZATION_NODE);
693   cl_target_option_node = make_node (TARGET_OPTION_NODE);
694 
695   /* Initialize the tree_contains_struct array.  */
696   initialize_tree_contains_struct ();
697   lang_hooks.init_ts ();
698 }
699 
700 
701 /* The name of the object as the assembler will see it (but before any
702    translations made by ASM_OUTPUT_LABELREF).  Often this is the same
703    as DECL_NAME.  It is an IDENTIFIER_NODE.  */
704 tree
705 decl_assembler_name (tree decl)
706 {
707   if (!DECL_ASSEMBLER_NAME_SET_P (decl))
708     lang_hooks.set_decl_assembler_name (decl);
709   return DECL_ASSEMBLER_NAME_RAW (decl);
710 }
711 
712 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
713    (either of which may be NULL).  Inform the FE if this changes the
714    name.  */
715 
716 void
717 overwrite_decl_assembler_name (tree decl, tree name)
718 {
719   if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
720     lang_hooks.overwrite_decl_assembler_name (decl, name);
721 }
722 
723 /* When the target supports COMDAT groups, this indicates which group the
724    DECL is associated with.  This can be either an IDENTIFIER_NODE or a
725    decl, in which case its DECL_ASSEMBLER_NAME identifies the group.  */
726 tree
727 decl_comdat_group (const_tree node)
728 {
729   struct symtab_node *snode = symtab_node::get (node);
730   if (!snode)
731     return NULL;
732   return snode->get_comdat_group ();
733 }
734 
735 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE.  */
736 tree
737 decl_comdat_group_id (const_tree node)
738 {
739   struct symtab_node *snode = symtab_node::get (node);
740   if (!snode)
741     return NULL;
742   return snode->get_comdat_group_id ();
743 }
744 
745 /* When the target supports named sections, return the section name of
746    NODE as a string, or NULL if it is in no section.  */
747 const char *
748 decl_section_name (const_tree node)
749 {
750   struct symtab_node *snode = symtab_node::get (node);
751   if (!snode)
752     return NULL;
753   return snode->get_section ();
754 }
755 
756 /* Set the section name of NODE to VALUE, a NUL-terminated string,
757    or remove it when VALUE is NULL.  */
758 void
759 set_decl_section_name (tree node, const char *value)
760 {
761   struct symtab_node *snode;
762 
763   if (value == NULL)
764     {
765       snode = symtab_node::get (node);
766       if (!snode)
767 	return;
768     }
769   else if (VAR_P (node))
770     snode = varpool_node::get_create (node);
771   else
772     snode = cgraph_node::get_create (node);
773   snode->set_section (value);
774 }
775 
776 /* Return TLS model of a variable NODE.  */
777 enum tls_model
778 decl_tls_model (const_tree node)
779 {
780   struct varpool_node *snode = varpool_node::get (node);
781   if (!snode)
782     return TLS_MODEL_NONE;
783   return snode->tls_model;
784 }
785 
786 /* Set TLS model of variable NODE to MODEL.  */
787 void
788 set_decl_tls_model (tree node, enum tls_model model)
789 {
790   struct varpool_node *vnode;
791 
792   if (model == TLS_MODEL_NONE)
793     {
794       vnode = varpool_node::get (node);
795       if (!vnode)
796 	return;
797     }
798   else
799     vnode = varpool_node::get_create (node);
800   vnode->tls_model = model;
801 }
802 
803 /* Compute the number of bytes occupied by a tree with code CODE.
804    This function cannot be used for nodes that have variable sizes,
805    including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR.  */
806 size_t
807 tree_code_size (enum tree_code code)
808 {
809   switch (TREE_CODE_CLASS (code))
810     {
811     case tcc_declaration:  /* A decl node */
812       switch (code)
813 	{
814 	case FIELD_DECL:	return sizeof (tree_field_decl);
815 	case PARM_DECL:		return sizeof (tree_parm_decl);
816 	case VAR_DECL:		return sizeof (tree_var_decl);
817 	case LABEL_DECL:	return sizeof (tree_label_decl);
818 	case RESULT_DECL:	return sizeof (tree_result_decl);
819 	case CONST_DECL:	return sizeof (tree_const_decl);
820 	case TYPE_DECL:		return sizeof (tree_type_decl);
821 	case FUNCTION_DECL:	return sizeof (tree_function_decl);
822 	case DEBUG_EXPR_DECL:	return sizeof (tree_decl_with_rtl);
823 	case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
824 	case NAMESPACE_DECL:
825 	case IMPORTED_DECL:
826 	case NAMELIST_DECL:	return sizeof (tree_decl_non_common);
827 	default:
828 	  gcc_checking_assert (code >= NUM_TREE_CODES);
829 	  return lang_hooks.tree_size (code);
830 	}
831 
832     case tcc_type:  /* a type node */
833       switch (code)
834 	{
835 	case OFFSET_TYPE:
836 	case ENUMERAL_TYPE:
837 	case BOOLEAN_TYPE:
838 	case INTEGER_TYPE:
839 	case REAL_TYPE:
840 	case POINTER_TYPE:
841 	case REFERENCE_TYPE:
842 	case NULLPTR_TYPE:
843 	case FIXED_POINT_TYPE:
844 	case COMPLEX_TYPE:
845 	case VECTOR_TYPE:
846 	case ARRAY_TYPE:
847 	case RECORD_TYPE:
848 	case UNION_TYPE:
849 	case QUAL_UNION_TYPE:
850 	case VOID_TYPE:
851 	case FUNCTION_TYPE:
852 	case METHOD_TYPE:
853 	case LANG_TYPE:		return sizeof (tree_type_non_common);
854 	default:
855 	  gcc_checking_assert (code >= NUM_TREE_CODES);
856 	  return lang_hooks.tree_size (code);
857 	}
858 
859     case tcc_reference:   /* a reference */
860     case tcc_expression:  /* an expression */
861     case tcc_statement:   /* an expression with side effects */
862     case tcc_comparison:  /* a comparison expression */
863     case tcc_unary:       /* a unary arithmetic expression */
864     case tcc_binary:      /* a binary arithmetic expression */
865       return (sizeof (struct tree_exp)
866 	      + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
867 
868     case tcc_constant:  /* a constant */
869       switch (code)
870 	{
871 	case VOID_CST:		return sizeof (tree_typed);
872 	case INTEGER_CST:	gcc_unreachable ();
873 	case POLY_INT_CST:	return sizeof (tree_poly_int_cst);
874 	case REAL_CST:		return sizeof (tree_real_cst);
875 	case FIXED_CST:		return sizeof (tree_fixed_cst);
876 	case COMPLEX_CST:	return sizeof (tree_complex);
877 	case VECTOR_CST:	gcc_unreachable ();
878 	case STRING_CST:	gcc_unreachable ();
879 	default:
880 	  gcc_checking_assert (code >= NUM_TREE_CODES);
881 	  return lang_hooks.tree_size (code);
882 	}
883 
884     case tcc_exceptional:  /* something random, like an identifier.  */
885       switch (code)
886 	{
887 	case IDENTIFIER_NODE:	return lang_hooks.identifier_size;
888 	case TREE_LIST:		return sizeof (tree_list);
889 
890 	case ERROR_MARK:
891 	case PLACEHOLDER_EXPR:	return sizeof (tree_common);
892 
893 	case TREE_VEC:		gcc_unreachable ();
894 	case OMP_CLAUSE:	gcc_unreachable ();
895 
896 	case SSA_NAME:		return sizeof (tree_ssa_name);
897 
898 	case STATEMENT_LIST:	return sizeof (tree_statement_list);
899 	case BLOCK:		return sizeof (struct tree_block);
900 	case CONSTRUCTOR:	return sizeof (tree_constructor);
901 	case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
902 	case TARGET_OPTION_NODE: return sizeof (tree_target_option);
903 
904 	default:
905 	  gcc_checking_assert (code >= NUM_TREE_CODES);
906 	  return lang_hooks.tree_size (code);
907 	}
908 
909     default:
910       gcc_unreachable ();
911     }
912 }
913 
914 /* Compute the number of bytes occupied by NODE.  This routine only
915    looks at TREE_CODE, except for those nodes that have variable sizes.  */
916 size_t
917 tree_size (const_tree node)
918 {
919   const enum tree_code code = TREE_CODE (node);
920   switch (code)
921     {
922     case INTEGER_CST:
923       return (sizeof (struct tree_int_cst)
924 	      + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
925 
926     case TREE_BINFO:
927       return (offsetof (struct tree_binfo, base_binfos)
928 	      + vec<tree, va_gc>
929 		  ::embedded_size (BINFO_N_BASE_BINFOS (node)));
930 
931     case TREE_VEC:
932       return (sizeof (struct tree_vec)
933 	      + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
934 
935     case VECTOR_CST:
936       return (sizeof (struct tree_vector)
937 	      + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));
938 
939     case STRING_CST:
940       return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
941 
942     case OMP_CLAUSE:
943       return (sizeof (struct tree_omp_clause)
944 	      + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
945 	        * sizeof (tree));
946 
947     default:
948       if (TREE_CODE_CLASS (code) == tcc_vl_exp)
949 	return (sizeof (struct tree_exp)
950 		+ (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
951       else
952 	return tree_code_size (code);
953     }
954 }
955 
956 /* Return tree node kind based on tree CODE.  */
957 
958 static tree_node_kind
959 get_stats_node_kind (enum tree_code code)
960 {
961   enum tree_code_class type = TREE_CODE_CLASS (code);
962 
963   switch (type)
964     {
965     case tcc_declaration:  /* A decl node */
966       return d_kind;
967     case tcc_type:  /* a type node */
968       return t_kind;
969     case tcc_statement:  /* an expression with side effects */
970       return s_kind;
971     case tcc_reference:  /* a reference */
972       return r_kind;
973     case tcc_expression:  /* an expression */
974     case tcc_comparison:  /* a comparison expression */
975     case tcc_unary:  /* a unary arithmetic expression */
976     case tcc_binary:  /* a binary arithmetic expression */
977       return e_kind;
978     case tcc_constant:  /* a constant */
979       return c_kind;
980     case tcc_exceptional:  /* something random, like an identifier.  */
981       switch (code)
982 	{
983 	case IDENTIFIER_NODE:
984 	  return id_kind;
985 	case TREE_VEC:
986 	  return vec_kind;
987 	case TREE_BINFO:
988 	  return binfo_kind;
989 	case SSA_NAME:
990 	  return ssa_name_kind;
991 	case BLOCK:
992 	  return b_kind;
993 	case CONSTRUCTOR:
994 	  return constr_kind;
995 	case OMP_CLAUSE:
996 	  return omp_clause_kind;
997 	default:
998 	  return x_kind;
999 	}
1000       break;
1001     case tcc_vl_exp:
1002       return e_kind;
1003     default:
1004       gcc_unreachable ();
1005     }
1006 }
1007 
1008 /* Record interesting allocation statistics for a tree node with CODE
1009    and LENGTH.  */
1010 
1011 static void
1012 record_node_allocation_statistics (enum tree_code code, size_t length)
1013 {
1014   if (!GATHER_STATISTICS)
1015     return;
1016 
1017   tree_node_kind kind = get_stats_node_kind (code);
1018 
1019   tree_code_counts[(int) code]++;
1020   tree_node_counts[(int) kind]++;
1021   tree_node_sizes[(int) kind] += length;
1022 }
1023 
1024 /* Allocate and return a new UID from the DECL_UID namespace.  */
1025 
1026 int
1027 allocate_decl_uid (void)
1028 {
1029   return next_decl_uid++;
1030 }
1031 
1032 /* Return a newly allocated node of code CODE.  For decl and type
1033    nodes, some other fields are initialized.  The rest of the node is
1034    initialized to zero.  This function cannot be used for TREE_VEC,
1035    INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1036    tree_code_size.
1037 
1038    Achoo!  I got a code in the node.  */
1039 
1040 tree
1041 make_node (enum tree_code code MEM_STAT_DECL)
1042 {
1043   tree t;
1044   enum tree_code_class type = TREE_CODE_CLASS (code);
1045   size_t length = tree_code_size (code);
1046 
1047   record_node_allocation_statistics (code, length);
1048 
1049   t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1050   TREE_SET_CODE (t, code);
1051 
1052   switch (type)
1053     {
1054     case tcc_statement:
1055       if (code != DEBUG_BEGIN_STMT)
1056 	TREE_SIDE_EFFECTS (t) = 1;
1057       break;
1058 
1059     case tcc_declaration:
1060       if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1061 	{
1062 	  if (code == FUNCTION_DECL)
1063 	    {
1064 	      SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
1065 	      SET_DECL_MODE (t, FUNCTION_MODE);
1066 	    }
1067 	  else
1068 	    SET_DECL_ALIGN (t, 1);
1069 	}
1070       DECL_SOURCE_LOCATION (t) = input_location;
1071       if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1072 	DECL_UID (t) = --next_debug_decl_uid;
1073       else
1074 	{
1075 	  DECL_UID (t) = allocate_decl_uid ();
1076 	  SET_DECL_PT_UID (t, -1);
1077 	}
1078       if (TREE_CODE (t) == LABEL_DECL)
1079 	LABEL_DECL_UID (t) = -1;
1080 
1081       break;
1082 
1083     case tcc_type:
1084       TYPE_UID (t) = next_type_uid++;
1085       SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1086       TYPE_USER_ALIGN (t) = 0;
1087       TYPE_MAIN_VARIANT (t) = t;
1088       TYPE_CANONICAL (t) = t;
1089 
1090       /* Default to no attributes for type, but let target change that.  */
1091       TYPE_ATTRIBUTES (t) = NULL_TREE;
1092       targetm.set_default_type_attributes (t);
1093 
1094       /* We have not yet computed the alias set for this type.  */
1095       TYPE_ALIAS_SET (t) = -1;
1096       break;
1097 
1098     case tcc_constant:
1099       TREE_CONSTANT (t) = 1;
1100       break;
1101 
1102     case tcc_expression:
1103       switch (code)
1104 	{
1105 	case INIT_EXPR:
1106 	case MODIFY_EXPR:
1107 	case VA_ARG_EXPR:
1108 	case PREDECREMENT_EXPR:
1109 	case PREINCREMENT_EXPR:
1110 	case POSTDECREMENT_EXPR:
1111 	case POSTINCREMENT_EXPR:
1112 	  /* All of these have side-effects, no matter what their
1113 	     operands are.  */
1114 	  TREE_SIDE_EFFECTS (t) = 1;
1115 	  break;
1116 
1117 	default:
1118 	  break;
1119 	}
1120       break;
1121 
1122     case tcc_exceptional:
1123       switch (code)
1124         {
1125 	case TARGET_OPTION_NODE:
1126 	  TREE_TARGET_OPTION(t)
1127 			    = ggc_cleared_alloc<struct cl_target_option> ();
1128 	  break;
1129 
1130 	case OPTIMIZATION_NODE:
1131 	  TREE_OPTIMIZATION (t)
1132 			    = ggc_cleared_alloc<struct cl_optimization> ();
1133 	  break;
1134 
1135 	default:
1136 	  break;
1137 	}
1138       break;
1139 
1140     default:
1141       /* Other classes need no special treatment.  */
1142       break;
1143     }
1144 
1145   return t;
1146 }
1147 
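/* Usage sketch (editorial note): make_node (RECORD_TYPE) returns a
   zeroed type node with a fresh TYPE_UID, TYPE_ALIGN of BITS_PER_UNIT,
   itself as TYPE_MAIN_VARIANT and TYPE_CANONICAL, and an alias set of
   -1 (not yet computed); the caller is expected to fill in the fields
   and lay the type out.  */
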
1148 /* Free tree node.  */
1149 
1150 void
1151 free_node (tree node)
1152 {
1153   enum tree_code code = TREE_CODE (node);
1154   if (GATHER_STATISTICS)
1155     {
1156       enum tree_node_kind kind = get_stats_node_kind (code);
1157 
1158       gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
1159       gcc_checking_assert (tree_node_counts[(int) kind] != 0);
1160       gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));
1161 
1162       tree_code_counts[(int) TREE_CODE (node)]--;
1163       tree_node_counts[(int) kind]--;
1164       tree_node_sizes[(int) kind] -= tree_size (node);
1165     }
1166   if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1167     vec_free (CONSTRUCTOR_ELTS (node));
1168   else if (code == BLOCK)
1169     vec_free (BLOCK_NONLOCALIZED_VARS (node));
1170   else if (code == TREE_BINFO)
1171     vec_free (BINFO_BASE_ACCESSES (node));
1172   else if (code == OPTIMIZATION_NODE)
1173     cl_optimization_option_free (TREE_OPTIMIZATION (node));
1174   else if (code == TARGET_OPTION_NODE)
1175     cl_target_option_free (TREE_TARGET_OPTION (node));
1176   ggc_free (node);
1177 }
1178 
1179 /* Return a new node with the same contents as NODE except that its
1180    TREE_CHAIN, if it has one, is zero and it has a fresh uid.  */
1181 
1182 tree
1183 copy_node (tree node MEM_STAT_DECL)
1184 {
1185   tree t;
1186   enum tree_code code = TREE_CODE (node);
1187   size_t length;
1188 
1189   gcc_assert (code != STATEMENT_LIST);
1190 
1191   length = tree_size (node);
1192   record_node_allocation_statistics (code, length);
1193   t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1194   memcpy (t, node, length);
1195 
1196   if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1197     TREE_CHAIN (t) = 0;
1198   TREE_ASM_WRITTEN (t) = 0;
1199   TREE_VISITED (t) = 0;
1200 
1201   if (TREE_CODE_CLASS (code) == tcc_declaration)
1202     {
1203       if (code == DEBUG_EXPR_DECL)
1204 	DECL_UID (t) = --next_debug_decl_uid;
1205       else
1206 	{
1207 	  DECL_UID (t) = allocate_decl_uid ();
1208 	  if (DECL_PT_UID_SET_P (node))
1209 	    SET_DECL_PT_UID (t, DECL_PT_UID (node));
1210 	}
1211       if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
1212 	  && DECL_HAS_VALUE_EXPR_P (node))
1213 	{
1214 	  SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1215 	  DECL_HAS_VALUE_EXPR_P (t) = 1;
1216 	}
1217       /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
1218       if (VAR_P (node))
1219 	{
1220 	  DECL_HAS_DEBUG_EXPR_P (t) = 0;
1221 	  t->decl_with_vis.symtab_node = NULL;
1222 	}
1223       if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
1224 	{
1225 	  SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1226 	  DECL_HAS_INIT_PRIORITY_P (t) = 1;
1227 	}
1228       if (TREE_CODE (node) == FUNCTION_DECL)
1229 	{
1230 	  DECL_STRUCT_FUNCTION (t) = NULL;
1231 	  t->decl_with_vis.symtab_node = NULL;
1232 	}
1233     }
1234   else if (TREE_CODE_CLASS (code) == tcc_type)
1235     {
1236       TYPE_UID (t) = next_type_uid++;
1237       /* The following is so that the debug code for
1238 	 the copy is different from the original type.
1239 	 The two statements usually duplicate each other
1240 	 (because they clear fields of the same union),
1241 	 but the optimizer should catch that.  */
1242       TYPE_SYMTAB_ADDRESS (t) = 0;
1243       TYPE_SYMTAB_DIE (t) = 0;
1244 
1245       /* Do not copy the values cache.  */
1246       if (TYPE_CACHED_VALUES_P (t))
1247 	{
1248 	  TYPE_CACHED_VALUES_P (t) = 0;
1249 	  TYPE_CACHED_VALUES (t) = NULL_TREE;
1250 	}
1251     }
1252     else if (code == TARGET_OPTION_NODE)
1253       {
1254 	TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1255 	memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1256 		sizeof (struct cl_target_option));
1257       }
1258     else if (code == OPTIMIZATION_NODE)
1259       {
1260 	TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1261 	memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1262 		sizeof (struct cl_optimization));
1263       }
1264 
1265   return t;
1266 }
1267 
1268 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1269    For example, this can copy a list made of TREE_LIST nodes.  */
1270 
1271 tree
1272 copy_list (tree list)
1273 {
1274   tree head;
1275   tree prev, next;
1276 
1277   if (list == 0)
1278     return 0;
1279 
1280   head = prev = copy_node (list);
1281   next = TREE_CHAIN (list);
1282   while (next)
1283     {
1284       TREE_CHAIN (prev) = copy_node (next);
1285       prev = TREE_CHAIN (prev);
1286       next = TREE_CHAIN (next);
1287     }
1288   return head;
1289 }
1290 
1291 
1292 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1293    INTEGER_CST with value CST and type TYPE.   */
1294 
1295 static unsigned int
1296 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1297 {
1298   gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1299   /* We need extra HWIs if CST is an unsigned integer with its
1300      upper bit set.  */
1301   if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
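  /* Worked example (editorial note): with 64-bit HOST_WIDE_INTs, the
     unsigned 64-bit value 0xffffffffffffffff has its upper bit set and
     so needs 64/64 + 1 == 2 elements; the extra element keeps the
     extended representation from reading back as a negative value.  */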
1302     return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1303   return cst.get_len ();
1304 }
1305 
1306 /* Return a new INTEGER_CST with value CST and type TYPE.  */
1307 
1308 static tree
1309 build_new_int_cst (tree type, const wide_int &cst)
1310 {
1311   unsigned int len = cst.get_len ();
1312   unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1313   tree nt = make_int_cst (len, ext_len);
1314 
1315   if (len < ext_len)
1316     {
1317       --ext_len;
1318       TREE_INT_CST_ELT (nt, ext_len)
1319 	= zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1320       for (unsigned int i = len; i < ext_len; ++i)
1321 	TREE_INT_CST_ELT (nt, i) = -1;
1322     }
1323   else if (TYPE_UNSIGNED (type)
1324 	   && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1325     {
1326       len--;
1327       TREE_INT_CST_ELT (nt, len)
1328 	= zext_hwi (cst.elt (len),
1329 		    cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1330     }
1331 
1332   for (unsigned int i = 0; i < len; i++)
1333     TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1334   TREE_TYPE (nt) = type;
1335   return nt;
1336 }
1337 
1338 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE.  */
1339 
1340 static tree
1341 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1342 			CXX_MEM_STAT_INFO)
1343 {
1344   size_t length = sizeof (struct tree_poly_int_cst);
1345   record_node_allocation_statistics (POLY_INT_CST, length);
1346 
1347   tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1348 
1349   TREE_SET_CODE (t, POLY_INT_CST);
1350   TREE_CONSTANT (t) = 1;
1351   TREE_TYPE (t) = type;
1352   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1353     POLY_INT_CST_COEFF (t, i) = coeffs[i];
1354   return t;
1355 }
1356 
1357 /* Create a constant tree that contains CST sign-extended to TYPE.  */
1358 
1359 tree
1360 build_int_cst (tree type, poly_int64 cst)
1361 {
1362   /* Support legacy code.  */
1363   if (!type)
1364     type = integer_type_node;
1365 
1366   return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1367 }
1368 
1369 /* Create a constant tree that contains CST zero-extended to TYPE.  */
1370 
1371 tree
1372 build_int_cstu (tree type, poly_uint64 cst)
1373 {
1374   return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1375 }
1376 
1377 /* Create a constant tree that contains CST sign-extended to TYPE.  */
1378 
1379 tree
1380 build_int_cst_type (tree type, poly_int64 cst)
1381 {
1382   gcc_assert (type);
1383   return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1384 }
1385 
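/* Usage sketch (editorial note, not part of the original source): the
   three constructors above all funnel into wide_int_to_tree, so equal
   values of the same type come back as the same shared node, e.g.

     tree a = build_int_cst (integer_type_node, 42);
     tree b = build_int_cst_type (integer_type_node, 42);
     gcc_checking_assert (a == b);

   holds with the default param_integer_share_limit, because small
   values live in the per-type cache maintained by wide_int_to_tree_1
   below.  */
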
1386 /* Constructs a tree of type TYPE with the value given by CST.  Signedness
1387    of CST is assumed to be the same as the signedness of TYPE.  */
1388 
1389 tree
1390 double_int_to_tree (tree type, double_int cst)
1391 {
1392   return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1393 }
1394 
1395 /* We force the wide_int CST to the range of the type TYPE by sign or
1396    zero extending it.  OVERFLOWABLE indicates if we are interested in
1397    overflow of the value: when >0 we are only interested in signed
1398    overflow, for <0 we are interested in any overflow.  OVERFLOWED
1399    indicates whether overflow has already occurred.  We force the
1400    result's value to be within the range of TYPE (by setting to 0 or 1
1401    all the bits outside the type's range).  We set TREE_OVERFLOW on
1402    the result if
1403         OVERFLOWED is nonzero,
1404         or OVERFLOWABLE is >0 and signed overflow occurs,
1405         or OVERFLOWABLE is <0 and any overflow occurs.
1406    We return a new tree node for the extended wide_int.  The node
1407    is shared if no overflow flags are set.  */
1408 
1409 
1410 tree
1411 force_fit_type (tree type, const poly_wide_int_ref &cst,
1412 		int overflowable, bool overflowed)
1413 {
1414   signop sign = TYPE_SIGN (type);
1415 
1416   /* If we need to set overflow flags, return a new unshared node.  */
1417   if (overflowed || !wi::fits_to_tree_p (cst, type))
1418     {
1419       if (overflowed
1420 	  || overflowable < 0
1421 	  || (overflowable > 0 && sign == SIGNED))
1422 	{
1423 	  poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
1424 						   sign);
1425 	  tree t;
1426 	  if (tmp.is_constant ())
1427 	    t = build_new_int_cst (type, tmp.coeffs[0]);
1428 	  else
1429 	    {
1430 	      tree coeffs[NUM_POLY_INT_COEFFS];
1431 	      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1432 		{
1433 		  coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
1434 		  TREE_OVERFLOW (coeffs[i]) = 1;
1435 		}
1436 	      t = build_new_poly_int_cst (type, coeffs);
1437 	    }
1438 	  TREE_OVERFLOW (t) = 1;
1439 	  return t;
1440 	}
1441     }
1442 
1443   /* Else build a shared node.  */
1444   return wide_int_to_tree (type, cst);
1445 }
1446 
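/* Worked example (editorial note): forcing the value 300 into a type
   of 8-bit precision keeps only the low eight bits, so the resulting
   INTEGER_CST has the value 44; TREE_OVERFLOW is set on it only when
   OVERFLOWED is true or OVERFLOWABLE asks for that kind of overflow
   to be recorded.  */
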
1447 /* These are the hash table functions for the hash table of INTEGER_CST
1448    nodes.  */
1449 
1450 /* Return the hash code X, an INTEGER_CST.  */
1451 
1452 hashval_t
1453 int_cst_hasher::hash (tree x)
1454 {
1455   const_tree const t = x;
1456   hashval_t code = TYPE_UID (TREE_TYPE (t));
1457   int i;
1458 
1459   for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1460     code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1461 
1462   return code;
1463 }
1464 
1465 /* Return nonzero if the value represented by X (an INTEGER_CST tree node)
1466    is the same as that represented by Y, another INTEGER_CST tree node.  */
1467 
1468 bool
1469 int_cst_hasher::equal (tree x, tree y)
1470 {
1471   const_tree const xt = x;
1472   const_tree const yt = y;
1473 
1474   if (TREE_TYPE (xt) != TREE_TYPE (yt)
1475       || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1476       || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1477     return false;
1478 
1479   for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1480     if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1481       return false;
1482 
1483   return true;
1484 }
1485 
1486 /* Create an INT_CST node of TYPE and value CST.
1487    The returned node is always shared.  For small integers we use a
1488    per-type vector cache, for larger ones we use a single hash table.
1489    The value is extended from its precision according to the sign of
1490    the type to be a multiple of HOST_BITS_PER_WIDE_INT.  This defines
1491    the upper bits and ensures that hashing and value equality based
1492    upon the underlying HOST_WIDE_INTs works without masking.  */
1493 
1494 static tree
1495 wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
1496 {
1497   tree t;
1498   int ix = -1;
1499   int limit = 0;
1500 
1501   gcc_assert (type);
1502   unsigned int prec = TYPE_PRECISION (type);
1503   signop sgn = TYPE_SIGN (type);
1504 
1505   /* Verify that everything is canonical.  */
1506   int l = pcst.get_len ();
1507   if (l > 1)
1508     {
1509       if (pcst.elt (l - 1) == 0)
1510 	gcc_checking_assert (pcst.elt (l - 2) < 0);
1511       if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
1512 	gcc_checking_assert (pcst.elt (l - 2) >= 0);
1513     }
1514 
1515   wide_int cst = wide_int::from (pcst, prec, sgn);
1516   unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1517 
1518   if (ext_len == 1)
1519     {
1520       /* We just need to store a single HOST_WIDE_INT.  */
1521       HOST_WIDE_INT hwi;
1522       if (TYPE_UNSIGNED (type))
1523 	hwi = cst.to_uhwi ();
1524       else
1525 	hwi = cst.to_shwi ();
1526 
1527       switch (TREE_CODE (type))
1528 	{
1529 	case NULLPTR_TYPE:
1530 	  gcc_assert (hwi == 0);
1531 	  /* Fallthru.  */
1532 
1533 	case POINTER_TYPE:
1534 	case REFERENCE_TYPE:
1535 	  /* Cache NULL pointer and zero bounds.  */
1536 	  if (hwi == 0)
1537 	    {
1538 	      limit = 1;
1539 	      ix = 0;
1540 	    }
1541 	  break;
1542 
1543 	case BOOLEAN_TYPE:
1544 	  /* Cache false or true.  */
1545 	  limit = 2;
1546 	  if (IN_RANGE (hwi, 0, 1))
1547 	    ix = hwi;
1548 	  break;
1549 
1550 	case INTEGER_TYPE:
1551 	case OFFSET_TYPE:
1552 	  if (TYPE_SIGN (type) == UNSIGNED)
1553 	    {
1554 	      /* Cache [0, N).  */
1555 	      limit = param_integer_share_limit;
1556 	      if (IN_RANGE (hwi, 0, param_integer_share_limit - 1))
1557 		ix = hwi;
1558 	    }
1559 	  else
1560 	    {
1561 	      /* Cache [-1, N).  */
1562 	      limit = param_integer_share_limit + 1;
1563 	      if (IN_RANGE (hwi, -1, param_integer_share_limit - 1))
1564 		ix = hwi + 1;
1565 	    }
1566 	  break;
1567 
1568 	case ENUMERAL_TYPE:
1569 	  break;
1570 
1571 	default:
1572 	  gcc_unreachable ();
1573 	}
1574 
1575       if (ix >= 0)
1576 	{
1577 	  /* Look for it in the type's vector of small shared ints.  */
1578 	  if (!TYPE_CACHED_VALUES_P (type))
1579 	    {
1580 	      TYPE_CACHED_VALUES_P (type) = 1;
1581 	      TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1582 	    }
1583 
1584 	  t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1585 	  if (t)
1586 	    /* Make sure no one is clobbering the shared constant.  */
1587 	    gcc_checking_assert (TREE_TYPE (t) == type
1588 				 && TREE_INT_CST_NUNITS (t) == 1
1589 				 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1590 				 && TREE_INT_CST_EXT_NUNITS (t) == 1
1591 				 && TREE_INT_CST_ELT (t, 0) == hwi);
1592 	  else
1593 	    {
1594 	      /* Create a new shared int.  */
1595 	      t = build_new_int_cst (type, cst);
1596 	      TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1597 	    }
1598 	}
1599       else
1600 	{
1601 	  /* Use the cache of larger shared ints, using int_cst_node as
1602 	     a temporary.  */
1603 
1604 	  TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1605 	  TREE_TYPE (int_cst_node) = type;
1606 
1607 	  tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1608 	  t = *slot;
1609 	  if (!t)
1610 	    {
1611 	      /* Insert this one into the hash table.  */
1612 	      t = int_cst_node;
1613 	      *slot = t;
1614 	      /* Make a new node for next time round.  */
1615 	      int_cst_node = make_int_cst (1, 1);
1616 	    }
1617 	}
1618     }
1619   else
1620     {
1621       /* The value either hashes properly or we drop it on the floor
1622 	 for the gc to take care of.  There will not be enough of them
1623 	 to worry about.  */
1624 
1625       tree nt = build_new_int_cst (type, cst);
1626       tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1627       t = *slot;
1628       if (!t)
1629 	{
1630 	  /* Insert this one into the hash table.  */
1631 	  t = nt;
1632 	  *slot = t;
1633 	}
1634       else
1635 	ggc_free (nt);
1636     }
1637 
1638   return t;
1639 }
1640 
1641 hashval_t
1642 poly_int_cst_hasher::hash (tree t)
1643 {
1644   inchash::hash hstate;
1645 
1646   hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1647   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1648     hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1649 
1650   return hstate.end ();
1651 }
1652 
1653 bool
1654 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1655 {
1656   if (TREE_TYPE (x) != y.first)
1657     return false;
1658   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1659     if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1660       return false;
1661   return true;
1662 }
1663 
1664 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1665    The elements must also have type TYPE.  */
1666 
1667 tree
1668 build_poly_int_cst (tree type, const poly_wide_int_ref &values)
1669 {
1670   unsigned int prec = TYPE_PRECISION (type);
1671   gcc_assert (prec <= values.coeffs[0].get_precision ());
1672   poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);
1673 
1674   inchash::hash h;
1675   h.add_int (TYPE_UID (type));
1676   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1677     h.add_wide_int (c.coeffs[i]);
1678   poly_int_cst_hasher::compare_type comp (type, &c);
1679   tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
1680 							     INSERT);
1681   if (*slot == NULL_TREE)
1682     {
1683       tree coeffs[NUM_POLY_INT_COEFFS];
1684       for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1685 	coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
1686       *slot = build_new_poly_int_cst (type, coeffs);
1687     }
1688   return *slot;
1689 }
1690 
1691 /* Create a constant tree with value VALUE in type TYPE.  */
1692 
1693 tree
1694 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1695 {
1696   if (value.is_constant ())
1697     return wide_int_to_tree_1 (type, value.coeffs[0]);
1698   return build_poly_int_cst (type, value);
1699 }
1700 
1701 void
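/* Cache the INTEGER_CST T in the same caches that wide_int_to_tree_1
   consults: the per-type vector of small shared values, or the global
   int_cst_hash_table for larger ones.  Later requests for the same
   value and type then return T itself.  T must not have TREE_OVERFLOW
   set.  */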
1702 cache_integer_cst (tree t)
1703 {
1704   tree type = TREE_TYPE (t);
1705   int ix = -1;
1706   int limit = 0;
1707   int prec = TYPE_PRECISION (type);
1708 
1709   gcc_assert (!TREE_OVERFLOW (t));
1710 
1711   switch (TREE_CODE (type))
1712     {
1713     case NULLPTR_TYPE:
1714       gcc_assert (integer_zerop (t));
1715       /* Fallthru.  */
1716 
1717     case POINTER_TYPE:
1718     case REFERENCE_TYPE:
1719       /* Cache NULL pointer.  */
1720       if (integer_zerop (t))
1721 	{
1722 	  limit = 1;
1723 	  ix = 0;
1724 	}
1725       break;
1726 
1727     case BOOLEAN_TYPE:
1728       /* Cache false or true.  */
1729       limit = 2;
1730       if (wi::ltu_p (wi::to_wide (t), 2))
1731 	ix = TREE_INT_CST_ELT (t, 0);
1732       break;
1733 
1734     case INTEGER_TYPE:
1735     case OFFSET_TYPE:
1736       if (TYPE_UNSIGNED (type))
1737 	{
1738 	  /* Cache 0..N */
1739 	  limit = param_integer_share_limit;
1740 
1741 	  /* This is a little hokey, but if the prec is smaller than
1742 	     what is necessary to hold param_integer_share_limit, then the
1743 	     obvious test will not get the correct answer.  */
1744 	  if (prec < HOST_BITS_PER_WIDE_INT)
1745 	    {
1746 	      if (tree_to_uhwi (t)
1747 		  < (unsigned HOST_WIDE_INT) param_integer_share_limit)
1748 		ix = tree_to_uhwi (t);
1749 	    }
1750 	  else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1751 	    ix = tree_to_uhwi (t);
1752 	}
1753       else
1754 	{
1755 	  /* Cache -1..N */
1756 	  limit = param_integer_share_limit + 1;
1757 
1758 	  if (integer_minus_onep (t))
1759 	    ix = 0;
1760 	  else if (!wi::neg_p (wi::to_wide (t)))
1761 	    {
1762 	      if (prec < HOST_BITS_PER_WIDE_INT)
1763 		{
1764 		  if (tree_to_shwi (t) < param_integer_share_limit)
1765 		    ix = tree_to_shwi (t) + 1;
1766 		}
1767 	      else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1768 		ix = tree_to_shwi (t) + 1;
1769 	    }
1770 	}
1771       break;
1772 
1773     case ENUMERAL_TYPE:
1774       break;
1775 
1776     default:
1777       gcc_unreachable ();
1778     }
1779 
1780   if (ix >= 0)
1781     {
1782       /* Look for it in the type's vector of small shared ints.  */
1783       if (!TYPE_CACHED_VALUES_P (type))
1784 	{
1785 	  TYPE_CACHED_VALUES_P (type) = 1;
1786 	  TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1787 	}
1788 
1789       gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1790       TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1791     }
1792   else
1793     {
1794       /* Use the cache of larger shared ints.  */
1795       tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1796       /* If there is already an entry for the number verify it's the
1797          same.  */
1798       if (*slot)
1799 	gcc_assert (wi::to_wide (tree (*slot)) == wi::to_wide (t));
1800       else
1801 	/* Otherwise insert this one into the hash table.  */
1802 	*slot = t;
1803     }
1804 }
1805 
1806 
1807 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
1808    and the rest are zeros.  */
1809 
1810 tree
1811 build_low_bits_mask (tree type, unsigned bits)
1812 {
1813   gcc_assert (bits <= TYPE_PRECISION (type));
1814 
1815   return wide_int_to_tree (type, wi::mask (bits, false,
1816 					   TYPE_PRECISION (type)));
1817 }
1818 
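/* Illustrative example (the 8-bit unsigned type is hypothetical):
   build_low_bits_mask (type, 3) returns the INTEGER_CST 0x7 in TYPE,
   since wi::mask (3, false, 8) sets exactly the low three bits.  */
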
1819 /* Checks that X is an integer constant that can be expressed in (unsigned)
1820    HOST_WIDE_INT without loss of precision.  */
1821 
1822 bool
1823 cst_and_fits_in_hwi (const_tree x)
1824 {
1825   return (TREE_CODE (x) == INTEGER_CST
1826 	  && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
1827 }
1828 
1829 /* Build a newly constructed VECTOR_CST with the given values of
1830    (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN.  */
1831 
1832 tree
1833 make_vector (unsigned log2_npatterns,
1834 	     unsigned int nelts_per_pattern MEM_STAT_DECL)
1835 {
1836   gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
1837   tree t;
1838   unsigned npatterns = 1 << log2_npatterns;
1839   unsigned encoded_nelts = npatterns * nelts_per_pattern;
1840   unsigned length = (sizeof (struct tree_vector)
1841 		     + (encoded_nelts - 1) * sizeof (tree));
1842 
1843   record_node_allocation_statistics (VECTOR_CST, length);
1844 
1845   t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1846 
1847   TREE_SET_CODE (t, VECTOR_CST);
1848   TREE_CONSTANT (t) = 1;
1849   VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
1850   VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
1851 
1852   return t;
1853 }
1854 
1855 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1856    are extracted from V, a vector of CONSTRUCTOR_ELT.  */
1857 
1858 tree
1859 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1860 {
1861   if (vec_safe_length (v) == 0)
1862     return build_zero_cst (type);
1863 
1864   unsigned HOST_WIDE_INT idx, nelts;
1865   tree value;
1866 
1867   /* We can't construct a VECTOR_CST for a variable number of elements.  */
1868   nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
1869   tree_vector_builder vec (type, nelts, 1);
1870   FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1871     {
1872       if (TREE_CODE (value) == VECTOR_CST)
1873 	{
1874 	  /* If NELTS is constant then this must be too.  */
1875 	  unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
1876 	  for (unsigned i = 0; i < sub_nelts; ++i)
1877 	    vec.quick_push (VECTOR_CST_ELT (value, i));
1878 	}
1879       else
1880 	vec.quick_push (value);
1881     }
1882   while (vec.length () < nelts)
1883     vec.quick_push (build_zero_cst (TREE_TYPE (type)));
1884 
1885   return vec.build ();
1886 }
1887 
1888 /* Build a vector of type VECTYPE where all the elements are SCs.  */
1889 tree
1890 build_vector_from_val (tree vectype, tree sc)
1891 {
1892   unsigned HOST_WIDE_INT i, nunits;
1893 
1894   if (sc == error_mark_node)
1895     return sc;
1896 
1897   /* Verify that the vector type is suitable for SC.  Note that there
1898      is some inconsistency in the type-system with respect to restrict
1899      qualifications of pointers.  Vector types always have a main-variant
1900      element type and the qualification is applied to the vector-type.
1901      So TREE_TYPE (vector-type) does not return a properly qualified
1902      vector element-type.  */
1903   gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1904 					   TREE_TYPE (vectype)));
1905 
1906   if (CONSTANT_CLASS_P (sc))
1907     {
1908       tree_vector_builder v (vectype, 1, 1);
1909       v.quick_push (sc);
1910       return v.build ();
1911     }
1912   else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
1913     return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
1914   else
1915     {
1916       vec<constructor_elt, va_gc> *v;
1917       vec_alloc (v, nunits);
1918       for (i = 0; i < nunits; ++i)
1919 	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1920       return build_constructor (vectype, v);
1921     }
1922 }
1923 
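/* Rough illustration (the vector type is hypothetical): for a
   four-element integer vector type and SC the INTEGER_CST 7, the
   result is the VECTOR_CST { 7, 7, 7, 7 }, encoded as a single
   duplicated pattern.  A non-constant SC instead yields a CONSTRUCTOR
   for fixed-length vectors, or a VEC_DUPLICATE_EXPR when the number of
   elements is not a compile-time constant.  */
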
1924 /* If TYPE is not a vector type, just return SC, otherwise return
1925    build_vector_from_val (TYPE, SC).  */
1926 
1927 tree
1928 build_uniform_cst (tree type, tree sc)
1929 {
1930   if (!VECTOR_TYPE_P (type))
1931     return sc;
1932 
1933   return build_vector_from_val (type, sc);
1934 }
1935 
1936 /* Build a vector series of type TYPE in which element I has the value
1937    BASE + I * STEP.  The result is a constant if BASE and STEP are constant
1938    and a VEC_SERIES_EXPR otherwise.  */
1939 
1940 tree
1941 build_vec_series (tree type, tree base, tree step)
1942 {
1943   if (integer_zerop (step))
1944     return build_vector_from_val (type, base);
1945   if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
1946     {
1947       tree_vector_builder builder (type, 1, 3);
1948       tree elt1 = wide_int_to_tree (TREE_TYPE (base),
1949 				    wi::to_wide (base) + wi::to_wide (step));
1950       tree elt2 = wide_int_to_tree (TREE_TYPE (base),
1951 				    wi::to_wide (elt1) + wi::to_wide (step));
1952       builder.quick_push (base);
1953       builder.quick_push (elt1);
1954       builder.quick_push (elt2);
1955       return builder.build ();
1956     }
1957   return build2 (VEC_SERIES_EXPR, type, base, step);
1958 }
1959 
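/* Sketch: for INTEGER_CST operands BASE = 0 and STEP = 1, the builder
   above encodes the single stepped pattern { 0, 1, 2 }, which the
   VECTOR_CST representation extends to { 0, 1, 2, 3, ... } for as many
   elements as TYPE has.  Non-constant operands fall through to the
   VEC_SERIES_EXPR form.  */
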
1960 /* Return a vector with the same number of units and number of bits
1961    as VEC_TYPE, but in which the elements are a linear series of unsigned
1962    integers { BASE, BASE + STEP, BASE + STEP * 2, ... }.  */
1963 
1964 tree
1965 build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
1966 {
1967   tree index_vec_type = vec_type;
1968   tree index_elt_type = TREE_TYPE (vec_type);
1969   poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
1970   if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
1971     {
1972       index_elt_type = build_nonstandard_integer_type
1973 	(GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
1974       index_vec_type = build_vector_type (index_elt_type, nunits);
1975     }
1976 
1977   tree_vector_builder v (index_vec_type, 1, 3);
1978   for (unsigned int i = 0; i < 3; ++i)
1979     v.quick_push (build_int_cstu (index_elt_type, base + i * step));
1980   return v.build ();
1981 }
1982 
1983 /* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
1984    elements are A and the rest are B.  */
1985 
1986 tree
1987 build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
1988 {
1989   gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
1990   unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
1991   /* Optimize the constant case.  */
1992   if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
1993     count /= 2;
1994   tree_vector_builder builder (vec_type, count, 2);
1995   for (unsigned int i = 0; i < count * 2; ++i)
1996     builder.quick_push (i < num_a ? a : b);
1997   return builder.build ();
1998 }
1999 
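/* Sketch: for a four-element vector type and NUM_A == 2, the loop
   above produces { A, A, B, B }.  Encoding the result as COUNT
   two-element patterns is what lets the same routine describe
   variable-length vectors.  */
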
2000 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
2001    calculate TREE_CONSTANT and TREE_SIDE_EFFECTS.  */
2002 
2003 void
2004 recompute_constructor_flags (tree c)
2005 {
2006   unsigned int i;
2007   tree val;
2008   bool constant_p = true;
2009   bool side_effects_p = false;
2010   vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2011 
2012   FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2013     {
2014       /* Mostly ctors will have elts that don't have side-effects, so
2015 	 the usual case is to scan all the elements.  Hence a single
2016 	 loop for both const and side effects, rather than one loop
2017 	 each (with early outs).  */
2018       if (!TREE_CONSTANT (val))
2019 	constant_p = false;
2020       if (TREE_SIDE_EFFECTS (val))
2021 	side_effects_p = true;
2022     }
2023 
2024   TREE_SIDE_EFFECTS (c) = side_effects_p;
2025   TREE_CONSTANT (c) = constant_p;
2026 }
2027 
2028 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2029    CONSTRUCTOR C.  */
2030 
2031 void
2032 verify_constructor_flags (tree c)
2033 {
2034   unsigned int i;
2035   tree val;
2036   bool constant_p = TREE_CONSTANT (c);
2037   bool side_effects_p = TREE_SIDE_EFFECTS (c);
2038   vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2039 
2040   FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2041     {
2042       if (constant_p && !TREE_CONSTANT (val))
2043 	internal_error ("non-constant element in constant CONSTRUCTOR");
2044       if (!side_effects_p && TREE_SIDE_EFFECTS (val))
2045 	internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2046     }
2047 }
2048 
2049 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2050    are in the vec pointed to by VALS.  */
2051 tree
2052 build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
2053 {
2054   tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);
2055 
2056   TREE_TYPE (c) = type;
2057   CONSTRUCTOR_ELTS (c) = vals;
2058 
2059   recompute_constructor_flags (c);
2060 
2061   return c;
2062 }
2063 
2064 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2065    INDEX and VALUE.  */
2066 tree
2067 build_constructor_single (tree type, tree index, tree value)
2068 {
2069   vec<constructor_elt, va_gc> *v;
2070   constructor_elt elt = {index, value};
2071 
2072   vec_alloc (v, 1);
2073   v->quick_push (elt);
2074 
2075   return build_constructor (type, v);
2076 }
2077 
2078 
2079 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2080    are in a list pointed to by VALS.  */
2081 tree
2082 build_constructor_from_list (tree type, tree vals)
2083 {
2084   tree t;
2085   vec<constructor_elt, va_gc> *v = NULL;
2086 
2087   if (vals)
2088     {
2089       vec_alloc (v, list_length (vals));
2090       for (t = vals; t; t = TREE_CHAIN (t))
2091 	CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2092     }
2093 
2094   return build_constructor (type, v);
2095 }
2096 
2097 /* Return a new CONSTRUCTOR node whose type is TYPE.  NELTS is the number
2098    of elements, provided as index/value pairs.  */
2099 
2100 tree
2101 build_constructor_va (tree type, int nelts, ...)
2102 {
2103   vec<constructor_elt, va_gc> *v = NULL;
2104   va_list p;
2105 
2106   va_start (p, nelts);
2107   vec_alloc (v, nelts);
2108   while (nelts--)
2109     {
2110       tree index = va_arg (p, tree);
2111       tree value = va_arg (p, tree);
2112       CONSTRUCTOR_APPEND_ELT (v, index, value);
2113     }
2114   va_end (p);
2115   return build_constructor (type, v);
2116 }
2117 
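/* Hedged usage sketch (the index and value trees are made up):

     tree ctor = build_constructor_va (record_type, 2,
				       field1, value1,
				       field2, value2);

   is equivalent to pushing the two index/value pairs by hand and
   calling build_constructor on the resulting vector.  */
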
2118 /* Return a node of type TYPE for which TREE_CLOBBER_P is true.  */
2119 
2120 tree
2121 build_clobber (tree type)
2122 {
2123   tree clobber = build_constructor (type, NULL);
2124   TREE_THIS_VOLATILE (clobber) = true;
2125   return clobber;
2126 }
2127 
2128 /* Return a new FIXED_CST node whose type is TYPE and value is F.  */
2129 
2130 tree
2131 build_fixed (tree type, FIXED_VALUE_TYPE f)
2132 {
2133   tree v;
2134   FIXED_VALUE_TYPE *fp;
2135 
2136   v = make_node (FIXED_CST);
2137   fp = ggc_alloc<fixed_value> ();
2138   memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2139 
2140   TREE_TYPE (v) = type;
2141   TREE_FIXED_CST_PTR (v) = fp;
2142   return v;
2143 }
2144 
2145 /* Return a new REAL_CST node whose type is TYPE and value is D.  */
2146 
2147 tree
2148 build_real (tree type, REAL_VALUE_TYPE d)
2149 {
2150   tree v;
2151   REAL_VALUE_TYPE *dp;
2152   int overflow = 0;
2153 
2154   /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2155      Consider doing it via real_convert now.  */
2156 
2157   v = make_node (REAL_CST);
2158   dp = ggc_alloc<real_value> ();
2159   memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
2160 
2161   TREE_TYPE (v) = type;
2162   TREE_REAL_CST_PTR (v) = dp;
2163   TREE_OVERFLOW (v) = overflow;
2164   return v;
2165 }
2166 
2167 /* Like build_real, but first truncate D to the type.  */
2168 
2169 tree
2170 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2171 {
2172   return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2173 }
2174 
2175 /* Return a REAL_VALUE_TYPE holding the integer value of the INTEGER_CST
2176    node I, converted to the mode of TYPE (or to VOIDmode if TYPE is null).  */
2177 
2178 REAL_VALUE_TYPE
2179 real_value_from_int_cst (const_tree type, const_tree i)
2180 {
2181   REAL_VALUE_TYPE d;
2182 
2183   /* Clear all bits of the real value type so that we can later do
2184      bitwise comparisons to see if two values are the same.  */
2185   memset (&d, 0, sizeof d);
2186 
2187   real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2188 		     TYPE_SIGN (TREE_TYPE (i)));
2189   return d;
2190 }
2191 
2192 /* Given a tree representing an integer constant I, return a tree
2193    representing the same value as a floating-point constant of type TYPE.  */
2194 
2195 tree
2196 build_real_from_int_cst (tree type, const_tree i)
2197 {
2198   tree v;
2199   int overflow = TREE_OVERFLOW (i);
2200 
2201   v = build_real (type, real_value_from_int_cst (type, i));
2202 
2203   TREE_OVERFLOW (v) |= overflow;
2204   return v;
2205 }
2206 
2207 /* Return a newly constructed STRING_CST node whose value is the LEN
2208    characters at STR when STR is nonnull, or all zeros otherwise.
2209    Note that for a C string literal, LEN should include the trailing NUL.
2210    The TREE_TYPE is not initialized.  */
2211 
2212 tree
2213 build_string (unsigned len, const char *str /*= NULL */)
2214 {
2215   /* Do not waste bytes provided by padding of struct tree_string.  */
2216   unsigned size = len + offsetof (struct tree_string, str) + 1;
2217 
2218   record_node_allocation_statistics (STRING_CST, size);
2219 
2220   tree s = (tree) ggc_internal_alloc (size);
2221 
2222   memset (s, 0, sizeof (struct tree_typed));
2223   TREE_SET_CODE (s, STRING_CST);
2224   TREE_CONSTANT (s) = 1;
2225   TREE_STRING_LENGTH (s) = len;
2226   if (str)
2227     memcpy (s->string.str, str, len);
2228   else
2229     memset (s->string.str, 0, len);
2230   s->string.str[len] = '\0';
2231 
2232   return s;
2233 }
2234 
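/* Usage sketch: build_string (sizeof "abc", "abc") yields a STRING_CST
   with TREE_STRING_LENGTH of 4 (three characters plus the literal's
   trailing NUL); the caller is still responsible for TREE_TYPE.  */
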
2235 /* Return a newly constructed COMPLEX_CST node whose value is
2236    specified by the real and imaginary parts REAL and IMAG.
2237    Both REAL and IMAG should be constant nodes.  TYPE, if specified,
2238    will be the type of the COMPLEX_CST; otherwise a new type will be made.  */
2239 
2240 tree
2241 build_complex (tree type, tree real, tree imag)
2242 {
2243   gcc_assert (CONSTANT_CLASS_P (real));
2244   gcc_assert (CONSTANT_CLASS_P (imag));
2245 
2246   tree t = make_node (COMPLEX_CST);
2247 
2248   TREE_REALPART (t) = real;
2249   TREE_IMAGPART (t) = imag;
2250   TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2251   TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2252   return t;
2253 }
2254 
2255 /* Build a complex (inf +- 0i), such as for the result of cproj.
2256    TYPE is the complex tree type of the result.  If NEG is true, the
2257    imaginary zero is negative.  */
2258 
2259 tree
2260 build_complex_inf (tree type, bool neg)
2261 {
2262   REAL_VALUE_TYPE rinf, rzero = dconst0;
2263 
2264   real_inf (&rinf);
2265   rzero.sign = neg;
2266   return build_complex (type, build_real (TREE_TYPE (type), rinf),
2267 			build_real (TREE_TYPE (type), rzero));
2268 }
2269 
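/* For example, build_complex_inf (complex_double_type_node, true)
   yields the COMPLEX_CST +Inf - 0.0i, roughly what cproj produces when
   the imaginary part of an infinite argument is negative.  */
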
2270 /* Return the constant 1 in type TYPE.  If TYPE has several elements, each
2271    element is set to 1.  In particular, this is 1 + i for complex types.  */
2272 
2273 tree
2274 build_each_one_cst (tree type)
2275 {
2276   if (TREE_CODE (type) == COMPLEX_TYPE)
2277     {
2278       tree scalar = build_one_cst (TREE_TYPE (type));
2279       return build_complex (type, scalar, scalar);
2280     }
2281   else
2282     return build_one_cst (type);
2283 }
2284 
2285 /* Return a constant of arithmetic type TYPE which is the
2286    multiplicative identity of the set TYPE.  */
2287 
2288 tree
2289 build_one_cst (tree type)
2290 {
2291   switch (TREE_CODE (type))
2292     {
2293     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2294     case POINTER_TYPE: case REFERENCE_TYPE:
2295     case OFFSET_TYPE:
2296       return build_int_cst (type, 1);
2297 
2298     case REAL_TYPE:
2299       return build_real (type, dconst1);
2300 
2301     case FIXED_POINT_TYPE:
2302       /* We can only generate 1 for accum types.  */
2303       gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2304       return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2305 
2306     case VECTOR_TYPE:
2307       {
2308 	tree scalar = build_one_cst (TREE_TYPE (type));
2309 
2310 	return build_vector_from_val (type, scalar);
2311       }
2312 
2313     case COMPLEX_TYPE:
2314       return build_complex (type,
2315 			    build_one_cst (TREE_TYPE (type)),
2316 			    build_zero_cst (TREE_TYPE (type)));
2317 
2318     default:
2319       gcc_unreachable ();
2320     }
2321 }
2322 
2323 /* Return an integer of type TYPE containing all 1's in as much precision as
2324    it contains, or a complex or vector whose subparts are such integers.  */
2325 
2326 tree
2327 build_all_ones_cst (tree type)
2328 {
2329   if (TREE_CODE (type) == COMPLEX_TYPE)
2330     {
2331       tree scalar = build_all_ones_cst (TREE_TYPE (type));
2332       return build_complex (type, scalar, scalar);
2333     }
2334   else
2335     return build_minus_one_cst (type);
2336 }
2337 
2338 /* Return a constant of arithmetic type TYPE which is the
2339    opposite of the multiplicative identity of the set TYPE.  */
2340 
2341 tree
2342 build_minus_one_cst (tree type)
2343 {
2344   switch (TREE_CODE (type))
2345     {
2346     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2347     case POINTER_TYPE: case REFERENCE_TYPE:
2348     case OFFSET_TYPE:
2349       return build_int_cst (type, -1);
2350 
2351     case REAL_TYPE:
2352       return build_real (type, dconstm1);
2353 
2354     case FIXED_POINT_TYPE:
2355       /* We can only generate -1 for accum types.  */
2356       gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2357       return build_fixed (type,
2358 			  fixed_from_double_int (double_int_minus_one,
2359 						 SCALAR_TYPE_MODE (type)));
2360 
2361     case VECTOR_TYPE:
2362       {
2363 	tree scalar = build_minus_one_cst (TREE_TYPE (type));
2364 
2365 	return build_vector_from_val (type, scalar);
2366       }
2367 
2368     case COMPLEX_TYPE:
2369       return build_complex (type,
2370 			    build_minus_one_cst (TREE_TYPE (type)),
2371 			    build_zero_cst (TREE_TYPE (type)));
2372 
2373     default:
2374       gcc_unreachable ();
2375     }
2376 }
2377 
2378 /* Build 0 constant of type TYPE.  This is used by constructor folding
2379    and thus the constant should be represented in memory by
2380    zero(es).  */
2381 
2382 tree
2383 build_zero_cst (tree type)
2384 {
2385   switch (TREE_CODE (type))
2386     {
2387     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2388     case POINTER_TYPE: case REFERENCE_TYPE:
2389     case OFFSET_TYPE: case NULLPTR_TYPE:
2390       return build_int_cst (type, 0);
2391 
2392     case REAL_TYPE:
2393       return build_real (type, dconst0);
2394 
2395     case FIXED_POINT_TYPE:
2396       return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2397 
2398     case VECTOR_TYPE:
2399       {
2400 	tree scalar = build_zero_cst (TREE_TYPE (type));
2401 
2402 	return build_vector_from_val (type, scalar);
2403       }
2404 
2405     case COMPLEX_TYPE:
2406       {
2407 	tree zero = build_zero_cst (TREE_TYPE (type));
2408 
2409 	return build_complex (type, zero, zero);
2410       }
2411 
2412     default:
2413       if (!AGGREGATE_TYPE_P (type))
2414 	return fold_convert (type, integer_zero_node);
2415       return build_constructor (type, NULL);
2416     }
2417 }
2418 
2419 
2420 /* Build a BINFO with room for BASE_BINFOS base binfos.  */
2421 
2422 tree
2423 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
2424 {
2425   tree t;
2426   size_t length = (offsetof (struct tree_binfo, base_binfos)
2427 		   + vec<tree, va_gc>::embedded_size (base_binfos));
2428 
2429   record_node_allocation_statistics (TREE_BINFO, length);
2430 
2431   t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2432 
2433   memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2434 
2435   TREE_SET_CODE (t, TREE_BINFO);
2436 
2437   BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2438 
2439   return t;
2440 }
2441 
2442 /* Create a CASE_LABEL_EXPR tree node and return it.  */
2443 
2444 tree
2445 build_case_label (tree low_value, tree high_value, tree label_decl)
2446 {
2447   tree t = make_node (CASE_LABEL_EXPR);
2448 
2449   TREE_TYPE (t) = void_type_node;
2450   SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2451 
2452   CASE_LOW (t) = low_value;
2453   CASE_HIGH (t) = high_value;
2454   CASE_LABEL (t) = label_decl;
2455   CASE_CHAIN (t) = NULL_TREE;
2456 
2457   return t;
2458 }
2459 
2460 /* Build a newly constructed INTEGER_CST node.  LEN and EXT_LEN are the
2461    values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2462    The latter determines the length of the HOST_WIDE_INT vector.  */
2463 
2464 tree
2465 make_int_cst (int len, int ext_len MEM_STAT_DECL)
2466 {
2467   tree t;
2468   int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2469 		+ sizeof (struct tree_int_cst));
2470 
2471   gcc_assert (len);
2472   record_node_allocation_statistics (INTEGER_CST, length);
2473 
2474   t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2475 
2476   TREE_SET_CODE (t, INTEGER_CST);
2477   TREE_INT_CST_NUNITS (t) = len;
2478   TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2479   /* to_offset can only be applied to trees that are offset_int-sized
2480      or smaller.  EXT_LEN is correct if it fits, otherwise the constant
2481      must be exactly the precision of offset_int and so LEN is correct.  */
2482   if (ext_len <= OFFSET_INT_ELTS)
2483     TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2484   else
2485     TREE_INT_CST_OFFSET_NUNITS (t) = len;
2486 
2487   TREE_CONSTANT (t) = 1;
2488 
2489   return t;
2490 }
2491 
2492 /* Build a newly constructed TREE_VEC node of length LEN.  */
2493 
2494 tree
2495 make_tree_vec (int len MEM_STAT_DECL)
2496 {
2497   tree t;
2498   size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2499 
2500   record_node_allocation_statistics (TREE_VEC, length);
2501 
2502   t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2503 
2504   TREE_SET_CODE (t, TREE_VEC);
2505   TREE_VEC_LENGTH (t) = len;
2506 
2507   return t;
2508 }
2509 
2510 /* Grow a TREE_VEC node to new length LEN.  */
2511 
2512 tree
2513 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2514 {
2515   gcc_assert (TREE_CODE (v) == TREE_VEC);
2516 
2517   int oldlen = TREE_VEC_LENGTH (v);
2518   gcc_assert (len > oldlen);
2519 
2520   size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2521   size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2522 
2523   record_node_allocation_statistics (TREE_VEC, length - oldlength);
2524 
2525   v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2526 
2527   TREE_VEC_LENGTH (v) = len;
2528 
2529   return v;
2530 }
2531 
2532 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2533    fixed, and scalar, complex or vector.  */
2534 
2535 bool
2536 zerop (const_tree expr)
2537 {
2538   return (integer_zerop (expr)
2539 	  || real_zerop (expr)
2540 	  || fixed_zerop (expr));
2541 }
2542 
2543 /* Return 1 if EXPR is the integer constant zero or a complex constant
2544    of zero, or a location wrapper for such a constant.  */
2545 
2546 bool
2547 integer_zerop (const_tree expr)
2548 {
2549   STRIP_ANY_LOCATION_WRAPPER (expr);
2550 
2551   switch (TREE_CODE (expr))
2552     {
2553     case INTEGER_CST:
2554       return wi::to_wide (expr) == 0;
2555     case COMPLEX_CST:
2556       return (integer_zerop (TREE_REALPART (expr))
2557 	      && integer_zerop (TREE_IMAGPART (expr)));
2558     case VECTOR_CST:
2559       return (VECTOR_CST_NPATTERNS (expr) == 1
2560 	      && VECTOR_CST_DUPLICATE_P (expr)
2561 	      && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2562     default:
2563       return false;
2564     }
2565 }
2566 
2567 /* Return 1 if EXPR is the integer constant one or the corresponding
2568    complex constant, or a location wrapper for such a constant.  */
2569 
2570 bool
2571 integer_onep (const_tree expr)
2572 {
2573   STRIP_ANY_LOCATION_WRAPPER (expr);
2574 
2575   switch (TREE_CODE (expr))
2576     {
2577     case INTEGER_CST:
2578       return wi::eq_p (wi::to_widest (expr), 1);
2579     case COMPLEX_CST:
2580       return (integer_onep (TREE_REALPART (expr))
2581 	      && integer_zerop (TREE_IMAGPART (expr)));
2582     case VECTOR_CST:
2583       return (VECTOR_CST_NPATTERNS (expr) == 1
2584 	      && VECTOR_CST_DUPLICATE_P (expr)
2585 	      && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2586     default:
2587       return false;
2588     }
2589 }
2590 
2591 /* Return 1 if EXPR is the integer constant one.  For complex and vector,
2592    return 1 if every piece is the integer constant one.
2593    Also return 1 for location wrappers for such a constant.  */
2594 
2595 bool
2596 integer_each_onep (const_tree expr)
2597 {
2598   STRIP_ANY_LOCATION_WRAPPER (expr);
2599 
2600   if (TREE_CODE (expr) == COMPLEX_CST)
2601     return (integer_onep (TREE_REALPART (expr))
2602 	    && integer_onep (TREE_IMAGPART (expr)));
2603   else
2604     return integer_onep (expr);
2605 }
2606 
2607 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2608    it contains, or a complex or vector whose subparts are such integers,
2609    or a location wrapper for such a constant.  */
2610 
2611 bool
2612 integer_all_onesp (const_tree expr)
2613 {
2614   STRIP_ANY_LOCATION_WRAPPER (expr);
2615 
2616   if (TREE_CODE (expr) == COMPLEX_CST
2617       && integer_all_onesp (TREE_REALPART (expr))
2618       && integer_all_onesp (TREE_IMAGPART (expr)))
2619     return true;
2620 
2621   else if (TREE_CODE (expr) == VECTOR_CST)
2622     return (VECTOR_CST_NPATTERNS (expr) == 1
2623 	    && VECTOR_CST_DUPLICATE_P (expr)
2624 	    && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2625 
2626   else if (TREE_CODE (expr) != INTEGER_CST)
2627     return false;
2628 
2629   return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2630 	  == wi::to_wide (expr));
2631 }
2632 
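/* Sketch: for an 8-bit type, both the unsigned constant 255 and the
   signed constant -1 satisfy integer_all_onesp, since each has all
   eight value bits set.  */
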
2633 /* Return 1 if EXPR is the integer constant minus one, or a location wrapper
2634    for such a constant.  */
2635 
2636 bool
2637 integer_minus_onep (const_tree expr)
2638 {
2639   STRIP_ANY_LOCATION_WRAPPER (expr);
2640 
2641   if (TREE_CODE (expr) == COMPLEX_CST)
2642     return (integer_all_onesp (TREE_REALPART (expr))
2643 	    && integer_zerop (TREE_IMAGPART (expr)));
2644   else
2645     return integer_all_onesp (expr);
2646 }
2647 
2648 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2649    one bit on), or a location wrapper for such a constant.  */
2650 
2651 bool
2652 integer_pow2p (const_tree expr)
2653 {
2654   STRIP_ANY_LOCATION_WRAPPER (expr);
2655 
2656   if (TREE_CODE (expr) == COMPLEX_CST
2657       && integer_pow2p (TREE_REALPART (expr))
2658       && integer_zerop (TREE_IMAGPART (expr)))
2659     return true;
2660 
2661   if (TREE_CODE (expr) != INTEGER_CST)
2662     return false;
2663 
2664   return wi::popcount (wi::to_wide (expr)) == 1;
2665 }
2666 
2667 /* Return 1 if EXPR is an integer constant other than zero or a
2668    complex constant other than zero, or a location wrapper for such a
2669    constant.  */
2670 
2671 bool
2672 integer_nonzerop (const_tree expr)
2673 {
2674   STRIP_ANY_LOCATION_WRAPPER (expr);
2675 
2676   return ((TREE_CODE (expr) == INTEGER_CST
2677 	   && wi::to_wide (expr) != 0)
2678 	  || (TREE_CODE (expr) == COMPLEX_CST
2679 	      && (integer_nonzerop (TREE_REALPART (expr))
2680 		  || integer_nonzerop (TREE_IMAGPART (expr)))));
2681 }
2682 
2683 /* Return 1 if EXPR is the integer constant one.  For vector,
2684    return 1 if every piece is the integer constant minus one
2685    (representing the value TRUE).
2686    Also return 1 for location wrappers for such a constant.  */
2687 
2688 bool
2689 integer_truep (const_tree expr)
2690 {
2691   STRIP_ANY_LOCATION_WRAPPER (expr);
2692 
2693   if (TREE_CODE (expr) == VECTOR_CST)
2694     return integer_all_onesp (expr);
2695   return integer_onep (expr);
2696 }
2697 
2698 /* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
2699    for such a constant.  */
2700 
2701 bool
2702 fixed_zerop (const_tree expr)
2703 {
2704   STRIP_ANY_LOCATION_WRAPPER (expr);
2705 
2706   return (TREE_CODE (expr) == FIXED_CST
2707 	  && TREE_FIXED_CST (expr).data.is_zero ());
2708 }
2709 
2710 /* Return the base-2 exponent of a tree node known to be a
2711    power of two (i.e. for EXPR == 2**N, return N).  */
2712 
2713 int
2714 tree_log2 (const_tree expr)
2715 {
2716   if (TREE_CODE (expr) == COMPLEX_CST)
2717     return tree_log2 (TREE_REALPART (expr));
2718 
2719   return wi::exact_log2 (wi::to_wide (expr));
2720 }
2721 
2722 /* Similar, but return the largest integer Y such that 2 ** Y is less
2723    than or equal to EXPR.  */
2724 
2725 int
2726 tree_floor_log2 (const_tree expr)
2727 {
2728   if (TREE_CODE (expr) == COMPLEX_CST)
2729     return tree_log2 (TREE_REALPART (expr));
2730 
2731   return wi::floor_log2 (wi::to_wide (expr));
2732 }
2733 
2734 /* Return the number of known trailing zero bits in EXPR, or, if the value of
2735    EXPR is known to be zero, the precision of its type.  */
2736 
2737 unsigned int
2738 tree_ctz (const_tree expr)
2739 {
2740   if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2741       && !POINTER_TYPE_P (TREE_TYPE (expr)))
2742     return 0;
2743 
2744   unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2745   switch (TREE_CODE (expr))
2746     {
2747     case INTEGER_CST:
2748       ret1 = wi::ctz (wi::to_wide (expr));
2749       return MIN (ret1, prec);
2750     case SSA_NAME:
2751       ret1 = wi::ctz (get_nonzero_bits (expr));
2752       return MIN (ret1, prec);
2753     case PLUS_EXPR:
2754     case MINUS_EXPR:
2755     case BIT_IOR_EXPR:
2756     case BIT_XOR_EXPR:
2757     case MIN_EXPR:
2758     case MAX_EXPR:
2759       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2760       if (ret1 == 0)
2761 	return ret1;
2762       ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2763       return MIN (ret1, ret2);
2764     case POINTER_PLUS_EXPR:
2765       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2766       ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2767       /* Second operand is sizetype, which could in theory be
2768 	 wider than the pointer's precision.  Make sure we never
2769 	 return more than prec.  */
2770       ret2 = MIN (ret2, prec);
2771       return MIN (ret1, ret2);
2772     case BIT_AND_EXPR:
2773       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2774       ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2775       return MAX (ret1, ret2);
2776     case MULT_EXPR:
2777       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2778       ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2779       return MIN (ret1 + ret2, prec);
2780     case LSHIFT_EXPR:
2781       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2782       if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2783 	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2784 	{
2785 	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2786 	  return MIN (ret1 + ret2, prec);
2787 	}
2788       return ret1;
2789     case RSHIFT_EXPR:
2790       if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2791 	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2792 	{
2793 	  ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2794 	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2795 	  if (ret1 > ret2)
2796 	    return ret1 - ret2;
2797 	}
2798       return 0;
2799     case TRUNC_DIV_EXPR:
2800     case CEIL_DIV_EXPR:
2801     case FLOOR_DIV_EXPR:
2802     case ROUND_DIV_EXPR:
2803     case EXACT_DIV_EXPR:
2804       if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2805 	  && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2806 	{
2807 	  int l = tree_log2 (TREE_OPERAND (expr, 1));
2808 	  if (l >= 0)
2809 	    {
2810 	      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2811 	      ret2 = l;
2812 	      if (ret1 > ret2)
2813 		return ret1 - ret2;
2814 	    }
2815 	}
2816       return 0;
2817     CASE_CONVERT:
2818       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2819       if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2820 	ret1 = prec;
2821       return MIN (ret1, prec);
2822     case SAVE_EXPR:
2823       return tree_ctz (TREE_OPERAND (expr, 0));
2824     case COND_EXPR:
2825       ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2826       if (ret1 == 0)
2827 	return 0;
2828       ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2829       return MIN (ret1, ret2);
2830     case COMPOUND_EXPR:
2831       return tree_ctz (TREE_OPERAND (expr, 1));
2832     case ADDR_EXPR:
2833       ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2834       if (ret1 > BITS_PER_UNIT)
2835 	{
2836 	  ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2837 	  return MIN (ret1, prec);
2838 	}
2839       return 0;
2840     default:
2841       return 0;
2842     }
2843 }
2844 
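/* A couple of concrete cases of the recursion above (sketch): the
   INTEGER_CST 40 (binary 101000) gives 3; for X << 2 where X is known
   to have one trailing zero bit, the LSHIFT_EXPR case returns
   MIN (1 + 2, prec) == 3.  */
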
2845 /* Return 1 if EXPR is the real constant zero.  Trailing zeroes matter for
2846    decimal float constants, so don't return 1 for them.
2847    Also return 1 for location wrappers around such a constant.  */
2848 
2849 bool
2850 real_zerop (const_tree expr)
2851 {
2852   STRIP_ANY_LOCATION_WRAPPER (expr);
2853 
2854   switch (TREE_CODE (expr))
2855     {
2856     case REAL_CST:
2857       return real_equal (&TREE_REAL_CST (expr), &dconst0)
2858 	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2859     case COMPLEX_CST:
2860       return real_zerop (TREE_REALPART (expr))
2861 	     && real_zerop (TREE_IMAGPART (expr));
2862     case VECTOR_CST:
2863       {
2864 	/* Don't simply check for a duplicate because the predicate
2865 	   accepts both +0.0 and -0.0.  */
2866 	unsigned count = vector_cst_encoded_nelts (expr);
2867 	for (unsigned int i = 0; i < count; ++i)
2868 	  if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
2869 	    return false;
2870 	return true;
2871       }
2872     default:
2873       return false;
2874     }
2875 }
2876 
2877 /* Return 1 if EXPR is the real constant one in real or complex form.
2878    Trailing zeroes matter for decimal float constants, so don't return
2879    1 for them.
2880    Also return 1 for location wrappers around such a constant.  */
2881 
2882 bool
2883 real_onep (const_tree expr)
2884 {
2885   STRIP_ANY_LOCATION_WRAPPER (expr);
2886 
2887   switch (TREE_CODE (expr))
2888     {
2889     case REAL_CST:
2890       return real_equal (&TREE_REAL_CST (expr), &dconst1)
2891 	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2892     case COMPLEX_CST:
2893       return real_onep (TREE_REALPART (expr))
2894 	     && real_zerop (TREE_IMAGPART (expr));
2895     case VECTOR_CST:
2896       return (VECTOR_CST_NPATTERNS (expr) == 1
2897 	      && VECTOR_CST_DUPLICATE_P (expr)
2898 	      && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2899     default:
2900       return false;
2901     }
2902 }
2903 
2904 /* Return 1 if EXPR is the real constant minus one.  Trailing zeroes
2905    matter for decimal float constants, so don't return 1 for them.
2906    Also return 1 for location wrappers around such a constant.  */
2907 
2908 bool
2909 real_minus_onep (const_tree expr)
2910 {
2911   STRIP_ANY_LOCATION_WRAPPER (expr);
2912 
2913   switch (TREE_CODE (expr))
2914     {
2915     case REAL_CST:
2916       return real_equal (&TREE_REAL_CST (expr), &dconstm1)
2917 	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2918     case COMPLEX_CST:
2919       return real_minus_onep (TREE_REALPART (expr))
2920 	     && real_zerop (TREE_IMAGPART (expr));
2921     case VECTOR_CST:
2922       return (VECTOR_CST_NPATTERNS (expr) == 1
2923 	      && VECTOR_CST_DUPLICATE_P (expr)
2924 	      && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2925     default:
2926       return false;
2927     }
2928 }
2929 
2930 /* Nonzero if EXP is a constant or a cast of a constant.  */
2931 
2932 bool
2933 really_constant_p (const_tree exp)
2934 {
2935   /* This is not quite the same as STRIP_NOPS.  It does more.  */
2936   while (CONVERT_EXPR_P (exp)
2937 	 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2938     exp = TREE_OPERAND (exp, 0);
2939   return TREE_CONSTANT (exp);
2940 }
2941 
2942 /* Return true if T holds a polynomial pointer difference, storing it in
2943    *VALUE if so.  A true return means that T's precision is no greater
2944    than 64 bits, which is the largest address space we support, so *VALUE
2945    never loses precision.  However, the signedness of the result does
2946    not necessarily match the signedness of T: sometimes an unsigned type
2947    like sizetype is used to encode a value that is actually negative.  */
2948 
2949 bool
2950 ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
2951 {
2952   if (!t)
2953     return false;
2954   if (TREE_CODE (t) == INTEGER_CST)
2955     {
2956       if (!cst_and_fits_in_hwi (t))
2957 	return false;
2958       *value = int_cst_value (t);
2959       return true;
2960     }
2961   if (POLY_INT_CST_P (t))
2962     {
2963       for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
2964 	if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
2965 	  return false;
2966       for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
2967 	value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
2968       return true;
2969     }
2970   return false;
2971 }
2972 
2973 poly_int64
2974 tree_to_poly_int64 (const_tree t)
2975 {
2976   gcc_assert (tree_fits_poly_int64_p (t));
2977   if (POLY_INT_CST_P (t))
2978     return poly_int_cst_value (t).force_shwi ();
2979   return TREE_INT_CST_LOW (t);
2980 }
2981 
2982 poly_uint64
2983 tree_to_poly_uint64 (const_tree t)
2984 {
2985   gcc_assert (tree_fits_poly_uint64_p (t));
2986   if (POLY_INT_CST_P (t))
2987     return poly_int_cst_value (t).force_uhwi ();
2988   return TREE_INT_CST_LOW (t);
2989 }
2990 
2991 /* Return first list element whose TREE_VALUE is ELEM.
2992    Return 0 if ELEM is not in LIST.  */
2993 
2994 tree
2995 value_member (tree elem, tree list)
2996 {
2997   while (list)
2998     {
2999       if (elem == TREE_VALUE (list))
3000 	return list;
3001       list = TREE_CHAIN (list);
3002     }
3003   return NULL_TREE;
3004 }
3005 
3006 /* Return first list element whose TREE_PURPOSE is ELEM.
3007    Return 0 if ELEM is not in LIST.  */
3008 
3009 tree
3010 purpose_member (const_tree elem, tree list)
3011 {
3012   while (list)
3013     {
3014       if (elem == TREE_PURPOSE (list))
3015 	return list;
3016       list = TREE_CHAIN (list);
3017     }
3018   return NULL_TREE;
3019 }
3020 
3021 /* Return true if ELEM is in V.  */
3022 
3023 bool
3024 vec_member (const_tree elem, vec<tree, va_gc> *v)
3025 {
3026   unsigned ix;
3027   tree t;
3028   FOR_EACH_VEC_SAFE_ELT (v, ix, t)
3029     if (elem == t)
3030       return true;
3031   return false;
3032 }
3033 
3034 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3035    NULL_TREE.  */
3036 
3037 tree
3038 chain_index (int idx, tree chain)
3039 {
3040   for (; chain && idx > 0; --idx)
3041     chain = TREE_CHAIN (chain);
3042   return chain;
3043 }
3044 
3045 /* Return nonzero if ELEM is part of the chain CHAIN.  */
3046 
3047 bool
3048 chain_member (const_tree elem, const_tree chain)
3049 {
3050   while (chain)
3051     {
3052       if (elem == chain)
3053 	return true;
3054       chain = DECL_CHAIN (chain);
3055     }
3056 
3057   return false;
3058 }
3059 
3060 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3061    We expect a null pointer to mark the end of the chain.
3062    This is the Lisp primitive `length'.  */
3063 
3064 int
3065 list_length (const_tree t)
3066 {
3067   const_tree p = t;
3068 #ifdef ENABLE_TREE_CHECKING
3069   const_tree q = t;
3070 #endif
3071   int len = 0;
3072 
3073   while (p)
3074     {
3075       p = TREE_CHAIN (p);
3076 #ifdef ENABLE_TREE_CHECKING
3077       if (len % 2)
3078 	q = TREE_CHAIN (q);
3079       gcc_assert (p != q);
3080 #endif
3081       len++;
3082     }
3083 
3084   return len;
3085 }
3086 
3087 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
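/* The ENABLE_TREE_CHECKING walk above is a tortoise-and-hare check:
   Q advances one link for every two advances of P, so a circular
   chain trips the assert instead of looping forever.  A simple chain
   A -> B -> C -> NULL just returns 3.  */
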
3088    UNION_TYPE TYPE, or NULL_TREE if none.  */
3089 
3090 tree
3091 first_field (const_tree type)
3092 {
3093   tree t = TYPE_FIELDS (type);
3094   while (t && TREE_CODE (t) != FIELD_DECL)
3095     t = TREE_CHAIN (t);
3096   return t;
3097 }
3098 
3099 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3100    UNION_TYPE TYPE, or NULL_TREE if none.  */
3101 
3102 tree
3103 last_field (const_tree type)
3104 {
3105   tree last = NULL_TREE;
3106 
3107   for (tree fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
3108     {
3109       if (TREE_CODE (fld) != FIELD_DECL)
3110 	continue;
3111 
3112       last = fld;
3113     }
3114 
3115   return last;
3116 }
3117 
3118 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3119    by modifying the last node in chain 1 to point to chain 2.
3120    This is the Lisp primitive `nconc'.  */
3121 
3122 tree
3123 chainon (tree op1, tree op2)
3124 {
3125   tree t1;
3126 
3127   if (!op1)
3128     return op2;
3129   if (!op2)
3130     return op1;
3131 
3132   for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
3133     continue;
3134   TREE_CHAIN (t1) = op2;
3135 
3136 #ifdef ENABLE_TREE_CHECKING
3137   {
3138     tree t2;
3139     for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
3140       gcc_assert (t2 != t1);
3141   }
3142 #endif
3143 
3144   return op1;
3145 }
3146 
3147 /* Return the last node in a chain of nodes (chained through TREE_CHAIN).  */
3148 
3149 tree
3150 tree_last (tree chain)
3151 {
3152   tree next;
3153   if (chain)
3154     while ((next = TREE_CHAIN (chain)))
3155       chain = next;
3156   return chain;
3157 }
3158 
3159 /* Reverse the order of elements in the chain T,
3160    and return the new head of the chain (old last element).  */
3161 
3162 tree
3163 nreverse (tree t)
3164 {
3165   tree prev = 0, decl, next;
3166   for (decl = t; decl; decl = next)
3167     {
3168       /* We shouldn't be using this function to reverse BLOCK chains; we
3169 	 have blocks_nreverse for that.  */
3170       gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3171       next = TREE_CHAIN (decl);
3172       TREE_CHAIN (decl) = prev;
3173       prev = decl;
3174     }
3175   return prev;
3176 }
3177 
3178 /* Return a newly created TREE_LIST node whose
3179    purpose and value fields are PARM and VALUE.  */
3180 
3181 tree
3182 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3183 {
3184   tree t = make_node (TREE_LIST PASS_MEM_STAT);
3185   TREE_PURPOSE (t) = parm;
3186   TREE_VALUE (t) = value;
3187   return t;
3188 }
3189 
3190 /* Build a chain of TREE_LIST nodes from a vector.  */
3191 
3192 tree
3193 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
3194 {
3195   tree ret = NULL_TREE;
3196   tree *pp = &ret;
3197   unsigned int i;
3198   tree t;
3199   FOR_EACH_VEC_SAFE_ELT (vec, i, t)
3200     {
3201       *pp = build_tree_list (NULL, t PASS_MEM_STAT);
3202       pp = &TREE_CHAIN (*pp);
3203     }
3204   return ret;
3205 }
3206 
3207 /* Return a newly created TREE_LIST node whose
3208    purpose and value fields are PURPOSE and VALUE
3209    and whose TREE_CHAIN is CHAIN.  */
3210 
3211 tree
3212 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
3213 {
3214   tree node;
3215 
3216   node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
3217   memset (node, 0, sizeof (struct tree_common));
3218 
3219   record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
3220 
3221   TREE_SET_CODE (node, TREE_LIST);
3222   TREE_CHAIN (node) = chain;
3223   TREE_PURPOSE (node) = purpose;
3224   TREE_VALUE (node) = value;
3225   return node;
3226 }
3227 
3228 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3229    trees.  */
3230 
3231 vec<tree, va_gc> *
3232 ctor_to_vec (tree ctor)
3233 {
3234   vec<tree, va_gc> *vec;
3235   vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
3236   unsigned int ix;
3237   tree val;
3238 
3239   FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
3240     vec->quick_push (val);
3241 
3242   return vec;
3243 }
3244 
3245 /* Return the size nominally occupied by an object of type TYPE
3246    when it resides in memory.  The value is measured in units of bytes,
3247    and its data type is that normally used for type sizes
3248    (which is the first type created by make_signed_type or
3249    make_unsigned_type).  */
3250 
3251 tree
3252 size_in_bytes_loc (location_t loc, const_tree type)
3253 {
3254   tree t;
3255 
3256   if (type == error_mark_node)
3257     return integer_zero_node;
3258 
3259   type = TYPE_MAIN_VARIANT (type);
3260   t = TYPE_SIZE_UNIT (type);
3261 
3262   if (t == 0)
3263     {
3264       lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
3265       return size_zero_node;
3266     }
3267 
3268   return t;
3269 }
3270 
3271 /* Return the size of TYPE (in bytes) as a wide integer
3272    or return -1 if the size can vary or is larger than an integer.  */
3273 
3274 HOST_WIDE_INT
3275 int_size_in_bytes (const_tree type)
3276 {
3277   tree t;
3278 
3279   if (type == error_mark_node)
3280     return 0;
3281 
3282   type = TYPE_MAIN_VARIANT (type);
3283   t = TYPE_SIZE_UNIT (type);
3284 
3285   if (t && tree_fits_uhwi_p (t))
3286     return TREE_INT_CST_LOW (t);
3287   else
3288     return -1;
3289 }
3290 
3291 /* Return the maximum size of TYPE (in bytes) as a wide integer
3292    or return -1 if the size can vary or is larger than an integer.  */
3293 
3294 HOST_WIDE_INT
3295 max_int_size_in_bytes (const_tree type)
3296 {
3297   HOST_WIDE_INT size = -1;
3298   tree size_tree;
3299 
3300   /* If this is an array type, check for a possible MAX_SIZE attached.  */
3301 
3302   if (TREE_CODE (type) == ARRAY_TYPE)
3303     {
3304       size_tree = TYPE_ARRAY_MAX_SIZE (type);
3305 
3306       if (size_tree && tree_fits_uhwi_p (size_tree))
3307 	size = tree_to_uhwi (size_tree);
3308     }
3309 
3310   /* If we still haven't been able to get a size, see if the language
3311      can compute a maximum size.  */
3312 
3313   if (size == -1)
3314     {
3315       size_tree = lang_hooks.types.max_size (type);
3316 
3317       if (size_tree && tree_fits_uhwi_p (size_tree))
3318 	size = tree_to_uhwi (size_tree);
3319     }
3320 
3321   return size;
3322 }
3323 
3324 /* Return the bit position of FIELD, in bits from the start of the record.
3325    This is a tree of type bitsizetype.  */
3326 
3327 tree
3328 bit_position (const_tree field)
3329 {
3330   return bit_from_pos (DECL_FIELD_OFFSET (field),
3331 		       DECL_FIELD_BIT_OFFSET (field));
3332 }
3333 
3334 /* Return the byte position of FIELD, in bytes from the start of the record.
3335    This is a tree of type sizetype.  */
3336 
3337 tree
3338 byte_position (const_tree field)
3339 {
3340   return byte_from_pos (DECL_FIELD_OFFSET (field),
3341 			DECL_FIELD_BIT_OFFSET (field));
3342 }
3343 
3344 /* Likewise, but return as an integer.  It must be representable in
3345    that way (since it could be a signed value, we don't have the
3346    option of returning -1 like int_size_in_bytes can).  */
3347 
3348 HOST_WIDE_INT
3349 int_byte_position (const_tree field)
3350 {
3351   return tree_to_shwi (byte_position (field));
3352 }
3353 
3354 /* Return the strictest alignment, in bits, that T is known to have.  */
3355 
3356 unsigned int
3357 expr_align (const_tree t)
3358 {
3359   unsigned int align0, align1;
3360 
3361   switch (TREE_CODE (t))
3362     {
3363     CASE_CONVERT:  case NON_LVALUE_EXPR:
3364       /* If we have conversions, we know that the alignment of the
3365 	 object must meet each of the alignments of the types.  */
3366       align0 = expr_align (TREE_OPERAND (t, 0));
3367       align1 = TYPE_ALIGN (TREE_TYPE (t));
3368       return MAX (align0, align1);
3369 
3370     case SAVE_EXPR:         case COMPOUND_EXPR:       case MODIFY_EXPR:
3371     case INIT_EXPR:         case TARGET_EXPR:         case WITH_CLEANUP_EXPR:
3372     case CLEANUP_POINT_EXPR:
3373       /* These don't change the alignment of an object.  */
3374       return expr_align (TREE_OPERAND (t, 0));
3375 
3376     case COND_EXPR:
3377       /* The best we can do is say that the alignment is the least aligned
3378 	 of the two arms.  */
3379       align0 = expr_align (TREE_OPERAND (t, 1));
3380       align1 = expr_align (TREE_OPERAND (t, 2));
3381       return MIN (align0, align1);
3382 
3383       /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
3384 	 meaningfully, it's always 1.  */
3385     case LABEL_DECL:     case CONST_DECL:
3386     case VAR_DECL:       case PARM_DECL:   case RESULT_DECL:
3387     case FUNCTION_DECL:
3388       gcc_assert (DECL_ALIGN (t) != 0);
3389       return DECL_ALIGN (t);
3390 
3391     default:
3392       break;
3393     }
3394 
3395   /* Otherwise take the alignment from that of the type.  */
3396   return TYPE_ALIGN (TREE_TYPE (t));
3397 }
3398 
3399 /* Return, as a tree node, the number of elements for TYPE (which is an
3400    ARRAY_TYPE) minus one. This counts only elements of the top array.  */
3401 
3402 tree
3403 array_type_nelts (const_tree type)
3404 {
3405   tree index_type, min, max;
3406 
3407   /* If they did it with unspecified bounds, then we should have already
3408      given an error about it before we got here.  */
3409   if (! TYPE_DOMAIN (type))
3410     return error_mark_node;
3411 
3412   index_type = TYPE_DOMAIN (type);
3413   min = TYPE_MIN_VALUE (index_type);
3414   max = TYPE_MAX_VALUE (index_type);
3415 
3416   /* TYPE_MAX_VALUE may not be set if the array has unknown length.  */
3417   if (!max)
3418     return error_mark_node;
3419 
3420   return (integer_zerop (min)
3421 	  ? max
3422 	  : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3423 }
3424 
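/* For instance, an array type with domain [0, 9] (e.g. int[10]) makes
   array_type_nelts return the INTEGER_CST 9; with a nonzero lower
   bound the result is the folded MAX - MIN instead.  */
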
3425 /* If arg is static -- a reference to an object in static storage -- then
3426    return the object.  This is not the same as the C meaning of `static'.
3427    If arg isn't static, return NULL.  */
3428 
3429 tree
3430 staticp (tree arg)
3431 {
3432   switch (TREE_CODE (arg))
3433     {
3434     case FUNCTION_DECL:
3435       /* Nested functions are static, even though taking their address will
3436 	 involve a trampoline as we unnest the nested function and create
3437 	 the trampoline on the tree level.  */
3438       return arg;
3439 
3440     case VAR_DECL:
3441       return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3442 	      && ! DECL_THREAD_LOCAL_P (arg)
3443 	      && ! DECL_DLLIMPORT_P (arg)
3444 	      ? arg : NULL);
3445 
3446     case CONST_DECL:
3447       return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3448 	      ? arg : NULL);
3449 
3450     case CONSTRUCTOR:
3451       return TREE_STATIC (arg) ? arg : NULL;
3452 
3453     case LABEL_DECL:
3454     case STRING_CST:
3455       return arg;
3456 
3457     case COMPONENT_REF:
3458       /* If the thing being referenced is not a field, then it is
3459 	 something language specific.  */
3460       gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3461 
3462       /* If we are referencing a bitfield, we can't evaluate an
3463 	 ADDR_EXPR at compile time and so it isn't a constant.  */
3464       if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3465 	return NULL;
3466 
3467       return staticp (TREE_OPERAND (arg, 0));
3468 
3469     case BIT_FIELD_REF:
3470       return NULL;
3471 
3472     case INDIRECT_REF:
3473       return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3474 
3475     case ARRAY_REF:
3476     case ARRAY_RANGE_REF:
3477       if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3478 	  && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3479 	return staticp (TREE_OPERAND (arg, 0));
3480       else
3481 	return NULL;
3482 
3483     case COMPOUND_LITERAL_EXPR:
3484       return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3485 
3486     default:
3487       return NULL;
3488     }
3489 }
3490 
3491 
3492 
3493 
3494 /* Return whether OP is a DECL whose address is function-invariant.  */
3495 
3496 bool
3497 decl_address_invariant_p (const_tree op)
3498 {
3499   /* The conditions below are slightly less strict than the one in
3500      staticp.  */
3501 
3502   switch (TREE_CODE (op))
3503     {
3504     case PARM_DECL:
3505     case RESULT_DECL:
3506     case LABEL_DECL:
3507     case FUNCTION_DECL:
3508       return true;
3509 
3510     case VAR_DECL:
3511       if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3512           || DECL_THREAD_LOCAL_P (op)
3513           || DECL_CONTEXT (op) == current_function_decl
3514           || decl_function_context (op) == current_function_decl)
3515         return true;
3516       break;
3517 
3518     case CONST_DECL:
3519       if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3520           || decl_function_context (op) == current_function_decl)
3521         return true;
3522       break;
3523 
3524     default:
3525       break;
3526     }
3527 
3528   return false;
3529 }
3530 
3531 /* Return whether OP is a DECL whose address is interprocedural-invariant.  */
3532 
3533 bool
3534 decl_address_ip_invariant_p (const_tree op)
3535 {
3536   /* The conditions below are slightly less strict than the one in
3537      staticp.  */
3538 
3539   switch (TREE_CODE (op))
3540     {
3541     case LABEL_DECL:
3542     case FUNCTION_DECL:
3543     case STRING_CST:
3544       return true;
3545 
3546     case VAR_DECL:
3547       if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3548            && !DECL_DLLIMPORT_P (op))
3549           || DECL_THREAD_LOCAL_P (op))
3550         return true;
3551       break;
3552 
3553     case CONST_DECL:
3554       if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3555         return true;
3556       break;
3557 
3558     default:
3559       break;
3560     }
3561 
3562   return false;
3563 }
3564 
3565 
3566 /* Return true if T is function-invariant (internal function, does
3567    not handle arithmetic; that's handled in skip_simple_arithmetic and
3568    tree_invariant_p).  */
3569 
3570 static bool
3571 tree_invariant_p_1 (tree t)
3572 {
3573   tree op;
3574 
3575   if (TREE_CONSTANT (t)
3576       || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3577     return true;
3578 
3579   switch (TREE_CODE (t))
3580     {
3581     case SAVE_EXPR:
3582       return true;
3583 
3584     case ADDR_EXPR:
3585       op = TREE_OPERAND (t, 0);
3586       while (handled_component_p (op))
3587 	{
3588 	  switch (TREE_CODE (op))
3589 	    {
3590 	    case ARRAY_REF:
3591 	    case ARRAY_RANGE_REF:
3592 	      if (!tree_invariant_p (TREE_OPERAND (op, 1))
3593 		  || TREE_OPERAND (op, 2) != NULL_TREE
3594 		  || TREE_OPERAND (op, 3) != NULL_TREE)
3595 		return false;
3596 	      break;
3597 
3598 	    case COMPONENT_REF:
3599 	      if (TREE_OPERAND (op, 2) != NULL_TREE)
3600 		return false;
3601 	      break;
3602 
3603 	    default:;
3604 	    }
3605 	  op = TREE_OPERAND (op, 0);
3606 	}
3607 
3608       return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3609 
3610     default:
3611       break;
3612     }
3613 
3614   return false;
3615 }
3616 
3617 /* Return true if T is function-invariant.  */
3618 
3619 bool
3620 tree_invariant_p (tree t)
3621 {
3622   tree inner = skip_simple_arithmetic (t);
3623   return tree_invariant_p_1 (inner);
3624 }
3625 
3626 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3627    Do this to any expression which may be used in more than one place,
3628    but must be evaluated only once.
3629 
3630    Normally, expand_expr would reevaluate the expression each time.
3631    Calling save_expr produces something that is evaluated and recorded
3632    the first time expand_expr is called on it.  Subsequent calls to
3633    expand_expr just reuse the recorded value.
3634 
3635    The call to expand_expr that generates code that actually computes
3636    the value is the first call *at compile time*.  Subsequent calls
3637    *at compile time* generate code to use the saved value.
3638    This produces correct result provided that *at run time* control
3639    always flows through the insns made by the first expand_expr
3640    before reaching the other places where the save_expr was evaluated.
3641    You, the caller of save_expr, must make sure this is so.
3642 
3643    Constants, and certain read-only nodes, are returned with no
3644    SAVE_EXPR because that is safe.  Expressions containing placeholders
3645    are not touched; see tree.def for an explanation of what these
3646    are used for.  */
3647 
3648 tree
3649 save_expr (tree expr)
3650 {
3651   tree inner;
3652 
3653   /* If the tree evaluates to a constant, then we don't want to hide that
3654      fact (i.e. this allows further folding, and direct checks for constants).
3655      However, a read-only object that has side effects cannot be bypassed.
3656      Since it is no problem to reevaluate literals, we just return the
3657      literal node.  */
3658   inner = skip_simple_arithmetic (expr);
3659   if (TREE_CODE (inner) == ERROR_MARK)
3660     return inner;
3661 
3662   if (tree_invariant_p_1 (inner))
3663     return expr;
3664 
3665   /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3666      it means that the size or offset of some field of an object depends on
3667      the value within another field.
3668 
3669      Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3670      and some variable since it would then need to be both evaluated once and
3671      evaluated more than once.  Front-ends must ensure this case cannot
3672      happen by surrounding any such subexpressions in their own SAVE_EXPR
3673      and forcing evaluation at the proper time.  */
3674   if (contains_placeholder_p (inner))
3675     return expr;
3676 
3677   expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3678 
3679   /* This expression might be placed ahead of a jump to ensure that the
3680      value was computed on both sides of the jump.  So make sure it isn't
3681      eliminated as dead.  */
3682   TREE_SIDE_EFFECTS (expr) = 1;
3683   return expr;
3684 }
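
/* A usage sketch, not taken from this file: a hypothetical front end that
   needs to reference SIZE twice while evaluating it only once might write

     size = save_expr (size);
     lo   = fold_build2 (MULT_EXPR, sizetype, size, lo_scale);
     hi   = fold_build2 (MULT_EXPR, sizetype, size, hi_scale);

   Both products then share a single SAVE_EXPR node, so the code computing
   SIZE is emitted only once.  SIZE, LO, HI, LO_SCALE and HI_SCALE are
   names invented for this example.  */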
3685 
3686 /* Look inside EXPR into any simple arithmetic operations.  Return the
3687    outermost non-arithmetic or non-invariant node.  */
3688 
3689 tree
3690 skip_simple_arithmetic (tree expr)
3691 {
3692   /* We don't care about whether this can be used as an lvalue in this
3693      context.  */
3694   while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3695     expr = TREE_OPERAND (expr, 0);
3696 
3697   /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3698      a constant, it will be more efficient to not make another SAVE_EXPR since
3699      it will allow better simplification and GCSE will be able to merge the
3700      computations if they actually occur.  */
3701   while (true)
3702     {
3703       if (UNARY_CLASS_P (expr))
3704 	expr = TREE_OPERAND (expr, 0);
3705       else if (BINARY_CLASS_P (expr))
3706 	{
3707 	  if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3708 	    expr = TREE_OPERAND (expr, 0);
3709 	  else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3710 	    expr = TREE_OPERAND (expr, 1);
3711 	  else
3712 	    break;
3713 	}
3714       else
3715 	break;
3716     }
3717 
3718   return expr;
3719 }
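
/* An illustrative check with hypothetical trees: if SAVED is a SAVE_EXPR
   of integer type, then for

     tree four = build_int_cst (integer_type_node, 4);
     tree sum  = build2 (PLUS_EXPR, integer_type_node, saved, four);

   skip_simple_arithmetic (sum) peels off the invariant operand FOUR and
   returns SAVED itself, which is why save_expr above does not wrap such
   an expression in a second SAVE_EXPR.  */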
3720 
3721 /* Look inside EXPR into simple arithmetic operations involving constants.
3722    Return the outermost non-arithmetic or non-constant node.  */
3723 
3724 tree
3725 skip_simple_constant_arithmetic (tree expr)
3726 {
3727   while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3728     expr = TREE_OPERAND (expr, 0);
3729 
3730   while (true)
3731     {
3732       if (UNARY_CLASS_P (expr))
3733 	expr = TREE_OPERAND (expr, 0);
3734       else if (BINARY_CLASS_P (expr))
3735 	{
3736 	  if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3737 	    expr = TREE_OPERAND (expr, 0);
3738 	  else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3739 	    expr = TREE_OPERAND (expr, 1);
3740 	  else
3741 	    break;
3742 	}
3743       else
3744 	break;
3745     }
3746 
3747   return expr;
3748 }
3749 
3750 /* Return which tree structure is used by T.  */
3751 
3752 enum tree_node_structure_enum
3753 tree_node_structure (const_tree t)
3754 {
3755   const enum tree_code code = TREE_CODE (t);
3756   return tree_node_structure_for_code (code);
3757 }
3758 
3759 /* Set various status flags when building a CALL_EXPR object T.  */
3760 
3761 static void
3762 process_call_operands (tree t)
3763 {
3764   bool side_effects = TREE_SIDE_EFFECTS (t);
3765   bool read_only = false;
3766   int i = call_expr_flags (t);
3767 
3768   /* Calls have side-effects, except those to const or pure functions.  */
3769   if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3770     side_effects = true;
3771   /* Propagate TREE_READONLY of arguments for const functions.  */
3772   if (i & ECF_CONST)
3773     read_only = true;
3774 
3775   if (!side_effects || read_only)
3776     for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3777       {
3778 	tree op = TREE_OPERAND (t, i);
3779 	if (op && TREE_SIDE_EFFECTS (op))
3780 	  side_effects = true;
3781 	if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3782 	  read_only = false;
3783       }
3784 
3785   TREE_SIDE_EFFECTS (t) = side_effects;
3786   TREE_READONLY (t) = read_only;
3787 }
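
/* For illustration: a CALL_EXPR to an ECF_CONST function whose operands
   are all constant or read-only and free of side effects comes out of the
   loop above with TREE_SIDE_EFFECTS clear and TREE_READONLY set, so it can
   be treated like any other read-only expression; a call to an ordinary
   function keeps TREE_SIDE_EFFECTS set regardless of its arguments.  */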
3788 
3789 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3790    size or offset that depends on a field within a record.  */
3791 
3792 bool
3793 contains_placeholder_p (const_tree exp)
3794 {
3795   enum tree_code code;
3796 
3797   if (!exp)
3798     return 0;
3799 
3800   code = TREE_CODE (exp);
3801   if (code == PLACEHOLDER_EXPR)
3802     return 1;
3803 
3804   switch (TREE_CODE_CLASS (code))
3805     {
3806     case tcc_reference:
3807       /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3808 	 position computations since they will be converted into a
3809 	 WITH_RECORD_EXPR involving the reference, which we assume
3810 	 here will be valid.  */
3811       return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3812 
3813     case tcc_exceptional:
3814       if (code == TREE_LIST)
3815 	return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3816 		|| CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3817       break;
3818 
3819     case tcc_unary:
3820     case tcc_binary:
3821     case tcc_comparison:
3822     case tcc_expression:
3823       switch (code)
3824 	{
3825 	case COMPOUND_EXPR:
3826 	  /* Ignoring the first operand isn't quite right, but works best.  */
3827 	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3828 
3829 	case COND_EXPR:
3830 	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3831 		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3832 		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3833 
3834 	case SAVE_EXPR:
3835 	  /* The save_expr function never wraps anything containing
3836 	     a PLACEHOLDER_EXPR. */
3837 	  return 0;
3838 
3839 	default:
3840 	  break;
3841 	}
3842 
3843       switch (TREE_CODE_LENGTH (code))
3844 	{
3845 	case 1:
3846 	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3847 	case 2:
3848 	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3849 		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3850 	default:
3851 	  return 0;
3852 	}
3853 
3854     case tcc_vl_exp:
3855       switch (code)
3856 	{
3857 	case CALL_EXPR:
3858 	  {
3859 	    const_tree arg;
3860 	    const_call_expr_arg_iterator iter;
3861 	    FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3862 	      if (CONTAINS_PLACEHOLDER_P (arg))
3863 		return 1;
3864 	    return 0;
3865 	  }
3866 	default:
3867 	  return 0;
3868 	}
3869 
3870     default:
3871       return 0;
3872     }
3873   return 0;
3874 }
3875 
3876 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3877    directly.  This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3878    field positions.  */
3879 
3880 static bool
3881 type_contains_placeholder_1 (const_tree type)
3882 {
3883   /* If the size contains a placeholder or the parent type (component type in
3884      the case of arrays) involves a placeholder, this type does.  */
3885   if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3886       || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3887       || (!POINTER_TYPE_P (type)
3888 	  && TREE_TYPE (type)
3889 	  && type_contains_placeholder_p (TREE_TYPE (type))))
3890     return true;
3891 
3892   /* Now do type-specific checks.  Note that the last part of the check above
3893      greatly limits what we have to do below.  */
3894   switch (TREE_CODE (type))
3895     {
3896     case VOID_TYPE:
3897     case COMPLEX_TYPE:
3898     case ENUMERAL_TYPE:
3899     case BOOLEAN_TYPE:
3900     case POINTER_TYPE:
3901     case OFFSET_TYPE:
3902     case REFERENCE_TYPE:
3903     case METHOD_TYPE:
3904     case FUNCTION_TYPE:
3905     case VECTOR_TYPE:
3906     case NULLPTR_TYPE:
3907       return false;
3908 
3909     case INTEGER_TYPE:
3910     case REAL_TYPE:
3911     case FIXED_POINT_TYPE:
3912       /* Here we just check the bounds.  */
3913       return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3914 	      || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3915 
3916     case ARRAY_TYPE:
3917       /* We have already checked the component type above, so just check
3918 	 the domain type.  Flexible array members have a null domain.  */
3919       return TYPE_DOMAIN (type) ?
3920 	type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
3921 
3922     case RECORD_TYPE:
3923     case UNION_TYPE:
3924     case QUAL_UNION_TYPE:
3925       {
3926 	tree field;
3927 
3928 	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3929 	  if (TREE_CODE (field) == FIELD_DECL
3930 	      && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3931 		  || (TREE_CODE (type) == QUAL_UNION_TYPE
3932 		      && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3933 		  || type_contains_placeholder_p (TREE_TYPE (field))))
3934 	    return true;
3935 
3936 	return false;
3937       }
3938 
3939     default:
3940       gcc_unreachable ();
3941     }
3942 }
3943 
3944 /* Wrapper around above function used to cache its result.  */
3945 
3946 bool
3947 type_contains_placeholder_p (tree type)
3948 {
3949   bool result;
3950 
3951   /* If the contains_placeholder_bits field has been initialized,
3952      then we know the answer.  */
3953   if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3954     return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3955 
3956   /* Indicate that we've seen this type node, and the answer is false.
3957      This is what we want to return if we run into recursion via fields.  */
3958   TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3959 
3960   /* Compute the real value.  */
3961   result = type_contains_placeholder_1 (type);
3962 
3963   /* Store the real value.  */
3964   TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3965 
3966   return result;
3967 }
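
/* The cache encoding used above, spelled out: the field
   TYPE_CONTAINS_PLACEHOLDER_INTERNAL holds 0 while the answer is unknown,
   1 once "false" has been computed (or provisionally assumed while
   recursing through the type's fields), and 2 once "true" has been
   computed, hence the "+ 1" when storing and "- 1" when reading.  */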
3968 
3969 /* Push tree EXP onto vector QUEUE if it is not already present.  */
3970 
3971 static void
3972 push_without_duplicates (tree exp, vec<tree> *queue)
3973 {
3974   unsigned int i;
3975   tree iter;
3976 
3977   FOR_EACH_VEC_ELT (*queue, i, iter)
3978     if (simple_cst_equal (iter, exp) == 1)
3979       break;
3980 
3981   if (!iter)
3982     queue->safe_push (exp);
3983 }
3984 
3985 /* Given a tree EXP, find all occurrences of references to fields
3986    in a PLACEHOLDER_EXPR and place them in vector REFS without
3987    duplicates.  Also record VAR_DECLs and CONST_DECLs.  Note that
3988    we assume here that EXP contains only arithmetic expressions
3989    or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3990    argument list.  */
3991 
3992 void
3993 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3994 {
3995   enum tree_code code = TREE_CODE (exp);
3996   tree inner;
3997   int i;
3998 
3999   /* We handle TREE_LIST and COMPONENT_REF separately.  */
4000   if (code == TREE_LIST)
4001     {
4002       FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
4003       FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
4004     }
4005   else if (code == COMPONENT_REF)
4006     {
4007       for (inner = TREE_OPERAND (exp, 0);
4008 	   REFERENCE_CLASS_P (inner);
4009 	   inner = TREE_OPERAND (inner, 0))
4010 	;
4011 
4012       if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
4013 	push_without_duplicates (exp, refs);
4014       else
4015 	FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
4016    }
4017   else
4018     switch (TREE_CODE_CLASS (code))
4019       {
4020       case tcc_constant:
4021 	break;
4022 
4023       case tcc_declaration:
4024 	/* Variables allocated to static storage can stay.  */
4025         if (!TREE_STATIC (exp))
4026 	  push_without_duplicates (exp, refs);
4027 	break;
4028 
4029       case tcc_expression:
4030 	/* This is the pattern built in ada/make_aligning_type.  */
4031 	if (code == ADDR_EXPR
4032 	    && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
4033 	  {
4034 	    push_without_duplicates (exp, refs);
4035 	    break;
4036 	  }
4037 
4038         /* Fall through.  */
4039 
4040       case tcc_exceptional:
4041       case tcc_unary:
4042       case tcc_binary:
4043       case tcc_comparison:
4044       case tcc_reference:
4045 	for (i = 0; i < TREE_CODE_LENGTH (code); i++)
4046 	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4047 	break;
4048 
4049       case tcc_vl_exp:
4050 	for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4051 	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4052 	break;
4053 
4054       default:
4055 	gcc_unreachable ();
4056       }
4057 }
4058 
4059 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
4060    return a tree with all occurrences of references to F in a
4061    PLACEHOLDER_EXPR replaced by R.  Also handle VAR_DECLs and
4062    CONST_DECLs.  Note that we assume here that EXP contains only
4063    arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
4064    occurring only in their argument list.  */
4065 
4066 tree
4067 substitute_in_expr (tree exp, tree f, tree r)
4068 {
4069   enum tree_code code = TREE_CODE (exp);
4070   tree op0, op1, op2, op3;
4071   tree new_tree;
4072 
4073   /* We handle TREE_LIST and COMPONENT_REF separately.  */
4074   if (code == TREE_LIST)
4075     {
4076       op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
4077       op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
4078       if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4079 	return exp;
4080 
4081       return tree_cons (TREE_PURPOSE (exp), op1, op0);
4082     }
4083   else if (code == COMPONENT_REF)
4084     {
4085       tree inner;
4086 
4087       /* If this expression is getting a value from a PLACEHOLDER_EXPR
4088 	 and it is the right field, replace it with R.  */
4089       for (inner = TREE_OPERAND (exp, 0);
4090 	   REFERENCE_CLASS_P (inner);
4091 	   inner = TREE_OPERAND (inner, 0))
4092 	;
4093 
4094       /* The field.  */
4095       op1 = TREE_OPERAND (exp, 1);
4096 
4097       if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
4098 	return r;
4099 
4100       /* If this expression hasn't been completed yet, leave it alone.  */
4101       if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
4102 	return exp;
4103 
4104       op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4105       if (op0 == TREE_OPERAND (exp, 0))
4106 	return exp;
4107 
4108       new_tree
4109 	= fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
4110    }
4111   else
4112     switch (TREE_CODE_CLASS (code))
4113       {
4114       case tcc_constant:
4115 	return exp;
4116 
4117       case tcc_declaration:
4118 	if (exp == f)
4119 	  return r;
4120 	else
4121 	  return exp;
4122 
4123       case tcc_expression:
4124 	if (exp == f)
4125 	  return r;
4126 
4127         /* Fall through.  */
4128 
4129       case tcc_exceptional:
4130       case tcc_unary:
4131       case tcc_binary:
4132       case tcc_comparison:
4133       case tcc_reference:
4134 	switch (TREE_CODE_LENGTH (code))
4135 	  {
4136 	  case 0:
4137 	    return exp;
4138 
4139 	  case 1:
4140 	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4141 	    if (op0 == TREE_OPERAND (exp, 0))
4142 	      return exp;
4143 
4144 	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4145 	    break;
4146 
4147 	  case 2:
4148 	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4149 	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4150 
4151 	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4152 	      return exp;
4153 
4154 	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4155 	    break;
4156 
4157 	  case 3:
4158 	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4159 	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4160 	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4161 
4162 	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4163 		&& op2 == TREE_OPERAND (exp, 2))
4164 	      return exp;
4165 
4166 	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4167 	    break;
4168 
4169 	  case 4:
4170 	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4171 	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4172 	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4173 	    op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
4174 
4175 	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4176 		&& op2 == TREE_OPERAND (exp, 2)
4177 		&& op3 == TREE_OPERAND (exp, 3))
4178 	      return exp;
4179 
4180 	    new_tree
4181 	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4182 	    break;
4183 
4184 	  default:
4185 	    gcc_unreachable ();
4186 	  }
4187 	break;
4188 
4189       case tcc_vl_exp:
4190 	{
4191 	  int i;
4192 
4193 	  new_tree = NULL_TREE;
4194 
4195 	  /* If we are trying to replace F with a constant or with another
4196 	     instance of one of the arguments of the call, inline back
4197 	     functions which do nothing other than compute a value from
4198 	     the arguments they are passed.  This makes it possible to
4199 	     partially or entirely fold the replacement expression.  */
4200 	  if (code == CALL_EXPR)
4201 	    {
4202 	      bool maybe_inline = false;
4203 	      if (CONSTANT_CLASS_P (r))
4204 		maybe_inline = true;
4205 	      else
4206 		for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
4207 		  if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
4208 		    {
4209 		      maybe_inline = true;
4210 		      break;
4211 		    }
4212 	      if (maybe_inline)
4213 		{
4214 		  tree t = maybe_inline_call_in_expr (exp);
4215 		  if (t)
4216 		    return SUBSTITUTE_IN_EXPR (t, f, r);
4217 		}
4218 	    }
4219 
4220 	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4221 	    {
4222 	      tree op = TREE_OPERAND (exp, i);
4223 	      tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
4224 	      if (new_op != op)
4225 		{
4226 		  if (!new_tree)
4227 		    new_tree = copy_node (exp);
4228 		  TREE_OPERAND (new_tree, i) = new_op;
4229 		}
4230 	    }
4231 
4232 	  if (new_tree)
4233 	    {
4234 	      new_tree = fold (new_tree);
4235 	      if (TREE_CODE (new_tree) == CALL_EXPR)
4236 		process_call_operands (new_tree);
4237 	    }
4238 	  else
4239 	    return exp;
4240 	}
4241 	break;
4242 
4243       default:
4244 	gcc_unreachable ();
4245       }
4246 
4247   TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4248 
4249   if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4250     TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4251 
4252   return new_tree;
4253 }
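
/* A hedged example of the intended use, with invented names: if SIZE is
   the self-referential expression "PLACEHOLDER_EXPR.len * 4" built from a
   FIELD_DECL LEN_FIELD, then

     tree n     = build_int_cst (sizetype, 10);
     tree fixed = substitute_in_expr (size, len_field, n);

   replaces the reference to LEN_FIELD with 10 and, thanks to the folding
   done above, yields the constant 40.  */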
4254 
4255 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4256    for it within OBJ, a tree that is an object or a chain of references.  */
4257 
4258 tree
4259 substitute_placeholder_in_expr (tree exp, tree obj)
4260 {
4261   enum tree_code code = TREE_CODE (exp);
4262   tree op0, op1, op2, op3;
4263   tree new_tree;
4264 
4265   /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
4266      in the chain of OBJ.  */
4267   if (code == PLACEHOLDER_EXPR)
4268     {
4269       tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
4270       tree elt;
4271 
4272       for (elt = obj; elt != 0;
4273 	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4274 		   || TREE_CODE (elt) == COND_EXPR)
4275 		  ? TREE_OPERAND (elt, 1)
4276 		  : (REFERENCE_CLASS_P (elt)
4277 		     || UNARY_CLASS_P (elt)
4278 		     || BINARY_CLASS_P (elt)
4279 		     || VL_EXP_CLASS_P (elt)
4280 		     || EXPRESSION_CLASS_P (elt))
4281 		  ? TREE_OPERAND (elt, 0) : 0))
4282 	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
4283 	  return elt;
4284 
4285       for (elt = obj; elt != 0;
4286 	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4287 		   || TREE_CODE (elt) == COND_EXPR)
4288 		  ? TREE_OPERAND (elt, 1)
4289 		  : (REFERENCE_CLASS_P (elt)
4290 		     || UNARY_CLASS_P (elt)
4291 		     || BINARY_CLASS_P (elt)
4292 		     || VL_EXP_CLASS_P (elt)
4293 		     || EXPRESSION_CLASS_P (elt))
4294 		  ? TREE_OPERAND (elt, 0) : 0))
4295 	if (POINTER_TYPE_P (TREE_TYPE (elt))
4296 	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
4297 		== need_type))
4298 	  return fold_build1 (INDIRECT_REF, need_type, elt);
4299 
4300       /* If we didn't find it, return the original PLACEHOLDER_EXPR.  If it
4301 	 survives until RTL generation, there will be an error.  */
4302       return exp;
4303     }
4304 
4305   /* TREE_LIST is special because we need to look at TREE_VALUE
4306      and TREE_CHAIN, not TREE_OPERANDS.  */
4307   else if (code == TREE_LIST)
4308     {
4309       op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
4310       op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
4311       if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4312 	return exp;
4313 
4314       return tree_cons (TREE_PURPOSE (exp), op1, op0);
4315     }
4316   else
4317     switch (TREE_CODE_CLASS (code))
4318       {
4319       case tcc_constant:
4320       case tcc_declaration:
4321 	return exp;
4322 
4323       case tcc_exceptional:
4324       case tcc_unary:
4325       case tcc_binary:
4326       case tcc_comparison:
4327       case tcc_expression:
4328       case tcc_reference:
4329       case tcc_statement:
4330 	switch (TREE_CODE_LENGTH (code))
4331 	  {
4332 	  case 0:
4333 	    return exp;
4334 
4335 	  case 1:
4336 	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4337 	    if (op0 == TREE_OPERAND (exp, 0))
4338 	      return exp;
4339 
4340 	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4341 	    break;
4342 
4343 	  case 2:
4344 	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4345 	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4346 
4347 	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4348 	      return exp;
4349 
4350 	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4351 	    break;
4352 
4353 	  case 3:
4354 	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4355 	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4356 	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4357 
4358 	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4359 		&& op2 == TREE_OPERAND (exp, 2))
4360 	      return exp;
4361 
4362 	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4363 	    break;
4364 
4365 	  case 4:
4366 	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4367 	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4368 	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4369 	    op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4370 
4371 	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4372 		&& op2 == TREE_OPERAND (exp, 2)
4373 		&& op3 == TREE_OPERAND (exp, 3))
4374 	      return exp;
4375 
4376 	    new_tree
4377 	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4378 	    break;
4379 
4380 	  default:
4381 	    gcc_unreachable ();
4382 	  }
4383 	break;
4384 
4385       case tcc_vl_exp:
4386 	{
4387 	  int i;
4388 
4389 	  new_tree = NULL_TREE;
4390 
4391 	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4392 	    {
4393 	      tree op = TREE_OPERAND (exp, i);
4394 	      tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4395 	      if (new_op != op)
4396 		{
4397 		  if (!new_tree)
4398 		    new_tree = copy_node (exp);
4399 		  TREE_OPERAND (new_tree, i) = new_op;
4400 		}
4401 	    }
4402 
4403 	  if (new_tree)
4404 	    {
4405 	      new_tree = fold (new_tree);
4406 	      if (TREE_CODE (new_tree) == CALL_EXPR)
4407 		process_call_operands (new_tree);
4408 	    }
4409 	  else
4410 	    return exp;
4411 	}
4412 	break;
4413 
4414       default:
4415 	gcc_unreachable ();
4416       }
4417 
4418   TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4419 
4420   if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4421     TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4422 
4423   return new_tree;
4424 }
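
/* A sketch of typical use, with hypothetical trees: if TYPE_SIZE (type)
   is a self-referential size containing a PLACEHOLDER_EXPR of that type,
   and OBJ is a reference denoting one particular object of the type, then

     tree size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE (type), obj);

   rewrites the placeholder into references through OBJ, giving the size
   of that particular object.  */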
4425 
4426 
4427 /* Subroutine of stabilize_reference; this is called for subtrees of
4428    references.  Any expression with side-effects must be put in a SAVE_EXPR
4429    to ensure that it is only evaluated once.
4430 
4431    We don't put SAVE_EXPR nodes around everything, because assigning very
4432    simple expressions to temporaries causes us to miss good opportunities
4433    for optimizations.  Among other things, the opportunity to fold the
4434    addition of a constant into an addressing mode often gets lost, e.g.
4435    "y[i+1] += x;".  In general, we take the approach that we should not make
4436    an assignment unless we are forced into it - i.e., that any non-side effect
4437    operator should be allowed, and that cse should take care of coalescing
4438    multiple utterances of the same expression should that prove fruitful.  */
4439 
4440 static tree
4441 stabilize_reference_1 (tree e)
4442 {
4443   tree result;
4444   enum tree_code code = TREE_CODE (e);
4445 
4446   /* We cannot ignore const expressions because the expression might be a
4447      reference to a const array whose index contains side-effects.  But we
4448      can ignore things that are actually constant or that have already been
4449      handled by this function.  */
4450 
4451   if (tree_invariant_p (e))
4452     return e;
4453 
4454   switch (TREE_CODE_CLASS (code))
4455     {
4456     case tcc_exceptional:
4457       /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
4458 	 have side-effects.  */
4459       if (code == STATEMENT_LIST)
4460 	return save_expr (e);
4461       /* FALLTHRU */
4462     case tcc_type:
4463     case tcc_declaration:
4464     case tcc_comparison:
4465     case tcc_statement:
4466     case tcc_expression:
4467     case tcc_reference:
4468     case tcc_vl_exp:
4469       /* If the expression has side-effects, then encase it in a SAVE_EXPR
4470 	 so that it will only be evaluated once.  */
4471       /* The reference (r) and comparison (<) classes could be handled as
4472 	 below, but it is generally faster to only evaluate them once.  */
4473       if (TREE_SIDE_EFFECTS (e))
4474 	return save_expr (e);
4475       return e;
4476 
4477     case tcc_constant:
4478       /* Constants need no processing.  In fact, we should never reach
4479 	 here.  */
4480       return e;
4481 
4482     case tcc_binary:
4483       /* Division is slow and tends to be compiled with jumps,
4484 	 especially the division by powers of 2 that is often
4485 	 found inside of an array reference.  So do it just once.  */
4486       if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4487 	  || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4488 	  || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4489 	  || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4490 	return save_expr (e);
4491       /* Recursively stabilize each operand.  */
4492       result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4493 			 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4494       break;
4495 
4496     case tcc_unary:
4497       /* Recursively stabilize each operand.  */
4498       result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4499       break;
4500 
4501     default:
4502       gcc_unreachable ();
4503     }
4504 
4505   TREE_TYPE (result) = TREE_TYPE (e);
4506   TREE_READONLY (result) = TREE_READONLY (e);
4507   TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4508   TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4509 
4510   return result;
4511 }
4512 
4513 /* Stabilize a reference so that we can use it any number of times
4514    without causing its operands to be evaluated more than once.
4515    Returns the stabilized reference.  This works by means of save_expr,
4516    so see the caveats in the comments about save_expr.
4517 
4518    Also allows conversion expressions whose operands are references.
4519    Any other kind of expression is returned unchanged.  */
4520 
4521 tree
4522 stabilize_reference (tree ref)
4523 {
4524   tree result;
4525   enum tree_code code = TREE_CODE (ref);
4526 
4527   switch (code)
4528     {
4529     case VAR_DECL:
4530     case PARM_DECL:
4531     case RESULT_DECL:
4532       /* No action is needed in this case.  */
4533       return ref;
4534 
4535     CASE_CONVERT:
4536     case FLOAT_EXPR:
4537     case FIX_TRUNC_EXPR:
4538       result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4539       break;
4540 
4541     case INDIRECT_REF:
4542       result = build_nt (INDIRECT_REF,
4543 			 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4544       break;
4545 
4546     case COMPONENT_REF:
4547       result = build_nt (COMPONENT_REF,
4548 			 stabilize_reference (TREE_OPERAND (ref, 0)),
4549 			 TREE_OPERAND (ref, 1), NULL_TREE);
4550       break;
4551 
4552     case BIT_FIELD_REF:
4553       result = build_nt (BIT_FIELD_REF,
4554 			 stabilize_reference (TREE_OPERAND (ref, 0)),
4555 			 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4556       REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4557       break;
4558 
4559     case ARRAY_REF:
4560       result = build_nt (ARRAY_REF,
4561 			 stabilize_reference (TREE_OPERAND (ref, 0)),
4562 			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4563 			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4564       break;
4565 
4566     case ARRAY_RANGE_REF:
4567       result = build_nt (ARRAY_RANGE_REF,
4568 			 stabilize_reference (TREE_OPERAND (ref, 0)),
4569 			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4570 			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4571       break;
4572 
4573     case COMPOUND_EXPR:
4574       /* We cannot wrap the first expression in a SAVE_EXPR, as then
4575 	 it wouldn't be ignored.  This matters when dealing with
4576 	 volatiles.  */
4577       return stabilize_reference_1 (ref);
4578 
4579       /* If arg isn't a kind of lvalue we recognize, make no change.
4580 	 Caller should recognize the error for an invalid lvalue.  */
4581     default:
4582       return ref;
4583 
4584     case ERROR_MARK:
4585       return error_mark_node;
4586     }
4587 
4588   TREE_TYPE (result) = TREE_TYPE (ref);
4589   TREE_READONLY (result) = TREE_READONLY (ref);
4590   TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4591   TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4592 
4593   return result;
4594 }
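
/* An illustrative case, with hypothetical front-end trees: to expand the
   compound assignment "a[f ()] += 1", the reference must serve as both
   the load and the store without calling f twice, so a front end can do

     tree ref = stabilize_reference (lhs);

   where LHS is the original "a[f ()]" tree; the result is an ARRAY_REF
   whose index has been wrapped in a SAVE_EXPR.  */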
4595 
4596 /* Low-level constructors for expressions.  */
4597 
4598 /* A helper function for build1 and constant folders.  Set TREE_CONSTANT,
4599    and TREE_SIDE_EFFECTS for an ADDR_EXPR.  */
4600 
4601 void
4602 recompute_tree_invariant_for_addr_expr (tree t)
4603 {
4604   tree node;
4605   bool tc = true, se = false;
4606 
4607   gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4608 
4609   /* We started out assuming this address is both invariant and constant, but
4610      does not have side effects.  Now go down any handled components and see if
4611      any of them involve offsets that are either non-constant or non-invariant.
4612      Also check for side-effects.
4613 
4614      ??? Note that this code makes no attempt to deal with the case where
4615      taking the address of something causes a copy due to misalignment.  */
4616 
4617 #define UPDATE_FLAGS(NODE)  \
4618 do { tree _node = (NODE); \
4619      if (_node && !TREE_CONSTANT (_node)) tc = false; \
4620      if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4621 
4622   for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4623        node = TREE_OPERAND (node, 0))
4624     {
4625       /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4626 	 array reference (probably made temporarily by the G++ front end),
4627 	 so ignore all the operands.  */
4628       if ((TREE_CODE (node) == ARRAY_REF
4629 	   || TREE_CODE (node) == ARRAY_RANGE_REF)
4630 	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4631 	{
4632 	  UPDATE_FLAGS (TREE_OPERAND (node, 1));
4633 	  if (TREE_OPERAND (node, 2))
4634 	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
4635 	  if (TREE_OPERAND (node, 3))
4636 	    UPDATE_FLAGS (TREE_OPERAND (node, 3));
4637 	}
4638       /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4639 	 FIELD_DECL, apparently.  The G++ front end can put something else
4640 	 there, at least temporarily.  */
4641       else if (TREE_CODE (node) == COMPONENT_REF
4642 	       && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4643 	{
4644 	  if (TREE_OPERAND (node, 2))
4645 	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
4646 	}
4647     }
4648 
4649   node = lang_hooks.expr_to_decl (node, &tc, &se);
4650 
4651   /* Now see what's inside.  If it's an INDIRECT_REF, copy our properties from
4652      the address, since &(*a)->b is a form of addition.  If it's a constant, the
4653      address is constant too.  If it's a decl, its address is constant if the
4654      decl is static.  Everything else is not constant and, furthermore,
4655      taking the address of a volatile variable is not volatile.  */
4656   if (TREE_CODE (node) == INDIRECT_REF
4657       || TREE_CODE (node) == MEM_REF)
4658     UPDATE_FLAGS (TREE_OPERAND (node, 0));
4659   else if (CONSTANT_CLASS_P (node))
4660     ;
4661   else if (DECL_P (node))
4662     tc &= (staticp (node) != NULL_TREE);
4663   else
4664     {
4665       tc = false;
4666       se |= TREE_SIDE_EFFECTS (node);
4667     }
4668 
4669 
4670   TREE_CONSTANT (t) = tc;
4671   TREE_SIDE_EFFECTS (t) = se;
4672 #undef UPDATE_FLAGS
4673 }
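
/* Two quick examples of the resulting flags, using hypothetical decls:
   for ADDR_EXPR <&static_var> the walk ends at a static decl, so
   TREE_CONSTANT stays set; for ADDR_EXPR <&local_array[i]> the
   non-constant index I clears TREE_CONSTANT, and staticp of the automatic
   array returns NULL_TREE as well.  */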
4674 
4675 /* Build an expression of code CODE, data type TYPE, and operands as
4676    specified.  Expressions and reference nodes can be created this way.
4677    Constants, decls, types and misc nodes cannot be.
4678 
4679    We define 5 non-variadic functions, from 0 to 4 arguments.  This is
4680    enough for all extant tree codes.  */
4681 
4682 tree
4683 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4684 {
4685   tree t;
4686 
4687   gcc_assert (TREE_CODE_LENGTH (code) == 0);
4688 
4689   t = make_node (code PASS_MEM_STAT);
4690   TREE_TYPE (t) = tt;
4691 
4692   return t;
4693 }
4694 
4695 tree
4696 build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4697 {
4698   int length = sizeof (struct tree_exp);
4699   tree t;
4700 
4701   record_node_allocation_statistics (code, length);
4702 
4703   gcc_assert (TREE_CODE_LENGTH (code) == 1);
4704 
4705   t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4706 
4707   memset (t, 0, sizeof (struct tree_common));
4708 
4709   TREE_SET_CODE (t, code);
4710 
4711   TREE_TYPE (t) = type;
4712   SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4713   TREE_OPERAND (t, 0) = node;
4714   if (node && !TYPE_P (node))
4715     {
4716       TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4717       TREE_READONLY (t) = TREE_READONLY (node);
4718     }
4719 
4720   if (TREE_CODE_CLASS (code) == tcc_statement)
4721     {
4722       if (code != DEBUG_BEGIN_STMT)
4723 	TREE_SIDE_EFFECTS (t) = 1;
4724     }
4725   else switch (code)
4726     {
4727     case VA_ARG_EXPR:
4728       /* All of these have side-effects, no matter what their
4729 	 operands are.  */
4730       TREE_SIDE_EFFECTS (t) = 1;
4731       TREE_READONLY (t) = 0;
4732       break;
4733 
4734     case INDIRECT_REF:
4735       /* Whether a dereference is readonly has nothing to do with whether
4736 	 its operand is readonly.  */
4737       TREE_READONLY (t) = 0;
4738       break;
4739 
4740     case ADDR_EXPR:
4741       if (node)
4742 	recompute_tree_invariant_for_addr_expr (t);
4743       break;
4744 
4745     default:
4746       if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4747 	  && node && !TYPE_P (node)
4748 	  && TREE_CONSTANT (node))
4749 	TREE_CONSTANT (t) = 1;
4750       if (TREE_CODE_CLASS (code) == tcc_reference
4751 	  && node && TREE_THIS_VOLATILE (node))
4752 	TREE_THIS_VOLATILE (t) = 1;
4753       break;
4754     }
4755 
4756   return t;
4757 }
4758 
4759 #define PROCESS_ARG(N)				\
4760   do {						\
4761     TREE_OPERAND (t, N) = arg##N;		\
4762     if (arg##N &&!TYPE_P (arg##N))		\
4763       {						\
4764         if (TREE_SIDE_EFFECTS (arg##N))		\
4765 	  side_effects = 1;			\
4766         if (!TREE_READONLY (arg##N)		\
4767 	    && !CONSTANT_CLASS_P (arg##N))	\
4768 	  (void) (read_only = 0);		\
4769         if (!TREE_CONSTANT (arg##N))		\
4770 	  (void) (constant = 0);		\
4771       }						\
4772   } while (0)
4773 
4774 tree
4775 build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4776 {
4777   bool constant, read_only, side_effects, div_by_zero;
4778   tree t;
4779 
4780   gcc_assert (TREE_CODE_LENGTH (code) == 2);
4781 
4782   if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4783       && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4784       /* When sizetype precision doesn't match that of pointers
4785          we need to be able to build explicit extensions or truncations
4786 	 of the offset argument.  */
4787       && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4788     gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4789 		&& TREE_CODE (arg1) == INTEGER_CST);
4790 
4791   if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4792     gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4793 		&& ptrofftype_p (TREE_TYPE (arg1)));
4794 
4795   t = make_node (code PASS_MEM_STAT);
4796   TREE_TYPE (t) = tt;
4797 
4798   /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4799      result based on those same flags for the arguments.  But if the
4800      arguments aren't really even `tree' expressions, we shouldn't be trying
4801      to do this.  */
4802 
4803   /* Expressions without side effects may be constant if their
4804      arguments are as well.  */
4805   constant = (TREE_CODE_CLASS (code) == tcc_comparison
4806 	      || TREE_CODE_CLASS (code) == tcc_binary);
4807   read_only = 1;
4808   side_effects = TREE_SIDE_EFFECTS (t);
4809 
4810   switch (code)
4811     {
4812     case TRUNC_DIV_EXPR:
4813     case CEIL_DIV_EXPR:
4814     case FLOOR_DIV_EXPR:
4815     case ROUND_DIV_EXPR:
4816     case EXACT_DIV_EXPR:
4817     case CEIL_MOD_EXPR:
4818     case FLOOR_MOD_EXPR:
4819     case ROUND_MOD_EXPR:
4820     case TRUNC_MOD_EXPR:
4821       div_by_zero = integer_zerop (arg1);
4822       break;
4823     default:
4824       div_by_zero = false;
4825     }
4826 
4827   PROCESS_ARG (0);
4828   PROCESS_ARG (1);
4829 
4830   TREE_SIDE_EFFECTS (t) = side_effects;
4831   if (code == MEM_REF)
4832     {
4833       if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4834 	{
4835 	  tree o = TREE_OPERAND (arg0, 0);
4836 	  TREE_READONLY (t) = TREE_READONLY (o);
4837 	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4838 	}
4839     }
4840   else
4841     {
4842       TREE_READONLY (t) = read_only;
4843       /* Don't mark X / 0 as constant.  */
4844       TREE_CONSTANT (t) = constant && !div_by_zero;
4845       TREE_THIS_VOLATILE (t)
4846 	= (TREE_CODE_CLASS (code) == tcc_reference
4847 	   && arg0 && TREE_THIS_VOLATILE (arg0));
4848     }
4849 
4850   return t;
4851 }
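
/* A minimal sketch of the common case, where I is assumed to be an
   existing integer-typed tree:

     tree one = build_int_cst (integer_type_node, 1);
     tree sum = build2 (PLUS_EXPR, integer_type_node, i, one);

   PROCESS_ARG records that ONE is constant and read-only, so SUM is
   marked TREE_CONSTANT only if I is constant too, and it inherits
   TREE_SIDE_EFFECTS from I.  */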
4852 
4853 
4854 tree
4855 build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
4856 	tree arg2 MEM_STAT_DECL)
4857 {
4858   bool constant, read_only, side_effects;
4859   tree t;
4860 
4861   gcc_assert (TREE_CODE_LENGTH (code) == 3);
4862   gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4863 
4864   t = make_node (code PASS_MEM_STAT);
4865   TREE_TYPE (t) = tt;
4866 
4867   read_only = 1;
4868 
4869   /* As a special exception, if COND_EXPR has NULL branches, we
4870      assume that it is a gimple statement and always consider
4871      it to have side effects.  */
4872   if (code == COND_EXPR
4873       && tt == void_type_node
4874       && arg1 == NULL_TREE
4875       && arg2 == NULL_TREE)
4876     side_effects = true;
4877   else
4878     side_effects = TREE_SIDE_EFFECTS (t);
4879 
4880   PROCESS_ARG (0);
4881   PROCESS_ARG (1);
4882   PROCESS_ARG (2);
4883 
4884   if (code == COND_EXPR)
4885     TREE_READONLY (t) = read_only;
4886 
4887   TREE_SIDE_EFFECTS (t) = side_effects;
4888   TREE_THIS_VOLATILE (t)
4889     = (TREE_CODE_CLASS (code) == tcc_reference
4890        && arg0 && TREE_THIS_VOLATILE (arg0));
4891 
4892   return t;
4893 }
4894 
4895 tree
4896 build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
4897 	tree arg2, tree arg3 MEM_STAT_DECL)
4898 {
4899   bool constant, read_only, side_effects;
4900   tree t;
4901 
4902   gcc_assert (TREE_CODE_LENGTH (code) == 4);
4903 
4904   t = make_node (code PASS_MEM_STAT);
4905   TREE_TYPE (t) = tt;
4906 
4907   side_effects = TREE_SIDE_EFFECTS (t);
4908 
4909   PROCESS_ARG (0);
4910   PROCESS_ARG (1);
4911   PROCESS_ARG (2);
4912   PROCESS_ARG (3);
4913 
4914   TREE_SIDE_EFFECTS (t) = side_effects;
4915   TREE_THIS_VOLATILE (t)
4916     = (TREE_CODE_CLASS (code) == tcc_reference
4917        && arg0 && TREE_THIS_VOLATILE (arg0));
4918 
4919   return t;
4920 }
4921 
4922 tree
4923 build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
4924 	tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4925 {
4926   bool constant, read_only, side_effects;
4927   tree t;
4928 
4929   gcc_assert (TREE_CODE_LENGTH (code) == 5);
4930 
4931   t = make_node (code PASS_MEM_STAT);
4932   TREE_TYPE (t) = tt;
4933 
4934   side_effects = TREE_SIDE_EFFECTS (t);
4935 
4936   PROCESS_ARG (0);
4937   PROCESS_ARG (1);
4938   PROCESS_ARG (2);
4939   PROCESS_ARG (3);
4940   PROCESS_ARG (4);
4941 
4942   TREE_SIDE_EFFECTS (t) = side_effects;
4943   if (code == TARGET_MEM_REF)
4944     {
4945       if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4946 	{
4947 	  tree o = TREE_OPERAND (arg0, 0);
4948 	  TREE_READONLY (t) = TREE_READONLY (o);
4949 	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4950 	}
4951     }
4952   else
4953     TREE_THIS_VOLATILE (t)
4954       = (TREE_CODE_CLASS (code) == tcc_reference
4955 	 && arg0 && TREE_THIS_VOLATILE (arg0));
4956 
4957   return t;
4958 }
4959 
4960 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4961    on the pointer PTR.  */
4962 
4963 tree
4964 build_simple_mem_ref_loc (location_t loc, tree ptr)
4965 {
4966   poly_int64 offset = 0;
4967   tree ptype = TREE_TYPE (ptr);
4968   tree tem;
4969   /* For convenience allow addresses that collapse to a simple base
4970      and offset.  */
4971   if (TREE_CODE (ptr) == ADDR_EXPR
4972       && (handled_component_p (TREE_OPERAND (ptr, 0))
4973 	  || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4974     {
4975       ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4976       gcc_assert (ptr);
4977       if (TREE_CODE (ptr) == MEM_REF)
4978 	{
4979 	  offset += mem_ref_offset (ptr).force_shwi ();
4980 	  ptr = TREE_OPERAND (ptr, 0);
4981 	}
4982       else
4983 	ptr = build_fold_addr_expr (ptr);
4984       gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4985     }
4986   tem = build2 (MEM_REF, TREE_TYPE (ptype),
4987 		ptr, build_int_cst (ptype, offset));
4988   SET_EXPR_LOCATION (tem, loc);
4989   return tem;
4990 }
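
/* For illustration, with PTR a hypothetical tree of type "int *":

     tree deref = build_simple_mem_ref_loc (loc, ptr);

   yields MEM_REF <ptr, 0> of type "int", the modern spelling of
   INDIRECT_REF *ptr; if PTR were "&s.f", the address would be collapsed
   and the byte offset of F would end up in the constant operand.  */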
4991 
4992 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T.  */
4993 
4994 poly_offset_int
4995 mem_ref_offset (const_tree t)
4996 {
4997   return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
4998 				SIGNED);
4999 }
5000 
5001 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
5002    offsetted by OFFSET units.  */
5003 
5004 tree
5005 build_invariant_address (tree type, tree base, poly_int64 offset)
5006 {
5007   tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
5008 			  build_fold_addr_expr (base),
5009 			  build_int_cst (ptr_type_node, offset));
5010   tree addr = build1 (ADDR_EXPR, type, ref);
5011   recompute_tree_invariant_for_addr_expr (addr);
5012   return addr;
5013 }
5014 
5015 /* Similar to the build<N> functions above, except that the TREE_TYPE is
5016    not specified and TREE_SIDE_EFFECTS is left as 0.
5017    It is permissible for arguments to be null,
5018    or even garbage if their values do not matter.  */
5019 
5020 tree
5021 build_nt (enum tree_code code, ...)
5022 {
5023   tree t;
5024   int length;
5025   int i;
5026   va_list p;
5027 
5028   gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5029 
5030   va_start (p, code);
5031 
5032   t = make_node (code);
5033   length = TREE_CODE_LENGTH (code);
5034 
5035   for (i = 0; i < length; i++)
5036     TREE_OPERAND (t, i) = va_arg (p, tree);
5037 
5038   va_end (p);
5039   return t;
5040 }
5041 
5042 /* Similar to build_nt, but for creating a CALL_EXPR object with a
5043    tree vec.  */
5044 
5045 tree
5046 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
5047 {
5048   tree ret, t;
5049   unsigned int ix;
5050 
5051   ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
5052   CALL_EXPR_FN (ret) = fn;
5053   CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
5054   FOR_EACH_VEC_SAFE_ELT (args, ix, t)
5055     CALL_EXPR_ARG (ret, ix) = t;
5056   return ret;
5057 }
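
/* A small usage sketch, where FN and ARG are hypothetical trees:

     vec<tree, va_gc> *args = NULL;
     vec_safe_push (args, arg);
     tree call = build_nt_call_vec (fn, args);

   CALL_EXPR_FN (call) is FN and CALL_EXPR_ARG (call, 0) is ARG; as with
   build_nt, no flag processing is done on the result.  */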
5058 
5059 /* Create a DECL_... node of code CODE, name NAME  (if non-null)
5060    and data type TYPE.
5061    We do NOT enter this node in any sort of symbol table.
5062 
5063    LOC is the location of the decl.
5064 
5065    layout_decl is used to set up the decl's storage layout.
5066    Other slots are initialized to 0 or null pointers.  */
5067 
5068 tree
5069 build_decl (location_t loc, enum tree_code code, tree name,
5070     		 tree type MEM_STAT_DECL)
5071 {
5072   tree t;
5073 
5074   t = make_node (code PASS_MEM_STAT);
5075   DECL_SOURCE_LOCATION (t) = loc;
5076 
5077 /*  if (type == error_mark_node)
5078     type = integer_type_node; */
5079 /* That is not done, deliberately, so that having error_mark_node
5080    as the type can suppress useless errors in the use of this variable.  */
5081 
5082   DECL_NAME (t) = name;
5083   TREE_TYPE (t) = type;
5084 
5085   if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
5086     layout_decl (t, 0);
5087 
5088   return t;
5089 }
5090 
5091 /* Builds and returns a function declaration with NAME and TYPE.  */
5092 
5093 tree
5094 build_fn_decl (const char *name, tree type)
5095 {
5096   tree id = get_identifier (name);
5097   tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
5098 
5099   DECL_EXTERNAL (decl) = 1;
5100   TREE_PUBLIC (decl) = 1;
5101   DECL_ARTIFICIAL (decl) = 1;
5102   TREE_NOTHROW (decl) = 1;
5103 
5104   return decl;
5105 }
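
/* For example, a pass needing a declaration for an external helper (the
   name below is chosen only for illustration) might do

     tree type = build_function_type_list (void_type_node,
					   integer_type_node, NULL_TREE);
     tree decl = build_fn_decl ("__example_helper", type);

   and get back a public, external, nothrow, artificial FUNCTION_DECL that
   can be used to build calls.  */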
5106 
5107 vec<tree, va_gc> *all_translation_units;
5108 
5109 /* Builds a new translation-unit decl with name NAME, queues it in the
5110    global list of translation-unit decls and returns it.   */
5111 
5112 tree
5113 build_translation_unit_decl (tree name)
5114 {
5115   tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
5116 			name, NULL_TREE);
5117   TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
5118   vec_safe_push (all_translation_units, tu);
5119   return tu;
5120 }
5121 
5122 
5123 /* BLOCK nodes are used to represent the structure of binding contours
5124    and declarations, once those contours have been exited and their contents
5125    compiled.  This information is used for outputting debugging info.  */
5126 
5127 tree
5128 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5129 {
5130   tree block = make_node (BLOCK);
5131 
5132   BLOCK_VARS (block) = vars;
5133   BLOCK_SUBBLOCKS (block) = subblocks;
5134   BLOCK_SUPERCONTEXT (block) = supercontext;
5135   BLOCK_CHAIN (block) = chain;
5136   return block;
5137 }
5138 
5139 
5140 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5141 
5142    LOC is the location to use in tree T.  */
5143 
5144 void
5145 protected_set_expr_location (tree t, location_t loc)
5146 {
5147   if (CAN_HAVE_LOCATION_P (t))
5148     SET_EXPR_LOCATION (t, loc);
5149   else if (t && TREE_CODE (t) == STATEMENT_LIST)
5150     {
5151       t = expr_single (t);
5152       if (t && CAN_HAVE_LOCATION_P (t))
5153 	SET_EXPR_LOCATION (t, loc);
5154     }
5155 }
5156 
5157 /* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
5158    UNKNOWN_LOCATION.  */
5159 
5160 void
5161 protected_set_expr_location_if_unset (tree t, location_t loc)
5162 {
5163   t = expr_single (t);
5164   if (t && !EXPR_HAS_LOCATION (t))
5165     protected_set_expr_location (t, loc);
5166 }
5167 
5168 /* Data used when collecting DECLs and TYPEs for language data removal.  */
5169 
5170 class free_lang_data_d
5171 {
5172 public:
5173   free_lang_data_d () : decls (100), types (100) {}
5174 
5175   /* Worklist to avoid excessive recursion.  */
5176   auto_vec<tree> worklist;
5177 
5178   /* Set of traversed objects.  Used to avoid duplicate visits.  */
5179   hash_set<tree> pset;
5180 
5181   /* Array of symbols to process with free_lang_data_in_decl.  */
5182   auto_vec<tree> decls;
5183 
5184   /* Array of types to process with free_lang_data_in_type.  */
5185   auto_vec<tree> types;
5186 };
5187 
5188 
5189 /* Add type or decl T to one of the lists of tree nodes that need their
5190    language data removed.  The lists are held inside FLD.  */
5191 
5192 static void
5193 add_tree_to_fld_list (tree t, class free_lang_data_d *fld)
5194 {
5195   if (DECL_P (t))
5196     fld->decls.safe_push (t);
5197   else if (TYPE_P (t))
5198     fld->types.safe_push (t);
5199   else
5200     gcc_unreachable ();
5201 }
5202 
5203 /* Push tree node T into FLD->WORKLIST.  */
5204 
5205 static inline void
5206 fld_worklist_push (tree t, class free_lang_data_d *fld)
5207 {
5208   if (t && !is_lang_specific (t) && !fld->pset.contains (t))
5209     fld->worklist.safe_push ((t));
5210 }
5211 
5212 
5213 
5214 /* Return simplified TYPE_NAME of TYPE.  */
5215 
5216 static tree
5217 fld_simplified_type_name (tree type)
5218 {
5219   if (!TYPE_NAME (type) || TREE_CODE (TYPE_NAME (type)) != TYPE_DECL)
5220     return TYPE_NAME (type);
5221   /* Drop TYPE_DECLs in TYPE_NAME in favor of the identifier in the
5222      TYPE_DECL if the type doesn't have linkage.
5223      This must match fld_  */
5224   if (type != TYPE_MAIN_VARIANT (type)
5225       || (!DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type))
5226 	  && (TREE_CODE (type) != RECORD_TYPE
5227 	      || !TYPE_BINFO (type)
5228 	      || !BINFO_VTABLE (TYPE_BINFO (type)))))
5229     return DECL_NAME (TYPE_NAME (type));
5230   return TYPE_NAME (type);
5231 }
5232 
5233 /* Do the same comparison as check_qualified_type, skipping the lang part of
5234    the type, and be more permissive about type names: we only care that the
5235    names are the same (for diagnostics) and that the ODR names are the same.
5236    If INNER_TYPE is non-NULL, be sure that TREE_TYPE matches it.  */
5237 
5238 static bool
5239 fld_type_variant_equal_p (tree t, tree v, tree inner_type)
5240 {
5241   if (TYPE_QUALS (t) != TYPE_QUALS (v)
5242       /* We want to match incomplete variants with complete types.
5243 	 In this case we need to ignore alignment.   */
5244       || ((!RECORD_OR_UNION_TYPE_P (t) || COMPLETE_TYPE_P (v))
5245 	  && (TYPE_ALIGN (t) != TYPE_ALIGN (v)
5246 	      || TYPE_USER_ALIGN (t) != TYPE_USER_ALIGN (v)))
5247       || fld_simplified_type_name (t) != fld_simplified_type_name (v)
5248       || !attribute_list_equal (TYPE_ATTRIBUTES (t),
5249 			        TYPE_ATTRIBUTES (v))
5250       || (inner_type && TREE_TYPE (v) != inner_type))
5251     return false;
5252 
5253   return true;
5254 }
5255 
5256 /* Find the variant of FIRST that matches T, creating a new one if
5257    necessary.  Set TREE_TYPE to INNER_TYPE if non-NULL.  */
5258 
5259 static tree
5260 fld_type_variant (tree first, tree t, class free_lang_data_d *fld,
5261 		  tree inner_type = NULL)
5262 {
5263   if (first == TYPE_MAIN_VARIANT (t))
5264     return t;
5265   for (tree v = first; v; v = TYPE_NEXT_VARIANT (v))
5266     if (fld_type_variant_equal_p (t, v, inner_type))
5267       return v;
5268   tree v = build_variant_type_copy (first);
5269   TYPE_READONLY (v) = TYPE_READONLY (t);
5270   TYPE_VOLATILE (v) = TYPE_VOLATILE (t);
5271   TYPE_ATOMIC (v) = TYPE_ATOMIC (t);
5272   TYPE_RESTRICT (v) = TYPE_RESTRICT (t);
5273   TYPE_ADDR_SPACE (v) = TYPE_ADDR_SPACE (t);
5274   TYPE_NAME (v) = TYPE_NAME (t);
5275   TYPE_ATTRIBUTES (v) = TYPE_ATTRIBUTES (t);
5276   TYPE_CANONICAL (v) = TYPE_CANONICAL (t);
5277   /* Variants of incomplete types should have alignment
5278      set to BITS_PER_UNIT.  Do not copy the actual alignment.  */
5279   if (!RECORD_OR_UNION_TYPE_P (v) || COMPLETE_TYPE_P (v))
5280     {
5281       SET_TYPE_ALIGN (v, TYPE_ALIGN (t));
5282       TYPE_USER_ALIGN (v) = TYPE_USER_ALIGN (t);
5283     }
5284   if (inner_type)
5285     TREE_TYPE (v) = inner_type;
5286   gcc_checking_assert (fld_type_variant_equal_p (t,v, inner_type));
5287   if (!fld->pset.add (v))
5288     add_tree_to_fld_list (v, fld);
5289   return v;
5290 }
5291 
5292 /* Map complete types to incomplete types.  */
5293 
5294 static hash_map<tree, tree> *fld_incomplete_types;
5295 
5296 /* Map types to simplified types.  */
5297 
5298 static hash_map<tree, tree> *fld_simplified_types;
5299 
5300 /* Produce a variant of T whose TREE_TYPE is T2.  If it is the main
5301    variant, use MAP to prevent duplicates.  */
5302 
5303 static tree
5304 fld_process_array_type (tree t, tree t2, hash_map<tree, tree> *map,
5305 			class free_lang_data_d *fld)
5306 {
5307   if (TREE_TYPE (t) == t2)
5308     return t;
5309 
5310   if (TYPE_MAIN_VARIANT (t) != t)
5311     {
5312       return fld_type_variant
5313 	       (fld_process_array_type (TYPE_MAIN_VARIANT (t),
5314 					TYPE_MAIN_VARIANT (t2), map, fld),
5315 		t, fld, t2);
5316     }
5317 
5318   bool existed;
5319   tree &array
5320      = map->get_or_insert (t, &existed);
5321   if (!existed)
5322     {
5323       array
5324 	= build_array_type_1 (t2, TYPE_DOMAIN (t), TYPE_TYPELESS_STORAGE (t),
5325 			      false, false);
5326       TYPE_CANONICAL (array) = TYPE_CANONICAL (t);
5327       if (!fld->pset.add (array))
5328 	add_tree_to_fld_list (array, fld);
5329     }
5330   return array;
5331 }
5332 
5333 /* Return CTX after removal of contexts that are not relevant.  */
5334 
5335 static tree
5336 fld_decl_context (tree ctx)
5337 {
5338   /* Variably modified types are needed for tree_is_indexable to decide
5339      whether the type needs to go to local or global section.
5340      This code is semi-broken but for now it is easiest to keep contexts
5341      as expected.  */
5342   if (ctx && TYPE_P (ctx)
5343       && !variably_modified_type_p (ctx, NULL_TREE))
5344      {
5345        while (ctx && TYPE_P (ctx))
5346 	 ctx = TYPE_CONTEXT (ctx);
5347      }
5348   return ctx;
5349 }
5350 
5351 /* For T being an aggregate type, try to turn it into an incomplete variant.
5352    Return T if no simplification is possible.  */
5353 
5354 static tree
5355 fld_incomplete_type_of (tree t, class free_lang_data_d *fld)
5356 {
5357   if (!t)
5358     return NULL;
5359   if (POINTER_TYPE_P (t))
5360     {
5361       tree t2 = fld_incomplete_type_of (TREE_TYPE (t), fld);
5362       if (t2 != TREE_TYPE (t))
5363 	{
5364 	  tree first;
5365 	  if (TREE_CODE (t) == POINTER_TYPE)
5366 	    first = build_pointer_type_for_mode (t2, TYPE_MODE (t),
5367 						TYPE_REF_CAN_ALIAS_ALL (t));
5368 	  else
5369 	    first = build_reference_type_for_mode (t2, TYPE_MODE (t),
5370 						TYPE_REF_CAN_ALIAS_ALL (t));
5371 	  gcc_assert (TYPE_CANONICAL (t2) != t2
5372 		      && TYPE_CANONICAL (t2) == TYPE_CANONICAL (TREE_TYPE (t)));
5373 	  if (!fld->pset.add (first))
5374 	    add_tree_to_fld_list (first, fld);
5375 	  return fld_type_variant (first, t, fld);
5376 	}
5377       return t;
5378     }
5379   if (TREE_CODE (t) == ARRAY_TYPE)
5380     return fld_process_array_type (t,
5381 				   fld_incomplete_type_of (TREE_TYPE (t), fld),
5382 				   fld_incomplete_types, fld);
5383   if ((!RECORD_OR_UNION_TYPE_P (t) && TREE_CODE (t) != ENUMERAL_TYPE)
5384       || !COMPLETE_TYPE_P (t))
5385     return t;
5386   if (TYPE_MAIN_VARIANT (t) == t)
5387     {
5388       bool existed;
5389       tree &copy
5390 	 = fld_incomplete_types->get_or_insert (t, &existed);
5391 
5392       if (!existed)
5393 	{
5394 	  copy = build_distinct_type_copy (t);
5395 
5396 	  /* It is possible that type was not seen by free_lang_data yet.  */
5397 	  if (!fld->pset.add (copy))
5398 	    add_tree_to_fld_list (copy, fld);
5399 	  TYPE_SIZE (copy) = NULL;
5400 	  TYPE_USER_ALIGN (copy) = 0;
5401 	  TYPE_SIZE_UNIT (copy) = NULL;
5402 	  TYPE_CANONICAL (copy) = TYPE_CANONICAL (t);
5403 	  TREE_ADDRESSABLE (copy) = 0;
5404 	  if (AGGREGATE_TYPE_P (t))
5405 	    {
5406 	      SET_TYPE_MODE (copy, VOIDmode);
5407 	      SET_TYPE_ALIGN (copy, BITS_PER_UNIT);
5408 	      TYPE_TYPELESS_STORAGE (copy) = 0;
5409 	      TYPE_FIELDS (copy) = NULL;
5410 	      TYPE_BINFO (copy) = NULL;
5411 	      TYPE_FINAL_P (copy) = 0;
5412 	      TYPE_EMPTY_P (copy) = 0;
5413 	    }
5414 	  else
5415 	    {
5416 	      TYPE_VALUES (copy) = NULL;
5417 	      ENUM_IS_OPAQUE (copy) = 0;
5418 	      ENUM_IS_SCOPED (copy) = 0;
5419 	    }
5420 
5421 	  /* Build a copy of the TYPE_DECL in TYPE_NAME if necessary.
5422 	     This is needed for ODR violation warnings to come out right (we
5423 	     want duplicate TYPE_DECLs whenever the type is duplicated because
5424 	     of an ODR violation).  Because lang data in the TYPE_DECL may not
5425 	     have been freed yet, rebuild it from scratch and copy relevant
5426 	     fields.  */
5427 	  TYPE_NAME (copy) = fld_simplified_type_name (copy);
5428 	  tree name = TYPE_NAME (copy);
5429 
5430 	  if (name && TREE_CODE (name) == TYPE_DECL)
5431 	    {
5432 	      gcc_checking_assert (TREE_TYPE (name) == t);
5433 	      tree name2 = build_decl (DECL_SOURCE_LOCATION (name), TYPE_DECL,
5434 				       DECL_NAME (name), copy);
5435 	      if (DECL_ASSEMBLER_NAME_SET_P (name))
5436 	        SET_DECL_ASSEMBLER_NAME (name2, DECL_ASSEMBLER_NAME (name));
5437 	      SET_DECL_ALIGN (name2, 0);
5438 	      DECL_CONTEXT (name2) = fld_decl_context
5439 					 (DECL_CONTEXT (name));
5440 	      TYPE_NAME (copy) = name2;
5441 	    }
5442 	}
5443       return copy;
5444    }
5445   return (fld_type_variant
5446 	    (fld_incomplete_type_of (TYPE_MAIN_VARIANT (t), fld), t, fld));
5447 }
5448 
5449 /* Simplify type T for scenarios where we do not need complete pointer
5450    types.  */
5451 
5452 static tree
5453 fld_simplified_type (tree t, class free_lang_data_d *fld)
5454 {
5455   if (!t)
5456     return t;
5457   if (POINTER_TYPE_P (t))
5458     return fld_incomplete_type_of (t, fld);
5459   /* FIXME: This triggers a verification error, see PR88140.  */
5460   if (TREE_CODE (t) == ARRAY_TYPE && 0)
5461     return fld_process_array_type (t, fld_simplified_type (TREE_TYPE (t), fld),
5462 				   fld_simplified_types, fld);
5463   return t;
5464 }
5465 
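/* Illustration (sketch): given a hypothetical "struct S { int i; } *p;",
   simplifying the type of P produces a POINTER_TYPE whose pointee is an
   incomplete copy of S (TYPE_SIZE and TYPE_FIELDS cleared) that still
   shares TYPE_CANONICAL with the original S:

     tree t = fld_simplified_type (TREE_TYPE (p_decl), fld);
     gcc_checking_assert (POINTER_TYPE_P (t)
                          && !COMPLETE_TYPE_P (TREE_TYPE (t)));

   P_DECL and FLD are assumed names for the VAR_DECL and the pass state.  */
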
5466 /* Reset the expression *EXPR_P, a size or position.
5467 
5468    ??? We could reset all non-constant sizes or positions.  But it's cheap
5469    enough to not do so and refrain from adding workarounds to dwarf2out.c.
5470 
5471    We need to reset self-referential sizes or positions because they cannot
5472    be gimplified and thus can contain a CALL_EXPR after the gimplification
5473    is finished, which will run afoul of LTO streaming.  And they need to be
5474    reset to something essentially dummy but not constant, so as to preserve
5475    the properties of the object they are attached to.  */
5476 
5477 static inline void
5478 free_lang_data_in_one_sizepos (tree *expr_p)
5479 {
5480   tree expr = *expr_p;
5481   if (CONTAINS_PLACEHOLDER_P (expr))
5482     *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5483 }
5484 
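/* Illustration (sketch): a self-referential size or position, such as the
   size of an Ada discriminated-record component, refers to the containing
   object through a PLACEHOLDER_EXPR and cannot be gimplified.  The helper
   above replaces the whole computation while keeping its type:

     free_lang_data_in_one_sizepos (&DECL_SIZE (field));

   after which DECL_SIZE (FIELD), if it contained a placeholder, is a bare
   PLACEHOLDER_EXPR of the original type; FIELD is an assumed FIELD_DECL.  */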
5485 
5486 /* Reset all the fields in a binfo node BINFO.  We only keep
5487    BINFO_VTABLE, which is used by gimple_fold_obj_type_ref.  */
5488 
5489 static void
5490 free_lang_data_in_binfo (tree binfo)
5491 {
5492   unsigned i;
5493   tree t;
5494 
5495   gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5496 
5497   BINFO_VIRTUALS (binfo) = NULL_TREE;
5498   BINFO_BASE_ACCESSES (binfo) = NULL;
5499   BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5500   BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5501   BINFO_VPTR_FIELD (binfo) = NULL_TREE;
5502   TREE_PUBLIC (binfo) = 0;
5503 
5504   FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5505     free_lang_data_in_binfo (t);
5506 }
5507 
5508 
5509 /* Reset all language specific information still present in TYPE.  */
5510 
5511 static void
5512 free_lang_data_in_type (tree type, class free_lang_data_d *fld)
5513 {
5514   gcc_assert (TYPE_P (type));
5515 
5516   /* Give the FE a chance to remove its own data first.  */
5517   lang_hooks.free_lang_data (type);
5518 
5519   TREE_LANG_FLAG_0 (type) = 0;
5520   TREE_LANG_FLAG_1 (type) = 0;
5521   TREE_LANG_FLAG_2 (type) = 0;
5522   TREE_LANG_FLAG_3 (type) = 0;
5523   TREE_LANG_FLAG_4 (type) = 0;
5524   TREE_LANG_FLAG_5 (type) = 0;
5525   TREE_LANG_FLAG_6 (type) = 0;
5526 
5527   TYPE_NEEDS_CONSTRUCTING (type) = 0;
5528 
5529   /* Purge non-marked variants from the variants chain, so that they
5530      don't reappear in the IL after free_lang_data.  */
5531   while (TYPE_NEXT_VARIANT (type)
5532 	 && !fld->pset.contains (TYPE_NEXT_VARIANT (type)))
5533     {
5534       tree t = TYPE_NEXT_VARIANT (type);
5535       TYPE_NEXT_VARIANT (type) = TYPE_NEXT_VARIANT (t);
5536       /* Turn the removed types into distinct types.  */
5537       TYPE_MAIN_VARIANT (t) = t;
5538       TYPE_NEXT_VARIANT (t) = NULL_TREE;
5539     }
5540 
5541   if (TREE_CODE (type) == FUNCTION_TYPE)
5542     {
5543       TREE_TYPE (type) = fld_simplified_type (TREE_TYPE (type), fld);
5544       /* Remove the const and volatile qualifiers from arguments.  The
5545 	 C++ front end removes them, but the C front end does not,
5546 	 leading to false ODR violation errors when merging two
5547 	 instances of the same function signature compiled by
5548 	 different front ends.  */
5549       for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5550 	{
5551           TREE_VALUE (p) = fld_simplified_type (TREE_VALUE (p), fld);
5552 	  tree arg_type = TREE_VALUE (p);
5553 
5554 	  if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5555 	    {
5556 	      int quals = TYPE_QUALS (arg_type)
5557 			  & ~TYPE_QUAL_CONST
5558 			  & ~TYPE_QUAL_VOLATILE;
5559 	      TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5560 	      if (!fld->pset.add (TREE_VALUE (p)))
5561 		free_lang_data_in_type (TREE_VALUE (p), fld);
5562 	    }
5563 	  /* C++ FE uses TREE_PURPOSE to store initial values.  */
5564 	  TREE_PURPOSE (p) = NULL;
5565 	}
5566     }
5567   else if (TREE_CODE (type) == METHOD_TYPE)
5568     {
5569       TREE_TYPE (type) = fld_simplified_type (TREE_TYPE (type), fld);
5570       for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5571 	{
5572 	  /* C++ FE uses TREE_PURPOSE to store initial values.  */
5573 	  TREE_VALUE (p) = fld_simplified_type (TREE_VALUE (p), fld);
5574 	  TREE_PURPOSE (p) = NULL;
5575 	}
5576     }
5577   else if (RECORD_OR_UNION_TYPE_P (type))
5578     {
5579       /* Remove members that are not FIELD_DECLs from the field list
5580 	 of an aggregate.  These occur in C++.  */
5581       for (tree *prev = &TYPE_FIELDS (type), member; (member = *prev);)
5582 	if (TREE_CODE (member) == FIELD_DECL)
5583 	  prev = &DECL_CHAIN (member);
5584 	else
5585 	  *prev = DECL_CHAIN (member);
5586 
5587       TYPE_VFIELD (type) = NULL_TREE;
5588 
5589       if (TYPE_BINFO (type))
5590 	{
5591 	  free_lang_data_in_binfo (TYPE_BINFO (type));
5592 	  /* We need to preserve the link to bases and the virtual table for all
5593 	     polymorphic types to keep the devirtualization machinery working.  */
5594 	  if (!BINFO_VTABLE (TYPE_BINFO (type)))
5595 	    TYPE_BINFO (type) = NULL;
5596 	}
5597     }
5598   else if (INTEGRAL_TYPE_P (type)
5599 	   || SCALAR_FLOAT_TYPE_P (type)
5600 	   || FIXED_POINT_TYPE_P (type))
5601     {
5602       if (TREE_CODE (type) == ENUMERAL_TYPE)
5603 	{
5604 	  ENUM_IS_OPAQUE (type) = 0;
5605 	  ENUM_IS_SCOPED (type) = 0;
5606 	  /* Type values are used only for C++ ODR checking.  Drop them
5607 	     for all type variants and non-ODR types.
5608 	     For ODR types the data is freed in free_odr_warning_data.  */
5609 	  if (!TYPE_VALUES (type))
5610 	    ;
5611 	  else if (TYPE_MAIN_VARIANT (type) != type
5612 		   || !type_with_linkage_p (type)
5613 		   || type_in_anonymous_namespace_p (type))
5614 	    TYPE_VALUES (type) = NULL;
5615 	  else
5616 	    register_odr_enum (type);
5617 	}
5618       free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5619       free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5620     }
5621 
5622   TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5623 
5624   free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5625   free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5626 
5627   if (TYPE_CONTEXT (type)
5628       && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5629     {
5630       tree ctx = TYPE_CONTEXT (type);
5631       do
5632 	{
5633 	  ctx = BLOCK_SUPERCONTEXT (ctx);
5634 	}
5635       while (ctx && TREE_CODE (ctx) == BLOCK);
5636       TYPE_CONTEXT (type) = ctx;
5637     }
5638 
5639   TYPE_STUB_DECL (type) = NULL;
5640   TYPE_NAME (type) = fld_simplified_type_name (type);
5641 }
5642 
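/* Illustration (sketch): for a C declaration "void f (const int);" the
   FUNCTION_TYPE keeps the const qualifier on the argument, which the C++
   front end would already have dropped.  Running the function above on it
   removes the mismatch:

     free_lang_data_in_type (TREE_TYPE (f_decl), fld);
     tree arg = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (f_decl)));

   and TYPE_READONLY (ARG) is now 0; F_DECL and FLD are assumed names for
   the FUNCTION_DECL and the pass state.  */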
5643 
5644 /* Return true if DECL may need an assembler name to be set.  */
5645 
5646 static inline bool
5647 need_assembler_name_p (tree decl)
5648 {
5649   /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5650      Rule merging.  This makes type_odr_p return true on those types during
5651      LTO, and by comparing the mangled names we can tell which types are
5652      intended to be equivalent across compilation units.
5653 
5654      We do not store names of type_in_anonymous_namespace_p types.
5655 
5656      Record, union and enumeration types have linkage that allows us
5657      to check type_in_anonymous_namespace_p.  We do not mangle compound types
5658      that can always be compared structurally.
5659 
5660      Similarly for builtin types, we compare properties of their main variant.
5661      A special case is integer types, where mangling does distinguish
5662      between char/signed char/unsigned char etc.  Storing names for these
5663      lets e.g. -fno-signed-char/-fsigned-char mismatches be handled well.
5664      See cp/mangle.c:write_builtin_type for details.  */
5665 
5666   if (TREE_CODE (decl) == TYPE_DECL)
5667     {
5668       if (DECL_NAME (decl)
5669 	  && decl == TYPE_NAME (TREE_TYPE (decl))
5670 	  && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
5671 	  && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5672 	  && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
5673 	       && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
5674 	      || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
5675 	  && (type_with_linkage_p (TREE_TYPE (decl))
5676 	      || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5677 	  && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5678 	return !DECL_ASSEMBLER_NAME_SET_P (decl);
5679       return false;
5680     }
5681   /* Only FUNCTION_DECLs and VAR_DECLs are considered.  */
5682   if (!VAR_OR_FUNCTION_DECL_P (decl))
5683     return false;
5684 
5685   /* If DECL already has its assembler name set, it does not need a
5686      new one.  */
5687   if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5688       || DECL_ASSEMBLER_NAME_SET_P (decl))
5689     return false;
5690 
5691   /* Abstract decls do not need an assembler name.  */
5692   if (DECL_ABSTRACT_P (decl))
5693     return false;
5694 
5695   /* For VAR_DECLs, only static, public and external symbols need an
5696      assembler name.  */
5697   if (VAR_P (decl)
5698       && !TREE_STATIC (decl)
5699       && !TREE_PUBLIC (decl)
5700       && !DECL_EXTERNAL (decl))
5701     return false;
5702 
5703   if (TREE_CODE (decl) == FUNCTION_DECL)
5704     {
5705       /* Do not set assembler name on builtins.  Allow RTL expansion to
5706 	 decide whether to expand inline or via a regular call.  */
5707       if (fndecl_built_in_p (decl)
5708 	  && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5709 	return false;
5710 
5711       /* Functions represented in the callgraph need an assembler name.  */
5712       if (cgraph_node::get (decl) != NULL)
5713 	return true;
5714 
5715       /* Unused and not public functions don't need an assembler name.  */
5716       if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5717 	return false;
5718     }
5719 
5720   return true;
5721 }
5722 
5723 
5724 /* Reset all language specific information still present in symbol
5725    DECL.  */
5726 
5727 static void
5728 free_lang_data_in_decl (tree decl, class free_lang_data_d *fld)
5729 {
5730   gcc_assert (DECL_P (decl));
5731 
5732   /* Give the FE a chance to remove its own data first.  */
5733   lang_hooks.free_lang_data (decl);
5734 
5735   TREE_LANG_FLAG_0 (decl) = 0;
5736   TREE_LANG_FLAG_1 (decl) = 0;
5737   TREE_LANG_FLAG_2 (decl) = 0;
5738   TREE_LANG_FLAG_3 (decl) = 0;
5739   TREE_LANG_FLAG_4 (decl) = 0;
5740   TREE_LANG_FLAG_5 (decl) = 0;
5741   TREE_LANG_FLAG_6 (decl) = 0;
5742 
5743   free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5744   free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5745   if (TREE_CODE (decl) == FIELD_DECL)
5746     {
5747       DECL_FCONTEXT (decl) = NULL;
5748       free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5749       if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5750 	DECL_QUALIFIER (decl) = NULL_TREE;
5751     }
5752 
5753   if (TREE_CODE (decl) == FUNCTION_DECL
5754     {
5755       struct cgraph_node *node;
5756       /* Frontends do not set TREE_ADDRESSABLE on public variables even though
5757 	 the address may be taken in another unit, so this flag has no practical
5758 	 use for the middle end.
5759 
5760 	 It would make more sense if frontends set TREE_ADDRESSABLE to 0 only
5761 	 for public objects that indeed cannot be addressed, but that is not
5762 	 the case.  Set the flag to true so we do not get merge failures for
5763 	 e.g. virtual tables between units that take their address and
5764 	 units that don't.  */
5765       if (TREE_PUBLIC (decl))
5766 	TREE_ADDRESSABLE (decl) = true;
5767       TREE_TYPE (decl) = fld_simplified_type (TREE_TYPE (decl), fld);
5768       if (!(node = cgraph_node::get (decl))
5769 	  || (!node->definition && !node->clones))
5770 	{
5771 	  if (node)
5772 	    node->release_body ();
5773 	  else
5774 	    {
5775 	      release_function_body (decl);
5776 	      DECL_ARGUMENTS (decl) = NULL;
5777 	      DECL_RESULT (decl) = NULL;
5778 	      DECL_INITIAL (decl) = error_mark_node;
5779 	    }
5780 	}
5781       if (gimple_has_body_p (decl) || (node && node->thunk.thunk_p))
5782 	{
5783 	  tree t;
5784 
5785 	  /* If DECL has a gimple body, then the context for its
5786 	     arguments must be DECL.  Otherwise, it doesn't really
5787 	     matter, as we will not be emitting any code for DECL.  In
5788 	     general, there may be other instances of DECL created by
5789 	     the front end and since PARM_DECLs are generally shared,
5790 	     their DECL_CONTEXT changes as the replicas of DECL are
5791 	     created.  The only time where DECL_CONTEXT is important
5792 	     is for the FUNCTION_DECLs that have a gimple body (since
5793 	     the PARM_DECL will be used in the function's body).  */
5794 	  for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5795 	    DECL_CONTEXT (t) = decl;
5796 	  if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5797 	    DECL_FUNCTION_SPECIFIC_TARGET (decl)
5798 	      = target_option_default_node;
5799 	  if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5800 	    DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5801 	      = optimization_default_node;
5802 	}
5803 
5804       /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5805 	 At this point, it is not needed anymore.  */
5806       DECL_SAVED_TREE (decl) = NULL_TREE;
5807 
5808       /* Clear the abstract origin if it refers to a method.
5809          Otherwise dwarf2out.c will ICE as we splice functions out of
5810          TYPE_FIELDS and thus the origin will not be output
5811          correctly.  */
5812       if (DECL_ABSTRACT_ORIGIN (decl)
5813 	  && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5814 	  && RECORD_OR_UNION_TYPE_P
5815 	       (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5816 	DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5817 
5818       DECL_VINDEX (decl) = NULL_TREE;
5819     }
5820   else if (VAR_P (decl))
5821     {
5822       /* See comment above why we set the flag for functions.  */
5823       if (TREE_PUBLIC (decl))
5824 	TREE_ADDRESSABLE (decl) = true;
5825       if ((DECL_EXTERNAL (decl)
5826 	   && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5827 	  || (decl_function_context (decl) && !TREE_STATIC (decl)))
5828 	DECL_INITIAL (decl) = NULL_TREE;
5829     }
5830   else if (TREE_CODE (decl) == TYPE_DECL)
5831     {
5832       DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5833       DECL_VISIBILITY_SPECIFIED (decl) = 0;
5834       TREE_PUBLIC (decl) = 0;
5835       TREE_PRIVATE (decl) = 0;
5836       DECL_ARTIFICIAL (decl) = 0;
5837       TYPE_DECL_SUPPRESS_DEBUG (decl) = 0;
5838       DECL_INITIAL (decl) = NULL_TREE;
5839       DECL_ORIGINAL_TYPE (decl) = NULL_TREE;
5840       DECL_MODE (decl) = VOIDmode;
5841       SET_DECL_ALIGN (decl, 0);
5842       /* TREE_TYPE is cleared at WPA time in free_odr_warning_data.  */
5843     }
5844   else if (TREE_CODE (decl) == FIELD_DECL)
5845     {
5846       TREE_TYPE (decl) = fld_simplified_type (TREE_TYPE (decl), fld);
5847       DECL_INITIAL (decl) = NULL_TREE;
5848     }
5849   else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5850            && DECL_INITIAL (decl)
5851            && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5852     {
5853       /* Strip builtins from the translation-unit BLOCK.  We still have targets
5854 	 without builtin_decl_explicit support, and builtins are shared
5855 	 nodes, so we can't use TREE_CHAIN in multiple lists.  */
5856       tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5857       while (*nextp)
5858 	{
5859 	  tree var = *nextp;
5860 	  if (TREE_CODE (var) == FUNCTION_DECL
5861 	      && fndecl_built_in_p (var))
5862 	    *nextp = TREE_CHAIN (var);
5863 	  else
5864 	    nextp = &TREE_CHAIN (var);
5865         }
5866     }
5867   /* We need to keep field decls associated with their trees.  Otherwise tree
5868      merging may merge some fields and keep others disjoint, which in turn will
5869      not do well with TREE_CHAIN pointers linking them.
5870 
5871      Also do not drop containing types for virtual methods and tables because
5872      these are needed by devirtualization.
5873      C++ destructors are special because C++ frontends sometimes produce a
5874      virtual destructor as an alias of a non-virtual destructor.  In the
5875      devirtualization code we always walk through aliases and we need the
5876      context to be preserved too.  See PR89335.  */
5877   if (TREE_CODE (decl) != FIELD_DECL
5878       && ((TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL)
5879           || (!DECL_VIRTUAL_P (decl)
5880 	      && (TREE_CODE (decl) != FUNCTION_DECL
5881 		  || !DECL_CXX_DESTRUCTOR_P (decl)))))
5882     DECL_CONTEXT (decl) = fld_decl_context (DECL_CONTEXT (decl));
5883 }
5884 
5885 
5886 /* Operand callback helper for free_lang_data_in_node.  *TP is the
5887    subtree operand being considered.  */
5888 
5889 static tree
5890 find_decls_types_r (tree *tp, int *ws, void *data)
5891 {
5892   tree t = *tp;
5893   class free_lang_data_d *fld = (class free_lang_data_d *) data;
5894 
5895   if (TREE_CODE (t) == TREE_LIST)
5896     return NULL_TREE;
5897 
5898   /* Language specific nodes will be removed, so there is no need
5899      to gather anything under them.  */
5900   if (is_lang_specific (t))
5901     {
5902       *ws = 0;
5903       return NULL_TREE;
5904     }
5905 
5906   if (DECL_P (t))
5907     {
5908       /* Note that walk_tree does not traverse every possible field in
5909 	 decls, so we have to do our own traversals here.  */
5910       add_tree_to_fld_list (t, fld);
5911 
5912       fld_worklist_push (DECL_NAME (t), fld);
5913       fld_worklist_push (DECL_CONTEXT (t), fld);
5914       fld_worklist_push (DECL_SIZE (t), fld);
5915       fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5916 
5917       /* We are going to remove everything under DECL_INITIAL for
5918 	 TYPE_DECLs.  No point walking them.  */
5919       if (TREE_CODE (t) != TYPE_DECL)
5920 	fld_worklist_push (DECL_INITIAL (t), fld);
5921 
5922       fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5923       fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5924 
5925       if (TREE_CODE (t) == FUNCTION_DECL)
5926 	{
5927 	  fld_worklist_push (DECL_ARGUMENTS (t), fld);
5928 	  fld_worklist_push (DECL_RESULT (t), fld);
5929 	}
5930       else if (TREE_CODE (t) == FIELD_DECL)
5931 	{
5932 	  fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5933 	  fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5934 	  fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5935 	  fld_worklist_push (DECL_FCONTEXT (t), fld);
5936 	}
5937 
5938       if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL)
5939 	  && DECL_HAS_VALUE_EXPR_P (t))
5940 	fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5941 
5942       if (TREE_CODE (t) != FIELD_DECL
5943 	  && TREE_CODE (t) != TYPE_DECL)
5944 	fld_worklist_push (TREE_CHAIN (t), fld);
5945       *ws = 0;
5946     }
5947   else if (TYPE_P (t))
5948     {
5949       /* Note that walk_tree does not traverse every possible field in
5950 	 types, so we have to do our own traversals here.  */
5951       add_tree_to_fld_list (t, fld);
5952 
5953       if (!RECORD_OR_UNION_TYPE_P (t))
5954 	fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5955       fld_worklist_push (TYPE_SIZE (t), fld);
5956       fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5957       fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5958       fld_worklist_push (TYPE_POINTER_TO (t), fld);
5959       fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5960       fld_worklist_push (TYPE_NAME (t), fld);
5961       /* While we do not stream TYPE_POINTER_TO and TYPE_REFERENCE_TO
5962 	 lists, we may look types up in these lists and use them while
5963 	 optimizing the function body.  Thus we need to free lang data
5964 	 in them.  */
5965       if (TREE_CODE (t) == POINTER_TYPE)
5966         fld_worklist_push (TYPE_NEXT_PTR_TO (t), fld);
5967       if (TREE_CODE (t) == REFERENCE_TYPE)
5968         fld_worklist_push (TYPE_NEXT_REF_TO (t), fld);
5969       if (!POINTER_TYPE_P (t))
5970 	fld_worklist_push (TYPE_MIN_VALUE_RAW (t), fld);
5971       /* TYPE_MAX_VALUE_RAW is TYPE_BINFO for record types.  */
5972       if (!RECORD_OR_UNION_TYPE_P (t))
5973 	fld_worklist_push (TYPE_MAX_VALUE_RAW (t), fld);
5974       fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5975       /* Do not walk TYPE_NEXT_VARIANT.  We do not stream it and thus
5976          do not want to reach unused variants this way.  */
5977       if (TYPE_CONTEXT (t))
5978 	{
5979 	  tree ctx = TYPE_CONTEXT (t);
5980 	  /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5981 	     So push that instead.  */
5982 	  while (ctx && TREE_CODE (ctx) == BLOCK)
5983 	    ctx = BLOCK_SUPERCONTEXT (ctx);
5984 	  fld_worklist_push (ctx, fld);
5985 	}
5986       fld_worklist_push (TYPE_CANONICAL (t), fld);
5987 
5988       if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5989 	{
5990 	  unsigned i;
5991 	  tree tem;
5992 	  FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5993 	    fld_worklist_push (TREE_TYPE (tem), fld);
5994 	  fld_worklist_push (BINFO_TYPE (TYPE_BINFO (t)), fld);
5995 	  fld_worklist_push (BINFO_VTABLE (TYPE_BINFO (t)), fld);
5996 	}
5997       if (RECORD_OR_UNION_TYPE_P (t))
5998 	{
5999 	  tree tem;
6000 	  /* Push all TYPE_FIELDS - interesting and non-interesting entries
6001 	     can be interleaved.  */
6002 	  tem = TYPE_FIELDS (t);
6003 	  while (tem)
6004 	    {
6005 	      if (TREE_CODE (tem) == FIELD_DECL)
6006 		fld_worklist_push (tem, fld);
6007 	      tem = TREE_CHAIN (tem);
6008 	    }
6009 	}
6010       if (FUNC_OR_METHOD_TYPE_P (t))
6011 	fld_worklist_push (TYPE_METHOD_BASETYPE (t), fld);
6012 
6013       fld_worklist_push (TYPE_STUB_DECL (t), fld);
6014       *ws = 0;
6015     }
6016   else if (TREE_CODE (t) == BLOCK)
6017     {
6018       for (tree *tem = &BLOCK_VARS (t); *tem; )
6019 	{
6020 	  if (TREE_CODE (*tem) != LABEL_DECL
6021 	      && (TREE_CODE (*tem) != VAR_DECL
6022 		  || !auto_var_in_fn_p (*tem, DECL_CONTEXT (*tem))))
6023 	    {
6024 	      gcc_assert (TREE_CODE (*tem) != RESULT_DECL
6025 			  && TREE_CODE (*tem) != PARM_DECL);
6026 	      *tem = TREE_CHAIN (*tem);
6027 	    }
6028 	  else
6029 	    {
6030 	      fld_worklist_push (*tem, fld);
6031 	      tem = &TREE_CHAIN (*tem);
6032 	    }
6033 	}
6034       for (tree tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
6035 	fld_worklist_push (tem, fld);
6036       fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
6037     }
6038 
6039   if (TREE_CODE (t) != IDENTIFIER_NODE
6040       && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
6041     fld_worklist_push (TREE_TYPE (t), fld);
6042 
6043   return NULL_TREE;
6044 }
6045 
6046 
6047 /* Find decls and types in T.  */
6048 
6049 static void
6050 find_decls_types (tree t, class free_lang_data_d *fld)
6051 {
6052   while (1)
6053     {
6054       if (!fld->pset.contains (t))
6055 	walk_tree (&t, find_decls_types_r, fld, &fld->pset);
6056       if (fld->worklist.is_empty ())
6057 	break;
6058       t = fld->worklist.pop ();
6059     }
6060 }
6061 
6062 /* Translate all the types in LIST to the corresponding runtime
6063    types.  */
6064 
6065 static tree
6066 get_eh_types_for_runtime (tree list)
6067 {
6068   tree head, prev;
6069 
6070   if (list == NULL_TREE)
6071     return NULL_TREE;
6072 
6073   head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
6074   prev = head;
6075   list = TREE_CHAIN (list);
6076   while (list)
6077     {
6078       tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
6079       TREE_CHAIN (prev) = n;
6080       prev = TREE_CHAIN (prev);
6081       list = TREE_CHAIN (list);
6082     }
6083 
6084   return head;
6085 }
6086 
6087 
6088 /* Find decls and types referenced in EH region R and store them in
6089    FLD->DECLS and FLD->TYPES.  */
6090 
6091 static void
6092 find_decls_types_in_eh_region (eh_region r, class free_lang_data_d *fld)
6093 {
6094   switch (r->type)
6095     {
6096     case ERT_CLEANUP:
6097       break;
6098 
6099     case ERT_TRY:
6100       {
6101 	eh_catch c;
6102 
6103 	/* The types referenced in each catch must first be changed to the
6104 	   EH types used at runtime.  This removes references to FE types
6105 	   in the region.  */
6106 	for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
6107 	  {
6108 	    c->type_list = get_eh_types_for_runtime (c->type_list);
6109 	    walk_tree (&c->type_list, find_decls_types_r, fld, &fld->pset);
6110 	  }
6111       }
6112       break;
6113 
6114     case ERT_ALLOWED_EXCEPTIONS:
6115       r->u.allowed.type_list
6116 	= get_eh_types_for_runtime (r->u.allowed.type_list);
6117       walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, &fld->pset);
6118       break;
6119 
6120     case ERT_MUST_NOT_THROW:
6121       walk_tree (&r->u.must_not_throw.failure_decl,
6122 		 find_decls_types_r, fld, &fld->pset);
6123       break;
6124     }
6125 }
6126 
6127 
6128 /* Find decls and types referenced in cgraph node N and store them in
6129    FLD->DECLS and FLD->TYPES.  Unlike pass_referenced_vars, this will
6130    look for *every* kind of DECL and TYPE node reachable from N,
6131    including those embedded inside types and decls (e.g., TYPE_DECLs,
6132    NAMESPACE_DECLs, etc.).  */
6133 
6134 static void
6135 find_decls_types_in_node (struct cgraph_node *n, class free_lang_data_d *fld)
6136 {
6137   basic_block bb;
6138   struct function *fn;
6139   unsigned ix;
6140   tree t;
6141 
6142   find_decls_types (n->decl, fld);
6143 
6144   if (!gimple_has_body_p (n->decl))
6145     return;
6146 
6147   gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
6148 
6149   fn = DECL_STRUCT_FUNCTION (n->decl);
6150 
6151   /* Traverse locals. */
6152   FOR_EACH_LOCAL_DECL (fn, ix, t)
6153     find_decls_types (t, fld);
6154 
6155   /* Traverse EH regions in FN.  */
6156   {
6157     eh_region r;
6158     FOR_ALL_EH_REGION_FN (r, fn)
6159       find_decls_types_in_eh_region (r, fld);
6160   }
6161 
6162   /* Traverse every statement in FN.  */
6163   FOR_EACH_BB_FN (bb, fn)
6164     {
6165       gphi_iterator psi;
6166       gimple_stmt_iterator si;
6167       unsigned i;
6168 
6169       for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
6170 	{
6171 	  gphi *phi = psi.phi ();
6172 
6173 	  for (i = 0; i < gimple_phi_num_args (phi); i++)
6174 	    {
6175 	      tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
6176 	      find_decls_types (*arg_p, fld);
6177 	    }
6178 	}
6179 
6180       for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6181 	{
6182 	  gimple *stmt = gsi_stmt (si);
6183 
6184 	  if (is_gimple_call (stmt))
6185 	    find_decls_types (gimple_call_fntype (stmt), fld);
6186 
6187 	  for (i = 0; i < gimple_num_ops (stmt); i++)
6188 	    {
6189 	      tree arg = gimple_op (stmt, i);
6190 	      find_decls_types (arg, fld);
6191 	      /* find_decls_types doesn't walk TREE_PURPOSE of TREE_LISTs,
6192 		 which we need for asm stmts.  */
6193 	      if (arg
6194 		  && TREE_CODE (arg) == TREE_LIST
6195 		  && TREE_PURPOSE (arg)
6196 		  && gimple_code (stmt) == GIMPLE_ASM)
6197 		find_decls_types (TREE_PURPOSE (arg), fld);
6198 	    }
6199 	}
6200     }
6201 }
6202 
6203 
6204 /* Find decls and types referenced in varpool node N and store them in
6205    FLD->DECLS and FLD->TYPES.  Unlike pass_referenced_vars, this will
6206    look for *every* kind of DECL and TYPE node reachable from N,
6207    including those embedded inside types and decls (e.g., TYPE_DECLs,
6208    NAMESPACE_DECLs, etc.).  */
6209 
6210 static void
6211 find_decls_types_in_var (varpool_node *v, class free_lang_data_d *fld)
6212 {
6213   find_decls_types (v->decl, fld);
6214 }
6215 
6216 /* If T needs an assembler name, have one created for it.  */
6217 
6218 void
6219 assign_assembler_name_if_needed (tree t)
6220 {
6221   if (need_assembler_name_p (t))
6222     {
6223       /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
6224 	 diagnostics that use input_location to show locus
6225 	 information.  The problem here is that, at this point,
6226 	 input_location is generally anchored to the end of the file
6227 	 (since the parser is long gone), so we don't have a good
6228 	 position to pin it to.
6229 
6230 	 To alleviate this problem, this uses the location of T's
6231 	 declaration.  Examples of this are
6232 	 testsuite/g++.dg/template/cond2.C and
6233 	 testsuite/g++.dg/template/pr35240.C.  */
6234       location_t saved_location = input_location;
6235       input_location = DECL_SOURCE_LOCATION (t);
6236 
6237       decl_assembler_name (t);
6238 
6239       input_location = saved_location;
6240     }
6241 }
6242 
6243 
6244 /* Free language specific information for every operand and expression
6245    in every node of the call graph.  This process operates in three stages:
6246 
6247    1- Every callgraph node and varpool node is traversed looking for
6248       decls and types embedded in them.  This is a more exhaustive
6249       search than that done by find_referenced_vars, because it will
6250       also collect individual fields, decls embedded in types, etc.
6251 
6252    2- All the decls found are sent to free_lang_data_in_decl.
6253 
6254    3- All the types found are sent to free_lang_data_in_type.
6255 
6256    The ordering between decls and types is important because
6257    free_lang_data_in_decl sets assembler names, which includes
6258    mangling.  So types cannot be freed up until assembler names have
6259    been set up.  */
6260 
6261 static void
6262 free_lang_data_in_cgraph (class free_lang_data_d *fld)
6263 {
6264   struct cgraph_node *n;
6265   varpool_node *v;
6266   tree t;
6267   unsigned i;
6268   alias_pair *p;
6269 
6270   /* Find decls and types in the body of every function in the callgraph.  */
6271   FOR_EACH_FUNCTION (n)
6272     find_decls_types_in_node (n, fld);
6273 
6274   FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
6275     find_decls_types (p->decl, fld);
6276 
6277   /* Find decls and types in every varpool symbol.  */
6278   FOR_EACH_VARIABLE (v)
6279     find_decls_types_in_var (v, fld);
6280 
6281   /* Set the assembler name on every decl found.  We need to do this
6282      now because free_lang_data_in_decl will invalidate data needed
6283      for mangling.  This breaks mangling on interdependent decls.  */
6284   FOR_EACH_VEC_ELT (fld->decls, i, t)
6285     assign_assembler_name_if_needed (t);
6286 
6287   /* Traverse every decl found freeing its language data.  */
6288   FOR_EACH_VEC_ELT (fld->decls, i, t)
6289     free_lang_data_in_decl (t, fld);
6290 
6291   /* Traverse every type found freeing its language data.  */
6292   FOR_EACH_VEC_ELT (fld->types, i, t)
6293     free_lang_data_in_type (t, fld);
6294 }
6295 
6296 
6297 /* Free resources that are used by the FE but are not needed once it is done.  */
6298 
6299 static unsigned
6300 free_lang_data (void)
6301 {
6302   unsigned i;
6303   class free_lang_data_d fld;
6304 
6305   /* If we are the LTO frontend we have freed lang-specific data already.  */
6306   if (in_lto_p
6307       || (!flag_generate_lto && !flag_generate_offload))
6308     {
6309       /* Rebuild type inheritance graph even when not doing LTO to get
6310 	 consistent profile data.  */
6311       rebuild_type_inheritance_graph ();
6312       return 0;
6313     }
6314 
6315   fld_incomplete_types = new hash_map<tree, tree>;
6316   fld_simplified_types = new hash_map<tree, tree>;
6317 
6318   /* Provide a dummy TRANSLATION_UNIT_DECL if the FE failed to provide one.  */
6319   if (vec_safe_is_empty (all_translation_units))
6320     build_translation_unit_decl (NULL_TREE);
6321 
6322   /* Allocate and assign alias sets to the standard integer types
6323      while the slots are still set up the way the frontends generated them.  */
6324   for (i = 0; i < itk_none; ++i)
6325     if (integer_types[i])
6326       TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
6327 
6328   /* Traverse the IL resetting language specific information for
6329      operands, expressions, etc.  */
6330   free_lang_data_in_cgraph (&fld);
6331 
6332   /* Create gimple variants for common types.  */
6333   for (unsigned i = 0;
6334        i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
6335        ++i)
6336     builtin_structptr_types[i].node = builtin_structptr_types[i].base;
6337 
6338   /* Reset some langhooks.  Do not reset types_compatible_p, it may
6339      still be used indirectly via the get_alias_set langhook.  */
6340   lang_hooks.dwarf_name = lhd_dwarf_name;
6341   lang_hooks.decl_printable_name = gimple_decl_printable_name;
6342   lang_hooks.gimplify_expr = lhd_gimplify_expr;
6343   lang_hooks.overwrite_decl_assembler_name = lhd_overwrite_decl_assembler_name;
6344   lang_hooks.print_xnode = lhd_print_tree_nothing;
6345   lang_hooks.print_decl = lhd_print_tree_nothing;
6346   lang_hooks.print_type = lhd_print_tree_nothing;
6347   lang_hooks.print_identifier = lhd_print_tree_nothing;
6348 
6349   lang_hooks.tree_inlining.var_mod_type_p = hook_bool_tree_tree_false;
6350 
6351   if (flag_checking)
6352     {
6353       int i;
6354       tree t;
6355 
6356       FOR_EACH_VEC_ELT (fld.types, i, t)
6357 	verify_type (t);
6358     }
6359 
6360   /* We do not want the default decl_assembler_name implementation;
6361      rather, once everything is fixed we want a wrapper around it
6362      asserting that all non-local symbols already got their assembler
6363      name, and that only produces assembler names for local symbols.  Or,
6364      better, make sure we never call decl_assembler_name on local symbols
6365      and devise a separate, middle-end-private scheme for it.  */
6366 
6367   /* Reset diagnostic machinery.  */
6368   tree_diagnostics_defaults (global_dc);
6369 
6370   rebuild_type_inheritance_graph ();
6371 
6372   delete fld_incomplete_types;
6373   delete fld_simplified_types;
6374 
6375   return 0;
6376 }
6377 
6378 
6379 namespace {
6380 
6381 const pass_data pass_data_ipa_free_lang_data =
6382 {
6383   SIMPLE_IPA_PASS, /* type */
6384   "*free_lang_data", /* name */
6385   OPTGROUP_NONE, /* optinfo_flags */
6386   TV_IPA_FREE_LANG_DATA, /* tv_id */
6387   0, /* properties_required */
6388   0, /* properties_provided */
6389   0, /* properties_destroyed */
6390   0, /* todo_flags_start */
6391   0, /* todo_flags_finish */
6392 };
6393 
6394 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
6395 {
6396 public:
6397   pass_ipa_free_lang_data (gcc::context *ctxt)
6398     : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
6399   {}
6400 
6401   /* opt_pass methods: */
6402   virtual unsigned int execute (function *) { return free_lang_data (); }
6403 
6404 }; // class pass_ipa_free_lang_data
6405 
6406 } // anon namespace
6407 
6408 simple_ipa_opt_pass *
6409 make_pass_ipa_free_lang_data (gcc::context *ctxt)
6410 {
6411   return new pass_ipa_free_lang_data (ctxt);
6412 }
6413 
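/* The pass above is not scheduled here; it is instantiated from passes.def,
   where (modulo exact placement in this GCC version) it runs as one of the
   first small IPA passes:

     INSERT_PASSES_AFTER (all_small_ipa_passes)
     NEXT_PASS (pass_ipa_free_lang_data);
 */
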
6414 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6415    of the various TYPE_QUAL values.  */
6416 
6417 static void
6418 set_type_quals (tree type, int type_quals)
6419 {
6420   TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6421   TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6422   TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6423   TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6424   TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6425 }
6426 
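/* Illustration (sketch): TYPE_QUALS values combine the TYPE_QUAL_* bits
   with an encoded address space, so

     set_type_quals (t, TYPE_QUAL_CONST | ENCODE_QUAL_ADDR_SPACE (1));

   leaves TYPE_READONLY (t) set and TYPE_ADDR_SPACE (t) equal to 1, where T
   is assumed to be a freshly built variant type.  */
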
6427 /* Returns true iff CAND and BASE have equivalent language-specific
6428    qualifiers.  */
6429 
6430 bool
6431 check_lang_type (const_tree cand, const_tree base)
6432 {
6433   if (lang_hooks.types.type_hash_eq == NULL)
6434     return true;
6435   /* type_hash_eq currently only applies to these types.  */
6436   if (TREE_CODE (cand) != FUNCTION_TYPE
6437       && TREE_CODE (cand) != METHOD_TYPE)
6438     return true;
6439   return lang_hooks.types.type_hash_eq (cand, base);
6440 }
6441 
6442 /* This function checks to see if TYPE matches the size of one of the
6443    built-in atomic types, and returns that core atomic type.  */
6444 
6445 static tree
6446 find_atomic_core_type (const_tree type)
6447 {
6448   tree base_atomic_type;
6449 
6450   /* Only handle complete types.  */
6451   if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
6452     return NULL_TREE;
6453 
6454   switch (tree_to_uhwi (TYPE_SIZE (type)))
6455     {
6456     case 8:
6457       base_atomic_type = atomicQI_type_node;
6458       break;
6459 
6460     case 16:
6461       base_atomic_type = atomicHI_type_node;
6462       break;
6463 
6464     case 32:
6465       base_atomic_type = atomicSI_type_node;
6466       break;
6467 
6468     case 64:
6469       base_atomic_type = atomicDI_type_node;
6470       break;
6471 
6472     case 128:
6473       base_atomic_type = atomicTI_type_node;
6474       break;
6475 
6476     default:
6477       base_atomic_type = NULL_TREE;
6478     }
6479 
6480   return base_atomic_type;
6481 }
6482 
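/* Illustration (sketch): on a target where int is 32 bits wide,

     find_atomic_core_type (integer_type_node)

   returns atomicSI_type_node, which build_qualified_type below uses to
   raise the alignment of an _Atomic variant to that of the atomic type.  */
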
6483 /* Returns true iff unqualified CAND and BASE are equivalent.  */
6484 
6485 bool
6486 check_base_type (const_tree cand, const_tree base)
6487 {
6488   if (TYPE_NAME (cand) != TYPE_NAME (base)
6489       /* Apparently this is needed for Objective-C.  */
6490       || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
6491       || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
6492 			        TYPE_ATTRIBUTES (base)))
6493     return false;
6494   /* Check alignment.  */
6495   if (TYPE_ALIGN (cand) == TYPE_ALIGN (base))
6496     return true;
6497   /* Atomic types increase minimal alignment.  We must do so as well
6498      or we get duplicated canonical types.  See PR88686.  */
6499   if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
6500     {
6501       /* See if this object can map to a basic atomic type.  */
6502       tree atomic_type = find_atomic_core_type (cand);
6503       if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
6504        return true;
6505     }
6506   return false;
6507 }
6508 
6509 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS.  */
6510 
6511 bool
6512 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6513 {
6514   return (TYPE_QUALS (cand) == type_quals
6515 	  && check_base_type (cand, base)
6516 	  && check_lang_type (cand, base));
6517 }
6518 
6519 /* Returns true iff CAND is equivalent to BASE with ALIGN.  */
6520 
6521 static bool
6522 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6523 {
6524   return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6525 	  && TYPE_NAME (cand) == TYPE_NAME (base)
6526 	  /* Apparently this is needed for Objective-C.  */
6527 	  && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6528 	  /* Check alignment.  */
6529 	  && TYPE_ALIGN (cand) == align
6530 	  && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6531 				   TYPE_ATTRIBUTES (base))
6532 	  && check_lang_type (cand, base));
6533 }
6534 
6535 /* Return a version of the TYPE, qualified as indicated by the
6536    TYPE_QUALS, if one exists.  If no qualified version exists yet,
6537    return NULL_TREE.  */
6538 
6539 tree
6540 get_qualified_type (tree type, int type_quals)
6541 {
6542   if (TYPE_QUALS (type) == type_quals)
6543     return type;
6544 
6545   tree mv = TYPE_MAIN_VARIANT (type);
6546   if (check_qualified_type (mv, type, type_quals))
6547     return mv;
6548 
6549   /* Search the chain of variants to see if there is already one there just
6550      like the one we need to have.  If so, use that existing one.  We must
6551      preserve the TYPE_NAME, since there is code that depends on this.  */
6552   for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
6553     if (check_qualified_type (*tp, type, type_quals))
6554       {
6555 	/* Put the found variant at the head of the variant list so
6556 	   frequently searched variants get found faster.  The C++ FE
6557 	   benefits greatly from this.  */
6558 	tree t = *tp;
6559 	*tp = TYPE_NEXT_VARIANT (t);
6560 	TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
6561 	TYPE_NEXT_VARIANT (mv) = t;
6562 	return t;
6563       }
6564 
6565   return NULL_TREE;
6566 }
6567 
6568 /* Like get_qualified_type, but creates the type if it does not
6569    exist.  This function never returns NULL_TREE.  */
6570 
6571 tree
6572 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
6573 {
6574   tree t;
6575 
6576   /* See if we already have the appropriate qualified variant.  */
6577   t = get_qualified_type (type, type_quals);
6578 
6579   /* If not, build it.  */
6580   if (!t)
6581     {
6582       t = build_variant_type_copy (type PASS_MEM_STAT);
6583       set_type_quals (t, type_quals);
6584 
6585       if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
6586 	{
6587 	  /* See if this object can map to a basic atomic type.  */
6588 	  tree atomic_type = find_atomic_core_type (type);
6589 	  if (atomic_type)
6590 	    {
6591 	      /* Ensure the alignment of this type is compatible with
6592 		 the required alignment of the atomic type.  */
6593 	      if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6594 		SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
6595 	    }
6596 	}
6597 
6598       if (TYPE_STRUCTURAL_EQUALITY_P (type))
6599 	/* Propagate structural equality. */
6600 	SET_TYPE_STRUCTURAL_EQUALITY (t);
6601       else if (TYPE_CANONICAL (type) != type)
6602 	/* Build the underlying canonical type, since it is different
6603 	   from TYPE. */
6604 	{
6605 	  tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6606 	  TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6607 	}
6608       else
6609 	/* T is its own canonical type. */
6610 	TYPE_CANONICAL (t) = t;
6611 
6612     }
6613 
6614   return t;
6615 }
6616 
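/* Illustration (sketch): requesting a "const volatile char" variant

     tree cv_char
       = build_qualified_type (char_type_node,
                               TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);

   first consults get_qualified_type; only if no such variant exists yet is
   a new one copied, after which TYPE_READONLY (cv_char) and
   TYPE_VOLATILE (cv_char) are both set.  */
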
6617 /* Create a variant of type T with alignment ALIGN.  */
6618 
6619 tree
6620 build_aligned_type (tree type, unsigned int align)
6621 {
6622   tree t;
6623 
6624   if (TYPE_PACKED (type)
6625       || TYPE_ALIGN (type) == align)
6626     return type;
6627 
6628   for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6629     if (check_aligned_type (t, type, align))
6630       return t;
6631 
6632   t = build_variant_type_copy (type);
6633   SET_TYPE_ALIGN (t, align);
6634   TYPE_USER_ALIGN (t) = 1;
6635 
6636   return t;
6637 }
6638 
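/* Illustration (sketch): ALIGN is given in bits, so a 16-byte-aligned
   variant of int can be obtained with

     tree t = build_aligned_type (integer_type_node, 128);
     gcc_checking_assert (TYPE_ALIGN (t) == 128 && TYPE_USER_ALIGN (t));

   while packed types and types that already have the requested alignment
   are returned unchanged.  */
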
6639 /* Create a new distinct copy of TYPE.  The new type is made its own
6640    MAIN_VARIANT. If TYPE requires structural equality checks, the
6641    resulting type requires structural equality checks; otherwise, its
6642    TYPE_CANONICAL points to itself. */
6643 
6644 tree
6645 build_distinct_type_copy (tree type MEM_STAT_DECL)
6646 {
6647   tree t = copy_node (type PASS_MEM_STAT);
6648 
6649   TYPE_POINTER_TO (t) = 0;
6650   TYPE_REFERENCE_TO (t) = 0;
6651 
6652   /* Set the canonical type either to a new equivalence class, or
6653      propagate the need for structural equality checks. */
6654   if (TYPE_STRUCTURAL_EQUALITY_P (type))
6655     SET_TYPE_STRUCTURAL_EQUALITY (t);
6656   else
6657     TYPE_CANONICAL (t) = t;
6658 
6659   /* Make it its own variant.  */
6660   TYPE_MAIN_VARIANT (t) = t;
6661   TYPE_NEXT_VARIANT (t) = 0;
6662 
6663   /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6664      whose TREE_TYPE is not t.  This can also happen in the Ada
6665      frontend when using subtypes.  */
6666 
6667   return t;
6668 }
6669 
6670 /* Create a new variant of TYPE, equivalent but distinct.  This is so
6671    the caller can modify it. TYPE_CANONICAL for the return type will
6672    be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6673    are considered equal by the language itself (or that both types
6674    require structural equality checks). */
6675 
6676 tree
6677 build_variant_type_copy (tree type MEM_STAT_DECL)
6678 {
6679   tree t, m = TYPE_MAIN_VARIANT (type);
6680 
6681   t = build_distinct_type_copy (type PASS_MEM_STAT);
6682 
6683   /* Since we're building a variant, assume that it is a non-semantic
6684      variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6685   TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6686   /* Type variants have no alias set defined.  */
6687   TYPE_ALIAS_SET (t) = -1;
6688 
6689   /* Add the new type to the chain of variants of TYPE.  */
6690   TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6691   TYPE_NEXT_VARIANT (m) = t;
6692   TYPE_MAIN_VARIANT (t) = m;
6693 
6694   return t;
6695 }
6696 
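/* Illustration (sketch) of the difference between the two copies above:

     tree d = build_distinct_type_copy (integer_type_node);
     tree v = build_variant_type_copy (integer_type_node);

   D is its own main variant and its own canonical type, i.e. a brand new
   type; V stays on the variant list of integer_type_node and shares its
   TYPE_CANONICAL, i.e. the language still treats it as the same type.  */
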
6697 /* Return true if the from trees in both tree maps are equal.  */
6698 
6699 int
6700 tree_map_base_eq (const void *va, const void *vb)
6701 {
6702   const struct tree_map_base  *const a = (const struct tree_map_base *) va,
6703     *const b = (const struct tree_map_base *) vb;
6704   return (a->from == b->from);
6705 }
6706 
6707 /* Hash a from tree in a tree_map_base.  */
6708 
6709 unsigned int
6710 tree_map_base_hash (const void *item)
6711 {
6712   return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6713 }
6714 
6715 /* Return true if this tree map structure is marked for garbage collection
6716    purposes.  We simply return true if the from tree is marked, so that this
6717    structure goes away when the from tree goes away.  */
6718 
6719 int
6720 tree_map_base_marked_p (const void *p)
6721 {
6722   return ggc_marked_p (((const struct tree_map_base *) p)->from);
6723 }
6724 
6725 /* Hash a from tree in a tree_map.  */
6726 
6727 unsigned int
6728 tree_map_hash (const void *item)
6729 {
6730   return (((const struct tree_map *) item)->hash);
6731 }
6732 
6733 /* Hash a from tree in a tree_decl_map.  */
6734 
6735 unsigned int
6736 tree_decl_map_hash (const void *item)
6737 {
6738   return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6739 }
6740 
6741 /* Return the initialization priority for DECL.  */
6742 
6743 priority_type
6744 decl_init_priority_lookup (tree decl)
6745 {
6746   symtab_node *snode = symtab_node::get (decl);
6747 
6748   if (!snode)
6749     return DEFAULT_INIT_PRIORITY;
6750   return
6751     snode->get_init_priority ();
6752 }
6753 
6754 /* Return the finalization priority for DECL.  */
6755 
6756 priority_type
6757 decl_fini_priority_lookup (tree decl)
6758 {
6759   cgraph_node *node = cgraph_node::get (decl);
6760 
6761   if (!node)
6762     return DEFAULT_INIT_PRIORITY;
6763   return
6764     node->get_fini_priority ();
6765 }
6766 
6767 /* Set the initialization priority for DECL to PRIORITY.  */
6768 
6769 void
6770 decl_init_priority_insert (tree decl, priority_type priority)
6771 {
6772   struct symtab_node *snode;
6773 
6774   if (priority == DEFAULT_INIT_PRIORITY)
6775     {
6776       snode = symtab_node::get (decl);
6777       if (!snode)
6778 	return;
6779     }
6780   else if (VAR_P (decl))
6781     snode = varpool_node::get_create (decl);
6782   else
6783     snode = cgraph_node::get_create (decl);
6784   snode->set_init_priority (priority);
6785 }
6786 
6787 /* Set the finalization priority for DECL to PRIORITY.  */
6788 
6789 void
6790 decl_fini_priority_insert (tree decl, priority_type priority)
6791 {
6792   struct cgraph_node *node;
6793 
6794   if (priority == DEFAULT_INIT_PRIORITY)
6795     {
6796       node = cgraph_node::get (decl);
6797       if (!node)
6798 	return;
6799     }
6800   else
6801     node = cgraph_node::get_create (decl);
6802   node->set_fini_priority (priority);
6803 }
6804 
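/* Illustration (sketch): for a hypothetical
   "__attribute__((constructor (200))) void f (void);" the priority can be
   recorded with

     decl_init_priority_insert (f_decl, 200);

   and decl_init_priority_lookup (f_decl) later reads it back through the
   symbol table node, while decls without an explicit priority simply report
   DEFAULT_INIT_PRIORITY; F_DECL is an assumed name for the FUNCTION_DECL.  */
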
6805 /* Print out the statistics for the DECL_DEBUG_EXPR hash table.  */
6806 
6807 static void
6808 print_debug_expr_statistics (void)
6809 {
6810   fprintf (stderr, "DECL_DEBUG_EXPR  hash: size %ld, %ld elements, %f collisions\n",
6811 	   (long) debug_expr_for_decl->size (),
6812 	   (long) debug_expr_for_decl->elements (),
6813 	   debug_expr_for_decl->collisions ());
6814 }
6815 
6816 /* Print out the statistics for the DECL_VALUE_EXPR hash table.  */
6817 
6818 static void
6819 print_value_expr_statistics (void)
6820 {
6821   fprintf (stderr, "DECL_VALUE_EXPR  hash: size %ld, %ld elements, %f collisions\n",
6822 	   (long) value_expr_for_decl->size (),
6823 	   (long) value_expr_for_decl->elements (),
6824 	   value_expr_for_decl->collisions ());
6825 }
6826 
6827 /* Lookup a debug expression for FROM, and return it if we find one.  */
6828 
6829 tree
6830 decl_debug_expr_lookup (tree from)
6831 {
6832   struct tree_decl_map *h, in;
6833   in.base.from = from;
6834 
6835   h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6836   if (h)
6837     return h->to;
6838   return NULL_TREE;
6839 }
6840 
6841 /* Insert a mapping FROM->TO in the debug expression hashtable.  */
6842 
6843 void
6844 decl_debug_expr_insert (tree from, tree to)
6845 {
6846   struct tree_decl_map *h;
6847 
6848   h = ggc_alloc<tree_decl_map> ();
6849   h->base.from = from;
6850   h->to = to;
6851   *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6852 }
6853 
6854 /* Lookup a value expression for FROM, and return it if we find one.  */
6855 
6856 tree
6857 decl_value_expr_lookup (tree from)
6858 {
6859   struct tree_decl_map *h, in;
6860   in.base.from = from;
6861 
6862   h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6863   if (h)
6864     return h->to;
6865   return NULL_TREE;
6866 }
6867 
6868 /* Insert a mapping FROM->TO in the value expression hashtable.  */
6869 
6870 void
6871 decl_value_expr_insert (tree from, tree to)
6872 {
6873   struct tree_decl_map *h;
6874 
6875   h = ggc_alloc<tree_decl_map> ();
6876   h->base.from = from;
6877   h->to = to;
6878   *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6879 }
6880 
6881 /* Lookup a vector of debug arguments for FROM, and return it if we
6882    find one.  */
6883 
6884 vec<tree, va_gc> **
6885 decl_debug_args_lookup (tree from)
6886 {
6887   struct tree_vec_map *h, in;
6888 
6889   if (!DECL_HAS_DEBUG_ARGS_P (from))
6890     return NULL;
6891   gcc_checking_assert (debug_args_for_decl != NULL);
6892   in.base.from = from;
6893   h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6894   if (h)
6895     return &h->to;
6896   return NULL;
6897 }
6898 
6899 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6900    arguments hashtable.  */
6901 
6902 vec<tree, va_gc> **
6903 decl_debug_args_insert (tree from)
6904 {
6905   struct tree_vec_map *h;
6906   tree_vec_map **loc;
6907 
6908   if (DECL_HAS_DEBUG_ARGS_P (from))
6909     return decl_debug_args_lookup (from);
6910   if (debug_args_for_decl == NULL)
6911     debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6912   h = ggc_alloc<tree_vec_map> ();
6913   h->base.from = from;
6914   h->to = NULL;
6915   loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6916   *loc = h;
6917   DECL_HAS_DEBUG_ARGS_P (from) = 1;
6918   return &h->to;
6919 }
6920 
6921 /* Hashing of types so that we don't make duplicates.
6922    The entry point is `type_hash_canon'.  */
6923 
6924 /* Generate the default hash code for TYPE.  This is designed for
6925    speed, rather than maximum entropy.  */
6926 
6927 hashval_t
6928 type_hash_canon_hash (tree type)
6929 {
6930   inchash::hash hstate;
6931 
6932   hstate.add_int (TREE_CODE (type));
6933 
6934   if (TREE_TYPE (type))
6935     hstate.add_object (TYPE_HASH (TREE_TYPE (type)));
6936 
6937   for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
6938     /* Just the identifier is adequate to distinguish.  */
6939     hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));
6940 
6941   switch (TREE_CODE (type))
6942     {
6943     case METHOD_TYPE:
6944       hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
6945       /* FALLTHROUGH. */
6946     case FUNCTION_TYPE:
6947       for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6948 	if (TREE_VALUE (t) != error_mark_node)
6949 	  hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
6950       break;
6951 
6952     case OFFSET_TYPE:
6953       hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
6954       break;
6955 
6956     case ARRAY_TYPE:
6957       {
6958 	if (TYPE_DOMAIN (type))
6959 	  hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
6960 	if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
6961 	  {
6962 	    unsigned typeless = TYPE_TYPELESS_STORAGE (type);
6963 	    hstate.add_object (typeless);
6964 	  }
6965       }
6966       break;
6967 
6968     case INTEGER_TYPE:
6969       {
6970 	tree t = TYPE_MAX_VALUE (type);
6971 	if (!t)
6972 	  t = TYPE_MIN_VALUE (type);
6973 	for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
6974 	  hstate.add_object (TREE_INT_CST_ELT (t, i));
6975 	break;
6976       }
6977 
6978     case REAL_TYPE:
6979     case FIXED_POINT_TYPE:
6980       {
6981 	unsigned prec = TYPE_PRECISION (type);
6982 	hstate.add_object (prec);
6983 	break;
6984       }
6985 
6986     case VECTOR_TYPE:
6987       hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
6988       break;
6989 
6990     default:
6991       break;
6992     }
6993 
6994   return hstate.end ();
6995 }
6996 
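/* Illustration (sketch): because function types are hash-consed through
   type_hash_canon using the hash above and type_cache_hasher::equal below,
   building the same signature twice yields a single node:

     tree a = build_function_type_list (integer_type_node,
                                        integer_type_node, NULL_TREE);
     tree b = build_function_type_list (integer_type_node,
                                        integer_type_node, NULL_TREE);

   and A == B holds.  */
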
6997 /* These are the Hashtable callback functions.  */
6998 
6999 /* Returns true iff the types are equivalent.  */
7000 
7001 bool
7002 type_cache_hasher::equal (type_hash *a, type_hash *b)
7003 {
7004   /* First test the things that are the same for all types.  */
7005   if (a->hash != b->hash
7006       || TREE_CODE (a->type) != TREE_CODE (b->type)
7007       || TREE_TYPE (a->type) != TREE_TYPE (b->type)
7008       || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
7009 				 TYPE_ATTRIBUTES (b->type))
7010       || (TREE_CODE (a->type) != COMPLEX_TYPE
7011           && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
7012     return 0;
7013 
7014   /* Be careful about comparing arrays before and after the element type
7015      has been completed; don't compare TYPE_ALIGN unless both types are
7016      complete.  */
7017   if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
7018       && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
7019 	  || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
7020     return 0;
7021 
7022   switch (TREE_CODE (a->type))
7023     {
7024     case VOID_TYPE:
7025     case COMPLEX_TYPE:
7026     case POINTER_TYPE:
7027     case REFERENCE_TYPE:
7028     case NULLPTR_TYPE:
7029       return 1;
7030 
7031     case VECTOR_TYPE:
7032       return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
7033 		       TYPE_VECTOR_SUBPARTS (b->type));
7034 
7035     case ENUMERAL_TYPE:
7036       if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
7037 	  && !(TYPE_VALUES (a->type)
7038 	       && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
7039 	       && TYPE_VALUES (b->type)
7040 	       && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
7041 	       && type_list_equal (TYPE_VALUES (a->type),
7042 				   TYPE_VALUES (b->type))))
7043 	return 0;
7044 
7045       /* fall through */
7046 
7047     case INTEGER_TYPE:
7048     case REAL_TYPE:
7049     case BOOLEAN_TYPE:
7050       if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
7051 	return false;
7052       return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
7053 	       || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
7054 				      TYPE_MAX_VALUE (b->type)))
7055 	      && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
7056 		  || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
7057 					 TYPE_MIN_VALUE (b->type))));
7058 
7059     case FIXED_POINT_TYPE:
7060       return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
7061 
7062     case OFFSET_TYPE:
7063       return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
7064 
7065     case METHOD_TYPE:
7066       if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
7067 	  && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7068 	      || (TYPE_ARG_TYPES (a->type)
7069 		  && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7070 		  && TYPE_ARG_TYPES (b->type)
7071 		  && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7072 		  && type_list_equal (TYPE_ARG_TYPES (a->type),
7073 				      TYPE_ARG_TYPES (b->type)))))
7074         break;
7075       return 0;
7076     case ARRAY_TYPE:
7077       /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
7078 	 where the flag should be inherited from the element type
7079 	 and can change after ARRAY_TYPEs are created; on non-aggregates
7080 	 compare it and hash it, scalars will never have that flag set
7081 	 and we need to differentiate between arrays created by different
7082 	 front-ends or middle-end created arrays.  */
7083       return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
7084 	      && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
7085 		  || (TYPE_TYPELESS_STORAGE (a->type)
7086 		      == TYPE_TYPELESS_STORAGE (b->type))));
7087 
7088     case RECORD_TYPE:
7089     case UNION_TYPE:
7090     case QUAL_UNION_TYPE:
7091       return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
7092 	      || (TYPE_FIELDS (a->type)
7093 		  && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
7094 		  && TYPE_FIELDS (b->type)
7095 		  && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
7096 		  && type_list_equal (TYPE_FIELDS (a->type),
7097 				      TYPE_FIELDS (b->type))));
7098 
7099     case FUNCTION_TYPE:
7100       if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7101 	  || (TYPE_ARG_TYPES (a->type)
7102 	      && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7103 	      && TYPE_ARG_TYPES (b->type)
7104 	      && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7105 	      && type_list_equal (TYPE_ARG_TYPES (a->type),
7106 				  TYPE_ARG_TYPES (b->type))))
7107 	break;
7108       return 0;
7109 
7110     default:
7111       return 0;
7112     }
7113 
7114   if (lang_hooks.types.type_hash_eq != NULL)
7115     return lang_hooks.types.type_hash_eq (a->type, b->type);
7116 
7117   return 1;
7118 }
7119 
7120 /* Given TYPE, and HASHCODE its hash code, return the canonical
7121    object for an identical type if one already exists.
7122    Otherwise, return TYPE, and record it as the canonical object.
7123 
7124    To use this function, first create a type of the sort you want.
7125    Then compute its hash code from the fields of the type that
7126    make it different from other similar types.
7127    Then call this function and use the value.  */
7128 
7129 tree
7130 type_hash_canon (unsigned int hashcode, tree type)
7131 {
7132   type_hash in;
7133   type_hash **loc;
7134 
7135   /* The hash table only contains main variants, so ensure that's what we're
7136      being passed.  */
7137   gcc_assert (TYPE_MAIN_VARIANT (type) == type);
7138 
7139   /* The TYPE_ALIGN field of a type is set by layout_type(), so we
7140      must call that routine before comparing TYPE_ALIGNs.  */
7141   layout_type (type);
7142 
7143   in.hash = hashcode;
7144   in.type = type;
7145 
7146   loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
7147   if (*loc)
7148     {
7149       tree t1 = ((type_hash *) *loc)->type;
7150       gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
7151 		  && t1 != type);
7152       if (TYPE_UID (type) + 1 == next_type_uid)
7153 	--next_type_uid;
7154       /* Also free the min/max values and the cache for integer
7155 	 types.  This can't be done in free_node, as LTO frees
7156 	 those on its own.  */
7157       if (TREE_CODE (type) == INTEGER_TYPE)
7158 	{
7159 	  if (TYPE_MIN_VALUE (type)
7160 	      && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
7161 	    {
7162 	      /* Zero is always in TYPE_CACHED_VALUES.  */
7163 	      if (! TYPE_UNSIGNED (type))
7164 		int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
7165 	      ggc_free (TYPE_MIN_VALUE (type));
7166 	    }
7167 	  if (TYPE_MAX_VALUE (type)
7168 	      && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
7169 	    {
7170 	      int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
7171 	      ggc_free (TYPE_MAX_VALUE (type));
7172 	    }
7173 	  if (TYPE_CACHED_VALUES_P (type))
7174 	    ggc_free (TYPE_CACHED_VALUES (type));
7175 	}
7176       free_node (type);
7177       return t1;
7178     }
7179   else
7180     {
7181       struct type_hash *h;
7182 
7183       h = ggc_alloc<type_hash> ();
7184       h->hash = hashcode;
7185       h->type = type;
7186       *loc = h;
7187 
7188       return type;
7189     }
7190 }
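
/* Illustrative sketch (not part of the original source): the usual
   pattern for callers of type_hash_canon, as described in the comment
   above, is roughly

     tree t = make_node (SOME_TYPE_CODE);
     ... fill in the fields that distinguish the type ...
     hashval_t hash = type_hash_canon_hash (t);
     t = type_hash_canon (hash, t);

   If an equivalent type already existed, the freshly built node is
   freed and the canonical one is returned; see e.g. build_offset_type
   or build_function_type below for real uses of this idiom.  */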
7191 
7192 static void
7193 print_type_hash_statistics (void)
7194 {
7195   fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
7196 	   (long) type_hash_table->size (),
7197 	   (long) type_hash_table->elements (),
7198 	   type_hash_table->collisions ());
7199 }
7200 
7201 /* Given two lists of types
7202    (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7203    return 1 if the lists contain the same types in the same order.
7204    Also, the TREE_PURPOSEs must match.  */
7205 
7206 bool
7207 type_list_equal (const_tree l1, const_tree l2)
7208 {
7209   const_tree t1, t2;
7210 
7211   for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7212     if (TREE_VALUE (t1) != TREE_VALUE (t2)
7213 	|| (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7214 	    && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7215 		  && (TREE_TYPE (TREE_PURPOSE (t1))
7216 		      == TREE_TYPE (TREE_PURPOSE (t2))))))
7217       return false;
7218 
7219   return t1 == t2;
7220 }
7221 
7222 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7223    given by TYPE.  If the argument list accepts variable arguments,
7224    then this function counts only the ordinary arguments.  */
7225 
7226 int
7227 type_num_arguments (const_tree fntype)
7228 {
7229   int i = 0;
7230 
7231   for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
7232     /* If the function does not take a variable number of arguments,
7233        the last element in the list will have type `void'.  */
7234     if (VOID_TYPE_P (TREE_VALUE (t)))
7235       break;
7236     else
7237       ++i;
7238 
7239   return i;
7240 }
7241 
7242 /* Return the type of the function FNTYPE's argument ARGNO if known.
7243    For vararg functions, where ARGNO refers to one of the variadic
7244    arguments, return null.  Otherwise, return void_type_node for an
7245    out-of-bounds ARGNO.  */
7246 
7247 tree
7248 type_argument_type (const_tree fntype, unsigned argno)
7249 {
7250   /* Treat zero the same as an out-of-bounds argument number.  */
7251   if (!argno)
7252     return void_type_node;
7253 
7254   function_args_iterator iter;
7255 
7256   tree argtype;
7257   unsigned i = 1;
7258   FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
7259     {
7260       /* A vararg function's argument list ends in a null.  Otherwise,
7261 	 an ordinary function's argument list ends with void.  Return
7262 	 null if ARGNO refers to a vararg argument, void_type_node if
7263 	 it's out of bounds, and the formal argument type otherwise.  */
7264       if (!argtype)
7265 	break;
7266 
7267       if (i == argno || VOID_TYPE_P (argtype))
7268 	return argtype;
7269 
7270       ++i;
7271     }
7272 
7273   return NULL_TREE;
7274 }
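
/* Illustrative example (not part of the original source), assuming a
   prototype like "int f (int, char *, ...)":

     type_num_arguments (fntype)     => 2   (only the named arguments)
     type_argument_type (fntype, 1)  => int
     type_argument_type (fntype, 2)  => char *
     type_argument_type (fntype, 3)  => NULL_TREE      (a variadic slot)
     type_argument_type (fntype, 0)  => void_type_node (out of bounds)

   For a non-variadic "int g (int)", an ARGNO past the last named
   parameter returns void_type_node instead.  */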
7275 
7276 /* Nonzero if integer constants T1 and T2
7277    represent the same constant value.  */
7278 
7279 int
7280 tree_int_cst_equal (const_tree t1, const_tree t2)
7281 {
7282   if (t1 == t2)
7283     return 1;
7284 
7285   if (t1 == 0 || t2 == 0)
7286     return 0;
7287 
7288   STRIP_ANY_LOCATION_WRAPPER (t1);
7289   STRIP_ANY_LOCATION_WRAPPER (t2);
7290 
7291   if (TREE_CODE (t1) == INTEGER_CST
7292       && TREE_CODE (t2) == INTEGER_CST
7293       && wi::to_widest (t1) == wi::to_widest (t2))
7294     return 1;
7295 
7296   return 0;
7297 }
7298 
7299 /* Return true if T is an INTEGER_CST whose numerical value (extended
7300    according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT.  */
7301 
7302 bool
7303 tree_fits_shwi_p (const_tree t)
7304 {
7305   return (t != NULL_TREE
7306 	  && TREE_CODE (t) == INTEGER_CST
7307 	  && wi::fits_shwi_p (wi::to_widest (t)));
7308 }
7309 
7310 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
7311    value (extended according to TYPE_UNSIGNED) fits in a poly_int64.  */
7312 
7313 bool
7314 tree_fits_poly_int64_p (const_tree t)
7315 {
7316   if (t == NULL_TREE)
7317     return false;
7318   if (POLY_INT_CST_P (t))
7319     {
7320       for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
7321 	if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
7322 	  return false;
7323       return true;
7324     }
7325   return (TREE_CODE (t) == INTEGER_CST
7326 	  && wi::fits_shwi_p (wi::to_widest (t)));
7327 }
7328 
7329 /* Return true if T is an INTEGER_CST whose numerical value (extended
7330    according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT.  */
7331 
7332 bool
7333 tree_fits_uhwi_p (const_tree t)
7334 {
7335   return (t != NULL_TREE
7336 	  && TREE_CODE (t) == INTEGER_CST
7337 	  && wi::fits_uhwi_p (wi::to_widest (t)));
7338 }
7339 
7340 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
7341    value (extended according to TYPE_UNSIGNED) fits in a poly_uint64.  */
7342 
7343 bool
7344 tree_fits_poly_uint64_p (const_tree t)
7345 {
7346   if (t == NULL_TREE)
7347     return false;
7348   if (POLY_INT_CST_P (t))
7349     {
7350       for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
7351 	if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
7352 	  return false;
7353       return true;
7354     }
7355   return (TREE_CODE (t) == INTEGER_CST
7356 	  && wi::fits_uhwi_p (wi::to_widest (t)));
7357 }
7358 
7359 /* T is an INTEGER_CST whose numerical value (extended according to
7360    TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT.  Return that
7361    HOST_WIDE_INT.  */
7362 
7363 HOST_WIDE_INT
7364 tree_to_shwi (const_tree t)
7365 {
7366   gcc_assert (tree_fits_shwi_p (t));
7367   return TREE_INT_CST_LOW (t);
7368 }
7369 
7370 /* T is an INTEGER_CST whose numerical value (extended according to
7371    TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT.  Return that
7372    HOST_WIDE_INT.  */
7373 
7374 unsigned HOST_WIDE_INT
7375 tree_to_uhwi (const_tree t)
7376 {
7377   gcc_assert (tree_fits_uhwi_p (t));
7378   return TREE_INT_CST_LOW (t);
7379 }
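
/* Illustrative sketch (not part of the original source): the usual
   idiom is to test with tree_fits_uhwi_p / tree_fits_shwi_p before
   converting, since tree_to_uhwi / tree_to_shwi assert on failure.
   Here LEN stands for some INTEGER_CST the caller already has
   (hypothetical name):

     if (tree_fits_uhwi_p (len))
       {
	 unsigned HOST_WIDE_INT n = tree_to_uhwi (len);
	 ... use n ...
       }
     else
       ... the constant does not fit in a host integer ...  */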
7380 
7381 /* Return the most significant (sign) bit of T.  */
7382 
7383 int
7384 tree_int_cst_sign_bit (const_tree t)
7385 {
7386   unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7387 
7388   return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
7389 }
7390 
7391 /* Return an indication of the sign of the integer constant T.
7392    The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7393    Note that -1 will never be returned if T's type is unsigned.  */
7394 
7395 int
7396 tree_int_cst_sgn (const_tree t)
7397 {
7398   if (wi::to_wide (t) == 0)
7399     return 0;
7400   else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7401     return 1;
7402   else if (wi::neg_p (wi::to_wide (t)))
7403     return -1;
7404   else
7405     return 1;
7406 }
7407 
7408 /* Return the minimum number of bits needed to represent VALUE in a
7409    signed or unsigned type; SGN says which.  */
7410 
7411 unsigned int
7412 tree_int_cst_min_precision (tree value, signop sgn)
7413 {
7414   /* If the value is negative, compute its negative minus 1.  The latter
7415      adjustment is because the absolute value of the largest negative value
7416      is one larger than the largest positive value.  This is equivalent to
7417      a bit-wise negation, so use that operation instead.  */
7418 
7419   if (tree_int_cst_sgn (value) < 0)
7420     value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7421 
7422   /* Return the number of bits needed, taking into account the fact
7423      that we need one more bit for a signed than unsigned type.
7424      If VALUE is 0 or -1, the minimum precision is 1 no matter
7425      whether SGN is SIGNED or UNSIGNED.  */
7426 
7427   if (integer_zerop (value))
7428     return 1;
7429   else
7430     return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7431 }
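
/* Worked example (illustrative, not part of the original source):
   for VALUE == 5, tree_floor_log2 (5) == 2, so the result is 3 when
   SGN == UNSIGNED (bits 101) and 4 when SGN == SIGNED (one extra bit
   for the sign).  For VALUE == -1, the bit-wise negation yields 0,
   so the result is 1 in either case.  */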
7432 
7433 /* Return truthvalue of whether T1 is the same tree structure as T2.
7434    Return 1 if they are the same.
7435    Return 0 if they are understandably different.
7436    Return -1 if either contains tree structure not understood by
7437    this function.  */
7438 
7439 int
7440 simple_cst_equal (const_tree t1, const_tree t2)
7441 {
7442   enum tree_code code1, code2;
7443   int cmp;
7444   int i;
7445 
7446   if (t1 == t2)
7447     return 1;
7448   if (t1 == 0 || t2 == 0)
7449     return 0;
7450 
7451   /* For location wrappers to be the same, they must be at the same
7452      source location (and wrap the same thing).  */
7453   if (location_wrapper_p (t1) && location_wrapper_p (t2))
7454     {
7455       if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
7456 	return 0;
7457       return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7458     }
7459 
7460   code1 = TREE_CODE (t1);
7461   code2 = TREE_CODE (t2);
7462 
7463   if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7464     {
7465       if (CONVERT_EXPR_CODE_P (code2)
7466 	  || code2 == NON_LVALUE_EXPR)
7467 	return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7468       else
7469 	return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7470     }
7471 
7472   else if (CONVERT_EXPR_CODE_P (code2)
7473 	   || code2 == NON_LVALUE_EXPR)
7474     return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7475 
7476   if (code1 != code2)
7477     return 0;
7478 
7479   switch (code1)
7480     {
7481     case INTEGER_CST:
7482       return wi::to_widest (t1) == wi::to_widest (t2);
7483 
7484     case REAL_CST:
7485       return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
7486 
7487     case FIXED_CST:
7488       return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7489 
7490     case STRING_CST:
7491       return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7492 	      && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7493 			 TREE_STRING_LENGTH (t1)));
7494 
7495     case CONSTRUCTOR:
7496       {
7497 	unsigned HOST_WIDE_INT idx;
7498 	vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7499 	vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7500 
7501 	if (vec_safe_length (v1) != vec_safe_length (v2))
7502 	  return false;
7503 
7504         for (idx = 0; idx < vec_safe_length (v1); ++idx)
7505 	  /* ??? Should we also handle fields here?  */
7506 	  if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7507 	    return false;
7508 	return true;
7509       }
7510 
7511     case SAVE_EXPR:
7512       return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7513 
7514     case CALL_EXPR:
7515       cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7516       if (cmp <= 0)
7517 	return cmp;
7518       if (call_expr_nargs (t1) != call_expr_nargs (t2))
7519 	return 0;
7520       {
7521 	const_tree arg1, arg2;
7522 	const_call_expr_arg_iterator iter1, iter2;
7523 	for (arg1 = first_const_call_expr_arg (t1, &iter1),
7524 	       arg2 = first_const_call_expr_arg (t2, &iter2);
7525 	     arg1 && arg2;
7526 	     arg1 = next_const_call_expr_arg (&iter1),
7527 	       arg2 = next_const_call_expr_arg (&iter2))
7528 	  {
7529 	    cmp = simple_cst_equal (arg1, arg2);
7530 	    if (cmp <= 0)
7531 	      return cmp;
7532 	  }
7533 	return arg1 == arg2;
7534       }
7535 
7536     case TARGET_EXPR:
7537       /* Special case: if either target is an unallocated VAR_DECL,
7538 	 it means that it's going to be unified with whatever the
7539 	 TARGET_EXPR is really supposed to initialize, so treat it
7540 	 as being equivalent to anything.  */
7541       if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7542 	   && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7543 	   && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7544 	  || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7545 	      && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7546 	      && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7547 	cmp = 1;
7548       else
7549 	cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7550 
7551       if (cmp <= 0)
7552 	return cmp;
7553 
7554       return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7555 
7556     case WITH_CLEANUP_EXPR:
7557       cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7558       if (cmp <= 0)
7559 	return cmp;
7560 
7561       return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7562 
7563     case COMPONENT_REF:
7564       if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7565 	return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7566 
7567       return 0;
7568 
7569     case VAR_DECL:
7570     case PARM_DECL:
7571     case CONST_DECL:
7572     case FUNCTION_DECL:
7573       return 0;
7574 
7575     default:
7576       if (POLY_INT_CST_P (t1))
7577 	/* A false return means maybe_ne rather than known_ne.  */
7578 	return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
7579 						TYPE_SIGN (TREE_TYPE (t1))),
7580 			 poly_widest_int::from (poly_int_cst_value (t2),
7581 						TYPE_SIGN (TREE_TYPE (t2))));
7582       break;
7583     }
7584 
7585   /* This general rule works for most tree codes.  All exceptions should be
7586      handled above.  If this is a language-specific tree code, we can't
7587      trust what might be in the operand, so say we don't know
7588      the situation.  */
7589   if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7590     return -1;
7591 
7592   switch (TREE_CODE_CLASS (code1))
7593     {
7594     case tcc_unary:
7595     case tcc_binary:
7596     case tcc_comparison:
7597     case tcc_expression:
7598     case tcc_reference:
7599     case tcc_statement:
7600       cmp = 1;
7601       for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7602 	{
7603 	  cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7604 	  if (cmp <= 0)
7605 	    return cmp;
7606 	}
7607 
7608       return cmp;
7609 
7610     default:
7611       return -1;
7612     }
7613 }
7614 
7615 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7616    Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7617    than U, respectively.  */
7618 
7619 int
7620 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7621 {
7622   if (tree_int_cst_sgn (t) < 0)
7623     return -1;
7624   else if (!tree_fits_uhwi_p (t))
7625     return 1;
7626   else if (TREE_INT_CST_LOW (t) == u)
7627     return 0;
7628   else if (TREE_INT_CST_LOW (t) < u)
7629     return -1;
7630   else
7631     return 1;
7632 }
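
/* Illustrative use (not part of the original source): callers commonly
   compare a tree constant against a host integer without building a
   tree node for the latter, e.g.

     if (compare_tree_int (TYPE_SIZE_UNIT (type), 16) <= 0)
       ... the type is at most 16 bytes ...

   A negative T compares as less than any U, and a T that does not fit
   in an unsigned HOST_WIDE_INT compares as greater.  */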
7633 
7634 /* Return true if SIZE represents a constant size that is in bounds of
7635    what the middle-end and the backend accepts (covering not more than
7636    half of the address-space).
7637    When PERR is non-null, set *PERR on failure to the description of
7638    why SIZE is not valid.  */
7639 
7640 bool
7641 valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
7642 {
7643   if (POLY_INT_CST_P (size))
7644     {
7645       if (TREE_OVERFLOW (size))
7646 	return false;
7647       for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
7648 	if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
7649 	  return false;
7650       return true;
7651     }
7652 
7653   cst_size_error error;
7654   if (!perr)
7655     perr = &error;
7656 
7657   if (TREE_CODE (size) != INTEGER_CST)
7658     {
7659       *perr = cst_size_not_constant;
7660       return false;
7661     }
7662 
7663   if (TREE_OVERFLOW_P (size))
7664     {
7665       *perr = cst_size_overflow;
7666       return false;
7667     }
7668 
7669   if (tree_int_cst_sgn (size) < 0)
7670     {
7671       *perr = cst_size_negative;
7672       return false;
7673     }
7674   if (!tree_fits_uhwi_p (size)
7675       || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
7676 	  < wi::to_widest (size) * 2))
7677     {
7678       *perr = cst_size_too_big;
7679       return false;
7680     }
7681 
7682   return true;
7683 }
7684 
7685 /* Return the precision of the type, or for a complex or vector type the
7686    precision of the type of its elements.  */
7687 
7688 unsigned int
7689 element_precision (const_tree type)
7690 {
7691   if (!TYPE_P (type))
7692     type = TREE_TYPE (type);
7693   enum tree_code code = TREE_CODE (type);
7694   if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7695     type = TREE_TYPE (type);
7696 
7697   return TYPE_PRECISION (type);
7698 }
7699 
7700 /* Return true if CODE represents an associative tree code.  Otherwise
7701    return false.  */
7702 bool
7703 associative_tree_code (enum tree_code code)
7704 {
7705   switch (code)
7706     {
7707     case BIT_IOR_EXPR:
7708     case BIT_AND_EXPR:
7709     case BIT_XOR_EXPR:
7710     case PLUS_EXPR:
7711     case MULT_EXPR:
7712     case MIN_EXPR:
7713     case MAX_EXPR:
7714       return true;
7715 
7716     default:
7717       break;
7718     }
7719   return false;
7720 }
7721 
7722 /* Return true if CODE represents a commutative tree code.  Otherwise
7723    return false.  */
7724 bool
7725 commutative_tree_code (enum tree_code code)
7726 {
7727   switch (code)
7728     {
7729     case PLUS_EXPR:
7730     case MULT_EXPR:
7731     case MULT_HIGHPART_EXPR:
7732     case MIN_EXPR:
7733     case MAX_EXPR:
7734     case BIT_IOR_EXPR:
7735     case BIT_XOR_EXPR:
7736     case BIT_AND_EXPR:
7737     case NE_EXPR:
7738     case EQ_EXPR:
7739     case UNORDERED_EXPR:
7740     case ORDERED_EXPR:
7741     case UNEQ_EXPR:
7742     case LTGT_EXPR:
7743     case TRUTH_AND_EXPR:
7744     case TRUTH_XOR_EXPR:
7745     case TRUTH_OR_EXPR:
7746     case WIDEN_MULT_EXPR:
7747     case VEC_WIDEN_MULT_HI_EXPR:
7748     case VEC_WIDEN_MULT_LO_EXPR:
7749     case VEC_WIDEN_MULT_EVEN_EXPR:
7750     case VEC_WIDEN_MULT_ODD_EXPR:
7751       return true;
7752 
7753     default:
7754       break;
7755     }
7756   return false;
7757 }
7758 
7759 /* Return true if CODE represents a ternary tree code for which the
7760    first two operands are commutative.  Otherwise return false.  */
7761 bool
7762 commutative_ternary_tree_code (enum tree_code code)
7763 {
7764   switch (code)
7765     {
7766     case WIDEN_MULT_PLUS_EXPR:
7767     case WIDEN_MULT_MINUS_EXPR:
7768     case DOT_PROD_EXPR:
7769       return true;
7770 
7771     default:
7772       break;
7773     }
7774   return false;
7775 }
7776 
7777 /* Returns true if CODE can overflow.  */
7778 
7779 bool
7780 operation_can_overflow (enum tree_code code)
7781 {
7782   switch (code)
7783     {
7784     case PLUS_EXPR:
7785     case MINUS_EXPR:
7786     case MULT_EXPR:
7787     case LSHIFT_EXPR:
7788       /* Can overflow in various ways.  */
7789       return true;
7790     case TRUNC_DIV_EXPR:
7791     case EXACT_DIV_EXPR:
7792     case FLOOR_DIV_EXPR:
7793     case CEIL_DIV_EXPR:
7794       /* For INT_MIN / -1.  */
7795       return true;
7796     case NEGATE_EXPR:
7797     case ABS_EXPR:
7798       /* For -INT_MIN.  */
7799       return true;
7800     default:
7801       /* These operators cannot overflow.  */
7802       return false;
7803     }
7804 }
7805 
7806 /* Returns true if CODE operating on operands of type TYPE cannot overflow,
7807    or if -ftrapv does not generate trapping insns for CODE.  */
7808 
7809 bool
7810 operation_no_trapping_overflow (tree type, enum tree_code code)
7811 {
7812   gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7813 
7814   /* We don't generate instructions that trap on overflow for complex or vector
7815      types.  */
7816   if (!INTEGRAL_TYPE_P (type))
7817     return true;
7818 
7819   if (!TYPE_OVERFLOW_TRAPS (type))
7820     return true;
7821 
7822   switch (code)
7823     {
7824     case PLUS_EXPR:
7825     case MINUS_EXPR:
7826     case MULT_EXPR:
7827     case NEGATE_EXPR:
7828     case ABS_EXPR:
7829       /* These operators can overflow, and -ftrapv generates trapping code for
7830 	 these.  */
7831       return false;
7832     case TRUNC_DIV_EXPR:
7833     case EXACT_DIV_EXPR:
7834     case FLOOR_DIV_EXPR:
7835     case CEIL_DIV_EXPR:
7836     case LSHIFT_EXPR:
7837       /* These operators can overflow, but -ftrapv does not generate trapping
7838 	 code for these.  */
7839       return true;
7840     default:
7841       /* These operators cannot overflow.  */
7842       return true;
7843     }
7844 }
7845 
7846 /* Constructors for pointer, array and function types.
7847    (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7848    constructed by language-dependent code, not here.)  */
7849 
7850 /* Construct, lay out and return the type of pointers to TO_TYPE with
7851    mode MODE.  If CAN_ALIAS_ALL is TRUE, indicate this type can
7852    reference all of memory. If such a type has already been
7853    constructed, reuse it.  */
7854 
7855 tree
7856 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7857 			     bool can_alias_all)
7858 {
7859   tree t;
7860   bool could_alias = can_alias_all;
7861 
7862   if (to_type == error_mark_node)
7863     return error_mark_node;
7864 
7865   /* If the pointed-to type has the may_alias attribute set, force
7866      a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
7867   if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7868     can_alias_all = true;
7869 
7870   /* In some cases, languages will have things that aren't a POINTER_TYPE
7871      (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7872      In that case, return that type without regard to the rest of our
7873      operands.
7874 
7875      ??? This is a kludge, but consistent with the way this function has
7876      always operated and there doesn't seem to be a good way to avoid this
7877      at the moment.  */
7878   if (TYPE_POINTER_TO (to_type) != 0
7879       && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7880     return TYPE_POINTER_TO (to_type);
7881 
7882   /* First, if we already have a type for pointers to TO_TYPE and it's
7883      the proper mode, use it.  */
7884   for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7885     if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7886       return t;
7887 
7888   t = make_node (POINTER_TYPE);
7889 
7890   TREE_TYPE (t) = to_type;
7891   SET_TYPE_MODE (t, mode);
7892   TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7893   TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7894   TYPE_POINTER_TO (to_type) = t;
7895 
7896   /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
7897   if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7898     SET_TYPE_STRUCTURAL_EQUALITY (t);
7899   else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7900     TYPE_CANONICAL (t)
7901       = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7902 				     mode, false);
7903 
7904   /* Lay out the type.  This function has many callers that are concerned
7905      with expression-construction, and this simplifies them all.  */
7906   layout_type (t);
7907 
7908   return t;
7909 }
7910 
7911 /* By default build pointers in ptr_mode.  */
7912 
7913 tree
7914 build_pointer_type (tree to_type)
7915 {
7916   addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7917 					      : TYPE_ADDR_SPACE (to_type);
7918   machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7919   return build_pointer_type_for_mode (to_type, pointer_mode, false);
7920 }
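
/* Illustrative sketch (not part of the original source): asking twice
   for the type "char *" returns the same node, because pointer types
   are chained off TYPE_POINTER_TO of the pointed-to type and reused:

     tree p1 = build_pointer_type (char_type_node);
     tree p2 = build_pointer_type (char_type_node);
     gcc_assert (p1 == p2);  */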
7921 
7922 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE.  */
7923 
7924 tree
7925 build_reference_type_for_mode (tree to_type, machine_mode mode,
7926 			       bool can_alias_all)
7927 {
7928   tree t;
7929   bool could_alias = can_alias_all;
7930 
7931   if (to_type == error_mark_node)
7932     return error_mark_node;
7933 
7934   /* If the pointed-to type has the may_alias attribute set, force
7935      a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
7936   if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7937     can_alias_all = true;
7938 
7939   /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7940      (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7941      In that case, return that type without regard to the rest of our
7942      operands.
7943 
7944      ??? This is a kludge, but consistent with the way this function has
7945      always operated and there doesn't seem to be a good way to avoid this
7946      at the moment.  */
7947   if (TYPE_REFERENCE_TO (to_type) != 0
7948       && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7949     return TYPE_REFERENCE_TO (to_type);
7950 
7951   /* First, if we already have a type for pointers to TO_TYPE and it's
7952      the proper mode, use it.  */
7953   for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7954     if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7955       return t;
7956 
7957   t = make_node (REFERENCE_TYPE);
7958 
7959   TREE_TYPE (t) = to_type;
7960   SET_TYPE_MODE (t, mode);
7961   TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7962   TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7963   TYPE_REFERENCE_TO (to_type) = t;
7964 
7965   /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
7966   if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7967     SET_TYPE_STRUCTURAL_EQUALITY (t);
7968   else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7969     TYPE_CANONICAL (t)
7970       = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7971 				       mode, false);
7972 
7973   layout_type (t);
7974 
7975   return t;
7976 }
7977 
7978 
7979 /* Build the node for the type of references-to-TO_TYPE by default
7980    in ptr_mode.  */
7981 
7982 tree
7983 build_reference_type (tree to_type)
7984 {
7985   addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7986 					      : TYPE_ADDR_SPACE (to_type);
7987   machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7988   return build_reference_type_for_mode (to_type, pointer_mode, false);
7989 }
7990 
7991 #define MAX_INT_CACHED_PREC \
7992   (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7993 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7994 
7995 /* Builds a signed or unsigned integer type of precision PRECISION.
7996    Used for C bitfields whose precision does not match that of
7997    built-in target types.  */
7998 tree
7999 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
8000 				int unsignedp)
8001 {
8002   tree itype, ret;
8003 
8004   if (unsignedp)
8005     unsignedp = MAX_INT_CACHED_PREC + 1;
8006 
8007   if (precision <= MAX_INT_CACHED_PREC)
8008     {
8009       itype = nonstandard_integer_type_cache[precision + unsignedp];
8010       if (itype)
8011 	return itype;
8012     }
8013 
8014   itype = make_node (INTEGER_TYPE);
8015   TYPE_PRECISION (itype) = precision;
8016 
8017   if (unsignedp)
8018     fixup_unsigned_type (itype);
8019   else
8020     fixup_signed_type (itype);
8021 
8022   inchash::hash hstate;
8023   inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
8024   ret = type_hash_canon (hstate.end (), itype);
8025   if (precision <= MAX_INT_CACHED_PREC)
8026     nonstandard_integer_type_cache[precision + unsignedp] = ret;
8027 
8028   return ret;
8029 }
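
/* Illustrative example (not part of the original source): a 24-bit
   unsigned type, such as a front end might need for a bit-field of
   that width, is obtained with

     tree uint24 = build_nonstandard_integer_type (24, 1);

   Repeating the call with the same arguments returns the cached node,
   since 24 <= MAX_INT_CACHED_PREC.  */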
8030 
8031 #define MAX_BOOL_CACHED_PREC \
8032   (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8033 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
8034 
8035 /* Builds a boolean type of precision PRECISION.
8036    Used for boolean vectors to choose proper vector element size.  */
8037 tree
8038 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
8039 {
8040   tree type;
8041 
8042   if (precision <= MAX_BOOL_CACHED_PREC)
8043     {
8044       type = nonstandard_boolean_type_cache[precision];
8045       if (type)
8046 	return type;
8047     }
8048 
8049   type = make_node (BOOLEAN_TYPE);
8050   TYPE_PRECISION (type) = precision;
8051   fixup_signed_type (type);
8052 
8053   if (precision <= MAX_BOOL_CACHED_PREC)
8054     nonstandard_boolean_type_cache[precision] = type;
8055 
8056   return type;
8057 }
8058 
8059 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
8060    or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL.  If SHARED
8061    is true, reuse such a type that has already been constructed.  */
8062 
8063 static tree
8064 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
8065 {
8066   tree itype = make_node (INTEGER_TYPE);
8067 
8068   TREE_TYPE (itype) = type;
8069 
8070   TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
8071   TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
8072 
8073   TYPE_PRECISION (itype) = TYPE_PRECISION (type);
8074   SET_TYPE_MODE (itype, TYPE_MODE (type));
8075   TYPE_SIZE (itype) = TYPE_SIZE (type);
8076   TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
8077   SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
8078   TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
8079   SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
8080 
8081   if (!shared)
8082     return itype;
8083 
8084   if ((TYPE_MIN_VALUE (itype)
8085        && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
8086       || (TYPE_MAX_VALUE (itype)
8087 	  && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
8088     {
8089       /* Since we cannot reliably merge this type, we need to compare it using
8090 	 structural equality checks.  */
8091       SET_TYPE_STRUCTURAL_EQUALITY (itype);
8092       return itype;
8093     }
8094 
8095   hashval_t hash = type_hash_canon_hash (itype);
8096   itype = type_hash_canon (hash, itype);
8097 
8098   return itype;
8099 }
8100 
8101 /* Wrapper around build_range_type_1 with SHARED set to true.  */
8102 
8103 tree
8104 build_range_type (tree type, tree lowval, tree highval)
8105 {
8106   return build_range_type_1 (type, lowval, highval, true);
8107 }
8108 
8109 /* Wrapper around build_range_type_1 with SHARED set to false.  */
8110 
8111 tree
8112 build_nonshared_range_type (tree type, tree lowval, tree highval)
8113 {
8114   return build_range_type_1 (type, lowval, highval, false);
8115 }
8116 
8117 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
8118    MAXVAL should be the maximum value in the domain
8119    (one less than the length of the array).
8120 
8121    The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
8122    We don't enforce this limit, that is up to caller (e.g. language front end).
8123    The limit exists because the result is a signed type and we don't handle
8124    sizes that use more than one HOST_WIDE_INT.  */
8125 
8126 tree
8127 build_index_type (tree maxval)
8128 {
8129   return build_range_type (sizetype, size_zero_node, maxval);
8130 }
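
/* Illustrative example (not part of the original source): the domain
   of a ten-element array runs from 0 to 9, so a front end would
   typically build "char[10]" as

     tree domain = build_index_type (size_int (9));
     tree arr = build_array_type (char_type_node, domain);  */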
8131 
8132 /* Return true if the debug information for TYPE, a subtype, should be emitted
8133    as a subrange type.  If so, set LOWVAL to the low bound and HIGHVAL to the
8134    high bound, respectively.  Sometimes doing so unnecessarily obfuscates the
8135    debug info and doesn't reflect the source code.  */
8136 
8137 bool
8138 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
8139 {
8140   tree base_type = TREE_TYPE (type), low, high;
8141 
8142   /* Subrange types have a base type which is an integral type.  */
8143   if (!INTEGRAL_TYPE_P (base_type))
8144     return false;
8145 
8146   /* Get the real bounds of the subtype.  */
8147   if (lang_hooks.types.get_subrange_bounds)
8148     lang_hooks.types.get_subrange_bounds (type, &low, &high);
8149   else
8150     {
8151       low = TYPE_MIN_VALUE (type);
8152       high = TYPE_MAX_VALUE (type);
8153     }
8154 
8155   /* If the type and its base type have the same representation and the same
8156      name, then the type is not a subrange but a copy of the base type.  */
8157   if ((TREE_CODE (base_type) == INTEGER_TYPE
8158        || TREE_CODE (base_type) == BOOLEAN_TYPE)
8159       && int_size_in_bytes (type) == int_size_in_bytes (base_type)
8160       && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
8161       && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
8162       && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
8163     return false;
8164 
8165   if (lowval)
8166     *lowval = low;
8167   if (highval)
8168     *highval = high;
8169   return true;
8170 }
8171 
8172 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8173    and number of elements specified by the range of values of INDEX_TYPE.
8174    If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
8175    If SHARED is true, reuse such a type that has already been constructed.
8176    If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type.  */
8177 
8178 static tree
8179 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
8180 		    bool shared, bool set_canonical)
8181 {
8182   tree t;
8183 
8184   if (TREE_CODE (elt_type) == FUNCTION_TYPE)
8185     {
8186       error ("arrays of functions are not meaningful");
8187       elt_type = integer_type_node;
8188     }
8189 
8190   t = make_node (ARRAY_TYPE);
8191   TREE_TYPE (t) = elt_type;
8192   TYPE_DOMAIN (t) = index_type;
8193   TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
8194   TYPE_TYPELESS_STORAGE (t) = typeless_storage;
8195   layout_type (t);
8196 
8197   if (shared)
8198     {
8199       hashval_t hash = type_hash_canon_hash (t);
8200       t = type_hash_canon (hash, t);
8201     }
8202 
8203   if (TYPE_CANONICAL (t) == t && set_canonical)
8204     {
8205       if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
8206 	  || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
8207 	  || in_lto_p)
8208 	SET_TYPE_STRUCTURAL_EQUALITY (t);
8209       else if (TYPE_CANONICAL (elt_type) != elt_type
8210 	       || (index_type && TYPE_CANONICAL (index_type) != index_type))
8211 	TYPE_CANONICAL (t)
8212 	  = build_array_type_1 (TYPE_CANONICAL (elt_type),
8213 				index_type
8214 				? TYPE_CANONICAL (index_type) : NULL_TREE,
8215 				typeless_storage, shared, set_canonical);
8216     }
8217 
8218   return t;
8219 }
8220 
8221 /* Wrapper around build_array_type_1 with SHARED set to true.  */
8222 
8223 tree
8224 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
8225 {
8226   return
8227     build_array_type_1 (elt_type, index_type, typeless_storage, true, true);
8228 }
8229 
8230 /* Wrapper around build_array_type_1 with SHARED set to false.  */
8231 
8232 tree
8233 build_nonshared_array_type (tree elt_type, tree index_type)
8234 {
8235   return build_array_type_1 (elt_type, index_type, false, false, true);
8236 }
8237 
8238 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8239    sizetype.  */
8240 
8241 tree
8242 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
8243 {
8244   return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8245 }
8246 
8247 /* Recursively examines the array elements of TYPE, until a non-array
8248    element type is found.  */
8249 
8250 tree
8251 strip_array_types (tree type)
8252 {
8253   while (TREE_CODE (type) == ARRAY_TYPE)
8254     type = TREE_TYPE (type);
8255 
8256   return type;
8257 }
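
/* Illustrative example (not part of the original source): for a type
   like "int[3][4]", strip_array_types returns the innermost element
   type, i.e. int; for a non-array type it returns the type itself.  */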
8258 
8259 /* Computes the canonical argument types from the argument type list
8260    ARGTYPES.
8261 
8262    Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8263    on entry to this function, or if any of the ARGTYPES are
8264    structural.
8265 
8266    Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8267    true on entry to this function, or if any of the ARGTYPES are
8268    non-canonical.
8269 
8270    Returns a canonical argument list, which may be ARGTYPES when the
8271    canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8272    true) or would not differ from ARGTYPES.  */
8273 
8274 static tree
8275 maybe_canonicalize_argtypes (tree argtypes,
8276 			     bool *any_structural_p,
8277 			     bool *any_noncanonical_p)
8278 {
8279   tree arg;
8280   bool any_noncanonical_argtypes_p = false;
8281 
8282   for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8283     {
8284       if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8285 	/* Fail gracefully by stating that the type is structural.  */
8286 	*any_structural_p = true;
8287       else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8288 	*any_structural_p = true;
8289       else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8290 	       || TREE_PURPOSE (arg))
8291 	/* If the argument has a default argument, we consider it
8292 	   non-canonical even though the type itself is canonical.
8293 	   That way, different variants of function and method types
8294 	   with default arguments will all point to the variant with
8295 	   no defaults as their canonical type.  */
8296         any_noncanonical_argtypes_p = true;
8297     }
8298 
8299   if (*any_structural_p)
8300     return argtypes;
8301 
8302   if (any_noncanonical_argtypes_p)
8303     {
8304       /* Build the canonical list of argument types.  */
8305       tree canon_argtypes = NULL_TREE;
8306       bool is_void = false;
8307 
8308       for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8309         {
8310           if (arg == void_list_node)
8311             is_void = true;
8312           else
8313             canon_argtypes = tree_cons (NULL_TREE,
8314                                         TYPE_CANONICAL (TREE_VALUE (arg)),
8315                                         canon_argtypes);
8316         }
8317 
8318       canon_argtypes = nreverse (canon_argtypes);
8319       if (is_void)
8320         canon_argtypes = chainon (canon_argtypes, void_list_node);
8321 
8322       /* There is a non-canonical type.  */
8323       *any_noncanonical_p = true;
8324       return canon_argtypes;
8325     }
8326 
8327   /* The canonical argument types are the same as ARGTYPES.  */
8328   return argtypes;
8329 }
8330 
8331 /* Construct, lay out and return
8332    the type of functions returning type VALUE_TYPE
8333    given arguments of types ARG_TYPES.
8334    ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8335    are data type nodes for the arguments of the function.
8336    If such a type has already been constructed, reuse it.  */
8337 
8338 tree
8339 build_function_type (tree value_type, tree arg_types)
8340 {
8341   tree t;
8342   inchash::hash hstate;
8343   bool any_structural_p, any_noncanonical_p;
8344   tree canon_argtypes;
8345 
8346   gcc_assert (arg_types != error_mark_node);
8347 
8348   if (TREE_CODE (value_type) == FUNCTION_TYPE)
8349     {
8350       error ("function return type cannot be function");
8351       value_type = integer_type_node;
8352     }
8353 
8354   /* Make a node of the sort we want.  */
8355   t = make_node (FUNCTION_TYPE);
8356   TREE_TYPE (t) = value_type;
8357   TYPE_ARG_TYPES (t) = arg_types;
8358 
8359   /* If we already have such a type, use the old one.  */
8360   hashval_t hash = type_hash_canon_hash (t);
8361   t = type_hash_canon (hash, t);
8362 
8363   /* Set up the canonical type. */
8364   any_structural_p   = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8365   any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8366   canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8367 						&any_structural_p,
8368 						&any_noncanonical_p);
8369   if (any_structural_p)
8370     SET_TYPE_STRUCTURAL_EQUALITY (t);
8371   else if (any_noncanonical_p)
8372     TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8373 					      canon_argtypes);
8374 
8375   if (!COMPLETE_TYPE_P (t))
8376     layout_type (t);
8377   return t;
8378 }
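
/* Illustrative sketch (not part of the original source): ARG_TYPES is
   a TREE_LIST chain terminated by void_list_node for a non-variadic
   function, so "int (int, char *)" could be built by hand as

     tree args = tree_cons (NULL_TREE, integer_type_node,
			    tree_cons (NULL_TREE,
				       build_pointer_type (char_type_node),
				       void_list_node));
     tree fntype = build_function_type (integer_type_node, args);

   In practice most callers use the build_function_type_list wrappers
   below instead of constructing the chain themselves.  */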
8379 
8380 /* Build a function type.  The RETURN_TYPE is the type returned by the
8381    function.  If VAARGS is set, no void_type_node is appended to the
8382    list.  ARGP must always be terminated by a NULL_TREE.  */
8383 
8384 static tree
8385 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8386 {
8387   tree t, args, last;
8388 
8389   t = va_arg (argp, tree);
8390   for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8391     args = tree_cons (NULL_TREE, t, args);
8392 
8393   if (vaargs)
8394     {
8395       last = args;
8396       if (args != NULL_TREE)
8397 	args = nreverse (args);
8398       gcc_assert (last != void_list_node);
8399     }
8400   else if (args == NULL_TREE)
8401     args = void_list_node;
8402   else
8403     {
8404       last = args;
8405       args = nreverse (args);
8406       TREE_CHAIN (last) = void_list_node;
8407     }
8408   args = build_function_type (return_type, args);
8409 
8410   return args;
8411 }
8412 
8413 /* Build a function type.  The RETURN_TYPE is the type returned by the
8414    function.  If additional arguments are provided, they are
8415    additional argument types.  The list of argument types must always
8416    be terminated by NULL_TREE.  */
8417 
8418 tree
8419 build_function_type_list (tree return_type, ...)
8420 {
8421   tree args;
8422   va_list p;
8423 
8424   va_start (p, return_type);
8425   args = build_function_type_list_1 (false, return_type, p);
8426   va_end (p);
8427   return args;
8428 }
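
/* Illustrative example (not part of the original source): the same
   "int (int, char *)" type as above, built with the varargs-style
   wrapper:

     tree fntype = build_function_type_list (integer_type_node,
					     integer_type_node,
					     build_pointer_type (char_type_node),
					     NULL_TREE);  */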
8429 
8430 /* Build a variable argument function type.  The RETURN_TYPE is the
8431    type returned by the function.  If additional arguments are provided,
8432    they are additional argument types.  The list of argument types must
8433    always be terminated by NULL_TREE.  */
8434 
8435 tree
8436 build_varargs_function_type_list (tree return_type, ...)
8437 {
8438   tree args;
8439   va_list p;
8440 
8441   va_start (p, return_type);
8442   args = build_function_type_list_1 (true, return_type, p);
8443   va_end (p);
8444 
8445   return args;
8446 }
8447 
8448 /* Build a function type.  RETURN_TYPE is the type returned by the
8449    function; VAARGS indicates whether the function takes varargs.  The
8450    function takes N named arguments, the types of which are provided in
8451    ARG_TYPES.  */
8452 
8453 static tree
8454 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8455 			     tree *arg_types)
8456 {
8457   int i;
8458   tree t = vaargs ? NULL_TREE : void_list_node;
8459 
8460   for (i = n - 1; i >= 0; i--)
8461     t = tree_cons (NULL_TREE, arg_types[i], t);
8462 
8463   return build_function_type (return_type, t);
8464 }
8465 
8466 /* Build a function type.  RETURN_TYPE is the type returned by the
8467    function.  The function takes N named arguments, the types of which
8468    are provided in ARG_TYPES.  */
8469 
8470 tree
8471 build_function_type_array (tree return_type, int n, tree *arg_types)
8472 {
8473   return build_function_type_array_1 (false, return_type, n, arg_types);
8474 }
8475 
8476 /* Build a variable argument function type.  RETURN_TYPE is the type
8477    returned by the function.  The function takes N named arguments, the
8478    types of which are provided in ARG_TYPES.  */
8479 
8480 tree
8481 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8482 {
8483   return build_function_type_array_1 (true, return_type, n, arg_types);
8484 }
8485 
8486 /* Build a METHOD_TYPE for a member of BASETYPE.  The RETTYPE (a TYPE)
8487    and ARGTYPES (a TREE_LIST) are the return type and argument types
8488    for the method.  An implicit additional parameter (of type
8489    pointer-to-BASETYPE) is added to the ARGTYPES.  */
8490 
8491 tree
8492 build_method_type_directly (tree basetype,
8493 			    tree rettype,
8494 			    tree argtypes)
8495 {
8496   tree t;
8497   tree ptype;
8498   bool any_structural_p, any_noncanonical_p;
8499   tree canon_argtypes;
8500 
8501   /* Make a node of the sort we want.  */
8502   t = make_node (METHOD_TYPE);
8503 
8504   TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8505   TREE_TYPE (t) = rettype;
8506   ptype = build_pointer_type (basetype);
8507 
8508   /* The actual arglist for this function includes a "hidden" argument
8509      which is "this".  Put it into the list of argument types.  */
8510   argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8511   TYPE_ARG_TYPES (t) = argtypes;
8512 
8513   /* If we already have such a type, use the old one.  */
8514   hashval_t hash = type_hash_canon_hash (t);
8515   t = type_hash_canon (hash, t);
8516 
8517   /* Set up the canonical type. */
8518   any_structural_p
8519     = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8520        || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8521   any_noncanonical_p
8522     = (TYPE_CANONICAL (basetype) != basetype
8523        || TYPE_CANONICAL (rettype) != rettype);
8524   canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8525 						&any_structural_p,
8526 						&any_noncanonical_p);
8527   if (any_structural_p)
8528     SET_TYPE_STRUCTURAL_EQUALITY (t);
8529   else if (any_noncanonical_p)
8530     TYPE_CANONICAL (t)
8531       = build_method_type_directly (TYPE_CANONICAL (basetype),
8532 				    TYPE_CANONICAL (rettype),
8533 				    canon_argtypes);
8534   if (!COMPLETE_TYPE_P (t))
8535     layout_type (t);
8536 
8537   return t;
8538 }
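
/* Illustrative sketch (not part of the original source): for a member
   function like "int C::f (char)", a C++-style front end would pass
   the class type (class_type below is a hypothetical RECORD_TYPE), the
   return type and the explicit argument list; the implicit "this"
   pointer is prepended here:

     tree argtypes = tree_cons (NULL_TREE, char_type_node, void_list_node);
     tree mtype = build_method_type_directly (class_type,
					      integer_type_node,
					      argtypes);

   TYPE_ARG_TYPES (mtype) then starts with a pointer to class_type.  */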
8539 
8540 /* Construct, lay out and return the type of methods belonging to class
8541    BASETYPE and whose arguments and values are described by TYPE.
8542    If that type exists already, reuse it.
8543    TYPE must be a FUNCTION_TYPE node.  */
8544 
8545 tree
8546 build_method_type (tree basetype, tree type)
8547 {
8548   gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8549 
8550   return build_method_type_directly (basetype,
8551 				     TREE_TYPE (type),
8552 				     TYPE_ARG_TYPES (type));
8553 }
8554 
8555 /* Construct, lay out and return the type of offsets to a value
8556    of type TYPE, within an object of type BASETYPE.
8557    If a suitable offset type exists already, reuse it.  */
8558 
8559 tree
8560 build_offset_type (tree basetype, tree type)
8561 {
8562   tree t;
8563 
8564   /* Make a node of the sort we want.  */
8565   t = make_node (OFFSET_TYPE);
8566 
8567   TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8568   TREE_TYPE (t) = type;
8569 
8570   /* If we already have such a type, use the old one.  */
8571   hashval_t hash = type_hash_canon_hash (t);
8572   t = type_hash_canon (hash, t);
8573 
8574   if (!COMPLETE_TYPE_P (t))
8575     layout_type (t);
8576 
8577   if (TYPE_CANONICAL (t) == t)
8578     {
8579       if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8580 	  || TYPE_STRUCTURAL_EQUALITY_P (type))
8581 	SET_TYPE_STRUCTURAL_EQUALITY (t);
8582       else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8583 	       || TYPE_CANONICAL (type) != type)
8584 	TYPE_CANONICAL (t)
8585 	  = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8586 			       TYPE_CANONICAL (type));
8587     }
8588 
8589   return t;
8590 }
8591 
8592 /* Create a complex type whose components are COMPONENT_TYPE.
8593 
8594    If NAMED is true, the type is given a TYPE_NAME.  We do not always
8595    do so because this creates a DECL node and thus makes the DECL_UIDs
8596    dependent on the type canonicalization hashtable, which is GC-ed,
8597    so the DECL_UIDs would not be stable wrt garbage collection.  */
8598 
8599 tree
8600 build_complex_type (tree component_type, bool named)
8601 {
8602   gcc_assert (INTEGRAL_TYPE_P (component_type)
8603 	      || SCALAR_FLOAT_TYPE_P (component_type)
8604 	      || FIXED_POINT_TYPE_P (component_type));
8605 
8606   /* Make a node of the sort we want.  */
8607   tree probe = make_node (COMPLEX_TYPE);
8608 
8609   TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);
8610 
8611   /* If we already have such a type, use the old one.  */
8612   hashval_t hash = type_hash_canon_hash (probe);
8613   tree t = type_hash_canon (hash, probe);
8614 
8615   if (t == probe)
8616     {
8617       /* We created a new type.  The hash insertion will have laid
8618 	 out the type.  We need to check the canonicalization and
8619 	 maybe set the name.  */
8620       gcc_checking_assert (COMPLETE_TYPE_P (t)
8621 			   && !TYPE_NAME (t)
8622 			   && TYPE_CANONICAL (t) == t);
8623 
8624       if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
8625 	SET_TYPE_STRUCTURAL_EQUALITY (t);
8626       else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
8627 	TYPE_CANONICAL (t)
8628 	  = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);
8629 
8630       /* We need to create a name, since complex is a fundamental type.  */
8631       if (named)
8632 	{
8633 	  const char *name = NULL;
8634 
8635 	  if (TREE_TYPE (t) == char_type_node)
8636 	    name = "complex char";
8637 	  else if (TREE_TYPE (t) == signed_char_type_node)
8638 	    name = "complex signed char";
8639 	  else if (TREE_TYPE (t) == unsigned_char_type_node)
8640 	    name = "complex unsigned char";
8641 	  else if (TREE_TYPE (t) == short_integer_type_node)
8642 	    name = "complex short int";
8643 	  else if (TREE_TYPE (t) == short_unsigned_type_node)
8644 	    name = "complex short unsigned int";
8645 	  else if (TREE_TYPE (t) == integer_type_node)
8646 	    name = "complex int";
8647 	  else if (TREE_TYPE (t) == unsigned_type_node)
8648 	    name = "complex unsigned int";
8649 	  else if (TREE_TYPE (t) == long_integer_type_node)
8650 	    name = "complex long int";
8651 	  else if (TREE_TYPE (t) == long_unsigned_type_node)
8652 	    name = "complex long unsigned int";
8653 	  else if (TREE_TYPE (t) == long_long_integer_type_node)
8654 	    name = "complex long long int";
8655 	  else if (TREE_TYPE (t) == long_long_unsigned_type_node)
8656 	    name = "complex long long unsigned int";
8657 
8658 	  if (name != NULL)
8659 	    TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8660 					get_identifier (name), t);
8661 	}
8662     }
8663 
8664   return build_qualified_type (t, TYPE_QUALS (component_type));
8665 }
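
/* Illustrative example (not part of the original source):

     tree c = build_complex_type (double_type_node);

   normally returns the node already built at start-up as
   complex_double_type_node, since the hash lookup finds the existing
   type; any qualifiers on COMPONENT_TYPE carry over to the result via
   build_qualified_type.  */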
8666 
8667 /* If TYPE is a real or complex floating-point type and the target
8668    does not directly support arithmetic on TYPE then return the wider
8669    type to be used for arithmetic on TYPE.  Otherwise, return
8670    NULL_TREE.  */
8671 
8672 tree
8673 excess_precision_type (tree type)
8674 {
8675   /* The target can give two different responses to the question of
8676      which excess precision mode it would like depending on whether we
8677      are in -fexcess-precision=standard or -fexcess-precision=fast.  */
8678 
8679   enum excess_precision_type requested_type
8680     = (flag_excess_precision == EXCESS_PRECISION_FAST
8681        ? EXCESS_PRECISION_TYPE_FAST
8682        : EXCESS_PRECISION_TYPE_STANDARD);
8683 
8684   enum flt_eval_method target_flt_eval_method
8685     = targetm.c.excess_precision (requested_type);
8686 
8687   /* The target should not ask for unpredictable float evaluation (it may
8688      advertise that the evaluation is implicitly unpredictable, but we do
8689      not care about that here; it will have been reported elsewhere).  If
8690      it does ask for unpredictable evaluation, we have nothing to do
8691      here.  */
8692   gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
8693 
8694   /* Nothing to do.  The target has asked for all types we know about
8695      to be computed with their native precision and range.  */
8696   if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
8697     return NULL_TREE;
8698 
8699   /* The target will promote this type in a target-dependent way, so excess
8700      precision ought to leave it alone.  */
8701   if (targetm.promoted_type (type) != NULL_TREE)
8702     return NULL_TREE;
8703 
8704   machine_mode float16_type_mode = (float16_type_node
8705 				    ? TYPE_MODE (float16_type_node)
8706 				    : VOIDmode);
8707   machine_mode float_type_mode = TYPE_MODE (float_type_node);
8708   machine_mode double_type_mode = TYPE_MODE (double_type_node);
8709 
8710   switch (TREE_CODE (type))
8711     {
8712     case REAL_TYPE:
8713       {
8714 	machine_mode type_mode = TYPE_MODE (type);
8715 	switch (target_flt_eval_method)
8716 	  {
8717 	  case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8718 	    if (type_mode == float16_type_mode)
8719 	      return float_type_node;
8720 	    break;
8721 	  case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8722 	    if (type_mode == float16_type_mode
8723 		|| type_mode == float_type_mode)
8724 	      return double_type_node;
8725 	    break;
8726 	  case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8727 	    if (type_mode == float16_type_mode
8728 		|| type_mode == float_type_mode
8729 		|| type_mode == double_type_mode)
8730 	      return long_double_type_node;
8731 	    break;
8732 	  default:
8733 	    gcc_unreachable ();
8734 	  }
8735 	break;
8736       }
8737     case COMPLEX_TYPE:
8738       {
8739 	if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8740 	  return NULL_TREE;
8741 	machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
8742 	switch (target_flt_eval_method)
8743 	  {
8744 	  case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8745 	    if (type_mode == float16_type_mode)
8746 	      return complex_float_type_node;
8747 	    break;
8748 	  case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8749 	    if (type_mode == float16_type_mode
8750 		|| type_mode == float_type_mode)
8751 	      return complex_double_type_node;
8752 	    break;
8753 	  case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8754 	    if (type_mode == float16_type_mode
8755 		|| type_mode == float_type_mode
8756 		|| type_mode == double_type_mode)
8757 	      return complex_long_double_type_node;
8758 	    break;
8759 	  default:
8760 	    gcc_unreachable ();
8761 	  }
8762 	break;
8763       }
8764     default:
8765       break;
8766     }
8767 
8768   return NULL_TREE;
8769 }
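
/* Usage sketch (editor's illustration, not part of the original source):
   a front end deciding whether to carry out arithmetic in a wider type
   might use excess_precision_type along these lines:

     tree eptype = excess_precision_type (TREE_TYPE (op));
     if (eptype != NULL_TREE)
       op = fold_convert (eptype, op);

   For example, with -fexcess-precision=standard on a target whose hook
   answers FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE, an OP of type float is
   converted to double (and complex float to complex double).  */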
8770 
8771 /* Return OP, stripped of any conversions to wider types as much as is safe.
8772    Converting the value back to OP's type makes a value equivalent to OP.
8773 
8774    If FOR_TYPE is nonzero, we return a value which, if converted to
8775    type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8776 
8777    OP must have integer, real or enumeral type.  Pointers are not allowed!
8778 
8779    There are some cases where the obvious value we could return
8780    would regenerate to OP if converted to OP's type,
8781    but would not extend like OP to wider types.
8782    If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8783    For example, if OP is (unsigned short)(signed char)-1,
8784    we avoid returning (signed char)-1 if FOR_TYPE is int,
8785    even though extending that to an unsigned short would regenerate OP,
8786    since the result of extending (signed char)-1 to (int)
8787    is different from (int) OP.  */
8788 
8789 tree
8790 get_unwidened (tree op, tree for_type)
8791 {
8792   /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension.  */
8793   tree type = TREE_TYPE (op);
8794   unsigned final_prec
8795     = TYPE_PRECISION (for_type != 0 ? for_type : type);
8796   int uns
8797     = (for_type != 0 && for_type != type
8798        && final_prec > TYPE_PRECISION (type)
8799        && TYPE_UNSIGNED (type));
8800   tree win = op;
8801 
8802   while (CONVERT_EXPR_P (op))
8803     {
8804       int bitschange;
8805 
8806       /* TYPE_PRECISION on vector types has different meaning
8807 	 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8808 	 so avoid them here.  */
8809       if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8810 	break;
8811 
8812       bitschange = TYPE_PRECISION (TREE_TYPE (op))
8813 		   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8814 
8815       /* Truncations are many-one and so cannot be removed, unless we are
8816 	 later going to truncate down even farther.  */
8817       if (bitschange < 0
8818 	  && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8819 	break;
8820 
8821       /* See what's inside this conversion.  If we decide to strip it,
8822 	 we will set WIN.  */
8823       op = TREE_OPERAND (op, 0);
8824 
8825       /* If we have not stripped any zero-extensions (uns is 0),
8826 	 we can strip any kind of extension.
8827 	 If we have previously stripped a zero-extension,
8828 	 only zero-extensions can safely be stripped.
8829 	 Any extension can be stripped if the bits it would produce
8830 	 are all going to be discarded later by truncating to FOR_TYPE.  */
8831 
8832       if (bitschange > 0)
8833 	{
8834 	  if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8835 	    win = op;
8836 	  /* TYPE_UNSIGNED says whether this is a zero-extension.
8837 	     Let's avoid computing it if it does not affect WIN
8838 	     and if UNS will not be needed again.  */
8839 	  if ((uns
8840 	       || CONVERT_EXPR_P (op))
8841 	      && TYPE_UNSIGNED (TREE_TYPE (op)))
8842 	    {
8843 	      uns = 1;
8844 	      win = op;
8845 	    }
8846 	}
8847     }
8848 
8849   /* If we finally reach a constant, see if it fits in something smaller
8850      and in that case convert it.  */
8851   if (TREE_CODE (win) == INTEGER_CST)
8852     {
8853       tree wtype = TREE_TYPE (win);
8854       unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
8855       if (for_type)
8856 	prec = MAX (prec, final_prec);
8857       if (prec < TYPE_PRECISION (wtype))
8858 	{
8859 	  tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
8860 	  if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
8861 	    win = fold_convert (t, win);
8862 	}
8863     }
8864 
8865   return win;
8866 }
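
/* Usage sketch (editor's illustration, not part of the original source):
   a folder that wants to perform an operation in a narrower type can
   strip redundant widening casts from the operands first:

     tree narrow0 = get_unwidened (op0, type);
     tree narrow1 = get_unwidened (op1, type);

   For OP0 = (int) s where S has type short and TYPE is int, the
   widening cast is stripped and S itself is returned; converting the
   result back to TYPE always reproduces the original value.  */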
8867 
8868 /* Return OP or a simpler expression for a narrower value
8869    which can be sign-extended or zero-extended to give back OP.
8870    Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8871    or 0 if the value should be sign-extended.  */
8872 
8873 tree
8874 get_narrower (tree op, int *unsignedp_ptr)
8875 {
8876   int uns = 0;
8877   int first = 1;
8878   tree win = op;
8879   bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8880 
8881   if (TREE_CODE (op) == COMPOUND_EXPR)
8882     {
8883       do
8884 	op = TREE_OPERAND (op, 1);
8885       while (TREE_CODE (op) == COMPOUND_EXPR);
8886       tree ret = get_narrower (op, unsignedp_ptr);
8887       if (ret == op)
8888 	return win;
8889       auto_vec <tree, 16> v;
8890       unsigned int i;
8891       for (op = win; TREE_CODE (op) == COMPOUND_EXPR;
8892 	   op = TREE_OPERAND (op, 1))
8893 	v.safe_push (op);
8894       FOR_EACH_VEC_ELT_REVERSE (v, i, op)
8895 	ret = build2_loc (EXPR_LOCATION (op), COMPOUND_EXPR,
8896 			  TREE_TYPE (ret), TREE_OPERAND (op, 0),
8897 			  ret);
8898       return ret;
8899     }
8900   while (TREE_CODE (op) == NOP_EXPR)
8901     {
8902       int bitschange
8903 	= (TYPE_PRECISION (TREE_TYPE (op))
8904 	   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8905 
8906       /* Truncations are many-one so cannot be removed.  */
8907       if (bitschange < 0)
8908 	break;
8909 
8910       /* See what's inside this conversion.  If we decide to strip it,
8911 	 we will set WIN.  */
8912 
8913       if (bitschange > 0)
8914 	{
8915 	  op = TREE_OPERAND (op, 0);
8916 	  /* An extension: the outermost one can be stripped,
8917 	     but remember whether it is zero or sign extension.  */
8918 	  if (first)
8919 	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
8920 	  /* Otherwise, if a sign extension has been stripped,
8921 	     only sign extensions can now be stripped;
8922 	     if a zero extension has been stripped, only zero-extensions.  */
8923 	  else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8924 	    break;
8925 	  first = 0;
8926 	}
8927       else /* bitschange == 0 */
8928 	{
8929 	  /* A change in nominal type can always be stripped, but we must
8930 	     preserve the unsignedness.  */
8931 	  if (first)
8932 	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
8933 	  first = 0;
8934 	  op = TREE_OPERAND (op, 0);
8935 	  /* Keep trying to narrow, but don't assign op to win if it
8936 	     would turn an integral type into something else.  */
8937 	  if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8938 	    continue;
8939 	}
8940 
8941       win = op;
8942     }
8943 
8944   if (TREE_CODE (op) == COMPONENT_REF
8945       /* Since type_for_size always gives an integer type.  */
8946       && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8947       && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8948       /* Ensure field is laid out already.  */
8949       && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8950       && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8951     {
8952       unsigned HOST_WIDE_INT innerprec
8953 	= tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8954       int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8955 		       || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8956       tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8957 
8958       /* We can get this structure field in a narrower type that fits it,
8959 	 but the resulting extension to its nominal type (a fullword type)
8960 	 must satisfy the same conditions as for other extensions.
8961 
8962 	 Do this only for fields that are aligned (not bit-fields),
8963 	 because when bit-field insns will be used there is no
8964 	 advantage in doing this.  */
8965 
8966       if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8967 	  && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8968 	  && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8969 	  && type != 0)
8970 	{
8971 	  if (first)
8972 	    uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8973 	  win = fold_convert (type, op);
8974 	}
8975     }
8976 
8977   *unsignedp_ptr = uns;
8978   return win;
8979 }
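
/* Usage sketch (editor's illustration, not part of the original source):

     int unsignedp;
     tree narrow = get_narrower (op, &unsignedp);

   If NARROW differs from OP, then zero-extending (UNSIGNEDP nonzero) or
   sign-extending (UNSIGNEDP zero) NARROW back to TREE_TYPE (op) yields
   the value of OP again, which is what narrowing optimizations rely
   on.  */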
8980 
8981 /* Return true if integer constant C has a value that is permissible
8982    for TYPE, an integral type.  */
8983 
8984 bool
8985 int_fits_type_p (const_tree c, const_tree type)
8986 {
8987   tree type_low_bound, type_high_bound;
8988   bool ok_for_low_bound, ok_for_high_bound;
8989   signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8990 
8991   /* Non-standard boolean types can have arbitrary precision but various
8992      transformations assume that they can only take values 0 and +/-1.  */
8993   if (TREE_CODE (type) == BOOLEAN_TYPE)
8994     return wi::fits_to_boolean_p (wi::to_wide (c), type);
8995 
8996 retry:
8997   type_low_bound = TYPE_MIN_VALUE (type);
8998   type_high_bound = TYPE_MAX_VALUE (type);
8999 
9000   /* If at least one bound of the type is a constant integer, we can check
9001      ourselves and maybe make a decision. If no such decision is possible, but
9002      this type is a subtype, try checking against that.  Otherwise, use
9003      fits_to_tree_p, which checks against the precision.
9004 
9005      Compute the status for each possibly constant bound, and return at once
9006      if we see that the constant does not fit.  Use the boolean flags
9007      ok_for_xxx_bound for this: false means "unknown whether the constant
9008      fits" (the bound is not constant), true means "known to fit".  */
9009 
9010   /* Check if c >= type_low_bound.  */
9011   if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
9012     {
9013       if (tree_int_cst_lt (c, type_low_bound))
9014 	return false;
9015       ok_for_low_bound = true;
9016     }
9017   else
9018     ok_for_low_bound = false;
9019 
9020   /* Check if c <= type_high_bound.  */
9021   if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
9022     {
9023       if (tree_int_cst_lt (type_high_bound, c))
9024 	return false;
9025       ok_for_high_bound = true;
9026     }
9027   else
9028     ok_for_high_bound = false;
9029 
9030   /* If the constant fits both bounds, the result is known.  */
9031   if (ok_for_low_bound && ok_for_high_bound)
9032     return true;
9033 
9034   /* Perform some generic filtering which may allow making a decision
9035      even if the bounds are not constant.  First, negative integers
9036      never fit in unsigned types.  */
9037   if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
9038     return false;
9039 
9040   /* Second, narrower types always fit in wider ones.  */
9041   if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
9042     return true;
9043 
9044   /* Third, unsigned integers with top bit set never fit signed types.  */
9045   if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
9046     {
9047       int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
9048       if (prec < TYPE_PRECISION (TREE_TYPE (c)))
9049 	{
9050 	  /* When a tree_cst is converted to a wide-int, the precision
9051 	     is taken from the type.  However, if the precision of the
9052 	     mode underneath the type is smaller than that, it is
9053 	     possible that the value will not fit.  The test below
9054 	     fails if any bit is set between the sign bit of the
9055 	     underlying mode and the top bit of the type.  */
9056 	  if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
9057 	    return false;
9058 	}
9059       else if (wi::neg_p (wi::to_wide (c)))
9060 	return false;
9061     }
9062 
9063   /* If we haven't been able to decide at this point, there is nothing more we
9064      can check ourselves here.  Look at the base type if we have one and it
9065      has the same precision.  */
9066   if (TREE_CODE (type) == INTEGER_TYPE
9067       && TREE_TYPE (type) != 0
9068       && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
9069     {
9070       type = TREE_TYPE (type);
9071       goto retry;
9072     }
9073 
9074   /* Failing that, fall back to fits_to_tree_p.  */
9075   return wi::fits_to_tree_p (wi::to_wide (c), type);
9076 }
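
/* Example (editor's illustration, not part of the original source):

     tree c = build_int_cst (integer_type_node, 300);
     bool fits = int_fits_type_p (c, unsigned_char_type_node);

   Assuming the usual 8-bit unsigned char with range 0..255, FITS is
   false here, whereas the same call with the constant 200 yields true.
   A negative constant never fits an unsigned type, whatever its
   bounds.  */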
9077 
9078 /* Stores bounds of an integer TYPE in MIN and MAX.  If TYPE has non-constant
9079    bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
9080    represented (assuming two's-complement arithmetic) within the bit
9081    precision of the type are returned instead.  */
9082 
9083 void
9084 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
9085 {
9086   if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
9087       && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
9088     wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
9089   else
9090     {
9091       if (TYPE_UNSIGNED (type))
9092 	mpz_set_ui (min, 0);
9093       else
9094 	{
9095 	  wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
9096 	  wi::to_mpz (mn, min, SIGNED);
9097 	}
9098     }
9099 
9100   if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
9101       && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
9102     wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
9103   else
9104     {
9105       wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
9106       wi::to_mpz (mn, max, TYPE_SIGN (type));
9107     }
9108 }
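
/* Usage sketch (editor's illustration, not part of the original source):
   callers of get_type_static_bounds own the GMP values and must
   initialize and clear them:

     mpz_t lo, hi;
     mpz_init (lo);
     mpz_init (hi);
     get_type_static_bounds (type, lo, hi);
     ... compare against LO and HI with mpz_cmp, etc. ...
     mpz_clear (lo);
     mpz_clear (hi);

   For pointer types or non-constant bounds, the full two's-complement
   range of the type's precision is stored instead.  */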
9109 
9110 /* Return true if VAR is an automatic variable.  */
9111 
9112 bool
9113 auto_var_p (const_tree var)
9114 {
9115   return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
9116 	    || TREE_CODE (var) == PARM_DECL)
9117 	   && ! TREE_STATIC (var))
9118 	  || TREE_CODE (var) == RESULT_DECL);
9119 }
9120 
9121 /* Return true if VAR is an automatic variable defined in function FN.  */
9122 
9123 bool
9124 auto_var_in_fn_p (const_tree var, const_tree fn)
9125 {
9126   return (DECL_P (var) && DECL_CONTEXT (var) == fn
9127 	  && (auto_var_p (var)
9128 	      || TREE_CODE (var) == LABEL_DECL));
9129 }
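
/* Usage sketch (editor's illustration, not part of the original source):
   while compiling a function, a pass can test whether a DECL is local
   to it with

     if (auto_var_in_fn_p (decl, current_function_decl))
       ...

   which accepts automatic variables, parameters, results and labels
   whose DECL_CONTEXT is that function.  */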
9130 
9131 /* Subprogram of following function.  Called by walk_tree.
9132 
9133    Return *TP if it is an automatic variable or parameter of the
9134    function passed in as DATA.  */
9135 
9136 static tree
9137 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
9138 {
9139   tree fn = (tree) data;
9140 
9141   if (TYPE_P (*tp))
9142     *walk_subtrees = 0;
9143 
9144   else if (DECL_P (*tp)
9145 	   && auto_var_in_fn_p (*tp, fn))
9146     return *tp;
9147 
9148   return NULL_TREE;
9149 }
9150 
9151 /* Returns true if T is, contains, or refers to a type with variable
9152    size.  For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
9153    arguments, but not the return type.  If FN is nonzero, only return
9154    true if a modifier of the type or position of FN is a variable or
9155    parameter inside FN.
9156 
9157    This concept is more general than that of C99 'variably modified types':
9158    in C99, a struct type is never variably modified because a VLA may not
9159    appear as a structure member.  In GNU C, however, code like:
9160 
9161      struct S { int i[f()]; };
9162 
9163    is valid, and other languages may define similar constructs.  */
9164 
9165 bool
9166 variably_modified_type_p (tree type, tree fn)
9167 {
9168   tree t;
9169 
9170 /* Test if T is either variable (if FN is zero) or an expression containing
9171    a variable in FN.  If TYPE isn't gimplified, return true also if
9172    gimplify_one_sizepos would gimplify the expression into a local
9173    variable.  */
9174 #define RETURN_TRUE_IF_VAR(T)						\
9175   do { tree _t = (T);							\
9176     if (_t != NULL_TREE							\
9177 	&& _t != error_mark_node					\
9178 	&& !CONSTANT_CLASS_P (_t)					\
9179 	&& TREE_CODE (_t) != PLACEHOLDER_EXPR				\
9180 	&& (!fn								\
9181 	    || (!TYPE_SIZES_GIMPLIFIED (type)				\
9182 		&& (TREE_CODE (_t) != VAR_DECL				\
9183 		    && !CONTAINS_PLACEHOLDER_P (_t)))			\
9184 	    || walk_tree (&_t, find_var_from_fn, fn, NULL)))		\
9185       return true;  } while (0)
9186 
9187   if (type == error_mark_node)
9188     return false;
9189 
9190   /* If TYPE itself has variable size, it is variably modified.  */
9191   RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
9192   RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
9193 
9194   switch (TREE_CODE (type))
9195     {
9196     case POINTER_TYPE:
9197     case REFERENCE_TYPE:
9198     case VECTOR_TYPE:
9199       /* Ada can have pointer types referring to themselves indirectly.  */
9200       if (TREE_VISITED (type))
9201 	return false;
9202       TREE_VISITED (type) = true;
9203       if (variably_modified_type_p (TREE_TYPE (type), fn))
9204 	{
9205 	  TREE_VISITED (type) = false;
9206 	  return true;
9207 	}
9208       TREE_VISITED (type) = false;
9209       break;
9210 
9211     case FUNCTION_TYPE:
9212     case METHOD_TYPE:
9213       /* If TYPE is a function type, it is variably modified if the
9214 	 return type is variably modified.  */
9215       if (variably_modified_type_p (TREE_TYPE (type), fn))
9216 	  return true;
9217       break;
9218 
9219     case INTEGER_TYPE:
9220     case REAL_TYPE:
9221     case FIXED_POINT_TYPE:
9222     case ENUMERAL_TYPE:
9223     case BOOLEAN_TYPE:
9224       /* Scalar types are variably modified if their end points
9225 	 aren't constant.  */
9226       RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
9227       RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
9228       break;
9229 
9230     case RECORD_TYPE:
9231     case UNION_TYPE:
9232     case QUAL_UNION_TYPE:
9233       /* We can't see if any of the fields are variably-modified by the
9234 	 definition we normally use, since that would produce infinite
9235 	 recursion via pointers.  */
9236       /* This is variably modified if some field's type is.  */
9237       for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
9238 	if (TREE_CODE (t) == FIELD_DECL)
9239 	  {
9240 	    RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
9241 	    RETURN_TRUE_IF_VAR (DECL_SIZE (t));
9242 	    RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
9243 
9244 	    /* If the type is a qualified union, then the DECL_QUALIFIER
9245 	       of fields can also be an expression containing a variable.  */
9246 	    if (TREE_CODE (type) == QUAL_UNION_TYPE)
9247 	      RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
9248 
9249 	    /* If the field is a qualified union, then it's only a container
9250 	       for what's inside so we look into it.  That's necessary in LTO
9251 	       mode because the sizes of the field tested above have been set
9252 	       to PLACEHOLDER_EXPRs by free_lang_data.  */
9253 	    if (TREE_CODE (TREE_TYPE (t)) == QUAL_UNION_TYPE
9254 		&& variably_modified_type_p (TREE_TYPE (t), fn))
9255 	      return true;
9256 	  }
9257       break;
9258 
9259     case ARRAY_TYPE:
9260       /* Do not call ourselves to avoid infinite recursion.  This is
9261 	 variably modified if the element type is.  */
9262       RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
9263       RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
9264       break;
9265 
9266     default:
9267       break;
9268     }
9269 
9270   /* The current language may have other cases to check, but in general,
9271      all other types are not variably modified.  */
9272   return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
9273 
9274 #undef RETURN_TRUE_IF_VAR
9275 }
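
/* Example (editor's illustration, not part of the original source):
   for C source such as

     void f (int n)
     {
       int a[n];
     }

   the type of A has a non-constant TYPE_SIZE, so calling
   variably_modified_type_p (TREE_TYPE (a_decl), NULL_TREE) returns
   true; passing f's FUNCTION_DECL as FN restricts the answer to sizes
   that depend on variables or parameters of f (here, the parameter N).
   A_DECL stands for the VAR_DECL of A.  */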
9276 
9277 /* Given a DECL or TYPE, return the scope in which it was declared, or
9278    NULL_TREE if there is no containing scope.  */
9279 
9280 tree
9281 get_containing_scope (const_tree t)
9282 {
9283   return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9284 }
9285 
9286 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL.  */
9287 
9288 const_tree
9289 get_ultimate_context (const_tree decl)
9290 {
9291   while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
9292     {
9293       if (TREE_CODE (decl) == BLOCK)
9294 	decl = BLOCK_SUPERCONTEXT (decl);
9295       else
9296 	decl = get_containing_scope (decl);
9297     }
9298   return decl;
9299 }
9300 
9301 /* Return the innermost context enclosing DECL that is
9302    a FUNCTION_DECL, or zero if none.  */
9303 
9304 tree
9305 decl_function_context (const_tree decl)
9306 {
9307   tree context;
9308 
9309   if (TREE_CODE (decl) == ERROR_MARK)
9310     return 0;
9311 
9312   /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9313      where we look up the function at runtime.  Such functions always take
9314      a first argument of type 'pointer to real context'.
9315 
9316      C++ should really be fixed to use DECL_CONTEXT for the real context,
9317      and use something else for the "virtual context".  */
9318   else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
9319     context
9320       = TYPE_MAIN_VARIANT
9321 	(TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9322   else
9323     context = DECL_CONTEXT (decl);
9324 
9325   while (context && TREE_CODE (context) != FUNCTION_DECL)
9326     {
9327       if (TREE_CODE (context) == BLOCK)
9328 	context = BLOCK_SUPERCONTEXT (context);
9329       else
9330 	context = get_containing_scope (context);
9331     }
9332 
9333   return context;
9334 }
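
/* Usage sketch (editor's illustration, not part of the original source):

     tree fn = decl_function_context (decl);

   For a local variable or a nested function, FN is the enclosing
   FUNCTION_DECL; for a file-scope declaration it is NULL_TREE, which is
   how callers distinguish nested from top-level entities.  */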
9335 
9336 /* Return the innermost context enclosing DECL that is
9337    a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9338    TYPE_DECLs and FUNCTION_DECLs are transparent to this function.  */
9339 
9340 tree
9341 decl_type_context (const_tree decl)
9342 {
9343   tree context = DECL_CONTEXT (decl);
9344 
9345   while (context)
9346     switch (TREE_CODE (context))
9347       {
9348       case NAMESPACE_DECL:
9349       case TRANSLATION_UNIT_DECL:
9350 	return NULL_TREE;
9351 
9352       case RECORD_TYPE:
9353       case UNION_TYPE:
9354       case QUAL_UNION_TYPE:
9355 	return context;
9356 
9357       case TYPE_DECL:
9358       case FUNCTION_DECL:
9359 	context = DECL_CONTEXT (context);
9360 	break;
9361 
9362       case BLOCK:
9363 	context = BLOCK_SUPERCONTEXT (context);
9364 	break;
9365 
9366       default:
9367 	gcc_unreachable ();
9368       }
9369 
9370   return NULL_TREE;
9371 }
9372 
9373 /* CALL is a CALL_EXPR.  Return the declaration for the function
9374    called, or NULL_TREE if the called function cannot be
9375    determined.  */
9376 
9377 tree
9378 get_callee_fndecl (const_tree call)
9379 {
9380   tree addr;
9381 
9382   if (call == error_mark_node)
9383     return error_mark_node;
9384 
9385   /* It's invalid to call this function with anything but a
9386      CALL_EXPR.  */
9387   gcc_assert (TREE_CODE (call) == CALL_EXPR);
9388 
9389   /* The first operand to the CALL is the address of the function
9390      called.  */
9391   addr = CALL_EXPR_FN (call);
9392 
9393   /* If there is no function, return early.  */
9394   if (addr == NULL_TREE)
9395     return NULL_TREE;
9396 
9397   STRIP_NOPS (addr);
9398 
9399   /* If this is a readonly function pointer, extract its initial value.  */
9400   if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9401       && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9402       && DECL_INITIAL (addr))
9403     addr = DECL_INITIAL (addr);
9404 
9405   /* If the address is just `&f' for some function `f', then we know
9406      that `f' is being called.  */
9407   if (TREE_CODE (addr) == ADDR_EXPR
9408       && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9409     return TREE_OPERAND (addr, 0);
9410 
9411   /* We couldn't figure out what was being called.  */
9412   return NULL_TREE;
9413 }
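
/* Usage sketch (editor's illustration, not part of the original source):
   combined with fndecl_built_in_p, get_callee_fndecl is a common way to
   recognize direct calls to known functions:

     tree fndecl = get_callee_fndecl (exp);
     if (fndecl != NULL_TREE
	 && fndecl_built_in_p (fndecl, BUILT_IN_MEMCPY))
       ...

   Calls through a non-constant function pointer simply yield NULL_TREE
   here.  */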
9414 
9415 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
9416    return the associated function code, otherwise return CFN_LAST.  */
9417 
9418 combined_fn
9419 get_call_combined_fn (const_tree call)
9420 {
9421   /* It's invalid to call this function with anything but a CALL_EXPR.  */
9422   gcc_assert (TREE_CODE (call) == CALL_EXPR);
9423 
9424   if (!CALL_EXPR_FN (call))
9425     return as_combined_fn (CALL_EXPR_IFN (call));
9426 
9427   tree fndecl = get_callee_fndecl (call);
9428   if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
9429     return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
9430 
9431   return CFN_LAST;
9432 }
9433 
9434 /* Comparator of indices based on tree_node_counts.  */
9435 
9436 static int
9437 tree_nodes_cmp (const void *p1, const void *p2)
9438 {
9439   const unsigned *n1 = (const unsigned *)p1;
9440   const unsigned *n2 = (const unsigned *)p2;
9441 
9442   return tree_node_counts[*n1] - tree_node_counts[*n2];
9443 }
9444 
9445 /* Comparator of indices based on tree_code_counts.  */
9446 
9447 static int
9448 tree_codes_cmp (const void *p1, const void *p2)
9449 {
9450   const unsigned *n1 = (const unsigned *)p1;
9451   const unsigned *n2 = (const unsigned *)p2;
9452 
9453   return tree_code_counts[*n1] - tree_code_counts[*n2];
9454 }
9455 
9456 #define TREE_MEM_USAGE_SPACES 40
9457 
9458 /* Print debugging information about tree nodes generated during the compile,
9459    and any language-specific information.  */
9460 
9461 void
9462 dump_tree_statistics (void)
9463 {
9464   if (GATHER_STATISTICS)
9465     {
9466       uint64_t total_nodes, total_bytes;
9467       fprintf (stderr, "\nKind                   Nodes      Bytes\n");
9468       mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9469       total_nodes = total_bytes = 0;
9470 
9471       {
9472 	auto_vec<unsigned> indices (all_kinds);
9473 	for (unsigned i = 0; i < all_kinds; i++)
9474 	  indices.quick_push (i);
9475 	indices.qsort (tree_nodes_cmp);
9476 
9477 	for (unsigned i = 0; i < (int) all_kinds; i++)
9478 	  {
9479 	    unsigned j = indices[i];
9480 	    fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
9481 		     tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
9482 		     SIZE_AMOUNT (tree_node_sizes[j]));
9483 	    total_nodes += tree_node_counts[j];
9484 	    total_bytes += tree_node_sizes[j];
9485 	  }
9486 	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9487 	fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
9488 		 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
9489 	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9490       }
9491 
9492       {
9493 	fprintf (stderr, "Code                              Nodes\n");
9494 	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9495 
9496 	auto_vec<unsigned> indices (MAX_TREE_CODES);
9497 	for (unsigned i = 0; i < MAX_TREE_CODES; i++)
9498 	  indices.quick_push (i);
9499 	indices.qsort (tree_codes_cmp);
9500 
9501 	for (unsigned i = 0; i < MAX_TREE_CODES; i++)
9502 	  {
9503 	    unsigned j = indices[i];
9504 	    fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
9505 		     get_tree_code_name ((enum tree_code) j),
9506 		     SIZE_AMOUNT (tree_code_counts[j]));
9507 	  }
9508 	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9509 	fprintf (stderr, "\n");
9510 	ssanames_print_statistics ();
9511 	fprintf (stderr, "\n");
9512 	phinodes_print_statistics ();
9513 	fprintf (stderr, "\n");
9514       }
9515     }
9516   else
9517     fprintf (stderr, "(No per-node statistics)\n");
9518 
9519   print_type_hash_statistics ();
9520   print_debug_expr_statistics ();
9521   print_value_expr_statistics ();
9522   lang_hooks.print_statistics ();
9523 }
9524 
9525 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9526 
9527 /* Generate a crc32 of the low BYTES bytes of VALUE.  */
9528 
9529 unsigned
9530 crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
9531 {
9532   /* This relies on the raw feedback's top 4 bits being zero.  */
9533 #define FEEDBACK(X) ((X) * 0x04c11db7)
9534 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
9535 		     ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
9536   static const unsigned syndromes[16] =
9537     {
9538       SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
9539       SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
9540       SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
9541       SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
9542     };
9543 #undef FEEDBACK
9544 #undef SYNDROME
9545 
9546   value <<= (32 - bytes * 8);
9547   for (unsigned ix = bytes * 2; ix--; value <<= 4)
9548     {
9549       unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];
9550 
9551       chksum = (chksum << 4) ^ feedback;
9552     }
9553 
9554   return chksum;
9555 }
9556 
9557 /* Generate a crc32 of a string.  */
9558 
9559 unsigned
9560 crc32_string (unsigned chksum, const char *string)
9561 {
9562   do
9563     chksum = crc32_byte (chksum, *string);
9564   while (*string++);
9565   return chksum;
9566 }
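
/* Usage sketch (editor's illustration, not part of the original source):
   the crc32 helpers are chained by feeding the running checksum back in:

     unsigned chk = crc32_string (0, main_input_filename);
     chk = crc32_unsigned_n (chk, some_value, 4);

   SOME_VALUE is a placeholder for any 32-bit quantity worth mixing in;
   get_file_function_name below combines a crc32 of a global object name
   with a random seed to build a reasonably unique suffix.  */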
9567 
9568 /* P is a string that will be used in a symbol.  Mask out any characters
9569    that are not valid in that context.  */
9570 
9571 void
9572 clean_symbol_name (char *p)
9573 {
9574   for (; *p; p++)
9575     if (! (ISALNUM (*p)
9576 #ifndef NO_DOLLAR_IN_LABEL	/* this for `$'; unlikely, but... -- kr */
9577 	    || *p == '$'
9578 #endif
9579 #ifndef NO_DOT_IN_LABEL		/* this for `.'; unlikely, but...  */
9580 	    || *p == '.'
9581 #endif
9582 	   ))
9583       *p = '_';
9584 }
9585 
9586 static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH.  */
9587 
9588 /* Create a unique anonymous identifier.  The identifier is still a
9589    valid assembly label.  */
9590 
9591 tree
9592 make_anon_name ()
9593 {
9594   const char *fmt =
9595 #if !defined (NO_DOT_IN_LABEL)
9596     "."
9597 #elif !defined (NO_DOLLAR_IN_LABEL)
9598     "$"
9599 #else
9600     "_"
9601 #endif
9602     "_anon_%d";
9603 
9604   char buf[24];
9605   int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
9606   gcc_checking_assert (len < int (sizeof (buf)));
9607 
9608   tree id = get_identifier_with_length (buf, len);
9609   IDENTIFIER_ANON_P (id) = true;
9610 
9611   return id;
9612 }
9613 
9614 /* Generate a name for a special-purpose function.
9615    The generated name may need to be unique across the whole link.
9616    Changes to this function may also require corresponding changes to
9617    xstrdup_mask_random.
9618    TYPE is some string to identify the purpose of this function to the
9619    linker or collect2; it must start with an uppercase letter,
9620    one of:
9621    I - for constructors
9622    D - for destructors
9623    N - for C++ anonymous namespaces
9624    F - for DWARF unwind frame information.  */
9625 
9626 tree
9627 get_file_function_name (const char *type)
9628 {
9629   char *buf;
9630   const char *p;
9631   char *q;
9632 
9633   /* If we already have a name we know to be unique, just use that.  */
9634   if (first_global_object_name)
9635     p = q = ASTRDUP (first_global_object_name);
9636   /* If the target is handling the constructors/destructors, they
9637      will be local to this file and the name is only necessary for
9638      debugging purposes.
9639      We also assign sub_I and sub_D suffixes to constructors called from
9640      the global static constructors.  These are always local.  */
9641   else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9642 	   || (strncmp (type, "sub_", 4) == 0
9643 	       && (type[4] == 'I' || type[4] == 'D')))
9644     {
9645       const char *file = main_input_filename;
9646       if (! file)
9647 	file = LOCATION_FILE (input_location);
9648       /* Just use the file's basename, because the full pathname
9649 	 might be quite long.  */
9650       p = q = ASTRDUP (lbasename (file));
9651     }
9652   else
9653     {
9654       /* Otherwise, the name must be unique across the entire link.
9655 	 We don't have anything that we know to be unique to this translation
9656 	 unit, so use what we do have and throw in some randomness.  */
9657       unsigned len;
9658       const char *name = weak_global_object_name;
9659       const char *file = main_input_filename;
9660 
9661       if (! name)
9662 	name = "";
9663       if (! file)
9664 	file = LOCATION_FILE (input_location);
9665 
9666       len = strlen (file);
9667       q = (char *) alloca (9 + 19 + len + 1);
9668       memcpy (q, file, len + 1);
9669 
9670       snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9671 		crc32_string (0, name), get_random_seed (false));
9672 
9673       p = q;
9674     }
9675 
9676   clean_symbol_name (q);
9677   buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9678 			 + strlen (type));
9679 
9680   /* Set up the name of the file-level functions we may need.
9681      Use a global object (which is already required to be unique over
9682      the program) rather than the file name (which imposes extra
9683      constraints).  */
9684   sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9685 
9686   return get_identifier (buf);
9687 }
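
/* Example (editor's illustration, not part of the original source):

     tree id = get_file_function_name ("I");

   produces an identifier of the form "_GLOBAL__I_<name>" (see
   FILE_FUNCTION_FORMAT), where <name> comes from the first global
   object name, the input file's basename, or the hashed fallback
   computed above; the leading letter tells the linker or collect2 what
   the generated function is for, as listed in the comment above.  */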
9688 
9689 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9690 
9691 /* Complain that the tree code of NODE does not match the expected 0
9692    terminated list of trailing codes. The trailing code list can be
9693    empty, for a more vague error message.  FILE, LINE, and FUNCTION
9694    are of the caller.  */
9695 
9696 void
9697 tree_check_failed (const_tree node, const char *file,
9698 		   int line, const char *function, ...)
9699 {
9700   va_list args;
9701   const char *buffer;
9702   unsigned length = 0;
9703   enum tree_code code;
9704 
9705   va_start (args, function);
9706   while ((code = (enum tree_code) va_arg (args, int)))
9707     length += 4 + strlen (get_tree_code_name (code));
9708   va_end (args);
9709   if (length)
9710     {
9711       char *tmp;
9712       va_start (args, function);
9713       length += strlen ("expected ");
9714       buffer = tmp = (char *) alloca (length);
9715       length = 0;
9716       while ((code = (enum tree_code) va_arg (args, int)))
9717 	{
9718 	  const char *prefix = length ? " or " : "expected ";
9719 
9720 	  strcpy (tmp + length, prefix);
9721 	  length += strlen (prefix);
9722 	  strcpy (tmp + length, get_tree_code_name (code));
9723 	  length += strlen (get_tree_code_name (code));
9724 	}
9725       va_end (args);
9726     }
9727   else
9728     buffer = "unexpected node";
9729 
9730   internal_error ("tree check: %s, have %s in %s, at %s:%d",
9731 		  buffer, get_tree_code_name (TREE_CODE (node)),
9732 		  function, trim_filename (file), line);
9733 }
9734 
9735 /* Complain that the tree code of NODE does match the expected 0
9736    terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9737    the caller.  */
9738 
9739 void
9740 tree_not_check_failed (const_tree node, const char *file,
9741 		       int line, const char *function, ...)
9742 {
9743   va_list args;
9744   char *buffer;
9745   unsigned length = 0;
9746   enum tree_code code;
9747 
9748   va_start (args, function);
9749   while ((code = (enum tree_code) va_arg (args, int)))
9750     length += 4 + strlen (get_tree_code_name (code));
9751   va_end (args);
9752   va_start (args, function);
9753   buffer = (char *) alloca (length);
9754   length = 0;
9755   while ((code = (enum tree_code) va_arg (args, int)))
9756     {
9757       if (length)
9758 	{
9759 	  strcpy (buffer + length, " or ");
9760 	  length += 4;
9761 	}
9762       strcpy (buffer + length, get_tree_code_name (code));
9763       length += strlen (get_tree_code_name (code));
9764     }
9765   va_end (args);
9766 
9767   internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9768 		  buffer, get_tree_code_name (TREE_CODE (node)),
9769 		  function, trim_filename (file), line);
9770 }
9771 
9772 /* Similar to tree_check_failed, except that we check for a class of tree
9773    code, given in CL.  */
9774 
9775 void
9776 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9777 			 const char *file, int line, const char *function)
9778 {
9779   internal_error
9780     ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9781      TREE_CODE_CLASS_STRING (cl),
9782      TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9783      get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9784 }
9785 
9786 /* Similar to tree_check_failed, except that instead of specifying a
9787    dozen codes, use the knowledge that they're all sequential.  */
9788 
9789 void
9790 tree_range_check_failed (const_tree node, const char *file, int line,
9791 			 const char *function, enum tree_code c1,
9792 			 enum tree_code c2)
9793 {
9794   char *buffer;
9795   unsigned length = 0;
9796   unsigned int c;
9797 
9798   for (c = c1; c <= c2; ++c)
9799     length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9800 
9801   length += strlen ("expected ");
9802   buffer = (char *) alloca (length);
9803   length = 0;
9804 
9805   for (c = c1; c <= c2; ++c)
9806     {
9807       const char *prefix = length ? " or " : "expected ";
9808 
9809       strcpy (buffer + length, prefix);
9810       length += strlen (prefix);
9811       strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9812       length += strlen (get_tree_code_name ((enum tree_code) c));
9813     }
9814 
9815   internal_error ("tree check: %s, have %s in %s, at %s:%d",
9816 		  buffer, get_tree_code_name (TREE_CODE (node)),
9817 		  function, trim_filename (file), line);
9818 }
9819 
9820 
9821 /* Similar to tree_check_failed, except that we check that a tree does
9822    not belong to the specified class, given in CL.  */
9823 
9824 void
9825 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9826 			     const char *file, int line, const char *function)
9827 {
9828   internal_error
9829     ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9830      TREE_CODE_CLASS_STRING (cl),
9831      TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9832      get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9833 }
9834 
9835 
9836 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes.  */
9837 
9838 void
9839 omp_clause_check_failed (const_tree node, const char *file, int line,
9840                          const char *function, enum omp_clause_code code)
9841 {
9842   internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
9843 		  "in %s, at %s:%d",
9844 		  omp_clause_code_name[code],
9845 		  get_tree_code_name (TREE_CODE (node)),
9846 		  function, trim_filename (file), line);
9847 }
9848 
9849 
9850 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes.  */
9851 
9852 void
9853 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9854 			       const char *function, enum omp_clause_code c1,
9855 			       enum omp_clause_code c2)
9856 {
9857   char *buffer;
9858   unsigned length = 0;
9859   unsigned int c;
9860 
9861   for (c = c1; c <= c2; ++c)
9862     length += 4 + strlen (omp_clause_code_name[c]);
9863 
9864   length += strlen ("expected ");
9865   buffer = (char *) alloca (length);
9866   length = 0;
9867 
9868   for (c = c1; c <= c2; ++c)
9869     {
9870       const char *prefix = length ? " or " : "expected ";
9871 
9872       strcpy (buffer + length, prefix);
9873       length += strlen (prefix);
9874       strcpy (buffer + length, omp_clause_code_name[c]);
9875       length += strlen (omp_clause_code_name[c]);
9876     }
9877 
9878   internal_error ("tree check: %s, have %s in %s, at %s:%d",
9879 		  buffer, omp_clause_code_name[TREE_CODE (node)],
9880 		  function, trim_filename (file), line);
9881 }
9882 
9883 
9884 #undef DEFTREESTRUCT
9885 #define DEFTREESTRUCT(VAL, NAME) NAME,
9886 
9887 static const char *ts_enum_names[] = {
9888 #include "treestruct.def"
9889 };
9890 #undef DEFTREESTRUCT
9891 
9892 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9893 
9894 /* Similar to tree_class_check_failed, except that we check for
9895    whether the code of NODE contains the tree structure identified by EN.  */
9896 
9897 void
9898 tree_contains_struct_check_failed (const_tree node,
9899 				   const enum tree_node_structure_enum en,
9900 				   const char *file, int line,
9901 				   const char *function)
9902 {
9903   internal_error
9904     ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9905      TS_ENUM_NAME (en),
9906      get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9907 }
9908 
9909 
9910 /* Similar to above, except that the check is for the bounds of a
9911    TREE_INT_CST's (dynamically sized) array of elements.  */
9912 
9913 void
9914 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9915 			       const char *function)
9916 {
9917   internal_error
9918     ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
9919      "at %s:%d",
9920      idx + 1, len, function, trim_filename (file), line);
9921 }
9922 
9923 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9924    (dynamically sized) vector.  */
9925 
9926 void
9927 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9928 			   const char *function)
9929 {
9930   internal_error
9931     ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
9932      idx + 1, len, function, trim_filename (file), line);
9933 }
9934 
9935 /* Similar to above, except that the check is for the bounds of the operand
9936    vector of an expression node EXP.  */
9937 
9938 void
9939 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9940 			   int line, const char *function)
9941 {
9942   enum tree_code code = TREE_CODE (exp);
9943   internal_error
9944     ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9945      idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9946      function, trim_filename (file), line);
9947 }
9948 
9949 /* Similar to above, except that the check is for the number of
9950    operands of an OMP_CLAUSE node.  */
9951 
9952 void
9953 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9954 			         int line, const char *function)
9955 {
9956   internal_error
9957     ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
9958      "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9959      omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9960      trim_filename (file), line);
9961 }
9962 #endif /* ENABLE_TREE_CHECKING */
9963 
9964 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9965    and mapped to the machine mode MODE.  Initialize its fields and build
9966    the information necessary for debugging output.  */
9967 
9968 static tree
9969 make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
9970 {
9971   tree t;
9972   tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
9973 
9974   t = make_node (VECTOR_TYPE);
9975   TREE_TYPE (t) = mv_innertype;
9976   SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9977   SET_TYPE_MODE (t, mode);
9978 
9979   if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
9980     SET_TYPE_STRUCTURAL_EQUALITY (t);
9981   else if ((TYPE_CANONICAL (mv_innertype) != innertype
9982 	    || mode != VOIDmode)
9983 	   && !VECTOR_BOOLEAN_TYPE_P (t))
9984     TYPE_CANONICAL (t)
9985       = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
9986 
9987   layout_type (t);
9988 
9989   hashval_t hash = type_hash_canon_hash (t);
9990   t = type_hash_canon (hash, t);
9991 
9992   /* We have built a main variant, based on the main variant of the
9993      inner type. Use it to build the variant we return.  */
9994   if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9995       && TREE_TYPE (t) != innertype)
9996     return build_type_attribute_qual_variant (t,
9997 					      TYPE_ATTRIBUTES (innertype),
9998 					      TYPE_QUALS (innertype));
9999 
10000   return t;
10001 }
10002 
10003 static tree
10004 make_or_reuse_type (unsigned size, int unsignedp)
10005 {
10006   int i;
10007 
10008   if (size == INT_TYPE_SIZE)
10009     return unsignedp ? unsigned_type_node : integer_type_node;
10010   if (size == CHAR_TYPE_SIZE)
10011     return unsignedp ? unsigned_char_type_node : signed_char_type_node;
10012   if (size == SHORT_TYPE_SIZE)
10013     return unsignedp ? short_unsigned_type_node : short_integer_type_node;
10014   if (size == LONG_TYPE_SIZE)
10015     return unsignedp ? long_unsigned_type_node : long_integer_type_node;
10016   if (size == LONG_LONG_TYPE_SIZE)
10017     return (unsignedp ? long_long_unsigned_type_node
10018             : long_long_integer_type_node);
10019 
10020   for (i = 0; i < NUM_INT_N_ENTS; i ++)
10021     if (size == int_n_data[i].bitsize
10022 	&& int_n_enabled_p[i])
10023       return (unsignedp ? int_n_trees[i].unsigned_type
10024 	      : int_n_trees[i].signed_type);
10025 
10026   if (unsignedp)
10027     return make_unsigned_type (size);
10028   else
10029     return make_signed_type (size);
10030 }
10031 
10032 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP.  */
10033 
10034 static tree
10035 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
10036 {
10037   if (satp)
10038     {
10039       if (size == SHORT_FRACT_TYPE_SIZE)
10040 	return unsignedp ? sat_unsigned_short_fract_type_node
10041 			 : sat_short_fract_type_node;
10042       if (size == FRACT_TYPE_SIZE)
10043 	return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
10044       if (size == LONG_FRACT_TYPE_SIZE)
10045 	return unsignedp ? sat_unsigned_long_fract_type_node
10046 			 : sat_long_fract_type_node;
10047       if (size == LONG_LONG_FRACT_TYPE_SIZE)
10048 	return unsignedp ? sat_unsigned_long_long_fract_type_node
10049 			 : sat_long_long_fract_type_node;
10050     }
10051   else
10052     {
10053       if (size == SHORT_FRACT_TYPE_SIZE)
10054 	return unsignedp ? unsigned_short_fract_type_node
10055 			 : short_fract_type_node;
10056       if (size == FRACT_TYPE_SIZE)
10057 	return unsignedp ? unsigned_fract_type_node : fract_type_node;
10058       if (size == LONG_FRACT_TYPE_SIZE)
10059 	return unsignedp ? unsigned_long_fract_type_node
10060 			 : long_fract_type_node;
10061       if (size == LONG_LONG_FRACT_TYPE_SIZE)
10062 	return unsignedp ? unsigned_long_long_fract_type_node
10063 			 : long_long_fract_type_node;
10064     }
10065 
10066   return make_fract_type (size, unsignedp, satp);
10067 }
10068 
10069 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP.  */
10070 
10071 static tree
10072 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
10073 {
10074   if (satp)
10075     {
10076       if (size == SHORT_ACCUM_TYPE_SIZE)
10077 	return unsignedp ? sat_unsigned_short_accum_type_node
10078 			 : sat_short_accum_type_node;
10079       if (size == ACCUM_TYPE_SIZE)
10080 	return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
10081       if (size == LONG_ACCUM_TYPE_SIZE)
10082 	return unsignedp ? sat_unsigned_long_accum_type_node
10083 			 : sat_long_accum_type_node;
10084       if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10085 	return unsignedp ? sat_unsigned_long_long_accum_type_node
10086 			 : sat_long_long_accum_type_node;
10087     }
10088   else
10089     {
10090       if (size == SHORT_ACCUM_TYPE_SIZE)
10091 	return unsignedp ? unsigned_short_accum_type_node
10092 			 : short_accum_type_node;
10093       if (size == ACCUM_TYPE_SIZE)
10094 	return unsignedp ? unsigned_accum_type_node : accum_type_node;
10095       if (size == LONG_ACCUM_TYPE_SIZE)
10096 	return unsignedp ? unsigned_long_accum_type_node
10097 			 : long_accum_type_node;
10098       if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10099 	return unsignedp ? unsigned_long_long_accum_type_node
10100 			 : long_long_accum_type_node;
10101     }
10102 
10103   return make_accum_type (size, unsignedp, satp);
10104 }
10105 
10106 
10107 /* Create an atomic variant node for TYPE.  This routine is called
10108    during initialization of data types to create the 5 basic atomic
10109    types.  The generic build_qualified_type function requires these to
10110    already be set up in order to function properly, so cannot be
10111    called from there.  If ALIGN is non-zero, then ensure alignment is
10112    overridden to this value.  */
10113 
10114 static tree
10115 build_atomic_base (tree type, unsigned int align)
10116 {
10117   tree t;
10118 
10119   /* Make sure it's not already registered.  */
10120   if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
10121     return t;
10122 
10123   t = build_variant_type_copy (type);
10124   set_type_quals (t, TYPE_QUAL_ATOMIC);
10125 
10126   if (align)
10127     SET_TYPE_ALIGN (t, align);
10128 
10129   return t;
10130 }
10131 
10132 /* Information about the _FloatN and _FloatNx types.  This must be in
10133    the same order as the corresponding TI_* enum values.  */
10134 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
10135   {
10136     { 16, false },
10137     { 32, false },
10138     { 64, false },
10139     { 128, false },
10140     { 32, true },
10141     { 64, true },
10142     { 128, true },
10143   };
10144 
10145 
10146 /* Create nodes for all integer types (and error_mark_node) using the sizes
10147    of C datatypes.  SIGNED_CHAR specifies whether char is signed.  */
10148 
10149 void
10150 build_common_tree_nodes (bool signed_char)
10151 {
10152   int i;
10153 
10154   error_mark_node = make_node (ERROR_MARK);
10155   TREE_TYPE (error_mark_node) = error_mark_node;
10156 
10157   initialize_sizetypes ();
10158 
10159   /* Define both `signed char' and `unsigned char'.  */
10160   signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
10161   TYPE_STRING_FLAG (signed_char_type_node) = 1;
10162   unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
10163   TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
10164 
10165   /* Define `char', which is like either `signed char' or `unsigned char'
10166      but not the same as either.  */
10167   char_type_node
10168     = (signed_char
10169        ? make_signed_type (CHAR_TYPE_SIZE)
10170        : make_unsigned_type (CHAR_TYPE_SIZE));
10171   TYPE_STRING_FLAG (char_type_node) = 1;
10172 
10173   short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
10174   short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
10175   integer_type_node = make_signed_type (INT_TYPE_SIZE);
10176   unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
10177   long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
10178   long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
10179   long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
10180   long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
10181 
10182   for (i = 0; i < NUM_INT_N_ENTS; i ++)
10183     {
10184       int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
10185       int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
10186 
10187       if (int_n_enabled_p[i])
10188 	{
10189 	  integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
10190 	  integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
10191 	}
10192     }
10193 
10194   /* Define a boolean type.  This type only represents boolean values but
10195      may be larger than char depending on the value of BOOL_TYPE_SIZE.  */
10196   boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
10197   TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
10198   TYPE_PRECISION (boolean_type_node) = 1;
10199   TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
10200 
10201   /* Define what type to use for size_t.  */
10202   if (strcmp (SIZE_TYPE, "unsigned int") == 0)
10203     size_type_node = unsigned_type_node;
10204   else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
10205     size_type_node = long_unsigned_type_node;
10206   else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
10207     size_type_node = long_long_unsigned_type_node;
10208   else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
10209     size_type_node = short_unsigned_type_node;
10210   else
10211     {
10212       int i;
10213 
10214       size_type_node = NULL_TREE;
10215       for (i = 0; i < NUM_INT_N_ENTS; i++)
10216 	if (int_n_enabled_p[i])
10217 	  {
10218 	    char name[50], altname[50];
10219 	    sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
10220 	    sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);
10221 
10222 	    if (strcmp (name, SIZE_TYPE) == 0
10223 		|| strcmp (altname, SIZE_TYPE) == 0)
10224 	      {
10225 		size_type_node = int_n_trees[i].unsigned_type;
10226 	      }
10227 	  }
10228       if (size_type_node == NULL_TREE)
10229 	gcc_unreachable ();
10230     }
10231 
10232   /* Define what type to use for ptrdiff_t.  */
10233   if (strcmp (PTRDIFF_TYPE, "int") == 0)
10234     ptrdiff_type_node = integer_type_node;
10235   else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
10236     ptrdiff_type_node = long_integer_type_node;
10237   else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
10238     ptrdiff_type_node = long_long_integer_type_node;
10239   else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
10240     ptrdiff_type_node = short_integer_type_node;
10241   else
10242     {
10243       ptrdiff_type_node = NULL_TREE;
10244       for (int i = 0; i < NUM_INT_N_ENTS; i++)
10245 	if (int_n_enabled_p[i])
10246 	  {
10247 	    char name[50], altname[50];
10248 	    sprintf (name, "__int%d", int_n_data[i].bitsize);
10249 	    sprintf (altname, "__int%d__", int_n_data[i].bitsize);
10250 
10251 	    if (strcmp (name, PTRDIFF_TYPE) == 0
10252 		|| strcmp (altname, PTRDIFF_TYPE) == 0)
10253 	      ptrdiff_type_node = int_n_trees[i].signed_type;
10254 	  }
10255       if (ptrdiff_type_node == NULL_TREE)
10256 	gcc_unreachable ();
10257     }
10258 
10259   /* Fill in the rest of the sized types.  Reuse existing type nodes
10260      when possible.  */
10261   intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
10262   intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
10263   intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
10264   intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
10265   intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
10266 
10267   unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
10268   unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
10269   unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
10270   unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
10271   unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
10272 
10273   /* Don't call build_qualified_type for atomics.  That routine does
10274      special processing for atomics, and until they are initialized
10275      it's better not to make that call.
10276 
10277      Check to see if there is a target override for atomic types.  */
10278 
10279   atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
10280 					targetm.atomic_align_for_mode (QImode));
10281   atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
10282 					targetm.atomic_align_for_mode (HImode));
10283   atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
10284 					targetm.atomic_align_for_mode (SImode));
10285   atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
10286 					targetm.atomic_align_for_mode (DImode));
10287   atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
10288 					targetm.atomic_align_for_mode (TImode));
10289 
10290   access_public_node = get_identifier ("public");
10291   access_protected_node = get_identifier ("protected");
10292   access_private_node = get_identifier ("private");
10293 
10294   /* Define these next since types below may use them.  */
10295   integer_zero_node = build_int_cst (integer_type_node, 0);
10296   integer_one_node = build_int_cst (integer_type_node, 1);
10297   integer_three_node = build_int_cst (integer_type_node, 3);
10298   integer_minus_one_node = build_int_cst (integer_type_node, -1);
10299 
10300   size_zero_node = size_int (0);
10301   size_one_node = size_int (1);
10302   bitsize_zero_node = bitsize_int (0);
10303   bitsize_one_node = bitsize_int (1);
10304   bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
10305 
10306   boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
10307   boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
10308 
10309   void_type_node = make_node (VOID_TYPE);
10310   layout_type (void_type_node);
10311 
10312   /* We are not going to have real types in C with less than byte alignment,
10313      so we might as well not have any types that claim to have it.  */
10314   SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
10315   TYPE_USER_ALIGN (void_type_node) = 0;
10316 
10317   void_node = make_node (VOID_CST);
10318   TREE_TYPE (void_node) = void_type_node;
10319 
10320   null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
10321   layout_type (TREE_TYPE (null_pointer_node));
10322 
10323   ptr_type_node = build_pointer_type (void_type_node);
10324   const_ptr_type_node
10325     = build_pointer_type (build_type_variant (void_type_node, 1, 0));
10326   for (unsigned i = 0;
10327        i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
10328        ++i)
10329     builtin_structptr_types[i].node = builtin_structptr_types[i].base;
10330 
10331   pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
10332 
10333   float_type_node = make_node (REAL_TYPE);
10334   TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
10335   layout_type (float_type_node);
10336 
10337   double_type_node = make_node (REAL_TYPE);
10338   TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
10339   layout_type (double_type_node);
10340 
10341   long_double_type_node = make_node (REAL_TYPE);
10342   TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
10343   layout_type (long_double_type_node);
10344 
10345   for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
10346     {
10347       int n = floatn_nx_types[i].n;
10348       bool extended = floatn_nx_types[i].extended;
10349       scalar_float_mode mode;
10350       if (!targetm.floatn_mode (n, extended).exists (&mode))
10351 	continue;
10352       int precision = GET_MODE_PRECISION (mode);
10353       /* Work around the rs6000 KFmode having precision 113 not
10354 	 128.  */
10355       const struct real_format *fmt = REAL_MODE_FORMAT (mode);
10356       gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
10357       int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
10358       if (!extended)
10359 	gcc_assert (min_precision == n);
10360       if (precision < min_precision)
10361 	precision = min_precision;
10362       FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
10363       TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
10364       layout_type (FLOATN_NX_TYPE_NODE (i));
10365       SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
10366     }
10367 
10368   float_ptr_type_node = build_pointer_type (float_type_node);
10369   double_ptr_type_node = build_pointer_type (double_type_node);
10370   long_double_ptr_type_node = build_pointer_type (long_double_type_node);
10371   integer_ptr_type_node = build_pointer_type (integer_type_node);
10372 
10373   /* Fixed size integer types.  */
10374   uint16_type_node = make_or_reuse_type (16, 1);
10375   uint32_type_node = make_or_reuse_type (32, 1);
10376   uint64_type_node = make_or_reuse_type (64, 1);
10377 
10378   /* Decimal float types.  */
10379   if (targetm.decimal_float_supported_p ())
10380     {
10381       dfloat32_type_node = make_node (REAL_TYPE);
10382       TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
10383       SET_TYPE_MODE (dfloat32_type_node, SDmode);
10384       layout_type (dfloat32_type_node);
10385 
10386       dfloat64_type_node = make_node (REAL_TYPE);
10387       TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
10388       SET_TYPE_MODE (dfloat64_type_node, DDmode);
10389       layout_type (dfloat64_type_node);
10390 
10391       dfloat128_type_node = make_node (REAL_TYPE);
10392       TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
10393       SET_TYPE_MODE (dfloat128_type_node, TDmode);
10394       layout_type (dfloat128_type_node);
10395     }
10396 
10397   complex_integer_type_node = build_complex_type (integer_type_node, true);
10398   complex_float_type_node = build_complex_type (float_type_node, true);
10399   complex_double_type_node = build_complex_type (double_type_node, true);
10400   complex_long_double_type_node = build_complex_type (long_double_type_node,
10401 						      true);
10402 
10403   for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
10404     {
10405       if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
10406 	COMPLEX_FLOATN_NX_TYPE_NODE (i)
10407 	  = build_complex_type (FLOATN_NX_TYPE_NODE (i));
10408     }
10409 
10410 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned.  */
10411 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
10412   sat_ ## KIND ## _type_node = \
10413     make_sat_signed_ ## KIND ## _type (SIZE); \
10414   sat_unsigned_ ## KIND ## _type_node = \
10415     make_sat_unsigned_ ## KIND ## _type (SIZE); \
10416   KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10417   unsigned_ ## KIND ## _type_node = \
10418     make_unsigned_ ## KIND ## _type (SIZE);
10419 
10420 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
10421   sat_ ## WIDTH ## KIND ## _type_node = \
10422     make_sat_signed_ ## KIND ## _type (SIZE); \
10423   sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10424     make_sat_unsigned_ ## KIND ## _type (SIZE); \
10425   WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10426   unsigned_ ## WIDTH ## KIND ## _type_node = \
10427     make_unsigned_ ## KIND ## _type (SIZE);
10428 
10429 /* Make fixed-point type nodes based on four different widths.  */
10430 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10431   MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10432   MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10433   MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10434   MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10435 
10436 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned.  */
10437 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10438   NAME ## _type_node = \
10439     make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10440   u ## NAME ## _type_node = \
10441     make_or_reuse_unsigned_ ## KIND ## _type \
10442       (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10443   sat_ ## NAME ## _type_node = \
10444     make_or_reuse_sat_signed_ ## KIND ## _type \
10445       (GET_MODE_BITSIZE (MODE ## mode)); \
10446   sat_u ## NAME ## _type_node = \
10447     make_or_reuse_sat_unsigned_ ## KIND ## _type \
10448       (GET_MODE_BITSIZE (U ## MODE ## mode));
10449 
10450   /* Fixed-point type and mode nodes.  */
10451   MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10452   MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10453   MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10454   MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10455   MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10456   MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10457   MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10458   MAKE_FIXED_MODE_NODE (accum, ha, HA)
10459   MAKE_FIXED_MODE_NODE (accum, sa, SA)
10460   MAKE_FIXED_MODE_NODE (accum, da, DA)
10461   MAKE_FIXED_MODE_NODE (accum, ta, TA)
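  /* For illustration (this comment is an addition, not original source):
     MAKE_FIXED_MODE_NODE (fract, qq, QQ) above expands into definitions of
     qq_type_node, uqq_type_node, sat_qq_type_node and sat_uqq_type_node,
     each built from the bitsize of QQmode (or UQQmode for the unsigned
     variants).  */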
10462 
10463   {
10464     tree t = targetm.build_builtin_va_list ();
10465 
10466     /* Many back-ends define record types without setting TYPE_NAME.
10467        If we copied the record type here, we'd keep the original
10468        record type without a name.  This breaks name mangling.  So,
10469        don't copy record types and let c_common_nodes_and_builtins()
10470        declare the type to be __builtin_va_list.  */
10471     if (TREE_CODE (t) != RECORD_TYPE)
10472       t = build_variant_type_copy (t);
10473 
10474     va_list_type_node = t;
10475   }
10476 
10477   /* SCEV analyzer global shared trees.  */
10478   chrec_dont_know = make_node (SCEV_NOT_KNOWN);
10479   TREE_TYPE (chrec_dont_know) = void_type_node;
10480   chrec_known = make_node (SCEV_KNOWN);
10481   TREE_TYPE (chrec_known) = void_type_node;
10482 }
10483 
10484 /* Modify DECL for given flags.
10485    TM_PURE attribute is set only on types, so the function will modify
10486    DECL's type when ECF_TM_PURE is used.  */
10487 
10488 void
10489 set_call_expr_flags (tree decl, int flags)
10490 {
10491   if (flags & ECF_NOTHROW)
10492     TREE_NOTHROW (decl) = 1;
10493   if (flags & ECF_CONST)
10494     TREE_READONLY (decl) = 1;
10495   if (flags & ECF_PURE)
10496     DECL_PURE_P (decl) = 1;
10497   if (flags & ECF_LOOPING_CONST_OR_PURE)
10498     DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10499   if (flags & ECF_NOVOPS)
10500     DECL_IS_NOVOPS (decl) = 1;
10501   if (flags & ECF_NORETURN)
10502     TREE_THIS_VOLATILE (decl) = 1;
10503   if (flags & ECF_MALLOC)
10504     DECL_IS_MALLOC (decl) = 1;
10505   if (flags & ECF_RETURNS_TWICE)
10506     DECL_IS_RETURNS_TWICE (decl) = 1;
10507   if (flags & ECF_LEAF)
10508     DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10509 					NULL, DECL_ATTRIBUTES (decl));
10510   if (flags & ECF_COLD)
10511     DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
10512 					NULL, DECL_ATTRIBUTES (decl));
10513   if (flags & ECF_RET1)
10514     DECL_ATTRIBUTES (decl)
10515       = tree_cons (get_identifier ("fn spec"),
10516 		   build_tree_list (NULL_TREE, build_string (1, "1")),
10517 		   DECL_ATTRIBUTES (decl));
10518   if ((flags & ECF_TM_PURE) && flag_tm)
10519     apply_tm_attr (decl, get_identifier ("transaction_pure"));
10520   /* Looping const or pure is implied by noreturn.
10521      There is currently no way to declare looping const or looping pure alone.  */
10522   gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10523 	      || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
10524 }
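/* Illustrative usage sketch (added comment, not original source): a caller
   that has some FUNCTION_DECL in a hypothetical variable FNDECL could mark
   it nothrow, const and leaf with

     set_call_expr_flags (fndecl, ECF_NOTHROW | ECF_CONST | ECF_LEAF);

   after which TREE_NOTHROW (fndecl) and TREE_READONLY (fndecl) are 1 and
   DECL_ATTRIBUTES (fndecl) carries a "leaf" attribute, per the code
   above.  */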
10525 
10526 
10527 /* A subroutine of build_common_builtin_nodes.  Define a builtin function.  */
10528 
10529 static void
10530 local_define_builtin (const char *name, tree type, enum built_in_function code,
10531                       const char *library_name, int ecf_flags)
10532 {
10533   tree decl;
10534 
10535   decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10536 			       library_name, NULL_TREE);
10537   set_call_expr_flags (decl, ecf_flags);
10538 
10539   set_builtin_decl (code, decl, true);
10540 }
10541 
10542 /* Call this function after instantiating all builtins that the language
10543    front end cares about.  This will build the rest of the builtins
10544    and internal functions that are relied upon by the tree optimizers and
10545    the middle-end.  */
10546 
10547 void
10548 build_common_builtin_nodes (void)
10549 {
10550   tree tmp, ftype;
10551   int ecf_flags;
10552 
10553   if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
10554       || !builtin_decl_explicit_p (BUILT_IN_ABORT))
10555     {
10556       ftype = build_function_type (void_type_node, void_list_node);
10557       if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10558 	local_define_builtin ("__builtin_unreachable", ftype,
10559 			      BUILT_IN_UNREACHABLE,
10560 			      "__builtin_unreachable",
10561 			      ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10562 			      | ECF_CONST | ECF_COLD);
10563       if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
10564 	local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
10565 			      "abort",
10566 			      ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
10567     }
10568 
10569   if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10570       || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10571     {
10572       ftype = build_function_type_list (ptr_type_node,
10573 					ptr_type_node, const_ptr_type_node,
10574 					size_type_node, NULL_TREE);
10575 
10576       if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10577 	local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10578 			      "memcpy", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10579       if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10580 	local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10581 			      "memmove", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10582     }
10583 
10584   if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10585     {
10586       ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10587 					const_ptr_type_node, size_type_node,
10588 					NULL_TREE);
10589       local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10590 			    "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10591     }
10592 
10593   if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10594     {
10595       ftype = build_function_type_list (ptr_type_node,
10596 					ptr_type_node, integer_type_node,
10597 					size_type_node, NULL_TREE);
10598       local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10599 			    "memset", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10600     }
10601 
10602   /* If we're checking the stack, `alloca' can throw.  */
10603   const int alloca_flags
10604     = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
10605 
10606   if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10607     {
10608       ftype = build_function_type_list (ptr_type_node,
10609 					size_type_node, NULL_TREE);
10610       local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10611 			    "alloca", alloca_flags);
10612     }
10613 
10614   ftype = build_function_type_list (ptr_type_node, size_type_node,
10615 				    size_type_node, NULL_TREE);
10616   local_define_builtin ("__builtin_alloca_with_align", ftype,
10617 			BUILT_IN_ALLOCA_WITH_ALIGN,
10618 			"__builtin_alloca_with_align",
10619 			alloca_flags);
10620 
10621   ftype = build_function_type_list (ptr_type_node, size_type_node,
10622 				    size_type_node, size_type_node, NULL_TREE);
10623   local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
10624 			BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
10625 			"__builtin_alloca_with_align_and_max",
10626 			alloca_flags);
10627 
10628   ftype = build_function_type_list (void_type_node,
10629 				    ptr_type_node, ptr_type_node,
10630 				    ptr_type_node, NULL_TREE);
10631   local_define_builtin ("__builtin_init_trampoline", ftype,
10632 			BUILT_IN_INIT_TRAMPOLINE,
10633 			"__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10634   local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10635 			BUILT_IN_INIT_HEAP_TRAMPOLINE,
10636 			"__builtin_init_heap_trampoline",
10637 			ECF_NOTHROW | ECF_LEAF);
10638   local_define_builtin ("__builtin_init_descriptor", ftype,
10639 			BUILT_IN_INIT_DESCRIPTOR,
10640 			"__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);
10641 
10642   ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10643   local_define_builtin ("__builtin_adjust_trampoline", ftype,
10644 			BUILT_IN_ADJUST_TRAMPOLINE,
10645 			"__builtin_adjust_trampoline",
10646 			ECF_CONST | ECF_NOTHROW);
10647   local_define_builtin ("__builtin_adjust_descriptor", ftype,
10648 			BUILT_IN_ADJUST_DESCRIPTOR,
10649 			"__builtin_adjust_descriptor",
10650 			ECF_CONST | ECF_NOTHROW);
10651 
10652   ftype = build_function_type_list (void_type_node,
10653 				    ptr_type_node, ptr_type_node, NULL_TREE);
10654   local_define_builtin ("__builtin_nonlocal_goto", ftype,
10655 			BUILT_IN_NONLOCAL_GOTO,
10656 			"__builtin_nonlocal_goto",
10657 			ECF_NORETURN | ECF_NOTHROW);
10658 
10659   ftype = build_function_type_list (void_type_node,
10660 				    ptr_type_node, ptr_type_node, NULL_TREE);
10661   local_define_builtin ("__builtin_setjmp_setup", ftype,
10662 			BUILT_IN_SETJMP_SETUP,
10663 			"__builtin_setjmp_setup", ECF_NOTHROW);
10664 
10665   ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10666   local_define_builtin ("__builtin_setjmp_receiver", ftype,
10667 			BUILT_IN_SETJMP_RECEIVER,
10668 			"__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10669 
10670   ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10671   local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10672 			"__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10673 
10674   ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10675   local_define_builtin ("__builtin_stack_restore", ftype,
10676 			BUILT_IN_STACK_RESTORE,
10677 			"__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10678 
10679   ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10680 				    const_ptr_type_node, size_type_node,
10681 				    NULL_TREE);
10682   local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
10683 			"__builtin_memcmp_eq",
10684 			ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10685 
10686   local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
10687 			"__builtin_strncmp_eq",
10688 			ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10689 
10690   local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
10691 			"__builtin_strcmp_eq",
10692 			ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10693 
10694   /* If there's a possibility that we might use the ARM EABI, build the
10695     alternate __cxa_end_cleanup node used to resume from C++.  */
10696   if (targetm.arm_eabi_unwinder)
10697     {
10698       ftype = build_function_type_list (void_type_node, NULL_TREE);
10699       local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10700 			    BUILT_IN_CXA_END_CLEANUP,
10701 			    "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10702     }
10703 
10704   ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10705   local_define_builtin ("__builtin_unwind_resume", ftype,
10706 			BUILT_IN_UNWIND_RESUME,
10707 			((targetm_common.except_unwind_info (&global_options)
10708 			  == UI_SJLJ)
10709 			 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10710 			ECF_NORETURN);
10711 
10712   if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10713     {
10714       ftype = build_function_type_list (ptr_type_node, integer_type_node,
10715 					NULL_TREE);
10716       local_define_builtin ("__builtin_return_address", ftype,
10717 			    BUILT_IN_RETURN_ADDRESS,
10718 			    "__builtin_return_address",
10719 			    ECF_NOTHROW);
10720     }
10721 
10722   if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10723       || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10724     {
10725       ftype = build_function_type_list (void_type_node, ptr_type_node,
10726 					ptr_type_node, NULL_TREE);
10727       if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10728 	local_define_builtin ("__cyg_profile_func_enter", ftype,
10729 			      BUILT_IN_PROFILE_FUNC_ENTER,
10730 			      "__cyg_profile_func_enter", 0);
10731       if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10732 	local_define_builtin ("__cyg_profile_func_exit", ftype,
10733 			      BUILT_IN_PROFILE_FUNC_EXIT,
10734 			      "__cyg_profile_func_exit", 0);
10735     }
10736 
10737   /* The exception object and filter values from the runtime.  The argument
10738      must be zero before exception lowering, i.e. from the front end.  After
10739      exception lowering, it will be the region number for the exception
10740      landing pad.  These functions are PURE instead of CONST to prevent
10741      them from being hoisted past the exception edge that will initialize
10742      its value in the landing pad.  */
10743   ftype = build_function_type_list (ptr_type_node,
10744 				    integer_type_node, NULL_TREE);
10745   ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10746   /* Only use TM_PURE if we have TM language support.  */
10747   if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10748     ecf_flags |= ECF_TM_PURE;
10749   local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10750 			"__builtin_eh_pointer", ecf_flags);
10751 
10752   tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10753   ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10754   local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10755 			"__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10756 
10757   ftype = build_function_type_list (void_type_node,
10758 				    integer_type_node, integer_type_node,
10759 				    NULL_TREE);
10760   local_define_builtin ("__builtin_eh_copy_values", ftype,
10761 			BUILT_IN_EH_COPY_VALUES,
10762 			"__builtin_eh_copy_values", ECF_NOTHROW);
10763 
10764   /* Complex multiplication and division.  These are handled as builtins
10765      rather than optabs because emit_library_call_value doesn't support
10766      complex.  Further, we can do slightly better with folding these
10767      beasties if the real and imaginary parts of the arguments are separate.  */
10768   {
10769     int mode;
10770 
10771     for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10772       {
10773 	char mode_name_buf[4], *q;
10774 	const char *p;
10775 	enum built_in_function mcode, dcode;
10776 	tree type, inner_type;
10777 	const char *prefix = "__";
10778 
10779 	if (targetm.libfunc_gnu_prefix)
10780 	  prefix = "__gnu_";
10781 
10782 	type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10783 	if (type == NULL)
10784 	  continue;
10785 	inner_type = TREE_TYPE (type);
10786 
10787 	ftype = build_function_type_list (type, inner_type, inner_type,
10788 					  inner_type, inner_type, NULL_TREE);
10789 
10790         mcode = ((enum built_in_function)
10791 		 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10792         dcode = ((enum built_in_function)
10793 		 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10794 
10795         for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10796 	  *q = TOLOWER (*p);
10797 	*q = '\0';
10798 
10799 	/* For -ftrapping-math these should throw from a former
10800 	   -fnon-call-exception stmt.  */
10801 	built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10802 					NULL);
10803         local_define_builtin (built_in_names[mcode], ftype, mcode,
10804 			      built_in_names[mcode],
10805 			      ECF_CONST | ECF_LEAF);
10806 
10807 	built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10808 					NULL);
10809         local_define_builtin (built_in_names[dcode], ftype, dcode,
10810 			      built_in_names[dcode],
10811 			      ECF_CONST | ECF_LEAF);
10812       }
10813   }
10814 
10815   init_internal_fns ();
10816 }
10817 
10818 /* HACK.  GROSS.  This is absolutely disgusting.  I wish there was a
10819    better way.
10820 
10821    If we requested a pointer to a vector, build up the pointers that
10822    we stripped off while looking for the inner type.  Similarly for
10823    return values from functions.
10824 
10825    The argument TYPE is the top of the chain, and BOTTOM is the
10826    new type which we will point to.  */
10827 
10828 tree
10829 reconstruct_complex_type (tree type, tree bottom)
10830 {
10831   tree inner, outer;
10832 
10833   if (TREE_CODE (type) == POINTER_TYPE)
10834     {
10835       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10836       outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10837 					   TYPE_REF_CAN_ALIAS_ALL (type));
10838     }
10839   else if (TREE_CODE (type) == REFERENCE_TYPE)
10840     {
10841       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10842       outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10843 					     TYPE_REF_CAN_ALIAS_ALL (type));
10844     }
10845   else if (TREE_CODE (type) == ARRAY_TYPE)
10846     {
10847       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10848       outer = build_array_type (inner, TYPE_DOMAIN (type));
10849     }
10850   else if (TREE_CODE (type) == FUNCTION_TYPE)
10851     {
10852       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10853       outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10854     }
10855   else if (TREE_CODE (type) == METHOD_TYPE)
10856     {
10857       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10858       /* The build_method_type_directly() routine prepends 'this' to the
10859          argument list, so we must compensate by getting rid of it.  */
10860       outer
10861 	= build_method_type_directly
10862 	    (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10863 	     inner,
10864 	     TREE_CHAIN (TYPE_ARG_TYPES (type)));
10865     }
10866   else if (TREE_CODE (type) == OFFSET_TYPE)
10867     {
10868       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10869       outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10870     }
10871   else
10872     return bottom;
10873 
10874   return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10875 					    TYPE_QUALS (type));
10876 }
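/* Illustrative sketch (added comment, not original source): if TYPE is
   "float *" and BOTTOM is a vector-of-float type, the POINTER_TYPE case
   above rebuilds the pointer around the new inner type:

     tree vec4f = build_vector_type (float_type_node, 4);
     tree p = reconstruct_complex_type (float_ptr_type_node, vec4f);

   P is then a pointer to the 4-element vector type, with the original
   pointer's mode, can-alias-all flag, attributes and qualifiers
   preserved.  VEC4F and P are hypothetical local names.  */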
10877 
10878 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10879    the inner type.  */
10880 tree
10881 build_vector_type_for_mode (tree innertype, machine_mode mode)
10882 {
10883   poly_int64 nunits;
10884   unsigned int bitsize;
10885 
10886   switch (GET_MODE_CLASS (mode))
10887     {
10888     case MODE_VECTOR_BOOL:
10889     case MODE_VECTOR_INT:
10890     case MODE_VECTOR_FLOAT:
10891     case MODE_VECTOR_FRACT:
10892     case MODE_VECTOR_UFRACT:
10893     case MODE_VECTOR_ACCUM:
10894     case MODE_VECTOR_UACCUM:
10895       nunits = GET_MODE_NUNITS (mode);
10896       break;
10897 
10898     case MODE_INT:
10899       /* Check that there are no leftover bits.  */
10900       bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
10901       gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10902       nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10903       break;
10904 
10905     default:
10906       gcc_unreachable ();
10907     }
10908 
10909   return make_vector_type (innertype, nunits, mode);
10910 }
10911 
10912 /* Similarly, but takes the inner type and number of units, which must be
10913    a power of two.  */
10914 
10915 tree
10916 build_vector_type (tree innertype, poly_int64 nunits)
10917 {
10918   return make_vector_type (innertype, nunits, VOIDmode);
10919 }
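/* Illustrative sketch (added comment, not original source): both entry
   points funnel into make_vector_type; they differ only in whether the
   vector mode is given explicitly or chosen by layout:

     tree v4si = build_vector_type (intSI_type_node, 4);
     tree v4sf = build_vector_type_for_mode (float_type_node, V4SFmode);

   V4SFmode here stands for a target-provided vector mode; the names V4SI
   and V4SF are hypothetical.  */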
10920 
10921 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE.  */
10922 
10923 tree
10924 build_truth_vector_type_for_mode (poly_uint64 nunits, machine_mode mask_mode)
10925 {
10926   gcc_assert (mask_mode != BLKmode);
10927 
10928   poly_uint64 vsize = GET_MODE_BITSIZE (mask_mode);
10929   unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
10930   tree bool_type = build_nonstandard_boolean_type (esize);
10931 
10932   return make_vector_type (bool_type, nunits, mask_mode);
10933 }
10934 
10935 /* Build a vector type that holds one boolean result for each element of
10936    vector type VECTYPE.  The public interface for this operation is
10937    truth_type_for.  */
10938 
10939 static tree
10940 build_truth_vector_type_for (tree vectype)
10941 {
10942   machine_mode vector_mode = TYPE_MODE (vectype);
10943   poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
10944 
10945   machine_mode mask_mode;
10946   if (VECTOR_MODE_P (vector_mode)
10947       && targetm.vectorize.get_mask_mode (vector_mode).exists (&mask_mode))
10948     return build_truth_vector_type_for_mode (nunits, mask_mode);
10949 
10950   poly_uint64 vsize = tree_to_poly_uint64 (TYPE_SIZE (vectype));
10951   unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
10952   tree bool_type = build_nonstandard_boolean_type (esize);
10953 
10954   return make_vector_type (bool_type, nunits, BLKmode);
10955 }
10956 
10957 /* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
10958    set.  */
10959 
10960 tree
10961 build_opaque_vector_type (tree innertype, poly_int64 nunits)
10962 {
10963   tree t = make_vector_type (innertype, nunits, VOIDmode);
10964   tree cand;
10965   /* We always build the non-opaque variant before the opaque one,
10966      so if it already exists, it is TYPE_NEXT_VARIANT of this one.  */
10967   cand = TYPE_NEXT_VARIANT (t);
10968   if (cand
10969       && TYPE_VECTOR_OPAQUE (cand)
10970       && check_qualified_type (cand, t, TYPE_QUALS (t)))
10971     return cand;
10972   /* Otherwise build a variant type and make sure to queue it after
10973      the non-opaque type.  */
10974   cand = build_distinct_type_copy (t);
10975   TYPE_VECTOR_OPAQUE (cand) = true;
10976   TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10977   TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10978   TYPE_NEXT_VARIANT (t) = cand;
10979   TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10980   return cand;
10981 }
10982 
10983 /* Return the value of element I of VECTOR_CST T as a wide_int.  */
10984 
10985 static poly_wide_int
10986 vector_cst_int_elt (const_tree t, unsigned int i)
10987 {
10988   /* First handle elements that are directly encoded.  */
10989   unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10990   if (i < encoded_nelts)
10991     return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, i));
10992 
10993   /* Identify the pattern that contains element I and work out the index of
10994      the last encoded element for that pattern.  */
10995   unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10996   unsigned int pattern = i % npatterns;
10997   unsigned int count = i / npatterns;
10998   unsigned int final_i = encoded_nelts - npatterns + pattern;
10999 
11000   /* If there are no steps, the final encoded value is the right one.  */
11001   if (!VECTOR_CST_STEPPED_P (t))
11002     return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, final_i));
11003 
11004   /* Otherwise work out the value from the last two encoded elements.  */
11005   tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
11006   tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
11007   poly_wide_int diff = wi::to_poly_wide (v2) - wi::to_poly_wide (v1);
11008   return wi::to_poly_wide (v2) + (count - 2) * diff;
11009 }
11010 
11011 /* Return the value of element I of VECTOR_CST T.  */
11012 
11013 tree
11014 vector_cst_elt (const_tree t, unsigned int i)
11015 {
11016   /* First handle elements that are directly encoded.  */
11017   unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
11018   if (i < encoded_nelts)
11019     return VECTOR_CST_ENCODED_ELT (t, i);
11020 
11021   /* If there are no steps, the final encoded value is the right one.  */
11022   if (!VECTOR_CST_STEPPED_P (t))
11023     {
11024       /* Identify the pattern that contains element I and work out the index of
11025 	 the last encoded element for that pattern.  */
11026       unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
11027       unsigned int pattern = i % npatterns;
11028       unsigned int final_i = encoded_nelts - npatterns + pattern;
11029       return VECTOR_CST_ENCODED_ELT (t, final_i);
11030     }
11031 
11032   /* Otherwise work out the value from the last two encoded elements.  */
11033   return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
11034 			   vector_cst_int_elt (t, i));
11035 }
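/* Worked example (added comment, not original source): a VECTOR_CST with
   NPATTERNS == 1 and the stepped encoding { 1, 3, 5 } represents the
   series 1, 3, 5, 7, 9, ...  For element i == 4 the code above computes
   pattern = 0, count = 4, final_i = 2, diff = 5 - 3 = 2, and returns
   5 + (4 - 2) * 2 = 9.  */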
11036 
11037 /* Given an initializer INIT, return TRUE if INIT is zero or some
11038    aggregate of zeros.  Otherwise return FALSE.  If NONZERO is not
11039    null, set *NONZERO if and only if INIT is known not to be all
11040    zeros.  The combination of return value of false and *NONZERO
11041    false implies that INIT may but need not be all zeros.  Other
11042    combinations indicate definitive answers.  */
11043 
11044 bool
11045 initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
11046 {
11047   bool dummy;
11048   if (!nonzero)
11049     nonzero = &dummy;
11050 
11051   /* Conservatively clear NONZERO and set it only if INIT is definitely
11052      not all zero.  */
11053   *nonzero = false;
11054 
11055   STRIP_NOPS (init);
11056 
11057   unsigned HOST_WIDE_INT off = 0;
11058 
11059   switch (TREE_CODE (init))
11060     {
11061     case INTEGER_CST:
11062       if (integer_zerop (init))
11063 	return true;
11064 
11065       *nonzero = true;
11066       return false;
11067 
11068     case REAL_CST:
11069       /* ??? Note that this is not correct for C4X float formats.  There,
11070 	 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
11071 	 negative exponent.  */
11072       if (real_zerop (init)
11073 	  && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
11074 	return true;
11075 
11076       *nonzero = true;
11077       return false;
11078 
11079     case FIXED_CST:
11080       if (fixed_zerop (init))
11081 	return true;
11082 
11083       *nonzero = true;
11084       return false;
11085 
11086     case COMPLEX_CST:
11087       if (integer_zerop (init)
11088 	  || (real_zerop (init)
11089 	      && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
11090 	      && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
11091 	return true;
11092 
11093       *nonzero = true;
11094       return false;
11095 
11096     case VECTOR_CST:
11097       if (VECTOR_CST_NPATTERNS (init) == 1
11098 	  && VECTOR_CST_DUPLICATE_P (init)
11099 	  && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
11100 	return true;
11101 
11102       *nonzero = true;
11103       return false;
11104 
11105     case CONSTRUCTOR:
11106       {
11107 	if (TREE_CLOBBER_P (init))
11108 	  return false;
11109 
11110 	unsigned HOST_WIDE_INT idx;
11111 	tree elt;
11112 
11113 	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
11114 	  if (!initializer_zerop (elt, nonzero))
11115 	    return false;
11116 
11117 	return true;
11118       }
11119 
11120     case MEM_REF:
11121       {
11122 	tree arg = TREE_OPERAND (init, 0);
11123 	if (TREE_CODE (arg) != ADDR_EXPR)
11124 	  return false;
11125 	tree offset = TREE_OPERAND (init, 1);
11126 	if (TREE_CODE (offset) != INTEGER_CST
11127 	    || !tree_fits_uhwi_p (offset))
11128 	  return false;
11129 	off = tree_to_uhwi (offset);
11130 	if (INT_MAX < off)
11131 	  return false;
11132 	arg = TREE_OPERAND (arg, 0);
11133 	if (TREE_CODE (arg) != STRING_CST)
11134 	  return false;
11135 	init = arg;
11136       }
11137       /* Fall through.  */
11138 
11139     case STRING_CST:
11140       {
11141 	gcc_assert (off <= INT_MAX);
11142 
11143 	int i = off;
11144 	int n = TREE_STRING_LENGTH (init);
11145 	if (n <= i)
11146 	  return false;
11147 
11148 	/* We need to loop through all elements to handle cases like
11149 	   "\0" and "\0foobar".  */
11150 	for (i = 0; i < n; ++i)
11151 	  if (TREE_STRING_POINTER (init)[i] != '\0')
11152 	    {
11153 	      *nonzero = true;
11154 	      return false;
11155 	    }
11156 
11157 	return true;
11158       }
11159 
11160     default:
11161       return false;
11162     }
11163 }
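/* Illustrative sketch (added comment, not original source): the three
   possible outcomes for a caller passing the NONZERO out-parameter:

     bool nz;
     bool allz = initializer_zerop (init, &nz);

     allz == true                : INIT is definitely all zeros.
     allz == false, nz == true   : INIT is definitely not all zeros.
     allz == false, nz == false  : unknown, e.g. for a clobber or an
                                   unhandled tree code.  */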
11164 
11165 /* Return true if EXPR is an initializer expression in which every element
11166    is a constant that is numerically equal to 0 or 1.  The elements do not
11167    need to be equal to each other.  */
11168 
11169 bool
11170 initializer_each_zero_or_onep (const_tree expr)
11171 {
11172   STRIP_ANY_LOCATION_WRAPPER (expr);
11173 
11174   switch (TREE_CODE (expr))
11175     {
11176     case INTEGER_CST:
11177       return integer_zerop (expr) || integer_onep (expr);
11178 
11179     case REAL_CST:
11180       return real_zerop (expr) || real_onep (expr);
11181 
11182     case VECTOR_CST:
11183       {
11184 	unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
11185 	if (VECTOR_CST_STEPPED_P (expr)
11186 	    && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
11187 	  return false;
11188 
11189 	for (unsigned int i = 0; i < nelts; ++i)
11190 	  {
11191 	    tree elt = vector_cst_elt (expr, i);
11192 	    if (!initializer_each_zero_or_onep (elt))
11193 	      return false;
11194 	  }
11195 
11196 	return true;
11197       }
11198 
11199     default:
11200       return false;
11201     }
11202 }
11203 
11204 /* Check if vector VEC consists entirely of equal elements and
11205    that the number of elements corresponds to the type of VEC.
11206    The function returns the first element of the vector
11207    or NULL_TREE if the vector is not uniform.  */
11208 tree
11209 uniform_vector_p (const_tree vec)
11210 {
11211   tree first, t;
11212   unsigned HOST_WIDE_INT i, nelts;
11213 
11214   if (vec == NULL_TREE)
11215     return NULL_TREE;
11216 
11217   gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
11218 
11219   if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
11220     return TREE_OPERAND (vec, 0);
11221 
11222   else if (TREE_CODE (vec) == VECTOR_CST)
11223     {
11224       if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
11225 	return VECTOR_CST_ENCODED_ELT (vec, 0);
11226       return NULL_TREE;
11227     }
11228 
11229   else if (TREE_CODE (vec) == CONSTRUCTOR
11230 	   && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
11231     {
11232       first = error_mark_node;
11233 
11234       FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
11235         {
11236           if (i == 0)
11237             {
11238               first = t;
11239               continue;
11240             }
11241 	  if (!operand_equal_p (first, t, 0))
11242 	    return NULL_TREE;
11243         }
11244       if (i != nelts)
11245 	return NULL_TREE;
11246 
11247       return first;
11248     }
11249 
11250   return NULL_TREE;
11251 }
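/* Illustrative sketch (added comment, not original source):

     uniform_vector_p on VECTOR_CST { 7, 7, 7, 7 }  returns the INTEGER_CST 7
     uniform_vector_p on VECTOR_CST { 7, 7, 7, 8 }  returns NULL_TREE
     uniform_vector_p on VEC_DUPLICATE_EXPR <x>     returns x

   A duplicated VECTOR_CST is encoded with NPATTERNS == 1 and
   VECTOR_CST_DUPLICATE_P set, which is the case handled above.  */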
11252 
11253 /* If the argument is an INTEGER_CST, return it.  If the argument is a vector
11254    with all elements equal to the same INTEGER_CST, return that INTEGER_CST.
11255    Otherwise return NULL_TREE.
11256    Look through location wrappers.  */
11257 
11258 tree
11259 uniform_integer_cst_p (tree t)
11260 {
11261   STRIP_ANY_LOCATION_WRAPPER (t);
11262 
11263   if (TREE_CODE (t) == INTEGER_CST)
11264     return t;
11265 
11266   if (VECTOR_TYPE_P (TREE_TYPE (t)))
11267     {
11268       t = uniform_vector_p (t);
11269       if (t && TREE_CODE (t) == INTEGER_CST)
11270 	return t;
11271     }
11272 
11273   return NULL_TREE;
11274 }
11275 
11276 /* If VECTOR_CST T has a single nonzero element, return the index of that
11277    element, otherwise return -1.  */
11278 
11279 int
11280 single_nonzero_element (const_tree t)
11281 {
11282   unsigned HOST_WIDE_INT nelts;
11283   unsigned int repeat_nelts;
11284   if (VECTOR_CST_NELTS (t).is_constant (&nelts))
11285     repeat_nelts = nelts;
11286   else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
11287     {
11288       nelts = vector_cst_encoded_nelts (t);
11289       repeat_nelts = VECTOR_CST_NPATTERNS (t);
11290     }
11291   else
11292     return -1;
11293 
11294   int res = -1;
11295   for (unsigned int i = 0; i < nelts; ++i)
11296     {
11297       tree elt = vector_cst_elt (t, i);
11298       if (!integer_zerop (elt) && !real_zerop (elt))
11299 	{
11300 	  if (res >= 0 || i >= repeat_nelts)
11301 	    return -1;
11302 	  res = i;
11303 	}
11304     }
11305   return res;
11306 }
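/* Worked example (added comment, not original source): for the fixed-length
   VECTOR_CST { 0, 0, 5, 0 } the loop above returns index 2; for
   { 0, 5, 5, 0 } it returns -1 because a second nonzero element is
   found.  */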
11307 
11308 /* Build an empty statement at location LOC.  */
11309 
11310 tree
11311 build_empty_stmt (location_t loc)
11312 {
11313   tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
11314   SET_EXPR_LOCATION (t, loc);
11315   return t;
11316 }
11317 
11318 
11319 /* Build an OpenMP clause with code CODE.  LOC is the location of the
11320    clause.  */
11321 
11322 tree
11323 build_omp_clause (location_t loc, enum omp_clause_code code)
11324 {
11325   tree t;
11326   int size, length;
11327 
11328   length = omp_clause_num_ops[code];
11329   size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
11330 
11331   record_node_allocation_statistics (OMP_CLAUSE, size);
11332 
11333   t = (tree) ggc_internal_alloc (size);
11334   memset (t, 0, size);
11335   TREE_SET_CODE (t, OMP_CLAUSE);
11336   OMP_CLAUSE_SET_CODE (t, code);
11337   OMP_CLAUSE_LOCATION (t) = loc;
11338 
11339   return t;
11340 }
11341 
11342 /* Build a tcc_vl_exp object with code CODE and room for LEN operands.  LEN
11343    includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
11344    Except for the CODE and operand count field, other storage for the
11345    object is initialized to zeros.  */
11346 
11347 tree
11348 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
11349 {
11350   tree t;
11351   int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
11352 
11353   gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
11354   gcc_assert (len >= 1);
11355 
11356   record_node_allocation_statistics (code, length);
11357 
11358   t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
11359 
11360   TREE_SET_CODE (t, code);
11361 
11362   /* Can't use TREE_OPERAND to store the length because if checking is
11363      enabled, it will try to check the length before we store it.  :-P  */
11364   t->exp.operands[0] = build_int_cst (sizetype, len);
11365 
11366   return t;
11367 }
11368 
11369 /* Helper function for build_call_* functions; build a CALL_EXPR with
11370    indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
11371    the argument slots.  */
11372 
11373 static tree
11374 build_call_1 (tree return_type, tree fn, int nargs)
11375 {
11376   tree t;
11377 
11378   t = build_vl_exp (CALL_EXPR, nargs + 3);
11379   TREE_TYPE (t) = return_type;
11380   CALL_EXPR_FN (t) = fn;
11381   CALL_EXPR_STATIC_CHAIN (t) = NULL;
11382 
11383   return t;
11384 }
11385 
11386 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11387    FN and a null static chain slot.  NARGS is the number of call arguments
11388    which are specified as "..." arguments.  */
11389 
11390 tree
11391 build_call_nary (tree return_type, tree fn, int nargs, ...)
11392 {
11393   tree ret;
11394   va_list args;
11395   va_start (args, nargs);
11396   ret = build_call_valist (return_type, fn, nargs, args);
11397   va_end (args);
11398   return ret;
11399 }
11400 
11401 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11402    FN and a null static chain slot.  NARGS is the number of call arguments
11403    which are specified as a va_list ARGS.  */
11404 
11405 tree
11406 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
11407 {
11408   tree t;
11409   int i;
11410 
11411   t = build_call_1 (return_type, fn, nargs);
11412   for (i = 0; i < nargs; i++)
11413     CALL_EXPR_ARG (t, i) = va_arg (args, tree);
11414   process_call_operands (t);
11415   return t;
11416 }
11417 
11418 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11419    FN and a null static chain slot.  NARGS is the number of call arguments
11420    which are specified as a tree array ARGS.  */
11421 
11422 tree
11423 build_call_array_loc (location_t loc, tree return_type, tree fn,
11424 		      int nargs, const tree *args)
11425 {
11426   tree t;
11427   int i;
11428 
11429   t = build_call_1 (return_type, fn, nargs);
11430   for (i = 0; i < nargs; i++)
11431     CALL_EXPR_ARG (t, i) = args[i];
11432   process_call_operands (t);
11433   SET_EXPR_LOCATION (t, loc);
11434   return t;
11435 }
11436 
11437 /* Like build_call_array, but takes a vec.  */
11438 
11439 tree
11440 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
11441 {
11442   tree ret, t;
11443   unsigned int ix;
11444 
11445   ret = build_call_1 (return_type, fn, vec_safe_length (args));
11446   FOR_EACH_VEC_SAFE_ELT (args, ix, t)
11447     CALL_EXPR_ARG (ret, ix) = t;
11448   process_call_operands (ret);
11449   return ret;
11450 }
11451 
11452 /* Conveniently construct a function call expression.  FNDECL names the
11453    function to be called and N arguments are passed in the array
11454    ARGARRAY.  */
11455 
11456 tree
11457 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11458 {
11459   tree fntype = TREE_TYPE (fndecl);
11460   tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11461 
11462   return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
11463 }
11464 
11465 /* Conveniently construct a function call expression.  FNDECL names the
11466    function to be called and the arguments are passed in the vector
11467    VEC.  */
11468 
11469 tree
11470 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11471 {
11472   return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11473 				    vec_safe_address (vec));
11474 }
11475 
11476 
11477 /* Conveniently construct a function call expression.  FNDECL names the
11478    function to be called, N is the number of arguments, and the "..."
11479    parameters are the argument expressions.  */
11480 
11481 tree
11482 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11483 {
11484   va_list ap;
11485   tree *argarray = XALLOCAVEC (tree, n);
11486   int i;
11487 
11488   va_start (ap, n);
11489   for (i = 0; i < n; i++)
11490     argarray[i] = va_arg (ap, tree);
11491   va_end (ap);
11492   return build_call_expr_loc_array (loc, fndecl, n, argarray);
11493 }
11494 
11495 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...).  Duplicated because
11496    varargs macros aren't supported by all bootstrap compilers.  */
11497 
11498 tree
11499 build_call_expr (tree fndecl, int n, ...)
11500 {
11501   va_list ap;
11502   tree *argarray = XALLOCAVEC (tree, n);
11503   int i;
11504 
11505   va_start (ap, n);
11506   for (i = 0; i < n; i++)
11507     argarray[i] = va_arg (ap, tree);
11508   va_end (ap);
11509   return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11510 }
11511 
11512 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
11513    type TYPE.  This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
11514    It will get gimplified later into an ordinary internal function.  */
11515 
11516 tree
11517 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
11518 				    tree type, int n, const tree *args)
11519 {
11520   tree t = build_call_1 (type, NULL_TREE, n);
11521   for (int i = 0; i < n; ++i)
11522     CALL_EXPR_ARG (t, i) = args[i];
11523   SET_EXPR_LOCATION (t, loc);
11524   CALL_EXPR_IFN (t) = ifn;
11525   process_call_operands (t);
11526   return t;
11527 }
11528 
11529 /* Build an internal call expression.  This is just like CALL_EXPR, except
11530    its CALL_EXPR_FN is NULL.  It will get gimplified later into an ordinary
11531    internal function.  */
11532 
11533 tree
11534 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
11535 			      tree type, int n, ...)
11536 {
11537   va_list ap;
11538   tree *argarray = XALLOCAVEC (tree, n);
11539   int i;
11540 
11541   va_start (ap, n);
11542   for (i = 0; i < n; i++)
11543     argarray[i] = va_arg (ap, tree);
11544   va_end (ap);
11545   return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11546 }
11547 
11548 /* Return a function call to FN, if the target is guaranteed to support it,
11549    or null otherwise.
11550 
11551    N is the number of arguments, passed in the "...", and TYPE is the
11552    type of the return value.  */
11553 
11554 tree
11555 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
11556 			   int n, ...)
11557 {
11558   va_list ap;
11559   tree *argarray = XALLOCAVEC (tree, n);
11560   int i;
11561 
11562   va_start (ap, n);
11563   for (i = 0; i < n; i++)
11564     argarray[i] = va_arg (ap, tree);
11565   va_end (ap);
11566   if (internal_fn_p (fn))
11567     {
11568       internal_fn ifn = as_internal_fn (fn);
11569       if (direct_internal_fn_p (ifn))
11570 	{
11571 	  tree_pair types = direct_internal_fn_types (ifn, type, argarray);
11572 	  if (!direct_internal_fn_supported_p (ifn, types,
11573 					       OPTIMIZE_FOR_BOTH))
11574 	    return NULL_TREE;
11575 	}
11576       return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11577     }
11578   else
11579     {
11580       tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
11581       if (!fndecl)
11582 	return NULL_TREE;
11583       return build_call_expr_loc_array (loc, fndecl, n, argarray);
11584     }
11585 }
11586 
11587 /* Return a function call to the appropriate builtin alloca variant.
11588 
11589    SIZE is the size to be allocated.  ALIGN, if non-zero, is the requested
11590    alignment of the allocated area.  MAX_SIZE, if non-negative, is an upper
11591    bound for SIZE in case it is not a fixed value.  */
11592 
11593 tree
11594 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
11595 {
11596   if (max_size >= 0)
11597     {
11598       tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
11599       return
11600 	build_call_expr (t, 3, size, size_int (align), size_int (max_size));
11601     }
11602   else if (align > 0)
11603     {
11604       tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
11605       return build_call_expr (t, 2, size, size_int (align));
11606     }
11607   else
11608     {
11609       tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
11610       return build_call_expr (t, 1, size);
11611     }
11612 }
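/* Illustrative sketch (added comment, not original source), with SIZE being
   some size operand built by the caller:

     build_alloca_call_expr (size, 0, -1)
       builds a call to __builtin_alloca (size)
     build_alloca_call_expr (size, 16, -1)
       builds a call to __builtin_alloca_with_align (size, 16)
     build_alloca_call_expr (size, 16, 4096)
       builds a call to __builtin_alloca_with_align_and_max (size, 16, 4096)  */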
11613 
11614 /* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
11615    if SIZE == -1) and return a tree node representing a char* pointer to
11616    it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)).  When STR is nonnull
11617    the STRING_CST value is the LEN bytes at STR (the representation
11618    of the string, which may be wide).  Otherwise it's all zeros.  */
11619 
11620 tree
11621 build_string_literal (unsigned len, const char *str /* = NULL */,
11622 		      tree eltype /* = char_type_node */,
11623 		      unsigned HOST_WIDE_INT size /* = -1 */)
11624 {
11625   tree t = build_string (len, str);
11626   /* Set the maximum valid index based on the string length or SIZE.  */
11627   unsigned HOST_WIDE_INT maxidx
11628     = (size == HOST_WIDE_INT_M1U ? len : size) - 1;
11629 
11630   tree index = build_index_type (size_int (maxidx));
11631   eltype = build_type_variant (eltype, 1, 0);
11632   tree type = build_array_type (eltype, index);
11633   TREE_TYPE (t) = type;
11634   TREE_CONSTANT (t) = 1;
11635   TREE_READONLY (t) = 1;
11636   TREE_STATIC (t) = 1;
11637 
11638   type = build_pointer_type (eltype);
11639   t = build1 (ADDR_EXPR, type,
11640 	      build4 (ARRAY_REF, eltype,
11641 		      t, integer_zero_node, NULL_TREE, NULL_TREE));
11642   return t;
11643 }
11644 
11645 
11646 
11647 /* Return true if T (assumed to be a DECL) must be assigned a memory
11648    location.  */
11649 
11650 bool
11651 needs_to_live_in_memory (const_tree t)
11652 {
11653   return (TREE_ADDRESSABLE (t)
11654 	  || is_global_var (t)
11655 	  || (TREE_CODE (t) == RESULT_DECL
11656 	      && !DECL_BY_REFERENCE (t)
11657 	      && aggregate_value_p (t, current_function_decl)));
11658 }
11659 
11660 /* Return the value of the constant X, sign-extended.  */
11661 
11662 HOST_WIDE_INT
11663 int_cst_value (const_tree x)
11664 {
11665   unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
11666   unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
11667 
11668   /* Make sure the sign-extended value will fit in a HOST_WIDE_INT.  */
11669   gcc_assert (cst_and_fits_in_hwi (x));
11670 
11671   if (bits < HOST_BITS_PER_WIDE_INT)
11672     {
11673       bool negative = ((val >> (bits - 1)) & 1) != 0;
11674       if (negative)
11675 	val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
11676       else
11677 	val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
11678     }
11679 
11680   return val;
11681 }
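/* Worked example (added comment, not original source): for an INTEGER_CST
   of an 8-bit signed type whose low HOST_WIDE_INT bits are 0xff, BITS is 8
   and the sign bit is set, so the code above widens the sign and returns
   -1; for 0x7f it masks the high bits and returns 127.  */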
11682 
11683 /* If TYPE is an integral or pointer type, return an integer type with
11684    the same precision which is unsigned iff UNSIGNEDP is true, or itself
11685    if TYPE is already an integer type of signedness UNSIGNEDP.
11686    If TYPE is a floating-point type, return an integer type with the same
11687    bitsize and with the signedness given by UNSIGNEDP; this is useful
11688    when doing bit-level operations on a floating-point value.  */
11689 
11690 tree
11691 signed_or_unsigned_type_for (int unsignedp, tree type)
11692 {
11693   if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
11694     return type;
11695 
11696   if (TREE_CODE (type) == VECTOR_TYPE)
11697     {
11698       tree inner = TREE_TYPE (type);
11699       tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11700       if (!inner2)
11701 	return NULL_TREE;
11702       if (inner == inner2)
11703 	return type;
11704       return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11705     }
11706 
11707   if (TREE_CODE (type) == COMPLEX_TYPE)
11708     {
11709       tree inner = TREE_TYPE (type);
11710       tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11711       if (!inner2)
11712 	return NULL_TREE;
11713       if (inner == inner2)
11714 	return type;
11715       return build_complex_type (inner2);
11716     }
11717 
11718   unsigned int bits;
11719   if (INTEGRAL_TYPE_P (type)
11720       || POINTER_TYPE_P (type)
11721       || TREE_CODE (type) == OFFSET_TYPE)
11722     bits = TYPE_PRECISION (type);
11723   else if (TREE_CODE (type) == REAL_TYPE)
11724     bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
11725   else
11726     return NULL_TREE;
11727 
11728   return build_nonstandard_integer_type (bits, unsignedp);
11729 }
11730 
11731 /* If TYPE is an integral or pointer type, return an integer type with
11732    the same precision which is unsigned, or itself if TYPE is already an
11733    unsigned integer type.  If TYPE is a floating-point type, return an
11734    unsigned integer type with the same bitsize as TYPE.  */
11735 
11736 tree
11737 unsigned_type_for (tree type)
11738 {
11739   return signed_or_unsigned_type_for (1, type);
11740 }
11741 
11742 /* If TYPE is an integral or pointer type, return an integer type with
11743    the same precision which is signed, or itself if TYPE is already a
11744    signed integer type.  If TYPE is a floating-point type, return a
11745    signed integer type with the same bitsize as TYPE.  */
11746 
11747 tree
11748 signed_type_for (tree type)
11749 {
11750   return signed_or_unsigned_type_for (0, type);
11751 }
11752 
11753 /* If TYPE is a vector type, return a signed integer vector type with the
11754    same width and number of subparts. Otherwise return boolean_type_node.  */
11755 
11756 tree
11757 truth_type_for (tree type)
11758 {
11759   if (TREE_CODE (type) == VECTOR_TYPE)
11760     {
11761       if (VECTOR_BOOLEAN_TYPE_P (type))
11762 	return type;
11763       return build_truth_vector_type_for (type);
11764     }
11765   else
11766     return boolean_type_node;
11767 }
11768 
11769 /* Returns the largest value obtainable by casting something in INNER type to
11770    OUTER type.  */
11771 
11772 tree
11773 upper_bound_in_type (tree outer, tree inner)
11774 {
11775   unsigned int det = 0;
11776   unsigned oprec = TYPE_PRECISION (outer);
11777   unsigned iprec = TYPE_PRECISION (inner);
11778   unsigned prec;
11779 
11780   /* Compute a unique number for every combination.  */
11781   det |= (oprec > iprec) ? 4 : 0;
11782   det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11783   det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11784 
11785   /* Determine the exponent to use.  */
11786   switch (det)
11787     {
11788     case 0:
11789     case 1:
11790       /* oprec <= iprec, outer: signed, inner: don't care.  */
11791       prec = oprec - 1;
11792       break;
11793     case 2:
11794     case 3:
11795       /* oprec <= iprec, outer: unsigned, inner: don't care.  */
11796       prec = oprec;
11797       break;
11798     case 4:
11799       /* oprec > iprec, outer: signed, inner: signed.  */
11800       prec = iprec - 1;
11801       break;
11802     case 5:
11803       /* oprec > iprec, outer: signed, inner: unsigned.  */
11804       prec = iprec;
11805       break;
11806     case 6:
11807       /* oprec > iprec, outer: unsigned, inner: signed.  */
11808       prec = oprec;
11809       break;
11810     case 7:
11811       /* oprec > iprec, outer: unsigned, inner: unsigned.  */
11812       prec = iprec;
11813       break;
11814     default:
11815       gcc_unreachable ();
11816     }
11817 
11818   return wide_int_to_tree (outer,
11819 			   wi::mask (prec, false, TYPE_PRECISION (outer)));
11820 }
11821 
11822 /* Returns the smallest value obtainable by casting something in INNER type to
11823    OUTER type.  */
11824 
11825 tree
11826 lower_bound_in_type (tree outer, tree inner)
11827 {
11828   unsigned oprec = TYPE_PRECISION (outer);
11829   unsigned iprec = TYPE_PRECISION (inner);
11830 
11831   /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11832      and obtain 0.  */
11833   if (TYPE_UNSIGNED (outer)
11834       /* If we are widening something of an unsigned type, OUTER type
11835 	 contains all values of INNER type.  In particular, both INNER
11836 	 and OUTER types have zero in common.  */
11837       || (oprec > iprec && TYPE_UNSIGNED (inner)))
11838     return build_int_cst (outer, 0);
11839   else
11840     {
11841       /* If we are widening a signed type to another signed type, we
11842 	 want to obtain -2^^(iprec-1).  If we are keeping the
11843 	 precision or narrowing to a signed type, we want to obtain
11844 	 -2^(oprec-1).  */
11845       unsigned prec = oprec > iprec ? iprec : oprec;
11846       return wide_int_to_tree (outer,
11847 			       wi::mask (prec - 1, true,
11848 					 TYPE_PRECISION (outer)));
11849     }
11850 }
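
/* Worked example, not part of the original source: with OUTER a signed
   16-bit type and INNER an unsigned 32-bit type, oprec = 16 <= iprec = 32
   and det = 1, so upper_bound_in_type yields 2^15 - 1, while
   lower_bound_in_type takes the signed branch with prec = 16 and yields
   -2^15; together these span the full range of the 16-bit OUTER type.  */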
11851 
11852 /* Return nonzero if two operands that are suitable for PHI nodes are
11853    necessarily equal.  Specifically, both ARG0 and ARG1 must be either
11854    SSA_NAME or invariant.  Note that this is strictly an optimization.
11855    That is, callers of this function can directly call operand_equal_p
11856    and get the same result, only slower.  */
11857 
11858 int
11859 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11860 {
11861   if (arg0 == arg1)
11862     return 1;
11863   if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11864     return 0;
11865   return operand_equal_p (arg0, arg1, 0);
11866 }
11867 
11868 /* Returns number of zeros at the end of binary representation of X.  */
11869 
11870 tree
11871 num_ending_zeros (const_tree x)
11872 {
11873   return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
11874 }
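
/* Illustrative example, not part of the original source: for an
   INTEGER_CST X with value 24 (binary 11000) this returns the constant 3
   in TREE_TYPE (X), since wi::ctz counts the trailing zero bits.  */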
11875 
11876 
11877 #define WALK_SUBTREE(NODE)				\
11878   do							\
11879     {							\
11880       result = walk_tree_1 (&(NODE), func, data, pset, lh);	\
11881       if (result)					\
11882 	return result;					\
11883     }							\
11884   while (0)
11885 
11886 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
11887    to be walked whenever a type is seen in the tree.  The rest of the operands
11888    and the return value are as for walk_tree.  */
11889 
11890 static tree
11891 walk_type_fields (tree type, walk_tree_fn func, void *data,
11892 		  hash_set<tree> *pset, walk_tree_lh lh)
11893 {
11894   tree result = NULL_TREE;
11895 
11896   switch (TREE_CODE (type))
11897     {
11898     case POINTER_TYPE:
11899     case REFERENCE_TYPE:
11900     case VECTOR_TYPE:
11901       /* We have to worry about mutually recursive pointers.  These can't
11902 	 be written in C.  They can in Ada.  It's pathological, but
11903 	 there's an ACATS test (c38102a) that checks it.  Deal with this
11904 	 by checking if we're pointing to another pointer, that one
11905 	 points to another pointer, that one does too, and we have no htab.
11906 	 If so, get a hash table.  We check three levels deep to avoid
11907 	 the cost of the hash table if we don't need one.  */
11908       if (POINTER_TYPE_P (TREE_TYPE (type))
11909 	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11910 	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11911 	  && !pset)
11912 	{
11913 	  result = walk_tree_without_duplicates (&TREE_TYPE (type),
11914 						 func, data);
11915 	  if (result)
11916 	    return result;
11917 
11918 	  break;
11919 	}
11920 
11921       /* fall through */
11922 
11923     case COMPLEX_TYPE:
11924       WALK_SUBTREE (TREE_TYPE (type));
11925       break;
11926 
11927     case METHOD_TYPE:
11928       WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11929 
11930       /* Fall through.  */
11931 
11932     case FUNCTION_TYPE:
11933       WALK_SUBTREE (TREE_TYPE (type));
11934       {
11935 	tree arg;
11936 
11937 	/* We never want to walk into default arguments.  */
11938 	for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11939 	  WALK_SUBTREE (TREE_VALUE (arg));
11940       }
11941       break;
11942 
11943     case ARRAY_TYPE:
11944       /* Don't follow this node's type if it is a pointer, for fear that
11945 	 we'll have infinite recursion.  If we have a PSET, then we
11946 	 need not fear.  */
11947       if (pset
11948 	  || (!POINTER_TYPE_P (TREE_TYPE (type))
11949 	      && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11950 	WALK_SUBTREE (TREE_TYPE (type));
11951       WALK_SUBTREE (TYPE_DOMAIN (type));
11952       break;
11953 
11954     case OFFSET_TYPE:
11955       WALK_SUBTREE (TREE_TYPE (type));
11956       WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11957       break;
11958 
11959     default:
11960       break;
11961     }
11962 
11963   return NULL_TREE;
11964 }
11965 
11966 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal.  FUNC is
11967    called with the DATA and the address of each sub-tree.  If FUNC returns a
11968    non-NULL value, the traversal is stopped, and the value returned by FUNC
11969    is returned.  If PSET is non-NULL it is used to record the nodes visited,
11970    and to avoid visiting a node more than once.  */
11971 
11972 tree
11973 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11974 	     hash_set<tree> *pset, walk_tree_lh lh)
11975 {
11976   enum tree_code code;
11977   int walk_subtrees;
11978   tree result;
11979 
11980 #define WALK_SUBTREE_TAIL(NODE)				\
11981   do							\
11982     {							\
11983        tp = & (NODE);					\
11984        goto tail_recurse;				\
11985     }							\
11986   while (0)
11987 
11988  tail_recurse:
11989   /* Skip empty subtrees.  */
11990   if (!*tp)
11991     return NULL_TREE;
11992 
11993   /* Don't walk the same tree twice, if the user has requested
11994      that we avoid doing so.  */
11995   if (pset && pset->add (*tp))
11996     return NULL_TREE;
11997 
11998   /* Call the function.  */
11999   walk_subtrees = 1;
12000   result = (*func) (tp, &walk_subtrees, data);
12001 
12002   /* If we found something, return it.  */
12003   if (result)
12004     return result;
12005 
12006   code = TREE_CODE (*tp);
12007 
12008   /* Even if we didn't, FUNC may have decided that there was nothing
12009      interesting below this point in the tree.  */
12010   if (!walk_subtrees)
12011     {
12012       /* But we still need to check our siblings.  */
12013       if (code == TREE_LIST)
12014 	WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
12015       else if (code == OMP_CLAUSE)
12016 	WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12017       else
12018 	return NULL_TREE;
12019     }
12020 
12021   if (lh)
12022     {
12023       result = (*lh) (tp, &walk_subtrees, func, data, pset);
12024       if (result || !walk_subtrees)
12025         return result;
12026     }
12027 
12028   switch (code)
12029     {
12030     case ERROR_MARK:
12031     case IDENTIFIER_NODE:
12032     case INTEGER_CST:
12033     case REAL_CST:
12034     case FIXED_CST:
12035     case STRING_CST:
12036     case BLOCK:
12037     case PLACEHOLDER_EXPR:
12038     case SSA_NAME:
12039     case FIELD_DECL:
12040     case RESULT_DECL:
12041       /* None of these have subtrees other than those already walked
12042 	 above.  */
12043       break;
12044 
12045     case TREE_LIST:
12046       WALK_SUBTREE (TREE_VALUE (*tp));
12047       WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
12048       break;
12049 
12050     case TREE_VEC:
12051       {
12052 	int len = TREE_VEC_LENGTH (*tp);
12053 
12054 	if (len == 0)
12055 	  break;
12056 
12057 	/* Walk all elements but the first.  */
12058 	while (--len)
12059 	  WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
12060 
12061 	/* Now walk the first one as a tail call.  */
12062 	WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
12063       }
12064 
12065     case VECTOR_CST:
12066       {
12067 	unsigned len = vector_cst_encoded_nelts (*tp);
12068 	if (len == 0)
12069 	  break;
12070 	/* Walk all elements but the first.  */
12071 	while (--len)
12072 	  WALK_SUBTREE (VECTOR_CST_ENCODED_ELT (*tp, len));
12073 	/* Now walk the first one as a tail call.  */
12074 	WALK_SUBTREE_TAIL (VECTOR_CST_ENCODED_ELT (*tp, 0));
12075       }
12076 
12077     case COMPLEX_CST:
12078       WALK_SUBTREE (TREE_REALPART (*tp));
12079       WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
12080 
12081     case CONSTRUCTOR:
12082       {
12083 	unsigned HOST_WIDE_INT idx;
12084 	constructor_elt *ce;
12085 
12086 	for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
12087 	     idx++)
12088 	  WALK_SUBTREE (ce->value);
12089       }
12090       break;
12091 
12092     case SAVE_EXPR:
12093       WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
12094 
12095     case BIND_EXPR:
12096       {
12097 	tree decl;
12098 	for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
12099 	  {
12100 	    /* Walk the DECL_INITIAL and DECL_SIZE.  We don't want to walk
12101 	       into declarations that are just mentioned, rather than
12102 	       declared; they don't really belong to this part of the tree.
12103 	       And, we can see cycles: the initializer for a declaration
12104 	       can refer to the declaration itself.  */
12105 	    WALK_SUBTREE (DECL_INITIAL (decl));
12106 	    WALK_SUBTREE (DECL_SIZE (decl));
12107 	    WALK_SUBTREE (DECL_SIZE_UNIT (decl));
12108 	  }
12109 	WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
12110       }
12111 
12112     case STATEMENT_LIST:
12113       {
12114 	tree_stmt_iterator i;
12115 	for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
12116 	  WALK_SUBTREE (*tsi_stmt_ptr (i));
12117       }
12118       break;
12119 
12120     case OMP_CLAUSE:
12121       switch (OMP_CLAUSE_CODE (*tp))
12122 	{
12123 	case OMP_CLAUSE_GANG:
12124 	case OMP_CLAUSE__GRIDDIM_:
12125 	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
12126 	  /* FALLTHRU */
12127 
12128 	case OMP_CLAUSE_ASYNC:
12129 	case OMP_CLAUSE_WAIT:
12130 	case OMP_CLAUSE_WORKER:
12131 	case OMP_CLAUSE_VECTOR:
12132 	case OMP_CLAUSE_NUM_GANGS:
12133 	case OMP_CLAUSE_NUM_WORKERS:
12134 	case OMP_CLAUSE_VECTOR_LENGTH:
12135 	case OMP_CLAUSE_PRIVATE:
12136 	case OMP_CLAUSE_SHARED:
12137 	case OMP_CLAUSE_FIRSTPRIVATE:
12138 	case OMP_CLAUSE_COPYIN:
12139 	case OMP_CLAUSE_COPYPRIVATE:
12140 	case OMP_CLAUSE_FINAL:
12141 	case OMP_CLAUSE_IF:
12142 	case OMP_CLAUSE_NUM_THREADS:
12143 	case OMP_CLAUSE_SCHEDULE:
12144 	case OMP_CLAUSE_UNIFORM:
12145 	case OMP_CLAUSE_DEPEND:
12146 	case OMP_CLAUSE_NONTEMPORAL:
12147 	case OMP_CLAUSE_NUM_TEAMS:
12148 	case OMP_CLAUSE_THREAD_LIMIT:
12149 	case OMP_CLAUSE_DEVICE:
12150 	case OMP_CLAUSE_DIST_SCHEDULE:
12151 	case OMP_CLAUSE_SAFELEN:
12152 	case OMP_CLAUSE_SIMDLEN:
12153 	case OMP_CLAUSE_ORDERED:
12154 	case OMP_CLAUSE_PRIORITY:
12155 	case OMP_CLAUSE_GRAINSIZE:
12156 	case OMP_CLAUSE_NUM_TASKS:
12157 	case OMP_CLAUSE_HINT:
12158 	case OMP_CLAUSE_TO_DECLARE:
12159 	case OMP_CLAUSE_LINK:
12160 	case OMP_CLAUSE_USE_DEVICE_PTR:
12161 	case OMP_CLAUSE_USE_DEVICE_ADDR:
12162 	case OMP_CLAUSE_IS_DEVICE_PTR:
12163 	case OMP_CLAUSE_INCLUSIVE:
12164 	case OMP_CLAUSE_EXCLUSIVE:
12165 	case OMP_CLAUSE__LOOPTEMP_:
12166 	case OMP_CLAUSE__REDUCTEMP_:
12167 	case OMP_CLAUSE__CONDTEMP_:
12168 	case OMP_CLAUSE__SCANTEMP_:
12169 	case OMP_CLAUSE__SIMDUID_:
12170 	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
12171 	  /* FALLTHRU */
12172 
12173 	case OMP_CLAUSE_INDEPENDENT:
12174 	case OMP_CLAUSE_NOWAIT:
12175 	case OMP_CLAUSE_DEFAULT:
12176 	case OMP_CLAUSE_UNTIED:
12177 	case OMP_CLAUSE_MERGEABLE:
12178 	case OMP_CLAUSE_PROC_BIND:
12179 	case OMP_CLAUSE_DEVICE_TYPE:
12180 	case OMP_CLAUSE_INBRANCH:
12181 	case OMP_CLAUSE_NOTINBRANCH:
12182 	case OMP_CLAUSE_FOR:
12183 	case OMP_CLAUSE_PARALLEL:
12184 	case OMP_CLAUSE_SECTIONS:
12185 	case OMP_CLAUSE_TASKGROUP:
12186 	case OMP_CLAUSE_NOGROUP:
12187 	case OMP_CLAUSE_THREADS:
12188 	case OMP_CLAUSE_SIMD:
12189 	case OMP_CLAUSE_DEFAULTMAP:
12190 	case OMP_CLAUSE_ORDER:
12191 	case OMP_CLAUSE_BIND:
12192 	case OMP_CLAUSE_AUTO:
12193 	case OMP_CLAUSE_SEQ:
12194 	case OMP_CLAUSE__SIMT_:
12195 	case OMP_CLAUSE_IF_PRESENT:
12196 	case OMP_CLAUSE_FINALIZE:
12197 	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12198 
12199 	case OMP_CLAUSE_LASTPRIVATE:
12200 	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12201 	  WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
12202 	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12203 
12204 	case OMP_CLAUSE_COLLAPSE:
12205 	case OMP_CLAUSE_TILE:
12206 	  {
12207 	    int i;
12208 	    for (i = 0; i < 3; i++)
12209 	      WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
12210 	    WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12211 	  }
12212 
12213 	case OMP_CLAUSE_LINEAR:
12214 	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12215 	  WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
12216 	  WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
12217 	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12218 
12219 	case OMP_CLAUSE_ALIGNED:
12220 	case OMP_CLAUSE_FROM:
12221 	case OMP_CLAUSE_TO:
12222 	case OMP_CLAUSE_MAP:
12223 	case OMP_CLAUSE__CACHE_:
12224 	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12225 	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
12226 	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12227 
12228 	case OMP_CLAUSE_REDUCTION:
12229 	case OMP_CLAUSE_TASK_REDUCTION:
12230 	case OMP_CLAUSE_IN_REDUCTION:
12231 	  {
12232 	    int i;
12233 	    for (i = 0; i < 5; i++)
12234 	      WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
12235 	    WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12236 	  }
12237 
12238 	default:
12239 	  gcc_unreachable ();
12240 	}
12241       break;
12242 
12243     case TARGET_EXPR:
12244       {
12245 	int i, len;
12246 
12247 	/* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
12248 	   But, we only want to walk once.  */
12249 	len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
12250 	for (i = 0; i < len; ++i)
12251 	  WALK_SUBTREE (TREE_OPERAND (*tp, i));
12252 	WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
12253       }
12254 
12255     case DECL_EXPR:
12256       /* If this is a TYPE_DECL, walk into the fields of the type that it's
12257 	 defining.  We only want to walk into these fields of a type in this
12258 	 case and not in the general case of a mere reference to the type.
12259 
12260 	 The criterion is as follows: if the field can be an expression, it
12261 	 must be walked only here.  This should be in keeping with the fields
12262 	 that are directly gimplified in gimplify_type_sizes in order for the
12263 	 mark/copy-if-shared/unmark machinery of the gimplifier to work with
12264 	 variable-sized types.
12265 
12266 	 Note that DECLs get walked as part of processing the BIND_EXPR.  */
12267       if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
12268 	{
12269 	  tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
12270 	  if (TREE_CODE (*type_p) == ERROR_MARK)
12271 	    return NULL_TREE;
12272 
12273 	  /* Call the function for the type.  See if it returns anything or
12274 	     doesn't want us to continue.  If we are to continue, walk both
12275 	     the normal fields and those for the declaration case.  */
12276 	  result = (*func) (type_p, &walk_subtrees, data);
12277 	  if (result || !walk_subtrees)
12278 	    return result;
12279 
12280 	  /* But do not walk a pointed-to type since it may itself need to
12281 	     be walked in the declaration case if it isn't anonymous.  */
12282 	  if (!POINTER_TYPE_P (*type_p))
12283 	    {
12284 	      result = walk_type_fields (*type_p, func, data, pset, lh);
12285 	      if (result)
12286 		return result;
12287 	    }
12288 
12289 	  /* If this is a record type, also walk the fields.  */
12290 	  if (RECORD_OR_UNION_TYPE_P (*type_p))
12291 	    {
12292 	      tree field;
12293 
12294 	      for (field = TYPE_FIELDS (*type_p); field;
12295 		   field = DECL_CHAIN (field))
12296 		{
12297 		  /* We'd like to look at the type of the field, but we can
12298 		     easily get infinite recursion.  So assume it's pointed
12299 		     to elsewhere in the tree.  Also, ignore things that
12300 		     aren't fields.  */
12301 		  if (TREE_CODE (field) != FIELD_DECL)
12302 		    continue;
12303 
12304 		  WALK_SUBTREE (DECL_FIELD_OFFSET (field));
12305 		  WALK_SUBTREE (DECL_SIZE (field));
12306 		  WALK_SUBTREE (DECL_SIZE_UNIT (field));
12307 		  if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
12308 		    WALK_SUBTREE (DECL_QUALIFIER (field));
12309 		}
12310 	    }
12311 
12312 	  /* Same for scalar types.  */
12313 	  else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
12314 		   || TREE_CODE (*type_p) == ENUMERAL_TYPE
12315 		   || TREE_CODE (*type_p) == INTEGER_TYPE
12316 		   || TREE_CODE (*type_p) == FIXED_POINT_TYPE
12317 		   || TREE_CODE (*type_p) == REAL_TYPE)
12318 	    {
12319 	      WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
12320 	      WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
12321 	    }
12322 
12323 	  WALK_SUBTREE (TYPE_SIZE (*type_p));
12324 	  WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
12325 	}
12326       /* FALLTHRU */
12327 
12328     default:
12329       if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
12330 	{
12331 	  int i, len;
12332 
12333 	  /* Walk over all the sub-trees of this operand.  */
12334 	  len = TREE_OPERAND_LENGTH (*tp);
12335 
12336 	  /* Go through the subtrees.  We need to do this in forward order so
12337 	     that the scope of a FOR_EXPR is handled properly.  */
12338 	  if (len)
12339 	    {
12340 	      for (i = 0; i < len - 1; ++i)
12341 		WALK_SUBTREE (TREE_OPERAND (*tp, i));
12342 	      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
12343 	    }
12344 	}
12345       /* If this is a type, walk the needed fields in the type.  */
12346       else if (TYPE_P (*tp))
12347 	return walk_type_fields (*tp, func, data, pset, lh);
12348       break;
12349     }
12350 
12351   /* We didn't find what we were looking for.  */
12352   return NULL_TREE;
12353 
12354 #undef WALK_SUBTREE_TAIL
12355 }
12356 #undef WALK_SUBTREE
12357 
12358 /* Like walk_tree, but does not walk duplicate nodes more than once.  */
12359 
12360 tree
12361 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
12362 				walk_tree_lh lh)
12363 {
12364   tree result;
12365 
12366   hash_set<tree> pset;
12367   result = walk_tree_1 (tp, func, data, &pset, lh);
12368   return result;
12369 }
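
/* Usage sketch, not part of the original source: a hypothetical callback
   that stops the walk at the first SSA_NAME encountered and hands that
   node back to the caller might look like

     static tree
     find_ssa_name_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                      void *data ATTRIBUTE_UNUSED)
     {
       if (TREE_CODE (*tp) == SSA_NAME)
         return *tp;
       return NULL_TREE;
     }

   and would be invoked as

     tree ssa = walk_tree_without_duplicates (&expr, find_ssa_name_r, NULL);

   which yields the first SSA_NAME found, or NULL_TREE if there is none.  */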
12370 
12371 
12372 tree
12373 tree_block (tree t)
12374 {
12375   const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12376 
12377   if (IS_EXPR_CODE_CLASS (c))
12378     return LOCATION_BLOCK (t->exp.locus);
12379   gcc_unreachable ();
12380   return NULL;
12381 }
12382 
12383 void
12384 tree_set_block (tree t, tree b)
12385 {
12386   const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12387 
12388   if (IS_EXPR_CODE_CLASS (c))
12389     {
12390       t->exp.locus = set_block (t->exp.locus, b);
12391     }
12392   else
12393     gcc_unreachable ();
12394 }
12395 
12396 /* Create a nameless artificial label and put it in the current
12397    function context.  The label has a location of LOC.  Returns the
12398    newly created label.  */
12399 
12400 tree
12401 create_artificial_label (location_t loc)
12402 {
12403   tree lab = build_decl (loc,
12404       			 LABEL_DECL, NULL_TREE, void_type_node);
12405 
12406   DECL_ARTIFICIAL (lab) = 1;
12407   DECL_IGNORED_P (lab) = 1;
12408   DECL_CONTEXT (lab) = current_function_decl;
12409   return lab;
12410 }
12411 
12412 /*  Given a tree, try to return a useful variable name that we can use
12413     to prefix a temporary that is being assigned the value of the tree.
12414     I.e. given <temp> = &A, return A.  */
12415 
12416 const char *
12417 get_name (tree t)
12418 {
12419   tree stripped_decl;
12420 
12421   stripped_decl = t;
12422   STRIP_NOPS (stripped_decl);
12423   if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
12424     return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
12425   else if (TREE_CODE (stripped_decl) == SSA_NAME)
12426     {
12427       tree name = SSA_NAME_IDENTIFIER (stripped_decl);
12428       if (!name)
12429 	return NULL;
12430       return IDENTIFIER_POINTER (name);
12431     }
12432   else
12433     {
12434       switch (TREE_CODE (stripped_decl))
12435 	{
12436 	case ADDR_EXPR:
12437 	  return get_name (TREE_OPERAND (stripped_decl, 0));
12438 	default:
12439 	  return NULL;
12440 	}
12441     }
12442 }
12443 
12444 /* Return true if FNTYPE has a variable argument list.  */
12445 
12446 bool
12447 stdarg_p (const_tree fntype)
12448 {
12449   function_args_iterator args_iter;
12450   tree n = NULL_TREE, t;
12451 
12452   if (!fntype)
12453     return false;
12454 
12455   FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
12456     {
12457       n = t;
12458     }
12459 
12460   return n != NULL_TREE && n != void_type_node;
12461 }
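
/* Illustrative example, not part of the original source: for the type of
   "int printf (const char *, ...)" the argument list does not end in
   void_type_node, so stdarg_p returns true, whereas for the prototyped
   non-variadic "int puts (const char *)" the list is void-terminated and
   stdarg_p returns false.  */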
12462 
12463 /* Return true if FNTYPE has a prototype.  */
12464 
12465 bool
12466 prototype_p (const_tree fntype)
12467 {
12468   tree t;
12469 
12470   gcc_assert (fntype != NULL_TREE);
12471 
12472   t = TYPE_ARG_TYPES (fntype);
12473   return (t != NULL_TREE);
12474 }
12475 
12476 /* If BLOCK is inlined from an __attribute__((__artificial__))
12477    routine, return a pointer to the location from which it has been
12478    called.  */
12479 location_t *
12480 block_nonartificial_location (tree block)
12481 {
12482   location_t *ret = NULL;
12483 
12484   while (block && TREE_CODE (block) == BLOCK
12485 	 && BLOCK_ABSTRACT_ORIGIN (block))
12486     {
12487       tree ao = BLOCK_ABSTRACT_ORIGIN (block);
12488       if (TREE_CODE (ao) == FUNCTION_DECL)
12489 	{
12490 	  /* If AO is an artificial inline, point RET to the
12491 	     call site locus at which it has been inlined and continue
12492 	     the loop, in case AO's caller is also an artificial
12493 	     inline.  */
12494 	  if (DECL_DECLARED_INLINE_P (ao)
12495 	      && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
12496 	    ret = &BLOCK_SOURCE_LOCATION (block);
12497 	  else
12498 	    break;
12499 	}
12500       else if (TREE_CODE (ao) != BLOCK)
12501 	break;
12502 
12503       block = BLOCK_SUPERCONTEXT (block);
12504     }
12505   return ret;
12506 }
12507 
12508 
12509 /* If EXP is inlined from an __attribute__((__artificial__))
12510    function, return the location of the original call expression.  */
12511 
12512 location_t
12513 tree_nonartificial_location (tree exp)
12514 {
12515   location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
12516 
12517   if (loc)
12518     return *loc;
12519   else
12520     return EXPR_LOCATION (exp);
12521 }
12522 
12523 
12524 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
12525    nodes.  */
12526 
12527 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE.  */
12528 
12529 hashval_t
12530 cl_option_hasher::hash (tree x)
12531 {
12532   const_tree const t = x;
12533   const char *p;
12534   size_t i;
12535   size_t len = 0;
12536   hashval_t hash = 0;
12537 
12538   if (TREE_CODE (t) == OPTIMIZATION_NODE)
12539     {
12540       p = (const char *)TREE_OPTIMIZATION (t);
12541       len = sizeof (struct cl_optimization);
12542     }
12543 
12544   else if (TREE_CODE (t) == TARGET_OPTION_NODE)
12545     return cl_target_option_hash (TREE_TARGET_OPTION (t));
12546 
12547   else
12548     gcc_unreachable ();
12549 
12550   /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
12551      something else.  */
12552   for (i = 0; i < len; i++)
12553     if (p[i])
12554       hash = (hash << 4) ^ ((i << 2) | p[i]);
12555 
12556   return hash;
12557 }
12558 
12559 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
12560    TARGET_OPTION tree node) is the same as that given by *Y, which is the
12561    same.  */
12562 
12563 bool
12564 cl_option_hasher::equal (tree x, tree y)
12565 {
12566   const_tree const xt = x;
12567   const_tree const yt = y;
12568 
12569   if (TREE_CODE (xt) != TREE_CODE (yt))
12570     return 0;
12571 
12572   if (TREE_CODE (xt) == OPTIMIZATION_NODE)
12573     return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
12574 				      TREE_OPTIMIZATION (yt));
12575   else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
12576     return cl_target_option_eq (TREE_TARGET_OPTION (xt),
12577 				TREE_TARGET_OPTION (yt));
12578   else
12579     gcc_unreachable ();
12580 }
12581 
12582 /* Build an OPTIMIZATION_NODE based on the options in OPTS.  */
12583 
12584 tree
12585 build_optimization_node (struct gcc_options *opts)
12586 {
12587   tree t;
12588 
12589   /* Use the cache of optimization nodes.  */
12590 
12591   cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
12592 			opts);
12593 
12594   tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
12595   t = *slot;
12596   if (!t)
12597     {
12598       /* Insert this one into the hash table.  */
12599       t = cl_optimization_node;
12600       *slot = t;
12601 
12602       /* Make a new node for next time round.  */
12603       cl_optimization_node = make_node (OPTIMIZATION_NODE);
12604     }
12605 
12606   return t;
12607 }
12608 
12609 /* Build a TARGET_OPTION_NODE based on the options in OPTS.  */
12610 
12611 tree
12612 build_target_option_node (struct gcc_options *opts)
12613 {
12614   tree t;
12615 
12616   /* Use the cache of target option nodes.  */
12617 
12618   cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
12619 			 opts);
12620 
12621   tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
12622   t = *slot;
12623   if (!t)
12624     {
12625       /* Insert this one into the hash table.  */
12626       t = cl_target_option_node;
12627       *slot = t;
12628 
12629       /* Make a new node for next time round.  */
12630       cl_target_option_node = make_node (TARGET_OPTION_NODE);
12631     }
12632 
12633   return t;
12634 }
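
/* Usage sketch, not part of the original source: front ends typically
   attach the option state in effect for a function by building these
   shared nodes from the global options, e.g.

     DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
       = build_optimization_node (&global_options);
     DECL_FUNCTION_SPECIFIC_TARGET (fndecl)
       = build_target_option_node (&global_options);

   Identical option sets map to a single node via the hash table above.  */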
12635 
12636 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
12637    so that they aren't saved during PCH writing.  */
12638 
12639 void
12640 prepare_target_option_nodes_for_pch (void)
12641 {
12642   hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
12643   for (; iter != cl_option_hash_table->end (); ++iter)
12644     if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
12645       TREE_TARGET_GLOBALS (*iter) = NULL;
12646 }
12647 
12648 /* Determine the "ultimate origin" of a block.  */
12649 
12650 tree
12651 block_ultimate_origin (const_tree block)
12652 {
12653   tree origin = BLOCK_ABSTRACT_ORIGIN (block);
12654 
12655   if (origin == NULL_TREE)
12656     return NULL_TREE;
12657   else
12658     {
12659       gcc_checking_assert ((DECL_P (origin)
12660 			    && DECL_ORIGIN (origin) == origin)
12661 			   || BLOCK_ORIGIN (origin) == origin);
12662       return origin;
12663     }
12664 }
12665 
12666 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
12667    no instruction.  */
12668 
12669 bool
12670 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
12671 {
12672   /* Do not strip casts into or out of differing address spaces.  */
12673   if (POINTER_TYPE_P (outer_type)
12674       && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
12675     {
12676       if (!POINTER_TYPE_P (inner_type)
12677 	  || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
12678 	      != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
12679 	return false;
12680     }
12681   else if (POINTER_TYPE_P (inner_type)
12682 	   && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
12683     {
12684       /* We already know that outer_type is not a pointer with
12685 	 a non-generic address space.  */
12686       return false;
12687     }
12688 
12689   /* Use precision rather than machine mode when we can, which gives
12690      the correct answer even for submode (bit-field) types.  */
12691   if ((INTEGRAL_TYPE_P (outer_type)
12692        || POINTER_TYPE_P (outer_type)
12693        || TREE_CODE (outer_type) == OFFSET_TYPE)
12694       && (INTEGRAL_TYPE_P (inner_type)
12695 	  || POINTER_TYPE_P (inner_type)
12696 	  || TREE_CODE (inner_type) == OFFSET_TYPE))
12697     return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
12698 
12699   /* Otherwise fall back on comparing machine modes (e.g. for
12700      aggregate types, floats).  */
12701   return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
12702 }
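
/* Illustrative example, not part of the original source: a cast between
   "int" and "unsigned int" is a nop conversion because the precisions
   match, while a widening cast from "short" to "int" is not, and a cast
   between pointers into different named address spaces is never a nop.  */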
12703 
12704 /* Return true iff conversion in EXP generates no instruction.  Mark
12705    it inline so that we fully inline into the stripping functions even
12706    though we have two uses of this function.  */
12707 
12708 static inline bool
12709 tree_nop_conversion (const_tree exp)
12710 {
12711   tree outer_type, inner_type;
12712 
12713   if (location_wrapper_p (exp))
12714     return true;
12715   if (!CONVERT_EXPR_P (exp)
12716       && TREE_CODE (exp) != NON_LVALUE_EXPR)
12717     return false;
12718 
12719   outer_type = TREE_TYPE (exp);
12720   inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12721   if (!inner_type || inner_type == error_mark_node)
12722     return false;
12723 
12724   return tree_nop_conversion_p (outer_type, inner_type);
12725 }
12726 
12727 /* Return true iff conversion in EXP generates no instruction.  Don't
12728    consider conversions changing the signedness.  */
12729 
12730 static bool
12731 tree_sign_nop_conversion (const_tree exp)
12732 {
12733   tree outer_type, inner_type;
12734 
12735   if (!tree_nop_conversion (exp))
12736     return false;
12737 
12738   outer_type = TREE_TYPE (exp);
12739   inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12740 
12741   return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12742 	  && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12743 }
12744 
12745 /* Strip conversions from EXP according to tree_nop_conversion and
12746    return the resulting expression.  */
12747 
12748 tree
12749 tree_strip_nop_conversions (tree exp)
12750 {
12751   while (tree_nop_conversion (exp))
12752     exp = TREE_OPERAND (exp, 0);
12753   return exp;
12754 }
12755 
12756 /* Strip conversions from EXP according to tree_sign_nop_conversion
12757    and return the resulting expression.  */
12758 
12759 tree
12760 tree_strip_sign_nop_conversions (tree exp)
12761 {
12762   while (tree_sign_nop_conversion (exp))
12763     exp = TREE_OPERAND (exp, 0);
12764   return exp;
12765 }
12766 
12767 /* Strip any floating-point extensions from EXP and return the result.  */
12768 tree
12769 strip_float_extensions (tree exp)
12770 {
12771   tree sub, expt, subt;
12772 
12773   /*  For a floating-point constant, look up the narrowest type that can
12774       hold it properly and handle it like (type)(narrowest_type)constant.
12775       This way we can optimize, for instance, a = a * 2.0 where "a" is
12776       float but 2.0 is a double constant.  */
12777   if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12778     {
12779       REAL_VALUE_TYPE orig;
12780       tree type = NULL;
12781 
12782       orig = TREE_REAL_CST (exp);
12783       if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12784 	  && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12785 	type = float_type_node;
12786       else if (TYPE_PRECISION (TREE_TYPE (exp))
12787 	       > TYPE_PRECISION (double_type_node)
12788 	       && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12789 	type = double_type_node;
12790       if (type)
12791 	return build_real_truncate (type, orig);
12792     }
12793 
12794   if (!CONVERT_EXPR_P (exp))
12795     return exp;
12796 
12797   sub = TREE_OPERAND (exp, 0);
12798   subt = TREE_TYPE (sub);
12799   expt = TREE_TYPE (exp);
12800 
12801   if (!FLOAT_TYPE_P (subt))
12802     return exp;
12803 
12804   if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12805     return exp;
12806 
12807   if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
12808     return exp;
12809 
12810   return strip_float_extensions (sub);
12811 }
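
/* Illustrative example, not part of the original source: given the
   expression (double) f where f has type float, this returns f itself,
   and given the double REAL_CST 2.0, which truncates exactly to float,
   it returns the equivalent float constant.  */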
12812 
12813 /* Strip out all handled components that produce invariant
12814    offsets.  */
12815 
12816 const_tree
12817 strip_invariant_refs (const_tree op)
12818 {
12819   while (handled_component_p (op))
12820     {
12821       switch (TREE_CODE (op))
12822 	{
12823 	case ARRAY_REF:
12824 	case ARRAY_RANGE_REF:
12825 	  if (!is_gimple_constant (TREE_OPERAND (op, 1))
12826 	      || TREE_OPERAND (op, 2) != NULL_TREE
12827 	      || TREE_OPERAND (op, 3) != NULL_TREE)
12828 	    return NULL;
12829 	  break;
12830 
12831 	case COMPONENT_REF:
12832 	  if (TREE_OPERAND (op, 2) != NULL_TREE)
12833 	    return NULL;
12834 	  break;
12835 
12836 	default:;
12837 	}
12838       op = TREE_OPERAND (op, 0);
12839     }
12840 
12841   return op;
12842 }
12843 
12844 static GTY(()) tree gcc_eh_personality_decl;
12845 
12846 /* Return the GCC personality function decl.  */
12847 
12848 tree
12849 lhd_gcc_personality (void)
12850 {
12851   if (!gcc_eh_personality_decl)
12852     gcc_eh_personality_decl = build_personality_function ("gcc");
12853   return gcc_eh_personality_decl;
12854 }
12855 
12856 /* TARGET is the call target of a GIMPLE call statement
12857    (obtained by gimple_call_fn).  Return true if it is an
12858    OBJ_TYPE_REF representing a virtual call of a C++ method.
12859    (As opposed to OBJ_TYPE_REF representing objc calls
12860    through a cast where middle-end devirtualization machinery
12861    can't apply.)  FOR_DUMP_P is true when being called from
12862    the dump routines.  */
12863 
12864 bool
12865 virtual_method_call_p (const_tree target, bool for_dump_p)
12866 {
12867   if (TREE_CODE (target) != OBJ_TYPE_REF)
12868     return false;
12869   tree t = TREE_TYPE (target);
12870   gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12871   t = TREE_TYPE (t);
12872   if (TREE_CODE (t) == FUNCTION_TYPE)
12873     return false;
12874   gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12875   /* If we do not have BINFO associated, it means that type was built
12876      without devirtualization enabled.  Do not consider this a virtual
12877      call.  */
12878   if (!TYPE_BINFO (obj_type_ref_class (target, for_dump_p)))
12879     return false;
12880   return true;
12881 }
12882 
12883 /* Lookup sub-BINFO of BINFO of TYPE at offset POS.  */
12884 
12885 static tree
12886 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12887 {
12888   unsigned int i;
12889   tree base_binfo, b;
12890 
12891   for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12892     if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12893 	&& types_same_for_odr (TREE_TYPE (base_binfo), type))
12894       return base_binfo;
12895     else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12896       return b;
12897   return NULL;
12898 }
12899 
12900 /* Try to find a base info of BINFO that would have its field decl at offset
12901    OFFSET within the BINFO type and which is of EXPECTED_TYPE.  If it can be
12902    found, return it; otherwise return NULL_TREE.  */
12903 
12904 tree
12905 get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
12906 {
12907   tree type = BINFO_TYPE (binfo);
12908 
12909   while (true)
12910     {
12911       HOST_WIDE_INT pos, size;
12912       tree fld;
12913       int i;
12914 
12915       if (types_same_for_odr (type, expected_type))
12916 	  return binfo;
12917       if (maybe_lt (offset, 0))
12918 	return NULL_TREE;
12919 
12920       for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12921 	{
12922 	  if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12923 	    continue;
12924 
12925 	  pos = int_bit_position (fld);
12926 	  size = tree_to_uhwi (DECL_SIZE (fld));
12927 	  if (known_in_range_p (offset, pos, size))
12928 	    break;
12929 	}
12930       if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12931 	return NULL_TREE;
12932 
12933       /* Offset 0 indicates the primary base, whose vtable contents are
12934 	 represented in the binfo for the derived class.  */
12935       else if (maybe_ne (offset, 0))
12936 	{
12937 	  tree found_binfo = NULL, base_binfo;
12938 	  /* Offsets in BINFO are in bytes relative to the whole structure
12939 	     while POS is in bits relative to the containing field.  */
12940 	  int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12941 			     / BITS_PER_UNIT);
12942 
12943 	  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12944 	    if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12945 		&& types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12946 	      {
12947 		found_binfo = base_binfo;
12948 		break;
12949 	      }
12950 	  if (found_binfo)
12951 	    binfo = found_binfo;
12952 	  else
12953 	    binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12954 					    binfo_offset);
12955 	 }
12956 
12957       type = TREE_TYPE (fld);
12958       offset -= pos;
12959     }
12960 }
12961 
12962 /* Returns true if X is a typedef decl.  */
12963 
12964 bool
12965 is_typedef_decl (const_tree x)
12966 {
12967   return (x && TREE_CODE (x) == TYPE_DECL
12968           && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12969 }
12970 
12971 /* Returns true iff TYPE is a type variant created for a typedef. */
12972 
12973 bool
12974 typedef_variant_p (const_tree type)
12975 {
12976   return is_typedef_decl (TYPE_NAME (type));
12977 }
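
/* Illustrative example, not part of the original source: for
   "typedef int myint;" the TYPE_DECL for "myint" has DECL_ORIGINAL_TYPE
   pointing at "int", so is_typedef_decl is true for that decl and
   typedef_variant_p is true for the type variant whose TYPE_NAME it is.  */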
12978 
12979 /* PR 84195: Replace control characters in "unescaped" with their
12980    escaped equivalents.  Allow newlines if -fmessage-length has
12981    been set to a non-zero value.  This is done here, rather than
12982    where the attribute is recorded, as the message length can
12983    change between these two locations.  */
12984 
12985 void
12986 escaped_string::escape (const char *unescaped)
12987 {
12988   char *escaped;
12989   size_t i, new_i, len;
12990 
12991   if (m_owned)
12992     free (m_str);
12993 
12994   m_str = const_cast<char *> (unescaped);
12995   m_owned = false;
12996 
12997   if (unescaped == NULL || *unescaped == 0)
12998     return;
12999 
13000   len = strlen (unescaped);
13001   escaped = NULL;
13002   new_i = 0;
13003 
13004   for (i = 0; i < len; i++)
13005     {
13006       char c = unescaped[i];
13007 
13008       if (!ISCNTRL (c))
13009 	{
13010 	  if (escaped)
13011 	    escaped[new_i++] = c;
13012 	  continue;
13013 	}
13014 
13015       if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
13016 	{
13017 	  if (escaped == NULL)
13018 	    {
13019 	      /* We only allocate space for a new string if we
13020 		 actually encounter a control character that
13021 		 needs replacing.  */
13022 	      escaped = (char *) xmalloc (len * 2 + 1);
13023 	      strncpy (escaped, unescaped, i);
13024 	      new_i = i;
13025 	    }
13026 
13027 	  escaped[new_i++] = '\\';
13028 
13029 	  switch (c)
13030 	    {
13031 	    case '\a': escaped[new_i++] = 'a'; break;
13032 	    case '\b': escaped[new_i++] = 'b'; break;
13033 	    case '\f': escaped[new_i++] = 'f'; break;
13034 	    case '\n': escaped[new_i++] = 'n'; break;
13035 	    case '\r': escaped[new_i++] = 'r'; break;
13036 	    case '\t': escaped[new_i++] = 't'; break;
13037 	    case '\v': escaped[new_i++] = 'v'; break;
13038 	    default:   escaped[new_i++] = '?'; break;
13039 	    }
13040 	}
13041       else if (escaped)
13042 	escaped[new_i++] = c;
13043     }
13044 
13045   if (escaped)
13046     {
13047       escaped[new_i] = 0;
13048       m_str = escaped;
13049       m_owned = true;
13050     }
13051 }
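
/* Usage sketch, not part of the original source:

     escaped_string msg;
     msg.escape ("bad\tvalue");
     warning (0, "attribute message: %s", (const char *) msg);

   The embedded tab is rendered as the two characters "\t"; any buffer
   allocated for the escaped copy is released when MSG goes out of
   scope.  */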
13052 
13053 /* Warn about a use of an identifier which was marked deprecated.  Returns
13054    whether a warning was given.  */
13055 
13056 bool
13057 warn_deprecated_use (tree node, tree attr)
13058 {
13059   escaped_string msg;
13060 
13061   if (node == 0 || !warn_deprecated_decl)
13062     return false;
13063 
13064   if (!attr)
13065     {
13066       if (DECL_P (node))
13067 	attr = DECL_ATTRIBUTES (node);
13068       else if (TYPE_P (node))
13069 	{
13070 	  tree decl = TYPE_STUB_DECL (node);
13071 	  if (decl)
13072 	    attr = lookup_attribute ("deprecated",
13073 				     TYPE_ATTRIBUTES (TREE_TYPE (decl)));
13074 	}
13075     }
13076 
13077   if (attr)
13078     attr = lookup_attribute ("deprecated", attr);
13079 
13080   if (attr)
13081     msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
13082 
13083   bool w = false;
13084   if (DECL_P (node))
13085     {
13086       auto_diagnostic_group d;
13087       if (msg)
13088 	w = warning (OPT_Wdeprecated_declarations,
13089 		     "%qD is deprecated: %s", node, (const char *) msg);
13090       else
13091 	w = warning (OPT_Wdeprecated_declarations,
13092 		     "%qD is deprecated", node);
13093       if (w)
13094 	inform (DECL_SOURCE_LOCATION (node), "declared here");
13095     }
13096   else if (TYPE_P (node))
13097     {
13098       tree what = NULL_TREE;
13099       tree decl = TYPE_STUB_DECL (node);
13100 
13101       if (TYPE_NAME (node))
13102 	{
13103 	  if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
13104 	    what = TYPE_NAME (node);
13105 	  else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
13106 		   && DECL_NAME (TYPE_NAME (node)))
13107 	    what = DECL_NAME (TYPE_NAME (node));
13108 	}
13109 
13110       auto_diagnostic_group d;
13111       if (what)
13112 	{
13113 	  if (msg)
13114 	    w = warning (OPT_Wdeprecated_declarations,
13115 			 "%qE is deprecated: %s", what, (const char *) msg);
13116 	  else
13117 	    w = warning (OPT_Wdeprecated_declarations,
13118 			 "%qE is deprecated", what);
13119 	}
13120       else
13121 	{
13122 	  if (msg)
13123 	    w = warning (OPT_Wdeprecated_declarations,
13124 			 "type is deprecated: %s", (const char *) msg);
13125 	  else
13126 	    w = warning (OPT_Wdeprecated_declarations,
13127 			 "type is deprecated");
13128 	}
13129 
13130       if (w && decl)
13131 	inform (DECL_SOURCE_LOCATION (decl), "declared here");
13132     }
13133 
13134   return w;
13135 }
13136 
13137 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
13138    somewhere in it.  */
13139 
13140 bool
13141 contains_bitfld_component_ref_p (const_tree ref)
13142 {
13143   while (handled_component_p (ref))
13144     {
13145       if (TREE_CODE (ref) == COMPONENT_REF
13146           && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
13147         return true;
13148       ref = TREE_OPERAND (ref, 0);
13149     }
13150 
13151   return false;
13152 }
13153 
13154 /* Try to determine whether a TRY_CATCH expression can fall through.
13155    This is a subroutine of block_may_fallthru.  */
13156 
13157 static bool
13158 try_catch_may_fallthru (const_tree stmt)
13159 {
13160   tree_stmt_iterator i;
13161 
13162   /* If the TRY block can fall through, the whole TRY_CATCH can
13163      fall through.  */
13164   if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
13165     return true;
13166 
13167   i = tsi_start (TREE_OPERAND (stmt, 1));
13168   switch (TREE_CODE (tsi_stmt (i)))
13169     {
13170     case CATCH_EXPR:
13171       /* We expect to see a sequence of CATCH_EXPR trees, each with a
13172 	 catch expression and a body.  The whole TRY_CATCH may fall
13173 	 through iff any of the catch bodies falls through.  */
13174       for (; !tsi_end_p (i); tsi_next (&i))
13175 	{
13176 	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
13177 	    return true;
13178 	}
13179       return false;
13180 
13181     case EH_FILTER_EXPR:
13182       /* The exception filter expression only matters if there is an
13183 	 exception.  If the exception does not match EH_FILTER_TYPES,
13184 	 we will execute EH_FILTER_FAILURE, and we will fall through
13185 	 if that falls through.  If the exception does match
13186 	 EH_FILTER_TYPES, the stack unwinder will continue up the
13187 	 stack, so we will not fall through.  We don't know whether we
13188 	 will throw an exception which matches EH_FILTER_TYPES or not,
13189 	 so we just ignore EH_FILTER_TYPES and assume that we might
13190 	 throw an exception which doesn't match.  */
13191       return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
13192 
13193     default:
13194       /* This case represents statements to be executed when an
13195 	 exception occurs.  Those statements are implicitly followed
13196 	 by a RESX statement to resume execution after the exception.
13197 	 So in this case the TRY_CATCH never falls through.  */
13198       return false;
13199     }
13200 }
13201 
13202 /* Try to determine if we can fall out of the bottom of BLOCK.  This guess
13203    need not be 100% accurate; simply be conservative and return true if we
13204    don't know.  This is used only to avoid stupidly generating extra code.
13205    If we're wrong, we'll just delete the extra code later.  */
13206 
13207 bool
13208 block_may_fallthru (const_tree block)
13209 {
13210   /* This CONST_CAST is okay because expr_last returns its argument
13211      unmodified and we assign it to a const_tree.  */
13212   const_tree stmt = expr_last (CONST_CAST_TREE (block));
13213 
13214   switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
13215     {
13216     case GOTO_EXPR:
13217     case RETURN_EXPR:
13218       /* Easy cases.  If the last statement of the block implies
13219 	 control transfer, then we can't fall through.  */
13220       return false;
13221 
13222     case SWITCH_EXPR:
13223       /* If there is a default: label or case labels cover all possible
13224 	 SWITCH_COND values, then the SWITCH_EXPR will transfer control
13225 	 to some case label in all cases and all we care is whether the
13226 	 SWITCH_BODY falls through.  */
13227       if (SWITCH_ALL_CASES_P (stmt))
13228 	return block_may_fallthru (SWITCH_BODY (stmt));
13229       return true;
13230 
13231     case COND_EXPR:
13232       if (block_may_fallthru (COND_EXPR_THEN (stmt)))
13233 	return true;
13234       return block_may_fallthru (COND_EXPR_ELSE (stmt));
13235 
13236     case BIND_EXPR:
13237       return block_may_fallthru (BIND_EXPR_BODY (stmt));
13238 
13239     case TRY_CATCH_EXPR:
13240       return try_catch_may_fallthru (stmt);
13241 
13242     case TRY_FINALLY_EXPR:
13243       /* The finally clause is always executed after the try clause,
13244 	 so if it does not fall through, then the try-finally will not
13245 	 fall through.  Otherwise, if the try clause does not fall
13246 	 through, then when the finally clause falls through it will
13247 	 resume execution wherever the try clause was going.  So the
13248 	 whole try-finally will only fall through if both the try
13249 	 clause and the finally clause fall through.  */
13250       return (block_may_fallthru (TREE_OPERAND (stmt, 0))
13251 	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));
13252 
13253     case EH_ELSE_EXPR:
13254       return block_may_fallthru (TREE_OPERAND (stmt, 0));
13255 
13256     case MODIFY_EXPR:
13257       if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
13258 	stmt = TREE_OPERAND (stmt, 1);
13259       else
13260 	return true;
13261       /* FALLTHRU */
13262 
13263     case CALL_EXPR:
13264       /* Functions that do not return do not fall through.  */
13265       return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
13266 
13267     case CLEANUP_POINT_EXPR:
13268       return block_may_fallthru (TREE_OPERAND (stmt, 0));
13269 
13270     case TARGET_EXPR:
13271       return block_may_fallthru (TREE_OPERAND (stmt, 1));
13272 
13273     case ERROR_MARK:
13274       return true;
13275 
13276     default:
13277       return lang_hooks.block_may_fallthru (stmt);
13278     }
13279 }
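
/* Illustrative example, not part of the original source: a statement
   list ending in a RETURN_EXPR cannot fall through, a list ending in a
   call to a noreturn function such as abort cannot either, while one
   ending in an ordinary MODIFY_EXPR can.  */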
13280 
13281 /* True if we are using EH to handle cleanups.  */
13282 static bool using_eh_for_cleanups_flag = false;
13283 
13284 /* This routine is called from front ends to indicate eh should be used for
13285    cleanups.  */
13286 void
13287 using_eh_for_cleanups (void)
13288 {
13289   using_eh_for_cleanups_flag = true;
13290 }
13291 
13292 /* Query whether EH is used for cleanups.  */
13293 bool
13294 using_eh_for_cleanups_p (void)
13295 {
13296   return using_eh_for_cleanups_flag;
13297 }
13298 
13299 /* Wrapper for tree_code_name to ensure that the tree code is valid.  */
13300 const char *
13301 get_tree_code_name (enum tree_code code)
13302 {
13303   const char *invalid = "<invalid tree code>";
13304 
13305   if (code >= MAX_TREE_CODES)
13306     {
13307       if (code == 0xa5a5)
13308 	return "ggc_freed";
13309       return invalid;
13310     }
13311 
13312   return tree_code_name[code];
13313 }
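
/* Illustrative example, not part of the original source:
   get_tree_code_name (INTEGER_CST) yields "integer_cst", while a value
   at or above MAX_TREE_CODES yields the "<invalid tree code>" marker
   (or "ggc_freed" for the 0xa5a5 poison pattern).  */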
13314 
13315 /* Drops the TREE_OVERFLOW flag from T.  */
13316 
13317 tree
13318 drop_tree_overflow (tree t)
13319 {
13320   gcc_checking_assert (TREE_OVERFLOW (t));
13321 
13322   /* For tree codes with a sharing machinery re-build the result.  */
13323   if (poly_int_tree_p (t))
13324     return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));
13325 
13326   /* For VECTOR_CST, remove the overflow bits from the encoded elements
13327      and canonicalize the result.  */
13328   if (TREE_CODE (t) == VECTOR_CST)
13329     {
13330       tree_vector_builder builder;
13331       builder.new_unary_operation (TREE_TYPE (t), t, true);
13332       unsigned int count = builder.encoded_nelts ();
13333       for (unsigned int i = 0; i < count; ++i)
13334 	{
13335 	  tree elt = VECTOR_CST_ELT (t, i);
13336 	  if (TREE_OVERFLOW (elt))
13337 	    elt = drop_tree_overflow (elt);
13338 	  builder.quick_push (elt);
13339 	}
13340       return builder.build ();
13341     }
13342 
13343   /* Otherwise, as all tcc_constants are possibly shared, copy the node
13344      and drop the flag.  */
13345   t = copy_node (t);
13346   TREE_OVERFLOW (t) = 0;
13347 
13348   /* For constants that contain nested constants, drop the flag
13349      from those as well.  */
13350   if (TREE_CODE (t) == COMPLEX_CST)
13351     {
13352       if (TREE_OVERFLOW (TREE_REALPART (t)))
13353 	TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
13354       if (TREE_OVERFLOW (TREE_IMAGPART (t)))
13355 	TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
13356     }
13357 
13358   return t;
13359 }
13360 
13361 /* Given a memory reference expression T, return its base address.
13362    The base address of a memory reference expression is the main
13363    object being referenced.  For instance, the base address for
13364    'array[i].fld[j]' is 'array'.  You can think of this as stripping
13365    away the offset part from a memory address.
13366 
13367    This function calls handled_component_p to strip away all the inner
13368    parts of the memory reference until it reaches the base object.  */
13369 
13370 tree
13371 get_base_address (tree t)
13372 {
13373   while (handled_component_p (t))
13374     t = TREE_OPERAND (t, 0);
13375 
13376   if ((TREE_CODE (t) == MEM_REF
13377        || TREE_CODE (t) == TARGET_MEM_REF)
13378       && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
13379     t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
13380 
13381   /* ???  Either the alias oracle or all callers need to properly deal
13382      with WITH_SIZE_EXPRs before we can look through those.  */
13383   if (TREE_CODE (t) == WITH_SIZE_EXPR)
13384     return NULL_TREE;
13385 
13386   return t;
13387 }
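
/* Illustrative example, not part of the original source: for the
   reference a.b[i] this returns the VAR_DECL for "a"; for a MEM_REF
   whose address operand is &a it likewise returns "a"; but for a
   MEM_REF based on a pointer SSA name it returns the MEM_REF itself.  */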
13388 
13389 /* Return a tree of sizetype representing the size, in bytes, of the element
13390    of EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
13391 
13392 tree
13393 array_ref_element_size (tree exp)
13394 {
13395   tree aligned_size = TREE_OPERAND (exp, 3);
13396   tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
13397   location_t loc = EXPR_LOCATION (exp);
13398 
13399   /* If a size was specified in the ARRAY_REF, it's the size measured
13400      in alignment units of the element type.  So multiply by that value.  */
13401   if (aligned_size)
13402     {
13403       /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13404 	 sizetype from another type of the same width and signedness.  */
13405       if (TREE_TYPE (aligned_size) != sizetype)
13406 	aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
13407       return size_binop_loc (loc, MULT_EXPR, aligned_size,
13408 			     size_int (TYPE_ALIGN_UNIT (elmt_type)));
13409     }
13410 
13411   /* Otherwise, take the size from that of the element type.  Substitute
13412      any PLACEHOLDER_EXPR that we have.  */
13413   else
13414     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
13415 }
13416 
13417 /* Return a tree representing the lower bound of the array mentioned in
13418    EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
13419 
13420 tree
13421 array_ref_low_bound (tree exp)
13422 {
13423   tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13424 
13425   /* If a lower bound is specified in EXP, use it.  */
13426   if (TREE_OPERAND (exp, 2))
13427     return TREE_OPERAND (exp, 2);
13428 
13429   /* Otherwise, if there is a domain type and it has a lower bound, use it,
13430      substituting for a PLACEHOLDER_EXPR as needed.  */
13431   if (domain_type && TYPE_MIN_VALUE (domain_type))
13432     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
13433 
13434   /* Otherwise, return a zero of the appropriate type.  */
13435   tree idxtype = TREE_TYPE (TREE_OPERAND (exp, 1));
13436   return (idxtype == error_mark_node
13437 	  ? integer_zero_node : build_int_cst (idxtype, 0));
13438 }
13439 
13440 /* Return a tree representing the upper bound of the array mentioned in
13441    EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
13442 
13443 tree
13444 array_ref_up_bound (tree exp)
13445 {
13446   tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13447 
13448   /* If there is a domain type and it has an upper bound, use it, substituting
13449      for a PLACEHOLDER_EXPR as needed.  */
13450   if (domain_type && TYPE_MAX_VALUE (domain_type))
13451     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
13452 
13453   /* Otherwise fail.  */
13454   return NULL_TREE;
13455 }
13456 
13457 /* Returns true if REF is an array reference, component reference,
13458    or memory reference to an array at the end of a structure.
13459    If this is the case, the array may be allocated larger
13460    than its upper bound implies.  */
13461 
13462 bool
13463 array_at_struct_end_p (tree ref)
13464 {
13465   tree atype;
13466 
13467   if (TREE_CODE (ref) == ARRAY_REF
13468       || TREE_CODE (ref) == ARRAY_RANGE_REF)
13469     {
13470       atype = TREE_TYPE (TREE_OPERAND (ref, 0));
13471       ref = TREE_OPERAND (ref, 0);
13472     }
13473   else if (TREE_CODE (ref) == COMPONENT_REF
13474 	   && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
13475     atype = TREE_TYPE (TREE_OPERAND (ref, 1));
13476   else if (TREE_CODE (ref) == MEM_REF)
13477     {
13478       tree arg = TREE_OPERAND (ref, 0);
13479       if (TREE_CODE (arg) == ADDR_EXPR)
13480 	arg = TREE_OPERAND (arg, 0);
13481       tree argtype = TREE_TYPE (arg);
13482       if (TREE_CODE (argtype) == RECORD_TYPE)
13483 	{
13484 	  if (tree fld = last_field (argtype))
13485 	    {
13486 	      atype = TREE_TYPE (fld);
13487 	      if (TREE_CODE (atype) != ARRAY_TYPE)
13488 		return false;
13489 	      if (VAR_P (arg) && DECL_SIZE (fld))
13490 		return false;
13491 	    }
13492 	  else
13493 	    return false;
13494 	}
13495       else
13496 	return false;
13497     }
13498   else
13499     return false;
13500 
13501   if (TREE_CODE (ref) == STRING_CST)
13502     return false;
13503 
13504   tree ref_to_array = ref;
13505   while (handled_component_p (ref))
13506     {
13507       /* If the reference chain contains a component reference to a
13508          non-union type and there follows another field the reference
13509 	 is not at the end of a structure.  */
13510       if (TREE_CODE (ref) == COMPONENT_REF)
13511 	{
13512 	  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
13513 	    {
13514 	      tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
13515 	      while (nextf && TREE_CODE (nextf) != FIELD_DECL)
13516 		nextf = DECL_CHAIN (nextf);
13517 	      if (nextf)
13518 		return false;
13519 	    }
13520 	}
13521       /* If we have a multi-dimensional array we do not consider
13522          a non-innermost dimension as flex array if the whole
13523 	 multi-dimensional array is at struct end.
13524 	 Same for an array of aggregates with a trailing array
13525 	 member.  */
13526       else if (TREE_CODE (ref) == ARRAY_REF)
13527 	return false;
13528       else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
13529 	;
13530       /* If we view an underlying object as sth else then what we
13531          gathered up to now is what we have to rely on.  */
13532       else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
13533 	break;
13534       else
13535 	gcc_unreachable ();
13536 
13537       ref = TREE_OPERAND (ref, 0);
13538     }
13539 
13540   /* The array now is at struct end.  Treat flexible arrays as
13541      always subject to extend, even into just padding constrained by
13542      an underlying decl.  */
13543   if (! TYPE_SIZE (atype)
13544       || ! TYPE_DOMAIN (atype)
13545       || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
13546     return true;
13547 
13548   if (TREE_CODE (ref) == MEM_REF
13549       && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
13550     ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
13551 
13552   /* If the reference is based on a declared entity, the size of the array
13553      is constrained by its given domain.  (Do not trust commons PR/69368).  */
13554   if (DECL_P (ref)
13555       && !(flag_unconstrained_commons
13556 	   && VAR_P (ref) && DECL_COMMON (ref))
13557       && DECL_SIZE_UNIT (ref)
13558       && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
13559     {
13560       /* Check whether the array domain covers all of the available
13561          padding.  */
13562       poly_int64 offset;
13563       if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
13564 	  || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
13565           || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
13566 	return true;
13567       if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
13568 	return true;
13569 
13570       /* If at least one extra element fits it is a flexarray.  */
13571       if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
13572 		     - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
13573 		     + 2)
13574 		    * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
13575 		    wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
13576 	return true;
13577 
13578       return false;
13579     }
13580 
13581   return true;
13582 }
13583 
13584 /* Return a tree representing the offset, in bytes, of the field referenced
13585    by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */
13586 
13587 tree
13588 component_ref_field_offset (tree exp)
13589 {
13590   tree aligned_offset = TREE_OPERAND (exp, 2);
13591   tree field = TREE_OPERAND (exp, 1);
13592   location_t loc = EXPR_LOCATION (exp);
13593 
13594   /* If an offset was specified in the COMPONENT_REF, it's the offset measured
13595      in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
13596      value.  */
13597   if (aligned_offset)
13598     {
13599       /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13600 	 sizetype from another type of the same width and signedness.  */
13601       if (TREE_TYPE (aligned_offset) != sizetype)
13602 	aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
13603       return size_binop_loc (loc, MULT_EXPR, aligned_offset,
13604 			     size_int (DECL_OFFSET_ALIGN (field)
13605 				       / BITS_PER_UNIT));
13606     }
13607 
13608   /* Otherwise, take the offset from that of the field.  Substitute
13609      any PLACEHOLDER_EXPR that we have.  */
13610   else
13611     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
13612 }
13613 
13614 /* Given the initializer INIT, return the initializer for the field
13615    DECL if it exists, otherwise null.  Used to obtain the initializer
13616    for a flexible array member and determine its size.  */
13617 
13618 static tree
13619 get_initializer_for (tree init, tree decl)
13620 {
13621   STRIP_NOPS (init);
13622 
13623   tree fld, fld_init;
13624   unsigned HOST_WIDE_INT i;
13625   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), i, fld, fld_init)
13626     {
13627       if (decl == fld)
13628 	return fld_init;
13629 
13630       if (TREE_CODE (fld) == CONSTRUCTOR)
13631 	{
13632 	  fld_init = get_initializer_for (fld_init, decl);
13633 	  if (fld_init)
13634 	    return fld_init;
13635 	}
13636     }
13637 
13638   return NULL_TREE;
13639 }
13640 
13641 /* Determines the size of the member referenced by the COMPONENT_REF
13642    REF, using its initializer expression if necessary in order to
13643    determine the size of an initialized flexible array member.
13644    If non-null, *INTERIOR_ZERO_LENGTH is set when REF refers to
13645    an interior zero-length array.
13646    Returns the size as sizetype (which might be zero for an object
13647    with an uninitialized flexible array member) or null if the size
13648    cannot be determined.  */
13649 
13650 tree
13651 component_ref_size (tree ref, bool *interior_zero_length /* = NULL */)
13652 {
13653   gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
13654 
13655   bool int_0_len = false;
13656   if (!interior_zero_length)
13657     interior_zero_length = &int_0_len;
13658 
13659   /* The object/argument referenced by the COMPONENT_REF and its type.  */
13660   tree arg = TREE_OPERAND (ref, 0);
13661   tree argtype = TREE_TYPE (arg);
13662   /* The referenced member.  */
13663   tree member = TREE_OPERAND (ref, 1);
13664 
13665   tree memsize = DECL_SIZE_UNIT (member);
13666   if (memsize)
13667     {
13668       tree memtype = TREE_TYPE (member);
13669       if (TREE_CODE (memtype) != ARRAY_TYPE)
13670 	return memsize;
13671 
13672       bool trailing = array_at_struct_end_p (ref);
13673       bool zero_length = integer_zerop (memsize);
13674       if (!trailing && !zero_length)
13675 	/* MEMBER is either an interior array or is an array with
13676 	   more than one element.  */
13677 	return memsize;
13678 
13679       *interior_zero_length = zero_length && !trailing;
13680       if (*interior_zero_length)
13681 	memsize = NULL_TREE;
13682 
13683       if (!zero_length)
13684 	if (tree dom = TYPE_DOMAIN (memtype))
13685 	  if (tree min = TYPE_MIN_VALUE (dom))
13686 	    if (tree max = TYPE_MAX_VALUE (dom))
13687 	      if (TREE_CODE (min) == INTEGER_CST
13688 		  && TREE_CODE (max) == INTEGER_CST)
13689 		{
13690 		  offset_int minidx = wi::to_offset (min);
13691 		  offset_int maxidx = wi::to_offset (max);
13692 		  if (maxidx - minidx > 0)
13693 		    /* MEMBER is an array with more than one element.  */
13694 		    return memsize;
13695 		}
13696 
13697       /* For a refernce to a zero- or one-element array member of a union
13698 	 use the size of the union instead of the size of the member.  */
13699       if (TREE_CODE (argtype) == UNION_TYPE)
13700 	memsize = TYPE_SIZE_UNIT (argtype);
13701     }
13702 
13703   /* MEMBER is either a bona fide flexible array member, or a zero-length
13704      array member, or an array of length one treated as such.  */
13705 
13706   /* If the reference is to a declared object and the member a true
13707      flexible array, try to determine its size from its initializer.  */
13708   poly_int64 baseoff = 0;
13709   tree base = get_addr_base_and_unit_offset (ref, &baseoff);
13710   if (!base || !VAR_P (base))
13711     {
13712       if (!*interior_zero_length)
13713 	return NULL_TREE;
13714 
13715       if (TREE_CODE (arg) != COMPONENT_REF)
13716 	return NULL_TREE;
13717 
13718       base = arg;
13719       while (TREE_CODE (base) == COMPONENT_REF)
13720 	base = TREE_OPERAND (base, 0);
13721       baseoff = tree_to_poly_int64 (byte_position (TREE_OPERAND (ref, 1)));
13722     }
13723 
13724   /* BASE is the declared object of which MEMBER is either a member
13725      or that is cast to ARGTYPE (e.g., a char buffer used to store
13726      an ARGTYPE object).  */
13727   tree basetype = TREE_TYPE (base);
13728 
13729   /* Determine the base type of the referenced object.  If it's
13730      the same as ARGTYPE and MEMBER has a known size, return it.  */
13731   tree bt = basetype;
13732   if (!*interior_zero_length)
13733     while (TREE_CODE (bt) == ARRAY_TYPE)
13734       bt = TREE_TYPE (bt);
13735   bool typematch = useless_type_conversion_p (argtype, bt);
13736   if (memsize && typematch)
13737     return memsize;
13738 
13739   memsize = NULL_TREE;
13740 
13741   if (typematch)
13742     /* MEMBER is a true flexible array member.  Compute its size from
13743        the initializer of the BASE object if it has one.  */
13744     if (tree init = DECL_P (base) ? DECL_INITIAL (base) : NULL_TREE)
13745       if (init != error_mark_node)
13746 	{
13747 	  init = get_initializer_for (init, member);
13748 	  if (init)
13749 	    {
13750 	      memsize = TYPE_SIZE_UNIT (TREE_TYPE (init));
13751 	      if (tree refsize = TYPE_SIZE_UNIT (argtype))
13752 		{
13753 		  /* Use the larger of the initializer size and the tail
13754 		     padding in the enclosing struct.  */
13755 		  poly_int64 rsz = tree_to_poly_int64 (refsize);
13756 		  rsz -= baseoff;
13757 		  if (known_lt (tree_to_poly_int64 (memsize), rsz))
13758 		    memsize = wide_int_to_tree (TREE_TYPE (memsize), rsz);
13759 		}
13760 
13761 	      baseoff = 0;
13762 	    }
13763 	}
13764 
13765   if (!memsize)
13766     {
13767       if (typematch)
13768 	{
13769 	  if (DECL_P (base)
13770 	      && DECL_EXTERNAL (base)
13771 	      && bt == basetype
13772 	      && !*interior_zero_length)
13773 	    /* The size of a flexible array member of an extern struct
13774 	       with no initializer cannot be determined (it's defined
13775 	       in another translation unit and can have an initializer
13776 	       with an arbitrary number of elements).  */
13777 	    return NULL_TREE;
13778 
13779 	  /* Use the size of the base struct or, for interior zero-length
13780 	     arrays, the size of the enclosing type.  */
13781 	  memsize = TYPE_SIZE_UNIT (bt);
13782 	}
13783       else if (DECL_P (base))
13784 	/* Use the size of the BASE object (possibly an array of some
13785 	   other type such as char used to store the struct).  */
13786 	memsize = DECL_SIZE_UNIT (base);
13787       else
13788 	return NULL_TREE;
13789     }
13790 
13791   /* If the flexible array member has a known size use the greater
13792      of it and the tail padding in the enclosing struct.
13793      Otherwise, when the size of the flexible array member is unknown
13794      and the referenced object is not a struct, use the size of its
13795      type when known.  This detects sizes of array buffers when cast
13796      to struct types with flexible array members.  */
13797   if (memsize)
13798     {
13799       poly_int64 memsz64 = memsize ? tree_to_poly_int64 (memsize) : 0;
13800       if (known_lt (baseoff, memsz64))
13801 	{
13802 	  memsz64 -= baseoff;
13803 	  return wide_int_to_tree (TREE_TYPE (memsize), memsz64);
13804 	}
13805       return size_zero_node;
13806     }
13807 
13808   /* Return "don't know" for an external non-array object since its
13809      flexible array member can be initialized to have any number of
13810      elements.  Otherwise, return zero because the flexible array
13811      member has no elements.  */
13812   return (DECL_P (base)
13813 	  && DECL_EXTERNAL (base)
13814 	  && (!typematch
13815 	      || TREE_CODE (basetype) != ARRAY_TYPE)
13816 	  ? NULL_TREE : size_zero_node);
13817 }
13818 
13819 /* Return the machine mode of T.  For vectors, returns the mode of the
13820    inner type.  The main use case is to feed the result to HONOR_NANS,
13821    avoiding the BLKmode that a direct TYPE_MODE (T) might return.  */
13822 
13823 machine_mode
13824 element_mode (const_tree t)
13825 {
13826   if (!TYPE_P (t))
13827     t = TREE_TYPE (t);
13828   if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13829     t = TREE_TYPE (t);
13830   return TYPE_MODE (t);
13831 }
13832 
13833 /* Vector types need to re-check the target flags each time we report
13834    the machine mode.  We need to do this because attribute target can
13835    change the result of vector_mode_supported_p and have_regs_of_mode
13836    on a per-function basis.  Thus the TYPE_MODE of a VECTOR_TYPE can
13837    change on a per-function basis.  */
13838 /* ??? Possibly a better solution is to run through all the types
13839    referenced by a function and re-compute the TYPE_MODE once, rather
13840    than make the TYPE_MODE macro call a function.  */
13841 
13842 machine_mode
13843 vector_type_mode (const_tree t)
13844 {
13845   machine_mode mode;
13846 
13847   gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
13848 
13849   mode = t->type_common.mode;
13850   if (VECTOR_MODE_P (mode)
13851       && (!targetm.vector_mode_supported_p (mode)
13852 	  || !have_regs_of_mode[mode]))
13853     {
13854       scalar_int_mode innermode;
13855 
13856       /* For integers, try mapping it to a same-sized scalar mode.  */
13857       if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
13858 	{
13859 	  poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
13860 			     * GET_MODE_BITSIZE (innermode));
13861 	  scalar_int_mode mode;
13862 	  if (int_mode_for_size (size, 0).exists (&mode)
13863 	      && have_regs_of_mode[mode])
13864 	    return mode;
13865 	}
13866 
13867       return BLKmode;
13868     }
13869 
13870   return mode;
13871 }
13872 
13873 /* Verify that basic properties of T match TV and thus T can be a variant of
13874    TV.  TV should be the more specified variant (i.e. the main variant).  */
13875 
13876 static bool
13877 verify_type_variant (const_tree t, tree tv)
13878 {
13879   /* Type variant can differ by:
13880 
13881      - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13882                    ENCODE_QUAL_ADDR_SPACE.
13883      - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
13884        in this case some values may not be set in the variant types
13885        (see TYPE_COMPLETE_P checks).
13886      - it is possible to have TYPE_ARTIFICIAL variant of non-artifical type
13887      - by TYPE_NAME and attributes (i.e. when variant originate by typedef)
13888      - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13889      - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13890      - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
13891        this is necessary to make it possible to merge types form different TUs
13892      - arrays, pointers and references may have TREE_TYPE that is a variant
13893        of TREE_TYPE of their main variants.
13894      - aggregates may have new TYPE_FIELDS list that list variants of
13895        the main variant TYPE_FIELDS.
13896      - vector types may differ by TYPE_VECTOR_OPAQUE
13897    */
13898 
13899   /* Convenience macro for matching individual fields.  */
13900 #define verify_variant_match(flag)					    \
13901   do {									    \
13902     if (flag (tv) != flag (t))						    \
13903       {									    \
13904 	error ("type variant differs by %s", #flag);			    \
13905 	debug_tree (tv);						    \
13906 	return false;							    \
13907       }									    \
13908   } while (false)
13909 
13910   /* tree_base checks.  */
13911 
13912   verify_variant_match (TREE_CODE);
13913   /* FIXME: Ada builds non-artificial variants of artificial types.  */
13914   if (TYPE_ARTIFICIAL (tv) && 0)
13915     verify_variant_match (TYPE_ARTIFICIAL);
13916   if (POINTER_TYPE_P (tv))
13917     verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
13918   /* FIXME: TYPE_SIZES_GIMPLIFIED may differs for Ada build.  */
13919   verify_variant_match (TYPE_UNSIGNED);
13920   verify_variant_match (TYPE_PACKED);
13921   if (TREE_CODE (t) == REFERENCE_TYPE)
13922     verify_variant_match (TYPE_REF_IS_RVALUE);
13923   if (AGGREGATE_TYPE_P (t))
13924     verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
13925   else
13926     verify_variant_match (TYPE_SATURATING);
13927   /* FIXME: This check trigger during libstdc++ build.  */
13928   if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
13929     verify_variant_match (TYPE_FINAL_P);
13930 
13931   /* tree_type_common checks.  */
13932 
13933   if (COMPLETE_TYPE_P (t))
13934     {
13935       verify_variant_match (TYPE_MODE);
13936       if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
13937 	  && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
13938 	verify_variant_match (TYPE_SIZE);
13939       if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
13940 	  && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
13941 	  && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
13942 	{
13943 	  gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
13944 					TYPE_SIZE_UNIT (tv), 0));
13945 	  error ("type variant has different %<TYPE_SIZE_UNIT%>");
13946 	  debug_tree (tv);
13947 	  error ("type variant%'s %<TYPE_SIZE_UNIT%>");
13948 	  debug_tree (TYPE_SIZE_UNIT (tv));
13949 	  error ("type%'s %<TYPE_SIZE_UNIT%>");
13950 	  debug_tree (TYPE_SIZE_UNIT (t));
13951 	  return false;
13952 	}
13953       verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13954     }
13955   verify_variant_match (TYPE_PRECISION);
13956   if (RECORD_OR_UNION_TYPE_P (t))
13957     verify_variant_match (TYPE_TRANSPARENT_AGGR);
13958   else if (TREE_CODE (t) == ARRAY_TYPE)
13959     verify_variant_match (TYPE_NONALIASED_COMPONENT);
13960   /* During LTO we merge variant lists from diferent translation units
13961      that may differ BY TYPE_CONTEXT that in turn may point
13962      to TRANSLATION_UNIT_DECL.
13963      Ada also builds variants of types with different TYPE_CONTEXT.   */
13964   if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
13965     verify_variant_match (TYPE_CONTEXT);
13966   if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
13967     verify_variant_match (TYPE_STRING_FLAG);
13968   if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
13969     verify_variant_match (TYPE_CXX_ODR_P);
13970   if (TYPE_ALIAS_SET_KNOWN_P (t))
13971     {
13972       error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
13973       debug_tree (tv);
13974       return false;
13975     }
13976 
13977   /* tree_type_non_common checks.  */
13978 
13979   /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13980      and dangle the pointer from time to time.  */
13981   if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13982       && (in_lto_p || !TYPE_VFIELD (tv)
13983 	  || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
13984     {
13985       error ("type variant has different %<TYPE_VFIELD%>");
13986       debug_tree (tv);
13987       return false;
13988     }
13989   if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13990        || TREE_CODE (t) == INTEGER_TYPE
13991        || TREE_CODE (t) == BOOLEAN_TYPE
13992        || TREE_CODE (t) == REAL_TYPE
13993        || TREE_CODE (t) == FIXED_POINT_TYPE)
13994     {
13995       verify_variant_match (TYPE_MAX_VALUE);
13996       verify_variant_match (TYPE_MIN_VALUE);
13997     }
13998   if (TREE_CODE (t) == METHOD_TYPE)
13999     verify_variant_match (TYPE_METHOD_BASETYPE);
14000   if (TREE_CODE (t) == OFFSET_TYPE)
14001     verify_variant_match (TYPE_OFFSET_BASETYPE);
14002   if (TREE_CODE (t) == ARRAY_TYPE)
14003     verify_variant_match (TYPE_ARRAY_MAX_SIZE);
14004   /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
14005      or even type's main variant.  This is needed to make bootstrap pass
14006      and the bug seems new in GCC 5.
14007      C++ FE should be updated to make this consistent and we should check
14008      that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
14009      is a match with main variant.
14010 
14011      Also disable the check for Java for now because of parser hack that builds
14012      first an dummy BINFO and then sometimes replace it by real BINFO in some
14013      of the copies.  */
14014   if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
14015       && TYPE_BINFO (t) != TYPE_BINFO (tv)
14016       /* FIXME: Java sometimes keep dump TYPE_BINFOs on variant types.
14017 	 Since there is no cheap way to tell C++/Java type w/o LTO, do checking
14018 	 at LTO time only.  */
14019       && (in_lto_p && odr_type_p (t)))
14020     {
14021       error ("type variant has different %<TYPE_BINFO%>");
14022       debug_tree (tv);
14023       error ("type variant%'s %<TYPE_BINFO%>");
14024       debug_tree (TYPE_BINFO (tv));
14025       error ("type%'s %<TYPE_BINFO%>");
14026       debug_tree (TYPE_BINFO (t));
14027       return false;
14028     }
14029 
14030   /* Check various uses of TYPE_VALUES_RAW.  */
14031   if (TREE_CODE (t) == ENUMERAL_TYPE
14032       && TYPE_VALUES (t))
14033     verify_variant_match (TYPE_VALUES);
14034   else if (TREE_CODE (t) == ARRAY_TYPE)
14035     verify_variant_match (TYPE_DOMAIN);
14036   /* Permit incomplete variants of complete type.  While FEs may complete
14037      all variants, this does not happen for C++ templates in all cases.  */
14038   else if (RECORD_OR_UNION_TYPE_P (t)
14039 	   && COMPLETE_TYPE_P (t)
14040 	   && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
14041     {
14042       tree f1, f2;
14043 
14044       /* Fortran builds qualified variants as new records with items of
14045 	 qualified type. Verify that they looks same.  */
14046       for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
14047 	   f1 && f2;
14048 	   f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
14049 	if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
14050 	    || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
14051 		 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
14052 		/* FIXME: gfc_nonrestricted_type builds all types as variants
14053 		   with exception of pointer types.  It deeply copies the type
14054 		   which means that we may end up with a variant type
14055 		   referring non-variant pointer.  We may change it to
14056 		   produce types as variants, too, like
14057 		   objc_get_protocol_qualified_type does.  */
14058 		&& !POINTER_TYPE_P (TREE_TYPE (f1)))
14059 	    || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
14060 	    || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
14061 	  break;
14062       if (f1 || f2)
14063 	{
14064 	  error ("type variant has different %<TYPE_FIELDS%>");
14065 	  debug_tree (tv);
14066 	  error ("first mismatch is field");
14067 	  debug_tree (f1);
14068 	  error ("and field");
14069 	  debug_tree (f2);
14070           return false;
14071 	}
14072     }
14073   else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
14074     verify_variant_match (TYPE_ARG_TYPES);
14075   /* For C++ the qualified variant of array type is really an array type
14076      of qualified TREE_TYPE.
14077      objc builds variants of pointer where pointer to type is a variant, too
14078      in objc_get_protocol_qualified_type.  */
14079   if (TREE_TYPE (t) != TREE_TYPE (tv)
14080       && ((TREE_CODE (t) != ARRAY_TYPE
14081 	   && !POINTER_TYPE_P (t))
14082 	  || TYPE_MAIN_VARIANT (TREE_TYPE (t))
14083 	     != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
14084     {
14085       error ("type variant has different %<TREE_TYPE%>");
14086       debug_tree (tv);
14087       error ("type variant%'s %<TREE_TYPE%>");
14088       debug_tree (TREE_TYPE (tv));
14089       error ("type%'s %<TREE_TYPE%>");
14090       debug_tree (TREE_TYPE (t));
14091       return false;
14092     }
14093   if (type_with_alias_set_p (t)
14094       && !gimple_canonical_types_compatible_p (t, tv, false))
14095     {
14096       error ("type is not compatible with its variant");
14097       debug_tree (tv);
14098       error ("type variant%'s %<TREE_TYPE%>");
14099       debug_tree (TREE_TYPE (tv));
14100       error ("type%'s %<TREE_TYPE%>");
14101       debug_tree (TREE_TYPE (t));
14102       return false;
14103     }
14104   return true;
14105 #undef verify_variant_match
14106 }
14107 
14108 
14109 /* The TYPE_CANONICAL merging machinery.  It should closely resemble
14110    the middle-end types_compatible_p function.  It needs to avoid
14111    claiming types are different for types that should be treated
14112    the same with respect to TBAA.  Canonical types are also used
14113    for IL consistency checks via the useless_type_conversion_p
14114    predicate which does not handle all type kinds itself but falls
14115    back to pointer-comparison of TYPE_CANONICAL for aggregates
14116    for example.  */
14117 
14118 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
14119    type calculation because we need to allow inter-operability between signed
14120    and unsigned variants.  */
14121 
14122 bool
14123 type_with_interoperable_signedness (const_tree type)
14124 {
14125   /* Fortran standard require C_SIGNED_CHAR to be interoperable with both
14126      signed char and unsigned char.  Similarly fortran FE builds
14127      C_SIZE_T as signed type, while C defines it unsigned.  */
14128 
14129   return tree_code_for_canonical_type_merging (TREE_CODE (type))
14130 	   == INTEGER_TYPE
14131          && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
14132 	     || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
14133 }
14134 
14135 /* Return true iff T1 and T2 are structurally identical for what
14136    TBAA is concerned.
14137    This function is used both by lto.c canonical type merging and by the
14138    verifier.  If TRUST_TYPE_CANONICAL we do not look into structure of types
14139    that have TYPE_CANONICAL defined and assume them equivalent.  This is useful
14140    only for LTO because only in these cases TYPE_CANONICAL equivalence
14141    correspond to one defined by gimple_canonical_types_compatible_p.  */
14142 
14143 bool
14144 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
14145 				     bool trust_type_canonical)
14146 {
14147   /* Type variants should be same as the main variant.  When not doing sanity
14148      checking to verify this fact, go to main variants and save some work.  */
14149   if (trust_type_canonical)
14150     {
14151       t1 = TYPE_MAIN_VARIANT (t1);
14152       t2 = TYPE_MAIN_VARIANT (t2);
14153     }
14154 
14155   /* Check first for the obvious case of pointer identity.  */
14156   if (t1 == t2)
14157     return true;
14158 
14159   /* Check that we have two types to compare.  */
14160   if (t1 == NULL_TREE || t2 == NULL_TREE)
14161     return false;
14162 
14163   /* We consider complete types always compatible with incomplete type.
14164      This does not make sense for canonical type calculation and thus we
14165      need to ensure that we are never called on it.
14166 
14167      FIXME: For more correctness the function probably should have three modes
14168 	1) mode assuming that types are complete mathcing their structure
14169 	2) mode allowing incomplete types but producing equivalence classes
14170 	   and thus ignoring all info from complete types
14171 	3) mode allowing incomplete types to match complete but checking
14172 	   compatibility between complete types.
14173 
14174      1 and 2 can be used for canonical type calculation. 3 is the real
14175      definition of type compatibility that can be used i.e. for warnings during
14176      declaration merging.  */
14177 
14178   gcc_assert (!trust_type_canonical
14179 	      || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
14180 
14181   /* If the types have been previously registered and found equal
14182      they still are.  */
14183 
14184   if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
14185       && trust_type_canonical)
14186     {
14187       /* Do not use TYPE_CANONICAL of pointer types.  For LTO streamed types
14188 	 they are always NULL, but they are set to non-NULL for types
14189 	 constructed by build_pointer_type and variants.  In this case the
14190 	 TYPE_CANONICAL is more fine grained than the equivalnce we test (where
14191 	 all pointers are considered equal.  Be sure to not return false
14192 	 negatives.  */
14193       gcc_checking_assert (canonical_type_used_p (t1)
14194 			   && canonical_type_used_p (t2));
14195       return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
14196     }
14197 
14198   /* For types where we do ODR based TBAA the canonical type is always
14199      set correctly, so we know that types are different if their
14200      canonical types does not match.  */
14201   if (trust_type_canonical
14202       && (odr_type_p (t1) && odr_based_tbaa_p (t1))
14203 	  != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
14204     return false;
14205 
14206   /* Can't be the same type if the types don't have the same code.  */
14207   enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
14208   if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
14209     return false;
14210 
14211   /* Qualifiers do not matter for canonical type comparison purposes.  */
14212 
14213   /* Void types and nullptr types are always the same.  */
14214   if (TREE_CODE (t1) == VOID_TYPE
14215       || TREE_CODE (t1) == NULLPTR_TYPE)
14216     return true;
14217 
14218   /* Can't be the same type if they have different mode.  */
14219   if (TYPE_MODE (t1) != TYPE_MODE (t2))
14220     return false;
14221 
14222   /* Non-aggregate types can be handled cheaply.  */
14223   if (INTEGRAL_TYPE_P (t1)
14224       || SCALAR_FLOAT_TYPE_P (t1)
14225       || FIXED_POINT_TYPE_P (t1)
14226       || TREE_CODE (t1) == VECTOR_TYPE
14227       || TREE_CODE (t1) == COMPLEX_TYPE
14228       || TREE_CODE (t1) == OFFSET_TYPE
14229       || POINTER_TYPE_P (t1))
14230     {
14231       /* Can't be the same type if they have different recision.  */
14232       if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
14233 	return false;
14234 
14235       /* In some cases the signed and unsigned types are required to be
14236 	 inter-operable.  */
14237       if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
14238 	  && !type_with_interoperable_signedness (t1))
14239 	return false;
14240 
14241       /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
14242 	 interoperable with "signed char".  Unless all frontends are revisited
14243 	 to agree on these types, we must ignore the flag completely.  */
14244 
14245       /* Fortran standard define C_PTR type that is compatible with every
14246  	 C pointer.  For this reason we need to glob all pointers into one.
14247 	 Still pointers in different address spaces are not compatible.  */
14248       if (POINTER_TYPE_P (t1))
14249 	{
14250 	  if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
14251 	      != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
14252 	    return false;
14253 	}
14254 
14255       /* Tail-recurse to components.  */
14256       if (TREE_CODE (t1) == VECTOR_TYPE
14257 	  || TREE_CODE (t1) == COMPLEX_TYPE)
14258 	return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
14259 						    TREE_TYPE (t2),
14260 						    trust_type_canonical);
14261 
14262       return true;
14263     }
14264 
14265   /* Do type-specific comparisons.  */
14266   switch (TREE_CODE (t1))
14267     {
14268     case ARRAY_TYPE:
14269       /* Array types are the same if the element types are the same and
14270 	 the number of elements are the same.  */
14271       if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
14272 						trust_type_canonical)
14273 	  || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
14274 	  || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
14275 	  || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
14276 	return false;
14277       else
14278 	{
14279 	  tree i1 = TYPE_DOMAIN (t1);
14280 	  tree i2 = TYPE_DOMAIN (t2);
14281 
14282 	  /* For an incomplete external array, the type domain can be
14283  	     NULL_TREE.  Check this condition also.  */
14284 	  if (i1 == NULL_TREE && i2 == NULL_TREE)
14285 	    return true;
14286 	  else if (i1 == NULL_TREE || i2 == NULL_TREE)
14287 	    return false;
14288 	  else
14289 	    {
14290 	      tree min1 = TYPE_MIN_VALUE (i1);
14291 	      tree min2 = TYPE_MIN_VALUE (i2);
14292 	      tree max1 = TYPE_MAX_VALUE (i1);
14293 	      tree max2 = TYPE_MAX_VALUE (i2);
14294 
14295 	      /* The minimum/maximum values have to be the same.  */
14296 	      if ((min1 == min2
14297 		   || (min1 && min2
14298 		       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
14299 			    && TREE_CODE (min2) == PLACEHOLDER_EXPR)
14300 		           || operand_equal_p (min1, min2, 0))))
14301 		  && (max1 == max2
14302 		      || (max1 && max2
14303 			  && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
14304 			       && TREE_CODE (max2) == PLACEHOLDER_EXPR)
14305 			      || operand_equal_p (max1, max2, 0)))))
14306 		return true;
14307 	      else
14308 		return false;
14309 	    }
14310 	}
14311 
14312     case METHOD_TYPE:
14313     case FUNCTION_TYPE:
14314       /* Function types are the same if the return type and arguments types
14315 	 are the same.  */
14316       if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
14317 						trust_type_canonical))
14318 	return false;
14319 
14320       if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
14321 	return true;
14322       else
14323 	{
14324 	  tree parms1, parms2;
14325 
14326 	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
14327 	       parms1 && parms2;
14328 	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
14329 	    {
14330 	      if (!gimple_canonical_types_compatible_p
14331 		     (TREE_VALUE (parms1), TREE_VALUE (parms2),
14332 		      trust_type_canonical))
14333 		return false;
14334 	    }
14335 
14336 	  if (parms1 || parms2)
14337 	    return false;
14338 
14339 	  return true;
14340 	}
14341 
14342     case RECORD_TYPE:
14343     case UNION_TYPE:
14344     case QUAL_UNION_TYPE:
14345       {
14346 	tree f1, f2;
14347 
14348 	/* Don't try to compare variants of an incomplete type, before
14349 	   TYPE_FIELDS has been copied around.  */
14350 	if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
14351 	  return true;
14352 
14353 
14354 	if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
14355 	  return false;
14356 
14357 	/* For aggregate types, all the fields must be the same.  */
14358 	for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
14359 	     f1 || f2;
14360 	     f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
14361 	  {
14362 	    /* Skip non-fields and zero-sized fields.  */
14363 	    while (f1 && (TREE_CODE (f1) != FIELD_DECL
14364 			  || (DECL_SIZE (f1)
14365 			      && integer_zerop (DECL_SIZE (f1)))))
14366 	      f1 = TREE_CHAIN (f1);
14367 	    while (f2 && (TREE_CODE (f2) != FIELD_DECL
14368 			  || (DECL_SIZE (f2)
14369 			      && integer_zerop (DECL_SIZE (f2)))))
14370 	      f2 = TREE_CHAIN (f2);
14371 	    if (!f1 || !f2)
14372 	      break;
14373 	    /* The fields must have the same name, offset and type.  */
14374 	    if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
14375 		|| !gimple_compare_field_offset (f1, f2)
14376 		|| !gimple_canonical_types_compatible_p
14377 		      (TREE_TYPE (f1), TREE_TYPE (f2),
14378 		       trust_type_canonical))
14379 	      return false;
14380 	  }
14381 
14382 	/* If one aggregate has more fields than the other, they
14383 	   are not the same.  */
14384 	if (f1 || f2)
14385 	  return false;
14386 
14387 	return true;
14388       }
14389 
14390     default:
14391       /* Consider all types with language specific trees in them mutually
14392 	 compatible.  This is executed only from verify_type and false
14393          positives can be tolerated.  */
14394       gcc_assert (!in_lto_p);
14395       return true;
14396     }
14397 }
14398 
14399 /* Verify type T.  */
14400 
14401 void
14402 verify_type (const_tree t)
14403 {
14404   bool error_found = false;
14405   tree mv = TYPE_MAIN_VARIANT (t);
14406   if (!mv)
14407     {
14408       error ("main variant is not defined");
14409       error_found = true;
14410     }
14411   else if (mv != TYPE_MAIN_VARIANT (mv))
14412     {
14413       error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
14414       debug_tree (mv);
14415       error_found = true;
14416     }
14417   else if (t != mv && !verify_type_variant (t, mv))
14418     error_found = true;
14419 
14420   tree ct = TYPE_CANONICAL (t);
14421   if (!ct)
14422     ;
14423   else if (TYPE_CANONICAL (t) != ct)
14424     {
14425       error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
14426       debug_tree (ct);
14427       error_found = true;
14428     }
14429   /* Method and function types cannot be used to address memory and thus
14430      TYPE_CANONICAL really matters only for determining useless conversions.
14431 
14432      FIXME: C++ FE produce declarations of builtin functions that are not
14433      compatible with main variants.  */
14434   else if (TREE_CODE (t) == FUNCTION_TYPE)
14435     ;
14436   else if (t != ct
14437 	   /* FIXME: gimple_canonical_types_compatible_p cannot compare types
14438 	      with variably sized arrays because their sizes possibly
14439 	      gimplified to different variables.  */
14440 	   && !variably_modified_type_p (ct, NULL)
14441 	   && !gimple_canonical_types_compatible_p (t, ct, false)
14442 	   && COMPLETE_TYPE_P (t))
14443     {
14444       error ("%<TYPE_CANONICAL%> is not compatible");
14445       debug_tree (ct);
14446       error_found = true;
14447     }
14448 
14449   if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
14450       && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
14451     {
14452       error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
14453       debug_tree (ct);
14454       error_found = true;
14455     }
14456   if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
14457    {
14458       error ("%<TYPE_CANONICAL%> of main variant is not main variant");
14459       debug_tree (ct);
14460       debug_tree (TYPE_MAIN_VARIANT (ct));
14461       error_found = true;
14462    }
14463 
14464 
14465   /* Check various uses of TYPE_MIN_VALUE_RAW.  */
14466   if (RECORD_OR_UNION_TYPE_P (t))
14467     {
14468       /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
14469 	 and danagle the pointer from time to time.  */
14470       if (TYPE_VFIELD (t)
14471 	  && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
14472 	  && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
14473 	{
14474 	  error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
14475 	  debug_tree (TYPE_VFIELD (t));
14476 	  error_found = true;
14477 	}
14478     }
14479   else if (TREE_CODE (t) == POINTER_TYPE)
14480     {
14481       if (TYPE_NEXT_PTR_TO (t)
14482 	  && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
14483 	{
14484 	  error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
14485 	  debug_tree (TYPE_NEXT_PTR_TO (t));
14486 	  error_found = true;
14487 	}
14488     }
14489   else if (TREE_CODE (t) == REFERENCE_TYPE)
14490     {
14491       if (TYPE_NEXT_REF_TO (t)
14492 	  && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
14493 	{
14494 	  error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
14495 	  debug_tree (TYPE_NEXT_REF_TO (t));
14496 	  error_found = true;
14497 	}
14498     }
14499   else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14500 	   || TREE_CODE (t) == FIXED_POINT_TYPE)
14501     {
14502       /* FIXME: The following check should pass:
14503 	  useless_type_conversion_p (const_cast <tree> (t),
14504 				     TREE_TYPE (TYPE_MIN_VALUE (t))
14505 	 but does not for C sizetypes in LTO.  */
14506     }
14507 
14508   /* Check various uses of TYPE_MAXVAL_RAW.  */
14509   if (RECORD_OR_UNION_TYPE_P (t))
14510     {
14511       if (!TYPE_BINFO (t))
14512 	;
14513       else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
14514 	{
14515 	  error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
14516 	  debug_tree (TYPE_BINFO (t));
14517 	  error_found = true;
14518 	}
14519       else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
14520 	{
14521 	  error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
14522 	  debug_tree (TREE_TYPE (TYPE_BINFO (t)));
14523 	  error_found = true;
14524 	}
14525     }
14526   else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14527     {
14528       if (TYPE_METHOD_BASETYPE (t)
14529 	  && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
14530 	  && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
14531 	{
14532 	  error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
14533 	  debug_tree (TYPE_METHOD_BASETYPE (t));
14534 	  error_found = true;
14535 	}
14536     }
14537   else if (TREE_CODE (t) == OFFSET_TYPE)
14538     {
14539       if (TYPE_OFFSET_BASETYPE (t)
14540 	  && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
14541 	  && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
14542 	{
14543 	  error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
14544 	  debug_tree (TYPE_OFFSET_BASETYPE (t));
14545 	  error_found = true;
14546 	}
14547     }
14548   else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14549 	   || TREE_CODE (t) == FIXED_POINT_TYPE)
14550     {
14551       /* FIXME: The following check should pass:
14552 	  useless_type_conversion_p (const_cast <tree> (t),
14553 				     TREE_TYPE (TYPE_MAX_VALUE (t))
14554 	 but does not for C sizetypes in LTO.  */
14555     }
14556   else if (TREE_CODE (t) == ARRAY_TYPE)
14557     {
14558       if (TYPE_ARRAY_MAX_SIZE (t)
14559 	  && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
14560         {
14561 	  error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
14562 	  debug_tree (TYPE_ARRAY_MAX_SIZE (t));
14563 	  error_found = true;
14564         }
14565     }
14566   else if (TYPE_MAX_VALUE_RAW (t))
14567     {
14568       error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
14569       debug_tree (TYPE_MAX_VALUE_RAW (t));
14570       error_found = true;
14571     }
14572 
14573   if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
14574     {
14575       error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
14576       debug_tree (TYPE_LANG_SLOT_1 (t));
14577       error_found = true;
14578     }
14579 
14580   /* Check various uses of TYPE_VALUES_RAW.  */
14581   if (TREE_CODE (t) == ENUMERAL_TYPE)
14582     for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
14583       {
14584 	tree value = TREE_VALUE (l);
14585 	tree name = TREE_PURPOSE (l);
14586 
14587 	/* C FE porduce INTEGER_CST of INTEGER_TYPE, while C++ FE uses
14588  	   CONST_DECL of ENUMERAL TYPE.  */
14589 	if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
14590 	  {
14591 	    error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
14592 	    debug_tree (value);
14593 	    debug_tree (name);
14594 	    error_found = true;
14595 	  }
14596 	if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
14597 	    && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
14598 	  {
14599 	    error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
14600 		   "to the enum");
14601 	    debug_tree (value);
14602 	    debug_tree (name);
14603 	    error_found = true;
14604 	  }
14605 	if (TREE_CODE (name) != IDENTIFIER_NODE)
14606 	  {
14607 	    error ("enum value name is not %<IDENTIFIER_NODE%>");
14608 	    debug_tree (value);
14609 	    debug_tree (name);
14610 	    error_found = true;
14611 	  }
14612       }
14613   else if (TREE_CODE (t) == ARRAY_TYPE)
14614     {
14615       if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
14616 	{
14617 	  error ("array %<TYPE_DOMAIN%> is not integer type");
14618 	  debug_tree (TYPE_DOMAIN (t));
14619 	  error_found = true;
14620 	}
14621     }
14622   else if (RECORD_OR_UNION_TYPE_P (t))
14623     {
14624       if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
14625 	{
14626 	  error ("%<TYPE_FIELDS%> defined in incomplete type");
14627 	  error_found = true;
14628 	}
14629       for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
14630 	{
14631 	  /* TODO: verify properties of decls.  */
14632 	  if (TREE_CODE (fld) == FIELD_DECL)
14633 	    ;
14634 	  else if (TREE_CODE (fld) == TYPE_DECL)
14635 	    ;
14636 	  else if (TREE_CODE (fld) == CONST_DECL)
14637 	    ;
14638 	  else if (VAR_P (fld))
14639 	    ;
14640 	  else if (TREE_CODE (fld) == TEMPLATE_DECL)
14641 	    ;
14642 	  else if (TREE_CODE (fld) == USING_DECL)
14643 	    ;
14644 	  else if (TREE_CODE (fld) == FUNCTION_DECL)
14645 	    ;
14646 	  else
14647 	    {
14648 	      error ("wrong tree in %<TYPE_FIELDS%> list");
14649 	      debug_tree (fld);
14650 	      error_found = true;
14651 	    }
14652 	}
14653     }
14654   else if (TREE_CODE (t) == INTEGER_TYPE
14655 	   || TREE_CODE (t) == BOOLEAN_TYPE
14656 	   || TREE_CODE (t) == OFFSET_TYPE
14657 	   || TREE_CODE (t) == REFERENCE_TYPE
14658 	   || TREE_CODE (t) == NULLPTR_TYPE
14659 	   || TREE_CODE (t) == POINTER_TYPE)
14660     {
14661       if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
14662 	{
14663 	  error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
14664 		 "is %p",
14665 		 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
14666 	  error_found = true;
14667 	}
14668       else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
14669 	{
14670 	  error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
14671 	  debug_tree (TYPE_CACHED_VALUES (t));
14672 	  error_found = true;
14673 	}
14674       /* Verify just enough of cache to ensure that no one copied it to new type.
14675  	 All copying should go by copy_node that should clear it.  */
14676       else if (TYPE_CACHED_VALUES_P (t))
14677 	{
14678 	  int i;
14679 	  for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
14680 	    if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
14681 		&& TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
14682 	      {
14683 		error ("wrong %<TYPE_CACHED_VALUES%> entry");
14684 		debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
14685 		error_found = true;
14686 		break;
14687 	      }
14688 	}
14689     }
14690   else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14691     for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
14692       {
14693 	/* C++ FE uses TREE_PURPOSE to store initial values.  */
14694 	if (TREE_PURPOSE (l) && in_lto_p)
14695 	  {
14696 	    error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
14697 	    debug_tree (l);
14698 	    error_found = true;
14699 	  }
14700 	if (!TYPE_P (TREE_VALUE (l)))
14701 	  {
14702 	    error ("wrong entry in %<TYPE_ARG_TYPES%> list");
14703 	    debug_tree (l);
14704 	    error_found = true;
14705 	  }
14706       }
14707   else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
14708     {
14709       error ("%<TYPE_VALUES_RAW%> field is non-NULL");
14710       debug_tree (TYPE_VALUES_RAW (t));
14711       error_found = true;
14712     }
14713   if (TREE_CODE (t) != INTEGER_TYPE
14714       && TREE_CODE (t) != BOOLEAN_TYPE
14715       && TREE_CODE (t) != OFFSET_TYPE
14716       && TREE_CODE (t) != REFERENCE_TYPE
14717       && TREE_CODE (t) != NULLPTR_TYPE
14718       && TREE_CODE (t) != POINTER_TYPE
14719       && TYPE_CACHED_VALUES_P (t))
14720     {
14721       error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
14722       error_found = true;
14723     }
14724 
14725   /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
14726      TYPE_MAIN_VARIANT and it would be odd to add methods only to variatns
14727      of a type. */
14728   if (TREE_CODE (t) == METHOD_TYPE
14729       && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
14730     {
14731 	error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
14732 	error_found = true;
14733     }
14734 
14735   if (error_found)
14736     {
14737       debug_tree (const_cast <tree> (t));
14738       internal_error ("%qs failed", __func__);
14739     }
14740 }
14741 
14742 
14743 /* Return 1 if ARG interpreted as signed in its precision is known to be
14744    always positive or 2 if ARG is known to be always negative, or 3 if
14745    ARG may be positive or negative.  */
14746 
14747 int
14748 get_range_pos_neg (tree arg)
14749 {
14750   if (arg == error_mark_node)
14751     return 3;
14752 
14753   int prec = TYPE_PRECISION (TREE_TYPE (arg));
14754   int cnt = 0;
14755   if (TREE_CODE (arg) == INTEGER_CST)
14756     {
14757       wide_int w = wi::sext (wi::to_wide (arg), prec);
14758       if (wi::neg_p (w))
14759 	return 2;
14760       else
14761 	return 1;
14762     }
14763   while (CONVERT_EXPR_P (arg)
14764 	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
14765 	 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
14766     {
14767       arg = TREE_OPERAND (arg, 0);
14768       /* Narrower value zero extended into wider type
14769 	 will always result in positive values.  */
14770       if (TYPE_UNSIGNED (TREE_TYPE (arg))
14771 	  && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
14772 	return 1;
14773       prec = TYPE_PRECISION (TREE_TYPE (arg));
14774       if (++cnt > 30)
14775 	return 3;
14776     }
14777 
14778   if (TREE_CODE (arg) != SSA_NAME)
14779     return 3;
14780   wide_int arg_min, arg_max;
14781   while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
14782     {
14783       gimple *g = SSA_NAME_DEF_STMT (arg);
14784       if (is_gimple_assign (g)
14785 	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
14786 	{
14787 	  tree t = gimple_assign_rhs1 (g);
14788 	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
14789 	      && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
14790 	    {
14791 	      if (TYPE_UNSIGNED (TREE_TYPE (t))
14792 		  && TYPE_PRECISION (TREE_TYPE (t)) < prec)
14793 		return 1;
14794 	      prec = TYPE_PRECISION (TREE_TYPE (t));
14795 	      arg = t;
14796 	      if (++cnt > 30)
14797 		return 3;
14798 	      continue;
14799 	    }
14800 	}
14801       return 3;
14802     }
14803   if (TYPE_UNSIGNED (TREE_TYPE (arg)))
14804     {
14805       /* For unsigned values, the "positive" range comes
14806 	 below the "negative" range.  */
14807       if (!wi::neg_p (wi::sext (arg_max, prec), SIGNED))
14808 	return 1;
14809       if (wi::neg_p (wi::sext (arg_min, prec), SIGNED))
14810 	return 2;
14811     }
14812   else
14813     {
14814       if (!wi::neg_p (wi::sext (arg_min, prec), SIGNED))
14815 	return 1;
14816       if (wi::neg_p (wi::sext (arg_max, prec), SIGNED))
14817 	return 2;
14818     }
14819   return 3;
14820 }
14821 
14822 
14823 
14824 
14825 /* Return true if ARG is marked with the nonnull attribute in the
14826    current function signature.  */
14827 
14828 bool
14829 nonnull_arg_p (const_tree arg)
14830 {
14831   tree t, attrs, fntype;
14832   unsigned HOST_WIDE_INT arg_num;
14833 
14834   gcc_assert (TREE_CODE (arg) == PARM_DECL
14835 	      && (POINTER_TYPE_P (TREE_TYPE (arg))
14836 		  || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
14837 
14838   /* The static chain decl is always non null.  */
14839   if (arg == cfun->static_chain_decl)
14840     return true;
14841 
14842   /* THIS argument of method is always non-NULL.  */
14843   if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
14844       && arg == DECL_ARGUMENTS (cfun->decl)
14845       && flag_delete_null_pointer_checks)
14846     return true;
14847 
14848   /* Values passed by reference are always non-NULL.  */
14849   if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
14850       && flag_delete_null_pointer_checks)
14851     return true;
14852 
14853   fntype = TREE_TYPE (cfun->decl);
14854   for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
14855     {
14856       attrs = lookup_attribute ("nonnull", attrs);
14857 
14858       /* If "nonnull" wasn't specified, we know nothing about the argument.  */
14859       if (attrs == NULL_TREE)
14860 	return false;
14861 
14862       /* If "nonnull" applies to all the arguments, then ARG is non-null.  */
14863       if (TREE_VALUE (attrs) == NULL_TREE)
14864 	return true;
14865 
14866       /* Get the position number for ARG in the function signature.  */
14867       for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
14868 	   t;
14869 	   t = DECL_CHAIN (t), arg_num++)
14870 	{
14871 	  if (t == arg)
14872 	    break;
14873 	}
14874 
14875       gcc_assert (t == arg);
14876 
14877       /* Now see if ARG_NUM is mentioned in the nonnull list.  */
14878       for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
14879 	{
14880 	  if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
14881 	    return true;
14882 	}
14883     }
14884 
14885   return false;
14886 }
14887 
14888 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14889    information.  */
14890 
14891 location_t
14892 set_block (location_t loc, tree block)
14893 {
14894   location_t pure_loc = get_pure_location (loc);
14895   source_range src_range = get_range_from_loc (line_table, loc);
14896   return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
14897 }
14898 
14899 location_t
14900 set_source_range (tree expr, location_t start, location_t finish)
14901 {
14902   source_range src_range;
14903   src_range.m_start = start;
14904   src_range.m_finish = finish;
14905   return set_source_range (expr, src_range);
14906 }
14907 
14908 location_t
14909 set_source_range (tree expr, source_range src_range)
14910 {
14911   if (!EXPR_P (expr))
14912     return UNKNOWN_LOCATION;
14913 
14914   location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
14915   location_t adhoc = COMBINE_LOCATION_DATA (line_table,
14916 					    pure_loc,
14917 					    src_range,
14918 					    NULL);
14919   SET_EXPR_LOCATION (expr, adhoc);
14920   return adhoc;
14921 }
14922 
14923 /* Return EXPR, potentially wrapped with a node expression LOC,
14924    if !CAN_HAVE_LOCATION_P (expr).
14925 
14926    NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
14927    VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
14928 
14929    Wrapper nodes can be identified using location_wrapper_p.  */
14930 
14931 tree
14932 maybe_wrap_with_location (tree expr, location_t loc)
14933 {
14934   if (expr == NULL)
14935     return NULL;
14936   if (loc == UNKNOWN_LOCATION)
14937     return expr;
14938   if (CAN_HAVE_LOCATION_P (expr))
14939     return expr;
14940   /* We should only be adding wrappers for constants and for decls,
14941      or for some exceptional tree nodes (e.g. BASELINK in the C++ FE).  */
14942   gcc_assert (CONSTANT_CLASS_P (expr)
14943 	      || DECL_P (expr)
14944 	      || EXCEPTIONAL_CLASS_P (expr));
14945 
14946   /* For now, don't add wrappers to exceptional tree nodes, to minimize
14947      any impact of the wrapper nodes.  */
14948   if (EXCEPTIONAL_CLASS_P (expr))
14949     return expr;
14950 
14951   /* If any auto_suppress_location_wrappers are active, don't create
14952      wrappers.  */
14953   if (suppress_location_wrappers > 0)
14954     return expr;
14955 
14956   tree_code code
14957     = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
14958 	|| (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
14959        ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
14960   tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
14961   /* Mark this node as being a wrapper.  */
14962   EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
14963   return wrapper;
14964 }
14965 
14966 int suppress_location_wrappers;
14967 
14968 /* Return the name of combined function FN, for debugging purposes.  */
14969 
14970 const char *
14971 combined_fn_name (combined_fn fn)
14972 {
14973   if (builtin_fn_p (fn))
14974     {
14975       tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14976       return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14977     }
14978   else
14979     return internal_fn_name (as_internal_fn (fn));
14980 }
14981 
14982 /* Return a bitmap with a bit set corresponding to each argument in
14983    a function call type FNTYPE declared with attribute nonnull,
14984    or null if none of the function's arguments are nonnull.  The caller
14985    must free the bitmap.  */
14986 
14987 bitmap
14988 get_nonnull_args (const_tree fntype)
14989 {
14990   if (fntype == NULL_TREE)
14991     return NULL;
14992 
14993   tree attrs = TYPE_ATTRIBUTES (fntype);
14994   if (!attrs)
14995     return NULL;
14996 
14997   bitmap argmap = NULL;
14998 
14999   /* A function declaration can specify multiple attribute nonnull,
15000      each with zero or more arguments.  The loop below creates a bitmap
15001      representing a union of all the arguments.  An empty (but non-null)
15002      bitmap means that all arguments have been declared nonnull.  */
15003   for ( ; attrs; attrs = TREE_CHAIN (attrs))
15004     {
15005       attrs = lookup_attribute ("nonnull", attrs);
15006       if (!attrs)
15007 	break;
15008 
15009       if (!argmap)
15010 	argmap = BITMAP_ALLOC (NULL);
15011 
15012       if (!TREE_VALUE (attrs))
15013 	{
15014 	  /* Clear the bitmap in case a previous attribute nonnull
15015 	     set it and this one overrides it for all arguments.  */
15016 	  bitmap_clear (argmap);
15017 	  return argmap;
15018 	}
15019 
15020       /* Iterate over the indices of the arguments declared nonnull
15021 	 and set a bit for each.  */
15022       for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
15023 	{
15024 	  unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
15025 	  bitmap_set_bit (argmap, val);
15026 	}
15027     }
15028 
15029   return argmap;
15030 }
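
/* Usage sketch (illustrative only, not part of the original sources): for a
   declaration such as

     extern void f (void *, void *, void *) __attribute__ ((nonnull (1, 3)));

   get_nonnull_args (TREE_TYPE (fndecl)) returns a bitmap with bits 0 and 2
   set (attribute positions are 1-based, the bitmap is 0-based), or an empty
   bitmap if the attribute had been spelled without arguments.  fndecl here
   is assumed to be the FUNCTION_DECL for f, and the caller must BITMAP_FREE
   the result.  */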
15031 
15032 /* Returns true if TYPE is an empty type, i.e. a structure, union, or
15033    array type whose subobjects (recursively, if any) are all empty.  */
15034 
15035 static bool
15036 default_is_empty_type (tree type)
15037 {
15038   if (RECORD_OR_UNION_TYPE_P (type))
15039     {
15040       for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
15041 	if (TREE_CODE (field) == FIELD_DECL
15042 	    && !DECL_PADDING_P (field)
15043 	    && !default_is_empty_type (TREE_TYPE (field)))
15044 	  return false;
15045       return true;
15046     }
15047   else if (TREE_CODE (type) == ARRAY_TYPE)
15048     return (integer_minus_onep (array_type_nelts (type))
15049 	    || TYPE_DOMAIN (type) == NULL_TREE
15050 	    || default_is_empty_type (TREE_TYPE (type)));
15051   return false;
15052 }
15053 
15054 /* Implement TARGET_EMPTY_RECORD_P.  Return true if TYPE is an empty type
15055    that shouldn't be passed on the stack.  */
15056 
15057 bool
15058 default_is_empty_record (const_tree type)
15059 {
15060   if (!abi_version_at_least (12))
15061     return false;
15062 
15063   if (type == error_mark_node)
15064     return false;
15065 
15066   if (TREE_ADDRESSABLE (type))
15067     return false;
15068 
15069   return default_is_empty_type (TYPE_MAIN_VARIANT (type));
15070 }
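
/* Usage sketch (illustrative only, not part of the original sources):
   provided abi_version_at_least (12) holds (the default), types such as

     struct S { };                  // empty
     struct T { struct S a[4]; };   // empty: only empty subobjects

   satisfy default_is_empty_record, whereas adding a data member of
   non-empty type (e.g. an int) to either of them makes it return false.  */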
15071 
15072 /* Determine whether TYPE is a structure with a flexible array member,
15073    or a union containing such a structure (possibly recursively).  */
15074 
15075 bool
15076 flexible_array_type_p (const_tree type)
15077 {
15078   tree x, last;
15079   switch (TREE_CODE (type))
15080     {
15081     case RECORD_TYPE:
15082       last = NULL_TREE;
15083       for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
15084 	if (TREE_CODE (x) == FIELD_DECL)
15085 	  last = x;
15086       if (last == NULL_TREE)
15087 	return false;
15088       if (TREE_CODE (TREE_TYPE (last)) == ARRAY_TYPE
15089 	  && TYPE_SIZE (TREE_TYPE (last)) == NULL_TREE
15090 	  && TYPE_DOMAIN (TREE_TYPE (last)) != NULL_TREE
15091 	  && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last))) == NULL_TREE)
15092 	return true;
15093       return false;
15094     case UNION_TYPE:
15095       for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
15096 	{
15097 	  if (TREE_CODE (x) == FIELD_DECL
15098 	      && flexible_array_type_p (TREE_TYPE (x)))
15099 	    return true;
15100 	}
15101       return false;
15102     default:
15103       return false;
15104   }
15105 }
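
/* Usage sketch (illustrative only, not part of the original sources):

     struct msg { int len; char data[]; };   // trailing flexible array member
     union u { struct msg m; int i; };       // union containing such a struct

   flexible_array_type_p returns true for both types above, but false for a
   structure whose trailing array has a known bound such as "char data[8]".  */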
15106 
15107 /* Like int_size_in_bytes, but handle empty records specially.  */
15108 
15109 HOST_WIDE_INT
15110 arg_int_size_in_bytes (const_tree type)
15111 {
15112   return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
15113 }
15114 
15115 /* Like size_in_bytes, but handle empty records specially.  */
15116 
15117 tree
15118 arg_size_in_bytes (const_tree type)
15119 {
15120   return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
15121 }
15122 
15123 /* Return true if an expression with CODE has to have the same result type as
15124    its first operand.  */
15125 
15126 bool
15127 expr_type_first_operand_type_p (tree_code code)
15128 {
15129   switch (code)
15130     {
15131     case NEGATE_EXPR:
15132     case ABS_EXPR:
15133     case BIT_NOT_EXPR:
15134     case PAREN_EXPR:
15135     case CONJ_EXPR:
15136 
15137     case PLUS_EXPR:
15138     case MINUS_EXPR:
15139     case MULT_EXPR:
15140     case TRUNC_DIV_EXPR:
15141     case CEIL_DIV_EXPR:
15142     case FLOOR_DIV_EXPR:
15143     case ROUND_DIV_EXPR:
15144     case TRUNC_MOD_EXPR:
15145     case CEIL_MOD_EXPR:
15146     case FLOOR_MOD_EXPR:
15147     case ROUND_MOD_EXPR:
15148     case RDIV_EXPR:
15149     case EXACT_DIV_EXPR:
15150     case MIN_EXPR:
15151     case MAX_EXPR:
15152     case BIT_IOR_EXPR:
15153     case BIT_XOR_EXPR:
15154     case BIT_AND_EXPR:
15155 
15156     case LSHIFT_EXPR:
15157     case RSHIFT_EXPR:
15158     case LROTATE_EXPR:
15159     case RROTATE_EXPR:
15160       return true;
15161 
15162     default:
15163       return false;
15164     }
15165 }
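
/* For example (illustrative note, not part of the original sources),
   PLUS_EXPR applied to two ints produces an int, i.e. the type of its first
   operand, so it is listed above; a comparison such as LT_EXPR has a result
   type that need not match its operands and therefore is not.  */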
15166 
15167 /* Return a typenode for the "standard" C type with a given name.  */
15168 tree
15169 get_typenode_from_name (const char *name)
15170 {
15171   if (name == NULL || *name == '\0')
15172     return NULL_TREE;
15173 
15174   if (strcmp (name, "char") == 0)
15175     return char_type_node;
15176   if (strcmp (name, "unsigned char") == 0)
15177     return unsigned_char_type_node;
15178   if (strcmp (name, "signed char") == 0)
15179     return signed_char_type_node;
15180 
15181   if (strcmp (name, "short int") == 0)
15182     return short_integer_type_node;
15183   if (strcmp (name, "short unsigned int") == 0)
15184     return short_unsigned_type_node;
15185 
15186   if (strcmp (name, "int") == 0)
15187     return integer_type_node;
15188   if (strcmp (name, "unsigned int") == 0)
15189     return unsigned_type_node;
15190 
15191   if (strcmp (name, "long int") == 0)
15192     return long_integer_type_node;
15193   if (strcmp (name, "long unsigned int") == 0)
15194     return long_unsigned_type_node;
15195 
15196   if (strcmp (name, "long long int") == 0)
15197     return long_long_integer_type_node;
15198   if (strcmp (name, "long long unsigned int") == 0)
15199     return long_long_unsigned_type_node;
15200 
15201   gcc_unreachable ();
15202 }
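
/* Usage sketch (illustrative only, not part of the original sources):

     tree t = get_typenode_from_name ("long unsigned int");

   returns long_unsigned_type_node.  NULL or "" yields NULL_TREE, and any
   other spelling reaches gcc_unreachable, so callers are expected to pass
   only the canonical C type names handled above (for example, the
   expansions of target macros such as SIZE_TYPE take these forms).  */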
15203 
15204 /* List of pointer types used to declare builtins before we have seen their
15205    real declaration.
15206 
15207    Keep the size up to date in tree.h!  */
15208 const builtin_structptr_type builtin_structptr_types[6] =
15209 {
15210   { fileptr_type_node, ptr_type_node, "FILE" },
15211   { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
15212   { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
15213   { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
15214   { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
15215   { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
15216 };
15217 
15218 /* Return the maximum object size.  */
15219 
15220 tree
15221 max_object_size (void)
15222 {
15223   /* To do: Make this a configurable parameter.  */
15224   return TYPE_MAX_VALUE (ptrdiff_type_node);
15225 }
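
/* For example (illustrative note, not part of the original sources), on a
   typical ILP32 target the value returned above is 2147483647 and on an
   LP64 target it is 9223372036854775807, i.e. the target's PTRDIFF_MAX.  */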
15226 
15227 /* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
15228    parameter default to false and that weeds out error_mark_node.  */
15229 
15230 bool
15231 verify_type_context (location_t loc, type_context_kind context,
15232 		     const_tree type, bool silent_p)
15233 {
15234   if (type == error_mark_node)
15235     return true;
15236 
15237   gcc_assert (TYPE_P (type));
15238   return (!targetm.verify_type_context
15239 	  || targetm.verify_type_context (loc, context, type, silent_p));
15240 }
15241 
15242 #if CHECKING_P
15243 
15244 namespace selftest {
15245 
15246 /* Selftests for tree.  */
15247 
15248 /* Verify that integer constants are sane.  */
15249 
15250 static void
15251 test_integer_constants ()
15252 {
15253   ASSERT_TRUE (integer_type_node != NULL);
15254   ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
15255 
15256   tree type = integer_type_node;
15257 
15258   tree zero = build_zero_cst (type);
15259   ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
15260   ASSERT_EQ (type, TREE_TYPE (zero));
15261 
15262   tree one = build_int_cst (type, 1);
15263   ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
15264   ASSERT_EQ (type, TREE_TYPE (one));
15265 }
15266 
15267 /* Verify identifiers.  */
15268 
15269 static void
15270 test_identifiers ()
15271 {
15272   tree identifier = get_identifier ("foo");
15273   ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
15274   ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
15275 }
15276 
15277 /* Verify LABEL_DECL.  */
15278 
15279 static void
15280 test_labels ()
15281 {
15282   tree identifier = get_identifier ("err");
15283   tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
15284 				identifier, void_type_node);
15285   ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
15286   ASSERT_FALSE (FORCED_LABEL (label_decl));
15287 }
15288 
15289 /* Return a new VECTOR_CST node whose type is TYPE and whose values
15290    are given by VALS.  */
15291 
15292 static tree
15293 build_vector (tree type, vec<tree> vals MEM_STAT_DECL)
15294 {
15295   gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
15296   tree_vector_builder builder (type, vals.length (), 1);
15297   builder.splice (vals);
15298   return builder.build ();
15299 }
15300 
15301 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED.  */
15302 
15303 static void
15304 check_vector_cst (vec<tree> expected, tree actual)
15305 {
15306   ASSERT_KNOWN_EQ (expected.length (),
15307 		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
15308   for (unsigned int i = 0; i < expected.length (); ++i)
15309     ASSERT_EQ (wi::to_wide (expected[i]),
15310 	       wi::to_wide (vector_cst_elt (actual, i)));
15311 }
15312 
15313 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
15314    and that its elements match EXPECTED.  */
15315 
15316 static void
15317 check_vector_cst_duplicate (vec<tree> expected, tree actual,
15318 			    unsigned int npatterns)
15319 {
15320   ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15321   ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
15322   ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
15323   ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
15324   ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15325   check_vector_cst (expected, actual);
15326 }
15327 
15328 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
15329    and NPATTERNS background elements, and that its elements match
15330    EXPECTED.  */
15331 
15332 static void
15333 check_vector_cst_fill (vec<tree> expected, tree actual,
15334 		       unsigned int npatterns)
15335 {
15336   ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15337   ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
15338   ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
15339   ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15340   ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15341   check_vector_cst (expected, actual);
15342 }
15343 
15344 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
15345    and that its elements match EXPECTED.  */
15346 
15347 static void
15348 check_vector_cst_stepped (vec<tree> expected, tree actual,
15349 			  unsigned int npatterns)
15350 {
15351   ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15352   ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
15353   ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
15354   ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15355   ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
15356   check_vector_cst (expected, actual);
15357 }
15358 
15359 /* Test the creation of VECTOR_CSTs.  */
15360 
15361 static void
15362 test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
15363 {
15364   auto_vec<tree, 8> elements (8);
15365   elements.quick_grow (8);
15366   tree element_type = build_nonstandard_integer_type (16, true);
15367   tree vector_type = build_vector_type (element_type, 8);
15368 
15369   /* Test a simple linear series with a base of 0 and a step of 1:
15370      { 0, 1, 2, 3, 4, 5, 6, 7 }.  */
15371   for (unsigned int i = 0; i < 8; ++i)
15372     elements[i] = build_int_cst (element_type, i);
15373   tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
15374   check_vector_cst_stepped (elements, vector, 1);
15375 
15376   /* Try the same with the first element replaced by 100:
15377      { 100, 1, 2, 3, 4, 5, 6, 7 }.  */
15378   elements[0] = build_int_cst (element_type, 100);
15379   vector = build_vector (vector_type, elements PASS_MEM_STAT);
15380   check_vector_cst_stepped (elements, vector, 1);
15381 
15382   /* Try a series that wraps around.
15383      { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }.  */
15384   for (unsigned int i = 1; i < 8; ++i)
15385     elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
15386   vector = build_vector (vector_type, elements PASS_MEM_STAT);
15387   check_vector_cst_stepped (elements, vector, 1);
15388 
15389   /* Try a downward series:
15390      { 100, 79, 78, 77, 76, 75, 74, 73 }.  */
15391   for (unsigned int i = 1; i < 8; ++i)
15392     elements[i] = build_int_cst (element_type, 80 - i);
15393   vector = build_vector (vector_type, elements PASS_MEM_STAT);
15394   check_vector_cst_stepped (elements, vector, 1);
15395 
15396   /* Try two interleaved series with different bases and steps:
15397      { 100, 53, 66, 206, 62, 212, 58, 218 }.  */
15398   elements[1] = build_int_cst (element_type, 53);
15399   for (unsigned int i = 2; i < 8; i += 2)
15400     {
15401       elements[i] = build_int_cst (element_type, 70 - i * 2);
15402       elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
15403     }
15404   vector = build_vector (vector_type, elements PASS_MEM_STAT);
15405   check_vector_cst_stepped (elements, vector, 2);
15406 
15407   /* Try a duplicated value:
15408      { 100, 100, 100, 100, 100, 100, 100, 100 }.  */
15409   for (unsigned int i = 1; i < 8; ++i)
15410     elements[i] = elements[0];
15411   vector = build_vector (vector_type, elements PASS_MEM_STAT);
15412   check_vector_cst_duplicate (elements, vector, 1);
15413 
15414   /* Try an interleaved duplicated value:
15415      { 100, 55, 100, 55, 100, 55, 100, 55 }.  */
15416   elements[1] = build_int_cst (element_type, 55);
15417   for (unsigned int i = 2; i < 8; ++i)
15418     elements[i] = elements[i - 2];
15419   vector = build_vector (vector_type, elements PASS_MEM_STAT);
15420   check_vector_cst_duplicate (elements, vector, 2);
15421 
15422   /* Try a duplicated value with 2 exceptions
15423      { 41, 97, 100, 55, 100, 55, 100, 55 }.  */
15424   elements[0] = build_int_cst (element_type, 41);
15425   elements[1] = build_int_cst (element_type, 97);
15426   vector = build_vector (vector_type, elements PASS_MEM_STAT);
15427   check_vector_cst_fill (elements, vector, 2);
15428 
15429   /* Try with and without a step
15430      { 41, 97, 100, 21, 100, 35, 100, 49 }.  */
15431   for (unsigned int i = 3; i < 8; i += 2)
15432     elements[i] = build_int_cst (element_type, i * 7);
15433   vector = build_vector (vector_type, elements PASS_MEM_STAT);
15434   check_vector_cst_stepped (elements, vector, 2);
15435 
15436   /* Try a fully-general constant:
15437      { 41, 97, 100, 21, 100, 9990, 100, 49 }.  */
15438   elements[5] = build_int_cst (element_type, 9990);
15439   vector = build_vector (vector_type, elements PASS_MEM_STAT);
15440   check_vector_cst_fill (elements, vector, 4);
15441 }
15442 
15443 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
15444    Helper function for test_location_wrappers, to deal with STRIP_NOPS
15445    modifying its argument in-place.  */
15446 
15447 static void
15448 check_strip_nops (tree node, tree expected)
15449 {
15450   STRIP_NOPS (node);
15451   ASSERT_EQ (expected, node);
15452 }
15453 
15454 /* Verify location wrappers.  */
15455 
15456 static void
15457 test_location_wrappers ()
15458 {
15459   location_t loc = BUILTINS_LOCATION;
15460 
15461   ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));
15462 
15463   /* Wrapping a constant.  */
15464   tree int_cst = build_int_cst (integer_type_node, 42);
15465   ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
15466   ASSERT_FALSE (location_wrapper_p (int_cst));
15467 
15468   tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
15469   ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
15470   ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
15471   ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));
15472 
15473   /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION.  */
15474   ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));
15475 
15476   /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P.  */
15477   tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
15478   ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
15479   ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));
15480 
15481   /* Wrapping a STRING_CST.  */
15482   tree string_cst = build_string (4, "foo");
15483   ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
15484   ASSERT_FALSE (location_wrapper_p (string_cst));
15485 
15486   tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
15487   ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
15488   ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
15489   ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
15490   ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));
15491 
15492 
15493   /* Wrapping a variable.  */
15494   tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
15495 			     get_identifier ("some_int_var"),
15496 			     integer_type_node);
15497   ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
15498   ASSERT_FALSE (location_wrapper_p (int_var));
15499 
15500   tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
15501   ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
15502   ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
15503   ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));
15504 
15505   /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
15506      wrapper.  */
15507   tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
15508   ASSERT_FALSE (location_wrapper_p (r_cast));
15509   ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));
15510 
15511   /* Verify that STRIP_NOPS removes wrappers.  */
15512   check_strip_nops (wrapped_int_cst, int_cst);
15513   check_strip_nops (wrapped_string_cst, string_cst);
15514   check_strip_nops (wrapped_int_var, int_var);
15515 }
15516 
15517 /* Test various tree predicates.  Verify that location wrappers don't
15518    affect the results.  */
15519 
15520 static void
15521 test_predicates ()
15522 {
15523   /* Build various constants and wrappers around them.  */
15524 
15525   location_t loc = BUILTINS_LOCATION;
15526 
15527   tree i_0 = build_int_cst (integer_type_node, 0);
15528   tree wr_i_0 = maybe_wrap_with_location (i_0, loc);
15529 
15530   tree i_1 = build_int_cst (integer_type_node, 1);
15531   tree wr_i_1 = maybe_wrap_with_location (i_1, loc);
15532 
15533   tree i_m1 = build_int_cst (integer_type_node, -1);
15534   tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);
15535 
15536   tree f_0 = build_real_from_int_cst (float_type_node, i_0);
15537   tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
15538   tree f_1 = build_real_from_int_cst (float_type_node, i_1);
15539   tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
15540   tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
15541   tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);
15542 
15543   tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
15544   tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
15545   tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);
15546 
15547   tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
15548   tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
15549   tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);
15550 
15551   /* TODO: vector constants.  */
15552 
15553   /* Test integer_onep.  */
15554   ASSERT_FALSE (integer_onep (i_0));
15555   ASSERT_FALSE (integer_onep (wr_i_0));
15556   ASSERT_TRUE (integer_onep (i_1));
15557   ASSERT_TRUE (integer_onep (wr_i_1));
15558   ASSERT_FALSE (integer_onep (i_m1));
15559   ASSERT_FALSE (integer_onep (wr_i_m1));
15560   ASSERT_FALSE (integer_onep (f_0));
15561   ASSERT_FALSE (integer_onep (wr_f_0));
15562   ASSERT_FALSE (integer_onep (f_1));
15563   ASSERT_FALSE (integer_onep (wr_f_1));
15564   ASSERT_FALSE (integer_onep (f_m1));
15565   ASSERT_FALSE (integer_onep (wr_f_m1));
15566   ASSERT_FALSE (integer_onep (c_i_0));
15567   ASSERT_TRUE (integer_onep (c_i_1));
15568   ASSERT_FALSE (integer_onep (c_i_m1));
15569   ASSERT_FALSE (integer_onep (c_f_0));
15570   ASSERT_FALSE (integer_onep (c_f_1));
15571   ASSERT_FALSE (integer_onep (c_f_m1));
15572 
15573   /* Test integer_zerop.  */
15574   ASSERT_TRUE (integer_zerop (i_0));
15575   ASSERT_TRUE (integer_zerop (wr_i_0));
15576   ASSERT_FALSE (integer_zerop (i_1));
15577   ASSERT_FALSE (integer_zerop (wr_i_1));
15578   ASSERT_FALSE (integer_zerop (i_m1));
15579   ASSERT_FALSE (integer_zerop (wr_i_m1));
15580   ASSERT_FALSE (integer_zerop (f_0));
15581   ASSERT_FALSE (integer_zerop (wr_f_0));
15582   ASSERT_FALSE (integer_zerop (f_1));
15583   ASSERT_FALSE (integer_zerop (wr_f_1));
15584   ASSERT_FALSE (integer_zerop (f_m1));
15585   ASSERT_FALSE (integer_zerop (wr_f_m1));
15586   ASSERT_TRUE (integer_zerop (c_i_0));
15587   ASSERT_FALSE (integer_zerop (c_i_1));
15588   ASSERT_FALSE (integer_zerop (c_i_m1));
15589   ASSERT_FALSE (integer_zerop (c_f_0));
15590   ASSERT_FALSE (integer_zerop (c_f_1));
15591   ASSERT_FALSE (integer_zerop (c_f_m1));
15592 
15593   /* Test integer_all_onesp.  */
15594   ASSERT_FALSE (integer_all_onesp (i_0));
15595   ASSERT_FALSE (integer_all_onesp (wr_i_0));
15596   ASSERT_FALSE (integer_all_onesp (i_1));
15597   ASSERT_FALSE (integer_all_onesp (wr_i_1));
15598   ASSERT_TRUE (integer_all_onesp (i_m1));
15599   ASSERT_TRUE (integer_all_onesp (wr_i_m1));
15600   ASSERT_FALSE (integer_all_onesp (f_0));
15601   ASSERT_FALSE (integer_all_onesp (wr_f_0));
15602   ASSERT_FALSE (integer_all_onesp (f_1));
15603   ASSERT_FALSE (integer_all_onesp (wr_f_1));
15604   ASSERT_FALSE (integer_all_onesp (f_m1));
15605   ASSERT_FALSE (integer_all_onesp (wr_f_m1));
15606   ASSERT_FALSE (integer_all_onesp (c_i_0));
15607   ASSERT_FALSE (integer_all_onesp (c_i_1));
15608   ASSERT_FALSE (integer_all_onesp (c_i_m1));
15609   ASSERT_FALSE (integer_all_onesp (c_f_0));
15610   ASSERT_FALSE (integer_all_onesp (c_f_1));
15611   ASSERT_FALSE (integer_all_onesp (c_f_m1));
15612 
15613   /* Test integer_minus_onep.  */
15614   ASSERT_FALSE (integer_minus_onep (i_0));
15615   ASSERT_FALSE (integer_minus_onep (wr_i_0));
15616   ASSERT_FALSE (integer_minus_onep (i_1));
15617   ASSERT_FALSE (integer_minus_onep (wr_i_1));
15618   ASSERT_TRUE (integer_minus_onep (i_m1));
15619   ASSERT_TRUE (integer_minus_onep (wr_i_m1));
15620   ASSERT_FALSE (integer_minus_onep (f_0));
15621   ASSERT_FALSE (integer_minus_onep (wr_f_0));
15622   ASSERT_FALSE (integer_minus_onep (f_1));
15623   ASSERT_FALSE (integer_minus_onep (wr_f_1));
15624   ASSERT_FALSE (integer_minus_onep (f_m1));
15625   ASSERT_FALSE (integer_minus_onep (wr_f_m1));
15626   ASSERT_FALSE (integer_minus_onep (c_i_0));
15627   ASSERT_FALSE (integer_minus_onep (c_i_1));
15628   ASSERT_TRUE (integer_minus_onep (c_i_m1));
15629   ASSERT_FALSE (integer_minus_onep (c_f_0));
15630   ASSERT_FALSE (integer_minus_onep (c_f_1));
15631   ASSERT_FALSE (integer_minus_onep (c_f_m1));
15632 
15633   /* Test integer_each_onep.  */
15634   ASSERT_FALSE (integer_each_onep (i_0));
15635   ASSERT_FALSE (integer_each_onep (wr_i_0));
15636   ASSERT_TRUE (integer_each_onep (i_1));
15637   ASSERT_TRUE (integer_each_onep (wr_i_1));
15638   ASSERT_FALSE (integer_each_onep (i_m1));
15639   ASSERT_FALSE (integer_each_onep (wr_i_m1));
15640   ASSERT_FALSE (integer_each_onep (f_0));
15641   ASSERT_FALSE (integer_each_onep (wr_f_0));
15642   ASSERT_FALSE (integer_each_onep (f_1));
15643   ASSERT_FALSE (integer_each_onep (wr_f_1));
15644   ASSERT_FALSE (integer_each_onep (f_m1));
15645   ASSERT_FALSE (integer_each_onep (wr_f_m1));
15646   ASSERT_FALSE (integer_each_onep (c_i_0));
15647   ASSERT_FALSE (integer_each_onep (c_i_1));
15648   ASSERT_FALSE (integer_each_onep (c_i_m1));
15649   ASSERT_FALSE (integer_each_onep (c_f_0));
15650   ASSERT_FALSE (integer_each_onep (c_f_1));
15651   ASSERT_FALSE (integer_each_onep (c_f_m1));
15652 
15653   /* Test integer_truep.  */
15654   ASSERT_FALSE (integer_truep (i_0));
15655   ASSERT_FALSE (integer_truep (wr_i_0));
15656   ASSERT_TRUE (integer_truep (i_1));
15657   ASSERT_TRUE (integer_truep (wr_i_1));
15658   ASSERT_FALSE (integer_truep (i_m1));
15659   ASSERT_FALSE (integer_truep (wr_i_m1));
15660   ASSERT_FALSE (integer_truep (f_0));
15661   ASSERT_FALSE (integer_truep (wr_f_0));
15662   ASSERT_FALSE (integer_truep (f_1));
15663   ASSERT_FALSE (integer_truep (wr_f_1));
15664   ASSERT_FALSE (integer_truep (f_m1));
15665   ASSERT_FALSE (integer_truep (wr_f_m1));
15666   ASSERT_FALSE (integer_truep (c_i_0));
15667   ASSERT_TRUE (integer_truep (c_i_1));
15668   ASSERT_FALSE (integer_truep (c_i_m1));
15669   ASSERT_FALSE (integer_truep (c_f_0));
15670   ASSERT_FALSE (integer_truep (c_f_1));
15671   ASSERT_FALSE (integer_truep (c_f_m1));
15672 
15673   /* Test integer_nonzerop.  */
15674   ASSERT_FALSE (integer_nonzerop (i_0));
15675   ASSERT_FALSE (integer_nonzerop (wr_i_0));
15676   ASSERT_TRUE (integer_nonzerop (i_1));
15677   ASSERT_TRUE (integer_nonzerop (wr_i_1));
15678   ASSERT_TRUE (integer_nonzerop (i_m1));
15679   ASSERT_TRUE (integer_nonzerop (wr_i_m1));
15680   ASSERT_FALSE (integer_nonzerop (f_0));
15681   ASSERT_FALSE (integer_nonzerop (wr_f_0));
15682   ASSERT_FALSE (integer_nonzerop (f_1));
15683   ASSERT_FALSE (integer_nonzerop (wr_f_1));
15684   ASSERT_FALSE (integer_nonzerop (f_m1));
15685   ASSERT_FALSE (integer_nonzerop (wr_f_m1));
15686   ASSERT_FALSE (integer_nonzerop (c_i_0));
15687   ASSERT_TRUE (integer_nonzerop (c_i_1));
15688   ASSERT_TRUE (integer_nonzerop (c_i_m1));
15689   ASSERT_FALSE (integer_nonzerop (c_f_0));
15690   ASSERT_FALSE (integer_nonzerop (c_f_1));
15691   ASSERT_FALSE (integer_nonzerop (c_f_m1));
15692 
15693   /* Test real_zerop.  */
15694   ASSERT_FALSE (real_zerop (i_0));
15695   ASSERT_FALSE (real_zerop (wr_i_0));
15696   ASSERT_FALSE (real_zerop (i_1));
15697   ASSERT_FALSE (real_zerop (wr_i_1));
15698   ASSERT_FALSE (real_zerop (i_m1));
15699   ASSERT_FALSE (real_zerop (wr_i_m1));
15700   ASSERT_TRUE (real_zerop (f_0));
15701   ASSERT_TRUE (real_zerop (wr_f_0));
15702   ASSERT_FALSE (real_zerop (f_1));
15703   ASSERT_FALSE (real_zerop (wr_f_1));
15704   ASSERT_FALSE (real_zerop (f_m1));
15705   ASSERT_FALSE (real_zerop (wr_f_m1));
15706   ASSERT_FALSE (real_zerop (c_i_0));
15707   ASSERT_FALSE (real_zerop (c_i_1));
15708   ASSERT_FALSE (real_zerop (c_i_m1));
15709   ASSERT_TRUE (real_zerop (c_f_0));
15710   ASSERT_FALSE (real_zerop (c_f_1));
15711   ASSERT_FALSE (real_zerop (c_f_m1));
15712 
15713   /* Test real_onep.  */
15714   ASSERT_FALSE (real_onep (i_0));
15715   ASSERT_FALSE (real_onep (wr_i_0));
15716   ASSERT_FALSE (real_onep (i_1));
15717   ASSERT_FALSE (real_onep (wr_i_1));
15718   ASSERT_FALSE (real_onep (i_m1));
15719   ASSERT_FALSE (real_onep (wr_i_m1));
15720   ASSERT_FALSE (real_onep (f_0));
15721   ASSERT_FALSE (real_onep (wr_f_0));
15722   ASSERT_TRUE (real_onep (f_1));
15723   ASSERT_TRUE (real_onep (wr_f_1));
15724   ASSERT_FALSE (real_onep (f_m1));
15725   ASSERT_FALSE (real_onep (wr_f_m1));
15726   ASSERT_FALSE (real_onep (c_i_0));
15727   ASSERT_FALSE (real_onep (c_i_1));
15728   ASSERT_FALSE (real_onep (c_i_m1));
15729   ASSERT_FALSE (real_onep (c_f_0));
15730   ASSERT_TRUE (real_onep (c_f_1));
15731   ASSERT_FALSE (real_onep (c_f_m1));
15732 
15733   /* Test real_minus_onep.  */
15734   ASSERT_FALSE (real_minus_onep (i_0));
15735   ASSERT_FALSE (real_minus_onep (wr_i_0));
15736   ASSERT_FALSE (real_minus_onep (i_1));
15737   ASSERT_FALSE (real_minus_onep (wr_i_1));
15738   ASSERT_FALSE (real_minus_onep (i_m1));
15739   ASSERT_FALSE (real_minus_onep (wr_i_m1));
15740   ASSERT_FALSE (real_minus_onep (f_0));
15741   ASSERT_FALSE (real_minus_onep (wr_f_0));
15742   ASSERT_FALSE (real_minus_onep (f_1));
15743   ASSERT_FALSE (real_minus_onep (wr_f_1));
15744   ASSERT_TRUE (real_minus_onep (f_m1));
15745   ASSERT_TRUE (real_minus_onep (wr_f_m1));
15746   ASSERT_FALSE (real_minus_onep (c_i_0));
15747   ASSERT_FALSE (real_minus_onep (c_i_1));
15748   ASSERT_FALSE (real_minus_onep (c_i_m1));
15749   ASSERT_FALSE (real_minus_onep (c_f_0));
15750   ASSERT_FALSE (real_minus_onep (c_f_1));
15751   ASSERT_TRUE (real_minus_onep (c_f_m1));
15752 
15753   /* Test zerop.  */
15754   ASSERT_TRUE (zerop (i_0));
15755   ASSERT_TRUE (zerop (wr_i_0));
15756   ASSERT_FALSE (zerop (i_1));
15757   ASSERT_FALSE (zerop (wr_i_1));
15758   ASSERT_FALSE (zerop (i_m1));
15759   ASSERT_FALSE (zerop (wr_i_m1));
15760   ASSERT_TRUE (zerop (f_0));
15761   ASSERT_TRUE (zerop (wr_f_0));
15762   ASSERT_FALSE (zerop (f_1));
15763   ASSERT_FALSE (zerop (wr_f_1));
15764   ASSERT_FALSE (zerop (f_m1));
15765   ASSERT_FALSE (zerop (wr_f_m1));
15766   ASSERT_TRUE (zerop (c_i_0));
15767   ASSERT_FALSE (zerop (c_i_1));
15768   ASSERT_FALSE (zerop (c_i_m1));
15769   ASSERT_TRUE (zerop (c_f_0));
15770   ASSERT_FALSE (zerop (c_f_1));
15771   ASSERT_FALSE (zerop (c_f_m1));
15772 
15773   /* Test tree_expr_nonnegative_p.  */
15774   ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
15775   ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
15776   ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
15777   ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
15778   ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
15779   ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
15780   ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
15781   ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
15782   ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
15783   ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
15784   ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
15785   ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
15786   ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
15787   ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
15788   ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
15789   ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
15790   ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
15791   ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));
15792 
15793   /* Test tree_expr_nonzero_p.  */
15794   ASSERT_FALSE (tree_expr_nonzero_p (i_0));
15795   ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
15796   ASSERT_TRUE (tree_expr_nonzero_p (i_1));
15797   ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
15798   ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
15799   ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));
15800 
15801   /* Test integer_valued_real_p.  */
15802   ASSERT_FALSE (integer_valued_real_p (i_0));
15803   ASSERT_TRUE (integer_valued_real_p (f_0));
15804   ASSERT_TRUE (integer_valued_real_p (wr_f_0));
15805   ASSERT_TRUE (integer_valued_real_p (f_1));
15806   ASSERT_TRUE (integer_valued_real_p (wr_f_1));
15807 
15808   /* Test integer_pow2p.  */
15809   ASSERT_FALSE (integer_pow2p (i_0));
15810   ASSERT_TRUE (integer_pow2p (i_1));
15811   ASSERT_TRUE (integer_pow2p (wr_i_1));
15812 
15813   /* Test uniform_integer_cst_p.  */
15814   ASSERT_TRUE (uniform_integer_cst_p (i_0));
15815   ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
15816   ASSERT_TRUE (uniform_integer_cst_p (i_1));
15817   ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
15818   ASSERT_TRUE (uniform_integer_cst_p (i_m1));
15819   ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
15820   ASSERT_FALSE (uniform_integer_cst_p (f_0));
15821   ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
15822   ASSERT_FALSE (uniform_integer_cst_p (f_1));
15823   ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
15824   ASSERT_FALSE (uniform_integer_cst_p (f_m1));
15825   ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
15826   ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
15827   ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
15828   ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
15829   ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
15830   ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
15831   ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
15832 }
15833 
15834 /* Check that string escaping works correctly.  */
15835 
15836 static void
15837 test_escaped_strings (void)
15838 {
15839   int saved_cutoff;
15840   escaped_string msg;
15841 
15842   msg.escape (NULL);
15843   /* ASSERT_STREQ does not accept NULL as a valid test
15844      result, so we have to use ASSERT_EQ instead.  */
15845   ASSERT_EQ (NULL, (const char *) msg);
15846 
15847   msg.escape ("");
15848   ASSERT_STREQ ("", (const char *) msg);
15849 
15850   msg.escape ("foobar");
15851   ASSERT_STREQ ("foobar", (const char *) msg);
15852 
15853   /* Ensure that we have -fmessage-length set to 0.  */
15854   saved_cutoff = pp_line_cutoff (global_dc->printer);
15855   pp_line_cutoff (global_dc->printer) = 0;
15856 
15857   msg.escape ("foo\nbar");
15858   ASSERT_STREQ ("foo\\nbar", (const char *) msg);
15859 
15860   msg.escape ("\a\b\f\n\r\t\v");
15861   ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);
15862 
15863   /* Now repeat the tests with -fmessage-length set to 5.  */
15864   pp_line_cutoff (global_dc->printer) = 5;
15865 
15866   /* Note that the newline is not translated into an escape.  */
15867   msg.escape ("foo\nbar");
15868   ASSERT_STREQ ("foo\nbar", (const char *) msg);
15869 
15870   msg.escape ("\a\b\f\n\r\t\v");
15871   ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);
15872 
15873   /* Restore the original message length setting.  */
15874   pp_line_cutoff (global_dc->printer) = saved_cutoff;
15875 }
15876 
15877 /* Run all of the selftests within this file.  */
15878 
15879 void
15880 tree_c_tests ()
15881 {
15882   test_integer_constants ();
15883   test_identifiers ();
15884   test_labels ();
15885   test_vector_cst_patterns ();
15886   test_location_wrappers ();
15887   test_predicates ();
15888   test_escaped_strings ();
15889 }
15890 
15891 } // namespace selftest
15892 
15893 #endif /* CHECKING_P */
15894 
15895 #include "gt-tree.h"
15896