1 /* Language-independent node constructors for parse phase of GNU compiler.
2    Copyright (C) 1987-2020 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /* This file contains the low level primitives for operating on tree nodes,
21    including allocation, list operations, interning of identifiers,
22    construction of data type nodes and statement nodes,
23    and construction of type conversion nodes.  It also contains
24    tables indexed by tree code that describe how to take apart
25    nodes of that code.
26 
27    It is intended to be language-independent but occasionally
28    calls language-dependent routines.  */
29 
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "langhooks-def.h"
58 #include "tree-diagnostic.h"
59 #include "except.h"
60 #include "builtins.h"
61 #include "print-tree.h"
62 #include "ipa-utils.h"
63 #include "selftest.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "rtl.h"
67 #include "regs.h"
68 #include "tree-vector-builder.h"
69 #include "gimple-fold.h"
70 #include "escaped_string.h"
71 
72 /* Tree code classes.  */
73 
74 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
75 #define END_OF_BASE_TREE_CODES tcc_exceptional,
76 
77 const enum tree_code_class tree_code_type[] = {
78 #include "all-tree.def"
79 };
80 
81 #undef DEFTREECODE
82 #undef END_OF_BASE_TREE_CODES
83 
84 /* Table indexed by tree code giving number of expression
85    operands beyond the fixed part of the node structure.
86    Not used for types or decls.  */
87 
88 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
89 #define END_OF_BASE_TREE_CODES 0,
90 
91 const unsigned char tree_code_length[] = {
92 #include "all-tree.def"
93 };
94 
95 #undef DEFTREECODE
96 #undef END_OF_BASE_TREE_CODES
97 
98 /* Names of tree components.
99    Used for printing out the tree and error messages.  */
100 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
101 #define END_OF_BASE_TREE_CODES "@dummy",
102 
103 static const char *const tree_code_name[] = {
104 #include "all-tree.def"
105 };
106 
107 #undef DEFTREECODE
108 #undef END_OF_BASE_TREE_CODES
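
/* Illustrative note (not part of the generated tables): a tree.def entry
   such as

     DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2)

   expands under the three DEFTREECODE definitions above to tcc_binary in
   tree_code_type[], 2 in tree_code_length[] and "plus_expr" in
   tree_code_name[], all at index PLUS_EXPR.  */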
109 
110 /* Each tree code class has an associated string representation.
111    These must correspond to the tree_code_class entries.  */
112 
113 const char *const tree_code_class_strings[] =
114 {
115   "exceptional",
116   "constant",
117   "type",
118   "declaration",
119   "reference",
120   "comparison",
121   "unary",
122   "binary",
123   "statement",
124   "vl_exp",
125   "expression"
126 };
127 
128 /* obstack.[ch] explicitly declined to prototype this.  */
129 extern int _obstack_allocated_p (struct obstack *h, void *obj);
130 
131 /* Statistics-gathering stuff.  */
132 
133 static uint64_t tree_code_counts[MAX_TREE_CODES];
134 uint64_t tree_node_counts[(int) all_kinds];
135 uint64_t tree_node_sizes[(int) all_kinds];
136 
137 /* Keep in sync with tree.h:enum tree_node_kind.  */
138 static const char * const tree_node_kind_names[] = {
139   "decls",
140   "types",
141   "blocks",
142   "stmts",
143   "refs",
144   "exprs",
145   "constants",
146   "identifiers",
147   "vecs",
148   "binfos",
149   "ssa names",
150   "constructors",
151   "random kinds",
152   "lang_decl kinds",
153   "lang_type kinds",
154   "omp clauses",
155 };
156 
157 /* Unique id for next decl created.  */
158 static GTY(()) int next_decl_uid;
159 /* Unique id for next type created.  */
160 static GTY(()) unsigned next_type_uid = 1;
161 /* Unique id for next debug decl created.  Use negative numbers,
162    to catch erroneous uses.  */
163 static GTY(()) int next_debug_decl_uid;
164 
165 /* Since we cannot rehash a type after it is in the table, we have to
166    keep the hash code.  */
167 
168 struct GTY((for_user)) type_hash {
169   unsigned long hash;
170   tree type;
171 };
172 
173 /* Initial size of the hash table (rounded to next prime).  */
174 #define TYPE_HASH_INITIAL_SIZE 1000
175 
176 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
177 {
178   static hashval_t hash (type_hash *t) { return t->hash; }
179   static bool equal (type_hash *a, type_hash *b);
180 
181   static int
182   keep_cache_entry (type_hash *&t)
183   {
184     return ggc_marked_p (t->type);
185   }
186 };
187 
188 /* Now here is the hash table.  When recording a type, it is added to
189    the slot whose index is the hash code.  Note that the hash table is
190    used for several kinds of types (function types, array types and
191    array index range types, for now).  While all these live in the
192    same table, they are completely independent, and the hash code is
193    computed differently for each of these.  */
194 
195 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
196 
197 /* Hash table and temporary node for larger integer const values.  */
198 static GTY (()) tree int_cst_node;
199 
200 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
201 {
202   static hashval_t hash (tree t);
203   static bool equal (tree x, tree y);
204 };
205 
206 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
207 
208 /* Class and variable for making sure that there is a single POLY_INT_CST
209    for a given value.  */
210 struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
211 {
212   typedef std::pair<tree, const poly_wide_int *> compare_type;
213   static hashval_t hash (tree t);
214   static bool equal (tree x, const compare_type &y);
215 };
216 
217 static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
218 
219 /* Hash table for optimization flags and target option flags.  Use the same
220    hash table for both sets of options.  Nodes for building the current
221    optimization and target option nodes.  The assumption is most of the time
222    the options created will already be in the hash table, so we avoid
223    allocating and freeing up a node repeatedly.  */
224 static GTY (()) tree cl_optimization_node;
225 static GTY (()) tree cl_target_option_node;
226 
227 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
228 {
229   static hashval_t hash (tree t);
230   static bool equal (tree x, tree y);
231 };
232 
233 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
234 
235 /* General tree->tree mapping structure for use in hash tables.  */
236 
237 
238 static GTY ((cache))
239      hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
240 
241 static GTY ((cache))
242      hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
243 
244 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
245 {
246   static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
247 
248   static bool
249   equal (tree_vec_map *a, tree_vec_map *b)
250   {
251     return a->base.from == b->base.from;
252   }
253 
254   static int
255   keep_cache_entry (tree_vec_map *&m)
256   {
257     return ggc_marked_p (m->base.from);
258   }
259 };
260 
261 static GTY ((cache))
262      hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
263 
264 static void set_type_quals (tree, int);
265 static void print_type_hash_statistics (void);
266 static void print_debug_expr_statistics (void);
267 static void print_value_expr_statistics (void);
268 
269 static tree build_array_type_1 (tree, tree, bool, bool, bool);
270 
271 tree global_trees[TI_MAX];
272 tree integer_types[itk_none];
273 
274 bool int_n_enabled_p[NUM_INT_N_ENTS];
275 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
276 
277 bool tree_contains_struct[MAX_TREE_CODES][64];
278 
279 /* Number of operands for each OpenMP clause.  */
280 unsigned const char omp_clause_num_ops[] =
281 {
282   0, /* OMP_CLAUSE_ERROR  */
283   1, /* OMP_CLAUSE_PRIVATE  */
284   1, /* OMP_CLAUSE_SHARED  */
285   1, /* OMP_CLAUSE_FIRSTPRIVATE  */
286   2, /* OMP_CLAUSE_LASTPRIVATE  */
287   5, /* OMP_CLAUSE_REDUCTION  */
288   5, /* OMP_CLAUSE_TASK_REDUCTION  */
289   5, /* OMP_CLAUSE_IN_REDUCTION  */
290   1, /* OMP_CLAUSE_COPYIN  */
291   1, /* OMP_CLAUSE_COPYPRIVATE  */
292   3, /* OMP_CLAUSE_LINEAR  */
293   2, /* OMP_CLAUSE_ALIGNED  */
294   1, /* OMP_CLAUSE_DEPEND  */
295   1, /* OMP_CLAUSE_NONTEMPORAL  */
296   1, /* OMP_CLAUSE_UNIFORM  */
297   1, /* OMP_CLAUSE_TO_DECLARE  */
298   1, /* OMP_CLAUSE_LINK  */
299   2, /* OMP_CLAUSE_FROM  */
300   2, /* OMP_CLAUSE_TO  */
301   2, /* OMP_CLAUSE_MAP  */
302   1, /* OMP_CLAUSE_USE_DEVICE_PTR  */
303   1, /* OMP_CLAUSE_USE_DEVICE_ADDR  */
304   1, /* OMP_CLAUSE_IS_DEVICE_PTR  */
305   1, /* OMP_CLAUSE_INCLUSIVE  */
306   1, /* OMP_CLAUSE_EXCLUSIVE  */
307   2, /* OMP_CLAUSE__CACHE_  */
308   2, /* OMP_CLAUSE_GANG  */
309   1, /* OMP_CLAUSE_ASYNC  */
310   1, /* OMP_CLAUSE_WAIT  */
311   0, /* OMP_CLAUSE_AUTO  */
312   0, /* OMP_CLAUSE_SEQ  */
313   1, /* OMP_CLAUSE__LOOPTEMP_  */
314   1, /* OMP_CLAUSE__REDUCTEMP_  */
315   1, /* OMP_CLAUSE__CONDTEMP_  */
316   1, /* OMP_CLAUSE__SCANTEMP_  */
317   1, /* OMP_CLAUSE_IF  */
318   1, /* OMP_CLAUSE_NUM_THREADS  */
319   1, /* OMP_CLAUSE_SCHEDULE  */
320   0, /* OMP_CLAUSE_NOWAIT  */
321   1, /* OMP_CLAUSE_ORDERED  */
322   0, /* OMP_CLAUSE_DEFAULT  */
323   3, /* OMP_CLAUSE_COLLAPSE  */
324   0, /* OMP_CLAUSE_UNTIED   */
325   1, /* OMP_CLAUSE_FINAL  */
326   0, /* OMP_CLAUSE_MERGEABLE  */
327   1, /* OMP_CLAUSE_DEVICE  */
328   1, /* OMP_CLAUSE_DIST_SCHEDULE  */
329   0, /* OMP_CLAUSE_INBRANCH  */
330   0, /* OMP_CLAUSE_NOTINBRANCH  */
331   1, /* OMP_CLAUSE_NUM_TEAMS  */
332   1, /* OMP_CLAUSE_THREAD_LIMIT  */
333   0, /* OMP_CLAUSE_PROC_BIND  */
334   1, /* OMP_CLAUSE_SAFELEN  */
335   1, /* OMP_CLAUSE_SIMDLEN  */
336   0, /* OMP_CLAUSE_DEVICE_TYPE  */
337   0, /* OMP_CLAUSE_FOR  */
338   0, /* OMP_CLAUSE_PARALLEL  */
339   0, /* OMP_CLAUSE_SECTIONS  */
340   0, /* OMP_CLAUSE_TASKGROUP  */
341   1, /* OMP_CLAUSE_PRIORITY  */
342   1, /* OMP_CLAUSE_GRAINSIZE  */
343   1, /* OMP_CLAUSE_NUM_TASKS  */
344   0, /* OMP_CLAUSE_NOGROUP  */
345   0, /* OMP_CLAUSE_THREADS  */
346   0, /* OMP_CLAUSE_SIMD  */
347   1, /* OMP_CLAUSE_HINT  */
348   0, /* OMP_CLAUSE_DEFAULTMAP  */
349   0, /* OMP_CLAUSE_ORDER  */
350   0, /* OMP_CLAUSE_BIND  */
351   1, /* OMP_CLAUSE__SIMDUID_  */
352   0, /* OMP_CLAUSE__SIMT_  */
353   0, /* OMP_CLAUSE_INDEPENDENT  */
354   1, /* OMP_CLAUSE_WORKER  */
355   1, /* OMP_CLAUSE_VECTOR  */
356   1, /* OMP_CLAUSE_NUM_GANGS  */
357   1, /* OMP_CLAUSE_NUM_WORKERS  */
358   1, /* OMP_CLAUSE_VECTOR_LENGTH  */
359   3, /* OMP_CLAUSE_TILE  */
360   2, /* OMP_CLAUSE__GRIDDIM_  */
361   0, /* OMP_CLAUSE_IF_PRESENT */
362   0, /* OMP_CLAUSE_FINALIZE */
363 };
364 
365 const char * const omp_clause_code_name[] =
366 {
367   "error_clause",
368   "private",
369   "shared",
370   "firstprivate",
371   "lastprivate",
372   "reduction",
373   "task_reduction",
374   "in_reduction",
375   "copyin",
376   "copyprivate",
377   "linear",
378   "aligned",
379   "depend",
380   "nontemporal",
381   "uniform",
382   "to",
383   "link",
384   "from",
385   "to",
386   "map",
387   "use_device_ptr",
388   "use_device_addr",
389   "is_device_ptr",
390   "inclusive",
391   "exclusive",
392   "_cache_",
393   "gang",
394   "async",
395   "wait",
396   "auto",
397   "seq",
398   "_looptemp_",
399   "_reductemp_",
400   "_condtemp_",
401   "_scantemp_",
402   "if",
403   "num_threads",
404   "schedule",
405   "nowait",
406   "ordered",
407   "default",
408   "collapse",
409   "untied",
410   "final",
411   "mergeable",
412   "device",
413   "dist_schedule",
414   "inbranch",
415   "notinbranch",
416   "num_teams",
417   "thread_limit",
418   "proc_bind",
419   "safelen",
420   "simdlen",
421   "device_type",
422   "for",
423   "parallel",
424   "sections",
425   "taskgroup",
426   "priority",
427   "grainsize",
428   "num_tasks",
429   "nogroup",
430   "threads",
431   "simd",
432   "hint",
433   "defaultmap",
434   "order",
435   "bind",
436   "_simduid_",
437   "_simt_",
438   "independent",
439   "worker",
440   "vector",
441   "num_gangs",
442   "num_workers",
443   "vector_length",
444   "tile",
445   "_griddim_",
446   "if_present",
447   "finalize",
448 };
449 
450 
451 /* Return the tree node structure used by tree code CODE.  */
452 
453 static inline enum tree_node_structure_enum
454 tree_node_structure_for_code (enum tree_code code)
455 {
456   switch (TREE_CODE_CLASS (code))
457     {
458     case tcc_declaration:
459       switch (code)
460 	{
461 	case CONST_DECL:	return TS_CONST_DECL;
462 	case DEBUG_EXPR_DECL:	return TS_DECL_WRTL;
463 	case FIELD_DECL:	return TS_FIELD_DECL;
464 	case FUNCTION_DECL:	return TS_FUNCTION_DECL;
465 	case LABEL_DECL:	return TS_LABEL_DECL;
466 	case PARM_DECL:		return TS_PARM_DECL;
467 	case RESULT_DECL:	return TS_RESULT_DECL;
468 	case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
469 	case TYPE_DECL:		return TS_TYPE_DECL;
470 	case VAR_DECL:		return TS_VAR_DECL;
471 	default: 		return TS_DECL_NON_COMMON;
472 	}
473 
474     case tcc_type:		return TS_TYPE_NON_COMMON;
475 
476     case tcc_binary:
477     case tcc_comparison:
478     case tcc_expression:
479     case tcc_reference:
480     case tcc_statement:
481     case tcc_unary:
482     case tcc_vl_exp:		return TS_EXP;
483 
484     default:  /* tcc_constant and tcc_exceptional */
485       break;
486     }
487 
488   switch (code)
489     {
490       /* tcc_constant cases.  */
491     case COMPLEX_CST:		return TS_COMPLEX;
492     case FIXED_CST:		return TS_FIXED_CST;
493     case INTEGER_CST:		return TS_INT_CST;
494     case POLY_INT_CST:		return TS_POLY_INT_CST;
495     case REAL_CST:		return TS_REAL_CST;
496     case STRING_CST:		return TS_STRING;
497     case VECTOR_CST:		return TS_VECTOR;
498     case VOID_CST:		return TS_TYPED;
499 
500       /* tcc_exceptional cases.  */
501     case BLOCK:			return TS_BLOCK;
502     case CONSTRUCTOR:		return TS_CONSTRUCTOR;
503     case ERROR_MARK:		return TS_COMMON;
504     case IDENTIFIER_NODE:	return TS_IDENTIFIER;
505     case OMP_CLAUSE:		return TS_OMP_CLAUSE;
506     case OPTIMIZATION_NODE:	return TS_OPTIMIZATION;
507     case PLACEHOLDER_EXPR:	return TS_COMMON;
508     case SSA_NAME:		return TS_SSA_NAME;
509     case STATEMENT_LIST:	return TS_STATEMENT_LIST;
510     case TARGET_OPTION_NODE:	return TS_TARGET_OPTION;
511     case TREE_BINFO:		return TS_BINFO;
512     case TREE_LIST:		return TS_LIST;
513     case TREE_VEC:		return TS_VEC;
514 
515     default:
516       gcc_unreachable ();
517     }
518 }
519 
520 
521 /* Initialize tree_contains_struct to describe the hierarchy of tree
522    nodes.  */
523 
524 static void
525 initialize_tree_contains_struct (void)
526 {
527   unsigned i;
528 
529   for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
530     {
531       enum tree_code code;
532       enum tree_node_structure_enum ts_code;
533 
534       code = (enum tree_code) i;
535       ts_code = tree_node_structure_for_code (code);
536 
537       /* Mark the TS structure itself.  */
538       tree_contains_struct[code][ts_code] = 1;
539 
540       /* Mark all the structures that TS is derived from.  */
541       switch (ts_code)
542 	{
543 	case TS_TYPED:
544 	case TS_BLOCK:
545 	case TS_OPTIMIZATION:
546 	case TS_TARGET_OPTION:
547 	  MARK_TS_BASE (code);
548 	  break;
549 
550 	case TS_COMMON:
551 	case TS_INT_CST:
552 	case TS_POLY_INT_CST:
553 	case TS_REAL_CST:
554 	case TS_FIXED_CST:
555 	case TS_VECTOR:
556 	case TS_STRING:
557 	case TS_COMPLEX:
558 	case TS_SSA_NAME:
559 	case TS_CONSTRUCTOR:
560 	case TS_EXP:
561 	case TS_STATEMENT_LIST:
562 	  MARK_TS_TYPED (code);
563 	  break;
564 
565 	case TS_IDENTIFIER:
566 	case TS_DECL_MINIMAL:
567 	case TS_TYPE_COMMON:
568 	case TS_LIST:
569 	case TS_VEC:
570 	case TS_BINFO:
571 	case TS_OMP_CLAUSE:
572 	  MARK_TS_COMMON (code);
573 	  break;
574 
575 	case TS_TYPE_WITH_LANG_SPECIFIC:
576 	  MARK_TS_TYPE_COMMON (code);
577 	  break;
578 
579 	case TS_TYPE_NON_COMMON:
580 	  MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
581 	  break;
582 
583 	case TS_DECL_COMMON:
584 	  MARK_TS_DECL_MINIMAL (code);
585 	  break;
586 
587 	case TS_DECL_WRTL:
588 	case TS_CONST_DECL:
589 	  MARK_TS_DECL_COMMON (code);
590 	  break;
591 
592 	case TS_DECL_NON_COMMON:
593 	  MARK_TS_DECL_WITH_VIS (code);
594 	  break;
595 
596 	case TS_DECL_WITH_VIS:
597 	case TS_PARM_DECL:
598 	case TS_LABEL_DECL:
599 	case TS_RESULT_DECL:
600 	  MARK_TS_DECL_WRTL (code);
601 	  break;
602 
603 	case TS_FIELD_DECL:
604 	  MARK_TS_DECL_COMMON (code);
605 	  break;
606 
607 	case TS_VAR_DECL:
608 	  MARK_TS_DECL_WITH_VIS (code);
609 	  break;
610 
611 	case TS_TYPE_DECL:
612 	case TS_FUNCTION_DECL:
613 	  MARK_TS_DECL_NON_COMMON (code);
614 	  break;
615 
616 	case TS_TRANSLATION_UNIT_DECL:
617 	  MARK_TS_DECL_COMMON (code);
618 	  break;
619 
620 	default:
621 	  gcc_unreachable ();
622 	}
623     }
624 
625   /* Basic consistency checks for attributes used in fold.  */
626   gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
627   gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
628   gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
629   gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
630   gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
631   gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
632   gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
633   gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
634   gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
635   gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
636   gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
637   gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
638   gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
639   gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
640   gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
641   gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
642   gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
643   gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
644   gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
645   gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
646   gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
647   gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
648   gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
649   gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
650   gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
651   gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
652   gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
653   gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
654   gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
655   gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
656   gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
657   gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
658   gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
659   gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
660   gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
661   gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
662   gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
663   gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
664   gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
665   gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
666 }
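
/* Example of the effect of the marking above (an illustrative sketch, not
   used by the compiler itself): TS_VAR_DECL is derived from
   TS_DECL_WITH_VIS, which is in turn derived from TS_DECL_WRTL, so after
   initialization

     CODE_CONTAINS_STRUCT (VAR_DECL, TS_DECL_WRTL)

   is true and the DECL_RTL accessors may be used on a VAR_DECL.  */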
667 
668 
669 /* Init tree.c.  */
670 
671 void
672 init_ttree (void)
673 {
674   /* Initialize the hash table of types.  */
675   type_hash_table
676     = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
677 
678   debug_expr_for_decl
679     = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
680 
681   value_expr_for_decl
682     = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
683 
684   int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
685 
686   poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);
687 
688   int_cst_node = make_int_cst (1, 1);
689 
690   cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
691 
692   cl_optimization_node = make_node (OPTIMIZATION_NODE);
693   cl_target_option_node = make_node (TARGET_OPTION_NODE);
694 
695   /* Initialize the tree_contains_struct array.  */
696   initialize_tree_contains_struct ();
697   lang_hooks.init_ts ();
698 }
699 
700 
701 /* The name of the object as the assembler will see it (but before any
702    translations made by ASM_OUTPUT_LABELREF).  Often this is the same
703    as DECL_NAME.  It is an IDENTIFIER_NODE.  */
704 tree
705 decl_assembler_name (tree decl)
706 {
707   if (!DECL_ASSEMBLER_NAME_SET_P (decl))
708     lang_hooks.set_decl_assembler_name (decl);
709   return DECL_ASSEMBLER_NAME_RAW (decl);
710 }
711 
712 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
713    (either of which may be NULL).  Inform the FE, if this changes the
714    name.  */
715 
716 void
717 overwrite_decl_assembler_name (tree decl, tree name)
718 {
719   if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
720     lang_hooks.overwrite_decl_assembler_name (decl, name);
721 }
722 
723 /* When the target supports COMDAT groups, this indicates which group the
724    DECL is associated with.  This can be either an IDENTIFIER_NODE or a
725    decl, in which case its DECL_ASSEMBLER_NAME identifies the group.  */
726 tree
727 decl_comdat_group (const_tree node)
728 {
729   struct symtab_node *snode = symtab_node::get (node);
730   if (!snode)
731     return NULL;
732   return snode->get_comdat_group ();
733 }
734 
735 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE.  */
736 tree
737 decl_comdat_group_id (const_tree node)
738 {
739   struct symtab_node *snode = symtab_node::get (node);
740   if (!snode)
741     return NULL;
742   return snode->get_comdat_group_id ();
743 }
744 
745 /* When the target supports named sections, return the name of the section
746    NODE is placed in as a string, or NULL if it is in no section.  */
747 const char *
748 decl_section_name (const_tree node)
749 {
750   struct symtab_node *snode = symtab_node::get (node);
751   if (!snode)
752     return NULL;
753   return snode->get_section ();
754 }
755 
756 /* Set section name of NODE to VALUE, which is expected to be a string,
757    or clear it when VALUE is NULL.  */
758 void
759 set_decl_section_name (tree node, const char *value)
760 {
761   struct symtab_node *snode;
762 
763   if (value == NULL)
764     {
765       snode = symtab_node::get (node);
766       if (!snode)
767 	return;
768     }
769   else if (VAR_P (node))
770     snode = varpool_node::get_create (node);
771   else
772     snode = cgraph_node::get_create (node);
773   snode->set_section (value);
774 }
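
/* Usage sketch (illustrative; ".mydata" is a hypothetical section name):

     set_decl_section_name (decl, ".mydata");

   creates the varpool or cgraph node for DECL if needed and records the
   section, while passing NULL only clears the section of an already
   existing symtab node.  */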
775 
776 /* Return TLS model of a variable NODE.  */
777 enum tls_model
778 decl_tls_model (const_tree node)
779 {
780   struct varpool_node *snode = varpool_node::get (node);
781   if (!snode)
782     return TLS_MODEL_NONE;
783   return snode->tls_model;
784 }
785 
786 /* Set TLS model of variable NODE to MODEL.  */
787 void
788 set_decl_tls_model (tree node, enum tls_model model)
789 {
790   struct varpool_node *vnode;
791 
792   if (model == TLS_MODEL_NONE)
793     {
794       vnode = varpool_node::get (node);
795       if (!vnode)
796 	return;
797     }
798   else
799     vnode = varpool_node::get_create (node);
800   vnode->tls_model = model;
801 }
802 
803 /* Compute the number of bytes occupied by a tree with code CODE.
804    This function cannot be used for nodes that have variable sizes,
805    including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR.  */
806 size_t
807 tree_code_size (enum tree_code code)
808 {
809   switch (TREE_CODE_CLASS (code))
810     {
811     case tcc_declaration:  /* A decl node */
812       switch (code)
813 	{
814 	case FIELD_DECL:	return sizeof (tree_field_decl);
815 	case PARM_DECL:		return sizeof (tree_parm_decl);
816 	case VAR_DECL:		return sizeof (tree_var_decl);
817 	case LABEL_DECL:	return sizeof (tree_label_decl);
818 	case RESULT_DECL:	return sizeof (tree_result_decl);
819 	case CONST_DECL:	return sizeof (tree_const_decl);
820 	case TYPE_DECL:		return sizeof (tree_type_decl);
821 	case FUNCTION_DECL:	return sizeof (tree_function_decl);
822 	case DEBUG_EXPR_DECL:	return sizeof (tree_decl_with_rtl);
823 	case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
824 	case NAMESPACE_DECL:
825 	case IMPORTED_DECL:
826 	case NAMELIST_DECL:	return sizeof (tree_decl_non_common);
827 	default:
828 	  gcc_checking_assert (code >= NUM_TREE_CODES);
829 	  return lang_hooks.tree_size (code);
830 	}
831 
832     case tcc_type:  /* a type node */
833       switch (code)
834 	{
835 	case OFFSET_TYPE:
836 	case ENUMERAL_TYPE:
837 	case BOOLEAN_TYPE:
838 	case INTEGER_TYPE:
839 	case REAL_TYPE:
840 	case POINTER_TYPE:
841 	case REFERENCE_TYPE:
842 	case NULLPTR_TYPE:
843 	case FIXED_POINT_TYPE:
844 	case COMPLEX_TYPE:
845 	case VECTOR_TYPE:
846 	case ARRAY_TYPE:
847 	case RECORD_TYPE:
848 	case UNION_TYPE:
849 	case QUAL_UNION_TYPE:
850 	case VOID_TYPE:
851 	case FUNCTION_TYPE:
852 	case METHOD_TYPE:
853 	case LANG_TYPE:		return sizeof (tree_type_non_common);
854 	default:
855 	  gcc_checking_assert (code >= NUM_TREE_CODES);
856 	  return lang_hooks.tree_size (code);
857 	}
858 
859     case tcc_reference:   /* a reference */
860     case tcc_expression:  /* an expression */
861     case tcc_statement:   /* an expression with side effects */
862     case tcc_comparison:  /* a comparison expression */
863     case tcc_unary:       /* a unary arithmetic expression */
864     case tcc_binary:      /* a binary arithmetic expression */
865       return (sizeof (struct tree_exp)
866 	      + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
867 
868     case tcc_constant:  /* a constant */
869       switch (code)
870 	{
871 	case VOID_CST:		return sizeof (tree_typed);
872 	case INTEGER_CST:	gcc_unreachable ();
873 	case POLY_INT_CST:	return sizeof (tree_poly_int_cst);
874 	case REAL_CST:		return sizeof (tree_real_cst);
875 	case FIXED_CST:		return sizeof (tree_fixed_cst);
876 	case COMPLEX_CST:	return sizeof (tree_complex);
877 	case VECTOR_CST:	gcc_unreachable ();
878 	case STRING_CST:	gcc_unreachable ();
879 	default:
880 	  gcc_checking_assert (code >= NUM_TREE_CODES);
881 	  return lang_hooks.tree_size (code);
882 	}
883 
884     case tcc_exceptional:  /* something random, like an identifier.  */
885       switch (code)
886 	{
887 	case IDENTIFIER_NODE:	return lang_hooks.identifier_size;
888 	case TREE_LIST:		return sizeof (tree_list);
889 
890 	case ERROR_MARK:
891 	case PLACEHOLDER_EXPR:	return sizeof (tree_common);
892 
893 	case TREE_VEC:		gcc_unreachable ();
894 	case OMP_CLAUSE:	gcc_unreachable ();
895 
896 	case SSA_NAME:		return sizeof (tree_ssa_name);
897 
898 	case STATEMENT_LIST:	return sizeof (tree_statement_list);
899 	case BLOCK:		return sizeof (struct tree_block);
900 	case CONSTRUCTOR:	return sizeof (tree_constructor);
901 	case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
902 	case TARGET_OPTION_NODE: return sizeof (tree_target_option);
903 
904 	default:
905 	  gcc_checking_assert (code >= NUM_TREE_CODES);
906 	  return lang_hooks.tree_size (code);
907 	}
908 
909     default:
910       gcc_unreachable ();
911     }
912 }
913 
914 /* Compute the number of bytes occupied by NODE.  This routine only
915    looks at TREE_CODE, except for those nodes that have variable sizes.  */
916 size_t
917 tree_size (const_tree node)
918 {
919   const enum tree_code code = TREE_CODE (node);
920   switch (code)
921     {
922     case INTEGER_CST:
923       return (sizeof (struct tree_int_cst)
924 	      + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
925 
926     case TREE_BINFO:
927       return (offsetof (struct tree_binfo, base_binfos)
928 	      + vec<tree, va_gc>
929 		  ::embedded_size (BINFO_N_BASE_BINFOS (node)));
930 
931     case TREE_VEC:
932       return (sizeof (struct tree_vec)
933 	      + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
934 
935     case VECTOR_CST:
936       return (sizeof (struct tree_vector)
937 	      + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));
938 
939     case STRING_CST:
940       return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
941 
942     case OMP_CLAUSE:
943       return (sizeof (struct tree_omp_clause)
944 	      + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
945 	        * sizeof (tree));
946 
947     default:
948       if (TREE_CODE_CLASS (code) == tcc_vl_exp)
949 	return (sizeof (struct tree_exp)
950 		+ (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
951       else
952 	return tree_code_size (code);
953     }
954 }
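
/* Worked example (illustrative only): a TREE_VEC whose TREE_VEC_LENGTH is 4
   occupies

     sizeof (struct tree_vec) + 3 * sizeof (tree)

   bytes, since one operand slot is already part of struct tree_vec.  */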
955 
956 /* Return tree node kind based on tree CODE.  */
957 
958 static tree_node_kind
959 get_stats_node_kind (enum tree_code code)
960 {
961   enum tree_code_class type = TREE_CODE_CLASS (code);
962 
963   switch (type)
964     {
965     case tcc_declaration:  /* A decl node */
966       return d_kind;
967     case tcc_type:  /* a type node */
968       return t_kind;
969     case tcc_statement:  /* an expression with side effects */
970       return s_kind;
971     case tcc_reference:  /* a reference */
972       return r_kind;
973     case tcc_expression:  /* an expression */
974     case tcc_comparison:  /* a comparison expression */
975     case tcc_unary:  /* a unary arithmetic expression */
976     case tcc_binary:  /* a binary arithmetic expression */
977       return e_kind;
978     case tcc_constant:  /* a constant */
979       return c_kind;
980     case tcc_exceptional:  /* something random, like an identifier.  */
981       switch (code)
982 	{
983 	case IDENTIFIER_NODE:
984 	  return id_kind;
985 	case TREE_VEC:
986 	  return vec_kind;
987 	case TREE_BINFO:
988 	  return binfo_kind;
989 	case SSA_NAME:
990 	  return ssa_name_kind;
991 	case BLOCK:
992 	  return b_kind;
993 	case CONSTRUCTOR:
994 	  return constr_kind;
995 	case OMP_CLAUSE:
996 	  return omp_clause_kind;
997 	default:
998 	  return x_kind;
999 	}
1000       break;
1001     case tcc_vl_exp:
1002       return e_kind;
1003     default:
1004       gcc_unreachable ();
1005     }
1006 }
1007 
1008 /* Record interesting allocation statistics for a tree node with CODE
1009    and LENGTH.  */
1010 
1011 static void
1012 record_node_allocation_statistics (enum tree_code code, size_t length)
1013 {
1014   if (!GATHER_STATISTICS)
1015     return;
1016 
1017   tree_node_kind kind = get_stats_node_kind (code);
1018 
1019   tree_code_counts[(int) code]++;
1020   tree_node_counts[(int) kind]++;
1021   tree_node_sizes[(int) kind] += length;
1022 }
1023 
1024 /* Allocate and return a new UID from the DECL_UID namespace.  */
1025 
1026 int
1027 allocate_decl_uid (void)
1028 {
1029   return next_decl_uid++;
1030 }
1031 
1032 /* Return a newly allocated node of code CODE.  For decl and type
1033    nodes, some other fields are initialized.  The rest of the node is
1034    initialized to zero.  This function cannot be used for TREE_VEC,
1035    INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1036    tree_code_size.
1037 
1038    Achoo!  I got a code in the node.  */
1039 
1040 tree
1041 make_node (enum tree_code code MEM_STAT_DECL)
1042 {
1043   tree t;
1044   enum tree_code_class type = TREE_CODE_CLASS (code);
1045   size_t length = tree_code_size (code);
1046 
1047   record_node_allocation_statistics (code, length);
1048 
1049   t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1050   TREE_SET_CODE (t, code);
1051 
1052   switch (type)
1053     {
1054     case tcc_statement:
1055       if (code != DEBUG_BEGIN_STMT)
1056 	TREE_SIDE_EFFECTS (t) = 1;
1057       break;
1058 
1059     case tcc_declaration:
1060       if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1061 	{
1062 	  if (code == FUNCTION_DECL)
1063 	    {
1064 	      SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
1065 	      SET_DECL_MODE (t, FUNCTION_MODE);
1066 	    }
1067 	  else
1068 	    SET_DECL_ALIGN (t, 1);
1069 	}
1070       DECL_SOURCE_LOCATION (t) = input_location;
1071       if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1072 	DECL_UID (t) = --next_debug_decl_uid;
1073       else
1074 	{
1075 	  DECL_UID (t) = allocate_decl_uid ();
1076 	  SET_DECL_PT_UID (t, -1);
1077 	}
1078       if (TREE_CODE (t) == LABEL_DECL)
1079 	LABEL_DECL_UID (t) = -1;
1080 
1081       break;
1082 
1083     case tcc_type:
1084       TYPE_UID (t) = next_type_uid++;
1085       SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1086       TYPE_USER_ALIGN (t) = 0;
1087       TYPE_MAIN_VARIANT (t) = t;
1088       TYPE_CANONICAL (t) = t;
1089 
1090       /* Default to no attributes for type, but let target change that.  */
1091       TYPE_ATTRIBUTES (t) = NULL_TREE;
1092       targetm.set_default_type_attributes (t);
1093 
1094       /* We have not yet computed the alias set for this type.  */
1095       TYPE_ALIAS_SET (t) = -1;
1096       break;
1097 
1098     case tcc_constant:
1099       TREE_CONSTANT (t) = 1;
1100       break;
1101 
1102     case tcc_expression:
1103       switch (code)
1104 	{
1105 	case INIT_EXPR:
1106 	case MODIFY_EXPR:
1107 	case VA_ARG_EXPR:
1108 	case PREDECREMENT_EXPR:
1109 	case PREINCREMENT_EXPR:
1110 	case POSTDECREMENT_EXPR:
1111 	case POSTINCREMENT_EXPR:
1112 	  /* All of these have side-effects, no matter what their
1113 	     operands are.  */
1114 	  TREE_SIDE_EFFECTS (t) = 1;
1115 	  break;
1116 
1117 	default:
1118 	  break;
1119 	}
1120       break;
1121 
1122     case tcc_exceptional:
1123       switch (code)
1124         {
1125 	case TARGET_OPTION_NODE:
1126 	  TREE_TARGET_OPTION(t)
1127 			    = ggc_cleared_alloc<struct cl_target_option> ();
1128 	  break;
1129 
1130 	case OPTIMIZATION_NODE:
1131 	  TREE_OPTIMIZATION (t)
1132 			    = ggc_cleared_alloc<struct cl_optimization> ();
1133 	  break;
1134 
1135 	default:
1136 	  break;
1137 	}
1138       break;
1139 
1140     default:
1141       /* Other classes need no special treatment.  */
1142       break;
1143     }
1144 
1145   return t;
1146 }
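
/* Usage sketch (illustrative, not exercised here): building a fresh type
   node with

     tree t = make_node (RECORD_TYPE);

   yields a zeroed node with a new TYPE_UID, TYPE_ALIGN of BITS_PER_UNIT,
   and TYPE_MAIN_VARIANT and TYPE_CANONICAL pointing back to the node
   itself, as set up in the tcc_type case above.  */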
1147 
1148 /* Free tree node.  */
1149 
1150 void
1151 free_node (tree node)
1152 {
1153   enum tree_code code = TREE_CODE (node);
1154   if (GATHER_STATISTICS)
1155     {
1156       enum tree_node_kind kind = get_stats_node_kind (code);
1157 
1158       gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
1159       gcc_checking_assert (tree_node_counts[(int) kind] != 0);
1160       gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));
1161 
1162       tree_code_counts[(int) TREE_CODE (node)]--;
1163       tree_node_counts[(int) kind]--;
1164       tree_node_sizes[(int) kind] -= tree_size (node);
1165     }
1166   if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1167     vec_free (CONSTRUCTOR_ELTS (node));
1168   else if (code == BLOCK)
1169     vec_free (BLOCK_NONLOCALIZED_VARS (node));
1170   else if (code == TREE_BINFO)
1171     vec_free (BINFO_BASE_ACCESSES (node));
1172   else if (code == OPTIMIZATION_NODE)
1173     cl_optimization_option_free (TREE_OPTIMIZATION (node));
1174   else if (code == TARGET_OPTION_NODE)
1175     cl_target_option_free (TREE_TARGET_OPTION (node));
1176   ggc_free (node);
1177 }
1178 
1179 /* Return a new node with the same contents as NODE except that its
1180    TREE_CHAIN, if it has one, is zero and it has a fresh uid.  */
1181 
1182 tree
1183 copy_node (tree node MEM_STAT_DECL)
1184 {
1185   tree t;
1186   enum tree_code code = TREE_CODE (node);
1187   size_t length;
1188 
1189   gcc_assert (code != STATEMENT_LIST);
1190 
1191   length = tree_size (node);
1192   record_node_allocation_statistics (code, length);
1193   t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1194   memcpy (t, node, length);
1195 
1196   if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1197     TREE_CHAIN (t) = 0;
1198   TREE_ASM_WRITTEN (t) = 0;
1199   TREE_VISITED (t) = 0;
1200 
1201   if (TREE_CODE_CLASS (code) == tcc_declaration)
1202     {
1203       if (code == DEBUG_EXPR_DECL)
1204 	DECL_UID (t) = --next_debug_decl_uid;
1205       else
1206 	{
1207 	  DECL_UID (t) = allocate_decl_uid ();
1208 	  if (DECL_PT_UID_SET_P (node))
1209 	    SET_DECL_PT_UID (t, DECL_PT_UID (node));
1210 	}
1211       if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
1212 	  && DECL_HAS_VALUE_EXPR_P (node))
1213 	{
1214 	  SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1215 	  DECL_HAS_VALUE_EXPR_P (t) = 1;
1216 	}
1217       /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
1218       if (VAR_P (node))
1219 	{
1220 	  DECL_HAS_DEBUG_EXPR_P (t) = 0;
1221 	  t->decl_with_vis.symtab_node = NULL;
1222 	}
1223       if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
1224 	{
1225 	  SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1226 	  DECL_HAS_INIT_PRIORITY_P (t) = 1;
1227 	}
1228       if (TREE_CODE (node) == FUNCTION_DECL)
1229 	{
1230 	  DECL_STRUCT_FUNCTION (t) = NULL;
1231 	  t->decl_with_vis.symtab_node = NULL;
1232 	}
1233     }
1234   else if (TREE_CODE_CLASS (code) == tcc_type)
1235     {
1236       TYPE_UID (t) = next_type_uid++;
1237       /* The following is so that the debug code for
1238 	 the copy is different from the original type.
1239 	 The two statements usually duplicate each other
1240 	 (because they clear fields of the same union),
1241 	 but the optimizer should catch that.  */
1242       TYPE_SYMTAB_ADDRESS (t) = 0;
1243       TYPE_SYMTAB_DIE (t) = 0;
1244 
1245       /* Do not copy the values cache.  */
1246       if (TYPE_CACHED_VALUES_P (t))
1247 	{
1248 	  TYPE_CACHED_VALUES_P (t) = 0;
1249 	  TYPE_CACHED_VALUES (t) = NULL_TREE;
1250 	}
1251     }
1252     else if (code == TARGET_OPTION_NODE)
1253       {
1254 	TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1255 	memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1256 		sizeof (struct cl_target_option));
1257       }
1258     else if (code == OPTIMIZATION_NODE)
1259       {
1260 	TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1261 	memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1262 		sizeof (struct cl_optimization));
1263       }
1264 
1265   return t;
1266 }
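
/* Usage sketch (illustrative): copying a declaration with

     tree copy = copy_node (decl);

   produces a node with the same contents but a fresh DECL_UID, cleared
   TREE_CHAIN, TREE_ASM_WRITTEN and TREE_VISITED flags, and (for variables
   and functions) no associated symtab node, as handled above.  */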
1267 
1268 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1269    For example, this can copy a list made of TREE_LIST nodes.  */
1270 
1271 tree
1272 copy_list (tree list)
1273 {
1274   tree head;
1275   tree prev, next;
1276 
1277   if (list == 0)
1278     return 0;
1279 
1280   head = prev = copy_node (list);
1281   next = TREE_CHAIN (list);
1282   while (next)
1283     {
1284       TREE_CHAIN (prev) = copy_node (next);
1285       prev = TREE_CHAIN (prev);
1286       next = TREE_CHAIN (next);
1287     }
1288   return head;
1289 }
1290 
1291 
1292 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1293    INTEGER_CST with value CST and type TYPE.   */
1294 
1295 static unsigned int
1296 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1297 {
1298   gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1299   /* We need extra HWIs if CST is an unsigned integer with its
1300      upper bit set.  */
1301   if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1302     return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1303   return cst.get_len ();
1304 }
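
/* Worked example (illustrative): for a 64-bit unsigned type, the value
   0x8000000000000000 has its upper bit set, so although it fits in one
   HOST_WIDE_INT it needs 64 / HOST_BITS_PER_WIDE_INT + 1 == 2 units on a
   host with a 64-bit HOST_WIDE_INT; the extra unit is zero, so the stored
   extended value reads as non-negative.  */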
1305 
1306 /* Return a new INTEGER_CST with value CST and type TYPE.  */
1307 
1308 static tree
1309 build_new_int_cst (tree type, const wide_int &cst)
1310 {
1311   unsigned int len = cst.get_len ();
1312   unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1313   tree nt = make_int_cst (len, ext_len);
1314 
1315   if (len < ext_len)
1316     {
1317       --ext_len;
1318       TREE_INT_CST_ELT (nt, ext_len)
1319 	= zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1320       for (unsigned int i = len; i < ext_len; ++i)
1321 	TREE_INT_CST_ELT (nt, i) = -1;
1322     }
1323   else if (TYPE_UNSIGNED (type)
1324 	   && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1325     {
1326       len--;
1327       TREE_INT_CST_ELT (nt, len)
1328 	= zext_hwi (cst.elt (len),
1329 		    cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1330     }
1331 
1332   for (unsigned int i = 0; i < len; i++)
1333     TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1334   TREE_TYPE (nt) = type;
1335   return nt;
1336 }
1337 
1338 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE.  */
1339 
1340 static tree
1341 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1342 			CXX_MEM_STAT_INFO)
1343 {
1344   size_t length = sizeof (struct tree_poly_int_cst);
1345   record_node_allocation_statistics (POLY_INT_CST, length);
1346 
1347   tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1348 
1349   TREE_SET_CODE (t, POLY_INT_CST);
1350   TREE_CONSTANT (t) = 1;
1351   TREE_TYPE (t) = type;
1352   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1353     POLY_INT_CST_COEFF (t, i) = coeffs[i];
1354   return t;
1355 }
1356 
1357 /* Create a constant tree that contains CST sign-extended to TYPE.  */
1358 
1359 tree
1360 build_int_cst (tree type, poly_int64 cst)
1361 {
1362   /* Support legacy code.  */
1363   if (!type)
1364     type = integer_type_node;
1365 
1366   return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1367 }
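
/* Usage sketch (illustrative): because small values are cached per type
   (see wide_int_to_tree_1 below), two independent calls such as

     tree a = build_int_cst (integer_type_node, 1);
     tree b = build_int_cst (integer_type_node, 1);

   return the same shared INTEGER_CST node; larger values are instead
   shared through int_cst_hash_table.  */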
1368 
1369 /* Create a constant tree that contains CST zero-extended to TYPE.  */
1370 
1371 tree
1372 build_int_cstu (tree type, poly_uint64 cst)
1373 {
1374   return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1375 }
1376 
1377 /* Create a constant tree that contains CST sign-extended to TYPE.  */
1378 
1379 tree
1380 build_int_cst_type (tree type, poly_int64 cst)
1381 {
1382   gcc_assert (type);
1383   return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1384 }
1385 
1386 /* Constructs a tree in type TYPE with value given by CST.  Signedness
1387    of CST is assumed to be the same as the signedness of TYPE.  */
1388 
1389 tree
1390 double_int_to_tree (tree type, double_int cst)
1391 {
1392   return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1393 }
1394 
1395 /* We force the wide_int CST to the range of the type TYPE by sign or
1396    zero extending it.  OVERFLOWABLE indicates if we are interested in
1397    overflow of the value, when >0 we are only interested in signed
1398    overflow, for <0 we are interested in any overflow.  OVERFLOWED
1399    indicates whether overflow has already occurred.  We force CST's
1400    value to be within the range of TYPE (by setting to 0 or 1 all
1401    the bits outside the type's range).  We set TREE_OVERFLOW if
1402         OVERFLOWED is nonzero,
1403         or OVERFLOWABLE is >0 and signed overflow occurs,
1404         or OVERFLOWABLE is <0 and any overflow occurs.
1406    We return a new tree node for the extended wide_int.  The node
1407    is shared if no overflow flags are set.  */
1408 
1409 
1410 tree
1411 force_fit_type (tree type, const poly_wide_int_ref &cst,
1412 		int overflowable, bool overflowed)
1413 {
1414   signop sign = TYPE_SIGN (type);
1415 
1416   /* If we need to set overflow flags, return a new unshared node.  */
1417   if (overflowed || !wi::fits_to_tree_p (cst, type))
1418     {
1419       if (overflowed
1420 	  || overflowable < 0
1421 	  || (overflowable > 0 && sign == SIGNED))
1422 	{
1423 	  poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
1424 						   sign);
1425 	  tree t;
1426 	  if (tmp.is_constant ())
1427 	    t = build_new_int_cst (type, tmp.coeffs[0]);
1428 	  else
1429 	    {
1430 	      tree coeffs[NUM_POLY_INT_COEFFS];
1431 	      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1432 		{
1433 		  coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
1434 		  TREE_OVERFLOW (coeffs[i]) = 1;
1435 		}
1436 	      t = build_new_poly_int_cst (type, coeffs);
1437 	    }
1438 	  TREE_OVERFLOW (t) = 1;
1439 	  return t;
1440 	}
1441     }
1442 
1443   /* Else build a shared node.  */
1444   return wide_int_to_tree (type, cst);
1445 }
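
/* Worked example (illustrative): forcing the value 300 into an 8-bit
   unsigned type does not fit, so with OVERFLOWABLE < 0 (or OVERFLOWED set)
   a fresh node with value 44 and TREE_OVERFLOW set is returned, while with
   OVERFLOWABLE > 0 the unsigned wrap-around is not treated as overflow and
   the shared constant 44 is returned instead.  */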
1446 
1447 /* These are the hash table functions for the hash table of INTEGER_CST
1448    nodes of a sizetype.  */
1449 
1450 /* Return the hash code X, an INTEGER_CST.  */
1451 
1452 hashval_t
1453 int_cst_hasher::hash (tree x)
1454 {
1455   const_tree const t = x;
1456   hashval_t code = TYPE_UID (TREE_TYPE (t));
1457   int i;
1458 
1459   for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1460     code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1461 
1462   return code;
1463 }
1464 
1465 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1466    is the same as that given by *Y, which is the same.  */
1467 
1468 bool
1469 int_cst_hasher::equal (tree x, tree y)
1470 {
1471   const_tree const xt = x;
1472   const_tree const yt = y;
1473 
1474   if (TREE_TYPE (xt) != TREE_TYPE (yt)
1475       || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1476       || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1477     return false;
1478 
1479   for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1480     if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1481       return false;
1482 
1483   return true;
1484 }
1485 
1486 /* Create an INT_CST node of TYPE and value CST.
1487    The returned node is always shared.  For small integers we use a
1488    per-type vector cache, for larger ones we use a single hash table.
1489    The value is extended from its precision according to the sign of
1490    the type to be a multiple of HOST_BITS_PER_WIDE_INT.  This defines
1491    the upper bits and ensures that hashing and value equality based
1492    upon the underlying HOST_WIDE_INTs works without masking.  */
1493 
1494 static tree
1495 wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
1496 {
1497   tree t;
1498   int ix = -1;
1499   int limit = 0;
1500 
1501   gcc_assert (type);
1502   unsigned int prec = TYPE_PRECISION (type);
1503   signop sgn = TYPE_SIGN (type);
1504 
1505   /* Verify that everything is canonical.  */
1506   int l = pcst.get_len ();
1507   if (l > 1)
1508     {
1509       if (pcst.elt (l - 1) == 0)
1510 	gcc_checking_assert (pcst.elt (l - 2) < 0);
1511       if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
1512 	gcc_checking_assert (pcst.elt (l - 2) >= 0);
1513     }
1514 
1515   wide_int cst = wide_int::from (pcst, prec, sgn);
1516   unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1517 
1518   if (ext_len == 1)
1519     {
1520       /* We just need to store a single HOST_WIDE_INT.  */
1521       HOST_WIDE_INT hwi;
1522       if (TYPE_UNSIGNED (type))
1523 	hwi = cst.to_uhwi ();
1524       else
1525 	hwi = cst.to_shwi ();
1526 
1527       switch (TREE_CODE (type))
1528 	{
1529 	case NULLPTR_TYPE:
1530 	  gcc_assert (hwi == 0);
1531 	  /* Fallthru.  */
1532 
1533 	case POINTER_TYPE:
1534 	case REFERENCE_TYPE:
1535 	  /* Cache NULL pointer and zero bounds.  */
1536 	  if (hwi == 0)
1537 	    {
1538 	      limit = 1;
1539 	      ix = 0;
1540 	    }
1541 	  break;
1542 
1543 	case BOOLEAN_TYPE:
1544 	  /* Cache false or true.  */
1545 	  limit = 2;
1546 	  if (IN_RANGE (hwi, 0, 1))
1547 	    ix = hwi;
1548 	  break;
1549 
1550 	case INTEGER_TYPE:
1551 	case OFFSET_TYPE:
1552 	  if (TYPE_SIGN (type) == UNSIGNED)
1553 	    {
1554 	      /* Cache [0, N).  */
1555 	      limit = param_integer_share_limit;
1556 	      if (IN_RANGE (hwi, 0, param_integer_share_limit - 1))
1557 		ix = hwi;
1558 	    }
1559 	  else
1560 	    {
1561 	      /* Cache [-1, N).  */
1562 	      limit = param_integer_share_limit + 1;
1563 	      if (IN_RANGE (hwi, -1, param_integer_share_limit - 1))
1564 		ix = hwi + 1;
1565 	    }
1566 	  break;
1567 
1568 	case ENUMERAL_TYPE:
1569 	  break;
1570 
1571 	default:
1572 	  gcc_unreachable ();
1573 	}
1574 
1575       if (ix >= 0)
1576 	{
1577 	  /* Look for it in the type's vector of small shared ints.  */
1578 	  if (!TYPE_CACHED_VALUES_P (type))
1579 	    {
1580 	      TYPE_CACHED_VALUES_P (type) = 1;
1581 	      TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1582 	    }
1583 
1584 	  t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1585 	  if (t)
1586 	    /* Make sure no one is clobbering the shared constant.  */
1587 	    gcc_checking_assert (TREE_TYPE (t) == type
1588 				 && TREE_INT_CST_NUNITS (t) == 1
1589 				 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1590 				 && TREE_INT_CST_EXT_NUNITS (t) == 1
1591 				 && TREE_INT_CST_ELT (t, 0) == hwi);
1592 	  else
1593 	    {
1594 	      /* Create a new shared int.  */
1595 	      t = build_new_int_cst (type, cst);
1596 	      TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1597 	    }
1598 	}
1599       else
1600 	{
1601 	  /* Use the cache of larger shared ints, using int_cst_node as
1602 	     a temporary.  */
1603 
1604 	  TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1605 	  TREE_TYPE (int_cst_node) = type;
1606 
1607 	  tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1608 	  t = *slot;
1609 	  if (!t)
1610 	    {
1611 	      /* Insert this one into the hash table.  */
1612 	      t = int_cst_node;
1613 	      *slot = t;
1614 	      /* Make a new node for next time round.  */
1615 	      int_cst_node = make_int_cst (1, 1);
1616 	    }
1617 	}
1618     }
1619   else
1620     {
1621       /* The value either hashes properly or we drop it on the floor
1622 	 for the gc to take care of.  There will not be enough of them
1623 	 to worry about.  */
1624 
1625       tree nt = build_new_int_cst (type, cst);
1626       tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1627       t = *slot;
1628       if (!t)
1629 	{
1630 	  /* Insert this one into the hash table.  */
1631 	  t = nt;
1632 	  *slot = t;
1633 	}
1634       else
1635 	ggc_free (nt);
1636     }
1637 
1638   return t;
1639 }
1640 
1641 hashval_t
1642 poly_int_cst_hasher::hash (tree t)
1643 {
1644   inchash::hash hstate;
1645 
1646   hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1647   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1648     hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1649 
1650   return hstate.end ();
1651 }
1652 
1653 bool
1654 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1655 {
1656   if (TREE_TYPE (x) != y.first)
1657     return false;
1658   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1659     if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1660       return false;
1661   return true;
1662 }
1663 
1664 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1665    The elements must also have type TYPE.  */
1666 
1667 tree
1668 build_poly_int_cst (tree type, const poly_wide_int_ref &values)
1669 {
1670   unsigned int prec = TYPE_PRECISION (type);
1671   gcc_assert (prec <= values.coeffs[0].get_precision ());
1672   poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);
1673 
1674   inchash::hash h;
1675   h.add_int (TYPE_UID (type));
1676   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1677     h.add_wide_int (c.coeffs[i]);
1678   poly_int_cst_hasher::compare_type comp (type, &c);
1679   tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
1680 							     INSERT);
1681   if (*slot == NULL_TREE)
1682     {
1683       tree coeffs[NUM_POLY_INT_COEFFS];
1684       for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1685 	coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
1686       *slot = build_new_poly_int_cst (type, coeffs);
1687     }
1688   return *slot;
1689 }
1690 
1691 /* Create a constant tree with value VALUE in type TYPE.  */
1692 
1693 tree
1694 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1695 {
1696   if (value.is_constant ())
1697     return wide_int_to_tree_1 (type, value.coeffs[0]);
1698   return build_poly_int_cst (type, value);
1699 }
1700 
1701 void
1702 cache_integer_cst (tree t)
1703 {
1704   tree type = TREE_TYPE (t);
1705   int ix = -1;
1706   int limit = 0;
1707   int prec = TYPE_PRECISION (type);
1708 
1709   gcc_assert (!TREE_OVERFLOW (t));
1710 
1711   switch (TREE_CODE (type))
1712     {
1713     case NULLPTR_TYPE:
1714       gcc_assert (integer_zerop (t));
1715       /* Fallthru.  */
1716 
1717     case POINTER_TYPE:
1718     case REFERENCE_TYPE:
1719       /* Cache NULL pointer.  */
1720       if (integer_zerop (t))
1721 	{
1722 	  limit = 1;
1723 	  ix = 0;
1724 	}
1725       break;
1726 
1727     case BOOLEAN_TYPE:
1728       /* Cache false or true.  */
1729       limit = 2;
1730       if (wi::ltu_p (wi::to_wide (t), 2))
1731 	ix = TREE_INT_CST_ELT (t, 0);
1732       break;
1733 
1734     case INTEGER_TYPE:
1735     case OFFSET_TYPE:
1736       if (TYPE_UNSIGNED (type))
1737 	{
1738 	  /* Cache 0..N */
1739 	  limit = param_integer_share_limit;
1740 
1741 	  /* This is a little hokey, but if the prec is smaller than
1742 	     what is necessary to hold param_integer_share_limit, then the
1743 	     obvious test will not get the correct answer.  */
1744 	  if (prec < HOST_BITS_PER_WIDE_INT)
1745 	    {
1746 	      if (tree_to_uhwi (t)
1747 		  < (unsigned HOST_WIDE_INT) param_integer_share_limit)
1748 		ix = tree_to_uhwi (t);
1749 	    }
1750 	  else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1751 	    ix = tree_to_uhwi (t);
1752 	}
1753       else
1754 	{
1755 	  /* Cache -1..N */
1756 	  limit = param_integer_share_limit + 1;
1757 
1758 	  if (integer_minus_onep (t))
1759 	    ix = 0;
1760 	  else if (!wi::neg_p (wi::to_wide (t)))
1761 	    {
1762 	      if (prec < HOST_BITS_PER_WIDE_INT)
1763 		{
1764 		  if (tree_to_shwi (t) < param_integer_share_limit)
1765 		    ix = tree_to_shwi (t) + 1;
1766 		}
1767 	      else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1768 		ix = tree_to_shwi (t) + 1;
1769 	    }
1770 	}
1771       break;
1772 
1773     case ENUMERAL_TYPE:
1774       break;
1775 
1776     default:
1777       gcc_unreachable ();
1778     }
1779 
1780   if (ix >= 0)
1781     {
1782       /* Look for it in the type's vector of small shared ints.  */
1783       if (!TYPE_CACHED_VALUES_P (type))
1784 	{
1785 	  TYPE_CACHED_VALUES_P (type) = 1;
1786 	  TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1787 	}
1788 
1789       gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1790       TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1791     }
1792   else
1793     {
1794       /* Use the cache of larger shared ints.  */
1795       tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1796       /* If there is already an entry for the number verify it's the
1797          same.  */
1798       if (*slot)
1799 	gcc_assert (wi::to_wide (tree (*slot)) == wi::to_wide (t));
1800       else
1801 	/* Otherwise insert this one into the hash table.  */
1802 	*slot = t;
1803     }
1804 }
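/* Sharing example (sketch): once a small value has been cached for a
   type, later requests return the identical node, e.g.

     tree a = build_int_cst (unsigned_type_node, 7);
     tree b = build_int_cst (unsigned_type_node, 7);
     gcc_checking_assert (a == b);

   assuming 7 is below param_integer_share_limit so that both lookups hit
   TYPE_CACHED_VALUES (unsigned_type_node).  */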
1805 
1806 
1807 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
1808    and the rest are zeros.  */
1809 
1810 tree
1811 build_low_bits_mask (tree type, unsigned bits)
1812 {
1813   gcc_assert (bits <= TYPE_PRECISION (type));
1814 
1815   return wide_int_to_tree (type, wi::mask (bits, false,
1816 					   TYPE_PRECISION (type)));
1817 }
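/* For example (sketch): with a 32-bit unsigned TYPE,
   build_low_bits_mask (type, 4) returns the constant 0xf and
   build_low_bits_mask (type, 32) returns an all-ones constant.  */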
1818 
1819 /* Checks that X is an integer constant that can be expressed in (unsigned)
1820    HOST_WIDE_INT without loss of precision.  */
1821 
1822 bool
1823 cst_and_fits_in_hwi (const_tree x)
1824 {
1825   return (TREE_CODE (x) == INTEGER_CST
1826 	  && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
1827 }
1828 
1829 /* Build a newly constructed VECTOR_CST with the given values of
1830    (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN.  */
1831 
1832 tree
1833 make_vector (unsigned log2_npatterns,
1834 	     unsigned int nelts_per_pattern MEM_STAT_DECL)
1835 {
1836   gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
1837   tree t;
1838   unsigned npatterns = 1 << log2_npatterns;
1839   unsigned encoded_nelts = npatterns * nelts_per_pattern;
1840   unsigned length = (sizeof (struct tree_vector)
1841 		     + (encoded_nelts - 1) * sizeof (tree));
1842 
1843   record_node_allocation_statistics (VECTOR_CST, length);
1844 
1845   t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1846 
1847   TREE_SET_CODE (t, VECTOR_CST);
1848   TREE_CONSTANT (t) = 1;
1849   VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
1850   VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
1851 
1852   return t;
1853 }
1854 
1855 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1856    are extracted from V, a vector of CONSTRUCTOR_ELT.  */
1857 
1858 tree
1859 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1860 {
1861   if (vec_safe_length (v) == 0)
1862     return build_zero_cst (type);
1863 
1864   unsigned HOST_WIDE_INT idx, nelts;
1865   tree value;
1866 
1867   /* We can't construct a VECTOR_CST for a variable number of elements.  */
1868   nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
1869   tree_vector_builder vec (type, nelts, 1);
1870   FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1871     {
1872       if (TREE_CODE (value) == VECTOR_CST)
1873 	{
1874 	  /* If NELTS is constant then this must be too.  */
1875 	  unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
1876 	  for (unsigned i = 0; i < sub_nelts; ++i)
1877 	    vec.quick_push (VECTOR_CST_ELT (value, i));
1878 	}
1879       else
1880 	vec.quick_push (value);
1881     }
1882   while (vec.length () < nelts)
1883     vec.quick_push (build_zero_cst (TREE_TYPE (type)));
1884 
1885   return vec.build ();
1886 }
1887 
1888 /* Build a vector of type VECTYPE where all the elements are SCs.  */
1889 tree
1890 build_vector_from_val (tree vectype, tree sc)
1891 {
1892   unsigned HOST_WIDE_INT i, nunits;
1893 
1894   if (sc == error_mark_node)
1895     return sc;
1896 
1897   /* Verify that the vector type is suitable for SC.  Note that there
1898      is some inconsistency in the type-system with respect to restrict
1899      qualifications of pointers.  Vector types always have a main-variant
1900      element type and the qualification is applied to the vector-type.
1901      So TREE_TYPE (vector-type) does not return a properly qualified
1902      vector element-type.  */
1903   gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1904 					   TREE_TYPE (vectype)));
1905 
1906   if (CONSTANT_CLASS_P (sc))
1907     {
1908       tree_vector_builder v (vectype, 1, 1);
1909       v.quick_push (sc);
1910       return v.build ();
1911     }
1912   else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
1913     return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
1914   else
1915     {
1916       vec<constructor_elt, va_gc> *v;
1917       vec_alloc (v, nunits);
1918       for (i = 0; i < nunits; ++i)
1919 	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1920       return build_constructor (vectype, v);
1921     }
1922 }
1923 
1924 /* If TYPE is not a vector type, just return SC, otherwise return
1925    build_vector_from_val (TYPE, SC).  */
1926 
1927 tree
1928 build_uniform_cst (tree type, tree sc)
1929 {
1930   if (!VECTOR_TYPE_P (type))
1931     return sc;
1932 
1933   return build_vector_from_val (type, sc);
1934 }
1935 
1936 /* Build a vector series of type TYPE in which element I has the value
1937    BASE + I * STEP.  The result is a constant if BASE and STEP are constant
1938    and a VEC_SERIES_EXPR otherwise.  */
1939 
1940 tree
1941 build_vec_series (tree type, tree base, tree step)
1942 {
1943   if (integer_zerop (step))
1944     return build_vector_from_val (type, base);
1945   if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
1946     {
1947       tree_vector_builder builder (type, 1, 3);
1948       tree elt1 = wide_int_to_tree (TREE_TYPE (base),
1949 				    wi::to_wide (base) + wi::to_wide (step));
1950       tree elt2 = wide_int_to_tree (TREE_TYPE (base),
1951 				    wi::to_wide (elt1) + wi::to_wide (step));
1952       builder.quick_push (base);
1953       builder.quick_push (elt1);
1954       builder.quick_push (elt2);
1955       return builder.build ();
1956     }
1957   return build2 (VEC_SERIES_EXPR, type, base, step);
1958 }
1959 
1960 /* Return a vector with the same number of units and number of bits
1961    as VEC_TYPE, but in which the elements are a linear series of unsigned
1962    integers { BASE, BASE + STEP, BASE + STEP * 2, ... }.  */
1963 
1964 tree
1965 build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
1966 {
1967   tree index_vec_type = vec_type;
1968   tree index_elt_type = TREE_TYPE (vec_type);
1969   poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
1970   if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
1971     {
1972       index_elt_type = build_nonstandard_integer_type
1973 	(GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
1974       index_vec_type = build_vector_type (index_elt_type, nunits);
1975     }
1976 
1977   tree_vector_builder v (index_vec_type, 1, 3);
1978   for (unsigned int i = 0; i < 3; ++i)
1979     v.quick_push (build_int_cstu (index_elt_type, base + i * step));
1980   return v.build ();
1981 }
1982 
1983 /* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
1984    elements are A and the rest are B.  */
1985 
1986 tree
1987 build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
1988 {
1989   gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
1990   unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
1991   /* Optimize the constant case.  */
1992   if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
1993     count /= 2;
1994   tree_vector_builder builder (vec_type, count, 2);
1995   for (unsigned int i = 0; i < count * 2; ++i)
1996     builder.quick_push (i < num_a ? a : b);
1997   return builder.build ();
1998 }
1999 
2000 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
2001    calculate TREE_CONSTANT and TREE_SIDE_EFFECTS.  */
2002 
2003 void
2004 recompute_constructor_flags (tree c)
2005 {
2006   unsigned int i;
2007   tree val;
2008   bool constant_p = true;
2009   bool side_effects_p = false;
2010   vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2011 
2012   FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2013     {
2014       /* Mostly ctors will have elts that don't have side-effects, so
2015 	 the usual case is to scan all the elements.  Hence a single
2016 	 loop for both const and side effects, rather than one loop
2017 	 each (with early outs).  */
2018       if (!TREE_CONSTANT (val))
2019 	constant_p = false;
2020       if (TREE_SIDE_EFFECTS (val))
2021 	side_effects_p = true;
2022     }
2023 
2024   TREE_SIDE_EFFECTS (c) = side_effects_p;
2025   TREE_CONSTANT (c) = constant_p;
2026 }
2027 
2028 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2029    CONSTRUCTOR C.  */
2030 
2031 void
2032 verify_constructor_flags (tree c)
2033 {
2034   unsigned int i;
2035   tree val;
2036   bool constant_p = TREE_CONSTANT (c);
2037   bool side_effects_p = TREE_SIDE_EFFECTS (c);
2038   vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2039 
2040   FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2041     {
2042       if (constant_p && !TREE_CONSTANT (val))
2043 	internal_error ("non-constant element in constant CONSTRUCTOR");
2044       if (!side_effects_p && TREE_SIDE_EFFECTS (val))
2045 	internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2046     }
2047 }
2048 
2049 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2050    are in the vec pointed to by VALS.  */
2051 tree
2052 build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
2053 {
2054   tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);
2055 
2056   TREE_TYPE (c) = type;
2057   CONSTRUCTOR_ELTS (c) = vals;
2058 
2059   recompute_constructor_flags (c);
2060 
2061   return c;
2062 }
2063 
2064 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2065    INDEX and VALUE.  */
2066 tree
2067 build_constructor_single (tree type, tree index, tree value)
2068 {
2069   vec<constructor_elt, va_gc> *v;
2070   constructor_elt elt = {index, value};
2071 
2072   vec_alloc (v, 1);
2073   v->quick_push (elt);
2074 
2075   return build_constructor (type, v);
2076 }
2077 
2078 
2079 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2080    are in a list pointed to by VALS.  */
2081 tree
2082 build_constructor_from_list (tree type, tree vals)
2083 {
2084   tree t;
2085   vec<constructor_elt, va_gc> *v = NULL;
2086 
2087   if (vals)
2088     {
2089       vec_alloc (v, list_length (vals));
2090       for (t = vals; t; t = TREE_CHAIN (t))
2091 	CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2092     }
2093 
2094   return build_constructor (type, v);
2095 }
2096 
2097 /* Return a new CONSTRUCTOR node whose type is TYPE.  NELTS is the number
2098    of elements, provided as index/value pairs.  */
2099 
2100 tree
2101 build_constructor_va (tree type, int nelts, ...)
2102 {
2103   vec<constructor_elt, va_gc> *v = NULL;
2104   va_list p;
2105 
2106   va_start (p, nelts);
2107   vec_alloc (v, nelts);
2108   while (nelts--)
2109     {
2110       tree index = va_arg (p, tree);
2111       tree value = va_arg (p, tree);
2112       CONSTRUCTOR_APPEND_ELT (v, index, value);
2113     }
2114   va_end (p);
2115   return build_constructor (type, v);
2116 }
2117 
2118 /* Return a node of type TYPE for which TREE_CLOBBER_P is true.  */
2119 
2120 tree
2121 build_clobber (tree type)
2122 {
2123   tree clobber = build_constructor (type, NULL);
2124   TREE_THIS_VOLATILE (clobber) = true;
2125   return clobber;
2126 }
2127 
2128 /* Return a new FIXED_CST node whose type is TYPE and value is F.  */
2129 
2130 tree
2131 build_fixed (tree type, FIXED_VALUE_TYPE f)
2132 {
2133   tree v;
2134   FIXED_VALUE_TYPE *fp;
2135 
2136   v = make_node (FIXED_CST);
2137   fp = ggc_alloc<fixed_value> ();
2138   memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2139 
2140   TREE_TYPE (v) = type;
2141   TREE_FIXED_CST_PTR (v) = fp;
2142   return v;
2143 }
2144 
2145 /* Return a new REAL_CST node whose type is TYPE and value is D.  */
2146 
2147 tree
2148 build_real (tree type, REAL_VALUE_TYPE d)
2149 {
2150   tree v;
2151   REAL_VALUE_TYPE *dp;
2152   int overflow = 0;
2153 
2154   /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2155      Consider doing it via real_convert now.  */
2156 
2157   v = make_node (REAL_CST);
2158   dp = ggc_alloc<real_value> ();
2159   memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
2160 
2161   TREE_TYPE (v) = type;
2162   TREE_REAL_CST_PTR (v) = dp;
2163   TREE_OVERFLOW (v) = overflow;
2164   return v;
2165 }
2166 
2167 /* Like build_real, but first truncate D to the type.  */
2168 
2169 tree
2170 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2171 {
2172   return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2173 }
2174 
2175 /* Return a new REAL_CST node whose type is TYPE
2176    and whose value is the integer value of the INTEGER_CST node I.  */
2177 
2178 REAL_VALUE_TYPE
2179 real_value_from_int_cst (const_tree type, const_tree i)
2180 {
2181   REAL_VALUE_TYPE d;
2182 
2183   /* Clear all bits of the real value type so that we can later do
2184      bitwise comparisons to see if two values are the same.  */
2185   memset (&d, 0, sizeof d);
2186 
2187   real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2188 		     TYPE_SIGN (TREE_TYPE (i)));
2189   return d;
2190 }
2191 
2192 /* Given a tree representing an integer constant I, return a tree
2193    representing the same value as a floating-point constant of type TYPE.  */
2194 
2195 tree
2196 build_real_from_int_cst (tree type, const_tree i)
2197 {
2198   tree v;
2199   int overflow = TREE_OVERFLOW (i);
2200 
2201   v = build_real (type, real_value_from_int_cst (type, i));
2202 
2203   TREE_OVERFLOW (v) |= overflow;
2204   return v;
2205 }
2206 
2207 /* Return a newly constructed STRING_CST node whose value is the LEN
2208    characters at STR when STR is nonnull, or all zeros otherwise.
2209    Note that for a C string literal, LEN should include the trailing NUL.
2210    The TREE_TYPE is not initialized.  */
2211 
2212 tree
2213 build_string (unsigned len, const char *str /*= NULL */)
2214 {
2215   /* Do not waste bytes provided by padding of struct tree_string.  */
2216   unsigned size = len + offsetof (struct tree_string, str) + 1;
2217 
2218   record_node_allocation_statistics (STRING_CST, size);
2219 
2220   tree s = (tree) ggc_internal_alloc (size);
2221 
2222   memset (s, 0, sizeof (struct tree_typed));
2223   TREE_SET_CODE (s, STRING_CST);
2224   TREE_CONSTANT (s) = 1;
2225   TREE_STRING_LENGTH (s) = len;
2226   if (str)
2227     memcpy (s->string.str, str, len);
2228   else
2229     memset (s->string.str, 0, len);
2230   s->string.str[len] = '\0';
2231 
2232   return s;
2233 }
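/* For example (sketch), build_string (6, "hello") copies the five
   characters plus the trailing NUL, sets TREE_STRING_LENGTH to 6 and
   leaves TREE_TYPE for the caller to fill in.  */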
2234 
2235 /* Return a newly constructed COMPLEX_CST node whose value is
2236    specified by the real and imaginary parts REAL and IMAG.
2237    Both REAL and IMAG should be constant nodes.  TYPE, if specified,
2238    will be the type of the COMPLEX_CST; otherwise a new type will be made.  */
2239 
2240 tree
2241 build_complex (tree type, tree real, tree imag)
2242 {
2243   gcc_assert (CONSTANT_CLASS_P (real));
2244   gcc_assert (CONSTANT_CLASS_P (imag));
2245 
2246   tree t = make_node (COMPLEX_CST);
2247 
2248   TREE_REALPART (t) = real;
2249   TREE_IMAGPART (t) = imag;
2250   TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2251   TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2252   return t;
2253 }
2254 
2255 /* Build a complex (inf +- 0i), such as for the result of cproj.
2256    TYPE is the complex tree type of the result.  If NEG is true, the
2257    imaginary zero is negative.  */
2258 
2259 tree
2260 build_complex_inf (tree type, bool neg)
2261 {
2262   REAL_VALUE_TYPE rinf, rzero = dconst0;
2263 
2264   real_inf (&rinf);
2265   rzero.sign = neg;
2266   return build_complex (type, build_real (TREE_TYPE (type), rinf),
2267 			build_real (TREE_TYPE (type), rzero));
2268 }
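/* For example (sketch), build_complex_inf (complex_double_type_node, true)
   yields the constant +Inf - 0i, the value cproj produces when the
   imaginary part of an infinite argument is negative.  */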
2269 
2270 /* Return the constant 1 in type TYPE.  If TYPE has several elements, each
2271    element is set to 1.  In particular, this is 1 + i for complex types.  */
2272 
2273 tree
2274 build_each_one_cst (tree type)
2275 {
2276   if (TREE_CODE (type) == COMPLEX_TYPE)
2277     {
2278       tree scalar = build_one_cst (TREE_TYPE (type));
2279       return build_complex (type, scalar, scalar);
2280     }
2281   else
2282     return build_one_cst (type);
2283 }
2284 
2285 /* Return a constant of arithmetic type TYPE which is the
2286    multiplicative identity of the set TYPE.  */
2287 
2288 tree
2289 build_one_cst (tree type)
2290 {
2291   switch (TREE_CODE (type))
2292     {
2293     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2294     case POINTER_TYPE: case REFERENCE_TYPE:
2295     case OFFSET_TYPE:
2296       return build_int_cst (type, 1);
2297 
2298     case REAL_TYPE:
2299       return build_real (type, dconst1);
2300 
2301     case FIXED_POINT_TYPE:
2302       /* We can only generate 1 for accum types.  */
2303       gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2304       return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2305 
2306     case VECTOR_TYPE:
2307       {
2308 	tree scalar = build_one_cst (TREE_TYPE (type));
2309 
2310 	return build_vector_from_val (type, scalar);
2311       }
2312 
2313     case COMPLEX_TYPE:
2314       return build_complex (type,
2315 			    build_one_cst (TREE_TYPE (type)),
2316 			    build_zero_cst (TREE_TYPE (type)));
2317 
2318     default:
2319       gcc_unreachable ();
2320     }
2321 }
2322 
2323 /* Return an integer of type TYPE containing all 1's in as much precision as
2324    it contains, or a complex or vector whose subparts are such integers.  */
2325 
2326 tree
2327 build_all_ones_cst (tree type)
2328 {
2329   if (TREE_CODE (type) == COMPLEX_TYPE)
2330     {
2331       tree scalar = build_all_ones_cst (TREE_TYPE (type));
2332       return build_complex (type, scalar, scalar);
2333     }
2334   else
2335     return build_minus_one_cst (type);
2336 }
2337 
2338 /* Return a constant of arithmetic type TYPE which is the
2339    opposite of the multiplicative identity of the set TYPE.  */
2340 
2341 tree
2342 build_minus_one_cst (tree type)
2343 {
2344   switch (TREE_CODE (type))
2345     {
2346     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2347     case POINTER_TYPE: case REFERENCE_TYPE:
2348     case OFFSET_TYPE:
2349       return build_int_cst (type, -1);
2350 
2351     case REAL_TYPE:
2352       return build_real (type, dconstm1);
2353 
2354     case FIXED_POINT_TYPE:
2355       /* We can only generate -1 for accum types.  */
2356       gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2357       return build_fixed (type,
2358 			  fixed_from_double_int (double_int_minus_one,
2359 						 SCALAR_TYPE_MODE (type)));
2360 
2361     case VECTOR_TYPE:
2362       {
2363 	tree scalar = build_minus_one_cst (TREE_TYPE (type));
2364 
2365 	return build_vector_from_val (type, scalar);
2366       }
2367 
2368     case COMPLEX_TYPE:
2369       return build_complex (type,
2370 			    build_minus_one_cst (TREE_TYPE (type)),
2371 			    build_zero_cst (TREE_TYPE (type)));
2372 
2373     default:
2374       gcc_unreachable ();
2375     }
2376 }
2377 
2378 /* Build 0 constant of type TYPE.  This is used by constructor folding
2379    and thus the constant should be represented in memory by
2380    zero(es).  */
2381 
2382 tree
2383 build_zero_cst (tree type)
2384 {
2385   switch (TREE_CODE (type))
2386     {
2387     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2388     case POINTER_TYPE: case REFERENCE_TYPE:
2389     case OFFSET_TYPE: case NULLPTR_TYPE:
2390       return build_int_cst (type, 0);
2391 
2392     case REAL_TYPE:
2393       return build_real (type, dconst0);
2394 
2395     case FIXED_POINT_TYPE:
2396       return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2397 
2398     case VECTOR_TYPE:
2399       {
2400 	tree scalar = build_zero_cst (TREE_TYPE (type));
2401 
2402 	return build_vector_from_val (type, scalar);
2403       }
2404 
2405     case COMPLEX_TYPE:
2406       {
2407 	tree zero = build_zero_cst (TREE_TYPE (type));
2408 
2409 	return build_complex (type, zero, zero);
2410       }
2411 
2412     default:
2413       if (!AGGREGATE_TYPE_P (type))
2414 	return fold_convert (type, integer_zero_node);
2415       return build_constructor (type, NULL);
2416     }
2417 }
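/* For example (sketch): on a complex integer type, build_one_cst gives
   1 + 0i, build_each_one_cst gives 1 + 1i and build_all_ones_cst makes
   both parts all-ones, while build_zero_cst on an aggregate type returns
   an empty CONSTRUCTOR standing for all-zero memory.  */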
2418 
2419 
2420 /* Build a BINFO with room for BASE_BINFOS base binfos.  */
2421 
2422 tree
2423 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
2424 {
2425   tree t;
2426   size_t length = (offsetof (struct tree_binfo, base_binfos)
2427 		   + vec<tree, va_gc>::embedded_size (base_binfos));
2428 
2429   record_node_allocation_statistics (TREE_BINFO, length);
2430 
2431   t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2432 
2433   memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2434 
2435   TREE_SET_CODE (t, TREE_BINFO);
2436 
2437   BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2438 
2439   return t;
2440 }
2441 
2442 /* Create a CASE_LABEL_EXPR tree node and return it.  */
2443 
2444 tree
2445 build_case_label (tree low_value, tree high_value, tree label_decl)
2446 {
2447   tree t = make_node (CASE_LABEL_EXPR);
2448 
2449   TREE_TYPE (t) = void_type_node;
2450   SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2451 
2452   CASE_LOW (t) = low_value;
2453   CASE_HIGH (t) = high_value;
2454   CASE_LABEL (t) = label_decl;
2455   CASE_CHAIN (t) = NULL_TREE;
2456 
2457   return t;
2458 }
2459 
2460 /* Build a newly constructed INTEGER_CST node.  LEN and EXT_LEN are the
2461    values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2462    The latter determines the length of the HOST_WIDE_INT vector.  */
2463 
2464 tree
2465 make_int_cst (int len, int ext_len MEM_STAT_DECL)
2466 {
2467   tree t;
2468   int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2469 		+ sizeof (struct tree_int_cst));
2470 
2471   gcc_assert (len);
2472   record_node_allocation_statistics (INTEGER_CST, length);
2473 
2474   t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2475 
2476   TREE_SET_CODE (t, INTEGER_CST);
2477   TREE_INT_CST_NUNITS (t) = len;
2478   TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2479   /* to_offset can only be applied to trees that are offset_int-sized
2480      or smaller.  EXT_LEN is correct if it fits, otherwise the constant
2481      must be exactly the precision of offset_int and so LEN is correct.  */
2482   if (ext_len <= OFFSET_INT_ELTS)
2483     TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2484   else
2485     TREE_INT_CST_OFFSET_NUNITS (t) = len;
2486 
2487   TREE_CONSTANT (t) = 1;
2488 
2489   return t;
2490 }
2491 
2492 /* Build a newly constructed TREE_VEC node of length LEN.  */
2493 
2494 tree
2495 make_tree_vec (int len MEM_STAT_DECL)
2496 {
2497   tree t;
2498   size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2499 
2500   record_node_allocation_statistics (TREE_VEC, length);
2501 
2502   t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2503 
2504   TREE_SET_CODE (t, TREE_VEC);
2505   TREE_VEC_LENGTH (t) = len;
2506 
2507   return t;
2508 }
2509 
2510 /* Grow a TREE_VEC node to new length LEN.  */
2511 
2512 tree
2513 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2514 {
2515   gcc_assert (TREE_CODE (v) == TREE_VEC);
2516 
2517   int oldlen = TREE_VEC_LENGTH (v);
2518   gcc_assert (len > oldlen);
2519 
2520   size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2521   size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2522 
2523   record_node_allocation_statistics (TREE_VEC, length - oldlength);
2524 
2525   v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2526 
2527   TREE_VEC_LENGTH (v) = len;
2528 
2529   return v;
2530 }
2531 
2532 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2533    fixed, and scalar, complex or vector.  */
2534 
2535 bool
2536 zerop (const_tree expr)
2537 {
2538   return (integer_zerop (expr)
2539 	  || real_zerop (expr)
2540 	  || fixed_zerop (expr));
2541 }
2542 
2543 /* Return 1 if EXPR is the integer constant zero or a complex constant
2544    of zero, or a location wrapper for such a constant.  */
2545 
2546 bool
2547 integer_zerop (const_tree expr)
2548 {
2549   STRIP_ANY_LOCATION_WRAPPER (expr);
2550 
2551   switch (TREE_CODE (expr))
2552     {
2553     case INTEGER_CST:
2554       return wi::to_wide (expr) == 0;
2555     case COMPLEX_CST:
2556       return (integer_zerop (TREE_REALPART (expr))
2557 	      && integer_zerop (TREE_IMAGPART (expr)));
2558     case VECTOR_CST:
2559       return (VECTOR_CST_NPATTERNS (expr) == 1
2560 	      && VECTOR_CST_DUPLICATE_P (expr)
2561 	      && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2562     default:
2563       return false;
2564     }
2565 }
2566 
2567 /* Return 1 if EXPR is the integer constant one or the corresponding
2568    complex constant, or a location wrapper for such a constant.  */
2569 
2570 bool
2571 integer_onep (const_tree expr)
2572 {
2573   STRIP_ANY_LOCATION_WRAPPER (expr);
2574 
2575   switch (TREE_CODE (expr))
2576     {
2577     case INTEGER_CST:
2578       return wi::eq_p (wi::to_widest (expr), 1);
2579     case COMPLEX_CST:
2580       return (integer_onep (TREE_REALPART (expr))
2581 	      && integer_zerop (TREE_IMAGPART (expr)));
2582     case VECTOR_CST:
2583       return (VECTOR_CST_NPATTERNS (expr) == 1
2584 	      && VECTOR_CST_DUPLICATE_P (expr)
2585 	      && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2586     default:
2587       return false;
2588     }
2589 }
2590 
2591 /* Return 1 if EXPR is the integer constant one.  For complex and vector,
2592    return 1 if every piece is the integer constant one.
2593    Also return 1 for location wrappers for such a constant.  */
2594 
2595 bool
2596 integer_each_onep (const_tree expr)
2597 {
2598   STRIP_ANY_LOCATION_WRAPPER (expr);
2599 
2600   if (TREE_CODE (expr) == COMPLEX_CST)
2601     return (integer_onep (TREE_REALPART (expr))
2602 	    && integer_onep (TREE_IMAGPART (expr)));
2603   else
2604     return integer_onep (expr);
2605 }
2606 
2607 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2608    it contains, or a complex or vector whose subparts are such integers,
2609    or a location wrapper for such a constant.  */
2610 
2611 bool
2612 integer_all_onesp (const_tree expr)
2613 {
2614   STRIP_ANY_LOCATION_WRAPPER (expr);
2615 
2616   if (TREE_CODE (expr) == COMPLEX_CST
2617       && integer_all_onesp (TREE_REALPART (expr))
2618       && integer_all_onesp (TREE_IMAGPART (expr)))
2619     return true;
2620 
2621   else if (TREE_CODE (expr) == VECTOR_CST)
2622     return (VECTOR_CST_NPATTERNS (expr) == 1
2623 	    && VECTOR_CST_DUPLICATE_P (expr)
2624 	    && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2625 
2626   else if (TREE_CODE (expr) != INTEGER_CST)
2627     return false;
2628 
2629   return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2630 	  == wi::to_wide (expr));
2631 }
2632 
2633 /* Return 1 if EXPR is the integer constant minus one, or a location wrapper
2634    for such a constant.  */
2635 
2636 bool
2637 integer_minus_onep (const_tree expr)
2638 {
2639   STRIP_ANY_LOCATION_WRAPPER (expr);
2640 
2641   if (TREE_CODE (expr) == COMPLEX_CST)
2642     return (integer_all_onesp (TREE_REALPART (expr))
2643 	    && integer_zerop (TREE_IMAGPART (expr)));
2644   else
2645     return integer_all_onesp (expr);
2646 }
2647 
2648 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2649    one bit on), or a location wrapper for such a constant.  */
2650 
2651 bool
2652 integer_pow2p (const_tree expr)
2653 {
2654   STRIP_ANY_LOCATION_WRAPPER (expr);
2655 
2656   if (TREE_CODE (expr) == COMPLEX_CST
2657       && integer_pow2p (TREE_REALPART (expr))
2658       && integer_zerop (TREE_IMAGPART (expr)))
2659     return true;
2660 
2661   if (TREE_CODE (expr) != INTEGER_CST)
2662     return false;
2663 
2664   return wi::popcount (wi::to_wide (expr)) == 1;
2665 }
2666 
2667 /* Return 1 if EXPR is an integer constant other than zero or a
2668    complex constant other than zero, or a location wrapper for such a
2669    constant.  */
2670 
2671 bool
2672 integer_nonzerop (const_tree expr)
2673 {
2674   STRIP_ANY_LOCATION_WRAPPER (expr);
2675 
2676   return ((TREE_CODE (expr) == INTEGER_CST
2677 	   && wi::to_wide (expr) != 0)
2678 	  || (TREE_CODE (expr) == COMPLEX_CST
2679 	      && (integer_nonzerop (TREE_REALPART (expr))
2680 		  || integer_nonzerop (TREE_IMAGPART (expr)))));
2681 }
2682 
2683 /* Return 1 if EXPR is the integer constant one.  For vector,
2684    return 1 if every piece is the integer constant minus one
2685    (representing the value TRUE).
2686    Also return 1 for location wrappers for such a constant.  */
2687 
2688 bool
2689 integer_truep (const_tree expr)
2690 {
2691   STRIP_ANY_LOCATION_WRAPPER (expr);
2692 
2693   if (TREE_CODE (expr) == VECTOR_CST)
2694     return integer_all_onesp (expr);
2695   return integer_onep (expr);
2696 }
2697 
2698 /* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
2699    for such a constant.  */
2700 
2701 bool
2702 fixed_zerop (const_tree expr)
2703 {
2704   STRIP_ANY_LOCATION_WRAPPER (expr);
2705 
2706   return (TREE_CODE (expr) == FIXED_CST
2707 	  && TREE_FIXED_CST (expr).data.is_zero ());
2708 }
2709 
2710 /* Return the power of two represented by a tree node known to be a
2711    power of two.  */
2712 
2713 int
2714 tree_log2 (const_tree expr)
2715 {
2716   if (TREE_CODE (expr) == COMPLEX_CST)
2717     return tree_log2 (TREE_REALPART (expr));
2718 
2719   return wi::exact_log2 (wi::to_wide (expr));
2720 }
2721 
2722 /* Similar, but return the largest integer Y such that 2 ** Y is less
2723    than or equal to EXPR.  */
2724 
2725 int
2726 tree_floor_log2 (const_tree expr)
2727 {
2728   if (TREE_CODE (expr) == COMPLEX_CST)
2729     return tree_log2 (TREE_REALPART (expr));
2730 
2731   return wi::floor_log2 (wi::to_wide (expr));
2732 }
2733 
2734 /* Return number of known trailing zero bits in EXPR, or, if the value of
2735    EXPR is known to be zero, the precision of its type.  */
2736 
2737 unsigned int
2738 tree_ctz (const_tree expr)
2739 {
2740   if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2741       && !POINTER_TYPE_P (TREE_TYPE (expr)))
2742     return 0;
2743 
2744   unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2745   switch (TREE_CODE (expr))
2746     {
2747     case INTEGER_CST:
2748       ret1 = wi::ctz (wi::to_wide (expr));
2749       return MIN (ret1, prec);
2750     case SSA_NAME:
2751       ret1 = wi::ctz (get_nonzero_bits (expr));
2752       return MIN (ret1, prec);
2753     case PLUS_EXPR:
2754     case MINUS_EXPR:
2755     case BIT_IOR_EXPR:
2756     case BIT_XOR_EXPR:
2757     case MIN_EXPR:
2758     case MAX_EXPR:
2759       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2760       if (ret1 == 0)
2761 	return ret1;
2762       ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2763       return MIN (ret1, ret2);
2764     case POINTER_PLUS_EXPR:
2765       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2766       ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2767       /* Second operand is sizetype, which could be in theory
2768 	 wider than pointer's precision.  Make sure we never
2769 	 return more than prec.  */
2770       ret2 = MIN (ret2, prec);
2771       return MIN (ret1, ret2);
2772     case BIT_AND_EXPR:
2773       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2774       ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2775       return MAX (ret1, ret2);
2776     case MULT_EXPR:
2777       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2778       ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2779       return MIN (ret1 + ret2, prec);
2780     case LSHIFT_EXPR:
2781       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2782       if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2783 	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2784 	{
2785 	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2786 	  return MIN (ret1 + ret2, prec);
2787 	}
2788       return ret1;
2789     case RSHIFT_EXPR:
2790       if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2791 	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2792 	{
2793 	  ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2794 	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2795 	  if (ret1 > ret2)
2796 	    return ret1 - ret2;
2797 	}
2798       return 0;
2799     case TRUNC_DIV_EXPR:
2800     case CEIL_DIV_EXPR:
2801     case FLOOR_DIV_EXPR:
2802     case ROUND_DIV_EXPR:
2803     case EXACT_DIV_EXPR:
2804       if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2805 	  && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2806 	{
2807 	  int l = tree_log2 (TREE_OPERAND (expr, 1));
2808 	  if (l >= 0)
2809 	    {
2810 	      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2811 	      ret2 = l;
2812 	      if (ret1 > ret2)
2813 		return ret1 - ret2;
2814 	    }
2815 	}
2816       return 0;
2817     CASE_CONVERT:
2818       ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2819       if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2820 	ret1 = prec;
2821       return MIN (ret1, prec);
2822     case SAVE_EXPR:
2823       return tree_ctz (TREE_OPERAND (expr, 0));
2824     case COND_EXPR:
2825       ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2826       if (ret1 == 0)
2827 	return 0;
2828       ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2829       return MIN (ret1, ret2);
2830     case COMPOUND_EXPR:
2831       return tree_ctz (TREE_OPERAND (expr, 1));
2832     case ADDR_EXPR:
2833       ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2834       if (ret1 > BITS_PER_UNIT)
2835 	{
2836 	  ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2837 	  return MIN (ret1, prec);
2838 	}
2839       return 0;
2840     default:
2841       return 0;
2842     }
2843 }
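/* For example (sketch), if X is an SSA_NAME whose recorded nonzero bits
   guarantee two trailing zero bits, then for the expression X * 8 the
   MULT_EXPR case above returns 2 + 3 = 5 known trailing zeros, capped at
   the type's precision.  */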
2844 
2845 /* Return 1 if EXPR is the real constant zero.  Trailing zeroes matter for
2846    decimal float constants, so don't return 1 for them.
2847    Also return 1 for location wrappers around such a constant.  */
2848 
2849 bool
2850 real_zerop (const_tree expr)
2851 {
2852   STRIP_ANY_LOCATION_WRAPPER (expr);
2853 
2854   switch (TREE_CODE (expr))
2855     {
2856     case REAL_CST:
2857       return real_equal (&TREE_REAL_CST (expr), &dconst0)
2858 	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2859     case COMPLEX_CST:
2860       return real_zerop (TREE_REALPART (expr))
2861 	     && real_zerop (TREE_IMAGPART (expr));
2862     case VECTOR_CST:
2863       {
2864 	/* Don't simply check for a duplicate because the predicate
2865 	   accepts both +0.0 and -0.0.  */
2866 	unsigned count = vector_cst_encoded_nelts (expr);
2867 	for (unsigned int i = 0; i < count; ++i)
2868 	  if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
2869 	    return false;
2870 	return true;
2871       }
2872     default:
2873       return false;
2874     }
2875 }
2876 
2877 /* Return 1 if EXPR is the real constant one in real or complex form.
2878    Trailing zeroes matter for decimal float constants, so don't return
2879    1 for them.
2880    Also return 1 for location wrappers around such a constant.  */
2881 
2882 bool
2883 real_onep (const_tree expr)
2884 {
2885   STRIP_ANY_LOCATION_WRAPPER (expr);
2886 
2887   switch (TREE_CODE (expr))
2888     {
2889     case REAL_CST:
2890       return real_equal (&TREE_REAL_CST (expr), &dconst1)
2891 	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2892     case COMPLEX_CST:
2893       return real_onep (TREE_REALPART (expr))
2894 	     && real_zerop (TREE_IMAGPART (expr));
2895     case VECTOR_CST:
2896       return (VECTOR_CST_NPATTERNS (expr) == 1
2897 	      && VECTOR_CST_DUPLICATE_P (expr)
2898 	      && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2899     default:
2900       return false;
2901     }
2902 }
2903 
2904 /* Return 1 if EXPR is the real constant minus one.  Trailing zeroes
2905    matter for decimal float constants, so don't return 1 for them.
2906    Also return 1 for location wrappers around such a constant.  */
2907 
2908 bool
2909 real_minus_onep (const_tree expr)
2910 {
2911   STRIP_ANY_LOCATION_WRAPPER (expr);
2912 
2913   switch (TREE_CODE (expr))
2914     {
2915     case REAL_CST:
2916       return real_equal (&TREE_REAL_CST (expr), &dconstm1)
2917 	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2918     case COMPLEX_CST:
2919       return real_minus_onep (TREE_REALPART (expr))
2920 	     && real_zerop (TREE_IMAGPART (expr));
2921     case VECTOR_CST:
2922       return (VECTOR_CST_NPATTERNS (expr) == 1
2923 	      && VECTOR_CST_DUPLICATE_P (expr)
2924 	      && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2925     default:
2926       return false;
2927     }
2928 }
2929 
2930 /* Return true if EXPR could be a floating point zero.  */
2931 
2932 bool
2933 real_maybe_zerop (const_tree expr)
2934 {
2935   switch (TREE_CODE (expr))
2936     {
2937     case REAL_CST:
2938       /* Can't use real_zerop here, as it always returns false for decimal
2939 	 floats.  And can't use TREE_REAL_CST (expr).cl == rvc_zero
2940 	 either, as decimal zeros are rvc_normal.  */
2941       return real_equal (&TREE_REAL_CST (expr), &dconst0);
2942     case COMPLEX_CST:
2943       return (real_maybe_zerop (TREE_REALPART (expr))
2944 	      || real_maybe_zerop (TREE_IMAGPART (expr)));
2945     case VECTOR_CST:
2946       {
2947 	unsigned count = vector_cst_encoded_nelts (expr);
2948 	for (unsigned int i = 0; i < count; ++i)
2949 	  if (real_maybe_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
2950 	    return true;
2951 	return false;
2952       }
2953     default:
2954       /* Perhaps for SSA_NAMEs we could query frange.  */
2955       return true;
2956     }
2957 }
2958 
2959 /* Nonzero if EXP is a constant or a cast of a constant.  */
2960 
2961 bool
2962 really_constant_p (const_tree exp)
2963 {
2964   /* This is not quite the same as STRIP_NOPS.  It does more.  */
2965   while (CONVERT_EXPR_P (exp)
2966 	 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2967     exp = TREE_OPERAND (exp, 0);
2968   return TREE_CONSTANT (exp);
2969 }
2970 
2971 /* Return true if T holds a polynomial pointer difference, storing it in
2972    *VALUE if so.  A true return means that T's precision is no greater
2973    than 64 bits, which is the largest address space we support, so *VALUE
2974    never loses precision.  However, the signedness of the result does
2975    not necessarily match the signedness of T: sometimes an unsigned type
2976    like sizetype is used to encode a value that is actually negative.  */
2977 
2978 bool
2979 ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
2980 {
2981   if (!t)
2982     return false;
2983   if (TREE_CODE (t) == INTEGER_CST)
2984     {
2985       if (!cst_and_fits_in_hwi (t))
2986 	return false;
2987       *value = int_cst_value (t);
2988       return true;
2989     }
2990   if (POLY_INT_CST_P (t))
2991     {
2992       for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
2993 	if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
2994 	  return false;
2995       for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
2996 	value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
2997       return true;
2998     }
2999   return false;
3000 }
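/* For example (sketch), a sizetype INTEGER_CST whose bit pattern encodes
   -4 is stored in *VALUE as the signed poly_int64 -4, matching the note
   above about unsigned encodings of negative offsets.  */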
3001 
3002 poly_int64
3003 tree_to_poly_int64 (const_tree t)
3004 {
3005   gcc_assert (tree_fits_poly_int64_p (t));
3006   if (POLY_INT_CST_P (t))
3007     return poly_int_cst_value (t).force_shwi ();
3008   return TREE_INT_CST_LOW (t);
3009 }
3010 
3011 poly_uint64
3012 tree_to_poly_uint64 (const_tree t)
3013 {
3014   gcc_assert (tree_fits_poly_uint64_p (t));
3015   if (POLY_INT_CST_P (t))
3016     return poly_int_cst_value (t).force_uhwi ();
3017   return TREE_INT_CST_LOW (t);
3018 }
3019 
3020 /* Return first list element whose TREE_VALUE is ELEM.
3021    Return 0 if ELEM is not in LIST.  */
3022 
3023 tree
3024 value_member (tree elem, tree list)
3025 {
3026   while (list)
3027     {
3028       if (elem == TREE_VALUE (list))
3029 	return list;
3030       list = TREE_CHAIN (list);
3031     }
3032   return NULL_TREE;
3033 }
3034 
3035 /* Return first list element whose TREE_PURPOSE is ELEM.
3036    Return 0 if ELEM is not in LIST.  */
3037 
3038 tree
3039 purpose_member (const_tree elem, tree list)
3040 {
3041   while (list)
3042     {
3043       if (elem == TREE_PURPOSE (list))
3044 	return list;
3045       list = TREE_CHAIN (list);
3046     }
3047   return NULL_TREE;
3048 }
3049 
3050 /* Return true if ELEM is in V.  */
3051 
3052 bool
3053 vec_member (const_tree elem, vec<tree, va_gc> *v)
3054 {
3055   unsigned ix;
3056   tree t;
3057   FOR_EACH_VEC_SAFE_ELT (v, ix, t)
3058     if (elem == t)
3059       return true;
3060   return false;
3061 }
3062 
3063 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3064    NULL_TREE.  */
3065 
3066 tree
3067 chain_index (int idx, tree chain)
3068 {
3069   for (; chain && idx > 0; --idx)
3070     chain = TREE_CHAIN (chain);
3071   return chain;
3072 }
3073 
3074 /* Return nonzero if ELEM is part of the chain CHAIN.  */
3075 
3076 bool
3077 chain_member (const_tree elem, const_tree chain)
3078 {
3079   while (chain)
3080     {
3081       if (elem == chain)
3082 	return true;
3083       chain = DECL_CHAIN (chain);
3084     }
3085 
3086   return false;
3087 }
3088 
3089 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3090    We expect a null pointer to mark the end of the chain.
3091    This is the Lisp primitive `length'.  */
3092 
3093 int
3094 list_length (const_tree t)
3095 {
3096   const_tree p = t;
3097 #ifdef ENABLE_TREE_CHECKING
3098   const_tree q = t;
3099 #endif
3100   int len = 0;
3101 
3102   while (p)
3103     {
3104       p = TREE_CHAIN (p);
3105 #ifdef ENABLE_TREE_CHECKING
3106       if (len % 2)
3107 	q = TREE_CHAIN (q);
3108       gcc_assert (p != q);
3109 #endif
3110       len++;
3111     }
3112 
3113   return len;
3114 }
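/* A note on the checking code above: Q advances one link for every two
   advances of P, so if the chain ever loops, P eventually equals Q and
   the assertion fires instead of the loop running forever (the classic
   tortoise-and-hare cycle check).  */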
3115 
3116 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3117    UNION_TYPE TYPE, or NULL_TREE if none.  */
3118 
3119 tree
3120 first_field (const_tree type)
3121 {
3122   tree t = TYPE_FIELDS (type);
3123   while (t && TREE_CODE (t) != FIELD_DECL)
3124     t = TREE_CHAIN (t);
3125   return t;
3126 }
3127 
3128 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3129    UNION_TYPE TYPE, or NULL_TREE if none.  */
3130 
3131 tree
3132 last_field (const_tree type)
3133 {
3134   tree last = NULL_TREE;
3135 
3136   for (tree fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
3137     {
3138       if (TREE_CODE (fld) != FIELD_DECL)
3139 	continue;
3140 
3141       last = fld;
3142     }
3143 
3144   return last;
3145 }
3146 
3147 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3148    by modifying the last node in chain 1 to point to chain 2.
3149    This is the Lisp primitive `nconc'.  */
3150 
3151 tree
3152 chainon (tree op1, tree op2)
3153 {
3154   tree t1;
3155 
3156   if (!op1)
3157     return op2;
3158   if (!op2)
3159     return op1;
3160 
3161   for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
3162     continue;
3163   TREE_CHAIN (t1) = op2;
3164 
3165 #ifdef ENABLE_TREE_CHECKING
3166   {
3167     tree t2;
3168     for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
3169       gcc_assert (t2 != t1);
3170   }
3171 #endif
3172 
3173   return op1;
3174 }
3175 
3176 /* Return the last node in a chain of nodes (chained through TREE_CHAIN).  */
3177 
3178 tree
3179 tree_last (tree chain)
3180 {
3181   tree next;
3182   if (chain)
3183     while ((next = TREE_CHAIN (chain)))
3184       chain = next;
3185   return chain;
3186 }
3187 
3188 /* Reverse the order of elements in the chain T,
3189    and return the new head of the chain (old last element).  */
3190 
3191 tree
3192 nreverse (tree t)
3193 {
3194   tree prev = 0, decl, next;
3195   for (decl = t; decl; decl = next)
3196     {
3197       /* We shouldn't be using this function to reverse BLOCK chains; we
3198 	 have blocks_nreverse for that.  */
3199       gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3200       next = TREE_CHAIN (decl);
3201       TREE_CHAIN (decl) = prev;
3202       prev = decl;
3203     }
3204   return prev;
3205 }
3206 
3207 /* Return a newly created TREE_LIST node whose
3208    purpose and value fields are PARM and VALUE.  */
3209 
3210 tree
3211 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3212 {
3213   tree t = make_node (TREE_LIST PASS_MEM_STAT);
3214   TREE_PURPOSE (t) = parm;
3215   TREE_VALUE (t) = value;
3216   return t;
3217 }
3218 
3219 /* Build a chain of TREE_LIST nodes from a vector.  */
3220 
3221 tree
3222 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
3223 {
3224   tree ret = NULL_TREE;
3225   tree *pp = &ret;
3226   unsigned int i;
3227   tree t;
3228   FOR_EACH_VEC_SAFE_ELT (vec, i, t)
3229     {
3230       *pp = build_tree_list (NULL, t PASS_MEM_STAT);
3231       pp = &TREE_CHAIN (*pp);
3232     }
3233   return ret;
3234 }
3235 
3236 /* Return a newly created TREE_LIST node whose
3237    purpose and value fields are PURPOSE and VALUE
3238    and whose TREE_CHAIN is CHAIN.  */
3239 
3240 tree
3241 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
3242 {
3243   tree node;
3244 
3245   node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
3246   memset (node, 0, sizeof (struct tree_common));
3247 
3248   record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
3249 
3250   TREE_SET_CODE (node, TREE_LIST);
3251   TREE_CHAIN (node) = chain;
3252   TREE_PURPOSE (node) = purpose;
3253   TREE_VALUE (node) = value;
3254   return node;
3255 }
3256 
3257 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3258    trees.  */
3259 
3260 vec<tree, va_gc> *
3261 ctor_to_vec (tree ctor)
3262 {
3263   vec<tree, va_gc> *vec;
3264   vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
3265   unsigned int ix;
3266   tree val;
3267 
3268   FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
3269     vec->quick_push (val);
3270 
3271   return vec;
3272 }
3273 
3274 /* Return the size nominally occupied by an object of type TYPE
3275    when it resides in memory.  The value is measured in units of bytes,
3276    and its data type is that normally used for type sizes
3277    (which is the first type created by make_signed_type or
3278    make_unsigned_type).  */
3279 
3280 tree
3281 size_in_bytes_loc (location_t loc, const_tree type)
3282 {
3283   tree t;
3284 
3285   if (type == error_mark_node)
3286     return integer_zero_node;
3287 
3288   type = TYPE_MAIN_VARIANT (type);
3289   t = TYPE_SIZE_UNIT (type);
3290 
3291   if (t == 0)
3292     {
3293       lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
3294       return size_zero_node;
3295     }
3296 
3297   return t;
3298 }
3299 
3300 /* Return the size of TYPE (in bytes) as a wide integer
3301    or return -1 if the size can vary or is larger than an integer.  */
3302 
3303 HOST_WIDE_INT
3304 int_size_in_bytes (const_tree type)
3305 {
3306   tree t;
3307 
3308   if (type == error_mark_node)
3309     return 0;
3310 
3311   type = TYPE_MAIN_VARIANT (type);
3312   t = TYPE_SIZE_UNIT (type);
3313 
3314   if (t && tree_fits_uhwi_p (t))
3315     return TREE_INT_CST_LOW (t);
3316   else
3317     return -1;
3318 }
3319 
3320 /* Return the maximum size of TYPE (in bytes) as a wide integer
3321    or return -1 if the size can vary or is larger than an integer.  */
3322 
3323 HOST_WIDE_INT
3324 max_int_size_in_bytes (const_tree type)
3325 {
3326   HOST_WIDE_INT size = -1;
3327   tree size_tree;
3328 
3329   /* If this is an array type, check for a possible MAX_SIZE attached.  */
3330 
3331   if (TREE_CODE (type) == ARRAY_TYPE)
3332     {
3333       size_tree = TYPE_ARRAY_MAX_SIZE (type);
3334 
3335       if (size_tree && tree_fits_uhwi_p (size_tree))
3336 	size = tree_to_uhwi (size_tree);
3337     }
3338 
3339   /* If we still haven't been able to get a size, see if the language
3340      can compute a maximum size.  */
3341 
3342   if (size == -1)
3343     {
3344       size_tree = lang_hooks.types.max_size (type);
3345 
3346       if (size_tree && tree_fits_uhwi_p (size_tree))
3347 	size = tree_to_uhwi (size_tree);
3348     }
3349 
3350   return size;
3351 }
3352 
3353 /* Return the bit position of FIELD, in bits from the start of the record.
3354    This is a tree of type bitsizetype.  */
3355 
3356 tree
3357 bit_position (const_tree field)
3358 {
3359   return bit_from_pos (DECL_FIELD_OFFSET (field),
3360 		       DECL_FIELD_BIT_OFFSET (field));
3361 }
3362 
3363 /* Return the byte position of FIELD, in bytes from the start of the record.
3364    This is a tree of type sizetype.  */
3365 
3366 tree
3367 byte_position (const_tree field)
3368 {
3369   return byte_from_pos (DECL_FIELD_OFFSET (field),
3370 			DECL_FIELD_BIT_OFFSET (field));
3371 }
3372 
3373 /* Likewise, but return as an integer.  It must be representable in
3374    that way (since it could be a signed value, we don't have the
3375    option of returning -1 like int_size_in_bytes can).  */
3376 
3377 HOST_WIDE_INT
3378 int_byte_position (const_tree field)
3379 {
3380   return tree_to_shwi (byte_position (field));
3381 }
3382 
3383 /* Return the strictest alignment, in bits, that T is known to have.  */
3384 
3385 unsigned int
3386 expr_align (const_tree t)
3387 {
3388   unsigned int align0, align1;
3389 
3390   switch (TREE_CODE (t))
3391     {
3392     CASE_CONVERT:  case NON_LVALUE_EXPR:
3393       /* If we have conversions, we know that the alignment of the
3394 	 object must meet each of the alignments of the types.  */
3395       align0 = expr_align (TREE_OPERAND (t, 0));
3396       align1 = TYPE_ALIGN (TREE_TYPE (t));
3397       return MAX (align0, align1);
3398 
3399     case SAVE_EXPR:         case COMPOUND_EXPR:       case MODIFY_EXPR:
3400     case INIT_EXPR:         case TARGET_EXPR:         case WITH_CLEANUP_EXPR:
3401     case CLEANUP_POINT_EXPR:
3402       /* These don't change the alignment of an object.  */
3403       return expr_align (TREE_OPERAND (t, 0));
3404 
3405     case COND_EXPR:
3406       /* The best we can do is say that the alignment is the least aligned
3407 	 of the two arms.  */
3408       align0 = expr_align (TREE_OPERAND (t, 1));
3409       align1 = expr_align (TREE_OPERAND (t, 2));
3410       return MIN (align0, align1);
3411 
3412       /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
3413 	 meaningfully, it's always 1.  */
3414     case LABEL_DECL:     case CONST_DECL:
3415     case VAR_DECL:       case PARM_DECL:   case RESULT_DECL:
3416     case FUNCTION_DECL:
3417       gcc_assert (DECL_ALIGN (t) != 0);
3418       return DECL_ALIGN (t);
3419 
3420     default:
3421       break;
3422     }
3423 
3424   /* Otherwise take the alignment from that of the type.  */
3425   return TYPE_ALIGN (TREE_TYPE (t));
3426 }
3427 
3428 /* Return, as a tree node, the number of elements for TYPE (which is an
3429    ARRAY_TYPE) minus one. This counts only elements of the top array.  */
3430 
3431 tree
3432 array_type_nelts (const_tree type)
3433 {
3434   tree index_type, min, max;
3435 
3436   /* If they did it with unspecified bounds, then we should have already
3437      given an error about it before we got here.  */
3438   if (! TYPE_DOMAIN (type))
3439     return error_mark_node;
3440 
3441   index_type = TYPE_DOMAIN (type);
3442   min = TYPE_MIN_VALUE (index_type);
3443   max = TYPE_MAX_VALUE (index_type);
3444 
3445   /* TYPE_MAX_VALUE may not be set if the array has unknown length.  */
3446   if (!max)
3447     return error_mark_node;
3448 
3449   return (integer_zerop (min)
3450 	  ? max
3451 	  : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3452 }
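/* For example (sketch), for the C type int[10] the domain is [0, 9], so
   array_type_nelts returns the INTEGER_CST 9; a domain of [1, 10] would
   instead yield the folded value of 10 - 1.  */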
3453 
3454 /* If arg is static -- a reference to an object in static storage -- then
3455    return the object.  This is not the same as the C meaning of `static'.
3456    If arg isn't static, return NULL.  */
3457 
3458 tree
3459 staticp (tree arg)
3460 {
3461   switch (TREE_CODE (arg))
3462     {
3463     case FUNCTION_DECL:
3464       /* Nested functions are static, even though taking their address will
3465 	 involve a trampoline as we unnest the nested function and create
3466 	 the trampoline on the tree level.  */
3467       return arg;
3468 
3469     case VAR_DECL:
3470       return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3471 	      && ! DECL_THREAD_LOCAL_P (arg)
3472 	      && ! DECL_DLLIMPORT_P (arg)
3473 	      ? arg : NULL);
3474 
3475     case CONST_DECL:
3476       return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3477 	      ? arg : NULL);
3478 
3479     case CONSTRUCTOR:
3480       return TREE_STATIC (arg) ? arg : NULL;
3481 
3482     case LABEL_DECL:
3483     case STRING_CST:
3484       return arg;
3485 
3486     case COMPONENT_REF:
3487       /* If the thing being referenced is not a field, then it is
3488 	 something language specific.  */
3489       gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3490 
3491       /* If we are referencing a bitfield, we can't evaluate an
3492 	 ADDR_EXPR at compile time and so it isn't a constant.  */
3493       if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3494 	return NULL;
3495 
3496       return staticp (TREE_OPERAND (arg, 0));
3497 
3498     case BIT_FIELD_REF:
3499       return NULL;
3500 
3501     case INDIRECT_REF:
3502       return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3503 
3504     case ARRAY_REF:
3505     case ARRAY_RANGE_REF:
3506       if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3507 	  && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3508 	return staticp (TREE_OPERAND (arg, 0));
3509       else
3510 	return NULL;
3511 
3512     case COMPOUND_LITERAL_EXPR:
3513       return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3514 
3515     default:
3516       return NULL;
3517     }
3518 }
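
/* For illustration (the decls are hypothetical): for a file-scope
   "static int x;" staticp returns the VAR_DECL for x, whereas for a
   local automatic variable it returns NULL_TREE.  References recurse
   into their base, so for the COMPONENT_REF x.f (f not a bit-field)
   the result is again the VAR_DECL for x, while a bit-field reference
   yields NULL.  */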
3519 
3520 
3521 
3522 
3523 /* Return whether OP is a DECL whose address is function-invariant.  */
3524 
3525 bool
3526 decl_address_invariant_p (const_tree op)
3527 {
3528   /* The conditions below are slightly less strict than the one in
3529      staticp.  */
3530 
3531   switch (TREE_CODE (op))
3532     {
3533     case PARM_DECL:
3534     case RESULT_DECL:
3535     case LABEL_DECL:
3536     case FUNCTION_DECL:
3537       return true;
3538 
3539     case VAR_DECL:
3540       if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3541           || DECL_THREAD_LOCAL_P (op)
3542           || DECL_CONTEXT (op) == current_function_decl
3543           || decl_function_context (op) == current_function_decl)
3544         return true;
3545       break;
3546 
3547     case CONST_DECL:
3548       if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3549           || decl_function_context (op) == current_function_decl)
3550         return true;
3551       break;
3552 
3553     default:
3554       break;
3555     }
3556 
3557   return false;
3558 }
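
/* A sketch of how this differs from staticp: the address of a PARM_DECL
   of the function being compiled is function-invariant even though the
   object is not in static storage, so for such a 'parm'

     decl_address_invariant_p (parm)    -> true
     staticp (parm)                     -> NULL_TREE

   (name hypothetical).  This is the notion used below when deciding
   whether an ADDR_EXPR is invariant within one function.  */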
3559 
3560 /* Return whether OP is a DECL whose address is interprocedural-invariant.  */
3561 
3562 bool
3563 decl_address_ip_invariant_p (const_tree op)
3564 {
3565   /* The conditions below are slightly less strict than the one in
3566      staticp.  */
3567 
3568   switch (TREE_CODE (op))
3569     {
3570     case LABEL_DECL:
3571     case FUNCTION_DECL:
3572     case STRING_CST:
3573       return true;
3574 
3575     case VAR_DECL:
3576       if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3577            && !DECL_DLLIMPORT_P (op))
3578           || DECL_THREAD_LOCAL_P (op))
3579         return true;
3580       break;
3581 
3582     case CONST_DECL:
3583       if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3584         return true;
3585       break;
3586 
3587     default:
3588       break;
3589     }
3590 
3591   return false;
3592 }
3593 
3594 
3595 /* Return true if T is function-invariant (internal function, does
3596    not handle arithmetic; that's handled in skip_simple_arithmetic and
3597    tree_invariant_p).  */
3598 
3599 static bool
3600 tree_invariant_p_1 (tree t)
3601 {
3602   tree op;
3603 
3604   if (TREE_CONSTANT (t)
3605       || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3606     return true;
3607 
3608   switch (TREE_CODE (t))
3609     {
3610     case SAVE_EXPR:
3611       return true;
3612 
3613     case ADDR_EXPR:
3614       op = TREE_OPERAND (t, 0);
3615       while (handled_component_p (op))
3616 	{
3617 	  switch (TREE_CODE (op))
3618 	    {
3619 	    case ARRAY_REF:
3620 	    case ARRAY_RANGE_REF:
3621 	      if (!tree_invariant_p (TREE_OPERAND (op, 1))
3622 		  || TREE_OPERAND (op, 2) != NULL_TREE
3623 		  || TREE_OPERAND (op, 3) != NULL_TREE)
3624 		return false;
3625 	      break;
3626 
3627 	    case COMPONENT_REF:
3628 	      if (TREE_OPERAND (op, 2) != NULL_TREE)
3629 		return false;
3630 	      break;
3631 
3632 	    default:;
3633 	    }
3634 	  op = TREE_OPERAND (op, 0);
3635 	}
3636 
3637       return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3638 
3639     default:
3640       break;
3641     }
3642 
3643   return false;
3644 }
3645 
3646 /* Return true if T is function-invariant.  */
3647 
3648 bool
3649 tree_invariant_p (tree t)
3650 {
3651   tree inner = skip_simple_arithmetic (t);
3652   return tree_invariant_p_1 (inner);
3653 }
3654 
3655 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3656    Do this to any expression which may be used in more than one place,
3657    but must be evaluated only once.
3658 
3659    Normally, expand_expr would reevaluate the expression each time.
3660    Calling save_expr produces something that is evaluated and recorded
3661    the first time expand_expr is called on it.  Subsequent calls to
3662    expand_expr just reuse the recorded value.
3663 
3664    The call to expand_expr that generates code that actually computes
3665    the value is the first call *at compile time*.  Subsequent calls
3666    *at compile time* generate code to use the saved value.
3667    This produces correct result provided that *at run time* control
3668    always flows through the insns made by the first expand_expr
3669    before reaching the other places where the save_expr was evaluated.
3670    You, the caller of save_expr, must make sure this is so.
3671 
3672    Constants, and certain read-only nodes, are returned with no
3673    SAVE_EXPR because that is safe.  Expressions containing placeholders
3674    are not touched; see tree.def for an explanation of what these
3675    are used for.  */
3676 
3677 tree
3678 save_expr (tree expr)
3679 {
3680   tree inner;
3681 
3682   /* If the tree evaluates to a constant, then we don't want to hide that
3683      fact (i.e. this allows further folding, and direct checks for constants).
3684      However, a read-only object that has side effects cannot be bypassed.
3685      Since it is no problem to reevaluate literals, we just return the
3686      literal node.  */
3687   inner = skip_simple_arithmetic (expr);
3688   if (TREE_CODE (inner) == ERROR_MARK)
3689     return inner;
3690 
3691   if (tree_invariant_p_1 (inner))
3692     return expr;
3693 
3694   /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3695      it means that the size or offset of some field of an object depends on
3696      the value within another field.
3697 
3698      Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3699      and some variable since it would then need to be both evaluated once and
3700      evaluated more than once.  Front-ends must assure this case cannot
3701      happen by surrounding any such subexpressions in their own SAVE_EXPR
3702      and forcing evaluation at the proper time.  */
3703   if (contains_placeholder_p (inner))
3704     return expr;
3705 
3706   expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3707 
3708   /* This expression might be placed ahead of a jump to ensure that the
3709      value was computed on both sides of the jump.  So make sure it isn't
3710      eliminated as dead.  */
3711   TREE_SIDE_EFFECTS (expr) = 1;
3712   return expr;
3713 }
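
/* A minimal usage sketch (EXPR and TYPE stand for nodes the caller has
   already built; the names are illustrative only): to use a possibly
   side-effecting expression twice while evaluating it once, wrap it
   before reusing it:

     tree saved = save_expr (expr);
     tree square = build2 (MULT_EXPR, type, saved, saved);

   If EXPR is already invariant -- an INTEGER_CST, say -- save_expr
   returns it unchanged and no SAVE_EXPR node is allocated.  */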
3714 
3715 /* Look inside EXPR into any simple arithmetic operations.  Return the
3716    outermost non-arithmetic or non-invariant node.  */
3717 
3718 tree
3719 skip_simple_arithmetic (tree expr)
3720 {
3721   /* We don't care about whether this can be used as an lvalue in this
3722      context.  */
3723   while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3724     expr = TREE_OPERAND (expr, 0);
3725 
3726   /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3727      a constant, it will be more efficient to not make another SAVE_EXPR since
3728      it will allow better simplification and GCSE will be able to merge the
3729      computations if they actually occur.  */
3730   while (true)
3731     {
3732       if (UNARY_CLASS_P (expr))
3733 	expr = TREE_OPERAND (expr, 0);
3734       else if (BINARY_CLASS_P (expr))
3735 	{
3736 	  if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3737 	    expr = TREE_OPERAND (expr, 0);
3738 	  else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3739 	    expr = TREE_OPERAND (expr, 1);
3740 	  else
3741 	    break;
3742 	}
3743       else
3744 	break;
3745     }
3746 
3747   return expr;
3748 }
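
/* For example, given the tree (SAVE_EXPR<x> + 1) * 4, each binary
   operation has one invariant operand, so the loop above descends
   through both and returns the SAVE_EXPR node itself.  save_expr relies
   on this so that arithmetic already anchored by a SAVE_EXPR is not
   wrapped in a second one.  */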
3749 
3750 /* Look inside EXPR into simple arithmetic operations involving constants.
3751    Return the outermost non-arithmetic or non-constant node.  */
3752 
3753 tree
3754 skip_simple_constant_arithmetic (tree expr)
3755 {
3756   while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3757     expr = TREE_OPERAND (expr, 0);
3758 
3759   while (true)
3760     {
3761       if (UNARY_CLASS_P (expr))
3762 	expr = TREE_OPERAND (expr, 0);
3763       else if (BINARY_CLASS_P (expr))
3764 	{
3765 	  if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3766 	    expr = TREE_OPERAND (expr, 0);
3767 	  else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3768 	    expr = TREE_OPERAND (expr, 1);
3769 	  else
3770 	    break;
3771 	}
3772       else
3773 	break;
3774     }
3775 
3776   return expr;
3777 }
3778 
3779 /* Return which tree structure is used by T.  */
3780 
3781 enum tree_node_structure_enum
3782 tree_node_structure (const_tree t)
3783 {
3784   const enum tree_code code = TREE_CODE (t);
3785   return tree_node_structure_for_code (code);
3786 }
3787 
3788 /* Set various status flags when building a CALL_EXPR object T.  */
3789 
3790 static void
3791 process_call_operands (tree t)
3792 {
3793   bool side_effects = TREE_SIDE_EFFECTS (t);
3794   bool read_only = false;
3795   int i = call_expr_flags (t);
3796 
3797   /* Calls have side-effects, except those to const or pure functions.  */
3798   if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3799     side_effects = true;
3800   /* Propagate TREE_READONLY of arguments for const functions.  */
3801   if (i & ECF_CONST)
3802     read_only = true;
3803 
3804   if (!side_effects || read_only)
3805     for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3806       {
3807 	tree op = TREE_OPERAND (t, i);
3808 	if (op && TREE_SIDE_EFFECTS (op))
3809 	  side_effects = true;
3810 	if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3811 	  read_only = false;
3812       }
3813 
3814   TREE_SIDE_EFFECTS (t) = side_effects;
3815   TREE_READONLY (t) = read_only;
3816 }
3817 
3818 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3819    size or offset that depends on a field within a record.  */
3820 
3821 bool
3822 contains_placeholder_p (const_tree exp)
3823 {
3824   enum tree_code code;
3825 
3826   if (!exp)
3827     return 0;
3828 
3829   code = TREE_CODE (exp);
3830   if (code == PLACEHOLDER_EXPR)
3831     return 1;
3832 
3833   switch (TREE_CODE_CLASS (code))
3834     {
3835     case tcc_reference:
3836       /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3837 	 position computations since they will be converted into a
3838 	 WITH_RECORD_EXPR involving the reference, which we assume here
3839 	 will be valid.  */
3840       return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3841 
3842     case tcc_exceptional:
3843       if (code == TREE_LIST)
3844 	return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3845 		|| CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3846       break;
3847 
3848     case tcc_unary:
3849     case tcc_binary:
3850     case tcc_comparison:
3851     case tcc_expression:
3852       switch (code)
3853 	{
3854 	case COMPOUND_EXPR:
3855 	  /* Ignoring the first operand isn't quite right, but works best.  */
3856 	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3857 
3858 	case COND_EXPR:
3859 	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3860 		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3861 		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3862 
3863 	case SAVE_EXPR:
3864 	  /* The save_expr function never wraps anything containing
3865 	     a PLACEHOLDER_EXPR. */
3866 	  return 0;
3867 
3868 	default:
3869 	  break;
3870 	}
3871 
3872       switch (TREE_CODE_LENGTH (code))
3873 	{
3874 	case 1:
3875 	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3876 	case 2:
3877 	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3878 		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3879 	default:
3880 	  return 0;
3881 	}
3882 
3883     case tcc_vl_exp:
3884       switch (code)
3885 	{
3886 	case CALL_EXPR:
3887 	  {
3888 	    const_tree arg;
3889 	    const_call_expr_arg_iterator iter;
3890 	    FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3891 	      if (CONTAINS_PLACEHOLDER_P (arg))
3892 		return 1;
3893 	    return 0;
3894 	  }
3895 	default:
3896 	  return 0;
3897 	}
3898 
3899     default:
3900       return 0;
3901     }
3902   return 0;
3903 }
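
/* A typical source of PLACEHOLDER_EXPRs is a self-referential type,
   e.g. an array whose bound is another field of the enclosing record
   (as the Ada front end builds them).  Its TYPE_SIZE is then roughly

     MULT_EXPR <COMPONENT_REF <PLACEHOLDER_EXPR, len_field>, 8>

   with len_field a hypothetical FIELD_DECL, and contains_placeholder_p
   returns true for that expression.  */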
3904 
3905 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3906    directly.  This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3907    field positions.  */
3908 
3909 static bool
3910 type_contains_placeholder_1 (const_tree type)
3911 {
3912   /* If the size contains a placeholder or the parent type (the component
3913      type in the case of arrays) involves a placeholder, this type does.  */
3914   if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3915       || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3916       || (!POINTER_TYPE_P (type)
3917 	  && TREE_TYPE (type)
3918 	  && type_contains_placeholder_p (TREE_TYPE (type))))
3919     return true;
3920 
3921   /* Now do type-specific checks.  Note that the last part of the check above
3922      greatly limits what we have to do below.  */
3923   switch (TREE_CODE (type))
3924     {
3925     case VOID_TYPE:
3926     case COMPLEX_TYPE:
3927     case ENUMERAL_TYPE:
3928     case BOOLEAN_TYPE:
3929     case POINTER_TYPE:
3930     case OFFSET_TYPE:
3931     case REFERENCE_TYPE:
3932     case METHOD_TYPE:
3933     case FUNCTION_TYPE:
3934     case VECTOR_TYPE:
3935     case NULLPTR_TYPE:
3936       return false;
3937 
3938     case INTEGER_TYPE:
3939     case REAL_TYPE:
3940     case FIXED_POINT_TYPE:
3941       /* Here we just check the bounds.  */
3942       return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3943 	      || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3944 
3945     case ARRAY_TYPE:
3946       /* We have already checked the component type above, so just check
3947 	 the domain type.  Flexible array members have a null domain.  */
3948       return TYPE_DOMAIN (type) ?
3949 	type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
3950 
3951     case RECORD_TYPE:
3952     case UNION_TYPE:
3953     case QUAL_UNION_TYPE:
3954       {
3955 	tree field;
3956 
3957 	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3958 	  if (TREE_CODE (field) == FIELD_DECL
3959 	      && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3960 		  || (TREE_CODE (type) == QUAL_UNION_TYPE
3961 		      && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3962 		  || type_contains_placeholder_p (TREE_TYPE (field))))
3963 	    return true;
3964 
3965 	return false;
3966       }
3967 
3968     default:
3969       gcc_unreachable ();
3970     }
3971 }
3972 
3973 /* Wrapper around above function used to cache its result.  */
3974 
3975 bool
3976 type_contains_placeholder_p (tree type)
3977 {
3978   bool result;
3979 
3980   /* If the contains_placeholder_bits field has been initialized,
3981      then we know the answer.  */
3982   if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3983     return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3984 
3985   /* Indicate that we've seen this type node, and the answer is false.
3986      This is what we want to return if we run into recursion via fields.  */
3987   TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3988 
3989   /* Compute the real value.  */
3990   result = type_contains_placeholder_1 (type);
3991 
3992   /* Store the real value.  */
3993   TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3994 
3995   return result;
3996 }
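
/* Note on the cache above: TYPE_CONTAINS_PLACEHOLDER_INTERNAL holds 0
   for "not yet computed", 1 for "computed, false" and 2 for "computed,
   true"; hence the stored value is RESULT + 1 and the returned value is
   the stored value minus one.  */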
3997 
3998 /* Push tree EXP onto vector QUEUE if it is not already present.  */
3999 
4000 static void
4001 push_without_duplicates (tree exp, vec<tree> *queue)
4002 {
4003   unsigned int i;
4004   tree iter;
4005 
4006   FOR_EACH_VEC_ELT (*queue, i, iter)
4007     if (simple_cst_equal (iter, exp) == 1)
4008       break;
4009 
4010   if (!iter)
4011     queue->safe_push (exp);
4012 }
4013 
4014 /* Given a tree EXP, find all occurrences of references to fields
4015    in a PLACEHOLDER_EXPR and place them in vector REFS without
4016    duplicates.  Also record VAR_DECLs and CONST_DECLs.  Note that
4017    we assume here that EXP contains only arithmetic expressions
4018    or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
4019    argument list.  */
4020 
4021 void
4022 find_placeholder_in_expr (tree exp, vec<tree> *refs)
4023 {
4024   enum tree_code code = TREE_CODE (exp);
4025   tree inner;
4026   int i;
4027 
4028   /* We handle TREE_LIST and COMPONENT_REF separately.  */
4029   if (code == TREE_LIST)
4030     {
4031       FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
4032       FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
4033     }
4034   else if (code == COMPONENT_REF)
4035     {
4036       for (inner = TREE_OPERAND (exp, 0);
4037 	   REFERENCE_CLASS_P (inner);
4038 	   inner = TREE_OPERAND (inner, 0))
4039 	;
4040 
4041       if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
4042 	push_without_duplicates (exp, refs);
4043       else
4044 	FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
4045    }
4046   else
4047     switch (TREE_CODE_CLASS (code))
4048       {
4049       case tcc_constant:
4050 	break;
4051 
4052       case tcc_declaration:
4053 	/* Variables allocated to static storage can stay.  */
4054         if (!TREE_STATIC (exp))
4055 	  push_without_duplicates (exp, refs);
4056 	break;
4057 
4058       case tcc_expression:
4059 	/* This is the pattern built in ada/make_aligning_type.  */
4060 	if (code == ADDR_EXPR
4061 	    && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
4062 	  {
4063 	    push_without_duplicates (exp, refs);
4064 	    break;
4065 	  }
4066 
4067         /* Fall through.  */
4068 
4069       case tcc_exceptional:
4070       case tcc_unary:
4071       case tcc_binary:
4072       case tcc_comparison:
4073       case tcc_reference:
4074 	for (i = 0; i < TREE_CODE_LENGTH (code); i++)
4075 	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4076 	break;
4077 
4078       case tcc_vl_exp:
4079 	for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4080 	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4081 	break;
4082 
4083       default:
4084 	gcc_unreachable ();
4085       }
4086 }
4087 
4088 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
4089    return a tree with all occurrences of references to F in a
4090    PLACEHOLDER_EXPR replaced by R.  Also handle VAR_DECLs and
4091    CONST_DECLs.  Note that we assume here that EXP contains only
4092    arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
4093    occurring only in their argument list.  */
4094 
4095 tree
4096 substitute_in_expr (tree exp, tree f, tree r)
4097 {
4098   enum tree_code code = TREE_CODE (exp);
4099   tree op0, op1, op2, op3;
4100   tree new_tree;
4101 
4102   /* We handle TREE_LIST and COMPONENT_REF separately.  */
4103   if (code == TREE_LIST)
4104     {
4105       op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
4106       op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
4107       if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4108 	return exp;
4109 
4110       return tree_cons (TREE_PURPOSE (exp), op1, op0);
4111     }
4112   else if (code == COMPONENT_REF)
4113     {
4114       tree inner;
4115 
4116       /* If this expression is getting a value from a PLACEHOLDER_EXPR
4117 	 and it is the right field, replace it with R.  */
4118       for (inner = TREE_OPERAND (exp, 0);
4119 	   REFERENCE_CLASS_P (inner);
4120 	   inner = TREE_OPERAND (inner, 0))
4121 	;
4122 
4123       /* The field.  */
4124       op1 = TREE_OPERAND (exp, 1);
4125 
4126       if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
4127 	return r;
4128 
4129       /* If this expression hasn't been completed yet, leave it alone.  */
4130       if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
4131 	return exp;
4132 
4133       op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4134       if (op0 == TREE_OPERAND (exp, 0))
4135 	return exp;
4136 
4137       new_tree
4138 	= fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
4139    }
4140   else
4141     switch (TREE_CODE_CLASS (code))
4142       {
4143       case tcc_constant:
4144 	return exp;
4145 
4146       case tcc_declaration:
4147 	if (exp == f)
4148 	  return r;
4149 	else
4150 	  return exp;
4151 
4152       case tcc_expression:
4153 	if (exp == f)
4154 	  return r;
4155 
4156         /* Fall through.  */
4157 
4158       case tcc_exceptional:
4159       case tcc_unary:
4160       case tcc_binary:
4161       case tcc_comparison:
4162       case tcc_reference:
4163 	switch (TREE_CODE_LENGTH (code))
4164 	  {
4165 	  case 0:
4166 	    return exp;
4167 
4168 	  case 1:
4169 	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4170 	    if (op0 == TREE_OPERAND (exp, 0))
4171 	      return exp;
4172 
4173 	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4174 	    break;
4175 
4176 	  case 2:
4177 	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4178 	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4179 
4180 	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4181 	      return exp;
4182 
4183 	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4184 	    break;
4185 
4186 	  case 3:
4187 	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4188 	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4189 	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4190 
4191 	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4192 		&& op2 == TREE_OPERAND (exp, 2))
4193 	      return exp;
4194 
4195 	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4196 	    break;
4197 
4198 	  case 4:
4199 	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4200 	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4201 	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4202 	    op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
4203 
4204 	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4205 		&& op2 == TREE_OPERAND (exp, 2)
4206 		&& op3 == TREE_OPERAND (exp, 3))
4207 	      return exp;
4208 
4209 	    new_tree
4210 	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4211 	    break;
4212 
4213 	  default:
4214 	    gcc_unreachable ();
4215 	  }
4216 	break;
4217 
4218       case tcc_vl_exp:
4219 	{
4220 	  int i;
4221 
4222 	  new_tree = NULL_TREE;
4223 
4224 	  /* If we are trying to replace F with a constant or with another
4225 	     instance of one of the arguments of the call, inline back
4226 	     functions which do nothing else than computing a value from
4227 	     the arguments they are passed.  This makes it possible to
4228 	     fold partially or entirely the replacement expression.  */
4229 	  if (code == CALL_EXPR)
4230 	    {
4231 	      bool maybe_inline = false;
4232 	      if (CONSTANT_CLASS_P (r))
4233 		maybe_inline = true;
4234 	      else
4235 		for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
4236 		  if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
4237 		    {
4238 		      maybe_inline = true;
4239 		      break;
4240 		    }
4241 	      if (maybe_inline)
4242 		{
4243 		  tree t = maybe_inline_call_in_expr (exp);
4244 		  if (t)
4245 		    return SUBSTITUTE_IN_EXPR (t, f, r);
4246 		}
4247 	    }
4248 
4249 	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4250 	    {
4251 	      tree op = TREE_OPERAND (exp, i);
4252 	      tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
4253 	      if (new_op != op)
4254 		{
4255 		  if (!new_tree)
4256 		    new_tree = copy_node (exp);
4257 		  TREE_OPERAND (new_tree, i) = new_op;
4258 		}
4259 	    }
4260 
4261 	  if (new_tree)
4262 	    {
4263 	      new_tree = fold (new_tree);
4264 	      if (TREE_CODE (new_tree) == CALL_EXPR)
4265 		process_call_operands (new_tree);
4266 	    }
4267 	  else
4268 	    return exp;
4269 	}
4270 	break;
4271 
4272       default:
4273 	gcc_unreachable ();
4274       }
4275 
4276   TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4277 
4278   if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4279     TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4280 
4281   return new_tree;
4282 }
4283 
4284 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4285    for it within OBJ, a tree that is an object or a chain of references.  */
4286 
4287 tree
4288 substitute_placeholder_in_expr (tree exp, tree obj)
4289 {
4290   enum tree_code code = TREE_CODE (exp);
4291   tree op0, op1, op2, op3;
4292   tree new_tree;
4293 
4294   /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
4295      in the chain of OBJ.  */
4296   if (code == PLACEHOLDER_EXPR)
4297     {
4298       tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
4299       tree elt;
4300 
4301       for (elt = obj; elt != 0;
4302 	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4303 		   || TREE_CODE (elt) == COND_EXPR)
4304 		  ? TREE_OPERAND (elt, 1)
4305 		  : (REFERENCE_CLASS_P (elt)
4306 		     || UNARY_CLASS_P (elt)
4307 		     || BINARY_CLASS_P (elt)
4308 		     || VL_EXP_CLASS_P (elt)
4309 		     || EXPRESSION_CLASS_P (elt))
4310 		  ? TREE_OPERAND (elt, 0) : 0))
4311 	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
4312 	  return elt;
4313 
4314       for (elt = obj; elt != 0;
4315 	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4316 		   || TREE_CODE (elt) == COND_EXPR)
4317 		  ? TREE_OPERAND (elt, 1)
4318 		  : (REFERENCE_CLASS_P (elt)
4319 		     || UNARY_CLASS_P (elt)
4320 		     || BINARY_CLASS_P (elt)
4321 		     || VL_EXP_CLASS_P (elt)
4322 		     || EXPRESSION_CLASS_P (elt))
4323 		  ? TREE_OPERAND (elt, 0) : 0))
4324 	if (POINTER_TYPE_P (TREE_TYPE (elt))
4325 	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
4326 		== need_type))
4327 	  return fold_build1 (INDIRECT_REF, need_type, elt);
4328 
4329       /* If we didn't find it, return the original PLACEHOLDER_EXPR.  If it
4330 	 survives until RTL generation, there will be an error.  */
4331       return exp;
4332     }
4333 
4334   /* TREE_LIST is special because we need to look at TREE_VALUE
4335      and TREE_CHAIN, not TREE_OPERANDS.  */
4336   else if (code == TREE_LIST)
4337     {
4338       op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
4339       op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
4340       if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4341 	return exp;
4342 
4343       return tree_cons (TREE_PURPOSE (exp), op1, op0);
4344     }
4345   else
4346     switch (TREE_CODE_CLASS (code))
4347       {
4348       case tcc_constant:
4349       case tcc_declaration:
4350 	return exp;
4351 
4352       case tcc_exceptional:
4353       case tcc_unary:
4354       case tcc_binary:
4355       case tcc_comparison:
4356       case tcc_expression:
4357       case tcc_reference:
4358       case tcc_statement:
4359 	switch (TREE_CODE_LENGTH (code))
4360 	  {
4361 	  case 0:
4362 	    return exp;
4363 
4364 	  case 1:
4365 	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4366 	    if (op0 == TREE_OPERAND (exp, 0))
4367 	      return exp;
4368 
4369 	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4370 	    break;
4371 
4372 	  case 2:
4373 	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4374 	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4375 
4376 	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4377 	      return exp;
4378 
4379 	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4380 	    break;
4381 
4382 	  case 3:
4383 	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4384 	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4385 	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4386 
4387 	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4388 		&& op2 == TREE_OPERAND (exp, 2))
4389 	      return exp;
4390 
4391 	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4392 	    break;
4393 
4394 	  case 4:
4395 	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4396 	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4397 	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4398 	    op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4399 
4400 	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4401 		&& op2 == TREE_OPERAND (exp, 2)
4402 		&& op3 == TREE_OPERAND (exp, 3))
4403 	      return exp;
4404 
4405 	    new_tree
4406 	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4407 	    break;
4408 
4409 	  default:
4410 	    gcc_unreachable ();
4411 	  }
4412 	break;
4413 
4414       case tcc_vl_exp:
4415 	{
4416 	  int i;
4417 
4418 	  new_tree = NULL_TREE;
4419 
4420 	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4421 	    {
4422 	      tree op = TREE_OPERAND (exp, i);
4423 	      tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4424 	      if (new_op != op)
4425 		{
4426 		  if (!new_tree)
4427 		    new_tree = copy_node (exp);
4428 		  TREE_OPERAND (new_tree, i) = new_op;
4429 		}
4430 	    }
4431 
4432 	  if (new_tree)
4433 	    {
4434 	      new_tree = fold (new_tree);
4435 	      if (TREE_CODE (new_tree) == CALL_EXPR)
4436 		process_call_operands (new_tree);
4437 	    }
4438 	  else
4439 	    return exp;
4440 	}
4441 	break;
4442 
4443       default:
4444 	gcc_unreachable ();
4445       }
4446 
4447   TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4448 
4449   if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4450     TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4451 
4452   return new_tree;
4453 }
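
/* Sketch of the intended use (T and OBJ are hypothetical): when the
   self-referential size of a type T must be evaluated for a concrete
   object OBJ of that type, one writes

     tree size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE (t), obj);

   which replaces each PLACEHOLDER_EXPR whose type matches OBJ's type
   with OBJ itself, or with *OBJ when only a pointer to that type is
   found in the chain of references.  */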
4454 
4455 
4456 /* Subroutine of stabilize_reference; this is called for subtrees of
4457    references.  Any expression with side-effects must be put in a SAVE_EXPR
4458    to ensure that it is only evaluated once.
4459 
4460    We don't put SAVE_EXPR nodes around everything, because assigning very
4461    simple expressions to temporaries causes us to miss good opportunities
4462    for optimizations.  Among other things, the opportunity to fold in the
4463    addition of a constant into an addressing mode often gets lost, e.g.
4464    "y[i+1] += x;".  In general, we take the approach that we should not make
4465    an assignment unless we are forced into it - i.e., that any non-side effect
4466    operator should be allowed, and that cse should take care of coalescing
4467    multiple utterances of the same expression should that prove fruitful.  */
4468 
4469 static tree
4470 stabilize_reference_1 (tree e)
4471 {
4472   tree result;
4473   enum tree_code code = TREE_CODE (e);
4474 
4475   /* We cannot ignore const expressions because it might be a reference
4476      to a const array but whose index contains side-effects.  But we can
4477      ignore things that are actual constant or that already have been
4478      handled by this function.  */
4479 
4480   if (tree_invariant_p (e))
4481     return e;
4482 
4483   switch (TREE_CODE_CLASS (code))
4484     {
4485     case tcc_exceptional:
4486       /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
4487 	 have side-effects.  */
4488       if (code == STATEMENT_LIST)
4489 	return save_expr (e);
4490       /* FALLTHRU */
4491     case tcc_type:
4492     case tcc_declaration:
4493     case tcc_comparison:
4494     case tcc_statement:
4495     case tcc_expression:
4496     case tcc_reference:
4497     case tcc_vl_exp:
4498       /* If the expression has side-effects, then encase it in a SAVE_EXPR
4499 	 so that it will only be evaluated once.  */
4500       /* The reference (r) and comparison (<) classes could be handled as
4501 	 below, but it is generally faster to only evaluate them once.  */
4502       if (TREE_SIDE_EFFECTS (e))
4503 	return save_expr (e);
4504       return e;
4505 
4506     case tcc_constant:
4507       /* Constants need no processing.  In fact, we should never reach
4508 	 here.  */
4509       return e;
4510 
4511     case tcc_binary:
4512       /* Division is slow and tends to be compiled with jumps,
4513 	 especially the division by powers of 2 that is often
4514 	 found inside of an array reference.  So do it just once.  */
4515       if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4516 	  || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4517 	  || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4518 	  || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4519 	return save_expr (e);
4520       /* Recursively stabilize each operand.  */
4521       result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4522 			 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4523       break;
4524 
4525     case tcc_unary:
4526       /* Recursively stabilize each operand.  */
4527       result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4528       break;
4529 
4530     default:
4531       gcc_unreachable ();
4532     }
4533 
4534   TREE_TYPE (result) = TREE_TYPE (e);
4535   TREE_READONLY (result) = TREE_READONLY (e);
4536   TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4537   TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4538 
4539   return result;
4540 }
4541 
4542 /* Stabilize a reference so that we can use it any number of times
4543    without causing its operands to be evaluated more than once.
4544    Returns the stabilized reference.  This works by means of save_expr,
4545    so see the caveats in the comments about save_expr.
4546 
4547    Also allows conversion expressions whose operands are references.
4548    Any other kind of expression is returned unchanged.  */
4549 
4550 tree
4551 stabilize_reference (tree ref)
4552 {
4553   tree result;
4554   enum tree_code code = TREE_CODE (ref);
4555 
4556   switch (code)
4557     {
4558     case VAR_DECL:
4559     case PARM_DECL:
4560     case RESULT_DECL:
4561       /* No action is needed in this case.  */
4562       return ref;
4563 
4564     CASE_CONVERT:
4565     case FLOAT_EXPR:
4566     case FIX_TRUNC_EXPR:
4567       result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4568       break;
4569 
4570     case INDIRECT_REF:
4571       result = build_nt (INDIRECT_REF,
4572 			 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4573       break;
4574 
4575     case COMPONENT_REF:
4576       result = build_nt (COMPONENT_REF,
4577 			 stabilize_reference (TREE_OPERAND (ref, 0)),
4578 			 TREE_OPERAND (ref, 1), NULL_TREE);
4579       break;
4580 
4581     case BIT_FIELD_REF:
4582       result = build_nt (BIT_FIELD_REF,
4583 			 stabilize_reference (TREE_OPERAND (ref, 0)),
4584 			 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4585       REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4586       break;
4587 
4588     case ARRAY_REF:
4589       result = build_nt (ARRAY_REF,
4590 			 stabilize_reference (TREE_OPERAND (ref, 0)),
4591 			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4592 			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4593       break;
4594 
4595     case ARRAY_RANGE_REF:
4596       result = build_nt (ARRAY_RANGE_REF,
4597 			 stabilize_reference (TREE_OPERAND (ref, 0)),
4598 			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4599 			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4600       break;
4601 
4602     case COMPOUND_EXPR:
4603       /* We cannot wrap the first expression in a SAVE_EXPR, as then
4604 	 it wouldn't be ignored.  This matters when dealing with
4605 	 volatiles.  */
4606       return stabilize_reference_1 (ref);
4607 
4608       /* If arg isn't a kind of lvalue we recognize, make no change.
4609 	 Caller should recognize the error for an invalid lvalue.  */
4610     default:
4611       return ref;
4612 
4613     case ERROR_MARK:
4614       return error_mark_node;
4615     }
4616 
4617   TREE_TYPE (result) = TREE_TYPE (ref);
4618   TREE_READONLY (result) = TREE_READONLY (ref);
4619   TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4620   TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4621 
4622   return result;
4623 }
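
/* For example, stabilizing the reference a[i++] (an ARRAY_REF whose
   index has side effects) yields

     ARRAY_REF <a, SAVE_EXPR <i++>, ...>

   so that using the result both to read and to write the element
   increments i only once; the base 'a', being a plain VAR_DECL, comes
   back unchanged from the recursive call.  */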
4624 
4625 /* Low-level constructors for expressions.  */
4626 
4627 /* A helper function for build1 and constant folders.  Set TREE_CONSTANT,
4628    and TREE_SIDE_EFFECTS for an ADDR_EXPR.  */
4629 
4630 void
4631 recompute_tree_invariant_for_addr_expr (tree t)
4632 {
4633   tree node;
4634   bool tc = true, se = false;
4635 
4636   gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4637 
4638   /* We started out assuming this address is both invariant and constant, but
4639      does not have side effects.  Now go down any handled components and see if
4640      any of them involve offsets that are either non-constant or non-invariant.
4641      Also check for side-effects.
4642 
4643      ??? Note that this code makes no attempt to deal with the case where
4644      taking the address of something causes a copy due to misalignment.  */
4645 
4646 #define UPDATE_FLAGS(NODE)  \
4647 do { tree _node = (NODE); \
4648      if (_node && !TREE_CONSTANT (_node)) tc = false; \
4649      if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4650 
4651   for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4652        node = TREE_OPERAND (node, 0))
4653     {
4654       /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4655 	 array reference (probably made temporarily by the G++ front end),
4656 	 so ignore all the operands.  */
4657       if ((TREE_CODE (node) == ARRAY_REF
4658 	   || TREE_CODE (node) == ARRAY_RANGE_REF)
4659 	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4660 	{
4661 	  UPDATE_FLAGS (TREE_OPERAND (node, 1));
4662 	  if (TREE_OPERAND (node, 2))
4663 	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
4664 	  if (TREE_OPERAND (node, 3))
4665 	    UPDATE_FLAGS (TREE_OPERAND (node, 3));
4666 	}
4667       /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4668 	 FIELD_DECL, apparently.  The G++ front end can put something else
4669 	 there, at least temporarily.  */
4670       else if (TREE_CODE (node) == COMPONENT_REF
4671 	       && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4672 	{
4673 	  if (TREE_OPERAND (node, 2))
4674 	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
4675 	}
4676     }
4677 
4678   node = lang_hooks.expr_to_decl (node, &tc, &se);
4679 
4680   /* Now see what's inside.  If it's an INDIRECT_REF, copy our properties from
4681      the address, since &(*a)->b is a form of addition.  If it's a constant, the
4682      address is constant too.  If it's a decl, its address is constant if the
4683      decl is static.  Everything else is not constant and, furthermore,
4684      taking the address of a volatile variable is not volatile.  */
4685   if (TREE_CODE (node) == INDIRECT_REF
4686       || TREE_CODE (node) == MEM_REF)
4687     UPDATE_FLAGS (TREE_OPERAND (node, 0));
4688   else if (CONSTANT_CLASS_P (node))
4689     ;
4690   else if (DECL_P (node))
4691     tc &= (staticp (node) != NULL_TREE);
4692   else
4693     {
4694       tc = false;
4695       se |= TREE_SIDE_EFFECTS (node);
4696     }
4697 
4698 
4699   TREE_CONSTANT (t) = tc;
4700   TREE_SIDE_EFFECTS (t) = se;
4701 #undef UPDATE_FLAGS
4702 }
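
/* Two illustrative cases (assuming 'g' is a file-scope array and 'i' a
   local variable): for &g[3] every offset is constant and the base is
   static, so TREE_CONSTANT is set on the ADDR_EXPR; for &g[i] the
   variable index clears the constant flag, and side effects in the
   index would likewise set TREE_SIDE_EFFECTS.  */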
4703 
4704 /* Build an expression of code CODE, data type TYPE, and operands as
4705    specified.  Expressions and reference nodes can be created this way.
4706    Constants, decls, types and misc nodes cannot be.
4707 
4708    We define 5 non-variadic functions, from 0 to 4 arguments.  This is
4709    enough for all extant tree codes.  */
4710 
4711 tree
4712 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4713 {
4714   tree t;
4715 
4716   gcc_assert (TREE_CODE_LENGTH (code) == 0);
4717 
4718   t = make_node (code PASS_MEM_STAT);
4719   TREE_TYPE (t) = tt;
4720 
4721   return t;
4722 }
4723 
4724 tree
4725 build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4726 {
4727   int length = sizeof (struct tree_exp);
4728   tree t;
4729 
4730   record_node_allocation_statistics (code, length);
4731 
4732   gcc_assert (TREE_CODE_LENGTH (code) == 1);
4733 
4734   t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4735 
4736   memset (t, 0, sizeof (struct tree_common));
4737 
4738   TREE_SET_CODE (t, code);
4739 
4740   TREE_TYPE (t) = type;
4741   SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4742   TREE_OPERAND (t, 0) = node;
4743   if (node && !TYPE_P (node))
4744     {
4745       TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4746       TREE_READONLY (t) = TREE_READONLY (node);
4747     }
4748 
4749   if (TREE_CODE_CLASS (code) == tcc_statement)
4750     {
4751       if (code != DEBUG_BEGIN_STMT)
4752 	TREE_SIDE_EFFECTS (t) = 1;
4753     }
4754   else switch (code)
4755     {
4756     case VA_ARG_EXPR:
4757       /* All of these have side-effects, no matter what their
4758 	 operands are.  */
4759       TREE_SIDE_EFFECTS (t) = 1;
4760       TREE_READONLY (t) = 0;
4761       break;
4762 
4763     case INDIRECT_REF:
4764       /* Whether a dereference is readonly has nothing to do with whether
4765 	 its operand is readonly.  */
4766       TREE_READONLY (t) = 0;
4767       break;
4768 
4769     case ADDR_EXPR:
4770       if (node)
4771 	recompute_tree_invariant_for_addr_expr (t);
4772       break;
4773 
4774     default:
4775       if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4776 	  && node && !TYPE_P (node)
4777 	  && TREE_CONSTANT (node))
4778 	TREE_CONSTANT (t) = 1;
4779       if (TREE_CODE_CLASS (code) == tcc_reference
4780 	  && node && TREE_THIS_VOLATILE (node))
4781 	TREE_THIS_VOLATILE (t) = 1;
4782       break;
4783     }
4784 
4785   return t;
4786 }
4787 
4788 #define PROCESS_ARG(N)				\
4789   do {						\
4790     TREE_OPERAND (t, N) = arg##N;		\
4791     if (arg##N &&!TYPE_P (arg##N))		\
4792       {						\
4793         if (TREE_SIDE_EFFECTS (arg##N))		\
4794 	  side_effects = 1;			\
4795         if (!TREE_READONLY (arg##N)		\
4796 	    && !CONSTANT_CLASS_P (arg##N))	\
4797 	  (void) (read_only = 0);		\
4798         if (!TREE_CONSTANT (arg##N))		\
4799 	  (void) (constant = 0);		\
4800       }						\
4801   } while (0)
4802 
4803 tree
4804 build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4805 {
4806   bool constant, read_only, side_effects, div_by_zero;
4807   tree t;
4808 
4809   gcc_assert (TREE_CODE_LENGTH (code) == 2);
4810 
4811   if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4812       && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4813       /* When sizetype precision doesn't match that of pointers
4814          we need to be able to build explicit extensions or truncations
4815 	 of the offset argument.  */
4816       && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4817     gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4818 		&& TREE_CODE (arg1) == INTEGER_CST);
4819 
4820   if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4821     gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4822 		&& ptrofftype_p (TREE_TYPE (arg1)));
4823 
4824   t = make_node (code PASS_MEM_STAT);
4825   TREE_TYPE (t) = tt;
4826 
4827   /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4828      result based on those same flags for the arguments.  But if the
4829      arguments aren't really even `tree' expressions, we shouldn't be trying
4830      to do this.  */
4831 
4832   /* Expressions without side effects may be constant if their
4833      arguments are as well.  */
4834   constant = (TREE_CODE_CLASS (code) == tcc_comparison
4835 	      || TREE_CODE_CLASS (code) == tcc_binary);
4836   read_only = 1;
4837   side_effects = TREE_SIDE_EFFECTS (t);
4838 
4839   switch (code)
4840     {
4841     case TRUNC_DIV_EXPR:
4842     case CEIL_DIV_EXPR:
4843     case FLOOR_DIV_EXPR:
4844     case ROUND_DIV_EXPR:
4845     case EXACT_DIV_EXPR:
4846     case CEIL_MOD_EXPR:
4847     case FLOOR_MOD_EXPR:
4848     case ROUND_MOD_EXPR:
4849     case TRUNC_MOD_EXPR:
4850       div_by_zero = integer_zerop (arg1);
4851       break;
4852     default:
4853       div_by_zero = false;
4854     }
4855 
4856   PROCESS_ARG (0);
4857   PROCESS_ARG (1);
4858 
4859   TREE_SIDE_EFFECTS (t) = side_effects;
4860   if (code == MEM_REF)
4861     {
4862       if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4863 	{
4864 	  tree o = TREE_OPERAND (arg0, 0);
4865 	  TREE_READONLY (t) = TREE_READONLY (o);
4866 	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4867 	}
4868     }
4869   else
4870     {
4871       TREE_READONLY (t) = read_only;
4872       /* Don't mark X / 0 as constant.  */
4873       TREE_CONSTANT (t) = constant && !div_by_zero;
4874       TREE_THIS_VOLATILE (t)
4875 	= (TREE_CODE_CLASS (code) == tcc_reference
4876 	   && arg0 && TREE_THIS_VOLATILE (arg0));
4877     }
4878 
4879   return t;
4880 }
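
/* A small usage sketch (x and y stand for operands already built with
   type integer_type_node):

     tree sum = build2 (PLUS_EXPR, integer_type_node, x, y);

   The result is marked TREE_CONSTANT when both operands are constant,
   except for the division and modulus codes with a literal zero
   divisor, which are deliberately left non-constant.  */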
4881 
4882 
4883 tree
4884 build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
4885 	tree arg2 MEM_STAT_DECL)
4886 {
4887   bool constant, read_only, side_effects;
4888   tree t;
4889 
4890   gcc_assert (TREE_CODE_LENGTH (code) == 3);
4891   gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4892 
4893   t = make_node (code PASS_MEM_STAT);
4894   TREE_TYPE (t) = tt;
4895 
4896   read_only = 1;
4897 
4898   /* As a special exception, if COND_EXPR has NULL branches, we
4899      assume that it is a gimple statement and always consider
4900      it to have side effects.  */
4901   if (code == COND_EXPR
4902       && tt == void_type_node
4903       && arg1 == NULL_TREE
4904       && arg2 == NULL_TREE)
4905     side_effects = true;
4906   else
4907     side_effects = TREE_SIDE_EFFECTS (t);
4908 
4909   PROCESS_ARG (0);
4910   PROCESS_ARG (1);
4911   PROCESS_ARG (2);
4912 
4913   if (code == COND_EXPR)
4914     TREE_READONLY (t) = read_only;
4915 
4916   TREE_SIDE_EFFECTS (t) = side_effects;
4917   TREE_THIS_VOLATILE (t)
4918     = (TREE_CODE_CLASS (code) == tcc_reference
4919        && arg0 && TREE_THIS_VOLATILE (arg0));
4920 
4921   return t;
4922 }
4923 
4924 tree
4925 build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
4926 	tree arg2, tree arg3 MEM_STAT_DECL)
4927 {
4928   bool constant, read_only, side_effects;
4929   tree t;
4930 
4931   gcc_assert (TREE_CODE_LENGTH (code) == 4);
4932 
4933   t = make_node (code PASS_MEM_STAT);
4934   TREE_TYPE (t) = tt;
4935 
4936   side_effects = TREE_SIDE_EFFECTS (t);
4937 
4938   PROCESS_ARG (0);
4939   PROCESS_ARG (1);
4940   PROCESS_ARG (2);
4941   PROCESS_ARG (3);
4942 
4943   TREE_SIDE_EFFECTS (t) = side_effects;
4944   TREE_THIS_VOLATILE (t)
4945     = (TREE_CODE_CLASS (code) == tcc_reference
4946        && arg0 && TREE_THIS_VOLATILE (arg0));
4947 
4948   return t;
4949 }
4950 
4951 tree
4952 build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
4953 	tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4954 {
4955   bool constant, read_only, side_effects;
4956   tree t;
4957 
4958   gcc_assert (TREE_CODE_LENGTH (code) == 5);
4959 
4960   t = make_node (code PASS_MEM_STAT);
4961   TREE_TYPE (t) = tt;
4962 
4963   side_effects = TREE_SIDE_EFFECTS (t);
4964 
4965   PROCESS_ARG (0);
4966   PROCESS_ARG (1);
4967   PROCESS_ARG (2);
4968   PROCESS_ARG (3);
4969   PROCESS_ARG (4);
4970 
4971   TREE_SIDE_EFFECTS (t) = side_effects;
4972   if (code == TARGET_MEM_REF)
4973     {
4974       if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4975 	{
4976 	  tree o = TREE_OPERAND (arg0, 0);
4977 	  TREE_READONLY (t) = TREE_READONLY (o);
4978 	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4979 	}
4980     }
4981   else
4982     TREE_THIS_VOLATILE (t)
4983       = (TREE_CODE_CLASS (code) == tcc_reference
4984 	 && arg0 && TREE_THIS_VOLATILE (arg0));
4985 
4986   return t;
4987 }
4988 
4989 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4990    on the pointer PTR.  */
4991 
4992 tree
4993 build_simple_mem_ref_loc (location_t loc, tree ptr)
4994 {
4995   poly_int64 offset = 0;
4996   tree ptype = TREE_TYPE (ptr);
4997   tree tem;
4998   /* For convenience allow addresses that collapse to a simple base
4999      and offset.  */
5000   if (TREE_CODE (ptr) == ADDR_EXPR
5001       && (handled_component_p (TREE_OPERAND (ptr, 0))
5002 	  || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
5003     {
5004       ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
5005       gcc_assert (ptr);
5006       if (TREE_CODE (ptr) == MEM_REF)
5007 	{
5008 	  offset += mem_ref_offset (ptr).force_shwi ();
5009 	  ptr = TREE_OPERAND (ptr, 0);
5010 	}
5011       else
5012 	ptr = build_fold_addr_expr (ptr);
5013       gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
5014     }
5015   tem = build2 (MEM_REF, TREE_TYPE (ptype),
5016 		ptr, build_int_cst (ptype, offset));
5017   SET_EXPR_LOCATION (tem, loc);
5018   return tem;
5019 }
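
/* For instance, if PTR is the ADDR_EXPR &s.f for a field f at a
   constant offset, the address collapses to the base &s plus that byte
   offset, and the result is

     MEM_REF <&s, offset-of-f>

   which reads the same memory as the INDIRECT_REF *(&s.f) would.  */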
5020 
5021 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T.  */
5022 
5023 poly_offset_int
5024 mem_ref_offset (const_tree t)
5025 {
5026   return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
5027 				SIGNED);
5028 }
5029 
5030 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
5031    offsetted by OFFSET units.  */
5032 
5033 tree
5034 build_invariant_address (tree type, tree base, poly_int64 offset)
5035 {
5036   tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
5037 			  build_fold_addr_expr (base),
5038 			  build_int_cst (ptr_type_node, offset));
5039   tree addr = build1 (ADDR_EXPR, type, ref);
5040   recompute_tree_invariant_for_addr_expr (addr);
5041   return addr;
5042 }
5043 
5044 /* Similar except don't specify the TREE_TYPE
5045    and leave the TREE_SIDE_EFFECTS as 0.
5046    It is permissible for arguments to be null,
5047    or even garbage if their values do not matter.  */
5048 
5049 tree
5050 build_nt (enum tree_code code, ...)
5051 {
5052   tree t;
5053   int length;
5054   int i;
5055   va_list p;
5056 
5057   gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5058 
5059   va_start (p, code);
5060 
5061   t = make_node (code);
5062   length = TREE_CODE_LENGTH (code);
5063 
5064   for (i = 0; i < length; i++)
5065     TREE_OPERAND (t, i) = va_arg (p, tree);
5066 
5067   va_end (p);
5068   return t;
5069 }
5070 
5071 /* Similar to build_nt, but for creating a CALL_EXPR object with a
5072    tree vec.  */
5073 
5074 tree
5075 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
5076 {
5077   tree ret, t;
5078   unsigned int ix;
5079 
5080   ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
5081   CALL_EXPR_FN (ret) = fn;
5082   CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
5083   FOR_EACH_VEC_SAFE_ELT (args, ix, t)
5084     CALL_EXPR_ARG (ret, ix) = t;
5085   return ret;
5086 }
5087 
5088 /* Create a DECL_... node of code CODE, name NAME  (if non-null)
5089    and data type TYPE.
5090    We do NOT enter this node in any sort of symbol table.
5091 
5092    LOC is the location of the decl.
5093 
5094    layout_decl is used to set up the decl's storage layout.
5095    Other slots are initialized to 0 or null pointers.  */
5096 
5097 tree
5098 build_decl (location_t loc, enum tree_code code, tree name,
5099     		 tree type MEM_STAT_DECL)
5100 {
5101   tree t;
5102 
5103   t = make_node (code PASS_MEM_STAT);
5104   DECL_SOURCE_LOCATION (t) = loc;
5105 
5106 /*  if (type == error_mark_node)
5107     type = integer_type_node; */
5108 /* That is not done, deliberately, so that having error_mark_node
5109    as the type can suppress useless errors in the use of this variable.  */
5110 
5111   DECL_NAME (t) = name;
5112   TREE_TYPE (t) = type;
5113 
5114   if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
5115     layout_decl (t, 0);
5116 
5117   return t;
5118 }
5119 
5120 /* Builds and returns a function declaration with NAME and TYPE.  */
5121 
5122 tree
5123 build_fn_decl (const char *name, tree type)
5124 {
5125   tree id = get_identifier (name);
5126   tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
5127 
5128   DECL_EXTERNAL (decl) = 1;
5129   TREE_PUBLIC (decl) = 1;
5130   DECL_ARTIFICIAL (decl) = 1;
5131   TREE_NOTHROW (decl) = 1;
5132 
5133   return decl;
5134 }
5135 
5136 vec<tree, va_gc> *all_translation_units;
5137 
5138 /* Builds a new translation-unit decl with name NAME, queues it in the
5139    global list of translation-unit decls and returns it.   */
5140 
5141 tree
5142 build_translation_unit_decl (tree name)
5143 {
5144   tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
5145 			name, NULL_TREE);
5146   TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
5147   vec_safe_push (all_translation_units, tu);
5148   return tu;
5149 }
5150 
5151 
5152 /* BLOCK nodes are used to represent the structure of binding contours
5153    and declarations, once those contours have been exited and their contents
5154    compiled.  This information is used for outputting debugging info.  */
5155 
5156 tree
5157 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5158 {
5159   tree block = make_node (BLOCK);
5160 
5161   BLOCK_VARS (block) = vars;
5162   BLOCK_SUBBLOCKS (block) = subblocks;
5163   BLOCK_SUPERCONTEXT (block) = supercontext;
5164   BLOCK_CHAIN (block) = chain;
5165   return block;
5166 }
5167 
5168 
5169 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5170 
5171    LOC is the location to use in tree T.  */
5172 
5173 void
5174 protected_set_expr_location (tree t, location_t loc)
5175 {
5176   if (CAN_HAVE_LOCATION_P (t))
5177     SET_EXPR_LOCATION (t, loc);
5178   else if (t && TREE_CODE (t) == STATEMENT_LIST)
5179     {
5180       t = expr_single (t);
5181       if (t && CAN_HAVE_LOCATION_P (t))
5182 	SET_EXPR_LOCATION (t, loc);
5183     }
5184 }
5185 
5186 /* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
5187    UNKNOWN_LOCATION.  */
5188 
5189 void
5190 protected_set_expr_location_if_unset (tree t, location_t loc)
5191 {
5192   t = expr_single (t);
5193   if (t && !EXPR_HAS_LOCATION (t))
5194     protected_set_expr_location (t, loc);
5195 }
5196 
5197 /* Data used when collecting DECLs and TYPEs for language data removal.  */
5198 
5199 class free_lang_data_d
5200 {
5201 public:
5202   free_lang_data_d () : decls (100), types (100) {}
5203 
5204   /* Worklist to avoid excessive recursion.  */
5205   auto_vec<tree> worklist;
5206 
5207   /* Set of traversed objects.  Used to avoid duplicate visits.  */
5208   hash_set<tree> pset;
5209 
5210   /* Array of symbols to process with free_lang_data_in_decl.  */
5211   auto_vec<tree> decls;
5212 
5213   /* Array of types to process with free_lang_data_in_type.  */
5214   auto_vec<tree> types;
5215 };
5216 
5217 
5218 /* Add type or decl T to one of the lists of tree nodes that need their
5219    language data removed.  The lists are held inside FLD.  */
5220 
5221 static void
5222 add_tree_to_fld_list (tree t, class free_lang_data_d *fld)
5223 {
5224   if (DECL_P (t))
5225     fld->decls.safe_push (t);
5226   else if (TYPE_P (t))
5227     fld->types.safe_push (t);
5228   else
5229     gcc_unreachable ();
5230 }
5231 
5232 /* Push tree node T into FLD->WORKLIST.  */
5233 
5234 static inline void
5235 fld_worklist_push (tree t, class free_lang_data_d *fld)
5236 {
5237   if (t && !is_lang_specific (t) && !fld->pset.contains (t))
5238     fld->worklist.safe_push ((t));
5239 }
5240 
5241 
5242 
5243 /* Return simplified TYPE_NAME of TYPE.  */
5244 
5245 static tree
5246 fld_simplified_type_name (tree type)
5247 {
5248   if (!TYPE_NAME (type) || TREE_CODE (TYPE_NAME (type)) != TYPE_DECL)
5249     return TYPE_NAME (type);
5250   /* Drop TYPE_DECLs in TYPE_NAME in favor of the identifier in the
5251      TYPE_DECL if the type doesn't have linkage.
5252      this must match fld_  */
5253   if (type != TYPE_MAIN_VARIANT (type)
5254       || (!DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type))
5255 	  && (TREE_CODE (type) != RECORD_TYPE
5256 	      || !TYPE_BINFO (type)
5257 	      || !BINFO_VTABLE (TYPE_BINFO (type)))))
5258     return DECL_NAME (TYPE_NAME (type));
5259   return TYPE_NAME (type);
5260 }
5261 
5262 /* Do the same comparison as check_qualified_type, skipping the lang part of
5263    the type, and be more permissive about type names: we only care that the
5264    names are the same (for diagnostics) and that ODR names are the same.
5265    If INNER_TYPE is non-NULL, be sure that TREE_TYPE matches it.  */
5266 
5267 static bool
5268 fld_type_variant_equal_p (tree t, tree v, tree inner_type)
5269 {
5270   if (TYPE_QUALS (t) != TYPE_QUALS (v)
5271       /* We want to match incomplete variants with complete types.
5272 	 In this case we need to ignore alignment.   */
5273       || ((!RECORD_OR_UNION_TYPE_P (t) || COMPLETE_TYPE_P (v))
5274 	  && (TYPE_ALIGN (t) != TYPE_ALIGN (v)
5275 	      || TYPE_USER_ALIGN (t) != TYPE_USER_ALIGN (v)))
5276       || fld_simplified_type_name (t) != fld_simplified_type_name (v)
5277       || !attribute_list_equal (TYPE_ATTRIBUTES (t),
5278 			        TYPE_ATTRIBUTES (v))
5279       || (inner_type && TREE_TYPE (v) != inner_type))
5280     return false;
5281 
5282   return true;
5283 }
5284 
5285 /* Find the variant of FIRST that matches T and create a new one if necessary.
5286    Set TREE_TYPE to INNER_TYPE if non-NULL.  */
5287 
5288 static tree
5289 fld_type_variant (tree first, tree t, class free_lang_data_d *fld,
5290 		  tree inner_type = NULL)
5291 {
5292   if (first == TYPE_MAIN_VARIANT (t))
5293     return t;
5294   for (tree v = first; v; v = TYPE_NEXT_VARIANT (v))
5295     if (fld_type_variant_equal_p (t, v, inner_type))
5296       return v;
5297   tree v = build_variant_type_copy (first);
5298   TYPE_READONLY (v) = TYPE_READONLY (t);
5299   TYPE_VOLATILE (v) = TYPE_VOLATILE (t);
5300   TYPE_ATOMIC (v) = TYPE_ATOMIC (t);
5301   TYPE_RESTRICT (v) = TYPE_RESTRICT (t);
5302   TYPE_ADDR_SPACE (v) = TYPE_ADDR_SPACE (t);
5303   TYPE_NAME (v) = TYPE_NAME (t);
5304   TYPE_ATTRIBUTES (v) = TYPE_ATTRIBUTES (t);
5305   TYPE_CANONICAL (v) = TYPE_CANONICAL (t);
5306   /* Variants of incomplete types should have alignment
5307      set to BITS_PER_UNIT.  Do not copy the actual alignment.  */
5308   if (!RECORD_OR_UNION_TYPE_P (v) || COMPLETE_TYPE_P (v))
5309     {
5310       SET_TYPE_ALIGN (v, TYPE_ALIGN (t));
5311       TYPE_USER_ALIGN (v) = TYPE_USER_ALIGN (t);
5312     }
5313   if (inner_type)
5314     TREE_TYPE (v) = inner_type;
5315   gcc_checking_assert (fld_type_variant_equal_p (t, v, inner_type));
5316   if (!fld->pset.add (v))
5317     add_tree_to_fld_list (v, fld);
5318   return v;
5319 }
5320 
5321 /* Map complete types to incomplete types.  */
5322 
5323 static hash_map<tree, tree> *fld_incomplete_types;
5324 
5325 /* Map types to simplified types.  */
5326 
5327 static hash_map<tree, tree> *fld_simplified_types;
5328 
5329 /* Produce a variant of T whose TREE_TYPE is T2.  If T is a main variant,
5330    use MAP to prevent duplicates.  */
5331 
5332 static tree
5333 fld_process_array_type (tree t, tree t2, hash_map<tree, tree> *map,
5334 			class free_lang_data_d *fld)
5335 {
5336   if (TREE_TYPE (t) == t2)
5337     return t;
5338 
5339   if (TYPE_MAIN_VARIANT (t) != t)
5340     {
5341       return fld_type_variant
5342 	       (fld_process_array_type (TYPE_MAIN_VARIANT (t),
5343 					TYPE_MAIN_VARIANT (t2), map, fld),
5344 		t, fld, t2);
5345     }
5346 
5347   bool existed;
5348   tree &array
5349      = map->get_or_insert (t, &existed);
5350   if (!existed)
5351     {
5352       array
5353 	= build_array_type_1 (t2, TYPE_DOMAIN (t), TYPE_TYPELESS_STORAGE (t),
5354 			      false, false);
5355       TYPE_CANONICAL (array) = TYPE_CANONICAL (t);
5356       if (!fld->pset.add (array))
5357 	add_tree_to_fld_list (array, fld);
5358     }
5359   return array;
5360 }
5361 
5362 /* Return CTX after removal of contexts that are not relevant.  */
5363 
5364 static tree
5365 fld_decl_context (tree ctx)
5366 {
5367   /* Variably modified types are needed for tree_is_indexable to decide
5368      whether the type needs to go to local or global section.
5369      This code is semi-broken but for now it is easiest to keep contexts
5370      as expected.  */
5371   if (ctx && TYPE_P (ctx)
5372       && !variably_modified_type_p (ctx, NULL_TREE))
5373      {
5374        while (ctx && TYPE_P (ctx))
5375 	 ctx = TYPE_CONTEXT (ctx);
5376      }
5377   return ctx;
5378 }
5379 
5380 /* For T being an aggregate type, try to turn it into an incomplete variant.
5381    Return T if no simplification is possible.  */
5382 
5383 static tree
5384 fld_incomplete_type_of (tree t, class free_lang_data_d *fld)
5385 {
5386   if (!t)
5387     return NULL;
5388   if (POINTER_TYPE_P (t))
5389     {
5390       tree t2 = fld_incomplete_type_of (TREE_TYPE (t), fld);
5391       if (t2 != TREE_TYPE (t))
5392 	{
5393 	  tree first;
5394 	  if (TREE_CODE (t) == POINTER_TYPE)
5395 	    first = build_pointer_type_for_mode (t2, TYPE_MODE (t),
5396 						TYPE_REF_CAN_ALIAS_ALL (t));
5397 	  else
5398 	    first = build_reference_type_for_mode (t2, TYPE_MODE (t),
5399 						TYPE_REF_CAN_ALIAS_ALL (t));
5400 	  gcc_assert (TYPE_CANONICAL (t2) != t2
5401 		      && TYPE_CANONICAL (t2) == TYPE_CANONICAL (TREE_TYPE (t)));
5402 	  if (!fld->pset.add (first))
5403 	    add_tree_to_fld_list (first, fld);
5404 	  return fld_type_variant (first, t, fld);
5405 	}
5406       return t;
5407     }
5408   if (TREE_CODE (t) == ARRAY_TYPE)
5409     return fld_process_array_type (t,
5410 				   fld_incomplete_type_of (TREE_TYPE (t), fld),
5411 				   fld_incomplete_types, fld);
5412   if ((!RECORD_OR_UNION_TYPE_P (t) && TREE_CODE (t) != ENUMERAL_TYPE)
5413       || !COMPLETE_TYPE_P (t))
5414     return t;
5415   if (TYPE_MAIN_VARIANT (t) == t)
5416     {
5417       bool existed;
5418       tree &copy
5419 	 = fld_incomplete_types->get_or_insert (t, &existed);
5420 
5421       if (!existed)
5422 	{
5423 	  copy = build_distinct_type_copy (t);
5424 
5425 	  /* It is possible that type was not seen by free_lang_data yet.  */
5426 	  if (!fld->pset.add (copy))
5427 	    add_tree_to_fld_list (copy, fld);
5428 	  TYPE_SIZE (copy) = NULL;
5429 	  TYPE_USER_ALIGN (copy) = 0;
5430 	  TYPE_SIZE_UNIT (copy) = NULL;
5431 	  TYPE_CANONICAL (copy) = TYPE_CANONICAL (t);
5432 	  TREE_ADDRESSABLE (copy) = 0;
5433 	  if (AGGREGATE_TYPE_P (t))
5434 	    {
5435 	      SET_TYPE_MODE (copy, VOIDmode);
5436 	      SET_TYPE_ALIGN (copy, BITS_PER_UNIT);
5437 	      TYPE_TYPELESS_STORAGE (copy) = 0;
5438 	      TYPE_FIELDS (copy) = NULL;
5439 	      TYPE_BINFO (copy) = NULL;
5440 	      TYPE_FINAL_P (copy) = 0;
5441 	      TYPE_EMPTY_P (copy) = 0;
5442 	    }
5443 	  else
5444 	    {
5445 	      TYPE_VALUES (copy) = NULL;
5446 	      ENUM_IS_OPAQUE (copy) = 0;
5447 	      ENUM_IS_SCOPED (copy) = 0;
5448 	    }
5449 
5450 	  /* Build copy of TYPE_DECL in TYPE_NAME if necessary.
5451 	     This is needed for ODR violation warnings to come out right (we
5452 	     want duplicate TYPE_DECLs whenever the type is duplicated because
5453 	     of ODR violation.  Because lang data in the TYPE_DECL may not
5454 	     have been freed yet, rebuild it from scratch and copy relevant
5455 	     fields.  */
5456 	  TYPE_NAME (copy) = fld_simplified_type_name (copy);
5457 	  tree name = TYPE_NAME (copy);
5458 
5459 	  if (name && TREE_CODE (name) == TYPE_DECL)
5460 	    {
5461 	      gcc_checking_assert (TREE_TYPE (name) == t);
5462 	      tree name2 = build_decl (DECL_SOURCE_LOCATION (name), TYPE_DECL,
5463 				       DECL_NAME (name), copy);
5464 	      if (DECL_ASSEMBLER_NAME_SET_P (name))
5465 	        SET_DECL_ASSEMBLER_NAME (name2, DECL_ASSEMBLER_NAME (name));
5466 	      SET_DECL_ALIGN (name2, 0);
5467 	      DECL_CONTEXT (name2) = fld_decl_context
5468 					 (DECL_CONTEXT (name));
5469 	      TYPE_NAME (copy) = name2;
5470 	    }
5471 	}
5472       return copy;
5473    }
5474   return (fld_type_variant
5475 	    (fld_incomplete_type_of (TYPE_MAIN_VARIANT (t), fld), t, fld));
5476 }
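
/* Illustrative note: for a hypothetical "struct S { int a; long b; };" and
   the pointer type "struct S *", fld_incomplete_type_of returns a pointer
   variant whose pointed-to RECORD_TYPE is a distinct copy of S with
   TYPE_SIZE, TYPE_FIELDS and TYPE_BINFO cleared, as done above, so
   declarations that only need "pointer to S" do not drag the full
   definition of S into the LTO stream.  */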
5477 
5478 /* Simplify type T for scenarios where we do not need complete pointer
5479    types.  */
5480 
5481 static tree
5482 fld_simplified_type (tree t, class free_lang_data_d *fld)
5483 {
5484   if (!t)
5485     return t;
5486   if (POINTER_TYPE_P (t))
5487     return fld_incomplete_type_of (t, fld);
5488   /* FIXME: This triggers verification error, see PR88140.  */
5489   if (TREE_CODE (t) == ARRAY_TYPE && 0)
5490     return fld_process_array_type (t, fld_simplified_type (TREE_TYPE (t), fld),
5491 				   fld_simplified_types, fld);
5492   return t;
5493 }
5494 
5495 /* Reset the expression *EXPR_P, a size or position.
5496 
5497    ??? We could reset all non-constant sizes or positions.  But it's cheap
5498    enough to not do so and refrain from adding workarounds to dwarf2out.c.
5499 
5500    We need to reset self-referential sizes or positions because they cannot
5501    be gimplified and thus can contain a CALL_EXPR after the gimplification
5502    is finished, which will run afoul of LTO streaming.  And they need to be
5503    reset to something essentially dummy but not constant, so as to preserve
5504    the properties of the object they are attached to.  */
5505 
5506 static inline void
5507 free_lang_data_in_one_sizepos (tree *expr_p)
5508 {
5509   tree expr = *expr_p;
5510   if (CONTAINS_PLACEHOLDER_P (expr))
5511     *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5512 }
5513 
5514 
5515 /* Reset all the fields in a binfo node BINFO.  We only keep
5516    BINFO_VTABLE, which is used by gimple_fold_obj_type_ref.  */
5517 
5518 static void
5519 free_lang_data_in_binfo (tree binfo)
5520 {
5521   unsigned i;
5522   tree t;
5523 
5524   gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5525 
5526   BINFO_VIRTUALS (binfo) = NULL_TREE;
5527   BINFO_BASE_ACCESSES (binfo) = NULL;
5528   BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5529   BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5530   BINFO_VPTR_FIELD (binfo) = NULL_TREE;
5531   TREE_PUBLIC (binfo) = 0;
5532 
5533   FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5534     free_lang_data_in_binfo (t);
5535 }
5536 
5537 
5538 /* Reset all language specific information still present in TYPE.  */
5539 
5540 static void
5541 free_lang_data_in_type (tree type, class free_lang_data_d *fld)
5542 {
5543   gcc_assert (TYPE_P (type));
5544 
5545   /* Give the FE a chance to remove its own data first.  */
5546   lang_hooks.free_lang_data (type);
5547 
5548   TREE_LANG_FLAG_0 (type) = 0;
5549   TREE_LANG_FLAG_1 (type) = 0;
5550   TREE_LANG_FLAG_2 (type) = 0;
5551   TREE_LANG_FLAG_3 (type) = 0;
5552   TREE_LANG_FLAG_4 (type) = 0;
5553   TREE_LANG_FLAG_5 (type) = 0;
5554   TREE_LANG_FLAG_6 (type) = 0;
5555 
5556   TYPE_NEEDS_CONSTRUCTING (type) = 0;
5557 
5558   /* Purge non-marked variants from the variants chain, so that they
5559      don't reappear in the IL after free_lang_data.  */
5560   while (TYPE_NEXT_VARIANT (type)
5561 	 && !fld->pset.contains (TYPE_NEXT_VARIANT (type)))
5562     {
5563       tree t = TYPE_NEXT_VARIANT (type);
5564       TYPE_NEXT_VARIANT (type) = TYPE_NEXT_VARIANT (t);
5565       /* Turn the removed types into distinct types.  */
5566       TYPE_MAIN_VARIANT (t) = t;
5567       TYPE_NEXT_VARIANT (t) = NULL_TREE;
5568     }
5569 
5570   if (TREE_CODE (type) == FUNCTION_TYPE)
5571     {
5572       TREE_TYPE (type) = fld_simplified_type (TREE_TYPE (type), fld);
5573       /* Remove the const and volatile qualifiers from arguments.  The
5574 	 C++ front end removes them, but the C front end does not,
5575 	 leading to false ODR violation errors when merging two
5576 	 instances of the same function signature compiled by
5577 	 different front ends.  */
5578       for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5579 	{
5580           TREE_VALUE (p) = fld_simplified_type (TREE_VALUE (p), fld);
5581 	  tree arg_type = TREE_VALUE (p);
5582 
5583 	  if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5584 	    {
5585 	      int quals = TYPE_QUALS (arg_type)
5586 			  & ~TYPE_QUAL_CONST
5587 			  & ~TYPE_QUAL_VOLATILE;
5588 	      TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5589 	      if (!fld->pset.add (TREE_VALUE (p)))
5590 		free_lang_data_in_type (TREE_VALUE (p), fld);
5591 	    }
5592 	  /* C++ FE uses TREE_PURPOSE to store initial values.  */
5593 	  TREE_PURPOSE (p) = NULL;
5594 	}
5595     }
5596   else if (TREE_CODE (type) == METHOD_TYPE)
5597     {
5598       TREE_TYPE (type) = fld_simplified_type (TREE_TYPE (type), fld);
5599       for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5600 	{
5601 	  /* C++ FE uses TREE_PURPOSE to store initial values.  */
5602 	  TREE_VALUE (p) = fld_simplified_type (TREE_VALUE (p), fld);
5603 	  TREE_PURPOSE (p) = NULL;
5604 	}
5605     }
5606   else if (RECORD_OR_UNION_TYPE_P (type))
5607     {
5608       /* Remove members that are not FIELD_DECLs from the field list
5609 	 of an aggregate.  These occur in C++.  */
5610       for (tree *prev = &TYPE_FIELDS (type), member; (member = *prev);)
5611 	if (TREE_CODE (member) == FIELD_DECL)
5612 	  prev = &DECL_CHAIN (member);
5613 	else
5614 	  *prev = DECL_CHAIN (member);
5615 
5616       TYPE_VFIELD (type) = NULL_TREE;
5617 
5618       if (TYPE_BINFO (type))
5619 	{
5620 	  free_lang_data_in_binfo (TYPE_BINFO (type));
5621 	  /* We need to preserve the link to bases and the virtual table for all
5622 	     polymorphic types to make the devirtualization machinery work.  */
5623 	  if (!BINFO_VTABLE (TYPE_BINFO (type)))
5624 	    TYPE_BINFO (type) = NULL;
5625 	}
5626     }
5627   else if (INTEGRAL_TYPE_P (type)
5628 	   || SCALAR_FLOAT_TYPE_P (type)
5629 	   || FIXED_POINT_TYPE_P (type))
5630     {
5631       if (TREE_CODE (type) == ENUMERAL_TYPE)
5632 	{
5633 	  ENUM_IS_OPAQUE (type) = 0;
5634 	  ENUM_IS_SCOPED (type) = 0;
5635 	  /* Type values are used only for C++ ODR checking.  Drop them
5636 	     for all type variants and non-ODR types.
5637 	     For ODR types the data is freed in free_odr_warning_data.  */
5638 	  if (!TYPE_VALUES (type))
5639 	    ;
5640 	  else if (TYPE_MAIN_VARIANT (type) != type
5641 		   || !type_with_linkage_p (type)
5642 		   || type_in_anonymous_namespace_p (type))
5643 	    TYPE_VALUES (type) = NULL;
5644 	  else
5645 	    register_odr_enum (type);
5646 	}
5647       free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5648       free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5649     }
5650 
5651   TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5652 
5653   free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5654   free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5655 
5656   if (TYPE_CONTEXT (type)
5657       && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5658     {
5659       tree ctx = TYPE_CONTEXT (type);
5660       do
5661 	{
5662 	  ctx = BLOCK_SUPERCONTEXT (ctx);
5663 	}
5664       while (ctx && TREE_CODE (ctx) == BLOCK);
5665       TYPE_CONTEXT (type) = ctx;
5666     }
5667 
5668   TYPE_STUB_DECL (type) = NULL;
5669   TYPE_NAME (type) = fld_simplified_type_name (type);
5670 }
5671 
5672 
5673 /* Return true if DECL may need an assembler name to be set.  */
5674 
5675 static inline bool
5676 need_assembler_name_p (tree decl)
5677 {
5678   /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5679      Rule merging.  This makes type_odr_p return true on those types during
5680      LTO, and by comparing the mangled names we can tell which types are
5681      intended to be equivalent across compilation units.
5682 
5683      We do not store names of type_in_anonymous_namespace_p.
5684 
5685      Record, union and enumeration types have linkage that allows us
5686      to check type_in_anonymous_namespace_p.  We do not mangle compound types
5687      that can always be compared structurally.
5688 
5689      Similarly for builtin types, we compare properties of their main variant.
5690      A special case are integer types, where mangling does distinguish
5691      char/signed char/unsigned char etc.  Storing names for these allows
5692      e.g. -fno-signed-char/-fsigned-char mismatches to be handled well.
5693      See cp/mangle.c:write_builtin_type for details.  */
5694 
5695   if (TREE_CODE (decl) == TYPE_DECL)
5696     {
5697       if (DECL_NAME (decl)
5698 	  && decl == TYPE_NAME (TREE_TYPE (decl))
5699 	  && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
5700 	  && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5701 	  && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
5702 	       && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
5703 	      || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
5704 	  && (type_with_linkage_p (TREE_TYPE (decl))
5705 	      || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5706 	  && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5707 	return !DECL_ASSEMBLER_NAME_SET_P (decl);
5708       return false;
5709     }
5710   /* Only FUNCTION_DECLs and VAR_DECLs are considered.  */
5711   if (!VAR_OR_FUNCTION_DECL_P (decl))
5712     return false;
5713 
5714   /* If DECL already has its assembler name set, it does not need a
5715      new one.  */
5716   if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5717       || DECL_ASSEMBLER_NAME_SET_P (decl))
5718     return false;
5719 
5720   /* Abstract decls do not need an assembler name.  */
5721   if (DECL_ABSTRACT_P (decl))
5722     return false;
5723 
5724   /* For VAR_DECLs, only static, public and external symbols need an
5725      assembler name.  */
5726   if (VAR_P (decl)
5727       && !TREE_STATIC (decl)
5728       && !TREE_PUBLIC (decl)
5729       && !DECL_EXTERNAL (decl))
5730     return false;
5731 
5732   if (TREE_CODE (decl) == FUNCTION_DECL)
5733     {
5734       /* Do not set assembler name on builtins.  Allow RTL expansion to
5735 	 decide whether to expand inline or via a regular call.  */
5736       if (fndecl_built_in_p (decl)
5737 	  && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5738 	return false;
5739 
5740       /* Functions represented in the callgraph need an assembler name.  */
5741       if (cgraph_node::get (decl) != NULL)
5742 	return true;
5743 
5744       /* Unused and not public functions don't need an assembler name.  */
5745       if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5746 	return false;
5747     }
5748 
5749   return true;
5750 }
5751 
5752 
5753 /* Reset all language specific information still present in symbol
5754    DECL.  */
5755 
5756 static void
5757 free_lang_data_in_decl (tree decl, class free_lang_data_d *fld)
5758 {
5759   gcc_assert (DECL_P (decl));
5760 
5761   /* Give the FE a chance to remove its own data first.  */
5762   lang_hooks.free_lang_data (decl);
5763 
5764   TREE_LANG_FLAG_0 (decl) = 0;
5765   TREE_LANG_FLAG_1 (decl) = 0;
5766   TREE_LANG_FLAG_2 (decl) = 0;
5767   TREE_LANG_FLAG_3 (decl) = 0;
5768   TREE_LANG_FLAG_4 (decl) = 0;
5769   TREE_LANG_FLAG_5 (decl) = 0;
5770   TREE_LANG_FLAG_6 (decl) = 0;
5771 
5772   free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5773   free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5774   if (TREE_CODE (decl) == FIELD_DECL)
5775     {
5776       DECL_FCONTEXT (decl) = NULL;
5777       free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5778       if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5779 	DECL_QUALIFIER (decl) = NULL_TREE;
5780     }
5781 
5782  if (TREE_CODE (decl) == FUNCTION_DECL)
5783     {
5784       struct cgraph_node *node;
5785       /* Frontends do not set TREE_ADDRESSABLE on public variables even though
5786 	 the address may be taken in another unit, so this flag has no practical
5787 	 use for the middle end.
5788 
5789 	 It would make more sense if frontends set TREE_ADDRESSABLE to 0 only
5790 	 for public objects that indeed cannot be addressed, but that is not
5791 	 the case.  Set the flag to true so we do not get merge failures for
5792 	 e.g. virtual tables between units that take their address and
5793 	 units that don't.  */
5794       if (TREE_PUBLIC (decl))
5795 	TREE_ADDRESSABLE (decl) = true;
5796       TREE_TYPE (decl) = fld_simplified_type (TREE_TYPE (decl), fld);
5797       if (!(node = cgraph_node::get (decl))
5798 	  || (!node->definition && !node->clones))
5799 	{
5800 	  if (node)
5801 	    node->release_body ();
5802 	  else
5803 	    {
5804 	      release_function_body (decl);
5805 	      DECL_ARGUMENTS (decl) = NULL;
5806 	      DECL_RESULT (decl) = NULL;
5807 	      DECL_INITIAL (decl) = error_mark_node;
5808 	    }
5809 	}
5810       if (gimple_has_body_p (decl) || (node && node->thunk.thunk_p))
5811 	{
5812 	  tree t;
5813 
5814 	  /* If DECL has a gimple body, then the context for its
5815 	     arguments must be DECL.  Otherwise, it doesn't really
5816 	     matter, as we will not be emitting any code for DECL.  In
5817 	     general, there may be other instances of DECL created by
5818 	     the front end and since PARM_DECLs are generally shared,
5819 	     their DECL_CONTEXT changes as the replicas of DECL are
5820 	     created.  The only time where DECL_CONTEXT is important
5821 	     is for the FUNCTION_DECLs that have a gimple body (since
5822 	     the PARM_DECL will be used in the function's body).  */
5823 	  for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5824 	    DECL_CONTEXT (t) = decl;
5825 	  if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5826 	    DECL_FUNCTION_SPECIFIC_TARGET (decl)
5827 	      = target_option_default_node;
5828 	  if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5829 	    DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5830 	      = optimization_default_node;
5831 	}
5832 
5833       /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5834 	 At this point, it is not needed anymore.  */
5835       DECL_SAVED_TREE (decl) = NULL_TREE;
5836 
5837       /* Clear the abstract origin if it refers to a method.
5838          Otherwise dwarf2out.c will ICE as we splice functions out of
5839          TYPE_FIELDS and thus the origin will not be output
5840          correctly.  */
5841       if (DECL_ABSTRACT_ORIGIN (decl)
5842 	  && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5843 	  && RECORD_OR_UNION_TYPE_P
5844 	       (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5845 	DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5846 
5847       DECL_VINDEX (decl) = NULL_TREE;
5848     }
5849   else if (VAR_P (decl))
5850     {
5851       /* See comment above why we set the flag for functions.  */
5852       if (TREE_PUBLIC (decl))
5853 	TREE_ADDRESSABLE (decl) = true;
5854       if ((DECL_EXTERNAL (decl)
5855 	   && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5856 	  || (decl_function_context (decl) && !TREE_STATIC (decl)))
5857 	DECL_INITIAL (decl) = NULL_TREE;
5858     }
5859   else if (TREE_CODE (decl) == TYPE_DECL)
5860     {
5861       DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5862       DECL_VISIBILITY_SPECIFIED (decl) = 0;
5863       TREE_PUBLIC (decl) = 0;
5864       TREE_PRIVATE (decl) = 0;
5865       DECL_ARTIFICIAL (decl) = 0;
5866       TYPE_DECL_SUPPRESS_DEBUG (decl) = 0;
5867       DECL_INITIAL (decl) = NULL_TREE;
5868       DECL_ORIGINAL_TYPE (decl) = NULL_TREE;
5869       DECL_MODE (decl) = VOIDmode;
5870       SET_DECL_ALIGN (decl, 0);
5871       /* TREE_TYPE is cleared at WPA time in free_odr_warning_data.  */
5872     }
5873   else if (TREE_CODE (decl) == FIELD_DECL)
5874     {
5875       TREE_TYPE (decl) = fld_simplified_type (TREE_TYPE (decl), fld);
5876       DECL_INITIAL (decl) = NULL_TREE;
5877     }
5878   else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5879            && DECL_INITIAL (decl)
5880            && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5881     {
5882       /* Strip builtins from the translation-unit BLOCK.  We still have targets
5883 	 without builtin_decl_explicit support and also builtins are shared
5884 	 nodes and thus we can't use TREE_CHAIN in multiple lists.  */
5885       tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5886       while (*nextp)
5887 	{
5888 	  tree var = *nextp;
5889 	  if (TREE_CODE (var) == FUNCTION_DECL
5890 	      && fndecl_built_in_p (var))
5891 	    *nextp = TREE_CHAIN (var);
5892 	  else
5893 	    nextp = &TREE_CHAIN (var);
5894         }
5895     }
5896   /* We need to keep field decls associated with their trees. Otherwise tree
5897      merging may merge some fields and keep others disjoint, which in turn
5898      will not do well with TREE_CHAIN pointers linking them.
5899 
5900      Also do not drop containing types for virtual methods and tables because
5901      these are needed by devirtualization.
5902      C++ destructors are special because C++ frontends sometimes produce a
5903      virtual destructor as an alias of a non-virtual destructor.  In
5904      devirtualization code we always walk through aliases and we need the
5905      context to be preserved too.  See PR89335.  */
5906   if (TREE_CODE (decl) != FIELD_DECL
5907       && ((TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL)
5908           || (!DECL_VIRTUAL_P (decl)
5909 	      && (TREE_CODE (decl) != FUNCTION_DECL
5910 		  || !DECL_CXX_DESTRUCTOR_P (decl)))))
5911     DECL_CONTEXT (decl) = fld_decl_context (DECL_CONTEXT (decl));
5912 }
5913 
5914 
5915 /* Operand callback helper for free_lang_data_in_node.  *TP is the
5916    subtree operand being considered.  */
5917 
5918 static tree
5919 find_decls_types_r (tree *tp, int *ws, void *data)
5920 {
5921   tree t = *tp;
5922   class free_lang_data_d *fld = (class free_lang_data_d *) data;
5923 
5924   if (TREE_CODE (t) == TREE_LIST)
5925     return NULL_TREE;
5926 
5927   /* Language specific nodes will be removed, so there is no need
5928      to gather anything under them.  */
5929   if (is_lang_specific (t))
5930     {
5931       *ws = 0;
5932       return NULL_TREE;
5933     }
5934 
5935   if (DECL_P (t))
5936     {
5937       /* Note that walk_tree does not traverse every possible field in
5938 	 decls, so we have to do our own traversals here.  */
5939       add_tree_to_fld_list (t, fld);
5940 
5941       fld_worklist_push (DECL_NAME (t), fld);
5942       fld_worklist_push (DECL_CONTEXT (t), fld);
5943       fld_worklist_push (DECL_SIZE (t), fld);
5944       fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5945 
5946       /* We are going to remove everything under DECL_INITIAL for
5947 	 TYPE_DECLs.  No point walking them.  */
5948       if (TREE_CODE (t) != TYPE_DECL)
5949 	fld_worklist_push (DECL_INITIAL (t), fld);
5950 
5951       fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5952       fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5953 
5954       if (TREE_CODE (t) == FUNCTION_DECL)
5955 	{
5956 	  fld_worklist_push (DECL_ARGUMENTS (t), fld);
5957 	  fld_worklist_push (DECL_RESULT (t), fld);
5958 	}
5959       else if (TREE_CODE (t) == FIELD_DECL)
5960 	{
5961 	  fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5962 	  fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5963 	  fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5964 	  fld_worklist_push (DECL_FCONTEXT (t), fld);
5965 	}
5966 
5967       if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL)
5968 	  && DECL_HAS_VALUE_EXPR_P (t))
5969 	fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5970 
5971       if (TREE_CODE (t) != FIELD_DECL
5972 	  && TREE_CODE (t) != TYPE_DECL)
5973 	fld_worklist_push (TREE_CHAIN (t), fld);
5974       *ws = 0;
5975     }
5976   else if (TYPE_P (t))
5977     {
5978       /* Note that walk_tree does not traverse every possible field in
5979 	 types, so we have to do our own traversals here.  */
5980       add_tree_to_fld_list (t, fld);
5981 
5982       if (!RECORD_OR_UNION_TYPE_P (t))
5983 	fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5984       fld_worklist_push (TYPE_SIZE (t), fld);
5985       fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5986       fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5987       fld_worklist_push (TYPE_POINTER_TO (t), fld);
5988       fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5989       fld_worklist_push (TYPE_NAME (t), fld);
5990       /* While we do not stream TYPE_POINTER_TO and TYPE_REFERENCE_TO
5991 	 lists, we may look types up in these lists and use them while
5992 	 optimizing the function body.  Thus we need to free lang data
5993 	 in them.  */
5994       if (TREE_CODE (t) == POINTER_TYPE)
5995         fld_worklist_push (TYPE_NEXT_PTR_TO (t), fld);
5996       if (TREE_CODE (t) == REFERENCE_TYPE)
5997         fld_worklist_push (TYPE_NEXT_REF_TO (t), fld);
5998       if (!POINTER_TYPE_P (t))
5999 	fld_worklist_push (TYPE_MIN_VALUE_RAW (t), fld);
6000       /* TYPE_MAX_VALUE_RAW is TYPE_BINFO for record types.  */
6001       if (!RECORD_OR_UNION_TYPE_P (t))
6002 	fld_worklist_push (TYPE_MAX_VALUE_RAW (t), fld);
6003       fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
6004       /* Do not walk TYPE_NEXT_VARIANT.  We do not stream it and thus
6005          do not and do not want to reach unused variants this way.  */
6006       if (TYPE_CONTEXT (t))
6007 	{
6008 	  tree ctx = TYPE_CONTEXT (t);
6009 	  /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
6010 	     So push that instead.  */
6011 	  while (ctx && TREE_CODE (ctx) == BLOCK)
6012 	    ctx = BLOCK_SUPERCONTEXT (ctx);
6013 	  fld_worklist_push (ctx, fld);
6014 	}
6015       fld_worklist_push (TYPE_CANONICAL (t), fld);
6016 
6017       if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
6018 	{
6019 	  unsigned i;
6020 	  tree tem;
6021 	  FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
6022 	    fld_worklist_push (TREE_TYPE (tem), fld);
6023 	  fld_worklist_push (BINFO_TYPE (TYPE_BINFO (t)), fld);
6024 	  fld_worklist_push (BINFO_VTABLE (TYPE_BINFO (t)), fld);
6025 	}
6026       if (RECORD_OR_UNION_TYPE_P (t))
6027 	{
6028 	  tree tem;
6029 	  /* Push all TYPE_FIELDS - there can be interleaved interesting
6030 	     and non-interesting things.  */
6031 	  tem = TYPE_FIELDS (t);
6032 	  while (tem)
6033 	    {
6034 	      if (TREE_CODE (tem) == FIELD_DECL)
6035 		fld_worklist_push (tem, fld);
6036 	      tem = TREE_CHAIN (tem);
6037 	    }
6038 	}
6039       if (FUNC_OR_METHOD_TYPE_P (t))
6040 	fld_worklist_push (TYPE_METHOD_BASETYPE (t), fld);
6041 
6042       fld_worklist_push (TYPE_STUB_DECL (t), fld);
6043       *ws = 0;
6044     }
6045   else if (TREE_CODE (t) == BLOCK)
6046     {
6047       for (tree *tem = &BLOCK_VARS (t); *tem; )
6048 	{
6049 	  if (TREE_CODE (*tem) != LABEL_DECL
6050 	      && (TREE_CODE (*tem) != VAR_DECL
6051 		  || !auto_var_in_fn_p (*tem, DECL_CONTEXT (*tem))))
6052 	    {
6053 	      gcc_assert (TREE_CODE (*tem) != RESULT_DECL
6054 			  && TREE_CODE (*tem) != PARM_DECL);
6055 	      *tem = TREE_CHAIN (*tem);
6056 	    }
6057 	  else
6058 	    {
6059 	      fld_worklist_push (*tem, fld);
6060 	      tem = &TREE_CHAIN (*tem);
6061 	    }
6062 	}
6063       for (tree tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
6064 	fld_worklist_push (tem, fld);
6065       fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
6066     }
6067 
6068   if (TREE_CODE (t) != IDENTIFIER_NODE
6069       && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
6070     fld_worklist_push (TREE_TYPE (t), fld);
6071 
6072   return NULL_TREE;
6073 }
6074 
6075 
6076 /* Find decls and types in T.  */
6077 
6078 static void
6079 find_decls_types (tree t, class free_lang_data_d *fld)
6080 {
6081   while (1)
6082     {
6083       if (!fld->pset.contains (t))
6084 	walk_tree (&t, find_decls_types_r, fld, &fld->pset);
6085       if (fld->worklist.is_empty ())
6086 	break;
6087       t = fld->worklist.pop ();
6088     }
6089 }
6090 
6091 /* Translate all the types in LIST with the corresponding runtime
6092    types.  */
6093 
6094 static tree
6095 get_eh_types_for_runtime (tree list)
6096 {
6097   tree head, prev;
6098 
6099   if (list == NULL_TREE)
6100     return NULL_TREE;
6101 
6102   head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
6103   prev = head;
6104   list = TREE_CHAIN (list);
6105   while (list)
6106     {
6107       tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
6108       TREE_CHAIN (prev) = n;
6109       prev = TREE_CHAIN (prev);
6110       list = TREE_CHAIN (list);
6111     }
6112 
6113   return head;
6114 }
6115 
6116 
6117 /* Find decls and types referenced in EH region R and store them in
6118    FLD->DECLS and FLD->TYPES.  */
6119 
6120 static void
6121 find_decls_types_in_eh_region (eh_region r, class free_lang_data_d *fld)
6122 {
6123   switch (r->type)
6124     {
6125     case ERT_CLEANUP:
6126       break;
6127 
6128     case ERT_TRY:
6129       {
6130 	eh_catch c;
6131 
6132 	/* The types referenced in each catch must first be changed to the
6133 	   EH types used at runtime.  This removes references to FE types
6134 	   in the region.  */
6135 	for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
6136 	  {
6137 	    c->type_list = get_eh_types_for_runtime (c->type_list);
6138 	    walk_tree (&c->type_list, find_decls_types_r, fld, &fld->pset);
6139 	  }
6140       }
6141       break;
6142 
6143     case ERT_ALLOWED_EXCEPTIONS:
6144       r->u.allowed.type_list
6145 	= get_eh_types_for_runtime (r->u.allowed.type_list);
6146       walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, &fld->pset);
6147       break;
6148 
6149     case ERT_MUST_NOT_THROW:
6150       walk_tree (&r->u.must_not_throw.failure_decl,
6151 		 find_decls_types_r, fld, &fld->pset);
6152       break;
6153     }
6154 }
6155 
6156 
6157 /* Find decls and types referenced in cgraph node N and store them in
6158    FLD->DECLS and FLD->TYPES.  Unlike pass_referenced_vars, this will
6159    look for *every* kind of DECL and TYPE node reachable from N,
6160    including those embedded inside types and decls (i.e., TYPE_DECLs,
6161    NAMESPACE_DECLs, etc).  */
6162 
6163 static void
6164 find_decls_types_in_node (struct cgraph_node *n, class free_lang_data_d *fld)
6165 {
6166   basic_block bb;
6167   struct function *fn;
6168   unsigned ix;
6169   tree t;
6170 
6171   find_decls_types (n->decl, fld);
6172 
6173   if (!gimple_has_body_p (n->decl))
6174     return;
6175 
6176   gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
6177 
6178   fn = DECL_STRUCT_FUNCTION (n->decl);
6179 
6180   /* Traverse locals. */
6181   FOR_EACH_LOCAL_DECL (fn, ix, t)
6182     find_decls_types (t, fld);
6183 
6184   /* Traverse EH regions in FN.  */
6185   {
6186     eh_region r;
6187     FOR_ALL_EH_REGION_FN (r, fn)
6188       find_decls_types_in_eh_region (r, fld);
6189   }
6190 
6191   /* Traverse every statement in FN.  */
6192   FOR_EACH_BB_FN (bb, fn)
6193     {
6194       gphi_iterator psi;
6195       gimple_stmt_iterator si;
6196       unsigned i;
6197 
6198       for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
6199 	{
6200 	  gphi *phi = psi.phi ();
6201 
6202 	  for (i = 0; i < gimple_phi_num_args (phi); i++)
6203 	    {
6204 	      tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
6205 	      find_decls_types (*arg_p, fld);
6206 	    }
6207 	}
6208 
6209       for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6210 	{
6211 	  gimple *stmt = gsi_stmt (si);
6212 
6213 	  if (is_gimple_call (stmt))
6214 	    find_decls_types (gimple_call_fntype (stmt), fld);
6215 
6216 	  for (i = 0; i < gimple_num_ops (stmt); i++)
6217 	    {
6218 	      tree arg = gimple_op (stmt, i);
6219 	      find_decls_types (arg, fld);
6220 	      /* find_decls_types doesn't walk TREE_PURPOSE of TREE_LISTs,
6221 		 which we need for asm stmts.  */
6222 	      if (arg
6223 		  && TREE_CODE (arg) == TREE_LIST
6224 		  && TREE_PURPOSE (arg)
6225 		  && gimple_code (stmt) == GIMPLE_ASM)
6226 		find_decls_types (TREE_PURPOSE (arg), fld);
6227 	    }
6228 	}
6229     }
6230 }
6231 
6232 
6233 /* Find decls and types referenced in varpool node N and store them in
6234    FLD->DECLS and FLD->TYPES.  Unlike pass_referenced_vars, this will
6235    look for *every* kind of DECL and TYPE node reachable from N,
6236    including those embedded inside types and decls (i.e., TYPE_DECLs,
6237    NAMESPACE_DECLs, etc).  */
6238 
6239 static void
6240 find_decls_types_in_var (varpool_node *v, class free_lang_data_d *fld)
6241 {
6242   find_decls_types (v->decl, fld);
6243 }
6244 
6245 /* If T needs an assembler name, have one created for it.  */
6246 
6247 void
6248 assign_assembler_name_if_needed (tree t)
6249 {
6250   if (need_assembler_name_p (t))
6251     {
6252       /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
6253 	 diagnostics that use input_location to show locus
6254 	 information.  The problem here is that, at this point,
6255 	 input_location is generally anchored to the end of the file
6256 	 (since the parser is long gone), so we don't have a good
6257 	 position to pin it to.
6258 
6259 	 To alleviate this problem, this uses the location of T's
6260 	 declaration.  Examples of this are
6261 	 testsuite/g++.dg/template/cond2.C and
6262 	 testsuite/g++.dg/template/pr35240.C.  */
6263       location_t saved_location = input_location;
6264       input_location = DECL_SOURCE_LOCATION (t);
6265 
6266       decl_assembler_name (t);
6267 
6268       input_location = saved_location;
6269     }
6270 }
6271 
6272 
6273 /* Free language specific information for every operand and expression
6274    in every node of the call graph.  This process operates in three stages:
6275 
6276    1- Every callgraph node and varpool node is traversed looking for
6277       decls and types embedded in them.  This is a more exhaustive
6278       search than that done by find_referenced_vars, because it will
6279       also collect individual fields, decls embedded in types, etc.
6280 
6281    2- All the decls found are sent to free_lang_data_in_decl.
6282 
6283    3- All the types found are sent to free_lang_data_in_type.
6284 
6285    The ordering between decls and types is important because
6286    free_lang_data_in_decl sets assembler names, which includes
6287    mangling.  So types cannot be freed up until assembler names have
6288    been set up.  */
6289 
6290 static void
6291 free_lang_data_in_cgraph (class free_lang_data_d *fld)
6292 {
6293   struct cgraph_node *n;
6294   varpool_node *v;
6295   tree t;
6296   unsigned i;
6297   alias_pair *p;
6298 
6299   /* Find decls and types in the body of every function in the callgraph.  */
6300   FOR_EACH_FUNCTION (n)
6301     find_decls_types_in_node (n, fld);
6302 
6303   FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
6304     find_decls_types (p->decl, fld);
6305 
6306   /* Find decls and types in every varpool symbol.  */
6307   FOR_EACH_VARIABLE (v)
6308     find_decls_types_in_var (v, fld);
6309 
6310   /* Set the assembler name on every decl found.  We need to do this
6311      now because free_lang_data_in_decl will invalidate data needed
6312      for mangling.  This breaks mangling on interdependent decls.  */
6313   FOR_EACH_VEC_ELT (fld->decls, i, t)
6314     assign_assembler_name_if_needed (t);
6315 
6316   /* Traverse every decl found freeing its language data.  */
6317   FOR_EACH_VEC_ELT (fld->decls, i, t)
6318     free_lang_data_in_decl (t, fld);
6319 
6320   /* Traverse every type found freeing its language data.  */
6321   FOR_EACH_VEC_ELT (fld->types, i, t)
6322     free_lang_data_in_type (t, fld);
6323 }
6324 
6325 
6326 /* Free resources that are used by the FE but are no longer needed once it is done.  */
6327 
6328 static unsigned
6329 free_lang_data (void)
6330 {
6331   unsigned i;
6332   class free_lang_data_d fld;
6333 
6334   /* If we are the LTO frontend we have freed lang-specific data already.  */
6335   if (in_lto_p
6336       || (!flag_generate_lto && !flag_generate_offload))
6337     {
6338       /* Rebuild type inheritance graph even when not doing LTO to get
6339 	 consistent profile data.  */
6340       rebuild_type_inheritance_graph ();
6341       return 0;
6342     }
6343 
6344   fld_incomplete_types = new hash_map<tree, tree>;
6345   fld_simplified_types = new hash_map<tree, tree>;
6346 
6347   /* Provide a dummy TRANSLATION_UNIT_DECL if the FE failed to provide one.  */
6348   if (vec_safe_is_empty (all_translation_units))
6349     build_translation_unit_decl (NULL_TREE);
6350 
6351   /* Allocate and assign alias sets to the standard integer types
6352      while the slots are still in the way the frontends generated them.  */
6353   for (i = 0; i < itk_none; ++i)
6354     if (integer_types[i])
6355       TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
6356 
6357   /* Traverse the IL resetting language specific information for
6358      operands, expressions, etc.  */
6359   free_lang_data_in_cgraph (&fld);
6360 
6361   /* Create gimple variants for common types.  */
6362   for (unsigned i = 0;
6363        i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
6364        ++i)
6365     builtin_structptr_types[i].node = builtin_structptr_types[i].base;
6366 
6367   /* Reset some langhooks.  Do not reset types_compatible_p, it may
6368      still be used indirectly via the get_alias_set langhook.  */
6369   lang_hooks.dwarf_name = lhd_dwarf_name;
6370   lang_hooks.decl_printable_name = gimple_decl_printable_name;
6371   lang_hooks.gimplify_expr = lhd_gimplify_expr;
6372   lang_hooks.overwrite_decl_assembler_name = lhd_overwrite_decl_assembler_name;
6373   lang_hooks.print_xnode = lhd_print_tree_nothing;
6374   lang_hooks.print_decl = lhd_print_tree_nothing;
6375   lang_hooks.print_type = lhd_print_tree_nothing;
6376   lang_hooks.print_identifier = lhd_print_tree_nothing;
6377 
6378   lang_hooks.tree_inlining.var_mod_type_p = hook_bool_tree_tree_false;
6379 
6380   if (flag_checking)
6381     {
6382       int i;
6383       tree t;
6384 
6385       FOR_EACH_VEC_ELT (fld.types, i, t)
6386 	verify_type (t);
6387     }
6388 
6389   /* We do not want the default decl_assembler_name implementation,
6390      rather if we have fixed everything we want a wrapper around it
6391      asserting that all non-local symbols already got their assembler
6392      name and only produce assembler names for local symbols.  Or rather
6393      make sure we never call decl_assembler_name on local symbols and
6394      devise a separate, middle-end private scheme for it.  */
6395 
6396   /* Reset diagnostic machinery.  */
6397   tree_diagnostics_defaults (global_dc);
6398 
6399   rebuild_type_inheritance_graph ();
6400 
6401   delete fld_incomplete_types;
6402   delete fld_simplified_types;
6403 
6404   return 0;
6405 }
6406 
6407 
6408 namespace {
6409 
6410 const pass_data pass_data_ipa_free_lang_data =
6411 {
6412   SIMPLE_IPA_PASS, /* type */
6413   "*free_lang_data", /* name */
6414   OPTGROUP_NONE, /* optinfo_flags */
6415   TV_IPA_FREE_LANG_DATA, /* tv_id */
6416   0, /* properties_required */
6417   0, /* properties_provided */
6418   0, /* properties_destroyed */
6419   0, /* todo_flags_start */
6420   0, /* todo_flags_finish */
6421 };
6422 
6423 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
6424 {
6425 public:
6426   pass_ipa_free_lang_data (gcc::context *ctxt)
6427     : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
6428   {}
6429 
6430   /* opt_pass methods: */
6431   virtual unsigned int execute (function *) { return free_lang_data (); }
6432 
6433 }; // class pass_ipa_free_lang_data
6434 
6435 } // anon namespace
6436 
6437 simple_ipa_opt_pass *
6438 make_pass_ipa_free_lang_data (gcc::context *ctxt)
6439 {
6440   return new pass_ipa_free_lang_data (ctxt);
6441 }
6442 
6443 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6444    of the various TYPE_QUAL values.  */
6445 
6446 static void
6447 set_type_quals (tree type, int type_quals)
6448 {
6449   TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6450   TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6451   TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6452   TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6453   TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6454 }
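
/* Illustrative sketch (hypothetical): TYPE_QUAL_* bits compose with an
   encoded address space, e.g. requesting a const volatile qualification in
   the generic address space.  */
#if 0
static void
example_set_quals (tree type)
{
  set_type_quals (type, TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
			| ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_GENERIC));
}
#endif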
6455 
6456 /* Returns true iff CAND and BASE have equivalent language-specific
6457    qualifiers.  */
6458 
6459 bool
6460 check_lang_type (const_tree cand, const_tree base)
6461 {
6462   if (lang_hooks.types.type_hash_eq == NULL)
6463     return true;
6464   /* type_hash_eq currently only applies to these types.  */
6465   if (TREE_CODE (cand) != FUNCTION_TYPE
6466       && TREE_CODE (cand) != METHOD_TYPE)
6467     return true;
6468   return lang_hooks.types.type_hash_eq (cand, base);
6469 }
6470 
6471 /* This function checks to see if TYPE matches the size of one of the built-in
6472    atomic types, and returns that core atomic type.  */
6473 
6474 static tree
6475 find_atomic_core_type (const_tree type)
6476 {
6477   tree base_atomic_type;
6478 
6479   /* Only handle complete types.  */
6480   if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
6481     return NULL_TREE;
6482 
6483   switch (tree_to_uhwi (TYPE_SIZE (type)))
6484     {
6485     case 8:
6486       base_atomic_type = atomicQI_type_node;
6487       break;
6488 
6489     case 16:
6490       base_atomic_type = atomicHI_type_node;
6491       break;
6492 
6493     case 32:
6494       base_atomic_type = atomicSI_type_node;
6495       break;
6496 
6497     case 64:
6498       base_atomic_type = atomicDI_type_node;
6499       break;
6500 
6501     case 128:
6502       base_atomic_type = atomicTI_type_node;
6503       break;
6504 
6505     default:
6506       base_atomic_type = NULL_TREE;
6507     }
6508 
6509   return base_atomic_type;
6510 }
6511 
6512 /* Returns true iff unqualified CAND and BASE are equivalent.  */
6513 
6514 bool
6515 check_base_type (const_tree cand, const_tree base)
6516 {
6517   if (TYPE_NAME (cand) != TYPE_NAME (base)
6518       /* Apparently this is needed for Objective-C.  */
6519       || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
6520       || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
6521 			        TYPE_ATTRIBUTES (base)))
6522     return false;
6523   /* Check alignment.  */
6524   if (TYPE_ALIGN (cand) == TYPE_ALIGN (base))
6525     return true;
6526   /* Atomic types increase minimal alignment.  We must do so as well
6527      or we get duplicated canonical types. See PR88686.  */
6528   if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
6529     {
6530       /* See if this object can map to a basic atomic type.  */
6531       tree atomic_type = find_atomic_core_type (cand);
6532       if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
6533        return true;
6534     }
6535   return false;
6536 }
6537 
6538 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS.  */
6539 
6540 bool
6541 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6542 {
6543   return (TYPE_QUALS (cand) == type_quals
6544 	  && check_base_type (cand, base)
6545 	  && check_lang_type (cand, base));
6546 }
6547 
6548 /* Returns true iff CAND is equivalent to BASE with ALIGN.  */
6549 
6550 static bool
6551 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6552 {
6553   return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6554 	  && TYPE_NAME (cand) == TYPE_NAME (base)
6555 	  /* Apparently this is needed for Objective-C.  */
6556 	  && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6557 	  /* Check alignment.  */
6558 	  && TYPE_ALIGN (cand) == align
6559 	  && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6560 				   TYPE_ATTRIBUTES (base))
6561 	  && check_lang_type (cand, base));
6562 }
6563 
6564 /* Return a version of the TYPE, qualified as indicated by the
6565    TYPE_QUALS, if one exists.  If no qualified version exists yet,
6566    return NULL_TREE.  */
6567 
6568 tree
6569 get_qualified_type (tree type, int type_quals)
6570 {
6571   if (TYPE_QUALS (type) == type_quals)
6572     return type;
6573 
6574   tree mv = TYPE_MAIN_VARIANT (type);
6575   if (check_qualified_type (mv, type, type_quals))
6576     return mv;
6577 
6578   /* Search the chain of variants to see if there is already one there just
6579      like the one we need to have.  If so, use that existing one.  We must
6580      preserve the TYPE_NAME, since there is code that depends on this.  */
6581   for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
6582     if (check_qualified_type (*tp, type, type_quals))
6583       {
6584 	/* Put the found variant at the head of the variant list so
6585 	   frequently searched variants get found faster.  The C++ FE
6586 	   benefits greatly from this.  */
6587 	tree t = *tp;
6588 	*tp = TYPE_NEXT_VARIANT (t);
6589 	TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
6590 	TYPE_NEXT_VARIANT (mv) = t;
6591 	return t;
6592       }
6593 
6594   return NULL_TREE;
6595 }
6596 
6597 /* Like get_qualified_type, but creates the type if it does not
6598    exist.  This function never returns NULL_TREE.  */
6599 
6600 tree
6601 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
6602 {
6603   tree t;
6604 
6605   /* See if we already have the appropriate qualified variant.  */
6606   t = get_qualified_type (type, type_quals);
6607 
6608   /* If not, build it.  */
6609   if (!t)
6610     {
6611       t = build_variant_type_copy (type PASS_MEM_STAT);
6612       set_type_quals (t, type_quals);
6613 
6614       if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
6615 	{
6616 	  /* See if this object can map to a basic atomic type.  */
6617 	  tree atomic_type = find_atomic_core_type (type);
6618 	  if (atomic_type)
6619 	    {
6620 	      /* Ensure the alignment of this type is compatible with
6621 		 the required alignment of the atomic type.  */
6622 	      if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6623 		SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
6624 	    }
6625 	}
6626 
6627       if (TYPE_STRUCTURAL_EQUALITY_P (type))
6628 	/* Propagate structural equality. */
6629 	SET_TYPE_STRUCTURAL_EQUALITY (t);
6630       else if (TYPE_CANONICAL (type) != type)
6631 	/* Build the underlying canonical type, since it is different
6632 	   from TYPE. */
6633 	{
6634 	  tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6635 	  TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6636 	}
6637       else
6638 	/* T is its own canonical type. */
6639 	TYPE_CANONICAL (t) = t;
6640 
6641     }
6642 
6643   return t;
6644 }
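
/* Illustrative sketch (hypothetical usage): requesting a qualified variant.
   The first call builds and caches the variant; later identical requests are
   answered from the variant chain via get_qualified_type above.  */
#if 0
static tree
example_const_int_type (void)
{
  return build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
}
#endif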
6645 
6646 /* Create a variant of type T with alignment ALIGN.  */
6647 
6648 tree
6649 build_aligned_type (tree type, unsigned int align)
6650 {
6651   tree t;
6652 
6653   if (TYPE_PACKED (type)
6654       || TYPE_ALIGN (type) == align)
6655     return type;
6656 
6657   for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6658     if (check_aligned_type (t, type, align))
6659       return t;
6660 
6661   t = build_variant_type_copy (type);
6662   SET_TYPE_ALIGN (t, align);
6663   TYPE_USER_ALIGN (t) = 1;
6664 
6665   return t;
6666 }
6667 
6668 /* Create a new distinct copy of TYPE.  The new type is made its own
6669    MAIN_VARIANT. If TYPE requires structural equality checks, the
6670    resulting type requires structural equality checks; otherwise, its
6671    TYPE_CANONICAL points to itself. */
6672 
6673 tree
6674 build_distinct_type_copy (tree type MEM_STAT_DECL)
6675 {
6676   tree t = copy_node (type PASS_MEM_STAT);
6677 
6678   TYPE_POINTER_TO (t) = 0;
6679   TYPE_REFERENCE_TO (t) = 0;
6680 
6681   /* Set the canonical type either to a new equivalence class, or
6682      propagate the need for structural equality checks. */
6683   if (TYPE_STRUCTURAL_EQUALITY_P (type))
6684     SET_TYPE_STRUCTURAL_EQUALITY (t);
6685   else
6686     TYPE_CANONICAL (t) = t;
6687 
6688   /* Make it its own variant.  */
6689   TYPE_MAIN_VARIANT (t) = t;
6690   TYPE_NEXT_VARIANT (t) = 0;
6691 
6692   /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6693      whose TREE_TYPE is not t.  This can also happen in the Ada
6694      frontend when using subtypes.  */
6695 
6696   return t;
6697 }
6698 
6699 /* Create a new variant of TYPE, equivalent but distinct.  This is so
6700    the caller can modify it. TYPE_CANONICAL for the return type will
6701    be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6702    are considered equal by the language itself (or that both types
6703    require structural equality checks). */
6704 
6705 tree
6706 build_variant_type_copy (tree type MEM_STAT_DECL)
6707 {
6708   tree t, m = TYPE_MAIN_VARIANT (type);
6709 
6710   t = build_distinct_type_copy (type PASS_MEM_STAT);
6711 
6712   /* Since we're building a variant, assume that it is a non-semantic
6713      variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6714   TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6715   /* Type variants have no alias set defined.  */
6716   TYPE_ALIAS_SET (t) = -1;
6717 
6718   /* Add the new type to the chain of variants of TYPE.  */
6719   TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6720   TYPE_NEXT_VARIANT (m) = t;
6721   TYPE_MAIN_VARIANT (t) = m;
6722 
6723   return t;
6724 }
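
/* Illustrative note: build_distinct_type_copy starts a new equivalence class
   (the copy becomes its own main variant and, unless structural equality is
   required, its own TYPE_CANONICAL), whereas build_variant_type_copy keeps
   TYPE_CANONICAL and chains the copy onto the variant list of the original's
   main variant.  A hypothetical caller wanting a modifiable alias of a type
   would use build_variant_type_copy, while one creating a genuinely new type
   (as fld_incomplete_type_of does above) would use
   build_distinct_type_copy.  */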
6725 
6726 /* Return true if the from tree in both tree maps are equal.  */
6727 
6728 int
6729 tree_map_base_eq (const void *va, const void *vb)
6730 {
6731   const struct tree_map_base  *const a = (const struct tree_map_base *) va,
6732     *const b = (const struct tree_map_base *) vb;
6733   return (a->from == b->from);
6734 }
6735 
6736 /* Hash a from tree in a tree_base_map.  */
6737 
6738 unsigned int
6739 tree_map_base_hash (const void *item)
6740 {
6741   return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6742 }
6743 
6744 /* Return true if this tree map structure is marked for garbage collection
6745    purposes.  We simply return true if the from tree is marked, so that this
6746    structure goes away when the from tree goes away.  */
6747 
6748 int
6749 tree_map_base_marked_p (const void *p)
6750 {
6751   return ggc_marked_p (((const struct tree_map_base *) p)->from);
6752 }
6753 
6754 /* Hash a from tree in a tree_map.  */
6755 
6756 unsigned int
6757 tree_map_hash (const void *item)
6758 {
6759   return (((const struct tree_map *) item)->hash);
6760 }
6761 
6762 /* Hash a from tree in a tree_decl_map.  */
6763 
6764 unsigned int
6765 tree_decl_map_hash (const void *item)
6766 {
6767   return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6768 }
6769 
6770 /* Return the initialization priority for DECL.  */
6771 
6772 priority_type
6773 decl_init_priority_lookup (tree decl)
6774 {
6775   symtab_node *snode = symtab_node::get (decl);
6776 
6777   if (!snode)
6778     return DEFAULT_INIT_PRIORITY;
6779   return
6780     snode->get_init_priority ();
6781 }
6782 
6783 /* Return the finalization priority for DECL.  */
6784 
6785 priority_type
6786 decl_fini_priority_lookup (tree decl)
6787 {
6788   cgraph_node *node = cgraph_node::get (decl);
6789 
6790   if (!node)
6791     return DEFAULT_INIT_PRIORITY;
6792   return
6793     node->get_fini_priority ();
6794 }
6795 
6796 /* Set the initialization priority for DECL to PRIORITY.  */
6797 
6798 void
6799 decl_init_priority_insert (tree decl, priority_type priority)
6800 {
6801   struct symtab_node *snode;
6802 
6803   if (priority == DEFAULT_INIT_PRIORITY)
6804     {
6805       snode = symtab_node::get (decl);
6806       if (!snode)
6807 	return;
6808     }
6809   else if (VAR_P (decl))
6810     snode = varpool_node::get_create (decl);
6811   else
6812     snode = cgraph_node::get_create (decl);
6813   snode->set_init_priority (priority);
6814 }
6815 
6816 /* Set the finalization priority for DECL to PRIORITY.  */
6817 
6818 void
6819 decl_fini_priority_insert (tree decl, priority_type priority)
6820 {
6821   struct cgraph_node *node;
6822 
6823   if (priority == DEFAULT_INIT_PRIORITY)
6824     {
6825       node = cgraph_node::get (decl);
6826       if (!node)
6827 	return;
6828     }
6829   else
6830     node = cgraph_node::get_create (decl);
6831   node->set_fini_priority (priority);
6832 }
6833 
6834 /* Print out the statistics for the DECL_DEBUG_EXPR hash table.  */
6835 
6836 static void
6837 print_debug_expr_statistics (void)
6838 {
6839   fprintf (stderr, "DECL_DEBUG_EXPR  hash: size %ld, %ld elements, %f collisions\n",
6840 	   (long) debug_expr_for_decl->size (),
6841 	   (long) debug_expr_for_decl->elements (),
6842 	   debug_expr_for_decl->collisions ());
6843 }
6844 
6845 /* Print out the statistics for the DECL_VALUE_EXPR hash table.  */
6846 
6847 static void
6848 print_value_expr_statistics (void)
6849 {
6850   fprintf (stderr, "DECL_VALUE_EXPR  hash: size %ld, %ld elements, %f collisions\n",
6851 	   (long) value_expr_for_decl->size (),
6852 	   (long) value_expr_for_decl->elements (),
6853 	   value_expr_for_decl->collisions ());
6854 }
6855 
6856 /* Lookup a debug expression for FROM, and return it if we find one.  */
6857 
6858 tree
6859 decl_debug_expr_lookup (tree from)
6860 {
6861   struct tree_decl_map *h, in;
6862   in.base.from = from;
6863 
6864   h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6865   if (h)
6866     return h->to;
6867   return NULL_TREE;
6868 }
6869 
6870 /* Insert a mapping FROM->TO in the debug expression hashtable.  */
6871 
6872 void
6873 decl_debug_expr_insert (tree from, tree to)
6874 {
6875   struct tree_decl_map *h;
6876 
6877   h = ggc_alloc<tree_decl_map> ();
6878   h->base.from = from;
6879   h->to = to;
6880   *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6881 }
6882 
6883 /* Lookup a value expression for FROM, and return it if we find one.  */
6884 
6885 tree
6886 decl_value_expr_lookup (tree from)
6887 {
6888   struct tree_decl_map *h, in;
6889   in.base.from = from;
6890 
6891   h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6892   if (h)
6893     return h->to;
6894   return NULL_TREE;
6895 }
6896 
6897 /* Insert a mapping FROM->TO in the value expression hashtable.  */
6898 
6899 void
6900 decl_value_expr_insert (tree from, tree to)
6901 {
6902   struct tree_decl_map *h;
6903 
6904   h = ggc_alloc<tree_decl_map> ();
6905   h->base.from = from;
6906   h->to = to;
6907   *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6908 }
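
/* A hypothetical caller-side sketch of the lookup/insert pairing used by
   these per-DECL side tables (use and build_value_expr are made-up
   placeholders):

     if (tree val = decl_value_expr_lookup (decl))
       use (val);
     else
       decl_value_expr_insert (decl, build_value_expr (decl));

   Most code goes through the DECL_VALUE_EXPR and SET_DECL_VALUE_EXPR
   wrappers in tree.h and sets DECL_HAS_VALUE_EXPR_P separately.  */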
6909 
6910 /* Lookup a vector of debug arguments for FROM, and return it if we
6911    find one.  */
6912 
6913 vec<tree, va_gc> **
6914 decl_debug_args_lookup (tree from)
6915 {
6916   struct tree_vec_map *h, in;
6917 
6918   if (!DECL_HAS_DEBUG_ARGS_P (from))
6919     return NULL;
6920   gcc_checking_assert (debug_args_for_decl != NULL);
6921   in.base.from = from;
6922   h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6923   if (h)
6924     return &h->to;
6925   return NULL;
6926 }
6927 
6928 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6929    arguments hashtable.  */
6930 
6931 vec<tree, va_gc> **
6932 decl_debug_args_insert (tree from)
6933 {
6934   struct tree_vec_map *h;
6935   tree_vec_map **loc;
6936 
6937   if (DECL_HAS_DEBUG_ARGS_P (from))
6938     return decl_debug_args_lookup (from);
6939   if (debug_args_for_decl == NULL)
6940     debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6941   h = ggc_alloc<tree_vec_map> ();
6942   h->base.from = from;
6943   h->to = NULL;
6944   loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6945   *loc = h;
6946   DECL_HAS_DEBUG_ARGS_P (from) = 1;
6947   return &h->to;
6948 }
6949 
6950 /* Hashing of types so that we don't make duplicates.
6951    The entry point is `type_hash_canon'.  */
6952 
6953 /* Generate the default hash code for TYPE.  This is designed for
6954    speed, rather than maximum entropy.  */
6955 
6956 hashval_t
6957 type_hash_canon_hash (tree type)
6958 {
6959   inchash::hash hstate;
6960 
6961   hstate.add_int (TREE_CODE (type));
6962 
6963   if (TREE_TYPE (type))
6964     hstate.add_object (TYPE_HASH (TREE_TYPE (type)));
6965 
6966   for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
6967     /* Just the identifier is adequate to distinguish.  */
6968     hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));
6969 
6970   switch (TREE_CODE (type))
6971     {
6972     case METHOD_TYPE:
6973       hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
6974       /* FALLTHROUGH. */
6975     case FUNCTION_TYPE:
6976       for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6977 	if (TREE_VALUE (t) != error_mark_node)
6978 	  hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
6979       break;
6980 
6981     case OFFSET_TYPE:
6982       hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
6983       break;
6984 
6985     case ARRAY_TYPE:
6986       {
6987 	if (TYPE_DOMAIN (type))
6988 	  hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
6989 	if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
6990 	  {
6991 	    unsigned typeless = TYPE_TYPELESS_STORAGE (type);
6992 	    hstate.add_object (typeless);
6993 	  }
6994       }
6995       break;
6996 
6997     case INTEGER_TYPE:
6998       {
6999 	tree t = TYPE_MAX_VALUE (type);
7000 	if (!t)
7001 	  t = TYPE_MIN_VALUE (type);
7002 	for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7003 	  hstate.add_object (TREE_INT_CST_ELT (t, i));
7004 	break;
7005       }
7006 
7007     case REAL_TYPE:
7008     case FIXED_POINT_TYPE:
7009       {
7010 	unsigned prec = TYPE_PRECISION (type);
7011 	hstate.add_object (prec);
7012 	break;
7013       }
7014 
7015     case VECTOR_TYPE:
7016       hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
7017       break;
7018 
7019     default:
7020       break;
7021     }
7022 
7023   return hstate.end ();
7024 }
7025 
7026 /* These are the Hashtable callback functions.  */
7027 
7028 /* Returns true iff the types are equivalent.  */
7029 
7030 bool
7031 type_cache_hasher::equal (type_hash *a, type_hash *b)
7032 {
7033   /* First test the things that are the same for all types.  */
7034   if (a->hash != b->hash
7035       || TREE_CODE (a->type) != TREE_CODE (b->type)
7036       || TREE_TYPE (a->type) != TREE_TYPE (b->type)
7037       || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
7038 				 TYPE_ATTRIBUTES (b->type))
7039       || (TREE_CODE (a->type) != COMPLEX_TYPE
7040           && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
7041     return 0;
7042 
7043   /* Be careful about comparing arrays before and after the element type
7044      has been completed; don't compare TYPE_ALIGN unless both types are
7045      complete.  */
7046   if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
7047       && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
7048 	  || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
7049     return 0;
7050 
7051   switch (TREE_CODE (a->type))
7052     {
7053     case VOID_TYPE:
7054     case COMPLEX_TYPE:
7055     case POINTER_TYPE:
7056     case REFERENCE_TYPE:
7057     case NULLPTR_TYPE:
7058       return 1;
7059 
7060     case VECTOR_TYPE:
7061       return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
7062 		       TYPE_VECTOR_SUBPARTS (b->type));
7063 
7064     case ENUMERAL_TYPE:
7065       if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
7066 	  && !(TYPE_VALUES (a->type)
7067 	       && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
7068 	       && TYPE_VALUES (b->type)
7069 	       && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
7070 	       && type_list_equal (TYPE_VALUES (a->type),
7071 				   TYPE_VALUES (b->type))))
7072 	return 0;
7073 
7074       /* fall through */
7075 
7076     case INTEGER_TYPE:
7077     case REAL_TYPE:
7078     case BOOLEAN_TYPE:
7079       if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
7080 	return false;
7081       return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
7082 	       || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
7083 				      TYPE_MAX_VALUE (b->type)))
7084 	      && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
7085 		  || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
7086 					 TYPE_MIN_VALUE (b->type))));
7087 
7088     case FIXED_POINT_TYPE:
7089       return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
7090 
7091     case OFFSET_TYPE:
7092       return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
7093 
7094     case METHOD_TYPE:
7095       if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
7096 	  && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7097 	      || (TYPE_ARG_TYPES (a->type)
7098 		  && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7099 		  && TYPE_ARG_TYPES (b->type)
7100 		  && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7101 		  && type_list_equal (TYPE_ARG_TYPES (a->type),
7102 				      TYPE_ARG_TYPES (b->type)))))
7103         break;
7104       return 0;
7105     case ARRAY_TYPE:
7106       /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
7107 	 where the flag should be inherited from the element type
7108 	 and can change after ARRAY_TYPEs are created; on non-aggregates
7109 	 compare it and hash it, scalars will never have that flag set
7110 	 and we need to differentiate between arrays created by different
7111 	 front-ends or middle-end created arrays.  */
7112       return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
7113 	      && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
7114 		  || (TYPE_TYPELESS_STORAGE (a->type)
7115 		      == TYPE_TYPELESS_STORAGE (b->type))));
7116 
7117     case RECORD_TYPE:
7118     case UNION_TYPE:
7119     case QUAL_UNION_TYPE:
7120       return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
7121 	      || (TYPE_FIELDS (a->type)
7122 		  && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
7123 		  && TYPE_FIELDS (b->type)
7124 		  && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
7125 		  && type_list_equal (TYPE_FIELDS (a->type),
7126 				      TYPE_FIELDS (b->type))));
7127 
7128     case FUNCTION_TYPE:
7129       if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7130 	  || (TYPE_ARG_TYPES (a->type)
7131 	      && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7132 	      && TYPE_ARG_TYPES (b->type)
7133 	      && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7134 	      && type_list_equal (TYPE_ARG_TYPES (a->type),
7135 				  TYPE_ARG_TYPES (b->type))))
7136 	break;
7137       return 0;
7138 
7139     default:
7140       return 0;
7141     }
7142 
7143   if (lang_hooks.types.type_hash_eq != NULL)
7144     return lang_hooks.types.type_hash_eq (a->type, b->type);
7145 
7146   return 1;
7147 }
7148 
7149 /* Given TYPE, and HASHCODE its hash code, return the canonical
7150    object for an identical type if one already exists.
7151    Otherwise, return TYPE, and record it as the canonical object.
7152 
7153    To use this function, first create a type of the sort you want.
7154    Then compute its hash code from the fields of the type that
7155    make it different from other similar types.
7156    Then call this function and use the value.  */
7157 
7158 tree
7159 type_hash_canon (unsigned int hashcode, tree type)
7160 {
7161   type_hash in;
7162   type_hash **loc;
7163 
7164   /* The hash table only contains main variants, so ensure that's what we're
7165      being passed.  */
7166   gcc_assert (TYPE_MAIN_VARIANT (type) == type);
7167 
7168   /* The TYPE_ALIGN field of a type is set by layout_type(), so we
7169      must call that routine before comparing TYPE_ALIGNs.  */
7170   layout_type (type);
7171 
7172   in.hash = hashcode;
7173   in.type = type;
7174 
7175   loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
7176   if (*loc)
7177     {
7178       tree t1 = ((type_hash *) *loc)->type;
7179       gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
7180 		  && t1 != type);
7181       if (TYPE_UID (type) + 1 == next_type_uid)
7182 	--next_type_uid;
7183       /* Free also min/max values and the cache for integer
7184 	 types.  This can't be done in free_node, as LTO frees
7185 	 those on its own.  */
7186       if (TREE_CODE (type) == INTEGER_TYPE)
7187 	{
7188 	  if (TYPE_MIN_VALUE (type)
7189 	      && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
7190 	    {
7191 	      /* Zero is always in TYPE_CACHED_VALUES.  */
7192 	      if (! TYPE_UNSIGNED (type))
7193 		int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
7194 	      ggc_free (TYPE_MIN_VALUE (type));
7195 	    }
7196 	  if (TYPE_MAX_VALUE (type)
7197 	      && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
7198 	    {
7199 	      int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
7200 	      ggc_free (TYPE_MAX_VALUE (type));
7201 	    }
7202 	  if (TYPE_CACHED_VALUES_P (type))
7203 	    ggc_free (TYPE_CACHED_VALUES (type));
7204 	}
7205       free_node (type);
7206       return t1;
7207     }
7208   else
7209     {
7210       struct type_hash *h;
7211 
7212       h = ggc_alloc<type_hash> ();
7213       h->hash = hashcode;
7214       h->type = type;
7215       *loc = h;
7216 
7217       return type;
7218     }
7219 }
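
/* A sketch of the calling sequence described above (SOME_TYPE_CODE
   stands in for whatever kind of type is being built):

     tree t = make_node (SOME_TYPE_CODE);
     ... fill in the fields that distinguish the type ...
     hashval_t hash = type_hash_canon_hash (t);
     t = type_hash_canon (hash, t);

   If an equivalent type already existed, the freshly built node is freed
   and the pre-existing one is returned, so the new node must not be
   referenced from anywhere else beforehand.  */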
7220 
7221 static void
7222 print_type_hash_statistics (void)
7223 {
7224   fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
7225 	   (long) type_hash_table->size (),
7226 	   (long) type_hash_table->elements (),
7227 	   type_hash_table->collisions ());
7228 }
7229 
7230 /* Given two lists of types
7231    (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7232    return 1 if the lists contain the same types in the same order.
7233    Also, the TREE_PURPOSEs must match.  */
7234 
7235 bool
7236 type_list_equal (const_tree l1, const_tree l2)
7237 {
7238   const_tree t1, t2;
7239 
7240   for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7241     if (TREE_VALUE (t1) != TREE_VALUE (t2)
7242 	|| (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7243 	    && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7244 		  && (TREE_TYPE (TREE_PURPOSE (t1))
7245 		      == TREE_TYPE (TREE_PURPOSE (t2))))))
7246       return false;
7247 
7248   return t1 == t2;
7249 }
7250 
7251 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7252    given by TYPE.  If the argument list accepts variable arguments,
7253    then this function counts only the ordinary arguments.  */
7254 
7255 int
7256 type_num_arguments (const_tree fntype)
7257 {
7258   int i = 0;
7259 
7260   for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
7261     /* If the function does not take a variable number of arguments,
7262        the last element in the list will have type `void'.  */
7263     if (VOID_TYPE_P (TREE_VALUE (t)))
7264       break;
7265     else
7266       ++i;
7267 
7268   return i;
7269 }
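
/* For example, for a FUNCTION_TYPE describing int (int, double, ...)
   this returns 2: the two named arguments are counted and the variadic
   part is not, because such a TYPE_ARG_TYPES chain is not terminated by
   the void sentinel.  */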
7270 
7271 /* Return the type of the function FNTYPE's argument ARGNO if known.
7272    For a vararg function where ARGNO refers to one of the variadic
7273    arguments, return null.  Otherwise, return void_type_node for an
7274    out-of-bounds ARGNO.  */
7275 
7276 tree
7277 type_argument_type (const_tree fntype, unsigned argno)
7278 {
7279   /* Treat zero the same as an out-of-bounds argument number.  */
7280   if (!argno)
7281     return void_type_node;
7282 
7283   function_args_iterator iter;
7284 
7285   tree argtype;
7286   unsigned i = 1;
7287   FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
7288     {
7289       /* A vararg function's argument list ends in a null.  Otherwise,
7290 	 an ordinary function's argument list ends with void.  Return
7291 	 null if ARGNO refers to a vararg argument, void_type_node if
7292 	 it's out of bounds, and the formal argument type otherwise.  */
7293       if (!argtype)
7294 	break;
7295 
7296       if (i == argno || VOID_TYPE_P (argtype))
7297 	return argtype;
7298 
7299       ++i;
7300     }
7301 
7302   return NULL_TREE;
7303 }
7304 
7305 /* Nonzero if integer constants T1 and T2
7306    represent the same constant value.  */
7307 
7308 int
7309 tree_int_cst_equal (const_tree t1, const_tree t2)
7310 {
7311   if (t1 == t2)
7312     return 1;
7313 
7314   if (t1 == 0 || t2 == 0)
7315     return 0;
7316 
7317   STRIP_ANY_LOCATION_WRAPPER (t1);
7318   STRIP_ANY_LOCATION_WRAPPER (t2);
7319 
7320   if (TREE_CODE (t1) == INTEGER_CST
7321       && TREE_CODE (t2) == INTEGER_CST
7322       && wi::to_widest (t1) == wi::to_widest (t2))
7323     return 1;
7324 
7325   return 0;
7326 }
7327 
7328 /* Return true if T is an INTEGER_CST whose numerical value (extended
7329    according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT.  */
7330 
7331 bool
7332 tree_fits_shwi_p (const_tree t)
7333 {
7334   return (t != NULL_TREE
7335 	  && TREE_CODE (t) == INTEGER_CST
7336 	  && wi::fits_shwi_p (wi::to_widest (t)));
7337 }
7338 
7339 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
7340    value (extended according to TYPE_UNSIGNED) fits in a poly_int64.  */
7341 
7342 bool
7343 tree_fits_poly_int64_p (const_tree t)
7344 {
7345   if (t == NULL_TREE)
7346     return false;
7347   if (POLY_INT_CST_P (t))
7348     {
7349       for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
7350 	if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
7351 	  return false;
7352       return true;
7353     }
7354   return (TREE_CODE (t) == INTEGER_CST
7355 	  && wi::fits_shwi_p (wi::to_widest (t)));
7356 }
7357 
7358 /* Return true if T is an INTEGER_CST whose numerical value (extended
7359    according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT.  */
7360 
7361 bool
7362 tree_fits_uhwi_p (const_tree t)
7363 {
7364   return (t != NULL_TREE
7365 	  && TREE_CODE (t) == INTEGER_CST
7366 	  && wi::fits_uhwi_p (wi::to_widest (t)));
7367 }
7368 
7369 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
7370    value (extended according to TYPE_UNSIGNED) fits in a poly_uint64.  */
7371 
7372 bool
7373 tree_fits_poly_uint64_p (const_tree t)
7374 {
7375   if (t == NULL_TREE)
7376     return false;
7377   if (POLY_INT_CST_P (t))
7378     {
7379       for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
7380 	if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
7381 	  return false;
7382       return true;
7383     }
7384   return (TREE_CODE (t) == INTEGER_CST
7385 	  && wi::fits_uhwi_p (wi::to_widest (t)));
7386 }
7387 
7388 /* T is an INTEGER_CST whose numerical value (extended according to
7389    TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT.  Return that
7390    HOST_WIDE_INT.  */
7391 
7392 HOST_WIDE_INT
7393 tree_to_shwi (const_tree t)
7394 {
7395   gcc_assert (tree_fits_shwi_p (t));
7396   return TREE_INT_CST_LOW (t);
7397 }
7398 
7399 /* T is an INTEGER_CST whose numerical value (extended according to
7400    TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT.  Return that
7401    HOST_WIDE_INT.  */
7402 
7403 unsigned HOST_WIDE_INT
7404 tree_to_uhwi (const_tree t)
7405 {
7406   gcc_assert (tree_fits_uhwi_p (t));
7407   return TREE_INT_CST_LOW (t);
7408 }
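
/* The usual pattern is to test with the predicate before converting;
   e.g., for an arbitrary INTEGER_CST expression SIZE:

     if (tree_fits_uhwi_p (size))
       {
         unsigned HOST_WIDE_INT n = tree_to_uhwi (size);
         ...
       }

   Calling tree_to_shwi or tree_to_uhwi on a tree that fails the
   corresponding predicate trips the assertion above.  */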
7409 
7410 /* Return the most significant (sign) bit of T.  */
7411 
7412 int
7413 tree_int_cst_sign_bit (const_tree t)
7414 {
7415   unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7416 
7417   return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
7418 }
7419 
7420 /* Return an indication of the sign of the integer constant T.
7421    The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7422    Note that -1 will never be returned if T's type is unsigned.  */
7423 
7424 int
7425 tree_int_cst_sgn (const_tree t)
7426 {
7427   if (wi::to_wide (t) == 0)
7428     return 0;
7429   else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7430     return 1;
7431   else if (wi::neg_p (wi::to_wide (t)))
7432     return -1;
7433   else
7434     return 1;
7435 }
7436 
7437 /* Return the minimum number of bits needed to represent VALUE in a
7438    signed or unsigned type; SGN says which.  */
7439 
7440 unsigned int
7441 tree_int_cst_min_precision (tree value, signop sgn)
7442 {
7443   /* If the value is negative, compute its negative minus 1.  The latter
7444      adjustment is because the absolute value of the largest negative value
7445      is one larger than the largest positive value.  This is equivalent to
7446      a bit-wise negation, so use that operation instead.  */
7447 
7448   if (tree_int_cst_sgn (value) < 0)
7449     value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7450 
7451   /* Return the number of bits needed, taking into account the fact
7452      that we need one more bit for a signed than unsigned type.
7453      If value is 0 or -1, the minimum precision is 1 no matter
7454      whether SGN is SIGNED or UNSIGNED.  */
7455 
7456   if (integer_zerop (value))
7457     return 1;
7458   else
7459     return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7460 }
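
/* For instance, VALUE == 5 yields 3 when SGN is UNSIGNED (101 needs
   three bits) and 4 when SGN is SIGNED (one more bit for the sign);
   VALUE == -3 with SGN == SIGNED yields 3, since ~(-3) == 2 needs two
   bits plus the sign bit.  */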
7461 
7462 /* Return truthvalue of whether T1 is the same tree structure as T2.
7463    Return 1 if they are the same.
7464    Return 0 if they are understandably different.
7465    Return -1 if either contains tree structure not understood by
7466    this function.  */
7467 
7468 int
7469 simple_cst_equal (const_tree t1, const_tree t2)
7470 {
7471   enum tree_code code1, code2;
7472   int cmp;
7473   int i;
7474 
7475   if (t1 == t2)
7476     return 1;
7477   if (t1 == 0 || t2 == 0)
7478     return 0;
7479 
7480   /* For location wrappers to be the same, they must be at the same
7481      source location (and wrap the same thing).  */
7482   if (location_wrapper_p (t1) && location_wrapper_p (t2))
7483     {
7484       if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
7485 	return 0;
7486       return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7487     }
7488 
7489   code1 = TREE_CODE (t1);
7490   code2 = TREE_CODE (t2);
7491 
7492   if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7493     {
7494       if (CONVERT_EXPR_CODE_P (code2)
7495 	  || code2 == NON_LVALUE_EXPR)
7496 	return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7497       else
7498 	return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7499     }
7500 
7501   else if (CONVERT_EXPR_CODE_P (code2)
7502 	   || code2 == NON_LVALUE_EXPR)
7503     return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7504 
7505   if (code1 != code2)
7506     return 0;
7507 
7508   switch (code1)
7509     {
7510     case INTEGER_CST:
7511       return wi::to_widest (t1) == wi::to_widest (t2);
7512 
7513     case REAL_CST:
7514       return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
7515 
7516     case FIXED_CST:
7517       return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7518 
7519     case STRING_CST:
7520       return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7521 	      && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7522 			 TREE_STRING_LENGTH (t1)));
7523 
7524     case CONSTRUCTOR:
7525       {
7526 	unsigned HOST_WIDE_INT idx;
7527 	vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7528 	vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7529 
7530 	if (vec_safe_length (v1) != vec_safe_length (v2))
7531 	  return false;
7532 
7533         for (idx = 0; idx < vec_safe_length (v1); ++idx)
7534 	  /* ??? Should we handle also fields here? */
7535 	  if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7536 	    return false;
7537 	return true;
7538       }
7539 
7540     case SAVE_EXPR:
7541       return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7542 
7543     case CALL_EXPR:
7544       cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7545       if (cmp <= 0)
7546 	return cmp;
7547       if (call_expr_nargs (t1) != call_expr_nargs (t2))
7548 	return 0;
7549       {
7550 	const_tree arg1, arg2;
7551 	const_call_expr_arg_iterator iter1, iter2;
7552 	for (arg1 = first_const_call_expr_arg (t1, &iter1),
7553 	       arg2 = first_const_call_expr_arg (t2, &iter2);
7554 	     arg1 && arg2;
7555 	     arg1 = next_const_call_expr_arg (&iter1),
7556 	       arg2 = next_const_call_expr_arg (&iter2))
7557 	  {
7558 	    cmp = simple_cst_equal (arg1, arg2);
7559 	    if (cmp <= 0)
7560 	      return cmp;
7561 	  }
7562 	return arg1 == arg2;
7563       }
7564 
7565     case TARGET_EXPR:
7566       /* Special case: if either target is an unallocated VAR_DECL,
7567 	 it means that it's going to be unified with whatever the
7568 	 TARGET_EXPR is really supposed to initialize, so treat it
7569 	 as being equivalent to anything.  */
7570       if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7571 	   && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7572 	   && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7573 	  || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7574 	      && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7575 	      && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7576 	cmp = 1;
7577       else
7578 	cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7579 
7580       if (cmp <= 0)
7581 	return cmp;
7582 
7583       return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7584 
7585     case WITH_CLEANUP_EXPR:
7586       cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7587       if (cmp <= 0)
7588 	return cmp;
7589 
7590       return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t1, 1));
7591 
7592     case COMPONENT_REF:
7593       if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7594 	return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7595 
7596       return 0;
7597 
7598     case VAR_DECL:
7599     case PARM_DECL:
7600     case CONST_DECL:
7601     case FUNCTION_DECL:
7602       return 0;
7603 
7604     default:
7605       if (POLY_INT_CST_P (t1))
7606 	/* A false return means maybe_ne rather than known_ne.  */
7607 	return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
7608 						TYPE_SIGN (TREE_TYPE (t1))),
7609 			 poly_widest_int::from (poly_int_cst_value (t2),
7610 						TYPE_SIGN (TREE_TYPE (t2))));
7611       break;
7612     }
7613 
7614   /* This general rule works for most tree codes.  All exceptions should be
7615      handled above.  If this is a language-specific tree code, we can't
7616      trust what might be in the operand, so say we don't know
7617      the situation.  */
7618   if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7619     return -1;
7620 
7621   switch (TREE_CODE_CLASS (code1))
7622     {
7623     case tcc_unary:
7624     case tcc_binary:
7625     case tcc_comparison:
7626     case tcc_expression:
7627     case tcc_reference:
7628     case tcc_statement:
7629       cmp = 1;
7630       for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7631 	{
7632 	  cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7633 	  if (cmp <= 0)
7634 	    return cmp;
7635 	}
7636 
7637       return cmp;
7638 
7639     default:
7640       return -1;
7641     }
7642 }
7643 
7644 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7645    Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7646    than U, respectively.  */
7647 
7648 int
7649 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7650 {
7651   if (tree_int_cst_sgn (t) < 0)
7652     return -1;
7653   else if (!tree_fits_uhwi_p (t))
7654     return 1;
7655   else if (TREE_INT_CST_LOW (t) == u)
7656     return 0;
7657   else if (TREE_INT_CST_LOW (t) < u)
7658     return -1;
7659   else
7660     return 1;
7661 }
7662 
7663 /* Return true if SIZE represents a constant size that is in bounds of
7664    what the middle-end and the backend accept (covering not more than
7665    half of the address-space).
7666    When PERR is non-null, set *PERR on failure to the description of
7667    why SIZE is not valid.  */
7668 
7669 bool
7670 valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
7671 {
7672   if (POLY_INT_CST_P (size))
7673     {
7674       if (TREE_OVERFLOW (size))
7675 	return false;
7676       for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
7677 	if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
7678 	  return false;
7679       return true;
7680     }
7681 
7682   cst_size_error error;
7683   if (!perr)
7684     perr = &error;
7685 
7686   if (TREE_CODE (size) != INTEGER_CST)
7687     {
7688       *perr = cst_size_not_constant;
7689       return false;
7690     }
7691 
7692   if (TREE_OVERFLOW_P (size))
7693     {
7694       *perr = cst_size_overflow;
7695       return false;
7696     }
7697 
7698   if (tree_int_cst_sgn (size) < 0)
7699     {
7700       *perr = cst_size_negative;
7701       return false;
7702     }
7703   if (!tree_fits_uhwi_p (size)
7704       || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
7705 	  < wi::to_widest (size) * 2))
7706     {
7707       *perr = cst_size_too_big;
7708       return false;
7709     }
7710 
7711   return true;
7712 }
7713 
7714 /* Return the precision of the type, or for a complex or vector type the
7715    precision of the type of its elements.  */
7716 
7717 unsigned int
7718 element_precision (const_tree type)
7719 {
7720   if (!TYPE_P (type))
7721     type = TREE_TYPE (type);
7722   enum tree_code code = TREE_CODE (type);
7723   if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7724     type = TREE_TYPE (type);
7725 
7726   return TYPE_PRECISION (type);
7727 }
7728 
7729 /* Return true if CODE represents an associative tree code.  Otherwise
7730    return false.  */
7731 bool
7732 associative_tree_code (enum tree_code code)
7733 {
7734   switch (code)
7735     {
7736     case BIT_IOR_EXPR:
7737     case BIT_AND_EXPR:
7738     case BIT_XOR_EXPR:
7739     case PLUS_EXPR:
7740     case MULT_EXPR:
7741     case MIN_EXPR:
7742     case MAX_EXPR:
7743       return true;
7744 
7745     default:
7746       break;
7747     }
7748   return false;
7749 }
7750 
7751 /* Return true if CODE represents a commutative tree code.  Otherwise
7752    return false.  */
7753 bool
7754 commutative_tree_code (enum tree_code code)
7755 {
7756   switch (code)
7757     {
7758     case PLUS_EXPR:
7759     case MULT_EXPR:
7760     case MULT_HIGHPART_EXPR:
7761     case MIN_EXPR:
7762     case MAX_EXPR:
7763     case BIT_IOR_EXPR:
7764     case BIT_XOR_EXPR:
7765     case BIT_AND_EXPR:
7766     case NE_EXPR:
7767     case EQ_EXPR:
7768     case UNORDERED_EXPR:
7769     case ORDERED_EXPR:
7770     case UNEQ_EXPR:
7771     case LTGT_EXPR:
7772     case TRUTH_AND_EXPR:
7773     case TRUTH_XOR_EXPR:
7774     case TRUTH_OR_EXPR:
7775     case WIDEN_MULT_EXPR:
7776     case VEC_WIDEN_MULT_HI_EXPR:
7777     case VEC_WIDEN_MULT_LO_EXPR:
7778     case VEC_WIDEN_MULT_EVEN_EXPR:
7779     case VEC_WIDEN_MULT_ODD_EXPR:
7780       return true;
7781 
7782     default:
7783       break;
7784     }
7785   return false;
7786 }
7787 
7788 /* Return true if CODE represents a ternary tree code for which the
7789    first two operands are commutative.  Otherwise return false.  */
7790 bool
7791 commutative_ternary_tree_code (enum tree_code code)
7792 {
7793   switch (code)
7794     {
7795     case WIDEN_MULT_PLUS_EXPR:
7796     case WIDEN_MULT_MINUS_EXPR:
7797     case DOT_PROD_EXPR:
7798       return true;
7799 
7800     default:
7801       break;
7802     }
7803   return false;
7804 }
7805 
7806 /* Returns true if CODE can overflow.  */
7807 
7808 bool
7809 operation_can_overflow (enum tree_code code)
7810 {
7811   switch (code)
7812     {
7813     case PLUS_EXPR:
7814     case MINUS_EXPR:
7815     case MULT_EXPR:
7816     case LSHIFT_EXPR:
7817       /* Can overflow in various ways.  */
7818       return true;
7819     case TRUNC_DIV_EXPR:
7820     case EXACT_DIV_EXPR:
7821     case FLOOR_DIV_EXPR:
7822     case CEIL_DIV_EXPR:
7823       /* For INT_MIN / -1.  */
7824       return true;
7825     case NEGATE_EXPR:
7826     case ABS_EXPR:
7827       /* For -INT_MIN.  */
7828       return true;
7829     default:
7830       /* These operators cannot overflow.  */
7831       return false;
7832     }
7833 }
7834 
7835 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
7836    -ftrapv doesn't generate trapping insns for CODE.  */
7837 
7838 bool
7839 operation_no_trapping_overflow (tree type, enum tree_code code)
7840 {
7841   gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7842 
7843   /* We don't generate instructions that trap on overflow for complex or vector
7844      types.  */
7845   if (!INTEGRAL_TYPE_P (type))
7846     return true;
7847 
7848   if (!TYPE_OVERFLOW_TRAPS (type))
7849     return true;
7850 
7851   switch (code)
7852     {
7853     case PLUS_EXPR:
7854     case MINUS_EXPR:
7855     case MULT_EXPR:
7856     case NEGATE_EXPR:
7857     case ABS_EXPR:
7858       /* These operators can overflow, and -ftrapv generates trapping code for
7859 	 these.  */
7860       return false;
7861     case TRUNC_DIV_EXPR:
7862     case EXACT_DIV_EXPR:
7863     case FLOOR_DIV_EXPR:
7864     case CEIL_DIV_EXPR:
7865     case LSHIFT_EXPR:
7866       /* These operators can overflow, but -ftrapv does not generate trapping
7867 	 code for these.  */
7868       return true;
7869     default:
7870       /* These operators cannot overflow.  */
7871       return true;
7872     }
7873 }
7874 
7875 /* Constructors for pointer, array and function types.
7876    (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7877    constructed by language-dependent code, not here.)  */
7878 
7879 /* Construct, lay out and return the type of pointers to TO_TYPE with
7880    mode MODE.  If CAN_ALIAS_ALL is TRUE, indicate this type can
7881    reference all of memory. If such a type has already been
7882    constructed, reuse it.  */
7883 
7884 tree
7885 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7886 			     bool can_alias_all)
7887 {
7888   tree t;
7889   bool could_alias = can_alias_all;
7890 
7891   if (to_type == error_mark_node)
7892     return error_mark_node;
7893 
7894   /* If the pointed-to type has the may_alias attribute set, force
7895      a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
7896   if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7897     can_alias_all = true;
7898 
7899   /* In some cases, languages will have things that aren't a POINTER_TYPE
7900      (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7901      In that case, return that type without regard to the rest of our
7902      operands.
7903 
7904      ??? This is a kludge, but consistent with the way this function has
7905      always operated and there doesn't seem to be a good way to avoid this
7906      at the moment.  */
7907   if (TYPE_POINTER_TO (to_type) != 0
7908       && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7909     return TYPE_POINTER_TO (to_type);
7910 
7911   /* First, if we already have a type for pointers to TO_TYPE and it's
7912      the proper mode, use it.  */
7913   for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7914     if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7915       return t;
7916 
7917   t = make_node (POINTER_TYPE);
7918 
7919   TREE_TYPE (t) = to_type;
7920   SET_TYPE_MODE (t, mode);
7921   TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7922   TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7923   TYPE_POINTER_TO (to_type) = t;
7924 
7925   /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
7926   if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7927     SET_TYPE_STRUCTURAL_EQUALITY (t);
7928   else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7929     TYPE_CANONICAL (t)
7930       = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7931 				     mode, false);
7932 
7933   /* Lay out the type.  This function has many callers that are concerned
7934      with expression-construction, and this simplifies them all.  */
7935   layout_type (t);
7936 
7937   return t;
7938 }
7939 
7940 /* By default build pointers in ptr_mode.  */
7941 
7942 tree
7943 build_pointer_type (tree to_type)
7944 {
7945   addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7946 					      : TYPE_ADDR_SPACE (to_type);
7947   machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7948   return build_pointer_type_for_mode (to_type, pointer_mode, false);
7949 }
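
/* So, for example, build_pointer_type (char_type_node) returns the
   shared POINTER_TYPE for "char *" in the default pointer mode, while a
   caller needing a pointer in some other mode (SImode below is purely
   illustrative) calls the _for_mode variant directly:

     tree p = build_pointer_type_for_mode (char_type_node, SImode, false);

   Both reuse an existing node from the TYPE_POINTER_TO chain when one
   matches.  */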
7950 
7951 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE.  */
7952 
7953 tree
7954 build_reference_type_for_mode (tree to_type, machine_mode mode,
7955 			       bool can_alias_all)
7956 {
7957   tree t;
7958   bool could_alias = can_alias_all;
7959 
7960   if (to_type == error_mark_node)
7961     return error_mark_node;
7962 
7963   /* If the pointed-to type has the may_alias attribute set, force
7964      a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
7965   if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7966     can_alias_all = true;
7967 
7968   /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7969      (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7970      In that case, return that type without regard to the rest of our
7971      operands.
7972 
7973      ??? This is a kludge, but consistent with the way this function has
7974      always operated and there doesn't seem to be a good way to avoid this
7975      at the moment.  */
7976   if (TYPE_REFERENCE_TO (to_type) != 0
7977       && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7978     return TYPE_REFERENCE_TO (to_type);
7979 
7980   /* First, if we already have a type for pointers to TO_TYPE and it's
7981      the proper mode, use it.  */
7982   for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7983     if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7984       return t;
7985 
7986   t = make_node (REFERENCE_TYPE);
7987 
7988   TREE_TYPE (t) = to_type;
7989   SET_TYPE_MODE (t, mode);
7990   TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7991   TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7992   TYPE_REFERENCE_TO (to_type) = t;
7993 
7994   /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
7995   if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7996     SET_TYPE_STRUCTURAL_EQUALITY (t);
7997   else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7998     TYPE_CANONICAL (t)
7999       = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
8000 				       mode, false);
8001 
8002   layout_type (t);
8003 
8004   return t;
8005 }
8006 
8007 
8008 /* Build the node for the type of references-to-TO_TYPE by default
8009    in ptr_mode.  */
8010 
8011 tree
8012 build_reference_type (tree to_type)
8013 {
8014   addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
8015 					      : TYPE_ADDR_SPACE (to_type);
8016   machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
8017   return build_reference_type_for_mode (to_type, pointer_mode, false);
8018 }
8019 
8020 #define MAX_INT_CACHED_PREC \
8021   (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8022 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
8023 
8024 /* Builds a signed or unsigned integer type of precision PRECISION.
8025    Used for C bitfields whose precision does not match that of
8026    built-in target types.  */
8027 tree
8028 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
8029 				int unsignedp)
8030 {
8031   tree itype, ret;
8032 
8033   if (unsignedp)
8034     unsignedp = MAX_INT_CACHED_PREC + 1;
8035 
8036   if (precision <= MAX_INT_CACHED_PREC)
8037     {
8038       itype = nonstandard_integer_type_cache[precision + unsignedp];
8039       if (itype)
8040 	return itype;
8041     }
8042 
8043   itype = make_node (INTEGER_TYPE);
8044   TYPE_PRECISION (itype) = precision;
8045 
8046   if (unsignedp)
8047     fixup_unsigned_type (itype);
8048   else
8049     fixup_signed_type (itype);
8050 
8051   inchash::hash hstate;
8052   inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
8053   ret = type_hash_canon (hstate.end (), itype);
8054   if (precision <= MAX_INT_CACHED_PREC)
8055     nonstandard_integer_type_cache[precision + unsignedp] = ret;
8056 
8057   return ret;
8058 }
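
/* E.g. a front end laying out a bit-field declared as
   "unsigned int x : 3" can represent its type as

     tree t = build_nonstandard_integer_type (3, 1);

   Repeated calls with the same precision and signedness hand back the
   same node, either from the small-precision cache above or via
   type_hash_canon.  */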
8059 
8060 #define MAX_BOOL_CACHED_PREC \
8061   (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8062 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
8063 
8064 /* Builds a boolean type of precision PRECISION.
8065    Used for boolean vectors to choose proper vector element size.  */
8066 tree
8067 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
8068 {
8069   tree type;
8070 
8071   if (precision <= MAX_BOOL_CACHED_PREC)
8072     {
8073       type = nonstandard_boolean_type_cache[precision];
8074       if (type)
8075 	return type;
8076     }
8077 
8078   type = make_node (BOOLEAN_TYPE);
8079   TYPE_PRECISION (type) = precision;
8080   fixup_signed_type (type);
8081 
8082   if (precision <= MAX_BOOL_CACHED_PREC)
8083     nonstandard_boolean_type_cache[precision] = type;
8084 
8085   return type;
8086 }
8087 
8088 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
8089    or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL.  If SHARED
8090    is true, reuse such a type that has already been constructed.  */
8091 
8092 static tree
8093 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
8094 {
8095   tree itype = make_node (INTEGER_TYPE);
8096 
8097   TREE_TYPE (itype) = type;
8098 
8099   TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
8100   TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
8101 
8102   TYPE_PRECISION (itype) = TYPE_PRECISION (type);
8103   SET_TYPE_MODE (itype, TYPE_MODE (type));
8104   TYPE_SIZE (itype) = TYPE_SIZE (type);
8105   TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
8106   SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
8107   TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
8108   SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
8109 
8110   if (!shared)
8111     return itype;
8112 
8113   if ((TYPE_MIN_VALUE (itype)
8114        && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
8115       || (TYPE_MAX_VALUE (itype)
8116 	  && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
8117     {
8118       /* Since we cannot reliably merge this type, we need to compare it using
8119 	 structural equality checks.  */
8120       SET_TYPE_STRUCTURAL_EQUALITY (itype);
8121       return itype;
8122     }
8123 
8124   hashval_t hash = type_hash_canon_hash (itype);
8125   itype = type_hash_canon (hash, itype);
8126 
8127   return itype;
8128 }
8129 
8130 /* Wrapper around build_range_type_1 with SHARED set to true.  */
8131 
8132 tree
8133 build_range_type (tree type, tree lowval, tree highval)
8134 {
8135   return build_range_type_1 (type, lowval, highval, true);
8136 }
8137 
8138 /* Wrapper around build_range_type_1 with SHARED set to false.  */
8139 
8140 tree
8141 build_nonshared_range_type (tree type, tree lowval, tree highval)
8142 {
8143   return build_range_type_1 (type, lowval, highval, false);
8144 }
8145 
8146 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
8147    MAXVAL should be the maximum value in the domain
8148    (one less than the length of the array).
8149 
8150    The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
8151    We don't enforce this limit, that is up to caller (e.g. language front end).
8152    The limit exists because the result is a signed type and we don't handle
8153    sizes that use more than one HOST_WIDE_INT.  */
8154 
8155 tree
8156 build_index_type (tree maxval)
8157 {
8158   return build_range_type (sizetype, size_zero_node, maxval);
8159 }
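
/* For example, the TYPE_DOMAIN of a ten-element array is typically
   built as

     build_index_type (size_int (9));

   i.e. the sizetype range [0, 9].  */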
8160 
8161 /* Return true if the debug information for TYPE, a subtype, should be emitted
8162    as a subrange type.  If so, set LOWVAL to the low bound and HIGHVAL to the
8163    high bound, respectively.  Sometimes doing so unnecessarily obfuscates the
8164    debug info and doesn't reflect the source code.  */
8165 
8166 bool
8167 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
8168 {
8169   tree base_type = TREE_TYPE (type), low, high;
8170 
8171   /* Subrange types have a base type which is an integral type.  */
8172   if (!INTEGRAL_TYPE_P (base_type))
8173     return false;
8174 
8175   /* Get the real bounds of the subtype.  */
8176   if (lang_hooks.types.get_subrange_bounds)
8177     lang_hooks.types.get_subrange_bounds (type, &low, &high);
8178   else
8179     {
8180       low = TYPE_MIN_VALUE (type);
8181       high = TYPE_MAX_VALUE (type);
8182     }
8183 
8184   /* If the type and its base type have the same representation and the same
8185      name, then the type is not a subrange but a copy of the base type.  */
8186   if ((TREE_CODE (base_type) == INTEGER_TYPE
8187        || TREE_CODE (base_type) == BOOLEAN_TYPE)
8188       && int_size_in_bytes (type) == int_size_in_bytes (base_type)
8189       && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
8190       && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
8191       && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
8192     return false;
8193 
8194   if (lowval)
8195     *lowval = low;
8196   if (highval)
8197     *highval = high;
8198   return true;
8199 }
8200 
8201 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8202    and number of elements specified by the range of values of INDEX_TYPE.
8203    If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
8204    If SHARED is true, reuse such a type that has already been constructed.
8205    If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type.  */
8206 
8207 static tree
8208 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
8209 		    bool shared, bool set_canonical)
8210 {
8211   tree t;
8212 
8213   if (TREE_CODE (elt_type) == FUNCTION_TYPE)
8214     {
8215       error ("arrays of functions are not meaningful");
8216       elt_type = integer_type_node;
8217     }
8218 
8219   t = make_node (ARRAY_TYPE);
8220   TREE_TYPE (t) = elt_type;
8221   TYPE_DOMAIN (t) = index_type;
8222   TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
8223   TYPE_TYPELESS_STORAGE (t) = typeless_storage;
8224   layout_type (t);
8225 
8226   if (shared)
8227     {
8228       hashval_t hash = type_hash_canon_hash (t);
8229       t = type_hash_canon (hash, t);
8230     }
8231 
8232   if (TYPE_CANONICAL (t) == t && set_canonical)
8233     {
8234       if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
8235 	  || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
8236 	  || in_lto_p)
8237 	SET_TYPE_STRUCTURAL_EQUALITY (t);
8238       else if (TYPE_CANONICAL (elt_type) != elt_type
8239 	       || (index_type && TYPE_CANONICAL (index_type) != index_type))
8240 	TYPE_CANONICAL (t)
8241 	  = build_array_type_1 (TYPE_CANONICAL (elt_type),
8242 				index_type
8243 				? TYPE_CANONICAL (index_type) : NULL_TREE,
8244 				typeless_storage, shared, set_canonical);
8245     }
8246 
8247   return t;
8248 }
8249 
8250 /* Wrapper around build_array_type_1 with SHARED set to true.  */
8251 
8252 tree
8253 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
8254 {
8255   return
8256     build_array_type_1 (elt_type, index_type, typeless_storage, true, true);
8257 }
8258 
8259 /* Wrapper around build_array_type_1 with SHARED set to false.  */
8260 
8261 tree
8262 build_nonshared_array_type (tree elt_type, tree index_type)
8263 {
8264   return build_array_type_1 (elt_type, index_type, false, false, true);
8265 }
8266 
8267 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8268    sizetype.  */
8269 
8270 tree
8271 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
8272 {
8273   return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8274 }
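
/* A small sketch: a type equivalent to "int[4]" can be obtained with

     tree a = build_array_type_nelts (integer_type_node, 4);

   which is shorthand for build_array_type (integer_type_node,
   build_index_type (size_int (3))).  */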
8275 
8276 /* Recursively examines the array elements of TYPE, until a non-array
8277    element type is found.  */
8278 
8279 tree
8280 strip_array_types (tree type)
8281 {
8282   while (TREE_CODE (type) == ARRAY_TYPE)
8283     type = TREE_TYPE (type);
8284 
8285   return type;
8286 }
8287 
8288 /* Computes the canonical argument types from the argument type list
8289    ARGTYPES.
8290 
8291    Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8292    on entry to this function, or if any of the ARGTYPES are
8293    structural.
8294 
8295    Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8296    true on entry to this function, or if any of the ARGTYPES are
8297    non-canonical.
8298 
8299    Returns a canonical argument list, which may be ARGTYPES when the
8300    canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8301    true) or would not differ from ARGTYPES.  */
8302 
8303 static tree
8304 maybe_canonicalize_argtypes (tree argtypes,
8305 			     bool *any_structural_p,
8306 			     bool *any_noncanonical_p)
8307 {
8308   tree arg;
8309   bool any_noncanonical_argtypes_p = false;
8310 
8311   for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8312     {
8313       if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8314 	/* Fail gracefully by stating that the type is structural.  */
8315 	*any_structural_p = true;
8316       else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8317 	*any_structural_p = true;
8318       else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8319 	       || TREE_PURPOSE (arg))
8320 	/* If the argument has a default argument, we consider it
8321 	   non-canonical even though the type itself is canonical.
8322 	   That way, different variants of function and method types
8323 	   with default arguments will all point to the variant with
8324 	   no defaults as their canonical type.  */
8325         any_noncanonical_argtypes_p = true;
8326     }
8327 
8328   if (*any_structural_p)
8329     return argtypes;
8330 
8331   if (any_noncanonical_argtypes_p)
8332     {
8333       /* Build the canonical list of argument types.  */
8334       tree canon_argtypes = NULL_TREE;
8335       bool is_void = false;
8336 
8337       for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8338         {
8339           if (arg == void_list_node)
8340             is_void = true;
8341           else
8342             canon_argtypes = tree_cons (NULL_TREE,
8343                                         TYPE_CANONICAL (TREE_VALUE (arg)),
8344                                         canon_argtypes);
8345         }
8346 
8347       canon_argtypes = nreverse (canon_argtypes);
8348       if (is_void)
8349         canon_argtypes = chainon (canon_argtypes, void_list_node);
8350 
8351       /* There is a non-canonical type.  */
8352       *any_noncanonical_p = true;
8353       return canon_argtypes;
8354     }
8355 
8356   /* The canonical argument types are the same as ARGTYPES.  */
8357   return argtypes;
8358 }
8359 
8360 /* Construct, lay out and return
8361    the type of functions returning type VALUE_TYPE
8362    given arguments of types ARG_TYPES.
8363    ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8364    are data type nodes for the arguments of the function.
8365    If such a type has already been constructed, reuse it.  */
8366 
8367 tree
8368 build_function_type (tree value_type, tree arg_types)
8369 {
8370   tree t;
8371   inchash::hash hstate;
8372   bool any_structural_p, any_noncanonical_p;
8373   tree canon_argtypes;
8374 
8375   gcc_assert (arg_types != error_mark_node);
8376 
8377   if (TREE_CODE (value_type) == FUNCTION_TYPE)
8378     {
8379       error ("function return type cannot be function");
8380       value_type = integer_type_node;
8381     }
8382 
8383   /* Make a node of the sort we want.  */
8384   t = make_node (FUNCTION_TYPE);
8385   TREE_TYPE (t) = value_type;
8386   TYPE_ARG_TYPES (t) = arg_types;
8387 
8388   /* If we already have such a type, use the old one.  */
8389   hashval_t hash = type_hash_canon_hash (t);
8390   t = type_hash_canon (hash, t);
8391 
8392   /* Set up the canonical type. */
8393   any_structural_p   = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8394   any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8395   canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8396 						&any_structural_p,
8397 						&any_noncanonical_p);
8398   if (any_structural_p)
8399     SET_TYPE_STRUCTURAL_EQUALITY (t);
8400   else if (any_noncanonical_p)
8401     TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8402 					      canon_argtypes);
8403 
8404   if (!COMPLETE_TYPE_P (t))
8405     layout_type (t);
8406   return t;
8407 }
8408 
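/* An illustrative sketch, not part of the original source: the type of a
   function `int f (char *, int)' could be built by chaining the argument
   types into a TREE_LIST terminated by void_list_node:

     tree args = tree_cons (NULL_TREE, build_pointer_type (char_type_node),
                            tree_cons (NULL_TREE, integer_type_node,
                                       void_list_node));
     tree fntype = build_function_type (integer_type_node, args);

   A second call with an equivalent argument list yields the same node,
   thanks to the type_hash_canon lookup above.  */
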
8409 /* Build a function type.  The RETURN_TYPE is the type returned by the
8410    function.  If VAARGS is set, no void_type_node is appended to the
8411    list.  ARGP must always be terminated by a NULL_TREE.  */
8412 
8413 static tree
8414 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8415 {
8416   tree t, args, last;
8417 
8418   t = va_arg (argp, tree);
8419   for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8420     args = tree_cons (NULL_TREE, t, args);
8421 
8422   if (vaargs)
8423     {
8424       last = args;
8425       if (args != NULL_TREE)
8426 	args = nreverse (args);
8427       gcc_assert (last != void_list_node);
8428     }
8429   else if (args == NULL_TREE)
8430     args = void_list_node;
8431   else
8432     {
8433       last = args;
8434       args = nreverse (args);
8435       TREE_CHAIN (last) = void_list_node;
8436     }
8437   args = build_function_type (return_type, args);
8438 
8439   return args;
8440 }
8441 
8442 /* Build a function type.  The RETURN_TYPE is the type returned by the
8443    function.  If additional arguments are provided, they are
8444    additional argument types.  The list of argument types must always
8445    be terminated by NULL_TREE.  */
8446 
8447 tree
8448 build_function_type_list (tree return_type, ...)
8449 {
8450   tree args;
8451   va_list p;
8452 
8453   va_start (p, return_type);
8454   args = build_function_type_list_1 (false, return_type, p);
8455   va_end (p);
8456   return args;
8457 }
8458 
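/* For illustration only (a sketch, not from the original source):

     tree fntype = build_function_type_list (void_type_node,
                                             ptr_type_node, sizetype,
                                             NULL_TREE);

   builds the type of a function taking a `void *' and a sizetype value
   and returning void; the trailing NULL_TREE terminates the argument
   list.  */
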
8459 /* Build a variable argument function type.  The RETURN_TYPE is the
8460    type returned by the function.  If additional arguments are provided,
8461    they are additional argument types.  The list of argument types must
8462    always be terminated by NULL_TREE.  */
8463 
8464 tree
8465 build_varargs_function_type_list (tree return_type, ...)
8466 {
8467   tree args;
8468   va_list p;
8469 
8470   va_start (p, return_type);
8471   args = build_function_type_list_1 (true, return_type, p);
8472   va_end (p);
8473 
8474   return args;
8475 }
8476 
8477 /* Build a function type.  RETURN_TYPE is the type returned by the
8478    function; VAARGS indicates whether the function takes varargs.  The
8479    function takes N named arguments, the types of which are provided in
8480    ARG_TYPES.  */
8481 
8482 static tree
8483 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8484 			     tree *arg_types)
8485 {
8486   int i;
8487   tree t = vaargs ? NULL_TREE : void_list_node;
8488 
8489   for (i = n - 1; i >= 0; i--)
8490     t = tree_cons (NULL_TREE, arg_types[i], t);
8491 
8492   return build_function_type (return_type, t);
8493 }
8494 
8495 /* Build a function type.  RETURN_TYPE is the type returned by the
8496    function.  The function takes N named arguments, the types of which
8497    are provided in ARG_TYPES.  */
8498 
8499 tree
8500 build_function_type_array (tree return_type, int n, tree *arg_types)
8501 {
8502   return build_function_type_array_1 (false, return_type, n, arg_types);
8503 }
8504 
8505 /* Build a variable argument function type.  RETURN_TYPE is the type
8506    returned by the function.  The function takes N named arguments, the
8507    types of which are provided in ARG_TYPES.  */
8508 
8509 tree
8510 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8511 {
8512   return build_function_type_array_1 (true, return_type, n, arg_types);
8513 }
8514 
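/* A hypothetical sketch of the array interface (illustrative only):

     tree argtv[2] = { ptr_type_node, integer_type_node };
     tree fntype = build_function_type_array (void_type_node, 2, argtv);

   is equivalent to build_function_type_list (void_type_node, ptr_type_node,
   integer_type_node, NULL_TREE), while build_varargs_function_type_array
   omits the terminating void_list_node, producing a stdarg-style type.  */
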
8515 /* Build a METHOD_TYPE for a member of BASETYPE.  The RETTYPE (a TYPE)
8516    and ARGTYPES (a TREE_LIST) are the return type and arguments types
8517    for the method.  An implicit additional parameter (of type
8518    pointer-to-BASETYPE) is added to the ARGTYPES.  */
8519 
8520 tree
8521 build_method_type_directly (tree basetype,
8522 			    tree rettype,
8523 			    tree argtypes)
8524 {
8525   tree t;
8526   tree ptype;
8527   bool any_structural_p, any_noncanonical_p;
8528   tree canon_argtypes;
8529 
8530   /* Make a node of the sort we want.  */
8531   t = make_node (METHOD_TYPE);
8532 
8533   TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8534   TREE_TYPE (t) = rettype;
8535   ptype = build_pointer_type (basetype);
8536 
8537   /* The actual arglist for this function includes a "hidden" argument
8538      which is "this".  Put it into the list of argument types.  */
8539   argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8540   TYPE_ARG_TYPES (t) = argtypes;
8541 
8542   /* If we already have such a type, use the old one.  */
8543   hashval_t hash = type_hash_canon_hash (t);
8544   t = type_hash_canon (hash, t);
8545 
8546   /* Set up the canonical type. */
8547   any_structural_p
8548     = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8549        || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8550   any_noncanonical_p
8551     = (TYPE_CANONICAL (basetype) != basetype
8552        || TYPE_CANONICAL (rettype) != rettype);
8553   canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8554 						&any_structural_p,
8555 						&any_noncanonical_p);
8556   if (any_structural_p)
8557     SET_TYPE_STRUCTURAL_EQUALITY (t);
8558   else if (any_noncanonical_p)
8559     TYPE_CANONICAL (t)
8560       = build_method_type_directly (TYPE_CANONICAL (basetype),
8561 				    TYPE_CANONICAL (rettype),
8562 				    canon_argtypes);
8563   if (!COMPLETE_TYPE_P (t))
8564     layout_type (t);
8565 
8566   return t;
8567 }
8568 
8569 /* Construct, lay out and return the type of methods belonging to class
8570    BASETYPE and whose arguments and values are described by TYPE.
8571    If that type exists already, reuse it.
8572    TYPE must be a FUNCTION_TYPE node.  */
8573 
8574 tree
8575 build_method_type (tree basetype, tree type)
8576 {
8577   gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8578 
8579   return build_method_type_directly (basetype,
8580 				     TREE_TYPE (type),
8581 				     TYPE_ARG_TYPES (type));
8582 }
8583 
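/* Illustrative sketch (not part of the original source): given a RECORD_TYPE
   `some_class_type' (a placeholder name here) and the FUNCTION_TYPE of
   `int (int)',

     tree fn = build_function_type_list (integer_type_node,
                                         integer_type_node, NULL_TREE);
     tree mt = build_method_type (some_class_type, fn);

   yields a METHOD_TYPE whose argument list starts with the implicit
   pointer-to-some_class_type "this" parameter added above.  */
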
8584 /* Construct, lay out and return the type of offsets to a value
8585    of type TYPE, within an object of type BASETYPE.
8586    If a suitable offset type exists already, reuse it.  */
8587 
8588 tree
8589 build_offset_type (tree basetype, tree type)
8590 {
8591   tree t;
8592 
8593   /* Make a node of the sort we want.  */
8594   t = make_node (OFFSET_TYPE);
8595 
8596   TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8597   TREE_TYPE (t) = type;
8598 
8599   /* If we already have such a type, use the old one.  */
8600   hashval_t hash = type_hash_canon_hash (t);
8601   t = type_hash_canon (hash, t);
8602 
8603   if (!COMPLETE_TYPE_P (t))
8604     layout_type (t);
8605 
8606   if (TYPE_CANONICAL (t) == t)
8607     {
8608       if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8609 	  || TYPE_STRUCTURAL_EQUALITY_P (type))
8610 	SET_TYPE_STRUCTURAL_EQUALITY (t);
8611       else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8612 	       || TYPE_CANONICAL (type) != type)
8613 	TYPE_CANONICAL (t)
8614 	  = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8615 			       TYPE_CANONICAL (type));
8616     }
8617 
8618   return t;
8619 }
8620 
8621 /* Create a complex type whose components are COMPONENT_TYPE.
8622 
8623    If NAMED is true, the type is given a TYPE_NAME.  We do not always
8624    do so because this creates a DECL node and thus makes the DECL_UIDs
8625    dependent on the type canonicalization hashtable, which is GC-ed,
8626    so the DECL_UIDs would not be stable wrt garbage collection.  */
8627 
8628 tree
8629 build_complex_type (tree component_type, bool named)
8630 {
8631   gcc_assert (INTEGRAL_TYPE_P (component_type)
8632 	      || SCALAR_FLOAT_TYPE_P (component_type)
8633 	      || FIXED_POINT_TYPE_P (component_type));
8634 
8635   /* Make a node of the sort we want.  */
8636   tree probe = make_node (COMPLEX_TYPE);
8637 
8638   TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);
8639 
8640   /* If we already have such a type, use the old one.  */
8641   hashval_t hash = type_hash_canon_hash (probe);
8642   tree t = type_hash_canon (hash, probe);
8643 
8644   if (t == probe)
8645     {
8646       /* We created a new type.  The hash insertion will have laid
8647 	 out the type.  We need to check the canonicalization and
8648 	 maybe set the name.  */
8649       gcc_checking_assert (COMPLETE_TYPE_P (t)
8650 			   && !TYPE_NAME (t)
8651 			   && TYPE_CANONICAL (t) == t);
8652 
8653       if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
8654 	SET_TYPE_STRUCTURAL_EQUALITY (t);
8655       else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
8656 	TYPE_CANONICAL (t)
8657 	  = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);
8658 
8659       /* We need to create a name, since complex is a fundamental type.  */
8660       if (named)
8661 	{
8662 	  const char *name = NULL;
8663 
8664 	  if (TREE_TYPE (t) == char_type_node)
8665 	    name = "complex char";
8666 	  else if (TREE_TYPE (t) == signed_char_type_node)
8667 	    name = "complex signed char";
8668 	  else if (TREE_TYPE (t) == unsigned_char_type_node)
8669 	    name = "complex unsigned char";
8670 	  else if (TREE_TYPE (t) == short_integer_type_node)
8671 	    name = "complex short int";
8672 	  else if (TREE_TYPE (t) == short_unsigned_type_node)
8673 	    name = "complex short unsigned int";
8674 	  else if (TREE_TYPE (t) == integer_type_node)
8675 	    name = "complex int";
8676 	  else if (TREE_TYPE (t) == unsigned_type_node)
8677 	    name = "complex unsigned int";
8678 	  else if (TREE_TYPE (t) == long_integer_type_node)
8679 	    name = "complex long int";
8680 	  else if (TREE_TYPE (t) == long_unsigned_type_node)
8681 	    name = "complex long unsigned int";
8682 	  else if (TREE_TYPE (t) == long_long_integer_type_node)
8683 	    name = "complex long long int";
8684 	  else if (TREE_TYPE (t) == long_long_unsigned_type_node)
8685 	    name = "complex long long unsigned int";
8686 
8687 	  if (name != NULL)
8688 	    TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8689 					get_identifier (name), t);
8690 	}
8691     }
8692 
8693   return build_qualified_type (t, TYPE_QUALS (component_type));
8694 }
8695 
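/* An illustrative sketch (not from the original source):

     tree t = build_complex_type (integer_type_node, true);

   returns the canonical COMPLEX_TYPE with `int' components and, when the
   node is newly created, attaches a TYPE_DECL named "complex int" per the
   table above.  Qualifiers on COMPONENT_TYPE are propagated by the final
   build_qualified_type call.  */
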
8696 /* If TYPE is a real or complex floating-point type and the target
8697    does not directly support arithmetic on TYPE then return the wider
8698    type to be used for arithmetic on TYPE.  Otherwise, return
8699    NULL_TREE.  */
8700 
8701 tree
8702 excess_precision_type (tree type)
8703 {
8704   /* The target can give two different responses to the question of
8705      which excess precision mode it would like depending on whether we
8706      are in -fexcess-precision=standard or -fexcess-precision=fast.  */
8707 
8708   enum excess_precision_type requested_type
8709     = (flag_excess_precision == EXCESS_PRECISION_FAST
8710        ? EXCESS_PRECISION_TYPE_FAST
8711        : EXCESS_PRECISION_TYPE_STANDARD);
8712 
8713   enum flt_eval_method target_flt_eval_method
8714     = targetm.c.excess_precision (requested_type);
8715 
8716   /* The target should not ask for unpredictable float evaluation (though
8717      it might implicitly advertise that the evaluation is unpredictable,
8718      but we don't care about that here, it will have been reported
8719      elsewhere).  If it does ask for unpredictable evaluation, we have
8720      nothing to do here.  */
8721   gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
8722 
8723   /* Nothing to do.  The target has asked for all types we know about
8724      to be computed with their native precision and range.  */
8725   if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
8726     return NULL_TREE;
8727 
8728   /* The target will promote this type in a target-dependent way, so excess
8729      precision ought to leave it alone.  */
8730   if (targetm.promoted_type (type) != NULL_TREE)
8731     return NULL_TREE;
8732 
8733   machine_mode float16_type_mode = (float16_type_node
8734 				    ? TYPE_MODE (float16_type_node)
8735 				    : VOIDmode);
8736   machine_mode float_type_mode = TYPE_MODE (float_type_node);
8737   machine_mode double_type_mode = TYPE_MODE (double_type_node);
8738 
8739   switch (TREE_CODE (type))
8740     {
8741     case REAL_TYPE:
8742       {
8743 	machine_mode type_mode = TYPE_MODE (type);
8744 	switch (target_flt_eval_method)
8745 	  {
8746 	  case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8747 	    if (type_mode == float16_type_mode)
8748 	      return float_type_node;
8749 	    break;
8750 	  case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8751 	    if (type_mode == float16_type_mode
8752 		|| type_mode == float_type_mode)
8753 	      return double_type_node;
8754 	    break;
8755 	  case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8756 	    if (type_mode == float16_type_mode
8757 		|| type_mode == float_type_mode
8758 		|| type_mode == double_type_mode)
8759 	      return long_double_type_node;
8760 	    break;
8761 	  default:
8762 	    gcc_unreachable ();
8763 	  }
8764 	break;
8765       }
8766     case COMPLEX_TYPE:
8767       {
8768 	if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8769 	  return NULL_TREE;
8770 	machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
8771 	switch (target_flt_eval_method)
8772 	  {
8773 	  case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8774 	    if (type_mode == float16_type_mode)
8775 	      return complex_float_type_node;
8776 	    break;
8777 	  case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8778 	    if (type_mode == float16_type_mode
8779 		|| type_mode == float_type_mode)
8780 	      return complex_double_type_node;
8781 	    break;
8782 	  case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8783 	    if (type_mode == float16_type_mode
8784 		|| type_mode == float_type_mode
8785 		|| type_mode == double_type_mode)
8786 	      return complex_long_double_type_node;
8787 	    break;
8788 	  default:
8789 	    gcc_unreachable ();
8790 	  }
8791 	break;
8792       }
8793     default:
8794       break;
8795     }
8796 
8797   return NULL_TREE;
8798 }
8799 
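/* A usage sketch (illustrative only): on a target whose excess_precision
   hook answers FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE for the requested mode,

     tree wide = excess_precision_type (float_type_node);

   returns double_type_node, telling the front end to carry out `float'
   arithmetic in `double' and round only on assignment or cast; for other
   evaluation methods it may return NULL_TREE instead.  */
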
8800 /* Return OP, stripped of any conversions to wider types as much as is safe.
8801    Converting the value back to OP's type makes a value equivalent to OP.
8802 
8803    If FOR_TYPE is nonzero, we return a value which, if converted to
8804    type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8805 
8806    OP must have integer, real or enumeral type.  Pointers are not allowed!
8807 
8808    There are some cases where the obvious value we could return
8809    would regenerate to OP if converted to OP's type,
8810    but would not extend like OP to wider types.
8811    If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8812    For example, if OP is (unsigned short)(signed char)-1,
8813    we avoid returning (signed char)-1 if FOR_TYPE is int,
8814    even though extending that to an unsigned short would regenerate OP,
8815    since the result of extending (signed char)-1 to (int)
8816    is different from (int) OP.  */
8817 
8818 tree
8819 get_unwidened (tree op, tree for_type)
8820 {
8821   /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension.  */
8822   tree type = TREE_TYPE (op);
8823   unsigned final_prec
8824     = TYPE_PRECISION (for_type != 0 ? for_type : type);
8825   int uns
8826     = (for_type != 0 && for_type != type
8827        && final_prec > TYPE_PRECISION (type)
8828        && TYPE_UNSIGNED (type));
8829   tree win = op;
8830 
8831   while (CONVERT_EXPR_P (op))
8832     {
8833       int bitschange;
8834 
8835       /* TYPE_PRECISION on vector types has different meaning
8836 	 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8837 	 so avoid them here.  */
8838       if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8839 	break;
8840 
8841       bitschange = TYPE_PRECISION (TREE_TYPE (op))
8842 		   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8843 
8844       /* Truncations are many-one so cannot be removed.
8845 	 Unless we are later going to truncate down even farther.  */
8846       if (bitschange < 0
8847 	  && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8848 	break;
8849 
8850       /* See what's inside this conversion.  If we decide to strip it,
8851 	 we will set WIN.  */
8852       op = TREE_OPERAND (op, 0);
8853 
8854       /* If we have not stripped any zero-extensions (uns is 0),
8855 	 we can strip any kind of extension.
8856 	 If we have previously stripped a zero-extension,
8857 	 only zero-extensions can safely be stripped.
8858 	 Any extension can be stripped if the bits it would produce
8859 	 are all going to be discarded later by truncating to FOR_TYPE.  */
8860 
8861       if (bitschange > 0)
8862 	{
8863 	  if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8864 	    win = op;
8865 	  /* TYPE_UNSIGNED says whether this is a zero-extension.
8866 	     Let's avoid computing it if it does not affect WIN
8867 	     and if UNS will not be needed again.  */
8868 	  if ((uns
8869 	       || CONVERT_EXPR_P (op))
8870 	      && TYPE_UNSIGNED (TREE_TYPE (op)))
8871 	    {
8872 	      uns = 1;
8873 	      win = op;
8874 	    }
8875 	}
8876     }
8877 
8878   /* If we finally reach a constant, see if it fits in something smaller and
8879      in that case convert it.  */
8880   if (TREE_CODE (win) == INTEGER_CST)
8881     {
8882       tree wtype = TREE_TYPE (win);
8883       unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
8884       if (for_type)
8885 	prec = MAX (prec, final_prec);
8886       if (prec < TYPE_PRECISION (wtype))
8887 	{
8888 	  tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
8889 	  if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
8890 	    win = fold_convert (t, win);
8891 	}
8892     }
8893 
8894   return win;
8895 }
8896 
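/* Illustrative example (not part of the original source): if OP is the
   tree for `(int) c' where `c' has type `unsigned char', then

     tree narrow = get_unwidened (op, NULL_TREE);

   returns `c' itself, since zero-extending `c' back to `int' regenerates
   OP; passing a FOR_TYPE restricts which extensions may be stripped, as
   described in the comment above.  */
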
8897 /* Return OP or a simpler expression for a narrower value
8898    which can be sign-extended or zero-extended to give back OP.
8899    Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8900    or 0 if the value should be sign-extended.  */
8901 
8902 tree
8903 get_narrower (tree op, int *unsignedp_ptr)
8904 {
8905   int uns = 0;
8906   int first = 1;
8907   tree win = op;
8908   bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8909 
8910   if (TREE_CODE (op) == COMPOUND_EXPR)
8911     {
8912       do
8913 	op = TREE_OPERAND (op, 1);
8914       while (TREE_CODE (op) == COMPOUND_EXPR);
8915       tree ret = get_narrower (op, unsignedp_ptr);
8916       if (ret == op)
8917 	return win;
8918       auto_vec <tree, 16> v;
8919       unsigned int i;
8920       for (op = win; TREE_CODE (op) == COMPOUND_EXPR;
8921 	   op = TREE_OPERAND (op, 1))
8922 	v.safe_push (op);
8923       FOR_EACH_VEC_ELT_REVERSE (v, i, op)
8924 	ret = build2_loc (EXPR_LOCATION (op), COMPOUND_EXPR,
8925 			  TREE_TYPE (ret), TREE_OPERAND (op, 0),
8926 			  ret);
8927       return ret;
8928     }
8929   while (TREE_CODE (op) == NOP_EXPR)
8930     {
8931       int bitschange
8932 	= (TYPE_PRECISION (TREE_TYPE (op))
8933 	   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8934 
8935       /* Truncations are many-one so cannot be removed.  */
8936       if (bitschange < 0)
8937 	break;
8938 
8939       /* See what's inside this conversion.  If we decide to strip it,
8940 	 we will set WIN.  */
8941 
8942       if (bitschange > 0)
8943 	{
8944 	  op = TREE_OPERAND (op, 0);
8945 	  /* An extension: the outermost one can be stripped,
8946 	     but remember whether it is zero or sign extension.  */
8947 	  if (first)
8948 	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
8949 	  /* Otherwise, if a sign extension has been stripped,
8950 	     only sign extensions can now be stripped;
8951 	     if a zero extension has been stripped, only zero-extensions.  */
8952 	  else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8953 	    break;
8954 	  first = 0;
8955 	}
8956       else /* bitschange == 0 */
8957 	{
8958 	  /* A change in nominal type can always be stripped, but we must
8959 	     preserve the unsignedness.  */
8960 	  if (first)
8961 	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
8962 	  first = 0;
8963 	  op = TREE_OPERAND (op, 0);
8964 	  /* Keep trying to narrow, but don't assign op to win if it
8965 	     would turn an integral type into something else.  */
8966 	  if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8967 	    continue;
8968 	}
8969 
8970       win = op;
8971     }
8972 
8973   if (TREE_CODE (op) == COMPONENT_REF
8974       /* Since type_for_size always gives an integer type.  */
8975       && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8976       && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8977       /* Ensure field is laid out already.  */
8978       && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8979       && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8980     {
8981       unsigned HOST_WIDE_INT innerprec
8982 	= tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8983       int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8984 		       || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8985       tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8986 
8987       /* We can get this structure field in a narrower type that fits it,
8988 	 but the resulting extension to its nominal type (a fullword type)
8989 	 must satisfy the same conditions as for other extensions.
8990 
8991 	 Do this only for fields that are aligned (not bit-fields),
8992 	 because when bit-field insns will be used there is no
8993 	 advantage in doing this.  */
8994 
8995       if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8996 	  && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8997 	  && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8998 	  && type != 0)
8999 	{
9000 	  if (first)
9001 	    uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
9002 	  win = fold_convert (type, op);
9003 	}
9004     }
9005 
9006   *unsignedp_ptr = uns;
9007   return win;
9008 }
9009 
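/* A sketch of typical use (illustrative only):

     int unsignedp;
     tree narrow = get_narrower (op, &unsignedp);

   If OP is `(int) us' with `us' of type `unsigned short', NARROW is `us'
   and UNSIGNEDP is set to 1, recording that re-widening must be done as a
   zero extension.  */
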
9010 /* Return true if integer constant C has a value that is permissible
9011    for TYPE, an integral type.  */
9012 
9013 bool
9014 int_fits_type_p (const_tree c, const_tree type)
9015 {
9016   tree type_low_bound, type_high_bound;
9017   bool ok_for_low_bound, ok_for_high_bound;
9018   signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
9019 
9020   /* Non-standard boolean types can have arbitrary precision but various
9021      transformations assume that they can only take values 0 and +/-1.  */
9022   if (TREE_CODE (type) == BOOLEAN_TYPE)
9023     return wi::fits_to_boolean_p (wi::to_wide (c), type);
9024 
9025 retry:
9026   type_low_bound = TYPE_MIN_VALUE (type);
9027   type_high_bound = TYPE_MAX_VALUE (type);
9028 
9029   /* If at least one bound of the type is a constant integer, we can check
9030      ourselves and maybe make a decision. If no such decision is possible, but
9031      this type is a subtype, try checking against that.  Otherwise, use
9032      fits_to_tree_p, which checks against the precision.
9033 
9034      Compute the status for each possibly constant bound, and return if we see
9035      one does not match.  Use ok_for_xxx_bound for this purpose: it is set to
9036      true when the corresponding bound is a constant that C is known to
9037      satisfy, and left false when that bound cannot be checked here.  */
9038 
9039   /* Check if c >= type_low_bound.  */
9040   if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
9041     {
9042       if (tree_int_cst_lt (c, type_low_bound))
9043 	return false;
9044       ok_for_low_bound = true;
9045     }
9046   else
9047     ok_for_low_bound = false;
9048 
9049   /* Check if c <= type_high_bound.  */
9050   if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
9051     {
9052       if (tree_int_cst_lt (type_high_bound, c))
9053 	return false;
9054       ok_for_high_bound = true;
9055     }
9056   else
9057     ok_for_high_bound = false;
9058 
9059   /* If the constant fits both bounds, the result is known.  */
9060   if (ok_for_low_bound && ok_for_high_bound)
9061     return true;
9062 
9063   /* Perform some generic filtering which may allow making a decision
9064      even if the bounds are not constant.  First, negative integers
9065      never fit in unsigned types.  */
9066   if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
9067     return false;
9068 
9069   /* Second, narrower types always fit in wider ones.  */
9070   if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
9071     return true;
9072 
9073   /* Third, unsigned integers with top bit set never fit signed types.  */
9074   if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
9075     {
9076       int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
9077       if (prec < TYPE_PRECISION (TREE_TYPE (c)))
9078 	{
9079 	  /* When a tree_cst is converted to a wide-int, the precision
9080 	     is taken from the type.  However, if the precision of the
9081 	     mode underneath the type is smaller than that, it is
9082 	     possible that the value will not fit.  The test below
9083 	     fails if any bit is set between the sign bit of the
9084 	     underlying mode and the top bit of the type.  */
9085 	  if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
9086 	    return false;
9087 	}
9088       else if (wi::neg_p (wi::to_wide (c)))
9089 	return false;
9090     }
9091 
9092   /* If we haven't been able to decide at this point, there is nothing more we
9093      can check ourselves here.  Look at the base type if we have one and it
9094      has the same precision.  */
9095   if (TREE_CODE (type) == INTEGER_TYPE
9096       && TREE_TYPE (type) != 0
9097       && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
9098     {
9099       type = TREE_TYPE (type);
9100       goto retry;
9101     }
9102 
9103   /* Or to fits_to_tree_p, if nothing else.  */
9104   return wi::fits_to_tree_p (wi::to_wide (c), type);
9105 }
9106 
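/* For example (an illustrative sketch, assuming the usual 8-bit
   `unsigned char'):

     int_fits_type_p (build_int_cst (integer_type_node, 300),
                      unsigned_char_type_node)

   returns false because 300 exceeds the 0..255 range, while the same
   constant trivially fits integer_type_node or any wider type.  */
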
9107 /* Stores bounds of an integer TYPE in MIN and MAX.  If TYPE has non-constant
9108    bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
9109    represented (assuming two's-complement arithmetic) within the bit
9110    precision of the type are returned instead.  */
9111 
9112 void
9113 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
9114 {
9115   if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
9116       && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
9117     wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
9118   else
9119     {
9120       if (TYPE_UNSIGNED (type))
9121 	mpz_set_ui (min, 0);
9122       else
9123 	{
9124 	  wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
9125 	  wi::to_mpz (mn, min, SIGNED);
9126 	}
9127     }
9128 
9129   if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
9130       && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
9131     wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
9132   else
9133     {
9134       wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
9135       wi::to_mpz (mn, max, TYPE_SIGN (type));
9136     }
9137 }
9138 
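/* A usage sketch (illustrative only, again assuming an 8-bit char):

     mpz_t lo, hi;
     mpz_init (lo);
     mpz_init (hi);
     get_type_static_bounds (unsigned_char_type_node, lo, hi);

   leaves LO == 0 and HI == 255; for a POINTER_TYPE the representable
   range of the pointer precision is used instead.  */
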
9139 /* Return true if VAR is an automatic variable.  */
9140 
9141 bool
9142 auto_var_p (const_tree var)
9143 {
9144   return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
9145 	    || TREE_CODE (var) == PARM_DECL)
9146 	   && ! TREE_STATIC (var))
9147 	  || TREE_CODE (var) == RESULT_DECL);
9148 }
9149 
9150 /* Return true if VAR is an automatic variable defined in function FN.  */
9151 
9152 bool
9153 auto_var_in_fn_p (const_tree var, const_tree fn)
9154 {
9155   return (DECL_P (var) && DECL_CONTEXT (var) == fn
9156 	  && (auto_var_p (var)
9157 	      || TREE_CODE (var) == LABEL_DECL));
9158 }
9159 
9160 /* Subprogram of following function.  Called by walk_tree.
9161 
9162    Return *TP if it is an automatic variable or parameter of the
9163    function passed in as DATA.  */
9164 
9165 static tree
9166 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
9167 {
9168   tree fn = (tree) data;
9169 
9170   if (TYPE_P (*tp))
9171     *walk_subtrees = 0;
9172 
9173   else if (DECL_P (*tp)
9174 	   && auto_var_in_fn_p (*tp, fn))
9175     return *tp;
9176 
9177   return NULL_TREE;
9178 }
9179 
9180 /* Returns true if T is, contains, or refers to a type with variable
9181    size.  For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
9182    arguments, but not the return type.  If FN is nonzero, only return
9183    true if a modifier of the type or position of FN is a variable or
9184    parameter inside FN.
9185 
9186    This concept is more general than that of C99 'variably modified types':
9187    in C99, a struct type is never variably modified because a VLA may not
9188    appear as a structure member.  However, in GNU C, code like:
9189 
9190      struct S { int i[f()]; };
9191 
9192    is valid, and other languages may define similar constructs.  */
9193 
9194 bool
9195 variably_modified_type_p (tree type, tree fn)
9196 {
9197   tree t;
9198 
9199 /* Test if T is either variable (if FN is zero) or an expression containing
9200    a variable in FN.  If TYPE isn't gimplified, return true also if
9201    gimplify_one_sizepos would gimplify the expression into a local
9202    variable.  */
9203 #define RETURN_TRUE_IF_VAR(T)						\
9204   do { tree _t = (T);							\
9205     if (_t != NULL_TREE							\
9206 	&& _t != error_mark_node					\
9207 	&& !CONSTANT_CLASS_P (_t)					\
9208 	&& TREE_CODE (_t) != PLACEHOLDER_EXPR				\
9209 	&& (!fn								\
9210 	    || (!TYPE_SIZES_GIMPLIFIED (type)				\
9211 		&& (TREE_CODE (_t) != VAR_DECL				\
9212 		    && !CONTAINS_PLACEHOLDER_P (_t)))			\
9213 	    || walk_tree (&_t, find_var_from_fn, fn, NULL)))		\
9214       return true;  } while (0)
9215 
9216   if (type == error_mark_node)
9217     return false;
9218 
9219   /* If TYPE itself has variable size, it is variably modified.  */
9220   RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
9221   RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
9222 
9223   switch (TREE_CODE (type))
9224     {
9225     case POINTER_TYPE:
9226     case REFERENCE_TYPE:
9227     case VECTOR_TYPE:
9228       /* Ada can have pointer types referring to themselves indirectly.  */
9229       if (TREE_VISITED (type))
9230 	return false;
9231       TREE_VISITED (type) = true;
9232       if (variably_modified_type_p (TREE_TYPE (type), fn))
9233 	{
9234 	  TREE_VISITED (type) = false;
9235 	  return true;
9236 	}
9237       TREE_VISITED (type) = false;
9238       break;
9239 
9240     case FUNCTION_TYPE:
9241     case METHOD_TYPE:
9242       /* If TYPE is a function type, it is variably modified if the
9243 	 return type is variably modified.  */
9244       if (variably_modified_type_p (TREE_TYPE (type), fn))
9245 	  return true;
9246       break;
9247 
9248     case INTEGER_TYPE:
9249     case REAL_TYPE:
9250     case FIXED_POINT_TYPE:
9251     case ENUMERAL_TYPE:
9252     case BOOLEAN_TYPE:
9253       /* Scalar types are variably modified if their end points
9254 	 aren't constant.  */
9255       RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
9256       RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
9257       break;
9258 
9259     case RECORD_TYPE:
9260     case UNION_TYPE:
9261     case QUAL_UNION_TYPE:
9262       /* We can't see if any of the fields are variably-modified by the
9263 	 definition we normally use, since that would produce infinite
9264 	 recursion via pointers.  */
9265       /* This is variably modified if some field's type is.  */
9266       for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
9267 	if (TREE_CODE (t) == FIELD_DECL)
9268 	  {
9269 	    RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
9270 	    RETURN_TRUE_IF_VAR (DECL_SIZE (t));
9271 	    RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
9272 
9273 	    /* If the type is a qualified union, then the DECL_QUALIFIER
9274 	       of fields can also be an expression containing a variable.  */
9275 	    if (TREE_CODE (type) == QUAL_UNION_TYPE)
9276 	      RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
9277 
9278 	    /* If the field is a qualified union, then it's only a container
9279 	       for what's inside so we look into it.  That's necessary in LTO
9280 	       mode because the sizes of the field tested above have been set
9281 	       to PLACEHOLDER_EXPRs by free_lang_data.  */
9282 	    if (TREE_CODE (TREE_TYPE (t)) == QUAL_UNION_TYPE
9283 		&& variably_modified_type_p (TREE_TYPE (t), fn))
9284 	      return true;
9285 	  }
9286       break;
9287 
9288     case ARRAY_TYPE:
9289       /* Do not call ourselves to avoid infinite recursion.  This is
9290 	 variably modified if the element type is.  */
9291       RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
9292       RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
9293       break;
9294 
9295     default:
9296       break;
9297     }
9298 
9299   /* The current language may have other cases to check, but in general,
9300      all other types are not variably modified.  */
9301   return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
9302 
9303 #undef RETURN_TRUE_IF_VAR
9304 }
9305 
9306 /* Given a DECL or TYPE, return the scope in which it was declared, or
9307    NULL_TREE if there is no containing scope.  */
9308 
9309 tree
9310 get_containing_scope (const_tree t)
9311 {
9312   return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9313 }
9314 
9315 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL.  */
9316 
9317 const_tree
9318 get_ultimate_context (const_tree decl)
9319 {
9320   while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
9321     {
9322       if (TREE_CODE (decl) == BLOCK)
9323 	decl = BLOCK_SUPERCONTEXT (decl);
9324       else
9325 	decl = get_containing_scope (decl);
9326     }
9327   return decl;
9328 }
9329 
9330 /* Return the innermost context enclosing DECL that is
9331    a FUNCTION_DECL, or zero if none.  */
9332 
9333 tree
9334 decl_function_context (const_tree decl)
9335 {
9336   tree context;
9337 
9338   if (TREE_CODE (decl) == ERROR_MARK)
9339     return 0;
9340 
9341   /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9342      where we look up the function at runtime.  Such functions always take
9343      a first argument of type 'pointer to real context'.
9344 
9345      C++ should really be fixed to use DECL_CONTEXT for the real context,
9346      and use something else for the "virtual context".  */
9347   else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
9348     context
9349       = TYPE_MAIN_VARIANT
9350 	(TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9351   else
9352     context = DECL_CONTEXT (decl);
9353 
9354   while (context && TREE_CODE (context) != FUNCTION_DECL)
9355     {
9356       if (TREE_CODE (context) == BLOCK)
9357 	context = BLOCK_SUPERCONTEXT (context);
9358       else
9359 	context = get_containing_scope (context);
9360     }
9361 
9362   return context;
9363 }
9364 
9365 /* Return the innermost context enclosing DECL that is
9366    a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9367    TYPE_DECLs and FUNCTION_DECLs are transparent to this function.  */
9368 
9369 tree
9370 decl_type_context (const_tree decl)
9371 {
9372   tree context = DECL_CONTEXT (decl);
9373 
9374   while (context)
9375     switch (TREE_CODE (context))
9376       {
9377       case NAMESPACE_DECL:
9378       case TRANSLATION_UNIT_DECL:
9379 	return NULL_TREE;
9380 
9381       case RECORD_TYPE:
9382       case UNION_TYPE:
9383       case QUAL_UNION_TYPE:
9384 	return context;
9385 
9386       case TYPE_DECL:
9387       case FUNCTION_DECL:
9388 	context = DECL_CONTEXT (context);
9389 	break;
9390 
9391       case BLOCK:
9392 	context = BLOCK_SUPERCONTEXT (context);
9393 	break;
9394 
9395       default:
9396 	gcc_unreachable ();
9397       }
9398 
9399   return NULL_TREE;
9400 }
9401 
9402 /* CALL is a CALL_EXPR.  Return the declaration for the function
9403    called, or NULL_TREE if the called function cannot be
9404    determined.  */
9405 
9406 tree
9407 get_callee_fndecl (const_tree call)
9408 {
9409   tree addr;
9410 
9411   if (call == error_mark_node)
9412     return error_mark_node;
9413 
9414   /* It's invalid to call this function with anything but a
9415      CALL_EXPR.  */
9416   gcc_assert (TREE_CODE (call) == CALL_EXPR);
9417 
9418   /* The first operand to the CALL is the address of the function
9419      called.  */
9420   addr = CALL_EXPR_FN (call);
9421 
9422   /* If there is no function, return early.  */
9423   if (addr == NULL_TREE)
9424     return NULL_TREE;
9425 
9426   STRIP_NOPS (addr);
9427 
9428   /* If this is a readonly function pointer, extract its initial value.  */
9429   if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9430       && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9431       && DECL_INITIAL (addr))
9432     addr = DECL_INITIAL (addr);
9433 
9434   /* If the address is just `&f' for some function `f', then we know
9435      that `f' is being called.  */
9436   if (TREE_CODE (addr) == ADDR_EXPR
9437       && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9438     return TREE_OPERAND (addr, 0);
9439 
9440   /* We couldn't figure out what was being called.  */
9441   return NULL_TREE;
9442 }
9443 
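/* Illustrative sketch (not part of the original source): for a direct call
   written as `foo (x)' the CALL_EXPR_FN operand is `&foo', so

     tree fndecl = get_callee_fndecl (call);

   yields the FUNCTION_DECL for `foo'; for an indirect call through a
   non-constant function pointer it returns NULL_TREE instead.  */
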
9444 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
9445    return the associated function code, otherwise return CFN_LAST.  */
9446 
9447 combined_fn
9448 get_call_combined_fn (const_tree call)
9449 {
9450   /* It's invalid to call this function with anything but a CALL_EXPR.  */
9451   gcc_assert (TREE_CODE (call) == CALL_EXPR);
9452 
9453   if (!CALL_EXPR_FN (call))
9454     return as_combined_fn (CALL_EXPR_IFN (call));
9455 
9456   tree fndecl = get_callee_fndecl (call);
9457   if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
9458     return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
9459 
9460   return CFN_LAST;
9461 }
9462 
9463 /* Comparator of indices based on tree_node_counts.  */
9464 
9465 static int
9466 tree_nodes_cmp (const void *p1, const void *p2)
9467 {
9468   const unsigned *n1 = (const unsigned *)p1;
9469   const unsigned *n2 = (const unsigned *)p2;
9470 
9471   return tree_node_counts[*n1] - tree_node_counts[*n2];
9472 }
9473 
9474 /* Comparator of indices based on tree_code_counts.  */
9475 
9476 static int
9477 tree_codes_cmp (const void *p1, const void *p2)
9478 {
9479   const unsigned *n1 = (const unsigned *)p1;
9480   const unsigned *n2 = (const unsigned *)p2;
9481 
9482   return tree_code_counts[*n1] - tree_code_counts[*n2];
9483 }
9484 
9485 #define TREE_MEM_USAGE_SPACES 40
9486 
9487 /* Print debugging information about tree nodes generated during the compile,
9488    and any language-specific information.  */
9489 
9490 void
9491 dump_tree_statistics (void)
9492 {
9493   if (GATHER_STATISTICS)
9494     {
9495       uint64_t total_nodes, total_bytes;
9496       fprintf (stderr, "\nKind                   Nodes      Bytes\n");
9497       mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9498       total_nodes = total_bytes = 0;
9499 
9500       {
9501 	auto_vec<unsigned> indices (all_kinds);
9502 	for (unsigned i = 0; i < all_kinds; i++)
9503 	  indices.quick_push (i);
9504 	indices.qsort (tree_nodes_cmp);
9505 
9506 	for (unsigned i = 0; i < (int) all_kinds; i++)
9507 	  {
9508 	    unsigned j = indices[i];
9509 	    fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
9510 		     tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
9511 		     SIZE_AMOUNT (tree_node_sizes[j]));
9512 	    total_nodes += tree_node_counts[j];
9513 	    total_bytes += tree_node_sizes[j];
9514 	  }
9515 	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9516 	fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
9517 		 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
9518 	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9519       }
9520 
9521       {
9522 	fprintf (stderr, "Code                              Nodes\n");
9523 	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9524 
9525 	auto_vec<unsigned> indices (MAX_TREE_CODES);
9526 	for (unsigned i = 0; i < MAX_TREE_CODES; i++)
9527 	  indices.quick_push (i);
9528 	indices.qsort (tree_codes_cmp);
9529 
9530 	for (unsigned i = 0; i < MAX_TREE_CODES; i++)
9531 	  {
9532 	    unsigned j = indices[i];
9533 	    fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
9534 		     get_tree_code_name ((enum tree_code) j),
9535 		     SIZE_AMOUNT (tree_code_counts[j]));
9536 	  }
9537 	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9538 	fprintf (stderr, "\n");
9539 	ssanames_print_statistics ();
9540 	fprintf (stderr, "\n");
9541 	phinodes_print_statistics ();
9542 	fprintf (stderr, "\n");
9543       }
9544     }
9545   else
9546     fprintf (stderr, "(No per-node statistics)\n");
9547 
9548   print_type_hash_statistics ();
9549   print_debug_expr_statistics ();
9550   print_value_expr_statistics ();
9551   lang_hooks.print_statistics ();
9552 }
9553 
9554 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9555 
9556 /* Generate a crc32 of the low BYTES bytes of VALUE.  */
9557 
9558 unsigned
9559 crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
9560 {
9561   /* This relies on the raw feedback's top 4 bits being zero.  */
9562 #define FEEDBACK(X) ((X) * 0x04c11db7)
9563 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
9564 		     ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
9565   static const unsigned syndromes[16] =
9566     {
9567       SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
9568       SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
9569       SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
9570       SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
9571     };
9572 #undef FEEDBACK
9573 #undef SYNDROME
9574 
9575   value <<= (32 - bytes * 8);
9576   for (unsigned ix = bytes * 2; ix--; value <<= 4)
9577     {
9578       unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];
9579 
9580       chksum = (chksum << 4) ^ feedback;
9581     }
9582 
9583   return chksum;
9584 }
9585 
9586 /* Generate a crc32 of a string.  */
9587 
9588 unsigned
9589 crc32_string (unsigned chksum, const char *string)
9590 {
9591   do
9592     chksum = crc32_byte (chksum, *string);
9593   while (*string++);
9594   return chksum;
9595 }
9596 
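/* For illustration only: the seed used further below in
   get_file_function_name is computed as

     unsigned chk = crc32_string (0, name);

   i.e. a zero initial checksum folded over NAME including its trailing
   NUL byte, since the loop above hashes *string before testing it.  */
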
9597 /* P is a string that will be used in a symbol.  Mask out any characters
9598    that are not valid in that context.  */
9599 
9600 void
9601 clean_symbol_name (char *p)
9602 {
9603   for (; *p; p++)
9604     if (! (ISALNUM (*p)
9605 #ifndef NO_DOLLAR_IN_LABEL	/* this for `$'; unlikely, but... -- kr */
9606 	    || *p == '$'
9607 #endif
9608 #ifndef NO_DOT_IN_LABEL		/* this for `.'; unlikely, but...  */
9609 	    || *p == '.'
9610 #endif
9611 	   ))
9612       *p = '_';
9613 }
9614 
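/* An illustrative sketch: on a target where both NO_DOLLAR_IN_LABEL and
   NO_DOT_IN_LABEL are defined,

     char buf[] = "foo-bar.c";
     clean_symbol_name (buf);

   rewrites BUF in place to "foo_bar_c", since only alphanumerics survive
   the filter above.  */
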
9615 static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH.  */
9616 
9617 /* Create a unique anonymous identifier.  The identifier is still a
9618    valid assembly label.  */
9619 
9620 tree
9621 make_anon_name ()
9622 {
9623   const char *fmt =
9624 #if !defined (NO_DOT_IN_LABEL)
9625     "."
9626 #elif !defined (NO_DOLLAR_IN_LABEL)
9627     "$"
9628 #else
9629     "_"
9630 #endif
9631     "_anon_%d";
9632 
9633   char buf[24];
9634   int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
9635   gcc_checking_assert (len < int (sizeof (buf)));
9636 
9637   tree id = get_identifier_with_length (buf, len);
9638   IDENTIFIER_ANON_P (id) = true;
9639 
9640   return id;
9641 }
9642 
9643 /* Generate a name for a special-purpose function.
9644    The generated name may need to be unique across the whole link.
9645    Changes to this function may also require corresponding changes to
9646    xstrdup_mask_random.
9647    TYPE is some string to identify the purpose of this function to the
9648    linker or collect2; it must start with an uppercase letter,
9649    one of:
9650    I - for constructors
9651    D - for destructors
9652    N - for C++ anonymous namespaces
9653    F - for DWARF unwind frame information.  */
9654 
9655 tree
9656 get_file_function_name (const char *type)
9657 {
9658   char *buf;
9659   const char *p;
9660   char *q;
9661 
9662   /* If we already have a name we know to be unique, just use that.  */
9663   if (first_global_object_name)
9664     p = q = ASTRDUP (first_global_object_name);
9665   /* If the target is handling the constructors/destructors, they
9666      will be local to this file and the name is only necessary for
9667      debugging purposes.
9668      We also assign sub_I and sub_D suffixes to constructors called from
9669      the global static constructors.  These are always local.  */
9670   else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9671 	   || (strncmp (type, "sub_", 4) == 0
9672 	       && (type[4] == 'I' || type[4] == 'D')))
9673     {
9674       const char *file = main_input_filename;
9675       if (! file)
9676 	file = LOCATION_FILE (input_location);
9677       /* Just use the file's basename, because the full pathname
9678 	 might be quite long.  */
9679       p = q = ASTRDUP (lbasename (file));
9680     }
9681   else
9682     {
9683       /* Otherwise, the name must be unique across the entire link.
9684 	 We don't have anything that we know to be unique to this translation
9685 	 unit, so use what we do have and throw in some randomness.  */
9686       unsigned len;
9687       const char *name = weak_global_object_name;
9688       const char *file = main_input_filename;
9689 
9690       if (! name)
9691 	name = "";
9692       if (! file)
9693 	file = LOCATION_FILE (input_location);
9694 
9695       len = strlen (file);
9696       q = (char *) alloca (9 + 19 + len + 1);
9697       memcpy (q, file, len + 1);
9698 
9699       snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9700 		crc32_string (0, name), get_random_seed (false));
9701 
9702       p = q;
9703     }
9704 
9705   clean_symbol_name (q);
9706   buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9707 			 + strlen (type));
9708 
9709   /* Set up the name of the file-level functions we may need.
9710      Use a global object (which is already required to be unique over
9711      the program) rather than the file name (which imposes extra
9712      constraints).  */
9713   sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9714 
9715   return get_identifier (buf);
9716 }
9717 
9718 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9719 
9720 /* Complain that the tree code of NODE does not match the expected 0
9721    terminated list of trailing codes. The trailing code list can be
9722    empty, for a more vague error message.  FILE, LINE, and FUNCTION
9723    are of the caller.  */
9724 
9725 void
9726 tree_check_failed (const_tree node, const char *file,
9727 		   int line, const char *function, ...)
9728 {
9729   va_list args;
9730   const char *buffer;
9731   unsigned length = 0;
9732   enum tree_code code;
9733 
9734   va_start (args, function);
9735   while ((code = (enum tree_code) va_arg (args, int)))
9736     length += 4 + strlen (get_tree_code_name (code));
9737   va_end (args);
9738   if (length)
9739     {
9740       char *tmp;
9741       va_start (args, function);
9742       length += strlen ("expected ");
9743       buffer = tmp = (char *) alloca (length);
9744       length = 0;
9745       while ((code = (enum tree_code) va_arg (args, int)))
9746 	{
9747 	  const char *prefix = length ? " or " : "expected ";
9748 
9749 	  strcpy (tmp + length, prefix);
9750 	  length += strlen (prefix);
9751 	  strcpy (tmp + length, get_tree_code_name (code));
9752 	  length += strlen (get_tree_code_name (code));
9753 	}
9754       va_end (args);
9755     }
9756   else
9757     buffer = "unexpected node";
9758 
9759   internal_error ("tree check: %s, have %s in %s, at %s:%d",
9760 		  buffer, get_tree_code_name (TREE_CODE (node)),
9761 		  function, trim_filename (file), line);
9762 }
9763 
9764 /* Complain that the tree code of NODE does match the expected 0
9765    terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9766    the caller.  */
9767 
9768 void
9769 tree_not_check_failed (const_tree node, const char *file,
9770 		       int line, const char *function, ...)
9771 {
9772   va_list args;
9773   char *buffer;
9774   unsigned length = 0;
9775   enum tree_code code;
9776 
9777   va_start (args, function);
9778   while ((code = (enum tree_code) va_arg (args, int)))
9779     length += 4 + strlen (get_tree_code_name (code));
9780   va_end (args);
9781   va_start (args, function);
9782   buffer = (char *) alloca (length);
9783   length = 0;
9784   while ((code = (enum tree_code) va_arg (args, int)))
9785     {
9786       if (length)
9787 	{
9788 	  strcpy (buffer + length, " or ");
9789 	  length += 4;
9790 	}
9791       strcpy (buffer + length, get_tree_code_name (code));
9792       length += strlen (get_tree_code_name (code));
9793     }
9794   va_end (args);
9795 
9796   internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9797 		  buffer, get_tree_code_name (TREE_CODE (node)),
9798 		  function, trim_filename (file), line);
9799 }
9800 
9801 /* Similar to tree_check_failed, except that we check for a class of tree
9802    code, given in CL.  */
9803 
9804 void
9805 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9806 			 const char *file, int line, const char *function)
9807 {
9808   internal_error
9809     ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9810      TREE_CODE_CLASS_STRING (cl),
9811      TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9812      get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9813 }
9814 
9815 /* Similar to tree_check_failed, except that instead of specifying a
9816    dozen codes, use the knowledge that they're all sequential.  */
9817 
9818 void
9819 tree_range_check_failed (const_tree node, const char *file, int line,
9820 			 const char *function, enum tree_code c1,
9821 			 enum tree_code c2)
9822 {
9823   char *buffer;
9824   unsigned length = 0;
9825   unsigned int c;
9826 
9827   for (c = c1; c <= c2; ++c)
9828     length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9829 
9830   length += strlen ("expected ");
9831   buffer = (char *) alloca (length);
9832   length = 0;
9833 
9834   for (c = c1; c <= c2; ++c)
9835     {
9836       const char *prefix = length ? " or " : "expected ";
9837 
9838       strcpy (buffer + length, prefix);
9839       length += strlen (prefix);
9840       strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9841       length += strlen (get_tree_code_name ((enum tree_code) c));
9842     }
9843 
9844   internal_error ("tree check: %s, have %s in %s, at %s:%d",
9845 		  buffer, get_tree_code_name (TREE_CODE (node)),
9846 		  function, trim_filename (file), line);
9847 }
9848 
9849 
9850 /* Similar to tree_check_failed, except that we check that a tree does
9851    not have the specified code, given in CL.  */
9852 
9853 void
9854 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9855 			     const char *file, int line, const char *function)
9856 {
9857   internal_error
9858     ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9859      TREE_CODE_CLASS_STRING (cl),
9860      TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9861      get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9862 }
9863 
9864 
9865 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes.  */
9866 
9867 void
9868 omp_clause_check_failed (const_tree node, const char *file, int line,
9869                          const char *function, enum omp_clause_code code)
9870 {
9871   internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
9872 		  "in %s, at %s:%d",
9873 		  omp_clause_code_name[code],
9874 		  get_tree_code_name (TREE_CODE (node)),
9875 		  function, trim_filename (file), line);
9876 }
9877 
9878 
9879 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes.  */
9880 
9881 void
9882 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9883 			       const char *function, enum omp_clause_code c1,
9884 			       enum omp_clause_code c2)
9885 {
9886   char *buffer;
9887   unsigned length = 0;
9888   unsigned int c;
9889 
9890   for (c = c1; c <= c2; ++c)
9891     length += 4 + strlen (omp_clause_code_name[c]);
9892 
9893   length += strlen ("expected ");
9894   buffer = (char *) alloca (length);
9895   length = 0;
9896 
9897   for (c = c1; c <= c2; ++c)
9898     {
9899       const char *prefix = length ? " or " : "expected ";
9900 
9901       strcpy (buffer + length, prefix);
9902       length += strlen (prefix);
9903       strcpy (buffer + length, omp_clause_code_name[c]);
9904       length += strlen (omp_clause_code_name[c]);
9905     }
9906 
9907   internal_error ("tree check: %s, have %s in %s, at %s:%d",
9908 		  buffer, omp_clause_code_name[TREE_CODE (node)],
9909 		  function, trim_filename (file), line);
9910 }
9911 
9912 
9913 #undef DEFTREESTRUCT
9914 #define DEFTREESTRUCT(VAL, NAME) NAME,
9915 
9916 static const char *ts_enum_names[] = {
9917 #include "treestruct.def"
9918 };
9919 #undef DEFTREESTRUCT
9920 
9921 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9922 
9923 /* Similar to tree_class_check_failed, except that we check for
9924    whether CODE contains the tree structure identified by EN.  */
9925 
9926 void
9927 tree_contains_struct_check_failed (const_tree node,
9928 				   const enum tree_node_structure_enum en,
9929 				   const char *file, int line,
9930 				   const char *function)
9931 {
9932   internal_error
9933     ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9934      TS_ENUM_NAME (en),
9935      get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9936 }
9937 
9938 
9939 /* Similar to above, except that the check is for the bounds of a
9940    TREE_INT_CST's (dynamically sized) vector of elements.  */
9941 
9942 void
9943 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9944 			       const char *function)
9945 {
9946   internal_error
9947     ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
9948      "at %s:%d",
9949      idx + 1, len, function, trim_filename (file), line);
9950 }
9951 
9952 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9953    (dynamically sized) vector.  */
9954 
9955 void
9956 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9957 			   const char *function)
9958 {
9959   internal_error
9960     ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
9961      idx + 1, len, function, trim_filename (file), line);
9962 }
9963 
9964 /* Similar to above, except that the check is for the bounds of the operand
9965    vector of an expression node EXP.  */
9966 
9967 void
9968 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9969 			   int line, const char *function)
9970 {
9971   enum tree_code code = TREE_CODE (exp);
9972   internal_error
9973     ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9974      idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9975      function, trim_filename (file), line);
9976 }
9977 
9978 /* Similar to above, except that the check is for the number of
9979    operands of an OMP_CLAUSE node.  */
9980 
9981 void
9982 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9983 			         int line, const char *function)
9984 {
9985   internal_error
9986     ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
9987      "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9988      omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9989      trim_filename (file), line);
9990 }
9991 #endif /* ENABLE_TREE_CHECKING */
9992 
9993 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9994    and mapped to the machine mode MODE.  Initialize its fields and build
9995    the information necessary for debugging output.  */
9996 
9997 static tree
9998 make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
9999 {
10000   tree t;
10001   tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
10002 
10003   t = make_node (VECTOR_TYPE);
10004   TREE_TYPE (t) = mv_innertype;
10005   SET_TYPE_VECTOR_SUBPARTS (t, nunits);
10006   SET_TYPE_MODE (t, mode);
10007 
10008   if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
10009     SET_TYPE_STRUCTURAL_EQUALITY (t);
10010   else if ((TYPE_CANONICAL (mv_innertype) != innertype
10011 	    || mode != VOIDmode)
10012 	   && !VECTOR_BOOLEAN_TYPE_P (t))
10013     TYPE_CANONICAL (t)
10014       = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
10015 
10016   layout_type (t);
10017 
10018   hashval_t hash = type_hash_canon_hash (t);
10019   t = type_hash_canon (hash, t);
10020 
10021   /* We have built a main variant, based on the main variant of the
10022      inner type. Use it to build the variant we return.  */
10023   if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
10024       && TREE_TYPE (t) != innertype)
10025     return build_type_attribute_qual_variant (t,
10026 					      TYPE_ATTRIBUTES (innertype),
10027 					      TYPE_QUALS (innertype));
10028 
10029   return t;
10030 }
10031 
10032 static tree
10033 make_or_reuse_type (unsigned size, int unsignedp)
10034 {
10035   int i;
10036 
10037   if (size == INT_TYPE_SIZE)
10038     return unsignedp ? unsigned_type_node : integer_type_node;
10039   if (size == CHAR_TYPE_SIZE)
10040     return unsignedp ? unsigned_char_type_node : signed_char_type_node;
10041   if (size == SHORT_TYPE_SIZE)
10042     return unsignedp ? short_unsigned_type_node : short_integer_type_node;
10043   if (size == LONG_TYPE_SIZE)
10044     return unsignedp ? long_unsigned_type_node : long_integer_type_node;
10045   if (size == LONG_LONG_TYPE_SIZE)
10046     return (unsignedp ? long_long_unsigned_type_node
10047             : long_long_integer_type_node);
10048 
10049   for (i = 0; i < NUM_INT_N_ENTS; i ++)
10050     if (size == int_n_data[i].bitsize
10051 	&& int_n_enabled_p[i])
10052       return (unsignedp ? int_n_trees[i].unsigned_type
10053 	      : int_n_trees[i].signed_type);
10054 
10055   if (unsignedp)
10056     return make_unsigned_type (size);
10057   else
10058     return make_signed_type (size);
10059 }
10060 
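/* For illustration: on a target where INT_TYPE_SIZE is 32, a call such as
   make_or_reuse_type (32, 1) simply returns the existing unsigned_type_node;
   only a size that matches none of the standard C types or enabled __intN
   types falls through to make_unsigned_type / make_signed_type.  */
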
10061 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP.  */
10062 
10063 static tree
10064 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
10065 {
10066   if (satp)
10067     {
10068       if (size == SHORT_FRACT_TYPE_SIZE)
10069 	return unsignedp ? sat_unsigned_short_fract_type_node
10070 			 : sat_short_fract_type_node;
10071       if (size == FRACT_TYPE_SIZE)
10072 	return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
10073       if (size == LONG_FRACT_TYPE_SIZE)
10074 	return unsignedp ? sat_unsigned_long_fract_type_node
10075 			 : sat_long_fract_type_node;
10076       if (size == LONG_LONG_FRACT_TYPE_SIZE)
10077 	return unsignedp ? sat_unsigned_long_long_fract_type_node
10078 			 : sat_long_long_fract_type_node;
10079     }
10080   else
10081     {
10082       if (size == SHORT_FRACT_TYPE_SIZE)
10083 	return unsignedp ? unsigned_short_fract_type_node
10084 			 : short_fract_type_node;
10085       if (size == FRACT_TYPE_SIZE)
10086 	return unsignedp ? unsigned_fract_type_node : fract_type_node;
10087       if (size == LONG_FRACT_TYPE_SIZE)
10088 	return unsignedp ? unsigned_long_fract_type_node
10089 			 : long_fract_type_node;
10090       if (size == LONG_LONG_FRACT_TYPE_SIZE)
10091 	return unsignedp ? unsigned_long_long_fract_type_node
10092 			 : long_long_fract_type_node;
10093     }
10094 
10095   return make_fract_type (size, unsignedp, satp);
10096 }
10097 
10098 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP.  */
10099 
10100 static tree
10101 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
10102 {
10103   if (satp)
10104     {
10105       if (size == SHORT_ACCUM_TYPE_SIZE)
10106 	return unsignedp ? sat_unsigned_short_accum_type_node
10107 			 : sat_short_accum_type_node;
10108       if (size == ACCUM_TYPE_SIZE)
10109 	return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
10110       if (size == LONG_ACCUM_TYPE_SIZE)
10111 	return unsignedp ? sat_unsigned_long_accum_type_node
10112 			 : sat_long_accum_type_node;
10113       if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10114 	return unsignedp ? sat_unsigned_long_long_accum_type_node
10115 			 : sat_long_long_accum_type_node;
10116     }
10117   else
10118     {
10119       if (size == SHORT_ACCUM_TYPE_SIZE)
10120 	return unsignedp ? unsigned_short_accum_type_node
10121 			 : short_accum_type_node;
10122       if (size == ACCUM_TYPE_SIZE)
10123 	return unsignedp ? unsigned_accum_type_node : accum_type_node;
10124       if (size == LONG_ACCUM_TYPE_SIZE)
10125 	return unsignedp ? unsigned_long_accum_type_node
10126 			 : long_accum_type_node;
10127       if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10128 	return unsignedp ? unsigned_long_long_accum_type_node
10129 			 : long_long_accum_type_node;
10130     }
10131 
10132   return make_accum_type (size, unsignedp, satp);
10133 }
10134 
10135 
10136 /* Create an atomic variant node for TYPE.  This routine is called
10137    during initialization of data types to create the 5 basic atomic
10138    types. The generic build_variant_type function requires these to
10139    already be set up in order to function properly, so cannot be
10140    called from there.  If ALIGN is non-zero, then ensure alignment is
10141    overridden to this value.  */
10142 
10143 static tree
10144 build_atomic_base (tree type, unsigned int align)
10145 {
10146   tree t;
10147 
10148   /* Make sure it's not already registered.  */
10149   if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
10150     return t;
10151 
10152   t = build_variant_type_copy (type);
10153   set_type_quals (t, TYPE_QUAL_ATOMIC);
10154 
10155   if (align)
10156     SET_TYPE_ALIGN (t, align);
10157 
10158   return t;
10159 }
10160 
10161 /* Information about the _FloatN and _FloatNx types.  This must be in
10162    the same order as the corresponding TI_* enum values.  */
10163 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
10164   {
10165     { 16, false },
10166     { 32, false },
10167     { 64, false },
10168     { 128, false },
10169     { 32, true },
10170     { 64, true },
10171     { 128, true },
10172   };
10173 
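/* Reading the table above: the N field is the N in _FloatN / _FloatNx and the
   EXTENDED field selects the Nx family, so { 128, false } describes _Float128
   while { 32, true } describes _Float32x.  */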
10174 
10175 /* Create nodes for all integer types (and error_mark_node) using the sizes
10176    of C datatypes.  SIGNED_CHAR specifies whether char is signed.  */
10177 
10178 void
10179 build_common_tree_nodes (bool signed_char)
10180 {
10181   int i;
10182 
10183   error_mark_node = make_node (ERROR_MARK);
10184   TREE_TYPE (error_mark_node) = error_mark_node;
10185 
10186   initialize_sizetypes ();
10187 
10188   /* Define both `signed char' and `unsigned char'.  */
10189   signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
10190   TYPE_STRING_FLAG (signed_char_type_node) = 1;
10191   unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
10192   TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
10193 
10194   /* Define `char', which is like either `signed char' or `unsigned char'
10195      but not the same as either.  */
10196   char_type_node
10197     = (signed_char
10198        ? make_signed_type (CHAR_TYPE_SIZE)
10199        : make_unsigned_type (CHAR_TYPE_SIZE));
10200   TYPE_STRING_FLAG (char_type_node) = 1;
10201 
10202   short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
10203   short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
10204   integer_type_node = make_signed_type (INT_TYPE_SIZE);
10205   unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
10206   long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
10207   long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
10208   long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
10209   long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
10210 
10211   for (i = 0; i < NUM_INT_N_ENTS; i ++)
10212     {
10213       int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
10214       int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
10215 
10216       if (int_n_enabled_p[i])
10217 	{
10218 	  integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
10219 	  integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
10220 	}
10221     }
10222 
10223   /* Define a boolean type.  This type only represents boolean values but
10224      may be larger than char depending on the value of BOOL_TYPE_SIZE.  */
10225   boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
10226   TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
10227   TYPE_PRECISION (boolean_type_node) = 1;
10228   TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
10229 
10230   /* Define what type to use for size_t.  */
10231   if (strcmp (SIZE_TYPE, "unsigned int") == 0)
10232     size_type_node = unsigned_type_node;
10233   else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
10234     size_type_node = long_unsigned_type_node;
10235   else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
10236     size_type_node = long_long_unsigned_type_node;
10237   else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
10238     size_type_node = short_unsigned_type_node;
10239   else
10240     {
10241       int i;
10242 
10243       size_type_node = NULL_TREE;
10244       for (i = 0; i < NUM_INT_N_ENTS; i++)
10245 	if (int_n_enabled_p[i])
10246 	  {
10247 	    char name[50], altname[50];
10248 	    sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
10249 	    sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);
10250 
10251 	    if (strcmp (name, SIZE_TYPE) == 0
10252 		|| strcmp (altname, SIZE_TYPE) == 0)
10253 	      {
10254 		size_type_node = int_n_trees[i].unsigned_type;
10255 	      }
10256 	  }
10257       if (size_type_node == NULL_TREE)
10258 	gcc_unreachable ();
10259     }
10260 
10261   /* Define what type to use for ptrdiff_t.  */
10262   if (strcmp (PTRDIFF_TYPE, "int") == 0)
10263     ptrdiff_type_node = integer_type_node;
10264   else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
10265     ptrdiff_type_node = long_integer_type_node;
10266   else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
10267     ptrdiff_type_node = long_long_integer_type_node;
10268   else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
10269     ptrdiff_type_node = short_integer_type_node;
10270   else
10271     {
10272       ptrdiff_type_node = NULL_TREE;
10273       for (int i = 0; i < NUM_INT_N_ENTS; i++)
10274 	if (int_n_enabled_p[i])
10275 	  {
10276 	    char name[50], altname[50];
10277 	    sprintf (name, "__int%d", int_n_data[i].bitsize);
10278 	    sprintf (altname, "__int%d__", int_n_data[i].bitsize);
10279 
10280 	    if (strcmp (name, PTRDIFF_TYPE) == 0
10281 		|| strcmp (altname, PTRDIFF_TYPE) == 0)
10282 	      ptrdiff_type_node = int_n_trees[i].signed_type;
10283 	  }
10284       if (ptrdiff_type_node == NULL_TREE)
10285 	gcc_unreachable ();
10286     }
10287 
10288   /* Fill in the rest of the sized types.  Reuse existing type nodes
10289      when possible.  */
10290   intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
10291   intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
10292   intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
10293   intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
10294   intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
10295 
10296   unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
10297   unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
10298   unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
10299   unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
10300   unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
10301 
10302   /* Don't call build_qualified_type for atomics.  That routine does
10303      special processing for atomics, and until they are initialized
10304      it's better not to make that call.
10305 
10306      Check to see if there is a target override for atomic types.  */
10307 
10308   atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
10309 					targetm.atomic_align_for_mode (QImode));
10310   atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
10311 					targetm.atomic_align_for_mode (HImode));
10312   atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
10313 					targetm.atomic_align_for_mode (SImode));
10314   atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
10315 					targetm.atomic_align_for_mode (DImode));
10316   atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
10317 					targetm.atomic_align_for_mode (TImode));
10318 
10319   access_public_node = get_identifier ("public");
10320   access_protected_node = get_identifier ("protected");
10321   access_private_node = get_identifier ("private");
10322 
10323   /* Define these next since types below may use them.  */
10324   integer_zero_node = build_int_cst (integer_type_node, 0);
10325   integer_one_node = build_int_cst (integer_type_node, 1);
10326   integer_three_node = build_int_cst (integer_type_node, 3);
10327   integer_minus_one_node = build_int_cst (integer_type_node, -1);
10328 
10329   size_zero_node = size_int (0);
10330   size_one_node = size_int (1);
10331   bitsize_zero_node = bitsize_int (0);
10332   bitsize_one_node = bitsize_int (1);
10333   bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
10334 
10335   boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
10336   boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
10337 
10338   void_type_node = make_node (VOID_TYPE);
10339   layout_type (void_type_node);
10340 
10341   /* We are not going to have real types in C with less than byte alignment,
10342      so we might as well not have any types that claim to have it.  */
10343   SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
10344   TYPE_USER_ALIGN (void_type_node) = 0;
10345 
10346   void_node = make_node (VOID_CST);
10347   TREE_TYPE (void_node) = void_type_node;
10348 
10349   null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
10350   layout_type (TREE_TYPE (null_pointer_node));
10351 
10352   ptr_type_node = build_pointer_type (void_type_node);
10353   const_ptr_type_node
10354     = build_pointer_type (build_type_variant (void_type_node, 1, 0));
10355   for (unsigned i = 0;
10356        i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
10357        ++i)
10358     builtin_structptr_types[i].node = builtin_structptr_types[i].base;
10359 
10360   pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
10361 
10362   float_type_node = make_node (REAL_TYPE);
10363   TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
10364   layout_type (float_type_node);
10365 
10366   double_type_node = make_node (REAL_TYPE);
10367   TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
10368   layout_type (double_type_node);
10369 
10370   long_double_type_node = make_node (REAL_TYPE);
10371   TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
10372   layout_type (long_double_type_node);
10373 
10374   for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
10375     {
10376       int n = floatn_nx_types[i].n;
10377       bool extended = floatn_nx_types[i].extended;
10378       scalar_float_mode mode;
10379       if (!targetm.floatn_mode (n, extended).exists (&mode))
10380 	continue;
10381       int precision = GET_MODE_PRECISION (mode);
10382       /* Work around the rs6000 KFmode having precision 113 not
10383 	 128.  */
10384       const struct real_format *fmt = REAL_MODE_FORMAT (mode);
10385       gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
10386       int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
10387       if (!extended)
10388 	gcc_assert (min_precision == n);
10389       if (precision < min_precision)
10390 	precision = min_precision;
10391       FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
10392       TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
10393       layout_type (FLOATN_NX_TYPE_NODE (i));
10394       SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
10395     }
10396 
10397   float_ptr_type_node = build_pointer_type (float_type_node);
10398   double_ptr_type_node = build_pointer_type (double_type_node);
10399   long_double_ptr_type_node = build_pointer_type (long_double_type_node);
10400   integer_ptr_type_node = build_pointer_type (integer_type_node);
10401 
10402   /* Fixed size integer types.  */
10403   uint16_type_node = make_or_reuse_type (16, 1);
10404   uint32_type_node = make_or_reuse_type (32, 1);
10405   uint64_type_node = make_or_reuse_type (64, 1);
10406 
10407   /* Decimal float types. */
10408   if (targetm.decimal_float_supported_p ())
10409     {
10410       dfloat32_type_node = make_node (REAL_TYPE);
10411       TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
10412       SET_TYPE_MODE (dfloat32_type_node, SDmode);
10413       layout_type (dfloat32_type_node);
10414 
10415       dfloat64_type_node = make_node (REAL_TYPE);
10416       TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
10417       SET_TYPE_MODE (dfloat64_type_node, DDmode);
10418       layout_type (dfloat64_type_node);
10419 
10420       dfloat128_type_node = make_node (REAL_TYPE);
10421       TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
10422       SET_TYPE_MODE (dfloat128_type_node, TDmode);
10423       layout_type (dfloat128_type_node);
10424     }
10425 
10426   complex_integer_type_node = build_complex_type (integer_type_node, true);
10427   complex_float_type_node = build_complex_type (float_type_node, true);
10428   complex_double_type_node = build_complex_type (double_type_node, true);
10429   complex_long_double_type_node = build_complex_type (long_double_type_node,
10430 						      true);
10431 
10432   for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
10433     {
10434       if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
10435 	COMPLEX_FLOATN_NX_TYPE_NODE (i)
10436 	  = build_complex_type (FLOATN_NX_TYPE_NODE (i));
10437     }
10438 
10439 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned.  */
10440 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
10441   sat_ ## KIND ## _type_node = \
10442     make_sat_signed_ ## KIND ## _type (SIZE); \
10443   sat_unsigned_ ## KIND ## _type_node = \
10444     make_sat_unsigned_ ## KIND ## _type (SIZE); \
10445   KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10446   unsigned_ ## KIND ## _type_node = \
10447     make_unsigned_ ## KIND ## _type (SIZE);
10448 
10449 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
10450   sat_ ## WIDTH ## KIND ## _type_node = \
10451     make_sat_signed_ ## KIND ## _type (SIZE); \
10452   sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10453     make_sat_unsigned_ ## KIND ## _type (SIZE); \
10454   WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10455   unsigned_ ## WIDTH ## KIND ## _type_node = \
10456     make_unsigned_ ## KIND ## _type (SIZE);
10457 
10458 /* Make fixed-point type nodes based on four different widths.  */
10459 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10460   MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10461   MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10462   MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10463   MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10464 
10465 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned.  */
10466 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10467   NAME ## _type_node = \
10468     make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10469   u ## NAME ## _type_node = \
10470     make_or_reuse_unsigned_ ## KIND ## _type \
10471       (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10472   sat_ ## NAME ## _type_node = \
10473     make_or_reuse_sat_signed_ ## KIND ## _type \
10474       (GET_MODE_BITSIZE (MODE ## mode)); \
10475   sat_u ## NAME ## _type_node = \
10476     make_or_reuse_sat_unsigned_ ## KIND ## _type \
10477       (GET_MODE_BITSIZE (U ## MODE ## mode));
10478 
10479   /* Fixed-point type and mode nodes.  */
10480   MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10481   MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10482   MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10483   MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10484   MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10485   MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10486   MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10487   MAKE_FIXED_MODE_NODE (accum, ha, HA)
10488   MAKE_FIXED_MODE_NODE (accum, sa, SA)
10489   MAKE_FIXED_MODE_NODE (accum, da, DA)
10490   MAKE_FIXED_MODE_NODE (accum, ta, TA)
10491 
10492   {
10493     tree t = targetm.build_builtin_va_list ();
10494 
10495     /* Many back-ends define record types without setting TYPE_NAME.
10496        If we copied the record type here, we'd keep the original
10497        record type without a name.  This breaks name mangling.  So,
10498        don't copy record types and let c_common_nodes_and_builtins()
10499        declare the type to be __builtin_va_list.  */
10500     if (TREE_CODE (t) != RECORD_TYPE)
10501       t = build_variant_type_copy (t);
10502 
10503     va_list_type_node = t;
10504   }
10505 
10506   /* SCEV analyzer global shared trees.  */
10507   chrec_dont_know = make_node (SCEV_NOT_KNOWN);
10508   TREE_TYPE (chrec_dont_know) = void_type_node;
10509   chrec_known = make_node (SCEV_KNOWN);
10510   TREE_TYPE (chrec_known) = void_type_node;
10511 }
10512 
10513 /* Modify DECL for given flags.
10514    TM_PURE attribute is set only on types, so the function will modify
10515    DECL's type when ECF_TM_PURE is used.  */
10516 
10517 void
10518 set_call_expr_flags (tree decl, int flags)
10519 {
10520   if (flags & ECF_NOTHROW)
10521     TREE_NOTHROW (decl) = 1;
10522   if (flags & ECF_CONST)
10523     TREE_READONLY (decl) = 1;
10524   if (flags & ECF_PURE)
10525     DECL_PURE_P (decl) = 1;
10526   if (flags & ECF_LOOPING_CONST_OR_PURE)
10527     DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10528   if (flags & ECF_NOVOPS)
10529     DECL_IS_NOVOPS (decl) = 1;
10530   if (flags & ECF_NORETURN)
10531     TREE_THIS_VOLATILE (decl) = 1;
10532   if (flags & ECF_MALLOC)
10533     DECL_IS_MALLOC (decl) = 1;
10534   if (flags & ECF_RETURNS_TWICE)
10535     DECL_IS_RETURNS_TWICE (decl) = 1;
10536   if (flags & ECF_LEAF)
10537     DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10538 					NULL, DECL_ATTRIBUTES (decl));
10539   if (flags & ECF_COLD)
10540     DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
10541 					NULL, DECL_ATTRIBUTES (decl));
10542   if (flags & ECF_RET1)
10543     DECL_ATTRIBUTES (decl)
10544       = tree_cons (get_identifier ("fn spec"),
10545 		   build_tree_list (NULL_TREE, build_string (1, "1")),
10546 		   DECL_ATTRIBUTES (decl));
10547   if ((flags & ECF_TM_PURE) && flag_tm)
10548     apply_tm_attr (decl, get_identifier ("transaction_pure"));
10549   /* Looping const or pure is implied by noreturn.
10550      There is currently no way to declare looping const or looping pure alone.  */
10551   gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10552 	      || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
10553 }
10554 
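/* A minimal usage sketch (FNDECL here is a hypothetical function decl, not
   something defined in this file):

     set_call_expr_flags (fndecl, ECF_NOTHROW | ECF_LEAF | ECF_CONST);

   would mark the declaration nothrow, leaf and const, which is the same way
   local_define_builtin below applies the ECF_* bits it is handed.  */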
10555 
10556 /* A subroutine of build_common_builtin_nodes.  Define a builtin function.  */
10557 
10558 static void
10559 local_define_builtin (const char *name, tree type, enum built_in_function code,
10560                       const char *library_name, int ecf_flags)
10561 {
10562   tree decl;
10563 
10564   decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10565 			       library_name, NULL_TREE);
10566   set_call_expr_flags (decl, ecf_flags);
10567 
10568   set_builtin_decl (code, decl, true);
10569 }
10570 
10571 /* Call this function after instantiating all builtins that the language
10572    front end cares about.  This will build the rest of the builtins
10573    and internal functions that are relied upon by the tree optimizers and
10574    the middle-end.  */
10575 
10576 void
10577 build_common_builtin_nodes (void)
10578 {
10579   tree tmp, ftype;
10580   int ecf_flags;
10581 
10582   if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
10583       || !builtin_decl_explicit_p (BUILT_IN_ABORT))
10584     {
10585       ftype = build_function_type (void_type_node, void_list_node);
10586       if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10587 	local_define_builtin ("__builtin_unreachable", ftype,
10588 			      BUILT_IN_UNREACHABLE,
10589 			      "__builtin_unreachable",
10590 			      ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10591 			      | ECF_CONST | ECF_COLD);
10592       if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
10593 	local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
10594 			      "abort",
10595 			      ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
10596     }
10597 
10598   if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10599       || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10600     {
10601       ftype = build_function_type_list (ptr_type_node,
10602 					ptr_type_node, const_ptr_type_node,
10603 					size_type_node, NULL_TREE);
10604 
10605       if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10606 	local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10607 			      "memcpy", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10608       if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10609 	local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10610 			      "memmove", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10611     }
10612 
10613   if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10614     {
10615       ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10616 					const_ptr_type_node, size_type_node,
10617 					NULL_TREE);
10618       local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10619 			    "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10620     }
10621 
10622   if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10623     {
10624       ftype = build_function_type_list (ptr_type_node,
10625 					ptr_type_node, integer_type_node,
10626 					size_type_node, NULL_TREE);
10627       local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10628 			    "memset", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10629     }
10630 
10631   /* If we're checking the stack, `alloca' can throw.  */
10632   const int alloca_flags
10633     = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
10634 
10635   if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10636     {
10637       ftype = build_function_type_list (ptr_type_node,
10638 					size_type_node, NULL_TREE);
10639       local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10640 			    "alloca", alloca_flags);
10641     }
10642 
10643   ftype = build_function_type_list (ptr_type_node, size_type_node,
10644 				    size_type_node, NULL_TREE);
10645   local_define_builtin ("__builtin_alloca_with_align", ftype,
10646 			BUILT_IN_ALLOCA_WITH_ALIGN,
10647 			"__builtin_alloca_with_align",
10648 			alloca_flags);
10649 
10650   ftype = build_function_type_list (ptr_type_node, size_type_node,
10651 				    size_type_node, size_type_node, NULL_TREE);
10652   local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
10653 			BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
10654 			"__builtin_alloca_with_align_and_max",
10655 			alloca_flags);
10656 
10657   ftype = build_function_type_list (void_type_node,
10658 				    ptr_type_node, ptr_type_node,
10659 				    ptr_type_node, NULL_TREE);
10660   local_define_builtin ("__builtin_init_trampoline", ftype,
10661 			BUILT_IN_INIT_TRAMPOLINE,
10662 			"__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10663   local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10664 			BUILT_IN_INIT_HEAP_TRAMPOLINE,
10665 			"__builtin_init_heap_trampoline",
10666 			ECF_NOTHROW | ECF_LEAF);
10667   local_define_builtin ("__builtin_init_descriptor", ftype,
10668 			BUILT_IN_INIT_DESCRIPTOR,
10669 			"__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);
10670 
10671   ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10672   local_define_builtin ("__builtin_adjust_trampoline", ftype,
10673 			BUILT_IN_ADJUST_TRAMPOLINE,
10674 			"__builtin_adjust_trampoline",
10675 			ECF_CONST | ECF_NOTHROW);
10676   local_define_builtin ("__builtin_adjust_descriptor", ftype,
10677 			BUILT_IN_ADJUST_DESCRIPTOR,
10678 			"__builtin_adjust_descriptor",
10679 			ECF_CONST | ECF_NOTHROW);
10680 
10681   ftype = build_function_type_list (void_type_node,
10682 				    ptr_type_node, ptr_type_node, NULL_TREE);
10683   local_define_builtin ("__builtin_nonlocal_goto", ftype,
10684 			BUILT_IN_NONLOCAL_GOTO,
10685 			"__builtin_nonlocal_goto",
10686 			ECF_NORETURN | ECF_NOTHROW);
10687 
10688   ftype = build_function_type_list (void_type_node,
10689 				    ptr_type_node, ptr_type_node, NULL_TREE);
10690   local_define_builtin ("__builtin_setjmp_setup", ftype,
10691 			BUILT_IN_SETJMP_SETUP,
10692 			"__builtin_setjmp_setup", ECF_NOTHROW);
10693 
10694   ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10695   local_define_builtin ("__builtin_setjmp_receiver", ftype,
10696 			BUILT_IN_SETJMP_RECEIVER,
10697 			"__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10698 
10699   ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10700   local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10701 			"__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10702 
10703   ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10704   local_define_builtin ("__builtin_stack_restore", ftype,
10705 			BUILT_IN_STACK_RESTORE,
10706 			"__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10707 
10708   ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10709 				    const_ptr_type_node, size_type_node,
10710 				    NULL_TREE);
10711   local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
10712 			"__builtin_memcmp_eq",
10713 			ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10714 
10715   local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
10716 			"__builtin_strncmp_eq",
10717 			ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10718 
10719   local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
10720 			"__builtin_strcmp_eq",
10721 			ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10722 
10723   /* If there's a possibility that we might use the ARM EABI, build the
10724     alternate __cxa_end_cleanup node used to resume from C++.  */
10725   if (targetm.arm_eabi_unwinder)
10726     {
10727       ftype = build_function_type_list (void_type_node, NULL_TREE);
10728       local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10729 			    BUILT_IN_CXA_END_CLEANUP,
10730 			    "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10731     }
10732 
10733   ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10734   local_define_builtin ("__builtin_unwind_resume", ftype,
10735 			BUILT_IN_UNWIND_RESUME,
10736 			((targetm_common.except_unwind_info (&global_options)
10737 			  == UI_SJLJ)
10738 			 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10739 			ECF_NORETURN);
10740 
10741   if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10742     {
10743       ftype = build_function_type_list (ptr_type_node, integer_type_node,
10744 					NULL_TREE);
10745       local_define_builtin ("__builtin_return_address", ftype,
10746 			    BUILT_IN_RETURN_ADDRESS,
10747 			    "__builtin_return_address",
10748 			    ECF_NOTHROW);
10749     }
10750 
10751   if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10752       || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10753     {
10754       ftype = build_function_type_list (void_type_node, ptr_type_node,
10755 					ptr_type_node, NULL_TREE);
10756       if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10757 	local_define_builtin ("__cyg_profile_func_enter", ftype,
10758 			      BUILT_IN_PROFILE_FUNC_ENTER,
10759 			      "__cyg_profile_func_enter", 0);
10760       if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10761 	local_define_builtin ("__cyg_profile_func_exit", ftype,
10762 			      BUILT_IN_PROFILE_FUNC_EXIT,
10763 			      "__cyg_profile_func_exit", 0);
10764     }
10765 
10766   /* The exception object and filter values from the runtime.  The argument
10767      must be zero before exception lowering, i.e. from the front end.  After
10768      exception lowering, it will be the region number for the exception
10769      landing pad.  These functions are PURE instead of CONST to prevent
10770      them from being hoisted past the exception edge that will initialize
10771      its value in the landing pad.  */
10772   ftype = build_function_type_list (ptr_type_node,
10773 				    integer_type_node, NULL_TREE);
10774   ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10775   /* Only use TM_PURE if we have TM language support.  */
10776   if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10777     ecf_flags |= ECF_TM_PURE;
10778   local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10779 			"__builtin_eh_pointer", ecf_flags);
10780 
10781   tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10782   ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10783   local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10784 			"__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10785 
10786   ftype = build_function_type_list (void_type_node,
10787 				    integer_type_node, integer_type_node,
10788 				    NULL_TREE);
10789   local_define_builtin ("__builtin_eh_copy_values", ftype,
10790 			BUILT_IN_EH_COPY_VALUES,
10791 			"__builtin_eh_copy_values", ECF_NOTHROW);
10792 
10793   /* Complex multiplication and division.  These are handled as builtins
10794      rather than optabs because emit_library_call_value doesn't support
10795      complex.  Further, we can do slightly better with folding these
10796      beasties if the real and complex parts of the arguments are separate.  */
10797   {
10798     int mode;
10799 
10800     for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10801       {
10802 	char mode_name_buf[4], *q;
10803 	const char *p;
10804 	enum built_in_function mcode, dcode;
10805 	tree type, inner_type;
10806 	const char *prefix = "__";
10807 
10808 	if (targetm.libfunc_gnu_prefix)
10809 	  prefix = "__gnu_";
10810 
10811 	type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10812 	if (type == NULL)
10813 	  continue;
10814 	inner_type = TREE_TYPE (type);
10815 
10816 	ftype = build_function_type_list (type, inner_type, inner_type,
10817 					  inner_type, inner_type, NULL_TREE);
10818 
10819         mcode = ((enum built_in_function)
10820 		 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10821         dcode = ((enum built_in_function)
10822 		 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10823 
10824         for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10825 	  *q = TOLOWER (*p);
10826 	*q = '\0';
10827 
10828 	/* For -ftrapping-math these should throw from a former
10829 	   -fnon-call-exception stmt.  */
10830 	built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10831 					NULL);
10832         local_define_builtin (built_in_names[mcode], ftype, mcode,
10833 			      built_in_names[mcode],
10834 			      ECF_CONST | ECF_LEAF);
10835 
10836 	built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10837 					NULL);
10838         local_define_builtin (built_in_names[dcode], ftype, dcode,
10839 			      built_in_names[dcode],
10840 			      ECF_CONST | ECF_LEAF);
10841       }
10842   }
10843 
10844   init_internal_fns ();
10845 }
10846 
10847 /* HACK.  GROSS.  This is absolutely disgusting.  I wish there was a
10848    better way.
10849 
10850    If we requested a pointer to a vector, build up the pointers that
10851    we stripped off while looking for the inner type.  Similarly for
10852    return values from functions.
10853 
10854    The argument TYPE is the top of the chain, and BOTTOM is the
10855    new type which we will point to.  */
10856 
10857 tree
10858 reconstruct_complex_type (tree type, tree bottom)
10859 {
10860   tree inner, outer;
10861 
10862   if (TREE_CODE (type) == POINTER_TYPE)
10863     {
10864       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10865       outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10866 					   TYPE_REF_CAN_ALIAS_ALL (type));
10867     }
10868   else if (TREE_CODE (type) == REFERENCE_TYPE)
10869     {
10870       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10871       outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10872 					     TYPE_REF_CAN_ALIAS_ALL (type));
10873     }
10874   else if (TREE_CODE (type) == ARRAY_TYPE)
10875     {
10876       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10877       outer = build_array_type (inner, TYPE_DOMAIN (type));
10878     }
10879   else if (TREE_CODE (type) == FUNCTION_TYPE)
10880     {
10881       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10882       outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10883     }
10884   else if (TREE_CODE (type) == METHOD_TYPE)
10885     {
10886       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10887       /* The build_method_type_directly() routine prepends 'this' to the
10888          argument list, so we must compensate by getting rid of it.  */
10889       outer
10890 	= build_method_type_directly
10891 	    (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10892 	     inner,
10893 	     TREE_CHAIN (TYPE_ARG_TYPES (type)));
10894     }
10895   else if (TREE_CODE (type) == OFFSET_TYPE)
10896     {
10897       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10898       outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10899     }
10900   else
10901     return bottom;
10902 
10903   return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10904 					    TYPE_QUALS (type));
10905 }
10906 
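/* For illustration: if TYPE is 'float **' and BOTTOM is the matching complex
   type, the recursion above peels both POINTER_TYPEs, substitutes BOTTOM at
   the core, and rebuilds the chain outwards, reapplying each level's
   qualifiers and attributes, giving 'complex float **'.  */
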
10907 /* Returns a vector tree node given a scalar integer or vector mode and
10908    the inner type.  */
10909 tree
10910 build_vector_type_for_mode (tree innertype, machine_mode mode)
10911 {
10912   poly_int64 nunits;
10913   unsigned int bitsize;
10914 
10915   switch (GET_MODE_CLASS (mode))
10916     {
10917     case MODE_VECTOR_BOOL:
10918     case MODE_VECTOR_INT:
10919     case MODE_VECTOR_FLOAT:
10920     case MODE_VECTOR_FRACT:
10921     case MODE_VECTOR_UFRACT:
10922     case MODE_VECTOR_ACCUM:
10923     case MODE_VECTOR_UACCUM:
10924       nunits = GET_MODE_NUNITS (mode);
10925       break;
10926 
10927     case MODE_INT:
10928       /* Check that there are no leftover bits.  */
10929       bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
10930       gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10931       nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10932       break;
10933 
10934     default:
10935       gcc_unreachable ();
10936     }
10937 
10938   return make_vector_type (innertype, nunits, mode);
10939 }
10940 
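/* Worked example for the MODE_INT case above: a 64-bit scalar mode such as
   DImode together with an 8-bit INNERTYPE gives bitsize == 64, the
   divisibility assert holds, and the resulting vector type has
   64 / 8 == 8 units.  */
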
10941 /* Similarly, but takes the inner type and number of units, which must be
10942    a power of two.  */
10943 
10944 tree
10945 build_vector_type (tree innertype, poly_int64 nunits)
10946 {
10947   return make_vector_type (innertype, nunits, VOIDmode);
10948 }
10949 
10950 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE.  */
10951 
10952 tree
10953 build_truth_vector_type_for_mode (poly_uint64 nunits, machine_mode mask_mode)
10954 {
10955   gcc_assert (mask_mode != BLKmode);
10956 
10957   poly_uint64 vsize = GET_MODE_BITSIZE (mask_mode);
10958   unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
10959   tree bool_type = build_nonstandard_boolean_type (esize);
10960 
10961   return make_vector_type (bool_type, nunits, mask_mode);
10962 }
10963 
10964 /* Build a vector type that holds one boolean result for each element of
10965    vector type VECTYPE.  The public interface for this operation is
10966    truth_type_for.  */
10967 
10968 static tree
10969 build_truth_vector_type_for (tree vectype)
10970 {
10971   machine_mode vector_mode = TYPE_MODE (vectype);
10972   poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
10973 
10974   machine_mode mask_mode;
10975   if (VECTOR_MODE_P (vector_mode)
10976       && targetm.vectorize.get_mask_mode (vector_mode).exists (&mask_mode))
10977     return build_truth_vector_type_for_mode (nunits, mask_mode);
10978 
10979   poly_uint64 vsize = tree_to_poly_uint64 (TYPE_SIZE (vectype));
10980   unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
10981   tree bool_type = build_nonstandard_boolean_type (esize);
10982 
10983   return make_vector_type (bool_type, nunits, BLKmode);
10984 }
10985 
10986 /* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
10987    set.  */
10988 
10989 tree
10990 build_opaque_vector_type (tree innertype, poly_int64 nunits)
10991 {
10992   tree t = make_vector_type (innertype, nunits, VOIDmode);
10993   tree cand;
10994   /* We always build the non-opaque variant before the opaque one,
10995      so if it already exists, it is TYPE_NEXT_VARIANT of this one.  */
10996   cand = TYPE_NEXT_VARIANT (t);
10997   if (cand
10998       && TYPE_VECTOR_OPAQUE (cand)
10999       && check_qualified_type (cand, t, TYPE_QUALS (t)))
11000     return cand;
11001   /* Otherwise build a variant type and make sure to queue it after
11002      the non-opaque type.  */
11003   cand = build_distinct_type_copy (t);
11004   TYPE_VECTOR_OPAQUE (cand) = true;
11005   TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
11006   TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
11007   TYPE_NEXT_VARIANT (t) = cand;
11008   TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
11009   return cand;
11010 }
11011 
11012 /* Return the value of element I of VECTOR_CST T as a wide_int.  */
11013 
11014 static poly_wide_int
11015 vector_cst_int_elt (const_tree t, unsigned int i)
11016 {
11017   /* First handle elements that are directly encoded.  */
11018   unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
11019   if (i < encoded_nelts)
11020     return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, i));
11021 
11022   /* Identify the pattern that contains element I and work out the index of
11023      the last encoded element for that pattern.  */
11024   unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
11025   unsigned int pattern = i % npatterns;
11026   unsigned int count = i / npatterns;
11027   unsigned int final_i = encoded_nelts - npatterns + pattern;
11028 
11029   /* If there are no steps, the final encoded value is the right one.  */
11030   if (!VECTOR_CST_STEPPED_P (t))
11031     return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, final_i));
11032 
11033   /* Otherwise work out the value from the last two encoded elements.  */
11034   tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
11035   tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
11036   poly_wide_int diff = wi::to_poly_wide (v2) - wi::to_poly_wide (v1);
11037   return wi::to_poly_wide (v2) + (count - 2) * diff;
11038 }
11039 
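/* Worked example for the stepped case above: with NPATTERNS == 1 and encoded
   elements { 1, 3, 5 }, element 4 has pattern 0, count 4 and final_i 2, so
   v1 == 3, v2 == 5 and diff == 2, giving 5 + (4 - 2) * 2 == 9, consistent
   with the implied series { 1, 3, 5, 7, 9, ... }.  */
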
11040 /* Return the value of element I of VECTOR_CST T.  */
11041 
11042 tree
11043 vector_cst_elt (const_tree t, unsigned int i)
11044 {
11045   /* First handle elements that are directly encoded.  */
11046   unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
11047   if (i < encoded_nelts)
11048     return VECTOR_CST_ENCODED_ELT (t, i);
11049 
11050   /* If there are no steps, the final encoded value is the right one.  */
11051   if (!VECTOR_CST_STEPPED_P (t))
11052     {
11053       /* Identify the pattern that contains element I and work out the index of
11054 	 the last encoded element for that pattern.  */
11055       unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
11056       unsigned int pattern = i % npatterns;
11057       unsigned int final_i = encoded_nelts - npatterns + pattern;
11058       return VECTOR_CST_ENCODED_ELT (t, final_i);
11059     }
11060 
11061   /* Otherwise work out the value from the last two encoded elements.  */
11062   return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
11063 			   vector_cst_int_elt (t, i));
11064 }
11065 
11066 /* Given an initializer INIT, return TRUE if INIT is zero or some
11067    aggregate of zeros.  Otherwise return FALSE.  If NONZERO is not
11068    null, set *NONZERO if and only if INIT is known not to be all
11069    zeros.  The combination of return value of false and *NONZERO
11070    false implies that INIT may but need not be all zeros.  Other
11071    combinations indicate definitive answers.  */
11072 
11073 bool
11074 initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
11075 {
11076   bool dummy;
11077   if (!nonzero)
11078     nonzero = &dummy;
11079 
11080   /* Conservatively clear NONZERO and set it only if INIT is definitely
11081      not all zero.  */
11082   *nonzero = false;
11083 
11084   STRIP_NOPS (init);
11085 
11086   unsigned HOST_WIDE_INT off = 0;
11087 
11088   switch (TREE_CODE (init))
11089     {
11090     case INTEGER_CST:
11091       if (integer_zerop (init))
11092 	return true;
11093 
11094       *nonzero = true;
11095       return false;
11096 
11097     case REAL_CST:
11098       /* ??? Note that this is not correct for C4X float formats.  There,
11099 	 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
11100 	 negative exponent.  */
11101       if (real_zerop (init)
11102 	  && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
11103 	return true;
11104 
11105       *nonzero = true;
11106       return false;
11107 
11108     case FIXED_CST:
11109       if (fixed_zerop (init))
11110 	return true;
11111 
11112       *nonzero = true;
11113       return false;
11114 
11115     case COMPLEX_CST:
11116       if (integer_zerop (init)
11117 	  || (real_zerop (init)
11118 	      && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
11119 	      && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
11120 	return true;
11121 
11122       *nonzero = true;
11123       return false;
11124 
11125     case VECTOR_CST:
11126       if (VECTOR_CST_NPATTERNS (init) == 1
11127 	  && VECTOR_CST_DUPLICATE_P (init)
11128 	  && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
11129 	return true;
11130 
11131       *nonzero = true;
11132       return false;
11133 
11134     case CONSTRUCTOR:
11135       {
11136 	if (TREE_CLOBBER_P (init))
11137 	  return false;
11138 
11139 	unsigned HOST_WIDE_INT idx;
11140 	tree elt;
11141 
11142 	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
11143 	  if (!initializer_zerop (elt, nonzero))
11144 	    return false;
11145 
11146 	return true;
11147       }
11148 
11149     case MEM_REF:
11150       {
11151 	tree arg = TREE_OPERAND (init, 0);
11152 	if (TREE_CODE (arg) != ADDR_EXPR)
11153 	  return false;
11154 	tree offset = TREE_OPERAND (init, 1);
11155 	if (TREE_CODE (offset) != INTEGER_CST
11156 	    || !tree_fits_uhwi_p (offset))
11157 	  return false;
11158 	off = tree_to_uhwi (offset);
11159 	if (INT_MAX < off)
11160 	  return false;
11161 	arg = TREE_OPERAND (arg, 0);
11162 	if (TREE_CODE (arg) != STRING_CST)
11163 	  return false;
11164 	init = arg;
11165       }
11166       /* Fall through.  */
11167 
11168     case STRING_CST:
11169       {
11170 	gcc_assert (off <= INT_MAX);
11171 
11172 	int i = off;
11173 	int n = TREE_STRING_LENGTH (init);
11174 	if (n <= i)
11175 	  return false;
11176 
11177 	/* We need to loop through all elements to handle cases like
11178 	   "\0" and "\0foobar".  */
11179 	for (i = 0; i < n; ++i)
11180 	  if (TREE_STRING_POINTER (init)[i] != '\0')
11181 	    {
11182 	      *nonzero = true;
11183 	      return false;
11184 	    }
11185 
11186 	return true;
11187       }
11188 
11189     default:
11190       return false;
11191     }
11192 }
11193 
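/* For illustration: a STRING_CST initializer of "\0\0\0" makes the loop above
   return true, whereas "\0foo" hits the 'f', sets *NONZERO and returns false.
   A CONSTRUCTOR whose elements are all zero also returns true, while a
   clobber returns false without deciding *NONZERO either way.  */
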
11194 /* Return true if EXPR is an initializer expression in which every element
11195    is a constant that is numerically equal to 0 or 1.  The elements do not
11196    need to be equal to each other.  */
11197 
11198 bool
11199 initializer_each_zero_or_onep (const_tree expr)
11200 {
11201   STRIP_ANY_LOCATION_WRAPPER (expr);
11202 
11203   switch (TREE_CODE (expr))
11204     {
11205     case INTEGER_CST:
11206       return integer_zerop (expr) || integer_onep (expr);
11207 
11208     case REAL_CST:
11209       return real_zerop (expr) || real_onep (expr);
11210 
11211     case VECTOR_CST:
11212       {
11213 	unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
11214 	if (VECTOR_CST_STEPPED_P (expr)
11215 	    && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
11216 	  return false;
11217 
11218 	for (unsigned int i = 0; i < nelts; ++i)
11219 	  {
11220 	    tree elt = vector_cst_elt (expr, i);
11221 	    if (!initializer_each_zero_or_onep (elt))
11222 	      return false;
11223 	  }
11224 
11225 	return true;
11226       }
11227 
11228     default:
11229       return false;
11230     }
11231 }
11232 
11233 /* Check if vector VEC consists of all equal elements and
11234    that the number of elements corresponds to the type of VEC.
11235    The function returns the first element of the vector
11236    or NULL_TREE if the vector is not uniform.  */
11237 tree
11238 uniform_vector_p (const_tree vec)
11239 {
11240   tree first, t;
11241   unsigned HOST_WIDE_INT i, nelts;
11242 
11243   if (vec == NULL_TREE)
11244     return NULL_TREE;
11245 
11246   gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
11247 
11248   if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
11249     return TREE_OPERAND (vec, 0);
11250 
11251   else if (TREE_CODE (vec) == VECTOR_CST)
11252     {
11253       if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
11254 	return VECTOR_CST_ENCODED_ELT (vec, 0);
11255       return NULL_TREE;
11256     }
11257 
11258   else if (TREE_CODE (vec) == CONSTRUCTOR
11259 	   && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
11260     {
11261       first = error_mark_node;
11262 
11263       FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
11264         {
11265           if (i == 0)
11266             {
11267               first = t;
11268               continue;
11269             }
11270 	  if (!operand_equal_p (first, t, 0))
11271 	    return NULL_TREE;
11272         }
11273       if (i != nelts)
11274 	return NULL_TREE;
11275 
11276       return first;
11277     }
11278 
11279   return NULL_TREE;
11280 }
11281 
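/* For illustration: a VECTOR_CST such as { 7, 7, 7, 7 } is a single
   duplicated pattern, so the INTEGER_CST 7 is returned; { 1, 2, 3, 4 }, or a
   CONSTRUCTOR whose elements differ, yields NULL_TREE instead.  */
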
11282 /* If the argument is an INTEGER_CST, return it.  If the argument is a vector
11283    with all elements the same INTEGER_CST, return that INTEGER_CST.  Otherwise
11284    return NULL_TREE.
11285    Look through location wrappers.  */
11286 
11287 tree
11288 uniform_integer_cst_p (tree t)
11289 {
11290   STRIP_ANY_LOCATION_WRAPPER (t);
11291 
11292   if (TREE_CODE (t) == INTEGER_CST)
11293     return t;
11294 
11295   if (VECTOR_TYPE_P (TREE_TYPE (t)))
11296     {
11297       t = uniform_vector_p (t);
11298       if (t && TREE_CODE (t) == INTEGER_CST)
11299 	return t;
11300     }
11301 
11302   return NULL_TREE;
11303 }
11304 
11305 /* If VECTOR_CST T has a single nonzero element, return the index of that
11306    element, otherwise return -1.  */
11307 
11308 int
11309 single_nonzero_element (const_tree t)
11310 {
11311   unsigned HOST_WIDE_INT nelts;
11312   unsigned int repeat_nelts;
11313   if (VECTOR_CST_NELTS (t).is_constant (&nelts))
11314     repeat_nelts = nelts;
11315   else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
11316     {
11317       nelts = vector_cst_encoded_nelts (t);
11318       repeat_nelts = VECTOR_CST_NPATTERNS (t);
11319     }
11320   else
11321     return -1;
11322 
11323   int res = -1;
11324   for (unsigned int i = 0; i < nelts; ++i)
11325     {
11326       tree elt = vector_cst_elt (t, i);
11327       if (!integer_zerop (elt) && !real_zerop (elt))
11328 	{
11329 	  if (res >= 0 || i >= repeat_nelts)
11330 	    return -1;
11331 	  res = i;
11332 	}
11333     }
11334   return res;
11335 }
11336 
11337 /* Build an empty statement at location LOC.  */
11338 
11339 tree
11340 build_empty_stmt (location_t loc)
11341 {
11342   tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
11343   SET_EXPR_LOCATION (t, loc);
11344   return t;
11345 }
11346 
11347 
11348 /* Build an OpenMP clause with code CODE.  LOC is the location of the
11349    clause.  */
11350 
11351 tree
11352 build_omp_clause (location_t loc, enum omp_clause_code code)
11353 {
11354   tree t;
11355   int size, length;
11356 
11357   length = omp_clause_num_ops[code];
11358   size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
11359 
11360   record_node_allocation_statistics (OMP_CLAUSE, size);
11361 
11362   t = (tree) ggc_internal_alloc (size);
11363   memset (t, 0, size);
11364   TREE_SET_CODE (t, OMP_CLAUSE);
11365   OMP_CLAUSE_SET_CODE (t, code);
11366   OMP_CLAUSE_LOCATION (t) = loc;
11367 
11368   return t;
11369 }
11370 
11371 /* Build a tcc_vl_exp object with code CODE and room for LEN operands.  LEN
11372    includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
11373    Except for the CODE and operand count field, other storage for the
11374    object is initialized to zeros.  */
11375 
11376 tree
11377 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
11378 {
11379   tree t;
11380   int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
11381 
11382   gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
11383   gcc_assert (len >= 1);
11384 
11385   record_node_allocation_statistics (code, length);
11386 
11387   t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
11388 
11389   TREE_SET_CODE (t, code);
11390 
11391   /* Can't use TREE_OPERAND to store the length because if checking is
11392      enabled, it will try to check the length before we store it.  :-P  */
11393   t->exp.operands[0] = build_int_cst (sizetype, len);
11394 
11395   return t;
11396 }
11397 
11398 /* Helper function for build_call_* functions; build a CALL_EXPR with
11399    indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
11400    the argument slots.  */
11401 
11402 static tree
11403 build_call_1 (tree return_type, tree fn, int nargs)
11404 {
11405   tree t;
11406 
11407   t = build_vl_exp (CALL_EXPR, nargs + 3);
11408   TREE_TYPE (t) = return_type;
11409   CALL_EXPR_FN (t) = fn;
11410   CALL_EXPR_STATIC_CHAIN (t) = NULL;
11411 
11412   return t;
11413 }
11414 
11415 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11416    FN and a null static chain slot.  NARGS is the number of call arguments
11417    which are specified as "..." arguments.  */
11418 
11419 tree
11420 build_call_nary (tree return_type, tree fn, int nargs, ...)
11421 {
11422   tree ret;
11423   va_list args;
11424   va_start (args, nargs);
11425   ret = build_call_valist (return_type, fn, nargs, args);
11426   va_end (args);
11427   return ret;
11428 }
11429 
11430 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11431    FN and a null static chain slot.  NARGS is the number of call arguments
11432    which are specified as a va_list ARGS.  */
11433 
11434 tree
11435 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
11436 {
11437   tree t;
11438   int i;
11439 
11440   t = build_call_1 (return_type, fn, nargs);
11441   for (i = 0; i < nargs; i++)
11442     CALL_EXPR_ARG (t, i) = va_arg (args, tree);
11443   process_call_operands (t);
11444   return t;
11445 }
11446 
11447 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11448    FN and a null static chain slot.  NARGS is the number of call arguments
11449    which are specified as a tree array ARGS.  */
11450 
11451 tree
11452 build_call_array_loc (location_t loc, tree return_type, tree fn,
11453 		      int nargs, const tree *args)
11454 {
11455   tree t;
11456   int i;
11457 
11458   t = build_call_1 (return_type, fn, nargs);
11459   for (i = 0; i < nargs; i++)
11460     CALL_EXPR_ARG (t, i) = args[i];
11461   process_call_operands (t);
11462   SET_EXPR_LOCATION (t, loc);
11463   return t;
11464 }
11465 
11466 /* Like build_call_array, but takes a vec.  */
11467 
11468 tree
11469 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
11470 {
11471   tree ret, t;
11472   unsigned int ix;
11473 
11474   ret = build_call_1 (return_type, fn, vec_safe_length (args));
11475   FOR_EACH_VEC_SAFE_ELT (args, ix, t)
11476     CALL_EXPR_ARG (ret, ix) = t;
11477   process_call_operands (ret);
11478   return ret;
11479 }
11480 
11481 /* Conveniently construct a function call expression.  FNDECL names the
11482    function to be called and N arguments are passed in the array
11483    ARGARRAY.  */
11484 
11485 tree
11486 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11487 {
11488   tree fntype = TREE_TYPE (fndecl);
11489   tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11490 
11491   return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
11492 }
11493 
11494 /* Conveniently construct a function call expression.  FNDECL names the
11495    function to be called and the arguments are passed in the vector
11496    VEC.  */
11497 
11498 tree
11499 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11500 {
11501   return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11502 				    vec_safe_address (vec));
11503 }
11504 
11505 
11506 /* Conveniently construct a function call expression.  FNDECL names the
11507    function to be called, N is the number of arguments, and the "..."
11508    parameters are the argument expressions.  */
11509 
11510 tree
11511 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11512 {
11513   va_list ap;
11514   tree *argarray = XALLOCAVEC (tree, n);
11515   int i;
11516 
11517   va_start (ap, n);
11518   for (i = 0; i < n; i++)
11519     argarray[i] = va_arg (ap, tree);
11520   va_end (ap);
11521   return build_call_expr_loc_array (loc, fndecl, n, argarray);
11522 }
11523 
11524 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...).  Duplicated because
11525    varargs macros aren't supported by all bootstrap compilers.  */
11526 
11527 tree
11528 build_call_expr (tree fndecl, int n, ...)
11529 {
11530   va_list ap;
11531   tree *argarray = XALLOCAVEC (tree, n);
11532   int i;
11533 
11534   va_start (ap, n);
11535   for (i = 0; i < n; i++)
11536     argarray[i] = va_arg (ap, tree);
11537   va_end (ap);
11538   return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11539 }
11540 
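/* Usage sketch (illustrative, not part of the original file): emit a call
   to a standard builtin with three caller-built argument trees DEST, SRC
   and NBYTES (hypothetical names):

     tree fndecl = builtin_decl_explicit (BUILT_IN_MEMCPY);
     tree call = build_call_expr (fndecl, 3, dest, src, nbytes);

   The varargs wrappers above merely collect their arguments into a
   temporary array and forward to build_call_expr_loc_array, using
   UNKNOWN_LOCATION in the location-less variant.  */
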
11541 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
11542    type TYPE.  This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
11543    It will get gimplified later into an ordinary internal function.  */
11544 
11545 tree
11546 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
11547 				    tree type, int n, const tree *args)
11548 {
11549   tree t = build_call_1 (type, NULL_TREE, n);
11550   for (int i = 0; i < n; ++i)
11551     CALL_EXPR_ARG (t, i) = args[i];
11552   SET_EXPR_LOCATION (t, loc);
11553   CALL_EXPR_IFN (t) = ifn;
11554   process_call_operands (t);
11555   return t;
11556 }
11557 
11558 /* Build an internal call expression.  This is just like CALL_EXPR, except
11559    its CALL_EXPR_FN is NULL.  It will get gimplified later into an ordinary
11560    internal function.  */
11561 
11562 tree
11563 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
11564 			      tree type, int n, ...)
11565 {
11566   va_list ap;
11567   tree *argarray = XALLOCAVEC (tree, n);
11568   int i;
11569 
11570   va_start (ap, n);
11571   for (i = 0; i < n; i++)
11572     argarray[i] = va_arg (ap, tree);
11573   va_end (ap);
11574   return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11575 }
11576 
11577 /* Return a function call to FN, if the target is guaranteed to support it,
11578    or null otherwise.
11579 
11580    N is the number of arguments, passed in the "...", and TYPE is the
11581    type of the return value.  */
11582 
11583 tree
11584 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
11585 			   int n, ...)
11586 {
11587   va_list ap;
11588   tree *argarray = XALLOCAVEC (tree, n);
11589   int i;
11590 
11591   va_start (ap, n);
11592   for (i = 0; i < n; i++)
11593     argarray[i] = va_arg (ap, tree);
11594   va_end (ap);
11595   if (internal_fn_p (fn))
11596     {
11597       internal_fn ifn = as_internal_fn (fn);
11598       if (direct_internal_fn_p (ifn))
11599 	{
11600 	  tree_pair types = direct_internal_fn_types (ifn, type, argarray);
11601 	  if (!direct_internal_fn_supported_p (ifn, types,
11602 					       OPTIMIZE_FOR_BOTH))
11603 	    return NULL_TREE;
11604 	}
11605       return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11606     }
11607   else
11608     {
11609       tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
11610       if (!fndecl)
11611 	return NULL_TREE;
11612       return build_call_expr_loc_array (loc, fndecl, n, argarray);
11613     }
11614 }
11615 
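/* Usage sketch (illustrative only; LOC, TYPE and ARG are hypothetical
   caller-side values): request a square-root call, falling back when
   neither the internal function nor the implicit builtin is available:

     tree call = maybe_build_call_expr_loc (loc, CFN_SQRT, type, 1, arg);

   When CALL comes back NULL_TREE the caller must expand the operation
   some other way.  A combined_fn such as CFN_SQRT maps either to an
   internal function (IFN_SQRT) or to a builtin declaration, which is
   why both branches of the function above can be taken.  */
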
11616 /* Return a function call to the appropriate builtin alloca variant.
11617 
11618    SIZE is the size to be allocated.  ALIGN, if non-zero, is the requested
11619    alignment of the allocated area.  MAX_SIZE, if non-negative, is an upper
11620    bound for SIZE in case it is not a fixed value.  */
11621 
11622 tree
11623 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
11624 {
11625   if (max_size >= 0)
11626     {
11627       tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
11628       return
11629 	build_call_expr (t, 3, size, size_int (align), size_int (max_size));
11630     }
11631   else if (align > 0)
11632     {
11633       tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
11634       return build_call_expr (t, 2, size, size_int (align));
11635     }
11636   else
11637     {
11638       tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
11639       return build_call_expr (t, 1, size);
11640     }
11641 }
11642 
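/* Dispatch summary (informal, restating the code above): with
   MAX_SIZE >= 0 the call is to __builtin_alloca_with_align_and_max,
   with only ALIGN > 0 to __builtin_alloca_with_align, and otherwise to
   plain __builtin_alloca.  For example, with a hypothetical SIZE tree:

     build_alloca_call_expr (size, 64, -1);

   builds a call to __builtin_alloca_with_align (size, 64); ALIGN and
   MAX_SIZE are passed through unchanged via size_int.  */
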
11643 /* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
11644    if SIZE == -1) and return a tree node representing char* pointer to
11645    it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)).  When STR is nonnull
11646    the STRING_CST value is the LEN bytes at STR (the representation
11647    of the string, which may be wide).  Otherwise it's all zeros.  */
11648 
11649 tree
11650 build_string_literal (unsigned len, const char *str /* = NULL */,
11651 		      tree eltype /* = char_type_node */,
11652 		      unsigned HOST_WIDE_INT size /* = -1 */)
11653 {
11654   tree t = build_string (len, str);
11655   /* Set the maximum valid index based on the string length or SIZE.  */
11656   unsigned HOST_WIDE_INT maxidx
11657     = (size == HOST_WIDE_INT_M1U ? len : size) - 1;
11658 
11659   tree index = build_index_type (size_int (maxidx));
11660   eltype = build_type_variant (eltype, 1, 0);
11661   tree type = build_array_type (eltype, index);
11662   TREE_TYPE (t) = type;
11663   TREE_CONSTANT (t) = 1;
11664   TREE_READONLY (t) = 1;
11665   TREE_STATIC (t) = 1;
11666 
11667   type = build_pointer_type (eltype);
11668   t = build1 (ADDR_EXPR, type,
11669 	      build4 (ARRAY_REF, eltype,
11670 		      t, integer_zero_node, NULL_TREE, NULL_TREE));
11671   return t;
11672 }
11673 
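/* Usage sketch (illustrative; the literal is arbitrary): build a pointer
   to a static "hello" string using the default ELTYPE and SIZE:

     tree p = build_string_literal (sizeof "hello", "hello");

   The result is an ADDR_EXPR of an ARRAY_REF into the STRING_CST, typed
   as a pointer to const char, and the STRING_CST itself is marked
   TREE_CONSTANT, TREE_READONLY and TREE_STATIC by the code above.  */
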
11674 
11675 
11676 /* Return true if T (assumed to be a DECL) must be assigned a memory
11677    location.  */
11678 
11679 bool
11680 needs_to_live_in_memory (const_tree t)
11681 {
11682   return (TREE_ADDRESSABLE (t)
11683 	  || is_global_var (t)
11684 	  || (TREE_CODE (t) == RESULT_DECL
11685 	      && !DECL_BY_REFERENCE (t)
11686 	      && aggregate_value_p (t, current_function_decl)));
11687 }
11688 
11689 /* Return the value of the constant X, sign-extended.  */
11690 
11691 HOST_WIDE_INT
11692 int_cst_value (const_tree x)
11693 {
11694   unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
11695   unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
11696 
11697   /* Make sure the sign-extended value will fit in a HOST_WIDE_INT.  */
11698   gcc_assert (cst_and_fits_in_hwi (x));
11699 
11700   if (bits < HOST_BITS_PER_WIDE_INT)
11701     {
11702       bool negative = ((val >> (bits - 1)) & 1) != 0;
11703       if (negative)
11704 	val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
11705       else
11706 	val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
11707     }
11708 
11709   return val;
11710 }
11711 
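/* Worked example (informal): for an INTEGER_CST of 8-bit precision whose
   low HOST_WIDE_INT word is 0xff, bit 7 is set, so the code above ORs in
   HOST_WIDE_INT_M1U << 8 and returns -1; with 0x7f it instead clears the
   high bits and returns 127.  The extension is driven purely by the top
   bit within the type's precision, matching the "sign-extend" wording of
   the comment above.  */
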
11712 /* If TYPE is an integral or pointer type, return an integer type with
11713    the same precision which is unsigned iff UNSIGNEDP is true, or itself
11714    if TYPE is already an integer type of signedness UNSIGNEDP.
11715    If TYPE is a floating-point type, return an integer type with the same
11716    bitsize and with the signedness given by UNSIGNEDP; this is useful
11717    when doing bit-level operations on a floating-point value.  */
11718 
11719 tree
11720 signed_or_unsigned_type_for (int unsignedp, tree type)
11721 {
11722   if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
11723     return type;
11724 
11725   if (TREE_CODE (type) == VECTOR_TYPE)
11726     {
11727       tree inner = TREE_TYPE (type);
11728       tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11729       if (!inner2)
11730 	return NULL_TREE;
11731       if (inner == inner2)
11732 	return type;
11733       return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11734     }
11735 
11736   if (TREE_CODE (type) == COMPLEX_TYPE)
11737     {
11738       tree inner = TREE_TYPE (type);
11739       tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11740       if (!inner2)
11741 	return NULL_TREE;
11742       if (inner == inner2)
11743 	return type;
11744       return build_complex_type (inner2);
11745     }
11746 
11747   unsigned int bits;
11748   if (INTEGRAL_TYPE_P (type)
11749       || POINTER_TYPE_P (type)
11750       || TREE_CODE (type) == OFFSET_TYPE)
11751     bits = TYPE_PRECISION (type);
11752   else if (TREE_CODE (type) == REAL_TYPE)
11753     bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
11754   else
11755     return NULL_TREE;
11756 
11757   return build_nonstandard_integer_type (bits, unsignedp);
11758 }
11759 
11760 /* If TYPE is an integral or pointer type, return an integer type with
11761    the same precision which is unsigned, or itself if TYPE is already an
11762    unsigned integer type.  If TYPE is a floating-point type, return an
11763    unsigned integer type with the same bitsize as TYPE.  */
11764 
11765 tree
11766 unsigned_type_for (tree type)
11767 {
11768   return signed_or_unsigned_type_for (1, type);
11769 }
11770 
11771 /* If TYPE is an integral or pointer type, return an integer type with
11772    the same precision which is signed, or itself if TYPE is already a
11773    signed integer type.  If TYPE is a floating-point type, return a
11774    signed integer type with the same bitsize as TYPE.  */
11775 
11776 tree
11777 signed_type_for (tree type)
11778 {
11779   return signed_or_unsigned_type_for (0, type);
11780 }
11781 
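/* Usage sketch (illustrative; PTRTYPE is a hypothetical POINTER_TYPE):

     tree utype = unsigned_type_for (ptrtype);

   yields an unsigned integer type of TYPE_PRECISION (ptrtype) bits, while

     tree stype = signed_type_for (float_type_node);

   yields a signed integer type as wide as float (32 bits on most
   targets), useful for bit-level manipulation of the representation.
   Both are thin wrappers around signed_or_unsigned_type_for above.  */
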
11782 /* If TYPE is a vector type, return a signed integer vector type with the
11783    same width and number of subparts. Otherwise return boolean_type_node.  */
11784 
11785 tree
11786 truth_type_for (tree type)
11787 {
11788   if (TREE_CODE (type) == VECTOR_TYPE)
11789     {
11790       if (VECTOR_BOOLEAN_TYPE_P (type))
11791 	return type;
11792       return build_truth_vector_type_for (type);
11793     }
11794   else
11795     return boolean_type_node;
11796 }
11797 
11798 /* Returns the largest value obtainable by casting something in INNER type to
11799    OUTER type.  */
11800 
11801 tree
11802 upper_bound_in_type (tree outer, tree inner)
11803 {
11804   unsigned int det = 0;
11805   unsigned oprec = TYPE_PRECISION (outer);
11806   unsigned iprec = TYPE_PRECISION (inner);
11807   unsigned prec;
11808 
11809   /* Compute a unique number for every combination.  */
11810   det |= (oprec > iprec) ? 4 : 0;
11811   det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11812   det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11813 
11814   /* Determine the exponent to use.  */
11815   switch (det)
11816     {
11817     case 0:
11818     case 1:
11819       /* oprec <= iprec, outer: signed, inner: don't care.  */
11820       prec = oprec - 1;
11821       break;
11822     case 2:
11823     case 3:
11824       /* oprec <= iprec, outer: unsigned, inner: don't care.  */
11825       prec = oprec;
11826       break;
11827     case 4:
11828       /* oprec > iprec, outer: signed, inner: signed.  */
11829       prec = iprec - 1;
11830       break;
11831     case 5:
11832       /* oprec > iprec, outer: signed, inner: unsigned.  */
11833       prec = iprec;
11834       break;
11835     case 6:
11836       /* oprec > iprec, outer: unsigned, inner: signed.  */
11837       prec = oprec;
11838       break;
11839     case 7:
11840       /* oprec > iprec, outer: unsigned, inner: unsigned.  */
11841       prec = iprec;
11842       break;
11843     default:
11844       gcc_unreachable ();
11845     }
11846 
11847   return wide_int_to_tree (outer,
11848 			   wi::mask (prec, false, TYPE_PRECISION (outer)));
11849 }
11850 
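/* Worked example (informal): casting an 8-bit signed INNER to a 16-bit
   unsigned OUTER gives det = 4 | 2 | 0 = 6, so PREC = OPREC = 16 and the
   bound is 0xffff -- correct, since negative values wrap to large
   unsigned ones.  Narrowing a 32-bit unsigned INNER to a 16-bit signed
   OUTER gives det = 0 | 0 | 1 = 1, so PREC = OPREC - 1 = 15 and the
   bound is 0x7fff.  */
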
11851 /* Returns the smallest value obtainable by casting something in INNER type to
11852    OUTER type.  */
11853 
11854 tree
11855 lower_bound_in_type (tree outer, tree inner)
11856 {
11857   unsigned oprec = TYPE_PRECISION (outer);
11858   unsigned iprec = TYPE_PRECISION (inner);
11859 
11860   /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11861      and obtain 0.  */
11862   if (TYPE_UNSIGNED (outer)
11863       /* If we are widening something of an unsigned type, OUTER type
11864 	 contains all values of INNER type.  In particular, both INNER
11865 	 and OUTER types have zero in common.  */
11866       || (oprec > iprec && TYPE_UNSIGNED (inner)))
11867     return build_int_cst (outer, 0);
11868   else
11869     {
11870       /* If we are widening a signed type to another signed type, we
11871 	 want to obtain -2^^(iprec-1).  If we are keeping the
11872 	 precision or narrowing to a signed type, we want to obtain
11873 	 -2^(oprec-1).  */
11874       unsigned prec = oprec > iprec ? iprec : oprec;
11875       return wide_int_to_tree (outer,
11876 			       wi::mask (prec - 1, true,
11877 					 TYPE_PRECISION (outer)));
11878     }
11879 }
11880 
11881 /* Return nonzero if two operands that are suitable for PHI nodes are
11882    necessarily equal.  Specifically, both ARG0 and ARG1 must be either
11883    SSA_NAME or invariant.  Note that this is strictly an optimization.
11884    That is, callers of this function can directly call operand_equal_p
11885    and get the same result, only slower.  */
11886 
11887 int
11888 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11889 {
11890   if (arg0 == arg1)
11891     return 1;
11892   if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11893     return 0;
11894   return operand_equal_p (arg0, arg1, 0);
11895 }
11896 
11897 /* Returns number of zeros at the end of binary representation of X.  */
11898 
11899 tree
11900 num_ending_zeros (const_tree x)
11901 {
11902   return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
11903 }
11904 
11905 
11906 #define WALK_SUBTREE(NODE)				\
11907   do							\
11908     {							\
11909       result = walk_tree_1 (&(NODE), func, data, pset, lh);	\
11910       if (result)					\
11911 	return result;					\
11912     }							\
11913   while (0)
11914 
11915 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
11916    to be walked whenever a type is seen in the tree.  The rest of the operands
11917    and the return value are as for walk_tree.  */
11918 
11919 static tree
11920 walk_type_fields (tree type, walk_tree_fn func, void *data,
11921 		  hash_set<tree> *pset, walk_tree_lh lh)
11922 {
11923   tree result = NULL_TREE;
11924 
11925   switch (TREE_CODE (type))
11926     {
11927     case POINTER_TYPE:
11928     case REFERENCE_TYPE:
11929     case VECTOR_TYPE:
11930       /* We have to worry about mutually recursive pointers.  These can't
11931 	 be written in C.  They can in Ada.  It's pathological, but
11932 	 there's an ACATS test (c38102a) that checks it.  Deal with this
11933 	 by checking if we're pointing to another pointer, that one
11934 	 points to another pointer, that one does too, and we have no htab.
11935 	 If so, get a hash table.  We check three levels deep to avoid
11936 	 the cost of the hash table if we don't need one.  */
11937       if (POINTER_TYPE_P (TREE_TYPE (type))
11938 	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11939 	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11940 	  && !pset)
11941 	{
11942 	  result = walk_tree_without_duplicates (&TREE_TYPE (type),
11943 						 func, data);
11944 	  if (result)
11945 	    return result;
11946 
11947 	  break;
11948 	}
11949 
11950       /* fall through */
11951 
11952     case COMPLEX_TYPE:
11953       WALK_SUBTREE (TREE_TYPE (type));
11954       break;
11955 
11956     case METHOD_TYPE:
11957       WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11958 
11959       /* Fall through.  */
11960 
11961     case FUNCTION_TYPE:
11962       WALK_SUBTREE (TREE_TYPE (type));
11963       {
11964 	tree arg;
11965 
11966 	/* We never want to walk into default arguments.  */
11967 	for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11968 	  WALK_SUBTREE (TREE_VALUE (arg));
11969       }
11970       break;
11971 
11972     case ARRAY_TYPE:
11973       /* Don't follow this node's type if it is a pointer, for fear that
11974 	 we'll have infinite recursion.  If we have a PSET, then we
11975 	 need not fear.  */
11976       if (pset
11977 	  || (!POINTER_TYPE_P (TREE_TYPE (type))
11978 	      && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11979 	WALK_SUBTREE (TREE_TYPE (type));
11980       WALK_SUBTREE (TYPE_DOMAIN (type));
11981       break;
11982 
11983     case OFFSET_TYPE:
11984       WALK_SUBTREE (TREE_TYPE (type));
11985       WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11986       break;
11987 
11988     default:
11989       break;
11990     }
11991 
11992   return NULL_TREE;
11993 }
11994 
11995 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal.  FUNC is
11996    called with the DATA and the address of each sub-tree.  If FUNC returns a
11997    non-NULL value, the traversal is stopped, and the value returned by FUNC
11998    is returned.  If PSET is non-NULL it is used to record the nodes visited,
11999    and to avoid visiting a node more than once.  */
12000 
12001 tree
12002 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
12003 	     hash_set<tree> *pset, walk_tree_lh lh)
12004 {
12005   enum tree_code code;
12006   int walk_subtrees;
12007   tree result;
12008 
12009 #define WALK_SUBTREE_TAIL(NODE)				\
12010   do							\
12011     {							\
12012        tp = & (NODE);					\
12013        goto tail_recurse;				\
12014     }							\
12015   while (0)
12016 
12017  tail_recurse:
12018   /* Skip empty subtrees.  */
12019   if (!*tp)
12020     return NULL_TREE;
12021 
12022   /* Don't walk the same tree twice, if the user has requested
12023      that we avoid doing so.  */
12024   if (pset && pset->add (*tp))
12025     return NULL_TREE;
12026 
12027   /* Call the function.  */
12028   walk_subtrees = 1;
12029   result = (*func) (tp, &walk_subtrees, data);
12030 
12031   /* If we found something, return it.  */
12032   if (result)
12033     return result;
12034 
12035   code = TREE_CODE (*tp);
12036 
12037   /* Even if we didn't, FUNC may have decided that there was nothing
12038      interesting below this point in the tree.  */
12039   if (!walk_subtrees)
12040     {
12041       /* But we still need to check our siblings.  */
12042       if (code == TREE_LIST)
12043 	WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
12044       else if (code == OMP_CLAUSE)
12045 	WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12046       else
12047 	return NULL_TREE;
12048     }
12049 
12050   if (lh)
12051     {
12052       result = (*lh) (tp, &walk_subtrees, func, data, pset);
12053       if (result || !walk_subtrees)
12054         return result;
12055     }
12056 
12057   switch (code)
12058     {
12059     case ERROR_MARK:
12060     case IDENTIFIER_NODE:
12061     case INTEGER_CST:
12062     case REAL_CST:
12063     case FIXED_CST:
12064     case STRING_CST:
12065     case BLOCK:
12066     case PLACEHOLDER_EXPR:
12067     case SSA_NAME:
12068     case FIELD_DECL:
12069     case RESULT_DECL:
12070       /* None of these have subtrees other than those already walked
12071 	 above.  */
12072       break;
12073 
12074     case TREE_LIST:
12075       WALK_SUBTREE (TREE_VALUE (*tp));
12076       WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
12077       break;
12078 
12079     case TREE_VEC:
12080       {
12081 	int len = TREE_VEC_LENGTH (*tp);
12082 
12083 	if (len == 0)
12084 	  break;
12085 
12086 	/* Walk all elements but the first.  */
12087 	while (--len)
12088 	  WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
12089 
12090 	/* Now walk the first one as a tail call.  */
12091 	WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
12092       }
12093 
12094     case VECTOR_CST:
12095       {
12096 	unsigned len = vector_cst_encoded_nelts (*tp);
12097 	if (len == 0)
12098 	  break;
12099 	/* Walk all elements but the first.  */
12100 	while (--len)
12101 	  WALK_SUBTREE (VECTOR_CST_ENCODED_ELT (*tp, len));
12102 	/* Now walk the first one as a tail call.  */
12103 	WALK_SUBTREE_TAIL (VECTOR_CST_ENCODED_ELT (*tp, 0));
12104       }
12105 
12106     case COMPLEX_CST:
12107       WALK_SUBTREE (TREE_REALPART (*tp));
12108       WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
12109 
12110     case CONSTRUCTOR:
12111       {
12112 	unsigned HOST_WIDE_INT idx;
12113 	constructor_elt *ce;
12114 
12115 	for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
12116 	     idx++)
12117 	  WALK_SUBTREE (ce->value);
12118       }
12119       break;
12120 
12121     case SAVE_EXPR:
12122       WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
12123 
12124     case BIND_EXPR:
12125       {
12126 	tree decl;
12127 	for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
12128 	  {
12129 	    /* Walk the DECL_INITIAL and DECL_SIZE.  We don't want to walk
12130 	       into declarations that are just mentioned, rather than
12131 	       declared; they don't really belong to this part of the tree.
12132 	       And, we can see cycles: the initializer for a declaration
12133 	       can refer to the declaration itself.  */
12134 	    WALK_SUBTREE (DECL_INITIAL (decl));
12135 	    WALK_SUBTREE (DECL_SIZE (decl));
12136 	    WALK_SUBTREE (DECL_SIZE_UNIT (decl));
12137 	  }
12138 	WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
12139       }
12140 
12141     case STATEMENT_LIST:
12142       {
12143 	tree_stmt_iterator i;
12144 	for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
12145 	  WALK_SUBTREE (*tsi_stmt_ptr (i));
12146       }
12147       break;
12148 
12149     case OMP_CLAUSE:
12150       switch (OMP_CLAUSE_CODE (*tp))
12151 	{
12152 	case OMP_CLAUSE_GANG:
12153 	case OMP_CLAUSE__GRIDDIM_:
12154 	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
12155 	  /* FALLTHRU */
12156 
12157 	case OMP_CLAUSE_ASYNC:
12158 	case OMP_CLAUSE_WAIT:
12159 	case OMP_CLAUSE_WORKER:
12160 	case OMP_CLAUSE_VECTOR:
12161 	case OMP_CLAUSE_NUM_GANGS:
12162 	case OMP_CLAUSE_NUM_WORKERS:
12163 	case OMP_CLAUSE_VECTOR_LENGTH:
12164 	case OMP_CLAUSE_PRIVATE:
12165 	case OMP_CLAUSE_SHARED:
12166 	case OMP_CLAUSE_FIRSTPRIVATE:
12167 	case OMP_CLAUSE_COPYIN:
12168 	case OMP_CLAUSE_COPYPRIVATE:
12169 	case OMP_CLAUSE_FINAL:
12170 	case OMP_CLAUSE_IF:
12171 	case OMP_CLAUSE_NUM_THREADS:
12172 	case OMP_CLAUSE_SCHEDULE:
12173 	case OMP_CLAUSE_UNIFORM:
12174 	case OMP_CLAUSE_DEPEND:
12175 	case OMP_CLAUSE_NONTEMPORAL:
12176 	case OMP_CLAUSE_NUM_TEAMS:
12177 	case OMP_CLAUSE_THREAD_LIMIT:
12178 	case OMP_CLAUSE_DEVICE:
12179 	case OMP_CLAUSE_DIST_SCHEDULE:
12180 	case OMP_CLAUSE_SAFELEN:
12181 	case OMP_CLAUSE_SIMDLEN:
12182 	case OMP_CLAUSE_ORDERED:
12183 	case OMP_CLAUSE_PRIORITY:
12184 	case OMP_CLAUSE_GRAINSIZE:
12185 	case OMP_CLAUSE_NUM_TASKS:
12186 	case OMP_CLAUSE_HINT:
12187 	case OMP_CLAUSE_TO_DECLARE:
12188 	case OMP_CLAUSE_LINK:
12189 	case OMP_CLAUSE_USE_DEVICE_PTR:
12190 	case OMP_CLAUSE_USE_DEVICE_ADDR:
12191 	case OMP_CLAUSE_IS_DEVICE_PTR:
12192 	case OMP_CLAUSE_INCLUSIVE:
12193 	case OMP_CLAUSE_EXCLUSIVE:
12194 	case OMP_CLAUSE__LOOPTEMP_:
12195 	case OMP_CLAUSE__REDUCTEMP_:
12196 	case OMP_CLAUSE__CONDTEMP_:
12197 	case OMP_CLAUSE__SCANTEMP_:
12198 	case OMP_CLAUSE__SIMDUID_:
12199 	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
12200 	  /* FALLTHRU */
12201 
12202 	case OMP_CLAUSE_INDEPENDENT:
12203 	case OMP_CLAUSE_NOWAIT:
12204 	case OMP_CLAUSE_DEFAULT:
12205 	case OMP_CLAUSE_UNTIED:
12206 	case OMP_CLAUSE_MERGEABLE:
12207 	case OMP_CLAUSE_PROC_BIND:
12208 	case OMP_CLAUSE_DEVICE_TYPE:
12209 	case OMP_CLAUSE_INBRANCH:
12210 	case OMP_CLAUSE_NOTINBRANCH:
12211 	case OMP_CLAUSE_FOR:
12212 	case OMP_CLAUSE_PARALLEL:
12213 	case OMP_CLAUSE_SECTIONS:
12214 	case OMP_CLAUSE_TASKGROUP:
12215 	case OMP_CLAUSE_NOGROUP:
12216 	case OMP_CLAUSE_THREADS:
12217 	case OMP_CLAUSE_SIMD:
12218 	case OMP_CLAUSE_DEFAULTMAP:
12219 	case OMP_CLAUSE_ORDER:
12220 	case OMP_CLAUSE_BIND:
12221 	case OMP_CLAUSE_AUTO:
12222 	case OMP_CLAUSE_SEQ:
12223 	case OMP_CLAUSE__SIMT_:
12224 	case OMP_CLAUSE_IF_PRESENT:
12225 	case OMP_CLAUSE_FINALIZE:
12226 	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12227 
12228 	case OMP_CLAUSE_LASTPRIVATE:
12229 	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12230 	  WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
12231 	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12232 
12233 	case OMP_CLAUSE_COLLAPSE:
12234 	case OMP_CLAUSE_TILE:
12235 	  {
12236 	    int i;
12237 	    for (i = 0; i < 3; i++)
12238 	      WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
12239 	    WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12240 	  }
12241 
12242 	case OMP_CLAUSE_LINEAR:
12243 	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12244 	  WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
12245 	  WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
12246 	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12247 
12248 	case OMP_CLAUSE_ALIGNED:
12249 	case OMP_CLAUSE_FROM:
12250 	case OMP_CLAUSE_TO:
12251 	case OMP_CLAUSE_MAP:
12252 	case OMP_CLAUSE__CACHE_:
12253 	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12254 	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
12255 	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12256 
12257 	case OMP_CLAUSE_REDUCTION:
12258 	case OMP_CLAUSE_TASK_REDUCTION:
12259 	case OMP_CLAUSE_IN_REDUCTION:
12260 	  {
12261 	    int i;
12262 	    for (i = 0; i < 5; i++)
12263 	      WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
12264 	    WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12265 	  }
12266 
12267 	default:
12268 	  gcc_unreachable ();
12269 	}
12270       break;
12271 
12272     case TARGET_EXPR:
12273       {
12274 	int i, len;
12275 
12276 	/* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
12277 	   But, we only want to walk once.  */
12278 	len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
12279 	for (i = 0; i < len; ++i)
12280 	  WALK_SUBTREE (TREE_OPERAND (*tp, i));
12281 	WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
12282       }
12283 
12284     case DECL_EXPR:
12285       /* If this is a TYPE_DECL, walk into the fields of the type that it's
12286 	 defining.  We only want to walk into these fields of a type in this
12287 	 case and not in the general case of a mere reference to the type.
12288 
12289 	 The criterion is as follows: if the field can be an expression, it
12290 	 must be walked only here.  This should be in keeping with the fields
12291 	 that are directly gimplified in gimplify_type_sizes in order for the
12292 	 mark/copy-if-shared/unmark machinery of the gimplifier to work with
12293 	 variable-sized types.
12294 
12295 	 Note that DECLs get walked as part of processing the BIND_EXPR.  */
12296       if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
12297 	{
12298 	  tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
12299 	  if (TREE_CODE (*type_p) == ERROR_MARK)
12300 	    return NULL_TREE;
12301 
12302 	  /* Call the function for the type.  See if it returns anything or
12303 	     doesn't want us to continue.  If we are to continue, walk both
12304 	     the normal fields and those for the declaration case.  */
12305 	  result = (*func) (type_p, &walk_subtrees, data);
12306 	  if (result || !walk_subtrees)
12307 	    return result;
12308 
12309 	  /* But do not walk a pointed-to type since it may itself need to
12310 	     be walked in the declaration case if it isn't anonymous.  */
12311 	  if (!POINTER_TYPE_P (*type_p))
12312 	    {
12313 	      result = walk_type_fields (*type_p, func, data, pset, lh);
12314 	      if (result)
12315 		return result;
12316 	    }
12317 
12318 	  /* If this is a record type, also walk the fields.  */
12319 	  if (RECORD_OR_UNION_TYPE_P (*type_p))
12320 	    {
12321 	      tree field;
12322 
12323 	      for (field = TYPE_FIELDS (*type_p); field;
12324 		   field = DECL_CHAIN (field))
12325 		{
12326 		  /* We'd like to look at the type of the field, but we can
12327 		     easily get infinite recursion.  So assume it's pointed
12328 		     to elsewhere in the tree.  Also, ignore things that
12329 		     aren't fields.  */
12330 		  if (TREE_CODE (field) != FIELD_DECL)
12331 		    continue;
12332 
12333 		  WALK_SUBTREE (DECL_FIELD_OFFSET (field));
12334 		  WALK_SUBTREE (DECL_SIZE (field));
12335 		  WALK_SUBTREE (DECL_SIZE_UNIT (field));
12336 		  if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
12337 		    WALK_SUBTREE (DECL_QUALIFIER (field));
12338 		}
12339 	    }
12340 
12341 	  /* Same for scalar types.  */
12342 	  else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
12343 		   || TREE_CODE (*type_p) == ENUMERAL_TYPE
12344 		   || TREE_CODE (*type_p) == INTEGER_TYPE
12345 		   || TREE_CODE (*type_p) == FIXED_POINT_TYPE
12346 		   || TREE_CODE (*type_p) == REAL_TYPE)
12347 	    {
12348 	      WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
12349 	      WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
12350 	    }
12351 
12352 	  WALK_SUBTREE (TYPE_SIZE (*type_p));
12353 	  WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
12354 	}
12355       /* FALLTHRU */
12356 
12357     default:
12358       if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
12359 	{
12360 	  int i, len;
12361 
12362 	  /* Walk over all the sub-trees of this operand.  */
12363 	  len = TREE_OPERAND_LENGTH (*tp);
12364 
12365 	  /* Go through the subtrees.  We need to do this in forward order so
12366 	     that the scope of a FOR_EXPR is handled properly.  */
12367 	  if (len)
12368 	    {
12369 	      for (i = 0; i < len - 1; ++i)
12370 		WALK_SUBTREE (TREE_OPERAND (*tp, i));
12371 	      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
12372 	    }
12373 	}
12374       /* If this is a type, walk the needed fields in the type.  */
12375       else if (TYPE_P (*tp))
12376 	return walk_type_fields (*tp, func, data, pset, lh);
12377       break;
12378     }
12379 
12380   /* We didn't find what we were looking for.  */
12381   return NULL_TREE;
12382 
12383 #undef WALK_SUBTREE_TAIL
12384 }
12385 #undef WALK_SUBTREE
12386 
12387 /* Like walk_tree, but does not walk duplicate nodes more than once.  */
12388 
12389 tree
12390 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
12391 				walk_tree_lh lh)
12392 {
12393   tree result;
12394 
12395   hash_set<tree> pset;
12396   result = walk_tree_1 (tp, func, data, &pset, lh);
12397   return result;
12398 }
12399 
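/* Usage sketch (illustrative; the callback is hypothetical): count
   SSA_NAME occurrences under *EXPR_P without revisiting shared subtrees:

     static tree
     count_ssa_names_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                        void *data)
     {
       if (TREE_CODE (*tp) == SSA_NAME)
         ++*(unsigned *) data;
       return NULL_TREE;
     }

     unsigned count = 0;
     walk_tree_without_duplicates (expr_p, count_ssa_names_r, &count);

   Returning a non-NULL tree from the callback stops the walk, and that
   value becomes the return value of walk_tree_1.  */
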
12400 
12401 tree
12402 tree_block (tree t)
12403 {
12404   const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12405 
12406   if (IS_EXPR_CODE_CLASS (c))
12407     return LOCATION_BLOCK (t->exp.locus);
12408   gcc_unreachable ();
12409   return NULL;
12410 }
12411 
12412 void
12413 tree_set_block (tree t, tree b)
12414 {
12415   const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12416 
12417   if (IS_EXPR_CODE_CLASS (c))
12418     {
12419       t->exp.locus = set_block (t->exp.locus, b);
12420     }
12421   else
12422     gcc_unreachable ();
12423 }
12424 
12425 /* Create a nameless artificial label and put it in the current
12426    function context.  The label has a location of LOC.  Returns the
12427    newly created label.  */
12428 
12429 tree
12430 create_artificial_label (location_t loc)
12431 {
12432   tree lab = build_decl (loc,
12433       			 LABEL_DECL, NULL_TREE, void_type_node);
12434 
12435   DECL_ARTIFICIAL (lab) = 1;
12436   DECL_IGNORED_P (lab) = 1;
12437   DECL_CONTEXT (lab) = current_function_decl;
12438   return lab;
12439 }
12440 
12441 /*  Given a tree, try to return a useful variable name that we can use
12442     to prefix a temporary that is being assigned the value of the tree.
12443     I.E. given  <temp> = &A, return A.  */
12444 
12445 const char *
12446 get_name (tree t)
12447 {
12448   tree stripped_decl;
12449 
12450   stripped_decl = t;
12451   STRIP_NOPS (stripped_decl);
12452   if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
12453     return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
12454   else if (TREE_CODE (stripped_decl) == SSA_NAME)
12455     {
12456       tree name = SSA_NAME_IDENTIFIER (stripped_decl);
12457       if (!name)
12458 	return NULL;
12459       return IDENTIFIER_POINTER (name);
12460     }
12461   else
12462     {
12463       switch (TREE_CODE (stripped_decl))
12464 	{
12465 	case ADDR_EXPR:
12466 	  return get_name (TREE_OPERAND (stripped_decl, 0));
12467 	default:
12468 	  return NULL;
12469 	}
12470     }
12471 }
12472 
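/* Illustrative behaviour (not from the original source; FOO_DECL is a
   hypothetical named VAR_DECL):

     tree addr = build_fold_addr_expr (foo_decl);
     const char *name = get_name (addr);

   NAME is "foo" here, because the ADDR_EXPR case recurses into its
   operand.  For an SSA name the identifier of the underlying symbol is
   used, and for anonymous temporaries the function returns NULL.  */
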
12473 /* Return true if TYPE has a variable argument list.  */
12474 
12475 bool
12476 stdarg_p (const_tree fntype)
12477 {
12478   function_args_iterator args_iter;
12479   tree n = NULL_TREE, t;
12480 
12481   if (!fntype)
12482     return false;
12483 
12484   FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
12485     {
12486       n = t;
12487     }
12488 
12489   return n != NULL_TREE && n != void_type_node;
12490 }
12491 
12492 /* Return true if TYPE has a prototype.  */
12493 
12494 bool
12495 prototype_p (const_tree fntype)
12496 {
12497   tree t;
12498 
12499   gcc_assert (fntype != NULL_TREE);
12500 
12501   t = TYPE_ARG_TYPES (fntype);
12502   return (t != NULL_TREE);
12503 }
12504 
12505 /* If BLOCK is inlined from an __attribute__((__artificial__))
12506    routine, return a pointer to the location from which it has
12507    been called.  */
12508 location_t *
12509 block_nonartificial_location (tree block)
12510 {
12511   location_t *ret = NULL;
12512 
12513   while (block && TREE_CODE (block) == BLOCK
12514 	 && BLOCK_ABSTRACT_ORIGIN (block))
12515     {
12516       tree ao = BLOCK_ABSTRACT_ORIGIN (block);
12517       if (TREE_CODE (ao) == FUNCTION_DECL)
12518 	{
12519 	  /* If AO is an artificial inline, point RET to the
12520 	     call site locus at which it has been inlined and continue
12521 	     the loop, in case AO's caller is also an artificial
12522 	     inline.  */
12523 	  if (DECL_DECLARED_INLINE_P (ao)
12524 	      && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
12525 	    ret = &BLOCK_SOURCE_LOCATION (block);
12526 	  else
12527 	    break;
12528 	}
12529       else if (TREE_CODE (ao) != BLOCK)
12530 	break;
12531 
12532       block = BLOCK_SUPERCONTEXT (block);
12533     }
12534   return ret;
12535 }
12536 
12537 
12538 /* If EXP is inlined from an __attribute__((__artificial__))
12539    function, return the location of the original call expression.  */
12540 
12541 location_t
12542 tree_nonartificial_location (tree exp)
12543 {
12544   location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
12545 
12546   if (loc)
12547     return *loc;
12548   else
12549     return EXPR_LOCATION (exp);
12550 }
12551 
12552 
12553 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
12554    nodes.  */
12555 
12556 /* Return the hash code X, an OPTIMIZATION_NODE or TARGET_OPTION code.  */
12557 
12558 hashval_t
12559 cl_option_hasher::hash (tree x)
12560 {
12561   const_tree const t = x;
12562   const char *p;
12563   size_t i;
12564   size_t len = 0;
12565   hashval_t hash = 0;
12566 
12567   if (TREE_CODE (t) == OPTIMIZATION_NODE)
12568     {
12569       p = (const char *)TREE_OPTIMIZATION (t);
12570       len = sizeof (struct cl_optimization);
12571     }
12572 
12573   else if (TREE_CODE (t) == TARGET_OPTION_NODE)
12574     return cl_target_option_hash (TREE_TARGET_OPTION (t));
12575 
12576   else
12577     gcc_unreachable ();
12578 
12579   /* assume most opt flags are just 0/1, some are 2-3, and a few might be
12580      something else.  */
12581   for (i = 0; i < len; i++)
12582     if (p[i])
12583       hash = (hash << 4) ^ ((i << 2) | p[i]);
12584 
12585   return hash;
12586 }
12587 
12588 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
12589    TARGET_OPTION tree node) is the same as that given by *Y, which is of
12590    the same kind.  */
12591 
12592 bool
12593 cl_option_hasher::equal (tree x, tree y)
12594 {
12595   const_tree const xt = x;
12596   const_tree const yt = y;
12597 
12598   if (TREE_CODE (xt) != TREE_CODE (yt))
12599     return 0;
12600 
12601   if (TREE_CODE (xt) == OPTIMIZATION_NODE)
12602     return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
12603 				      TREE_OPTIMIZATION (yt));
12604   else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
12605     return cl_target_option_eq (TREE_TARGET_OPTION (xt),
12606 				TREE_TARGET_OPTION (yt));
12607   else
12608     gcc_unreachable ();
12609 }
12610 
12611 /* Build an OPTIMIZATION_NODE based on the options in OPTS.  */
12612 
12613 tree
12614 build_optimization_node (struct gcc_options *opts)
12615 {
12616   tree t;
12617 
12618   /* Use the cache of optimization nodes.  */
12619 
12620   cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
12621 			opts);
12622 
12623   tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
12624   t = *slot;
12625   if (!t)
12626     {
12627       /* Insert this one into the hash table.  */
12628       t = cl_optimization_node;
12629       *slot = t;
12630 
12631       /* Make a new node for next time round.  */
12632       cl_optimization_node = make_node (OPTIMIZATION_NODE);
12633     }
12634 
12635   return t;
12636 }
12637 
12638 /* Build a TARGET_OPTION_NODE based on the options in OPTS.  */
12639 
12640 tree
12641 build_target_option_node (struct gcc_options *opts)
12642 {
12643   tree t;
12644 
12645   /* Use the cache of optimization nodes.  */
12646 
12647   cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
12648 			 opts);
12649 
12650   tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
12651   t = *slot;
12652   if (!t)
12653     {
12654       /* Insert this one into the hash table.  */
12655       t = cl_target_option_node;
12656       *slot = t;
12657 
12658       /* Make a new node for next time round.  */
12659       cl_target_option_node = make_node (TARGET_OPTION_NODE);
12660     }
12661 
12662   return t;
12663 }
12664 
12665 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
12666    so that they aren't saved during PCH writing.  */
12667 
12668 void
12669 prepare_target_option_nodes_for_pch (void)
12670 {
12671   hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
12672   for (; iter != cl_option_hash_table->end (); ++iter)
12673     if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
12674       TREE_TARGET_GLOBALS (*iter) = NULL;
12675 }
12676 
12677 /* Determine the "ultimate origin" of a block.  */
12678 
12679 tree
12680 block_ultimate_origin (const_tree block)
12681 {
12682   tree origin = BLOCK_ABSTRACT_ORIGIN (block);
12683 
12684   if (origin == NULL_TREE)
12685     return NULL_TREE;
12686   else
12687     {
12688       gcc_checking_assert ((DECL_P (origin)
12689 			    && DECL_ORIGIN (origin) == origin)
12690 			   || BLOCK_ORIGIN (origin) == origin);
12691       return origin;
12692     }
12693 }
12694 
12695 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
12696    no instruction.  */
12697 
12698 bool
12699 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
12700 {
12701   /* Do not strip casts into or out of differing address spaces.  */
12702   if (POINTER_TYPE_P (outer_type)
12703       && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
12704     {
12705       if (!POINTER_TYPE_P (inner_type)
12706 	  || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
12707 	      != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
12708 	return false;
12709     }
12710   else if (POINTER_TYPE_P (inner_type)
12711 	   && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
12712     {
12713       /* We already know that outer_type is not a pointer with
12714 	 a non-generic address space.  */
12715       return false;
12716     }
12717 
12718   /* Use precision rather than machine mode when we can, which gives
12719      the correct answer even for submode (bit-field) types.  */
12720   if ((INTEGRAL_TYPE_P (outer_type)
12721        || POINTER_TYPE_P (outer_type)
12722        || TREE_CODE (outer_type) == OFFSET_TYPE)
12723       && (INTEGRAL_TYPE_P (inner_type)
12724 	  || POINTER_TYPE_P (inner_type)
12725 	  || TREE_CODE (inner_type) == OFFSET_TYPE))
12726     return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
12727 
12728   /* Otherwise fall back on comparing machine modes (e.g. for
12729      aggregate types, floats).  */
12730   return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
12731 }
12732 
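/* Informal examples of the predicate above: a cast between "int" and
   "unsigned int" is a nop (same precision), as is "int *" to "char *"
   within the generic address space; "int" to "long" on an LP64 target is
   not (32 vs. 64 bits of precision), and neither is any cast that crosses
   into or out of a non-generic named address space.  */
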
12733 /* Return true iff conversion in EXP generates no instruction.  Mark
12734    it inline so that we fully inline into the stripping functions even
12735    though we have two uses of this function.  */
12736 
12737 static inline bool
12738 tree_nop_conversion (const_tree exp)
12739 {
12740   tree outer_type, inner_type;
12741 
12742   if (location_wrapper_p (exp))
12743     return true;
12744   if (!CONVERT_EXPR_P (exp)
12745       && TREE_CODE (exp) != NON_LVALUE_EXPR)
12746     return false;
12747 
12748   outer_type = TREE_TYPE (exp);
12749   inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12750   if (!inner_type || inner_type == error_mark_node)
12751     return false;
12752 
12753   return tree_nop_conversion_p (outer_type, inner_type);
12754 }
12755 
12756 /* Return true iff conversion in EXP generates no instruction.  Don't
12757    consider conversions changing the signedness.  */
12758 
12759 static bool
12760 tree_sign_nop_conversion (const_tree exp)
12761 {
12762   tree outer_type, inner_type;
12763 
12764   if (!tree_nop_conversion (exp))
12765     return false;
12766 
12767   outer_type = TREE_TYPE (exp);
12768   inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12769 
12770   return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12771 	  && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12772 }
12773 
12774 /* Strip conversions from EXP according to tree_nop_conversion and
12775    return the resulting expression.  */
12776 
12777 tree
12778 tree_strip_nop_conversions (tree exp)
12779 {
12780   while (tree_nop_conversion (exp))
12781     exp = TREE_OPERAND (exp, 0);
12782   return exp;
12783 }
12784 
12785 /* Strip conversions from EXP according to tree_sign_nop_conversion
12786    and return the resulting expression.  */
12787 
12788 tree
12789 tree_strip_sign_nop_conversions (tree exp)
12790 {
12791   while (tree_sign_nop_conversion (exp))
12792     exp = TREE_OPERAND (exp, 0);
12793   return exp;
12794 }
12795 
12796 /* Avoid any floating point extensions from EXP.  */
12797 tree
12798 strip_float_extensions (tree exp)
12799 {
12800   tree sub, expt, subt;
12801 
12802   /*  For a floating-point constant, look up the narrowest type that can
12803       hold it properly and handle it like (type)(narrowest_type)constant.
12804       This way we can optimize, for instance, a=a*2.0 where "a" is float
12805       but 2.0 is a double constant.  */
12806   if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12807     {
12808       REAL_VALUE_TYPE orig;
12809       tree type = NULL;
12810 
12811       orig = TREE_REAL_CST (exp);
12812       if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12813 	  && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12814 	type = float_type_node;
12815       else if (TYPE_PRECISION (TREE_TYPE (exp))
12816 	       > TYPE_PRECISION (double_type_node)
12817 	       && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12818 	type = double_type_node;
12819       if (type)
12820 	return build_real_truncate (type, orig);
12821     }
12822 
12823   if (!CONVERT_EXPR_P (exp))
12824     return exp;
12825 
12826   sub = TREE_OPERAND (exp, 0);
12827   subt = TREE_TYPE (sub);
12828   expt = TREE_TYPE (exp);
12829 
12830   if (!FLOAT_TYPE_P (subt))
12831     return exp;
12832 
12833   if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12834     return exp;
12835 
12836   if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
12837     return exp;
12838 
12839   return strip_float_extensions (sub);
12840 }
12841 
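/* Illustrative example (hypothetical trees): for the double-typed
   expression (double) f, where "f" has type float, the conversion is
   stripped and "f" itself is returned; for the REAL_CST 2.0 of type
   double, the value truncates exactly to float, so a float constant is
   returned instead.  This is what lets a = a * 2.0 with a float "a" be
   carried out in single precision, per the comment in the function body
   above.  */
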
12842 /* Strip out all handled components that produce invariant
12843    offsets.  */
12844 
12845 const_tree
12846 strip_invariant_refs (const_tree op)
12847 {
12848   while (handled_component_p (op))
12849     {
12850       switch (TREE_CODE (op))
12851 	{
12852 	case ARRAY_REF:
12853 	case ARRAY_RANGE_REF:
12854 	  if (!is_gimple_constant (TREE_OPERAND (op, 1))
12855 	      || TREE_OPERAND (op, 2) != NULL_TREE
12856 	      || TREE_OPERAND (op, 3) != NULL_TREE)
12857 	    return NULL;
12858 	  break;
12859 
12860 	case COMPONENT_REF:
12861 	  if (TREE_OPERAND (op, 2) != NULL_TREE)
12862 	    return NULL;
12863 	  break;
12864 
12865 	default:;
12866 	}
12867       op = TREE_OPERAND (op, 0);
12868     }
12869 
12870   return op;
12871 }
12872 
12873 static GTY(()) tree gcc_eh_personality_decl;
12874 
12875 /* Return the GCC personality function decl.  */
12876 
12877 tree
12878 lhd_gcc_personality (void)
12879 {
12880   if (!gcc_eh_personality_decl)
12881     gcc_eh_personality_decl = build_personality_function ("gcc");
12882   return gcc_eh_personality_decl;
12883 }
12884 
12885 /* TARGET is a call target of a GIMPLE call statement
12886    (obtained by gimple_call_fn).  Return true if it is
12887    an OBJ_TYPE_REF representing a virtual call of a C++ method.
12888    (As opposed to an OBJ_TYPE_REF representing Objective-C calls
12889    through a cast where middle-end devirtualization machinery
12890    can't apply.)  FOR_DUMP_P is true when being called from
12891    the dump routines.  */
12892 
12893 bool
12894 virtual_method_call_p (const_tree target, bool for_dump_p)
12895 {
12896   if (TREE_CODE (target) != OBJ_TYPE_REF)
12897     return false;
12898   tree t = TREE_TYPE (target);
12899   gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12900   t = TREE_TYPE (t);
12901   if (TREE_CODE (t) == FUNCTION_TYPE)
12902     return false;
12903   gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12904   /* If we do not have BINFO associated, it means that type was built
12905      without devirtualization enabled.  Do not consider this a virtual
12906      call.  */
12907   if (!TYPE_BINFO (obj_type_ref_class (target, for_dump_p)))
12908     return false;
12909   return true;
12910 }
12911 
12912 /* Lookup sub-BINFO of BINFO of TYPE at offset POS.  */
12913 
12914 static tree
12915 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12916 {
12917   unsigned int i;
12918   tree base_binfo, b;
12919 
12920   for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12921     if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12922 	&& types_same_for_odr (TREE_TYPE (base_binfo), type))
12923       return base_binfo;
12924     else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12925       return b;
12926   return NULL;
12927 }
12928 
12929 /* Try to find a base info of BINFO that would have its field decl at offset
12930    OFFSET within the BINFO type and which is of EXPECTED_TYPE.  If it can be
12931    found, return it, otherwise return NULL_TREE.  */
12932 
12933 tree
12934 get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
12935 {
12936   tree type = BINFO_TYPE (binfo);
12937 
12938   while (true)
12939     {
12940       HOST_WIDE_INT pos, size;
12941       tree fld;
12942       int i;
12943 
12944       if (types_same_for_odr (type, expected_type))
12945 	  return binfo;
12946       if (maybe_lt (offset, 0))
12947 	return NULL_TREE;
12948 
12949       for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12950 	{
12951 	  if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12952 	    continue;
12953 
12954 	  pos = int_bit_position (fld);
12955 	  size = tree_to_uhwi (DECL_SIZE (fld));
12956 	  if (known_in_range_p (offset, pos, size))
12957 	    break;
12958 	}
12959       if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12960 	return NULL_TREE;
12961 
12962       /* Offset 0 indicates the primary base, whose vtable contents are
12963 	 represented in the binfo for the derived class.  */
12964       else if (maybe_ne (offset, 0))
12965 	{
12966 	  tree found_binfo = NULL, base_binfo;
12967 	  /* Offsets in BINFO are in bytes relative to the whole structure
12968 	     while POS is in bits relative to the containing field.  */
12969 	  int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12970 			     / BITS_PER_UNIT);
12971 
12972 	  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12973 	    if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12974 		&& types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12975 	      {
12976 		found_binfo = base_binfo;
12977 		break;
12978 	      }
12979 	  if (found_binfo)
12980 	    binfo = found_binfo;
12981 	  else
12982 	    binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12983 					    binfo_offset);
12984 	 }
12985 
12986       type = TREE_TYPE (fld);
12987       offset -= pos;
12988     }
12989 }
12990 
12991 /* Returns true if X is a typedef decl.  */
12992 
12993 bool
12994 is_typedef_decl (const_tree x)
12995 {
12996   return (x && TREE_CODE (x) == TYPE_DECL
12997           && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12998 }
12999 
13000 /* Returns true iff TYPE is a type variant created for a typedef. */
13001 
13002 bool
13003 typedef_variant_p (const_tree type)
13004 {
13005   return is_typedef_decl (TYPE_NAME (type));
13006 }
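
/* Illustrative sketch, not part of the original source: for a source-level
   typedef such as "typedef int myint;" the front end records the original
   type on the TYPE_DECL, so the two predicates above relate as

     tree decl = TYPE_NAME (type);	      // the TYPE_DECL for "myint"
     if (is_typedef_decl (decl))
       {
	 gcc_checking_assert (typedef_variant_p (type));
	 tree orig = DECL_ORIGINAL_TYPE (decl);  // the underlying "int"
       }

   TYPE here stands for the variant type built for the typedef; the snippet
   merely restates the definitions of the two predicates.  */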
13007 
13008 /* PR 84195: Replace control characters in "unescaped" with their
13009    escaped equivalents.  Allow newlines if -fmessage-length has
13010    been set to a non-zero value.  This is done here, rather than
13011    where the attribute is recorded, as the message length can
13012    change between these two locations.  */
13013 
13014 void
13015 escaped_string::escape (const char *unescaped)
13016 {
13017   char *escaped;
13018   size_t i, new_i, len;
13019 
13020   if (m_owned)
13021     free (m_str);
13022 
13023   m_str = const_cast<char *> (unescaped);
13024   m_owned = false;
13025 
13026   if (unescaped == NULL || *unescaped == 0)
13027     return;
13028 
13029   len = strlen (unescaped);
13030   escaped = NULL;
13031   new_i = 0;
13032 
13033   for (i = 0; i < len; i++)
13034     {
13035       char c = unescaped[i];
13036 
13037       if (!ISCNTRL (c))
13038 	{
13039 	  if (escaped)
13040 	    escaped[new_i++] = c;
13041 	  continue;
13042 	}
13043 
13044       if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
13045 	{
13046 	  if (escaped == NULL)
13047 	    {
13048 	      /* We only allocate space for a new string if we
13049 		 actually encounter a control character that
13050 		 needs replacing.  */
13051 	      escaped = (char *) xmalloc (len * 2 + 1);
13052 	      strncpy (escaped, unescaped, i);
13053 	      new_i = i;
13054 	    }
13055 
13056 	  escaped[new_i++] = '\\';
13057 
13058 	  switch (c)
13059 	    {
13060 	    case '\a': escaped[new_i++] = 'a'; break;
13061 	    case '\b': escaped[new_i++] = 'b'; break;
13062 	    case '\f': escaped[new_i++] = 'f'; break;
13063 	    case '\n': escaped[new_i++] = 'n'; break;
13064 	    case '\r': escaped[new_i++] = 'r'; break;
13065 	    case '\t': escaped[new_i++] = 't'; break;
13066 	    case '\v': escaped[new_i++] = 'v'; break;
13067 	    default:   escaped[new_i++] = '?'; break;
13068 	    }
13069 	}
13070       else if (escaped)
13071 	escaped[new_i++] = c;
13072     }
13073 
13074   if (escaped)
13075     {
13076       escaped[new_i] = 0;
13077       m_str = escaped;
13078       m_owned = true;
13079     }
13080 }
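
/* Illustrative sketch, not part of the original source: the intended usage
   pattern, mirrored by warn_deprecated_use below, is

     escaped_string msg;
     msg.escape (attr_string);
     if (msg)
       warning (OPT_Wdeprecated_declarations, "%qD is deprecated: %s",
		decl, (const char *) msg);

   ATTR_STRING and DECL stand for the attribute message and the deprecated
   declaration supplied by the caller.  */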
13081 
13082 /* Warn about a use of an identifier which was marked deprecated.  Returns
13083    whether a warning was given.  */
13084 
13085 bool
13086 warn_deprecated_use (tree node, tree attr)
13087 {
13088   escaped_string msg;
13089 
13090   if (node == 0 || !warn_deprecated_decl)
13091     return false;
13092 
13093   if (!attr)
13094     {
13095       if (DECL_P (node))
13096 	attr = DECL_ATTRIBUTES (node);
13097       else if (TYPE_P (node))
13098 	{
13099 	  tree decl = TYPE_STUB_DECL (node);
13100 	  if (decl)
13101 	    attr = lookup_attribute ("deprecated",
13102 				     TYPE_ATTRIBUTES (TREE_TYPE (decl)));
13103 	}
13104     }
13105 
13106   if (attr)
13107     attr = lookup_attribute ("deprecated", attr);
13108 
13109   if (attr)
13110     msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
13111 
13112   bool w = false;
13113   if (DECL_P (node))
13114     {
13115       auto_diagnostic_group d;
13116       if (msg)
13117 	w = warning (OPT_Wdeprecated_declarations,
13118 		     "%qD is deprecated: %s", node, (const char *) msg);
13119       else
13120 	w = warning (OPT_Wdeprecated_declarations,
13121 		     "%qD is deprecated", node);
13122       if (w)
13123 	inform (DECL_SOURCE_LOCATION (node), "declared here");
13124     }
13125   else if (TYPE_P (node))
13126     {
13127       tree what = NULL_TREE;
13128       tree decl = TYPE_STUB_DECL (node);
13129 
13130       if (TYPE_NAME (node))
13131 	{
13132 	  if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
13133 	    what = TYPE_NAME (node);
13134 	  else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
13135 		   && DECL_NAME (TYPE_NAME (node)))
13136 	    what = DECL_NAME (TYPE_NAME (node));
13137 	}
13138 
13139       auto_diagnostic_group d;
13140       if (what)
13141 	{
13142 	  if (msg)
13143 	    w = warning (OPT_Wdeprecated_declarations,
13144 			 "%qE is deprecated: %s", what, (const char *) msg);
13145 	  else
13146 	    w = warning (OPT_Wdeprecated_declarations,
13147 			 "%qE is deprecated", what);
13148 	}
13149       else
13150 	{
13151 	  if (msg)
13152 	    w = warning (OPT_Wdeprecated_declarations,
13153 			 "type is deprecated: %s", (const char *) msg);
13154 	  else
13155 	    w = warning (OPT_Wdeprecated_declarations,
13156 			 "type is deprecated");
13157 	}
13158 
13159       if (w && decl)
13160 	inform (DECL_SOURCE_LOCATION (decl), "declared here");
13161     }
13162 
13163   return w;
13164 }
13165 
13166 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
13167    somewhere in it.  */
13168 
13169 bool
13170 contains_bitfld_component_ref_p (const_tree ref)
13171 {
13172   while (handled_component_p (ref))
13173     {
13174       if (TREE_CODE (ref) == COMPONENT_REF
13175           && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
13176         return true;
13177       ref = TREE_OPERAND (ref, 0);
13178     }
13179 
13180   return false;
13181 }
13182 
13183 /* Try to determine whether a TRY_CATCH expression can fall through.
13184    This is a subroutine of block_may_fallthru.  */
13185 
13186 static bool
13187 try_catch_may_fallthru (const_tree stmt)
13188 {
13189   tree_stmt_iterator i;
13190 
13191   /* If the TRY block can fall through, the whole TRY_CATCH can
13192      fall through.  */
13193   if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
13194     return true;
13195 
13196   i = tsi_start (TREE_OPERAND (stmt, 1));
13197   switch (TREE_CODE (tsi_stmt (i)))
13198     {
13199     case CATCH_EXPR:
13200       /* We expect to see a sequence of CATCH_EXPR trees, each with a
13201 	 catch expression and a body.  The whole TRY_CATCH may fall
13202 	 through iff any of the catch bodies falls through.  */
13203       for (; !tsi_end_p (i); tsi_next (&i))
13204 	{
13205 	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
13206 	    return true;
13207 	}
13208       return false;
13209 
13210     case EH_FILTER_EXPR:
13211       /* The exception filter expression only matters if there is an
13212 	 exception.  If the exception does not match EH_FILTER_TYPES,
13213 	 we will execute EH_FILTER_FAILURE, and we will fall through
13214 	 if that falls through.  If the exception does match
13215 	 EH_FILTER_TYPES, the stack unwinder will continue up the
13216 	 stack, so we will not fall through.  We don't know whether we
13217 	 will throw an exception which matches EH_FILTER_TYPES or not,
13218 	 so we just ignore EH_FILTER_TYPES and assume that we might
13219 	 throw an exception which doesn't match.  */
13220       return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
13221 
13222     default:
13223       /* This case represents statements to be executed when an
13224 	 exception occurs.  Those statements are implicitly followed
13225 	 by a RESX statement to resume execution after the exception.
13226 	 So in this case the TRY_CATCH never falls through.  */
13227       return false;
13228     }
13229 }
13230 
13231 /* Try to determine if we can fall out of the bottom of BLOCK.  This guess
13232    need not be 100% accurate; simply be conservative and return true if we
13233    don't know.  This is used only to avoid stupidly generating extra code.
13234    If we're wrong, we'll just delete the extra code later.  */
13235 
13236 bool
13237 block_may_fallthru (const_tree block)
13238 {
13239   /* This CONST_CAST is okay because expr_last returns its argument
13240      unmodified and we assign it to a const_tree.  */
13241   const_tree stmt = expr_last (CONST_CAST_TREE (block));
13242 
13243   switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
13244     {
13245     case GOTO_EXPR:
13246     case RETURN_EXPR:
13247       /* Easy cases.  If the last statement of the block implies
13248 	 control transfer, then we can't fall through.  */
13249       return false;
13250 
13251     case SWITCH_EXPR:
13252       /* If there is a default: label or case labels cover all possible
13253 	 SWITCH_COND values, then the SWITCH_EXPR will transfer control
13254 	 to some case label in all cases and all we care is whether the
13255 	 SWITCH_BODY falls through.  */
13256       if (SWITCH_ALL_CASES_P (stmt))
13257 	return block_may_fallthru (SWITCH_BODY (stmt));
13258       return true;
13259 
13260     case COND_EXPR:
13261       if (block_may_fallthru (COND_EXPR_THEN (stmt)))
13262 	return true;
13263       return block_may_fallthru (COND_EXPR_ELSE (stmt));
13264 
13265     case BIND_EXPR:
13266       return block_may_fallthru (BIND_EXPR_BODY (stmt));
13267 
13268     case TRY_CATCH_EXPR:
13269       return try_catch_may_fallthru (stmt);
13270 
13271     case TRY_FINALLY_EXPR:
13272       /* The finally clause is always executed after the try clause,
13273 	 so if it does not fall through, then the try-finally will not
13274 	 fall through.  Otherwise, if the try clause does not fall
13275 	 through, then when the finally clause falls through it will
13276 	 resume execution wherever the try clause was going.  So the
13277 	 whole try-finally will only fall through if both the try
13278 	 clause and the finally clause fall through.  */
13279       return (block_may_fallthru (TREE_OPERAND (stmt, 0))
13280 	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));
13281 
13282     case EH_ELSE_EXPR:
13283       return block_may_fallthru (TREE_OPERAND (stmt, 0));
13284 
13285     case MODIFY_EXPR:
13286       if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
13287 	stmt = TREE_OPERAND (stmt, 1);
13288       else
13289 	return true;
13290       /* FALLTHRU */
13291 
13292     case CALL_EXPR:
13293       /* Functions that do not return do not fall through.  */
13294       return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
13295 
13296     case CLEANUP_POINT_EXPR:
13297       return block_may_fallthru (TREE_OPERAND (stmt, 0));
13298 
13299     case TARGET_EXPR:
13300       return block_may_fallthru (TREE_OPERAND (stmt, 1));
13301 
13302     case ERROR_MARK:
13303       return true;
13304 
13305     default:
13306       return lang_hooks.block_may_fallthru (stmt);
13307     }
13308 }
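
/* Illustrative sketch, not part of the original source: a front end or the
   gimplifier can use the predicate above to decide whether anything needs
   to happen after a statement list, e.g.

     if (block_may_fallthru (body))
       ;  // control may reach the end of BODY, so an implicit return or
	  // a fallthrough edge has to be accounted for

   BODY stands for a statement list built by the caller; as documented, a
   conservative "true" answer is always acceptable.  */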
13309 
13310 /* True if we are using EH to handle cleanups.  */
13311 static bool using_eh_for_cleanups_flag = false;
13312 
13313 /* This routine is called from front ends to indicate that EH should be used
13314    for cleanups.  */
13315 void
13316 using_eh_for_cleanups (void)
13317 {
13318   using_eh_for_cleanups_flag = true;
13319 }
13320 
13321 /* Query whether EH is used for cleanups.  */
13322 bool
13323 using_eh_for_cleanups_p (void)
13324 {
13325   return using_eh_for_cleanups_flag;
13326 }
13327 
13328 /* Wrapper for tree_code_name to ensure that the tree code is valid.  */
13329 const char *
13330 get_tree_code_name (enum tree_code code)
13331 {
13332   const char *invalid = "<invalid tree code>";
13333 
13334   if (code >= MAX_TREE_CODES)
13335     {
13336       if (code == 0xa5a5)
13337 	return "ggc_freed";
13338       return invalid;
13339     }
13340 
13341   return tree_code_name[code];
13342 }
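
/* Illustrative sketch, not part of the original source: a typical use in
   dumping or diagnostic code is

     fprintf (dump_file, "unexpected tree code %s\n",
	      get_tree_code_name (TREE_CODE (t)));

   DUMP_FILE and T stand for whatever stream and tree the caller has at
   hand; the wrapper only guards against out-of-range codes.  */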
13343 
13344 /* Drops the TREE_OVERFLOW flag from T.  */
13345 
13346 tree
13347 drop_tree_overflow (tree t)
13348 {
13349   gcc_checking_assert (TREE_OVERFLOW (t));
13350 
13351   /* For tree codes with a sharing machinery re-build the result.  */
13352   if (poly_int_tree_p (t))
13353     return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));
13354 
13355   /* For VECTOR_CST, remove the overflow bits from the encoded elements
13356      and canonicalize the result.  */
13357   if (TREE_CODE (t) == VECTOR_CST)
13358     {
13359       tree_vector_builder builder;
13360       builder.new_unary_operation (TREE_TYPE (t), t, true);
13361       unsigned int count = builder.encoded_nelts ();
13362       for (unsigned int i = 0; i < count; ++i)
13363 	{
13364 	  tree elt = VECTOR_CST_ELT (t, i);
13365 	  if (TREE_OVERFLOW (elt))
13366 	    elt = drop_tree_overflow (elt);
13367 	  builder.quick_push (elt);
13368 	}
13369       return builder.build ();
13370     }
13371 
13372   /* Otherwise, as all tcc_constants are possibly shared, copy the node
13373      and drop the flag.  */
13374   t = copy_node (t);
13375   TREE_OVERFLOW (t) = 0;
13376 
13377   /* For constants that contain nested constants, drop the flag
13378      from those as well.  */
13379   if (TREE_CODE (t) == COMPLEX_CST)
13380     {
13381       if (TREE_OVERFLOW (TREE_REALPART (t)))
13382 	TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
13383       if (TREE_OVERFLOW (TREE_IMAGPART (t)))
13384 	TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
13385     }
13386 
13387   return t;
13388 }
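
/* Illustrative sketch, not part of the original source: a caller that has
   folded something into a constant but wants to discard the sticky
   overflow bit could do

     if (CONSTANT_CLASS_P (res) && TREE_OVERFLOW (res))
       res = drop_tree_overflow (res);

   RES stands for the constant in question; for shared constants the
   function returns a fresh node as described above.  */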
13389 
13390 /* Given a memory reference expression T, return its base address.
13391    The base address of a memory reference expression is the main
13392    object being referenced.  For instance, the base address for
13393    'array[i].fld[j]' is 'array'.  You can think of this as stripping
13394    away the offset part from a memory address.
13395 
13396    This function calls handled_component_p to strip away all the inner
13397    parts of the memory reference until it reaches the base object.  */
13398 
13399 tree
13400 get_base_address (tree t)
13401 {
13402   while (handled_component_p (t))
13403     t = TREE_OPERAND (t, 0);
13404 
13405   if ((TREE_CODE (t) == MEM_REF
13406        || TREE_CODE (t) == TARGET_MEM_REF)
13407       && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
13408     t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
13409 
13410   /* ???  Either the alias oracle or all callers need to properly deal
13411      with WITH_SIZE_EXPRs before we can look through those.  */
13412   if (TREE_CODE (t) == WITH_SIZE_EXPR)
13413     return NULL_TREE;
13414 
13415   return t;
13416 }
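
/* Illustrative sketch, not part of the original source: for the reference
   'array[i].fld[j]' used in the comment above,

     tree base = get_base_address (ref);

   yields the declaration of 'array' (or NULL_TREE in the WITH_SIZE_EXPR
   case noted above).  REF stands for the tree of the memory reference.  */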
13417 
13418 /* Return a tree of sizetype representing the size, in bytes, of the element
13419    of EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
13420 
13421 tree
13422 array_ref_element_size (tree exp)
13423 {
13424   tree aligned_size = TREE_OPERAND (exp, 3);
13425   tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
13426   location_t loc = EXPR_LOCATION (exp);
13427 
13428   /* If a size was specified in the ARRAY_REF, it's the size measured
13429      in alignment units of the element type.  So multiply by that value.  */
13430   if (aligned_size)
13431     {
13432       /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13433 	 sizetype from another type of the same width and signedness.  */
13434       if (TREE_TYPE (aligned_size) != sizetype)
13435 	aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
13436       return size_binop_loc (loc, MULT_EXPR, aligned_size,
13437 			     size_int (TYPE_ALIGN_UNIT (elmt_type)));
13438     }
13439 
13440   /* Otherwise, take the size from that of the element type.  Substitute
13441      any PLACEHOLDER_EXPR that we have.  */
13442   else
13443     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
13444 }
13445 
13446 /* Return a tree representing the lower bound of the array mentioned in
13447    EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
13448 
13449 tree
13450 array_ref_low_bound (tree exp)
13451 {
13452   tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13453 
13454   /* If a lower bound is specified in EXP, use it.  */
13455   if (TREE_OPERAND (exp, 2))
13456     return TREE_OPERAND (exp, 2);
13457 
13458   /* Otherwise, if there is a domain type and it has a lower bound, use it,
13459      substituting for a PLACEHOLDER_EXPR as needed.  */
13460   if (domain_type && TYPE_MIN_VALUE (domain_type))
13461     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
13462 
13463   /* Otherwise, return a zero of the appropriate type.  */
13464   tree idxtype = TREE_TYPE (TREE_OPERAND (exp, 1));
13465   return (idxtype == error_mark_node
13466 	  ? integer_zero_node : build_int_cst (idxtype, 0));
13467 }
13468 
13469 /* Return a tree representing the upper bound of the array mentioned in
13470    EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
13471 
13472 tree
13473 array_ref_up_bound (tree exp)
13474 {
13475   tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13476 
13477   /* If there is a domain type and it has an upper bound, use it, substituting
13478      for a PLACEHOLDER_EXPR as needed.  */
13479   if (domain_type && TYPE_MAX_VALUE (domain_type))
13480     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
13481 
13482   /* Otherwise fail.  */
13483   return NULL_TREE;
13484 }
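
/* Illustrative sketch, not part of the original source: the three helpers
   above are typically used together when decomposing an ARRAY_REF, e.g.

     tree low  = array_ref_low_bound (exp);	// first valid index
     tree high = array_ref_up_bound (exp);	// may be NULL_TREE
     tree elsz = array_ref_element_size (exp);	// bytes per element

   EXP stands for an ARRAY_REF or ARRAY_RANGE_REF; bounds-checking code,
   for instance, compares the index TREE_OPERAND (exp, 1) against LOW and
   HIGH and scales offsets by ELSZ.  */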
13485 
13486 /* Returns true if REF is an array reference, component reference,
13487    or memory reference to an array at the end of a structure.
13488    If this is the case, the array may be allocated larger
13489    than its upper bound implies.  */
13490 
13491 bool
13492 array_at_struct_end_p (tree ref)
13493 {
13494   tree atype;
13495 
13496   if (TREE_CODE (ref) == ARRAY_REF
13497       || TREE_CODE (ref) == ARRAY_RANGE_REF)
13498     {
13499       atype = TREE_TYPE (TREE_OPERAND (ref, 0));
13500       ref = TREE_OPERAND (ref, 0);
13501     }
13502   else if (TREE_CODE (ref) == COMPONENT_REF
13503 	   && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
13504     atype = TREE_TYPE (TREE_OPERAND (ref, 1));
13505   else if (TREE_CODE (ref) == MEM_REF)
13506     {
13507       tree arg = TREE_OPERAND (ref, 0);
13508       if (TREE_CODE (arg) == ADDR_EXPR)
13509 	arg = TREE_OPERAND (arg, 0);
13510       tree argtype = TREE_TYPE (arg);
13511       if (TREE_CODE (argtype) == RECORD_TYPE)
13512 	{
13513 	  if (tree fld = last_field (argtype))
13514 	    {
13515 	      atype = TREE_TYPE (fld);
13516 	      if (TREE_CODE (atype) != ARRAY_TYPE)
13517 		return false;
13518 	      if (VAR_P (arg) && DECL_SIZE (fld))
13519 		return false;
13520 	    }
13521 	  else
13522 	    return false;
13523 	}
13524       else
13525 	return false;
13526     }
13527   else
13528     return false;
13529 
13530   if (TREE_CODE (ref) == STRING_CST)
13531     return false;
13532 
13533   tree ref_to_array = ref;
13534   while (handled_component_p (ref))
13535     {
13536       /* If the reference chain contains a component reference to a
13537          non-union type and another field follows, the reference
13538 	 is not at the end of a structure.  */
13539       if (TREE_CODE (ref) == COMPONENT_REF)
13540 	{
13541 	  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
13542 	    {
13543 	      tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
13544 	      while (nextf && TREE_CODE (nextf) != FIELD_DECL)
13545 		nextf = DECL_CHAIN (nextf);
13546 	      if (nextf)
13547 		return false;
13548 	    }
13549 	}
13550       /* If we have a multi-dimensional array we do not consider
13551          a non-innermost dimension as a flex array if the whole
13552 	 multi-dimensional array is at struct end.
13553 	 Same for an array of aggregates with a trailing array
13554 	 member.  */
13555       else if (TREE_CODE (ref) == ARRAY_REF)
13556 	return false;
13557       else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
13558 	;
13559       /* If we view the underlying object as something else, then what we
13560          have gathered up to now is what we have to rely on.  */
13561       else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
13562 	break;
13563       else
13564 	gcc_unreachable ();
13565 
13566       ref = TREE_OPERAND (ref, 0);
13567     }
13568 
13569   /* The array is now at struct end.  Treat flexible arrays as
13570      always subject to extension, even if only into padding constrained by
13571      an underlying decl.  */
13572   if (! TYPE_SIZE (atype)
13573       || ! TYPE_DOMAIN (atype)
13574       || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
13575     return true;
13576 
13577   if (TREE_CODE (ref) == MEM_REF
13578       && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
13579     ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
13580 
13581   /* If the reference is based on a declared entity, the size of the array
13582      is constrained by its given domain.  (Do not trust commons; see PR 69368.)  */
13583   if (DECL_P (ref)
13584       && !(flag_unconstrained_commons
13585 	   && VAR_P (ref) && DECL_COMMON (ref))
13586       && DECL_SIZE_UNIT (ref)
13587       && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
13588     {
13589       /* Check whether the array domain covers all of the available
13590          padding.  */
13591       poly_int64 offset;
13592       if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
13593 	  || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
13594           || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
13595 	return true;
13596       if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
13597 	return true;
13598 
13599       /* If at least one extra element fits it is a flexarray.  */
13600       if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
13601 		     - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
13602 		     + 2)
13603 		    * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
13604 		    wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
13605 	return true;
13606 
13607       return false;
13608     }
13609 
13610   return true;
13611 }
13612 
13613 /* Return a tree representing the offset, in bytes, of the field referenced
13614    by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */
13615 
13616 tree
13617 component_ref_field_offset (tree exp)
13618 {
13619   tree aligned_offset = TREE_OPERAND (exp, 2);
13620   tree field = TREE_OPERAND (exp, 1);
13621   location_t loc = EXPR_LOCATION (exp);
13622 
13623   /* If an offset was specified in the COMPONENT_REF, it's the offset measured
13624      in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
13625      value.  */
13626   if (aligned_offset)
13627     {
13628       /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13629 	 sizetype from another type of the same width and signedness.  */
13630       if (TREE_TYPE (aligned_offset) != sizetype)
13631 	aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
13632       return size_binop_loc (loc, MULT_EXPR, aligned_offset,
13633 			     size_int (DECL_OFFSET_ALIGN (field)
13634 				       / BITS_PER_UNIT));
13635     }
13636 
13637   /* Otherwise, take the offset from that of the field.  Substitute
13638      any PLACEHOLDER_EXPR that we have.  */
13639   else
13640     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
13641 }
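
/* Illustrative sketch, not part of the original source: the full byte
   position of a COMPONENT_REF's field combines this function with the
   field's bit offset, roughly

     tree field    = TREE_OPERAND (exp, 1);
     tree byte_off = component_ref_field_offset (exp);
     tree bit_off  = DECL_FIELD_BIT_OFFSET (field);

   consumers such as get_inner_reference are assumed to fold the two
   together; BYTE_OFF and BIT_OFF are just local placeholder names.  */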
13642 
13643 /* Given the initializer INIT, return the initializer for the field
13644    DECL if it exists, otherwise null.  Used to obtain the initializer
13645    for a flexible array member and determine its size.  */
13646 
13647 static tree
13648 get_initializer_for (tree init, tree decl)
13649 {
13650   STRIP_NOPS (init);
13651 
13652   tree fld, fld_init;
13653   unsigned HOST_WIDE_INT i;
13654   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), i, fld, fld_init)
13655     {
13656       if (decl == fld)
13657 	return fld_init;
13658 
13659       if (TREE_CODE (fld) == CONSTRUCTOR)
13660 	{
13661 	  fld_init = get_initializer_for (fld_init, decl);
13662 	  if (fld_init)
13663 	    return fld_init;
13664 	}
13665     }
13666 
13667   return NULL_TREE;
13668 }
13669 
13670 /* Determines the size of the member referenced by the COMPONENT_REF
13671    REF, using its initializer expression if necessary in order to
13672    determine the size of an initialized flexible array member.
13673    If non-null, *INTERIOR_ZERO_LENGTH is set when REF refers to
13674    an interior zero-length array.
13675    Returns the size as sizetype (which might be zero for an object
13676    with an uninitialized flexible array member) or null if the size
13677    cannot be determined.  */
13678 
13679 tree
13680 component_ref_size (tree ref, bool *interior_zero_length /* = NULL */)
13681 {
13682   gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
13683 
13684   bool int_0_len = false;
13685   if (!interior_zero_length)
13686     interior_zero_length = &int_0_len;
13687 
13688   /* The object/argument referenced by the COMPONENT_REF and its type.  */
13689   tree arg = TREE_OPERAND (ref, 0);
13690   tree argtype = TREE_TYPE (arg);
13691   /* The referenced member.  */
13692   tree member = TREE_OPERAND (ref, 1);
13693 
13694   tree memsize = DECL_SIZE_UNIT (member);
13695   if (memsize)
13696     {
13697       tree memtype = TREE_TYPE (member);
13698       if (TREE_CODE (memtype) != ARRAY_TYPE)
13699 	return memsize;
13700 
13701       bool trailing = array_at_struct_end_p (ref);
13702       bool zero_length = integer_zerop (memsize);
13703       if (!trailing && !zero_length)
13704 	/* MEMBER is either an interior array or is an array with
13705 	   more than one element.  */
13706 	return memsize;
13707 
13708       *interior_zero_length = zero_length && !trailing;
13709       if (*interior_zero_length)
13710 	memsize = NULL_TREE;
13711 
13712       if (!zero_length)
13713 	if (tree dom = TYPE_DOMAIN (memtype))
13714 	  if (tree min = TYPE_MIN_VALUE (dom))
13715 	    if (tree max = TYPE_MAX_VALUE (dom))
13716 	      if (TREE_CODE (min) == INTEGER_CST
13717 		  && TREE_CODE (max) == INTEGER_CST)
13718 		{
13719 		  offset_int minidx = wi::to_offset (min);
13720 		  offset_int maxidx = wi::to_offset (max);
13721 		  if (maxidx - minidx > 0)
13722 		    /* MEMBER is an array with more than one element.  */
13723 		    return memsize;
13724 		}
13725 
13726       /* For a reference to a zero- or one-element array member of a union
13727 	 use the size of the union instead of the size of the member.  */
13728       if (TREE_CODE (argtype) == UNION_TYPE)
13729 	memsize = TYPE_SIZE_UNIT (argtype);
13730     }
13731 
13732   /* MEMBER is either a bona fide flexible array member, or a zero-length
13733      array member, or an array of length one treated as such.  */
13734 
13735   /* If the reference is to a declared object and the member a true
13736      flexible array, try to determine its size from its initializer.  */
13737   poly_int64 baseoff = 0;
13738   tree base = get_addr_base_and_unit_offset (ref, &baseoff);
13739   if (!base || !VAR_P (base))
13740     {
13741       if (!*interior_zero_length)
13742 	return NULL_TREE;
13743 
13744       if (TREE_CODE (arg) != COMPONENT_REF)
13745 	return NULL_TREE;
13746 
13747       base = arg;
13748       while (TREE_CODE (base) == COMPONENT_REF)
13749 	base = TREE_OPERAND (base, 0);
13750       baseoff = tree_to_poly_int64 (byte_position (TREE_OPERAND (ref, 1)));
13751     }
13752 
13753   /* BASE is the declared object of which MEMBER is either a member
13754      or that is cast to ARGTYPE (e.g., a char buffer used to store
13755      an ARGTYPE object).  */
13756   tree basetype = TREE_TYPE (base);
13757 
13758   /* Determine the base type of the referenced object.  If it's
13759      the same as ARGTYPE and MEMBER has a known size, return it.  */
13760   tree bt = basetype;
13761   if (!*interior_zero_length)
13762     while (TREE_CODE (bt) == ARRAY_TYPE)
13763       bt = TREE_TYPE (bt);
13764   bool typematch = useless_type_conversion_p (argtype, bt);
13765   if (memsize && typematch)
13766     return memsize;
13767 
13768   memsize = NULL_TREE;
13769 
13770   if (typematch)
13771     /* MEMBER is a true flexible array member.  Compute its size from
13772        the initializer of the BASE object if it has one.  */
13773     if (tree init = DECL_P (base) ? DECL_INITIAL (base) : NULL_TREE)
13774       if (init != error_mark_node)
13775 	{
13776 	  init = get_initializer_for (init, member);
13777 	  if (init)
13778 	    {
13779 	      memsize = TYPE_SIZE_UNIT (TREE_TYPE (init));
13780 	      if (tree refsize = TYPE_SIZE_UNIT (argtype))
13781 		{
13782 		  /* Use the larger of the initializer size and the tail
13783 		     padding in the enclosing struct.  */
13784 		  poly_int64 rsz = tree_to_poly_int64 (refsize);
13785 		  rsz -= baseoff;
13786 		  if (known_lt (tree_to_poly_int64 (memsize), rsz))
13787 		    memsize = wide_int_to_tree (TREE_TYPE (memsize), rsz);
13788 		}
13789 
13790 	      baseoff = 0;
13791 	    }
13792 	}
13793 
13794   if (!memsize)
13795     {
13796       if (typematch)
13797 	{
13798 	  if (DECL_P (base)
13799 	      && DECL_EXTERNAL (base)
13800 	      && bt == basetype
13801 	      && !*interior_zero_length)
13802 	    /* The size of a flexible array member of an extern struct
13803 	       with no initializer cannot be determined (it's defined
13804 	       in another translation unit and can have an initializer
13805 	       with an arbitrary number of elements).  */
13806 	    return NULL_TREE;
13807 
13808 	  /* Use the size of the base struct or, for interior zero-length
13809 	     arrays, the size of the enclosing type.  */
13810 	  memsize = TYPE_SIZE_UNIT (bt);
13811 	}
13812       else if (DECL_P (base))
13813 	/* Use the size of the BASE object (possibly an array of some
13814 	   other type such as char used to store the struct).  */
13815 	memsize = DECL_SIZE_UNIT (base);
13816       else
13817 	return NULL_TREE;
13818     }
13819 
13820   /* If the flexible array member has a known size use the greater
13821      of it and the tail padding in the enclosing struct.
13822      Otherwise, when the size of the flexible array member is unknown
13823      and the referenced object is not a struct, use the size of its
13824      type when known.  This detects sizes of array buffers when cast
13825      to struct types with flexible array members.  */
13826   if (memsize)
13827     {
13828       poly_int64 memsz64 = memsize ? tree_to_poly_int64 (memsize) : 0;
13829       if (known_lt (baseoff, memsz64))
13830 	{
13831 	  memsz64 -= baseoff;
13832 	  return wide_int_to_tree (TREE_TYPE (memsize), memsz64);
13833 	}
13834       return size_zero_node;
13835     }
13836 
13837   /* Return "don't know" for an external non-array object since its
13838      flexible array member can be initialized to have any number of
13839      elements.  Otherwise, return zero because the flexible array
13840      member has no elements.  */
13841   return (DECL_P (base)
13842 	  && DECL_EXTERNAL (base)
13843 	  && (!typematch
13844 	      || TREE_CODE (basetype) != ARRAY_TYPE)
13845 	  ? NULL_TREE : size_zero_node);
13846 }
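
/* Illustrative sketch, not part of the original source: access-checking
   code can use the function above along the lines of

     bool interior = false;
     if (tree size = component_ref_size (ref, &interior))
       ;  // SIZE is the accessible size in bytes (sizetype), possibly
	  // derived from the initializer of a flexible array member

   REF stands for a COMPONENT_REF, as required by the assertion at the top
   of the function.  */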
13847 
13848 /* Return the machine mode of T.  For vectors, returns the mode of the
13849    inner type.  The main use case is to feed the result to HONOR_NANS,
13850    avoiding the BLKmode that a direct TYPE_MODE (T) might return.  */
13851 
13852 machine_mode
13853 element_mode (const_tree t)
13854 {
13855   if (!TYPE_P (t))
13856     t = TREE_TYPE (t);
13857   if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13858     t = TREE_TYPE (t);
13859   return TYPE_MODE (t);
13860 }
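
/* Illustrative sketch, not part of the original source: the main use case
   named above is feeding HONOR_NANS and friends, e.g.

     if (HONOR_NANS (element_mode (type)))
       ;  // be careful with NaN-sensitive transformations

   which works uniformly for scalar, complex and vector TYPEs because the
   inner element mode is returned.  */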
13861 
13862 /* Vector types need to re-check the target flags each time we report
13863    the machine mode.  We need to do this because attribute target can
13864    change the result of vector_mode_supported_p and have_regs_of_mode
13865    on a per-function basis.  Thus the TYPE_MODE of a VECTOR_TYPE can
13866    change on a per-function basis.  */
13867 /* ??? Possibly a better solution is to run through all the types
13868    referenced by a function and re-compute the TYPE_MODE once, rather
13869    than make the TYPE_MODE macro call a function.  */
13870 
13871 machine_mode
13872 vector_type_mode (const_tree t)
13873 {
13874   machine_mode mode;
13875 
13876   gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
13877 
13878   mode = t->type_common.mode;
13879   if (VECTOR_MODE_P (mode)
13880       && (!targetm.vector_mode_supported_p (mode)
13881 	  || !have_regs_of_mode[mode]))
13882     {
13883       scalar_int_mode innermode;
13884 
13885       /* For integers, try mapping it to a same-sized scalar mode.  */
13886       if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
13887 	{
13888 	  poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
13889 			     * GET_MODE_BITSIZE (innermode));
13890 	  scalar_int_mode mode;
13891 	  if (int_mode_for_size (size, 0).exists (&mode)
13892 	      && have_regs_of_mode[mode])
13893 	    return mode;
13894 	}
13895 
13896       return BLKmode;
13897     }
13898 
13899   return mode;
13900 }
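
/* Illustrative sketch, not part of the original source: as the comment
   above says, the TYPE_MODE macro dispatches here for vector types,
   conceptually

     machine_mode m = (VECTOR_TYPE_P (type)
		       ? vector_type_mode (type)
		       : type->type_common.mode);

   so target-attribute changes are reflected without recomputing the mode
   stored in the type.  */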
13901 
13902 /* Verify that basic properties of T match TV and thus T can be a variant of
13903    TV.  TV should be the more specified variant (i.e. the main variant).  */
13904 
13905 static bool
13906 verify_type_variant (const_tree t, tree tv)
13907 {
13908   /* Type variant can differ by:
13909 
13910      - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13911                    ENCODE_QUAL_ADDR_SPACE.
13912      - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
13913        in this case some values may not be set in the variant types
13914        (see TYPE_COMPLETE_P checks).
13915      - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
13916      - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
13917      - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13918      - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13919      - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
13920        this is necessary to make it possible to merge types from different TUs
13921      - arrays, pointers and references may have TREE_TYPE that is a variant
13922        of TREE_TYPE of their main variants.
13923      - aggregates may have new TYPE_FIELDS list that list variants of
13924        the main variant TYPE_FIELDS.
13925      - vector types may differ by TYPE_VECTOR_OPAQUE
13926    */
13927 
13928   /* Convenience macro for matching individual fields.  */
13929 #define verify_variant_match(flag)					    \
13930   do {									    \
13931     if (flag (tv) != flag (t))						    \
13932       {									    \
13933 	error ("type variant differs by %s", #flag);			    \
13934 	debug_tree (tv);						    \
13935 	return false;							    \
13936       }									    \
13937   } while (false)
13938 
13939   /* tree_base checks.  */
13940 
13941   verify_variant_match (TREE_CODE);
13942   /* FIXME: Ada builds non-artificial variants of artificial types.  */
13943   if (TYPE_ARTIFICIAL (tv) && 0)
13944     verify_variant_match (TYPE_ARTIFICIAL);
13945   if (POINTER_TYPE_P (tv))
13946     verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
13947   /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds.  */
13948   verify_variant_match (TYPE_UNSIGNED);
13949   verify_variant_match (TYPE_PACKED);
13950   if (TREE_CODE (t) == REFERENCE_TYPE)
13951     verify_variant_match (TYPE_REF_IS_RVALUE);
13952   if (AGGREGATE_TYPE_P (t))
13953     verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
13954   else
13955     verify_variant_match (TYPE_SATURATING);
13956   /* FIXME: This check triggers during the libstdc++ build.  */
13957   if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
13958     verify_variant_match (TYPE_FINAL_P);
13959 
13960   /* tree_type_common checks.  */
13961 
13962   if (COMPLETE_TYPE_P (t))
13963     {
13964       verify_variant_match (TYPE_MODE);
13965       if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
13966 	  && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
13967 	verify_variant_match (TYPE_SIZE);
13968       if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
13969 	  && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
13970 	  && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
13971 	{
13972 	  gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
13973 					TYPE_SIZE_UNIT (tv), 0));
13974 	  error ("type variant has different %<TYPE_SIZE_UNIT%>");
13975 	  debug_tree (tv);
13976 	  error ("type variant%'s %<TYPE_SIZE_UNIT%>");
13977 	  debug_tree (TYPE_SIZE_UNIT (tv));
13978 	  error ("type%'s %<TYPE_SIZE_UNIT%>");
13979 	  debug_tree (TYPE_SIZE_UNIT (t));
13980 	  return false;
13981 	}
13982       verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13983     }
13984   verify_variant_match (TYPE_PRECISION);
13985   if (RECORD_OR_UNION_TYPE_P (t))
13986     verify_variant_match (TYPE_TRANSPARENT_AGGR);
13987   else if (TREE_CODE (t) == ARRAY_TYPE)
13988     verify_variant_match (TYPE_NONALIASED_COMPONENT);
13989   /* During LTO we merge variant lists from different translation units
13990      that may differ by TYPE_CONTEXT, which in turn may point
13991      to TRANSLATION_UNIT_DECL.
13992      Ada also builds variants of types with different TYPE_CONTEXT.   */
13993   if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
13994     verify_variant_match (TYPE_CONTEXT);
13995   if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
13996     verify_variant_match (TYPE_STRING_FLAG);
13997   if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
13998     verify_variant_match (TYPE_CXX_ODR_P);
13999   if (TYPE_ALIAS_SET_KNOWN_P (t))
14000     {
14001       error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
14002       debug_tree (tv);
14003       return false;
14004     }
14005 
14006   /* tree_type_non_common checks.  */
14007 
14008   /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
14009      and dangle the pointer from time to time.  */
14010   if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
14011       && (in_lto_p || !TYPE_VFIELD (tv)
14012 	  || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
14013     {
14014       error ("type variant has different %<TYPE_VFIELD%>");
14015       debug_tree (tv);
14016       return false;
14017     }
14018   if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
14019        || TREE_CODE (t) == INTEGER_TYPE
14020        || TREE_CODE (t) == BOOLEAN_TYPE
14021        || TREE_CODE (t) == REAL_TYPE
14022        || TREE_CODE (t) == FIXED_POINT_TYPE)
14023     {
14024       verify_variant_match (TYPE_MAX_VALUE);
14025       verify_variant_match (TYPE_MIN_VALUE);
14026     }
14027   if (TREE_CODE (t) == METHOD_TYPE)
14028     verify_variant_match (TYPE_METHOD_BASETYPE);
14029   if (TREE_CODE (t) == OFFSET_TYPE)
14030     verify_variant_match (TYPE_OFFSET_BASETYPE);
14031   if (TREE_CODE (t) == ARRAY_TYPE)
14032     verify_variant_match (TYPE_ARRAY_MAX_SIZE);
14033   /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
14034      or even the type's main variant.  This is needed to make the bootstrap pass
14035      and the bug seems new in GCC 5.
14036      C++ FE should be updated to make this consistent and we should check
14037      that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
14038      is a match with main variant.
14039 
14040      Also disable the check for Java for now because of a parser hack that first
14041      builds a dummy BINFO and then sometimes replaces it with the real BINFO in some
14042      of the copies.  */
14043   if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
14044       && TYPE_BINFO (t) != TYPE_BINFO (tv)
14045       /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
14046 	 Since there is no cheap way to tell C++ from Java types without LTO,
14047 	 do the checking at LTO time only.  */
14048       && (in_lto_p && odr_type_p (t)))
14049     {
14050       error ("type variant has different %<TYPE_BINFO%>");
14051       debug_tree (tv);
14052       error ("type variant%'s %<TYPE_BINFO%>");
14053       debug_tree (TYPE_BINFO (tv));
14054       error ("type%'s %<TYPE_BINFO%>");
14055       debug_tree (TYPE_BINFO (t));
14056       return false;
14057     }
14058 
14059   /* Check various uses of TYPE_VALUES_RAW.  */
14060   if (TREE_CODE (t) == ENUMERAL_TYPE
14061       && TYPE_VALUES (t))
14062     verify_variant_match (TYPE_VALUES);
14063   else if (TREE_CODE (t) == ARRAY_TYPE)
14064     verify_variant_match (TYPE_DOMAIN);
14065   /* Permit incomplete variants of complete type.  While FEs may complete
14066      all variants, this does not happen for C++ templates in all cases.  */
14067   else if (RECORD_OR_UNION_TYPE_P (t)
14068 	   && COMPLETE_TYPE_P (t)
14069 	   && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
14070     {
14071       tree f1, f2;
14072 
14073       /* Fortran builds qualified variants as new records with items of
14074 	 qualified type.  Verify that they look the same.  */
14075       for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
14076 	   f1 && f2;
14077 	   f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
14078 	if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
14079 	    || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
14080 		 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
14081 		/* FIXME: gfc_nonrestricted_type builds all types as variants
14082 		   with exception of pointer types.  It deeply copies the type
14083 		   which means that we may end up with a variant type
14084 		   referring non-variant pointer.  We may change it to
14085 		   produce types as variants, too, like
14086 		   objc_get_protocol_qualified_type does.  */
14087 		&& !POINTER_TYPE_P (TREE_TYPE (f1)))
14088 	    || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
14089 	    || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
14090 	  break;
14091       if (f1 || f2)
14092 	{
14093 	  error ("type variant has different %<TYPE_FIELDS%>");
14094 	  debug_tree (tv);
14095 	  error ("first mismatch is field");
14096 	  debug_tree (f1);
14097 	  error ("and field");
14098 	  debug_tree (f2);
14099           return false;
14100 	}
14101     }
14102   else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
14103     verify_variant_match (TYPE_ARG_TYPES);
14104   /* For C++ the qualified variant of an array type is really an array type
14105      of the qualified TREE_TYPE.
14106      Objective-C builds variants of pointer types where the pointed-to type is
14107      a variant too, in objc_get_protocol_qualified_type.  */
14108   if (TREE_TYPE (t) != TREE_TYPE (tv)
14109       && ((TREE_CODE (t) != ARRAY_TYPE
14110 	   && !POINTER_TYPE_P (t))
14111 	  || TYPE_MAIN_VARIANT (TREE_TYPE (t))
14112 	     != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
14113     {
14114       error ("type variant has different %<TREE_TYPE%>");
14115       debug_tree (tv);
14116       error ("type variant%'s %<TREE_TYPE%>");
14117       debug_tree (TREE_TYPE (tv));
14118       error ("type%'s %<TREE_TYPE%>");
14119       debug_tree (TREE_TYPE (t));
14120       return false;
14121     }
14122   if (type_with_alias_set_p (t)
14123       && !gimple_canonical_types_compatible_p (t, tv, false))
14124     {
14125       error ("type is not compatible with its variant");
14126       debug_tree (tv);
14127       error ("type variant%'s %<TREE_TYPE%>");
14128       debug_tree (TREE_TYPE (tv));
14129       error ("type%'s %<TREE_TYPE%>");
14130       debug_tree (TREE_TYPE (t));
14131       return false;
14132     }
14133   return true;
14134 #undef verify_variant_match
14135 }
14136 
14137 
14138 /* The TYPE_CANONICAL merging machinery.  It should closely resemble
14139    the middle-end types_compatible_p function.  It needs to avoid
14140    claiming types are different for types that should be treated
14141    the same with respect to TBAA.  Canonical types are also used
14142    for IL consistency checks via the useless_type_conversion_p
14143    predicate which does not handle all type kinds itself but falls
14144    back to pointer-comparison of TYPE_CANONICAL for aggregates
14145    for example.  */
14146 
14147 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
14148    type calculation because we need to allow inter-operability between signed
14149    and unsigned variants.  */
14150 
14151 bool
14152 type_with_interoperable_signedness (const_tree type)
14153 {
14154   /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
14155      signed char and unsigned char.  Similarly the Fortran FE builds
14156      C_SIZE_T as a signed type, while C defines it unsigned.  */
14157 
14158   return tree_code_for_canonical_type_merging (TREE_CODE (type))
14159 	   == INTEGER_TYPE
14160          && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
14161 	     || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
14162 }
14163 
14164 /* Return true iff T1 and T2 are structurally identical for what
14165    TBAA is concerned.
14166    This function is used both by lto.c canonical type merging and by the
14167    verifier.  If TRUST_TYPE_CANONICAL we do not look into structure of types
14168    that have TYPE_CANONICAL defined and assume them equivalent.  This is useful
14169    only for LTO because only in these cases TYPE_CANONICAL equivalence
14170    corresponds to the one defined by gimple_canonical_types_compatible_p.  */
14171 
14172 bool
14173 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
14174 				     bool trust_type_canonical)
14175 {
14176   /* Type variants should be same as the main variant.  When not doing sanity
14177      checking to verify this fact, go to main variants and save some work.  */
14178   if (trust_type_canonical)
14179     {
14180       t1 = TYPE_MAIN_VARIANT (t1);
14181       t2 = TYPE_MAIN_VARIANT (t2);
14182     }
14183 
14184   /* Check first for the obvious case of pointer identity.  */
14185   if (t1 == t2)
14186     return true;
14187 
14188   /* Check that we have two types to compare.  */
14189   if (t1 == NULL_TREE || t2 == NULL_TREE)
14190     return false;
14191 
14192   /* We consider complete types always compatible with incomplete type.
14193      This does not make sense for canonical type calculation and thus we
14194      need to ensure that we are never called on it.
14195 
14196      FIXME: For more correctness the function probably should have three modes
14197 	1) mode assuming that types are complete, matching their structure
14198 	2) mode allowing incomplete types but producing equivalence classes
14199 	   and thus ignoring all info from complete types
14200 	3) mode allowing incomplete types to match complete but checking
14201 	   compatibility between complete types.
14202 
14203      1 and 2 can be used for canonical type calculation. 3 is the real
14204      definition of type compatibility that can be used e.g. for warnings during
14205      declaration merging.  */
14206 
14207   gcc_assert (!trust_type_canonical
14208 	      || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
14209 
14210   /* If the types have been previously registered and found equal
14211      they still are.  */
14212 
14213   if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
14214       && trust_type_canonical)
14215     {
14216       /* Do not use TYPE_CANONICAL of pointer types.  For LTO streamed types
14217 	 they are always NULL, but they are set to non-NULL for types
14218 	 constructed by build_pointer_type and variants.  In this case the
14219 	 TYPE_CANONICAL is more fine-grained than the equivalence we test (where
14220 	 all pointers are considered equal).  Be sure not to return false
14221 	 negatives.  */
14222       gcc_checking_assert (canonical_type_used_p (t1)
14223 			   && canonical_type_used_p (t2));
14224       return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
14225     }
14226 
14227   /* For types where we do ODR based TBAA the canonical type is always
14228      set correctly, so we know that types are different if their
14229      canonical types do not match.  */
14230   if (trust_type_canonical
14231       && (odr_type_p (t1) && odr_based_tbaa_p (t1))
14232 	  != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
14233     return false;
14234 
14235   /* Can't be the same type if the types don't have the same code.  */
14236   enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
14237   if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
14238     return false;
14239 
14240   /* Qualifiers do not matter for canonical type comparison purposes.  */
14241 
14242   /* Void types and nullptr types are always the same.  */
14243   if (TREE_CODE (t1) == VOID_TYPE
14244       || TREE_CODE (t1) == NULLPTR_TYPE)
14245     return true;
14246 
14247   /* Can't be the same type if they have different mode.  */
14248   if (TYPE_MODE (t1) != TYPE_MODE (t2))
14249     return false;
14250 
14251   /* Non-aggregate types can be handled cheaply.  */
14252   if (INTEGRAL_TYPE_P (t1)
14253       || SCALAR_FLOAT_TYPE_P (t1)
14254       || FIXED_POINT_TYPE_P (t1)
14255       || TREE_CODE (t1) == VECTOR_TYPE
14256       || TREE_CODE (t1) == COMPLEX_TYPE
14257       || TREE_CODE (t1) == OFFSET_TYPE
14258       || POINTER_TYPE_P (t1))
14259     {
14260       /* Can't be the same type if they have different precision.  */
14261       if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
14262 	return false;
14263 
14264       /* In some cases the signed and unsigned types are required to be
14265 	 inter-operable.  */
14266       if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
14267 	  && !type_with_interoperable_signedness (t1))
14268 	return false;
14269 
14270       /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
14271 	 interoperable with "signed char".  Unless all frontends are revisited
14272 	 to agree on these types, we must ignore the flag completely.  */
14273 
14274       /* The Fortran standard defines a C_PTR type that is compatible with every
14275  	 C pointer.  For this reason we need to glob all pointers into one.
14276 	 Still, pointers in different address spaces are not compatible.  */
14277       if (POINTER_TYPE_P (t1))
14278 	{
14279 	  if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
14280 	      != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
14281 	    return false;
14282 	}
14283 
14284       /* Tail-recurse to components.  */
14285       if (TREE_CODE (t1) == VECTOR_TYPE
14286 	  || TREE_CODE (t1) == COMPLEX_TYPE)
14287 	return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
14288 						    TREE_TYPE (t2),
14289 						    trust_type_canonical);
14290 
14291       return true;
14292     }
14293 
14294   /* Do type-specific comparisons.  */
14295   switch (TREE_CODE (t1))
14296     {
14297     case ARRAY_TYPE:
14298       /* Array types are the same if the element types are the same and
14299 	 the number of elements are the same.  */
14300       if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
14301 						trust_type_canonical)
14302 	  || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
14303 	  || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
14304 	  || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
14305 	return false;
14306       else
14307 	{
14308 	  tree i1 = TYPE_DOMAIN (t1);
14309 	  tree i2 = TYPE_DOMAIN (t2);
14310 
14311 	  /* For an incomplete external array, the type domain can be
14312  	     NULL_TREE.  Check this condition also.  */
14313 	  if (i1 == NULL_TREE && i2 == NULL_TREE)
14314 	    return true;
14315 	  else if (i1 == NULL_TREE || i2 == NULL_TREE)
14316 	    return false;
14317 	  else
14318 	    {
14319 	      tree min1 = TYPE_MIN_VALUE (i1);
14320 	      tree min2 = TYPE_MIN_VALUE (i2);
14321 	      tree max1 = TYPE_MAX_VALUE (i1);
14322 	      tree max2 = TYPE_MAX_VALUE (i2);
14323 
14324 	      /* The minimum/maximum values have to be the same.  */
14325 	      if ((min1 == min2
14326 		   || (min1 && min2
14327 		       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
14328 			    && TREE_CODE (min2) == PLACEHOLDER_EXPR)
14329 		           || operand_equal_p (min1, min2, 0))))
14330 		  && (max1 == max2
14331 		      || (max1 && max2
14332 			  && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
14333 			       && TREE_CODE (max2) == PLACEHOLDER_EXPR)
14334 			      || operand_equal_p (max1, max2, 0)))))
14335 		return true;
14336 	      else
14337 		return false;
14338 	    }
14339 	}
14340 
14341     case METHOD_TYPE:
14342     case FUNCTION_TYPE:
14343       /* Function types are the same if the return type and arguments types
14344 	 are the same.  */
14345       if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
14346 						trust_type_canonical))
14347 	return false;
14348 
14349       if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
14350 	return true;
14351       else
14352 	{
14353 	  tree parms1, parms2;
14354 
14355 	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
14356 	       parms1 && parms2;
14357 	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
14358 	    {
14359 	      if (!gimple_canonical_types_compatible_p
14360 		     (TREE_VALUE (parms1), TREE_VALUE (parms2),
14361 		      trust_type_canonical))
14362 		return false;
14363 	    }
14364 
14365 	  if (parms1 || parms2)
14366 	    return false;
14367 
14368 	  return true;
14369 	}
14370 
14371     case RECORD_TYPE:
14372     case UNION_TYPE:
14373     case QUAL_UNION_TYPE:
14374       {
14375 	tree f1, f2;
14376 
14377 	/* Don't try to compare variants of an incomplete type, before
14378 	   TYPE_FIELDS has been copied around.  */
14379 	if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
14380 	  return true;
14381 
14382 
14383 	if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
14384 	  return false;
14385 
14386 	/* For aggregate types, all the fields must be the same.  */
14387 	for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
14388 	     f1 || f2;
14389 	     f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
14390 	  {
14391 	    /* Skip non-fields and zero-sized fields.  */
14392 	    while (f1 && (TREE_CODE (f1) != FIELD_DECL
14393 			  || (DECL_SIZE (f1)
14394 			      && integer_zerop (DECL_SIZE (f1)))))
14395 	      f1 = TREE_CHAIN (f1);
14396 	    while (f2 && (TREE_CODE (f2) != FIELD_DECL
14397 			  || (DECL_SIZE (f2)
14398 			      && integer_zerop (DECL_SIZE (f2)))))
14399 	      f2 = TREE_CHAIN (f2);
14400 	    if (!f1 || !f2)
14401 	      break;
14402 	    /* The fields must have the same name, offset and type.  */
14403 	    if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
14404 		|| !gimple_compare_field_offset (f1, f2)
14405 		|| !gimple_canonical_types_compatible_p
14406 		      (TREE_TYPE (f1), TREE_TYPE (f2),
14407 		       trust_type_canonical))
14408 	      return false;
14409 	  }
14410 
14411 	/* If one aggregate has more fields than the other, they
14412 	   are not the same.  */
14413 	if (f1 || f2)
14414 	  return false;
14415 
14416 	return true;
14417       }
14418 
14419     default:
14420       /* Consider all types with language specific trees in them mutually
14421 	 compatible.  This is executed only from verify_type and false
14422          positives can be tolerated.  */
14423       gcc_assert (!in_lto_p);
14424       return true;
14425     }
14426 }
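
/* Illustrative sketch, not part of the original source: the verifier below
   calls this function with TRUST_TYPE_CANONICAL cleared, i.e.

     if (!gimple_canonical_types_compatible_p (t, TYPE_CANONICAL (t), false))
       ;  // structural mismatch between a type and its canonical type

   whereas the LTO canonical-type merging path passes true and may rely on
   previously computed TYPE_CANONICAL values, as described above.  */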
14427 
14428 /* Verify type T.  */
14429 
14430 void
14431 verify_type (const_tree t)
14432 {
14433   bool error_found = false;
14434   tree mv = TYPE_MAIN_VARIANT (t);
14435   if (!mv)
14436     {
14437       error ("main variant is not defined");
14438       error_found = true;
14439     }
14440   else if (mv != TYPE_MAIN_VARIANT (mv))
14441     {
14442       error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
14443       debug_tree (mv);
14444       error_found = true;
14445     }
14446   else if (t != mv && !verify_type_variant (t, mv))
14447     error_found = true;
14448 
14449   tree ct = TYPE_CANONICAL (t);
14450   if (!ct)
14451     ;
14452   else if (TYPE_CANONICAL (ct) != ct)
14453     {
14454       error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
14455       debug_tree (ct);
14456       error_found = true;
14457     }
14458   /* Method and function types cannot be used to address memory and thus
14459      TYPE_CANONICAL really matters only for determining useless conversions.
14460 
14461      FIXME: the C++ FE produces declarations of builtin functions that are not
14462      compatible with main variants.  */
14463   else if (TREE_CODE (t) == FUNCTION_TYPE)
14464     ;
14465   else if (t != ct
14466 	   /* FIXME: gimple_canonical_types_compatible_p cannot compare types
14467 	      with variably sized arrays because their sizes were possibly
14468 	      gimplified to different variables.  */
14469 	   && !variably_modified_type_p (ct, NULL)
14470 	   && !gimple_canonical_types_compatible_p (t, ct, false)
14471 	   && COMPLETE_TYPE_P (t))
14472     {
14473       error ("%<TYPE_CANONICAL%> is not compatible");
14474       debug_tree (ct);
14475       error_found = true;
14476     }
14477 
14478   if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
14479       && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
14480     {
14481       error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
14482       debug_tree (ct);
14483       error_found = true;
14484     }
14485   if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
14486    {
14487       error ("%<TYPE_CANONICAL%> of main variant is not main variant");
14488       debug_tree (ct);
14489       debug_tree (TYPE_MAIN_VARIANT (ct));
14490       error_found = true;
14491    }
14492 
14493 
14494   /* Check various uses of TYPE_MIN_VALUE_RAW.  */
14495   if (RECORD_OR_UNION_TYPE_P (t))
14496     {
14497       /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
14498 	 and dangle the pointer from time to time.  */
14499       if (TYPE_VFIELD (t)
14500 	  && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
14501 	  && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
14502 	{
14503 	  error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
14504 	  debug_tree (TYPE_VFIELD (t));
14505 	  error_found = true;
14506 	}
14507     }
14508   else if (TREE_CODE (t) == POINTER_TYPE)
14509     {
14510       if (TYPE_NEXT_PTR_TO (t)
14511 	  && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
14512 	{
14513 	  error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
14514 	  debug_tree (TYPE_NEXT_PTR_TO (t));
14515 	  error_found = true;
14516 	}
14517     }
14518   else if (TREE_CODE (t) == REFERENCE_TYPE)
14519     {
14520       if (TYPE_NEXT_REF_TO (t)
14521 	  && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
14522 	{
14523 	  error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
14524 	  debug_tree (TYPE_NEXT_REF_TO (t));
14525 	  error_found = true;
14526 	}
14527     }
14528   else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14529 	   || TREE_CODE (t) == FIXED_POINT_TYPE)
14530     {
14531       /* FIXME: The following check should pass:
14532 	  useless_type_conversion_p (const_cast <tree> (t),
14533 				     TREE_TYPE (TYPE_MIN_VALUE (t)))
14534 	 but does not for C sizetypes in LTO.  */
14535     }
14536 
14537   /* Check various uses of TYPE_MAXVAL_RAW.  */
14538   if (RECORD_OR_UNION_TYPE_P (t))
14539     {
14540       if (!TYPE_BINFO (t))
14541 	;
14542       else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
14543 	{
14544 	  error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
14545 	  debug_tree (TYPE_BINFO (t));
14546 	  error_found = true;
14547 	}
14548       else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
14549 	{
14550 	  error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
14551 	  debug_tree (TREE_TYPE (TYPE_BINFO (t)));
14552 	  error_found = true;
14553 	}
14554     }
14555   else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14556     {
14557       if (TYPE_METHOD_BASETYPE (t)
14558 	  && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
14559 	  && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
14560 	{
14561 	  error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
14562 	  debug_tree (TYPE_METHOD_BASETYPE (t));
14563 	  error_found = true;
14564 	}
14565     }
14566   else if (TREE_CODE (t) == OFFSET_TYPE)
14567     {
14568       if (TYPE_OFFSET_BASETYPE (t)
14569 	  && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
14570 	  && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
14571 	{
14572 	  error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
14573 	  debug_tree (TYPE_OFFSET_BASETYPE (t));
14574 	  error_found = true;
14575 	}
14576     }
14577   else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14578 	   || TREE_CODE (t) == FIXED_POINT_TYPE)
14579     {
14580       /* FIXME: The following check should pass:
14581 	  useless_type_conversion_p (const_cast <tree> (t),
14582 				     TREE_TYPE (TYPE_MAX_VALUE (t)))
14583 	 but does not for C sizetypes in LTO.  */
14584     }
14585   else if (TREE_CODE (t) == ARRAY_TYPE)
14586     {
14587       if (TYPE_ARRAY_MAX_SIZE (t)
14588 	  && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
14589         {
14590 	  error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
14591 	  debug_tree (TYPE_ARRAY_MAX_SIZE (t));
14592 	  error_found = true;
14593         }
14594     }
14595   else if (TYPE_MAX_VALUE_RAW (t))
14596     {
14597       error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
14598       debug_tree (TYPE_MAX_VALUE_RAW (t));
14599       error_found = true;
14600     }
14601 
14602   if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
14603     {
14604       error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
14605       debug_tree (TYPE_LANG_SLOT_1 (t));
14606       error_found = true;
14607     }
14608 
14609   /* Check various uses of TYPE_VALUES_RAW.  */
14610   if (TREE_CODE (t) == ENUMERAL_TYPE)
14611     for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
14612       {
14613 	tree value = TREE_VALUE (l);
14614 	tree name = TREE_PURPOSE (l);
14615 
14616 	/* The C FE produces INTEGER_CST of INTEGER_TYPE, while the C++ FE uses
14617  	   CONST_DECL of ENUMERAL_TYPE.  */
14618 	if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
14619 	  {
14620 	    error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
14621 	    debug_tree (value);
14622 	    debug_tree (name);
14623 	    error_found = true;
14624 	  }
14625 	if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
14626 	    && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
14627 	  {
14628 	    error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
14629 		   "to the enum");
14630 	    debug_tree (value);
14631 	    debug_tree (name);
14632 	    error_found = true;
14633 	  }
14634 	if (TREE_CODE (name) != IDENTIFIER_NODE)
14635 	  {
14636 	    error ("enum value name is not %<IDENTIFIER_NODE%>");
14637 	    debug_tree (value);
14638 	    debug_tree (name);
14639 	    error_found = true;
14640 	  }
14641       }
14642   else if (TREE_CODE (t) == ARRAY_TYPE)
14643     {
14644       if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
14645 	{
14646 	  error ("array %<TYPE_DOMAIN%> is not integer type");
14647 	  debug_tree (TYPE_DOMAIN (t));
14648 	  error_found = true;
14649 	}
14650     }
14651   else if (RECORD_OR_UNION_TYPE_P (t))
14652     {
14653       if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
14654 	{
14655 	  error ("%<TYPE_FIELDS%> defined in incomplete type");
14656 	  error_found = true;
14657 	}
14658       for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
14659 	{
14660 	  /* TODO: verify properties of decls.  */
14661 	  if (TREE_CODE (fld) == FIELD_DECL)
14662 	    ;
14663 	  else if (TREE_CODE (fld) == TYPE_DECL)
14664 	    ;
14665 	  else if (TREE_CODE (fld) == CONST_DECL)
14666 	    ;
14667 	  else if (VAR_P (fld))
14668 	    ;
14669 	  else if (TREE_CODE (fld) == TEMPLATE_DECL)
14670 	    ;
14671 	  else if (TREE_CODE (fld) == USING_DECL)
14672 	    ;
14673 	  else if (TREE_CODE (fld) == FUNCTION_DECL)
14674 	    ;
14675 	  else
14676 	    {
14677 	      error ("wrong tree in %<TYPE_FIELDS%> list");
14678 	      debug_tree (fld);
14679 	      error_found = true;
14680 	    }
14681 	}
14682     }
14683   else if (TREE_CODE (t) == INTEGER_TYPE
14684 	   || TREE_CODE (t) == BOOLEAN_TYPE
14685 	   || TREE_CODE (t) == OFFSET_TYPE
14686 	   || TREE_CODE (t) == REFERENCE_TYPE
14687 	   || TREE_CODE (t) == NULLPTR_TYPE
14688 	   || TREE_CODE (t) == POINTER_TYPE)
14689     {
14690       if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
14691 	{
14692 	  error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
14693 		 "is %p",
14694 		 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
14695 	  error_found = true;
14696 	}
14697       else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
14698 	{
14699 	  error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
14700 	  debug_tree (TYPE_CACHED_VALUES (t));
14701 	  error_found = true;
14702 	}
14703       /* Verify just enough of the cache to ensure that no one copied it to a new
14704  	 type.  All copying should go through copy_node, which should clear it.  */
14705       else if (TYPE_CACHED_VALUES_P (t))
14706 	{
14707 	  int i;
14708 	  for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
14709 	    if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
14710 		&& TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
14711 	      {
14712 		error ("wrong %<TYPE_CACHED_VALUES%> entry");
14713 		debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
14714 		error_found = true;
14715 		break;
14716 	      }
14717 	}
14718     }
14719   else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14720     for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
14721       {
14722 	/* C++ FE uses TREE_PURPOSE to store initial values.  */
14723 	if (TREE_PURPOSE (l) && in_lto_p)
14724 	  {
14725 	    error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
14726 	    debug_tree (l);
14727 	    error_found = true;
14728 	  }
14729 	if (!TYPE_P (TREE_VALUE (l)))
14730 	  {
14731 	    error ("wrong entry in %<TYPE_ARG_TYPES%> list");
14732 	    debug_tree (l);
14733 	    error_found = true;
14734 	  }
14735       }
14736   else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
14737     {
14738       error ("%<TYPE_VALUES_RAW%> field is non-NULL");
14739       debug_tree (TYPE_VALUES_RAW (t));
14740       error_found = true;
14741     }
14742   if (TREE_CODE (t) != INTEGER_TYPE
14743       && TREE_CODE (t) != BOOLEAN_TYPE
14744       && TREE_CODE (t) != OFFSET_TYPE
14745       && TREE_CODE (t) != REFERENCE_TYPE
14746       && TREE_CODE (t) != NULLPTR_TYPE
14747       && TREE_CODE (t) != POINTER_TYPE
14748       && TYPE_CACHED_VALUES_P (t))
14749     {
14750       error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
14751       error_found = true;
14752     }
14753 
14754   /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
14755      TYPE_MAIN_VARIANT and it would be odd to add methods only to variants
14756      of a type. */
14757   if (TREE_CODE (t) == METHOD_TYPE
14758       && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
14759     {
14760 	error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
14761 	error_found = true;
14762     }
14763 
14764   if (error_found)
14765     {
14766       debug_tree (const_cast <tree> (t));
14767       internal_error ("%qs failed", __func__);
14768     }
14769 }
14770 
14771 
14772 /* Return 1 if ARG, interpreted as signed in its precision, is known to be
14773    always non-negative, 2 if ARG is known to be always negative, or 3 if
14774    ARG may be positive or negative.  */
14775 
14776 int
14777 get_range_pos_neg (tree arg)
14778 {
14779   if (arg == error_mark_node)
14780     return 3;
14781 
14782   int prec = TYPE_PRECISION (TREE_TYPE (arg));
14783   int cnt = 0;
14784   if (TREE_CODE (arg) == INTEGER_CST)
14785     {
14786       wide_int w = wi::sext (wi::to_wide (arg), prec);
14787       if (wi::neg_p (w))
14788 	return 2;
14789       else
14790 	return 1;
14791     }
14792   while (CONVERT_EXPR_P (arg)
14793 	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
14794 	 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
14795     {
14796       arg = TREE_OPERAND (arg, 0);
14797       /* Narrower value zero extended into wider type
14798 	 will always result in positive values.  */
14799       if (TYPE_UNSIGNED (TREE_TYPE (arg))
14800 	  && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
14801 	return 1;
14802       prec = TYPE_PRECISION (TREE_TYPE (arg));
14803       if (++cnt > 30)
14804 	return 3;
14805     }
14806 
14807   if (TREE_CODE (arg) != SSA_NAME)
14808     return 3;
14809   wide_int arg_min, arg_max;
14810   while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
14811     {
14812       gimple *g = SSA_NAME_DEF_STMT (arg);
14813       if (is_gimple_assign (g)
14814 	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
14815 	{
14816 	  tree t = gimple_assign_rhs1 (g);
14817 	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
14818 	      && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
14819 	    {
14820 	      if (TYPE_UNSIGNED (TREE_TYPE (t))
14821 		  && TYPE_PRECISION (TREE_TYPE (t)) < prec)
14822 		return 1;
14823 	      prec = TYPE_PRECISION (TREE_TYPE (t));
14824 	      arg = t;
14825 	      if (++cnt > 30)
14826 		return 3;
14827 	      continue;
14828 	    }
14829 	}
14830       return 3;
14831     }
14832   if (TYPE_UNSIGNED (TREE_TYPE (arg)))
14833     {
14834       /* For unsigned values, the "positive" range comes
14835 	 below the "negative" range.  */
14836       if (!wi::neg_p (wi::sext (arg_max, prec), SIGNED))
14837 	return 1;
14838       if (wi::neg_p (wi::sext (arg_min, prec), SIGNED))
14839 	return 2;
14840     }
14841   else
14842     {
14843       if (!wi::neg_p (wi::sext (arg_min, prec), SIGNED))
14844 	return 1;
14845       if (wi::neg_p (wi::sext (arg_max, prec), SIGNED))
14846 	return 2;
14847     }
14848   return 3;
14849 }
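
/* Illustrative sketch, not part of the original source: a caller of
   get_range_pos_neg typically dispatches on the tri-state result, e.g.

     int sign = get_range_pos_neg (arg);
     if (sign == 1)
       handle_known_nonnegative (arg);
     else if (sign == 2)
       handle_known_negative (arg);
     else
       handle_unknown_sign (arg);

   where ARG is an INTEGER_CST, SSA_NAME or conversion thereof, and the
   handle_* routines are hypothetical.  */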
14850 
14851 
14852 
14853 
14854 /* Return true if ARG is marked with the nonnull attribute in the
14855    current function signature.  */
14856 
14857 bool
14858 nonnull_arg_p (const_tree arg)
14859 {
14860   tree t, attrs, fntype;
14861   unsigned HOST_WIDE_INT arg_num;
14862 
14863   gcc_assert (TREE_CODE (arg) == PARM_DECL
14864 	      && (POINTER_TYPE_P (TREE_TYPE (arg))
14865 		  || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
14866 
14867   /* The static chain decl is always non-NULL.  */
14868   if (arg == cfun->static_chain_decl)
14869     return true;
14870 
14871   /* The THIS argument of a method is always non-NULL.  */
14872   if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
14873       && arg == DECL_ARGUMENTS (cfun->decl)
14874       && flag_delete_null_pointer_checks)
14875     return true;
14876 
14877   /* Values passed by reference are always non-NULL.  */
14878   if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
14879       && flag_delete_null_pointer_checks)
14880     return true;
14881 
14882   fntype = TREE_TYPE (cfun->decl);
14883   for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
14884     {
14885       attrs = lookup_attribute ("nonnull", attrs);
14886 
14887       /* If "nonnull" wasn't specified, we know nothing about the argument.  */
14888       if (attrs == NULL_TREE)
14889 	return false;
14890 
14891       /* If "nonnull" applies to all the arguments, then ARG is non-null.  */
14892       if (TREE_VALUE (attrs) == NULL_TREE)
14893 	return true;
14894 
14895       /* Get the position number for ARG in the function signature.  */
14896       for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
14897 	   t;
14898 	   t = DECL_CHAIN (t), arg_num++)
14899 	{
14900 	  if (t == arg)
14901 	    break;
14902 	}
14903 
14904       gcc_assert (t == arg);
14905 
14906       /* Now see if ARG_NUM is mentioned in the nonnull list.  */
14907       for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
14908 	{
14909 	  if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
14910 	    return true;
14911 	}
14912     }
14913 
14914   return false;
14915 }
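
/* Illustrative sketch, not part of the original source: for a function
   compiled from C source such as the hypothetical declaration

     void f (char *p, char *q) __attribute__ ((nonnull (2)));

   nonnull_arg_p returns false for the PARM_DECL of P and true for the
   PARM_DECL of Q, since the attribute lists argument positions starting
   at 1 (see the ARG_NUM loop above).  */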
14916 
14917 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14918    information.  */
14919 
14920 location_t
14921 set_block (location_t loc, tree block)
14922 {
14923   location_t pure_loc = get_pure_location (loc);
14924   source_range src_range = get_range_from_loc (line_table, loc);
14925   return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
14926 }
14927 
14928 location_t
14929 set_source_range (tree expr, location_t start, location_t finish)
14930 {
14931   source_range src_range;
14932   src_range.m_start = start;
14933   src_range.m_finish = finish;
14934   return set_source_range (expr, src_range);
14935 }
14936 
14937 location_t
14938 set_source_range (tree expr, source_range src_range)
14939 {
14940   if (!EXPR_P (expr))
14941     return UNKNOWN_LOCATION;
14942 
14943   location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
14944   location_t adhoc = COMBINE_LOCATION_DATA (line_table,
14945 					    pure_loc,
14946 					    src_range,
14947 					    NULL);
14948   SET_EXPR_LOCATION (expr, adhoc);
14949   return adhoc;
14950 }
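
/* Illustrative sketch, not part of the original source: a front end that has
   just parsed an expression EXPR spanning locations START to FINISH could
   record the range with

     set_source_range (expr, start, finish);

   which folds the range into an ad-hoc location on EXPR.  EXPR, START and
   FINISH are hypothetical names.  */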
14951 
14952 /* Return EXPR, potentially wrapped in an expression node carrying location LOC,
14953    if !CAN_HAVE_LOCATION_P (expr).
14954 
14955    NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
14956    VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
14957 
14958    Wrapper nodes can be identified using location_wrapper_p.  */
14959 
14960 tree
14961 maybe_wrap_with_location (tree expr, location_t loc)
14962 {
14963   if (expr == NULL)
14964     return NULL;
14965   if (loc == UNKNOWN_LOCATION)
14966     return expr;
14967   if (CAN_HAVE_LOCATION_P (expr))
14968     return expr;
14969   /* We should only be adding wrappers for constants and for decls,
14970      or for some exceptional tree nodes (e.g. BASELINK in the C++ FE).  */
14971   gcc_assert (CONSTANT_CLASS_P (expr)
14972 	      || DECL_P (expr)
14973 	      || EXCEPTIONAL_CLASS_P (expr));
14974 
14975   /* For now, don't add wrappers to exceptional tree nodes, to minimize
14976      any impact of the wrapper nodes.  */
14977   if (EXCEPTIONAL_CLASS_P (expr))
14978     return expr;
14979 
14980   /* If any auto_suppress_location_wrappers are active, don't create
14981      wrappers.  */
14982   if (suppress_location_wrappers > 0)
14983     return expr;
14984 
14985   tree_code code
14986     = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
14987 	|| (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
14988        ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
14989   tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
14990   /* Mark this node as being a wrapper.  */
14991   EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
14992   return wrapper;
14993 }
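
/* Illustrative sketch, not part of the original source (it mirrors the
   test_location_wrappers selftest below): wrapping an integer constant
   attaches a location without modifying the shared constant node:

     tree cst = build_int_cst (integer_type_node, 42);
     tree wrapped = maybe_wrap_with_location (cst, loc);

   WRAPPED is then a NON_LVALUE_EXPR with EXPR_LOCATION_WRAPPER_P set, and
   tree_strip_any_location_wrapper (wrapped) yields CST again; LOC is any
   location other than UNKNOWN_LOCATION.  */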
14994 
14995 int suppress_location_wrappers;
14996 
14997 /* Return the name of combined function FN, for debugging purposes.  */
14998 
14999 const char *
15000 combined_fn_name (combined_fn fn)
15001 {
15002   if (builtin_fn_p (fn))
15003     {
15004       tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
15005       return IDENTIFIER_POINTER (DECL_NAME (fndecl));
15006     }
15007   else
15008     return internal_fn_name (as_internal_fn (fn));
15009 }
15010 
15011 /* Return a bitmap with a bit set corresponding to each argument in
15012    a function call type FNTYPE declared with attribute nonnull,
15013    or null if none of the function's arguments are nonnull.  The caller
15014    must free the bitmap.  */
15015 
15016 bitmap
15017 get_nonnull_args (const_tree fntype)
15018 {
15019   if (fntype == NULL_TREE)
15020     return NULL;
15021 
15022   tree attrs = TYPE_ATTRIBUTES (fntype);
15023   if (!attrs)
15024     return NULL;
15025 
15026   bitmap argmap = NULL;
15027 
15028   /* A function declaration can specify multiple attribute nonnull,
15029      each with zero or more arguments.  The loop below creates a bitmap
15030      representing a union of all the arguments.  An empty (but non-null)
15031      bitmap means that all arguments have been declared nonnull.  */
15032   for ( ; attrs; attrs = TREE_CHAIN (attrs))
15033     {
15034       attrs = lookup_attribute ("nonnull", attrs);
15035       if (!attrs)
15036 	break;
15037 
15038       if (!argmap)
15039 	argmap = BITMAP_ALLOC (NULL);
15040 
15041       if (!TREE_VALUE (attrs))
15042 	{
15043 	  /* Clear the bitmap in case a previous attribute nonnull
15044 	     set it and this one overrides it for all arguments.  */
15045 	  bitmap_clear (argmap);
15046 	  return argmap;
15047 	}
15048 
15049       /* Iterate over the indices of the arguments declared nonnull
15050 	 and set a bit for each.  */
15051       for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
15052 	{
15053 	  unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
15054 	  bitmap_set_bit (argmap, val);
15055 	}
15056     }
15057 
15058   return argmap;
15059 }
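
/* Illustrative sketch, not part of the original source: a caller checking
   whether zero-based argument I of a function with type FNTYPE is declared
   nonnull might do

     bitmap nonnull = get_nonnull_args (fntype);
     bool arg_nonnull = nonnull && (bitmap_empty_p (nonnull)
                                    || bitmap_bit_p (nonnull, i));
     if (nonnull)
       BITMAP_FREE (nonnull);

   FNTYPE and I are hypothetical; note that the bitmap is zero-based while
   the attribute arguments are one-based (hence the "- 1" above).  */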
15060 
15061 /* Returns true if TYPE is a type where it and all of its subobjects
15062    (recursively) are of structure, union, or array type.  */
15063 
15064 static bool
15065 default_is_empty_type (tree type)
15066 {
15067   if (RECORD_OR_UNION_TYPE_P (type))
15068     {
15069       for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
15070 	if (TREE_CODE (field) == FIELD_DECL
15071 	    && !DECL_PADDING_P (field)
15072 	    && !default_is_empty_type (TREE_TYPE (field)))
15073 	  return false;
15074       return true;
15075     }
15076   else if (TREE_CODE (type) == ARRAY_TYPE)
15077     return (integer_minus_onep (array_type_nelts (type))
15078 	    || TYPE_DOMAIN (type) == NULL_TREE
15079 	    || default_is_empty_type (TREE_TYPE (type)));
15080   return false;
15081 }
15082 
15083 /* Implement TARGET_EMPTY_RECORD_P.  Return true if TYPE is an empty type
15084    that shouldn't be passed via stack.  */
15085 
15086 bool
15087 default_is_empty_record (const_tree type)
15088 {
15089   if (!abi_version_at_least (12))
15090     return false;
15091 
15092   if (type == error_mark_node)
15093     return false;
15094 
15095   if (TREE_ADDRESSABLE (type))
15096     return false;
15097 
15098   return default_is_empty_type (TYPE_MAIN_VARIANT (type));
15099 }
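
/* Illustrative note, not part of the original source: with the ABI version
   at 12 or higher, a C++ class such as

     struct empty { };

   satisfies default_is_empty_record, so targets can pass it without
   allocating registers or stack space for it, whereas any record with a
   non-padding, non-empty field does not.  */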
15100 
15101 /* Determine whether TYPE is a structure with a flexible array member,
15102    or a union containing such a structure (possibly recursively).  */
15103 
15104 bool
15105 flexible_array_type_p (const_tree type)
15106 {
15107   tree x, last;
15108   switch (TREE_CODE (type))
15109     {
15110     case RECORD_TYPE:
15111       last = NULL_TREE;
15112       for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
15113 	if (TREE_CODE (x) == FIELD_DECL)
15114 	  last = x;
15115       if (last == NULL_TREE)
15116 	return false;
15117       if (TREE_CODE (TREE_TYPE (last)) == ARRAY_TYPE
15118 	  && TYPE_SIZE (TREE_TYPE (last)) == NULL_TREE
15119 	  && TYPE_DOMAIN (TREE_TYPE (last)) != NULL_TREE
15120 	  && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last))) == NULL_TREE)
15121 	return true;
15122       return false;
15123     case UNION_TYPE:
15124       for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
15125 	{
15126 	  if (TREE_CODE (x) == FIELD_DECL
15127 	      && flexible_array_type_p (TREE_TYPE (x)))
15128 	    return true;
15129 	}
15130       return false;
15131     default:
15132       return false;
15133   }
15134 }
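
/* Illustrative note, not part of the original source: for C code such as

     struct s { int n; char data[]; };
     union u { struct s s; int i; };

   flexible_array_type_p returns true for struct s (its last field is a
   flexible array member) and for union u (it contains such a struct),
   and false for a struct whose trailing array has a known bound.  */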
15135 
15136 /* Like int_size_in_bytes, but handle empty records specially.  */
15137 
15138 HOST_WIDE_INT
15139 arg_int_size_in_bytes (const_tree type)
15140 {
15141   return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
15142 }
15143 
15144 /* Like size_in_bytes, but handle empty records specially.  */
15145 
15146 tree
15147 arg_size_in_bytes (const_tree type)
15148 {
15149   return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
15150 }
15151 
15152 /* Return true if an expression with CODE has to have the same result type as
15153    its first operand.  */
15154 
15155 bool
15156 expr_type_first_operand_type_p (tree_code code)
15157 {
15158   switch (code)
15159     {
15160     case NEGATE_EXPR:
15161     case ABS_EXPR:
15162     case BIT_NOT_EXPR:
15163     case PAREN_EXPR:
15164     case CONJ_EXPR:
15165 
15166     case PLUS_EXPR:
15167     case MINUS_EXPR:
15168     case MULT_EXPR:
15169     case TRUNC_DIV_EXPR:
15170     case CEIL_DIV_EXPR:
15171     case FLOOR_DIV_EXPR:
15172     case ROUND_DIV_EXPR:
15173     case TRUNC_MOD_EXPR:
15174     case CEIL_MOD_EXPR:
15175     case FLOOR_MOD_EXPR:
15176     case ROUND_MOD_EXPR:
15177     case RDIV_EXPR:
15178     case EXACT_DIV_EXPR:
15179     case MIN_EXPR:
15180     case MAX_EXPR:
15181     case BIT_IOR_EXPR:
15182     case BIT_XOR_EXPR:
15183     case BIT_AND_EXPR:
15184 
15185     case LSHIFT_EXPR:
15186     case RSHIFT_EXPR:
15187     case LROTATE_EXPR:
15188     case RROTATE_EXPR:
15189       return true;
15190 
15191     default:
15192       return false;
15193     }
15194 }
15195 
15196 /* Return a typenode for the "standard" C type with a given name.  */
15197 tree
15198 get_typenode_from_name (const char *name)
15199 {
15200   if (name == NULL || *name == '\0')
15201     return NULL_TREE;
15202 
15203   if (strcmp (name, "char") == 0)
15204     return char_type_node;
15205   if (strcmp (name, "unsigned char") == 0)
15206     return unsigned_char_type_node;
15207   if (strcmp (name, "signed char") == 0)
15208     return signed_char_type_node;
15209 
15210   if (strcmp (name, "short int") == 0)
15211     return short_integer_type_node;
15212   if (strcmp (name, "short unsigned int") == 0)
15213     return short_unsigned_type_node;
15214 
15215   if (strcmp (name, "int") == 0)
15216     return integer_type_node;
15217   if (strcmp (name, "unsigned int") == 0)
15218     return unsigned_type_node;
15219 
15220   if (strcmp (name, "long int") == 0)
15221     return long_integer_type_node;
15222   if (strcmp (name, "long unsigned int") == 0)
15223     return long_unsigned_type_node;
15224 
15225   if (strcmp (name, "long long int") == 0)
15226     return long_long_integer_type_node;
15227   if (strcmp (name, "long long unsigned int") == 0)
15228     return long_long_unsigned_type_node;
15229 
15230   gcc_unreachable ();
15231 }
15232 
15233 /* List of pointer types used to declare builtins before we have seen their
15234    real declaration.
15235 
15236    Keep the size up to date in tree.h !  */
15237 const builtin_structptr_type builtin_structptr_types[6] =
15238 {
15239   { fileptr_type_node, ptr_type_node, "FILE" },
15240   { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
15241   { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
15242   { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
15243   { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
15244   { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
15245 };
15246 
15247 /* Return the maximum object size.  */
15248 
15249 tree
15250 max_object_size (void)
15251 {
15252   /* To do: Make this a configurable parameter.  */
15253   return TYPE_MAX_VALUE (ptrdiff_type_node);
15254 }
15255 
15256 /* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
15257    parameter default to false and that weeds out error_mark_node.  */
15258 
15259 bool
15260 verify_type_context (location_t loc, type_context_kind context,
15261 		     const_tree type, bool silent_p)
15262 {
15263   if (type == error_mark_node)
15264     return true;
15265 
15266   gcc_assert (TYPE_P (type));
15267   return (!targetm.verify_type_context
15268 	  || targetm.verify_type_context (loc, context, type, silent_p));
15269 }
15270 
15271 #if CHECKING_P
15272 
15273 namespace selftest {
15274 
15275 /* Selftests for tree.  */
15276 
15277 /* Verify that integer constants are sane.  */
15278 
15279 static void
15280 test_integer_constants ()
15281 {
15282   ASSERT_TRUE (integer_type_node != NULL);
15283   ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
15284 
15285   tree type = integer_type_node;
15286 
15287   tree zero = build_zero_cst (type);
15288   ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
15289   ASSERT_EQ (type, TREE_TYPE (zero));
15290 
15291   tree one = build_int_cst (type, 1);
15292   ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
15293   ASSERT_EQ (type, TREE_TYPE (zero));
15294 }
15295 
15296 /* Verify identifiers.  */
15297 
15298 static void
15299 test_identifiers ()
15300 {
15301   tree identifier = get_identifier ("foo");
15302   ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
15303   ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
15304 }
15305 
15306 /* Verify LABEL_DECL.  */
15307 
15308 static void
15309 test_labels ()
15310 {
15311   tree identifier = get_identifier ("err");
15312   tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
15313 				identifier, void_type_node);
15314   ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
15315   ASSERT_FALSE (FORCED_LABEL (label_decl));
15316 }
15317 
15318 /* Return a new VECTOR_CST node whose type is TYPE and whose values
15319    are given by VALS.  */
15320 
15321 static tree
15322 build_vector (tree type, vec<tree> vals MEM_STAT_DECL)
15323 {
15324   gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
15325   tree_vector_builder builder (type, vals.length (), 1);
15326   builder.splice (vals);
15327   return builder.build ();
15328 }
15329 
15330 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED.  */
15331 
15332 static void
15333 check_vector_cst (vec<tree> expected, tree actual)
15334 {
15335   ASSERT_KNOWN_EQ (expected.length (),
15336 		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
15337   for (unsigned int i = 0; i < expected.length (); ++i)
15338     ASSERT_EQ (wi::to_wide (expected[i]),
15339 	       wi::to_wide (vector_cst_elt (actual, i)));
15340 }
15341 
15342 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
15343    and that its elements match EXPECTED.  */
15344 
15345 static void
15346 check_vector_cst_duplicate (vec<tree> expected, tree actual,
15347 			    unsigned int npatterns)
15348 {
15349   ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15350   ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
15351   ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
15352   ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
15353   ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15354   check_vector_cst (expected, actual);
15355 }
15356 
15357 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
15358    and NPATTERNS background elements, and that its elements match
15359    EXPECTED.  */
15360 
15361 static void
15362 check_vector_cst_fill (vec<tree> expected, tree actual,
15363 		       unsigned int npatterns)
15364 {
15365   ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15366   ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
15367   ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
15368   ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15369   ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15370   check_vector_cst (expected, actual);
15371 }
15372 
15373 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
15374    and that its elements match EXPECTED.  */
15375 
15376 static void
15377 check_vector_cst_stepped (vec<tree> expected, tree actual,
15378 			  unsigned int npatterns)
15379 {
15380   ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15381   ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
15382   ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
15383   ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15384   ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
15385   check_vector_cst (expected, actual);
15386 }
15387 
15388 /* Test the creation of VECTOR_CSTs.  */
15389 
15390 static void
15391 test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
15392 {
15393   auto_vec<tree, 8> elements (8);
15394   elements.quick_grow (8);
15395   tree element_type = build_nonstandard_integer_type (16, true);
15396   tree vector_type = build_vector_type (element_type, 8);
15397 
15398   /* Test a simple linear series with a base of 0 and a step of 1:
15399      { 0, 1, 2, 3, 4, 5, 6, 7 }.  */
15400   for (unsigned int i = 0; i < 8; ++i)
15401     elements[i] = build_int_cst (element_type, i);
15402   tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
15403   check_vector_cst_stepped (elements, vector, 1);
15404 
15405   /* Try the same with the first element replaced by 100:
15406      { 100, 1, 2, 3, 4, 5, 6, 7 }.  */
15407   elements[0] = build_int_cst (element_type, 100);
15408   vector = build_vector (vector_type, elements PASS_MEM_STAT);
15409   check_vector_cst_stepped (elements, vector, 1);
15410 
15411   /* Try a series that wraps around.
15412      { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }.  */
15413   for (unsigned int i = 1; i < 8; ++i)
15414     elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
15415   vector = build_vector (vector_type, elements PASS_MEM_STAT);
15416   check_vector_cst_stepped (elements, vector, 1);
15417 
15418   /* Try a downward series:
15419      { 100, 79, 78, 77, 76, 75, 74, 73 }.  */
15420   for (unsigned int i = 1; i < 8; ++i)
15421     elements[i] = build_int_cst (element_type, 80 - i);
15422   vector = build_vector (vector_type, elements PASS_MEM_STAT);
15423   check_vector_cst_stepped (elements, vector, 1);
15424 
15425   /* Try two interleaved series with different bases and steps:
15426      { 100, 53, 66, 206, 62, 212, 58, 218 }.  */
15427   elements[1] = build_int_cst (element_type, 53);
15428   for (unsigned int i = 2; i < 8; i += 2)
15429     {
15430       elements[i] = build_int_cst (element_type, 70 - i * 2);
15431       elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
15432     }
15433   vector = build_vector (vector_type, elements PASS_MEM_STAT);
15434   check_vector_cst_stepped (elements, vector, 2);
15435 
15436   /* Try a duplicated value:
15437      { 100, 100, 100, 100, 100, 100, 100, 100 }.  */
15438   for (unsigned int i = 1; i < 8; ++i)
15439     elements[i] = elements[0];
15440   vector = build_vector (vector_type, elements PASS_MEM_STAT);
15441   check_vector_cst_duplicate (elements, vector, 1);
15442 
15443   /* Try an interleaved duplicated value:
15444      { 100, 55, 100, 55, 100, 55, 100, 55 }.  */
15445   elements[1] = build_int_cst (element_type, 55);
15446   for (unsigned int i = 2; i < 8; ++i)
15447     elements[i] = elements[i - 2];
15448   vector = build_vector (vector_type, elements PASS_MEM_STAT);
15449   check_vector_cst_duplicate (elements, vector, 2);
15450 
15451   /* Try a duplicated value with 2 exceptions
15452      { 41, 97, 100, 55, 100, 55, 100, 55 }.  */
15453   elements[0] = build_int_cst (element_type, 41);
15454   elements[1] = build_int_cst (element_type, 97);
15455   vector = build_vector (vector_type, elements PASS_MEM_STAT);
15456   check_vector_cst_fill (elements, vector, 2);
15457 
15458   /* Try with and without a step
15459      { 41, 97, 100, 21, 100, 35, 100, 49 }.  */
15460   for (unsigned int i = 3; i < 8; i += 2)
15461     elements[i] = build_int_cst (element_type, i * 7);
15462   vector = build_vector (vector_type, elements PASS_MEM_STAT);
15463   check_vector_cst_stepped (elements, vector, 2);
15464 
15465   /* Try a fully-general constant:
15466      { 41, 97, 100, 21, 100, 9990, 100, 49 }.  */
15467   elements[5] = build_int_cst (element_type, 9990);
15468   vector = build_vector (vector_type, elements PASS_MEM_STAT);
15469   check_vector_cst_fill (elements, vector, 4);
15470 }
15471 
15472 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
15473    Helper function for test_location_wrappers, to deal with STRIP_NOPS
15474    modifying its argument in-place.  */
15475 
15476 static void
15477 check_strip_nops (tree node, tree expected)
15478 {
15479   STRIP_NOPS (node);
15480   ASSERT_EQ (expected, node);
15481 }
15482 
15483 /* Verify location wrappers.  */
15484 
15485 static void
15486 test_location_wrappers ()
15487 {
15488   location_t loc = BUILTINS_LOCATION;
15489 
15490   ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));
15491 
15492   /* Wrapping a constant.  */
15493   tree int_cst = build_int_cst (integer_type_node, 42);
15494   ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
15495   ASSERT_FALSE (location_wrapper_p (int_cst));
15496 
15497   tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
15498   ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
15499   ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
15500   ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));
15501 
15502   /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION.  */
15503   ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));
15504 
15505   /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P.  */
15506   tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
15507   ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
15508   ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));
15509 
15510   /* Wrapping a STRING_CST.  */
15511   tree string_cst = build_string (4, "foo");
15512   ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
15513   ASSERT_FALSE (location_wrapper_p (string_cst));
15514 
15515   tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
15516   ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
15517   ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
15518   ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
15519   ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));
15520 
15521 
15522   /* Wrapping a variable.  */
15523   tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
15524 			     get_identifier ("some_int_var"),
15525 			     integer_type_node);
15526   ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
15527   ASSERT_FALSE (location_wrapper_p (int_var));
15528 
15529   tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
15530   ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
15531   ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
15532   ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));
15533 
15534   /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
15535      wrapper.  */
15536   tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
15537   ASSERT_FALSE (location_wrapper_p (r_cast));
15538   ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));
15539 
15540   /* Verify that STRIP_NOPS removes wrappers.  */
15541   check_strip_nops (wrapped_int_cst, int_cst);
15542   check_strip_nops (wrapped_string_cst, string_cst);
15543   check_strip_nops (wrapped_int_var, int_var);
15544 }
15545 
15546 /* Test various tree predicates.  Verify that location wrappers don't
15547    affect the results.  */
15548 
15549 static void
15550 test_predicates ()
15551 {
15552   /* Build various constants and wrappers around them.  */
15553 
15554   location_t loc = BUILTINS_LOCATION;
15555 
15556   tree i_0 = build_int_cst (integer_type_node, 0);
15557   tree wr_i_0 = maybe_wrap_with_location (i_0, loc);
15558 
15559   tree i_1 = build_int_cst (integer_type_node, 1);
15560   tree wr_i_1 = maybe_wrap_with_location (i_1, loc);
15561 
15562   tree i_m1 = build_int_cst (integer_type_node, -1);
15563   tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);
15564 
15565   tree f_0 = build_real_from_int_cst (float_type_node, i_0);
15566   tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
15567   tree f_1 = build_real_from_int_cst (float_type_node, i_1);
15568   tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
15569   tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
15570   tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);
15571 
15572   tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
15573   tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
15574   tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);
15575 
15576   tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
15577   tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
15578   tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);
15579 
15580   /* TODO: vector constants.  */
15581 
15582   /* Test integer_onep.  */
15583   ASSERT_FALSE (integer_onep (i_0));
15584   ASSERT_FALSE (integer_onep (wr_i_0));
15585   ASSERT_TRUE (integer_onep (i_1));
15586   ASSERT_TRUE (integer_onep (wr_i_1));
15587   ASSERT_FALSE (integer_onep (i_m1));
15588   ASSERT_FALSE (integer_onep (wr_i_m1));
15589   ASSERT_FALSE (integer_onep (f_0));
15590   ASSERT_FALSE (integer_onep (wr_f_0));
15591   ASSERT_FALSE (integer_onep (f_1));
15592   ASSERT_FALSE (integer_onep (wr_f_1));
15593   ASSERT_FALSE (integer_onep (f_m1));
15594   ASSERT_FALSE (integer_onep (wr_f_m1));
15595   ASSERT_FALSE (integer_onep (c_i_0));
15596   ASSERT_TRUE (integer_onep (c_i_1));
15597   ASSERT_FALSE (integer_onep (c_i_m1));
15598   ASSERT_FALSE (integer_onep (c_f_0));
15599   ASSERT_FALSE (integer_onep (c_f_1));
15600   ASSERT_FALSE (integer_onep (c_f_m1));
15601 
15602   /* Test integer_zerop.  */
15603   ASSERT_TRUE (integer_zerop (i_0));
15604   ASSERT_TRUE (integer_zerop (wr_i_0));
15605   ASSERT_FALSE (integer_zerop (i_1));
15606   ASSERT_FALSE (integer_zerop (wr_i_1));
15607   ASSERT_FALSE (integer_zerop (i_m1));
15608   ASSERT_FALSE (integer_zerop (wr_i_m1));
15609   ASSERT_FALSE (integer_zerop (f_0));
15610   ASSERT_FALSE (integer_zerop (wr_f_0));
15611   ASSERT_FALSE (integer_zerop (f_1));
15612   ASSERT_FALSE (integer_zerop (wr_f_1));
15613   ASSERT_FALSE (integer_zerop (f_m1));
15614   ASSERT_FALSE (integer_zerop (wr_f_m1));
15615   ASSERT_TRUE (integer_zerop (c_i_0));
15616   ASSERT_FALSE (integer_zerop (c_i_1));
15617   ASSERT_FALSE (integer_zerop (c_i_m1));
15618   ASSERT_FALSE (integer_zerop (c_f_0));
15619   ASSERT_FALSE (integer_zerop (c_f_1));
15620   ASSERT_FALSE (integer_zerop (c_f_m1));
15621 
15622   /* Test integer_all_onesp.  */
15623   ASSERT_FALSE (integer_all_onesp (i_0));
15624   ASSERT_FALSE (integer_all_onesp (wr_i_0));
15625   ASSERT_FALSE (integer_all_onesp (i_1));
15626   ASSERT_FALSE (integer_all_onesp (wr_i_1));
15627   ASSERT_TRUE (integer_all_onesp (i_m1));
15628   ASSERT_TRUE (integer_all_onesp (wr_i_m1));
15629   ASSERT_FALSE (integer_all_onesp (f_0));
15630   ASSERT_FALSE (integer_all_onesp (wr_f_0));
15631   ASSERT_FALSE (integer_all_onesp (f_1));
15632   ASSERT_FALSE (integer_all_onesp (wr_f_1));
15633   ASSERT_FALSE (integer_all_onesp (f_m1));
15634   ASSERT_FALSE (integer_all_onesp (wr_f_m1));
15635   ASSERT_FALSE (integer_all_onesp (c_i_0));
15636   ASSERT_FALSE (integer_all_onesp (c_i_1));
15637   ASSERT_FALSE (integer_all_onesp (c_i_m1));
15638   ASSERT_FALSE (integer_all_onesp (c_f_0));
15639   ASSERT_FALSE (integer_all_onesp (c_f_1));
15640   ASSERT_FALSE (integer_all_onesp (c_f_m1));
15641 
15642   /* Test integer_minus_onep.  */
15643   ASSERT_FALSE (integer_minus_onep (i_0));
15644   ASSERT_FALSE (integer_minus_onep (wr_i_0));
15645   ASSERT_FALSE (integer_minus_onep (i_1));
15646   ASSERT_FALSE (integer_minus_onep (wr_i_1));
15647   ASSERT_TRUE (integer_minus_onep (i_m1));
15648   ASSERT_TRUE (integer_minus_onep (wr_i_m1));
15649   ASSERT_FALSE (integer_minus_onep (f_0));
15650   ASSERT_FALSE (integer_minus_onep (wr_f_0));
15651   ASSERT_FALSE (integer_minus_onep (f_1));
15652   ASSERT_FALSE (integer_minus_onep (wr_f_1));
15653   ASSERT_FALSE (integer_minus_onep (f_m1));
15654   ASSERT_FALSE (integer_minus_onep (wr_f_m1));
15655   ASSERT_FALSE (integer_minus_onep (c_i_0));
15656   ASSERT_FALSE (integer_minus_onep (c_i_1));
15657   ASSERT_TRUE (integer_minus_onep (c_i_m1));
15658   ASSERT_FALSE (integer_minus_onep (c_f_0));
15659   ASSERT_FALSE (integer_minus_onep (c_f_1));
15660   ASSERT_FALSE (integer_minus_onep (c_f_m1));
15661 
15662   /* Test integer_each_onep.  */
15663   ASSERT_FALSE (integer_each_onep (i_0));
15664   ASSERT_FALSE (integer_each_onep (wr_i_0));
15665   ASSERT_TRUE (integer_each_onep (i_1));
15666   ASSERT_TRUE (integer_each_onep (wr_i_1));
15667   ASSERT_FALSE (integer_each_onep (i_m1));
15668   ASSERT_FALSE (integer_each_onep (wr_i_m1));
15669   ASSERT_FALSE (integer_each_onep (f_0));
15670   ASSERT_FALSE (integer_each_onep (wr_f_0));
15671   ASSERT_FALSE (integer_each_onep (f_1));
15672   ASSERT_FALSE (integer_each_onep (wr_f_1));
15673   ASSERT_FALSE (integer_each_onep (f_m1));
15674   ASSERT_FALSE (integer_each_onep (wr_f_m1));
15675   ASSERT_FALSE (integer_each_onep (c_i_0));
15676   ASSERT_FALSE (integer_each_onep (c_i_1));
15677   ASSERT_FALSE (integer_each_onep (c_i_m1));
15678   ASSERT_FALSE (integer_each_onep (c_f_0));
15679   ASSERT_FALSE (integer_each_onep (c_f_1));
15680   ASSERT_FALSE (integer_each_onep (c_f_m1));
15681 
15682   /* Test integer_truep.  */
15683   ASSERT_FALSE (integer_truep (i_0));
15684   ASSERT_FALSE (integer_truep (wr_i_0));
15685   ASSERT_TRUE (integer_truep (i_1));
15686   ASSERT_TRUE (integer_truep (wr_i_1));
15687   ASSERT_FALSE (integer_truep (i_m1));
15688   ASSERT_FALSE (integer_truep (wr_i_m1));
15689   ASSERT_FALSE (integer_truep (f_0));
15690   ASSERT_FALSE (integer_truep (wr_f_0));
15691   ASSERT_FALSE (integer_truep (f_1));
15692   ASSERT_FALSE (integer_truep (wr_f_1));
15693   ASSERT_FALSE (integer_truep (f_m1));
15694   ASSERT_FALSE (integer_truep (wr_f_m1));
15695   ASSERT_FALSE (integer_truep (c_i_0));
15696   ASSERT_TRUE (integer_truep (c_i_1));
15697   ASSERT_FALSE (integer_truep (c_i_m1));
15698   ASSERT_FALSE (integer_truep (c_f_0));
15699   ASSERT_FALSE (integer_truep (c_f_1));
15700   ASSERT_FALSE (integer_truep (c_f_m1));
15701 
15702   /* Test integer_nonzerop.  */
15703   ASSERT_FALSE (integer_nonzerop (i_0));
15704   ASSERT_FALSE (integer_nonzerop (wr_i_0));
15705   ASSERT_TRUE (integer_nonzerop (i_1));
15706   ASSERT_TRUE (integer_nonzerop (wr_i_1));
15707   ASSERT_TRUE (integer_nonzerop (i_m1));
15708   ASSERT_TRUE (integer_nonzerop (wr_i_m1));
15709   ASSERT_FALSE (integer_nonzerop (f_0));
15710   ASSERT_FALSE (integer_nonzerop (wr_f_0));
15711   ASSERT_FALSE (integer_nonzerop (f_1));
15712   ASSERT_FALSE (integer_nonzerop (wr_f_1));
15713   ASSERT_FALSE (integer_nonzerop (f_m1));
15714   ASSERT_FALSE (integer_nonzerop (wr_f_m1));
15715   ASSERT_FALSE (integer_nonzerop (c_i_0));
15716   ASSERT_TRUE (integer_nonzerop (c_i_1));
15717   ASSERT_TRUE (integer_nonzerop (c_i_m1));
15718   ASSERT_FALSE (integer_nonzerop (c_f_0));
15719   ASSERT_FALSE (integer_nonzerop (c_f_1));
15720   ASSERT_FALSE (integer_nonzerop (c_f_m1));
15721 
15722   /* Test real_zerop.  */
15723   ASSERT_FALSE (real_zerop (i_0));
15724   ASSERT_FALSE (real_zerop (wr_i_0));
15725   ASSERT_FALSE (real_zerop (i_1));
15726   ASSERT_FALSE (real_zerop (wr_i_1));
15727   ASSERT_FALSE (real_zerop (i_m1));
15728   ASSERT_FALSE (real_zerop (wr_i_m1));
15729   ASSERT_TRUE (real_zerop (f_0));
15730   ASSERT_TRUE (real_zerop (wr_f_0));
15731   ASSERT_FALSE (real_zerop (f_1));
15732   ASSERT_FALSE (real_zerop (wr_f_1));
15733   ASSERT_FALSE (real_zerop (f_m1));
15734   ASSERT_FALSE (real_zerop (wr_f_m1));
15735   ASSERT_FALSE (real_zerop (c_i_0));
15736   ASSERT_FALSE (real_zerop (c_i_1));
15737   ASSERT_FALSE (real_zerop (c_i_m1));
15738   ASSERT_TRUE (real_zerop (c_f_0));
15739   ASSERT_FALSE (real_zerop (c_f_1));
15740   ASSERT_FALSE (real_zerop (c_f_m1));
15741 
15742   /* Test real_onep.  */
15743   ASSERT_FALSE (real_onep (i_0));
15744   ASSERT_FALSE (real_onep (wr_i_0));
15745   ASSERT_FALSE (real_onep (i_1));
15746   ASSERT_FALSE (real_onep (wr_i_1));
15747   ASSERT_FALSE (real_onep (i_m1));
15748   ASSERT_FALSE (real_onep (wr_i_m1));
15749   ASSERT_FALSE (real_onep (f_0));
15750   ASSERT_FALSE (real_onep (wr_f_0));
15751   ASSERT_TRUE (real_onep (f_1));
15752   ASSERT_TRUE (real_onep (wr_f_1));
15753   ASSERT_FALSE (real_onep (f_m1));
15754   ASSERT_FALSE (real_onep (wr_f_m1));
15755   ASSERT_FALSE (real_onep (c_i_0));
15756   ASSERT_FALSE (real_onep (c_i_1));
15757   ASSERT_FALSE (real_onep (c_i_m1));
15758   ASSERT_FALSE (real_onep (c_f_0));
15759   ASSERT_TRUE (real_onep (c_f_1));
15760   ASSERT_FALSE (real_onep (c_f_m1));
15761 
15762   /* Test real_minus_onep.  */
15763   ASSERT_FALSE (real_minus_onep (i_0));
15764   ASSERT_FALSE (real_minus_onep (wr_i_0));
15765   ASSERT_FALSE (real_minus_onep (i_1));
15766   ASSERT_FALSE (real_minus_onep (wr_i_1));
15767   ASSERT_FALSE (real_minus_onep (i_m1));
15768   ASSERT_FALSE (real_minus_onep (wr_i_m1));
15769   ASSERT_FALSE (real_minus_onep (f_0));
15770   ASSERT_FALSE (real_minus_onep (wr_f_0));
15771   ASSERT_FALSE (real_minus_onep (f_1));
15772   ASSERT_FALSE (real_minus_onep (wr_f_1));
15773   ASSERT_TRUE (real_minus_onep (f_m1));
15774   ASSERT_TRUE (real_minus_onep (wr_f_m1));
15775   ASSERT_FALSE (real_minus_onep (c_i_0));
15776   ASSERT_FALSE (real_minus_onep (c_i_1));
15777   ASSERT_FALSE (real_minus_onep (c_i_m1));
15778   ASSERT_FALSE (real_minus_onep (c_f_0));
15779   ASSERT_FALSE (real_minus_onep (c_f_1));
15780   ASSERT_TRUE (real_minus_onep (c_f_m1));
15781 
15782   /* Test zerop.  */
15783   ASSERT_TRUE (zerop (i_0));
15784   ASSERT_TRUE (zerop (wr_i_0));
15785   ASSERT_FALSE (zerop (i_1));
15786   ASSERT_FALSE (zerop (wr_i_1));
15787   ASSERT_FALSE (zerop (i_m1));
15788   ASSERT_FALSE (zerop (wr_i_m1));
15789   ASSERT_TRUE (zerop (f_0));
15790   ASSERT_TRUE (zerop (wr_f_0));
15791   ASSERT_FALSE (zerop (f_1));
15792   ASSERT_FALSE (zerop (wr_f_1));
15793   ASSERT_FALSE (zerop (f_m1));
15794   ASSERT_FALSE (zerop (wr_f_m1));
15795   ASSERT_TRUE (zerop (c_i_0));
15796   ASSERT_FALSE (zerop (c_i_1));
15797   ASSERT_FALSE (zerop (c_i_m1));
15798   ASSERT_TRUE (zerop (c_f_0));
15799   ASSERT_FALSE (zerop (c_f_1));
15800   ASSERT_FALSE (zerop (c_f_m1));
15801 
15802   /* Test tree_expr_nonnegative_p.  */
15803   ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
15804   ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
15805   ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
15806   ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
15807   ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
15808   ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
15809   ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
15810   ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
15811   ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
15812   ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
15813   ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
15814   ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
15815   ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
15816   ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
15817   ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
15818   ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
15819   ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
15820   ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));
15821 
15822   /* Test tree_expr_nonzero_p.  */
15823   ASSERT_FALSE (tree_expr_nonzero_p (i_0));
15824   ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
15825   ASSERT_TRUE (tree_expr_nonzero_p (i_1));
15826   ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
15827   ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
15828   ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));
15829 
15830   /* Test integer_valued_real_p.  */
15831   ASSERT_FALSE (integer_valued_real_p (i_0));
15832   ASSERT_TRUE (integer_valued_real_p (f_0));
15833   ASSERT_TRUE (integer_valued_real_p (wr_f_0));
15834   ASSERT_TRUE (integer_valued_real_p (f_1));
15835   ASSERT_TRUE (integer_valued_real_p (wr_f_1));
15836 
15837   /* Test integer_pow2p.  */
15838   ASSERT_FALSE (integer_pow2p (i_0));
15839   ASSERT_TRUE (integer_pow2p (i_1));
15840   ASSERT_TRUE (integer_pow2p (wr_i_1));
15841 
15842   /* Test uniform_integer_cst_p.  */
15843   ASSERT_TRUE (uniform_integer_cst_p (i_0));
15844   ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
15845   ASSERT_TRUE (uniform_integer_cst_p (i_1));
15846   ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
15847   ASSERT_TRUE (uniform_integer_cst_p (i_m1));
15848   ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
15849   ASSERT_FALSE (uniform_integer_cst_p (f_0));
15850   ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
15851   ASSERT_FALSE (uniform_integer_cst_p (f_1));
15852   ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
15853   ASSERT_FALSE (uniform_integer_cst_p (f_m1));
15854   ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
15855   ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
15856   ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
15857   ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
15858   ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
15859   ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
15860   ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
15861 }
15862 
15863 /* Check that string escaping works correctly.  */
15864 
15865 static void
15866 test_escaped_strings (void)
15867 {
15868   int saved_cutoff;
15869   escaped_string msg;
15870 
15871   msg.escape (NULL);
15872   /* ASSERT_STREQ does not accept NULL as a valid test
15873      result, so we have to use ASSERT_EQ instead.  */
15874   ASSERT_EQ (NULL, (const char *) msg);
15875 
15876   msg.escape ("");
15877   ASSERT_STREQ ("", (const char *) msg);
15878 
15879   msg.escape ("foobar");
15880   ASSERT_STREQ ("foobar", (const char *) msg);
15881 
15882   /* Ensure that we have -fmessage-length set to 0.  */
15883   saved_cutoff = pp_line_cutoff (global_dc->printer);
15884   pp_line_cutoff (global_dc->printer) = 0;
15885 
15886   msg.escape ("foo\nbar");
15887   ASSERT_STREQ ("foo\\nbar", (const char *) msg);
15888 
15889   msg.escape ("\a\b\f\n\r\t\v");
15890   ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);
15891 
15892   /* Now repeat the tests with -fmessage-length set to 5.  */
15893   pp_line_cutoff (global_dc->printer) = 5;
15894 
15895   /* Note that the newline is not translated into an escape.  */
15896   msg.escape ("foo\nbar");
15897   ASSERT_STREQ ("foo\nbar", (const char *) msg);
15898 
15899   msg.escape ("\a\b\f\n\r\t\v");
15900   ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);
15901 
15902   /* Restore the original message length setting.  */
15903   pp_line_cutoff (global_dc->printer) = saved_cutoff;
15904 }
15905 
15906 /* Run all of the selftests within this file.  */
15907 
15908 void
15909 tree_c_tests ()
15910 {
15911   test_integer_constants ();
15912   test_identifiers ();
15913   test_labels ();
15914   test_vector_cst_patterns ();
15915   test_location_wrappers ();
15916   test_predicates ();
15917   test_escaped_strings ();
15918 }
15919 
15920 } // namespace selftest
15921 
15922 #endif /* CHECKING_P */
15923 
15924 #include "gt-tree.h"
15925